author     ilezhankin <ilezhankin@yandex-team.ru>          2022-02-10 16:45:55 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>     2022-02-10 16:45:55 +0300
commit     1d125034f06575234f83f24f08677955133f140e (patch)
tree       ec05fbbd61dc118d5de37f206ab978cff58774bd
parent     3a7a498715ef1b66f5054455421b845e45e3a653 (diff)
download   ydb-1d125034f06575234f83f24f08677955133f140e.tar.gz
Restoring authorship annotation for <ilezhankin@yandex-team.ru>. Commit 1 of 2.
-rw-r--r--  contrib/libs/pdqsort/license.txt | 32
-rw-r--r--  contrib/libs/pdqsort/pdqsort.h | 1088
-rw-r--r--  contrib/libs/pdqsort/ya.make | 22
-rw-r--r--  contrib/libs/ya.make | 4
-rw-r--r--  contrib/python/Pygments/py2/LICENSE | 48
-rw-r--r--  contrib/python/Pygments/py2/pygments/__init__.py | 152
-rw-r--r--  contrib/python/Pygments/py2/pygments/cmdline.py | 1022
-rw-r--r--  contrib/python/Pygments/py2/pygments/console.py | 114
-rw-r--r--  contrib/python/Pygments/py2/pygments/filter.py | 138
-rw-r--r--  contrib/python/Pygments/py2/pygments/filters/__init__.py | 698
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatter.py | 186
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/__init__.py | 232
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/_mapping.py | 166
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/bbcode.py | 216
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/html.py | 1672
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/img.py | 1104
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/irc.py | 288
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/latex.py | 946
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/other.py | 320
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/rtf.py | 288
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/svg.py | 304
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/terminal.py | 224
-rw-r--r--  contrib/python/Pygments/py2/pygments/formatters/terminal256.py | 552
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexer.py | 1722
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/__init__.py | 536
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py | 3280
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py | 434
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py | 124
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py | 16
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_lasso_builtins.py | 10638
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py | 452
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_mapping.py | 888
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py | 2340
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_openedge_builtins.py | 5092
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_php_builtins.py | 9480
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_postgres_builtins.py | 1240
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_scilab_builtins.py | 6186
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_sourcemod_builtins.py | 2324
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_stan_builtins.py | 814
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/_vim_builtins.py | 3876
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/actionscript.py | 474
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/agile.py | 46
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/algebra.py | 434
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ambient.py | 150
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/apl.py | 192
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/archetype.py | 634
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/asm.py | 726
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/automation.py | 742
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/basic.py | 994
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/business.py | 1010
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/c_cpp.py | 438
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/c_like.py | 830
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/chapel.py | 178
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/compiled.py | 64
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/configs.py | 1552
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/console.py | 226
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/csound.py | 352
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/css.py | 724
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/d.py | 500
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/dalvik.py | 248
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/data.py | 1042
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/diff.py | 208
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/dotnet.py | 1298
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/dsls.py | 1248
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/dylan.py | 554
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ecl.py | 248
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/eiffel.py | 128
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/elm.py | 222
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/erlang.py | 1004
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/esoteric.py | 344
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ezhil.py | 116
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/factor.py | 686
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/fantom.py | 498
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/felix.py | 542
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/fortran.py | 396
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/foxpro.py | 854
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/functional.py | 40
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/go.py | 200
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/grammar_notation.py | 254
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/graph.py | 142
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/graphics.py | 1056
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/haskell.py | 1640
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/haxe.py | 1858
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/hdl.py | 598
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/hexdump.py | 170
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/html.py | 1148
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/idl.py | 510
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/igor.py | 156
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/inferno.py | 188
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/installers.py | 642
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/int_fiction.py | 2682
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/iolang.py | 122
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/j.py | 280
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/javascript.py | 2704
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/julia.py | 240
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/jvm.py | 3060
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/lisp.py | 3148
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/make.py | 396
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/markup.py | 992
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/math.py | 40
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/matlab.py | 1286
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ml.py | 1524
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/modeling.py | 682
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/modula2.py | 3118
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/nimrod.py | 294
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/nit.py | 126
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/nix.py | 270
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/oberon.py | 202
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/objective.py | 948
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ooc.py | 168
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/other.py | 78
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/parasail.py | 154
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/parsers.py | 1656
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/pascal.py | 1250
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/pawn.py | 350
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/perl.py | 1186
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/php.py | 468
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/praat.py | 484
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/prolog.py | 554
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/python.py | 1314
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/qvt.py | 206
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/r.py | 368
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/rdf.py | 442
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/rebol.py | 846
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/resource.py | 162
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/roboconf.py | 162
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/robotframework.py | 1114
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/ruby.py | 1006
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/rust.py | 294
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/scripting.py | 2334
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/shell.py | 1498
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/smalltalk.py | 388
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/snobol.py | 164
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/special.py | 200
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/sql.py | 976
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/supercollider.py | 168
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/tcl.py | 288
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/templates.py | 4266
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/testing.py | 408
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/text.py | 48
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/textedit.py | 330
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/textfmts.py | 566
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/theorem.py | 866
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/trafficscript.py | 106
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/urbi.py | 264
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/web.py | 46
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/webmisc.py | 1924
-rw-r--r--  contrib/python/Pygments/py2/pygments/lexers/x10.py | 136
-rw-r--r--  contrib/python/Pygments/py2/pygments/modeline.py | 76
-rw-r--r--  contrib/python/Pygments/py2/pygments/plugin.py | 112
-rw-r--r--  contrib/python/Pygments/py2/pygments/regexopt.py | 180
-rw-r--r--  contrib/python/Pygments/py2/pygments/scanner.py | 204
-rw-r--r--  contrib/python/Pygments/py2/pygments/sphinxext.py | 312
-rw-r--r--  contrib/python/Pygments/py2/pygments/style.py | 230
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/__init__.py | 152
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/algol.py | 124
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/algol_nu.py | 124
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/arduino.py | 180
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/autumn.py | 128
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/borland.py | 100
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/bw.py | 96
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/colorful.py | 160
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/default.py | 144
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/emacs.py | 142
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/friendly.py | 142
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/fruity.py | 82
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/igor.py | 56
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/lovelace.py | 184
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/manni.py | 148
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/monokai.py | 208
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/murphy.py | 158
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/native.py | 128
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/paraiso_dark.py | 248
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/paraiso_light.py | 248
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/pastie.py | 148
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/perldoc.py | 134
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/rrt.py | 64
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/tango.py | 280
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/trac.py | 124
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/vim.py | 124
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/vs.py | 74
-rw-r--r--  contrib/python/Pygments/py2/pygments/styles/xcode.py | 100
-rw-r--r--  contrib/python/Pygments/py2/pygments/token.py | 370
-rw-r--r--  contrib/python/Pygments/py2/pygments/unistring.py | 360
-rw-r--r--  contrib/python/Pygments/py2/pygments/util.py | 766
-rw-r--r--  contrib/python/Pygments/py2/ya.make | 372
-rw-r--r--  contrib/python/Pygments/py3/LICENSE | 48
-rw-r--r--  contrib/python/Pygments/py3/pygments/__init__.py | 142
-rw-r--r--  contrib/python/Pygments/py3/pygments/cmdline.py | 690
-rw-r--r--  contrib/python/Pygments/py3/pygments/console.py | 112
-rw-r--r--  contrib/python/Pygments/py3/pygments/filter.py | 126
-rw-r--r--  contrib/python/Pygments/py3/pygments/filters/__init__.py | 658
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatter.py | 178
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/__init__.py | 222
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/_mapping.py | 160
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/bbcode.py | 214
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/html.py | 1410
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/img.py | 1018
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/irc.py | 280
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/latex.py | 882
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/other.py | 286
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/rtf.py | 234
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/svg.py | 296
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/terminal.py | 206
-rw-r--r--  contrib/python/Pygments/py3/pygments/formatters/terminal256.py | 530
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexer.py | 1638
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/__init__.py | 518
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py | 3278
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py | 432
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py | 108
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py | 14
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py | 10636
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py | 446
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_mapping.py | 778
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py | 2338
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py | 4322
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py | 9470
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py | 1208
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py | 6182
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py | 2318
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py | 812
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py | 3874
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/actionscript.py | 432
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/agile.py | 44
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/algebra.py | 428
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ambient.py | 136
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/apl.py | 162
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/archetype.py | 586
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/asm.py | 652
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/automation.py | 738
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/basic.py | 914
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/business.py | 980
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/c_cpp.py | 346
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/c_like.py | 798
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/chapel.py | 148
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/compiled.py | 62
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/configs.py | 1412
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/console.py | 218
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/csound.py | 342
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/css.py | 664
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/d.py | 468
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dalvik.py | 228
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/data.py | 834
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/diff.py | 172
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dotnet.py | 1152
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dsls.py | 1180
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/dylan.py | 522
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ecl.py | 236
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/eiffel.py | 110
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/elm.py | 204
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/erlang.py | 920
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/esoteric.py | 326
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ezhil.py | 96
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/factor.py | 528
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/fantom.py | 440
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/felix.py | 516
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/fortran.py | 370
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/foxpro.py | 850
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/functional.py | 38
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/go.py | 184
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py | 242
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/graph.py | 134
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/graphics.py | 1008
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/haskell.py | 1528
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/haxe.py | 1836
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/hdl.py | 496
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/hexdump.py | 140
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/html.py | 1108
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/idl.py | 506
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/igor.py | 154
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/inferno.py | 186
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/installers.py | 596
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/int_fiction.py | 2654
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/iolang.py | 118
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/j.py | 276
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/javascript.py | 2432
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/julia.py | 172
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/jvm.py | 2596
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/lisp.py | 3100
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/make.py | 372
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/markup.py | 960
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/math.py | 38
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/matlab.py | 1126
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ml.py | 1508
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/modeling.py | 678
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/modula2.py | 3102
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/nimrod.py | 290
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/nit.py | 124
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/nix.py | 268
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/oberon.py | 200
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/objective.py | 938
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ooc.py | 166
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/other.py | 76
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/parasail.py | 152
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/parsers.py | 1504
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/pascal.py | 1240
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/pawn.py | 342
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/perl.py | 906
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/php.py | 456
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/praat.py | 482
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/prolog.py | 532
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/python.py | 1270
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/qvt.py | 204
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/r.py | 354
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/rdf.py | 412
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/rebol.py | 844
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/resource.py | 158
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/roboconf.py | 160
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/robotframework.py | 1054
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/ruby.py | 934
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/rust.py | 214
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/scripting.py | 2292
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/shell.py | 1438
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/smalltalk.py | 384
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/snobol.py | 162
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/special.py | 142
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/sql.py | 812
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/supercollider.py | 160
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/tcl.py | 280
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/templates.py | 4046
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/testing.py | 398
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/text.py | 46
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/textedit.py | 312
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/textfmts.py | 556
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/theorem.py | 786
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/trafficscript.py | 98
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/urbi.py | 258
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/web.py | 44
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/webmisc.py | 1860
-rw-r--r--  contrib/python/Pygments/py3/pygments/lexers/x10.py | 124
-rw-r--r--  contrib/python/Pygments/py3/pygments/modeline.py | 74
-rw-r--r--  contrib/python/Pygments/py3/pygments/plugin.py | 110
-rw-r--r--  contrib/python/Pygments/py3/pygments/regexopt.py | 176
-rw-r--r--  contrib/python/Pygments/py3/pygments/scanner.py | 200
-rw-r--r--  contrib/python/Pygments/py3/pygments/sphinxext.py | 306
-rw-r--r--  contrib/python/Pygments/py3/pygments/style.py | 222
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/__init__.py | 144
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/algol.py | 122
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/algol_nu.py | 122
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/arduino.py | 178
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/autumn.py | 126
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/borland.py | 98
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/bw.py | 94
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/colorful.py | 158
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/default.py | 118
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/emacs.py | 140
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/friendly.py | 140
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/fruity.py | 80
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/igor.py | 54
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/lovelace.py | 182
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/manni.py | 146
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/monokai.py | 202
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/murphy.py | 156
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/native.py | 126
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py | 240
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/paraiso_light.py | 240
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/pastie.py | 146
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/perldoc.py | 132
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/rrt.py | 60
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/tango.py | 276
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/trac.py | 122
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/vim.py | 122
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/vs.py | 72
-rw-r--r--  contrib/python/Pygments/py3/pygments/styles/xcode.py | 98
-rw-r--r--  contrib/python/Pygments/py3/pygments/token.py | 368
-rw-r--r--  contrib/python/Pygments/py3/pygments/unistring.py | 212
-rw-r--r--  contrib/python/Pygments/py3/pygments/util.py | 576
-rw-r--r--  contrib/python/Pygments/py3/ya.make | 372
-rw-r--r--  contrib/python/Pygments/ya.make | 4
-rw-r--r--  contrib/python/ya.make | 2
-rw-r--r--  contrib/restricted/cityhash-1.0.2/COPYING | 38
-rw-r--r--  contrib/restricted/cityhash-1.0.2/city.cc | 958
-rw-r--r--  contrib/restricted/cityhash-1.0.2/city.h | 208
-rw-r--r--  contrib/restricted/cityhash-1.0.2/citycrc.h | 102
-rw-r--r--  contrib/restricted/cityhash-1.0.2/config.h | 250
-rw-r--r--  contrib/restricted/cityhash-1.0.2/ya.make | 32
-rw-r--r--  contrib/restricted/ya.make | 2
-rw-r--r--  util/digest/ya.make | 4
-rw-r--r--  util/folder/path.cpp | 34
-rw-r--r--  util/folder/path.h | 6
-rw-r--r--  util/network/sock.h | 44
-rw-r--r--  util/network/socket.h | 10
-rw-r--r--  util/random/entropy.cpp | 18
-rw-r--r--  util/random/entropy.h | 4
-rw-r--r--  util/random/random.cpp | 14
-rw-r--r--  util/random/random.h | 10
-rw-r--r--  ydb/library/yql/udfs/common/clickhouse/client/base/common/SimpleCache.h | 148
-rw-r--r--  ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp | 52
-rw-r--r--  ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.h | 10
-rw-r--r--  ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.cpp | 214
-rw-r--r--  ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.h | 38
391 files changed, 140926 insertions, 140926 deletions
diff --git a/contrib/libs/pdqsort/license.txt b/contrib/libs/pdqsort/license.txt
index c1503f9121..77e85e9034 100644
--- a/contrib/libs/pdqsort/license.txt
+++ b/contrib/libs/pdqsort/license.txt
@@ -1,16 +1,16 @@
-Copyright (c) 2015 Orson Peters <orsonpeters@gmail.com>
-
-This software is provided 'as-is', without any express or implied warranty. In no event will the
-authors be held liable for any damages arising from the use of this software.
-
-Permission is granted to anyone to use this software for any purpose, including commercial
-applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
-1. The origin of this software must not be misrepresented; you must not claim that you wrote the
- original software. If you use this software in a product, an acknowledgment in the product
- documentation would be appreciated but is not required.
-
-2. Altered source versions must be plainly marked as such, and must not be misrepresented as
- being the original software.
-
-3. This notice may not be removed or altered from any source distribution.
+Copyright (c) 2015 Orson Peters <orsonpeters@gmail.com>
+
+This software is provided 'as-is', without any express or implied warranty. In no event will the
+authors be held liable for any damages arising from the use of this software.
+
+Permission is granted to anyone to use this software for any purpose, including commercial
+applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim that you wrote the
+ original software. If you use this software in a product, an acknowledgment in the product
+ documentation would be appreciated but is not required.
+
+2. Altered source versions must be plainly marked as such, and must not be misrepresented as
+ being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
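
For context on the next hunk: pdqsort.h, restored verbatim below, exposes std::sort-style entry points (pdqsort and pdqsort_branchless, each with and without a comparator). The following is a minimal usage sketch, not part of the commit; it assumes the restored header is reachable on the include path, and the sample data is illustrative only.

#include <algorithm>
#include <cassert>
#include <functional>
#include <vector>
#include "pdqsort.h"  // assumption: contrib/libs/pdqsort is on the include path

int main() {
    std::vector<int> v = {5, 3, 9, 1, 4, 1, 8};

    // Same call shape as std::sort; the two-argument overload uses std::less<T>.
    pdqsort(v.begin(), v.end());
    assert(std::is_sorted(v.begin(), v.end()));

    // The branchless variant targets cheap comparisons on arithmetic types,
    // e.g. sorting ints in descending order with std::greater.
    pdqsort_branchless(v.begin(), v.end(), std::greater<int>());
    assert(std::is_sorted(v.begin(), v.end(), std::greater<int>()));
    return 0;
}
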
diff --git a/contrib/libs/pdqsort/pdqsort.h b/contrib/libs/pdqsort/pdqsort.h
index 93ca1e9781..371393370b 100644
--- a/contrib/libs/pdqsort/pdqsort.h
+++ b/contrib/libs/pdqsort/pdqsort.h
@@ -1,544 +1,544 @@
-/*
- pdqsort.h - Pattern-defeating quicksort.
-
- Copyright (c) 2015 Orson Peters
-
- This software is provided 'as-is', without any express or implied warranty. In no event will the
- authors be held liable for any damages arising from the use of this software.
-
- Permission is granted to anyone to use this software for any purpose, including commercial
- applications, and to alter it and redistribute it freely, subject to the following restrictions:
-
- 1. The origin of this software must not be misrepresented; you must not claim that you wrote the
- original software. If you use this software in a product, an acknowledgment in the product
- documentation would be appreciated but is not required.
-
- 2. Altered source versions must be plainly marked as such, and must not be misrepresented as
- being the original software.
-
- 3. This notice may not be removed or altered from any source distribution.
-*/
-
-
-#ifndef PDQSORT_H
-#define PDQSORT_H
-
-#include <algorithm>
-#include <cstddef>
-#include <functional>
-#include <utility>
-#include <iterator>
-
-#if __cplusplus >= 201103L
- #include <cstdint>
- #include <type_traits>
- #define PDQSORT_PREFER_MOVE(x) std::move(x)
-#else
- #define PDQSORT_PREFER_MOVE(x) (x)
-#endif
-
-
-namespace pdqsort_detail {
- enum {
- // Partitions below this size are sorted using insertion sort.
- insertion_sort_threshold = 24,
-
- // Partitions above this size use Tukey's ninther to select the pivot.
- ninther_threshold = 128,
-
- // When we detect an already sorted partition, attempt an insertion sort that allows this
- // amount of element moves before giving up.
- partial_insertion_sort_limit = 8,
-
- // Must be multiple of 8 due to loop unrolling, and < 256 to fit in unsigned char.
- block_size = 64,
-
- // Cacheline size, assumes power of two.
- cacheline_size = 64
-
- };
-
-#if __cplusplus >= 201103L
- template<class T> struct is_default_compare : std::false_type { };
- template<class T> struct is_default_compare<std::less<T>> : std::true_type { };
- template<class T> struct is_default_compare<std::greater<T>> : std::true_type { };
-#endif
-
- // Returns floor(log2(n)), assumes n > 0.
- template<class T>
- inline int log2(T n) {
- int log = 0;
- while (n >>= 1) ++log;
- return log;
- }
-
- // Sorts [begin, end) using insertion sort with the given comparison function.
- template<class Iter, class Compare>
- inline void insertion_sort(Iter begin, Iter end, Compare comp) {
- typedef typename std::iterator_traits<Iter>::value_type T;
- if (begin == end) return;
-
- for (Iter cur = begin + 1; cur != end; ++cur) {
- Iter sift = cur;
- Iter sift_1 = cur - 1;
-
- // Compare first so we can avoid 2 moves for an element already positioned correctly.
- if (comp(*sift, *sift_1)) {
- T tmp = PDQSORT_PREFER_MOVE(*sift);
-
- do { *sift-- = PDQSORT_PREFER_MOVE(*sift_1); }
- while (sift != begin && comp(tmp, *--sift_1));
-
- *sift = PDQSORT_PREFER_MOVE(tmp);
- }
- }
- }
-
- // Sorts [begin, end) using insertion sort with the given comparison function. Assumes
- // *(begin - 1) is an element smaller than or equal to any element in [begin, end).
- template<class Iter, class Compare>
- inline void unguarded_insertion_sort(Iter begin, Iter end, Compare comp) {
- typedef typename std::iterator_traits<Iter>::value_type T;
- if (begin == end) return;
-
- for (Iter cur = begin + 1; cur != end; ++cur) {
- Iter sift = cur;
- Iter sift_1 = cur - 1;
-
- // Compare first so we can avoid 2 moves for an element already positioned correctly.
- if (comp(*sift, *sift_1)) {
- T tmp = PDQSORT_PREFER_MOVE(*sift);
-
- do { *sift-- = PDQSORT_PREFER_MOVE(*sift_1); }
- while (comp(tmp, *--sift_1));
-
- *sift = PDQSORT_PREFER_MOVE(tmp);
- }
- }
- }
-
- // Attempts to use insertion sort on [begin, end). Will return false if more than
- // partial_insertion_sort_limit elements were moved, and abort sorting. Otherwise it will
- // successfully sort and return true.
- template<class Iter, class Compare>
- inline bool partial_insertion_sort(Iter begin, Iter end, Compare comp) {
- typedef typename std::iterator_traits<Iter>::value_type T;
- if (begin == end) return true;
-
- std::size_t limit = 0;
- for (Iter cur = begin + 1; cur != end; ++cur) {
- Iter sift = cur;
- Iter sift_1 = cur - 1;
-
- // Compare first so we can avoid 2 moves for an element already positioned correctly.
- if (comp(*sift, *sift_1)) {
- T tmp = PDQSORT_PREFER_MOVE(*sift);
-
- do { *sift-- = PDQSORT_PREFER_MOVE(*sift_1); }
- while (sift != begin && comp(tmp, *--sift_1));
-
- *sift = PDQSORT_PREFER_MOVE(tmp);
- limit += cur - sift;
- }
-
- if (limit > partial_insertion_sort_limit) return false;
- }
-
- return true;
- }
-
- template<class Iter, class Compare>
- inline void sort2(Iter a, Iter b, Compare comp) {
- if (comp(*b, *a)) std::iter_swap(a, b);
- }
-
- // Sorts the elements *a, *b and *c using comparison function comp.
- template<class Iter, class Compare>
- inline void sort3(Iter a, Iter b, Iter c, Compare comp) {
- sort2(a, b, comp);
- sort2(b, c, comp);
- sort2(a, b, comp);
- }
-
- template<class T>
- inline T* align_cacheline(T* p) {
-#if defined(UINTPTR_MAX) && __cplusplus >= 201103L
- std::uintptr_t ip = reinterpret_cast<std::uintptr_t>(p);
-#else
- std::size_t ip = reinterpret_cast<std::size_t>(p);
-#endif
- ip = (ip + cacheline_size - 1) & -cacheline_size;
- return reinterpret_cast<T*>(ip);
- }
-
- template<class Iter>
- inline void swap_offsets(Iter first, Iter last,
- unsigned char* offsets_l, unsigned char* offsets_r,
- int num, bool use_swaps) {
- typedef typename std::iterator_traits<Iter>::value_type T;
- if (use_swaps) {
- // This case is needed for the descending distribution, where we need
- // to have proper swapping for pdqsort to remain O(n).
- for (int i = 0; i < num; ++i) {
- std::iter_swap(first + offsets_l[i], last - offsets_r[i]);
- }
- } else if (num > 0) {
- Iter l = first + offsets_l[0]; Iter r = last - offsets_r[0];
- T tmp(PDQSORT_PREFER_MOVE(*l)); *l = PDQSORT_PREFER_MOVE(*r);
- for (int i = 1; i < num; ++i) {
- l = first + offsets_l[i]; *r = PDQSORT_PREFER_MOVE(*l);
- r = last - offsets_r[i]; *l = PDQSORT_PREFER_MOVE(*r);
- }
- *r = PDQSORT_PREFER_MOVE(tmp);
- }
- }
-
- // Partitions [begin, end) around pivot *begin using comparison function comp. Elements equal
- // to the pivot are put in the right-hand partition. Returns the position of the pivot after
- // partitioning and whether the passed sequence already was correctly partitioned. Assumes the
- // pivot is a median of at least 3 elements and that [begin, end) is at least
- // insertion_sort_threshold long. Uses branchless partitioning.
- template<class Iter, class Compare>
- inline std::pair<Iter, bool> partition_right_branchless(Iter begin, Iter end, Compare comp) {
- typedef typename std::iterator_traits<Iter>::value_type T;
-
- // Move pivot into local for speed.
- T pivot(PDQSORT_PREFER_MOVE(*begin));
- Iter first = begin;
- Iter last = end;
-
- // Find the first element greater than or equal than the pivot (the median of 3 guarantees
- // this exists).
- while (comp(*++first, pivot));
-
- // Find the first element strictly smaller than the pivot. We have to guard this search if
- // there was no element before *first.
- if (first - 1 == begin) while (first < last && !comp(*--last, pivot));
- else while ( !comp(*--last, pivot));
-
- // If the first pair of elements that should be swapped to partition are the same element,
- // the passed in sequence already was correctly partitioned.
- bool already_partitioned = first >= last;
- if (!already_partitioned) {
- std::iter_swap(first, last);
- ++first;
- }
-
- // The following branchless partitioning is derived from "BlockQuicksort: How Branch
- // Mispredictions don’t affect Quicksort" by Stefan Edelkamp and Armin Weiss.
- unsigned char offsets_l_storage[block_size + cacheline_size];
- unsigned char offsets_r_storage[block_size + cacheline_size];
- unsigned char* offsets_l = align_cacheline(offsets_l_storage);
- unsigned char* offsets_r = align_cacheline(offsets_r_storage);
- int num_l, num_r, start_l, start_r;
- num_l = num_r = start_l = start_r = 0;
-
- while (last - first > 2 * block_size) {
- // Fill up offset blocks with elements that are on the wrong side.
- if (num_l == 0) {
- start_l = 0;
- Iter it = first;
- for (unsigned char i = 0; i < block_size;) {
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- }
- }
- if (num_r == 0) {
- start_r = 0;
- Iter it = last;
- for (unsigned char i = 0; i < block_size;) {
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- }
- }
-
- // Swap elements and update block sizes and first/last boundaries.
- int num = std::min(num_l, num_r);
- swap_offsets(first, last, offsets_l + start_l, offsets_r + start_r,
- num, num_l == num_r);
- num_l -= num; num_r -= num;
- start_l += num; start_r += num;
- if (num_l == 0) first += block_size;
- if (num_r == 0) last -= block_size;
- }
-
- int l_size = 0, r_size = 0;
- int unknown_left = (int)(last - first) - ((num_r || num_l) ? block_size : 0);
- if (num_r) {
- // Handle leftover block by assigning the unknown elements to the other block.
- l_size = unknown_left;
- r_size = block_size;
- } else if (num_l) {
- l_size = block_size;
- r_size = unknown_left;
- } else {
- // No leftover block, split the unknown elements in two blocks.
- l_size = unknown_left/2;
- r_size = unknown_left - l_size;
- }
-
- // Fill offset buffers if needed.
- if (unknown_left && !num_l) {
- start_l = 0;
- Iter it = first;
- for (unsigned char i = 0; i < l_size;) {
- offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
- }
- }
- if (unknown_left && !num_r) {
- start_r = 0;
- Iter it = last;
- for (unsigned char i = 0; i < r_size;) {
- offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
- }
- }
-
- int num = std::min(num_l, num_r);
- swap_offsets(first, last, offsets_l + start_l, offsets_r + start_r, num, num_l == num_r);
- num_l -= num; num_r -= num;
- start_l += num; start_r += num;
- if (num_l == 0) first += l_size;
- if (num_r == 0) last -= r_size;
-
- // We have now fully identified [first, last)'s proper position. Swap the last elements.
- if (num_l) {
- offsets_l += start_l;
- while (num_l--) std::iter_swap(first + offsets_l[num_l], --last);
- first = last;
- }
- if (num_r) {
- offsets_r += start_r;
- while (num_r--) std::iter_swap(last - offsets_r[num_r], first), ++first;
- last = first;
- }
-
- // Put the pivot in the right place.
- Iter pivot_pos = first - 1;
- *begin = PDQSORT_PREFER_MOVE(*pivot_pos);
- *pivot_pos = PDQSORT_PREFER_MOVE(pivot);
-
- return std::make_pair(pivot_pos, already_partitioned);
- }
-
- // Partitions [begin, end) around pivot *begin using comparison function comp. Elements equal
- // to the pivot are put in the right-hand partition. Returns the position of the pivot after
- // partitioning and whether the passed sequence already was correctly partitioned. Assumes the
- // pivot is a median of at least 3 elements and that [begin, end) is at least
- // insertion_sort_threshold long.
- template<class Iter, class Compare>
- inline std::pair<Iter, bool> partition_right(Iter begin, Iter end, Compare comp) {
- typedef typename std::iterator_traits<Iter>::value_type T;
-
- // Move pivot into local for speed.
- T pivot(PDQSORT_PREFER_MOVE(*begin));
-
- Iter first = begin;
- Iter last = end;
-
- // Find the first element greater than or equal than the pivot (the median of 3 guarantees
- // this exists).
- while (comp(*++first, pivot));
-
- // Find the first element strictly smaller than the pivot. We have to guard this search if
- // there was no element before *first.
- if (first - 1 == begin) while (first < last && !comp(*--last, pivot));
- else while ( !comp(*--last, pivot));
-
- // If the first pair of elements that should be swapped to partition are the same element,
- // the passed in sequence already was correctly partitioned.
- bool already_partitioned = first >= last;
-
- // Keep swapping pairs of elements that are on the wrong side of the pivot. Previously
- // swapped pairs guard the searches, which is why the first iteration is special-cased
- // above.
- while (first < last) {
- std::iter_swap(first, last);
- while (comp(*++first, pivot));
- while (!comp(*--last, pivot));
- }
-
- // Put the pivot in the right place.
- Iter pivot_pos = first - 1;
- *begin = PDQSORT_PREFER_MOVE(*pivot_pos);
- *pivot_pos = PDQSORT_PREFER_MOVE(pivot);
-
- return std::make_pair(pivot_pos, already_partitioned);
- }
-
- // Similar function to the one above, except elements equal to the pivot are put to the left of
- // the pivot and it doesn't check or return if the passed sequence already was partitioned.
- // Since this is rarely used (the many equal case), and in that case pdqsort already has O(n)
- // performance, no block quicksort is applied here for simplicity.
- template<class Iter, class Compare>
- inline Iter partition_left(Iter begin, Iter end, Compare comp) {
- typedef typename std::iterator_traits<Iter>::value_type T;
-
- T pivot(PDQSORT_PREFER_MOVE(*begin));
- Iter first = begin;
- Iter last = end;
-
- while (comp(pivot, *--last));
-
- if (last + 1 == end) while (first < last && !comp(pivot, *++first));
- else while ( !comp(pivot, *++first));
-
- while (first < last) {
- std::iter_swap(first, last);
- while (comp(pivot, *--last));
- while (!comp(pivot, *++first));
- }
-
- Iter pivot_pos = last;
- *begin = PDQSORT_PREFER_MOVE(*pivot_pos);
- *pivot_pos = PDQSORT_PREFER_MOVE(pivot);
-
- return pivot_pos;
- }
-
-
- template<class Iter, class Compare, bool Branchless>
- inline void pdqsort_loop(Iter begin, Iter end, Compare comp, int bad_allowed, bool leftmost = true) {
- typedef typename std::iterator_traits<Iter>::difference_type diff_t;
-
- // Use a while loop for tail recursion elimination.
- while (true) {
- diff_t size = end - begin;
-
- // Insertion sort is faster for small arrays.
- if (size < insertion_sort_threshold) {
- if (leftmost) insertion_sort(begin, end, comp);
- else unguarded_insertion_sort(begin, end, comp);
- return;
- }
-
- // Choose pivot as median of 3 or pseudomedian of 9.
- diff_t s2 = size / 2;
- if (size > ninther_threshold) {
- sort3(begin, begin + s2, end - 1, comp);
- sort3(begin + 1, begin + (s2 - 1), end - 2, comp);
- sort3(begin + 2, begin + (s2 + 1), end - 3, comp);
- sort3(begin + (s2 - 1), begin + s2, begin + (s2 + 1), comp);
- std::iter_swap(begin, begin + s2);
- } else sort3(begin + s2, begin, end - 1, comp);
-
- // If *(begin - 1) is the end of the right partition of a previous partition operation
- // there is no element in [begin, end) that is smaller than *(begin - 1). Then if our
- // pivot compares equal to *(begin - 1) we change strategy, putting equal elements in
- // the left partition, greater elements in the right partition. We do not have to
- // recurse on the left partition, since it's sorted (all equal).
- if (!leftmost && !comp(*(begin - 1), *begin)) {
- begin = partition_left(begin, end, comp) + 1;
- continue;
- }
-
- // Partition and get results.
- std::pair<Iter, bool> part_result =
- Branchless ? partition_right_branchless(begin, end, comp)
- : partition_right(begin, end, comp);
- Iter pivot_pos = part_result.first;
- bool already_partitioned = part_result.second;
-
- // Check for a highly unbalanced partition.
- diff_t l_size = pivot_pos - begin;
- diff_t r_size = end - (pivot_pos + 1);
- bool highly_unbalanced = l_size < size / 8 || r_size < size / 8;
-
- // If we got a highly unbalanced partition we shuffle elements to break many patterns.
- if (highly_unbalanced) {
- // If we had too many bad partitions, switch to heapsort to guarantee O(n log n).
- if (--bad_allowed == 0) {
- std::make_heap(begin, end, comp);
- std::sort_heap(begin, end, comp);
- return;
- }
-
- if (l_size >= insertion_sort_threshold) {
- std::iter_swap(begin, begin + l_size / 4);
- std::iter_swap(pivot_pos - 1, pivot_pos - l_size / 4);
-
- if (l_size > ninther_threshold) {
- std::iter_swap(begin + 1, begin + (l_size / 4 + 1));
- std::iter_swap(begin + 2, begin + (l_size / 4 + 2));
- std::iter_swap(pivot_pos - 2, pivot_pos - (l_size / 4 + 1));
- std::iter_swap(pivot_pos - 3, pivot_pos - (l_size / 4 + 2));
- }
- }
-
- if (r_size >= insertion_sort_threshold) {
- std::iter_swap(pivot_pos + 1, pivot_pos + (1 + r_size / 4));
- std::iter_swap(end - 1, end - r_size / 4);
-
- if (r_size > ninther_threshold) {
- std::iter_swap(pivot_pos + 2, pivot_pos + (2 + r_size / 4));
- std::iter_swap(pivot_pos + 3, pivot_pos + (3 + r_size / 4));
- std::iter_swap(end - 2, end - (1 + r_size / 4));
- std::iter_swap(end - 3, end - (2 + r_size / 4));
- }
- }
- } else {
- // If we were decently balanced and we tried to sort an already partitioned
- // sequence try to use insertion sort.
- if (already_partitioned && partial_insertion_sort(begin, pivot_pos, comp)
- && partial_insertion_sort(pivot_pos + 1, end, comp)) return;
- }
-
- // Sort the left partition first using recursion and do tail recursion elimination for
- // the right-hand partition.
- pdqsort_loop<Iter, Compare, Branchless>(begin, pivot_pos, comp, bad_allowed, leftmost);
- begin = pivot_pos + 1;
- leftmost = false;
- }
- }
-}
-
-
-template<class Iter, class Compare>
-inline void pdqsort(Iter begin, Iter end, Compare comp) {
- if (begin == end) return;
-
-#if __cplusplus >= 201103L
- pdqsort_detail::pdqsort_loop<Iter, Compare,
- pdqsort_detail::is_default_compare<typename std::decay<Compare>::type>::value &&
- std::is_arithmetic<typename std::iterator_traits<Iter>::value_type>::value>(
- begin, end, comp, pdqsort_detail::log2(end - begin));
-#else
- pdqsort_detail::pdqsort_loop<Iter, Compare, false>(
- begin, end, comp, pdqsort_detail::log2(end - begin));
-#endif
-}
-
-template<class Iter>
-inline void pdqsort(Iter begin, Iter end) {
- typedef typename std::iterator_traits<Iter>::value_type T;
- pdqsort(begin, end, std::less<T>());
-}
-
-template<class Iter, class Compare>
-inline void pdqsort_branchless(Iter begin, Iter end, Compare comp) {
- if (begin == end) return;
- pdqsort_detail::pdqsort_loop<Iter, Compare, true>(
- begin, end, comp, pdqsort_detail::log2(end - begin));
-}
-
-template<class Iter>
-inline void pdqsort_branchless(Iter begin, Iter end) {
- typedef typename std::iterator_traits<Iter>::value_type T;
- pdqsort_branchless(begin, end, std::less<T>());
-}
-
-
-#undef PDQSORT_PREFER_MOVE
-
-#endif
+/*
+ pdqsort.h - Pattern-defeating quicksort.
+
+ Copyright (c) 2015 Orson Peters
+
+ This software is provided 'as-is', without any express or implied warranty. In no event will the
+ authors be held liable for any damages arising from the use of this software.
+
+ Permission is granted to anyone to use this software for any purpose, including commercial
+ applications, and to alter it and redistribute it freely, subject to the following restrictions:
+
+ 1. The origin of this software must not be misrepresented; you must not claim that you wrote the
+ original software. If you use this software in a product, an acknowledgment in the product
+ documentation would be appreciated but is not required.
+
+ 2. Altered source versions must be plainly marked as such, and must not be misrepresented as
+ being the original software.
+
+ 3. This notice may not be removed or altered from any source distribution.
+*/
+
+
+#ifndef PDQSORT_H
+#define PDQSORT_H
+
+#include <algorithm>
+#include <cstddef>
+#include <functional>
+#include <utility>
+#include <iterator>
+
+#if __cplusplus >= 201103L
+ #include <cstdint>
+ #include <type_traits>
+ #define PDQSORT_PREFER_MOVE(x) std::move(x)
+#else
+ #define PDQSORT_PREFER_MOVE(x) (x)
+#endif
+
+
+namespace pdqsort_detail {
+ enum {
+ // Partitions below this size are sorted using insertion sort.
+ insertion_sort_threshold = 24,
+
+ // Partitions above this size use Tukey's ninther to select the pivot.
+ ninther_threshold = 128,
+
+ // When we detect an already sorted partition, attempt an insertion sort that allows this
+ // amount of element moves before giving up.
+ partial_insertion_sort_limit = 8,
+
+ // Must be multiple of 8 due to loop unrolling, and < 256 to fit in unsigned char.
+ block_size = 64,
+
+ // Cacheline size, assumes power of two.
+ cacheline_size = 64
+
+ };
+
+#if __cplusplus >= 201103L
+ template<class T> struct is_default_compare : std::false_type { };
+ template<class T> struct is_default_compare<std::less<T>> : std::true_type { };
+ template<class T> struct is_default_compare<std::greater<T>> : std::true_type { };
+#endif
+
+ // Returns floor(log2(n)), assumes n > 0.
+ template<class T>
+ inline int log2(T n) {
+ int log = 0;
+ while (n >>= 1) ++log;
+ return log;
+ }
+
+ // Sorts [begin, end) using insertion sort with the given comparison function.
+ template<class Iter, class Compare>
+ inline void insertion_sort(Iter begin, Iter end, Compare comp) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+ if (begin == end) return;
+
+ for (Iter cur = begin + 1; cur != end; ++cur) {
+ Iter sift = cur;
+ Iter sift_1 = cur - 1;
+
+ // Compare first so we can avoid 2 moves for an element already positioned correctly.
+ if (comp(*sift, *sift_1)) {
+ T tmp = PDQSORT_PREFER_MOVE(*sift);
+
+ do { *sift-- = PDQSORT_PREFER_MOVE(*sift_1); }
+ while (sift != begin && comp(tmp, *--sift_1));
+
+ *sift = PDQSORT_PREFER_MOVE(tmp);
+ }
+ }
+ }
+
+ // Sorts [begin, end) using insertion sort with the given comparison function. Assumes
+ // *(begin - 1) is an element smaller than or equal to any element in [begin, end).
+ template<class Iter, class Compare>
+ inline void unguarded_insertion_sort(Iter begin, Iter end, Compare comp) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+ if (begin == end) return;
+
+ for (Iter cur = begin + 1; cur != end; ++cur) {
+ Iter sift = cur;
+ Iter sift_1 = cur - 1;
+
+ // Compare first so we can avoid 2 moves for an element already positioned correctly.
+ if (comp(*sift, *sift_1)) {
+ T tmp = PDQSORT_PREFER_MOVE(*sift);
+
+ do { *sift-- = PDQSORT_PREFER_MOVE(*sift_1); }
+ while (comp(tmp, *--sift_1));
+
+ *sift = PDQSORT_PREFER_MOVE(tmp);
+ }
+ }
+ }
+
+ // Attempts to use insertion sort on [begin, end). Will return false if more than
+ // partial_insertion_sort_limit elements were moved, and abort sorting. Otherwise it will
+ // successfully sort and return true.
+ template<class Iter, class Compare>
+ inline bool partial_insertion_sort(Iter begin, Iter end, Compare comp) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+ if (begin == end) return true;
+
+ std::size_t limit = 0;
+ for (Iter cur = begin + 1; cur != end; ++cur) {
+ Iter sift = cur;
+ Iter sift_1 = cur - 1;
+
+ // Compare first so we can avoid 2 moves for an element already positioned correctly.
+ if (comp(*sift, *sift_1)) {
+ T tmp = PDQSORT_PREFER_MOVE(*sift);
+
+ do { *sift-- = PDQSORT_PREFER_MOVE(*sift_1); }
+ while (sift != begin && comp(tmp, *--sift_1));
+
+ *sift = PDQSORT_PREFER_MOVE(tmp);
+ limit += cur - sift;
+ }
+
+ if (limit > partial_insertion_sort_limit) return false;
+ }
+
+ return true;
+ }
+
+ template<class Iter, class Compare>
+ inline void sort2(Iter a, Iter b, Compare comp) {
+ if (comp(*b, *a)) std::iter_swap(a, b);
+ }
+
+ // Sorts the elements *a, *b and *c using comparison function comp.
+ template<class Iter, class Compare>
+ inline void sort3(Iter a, Iter b, Iter c, Compare comp) {
+ sort2(a, b, comp);
+ sort2(b, c, comp);
+ sort2(a, b, comp);
+ }
+
+ template<class T>
+ inline T* align_cacheline(T* p) {
+#if defined(UINTPTR_MAX) && __cplusplus >= 201103L
+ std::uintptr_t ip = reinterpret_cast<std::uintptr_t>(p);
+#else
+ std::size_t ip = reinterpret_cast<std::size_t>(p);
+#endif
+ ip = (ip + cacheline_size - 1) & -cacheline_size;
+ return reinterpret_cast<T*>(ip);
+ }
+
+ template<class Iter>
+ inline void swap_offsets(Iter first, Iter last,
+ unsigned char* offsets_l, unsigned char* offsets_r,
+ int num, bool use_swaps) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+ if (use_swaps) {
+ // This case is needed for the descending distribution, where we need
+ // to have proper swapping for pdqsort to remain O(n).
+ for (int i = 0; i < num; ++i) {
+ std::iter_swap(first + offsets_l[i], last - offsets_r[i]);
+ }
+ } else if (num > 0) {
+ Iter l = first + offsets_l[0]; Iter r = last - offsets_r[0];
+ T tmp(PDQSORT_PREFER_MOVE(*l)); *l = PDQSORT_PREFER_MOVE(*r);
+ for (int i = 1; i < num; ++i) {
+ l = first + offsets_l[i]; *r = PDQSORT_PREFER_MOVE(*l);
+ r = last - offsets_r[i]; *l = PDQSORT_PREFER_MOVE(*r);
+ }
+ *r = PDQSORT_PREFER_MOVE(tmp);
+ }
+ }
+
+ // Partitions [begin, end) around pivot *begin using comparison function comp. Elements equal
+ // to the pivot are put in the right-hand partition. Returns the position of the pivot after
+ // partitioning and whether the passed sequence already was correctly partitioned. Assumes the
+ // pivot is a median of at least 3 elements and that [begin, end) is at least
+ // insertion_sort_threshold long. Uses branchless partitioning.
+ template<class Iter, class Compare>
+ inline std::pair<Iter, bool> partition_right_branchless(Iter begin, Iter end, Compare comp) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+
+ // Move pivot into local for speed.
+ T pivot(PDQSORT_PREFER_MOVE(*begin));
+ Iter first = begin;
+ Iter last = end;
+
+ // Find the first element greater than or equal than the pivot (the median of 3 guarantees
+ // this exists).
+ while (comp(*++first, pivot));
+
+ // Find the first element strictly smaller than the pivot. We have to guard this search if
+ // there was no element before *first.
+ if (first - 1 == begin) while (first < last && !comp(*--last, pivot));
+ else while ( !comp(*--last, pivot));
+
+ // If the first pair of elements that should be swapped to partition are the same element,
+ // the passed in sequence already was correctly partitioned.
+ bool already_partitioned = first >= last;
+ if (!already_partitioned) {
+ std::iter_swap(first, last);
+ ++first;
+ }
+
+ // The following branchless partitioning is derived from "BlockQuicksort: How Branch
+ // Mispredictions don’t affect Quicksort" by Stefan Edelkamp and Armin Weiss.
+ unsigned char offsets_l_storage[block_size + cacheline_size];
+ unsigned char offsets_r_storage[block_size + cacheline_size];
+ unsigned char* offsets_l = align_cacheline(offsets_l_storage);
+ unsigned char* offsets_r = align_cacheline(offsets_r_storage);
+ int num_l, num_r, start_l, start_r;
+ num_l = num_r = start_l = start_r = 0;
+
+ while (last - first > 2 * block_size) {
+ // Fill up offset blocks with elements that are on the wrong side.
+ if (num_l == 0) {
+ start_l = 0;
+ Iter it = first;
+ for (unsigned char i = 0; i < block_size;) {
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ }
+ }
+ if (num_r == 0) {
+ start_r = 0;
+ Iter it = last;
+ for (unsigned char i = 0; i < block_size;) {
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ }
+ }
+
+ // Swap elements and update block sizes and first/last boundaries.
+ int num = std::min(num_l, num_r);
+ swap_offsets(first, last, offsets_l + start_l, offsets_r + start_r,
+ num, num_l == num_r);
+ num_l -= num; num_r -= num;
+ start_l += num; start_r += num;
+ if (num_l == 0) first += block_size;
+ if (num_r == 0) last -= block_size;
+ }
+
+ int l_size = 0, r_size = 0;
+ int unknown_left = (int)(last - first) - ((num_r || num_l) ? block_size : 0);
+ if (num_r) {
+ // Handle leftover block by assigning the unknown elements to the other block.
+ l_size = unknown_left;
+ r_size = block_size;
+ } else if (num_l) {
+ l_size = block_size;
+ r_size = unknown_left;
+ } else {
+ // No leftover block, split the unknown elements in two blocks.
+ l_size = unknown_left/2;
+ r_size = unknown_left - l_size;
+ }
+
+ // Fill offset buffers if needed.
+ if (unknown_left && !num_l) {
+ start_l = 0;
+ Iter it = first;
+ for (unsigned char i = 0; i < l_size;) {
+ offsets_l[num_l] = i++; num_l += !comp(*it, pivot); ++it;
+ }
+ }
+ if (unknown_left && !num_r) {
+ start_r = 0;
+ Iter it = last;
+ for (unsigned char i = 0; i < r_size;) {
+ offsets_r[num_r] = ++i; num_r += comp(*--it, pivot);
+ }
+ }
+
+ int num = std::min(num_l, num_r);
+ swap_offsets(first, last, offsets_l + start_l, offsets_r + start_r, num, num_l == num_r);
+ num_l -= num; num_r -= num;
+ start_l += num; start_r += num;
+ if (num_l == 0) first += l_size;
+ if (num_r == 0) last -= r_size;
+
+ // We have now fully identified [first, last)'s proper position. Swap the last elements.
+ if (num_l) {
+ offsets_l += start_l;
+ while (num_l--) std::iter_swap(first + offsets_l[num_l], --last);
+ first = last;
+ }
+ if (num_r) {
+ offsets_r += start_r;
+ while (num_r--) std::iter_swap(last - offsets_r[num_r], first), ++first;
+ last = first;
+ }
+
+ // Put the pivot in the right place.
+ Iter pivot_pos = first - 1;
+ *begin = PDQSORT_PREFER_MOVE(*pivot_pos);
+ *pivot_pos = PDQSORT_PREFER_MOVE(pivot);
+
+ return std::make_pair(pivot_pos, already_partitioned);
+ }
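// Aside (illustrative sketch, not part of the patch; the array `data` and the
// value `pivot` below are hypothetical): the unrolled loops above record the
// positions of misplaced elements without a data-dependent branch. The boolean
// comparison result is added to the write cursor, so the index is stored
// speculatively on every iteration and only "kept" when the element really is
// on the wrong side:
//
//     unsigned char offsets[block_size];
//     int num = 0;
//     for (unsigned char i = 0; i < block_size; ++i) {
//         offsets[num] = i;            // speculative store of the index
//         num += !(data[i] < pivot);   // advance only if data[i] >= pivot
//     }
//
// Avoiding the branch is what keeps the partition fast on inputs that defeat
// the branch predictor (the BlockQuicksort idea referenced above).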
+
+ // Partitions [begin, end) around pivot *begin using comparison function comp. Elements equal
+ // to the pivot are put in the right-hand partition. Returns the position of the pivot after
+ // partitioning and whether the passed sequence already was correctly partitioned. Assumes the
+ // pivot is a median of at least 3 elements and that [begin, end) is at least
+ // insertion_sort_threshold long.
+ template<class Iter, class Compare>
+ inline std::pair<Iter, bool> partition_right(Iter begin, Iter end, Compare comp) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+
+ // Move pivot into local for speed.
+ T pivot(PDQSORT_PREFER_MOVE(*begin));
+
+ Iter first = begin;
+ Iter last = end;
+
+        // Find the first element greater than or equal to the pivot (the median of 3 guarantees
+ // this exists).
+ while (comp(*++first, pivot));
+
+ // Find the first element strictly smaller than the pivot. We have to guard this search if
+ // there was no element before *first.
+ if (first - 1 == begin) while (first < last && !comp(*--last, pivot));
+ else while ( !comp(*--last, pivot));
+
+ // If the first pair of elements that should be swapped to partition are the same element,
+ // the passed in sequence already was correctly partitioned.
+ bool already_partitioned = first >= last;
+
+ // Keep swapping pairs of elements that are on the wrong side of the pivot. Previously
+ // swapped pairs guard the searches, which is why the first iteration is special-cased
+ // above.
+ while (first < last) {
+ std::iter_swap(first, last);
+ while (comp(*++first, pivot));
+ while (!comp(*--last, pivot));
+ }
+
+ // Put the pivot in the right place.
+ Iter pivot_pos = first - 1;
+ *begin = PDQSORT_PREFER_MOVE(*pivot_pos);
+ *pivot_pos = PDQSORT_PREFER_MOVE(pivot);
+
+ return std::make_pair(pivot_pos, already_partitioned);
+ }
+
+ // Similar function to the one above, except elements equal to the pivot are put to the left of
+ // the pivot and it doesn't check or return if the passed sequence already was partitioned.
+ // Since this is rarely used (the many equal case), and in that case pdqsort already has O(n)
+ // performance, no block quicksort is applied here for simplicity.
+ template<class Iter, class Compare>
+ inline Iter partition_left(Iter begin, Iter end, Compare comp) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+
+ T pivot(PDQSORT_PREFER_MOVE(*begin));
+ Iter first = begin;
+ Iter last = end;
+
+ while (comp(pivot, *--last));
+
+ if (last + 1 == end) while (first < last && !comp(pivot, *++first));
+ else while ( !comp(pivot, *++first));
+
+ while (first < last) {
+ std::iter_swap(first, last);
+ while (comp(pivot, *--last));
+ while (!comp(pivot, *++first));
+ }
+
+ Iter pivot_pos = last;
+ *begin = PDQSORT_PREFER_MOVE(*pivot_pos);
+ *pivot_pos = PDQSORT_PREFER_MOVE(pivot);
+
+ return pivot_pos;
+ }
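// Aside (not part of the patch): partition_left is only called when the pivot
// compares equal to *(begin - 1), so nothing in the range is smaller than the
// pivot and the resulting left partition is entirely equal to it. The caller
// can therefore skip that partition outright:
//
//     begin = partition_left(begin, end, comp) + 1;
//
// which is how long runs of equal keys are consumed in a single linear pass.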
+
+
+ template<class Iter, class Compare, bool Branchless>
+ inline void pdqsort_loop(Iter begin, Iter end, Compare comp, int bad_allowed, bool leftmost = true) {
+ typedef typename std::iterator_traits<Iter>::difference_type diff_t;
+
+ // Use a while loop for tail recursion elimination.
+ while (true) {
+ diff_t size = end - begin;
+
+ // Insertion sort is faster for small arrays.
+ if (size < insertion_sort_threshold) {
+ if (leftmost) insertion_sort(begin, end, comp);
+ else unguarded_insertion_sort(begin, end, comp);
+ return;
+ }
+
+ // Choose pivot as median of 3 or pseudomedian of 9.
+ diff_t s2 = size / 2;
+ if (size > ninther_threshold) {
+ sort3(begin, begin + s2, end - 1, comp);
+ sort3(begin + 1, begin + (s2 - 1), end - 2, comp);
+ sort3(begin + 2, begin + (s2 + 1), end - 3, comp);
+ sort3(begin + (s2 - 1), begin + s2, begin + (s2 + 1), comp);
+ std::iter_swap(begin, begin + s2);
+ } else sort3(begin + s2, begin, end - 1, comp);
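// Aside (not part of the patch): for ranges above ninther_threshold the three
// sort3 calls above leave the medians of three disjoint triples at s2 - 1, s2
// and s2 + 1; the fourth sort3 places the median of those medians (the
// "ninther", a pseudomedian of 9) at begin + s2, which the final iter_swap
// moves to begin so that *begin is the pivot. Smaller ranges simply take the
// median of begin, begin + s2 and end - 1, which sort3 leaves at begin.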
+
+ // If *(begin - 1) is the end of the right partition of a previous partition operation
+ // there is no element in [begin, end) that is smaller than *(begin - 1). Then if our
+ // pivot compares equal to *(begin - 1) we change strategy, putting equal elements in
+ // the left partition, greater elements in the right partition. We do not have to
+ // recurse on the left partition, since it's sorted (all equal).
+ if (!leftmost && !comp(*(begin - 1), *begin)) {
+ begin = partition_left(begin, end, comp) + 1;
+ continue;
+ }
+
+ // Partition and get results.
+ std::pair<Iter, bool> part_result =
+ Branchless ? partition_right_branchless(begin, end, comp)
+ : partition_right(begin, end, comp);
+ Iter pivot_pos = part_result.first;
+ bool already_partitioned = part_result.second;
+
+ // Check for a highly unbalanced partition.
+ diff_t l_size = pivot_pos - begin;
+ diff_t r_size = end - (pivot_pos + 1);
+ bool highly_unbalanced = l_size < size / 8 || r_size < size / 8;
+
+ // If we got a highly unbalanced partition we shuffle elements to break many patterns.
+ if (highly_unbalanced) {
+ // If we had too many bad partitions, switch to heapsort to guarantee O(n log n).
+ if (--bad_allowed == 0) {
+ std::make_heap(begin, end, comp);
+ std::sort_heap(begin, end, comp);
+ return;
+ }
+
+ if (l_size >= insertion_sort_threshold) {
+ std::iter_swap(begin, begin + l_size / 4);
+ std::iter_swap(pivot_pos - 1, pivot_pos - l_size / 4);
+
+ if (l_size > ninther_threshold) {
+ std::iter_swap(begin + 1, begin + (l_size / 4 + 1));
+ std::iter_swap(begin + 2, begin + (l_size / 4 + 2));
+ std::iter_swap(pivot_pos - 2, pivot_pos - (l_size / 4 + 1));
+ std::iter_swap(pivot_pos - 3, pivot_pos - (l_size / 4 + 2));
+ }
+ }
+
+ if (r_size >= insertion_sort_threshold) {
+ std::iter_swap(pivot_pos + 1, pivot_pos + (1 + r_size / 4));
+ std::iter_swap(end - 1, end - r_size / 4);
+
+ if (r_size > ninther_threshold) {
+ std::iter_swap(pivot_pos + 2, pivot_pos + (2 + r_size / 4));
+ std::iter_swap(pivot_pos + 3, pivot_pos + (3 + r_size / 4));
+ std::iter_swap(end - 2, end - (1 + r_size / 4));
+ std::iter_swap(end - 3, end - (2 + r_size / 4));
+ }
+ }
+ } else {
+ // If we were decently balanced and we tried to sort an already partitioned
+ // sequence try to use insertion sort.
+ if (already_partitioned && partial_insertion_sort(begin, pivot_pos, comp)
+ && partial_insertion_sort(pivot_pos + 1, end, comp)) return;
+ }
+
+ // Sort the left partition first using recursion and do tail recursion elimination for
+ // the right-hand partition.
+ pdqsort_loop<Iter, Compare, Branchless>(begin, pivot_pos, comp, bad_allowed, leftmost);
+ begin = pivot_pos + 1;
+ leftmost = false;
+ }
+ }
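// Aside (not part of the patch): the loop above is the introsort-style
// safeguard. bad_allowed starts at log2(end - begin) (see pdqsort() below),
// each highly unbalanced partition decrements it, and once it reaches zero the
// remaining range is finished with std::make_heap + std::sort_heap. That caps
// the worst case at O(n log n) while keeping the quicksort fast path for
// well-behaved inputs.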
+}
+
+
+template<class Iter, class Compare>
+inline void pdqsort(Iter begin, Iter end, Compare comp) {
+ if (begin == end) return;
+
+#if __cplusplus >= 201103L
+ pdqsort_detail::pdqsort_loop<Iter, Compare,
+ pdqsort_detail::is_default_compare<typename std::decay<Compare>::type>::value &&
+ std::is_arithmetic<typename std::iterator_traits<Iter>::value_type>::value>(
+ begin, end, comp, pdqsort_detail::log2(end - begin));
+#else
+ pdqsort_detail::pdqsort_loop<Iter, Compare, false>(
+ begin, end, comp, pdqsort_detail::log2(end - begin));
+#endif
+}
+
+template<class Iter>
+inline void pdqsort(Iter begin, Iter end) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+ pdqsort(begin, end, std::less<T>());
+}
+
+template<class Iter, class Compare>
+inline void pdqsort_branchless(Iter begin, Iter end, Compare comp) {
+ if (begin == end) return;
+ pdqsort_detail::pdqsort_loop<Iter, Compare, true>(
+ begin, end, comp, pdqsort_detail::log2(end - begin));
+}
+
+template<class Iter>
+inline void pdqsort_branchless(Iter begin, Iter end) {
+ typedef typename std::iterator_traits<Iter>::value_type T;
+ pdqsort_branchless(begin, end, std::less<T>());
+}
+
+
+#undef PDQSORT_PREFER_MOVE
+
+#endif
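
A minimal usage sketch of the pdqsort.h interface shown above (illustrative
only, not part of the patch; main(), the sample values and the include path are
assumptions):

    #include <cstdio>
    #include <vector>
    #include "pdqsort.h"   // contrib/libs/pdqsort/pdqsort.h

    int main() {
        std::vector<int> v = {5, 3, 9, 1, 4, 4, 7};

        // Default comparator: with an arithmetic value type and std::less,
        // pdqsort() selects the branchless partitioner automatically (C++11+).
        pdqsort(v.begin(), v.end());

        // The branchless entry point can also be requested explicitly,
        // here with a custom comparator that sorts in descending order.
        pdqsort_branchless(v.begin(), v.end(), [](int a, int b) { return a > b; });

        for (int x : v) std::printf("%d ", x);
        std::printf("\n");
        return 0;
    }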
diff --git a/contrib/libs/pdqsort/ya.make b/contrib/libs/pdqsort/ya.make
index bba3e8e78d..80365132b9 100644
--- a/contrib/libs/pdqsort/ya.make
+++ b/contrib/libs/pdqsort/ya.make
@@ -2,19 +2,19 @@ OWNER(
g:cpp-contrib
g:clickhouse
)
-
-# Origin: https://github.com/orlp/pdqsort
-
-LIBRARY()
-
+
+# Origin: https://github.com/orlp/pdqsort
+
+LIBRARY()
+
LICENSE(Zlib)
LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
-VERSION(978bc36a9bd4143a54b2551cfd9ce8a6afd6d04c)
-
-NO_UTIL()
-
-NO_RUNTIME()
+VERSION(978bc36a9bd4143a54b2551cfd9ce8a6afd6d04c)
+
+NO_UTIL()
-END()
+NO_RUNTIME()
+
+END()
diff --git a/contrib/libs/ya.make b/contrib/libs/ya.make
index 9c4640fdcf..3940878cae 100644
--- a/contrib/libs/ya.make
+++ b/contrib/libs/ya.make
@@ -141,7 +141,7 @@ RECURSE(
libcpuid
libcroco
libde265
- libdivide
+ libdivide
libdivsufsort2
libeatmydata/dynamic
libev
@@ -265,7 +265,7 @@ RECURSE(
pango
pcre
pcre2
- pdqsort
+ pdqsort
pffft
pfr
picohttpparser
diff --git a/contrib/python/Pygments/py2/LICENSE b/contrib/python/Pygments/py2/LICENSE
index 13d1c74b49..ffe68f1d41 100644
--- a/contrib/python/Pygments/py2/LICENSE
+++ b/contrib/python/Pygments/py2/LICENSE
@@ -1,25 +1,25 @@
Copyright (c) 2006-2019 by the respective authors (see AUTHORS file).
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-* Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/contrib/python/Pygments/py2/pygments/__init__.py b/contrib/python/Pygments/py2/pygments/__init__.py
index 89efc350ee..38f22fc931 100644
--- a/contrib/python/Pygments/py2/pygments/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/__init__.py
@@ -1,90 +1,90 @@
-# -*- coding: utf-8 -*-
-"""
- Pygments
- ~~~~~~~~
-
- Pygments is a syntax highlighting package written in Python.
-
- It is a generic syntax highlighter for general use in all kinds of software
- such as forum systems, wikis or other applications that need to prettify
- source code. Highlights are:
-
- * a wide range of common languages and markup formats is supported
- * special attention is paid to details, increasing quality by a fair amount
- * support for new languages and formats are added easily
- * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
- formats that PIL supports, and ANSI sequences
- * it is usable as a command-line tool and as a library
- * ... and it highlights even Brainfuck!
-
+# -*- coding: utf-8 -*-
+"""
+ Pygments
+ ~~~~~~~~
+
+ Pygments is a syntax highlighting package written in Python.
+
+ It is a generic syntax highlighter for general use in all kinds of software
+ such as forum systems, wikis or other applications that need to prettify
+ source code. Highlights are:
+
+ * a wide range of common languages and markup formats is supported
+ * special attention is paid to details, increasing quality by a fair amount
+ * support for new languages and formats are added easily
+ * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
+ formats that PIL supports, and ANSI sequences
+ * it is usable as a command-line tool and as a library
+ * ... and it highlights even Brainfuck!
+
The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
-
+
.. _Pygments master branch:
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
-
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
+ :license: BSD, see LICENSE for details.
+"""
import sys
-
+
from pygments.util import StringIO, BytesIO
__version__ = '2.5.2'
-__docformat__ = 'restructuredtext'
-
-__all__ = ['lex', 'format', 'highlight']
-
-
-def lex(code, lexer):
- """
- Lex ``code`` with ``lexer`` and return an iterable of tokens.
- """
- try:
- return lexer.get_tokens(code)
- except TypeError as err:
+__docformat__ = 'restructuredtext'
+
+__all__ = ['lex', 'format', 'highlight']
+
+
+def lex(code, lexer):
+ """
+ Lex ``code`` with ``lexer`` and return an iterable of tokens.
+ """
+ try:
+ return lexer.get_tokens(code)
+ except TypeError as err:
if (isinstance(err.args[0], str) and
('unbound method get_tokens' in err.args[0] or
'missing 1 required positional argument' in err.args[0])):
- raise TypeError('lex() argument must be a lexer instance, '
- 'not a class')
- raise
-
-
-def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin
- """
- Format a tokenlist ``tokens`` with the formatter ``formatter``.
-
- If ``outfile`` is given and a valid file object (an object
- with a ``write`` method), the result will be written to it, otherwise
- it is returned as a string.
- """
- try:
- if not outfile:
- realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
- formatter.format(tokens, realoutfile)
- return realoutfile.getvalue()
- else:
- formatter.format(tokens, outfile)
- except TypeError as err:
+ raise TypeError('lex() argument must be a lexer instance, '
+ 'not a class')
+ raise
+
+
+def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin
+ """
+ Format a tokenlist ``tokens`` with the formatter ``formatter``.
+
+ If ``outfile`` is given and a valid file object (an object
+ with a ``write`` method), the result will be written to it, otherwise
+ it is returned as a string.
+ """
+ try:
+ if not outfile:
+ realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
+ formatter.format(tokens, realoutfile)
+ return realoutfile.getvalue()
+ else:
+ formatter.format(tokens, outfile)
+ except TypeError as err:
if (isinstance(err.args[0], str) and
('unbound method format' in err.args[0] or
'missing 1 required positional argument' in err.args[0])):
- raise TypeError('format() argument must be a formatter instance, '
- 'not a class')
- raise
-
-
-def highlight(code, lexer, formatter, outfile=None):
- """
- Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
-
- If ``outfile`` is given and a valid file object (an object
- with a ``write`` method), the result will be written to it, otherwise
- it is returned as a string.
- """
- return format(lex(code, lexer), formatter, outfile)
-
-
-if __name__ == '__main__': # pragma: no cover
- from pygments.cmdline import main
- sys.exit(main(sys.argv))
+ raise TypeError('format() argument must be a formatter instance, '
+ 'not a class')
+ raise
+
+
+def highlight(code, lexer, formatter, outfile=None):
+ """
+ Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
+
+ If ``outfile`` is given and a valid file object (an object
+ with a ``write`` method), the result will be written to it, otherwise
+ it is returned as a string.
+ """
+ return format(lex(code, lexer), formatter, outfile)
+
+
+if __name__ == '__main__': # pragma: no cover
+ from pygments.cmdline import main
+ sys.exit(main(sys.argv))
diff --git a/contrib/python/Pygments/py2/pygments/cmdline.py b/contrib/python/Pygments/py2/pygments/cmdline.py
index 34752d66b2..549d43ae53 100644
--- a/contrib/python/Pygments/py2/pygments/cmdline.py
+++ b/contrib/python/Pygments/py2/pygments/cmdline.py
@@ -1,64 +1,64 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.cmdline
- ~~~~~~~~~~~~~~~~
-
- Command line interface.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.cmdline
+ ~~~~~~~~~~~~~~~~
+
+ Command line interface.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
import os
-import sys
-import getopt
-from textwrap import dedent
-
-from pygments import __version__, highlight
-from pygments.util import ClassNotFound, OptionError, docstring_headline, \
- guess_decode, guess_decode_from_terminal, terminal_encoding
-from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
+import sys
+import getopt
+from textwrap import dedent
+
+from pygments import __version__, highlight
+from pygments.util import ClassNotFound, OptionError, docstring_headline, \
+ guess_decode, guess_decode_from_terminal, terminal_encoding
+from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
-from pygments.lexers.special import TextLexer
-from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
-from pygments.formatters import get_all_formatters, get_formatter_by_name, \
+from pygments.lexers.special import TextLexer
+from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
+from pygments.formatters import get_all_formatters, get_formatter_by_name, \
load_formatter_from_file, get_formatter_for_filename, find_formatter_class
-from pygments.formatters.terminal import TerminalFormatter
+from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
-from pygments.filters import get_all_filters, find_filter_class
-from pygments.styles import get_all_styles, get_style_by_name
-
-
-USAGE = """\
-Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
+from pygments.filters import get_all_filters, find_filter_class
+from pygments.styles import get_all_styles, get_style_by_name
+
+
+USAGE = """\
+Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
[-O <options>] [-P <option=value>] [-s] [-v] [-x] [-o <outfile>] [<infile>]
-
- %s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
- %s -L [<which> ...]
- %s -N <filename>
- %s -H <type> <name>
- %s -h | -V
-
-Highlight the input file and write the result to <outfile>.
-
-If no input file is given, use stdin, if -o is not given, use stdout.
-
-If -s is passed, lexing will be done in "streaming" mode, reading and
-highlighting one line at a time. This will only work properly with
-lexers that have no constructs spanning multiple lines!
-
-<lexer> is a lexer name (query all lexer names with -L). If -l is not
-given, the lexer is guessed from the extension of the input file name
-(this obviously doesn't work if the input is stdin). If -g is passed,
-attempt to guess the lexer from the file contents, or pass through as
-plain text if this fails (this can work for stdin).
-
-Likewise, <formatter> is a formatter name, and will be guessed from
-the extension of the output file name. If no output file is given,
-the terminal formatter will be used by default.
-
+
+ %s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
+ %s -L [<which> ...]
+ %s -N <filename>
+ %s -H <type> <name>
+ %s -h | -V
+
+Highlight the input file and write the result to <outfile>.
+
+If no input file is given, use stdin, if -o is not given, use stdout.
+
+If -s is passed, lexing will be done in "streaming" mode, reading and
+highlighting one line at a time. This will only work properly with
+lexers that have no constructs spanning multiple lines!
+
+<lexer> is a lexer name (query all lexer names with -L). If -l is not
+given, the lexer is guessed from the extension of the input file name
+(this obviously doesn't work if the input is stdin). If -g is passed,
+attempt to guess the lexer from the file contents, or pass through as
+plain text if this fails (this can work for stdin).
+
+Likewise, <formatter> is a formatter name, and will be guessed from
+the extension of the output file name. If no output file is given,
+the terminal formatter will be used by default.
+
The additional option -x allows custom lexers and formatters to be
loaded from a .py file relative to the current working directory. For
example, ``-l ./customlexer.py -x``. By default, this option expects a
@@ -67,274 +67,274 @@ specify your own class name with a colon (``-l ./lexer.py:MyLexer``).
Users should be very careful not to use this option with untrusted files,
because it will import and run them.
-With the -O option, you can give the lexer and formatter a comma-
-separated list of options, e.g. ``-O bg=light,python=cool``.
-
-The -P option adds lexer and formatter options like the -O option, but
-you can only give one option per -P. That way, the option value may
-contain commas and equals signs, which it can't with -O, e.g.
-``-P "heading=Pygments, the Python highlighter"``.
-
-With the -F option, you can add filters to the token stream, you can
-give options in the same way as for -O after a colon (note: there must
-not be spaces around the colon).
-
-The -O, -P and -F options can be given multiple times.
-
-With the -S option, print out style definitions for style <style>
-for formatter <formatter>. The argument given by -a is formatter
-dependent.
-
-The -L option lists lexers, formatters, styles or filters -- set
-`which` to the thing you want to list (e.g. "styles"), or omit it to
-list everything.
-
-The -N option guesses and prints out a lexer name based solely on
-the given filename. It does not take input or highlight anything.
-If no specific lexer can be determined "text" is returned.
-
-The -H option prints detailed help for the object <name> of type <type>,
-where <type> is one of "lexer", "formatter" or "filter".
-
-The -s option processes lines one at a time until EOF, rather than
-waiting to process the entire file. This only works for stdin, and
-is intended for streaming input such as you get from 'tail -f'.
-Example usage: "tail -f sql.log | pygmentize -s -l sql"
-
-The -v option prints a detailed traceback on unhandled exceptions,
-which is useful for debugging and bug reports.
-
-The -h option prints this help.
-The -V option prints the package version.
-"""
-
-
-def _parse_options(o_strs):
- opts = {}
- if not o_strs:
- return opts
- for o_str in o_strs:
- if not o_str.strip():
- continue
- o_args = o_str.split(',')
- for o_arg in o_args:
- o_arg = o_arg.strip()
- try:
- o_key, o_val = o_arg.split('=', 1)
- o_key = o_key.strip()
- o_val = o_val.strip()
- except ValueError:
- opts[o_arg] = True
- else:
- opts[o_key] = o_val
- return opts
-
-
-def _parse_filters(f_strs):
- filters = []
- if not f_strs:
- return filters
- for f_str in f_strs:
- if ':' in f_str:
- fname, fopts = f_str.split(':', 1)
- filters.append((fname, _parse_options([fopts])))
- else:
- filters.append((f_str, {}))
- return filters
-
-
-def _print_help(what, name):
- try:
- if what == 'lexer':
- cls = get_lexer_by_name(name)
- print("Help on the %s lexer:" % cls.name)
- print(dedent(cls.__doc__))
- elif what == 'formatter':
- cls = find_formatter_class(name)
- print("Help on the %s formatter:" % cls.name)
- print(dedent(cls.__doc__))
- elif what == 'filter':
- cls = find_filter_class(name)
- print("Help on the %s filter:" % name)
- print(dedent(cls.__doc__))
- return 0
- except (AttributeError, ValueError):
- print("%s not found!" % what, file=sys.stderr)
- return 1
-
-
-def _print_list(what):
- if what == 'lexer':
- print()
- print("Lexers:")
- print("~~~~~~~")
-
- info = []
- for fullname, names, exts, _ in get_all_lexers():
- tup = (', '.join(names)+':', fullname,
- exts and '(filenames ' + ', '.join(exts) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* %s\n %s %s') % i)
-
- elif what == 'formatter':
- print()
- print("Formatters:")
- print("~~~~~~~~~~~")
-
- info = []
- for cls in get_all_formatters():
- doc = docstring_headline(cls)
- tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
- '(filenames ' + ', '.join(cls.filenames) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* %s\n %s %s') % i)
-
- elif what == 'filter':
- print()
- print("Filters:")
- print("~~~~~~~~")
-
- for name in get_all_filters():
- cls = find_filter_class(name)
- print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
-
- elif what == 'style':
- print()
- print("Styles:")
- print("~~~~~~~")
-
- for name in get_all_styles():
- cls = get_style_by_name(name)
- print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
-
-
-def main_inner(popts, args, usage):
- opts = {}
- O_opts = []
- P_opts = []
- F_opts = []
- for opt, arg in popts:
- if opt == '-O':
- O_opts.append(arg)
- elif opt == '-P':
- P_opts.append(arg)
- elif opt == '-F':
- F_opts.append(arg)
- opts[opt] = arg
-
- if opts.pop('-h', None) is not None:
- print(usage)
- return 0
-
- if opts.pop('-V', None) is not None:
+With the -O option, you can give the lexer and formatter a comma-
+separated list of options, e.g. ``-O bg=light,python=cool``.
+
+The -P option adds lexer and formatter options like the -O option, but
+you can only give one option per -P. That way, the option value may
+contain commas and equals signs, which it can't with -O, e.g.
+``-P "heading=Pygments, the Python highlighter"``.
+
+With the -F option, you can add filters to the token stream, you can
+give options in the same way as for -O after a colon (note: there must
+not be spaces around the colon).
+
+The -O, -P and -F options can be given multiple times.
+
+With the -S option, print out style definitions for style <style>
+for formatter <formatter>. The argument given by -a is formatter
+dependent.
+
+The -L option lists lexers, formatters, styles or filters -- set
+`which` to the thing you want to list (e.g. "styles"), or omit it to
+list everything.
+
+The -N option guesses and prints out a lexer name based solely on
+the given filename. It does not take input or highlight anything.
+If no specific lexer can be determined "text" is returned.
+
+The -H option prints detailed help for the object <name> of type <type>,
+where <type> is one of "lexer", "formatter" or "filter".
+
+The -s option processes lines one at a time until EOF, rather than
+waiting to process the entire file. This only works for stdin, and
+is intended for streaming input such as you get from 'tail -f'.
+Example usage: "tail -f sql.log | pygmentize -s -l sql"
+
+The -v option prints a detailed traceback on unhandled exceptions,
+which is useful for debugging and bug reports.
+
+The -h option prints this help.
+The -V option prints the package version.
+"""
+
+
+def _parse_options(o_strs):
+ opts = {}
+ if not o_strs:
+ return opts
+ for o_str in o_strs:
+ if not o_str.strip():
+ continue
+ o_args = o_str.split(',')
+ for o_arg in o_args:
+ o_arg = o_arg.strip()
+ try:
+ o_key, o_val = o_arg.split('=', 1)
+ o_key = o_key.strip()
+ o_val = o_val.strip()
+ except ValueError:
+ opts[o_arg] = True
+ else:
+ opts[o_key] = o_val
+ return opts
+
+
+def _parse_filters(f_strs):
+ filters = []
+ if not f_strs:
+ return filters
+ for f_str in f_strs:
+ if ':' in f_str:
+ fname, fopts = f_str.split(':', 1)
+ filters.append((fname, _parse_options([fopts])))
+ else:
+ filters.append((f_str, {}))
+ return filters
+
+
+def _print_help(what, name):
+ try:
+ if what == 'lexer':
+ cls = get_lexer_by_name(name)
+ print("Help on the %s lexer:" % cls.name)
+ print(dedent(cls.__doc__))
+ elif what == 'formatter':
+ cls = find_formatter_class(name)
+ print("Help on the %s formatter:" % cls.name)
+ print(dedent(cls.__doc__))
+ elif what == 'filter':
+ cls = find_filter_class(name)
+ print("Help on the %s filter:" % name)
+ print(dedent(cls.__doc__))
+ return 0
+ except (AttributeError, ValueError):
+ print("%s not found!" % what, file=sys.stderr)
+ return 1
+
+
+def _print_list(what):
+ if what == 'lexer':
+ print()
+ print("Lexers:")
+ print("~~~~~~~")
+
+ info = []
+ for fullname, names, exts, _ in get_all_lexers():
+ tup = (', '.join(names)+':', fullname,
+ exts and '(filenames ' + ', '.join(exts) + ')' or '')
+ info.append(tup)
+ info.sort()
+ for i in info:
+ print(('* %s\n %s %s') % i)
+
+ elif what == 'formatter':
+ print()
+ print("Formatters:")
+ print("~~~~~~~~~~~")
+
+ info = []
+ for cls in get_all_formatters():
+ doc = docstring_headline(cls)
+ tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
+ '(filenames ' + ', '.join(cls.filenames) + ')' or '')
+ info.append(tup)
+ info.sort()
+ for i in info:
+ print(('* %s\n %s %s') % i)
+
+ elif what == 'filter':
+ print()
+ print("Filters:")
+ print("~~~~~~~~")
+
+ for name in get_all_filters():
+ cls = find_filter_class(name)
+ print("* " + name + ':')
+ print(" %s" % docstring_headline(cls))
+
+ elif what == 'style':
+ print()
+ print("Styles:")
+ print("~~~~~~~")
+
+ for name in get_all_styles():
+ cls = get_style_by_name(name)
+ print("* " + name + ':')
+ print(" %s" % docstring_headline(cls))
+
+
+def main_inner(popts, args, usage):
+ opts = {}
+ O_opts = []
+ P_opts = []
+ F_opts = []
+ for opt, arg in popts:
+ if opt == '-O':
+ O_opts.append(arg)
+ elif opt == '-P':
+ P_opts.append(arg)
+ elif opt == '-F':
+ F_opts.append(arg)
+ opts[opt] = arg
+
+ if opts.pop('-h', None) is not None:
+ print(usage)
+ return 0
+
+ if opts.pop('-V', None) is not None:
print('Pygments version %s, (c) 2006-2019 by Georg Brandl.' % __version__)
- return 0
-
- # handle ``pygmentize -L``
- L_opt = opts.pop('-L', None)
- if L_opt is not None:
- if opts:
- print(usage, file=sys.stderr)
- return 2
-
- # print version
- main(['', '-V'])
- if not args:
- args = ['lexer', 'formatter', 'filter', 'style']
- for arg in args:
- _print_list(arg.rstrip('s'))
- return 0
-
- # handle ``pygmentize -H``
- H_opt = opts.pop('-H', None)
- if H_opt is not None:
- if opts or len(args) != 2:
- print(usage, file=sys.stderr)
- return 2
-
- what, name = args # pylint: disable=unbalanced-tuple-unpacking
- if what not in ('lexer', 'formatter', 'filter'):
- print(usage, file=sys.stderr)
- return 2
-
- return _print_help(what, name)
-
- # parse -O options
- parsed_opts = _parse_options(O_opts)
- opts.pop('-O', None)
-
- # parse -P options
- for p_opt in P_opts:
- try:
- name, value = p_opt.split('=', 1)
- except ValueError:
- parsed_opts[p_opt] = True
- else:
- parsed_opts[name] = value
- opts.pop('-P', None)
-
- # encodings
- inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
- outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
-
- # handle ``pygmentize -N``
- infn = opts.pop('-N', None)
- if infn is not None:
- lexer = find_lexer_class_for_filename(infn)
- if lexer is None:
- lexer = TextLexer
-
- print(lexer.aliases[0])
- return 0
-
- # handle ``pygmentize -S``
- S_opt = opts.pop('-S', None)
- a_opt = opts.pop('-a', None)
- if S_opt is not None:
- f_opt = opts.pop('-f', None)
- if not f_opt:
- print(usage, file=sys.stderr)
- return 2
- if opts or args:
- print(usage, file=sys.stderr)
- return 2
-
- try:
- parsed_opts['style'] = S_opt
- fmter = get_formatter_by_name(f_opt, **parsed_opts)
- except ClassNotFound as err:
- print(err, file=sys.stderr)
- return 1
-
- print(fmter.get_style_defs(a_opt or ''))
- return 0
-
- # if no -S is given, -a is not allowed
- if a_opt is not None:
- print(usage, file=sys.stderr)
- return 2
-
- # parse -F options
- F_opts = _parse_filters(F_opts)
- opts.pop('-F', None)
-
+ return 0
+
+ # handle ``pygmentize -L``
+ L_opt = opts.pop('-L', None)
+ if L_opt is not None:
+ if opts:
+ print(usage, file=sys.stderr)
+ return 2
+
+ # print version
+ main(['', '-V'])
+ if not args:
+ args = ['lexer', 'formatter', 'filter', 'style']
+ for arg in args:
+ _print_list(arg.rstrip('s'))
+ return 0
+
+ # handle ``pygmentize -H``
+ H_opt = opts.pop('-H', None)
+ if H_opt is not None:
+ if opts or len(args) != 2:
+ print(usage, file=sys.stderr)
+ return 2
+
+ what, name = args # pylint: disable=unbalanced-tuple-unpacking
+ if what not in ('lexer', 'formatter', 'filter'):
+ print(usage, file=sys.stderr)
+ return 2
+
+ return _print_help(what, name)
+
+ # parse -O options
+ parsed_opts = _parse_options(O_opts)
+ opts.pop('-O', None)
+
+ # parse -P options
+ for p_opt in P_opts:
+ try:
+ name, value = p_opt.split('=', 1)
+ except ValueError:
+ parsed_opts[p_opt] = True
+ else:
+ parsed_opts[name] = value
+ opts.pop('-P', None)
+
+ # encodings
+ inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
+ outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
+
+ # handle ``pygmentize -N``
+ infn = opts.pop('-N', None)
+ if infn is not None:
+ lexer = find_lexer_class_for_filename(infn)
+ if lexer is None:
+ lexer = TextLexer
+
+ print(lexer.aliases[0])
+ return 0
+
+ # handle ``pygmentize -S``
+ S_opt = opts.pop('-S', None)
+ a_opt = opts.pop('-a', None)
+ if S_opt is not None:
+ f_opt = opts.pop('-f', None)
+ if not f_opt:
+ print(usage, file=sys.stderr)
+ return 2
+ if opts or args:
+ print(usage, file=sys.stderr)
+ return 2
+
+ try:
+ parsed_opts['style'] = S_opt
+ fmter = get_formatter_by_name(f_opt, **parsed_opts)
+ except ClassNotFound as err:
+ print(err, file=sys.stderr)
+ return 1
+
+ print(fmter.get_style_defs(a_opt or ''))
+ return 0
+
+ # if no -S is given, -a is not allowed
+ if a_opt is not None:
+ print(usage, file=sys.stderr)
+ return 2
+
+ # parse -F options
+ F_opts = _parse_filters(F_opts)
+ opts.pop('-F', None)
+
allow_custom_lexer_formatter = False
# -x: allow custom (eXternal) lexers and formatters
if opts.pop('-x', None) is not None:
allow_custom_lexer_formatter = True
- # select lexer
- lexer = None
-
- # given by name?
- lexername = opts.pop('-l', None)
- if lexername:
+ # select lexer
+ lexer = None
+
+ # given by name?
+ lexername = opts.pop('-l', None)
+ if lexername:
# custom lexer, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in lexername:
try:
@@ -353,82 +353,82 @@ def main_inner(popts, args, usage):
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1
-
- # read input code
- code = None
-
- if args:
- if len(args) > 1:
- print(usage, file=sys.stderr)
- return 2
-
- if '-s' in opts:
- print('Error: -s option not usable when input file specified',
- file=sys.stderr)
- return 2
-
- infn = args[0]
- try:
- with open(infn, 'rb') as infp:
- code = infp.read()
- except Exception as err:
- print('Error: cannot read infile:', err, file=sys.stderr)
- return 1
- if not inencoding:
- code, inencoding = guess_decode(code)
-
- # do we have to guess the lexer?
- if not lexer:
- try:
- lexer = get_lexer_for_filename(infn, code, **parsed_opts)
- except ClassNotFound as err:
- if '-g' in opts:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
- else:
- print('Error:', err, file=sys.stderr)
- return 1
- except OptionError as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- elif '-s' not in opts: # treat stdin as full file (-s support is later)
- # read code from terminal, always in binary mode since we want to
- # decode ourselves and be tolerant with it
- if sys.version_info > (3,):
- # Python 3: we have to use .buffer to get a binary stream
- code = sys.stdin.buffer.read()
- else:
- code = sys.stdin.read()
- if not inencoding:
- code, inencoding = guess_decode_from_terminal(code, sys.stdin)
- # else the lexer will do the decoding
- if not lexer:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
-
- else: # -s option needs a lexer with -l
- if not lexer:
- print('Error: when using -s a lexer has to be selected with -l',
- file=sys.stderr)
- return 2
-
- # process filters
- for fname, fopts in F_opts:
- try:
- lexer.add_filter(fname, **fopts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- # select formatter
- outfn = opts.pop('-o', None)
- fmter = opts.pop('-f', None)
- if fmter:
+
+ # read input code
+ code = None
+
+ if args:
+ if len(args) > 1:
+ print(usage, file=sys.stderr)
+ return 2
+
+ if '-s' in opts:
+ print('Error: -s option not usable when input file specified',
+ file=sys.stderr)
+ return 2
+
+ infn = args[0]
+ try:
+ with open(infn, 'rb') as infp:
+ code = infp.read()
+ except Exception as err:
+ print('Error: cannot read infile:', err, file=sys.stderr)
+ return 1
+ if not inencoding:
+ code, inencoding = guess_decode(code)
+
+ # do we have to guess the lexer?
+ if not lexer:
+ try:
+ lexer = get_lexer_for_filename(infn, code, **parsed_opts)
+ except ClassNotFound as err:
+ if '-g' in opts:
+ try:
+ lexer = guess_lexer(code, **parsed_opts)
+ except ClassNotFound:
+ lexer = TextLexer(**parsed_opts)
+ else:
+ print('Error:', err, file=sys.stderr)
+ return 1
+ except OptionError as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+
+ elif '-s' not in opts: # treat stdin as full file (-s support is later)
+ # read code from terminal, always in binary mode since we want to
+ # decode ourselves and be tolerant with it
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ code = sys.stdin.buffer.read()
+ else:
+ code = sys.stdin.read()
+ if not inencoding:
+ code, inencoding = guess_decode_from_terminal(code, sys.stdin)
+ # else the lexer will do the decoding
+ if not lexer:
+ try:
+ lexer = guess_lexer(code, **parsed_opts)
+ except ClassNotFound:
+ lexer = TextLexer(**parsed_opts)
+
+ else: # -s option needs a lexer with -l
+ if not lexer:
+ print('Error: when using -s a lexer has to be selected with -l',
+ file=sys.stderr)
+ return 2
+
+ # process filters
+ for fname, fopts in F_opts:
+ try:
+ lexer.add_filter(fname, **fopts)
+ except ClassNotFound as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+
+ # select formatter
+ outfn = opts.pop('-o', None)
+ fmter = opts.pop('-f', None)
+ if fmter:
# custom formatter, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in fmter:
try:
@@ -447,127 +447,127 @@ def main_inner(popts, args, usage):
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1
-
- if outfn:
- if not fmter:
- try:
- fmter = get_formatter_for_filename(outfn, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
- try:
- outfile = open(outfn, 'wb')
- except Exception as err:
- print('Error: cannot open outfile:', err, file=sys.stderr)
- return 1
- else:
- if not fmter:
+
+ if outfn:
+ if not fmter:
+ try:
+ fmter = get_formatter_for_filename(outfn, **parsed_opts)
+ except (OptionError, ClassNotFound) as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+ try:
+ outfile = open(outfn, 'wb')
+ except Exception as err:
+ print('Error: cannot open outfile:', err, file=sys.stderr)
+ return 1
+ else:
+ if not fmter:
if '256' in os.environ.get('TERM', ''):
fmter = Terminal256Formatter(**parsed_opts)
else:
fmter = TerminalFormatter(**parsed_opts)
- if sys.version_info > (3,):
- # Python 3: we have to use .buffer to get a binary stream
- outfile = sys.stdout.buffer
- else:
- outfile = sys.stdout
-
- # determine output encoding if not explicitly selected
- if not outencoding:
- if outfn:
- # output file? use lexer encoding for now (can still be None)
- fmter.encoding = inencoding
- else:
- # else use terminal encoding
- fmter.encoding = terminal_encoding(sys.stdout)
-
- # provide coloring under Windows, if possible
- if not outfn and sys.platform in ('win32', 'cygwin') and \
- fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
- # unfortunately colorama doesn't support binary streams on Py3
- if sys.version_info > (3,):
- from pygments.util import UnclosingTextIOWrapper
- outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
- fmter.encoding = None
- try:
- import colorama.initialise
- except ImportError:
- pass
- else:
- outfile = colorama.initialise.wrap_stream(
- outfile, convert=None, strip=None, autoreset=False, wrap=True)
-
- # When using the LaTeX formatter and the option `escapeinside` is
- # specified, we need a special lexer which collects escaped text
- # before running the chosen language lexer.
- escapeinside = parsed_opts.get('escapeinside', '')
- if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
- left = escapeinside[0]
- right = escapeinside[1]
- lexer = LatexEmbeddedLexer(left, right, lexer)
-
- # ... and do it!
- if '-s' not in opts:
- # process whole input as per normal...
- highlight(code, lexer, fmter, outfile)
- return 0
- else:
- # line by line processing of stdin (eg: for 'tail -f')...
- try:
- while 1:
- if sys.version_info > (3,):
- # Python 3: we have to use .buffer to get a binary stream
- line = sys.stdin.buffer.readline()
- else:
- line = sys.stdin.readline()
- if not line:
- break
- if not inencoding:
- line = guess_decode_from_terminal(line, sys.stdin)[0]
- highlight(line, lexer, fmter, outfile)
- if hasattr(outfile, 'flush'):
- outfile.flush()
- return 0
- except KeyboardInterrupt: # pragma: no cover
- return 0
-
-
-def main(args=sys.argv):
- """
- Main command line entry point.
- """
- usage = USAGE % ((args[0],) * 6)
-
- try:
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ outfile = sys.stdout.buffer
+ else:
+ outfile = sys.stdout
+
+ # determine output encoding if not explicitly selected
+ if not outencoding:
+ if outfn:
+ # output file? use lexer encoding for now (can still be None)
+ fmter.encoding = inencoding
+ else:
+ # else use terminal encoding
+ fmter.encoding = terminal_encoding(sys.stdout)
+
+ # provide coloring under Windows, if possible
+ if not outfn and sys.platform in ('win32', 'cygwin') and \
+ fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
+ # unfortunately colorama doesn't support binary streams on Py3
+ if sys.version_info > (3,):
+ from pygments.util import UnclosingTextIOWrapper
+ outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
+ fmter.encoding = None
+ try:
+ import colorama.initialise
+ except ImportError:
+ pass
+ else:
+ outfile = colorama.initialise.wrap_stream(
+ outfile, convert=None, strip=None, autoreset=False, wrap=True)
+
+ # When using the LaTeX formatter and the option `escapeinside` is
+ # specified, we need a special lexer which collects escaped text
+ # before running the chosen language lexer.
+ escapeinside = parsed_opts.get('escapeinside', '')
+ if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
+ left = escapeinside[0]
+ right = escapeinside[1]
+ lexer = LatexEmbeddedLexer(left, right, lexer)
+
+ # ... and do it!
+ if '-s' not in opts:
+ # process whole input as per normal...
+ highlight(code, lexer, fmter, outfile)
+ return 0
+ else:
+ # line by line processing of stdin (eg: for 'tail -f')...
+ try:
+ while 1:
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ line = sys.stdin.buffer.readline()
+ else:
+ line = sys.stdin.readline()
+ if not line:
+ break
+ if not inencoding:
+ line = guess_decode_from_terminal(line, sys.stdin)[0]
+ highlight(line, lexer, fmter, outfile)
+ if hasattr(outfile, 'flush'):
+ outfile.flush()
+ return 0
+ except KeyboardInterrupt: # pragma: no cover
+ return 0
+
+
+def main(args=sys.argv):
+ """
+ Main command line entry point.
+ """
+ usage = USAGE % ((args[0],) * 6)
+
+ try:
popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:vhVHgsx")
- except getopt.GetoptError:
- print(usage, file=sys.stderr)
- return 2
-
- try:
- return main_inner(popts, args, usage)
- except Exception:
- if '-v' in dict(popts):
- print(file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print('An unhandled exception occurred while highlighting.',
- file=sys.stderr)
- print('Please report the whole traceback to the issue tracker at',
- file=sys.stderr)
+ except getopt.GetoptError:
+ print(usage, file=sys.stderr)
+ return 2
+
+ try:
+ return main_inner(popts, args, usage)
+ except Exception:
+ if '-v' in dict(popts):
+ print(file=sys.stderr)
+ print('*' * 65, file=sys.stderr)
+ print('An unhandled exception occurred while highlighting.',
+ file=sys.stderr)
+ print('Please report the whole traceback to the issue tracker at',
+ file=sys.stderr)
print('<https://github.com/pygments/pygments/issues>.',
- file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print(file=sys.stderr)
- raise
- import traceback
- info = traceback.format_exception(*sys.exc_info())
- msg = info[-1].strip()
- if len(info) >= 3:
- # extract relevant file and position info
- msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
- print(file=sys.stderr)
- print('*** Error while highlighting:', file=sys.stderr)
- print(msg, file=sys.stderr)
- print('*** If this is a bug you want to report, please rerun with -v.',
- file=sys.stderr)
- return 1
+ file=sys.stderr)
+ print('*' * 65, file=sys.stderr)
+ print(file=sys.stderr)
+ raise
+ import traceback
+ info = traceback.format_exception(*sys.exc_info())
+ msg = info[-1].strip()
+ if len(info) >= 3:
+ # extract relevant file and position info
+ msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
+ print(file=sys.stderr)
+ print('*** Error while highlighting:', file=sys.stderr)
+ print(msg, file=sys.stderr)
+ print('*** If this is a bug you want to report, please rerun with -v.',
+ file=sys.stderr)
+ return 1
diff --git a/contrib/python/Pygments/py2/pygments/console.py b/contrib/python/Pygments/py2/pygments/console.py
index a05b256e92..d46290eae2 100644
--- a/contrib/python/Pygments/py2/pygments/console.py
+++ b/contrib/python/Pygments/py2/pygments/console.py
@@ -1,71 +1,71 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.console
- ~~~~~~~~~~~~~~~~
-
- Format colored console output.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.console
+ ~~~~~~~~~~~~~~~~
+
+ Format colored console output.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-esc = "\x1b["
-
-codes = {}
+ :license: BSD, see LICENSE for details.
+"""
+
+esc = "\x1b["
+
+codes = {}
codes[""] = ""
codes["reset"] = esc + "39;49;00m"
-
+
codes["bold"] = esc + "01m"
codes["faint"] = esc + "02m"
codes["standout"] = esc + "03m"
-codes["underline"] = esc + "04m"
+codes["underline"] = esc + "04m"
codes["blink"] = esc + "05m"
codes["overline"] = esc + "06m"
-
+
dark_colors = ["black", "red", "green", "yellow", "blue",
"magenta", "cyan", "gray"]
light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
"brightmagenta", "brightcyan", "white"]
-
-x = 30
-for d, l in zip(dark_colors, light_colors):
- codes[d] = esc + "%im" % x
+
+x = 30
+for d, l in zip(dark_colors, light_colors):
+ codes[d] = esc + "%im" % x
codes[l] = esc + "%im" % (60 + x)
- x += 1
-
-del d, l, x
-
+ x += 1
+
+del d, l, x
+
codes["white"] = codes["bold"]
-
-
-def reset_color():
- return codes["reset"]
-
-
-def colorize(color_key, text):
- return codes[color_key] + text + codes["reset"]
-
-
-def ansiformat(attr, text):
- """
- Format ``text`` with a color and/or some attributes::
-
- color normal color
- *color* bold color
- _color_ underlined color
- +color+ blinking color
- """
- result = []
- if attr[:1] == attr[-1:] == '+':
- result.append(codes['blink'])
- attr = attr[1:-1]
- if attr[:1] == attr[-1:] == '*':
- result.append(codes['bold'])
- attr = attr[1:-1]
- if attr[:1] == attr[-1:] == '_':
- result.append(codes['underline'])
- attr = attr[1:-1]
- result.append(codes[attr])
- result.append(text)
- result.append(codes['reset'])
- return ''.join(result)
+
+
+def reset_color():
+ return codes["reset"]
+
+
+def colorize(color_key, text):
+ return codes[color_key] + text + codes["reset"]
+
+
+def ansiformat(attr, text):
+ """
+ Format ``text`` with a color and/or some attributes::
+
+ color normal color
+ *color* bold color
+ _color_ underlined color
+ +color+ blinking color
+ """
+ result = []
+ if attr[:1] == attr[-1:] == '+':
+ result.append(codes['blink'])
+ attr = attr[1:-1]
+ if attr[:1] == attr[-1:] == '*':
+ result.append(codes['bold'])
+ attr = attr[1:-1]
+ if attr[:1] == attr[-1:] == '_':
+ result.append(codes['underline'])
+ attr = attr[1:-1]
+ result.append(codes[attr])
+ result.append(text)
+ result.append(codes['reset'])
+ return ''.join(result)
diff --git a/contrib/python/Pygments/py2/pygments/filter.py b/contrib/python/Pygments/py2/pygments/filter.py
index 7f81920bc9..60327e8a28 100644
--- a/contrib/python/Pygments/py2/pygments/filter.py
+++ b/contrib/python/Pygments/py2/pygments/filter.py
@@ -1,74 +1,74 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.filter
- ~~~~~~~~~~~~~~~
-
- Module that implements the default filter.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.filter
+ ~~~~~~~~~~~~~~~
+
+ Module that implements the default filter.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-def apply_filters(stream, filters, lexer=None):
- """
- Use this method to apply an iterable of filters to
- a stream. If lexer is given it's forwarded to the
- filter, otherwise the filter receives `None`.
- """
- def _apply(filter_, stream):
- for token in filter_.filter(lexer, stream):
- yield token
- for filter_ in filters:
- stream = _apply(filter_, stream)
- return stream
-
-
-def simplefilter(f):
- """
- Decorator that converts a function into a filter::
-
- @simplefilter
- def lowercase(self, lexer, stream, options):
- for ttype, value in stream:
- yield ttype, value.lower()
- """
- return type(f.__name__, (FunctionFilter,), {
+ :license: BSD, see LICENSE for details.
+"""
+
+
+def apply_filters(stream, filters, lexer=None):
+ """
+ Use this method to apply an iterable of filters to
+ a stream. If lexer is given it's forwarded to the
+ filter, otherwise the filter receives `None`.
+ """
+ def _apply(filter_, stream):
+ for token in filter_.filter(lexer, stream):
+ yield token
+ for filter_ in filters:
+ stream = _apply(filter_, stream)
+ return stream
+
+
+def simplefilter(f):
+ """
+ Decorator that converts a function into a filter::
+
+ @simplefilter
+ def lowercase(self, lexer, stream, options):
+ for ttype, value in stream:
+ yield ttype, value.lower()
+ """
+ return type(f.__name__, (FunctionFilter,), {
'__module__': getattr(f, '__module__'),
'__doc__': f.__doc__,
'function': f,
})
-
-
-class Filter(object):
- """
- Default filter. Subclass this class or use the `simplefilter`
- decorator to create own filters.
- """
-
- def __init__(self, **options):
- self.options = options
-
- def filter(self, lexer, stream):
- raise NotImplementedError()
-
-
-class FunctionFilter(Filter):
- """
- Abstract class used by `simplefilter` to create simple
- function filters on the fly. The `simplefilter` decorator
- automatically creates subclasses of this class for
- functions passed to it.
- """
- function = None
-
- def __init__(self, **options):
- if not hasattr(self, 'function'):
- raise TypeError('%r used without bound function' %
- self.__class__.__name__)
- Filter.__init__(self, **options)
-
- def filter(self, lexer, stream):
- # pylint: disable=not-callable
- for ttype, value in self.function(lexer, stream, self.options):
- yield ttype, value
+
+
+class Filter(object):
+ """
+ Default filter. Subclass this class or use the `simplefilter`
+ decorator to create own filters.
+ """
+
+ def __init__(self, **options):
+ self.options = options
+
+ def filter(self, lexer, stream):
+ raise NotImplementedError()
+
+
+class FunctionFilter(Filter):
+ """
+ Abstract class used by `simplefilter` to create simple
+ function filters on the fly. The `simplefilter` decorator
+ automatically creates subclasses of this class for
+ functions passed to it.
+ """
+ function = None
+
+ def __init__(self, **options):
+ if not hasattr(self, 'function'):
+ raise TypeError('%r used without bound function' %
+ self.__class__.__name__)
+ Filter.__init__(self, **options)
+
+ def filter(self, lexer, stream):
+ # pylint: disable=not-callable
+ for ttype, value in self.function(lexer, stream, self.options):
+ yield ttype, value
diff --git a/contrib/python/Pygments/py2/pygments/filters/__init__.py b/contrib/python/Pygments/py2/pygments/filters/__init__.py
index 3fe6caa7d2..0787a0249d 100644
--- a/contrib/python/Pygments/py2/pygments/filters/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/filters/__init__.py
@@ -1,350 +1,350 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.filters
- ~~~~~~~~~~~~~~~~
-
- Module containing filter lookup functions and default
- filters.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.filters
+ ~~~~~~~~~~~~~~~~
+
+ Module containing filter lookup functions and default
+ filters.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
- string_to_tokentype
-from pygments.filter import Filter
-from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
- get_choice_opt, ClassNotFound, OptionError, text_type, string_types
-from pygments.plugin import find_plugin_filters
-
-
-def find_filter_class(filtername):
- """Lookup a filter by name. Return None if not found."""
- if filtername in FILTERS:
- return FILTERS[filtername]
- for name, cls in find_plugin_filters():
- if name == filtername:
- return cls
- return None
-
-
-def get_filter_by_name(filtername, **options):
- """Return an instantiated filter.
-
- Options are passed to the filter initializer if wanted.
- Raise a ClassNotFound if not found.
- """
- cls = find_filter_class(filtername)
- if cls:
- return cls(**options)
- else:
- raise ClassNotFound('filter %r not found' % filtername)
-
-
-def get_all_filters():
- """Return a generator of all filter names."""
- for name in FILTERS:
- yield name
- for name, _ in find_plugin_filters():
- yield name
-
-
-def _replace_special(ttype, value, regex, specialttype,
- replacefunc=lambda x: x):
- last = 0
- for match in regex.finditer(value):
- start, end = match.start(), match.end()
- if start != last:
- yield ttype, value[last:start]
- yield specialttype, replacefunc(value[start:end])
- last = end
- if last != len(value):
- yield ttype, value[last:]
-
-
-class CodeTagFilter(Filter):
- """Highlight special code tags in comments and docstrings.
-
- Options accepted:
-
- `codetags` : list of strings
- A list of strings that are flagged as code tags. The default is to
- highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- tags = get_list_opt(options, 'codetags',
- ['XXX', 'TODO', 'BUG', 'NOTE'])
- self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
- re.escape(tag) for tag in tags if tag
- ]))
-
- def filter(self, lexer, stream):
- regex = self.tag_re
- for ttype, value in stream:
- if ttype in String.Doc or \
- ttype in Comment and \
- ttype not in Comment.Preproc:
- for sttype, svalue in _replace_special(ttype, value, regex,
- Comment.Special):
- yield sttype, svalue
- else:
- yield ttype, value
-
-
-class KeywordCaseFilter(Filter):
- """Convert keywords to lowercase or uppercase or capitalize them, which
- means first letter uppercase, rest lowercase.
-
- This can be useful e.g. if you highlight Pascal code and want to adapt the
- code to your styleguide.
-
- Options accepted:
-
- `case` : string
- The casing to convert keywords to. Must be one of ``'lower'``,
- ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- case = get_choice_opt(options, 'case',
- ['lower', 'upper', 'capitalize'], 'lower')
- self.convert = getattr(text_type, case)
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype in Keyword:
- yield ttype, self.convert(value)
- else:
- yield ttype, value
-
-
-class NameHighlightFilter(Filter):
- """Highlight a normal Name (and Name.*) token with a different token type.
-
- Example::
-
- filter = NameHighlightFilter(
- names=['foo', 'bar', 'baz'],
- tokentype=Name.Function,
- )
-
- This would highlight the names "foo", "bar" and "baz"
- as functions. `Name.Function` is the default token type.
-
- Options accepted:
-
- `names` : list of strings
- A list of names that should be given the different token type.
- There is no default.
- `tokentype` : TokenType or string
- A token type or a string containing a token type name that is
- used for highlighting the strings in `names`. The default is
- `Name.Function`.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.names = set(get_list_opt(options, 'names', []))
- tokentype = options.get('tokentype')
- if tokentype:
- self.tokentype = string_to_tokentype(tokentype)
- else:
- self.tokentype = Name.Function
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype in Name and value in self.names:
- yield self.tokentype, value
- else:
- yield ttype, value
-
-
-class ErrorToken(Exception):
- pass
-
-
-class RaiseOnErrorTokenFilter(Filter):
- """Raise an exception when the lexer generates an error token.
-
- Options accepted:
-
- `excclass` : Exception class
- The exception class to raise.
- The default is `pygments.filters.ErrorToken`.
-
- .. versionadded:: 0.8
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.exception = options.get('excclass', ErrorToken)
- try:
- # issubclass() will raise TypeError if first argument is not a class
- if not issubclass(self.exception, Exception):
- raise TypeError
- except TypeError:
- raise OptionError('excclass option is not an exception class')
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype is Error:
- raise self.exception(value)
- yield ttype, value
-
-
-class VisibleWhitespaceFilter(Filter):
- """Convert tabs, newlines and/or spaces to visible characters.
-
- Options accepted:
-
- `spaces` : string or bool
- If this is a one-character string, spaces will be replaces by this string.
- If it is another true value, spaces will be replaced by ``·`` (unicode
- MIDDLE DOT). If it is a false value, spaces will not be replaced. The
- default is ``False``.
- `tabs` : string or bool
- The same as for `spaces`, but the default replacement character is ``»``
- (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
- is ``False``. Note: this will not work if the `tabsize` option for the
- lexer is nonzero, as tabs will already have been expanded then.
- `tabsize` : int
- If tabs are to be replaced by this filter (see the `tabs` option), this
- is the total number of characters that a tab should be expanded to.
- The default is ``8``.
- `newlines` : string or bool
- The same as for `spaces`, but the default replacement character is ``¶``
- (unicode PILCROW SIGN). The default value is ``False``.
- `wstokentype` : bool
- If true, give whitespace the special `Whitespace` token type. This allows
- styling the visible whitespace differently (e.g. greyed out), but it can
- disrupt background colors. The default is ``True``.
-
- .. versionadded:: 0.8
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- for name, default in [('spaces', u'·'),
- ('tabs', u'»'),
- ('newlines', u'¶')]:
- opt = options.get(name, False)
- if isinstance(opt, string_types) and len(opt) == 1:
- setattr(self, name, opt)
- else:
- setattr(self, name, (opt and default or ''))
- tabsize = get_int_opt(options, 'tabsize', 8)
- if self.tabs:
- self.tabs += ' ' * (tabsize - 1)
- if self.newlines:
- self.newlines += '\n'
- self.wstt = get_bool_opt(options, 'wstokentype', True)
-
- def filter(self, lexer, stream):
- if self.wstt:
- spaces = self.spaces or u' '
- tabs = self.tabs or u'\t'
- newlines = self.newlines or u'\n'
- regex = re.compile(r'\s')
- def replacefunc(wschar):
- if wschar == ' ':
- return spaces
- elif wschar == '\t':
- return tabs
- elif wschar == '\n':
- return newlines
- return wschar
-
- for ttype, value in stream:
- for sttype, svalue in _replace_special(ttype, value, regex,
- Whitespace, replacefunc):
- yield sttype, svalue
- else:
- spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
- # simpler processing
- for ttype, value in stream:
- if spaces:
- value = value.replace(' ', spaces)
- if tabs:
- value = value.replace('\t', tabs)
- if newlines:
- value = value.replace('\n', newlines)
- yield ttype, value
-
-
-class GobbleFilter(Filter):
- """Gobbles source code lines (eats initial characters).
-
- This filter drops the first ``n`` characters off every line of code. This
- may be useful when the source code fed to the lexer is indented by a fixed
- amount of space that isn't desired in the output.
-
- Options accepted:
-
- `n` : int
- The number of characters to gobble.
-
- .. versionadded:: 1.2
- """
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.n = get_int_opt(options, 'n', 0)
-
- def gobble(self, value, left):
- if left < len(value):
- return value[left:], 0
- else:
- return u'', left - len(value)
-
- def filter(self, lexer, stream):
- n = self.n
- left = n # How many characters left to gobble.
- for ttype, value in stream:
- # Remove ``left`` tokens from first line, ``n`` from all others.
- parts = value.split('\n')
- (parts[0], left) = self.gobble(parts[0], left)
- for i in range(1, len(parts)):
- (parts[i], left) = self.gobble(parts[i], n)
- value = u'\n'.join(parts)
-
- if value != '':
- yield ttype, value
-
-
-class TokenMergeFilter(Filter):
- """Merges consecutive tokens with the same token type in the output
- stream of a lexer.
-
- .. versionadded:: 1.2
- """
- def __init__(self, **options):
- Filter.__init__(self, **options)
-
- def filter(self, lexer, stream):
- current_type = None
- current_value = None
- for ttype, value in stream:
- if ttype is current_type:
- current_value += value
- else:
- if current_type is not None:
- yield current_type, current_value
- current_type = ttype
- current_value = value
- if current_type is not None:
- yield current_type, current_value
-
-
-FILTERS = {
- 'codetagify': CodeTagFilter,
- 'keywordcase': KeywordCaseFilter,
- 'highlight': NameHighlightFilter,
- 'raiseonerror': RaiseOnErrorTokenFilter,
- 'whitespace': VisibleWhitespaceFilter,
- 'gobble': GobbleFilter,
- 'tokenmerge': TokenMergeFilter,
-}
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
+ string_to_tokentype
+from pygments.filter import Filter
+from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
+ get_choice_opt, ClassNotFound, OptionError, text_type, string_types
+from pygments.plugin import find_plugin_filters
+
+
+def find_filter_class(filtername):
+ """Lookup a filter by name. Return None if not found."""
+ if filtername in FILTERS:
+ return FILTERS[filtername]
+ for name, cls in find_plugin_filters():
+ if name == filtername:
+ return cls
+ return None
+
+
+def get_filter_by_name(filtername, **options):
+ """Return an instantiated filter.
+
+    Options are passed to the filter initializer if given.
+ Raise a ClassNotFound if not found.
+ """
+ cls = find_filter_class(filtername)
+ if cls:
+ return cls(**options)
+ else:
+ raise ClassNotFound('filter %r not found' % filtername)
+
+
+def get_all_filters():
+ """Return a generator of all filter names."""
+ for name in FILTERS:
+ yield name
+ for name, _ in find_plugin_filters():
+ yield name
+
+
+def _replace_special(ttype, value, regex, specialttype,
+ replacefunc=lambda x: x):
+ last = 0
+ for match in regex.finditer(value):
+ start, end = match.start(), match.end()
+ if start != last:
+ yield ttype, value[last:start]
+ yield specialttype, replacefunc(value[start:end])
+ last = end
+ if last != len(value):
+ yield ttype, value[last:]
+
+
+class CodeTagFilter(Filter):
+ """Highlight special code tags in comments and docstrings.
+
+ Options accepted:
+
+ `codetags` : list of strings
+ A list of strings that are flagged as code tags. The default is to
+ highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ tags = get_list_opt(options, 'codetags',
+ ['XXX', 'TODO', 'BUG', 'NOTE'])
+ self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
+ re.escape(tag) for tag in tags if tag
+ ]))
+
+ def filter(self, lexer, stream):
+ regex = self.tag_re
+ for ttype, value in stream:
+ if ttype in String.Doc or \
+ ttype in Comment and \
+ ttype not in Comment.Preproc:
+ for sttype, svalue in _replace_special(ttype, value, regex,
+ Comment.Special):
+ yield sttype, svalue
+ else:
+ yield ttype, value
+
+
+class KeywordCaseFilter(Filter):
+ """Convert keywords to lowercase or uppercase or capitalize them, which
+ means first letter uppercase, rest lowercase.
+
+ This can be useful e.g. if you highlight Pascal code and want to adapt the
+    code to your style guide.
+
+ Options accepted:
+
+ `case` : string
+ The casing to convert keywords to. Must be one of ``'lower'``,
+ ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ case = get_choice_opt(options, 'case',
+ ['lower', 'upper', 'capitalize'], 'lower')
+ self.convert = getattr(text_type, case)
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if ttype in Keyword:
+ yield ttype, self.convert(value)
+ else:
+ yield ttype, value
+
+
+class NameHighlightFilter(Filter):
+ """Highlight a normal Name (and Name.*) token with a different token type.
+
+ Example::
+
+ filter = NameHighlightFilter(
+ names=['foo', 'bar', 'baz'],
+ tokentype=Name.Function,
+ )
+
+ This would highlight the names "foo", "bar" and "baz"
+ as functions. `Name.Function` is the default token type.
+
+ Options accepted:
+
+ `names` : list of strings
+ A list of names that should be given the different token type.
+ There is no default.
+ `tokentype` : TokenType or string
+ A token type or a string containing a token type name that is
+ used for highlighting the strings in `names`. The default is
+ `Name.Function`.
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ self.names = set(get_list_opt(options, 'names', []))
+ tokentype = options.get('tokentype')
+ if tokentype:
+ self.tokentype = string_to_tokentype(tokentype)
+ else:
+ self.tokentype = Name.Function
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if ttype in Name and value in self.names:
+ yield self.tokentype, value
+ else:
+ yield ttype, value
+
+
+class ErrorToken(Exception):
+ pass
+
+
+class RaiseOnErrorTokenFilter(Filter):
+ """Raise an exception when the lexer generates an error token.
+
+ Options accepted:
+
+ `excclass` : Exception class
+ The exception class to raise.
+ The default is `pygments.filters.ErrorToken`.
+
+ .. versionadded:: 0.8
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ self.exception = options.get('excclass', ErrorToken)
+ try:
+ # issubclass() will raise TypeError if first argument is not a class
+ if not issubclass(self.exception, Exception):
+ raise TypeError
+ except TypeError:
+ raise OptionError('excclass option is not an exception class')
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if ttype is Error:
+ raise self.exception(value)
+ yield ttype, value
+
+
+class VisibleWhitespaceFilter(Filter):
+ """Convert tabs, newlines and/or spaces to visible characters.
+
+ Options accepted:
+
+ `spaces` : string or bool
+        If this is a one-character string, spaces will be replaced by this string.
+ If it is another true value, spaces will be replaced by ``·`` (unicode
+ MIDDLE DOT). If it is a false value, spaces will not be replaced. The
+ default is ``False``.
+ `tabs` : string or bool
+ The same as for `spaces`, but the default replacement character is ``»``
+ (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
+ is ``False``. Note: this will not work if the `tabsize` option for the
+ lexer is nonzero, as tabs will already have been expanded then.
+ `tabsize` : int
+ If tabs are to be replaced by this filter (see the `tabs` option), this
+ is the total number of characters that a tab should be expanded to.
+ The default is ``8``.
+ `newlines` : string or bool
+ The same as for `spaces`, but the default replacement character is ``¶``
+ (unicode PILCROW SIGN). The default value is ``False``.
+ `wstokentype` : bool
+ If true, give whitespace the special `Whitespace` token type. This allows
+ styling the visible whitespace differently (e.g. greyed out), but it can
+ disrupt background colors. The default is ``True``.
+
+ .. versionadded:: 0.8
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ for name, default in [('spaces', u'·'),
+ ('tabs', u'»'),
+ ('newlines', u'¶')]:
+ opt = options.get(name, False)
+ if isinstance(opt, string_types) and len(opt) == 1:
+ setattr(self, name, opt)
+ else:
+ setattr(self, name, (opt and default or ''))
+ tabsize = get_int_opt(options, 'tabsize', 8)
+ if self.tabs:
+ self.tabs += ' ' * (tabsize - 1)
+ if self.newlines:
+ self.newlines += '\n'
+ self.wstt = get_bool_opt(options, 'wstokentype', True)
+
+ def filter(self, lexer, stream):
+ if self.wstt:
+ spaces = self.spaces or u' '
+ tabs = self.tabs or u'\t'
+ newlines = self.newlines or u'\n'
+ regex = re.compile(r'\s')
+ def replacefunc(wschar):
+ if wschar == ' ':
+ return spaces
+ elif wschar == '\t':
+ return tabs
+ elif wschar == '\n':
+ return newlines
+ return wschar
+
+ for ttype, value in stream:
+ for sttype, svalue in _replace_special(ttype, value, regex,
+ Whitespace, replacefunc):
+ yield sttype, svalue
+ else:
+ spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
+ # simpler processing
+ for ttype, value in stream:
+ if spaces:
+ value = value.replace(' ', spaces)
+ if tabs:
+ value = value.replace('\t', tabs)
+ if newlines:
+ value = value.replace('\n', newlines)
+ yield ttype, value
+
+
+class GobbleFilter(Filter):
+ """Gobbles source code lines (eats initial characters).
+
+ This filter drops the first ``n`` characters off every line of code. This
+ may be useful when the source code fed to the lexer is indented by a fixed
+ amount of space that isn't desired in the output.
+
+ Options accepted:
+
+ `n` : int
+ The number of characters to gobble.
+
+ .. versionadded:: 1.2
+ """
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ self.n = get_int_opt(options, 'n', 0)
+
+ def gobble(self, value, left):
+ if left < len(value):
+ return value[left:], 0
+ else:
+ return u'', left - len(value)
+
+ def filter(self, lexer, stream):
+ n = self.n
+ left = n # How many characters left to gobble.
+ for ttype, value in stream:
+            # Remove ``left`` characters from the first line, ``n`` from all others.
+ parts = value.split('\n')
+ (parts[0], left) = self.gobble(parts[0], left)
+ for i in range(1, len(parts)):
+ (parts[i], left) = self.gobble(parts[i], n)
+ value = u'\n'.join(parts)
+
+ if value != '':
+ yield ttype, value
+
+
+class TokenMergeFilter(Filter):
+ """Merges consecutive tokens with the same token type in the output
+ stream of a lexer.
+
+ .. versionadded:: 1.2
+ """
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+
+ def filter(self, lexer, stream):
+ current_type = None
+ current_value = None
+ for ttype, value in stream:
+ if ttype is current_type:
+ current_value += value
+ else:
+ if current_type is not None:
+ yield current_type, current_value
+ current_type = ttype
+ current_value = value
+ if current_type is not None:
+ yield current_type, current_value
+
+
+FILTERS = {
+ 'codetagify': CodeTagFilter,
+ 'keywordcase': KeywordCaseFilter,
+ 'highlight': NameHighlightFilter,
+ 'raiseonerror': RaiseOnErrorTokenFilter,
+ 'whitespace': VisibleWhitespaceFilter,
+ 'gobble': GobbleFilter,
+ 'tokenmerge': TokenMergeFilter,
+}
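
A short sketch of how the filters above are typically attached to a lexer (assuming pygments with its Python lexer and terminal formatter is available; the snippet is purely illustrative):

    from pygments import highlight
    from pygments.filters import get_filter_by_name
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import PythonLexer

    lexer = PythonLexer()
    # 'keywordcase' resolves to KeywordCaseFilter via the FILTERS table above.
    lexer.add_filter(get_filter_by_name('keywordcase', case='upper'))
    print(highlight(u'def foo():\n    pass\n', lexer, TerminalFormatter()))
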
diff --git a/contrib/python/Pygments/py2/pygments/formatter.py b/contrib/python/Pygments/py2/pygments/formatter.py
index f09f6e3cb8..ec9e86c46b 100644
--- a/contrib/python/Pygments/py2/pygments/formatter.py
+++ b/contrib/python/Pygments/py2/pygments/formatter.py
@@ -1,95 +1,95 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatter
- ~~~~~~~~~~~~~~~~~~
-
- Base formatter class.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatter
+ ~~~~~~~~~~~~~~~~~~
+
+ Base formatter class.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import codecs
-
-from pygments.util import get_bool_opt, string_types
-from pygments.styles import get_style_by_name
-
-__all__ = ['Formatter']
-
-
-def _lookup_style(style):
- if isinstance(style, string_types):
- return get_style_by_name(style)
- return style
-
-
-class Formatter(object):
- """
- Converts a token stream to text.
-
- Options accepted:
-
- ``style``
- The style to use, can be a string or a Style subclass
- (default: "default"). Not used by e.g. the
- TerminalFormatter.
- ``full``
- Tells the formatter to output a "full" document, i.e.
- a complete self-contained document. This doesn't have
- any effect for some formatters (default: false).
- ``title``
- If ``full`` is true, the title that should be used to
- caption the document (default: '').
- ``encoding``
- If given, must be an encoding name. This will be used to
- convert the Unicode token strings to byte strings in the
- output. If it is "" or None, Unicode strings will be written
- to the output file, which most file-like objects do not
- support (default: None).
- ``outencoding``
- Overrides ``encoding`` if given.
- """
-
- #: Name of the formatter
- name = None
-
- #: Shortcuts for the formatter
- aliases = []
-
- #: fn match rules
- filenames = []
-
- #: If True, this formatter outputs Unicode strings when no encoding
- #: option is given.
- unicodeoutput = True
-
- def __init__(self, **options):
- self.style = _lookup_style(options.get('style', 'default'))
+ :license: BSD, see LICENSE for details.
+"""
+
+import codecs
+
+from pygments.util import get_bool_opt, string_types
+from pygments.styles import get_style_by_name
+
+__all__ = ['Formatter']
+
+
+def _lookup_style(style):
+ if isinstance(style, string_types):
+ return get_style_by_name(style)
+ return style
+
+
+class Formatter(object):
+ """
+ Converts a token stream to text.
+
+ Options accepted:
+
+ ``style``
+ The style to use, can be a string or a Style subclass
+ (default: "default"). Not used by e.g. the
+ TerminalFormatter.
+ ``full``
+ Tells the formatter to output a "full" document, i.e.
+ a complete self-contained document. This doesn't have
+ any effect for some formatters (default: false).
+ ``title``
+ If ``full`` is true, the title that should be used to
+ caption the document (default: '').
+ ``encoding``
+ If given, must be an encoding name. This will be used to
+ convert the Unicode token strings to byte strings in the
+ output. If it is "" or None, Unicode strings will be written
+ to the output file, which most file-like objects do not
+ support (default: None).
+ ``outencoding``
+ Overrides ``encoding`` if given.
+ """
+
+ #: Name of the formatter
+ name = None
+
+ #: Shortcuts for the formatter
+ aliases = []
+
+ #: fn match rules
+ filenames = []
+
+ #: If True, this formatter outputs Unicode strings when no encoding
+ #: option is given.
+ unicodeoutput = True
+
+ def __init__(self, **options):
+ self.style = _lookup_style(options.get('style', 'default'))
self.full = get_bool_opt(options, 'full', False)
- self.title = options.get('title', '')
- self.encoding = options.get('encoding', None) or None
- if self.encoding in ('guess', 'chardet'):
- # can happen for e.g. pygmentize -O encoding=guess
- self.encoding = 'utf-8'
- self.encoding = options.get('outencoding') or self.encoding
- self.options = options
-
- def get_style_defs(self, arg=''):
- """
- Return the style definitions for the current style as a string.
-
- ``arg`` is an additional argument whose meaning depends on the
- formatter used. Note that ``arg`` can also be a list or tuple
- for some formatters like the html formatter.
- """
- return ''
-
- def format(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
- """
- if self.encoding:
- # wrap the outfile in a StreamWriter
- outfile = codecs.lookup(self.encoding)[3](outfile)
- return self.format_unencoded(tokensource, outfile)
+ self.title = options.get('title', '')
+ self.encoding = options.get('encoding', None) or None
+ if self.encoding in ('guess', 'chardet'):
+ # can happen for e.g. pygmentize -O encoding=guess
+ self.encoding = 'utf-8'
+ self.encoding = options.get('outencoding') or self.encoding
+ self.options = options
+
+ def get_style_defs(self, arg=''):
+ """
+ Return the style definitions for the current style as a string.
+
+ ``arg`` is an additional argument whose meaning depends on the
+ formatter used. Note that ``arg`` can also be a list or tuple
+ for some formatters like the html formatter.
+ """
+ return ''
+
+ def format(self, tokensource, outfile):
+ """
+ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
+ tuples and write it into ``outfile``.
+ """
+ if self.encoding:
+ # wrap the outfile in a StreamWriter
+ outfile = codecs.lookup(self.encoding)[3](outfile)
+ return self.format_unencoded(tokensource, outfile)
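
For context, a hypothetical subclass (not part of this diff) only has to implement format_unencoded(); format() above adds the optional encoding wrapper:

    from pygments.formatter import Formatter

    class TokenDumpFormatter(Formatter):
        """Write one 'token-type<TAB>repr(text)' line per token."""
        name = 'Token dump'
        aliases = ['tokendump']

        def format_unencoded(self, tokensource, outfile):
            for ttype, value in tokensource:
                outfile.write(u'%s\t%r\n' % (ttype, value))

    # usage sketch: highlight(code, some_lexer, TokenDumpFormatter(), sys.stdout)
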
diff --git a/contrib/python/Pygments/py2/pygments/formatters/__init__.py b/contrib/python/Pygments/py2/pygments/formatters/__init__.py
index 6f1130a801..864deba329 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/__init__.py
@@ -1,84 +1,84 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters
- ~~~~~~~~~~~~~~~~~~~
-
- Pygments formatters.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters
+ ~~~~~~~~~~~~~~~~~~~
+
+ Pygments formatters.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import types
-import fnmatch
-from os.path import basename
-
-from pygments.formatters._mapping import FORMATTERS
-from pygments.plugin import find_plugin_formatters
-from pygments.util import ClassNotFound, itervalues
-
-__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import sys
+import types
+import fnmatch
+from os.path import basename
+
+from pygments.formatters._mapping import FORMATTERS
+from pygments.plugin import find_plugin_formatters
+from pygments.util import ClassNotFound, itervalues
+
+__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS)
-
-_formatter_cache = {} # classes by name
-_pattern_cache = {}
-
-
-def _fn_matches(fn, glob):
- """Return whether the supplied file name fn matches pattern filename."""
- if glob not in _pattern_cache:
- pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
- return pattern.match(fn)
- return _pattern_cache[glob].match(fn)
-
-
-def _load_formatters(module_name):
- """Load a formatter (and all others in the module too)."""
- mod = __import__(module_name, None, None, ['__all__'])
- for formatter_name in mod.__all__:
- cls = getattr(mod, formatter_name)
- _formatter_cache[cls.name] = cls
-
-
-def get_all_formatters():
- """Return a generator for all formatter classes."""
- # NB: this returns formatter classes, not info like get_all_lexers().
- for info in itervalues(FORMATTERS):
- if info[1] not in _formatter_cache:
- _load_formatters(info[0])
- yield _formatter_cache[info[1]]
- for _, formatter in find_plugin_formatters():
- yield formatter
-
-
-def find_formatter_class(alias):
- """Lookup a formatter by alias.
-
- Returns None if not found.
- """
- for module_name, name, aliases, _, _ in itervalues(FORMATTERS):
- if alias in aliases:
- if name not in _formatter_cache:
- _load_formatters(module_name)
- return _formatter_cache[name]
- for _, cls in find_plugin_formatters():
- if alias in cls.aliases:
- return cls
-
-
-def get_formatter_by_name(_alias, **options):
- """Lookup and instantiate a formatter by alias.
-
- Raises ClassNotFound if not found.
- """
- cls = find_formatter_class(_alias)
- if cls is None:
- raise ClassNotFound("no formatter found for name %r" % _alias)
- return cls(**options)
-
-
+
+_formatter_cache = {} # classes by name
+_pattern_cache = {}
+
+
+def _fn_matches(fn, glob):
+ """Return whether the supplied file name fn matches pattern filename."""
+ if glob not in _pattern_cache:
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
+
+
+def _load_formatters(module_name):
+ """Load a formatter (and all others in the module too)."""
+ mod = __import__(module_name, None, None, ['__all__'])
+ for formatter_name in mod.__all__:
+ cls = getattr(mod, formatter_name)
+ _formatter_cache[cls.name] = cls
+
+
+def get_all_formatters():
+ """Return a generator for all formatter classes."""
+ # NB: this returns formatter classes, not info like get_all_lexers().
+ for info in itervalues(FORMATTERS):
+ if info[1] not in _formatter_cache:
+ _load_formatters(info[0])
+ yield _formatter_cache[info[1]]
+ for _, formatter in find_plugin_formatters():
+ yield formatter
+
+
+def find_formatter_class(alias):
+ """Lookup a formatter by alias.
+
+ Returns None if not found.
+ """
+ for module_name, name, aliases, _, _ in itervalues(FORMATTERS):
+ if alias in aliases:
+ if name not in _formatter_cache:
+ _load_formatters(module_name)
+ return _formatter_cache[name]
+ for _, cls in find_plugin_formatters():
+ if alias in cls.aliases:
+ return cls
+
+
+def get_formatter_by_name(_alias, **options):
+ """Lookup and instantiate a formatter by alias.
+
+ Raises ClassNotFound if not found.
+ """
+ cls = find_formatter_class(_alias)
+ if cls is None:
+ raise ClassNotFound("no formatter found for name %r" % _alias)
+ return cls(**options)
+
+
def load_formatter_from_file(filename, formattername="CustomFormatter",
**options):
"""Load a formatter from a file.
@@ -115,40 +115,40 @@ def load_formatter_from_file(filename, formattername="CustomFormatter",
raise ClassNotFound('error when loading custom formatter: %s' % err)
-def get_formatter_for_filename(fn, **options):
- """Lookup and instantiate a formatter by filename pattern.
-
- Raises ClassNotFound if not found.
- """
- fn = basename(fn)
- for modname, name, _, filenames, _ in itervalues(FORMATTERS):
- for filename in filenames:
- if _fn_matches(fn, filename):
- if name not in _formatter_cache:
- _load_formatters(modname)
- return _formatter_cache[name](**options)
- for cls in find_plugin_formatters():
- for filename in cls.filenames:
- if _fn_matches(fn, filename):
- return cls(**options)
- raise ClassNotFound("no formatter found for file name %r" % fn)
-
-
-class _automodule(types.ModuleType):
- """Automatically import formatters."""
-
- def __getattr__(self, name):
- info = FORMATTERS.get(name)
- if info:
- _load_formatters(info[0])
- cls = _formatter_cache[info[1]]
- setattr(self, name, cls)
- return cls
- raise AttributeError(name)
-
-
-oldmod = sys.modules[__name__]
-newmod = _automodule(__name__)
-newmod.__dict__.update(oldmod.__dict__)
-sys.modules[__name__] = newmod
-del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
+def get_formatter_for_filename(fn, **options):
+ """Lookup and instantiate a formatter by filename pattern.
+
+ Raises ClassNotFound if not found.
+ """
+ fn = basename(fn)
+ for modname, name, _, filenames, _ in itervalues(FORMATTERS):
+ for filename in filenames:
+ if _fn_matches(fn, filename):
+ if name not in _formatter_cache:
+ _load_formatters(modname)
+ return _formatter_cache[name](**options)
+ for cls in find_plugin_formatters():
+ for filename in cls.filenames:
+ if _fn_matches(fn, filename):
+ return cls(**options)
+ raise ClassNotFound("no formatter found for file name %r" % fn)
+
+
+class _automodule(types.ModuleType):
+ """Automatically import formatters."""
+
+ def __getattr__(self, name):
+ info = FORMATTERS.get(name)
+ if info:
+ _load_formatters(info[0])
+ cls = _formatter_cache[info[1]]
+ setattr(self, name, cls)
+ return cls
+ raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
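
A minimal sketch of the two public lookup paths defined above (by alias and by output file name; the option values are illustrative):

    from pygments.formatters import (get_formatter_by_name,
                                     get_formatter_for_filename)

    fmt_by_alias = get_formatter_by_name('html', linenos=True)  # HtmlFormatter
    fmt_by_file = get_formatter_for_filename('report.tex')      # LatexFormatter via '*.tex'
    print(type(fmt_by_alias).__name__, type(fmt_by_file).__name__)
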
diff --git a/contrib/python/Pygments/py2/pygments/formatters/_mapping.py b/contrib/python/Pygments/py2/pygments/formatters/_mapping.py
index 5086e51970..29f22e689c 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/_mapping.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/_mapping.py
@@ -1,85 +1,85 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters._mapping
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter mapping definitions. This file is generated by itself. Everytime
- you change something on a builtin formatter definition, run this script from
- the formatters folder to update it.
-
- Do not alter the FORMATTERS dictionary by hand.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters._mapping
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Formatter mapping definitions. This file is generated by itself. Every time
+    you change something in a builtin formatter definition, run this script from
+    the formatters folder to update it.
+
+ Do not alter the FORMATTERS dictionary by hand.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-FORMATTERS = {
- 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
- 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
- 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
- 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
- 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
- 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
- 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
- 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
- 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
-}
-
-if __name__ == '__main__': # pragma: no cover
- import sys
- import os
-
- # lookup formatters
- found_formatters = []
- imports = []
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
- from pygments.util import docstring_headline
-
- for root, dirs, files in os.walk('.'):
- for filename in files:
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.formatters%s.%s' % (
- root[1:].replace('/', '.'), filename[:-3])
- print(module_name)
- module = __import__(module_name, None, None, [''])
- for formatter_name in module.__all__:
- formatter = getattr(module, formatter_name)
- found_formatters.append(
- '%r: %r' % (formatter_name,
- (module_name,
- formatter.name,
- tuple(formatter.aliases),
- tuple(formatter.filenames),
- docstring_headline(formatter))))
- # sort them to make the diff minimal
- found_formatters.sort()
-
- # extract useful sourcecode from this file
- with open(__file__) as fp:
- content = fp.read()
- # replace crnl to nl for Windows.
- #
- # Note that, originally, contributers should keep nl of master
- # repository, for example by using some kind of automatic
- # management EOL, like `EolExtension
- # <https://www.mercurial-scm.org/wiki/EolExtension>`.
- content = content.replace("\r\n", "\n")
- header = content[:content.find('FORMATTERS = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- # write new file
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+FORMATTERS = {
+ 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
+ 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
+ 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
+ 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
+ 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
+ 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
+ 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
+ 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
+ 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
+}
+
+if __name__ == '__main__': # pragma: no cover
+ import sys
+ import os
+
+ # lookup formatters
+ found_formatters = []
+ imports = []
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+ from pygments.util import docstring_headline
+
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.formatters%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for formatter_name in module.__all__:
+ formatter = getattr(module, formatter_name)
+ found_formatters.append(
+ '%r: %r' % (formatter_name,
+ (module_name,
+ formatter.name,
+ tuple(formatter.aliases),
+ tuple(formatter.filenames),
+ docstring_headline(formatter))))
+ # sort them to make the diff minimal
+ found_formatters.sort()
+
+ # extract useful sourcecode from this file
+ with open(__file__) as fp:
+ content = fp.read()
+    # replace CRLF with LF for Windows.
+    #
+    # Note that contributors should normally keep the newlines of the
+    # master repository, for example by using some kind of automatic
+    # EOL management, like `EolExtension
+    # <https://www.mercurial-scm.org/wiki/EolExtension>`.
+ content = content.replace("\r\n", "\n")
+ header = content[:content.find('FORMATTERS = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ # write new file
with open(__file__, 'w') as fp:
- fp.write(header)
- fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
- fp.write(footer)
-
- print ('=== %d formatters processed.' % len(found_formatters))
+ fp.write(header)
+ fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
+ fp.write(footer)
+
+ print ('=== %d formatters processed.' % len(found_formatters))
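
Each FORMATTERS entry above is a 5-tuple of (module name, formatter name, aliases, filename patterns, docstring headline); a small sketch of reading it:

    from pygments.formatters._mapping import FORMATTERS

    module, name, aliases, patterns, doc = FORMATTERS['HtmlFormatter']
    print(module)    # pygments.formatters.html
    print(aliases)   # ('html',)
    print(patterns)  # ('*.html', '*.htm')
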
diff --git a/contrib/python/Pygments/py2/pygments/formatters/bbcode.py b/contrib/python/Pygments/py2/pygments/formatters/bbcode.py
index 784aee3ae7..1016695c4b 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/bbcode.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/bbcode.py
@@ -1,109 +1,109 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.bbcode
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- BBcode formatter.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.bbcode
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ BBcode formatter.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt
-
-__all__ = ['BBCodeFormatter']
-
-
-class BBCodeFormatter(Formatter):
- """
- Format tokens with BBcodes. These formatting codes are used by many
- bulletin boards, so you can highlight your sourcecode with pygments before
- posting it there.
-
- This formatter has no support for background colors and borders, as there
- are no common BBcode tags for that.
-
- Some board systems (e.g. phpBB) don't support colors in their [code] tag,
- so you can't use the highlighting together with that tag.
- Text in a [code] tag usually is shown with a monospace font (which this
- formatter can do with the ``monofont`` option) and no spaces (which you
- need for indentation) are removed.
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `codetag`
- If set to true, put the output into ``[code]`` tags (default:
- ``false``)
-
- `monofont`
- If set to true, add a tag to show the code with a monospace font
- (default: ``false``).
- """
- name = 'BBCode'
- aliases = ['bbcode', 'bb']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self._code = get_bool_opt(options, 'codetag', False)
- self._mono = get_bool_opt(options, 'monofont', False)
-
- self.styles = {}
- self._make_styles()
-
- def _make_styles(self):
- for ttype, ndef in self.style:
- start = end = ''
- if ndef['color']:
- start += '[color=#%s]' % ndef['color']
- end = '[/color]' + end
- if ndef['bold']:
- start += '[b]'
- end = '[/b]' + end
- if ndef['italic']:
- start += '[i]'
- end = '[/i]' + end
- if ndef['underline']:
- start += '[u]'
- end = '[/u]' + end
- # there are no common BBcodes for background-color and border
-
- self.styles[ttype] = start, end
-
- def format_unencoded(self, tokensource, outfile):
- if self._code:
- outfile.write('[code]')
- if self._mono:
- outfile.write('[font=monospace]')
-
- lastval = ''
- lasttype = None
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- if ttype == lasttype:
- lastval += value
- else:
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
- lastval = value
- lasttype = ttype
-
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
-
- if self._mono:
- outfile.write('[/font]')
- if self._code:
- outfile.write('[/code]')
- if self._code or self._mono:
- outfile.write('\n')
+ :license: BSD, see LICENSE for details.
+"""
+
+
+from pygments.formatter import Formatter
+from pygments.util import get_bool_opt
+
+__all__ = ['BBCodeFormatter']
+
+
+class BBCodeFormatter(Formatter):
+ """
+ Format tokens with BBcodes. These formatting codes are used by many
+ bulletin boards, so you can highlight your sourcecode with pygments before
+ posting it there.
+
+ This formatter has no support for background colors and borders, as there
+ are no common BBcode tags for that.
+
+ Some board systems (e.g. phpBB) don't support colors in their [code] tag,
+ so you can't use the highlighting together with that tag.
+    Text in a [code] tag is usually shown with a monospace font (which this
+    formatter can do with the ``monofont`` option), and spaces (which you
+    need for indentation) are not removed.
+
+ Additional options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+
+ `codetag`
+ If set to true, put the output into ``[code]`` tags (default:
+ ``false``)
+
+ `monofont`
+ If set to true, add a tag to show the code with a monospace font
+ (default: ``false``).
+ """
+ name = 'BBCode'
+ aliases = ['bbcode', 'bb']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self._code = get_bool_opt(options, 'codetag', False)
+ self._mono = get_bool_opt(options, 'monofont', False)
+
+ self.styles = {}
+ self._make_styles()
+
+ def _make_styles(self):
+ for ttype, ndef in self.style:
+ start = end = ''
+ if ndef['color']:
+ start += '[color=#%s]' % ndef['color']
+ end = '[/color]' + end
+ if ndef['bold']:
+ start += '[b]'
+ end = '[/b]' + end
+ if ndef['italic']:
+ start += '[i]'
+ end = '[/i]' + end
+ if ndef['underline']:
+ start += '[u]'
+ end = '[/u]' + end
+ # there are no common BBcodes for background-color and border
+
+ self.styles[ttype] = start, end
+
+ def format_unencoded(self, tokensource, outfile):
+ if self._code:
+ outfile.write('[code]')
+ if self._mono:
+ outfile.write('[font=monospace]')
+
+ lastval = ''
+ lasttype = None
+
+ for ttype, value in tokensource:
+ while ttype not in self.styles:
+ ttype = ttype.parent
+ if ttype == lasttype:
+ lastval += value
+ else:
+ if lastval:
+ start, end = self.styles[lasttype]
+ outfile.write(''.join((start, lastval, end)))
+ lastval = value
+ lasttype = ttype
+
+ if lastval:
+ start, end = self.styles[lasttype]
+ outfile.write(''.join((start, lastval, end)))
+
+ if self._mono:
+ outfile.write('[/font]')
+ if self._code:
+ outfile.write('[/code]')
+ if self._code or self._mono:
+ outfile.write('\n')
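
A quick sketch of driving the formatter above through the regular highlight() entry point (assuming pygments and its Python lexer are available):

    from pygments import highlight
    from pygments.formatters import BBCodeFormatter
    from pygments.lexers import PythonLexer

    bb = BBCodeFormatter(codetag=True, monofont=True)
    print(highlight(u'def foo():\n    return 42\n', PythonLexer(), bb))
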
diff --git a/contrib/python/Pygments/py2/pygments/formatters/html.py b/contrib/python/Pygments/py2/pygments/formatters/html.py
index 042f04cfb1..a2d171cd41 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/html.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/html.py
@@ -1,880 +1,880 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.html
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for HTML output.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.html
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for HTML output.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-import os
-import sys
-import os.path
-
-from pygments.formatter import Formatter
-from pygments.token import Token, Text, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- StringIO, string_types, iteritems
-
-try:
- import ctags
-except ImportError:
- ctags = None
-
-__all__ = ['HtmlFormatter']
-
-
-_escape_html_table = {
- ord('&'): u'&amp;',
- ord('<'): u'&lt;',
- ord('>'): u'&gt;',
- ord('"'): u'&quot;',
- ord("'"): u'&#39;',
-}
-
-
-def escape_html(text, table=_escape_html_table):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.translate(table)
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+import os.path
+
+from pygments.formatter import Formatter
+from pygments.token import Token, Text, STANDARD_TYPES
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
+ StringIO, string_types, iteritems
+
+try:
+ import ctags
+except ImportError:
+ ctags = None
+
+__all__ = ['HtmlFormatter']
+
+
+_escape_html_table = {
+ ord('&'): u'&amp;',
+ ord('<'): u'&lt;',
+ ord('>'): u'&gt;',
+ ord('"'): u'&quot;',
+ ord("'"): u'&#39;',
+}
+
+
+def escape_html(text, table=_escape_html_table):
+ """Escape &, <, > as well as single and double quotes for HTML."""
+ return text.translate(table)
+
def webify(color):
if color.startswith('calc') or color.startswith('var'):
return color
else:
return '#' + color
-
-def _get_ttype_class(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = '-' + ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-CSSFILE_TEMPLATE = '''\
+
+def _get_ttype_class(ttype):
+ fname = STANDARD_TYPES.get(ttype)
+ if fname:
+ return fname
+ aname = ''
+ while fname is None:
+ aname = '-' + ttype[-1] + aname
+ ttype = ttype.parent
+ fname = STANDARD_TYPES.get(ttype)
+ return fname + aname
+
+
+CSSFILE_TEMPLATE = '''\
/*
generated by Pygments <http://pygments.org>
Copyright 2006-2019 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
*/
-td.linenos { background-color: #f0f0f0; padding-right: 10px; }
-span.lineno { background-color: #f0f0f0; padding: 0 5px 0 5px; }
-pre { line-height: 125%%; }
-%(styledefs)s
-'''
-
-DOC_HEADER = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
+td.linenos { background-color: #f0f0f0; padding-right: 10px; }
+span.lineno { background-color: #f0f0f0; padding: 0 5px 0 5px; }
+pre { line-height: 125%%; }
+%(styledefs)s
+'''
+
+DOC_HEADER = '''\
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
+ "http://www.w3.org/TR/html4/strict.dtd">
<!--
generated by Pygments <http://pygments.org>
Copyright 2006-2019 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
-->
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <style type="text/css">
-''' + CSSFILE_TEMPLATE + '''
- </style>
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_HEADER_EXTERNALCSS = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
-
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <link rel="stylesheet" href="%(cssfile)s" type="text/css">
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_FOOTER = '''\
-</body>
-</html>
-'''
-
-
-class HtmlFormatter(Formatter):
- r"""
- Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped
- in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass`
- option.
-
- If the `linenos` option is set to ``"table"``, the ``<pre>`` is
- additionally wrapped inside a ``<table>`` which has one row and two
- cells: one containing the line numbers and one containing the code.
- Example:
-
- .. sourcecode:: html
-
- <div class="highlight" >
- <table><tr>
- <td class="linenos" title="click to toggle"
- onclick="with (this.firstChild.style)
- { display = (display == '') ? 'none' : '' }">
- <pre>1
- 2</pre>
- </td>
- <td class="code">
- <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
- <span class="Ke">pass</span>
- </pre>
- </td>
- </tr></table></div>
-
- (whitespace added to improve clarity).
-
- Wrapping can be disabled using the `nowrap` option.
-
- A list of lines can be specified using the `hl_lines` option to make these
- lines highlighted (as of Pygments 0.11).
-
- With the `full` option, a complete HTML 4 document is output, including
- the style definitions inside a ``<style>`` tag, or in a separate file if
- the `cssfile` option is given.
-
- When `tagsfile` is set to the path of a ctags index file, it is used to
- generate hyperlinks from names to their definition. You must enable
- `lineanchors` and run ctags with the `-n` option for this to work. The
- `python-ctags` module from PyPI must be installed to use this feature;
- otherwise a `RuntimeError` will be raised.
-
- The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
- containing CSS rules for the CSS classes used by the formatter. The
- argument `arg` can be used to specify additional CSS selectors that
- are prepended to the classes. A call `fmter.get_style_defs('td .code')`
- would result in the following CSS classes:
-
- .. sourcecode:: css
-
- td .code .kw { font-weight: bold; color: #00FF00 }
- td .code .cm { color: #999999 }
- ...
-
- If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
- `get_style_defs()` method to request multiple prefixes for the tokens:
-
- .. sourcecode:: python
-
- formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
-
- The output would then look like this:
-
- .. sourcecode:: css
-
- div.syntax pre .kw,
- pre.syntax .kw { font-weight: bold; color: #00FF00 }
- div.syntax pre .cm,
- pre.syntax .cm { color: #999999 }
- ...
-
- Additional options accepted:
-
- `nowrap`
- If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>``
- tag. This disables most other options (default: ``False``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``). This option has no effect if the `cssfile`
- and `noclobber_cssfile` option are given and the file specified in
- `cssfile` exists.
-
- `noclasses`
- If set to true, token ``<span>`` tags will not use CSS classes, but
- inline styles. This is not recommended for larger pieces of code since
- it increases output size by quite a bit (default: ``False``).
-
- `classprefix`
- Since the token types use relatively short class names, they may clash
- with some of your own class names. In this case you can use the
- `classprefix` option to give a string to prepend to all Pygments-generated
- CSS class names for token types.
- Note that this option also affects the output of `get_style_defs()`.
-
- `cssclass`
- CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
- If you set this option, the default selector for `get_style_defs()`
- will be this class.
-
- .. versionadded:: 0.9
- If you select the ``'table'`` line numbers, the wrapping table will
- have a CSS class of this string plus ``'table'``, the default is
- accordingly ``'highlighttable'``.
-
- `cssstyles`
- Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
-
- `prestyles`
- Inline CSS styles for the ``<pre>`` tag (default: ``''``).
-
- .. versionadded:: 0.11
-
- `cssfile`
- If the `full` option is true and this option is given, it must be the
- name of an external file. If the filename does not include an absolute
- path, the file's path will be assumed to be relative to the main output
- file's path, if the latter can be found. The stylesheet is then written
- to this file instead of the HTML file.
-
- .. versionadded:: 0.6
-
- `noclobber_cssfile`
- If `cssfile` is given and the specified file exists, the css file will
- not be overwritten. This allows the use of the `full` option in
- combination with a user specified css file. Default is ``False``.
-
- .. versionadded:: 1.1
-
- `linenos`
- If set to ``'table'``, output line numbers as a table with two cells,
- one containing the line numbers, the other the whole code. This is
- copy-and-paste-friendly, but may cause alignment problems with some
- browsers or fonts. If set to ``'inline'``, the line numbers will be
- integrated in the ``<pre>`` tag that contains the code (that setting
- is *new in Pygments 0.8*).
-
- For compatibility with Pygments 0.7 and earlier, every true value
- except ``'inline'`` means the same as ``'table'`` (in particular, that
- means also ``True``).
-
- The default value is ``False``, which means no line numbers at all.
-
- **Note:** with the default ("table") line number mechanism, the line
- numbers and code can have different line heights in Internet Explorer
- unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
- CSS property (you get the default line spacing with ``line-height:
- 125%``).
-
- `hl_lines`
- Specify a list of lines to be highlighted.
-
- .. versionadded:: 0.11
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `linenospecial`
- If set to a number n > 0, every nth line number is given the CSS
- class ``"special"`` (default: ``0``).
-
- `nobackground`
- If set to ``True``, the formatter won't output the background color
- for the wrapping element (this automatically defaults to ``False``
-        when there is no wrapping element [e.g. no argument for the
-        `get_style_defs` method given]) (default: ``False``).
-
- .. versionadded:: 0.6
-
- `lineseparator`
- This string is output between lines of code. It defaults to ``"\n"``,
- which is enough to break a line inside ``<pre>`` tags, but you can
- e.g. set it to ``"<br>"`` to get HTML line breaks.
-
- .. versionadded:: 0.7
-
- `lineanchors`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in an anchor tag with a ``name`` of ``foo-linenumber``.
- This allows easy linking to certain lines.
-
- .. versionadded:: 0.9
-
- `linespans`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in a span tag with an ``id`` of ``foo-linenumber``.
- This allows easy access to lines via javascript.
-
- .. versionadded:: 1.6
-
- `anchorlinenos`
- If set to `True`, will wrap line numbers in <a> tags. Used in
- combination with `linenos` and `lineanchors`.
-
- `tagsfile`
- If set to the path of a ctags file, wrap names in anchor tags that
- link to their definitions. `lineanchors` should be used, and the
- tags file should specify line numbers (see the `-n` option to ctags).
-
- .. versionadded:: 1.6
-
- `tagurlformat`
- A string formatting pattern used to generate links to ctags definitions.
- Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
- Defaults to an empty string, resulting in just `#prefix-number` links.
-
- .. versionadded:: 1.6
-
- `filename`
+<html>
+<head>
+ <title>%(title)s</title>
+ <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
+ <style type="text/css">
+''' + CSSFILE_TEMPLATE + '''
+ </style>
+</head>
+<body>
+<h2>%(title)s</h2>
+
+'''
+
+DOC_HEADER_EXTERNALCSS = '''\
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
+ "http://www.w3.org/TR/html4/strict.dtd">
+
+<html>
+<head>
+ <title>%(title)s</title>
+ <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
+ <link rel="stylesheet" href="%(cssfile)s" type="text/css">
+</head>
+<body>
+<h2>%(title)s</h2>
+
+'''
+
+DOC_FOOTER = '''\
+</body>
+</html>
+'''
+
+
+class HtmlFormatter(Formatter):
+ r"""
+ Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped
+ in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass`
+ option.
+
+ If the `linenos` option is set to ``"table"``, the ``<pre>`` is
+ additionally wrapped inside a ``<table>`` which has one row and two
+ cells: one containing the line numbers and one containing the code.
+ Example:
+
+ .. sourcecode:: html
+
+ <div class="highlight" >
+ <table><tr>
+ <td class="linenos" title="click to toggle"
+ onclick="with (this.firstChild.style)
+ { display = (display == '') ? 'none' : '' }">
+ <pre>1
+ 2</pre>
+ </td>
+ <td class="code">
+ <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
+ <span class="Ke">pass</span>
+ </pre>
+ </td>
+ </tr></table></div>
+
+ (whitespace added to improve clarity).
+
+ Wrapping can be disabled using the `nowrap` option.
+
+ A list of lines can be specified using the `hl_lines` option to make these
+ lines highlighted (as of Pygments 0.11).
+
+ With the `full` option, a complete HTML 4 document is output, including
+ the style definitions inside a ``<style>`` tag, or in a separate file if
+ the `cssfile` option is given.
+
+ When `tagsfile` is set to the path of a ctags index file, it is used to
+ generate hyperlinks from names to their definition. You must enable
+ `lineanchors` and run ctags with the `-n` option for this to work. The
+ `python-ctags` module from PyPI must be installed to use this feature;
+ otherwise a `RuntimeError` will be raised.
+
+ The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
+ containing CSS rules for the CSS classes used by the formatter. The
+ argument `arg` can be used to specify additional CSS selectors that
+ are prepended to the classes. A call `fmter.get_style_defs('td .code')`
+ would result in the following CSS classes:
+
+ .. sourcecode:: css
+
+ td .code .kw { font-weight: bold; color: #00FF00 }
+ td .code .cm { color: #999999 }
+ ...
+
+ If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
+ `get_style_defs()` method to request multiple prefixes for the tokens:
+
+ .. sourcecode:: python
+
+ formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
+
+ The output would then look like this:
+
+ .. sourcecode:: css
+
+ div.syntax pre .kw,
+ pre.syntax .kw { font-weight: bold; color: #00FF00 }
+ div.syntax pre .cm,
+ pre.syntax .cm { color: #999999 }
+ ...
+
+ Additional options accepted:
+
+ `nowrap`
+ If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>``
+ tag. This disables most other options (default: ``False``).
+
+ `full`
+ Tells the formatter to output a "full" document, i.e. a complete
+ self-contained document (default: ``False``).
+
+ `title`
+ If `full` is true, the title that should be used to caption the
+ document (default: ``''``).
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``). This option has no effect if the `cssfile`
+ and `noclobber_cssfile` option are given and the file specified in
+ `cssfile` exists.
+
+ `noclasses`
+ If set to true, token ``<span>`` tags will not use CSS classes, but
+ inline styles. This is not recommended for larger pieces of code since
+ it increases output size by quite a bit (default: ``False``).
+
+ `classprefix`
+ Since the token types use relatively short class names, they may clash
+ with some of your own class names. In this case you can use the
+ `classprefix` option to give a string to prepend to all Pygments-generated
+ CSS class names for token types.
+ Note that this option also affects the output of `get_style_defs()`.
+
+ `cssclass`
+ CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
+ If you set this option, the default selector for `get_style_defs()`
+ will be this class.
+
+ .. versionadded:: 0.9
+ If you select the ``'table'`` line numbers, the wrapping table will
+ have a CSS class of this string plus ``'table'``, the default is
+ accordingly ``'highlighttable'``.
+
+ `cssstyles`
+ Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
+
+ `prestyles`
+ Inline CSS styles for the ``<pre>`` tag (default: ``''``).
+
+ .. versionadded:: 0.11
+
+ `cssfile`
+ If the `full` option is true and this option is given, it must be the
+ name of an external file. If the filename does not include an absolute
+ path, the file's path will be assumed to be relative to the main output
+ file's path, if the latter can be found. The stylesheet is then written
+ to this file instead of the HTML file.
+
+ .. versionadded:: 0.6
+
+ `noclobber_cssfile`
+ If `cssfile` is given and the specified file exists, the css file will
+ not be overwritten. This allows the use of the `full` option in
+ combination with a user specified css file. Default is ``False``.
+
+ .. versionadded:: 1.1
+
+ `linenos`
+ If set to ``'table'``, output line numbers as a table with two cells,
+ one containing the line numbers, the other the whole code. This is
+ copy-and-paste-friendly, but may cause alignment problems with some
+ browsers or fonts. If set to ``'inline'``, the line numbers will be
+ integrated in the ``<pre>`` tag that contains the code (that setting
+ is *new in Pygments 0.8*).
+
+ For compatibility with Pygments 0.7 and earlier, every true value
+ except ``'inline'`` means the same as ``'table'`` (in particular, that
+ means also ``True``).
+
+ The default value is ``False``, which means no line numbers at all.
+
+ **Note:** with the default ("table") line number mechanism, the line
+ numbers and code can have different line heights in Internet Explorer
+ unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
+ CSS property (you get the default line spacing with ``line-height:
+ 125%``).
+
+ `hl_lines`
+ Specify a list of lines to be highlighted.
+
+ .. versionadded:: 0.11
+
+ `linenostart`
+ The line number for the first line (default: ``1``).
+
+ `linenostep`
+ If set to a number n > 1, only every nth line number is printed.
+
+ `linenospecial`
+ If set to a number n > 0, every nth line number is given the CSS
+ class ``"special"`` (default: ``0``).
+
+ `nobackground`
+ If set to ``True``, the formatter won't output the background color
+ for the wrapping element (this automatically defaults to ``False``
+        when there is no wrapping element [e.g. no argument for the
+        `get_style_defs` method given]) (default: ``False``).
+
+ .. versionadded:: 0.6
+
+ `lineseparator`
+ This string is output between lines of code. It defaults to ``"\n"``,
+ which is enough to break a line inside ``<pre>`` tags, but you can
+ e.g. set it to ``"<br>"`` to get HTML line breaks.
+
+ .. versionadded:: 0.7
+
+ `lineanchors`
+ If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
+ output line in an anchor tag with a ``name`` of ``foo-linenumber``.
+ This allows easy linking to certain lines.
+
+ .. versionadded:: 0.9
+
+ `linespans`
+ If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
+ output line in a span tag with an ``id`` of ``foo-linenumber``.
+ This allows easy access to lines via javascript.
+
+ .. versionadded:: 1.6
+
+ `anchorlinenos`
+ If set to `True`, will wrap line numbers in <a> tags. Used in
+ combination with `linenos` and `lineanchors`.
+
+ `tagsfile`
+ If set to the path of a ctags file, wrap names in anchor tags that
+ link to their definitions. `lineanchors` should be used, and the
+ tags file should specify line numbers (see the `-n` option to ctags).
+
+ .. versionadded:: 1.6
+
+ `tagurlformat`
+ A string formatting pattern used to generate links to ctags definitions.
+ Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
+ Defaults to an empty string, resulting in just `#prefix-number` links.
+
+ .. versionadded:: 1.6
+
+ `filename`
A string used to generate a filename when rendering ``<pre>`` blocks,
- for example if displaying source code.
-
- .. versionadded:: 2.1
-
+ for example if displaying source code.
+
+ .. versionadded:: 2.1
+
`wrapcode`
Wrap the code inside ``<pre>`` blocks using ``<code>``, as recommended
by the HTML5 specification.
-
+
.. versionadded:: 2.4
- **Subclassing the HTML formatter**
-
- .. versionadded:: 0.7
-
- The HTML formatter is now built in a way that allows easy subclassing, thus
- customizing the output HTML code. The `format()` method calls
- `self._format_lines()` which returns a generator that yields tuples of ``(1,
- line)``, where the ``1`` indicates that the ``line`` is a line of the
- formatted source code.
-
-    If the `nowrap` option is set, the generator is then iterated over and the
- resulting HTML is output.
-
- Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
- other generators. These may add some HTML code to the one generated by
- `_format_lines()`, either by modifying the lines generated by the latter,
- then yielding them again with ``(1, line)``, and/or by yielding other HTML
- code before or after the lines, with ``(0, html)``. The distinction between
- source lines and other code makes it possible to wrap the generator multiple
- times.
-
- The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
-
- A custom `HtmlFormatter` subclass could look like this:
-
- .. sourcecode:: python
-
- class CodeHtmlFormatter(HtmlFormatter):
-
- def wrap(self, source, outfile):
- return self._wrap_code(source)
-
- def _wrap_code(self, source):
- yield 0, '<code>'
- for i, t in source:
- if i == 1:
- # it's a line of formatted code
- t += '<br>'
- yield i, t
- yield 0, '</code>'
-
- This results in wrapping the formatted lines with a ``<code>`` tag, where the
- source lines are broken using ``<br>`` tags.
-
- After calling `wrap()`, the `format()` method also adds the "line numbers"
- and/or "full document" wrappers if the respective options are set. Then, all
- HTML yielded by the wrapped generator is output.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.title = self._decodeifneeded(self.title)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.noclasses = get_bool_opt(options, 'noclasses', False)
- self.classprefix = options.get('classprefix', '')
- self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
- self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
- self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
- self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
- self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
- self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
- self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
- self.filename = self._decodeifneeded(options.get('filename', ''))
+ **Subclassing the HTML formatter**
+
+ .. versionadded:: 0.7
+
+ The HTML formatter is now built in a way that allows easy subclassing, thus
+ customizing the output HTML code. The `format()` method calls
+ `self._format_lines()` which returns a generator that yields tuples of ``(1,
+ line)``, where the ``1`` indicates that the ``line`` is a line of the
+ formatted source code.
+
+    If the `nowrap` option is set, the generator is then iterated over and the
+ resulting HTML is output.
+
+ Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
+ other generators. These may add some HTML code to the one generated by
+ `_format_lines()`, either by modifying the lines generated by the latter,
+ then yielding them again with ``(1, line)``, and/or by yielding other HTML
+ code before or after the lines, with ``(0, html)``. The distinction between
+ source lines and other code makes it possible to wrap the generator multiple
+ times.
+
+ The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
+
+ A custom `HtmlFormatter` subclass could look like this:
+
+ .. sourcecode:: python
+
+ class CodeHtmlFormatter(HtmlFormatter):
+
+ def wrap(self, source, outfile):
+ return self._wrap_code(source)
+
+ def _wrap_code(self, source):
+ yield 0, '<code>'
+ for i, t in source:
+ if i == 1:
+ # it's a line of formatted code
+ t += '<br>'
+ yield i, t
+ yield 0, '</code>'
+
+ This results in wrapping the formatted lines with a ``<code>`` tag, where the
+ source lines are broken using ``<br>`` tags.
+
+ After calling `wrap()`, the `format()` method also adds the "line numbers"
+ and/or "full document" wrappers if the respective options are set. Then, all
+ HTML yielded by the wrapped generator is output.
+ """
+
+ name = 'HTML'
+ aliases = ['html']
+ filenames = ['*.html', '*.htm']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.title = self._decodeifneeded(self.title)
+ self.nowrap = get_bool_opt(options, 'nowrap', False)
+ self.noclasses = get_bool_opt(options, 'noclasses', False)
+ self.classprefix = options.get('classprefix', '')
+ self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
+ self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
+ self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
+ self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
+ self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
+ self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
+ self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
+ self.filename = self._decodeifneeded(options.get('filename', ''))
self.wrapcode = get_bool_opt(options, 'wrapcode', False)
-
- if self.tagsfile:
- if not ctags:
- raise RuntimeError('The "ctags" package must to be installed '
- 'to be able to use the "tagsfile" feature.')
- self._ctags = ctags.CTags(self.tagsfile)
-
- linenos = options.get('linenos', False)
- if linenos == 'inline':
- self.linenos = 2
- elif linenos:
- # compatibility with <= 0.7
- self.linenos = 1
- else:
- self.linenos = 0
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
- self.nobackground = get_bool_opt(options, 'nobackground', False)
+
+ if self.tagsfile:
+ if not ctags:
+ raise RuntimeError('The "ctags" package must to be installed '
+ 'to be able to use the "tagsfile" feature.')
+ self._ctags = ctags.CTags(self.tagsfile)
+
+ linenos = options.get('linenos', False)
+ if linenos == 'inline':
+ self.linenos = 2
+ elif linenos:
+ # compatibility with <= 0.7
+ self.linenos = 1
+ else:
+ self.linenos = 0
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
+ self.nobackground = get_bool_opt(options, 'nobackground', False)
self.lineseparator = options.get('lineseparator', u'\n')
- self.lineanchors = options.get('lineanchors', '')
- self.linespans = options.get('linespans', '')
- self.anchorlinenos = options.get('anchorlinenos', False)
- self.hl_lines = set()
- for lineno in get_list_opt(options, 'hl_lines', []):
- try:
- self.hl_lines.add(int(lineno))
- except ValueError:
- pass
-
- self._create_stylesheet()
-
- def _get_css_class(self, ttype):
- """Return the css class of this token type prefixed with
- the classprefix option."""
- ttypeclass = _get_ttype_class(ttype)
- if ttypeclass:
- return self.classprefix + ttypeclass
- return ''
-
- def _get_css_classes(self, ttype):
- """Return the css classes of this token type prefixed with
- the classprefix option."""
- cls = self._get_css_class(ttype)
- while ttype not in STANDARD_TYPES:
- ttype = ttype.parent
- cls = self._get_css_class(ttype) + ' ' + cls
- return cls
-
- def _create_stylesheet(self):
- t2c = self.ttype2class = {Token: ''}
- c2s = self.class2style = {}
- for ttype, ndef in self.style:
- name = self._get_css_class(ttype)
- style = ''
- if ndef['color']:
+ self.lineanchors = options.get('lineanchors', '')
+ self.linespans = options.get('linespans', '')
+ self.anchorlinenos = options.get('anchorlinenos', False)
+ self.hl_lines = set()
+ for lineno in get_list_opt(options, 'hl_lines', []):
+ try:
+ self.hl_lines.add(int(lineno))
+ except ValueError:
+ pass
+
+ self._create_stylesheet()
+
+ def _get_css_class(self, ttype):
+ """Return the css class of this token type prefixed with
+ the classprefix option."""
+ ttypeclass = _get_ttype_class(ttype)
+ if ttypeclass:
+ return self.classprefix + ttypeclass
+ return ''
+
+ def _get_css_classes(self, ttype):
+ """Return the css classes of this token type prefixed with
+ the classprefix option."""
+ cls = self._get_css_class(ttype)
+ while ttype not in STANDARD_TYPES:
+ ttype = ttype.parent
+ cls = self._get_css_class(ttype) + ' ' + cls
+ return cls
+
+ def _create_stylesheet(self):
+ t2c = self.ttype2class = {Token: ''}
+ c2s = self.class2style = {}
+ for ttype, ndef in self.style:
+ name = self._get_css_class(ttype)
+ style = ''
+ if ndef['color']:
style += 'color: %s; ' % webify(ndef['color'])
- if ndef['bold']:
- style += 'font-weight: bold; '
- if ndef['italic']:
- style += 'font-style: italic; '
- if ndef['underline']:
- style += 'text-decoration: underline; '
- if ndef['bgcolor']:
+ if ndef['bold']:
+ style += 'font-weight: bold; '
+ if ndef['italic']:
+ style += 'font-style: italic; '
+ if ndef['underline']:
+ style += 'text-decoration: underline; '
+ if ndef['bgcolor']:
style += 'background-color: %s; ' % webify(ndef['bgcolor'])
- if ndef['border']:
+ if ndef['border']:
style += 'border: 1px solid %s; ' % webify(ndef['border'])
- if style:
- t2c[ttype] = name
- # save len(ttype) to enable ordering the styles by
- # hierarchy (necessary for CSS cascading rules!)
- c2s[name] = (style[:-2], ttype, len(ttype))
-
- def get_style_defs(self, arg=None):
- """
- Return CSS style definitions for the classes produced by the current
- highlighting style. ``arg`` can be a string or list of selectors to
- insert before the token type classes.
- """
- if arg is None:
- arg = ('cssclass' in self.options and '.'+self.cssclass or '')
- if isinstance(arg, string_types):
- args = [arg]
- else:
- args = list(arg)
-
- def prefix(cls):
- if cls:
- cls = '.' + cls
- tmp = []
- for arg in args:
- tmp.append((arg and arg + ' ' or '') + cls)
- return ', '.join(tmp)
-
- styles = [(level, ttype, cls, style)
- for cls, (style, ttype, level) in iteritems(self.class2style)
- if cls and style]
- styles.sort()
- lines = ['%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
- for (level, ttype, cls, style) in styles]
- if arg and not self.nobackground and \
- self.style.background_color is not None:
- text_style = ''
- if Text in self.ttype2class:
- text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
- lines.insert(0, '%s { background: %s;%s }' %
- (prefix(''), self.style.background_color, text_style))
- if self.style.highlight_color is not None:
- lines.insert(0, '%s.hll { background-color: %s }' %
- (prefix(''), self.style.highlight_color))
- return '\n'.join(lines)
-
- def _decodeifneeded(self, value):
- if isinstance(value, bytes):
- if self.encoding:
- return value.decode(self.encoding)
- return value.decode()
- return value
-
- def _wrap_full(self, inner, outfile):
- if self.cssfile:
- if os.path.isabs(self.cssfile):
- # it's an absolute filename
- cssfilename = self.cssfile
- else:
- try:
- filename = outfile.name
- if not filename or filename[0] == '<':
- # pseudo files, e.g. name == '<fdopen>'
- raise AttributeError
- cssfilename = os.path.join(os.path.dirname(filename),
- self.cssfile)
- except AttributeError:
- print('Note: Cannot determine output file name, '
- 'using current directory as base for the CSS file name',
- file=sys.stderr)
- cssfilename = self.cssfile
- # write CSS file only if noclobber_cssfile isn't given as an option.
- try:
- if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
+ if style:
+ t2c[ttype] = name
+ # save len(ttype) to enable ordering the styles by
+ # hierarchy (necessary for CSS cascading rules!)
+ c2s[name] = (style[:-2], ttype, len(ttype))
+
+ def get_style_defs(self, arg=None):
+ """
+ Return CSS style definitions for the classes produced by the current
+ highlighting style. ``arg`` can be a string or list of selectors to
+ insert before the token type classes.
+ """
+ if arg is None:
+ arg = ('cssclass' in self.options and '.'+self.cssclass or '')
+ if isinstance(arg, string_types):
+ args = [arg]
+ else:
+ args = list(arg)
+
+ def prefix(cls):
+ if cls:
+ cls = '.' + cls
+ tmp = []
+ for arg in args:
+ tmp.append((arg and arg + ' ' or '') + cls)
+ return ', '.join(tmp)
+
+ styles = [(level, ttype, cls, style)
+ for cls, (style, ttype, level) in iteritems(self.class2style)
+ if cls and style]
+ styles.sort()
+ lines = ['%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
+ for (level, ttype, cls, style) in styles]
+ if arg and not self.nobackground and \
+ self.style.background_color is not None:
+ text_style = ''
+ if Text in self.ttype2class:
+ text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
+ lines.insert(0, '%s { background: %s;%s }' %
+ (prefix(''), self.style.background_color, text_style))
+ if self.style.highlight_color is not None:
+ lines.insert(0, '%s.hll { background-color: %s }' %
+ (prefix(''), self.style.highlight_color))
+ return '\n'.join(lines)
+
+ def _decodeifneeded(self, value):
+ if isinstance(value, bytes):
+ if self.encoding:
+ return value.decode(self.encoding)
+ return value.decode()
+ return value
+
+ def _wrap_full(self, inner, outfile):
+ if self.cssfile:
+ if os.path.isabs(self.cssfile):
+ # it's an absolute filename
+ cssfilename = self.cssfile
+ else:
+ try:
+ filename = outfile.name
+ if not filename or filename[0] == '<':
+ # pseudo files, e.g. name == '<fdopen>'
+ raise AttributeError
+ cssfilename = os.path.join(os.path.dirname(filename),
+ self.cssfile)
+ except AttributeError:
+ print('Note: Cannot determine output file name, '
+ 'using current directory as base for the CSS file name',
+ file=sys.stderr)
+ cssfilename = self.cssfile
+ # write CSS file only if noclobber_cssfile isn't given as an option.
+ try:
+ if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
with open(cssfilename, "w") as cf:
cf.write(CSSFILE_TEMPLATE %
{'styledefs': self.get_style_defs('body')})
- except IOError as err:
- err.strerror = 'Error writing CSS file: ' + err.strerror
- raise
-
- yield 0, (DOC_HEADER_EXTERNALCSS %
- dict(title=self.title,
- cssfile=self.cssfile,
- encoding=self.encoding))
- else:
- yield 0, (DOC_HEADER %
- dict(title=self.title,
- styledefs=self.get_style_defs('body'),
- encoding=self.encoding))
-
- for t, line in inner:
- yield t, line
- yield 0, DOC_FOOTER
-
- def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO()
- lncount = 0
- for t, line in inner:
- if t:
- lncount += 1
- dummyoutfile.write(line)
-
- fl = self.linenostart
- mw = len(str(lncount + fl - 1))
- sp = self.linenospecial
- st = self.linenostep
- la = self.lineanchors
- aln = self.anchorlinenos
- nocls = self.noclasses
- if sp:
- lines = []
-
- for i in range(fl, fl+lncount):
- if i % st == 0:
- if i % sp == 0:
- if aln:
- lines.append('<a href="#%s-%d" class="special">%*d</a>' %
- (la, i, mw, i))
- else:
- lines.append('<span class="special">%*d</span>' % (mw, i))
- else:
- if aln:
- lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
- else:
- lines.append('%*d' % (mw, i))
- else:
- lines.append('')
- ls = '\n'.join(lines)
- else:
- lines = []
- for i in range(fl, fl+lncount):
- if i % st == 0:
- if aln:
- lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
- else:
- lines.append('%*d' % (mw, i))
- else:
- lines.append('')
- ls = '\n'.join(lines)
-
- # in case you wonder about the seemingly redundant <div> here: since the
- # content in the other cell also is wrapped in a div, some browsers in
- # some configurations seem to mess up the formatting...
- if nocls:
- yield 0, ('<table class="%stable">' % self.cssclass +
- '<tr><td><div class="linenodiv" '
- 'style="background-color: #f0f0f0; padding-right: 10px">'
- '<pre style="line-height: 125%">' +
- ls + '</pre></div></td><td class="code">')
- else:
- yield 0, ('<table class="%stable">' % self.cssclass +
- '<tr><td class="linenos"><div class="linenodiv"><pre>' +
- ls + '</pre></div></td><td class="code">')
- yield 0, dummyoutfile.getvalue()
- yield 0, '</td></tr></table>'
-
- def _wrap_inlinelinenos(self, inner):
- # need a list of lines since we need the width of a single number :(
- lines = list(inner)
- sp = self.linenospecial
- st = self.linenostep
- num = self.linenostart
- mw = len(str(len(lines) + num - 1))
-
- if self.noclasses:
- if sp:
- for t, line in lines:
- if num % sp == 0:
- style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
- else:
- style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
- yield 1, '<span style="%s">%*s </span>' % (
- style, mw, (num % st and ' ' or num)) + line
- num += 1
- else:
- for t, line in lines:
- yield 1, ('<span style="background-color: #f0f0f0; '
- 'padding: 0 5px 0 5px">%*s </span>' % (
- mw, (num % st and ' ' or num)) + line)
- num += 1
- elif sp:
- for t, line in lines:
- yield 1, '<span class="lineno%s">%*s </span>' % (
- num % sp == 0 and ' special' or '', mw,
- (num % st and ' ' or num)) + line
- num += 1
- else:
- for t, line in lines:
- yield 1, '<span class="lineno">%*s </span>' % (
- mw, (num % st and ' ' or num)) + line
- num += 1
-
- def _wrap_lineanchors(self, inner):
- s = self.lineanchors
- # subtract 1 since we have to increment i *before* yielding
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
- yield 1, '<a name="%s-%d"></a>' % (s, i) + line
- else:
- yield 0, line
-
- def _wrap_linespans(self, inner):
- s = self.linespans
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
- yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
- else:
- yield 0, line
-
- def _wrap_div(self, inner):
- style = []
- if (self.noclasses and not self.nobackground and
- self.style.background_color is not None):
- style.append('background: %s' % (self.style.background_color,))
- if self.cssstyles:
- style.append(self.cssstyles)
- style = '; '.join(style)
-
- yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
- (style and (' style="%s"' % style)) + '>')
- for tup in inner:
- yield tup
- yield 0, '</div>\n'
-
- def _wrap_pre(self, inner):
- style = []
- if self.prestyles:
- style.append(self.prestyles)
- if self.noclasses:
- style.append('line-height: 125%')
- style = '; '.join(style)
-
- if self.filename:
- yield 0, ('<span class="filename">' + self.filename + '</span>')
-
+ except IOError as err:
+ err.strerror = 'Error writing CSS file: ' + err.strerror
+ raise
+
+ yield 0, (DOC_HEADER_EXTERNALCSS %
+ dict(title=self.title,
+ cssfile=self.cssfile,
+ encoding=self.encoding))
+ else:
+ yield 0, (DOC_HEADER %
+ dict(title=self.title,
+ styledefs=self.get_style_defs('body'),
+ encoding=self.encoding))
+
+ for t, line in inner:
+ yield t, line
+ yield 0, DOC_FOOTER
+
+ def _wrap_tablelinenos(self, inner):
+ dummyoutfile = StringIO()
+ lncount = 0
+ for t, line in inner:
+ if t:
+ lncount += 1
+ dummyoutfile.write(line)
+
+ fl = self.linenostart
+ mw = len(str(lncount + fl - 1))
+ sp = self.linenospecial
+ st = self.linenostep
+ la = self.lineanchors
+ aln = self.anchorlinenos
+ nocls = self.noclasses
+ if sp:
+ lines = []
+
+ for i in range(fl, fl+lncount):
+ if i % st == 0:
+ if i % sp == 0:
+ if aln:
+ lines.append('<a href="#%s-%d" class="special">%*d</a>' %
+ (la, i, mw, i))
+ else:
+ lines.append('<span class="special">%*d</span>' % (mw, i))
+ else:
+ if aln:
+ lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
+ else:
+ lines.append('%*d' % (mw, i))
+ else:
+ lines.append('')
+ ls = '\n'.join(lines)
+ else:
+ lines = []
+ for i in range(fl, fl+lncount):
+ if i % st == 0:
+ if aln:
+ lines.append('<a href="#%s-%d">%*d</a>' % (la, i, mw, i))
+ else:
+ lines.append('%*d' % (mw, i))
+ else:
+ lines.append('')
+ ls = '\n'.join(lines)
+
+ # in case you wonder about the seemingly redundant <div> here: since the
+ # content in the other cell also is wrapped in a div, some browsers in
+ # some configurations seem to mess up the formatting...
+ if nocls:
+ yield 0, ('<table class="%stable">' % self.cssclass +
+ '<tr><td><div class="linenodiv" '
+ 'style="background-color: #f0f0f0; padding-right: 10px">'
+ '<pre style="line-height: 125%">' +
+ ls + '</pre></div></td><td class="code">')
+ else:
+ yield 0, ('<table class="%stable">' % self.cssclass +
+ '<tr><td class="linenos"><div class="linenodiv"><pre>' +
+ ls + '</pre></div></td><td class="code">')
+ yield 0, dummyoutfile.getvalue()
+ yield 0, '</td></tr></table>'
+
+ def _wrap_inlinelinenos(self, inner):
+ # need a list of lines since we need the width of a single number :(
+ lines = list(inner)
+ sp = self.linenospecial
+ st = self.linenostep
+ num = self.linenostart
+ mw = len(str(len(lines) + num - 1))
+
+ if self.noclasses:
+ if sp:
+ for t, line in lines:
+ if num % sp == 0:
+ style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
+ else:
+ style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
+ yield 1, '<span style="%s">%*s </span>' % (
+ style, mw, (num % st and ' ' or num)) + line
+ num += 1
+ else:
+ for t, line in lines:
+ yield 1, ('<span style="background-color: #f0f0f0; '
+ 'padding: 0 5px 0 5px">%*s </span>' % (
+ mw, (num % st and ' ' or num)) + line)
+ num += 1
+ elif sp:
+ for t, line in lines:
+ yield 1, '<span class="lineno%s">%*s </span>' % (
+ num % sp == 0 and ' special' or '', mw,
+ (num % st and ' ' or num)) + line
+ num += 1
+ else:
+ for t, line in lines:
+ yield 1, '<span class="lineno">%*s </span>' % (
+ mw, (num % st and ' ' or num)) + line
+ num += 1
+
+ def _wrap_lineanchors(self, inner):
+ s = self.lineanchors
+ # subtract 1 since we have to increment i *before* yielding
+ i = self.linenostart - 1
+ for t, line in inner:
+ if t:
+ i += 1
+ yield 1, '<a name="%s-%d"></a>' % (s, i) + line
+ else:
+ yield 0, line
+
+ def _wrap_linespans(self, inner):
+ s = self.linespans
+ i = self.linenostart - 1
+ for t, line in inner:
+ if t:
+ i += 1
+ yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
+ else:
+ yield 0, line
+
+ def _wrap_div(self, inner):
+ style = []
+ if (self.noclasses and not self.nobackground and
+ self.style.background_color is not None):
+ style.append('background: %s' % (self.style.background_color,))
+ if self.cssstyles:
+ style.append(self.cssstyles)
+ style = '; '.join(style)
+
+ yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
+ (style and (' style="%s"' % style)) + '>')
+ for tup in inner:
+ yield tup
+ yield 0, '</div>\n'
+
+ def _wrap_pre(self, inner):
+ style = []
+ if self.prestyles:
+ style.append(self.prestyles)
+ if self.noclasses:
+ style.append('line-height: 125%')
+ style = '; '.join(style)
+
+ if self.filename:
+ yield 0, ('<span class="filename">' + self.filename + '</span>')
+
# the empty span here is to keep leading empty lines from being
# ignored by HTML parsers
yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
- for tup in inner:
- yield tup
- yield 0, '</pre>'
-
+ for tup in inner:
+ yield tup
+ yield 0, '</pre>'
+
def _wrap_code(self, inner):
yield 0, '<code>'
for tup in inner:
yield tup
yield 0, '</code>'
- def _format_lines(self, tokensource):
- """
- Just format the tokens, without any wrapping tags.
- Yield individual lines.
- """
- nocls = self.noclasses
- lsep = self.lineseparator
- # for <span style=""> lookup only
- getcls = self.ttype2class.get
- c2s = self.class2style
- escape_table = _escape_html_table
- tagsfile = self.tagsfile
-
- lspan = ''
- line = []
- for ttype, value in tokensource:
- if nocls:
- cclass = getcls(ttype)
- while cclass is None:
- ttype = ttype.parent
- cclass = getcls(ttype)
- cspan = cclass and '<span style="%s">' % c2s[cclass][0] or ''
- else:
- cls = self._get_css_classes(ttype)
- cspan = cls and '<span class="%s">' % cls or ''
-
- parts = value.translate(escape_table).split('\n')
-
- if tagsfile and ttype in Token.Name:
- filename, linenumber = self._lookup_ctag(value)
- if linenumber:
- base, filename = os.path.split(filename)
- if base:
- base += '/'
- filename, extension = os.path.splitext(filename)
- url = self.tagurlformat % {'path': base, 'fname': filename,
- 'fext': extension}
- parts[0] = "<a href=\"%s#%s-%d\">%s" % \
- (url, self.lineanchors, linenumber, parts[0])
- parts[-1] = parts[-1] + "</a>"
-
- # for all but the last line
- for part in parts[:-1]:
- if line:
- if lspan != cspan:
- line.extend(((lspan and '</span>'), cspan, part,
- (cspan and '</span>'), lsep))
- else: # both are the same
- line.extend((part, (lspan and '</span>'), lsep))
- yield 1, ''.join(line)
- line = []
- elif part:
- yield 1, ''.join((cspan, part, (cspan and '</span>'), lsep))
- else:
- yield 1, lsep
- # for the last line
- if line and parts[-1]:
- if lspan != cspan:
- line.extend(((lspan and '</span>'), cspan, parts[-1]))
- lspan = cspan
- else:
- line.append(parts[-1])
- elif parts[-1]:
- line = [cspan, parts[-1]]
- lspan = cspan
- # else we neither have to open a new span nor set lspan
-
- if line:
- line.extend(((lspan and '</span>'), lsep))
- yield 1, ''.join(line)
-
- def _lookup_ctag(self, token):
- entry = ctags.TagEntry()
- if self._ctags.find(entry, token, 0):
- return entry['file'], entry['lineNumber']
- else:
- return None, None
-
- def _highlight_lines(self, tokensource):
- """
-        Highlight the lines specified in the `hl_lines` option by
- post-processing the token stream coming from `_format_lines`.
- """
- hls = self.hl_lines
-
- for i, (t, value) in enumerate(tokensource):
- if t != 1:
- yield t, value
- if i + 1 in hls: # i + 1 because Python indexes start at 0
- if self.noclasses:
- style = ''
- if self.style.highlight_color is not None:
- style = (' style="background-color: %s"' %
- (self.style.highlight_color,))
- yield 1, '<span%s>%s</span>' % (style, value)
- else:
- yield 1, '<span class="hll">%s</span>' % value
- else:
- yield 1, value
-
- def wrap(self, source, outfile):
- """
- Wrap the ``source``, which is a generator yielding
- individual lines, in custom generators. See docstring
- for `format`. Can be overridden.
- """
+ def _format_lines(self, tokensource):
+ """
+ Just format the tokens, without any wrapping tags.
+ Yield individual lines.
+ """
+ nocls = self.noclasses
+ lsep = self.lineseparator
+ # for <span style=""> lookup only
+ getcls = self.ttype2class.get
+ c2s = self.class2style
+ escape_table = _escape_html_table
+ tagsfile = self.tagsfile
+
+ lspan = ''
+ line = []
+ for ttype, value in tokensource:
+ if nocls:
+ cclass = getcls(ttype)
+ while cclass is None:
+ ttype = ttype.parent
+ cclass = getcls(ttype)
+ cspan = cclass and '<span style="%s">' % c2s[cclass][0] or ''
+ else:
+ cls = self._get_css_classes(ttype)
+ cspan = cls and '<span class="%s">' % cls or ''
+
+ parts = value.translate(escape_table).split('\n')
+
+ if tagsfile and ttype in Token.Name:
+ filename, linenumber = self._lookup_ctag(value)
+ if linenumber:
+ base, filename = os.path.split(filename)
+ if base:
+ base += '/'
+ filename, extension = os.path.splitext(filename)
+ url = self.tagurlformat % {'path': base, 'fname': filename,
+ 'fext': extension}
+ parts[0] = "<a href=\"%s#%s-%d\">%s" % \
+ (url, self.lineanchors, linenumber, parts[0])
+ parts[-1] = parts[-1] + "</a>"
+
+ # for all but the last line
+ for part in parts[:-1]:
+ if line:
+ if lspan != cspan:
+ line.extend(((lspan and '</span>'), cspan, part,
+ (cspan and '</span>'), lsep))
+ else: # both are the same
+ line.extend((part, (lspan and '</span>'), lsep))
+ yield 1, ''.join(line)
+ line = []
+ elif part:
+ yield 1, ''.join((cspan, part, (cspan and '</span>'), lsep))
+ else:
+ yield 1, lsep
+ # for the last line
+ if line and parts[-1]:
+ if lspan != cspan:
+ line.extend(((lspan and '</span>'), cspan, parts[-1]))
+ lspan = cspan
+ else:
+ line.append(parts[-1])
+ elif parts[-1]:
+ line = [cspan, parts[-1]]
+ lspan = cspan
+ # else we neither have to open a new span nor set lspan
+
+ if line:
+ line.extend(((lspan and '</span>'), lsep))
+ yield 1, ''.join(line)
+
+ def _lookup_ctag(self, token):
+ entry = ctags.TagEntry()
+ if self._ctags.find(entry, token, 0):
+ return entry['file'], entry['lineNumber']
+ else:
+ return None, None
+
+ def _highlight_lines(self, tokensource):
+ """
+        Highlight the lines specified in the `hl_lines` option by
+ post-processing the token stream coming from `_format_lines`.
+ """
+ hls = self.hl_lines
+
+ for i, (t, value) in enumerate(tokensource):
+ if t != 1:
+ yield t, value
+ if i + 1 in hls: # i + 1 because Python indexes start at 0
+ if self.noclasses:
+ style = ''
+ if self.style.highlight_color is not None:
+ style = (' style="background-color: %s"' %
+ (self.style.highlight_color,))
+ yield 1, '<span%s>%s</span>' % (style, value)
+ else:
+ yield 1, '<span class="hll">%s</span>' % value
+ else:
+ yield 1, value
+
+ def wrap(self, source, outfile):
+ """
+ Wrap the ``source``, which is a generator yielding
+ individual lines, in custom generators. See docstring
+ for `format`. Can be overridden.
+ """
if self.wrapcode:
return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
else:
return self._wrap_div(self._wrap_pre(source))
-
- def format_unencoded(self, tokensource, outfile):
- """
- The formatting process uses several nested generators; which of
- them are used is determined by the user's options.
-
- Each generator should take at least one argument, ``inner``,
- and wrap the pieces of text generated by this.
-
- Always yield 2-tuples: (code, text). If "code" is 1, the text
- is part of the original tokensource being highlighted, if it's
- 0, the text is some piece of wrapping. This makes it possible to
- use several different wrappers that process the original source
- linewise, e.g. line number generators.
- """
- source = self._format_lines(tokensource)
- if self.hl_lines:
- source = self._highlight_lines(source)
- if not self.nowrap:
- if self.linenos == 2:
- source = self._wrap_inlinelinenos(source)
- if self.lineanchors:
- source = self._wrap_lineanchors(source)
- if self.linespans:
- source = self._wrap_linespans(source)
- source = self.wrap(source, outfile)
- if self.linenos == 1:
- source = self._wrap_tablelinenos(source)
- if self.full:
- source = self._wrap_full(source, outfile)
-
- for t, piece in source:
- outfile.write(piece)
+
+ def format_unencoded(self, tokensource, outfile):
+ """
+ The formatting process uses several nested generators; which of
+ them are used is determined by the user's options.
+
+ Each generator should take at least one argument, ``inner``,
+ and wrap the pieces of text generated by this.
+
+ Always yield 2-tuples: (code, text). If "code" is 1, the text
+ is part of the original tokensource being highlighted, if it's
+ 0, the text is some piece of wrapping. This makes it possible to
+ use several different wrappers that process the original source
+ linewise, e.g. line number generators.
+ """
+ source = self._format_lines(tokensource)
+ if self.hl_lines:
+ source = self._highlight_lines(source)
+ if not self.nowrap:
+ if self.linenos == 2:
+ source = self._wrap_inlinelinenos(source)
+ if self.lineanchors:
+ source = self._wrap_lineanchors(source)
+ if self.linespans:
+ source = self._wrap_linespans(source)
+ source = self.wrap(source, outfile)
+ if self.linenos == 1:
+ source = self._wrap_tablelinenos(source)
+ if self.full:
+ source = self._wrap_full(source, outfile)
+
+ for t, piece in source:
+ outfile.write(piece)
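As a quick illustration of the `HtmlFormatter` options documented in the docstring above, here is a minimal usage sketch; the sample snippet, option values and CSS selector are illustrative only and are not part of this change:

.. sourcecode:: python

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # Hypothetical input; any source string works.
    code = "def foo(bar):\n    pass\n"

    # Table-style line numbers, highlight line 1, emit a full document.
    formatter = HtmlFormatter(linenos='table', cssclass='highlight',
                              hl_lines=[1], full=True, title='Example')
    html = highlight(code, PythonLexer(), formatter)

    # Without `full`, the CSS rules can be emitted separately and served
    # as a stylesheet alongside the generated markup.
    css = HtmlFormatter(cssclass='highlight').get_style_defs('.highlight')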
diff --git a/contrib/python/Pygments/py2/pygments/formatters/img.py b/contrib/python/Pygments/py2/pygments/formatters/img.py
index 6bb3364458..95056cc54a 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/img.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/img.py
@@ -1,93 +1,93 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.img
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for Pixmap output.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.img
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for Pixmap output.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import os
-import sys
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- get_choice_opt, xrange
-
-import subprocess
-
-# Import this carefully
-try:
- from PIL import Image, ImageDraw, ImageFont
- pil_available = True
-except ImportError:
- pil_available = False
-
-try:
- import _winreg
-except ImportError:
- try:
- import winreg as _winreg
- except ImportError:
- _winreg = None
-
-__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
- 'BmpImageFormatter']
-
-
-# For some unknown reason every font calls it something different
-STYLES = {
- 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
- 'ITALIC': ['Oblique', 'Italic'],
- 'BOLD': ['Bold'],
- 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
-}
-
-# A sane default for modern systems
+import sys
+
+from pygments.formatter import Formatter
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
+ get_choice_opt, xrange
+
+import subprocess
+
+# Import this carefully
+try:
+ from PIL import Image, ImageDraw, ImageFont
+ pil_available = True
+except ImportError:
+ pil_available = False
+
+try:
+ import _winreg
+except ImportError:
+ try:
+ import winreg as _winreg
+ except ImportError:
+ _winreg = None
+
+__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
+ 'BmpImageFormatter']
+
+
+# For some unknown reason every font calls it something different
+STYLES = {
+ 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
+ 'ITALIC': ['Oblique', 'Italic'],
+ 'BOLD': ['Bold'],
+ 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
+}
+
+# A sane default for modern systems
DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
-DEFAULT_FONT_NAME_WIN = 'Courier New'
+DEFAULT_FONT_NAME_WIN = 'Courier New'
DEFAULT_FONT_NAME_MAC = 'Menlo'
-
-
-class PilNotAvailable(ImportError):
- """When Python imaging library is not available"""
-
-
-class FontNotFound(Exception):
- """When there are no usable fonts specified"""
-
-
-class FontManager(object):
- """
- Manages a set of fonts: normal, italic, bold, etc...
- """
-
- def __init__(self, font_name, font_size=14):
- self.font_name = font_name
- self.font_size = font_size
- self.fonts = {}
- self.encoding = None
- if sys.platform.startswith('win'):
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_WIN
- self._create_win()
+
+
+class PilNotAvailable(ImportError):
+ """When Python imaging library is not available"""
+
+
+class FontNotFound(Exception):
+ """When there are no usable fonts specified"""
+
+
+class FontManager(object):
+ """
+ Manages a set of fonts: normal, italic, bold, etc...
+ """
+
+ def __init__(self, font_name, font_size=14):
+ self.font_name = font_name
+ self.font_size = font_size
+ self.fonts = {}
+ self.encoding = None
+ if sys.platform.startswith('win'):
+ if not font_name:
+ self.font_name = DEFAULT_FONT_NAME_WIN
+ self._create_win()
elif sys.platform.startswith('darwin'):
if not font_name:
self.font_name = DEFAULT_FONT_NAME_MAC
self._create_mac()
- else:
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_NIX
- self._create_nix()
-
- def _get_nix_font_path(self, name, style):
- proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'],
- stdout=subprocess.PIPE, stderr=None)
- stdout, _ = proc.communicate()
- if proc.returncode == 0:
- lines = stdout.splitlines()
+ else:
+ if not font_name:
+ self.font_name = DEFAULT_FONT_NAME_NIX
+ self._create_nix()
+
+ def _get_nix_font_path(self, name, style):
+ proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'],
+ stdout=subprocess.PIPE, stderr=None)
+ stdout, _ = proc.communicate()
+ if proc.returncode == 0:
+ lines = stdout.splitlines()
for line in lines:
if line.startswith(b'Fontconfig warning:'):
continue
@@ -95,28 +95,28 @@ class FontManager(object):
if path:
return path
return None
-
- def _create_nix(self):
- for name in STYLES['NORMAL']:
- path = self._get_nix_font_path(self.font_name, name)
- if path is not None:
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- break
- else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- for stylename in STYLES[style]:
- path = self._get_nix_font_path(self.font_name, stylename)
- if path is not None:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- break
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
-
+
+ def _create_nix(self):
+ for name in STYLES['NORMAL']:
+ path = self._get_nix_font_path(self.font_name, name)
+ if path is not None:
+ self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
+ break
+ else:
+ raise FontNotFound('No usable fonts named: "%s"' %
+ self.font_name)
+ for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
+ for stylename in STYLES[style]:
+ path = self._get_nix_font_path(self.font_name, stylename)
+ if path is not None:
+ self.fonts[style] = ImageFont.truetype(path, self.font_size)
+ break
+ else:
+ if style == 'BOLDITALIC':
+ self.fonts[style] = self.fonts['BOLD']
+ else:
+ self.fonts[style] = self.fonts['NORMAL']
+
def _get_mac_font_path(self, font_map, name, style):
return font_map.get((name + ' ' + style).strip().lower())
@@ -148,455 +148,455 @@ class FontManager(object):
else:
self.fonts[style] = self.fonts['NORMAL']
- def _lookup_win(self, key, basename, styles, fail=False):
- for suffix in ('', ' (TrueType)'):
- for style in styles:
- try:
- valname = '%s%s%s' % (basename, style and ' '+style, suffix)
- val, _ = _winreg.QueryValueEx(key, valname)
- return val
- except EnvironmentError:
- continue
- else:
- if fail:
- raise FontNotFound('Font %s (%s) not found in registry' %
- (basename, styles[0]))
- return None
-
- def _create_win(self):
- try:
- key = _winreg.OpenKey(
- _winreg.HKEY_LOCAL_MACHINE,
- r'Software\Microsoft\Windows NT\CurrentVersion\Fonts')
- except EnvironmentError:
- try:
- key = _winreg.OpenKey(
- _winreg.HKEY_LOCAL_MACHINE,
- r'Software\Microsoft\Windows\CurrentVersion\Fonts')
- except EnvironmentError:
- raise FontNotFound('Can\'t open Windows font registry key')
- try:
- path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- path = self._lookup_win(key, self.font_name, STYLES[style])
- if path:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
- finally:
- _winreg.CloseKey(key)
-
- def get_char_size(self):
- """
- Get the character size.
- """
- return self.fonts['NORMAL'].getsize('M')
-
- def get_font(self, bold, oblique):
- """
- Get the font based on bold and italic flags.
- """
- if bold and oblique:
- return self.fonts['BOLDITALIC']
- elif bold:
- return self.fonts['BOLD']
- elif oblique:
- return self.fonts['ITALIC']
- else:
- return self.fonts['NORMAL']
-
-
-class ImageFormatter(Formatter):
- """
- Create a PNG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 0.10
-
- Additional options accepted:
-
- `image_format`
-        An image format to output to, as recognised by PIL; these include:
-
- * "PNG" (default)
- * "JPEG"
- * "BMP"
- * "GIF"
-
- `line_pad`
- The extra spacing (in pixels) between each line of text.
-
- Default: 2
-
- `font_name`
- The font name to be used as the base font from which others, such as
-        bold and italic fonts, will be generated. This really should be a
- monospace font to look sane.
-
+ def _lookup_win(self, key, basename, styles, fail=False):
+ for suffix in ('', ' (TrueType)'):
+ for style in styles:
+ try:
+ valname = '%s%s%s' % (basename, style and ' '+style, suffix)
+ val, _ = _winreg.QueryValueEx(key, valname)
+ return val
+ except EnvironmentError:
+ continue
+ else:
+ if fail:
+ raise FontNotFound('Font %s (%s) not found in registry' %
+ (basename, styles[0]))
+ return None
+
+ def _create_win(self):
+ try:
+ key = _winreg.OpenKey(
+ _winreg.HKEY_LOCAL_MACHINE,
+ r'Software\Microsoft\Windows NT\CurrentVersion\Fonts')
+ except EnvironmentError:
+ try:
+ key = _winreg.OpenKey(
+ _winreg.HKEY_LOCAL_MACHINE,
+ r'Software\Microsoft\Windows\CurrentVersion\Fonts')
+ except EnvironmentError:
+ raise FontNotFound('Can\'t open Windows font registry key')
+ try:
+ path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
+ self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
+ for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
+ path = self._lookup_win(key, self.font_name, STYLES[style])
+ if path:
+ self.fonts[style] = ImageFont.truetype(path, self.font_size)
+ else:
+ if style == 'BOLDITALIC':
+ self.fonts[style] = self.fonts['BOLD']
+ else:
+ self.fonts[style] = self.fonts['NORMAL']
+ finally:
+ _winreg.CloseKey(key)
+
+ def get_char_size(self):
+ """
+ Get the character size.
+ """
+ return self.fonts['NORMAL'].getsize('M')
+
+ def get_font(self, bold, oblique):
+ """
+ Get the font based on bold and italic flags.
+ """
+ if bold and oblique:
+ return self.fonts['BOLDITALIC']
+ elif bold:
+ return self.fonts['BOLD']
+ elif oblique:
+ return self.fonts['ITALIC']
+ else:
+ return self.fonts['NORMAL']
+
+
+class ImageFormatter(Formatter):
+ """
+ Create a PNG image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 0.10
+
+ Additional options accepted:
+
+ `image_format`
+        An image format to output to, as recognised by PIL; these include:
+
+ * "PNG" (default)
+ * "JPEG"
+ * "BMP"
+ * "GIF"
+
+ `line_pad`
+ The extra spacing (in pixels) between each line of text.
+
+ Default: 2
+
+ `font_name`
+ The font name to be used as the base font from which others, such as
+        bold and italic fonts, will be generated. This really should be a
+ monospace font to look sane.
+
Default: "Courier New" on Windows, "Menlo" on Mac OS, and
"DejaVu Sans Mono" on \\*nix
-
- `font_size`
- The font size in points to be used.
-
- Default: 14
-
- `image_pad`
- The padding, in pixels to be used at each edge of the resulting image.
-
- Default: 10
-
- `line_numbers`
- Whether line numbers should be shown: True/False
-
- Default: True
-
- `line_number_start`
- The line number of the first line.
-
- Default: 1
-
- `line_number_step`
- The step used when printing line numbers.
-
- Default: 1
-
- `line_number_bg`
- The background colour (in "#123456" format) of the line number bar, or
- None to use the style background color.
-
- Default: "#eed"
-
- `line_number_fg`
- The text color of the line numbers (in "#123456"-like format).
-
- Default: "#886"
-
- `line_number_chars`
- The number of columns of line numbers allowable in the line number
- margin.
-
- Default: 2
-
- `line_number_bold`
- Whether line numbers will be bold: True/False
-
- Default: False
-
- `line_number_italic`
- Whether line numbers will be italicized: True/False
-
- Default: False
-
- `line_number_separator`
- Whether a line will be drawn between the line number area and the
- source code area: True/False
-
- Default: True
-
- `line_number_pad`
- The horizontal padding (in pixels) between the line number margin, and
- the source code area.
-
- Default: 6
-
- `hl_lines`
- Specify a list of lines to be highlighted.
-
- .. versionadded:: 1.2
-
- Default: empty list
-
- `hl_color`
- Specify the color for highlighting lines.
-
- .. versionadded:: 1.2
-
- Default: highlight color of the selected style
- """
-
- # Required by the pygments mapper
- name = 'img'
- aliases = ['img', 'IMG', 'png']
- filenames = ['*.png']
-
- unicodeoutput = False
-
- default_image_format = 'png'
-
- def __init__(self, **options):
- """
- See the class docstring for explanation of options.
- """
- if not pil_available:
- raise PilNotAvailable(
- 'Python Imaging Library is required for this formatter')
- Formatter.__init__(self, **options)
- self.encoding = 'latin1' # let pygments.format() do the right thing
- # Read the style
- self.styles = dict(self.style)
- if self.style.background_color is None:
- self.background_color = '#fff'
- else:
- self.background_color = self.style.background_color
- # Image options
- self.image_format = get_choice_opt(
- options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
- self.default_image_format, normcase=True)
- self.image_pad = get_int_opt(options, 'image_pad', 10)
- self.line_pad = get_int_opt(options, 'line_pad', 2)
- # The fonts
- fontsize = get_int_opt(options, 'font_size', 14)
- self.fonts = FontManager(options.get('font_name', ''), fontsize)
- self.fontw, self.fonth = self.fonts.get_char_size()
- # Line number options
- self.line_number_fg = options.get('line_number_fg', '#886')
- self.line_number_bg = options.get('line_number_bg', '#eed')
- self.line_number_chars = get_int_opt(options,
- 'line_number_chars', 2)
- self.line_number_bold = get_bool_opt(options,
- 'line_number_bold', False)
- self.line_number_italic = get_bool_opt(options,
- 'line_number_italic', False)
- self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
- self.line_numbers = get_bool_opt(options, 'line_numbers', True)
- self.line_number_separator = get_bool_opt(options,
- 'line_number_separator', True)
- self.line_number_step = get_int_opt(options, 'line_number_step', 1)
- self.line_number_start = get_int_opt(options, 'line_number_start', 1)
- if self.line_numbers:
- self.line_number_width = (self.fontw * self.line_number_chars +
- self.line_number_pad * 2)
- else:
- self.line_number_width = 0
- self.hl_lines = []
- hl_lines_str = get_list_opt(options, 'hl_lines', [])
- for line in hl_lines_str:
- try:
- self.hl_lines.append(int(line))
- except ValueError:
- pass
- self.hl_color = options.get('hl_color',
- self.style.highlight_color) or '#f90'
- self.drawables = []
-
- def get_style_defs(self, arg=''):
- raise NotImplementedError('The -S option is meaningless for the image '
- 'formatter. Use -O style=<stylename> instead.')
-
- def _get_line_height(self):
- """
- Get the height of a line.
- """
- return self.fonth + self.line_pad
-
- def _get_line_y(self, lineno):
- """
- Get the Y coordinate of a line number.
- """
- return lineno * self._get_line_height() + self.image_pad
-
- def _get_char_width(self):
- """
- Get the width of a character.
- """
- return self.fontw
-
- def _get_char_x(self, charno):
- """
- Get the X coordinate of a character position.
- """
- return charno * self.fontw + self.image_pad + self.line_number_width
-
- def _get_text_pos(self, charno, lineno):
- """
- Get the actual position for a character and line position.
- """
- return self._get_char_x(charno), self._get_line_y(lineno)
-
- def _get_linenumber_pos(self, lineno):
- """
- Get the actual position for the start of a line number.
- """
- return (self.image_pad, self._get_line_y(lineno))
-
- def _get_text_color(self, style):
- """
- Get the correct color for the token from the style.
- """
- if style['color'] is not None:
- fill = '#' + style['color']
- else:
- fill = '#000'
- return fill
-
- def _get_style_font(self, style):
- """
- Get the correct font for the style.
- """
- return self.fonts.get_font(style['bold'], style['italic'])
-
- def _get_image_size(self, maxcharno, maxlineno):
- """
- Get the required image size.
- """
- return (self._get_char_x(maxcharno) + self.image_pad,
- self._get_line_y(maxlineno + 0) + self.image_pad)
-
- def _draw_linenumber(self, posno, lineno):
- """
- Remember a line number drawable to paint later.
- """
- self._draw_text(
- self._get_linenumber_pos(posno),
- str(lineno).rjust(self.line_number_chars),
- font=self.fonts.get_font(self.line_number_bold,
- self.line_number_italic),
- fill=self.line_number_fg,
- )
-
- def _draw_text(self, pos, text, font, **kw):
- """
- Remember a single drawable tuple to paint later.
- """
- self.drawables.append((pos, text, font, kw))
-
- def _create_drawables(self, tokensource):
- """
- Create drawables for the token content.
- """
- lineno = charno = maxcharno = 0
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- style = self.styles[ttype]
- # TODO: make sure tab expansion happens earlier in the chain. It
- # really ought to be done on the input, as to do it right here is
- # quite complex.
- value = value.expandtabs(4)
- lines = value.splitlines(True)
- # print lines
- for i, line in enumerate(lines):
- temp = line.rstrip('\n')
- if temp:
- self._draw_text(
- self._get_text_pos(charno, lineno),
- temp,
- font = self._get_style_font(style),
- fill = self._get_text_color(style)
- )
- charno += len(temp)
- maxcharno = max(maxcharno, charno)
- if line.endswith('\n'):
- # add a line for each extra line in the value
- charno = 0
- lineno += 1
- self.maxcharno = maxcharno
- self.maxlineno = lineno
-
- def _draw_line_numbers(self):
- """
- Create drawables for the line numbers.
- """
- if not self.line_numbers:
- return
- for p in xrange(self.maxlineno):
- n = p + self.line_number_start
- if (n % self.line_number_step) == 0:
- self._draw_linenumber(p, n)
-
- def _paint_line_number_bg(self, im):
- """
- Paint the line number background on the image.
- """
- if not self.line_numbers:
- return
- if self.line_number_fg is None:
- return
- draw = ImageDraw.Draw(im)
- recth = im.size[-1]
- rectw = self.image_pad + self.line_number_width - self.line_number_pad
- draw.rectangle([(0, 0), (rectw, recth)],
- fill=self.line_number_bg)
+
+ `font_size`
+ The font size in points to be used.
+
+ Default: 14
+
+ `image_pad`
+ The padding, in pixels to be used at each edge of the resulting image.
+
+ Default: 10
+
+ `line_numbers`
+ Whether line numbers should be shown: True/False
+
+ Default: True
+
+ `line_number_start`
+ The line number of the first line.
+
+ Default: 1
+
+ `line_number_step`
+ The step used when printing line numbers.
+
+ Default: 1
+
+ `line_number_bg`
+ The background colour (in "#123456" format) of the line number bar, or
+ None to use the style background color.
+
+ Default: "#eed"
+
+ `line_number_fg`
+ The text color of the line numbers (in "#123456"-like format).
+
+ Default: "#886"
+
+ `line_number_chars`
+ The number of columns of line numbers allowable in the line number
+ margin.
+
+ Default: 2
+
+ `line_number_bold`
+ Whether line numbers will be bold: True/False
+
+ Default: False
+
+ `line_number_italic`
+ Whether line numbers will be italicized: True/False
+
+ Default: False
+
+ `line_number_separator`
+ Whether a line will be drawn between the line number area and the
+ source code area: True/False
+
+ Default: True
+
+ `line_number_pad`
+ The horizontal padding (in pixels) between the line number margin, and
+ the source code area.
+
+ Default: 6
+
+ `hl_lines`
+ Specify a list of lines to be highlighted.
+
+ .. versionadded:: 1.2
+
+ Default: empty list
+
+ `hl_color`
+ Specify the color for highlighting lines.
+
+ .. versionadded:: 1.2
+
+ Default: highlight color of the selected style
+ """
+
+ # Required by the pygments mapper
+ name = 'img'
+ aliases = ['img', 'IMG', 'png']
+ filenames = ['*.png']
+
+ unicodeoutput = False
+
+ default_image_format = 'png'
+
+ def __init__(self, **options):
+ """
+ See the class docstring for explanation of options.
+ """
+ if not pil_available:
+ raise PilNotAvailable(
+ 'Python Imaging Library is required for this formatter')
+ Formatter.__init__(self, **options)
+ self.encoding = 'latin1' # let pygments.format() do the right thing
+ # Read the style
+ self.styles = dict(self.style)
+ if self.style.background_color is None:
+ self.background_color = '#fff'
+ else:
+ self.background_color = self.style.background_color
+ # Image options
+ self.image_format = get_choice_opt(
+ options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
+ self.default_image_format, normcase=True)
+ self.image_pad = get_int_opt(options, 'image_pad', 10)
+ self.line_pad = get_int_opt(options, 'line_pad', 2)
+ # The fonts
+ fontsize = get_int_opt(options, 'font_size', 14)
+ self.fonts = FontManager(options.get('font_name', ''), fontsize)
+ self.fontw, self.fonth = self.fonts.get_char_size()
+ # Line number options
+ self.line_number_fg = options.get('line_number_fg', '#886')
+ self.line_number_bg = options.get('line_number_bg', '#eed')
+ self.line_number_chars = get_int_opt(options,
+ 'line_number_chars', 2)
+ self.line_number_bold = get_bool_opt(options,
+ 'line_number_bold', False)
+ self.line_number_italic = get_bool_opt(options,
+ 'line_number_italic', False)
+ self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
+ self.line_numbers = get_bool_opt(options, 'line_numbers', True)
+ self.line_number_separator = get_bool_opt(options,
+ 'line_number_separator', True)
+ self.line_number_step = get_int_opt(options, 'line_number_step', 1)
+ self.line_number_start = get_int_opt(options, 'line_number_start', 1)
+ if self.line_numbers:
+ self.line_number_width = (self.fontw * self.line_number_chars +
+ self.line_number_pad * 2)
+ else:
+ self.line_number_width = 0
+ self.hl_lines = []
+ hl_lines_str = get_list_opt(options, 'hl_lines', [])
+ for line in hl_lines_str:
+ try:
+ self.hl_lines.append(int(line))
+ except ValueError:
+ pass
+ self.hl_color = options.get('hl_color',
+ self.style.highlight_color) or '#f90'
+ self.drawables = []
+
+ def get_style_defs(self, arg=''):
+ raise NotImplementedError('The -S option is meaningless for the image '
+ 'formatter. Use -O style=<stylename> instead.')
+
+ def _get_line_height(self):
+ """
+ Get the height of a line.
+ """
+ return self.fonth + self.line_pad
+
+ def _get_line_y(self, lineno):
+ """
+ Get the Y coordinate of a line number.
+ """
+ return lineno * self._get_line_height() + self.image_pad
+
+ def _get_char_width(self):
+ """
+ Get the width of a character.
+ """
+ return self.fontw
+
+ def _get_char_x(self, charno):
+ """
+ Get the X coordinate of a character position.
+ """
+ return charno * self.fontw + self.image_pad + self.line_number_width
+
+ def _get_text_pos(self, charno, lineno):
+ """
+ Get the actual position for a character and line position.
+ """
+ return self._get_char_x(charno), self._get_line_y(lineno)
+
+ def _get_linenumber_pos(self, lineno):
+ """
+ Get the actual position for the start of a line number.
+ """
+ return (self.image_pad, self._get_line_y(lineno))
+
+ def _get_text_color(self, style):
+ """
+ Get the correct color for the token from the style.
+ """
+ if style['color'] is not None:
+ fill = '#' + style['color']
+ else:
+ fill = '#000'
+ return fill
+
+ def _get_style_font(self, style):
+ """
+ Get the correct font for the style.
+ """
+ return self.fonts.get_font(style['bold'], style['italic'])
+
+ def _get_image_size(self, maxcharno, maxlineno):
+ """
+ Get the required image size.
+ """
+ return (self._get_char_x(maxcharno) + self.image_pad,
+ self._get_line_y(maxlineno + 0) + self.image_pad)
+
+ def _draw_linenumber(self, posno, lineno):
+ """
+ Remember a line number drawable to paint later.
+ """
+ self._draw_text(
+ self._get_linenumber_pos(posno),
+ str(lineno).rjust(self.line_number_chars),
+ font=self.fonts.get_font(self.line_number_bold,
+ self.line_number_italic),
+ fill=self.line_number_fg,
+ )
+
+ def _draw_text(self, pos, text, font, **kw):
+ """
+ Remember a single drawable tuple to paint later.
+ """
+ self.drawables.append((pos, text, font, kw))
+
+ def _create_drawables(self, tokensource):
+ """
+ Create drawables for the token content.
+ """
+ lineno = charno = maxcharno = 0
+ for ttype, value in tokensource:
+ while ttype not in self.styles:
+ ttype = ttype.parent
+ style = self.styles[ttype]
+ # TODO: make sure tab expansion happens earlier in the chain. It
+ # really ought to be done on the input, as to do it right here is
+ # quite complex.
+ value = value.expandtabs(4)
+ lines = value.splitlines(True)
+ # print lines
+ for i, line in enumerate(lines):
+ temp = line.rstrip('\n')
+ if temp:
+ self._draw_text(
+ self._get_text_pos(charno, lineno),
+ temp,
+ font = self._get_style_font(style),
+ fill = self._get_text_color(style)
+ )
+ charno += len(temp)
+ maxcharno = max(maxcharno, charno)
+ if line.endswith('\n'):
+ # add a line for each extra line in the value
+ charno = 0
+ lineno += 1
+ self.maxcharno = maxcharno
+ self.maxlineno = lineno
+
+ def _draw_line_numbers(self):
+ """
+ Create drawables for the line numbers.
+ """
+ if not self.line_numbers:
+ return
+ for p in xrange(self.maxlineno):
+ n = p + self.line_number_start
+ if (n % self.line_number_step) == 0:
+ self._draw_linenumber(p, n)
+
+ def _paint_line_number_bg(self, im):
+ """
+ Paint the line number background on the image.
+ """
+ if not self.line_numbers:
+ return
+ if self.line_number_fg is None:
+ return
+ draw = ImageDraw.Draw(im)
+ recth = im.size[-1]
+ rectw = self.image_pad + self.line_number_width - self.line_number_pad
+ draw.rectangle([(0, 0), (rectw, recth)],
+ fill=self.line_number_bg)
if self.line_number_separator:
draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
- del draw
-
- def format(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- This implementation calculates where it should draw each token on the
- pixmap, then calculates the required pixmap size and draws the items.
- """
- self._create_drawables(tokensource)
- self._draw_line_numbers()
- im = Image.new(
- 'RGB',
- self._get_image_size(self.maxcharno, self.maxlineno),
- self.background_color
- )
- self._paint_line_number_bg(im)
- draw = ImageDraw.Draw(im)
- # Highlight
- if self.hl_lines:
- x = self.image_pad + self.line_number_width - self.line_number_pad + 1
- recth = self._get_line_height()
- rectw = im.size[0] - x
- for linenumber in self.hl_lines:
- y = self._get_line_y(linenumber - 1)
- draw.rectangle([(x, y), (x + rectw, y + recth)],
- fill=self.hl_color)
- for pos, value, font, kw in self.drawables:
- draw.text(pos, value, font=font, **kw)
- im.save(outfile, self.image_format.upper())
-
-
-# Add one formatter per format, so that the "-f gif" option gives the correct result
-# when used in pygmentize.
-
-class GifImageFormatter(ImageFormatter):
- """
- Create a GIF image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_gif'
- aliases = ['gif']
- filenames = ['*.gif']
- default_image_format = 'gif'
-
-
-class JpgImageFormatter(ImageFormatter):
- """
- Create a JPEG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_jpg'
- aliases = ['jpg', 'jpeg']
- filenames = ['*.jpg']
- default_image_format = 'jpeg'
-
-
-class BmpImageFormatter(ImageFormatter):
- """
- Create a bitmap image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_bmp'
- aliases = ['bmp', 'bitmap']
- filenames = ['*.bmp']
- default_image_format = 'bmp'
+ del draw
+
+ def format(self, tokensource, outfile):
+ """
+ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
+ tuples and write it into ``outfile``.
+
+ This implementation calculates where it should draw each token on the
+ pixmap, then calculates the required pixmap size and draws the items.
+ """
+ self._create_drawables(tokensource)
+ self._draw_line_numbers()
+ im = Image.new(
+ 'RGB',
+ self._get_image_size(self.maxcharno, self.maxlineno),
+ self.background_color
+ )
+ self._paint_line_number_bg(im)
+ draw = ImageDraw.Draw(im)
+ # Highlight
+ if self.hl_lines:
+ x = self.image_pad + self.line_number_width - self.line_number_pad + 1
+ recth = self._get_line_height()
+ rectw = im.size[0] - x
+ for linenumber in self.hl_lines:
+ y = self._get_line_y(linenumber - 1)
+ draw.rectangle([(x, y), (x + rectw, y + recth)],
+ fill=self.hl_color)
+ for pos, value, font, kw in self.drawables:
+ draw.text(pos, value, font=font, **kw)
+ im.save(outfile, self.image_format.upper())
+
+
+# Add one formatter per format, so that the "-f gif" option gives the correct result
+# when used in pygmentize.
+
+class GifImageFormatter(ImageFormatter):
+ """
+ Create a GIF image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'img_gif'
+ aliases = ['gif']
+ filenames = ['*.gif']
+ default_image_format = 'gif'
+
+
+class JpgImageFormatter(ImageFormatter):
+ """
+ Create a JPEG image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'img_jpg'
+ aliases = ['jpg', 'jpeg']
+ filenames = ['*.jpg']
+ default_image_format = 'jpeg'
+
+
+class BmpImageFormatter(ImageFormatter):
+ """
+ Create a bitmap image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'img_bmp'
+ aliases = ['bmp', 'bitmap']
+ filenames = ['*.bmp']
+ default_image_format = 'bmp'
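A minimal usage sketch for the ImageFormatter defined above (not part of the diff itself), assuming Pillow/PIL is installed; the option names follow the class docstring:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters.img import ImageFormatter

# highlight() returns raw image bytes here, because ImageFormatter writes binary output.
png_bytes = highlight(
    "def foo(bar):\n    return bar\n",
    PythonLexer(),
    ImageFormatter(font_size=14, line_numbers=True, hl_lines=[2]),
)
with open("example.png", "wb") as fh:
    fh.write(png_bytes)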
diff --git a/contrib/python/Pygments/py2/pygments/formatters/irc.py b/contrib/python/Pygments/py2/pygments/formatters/irc.py
index 0650492a01..45f330f3ca 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/irc.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/irc.py
@@ -1,30 +1,30 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.irc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for IRC output
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.irc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for IRC output
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-
-from pygments.formatter import Formatter
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pygments.util import get_choice_opt
-
-
-__all__ = ['IRCFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-IRC_COLORS = {
- Token: ('', ''),
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import sys
+
+from pygments.formatter import Formatter
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Token, Whitespace
+from pygments.util import get_choice_opt
+
+
+__all__ = ['IRCFormatter']
+
+
+#: Map token types to a tuple of color values for light and dark
+#: backgrounds.
+IRC_COLORS = {
+ Token: ('', ''),
+
Whitespace: ('gray', 'brightblack'),
Comment: ('gray', 'brightblack'),
Comment.Preproc: ('cyan', 'brightcyan'),
@@ -43,140 +43,140 @@ IRC_COLORS = {
Name.Tag: ('brightblue', 'brightblue'),
String: ('yellow', 'yellow'),
Number: ('blue', 'brightblue'),
-
+
Generic.Deleted: ('brightred', 'brightred'),
Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
+ Generic.Heading: ('**', '**'),
Generic.Subheading: ('*magenta*', '*brightmagenta*'),
Generic.Error: ('brightred', 'brightred'),
-
+
Error: ('_brightred_', '_brightred_'),
-}
-
-
-IRC_COLOR_MAP = {
- 'white': 0,
- 'black': 1,
+}
+
+
+IRC_COLOR_MAP = {
+ 'white': 0,
+ 'black': 1,
'blue': 2,
'brightgreen': 3,
'brightred': 4,
'yellow': 5,
'magenta': 6,
- 'orange': 7,
+ 'orange': 7,
'green': 7, #compat w/ ansi
'brightyellow': 8,
- 'lightgreen': 9,
+ 'lightgreen': 9,
'brightcyan': 9, # compat w/ ansi
'cyan': 10,
- 'lightblue': 11,
+ 'lightblue': 11,
'red': 11, # compat w/ ansi
'brightblue': 12,
'brightmagenta': 13,
'brightblack': 14,
'gray': 15,
-}
-
-def ircformat(color, text):
- if len(color) < 1:
- return text
- add = sub = ''
- if '_' in color: # italic
- add += '\x1D'
- sub = '\x1D' + sub
- color = color.strip('_')
- if '*' in color: # bold
- add += '\x02'
- sub = '\x02' + sub
- color = color.strip('*')
- # underline (\x1F) not supported
- # backgrounds (\x03FF,BB) not supported
- if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff
- add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2)
- sub = '\x03' + sub
- return add + text + sub
- return '<'+add+'>'+text+'</'+sub+'>'
-
-
-class IRCFormatter(Formatter):
- r"""
- Format tokens with IRC color sequences
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers in the output as well
- (default: ``False`` = no line numbers).
- """
- name = 'IRC'
- aliases = ['irc', 'IRC']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or IRC_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("\n%04d: " % self._lineno)
-
- def _format_unencoded_with_lineno(self, tokensource, outfile):
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- if value.endswith("\n"):
- self._write_lineno(outfile)
- value = value[:-1]
- color = self.colorscheme.get(ttype)
- while color is None:
- ttype = ttype[:-1]
- color = self.colorscheme.get(ttype)
- if color:
- color = color[self.darkbg]
- spl = value.split('\n')
- for line in spl[:-1]:
- self._write_lineno(outfile)
- if line:
- outfile.write(ircformat(color, line[:-1]))
- if spl[-1]:
- outfile.write(ircformat(color, spl[-1]))
- else:
- outfile.write(value)
-
- outfile.write("\n")
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._format_unencoded_with_lineno(tokensource, outfile)
- return
-
- for ttype, value in tokensource:
- color = self.colorscheme.get(ttype)
- while color is None:
- ttype = ttype[:-1]
- color = self.colorscheme.get(ttype)
- if color:
- color = color[self.darkbg]
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(ircformat(color, line))
- outfile.write('\n')
- if spl[-1]:
- outfile.write(ircformat(color, spl[-1]))
- else:
- outfile.write(value)
+}
+
+def ircformat(color, text):
+ if len(color) < 1:
+ return text
+ add = sub = ''
+ if '_' in color: # italic
+ add += '\x1D'
+ sub = '\x1D' + sub
+ color = color.strip('_')
+ if '*' in color: # bold
+ add += '\x02'
+ sub = '\x02' + sub
+ color = color.strip('*')
+ # underline (\x1F) not supported
+ # backgrounds (\x03FF,BB) not supported
+ if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff
+ add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2)
+ sub = '\x03' + sub
+ return add + text + sub
+ return '<'+add+'>'+text+'</'+sub+'>'
+
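A rough illustration (not part of the diff) of what ircformat() above produces for a few color specifications, based on the control codes and color map defined above:

# ircformat('', 'plain')           == 'plain'                      # no color: text unchanged
# ircformat('brightred', 'err')    == '\x0304err\x03'              # color 04 wrapped around the text
# ircformat('*brightred*', 'err')  == '\x02\x0304err\x03\x02'      # bold toggle added, closed in reverse order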
+
+class IRCFormatter(Formatter):
+ r"""
+ Format tokens with IRC color sequences
+
+ The `get_style_defs()` method doesn't do anything special since there is
+ no support for common styles.
+
+ Options accepted:
+
+ `bg`
+ Set to ``"light"`` or ``"dark"`` depending on the terminal's background
+ (default: ``"light"``).
+
+ `colorscheme`
+ A dictionary mapping token types to (lightbg, darkbg) color names or
+ ``None`` (default: ``None`` = use builtin colorscheme).
+
+ `linenos`
+ Set to ``True`` to have line numbers in the output as well
+ (default: ``False`` = no line numbers).
+ """
+ name = 'IRC'
+ aliases = ['irc', 'IRC']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.darkbg = get_choice_opt(options, 'bg',
+ ['light', 'dark'], 'light') == 'dark'
+ self.colorscheme = options.get('colorscheme', None) or IRC_COLORS
+ self.linenos = options.get('linenos', False)
+ self._lineno = 0
+
+ def _write_lineno(self, outfile):
+ self._lineno += 1
+ outfile.write("\n%04d: " % self._lineno)
+
+ def _format_unencoded_with_lineno(self, tokensource, outfile):
+ self._write_lineno(outfile)
+
+ for ttype, value in tokensource:
+ if value.endswith("\n"):
+ self._write_lineno(outfile)
+ value = value[:-1]
+ color = self.colorscheme.get(ttype)
+ while color is None:
+ ttype = ttype[:-1]
+ color = self.colorscheme.get(ttype)
+ if color:
+ color = color[self.darkbg]
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ self._write_lineno(outfile)
+ if line:
+ outfile.write(ircformat(color, line[:-1]))
+ if spl[-1]:
+ outfile.write(ircformat(color, spl[-1]))
+ else:
+ outfile.write(value)
+
+ outfile.write("\n")
+
+ def format_unencoded(self, tokensource, outfile):
+ if self.linenos:
+ self._format_unencoded_with_lineno(tokensource, outfile)
+ return
+
+ for ttype, value in tokensource:
+ color = self.colorscheme.get(ttype)
+ while color is None:
+ ttype = ttype[:-1]
+ color = self.colorscheme.get(ttype)
+ if color:
+ color = color[self.darkbg]
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write(ircformat(color, line))
+ outfile.write('\n')
+ if spl[-1]:
+ outfile.write(ircformat(color, spl[-1]))
+ else:
+ outfile.write(value)
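A short usage sketch for the IRCFormatter above (not part of the diff), driven through the standard highlight API:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters.irc import IRCFormatter

# bg and linenos are the documented options; the result is text containing mIRC control codes.
colored = highlight("print('hi')\n", PythonLexer(), IRCFormatter(bg='dark', linenos=True))
print(colored)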
diff --git a/contrib/python/Pygments/py2/pygments/formatters/latex.py b/contrib/python/Pygments/py2/pygments/formatters/latex.py
index 7f6aa9e307..86c1a18d36 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/latex.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/latex.py
@@ -1,418 +1,418 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.latex
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for LaTeX fancyvrb output.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.latex
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for LaTeX fancyvrb output.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import division
-
-from pygments.formatter import Formatter
-from pygments.lexer import Lexer
-from pygments.token import Token, STANDARD_TYPES
-from pygments.util import get_bool_opt, get_int_opt, StringIO, xrange, \
- iteritems
-
-
-__all__ = ['LatexFormatter']
-
-
-def escape_tex(text, commandprefix):
- return text.replace('\\', '\x00'). \
- replace('{', '\x01'). \
- replace('}', '\x02'). \
- replace('\x00', r'\%sZbs{}' % commandprefix). \
- replace('\x01', r'\%sZob{}' % commandprefix). \
- replace('\x02', r'\%sZcb{}' % commandprefix). \
- replace('^', r'\%sZca{}' % commandprefix). \
- replace('_', r'\%sZus{}' % commandprefix). \
- replace('&', r'\%sZam{}' % commandprefix). \
- replace('<', r'\%sZlt{}' % commandprefix). \
- replace('>', r'\%sZgt{}' % commandprefix). \
- replace('#', r'\%sZsh{}' % commandprefix). \
- replace('%', r'\%sZpc{}' % commandprefix). \
- replace('$', r'\%sZdl{}' % commandprefix). \
- replace('-', r'\%sZhy{}' % commandprefix). \
- replace("'", r'\%sZsq{}' % commandprefix). \
- replace('"', r'\%sZdq{}' % commandprefix). \
- replace('~', r'\%sZti{}' % commandprefix)
-
-
-DOC_TEMPLATE = r'''
-\documentclass{%(docclass)s}
-\usepackage{fancyvrb}
-\usepackage{color}
-\usepackage[%(encoding)s]{inputenc}
-%(preamble)s
-
-%(styledefs)s
-
-\begin{document}
-
-\section*{%(title)s}
-
-%(code)s
-\end{document}
-'''
-
-## Small explanation of the mess below :)
-#
-# The previous version of the LaTeX formatter just assigned a command to
-# each token type defined in the current style. That obviously is
-# problematic if the highlighted code is produced for a different style
-# than the style commands themselves.
-#
-# This version works much like the HTML formatter which assigns multiple
-# CSS classes to each <span> tag, from the most specific to the least
-# specific token type, thus falling back to the parent token type if one
-# is not defined. Here, the classes are there too and use the same short
-# forms given in token.STANDARD_TYPES.
-#
-# Highlighted code now only uses one custom command, which by default is
-# \PY and selectable by the commandprefix option (and in addition the
-# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
-# backwards compatibility purposes).
-#
-# \PY has two arguments: the classes, separated by +, and the text to
-# render in that style. The classes are resolved into the respective
-# style commands by magic, which serves to ignore unknown classes.
-#
-# The magic macros are:
-# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
-# to render in \PY@do. Their definition determines the style.
-# * \PY@reset resets \PY@it etc. to do nothing.
-# * \PY@toks parses the list of classes, using magic inspired by the
-# keyval package (but modified to use plusses instead of commas
-# because fancyvrb redefines commas inside its environments).
-# * \PY@tok processes one class, calling the \PY@tok@classname command
-# if it exists.
-# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
-# for its class.
-# * \PY resets the style, parses the classnames and then calls \PY@do.
-#
-# Tip: to read this code, print it out in substituted form using e.g.
-# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
-
-STYLE_TEMPLATE = r'''
-\makeatletter
-\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
- \let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
- \let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
-\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
-\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
- \%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
-\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
- \%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
-\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
-
-%(styles)s
-
-\def\%(cp)sZbs{\char`\\}
-\def\%(cp)sZus{\char`\_}
-\def\%(cp)sZob{\char`\{}
-\def\%(cp)sZcb{\char`\}}
-\def\%(cp)sZca{\char`\^}
-\def\%(cp)sZam{\char`\&}
-\def\%(cp)sZlt{\char`\<}
-\def\%(cp)sZgt{\char`\>}
-\def\%(cp)sZsh{\char`\#}
-\def\%(cp)sZpc{\char`\%%}
-\def\%(cp)sZdl{\char`\$}
-\def\%(cp)sZhy{\char`\-}
-\def\%(cp)sZsq{\char`\'}
-\def\%(cp)sZdq{\char`\"}
-\def\%(cp)sZti{\char`\~}
-%% for compatibility with earlier versions
-\def\%(cp)sZat{@}
-\def\%(cp)sZlb{[}
-\def\%(cp)sZrb{]}
-\makeatother
-'''
-
-
-def _get_ttype_name(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-class LatexFormatter(Formatter):
- r"""
- Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
- standard packages.
-
- Without the `full` option, code is formatted as one ``Verbatim``
- environment, like this:
-
- .. sourcecode:: latex
-
- \begin{Verbatim}[commandchars=\\\{\}]
- \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
- \PY{k}{pass}
- \end{Verbatim}
-
- The special command used here (``\PY``) and all the other macros it needs
- are output by the `get_style_defs` method.
-
- With the `full` option, a complete LaTeX document is output, including
- the command definitions in the preamble.
-
- The `get_style_defs()` method of a `LatexFormatter` returns a string
- containing ``\def`` commands defining the macros needed inside the
- ``Verbatim`` environments.
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `docclass`
- If the `full` option is enabled, this is the document class to use
- (default: ``'article'``).
-
- `preamble`
- If the `full` option is enabled, this can be further preamble commands,
- e.g. ``\usepackage`` (default: ``''``).
-
- `linenos`
- If set to ``True``, output line numbers (default: ``False``).
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `verboptions`
- Additional options given to the Verbatim environment (see the *fancyvrb*
- docs for possible values) (default: ``''``).
-
- `commandprefix`
- The LaTeX commands used to produce colored output are constructed
- using this prefix and some letters (default: ``'PY'``).
-
- .. versionadded:: 0.7
- .. versionchanged:: 0.10
- The default is now ``'PY'`` instead of ``'C'``.
-
- `texcomments`
- If set to ``True``, enables LaTeX comment lines. That is, LaTex markup
- in comment tokens is not escaped so that LaTeX can render it (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `mathescape`
- If set to ``True``, enables LaTeX math mode escape in comments. That
- is, ``'$...$'`` inside a comment will trigger math mode (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `escapeinside`
- If set to a string of length 2, enables escaping to LaTeX. Text
- delimited by these 2 characters is read as LaTeX code and
- typeset accordingly. It has no effect in string literals. It has
- no effect in comments if `texcomments` or `mathescape` is
- set. (default: ``''``).
-
- .. versionadded:: 2.0
-
- `envname`
- Allows you to pick an alternative environment name replacing Verbatim.
- The alternate environment still has to support Verbatim's option syntax.
- (default: ``'Verbatim'``).
-
- .. versionadded:: 2.0
- """
- name = 'LaTeX'
- aliases = ['latex', 'tex']
- filenames = ['*.tex']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.docclass = options.get('docclass', 'article')
- self.preamble = options.get('preamble', '')
- self.linenos = get_bool_opt(options, 'linenos', False)
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.verboptions = options.get('verboptions', '')
- self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.commandprefix = options.get('commandprefix', 'PY')
- self.texcomments = get_bool_opt(options, 'texcomments', False)
- self.mathescape = get_bool_opt(options, 'mathescape', False)
- self.escapeinside = options.get('escapeinside', '')
- if len(self.escapeinside) == 2:
- self.left = self.escapeinside[0]
- self.right = self.escapeinside[1]
- else:
- self.escapeinside = ''
- self.envname = options.get('envname', u'Verbatim')
-
- self._create_stylesheet()
-
- def _create_stylesheet(self):
- t2n = self.ttype2name = {Token: ''}
- c2d = self.cmd2def = {}
- cp = self.commandprefix
-
- def rgbcolor(col):
- if col:
- return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
- for i in (0, 2, 4)])
- else:
- return '1,1,1'
-
- for ttype, ndef in self.style:
- name = _get_ttype_name(ttype)
- cmndef = ''
- if ndef['bold']:
- cmndef += r'\let\$$@bf=\textbf'
- if ndef['italic']:
- cmndef += r'\let\$$@it=\textit'
- if ndef['underline']:
- cmndef += r'\let\$$@ul=\underline'
- if ndef['roman']:
- cmndef += r'\let\$$@ff=\textrm'
- if ndef['sans']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['mono']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['color']:
- cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
- rgbcolor(ndef['color']))
- if ndef['border']:
- cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
- r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}' %
- (rgbcolor(ndef['border']),
- rgbcolor(ndef['bgcolor'])))
- elif ndef['bgcolor']:
- cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
- r'\colorbox[rgb]{%s}{\strut ##1}}' %
- rgbcolor(ndef['bgcolor']))
- if cmndef == '':
- continue
- cmndef = cmndef.replace('$$', cp)
- t2n[ttype] = name
- c2d[name] = cmndef
-
- def get_style_defs(self, arg=''):
- """
- Return the command sequences needed to define the commands
- used to format text in the verbatim environment. ``arg`` is ignored.
- """
- cp = self.commandprefix
- styles = []
- for name, definition in iteritems(self.cmd2def):
- styles.append(r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' %
- (cp, name, definition))
- return STYLE_TEMPLATE % {'cp': self.commandprefix,
- 'styles': '\n'.join(styles)}
-
- def format_unencoded(self, tokensource, outfile):
- # TODO: add support for background colors
- t2n = self.ttype2name
- cp = self.commandprefix
-
- if self.full:
- realoutfile = outfile
- outfile = StringIO()
-
- outfile.write(u'\\begin{' + self.envname + u'}[commandchars=\\\\\\{\\}')
- if self.linenos:
- start, step = self.linenostart, self.linenostep
- outfile.write(u',numbers=left' +
- (start and u',firstnumber=%d' % start or u'') +
- (step and u',stepnumber=%d' % step or u''))
- if self.mathescape or self.texcomments or self.escapeinside:
- outfile.write(u',codes={\\catcode`\\$=3\\catcode`\\^=7\\catcode`\\_=8}')
- if self.verboptions:
- outfile.write(u',' + self.verboptions)
- outfile.write(u']\n')
-
- for ttype, value in tokensource:
- if ttype in Token.Comment:
- if self.texcomments:
- # Try to guess comment starting lexeme and escape it ...
- start = value[0:1]
- for i in xrange(1, len(value)):
- if start[0] != value[i]:
- break
- start += value[i]
-
- value = value[len(start):]
- start = escape_tex(start, cp)
-
- # ... but do not escape inside comment.
- value = start + value
- elif self.mathescape:
- # Only escape parts not inside a math environment.
- parts = value.split('$')
- in_math = False
- for i, part in enumerate(parts):
- if not in_math:
- parts[i] = escape_tex(part, cp)
- in_math = not in_math
- value = '$'.join(parts)
- elif self.escapeinside:
- text = value
- value = ''
- while text:
- a, sep1, text = text.partition(self.left)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- value += escape_tex(a, cp) + b
- else:
- value += escape_tex(a + sep1 + b, cp)
- else:
- value += escape_tex(a, cp)
- else:
- value = escape_tex(value, cp)
- elif ttype not in Token.Escape:
- value = escape_tex(value, cp)
- styles = []
- while ttype is not Token:
- try:
- styles.append(t2n[ttype])
- except KeyError:
- # not in current style
- styles.append(_get_ttype_name(ttype))
- ttype = ttype.parent
- styleval = '+'.join(reversed(styles))
- if styleval:
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
- outfile.write('\n')
- if spl[-1]:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
- else:
- outfile.write(value)
-
- outfile.write(u'\\end{' + self.envname + u'}\n')
-
- if self.full:
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import division
+
+from pygments.formatter import Formatter
+from pygments.lexer import Lexer
+from pygments.token import Token, STANDARD_TYPES
+from pygments.util import get_bool_opt, get_int_opt, StringIO, xrange, \
+ iteritems
+
+
+__all__ = ['LatexFormatter']
+
+
+def escape_tex(text, commandprefix):
+ return text.replace('\\', '\x00'). \
+ replace('{', '\x01'). \
+ replace('}', '\x02'). \
+ replace('\x00', r'\%sZbs{}' % commandprefix). \
+ replace('\x01', r'\%sZob{}' % commandprefix). \
+ replace('\x02', r'\%sZcb{}' % commandprefix). \
+ replace('^', r'\%sZca{}' % commandprefix). \
+ replace('_', r'\%sZus{}' % commandprefix). \
+ replace('&', r'\%sZam{}' % commandprefix). \
+ replace('<', r'\%sZlt{}' % commandprefix). \
+ replace('>', r'\%sZgt{}' % commandprefix). \
+ replace('#', r'\%sZsh{}' % commandprefix). \
+ replace('%', r'\%sZpc{}' % commandprefix). \
+ replace('$', r'\%sZdl{}' % commandprefix). \
+ replace('-', r'\%sZhy{}' % commandprefix). \
+ replace("'", r'\%sZsq{}' % commandprefix). \
+ replace('"', r'\%sZdq{}' % commandprefix). \
+ replace('~', r'\%sZti{}' % commandprefix)
+
+
+DOC_TEMPLATE = r'''
+\documentclass{%(docclass)s}
+\usepackage{fancyvrb}
+\usepackage{color}
+\usepackage[%(encoding)s]{inputenc}
+%(preamble)s
+
+%(styledefs)s
+
+\begin{document}
+
+\section*{%(title)s}
+
+%(code)s
+\end{document}
+'''
+
+## Small explanation of the mess below :)
+#
+# The previous version of the LaTeX formatter just assigned a command to
+# each token type defined in the current style. That obviously is
+# problematic if the highlighted code is produced for a different style
+# than the style commands themselves.
+#
+# This version works much like the HTML formatter which assigns multiple
+# CSS classes to each <span> tag, from the most specific to the least
+# specific token type, thus falling back to the parent token type if one
+# is not defined. Here, the classes are there too and use the same short
+# forms given in token.STANDARD_TYPES.
+#
+# Highlighted code now only uses one custom command, which by default is
+# \PY and selectable by the commandprefix option (and in addition the
+# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
+# backwards compatibility purposes).
+#
+# \PY has two arguments: the classes, separated by +, and the text to
+# render in that style. The classes are resolved into the respective
+# style commands by magic, which serves to ignore unknown classes.
+#
+# The magic macros are:
+# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
+# to render in \PY@do. Their definition determines the style.
+# * \PY@reset resets \PY@it etc. to do nothing.
+# * \PY@toks parses the list of classes, using magic inspired by the
+# keyval package (but modified to use plusses instead of commas
+# because fancyvrb redefines commas inside its environments).
+# * \PY@tok processes one class, calling the \PY@tok@classname command
+# if it exists.
+# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
+# for its class.
+# * \PY resets the style, parses the classnames and then calls \PY@do.
+#
+# Tip: to read this code, print it out in substituted form using e.g.
+# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
+
+STYLE_TEMPLATE = r'''
+\makeatletter
+\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
+ \let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
+ \let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
+\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
+\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
+ \%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
+\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
+ \%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
+\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
+
+%(styles)s
+
+\def\%(cp)sZbs{\char`\\}
+\def\%(cp)sZus{\char`\_}
+\def\%(cp)sZob{\char`\{}
+\def\%(cp)sZcb{\char`\}}
+\def\%(cp)sZca{\char`\^}
+\def\%(cp)sZam{\char`\&}
+\def\%(cp)sZlt{\char`\<}
+\def\%(cp)sZgt{\char`\>}
+\def\%(cp)sZsh{\char`\#}
+\def\%(cp)sZpc{\char`\%%}
+\def\%(cp)sZdl{\char`\$}
+\def\%(cp)sZhy{\char`\-}
+\def\%(cp)sZsq{\char`\'}
+\def\%(cp)sZdq{\char`\"}
+\def\%(cp)sZti{\char`\~}
+%% for compatibility with earlier versions
+\def\%(cp)sZat{@}
+\def\%(cp)sZlb{[}
+\def\%(cp)sZrb{]}
+\makeatother
+'''
+
+
+def _get_ttype_name(ttype):
+ fname = STANDARD_TYPES.get(ttype)
+ if fname:
+ return fname
+ aname = ''
+ while fname is None:
+ aname = ttype[-1] + aname
+ ttype = ttype.parent
+ fname = STANDARD_TYPES.get(ttype)
+ return fname + aname
+
+
+class LatexFormatter(Formatter):
+ r"""
+ Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
+ standard packages.
+
+ Without the `full` option, code is formatted as one ``Verbatim``
+ environment, like this:
+
+ .. sourcecode:: latex
+
+ \begin{Verbatim}[commandchars=\\\{\}]
+ \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
+ \PY{k}{pass}
+ \end{Verbatim}
+
+ The special command used here (``\PY``) and all the other macros it needs
+ are output by the `get_style_defs` method.
+
+ With the `full` option, a complete LaTeX document is output, including
+ the command definitions in the preamble.
+
+ The `get_style_defs()` method of a `LatexFormatter` returns a string
+ containing ``\def`` commands defining the macros needed inside the
+ ``Verbatim`` environments.
+
+ Additional options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+
+ `full`
+ Tells the formatter to output a "full" document, i.e. a complete
+ self-contained document (default: ``False``).
+
+ `title`
+ If `full` is true, the title that should be used to caption the
+ document (default: ``''``).
+
+ `docclass`
+ If the `full` option is enabled, this is the document class to use
+ (default: ``'article'``).
+
+ `preamble`
+ If the `full` option is enabled, this can be further preamble commands,
+ e.g. ``\usepackage`` (default: ``''``).
+
+ `linenos`
+ If set to ``True``, output line numbers (default: ``False``).
+
+ `linenostart`
+ The line number for the first line (default: ``1``).
+
+ `linenostep`
+ If set to a number n > 1, only every nth line number is printed.
+
+ `verboptions`
+ Additional options given to the Verbatim environment (see the *fancyvrb*
+ docs for possible values) (default: ``''``).
+
+ `commandprefix`
+ The LaTeX commands used to produce colored output are constructed
+ using this prefix and some letters (default: ``'PY'``).
+
+ .. versionadded:: 0.7
+ .. versionchanged:: 0.10
+ The default is now ``'PY'`` instead of ``'C'``.
+
+ `texcomments`
+ If set to ``True``, enables LaTeX comment lines. That is, LaTex markup
+ in comment tokens is not escaped so that LaTeX can render it (default:
+ ``False``).
+
+ .. versionadded:: 1.2
+
+ `mathescape`
+ If set to ``True``, enables LaTeX math mode escape in comments. That
+ is, ``'$...$'`` inside a comment will trigger math mode (default:
+ ``False``).
+
+ .. versionadded:: 1.2
+
+ `escapeinside`
+ If set to a string of length 2, enables escaping to LaTeX. Text
+ delimited by these 2 characters is read as LaTeX code and
+ typeset accordingly. It has no effect in string literals. It has
+ no effect in comments if `texcomments` or `mathescape` is
+ set. (default: ``''``).
+
+ .. versionadded:: 2.0
+
+ `envname`
+ Allows you to pick an alternative environment name replacing Verbatim.
+ The alternate environment still has to support Verbatim's option syntax.
+ (default: ``'Verbatim'``).
+
+ .. versionadded:: 2.0
+ """
+ name = 'LaTeX'
+ aliases = ['latex', 'tex']
+ filenames = ['*.tex']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.docclass = options.get('docclass', 'article')
+ self.preamble = options.get('preamble', '')
+ self.linenos = get_bool_opt(options, 'linenos', False)
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.verboptions = options.get('verboptions', '')
+ self.nobackground = get_bool_opt(options, 'nobackground', False)
+ self.commandprefix = options.get('commandprefix', 'PY')
+ self.texcomments = get_bool_opt(options, 'texcomments', False)
+ self.mathescape = get_bool_opt(options, 'mathescape', False)
+ self.escapeinside = options.get('escapeinside', '')
+ if len(self.escapeinside) == 2:
+ self.left = self.escapeinside[0]
+ self.right = self.escapeinside[1]
+ else:
+ self.escapeinside = ''
+ self.envname = options.get('envname', u'Verbatim')
+
+ self._create_stylesheet()
+
+ def _create_stylesheet(self):
+ t2n = self.ttype2name = {Token: ''}
+ c2d = self.cmd2def = {}
+ cp = self.commandprefix
+
+ def rgbcolor(col):
+ if col:
+ return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
+ for i in (0, 2, 4)])
+ else:
+ return '1,1,1'
+
+ for ttype, ndef in self.style:
+ name = _get_ttype_name(ttype)
+ cmndef = ''
+ if ndef['bold']:
+ cmndef += r'\let\$$@bf=\textbf'
+ if ndef['italic']:
+ cmndef += r'\let\$$@it=\textit'
+ if ndef['underline']:
+ cmndef += r'\let\$$@ul=\underline'
+ if ndef['roman']:
+ cmndef += r'\let\$$@ff=\textrm'
+ if ndef['sans']:
+ cmndef += r'\let\$$@ff=\textsf'
+ if ndef['mono']:
+ cmndef += r'\let\$$@ff=\textsf'
+ if ndef['color']:
+ cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
+ rgbcolor(ndef['color']))
+ if ndef['border']:
+ cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
+ r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}' %
+ (rgbcolor(ndef['border']),
+ rgbcolor(ndef['bgcolor'])))
+ elif ndef['bgcolor']:
+ cmndef += (r'\def\$$@bc##1{\setlength{\fboxsep}{0pt}'
+ r'\colorbox[rgb]{%s}{\strut ##1}}' %
+ rgbcolor(ndef['bgcolor']))
+ if cmndef == '':
+ continue
+ cmndef = cmndef.replace('$$', cp)
+ t2n[ttype] = name
+ c2d[name] = cmndef
+
+ def get_style_defs(self, arg=''):
+ """
+ Return the command sequences needed to define the commands
+ used to format text in the verbatim environment. ``arg`` is ignored.
+ """
+ cp = self.commandprefix
+ styles = []
+ for name, definition in iteritems(self.cmd2def):
+ styles.append(r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' %
+ (cp, name, definition))
+ return STYLE_TEMPLATE % {'cp': self.commandprefix,
+ 'styles': '\n'.join(styles)}
+
+ def format_unencoded(self, tokensource, outfile):
+ # TODO: add support for background colors
+ t2n = self.ttype2name
+ cp = self.commandprefix
+
+ if self.full:
+ realoutfile = outfile
+ outfile = StringIO()
+
+ outfile.write(u'\\begin{' + self.envname + u'}[commandchars=\\\\\\{\\}')
+ if self.linenos:
+ start, step = self.linenostart, self.linenostep
+ outfile.write(u',numbers=left' +
+ (start and u',firstnumber=%d' % start or u'') +
+ (step and u',stepnumber=%d' % step or u''))
+ if self.mathescape or self.texcomments or self.escapeinside:
+ outfile.write(u',codes={\\catcode`\\$=3\\catcode`\\^=7\\catcode`\\_=8}')
+ if self.verboptions:
+ outfile.write(u',' + self.verboptions)
+ outfile.write(u']\n')
+
+ for ttype, value in tokensource:
+ if ttype in Token.Comment:
+ if self.texcomments:
+ # Try to guess comment starting lexeme and escape it ...
+ start = value[0:1]
+ for i in xrange(1, len(value)):
+ if start[0] != value[i]:
+ break
+ start += value[i]
+
+ value = value[len(start):]
+ start = escape_tex(start, cp)
+
+ # ... but do not escape inside comment.
+ value = start + value
+ elif self.mathescape:
+ # Only escape parts not inside a math environment.
+ parts = value.split('$')
+ in_math = False
+ for i, part in enumerate(parts):
+ if not in_math:
+ parts[i] = escape_tex(part, cp)
+ in_math = not in_math
+ value = '$'.join(parts)
+ elif self.escapeinside:
+ text = value
+ value = ''
+ while text:
+ a, sep1, text = text.partition(self.left)
+ if sep1:
+ b, sep2, text = text.partition(self.right)
+ if sep2:
+ value += escape_tex(a, cp) + b
+ else:
+ value += escape_tex(a + sep1 + b, cp)
+ else:
+ value += escape_tex(a, cp)
+ else:
+ value = escape_tex(value, cp)
+ elif ttype not in Token.Escape:
+ value = escape_tex(value, cp)
+ styles = []
+ while ttype is not Token:
+ try:
+ styles.append(t2n[ttype])
+ except KeyError:
+ # not in current style
+ styles.append(_get_ttype_name(ttype))
+ ttype = ttype.parent
+ styleval = '+'.join(reversed(styles))
+ if styleval:
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
+ outfile.write('\n')
+ if spl[-1]:
+ outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
+ else:
+ outfile.write(value)
+
+ outfile.write(u'\\end{' + self.envname + u'}\n')
+
+ if self.full:
encoding = self.encoding or 'utf8'
# map known existings encodings from LaTeX distribution
encoding = {
@@ -420,63 +420,63 @@ class LatexFormatter(Formatter):
'latin_1': 'latin1',
'iso_8859_1': 'latin1',
}.get(encoding.replace('-', '_'), encoding)
- realoutfile.write(DOC_TEMPLATE %
- dict(docclass = self.docclass,
- preamble = self.preamble,
- title = self.title,
+ realoutfile.write(DOC_TEMPLATE %
+ dict(docclass = self.docclass,
+ preamble = self.preamble,
+ title = self.title,
encoding = encoding,
- styledefs = self.get_style_defs(),
- code = outfile.getvalue()))
-
-
-class LatexEmbeddedLexer(Lexer):
- """
- This lexer takes one lexer as argument, the lexer for the language
- being formatted, and the left and right delimiters for escaped text.
-
- First everything is scanned using the language lexer to obtain
- strings and comments. All other consecutive tokens are merged and
- the resulting text is scanned for escaped segments, which are given
- the Token.Escape type. Finally text that is not escaped is scanned
- again with the language lexer.
- """
- def __init__(self, left, right, lang, **options):
- self.left = left
- self.right = right
- self.lang = lang
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- buf = ''
- idx = 0
- for i, t, v in self.lang.get_tokens_unprocessed(text):
- if t in Token.Comment or t in Token.String:
- if buf:
- for x in self.get_tokens_aux(idx, buf):
- yield x
- buf = ''
- yield i, t, v
- else:
- if not buf:
- idx = i
- buf += v
- if buf:
- for x in self.get_tokens_aux(idx, buf):
- yield x
-
- def get_tokens_aux(self, index, text):
- while text:
- a, sep1, text = text.partition(self.left)
- if a:
- for i, t, v in self.lang.get_tokens_unprocessed(a):
- yield index + i, t, v
- index += len(a)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- yield index + len(sep1), Token.Escape, b
- index += len(sep1) + len(b) + len(sep2)
- else:
- yield index, Token.Error, sep1
- index += len(sep1)
- text = b
+ styledefs = self.get_style_defs(),
+ code = outfile.getvalue()))
+
+
+class LatexEmbeddedLexer(Lexer):
+ """
+ This lexer takes one lexer as argument, the lexer for the language
+ being formatted, and the left and right delimiters for escaped text.
+
+ First everything is scanned using the language lexer to obtain
+ strings and comments. All other consecutive tokens are merged and
+ the resulting text is scanned for escaped segments, which are given
+ the Token.Escape type. Finally text that is not escaped is scanned
+ again with the language lexer.
+ """
+ def __init__(self, left, right, lang, **options):
+ self.left = left
+ self.right = right
+ self.lang = lang
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ buf = ''
+ idx = 0
+ for i, t, v in self.lang.get_tokens_unprocessed(text):
+ if t in Token.Comment or t in Token.String:
+ if buf:
+ for x in self.get_tokens_aux(idx, buf):
+ yield x
+ buf = ''
+ yield i, t, v
+ else:
+ if not buf:
+ idx = i
+ buf += v
+ if buf:
+ for x in self.get_tokens_aux(idx, buf):
+ yield x
+
+ def get_tokens_aux(self, index, text):
+ while text:
+ a, sep1, text = text.partition(self.left)
+ if a:
+ for i, t, v in self.lang.get_tokens_unprocessed(a):
+ yield index + i, t, v
+ index += len(a)
+ if sep1:
+ b, sep2, text = text.partition(self.right)
+ if sep2:
+ yield index + len(sep1), Token.Escape, b
+ index += len(sep1) + len(b) + len(sep2)
+ else:
+ yield index, Token.Error, sep1
+ index += len(sep1)
+ text = b
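A brief usage sketch (not part of the diff) combining the LatexFormatter and LatexEmbeddedLexer defined above; the '|' delimiters are an assumed example value for escapeinside:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters.latex import LatexFormatter, LatexEmbeddedLexer

# Standalone document with line numbers; get_style_defs() emits the \PY macro definitions.
tex = highlight("def foo(bar):\n    pass\n", PythonLexer(),
                LatexFormatter(full=True, linenos=True, title='example'))
macros = LatexFormatter().get_style_defs()

# With escapeinside, text between the delimiters is passed through to LaTeX unescaped.
embedded = LatexEmbeddedLexer('|', '|', PythonLexer())
tex2 = highlight("x = 1  # |\\textbf{note}|\n", embedded, LatexFormatter(escapeinside='||'))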
diff --git a/contrib/python/Pygments/py2/pygments/formatters/other.py b/contrib/python/Pygments/py2/pygments/formatters/other.py
index c09eff0cb4..c85cf6e26f 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/other.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/other.py
@@ -1,164 +1,164 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.other
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Other formatters: NullFormatter, RawTokenFormatter.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.other
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Other formatters: NullFormatter, RawTokenFormatter.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
from pygments.util import get_choice_opt
-from pygments.token import Token
-from pygments.console import colorize
-
-__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
-
-
-class NullFormatter(Formatter):
- """
- Output the text unchanged without any formatting.
- """
- name = 'Text only'
- aliases = ['text', 'null']
- filenames = ['*.txt']
-
- def format(self, tokensource, outfile):
- enc = self.encoding
- for ttype, value in tokensource:
- if enc:
- outfile.write(value.encode(enc))
- else:
- outfile.write(value)
-
-
-class RawTokenFormatter(Formatter):
- r"""
- Format tokens as a raw representation for storing token streams.
-
- The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
- be converted to a token stream with the `RawTokenLexer`, described in the
- :doc:`lexer list <lexers>`.
-
- Only two options are accepted:
-
- `compress`
- If set to ``'gz'`` or ``'bz2'``, compress the output with the given
- compression algorithm after encoding (default: ``''``).
- `error_color`
- If set to a color name, highlight error tokens using that color. If
- set but with no value, defaults to ``'red'``.
-
- .. versionadded:: 0.11
-
- """
- name = 'Raw tokens'
- aliases = ['raw', 'tokens']
- filenames = ['*.raw']
-
- unicodeoutput = False
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- # We ignore self.encoding if it is set, since it gets set for lexer
- # and formatter if given with -Oencoding on the command line.
- # The RawTokenFormatter outputs only ASCII. Override here.
- self.encoding = 'ascii' # let pygments.format() do the right thing
- self.compress = get_choice_opt(options, 'compress',
- ['', 'none', 'gz', 'bz2'], '')
- self.error_color = options.get('error_color', None)
- if self.error_color is True:
- self.error_color = 'red'
- if self.error_color is not None:
- try:
- colorize(self.error_color, '')
- except KeyError:
- raise ValueError("Invalid color %r specified" %
- self.error_color)
-
- def format(self, tokensource, outfile):
- try:
- outfile.write(b'')
- except TypeError:
- raise TypeError('The raw tokens formatter needs a binary '
- 'output file')
- if self.compress == 'gz':
- import gzip
- outfile = gzip.GzipFile('', 'wb', 9, outfile)
-
- def write(text):
- outfile.write(text.encode())
- flush = outfile.flush
- elif self.compress == 'bz2':
- import bz2
- compressor = bz2.BZ2Compressor(9)
-
- def write(text):
- outfile.write(compressor.compress(text.encode()))
-
- def flush():
- outfile.write(compressor.flush())
- outfile.flush()
- else:
- def write(text):
- outfile.write(text.encode())
- flush = outfile.flush
-
- if self.error_color:
- for ttype, value in tokensource:
- line = "%s\t%r\n" % (ttype, value)
- if ttype is Token.Error:
- write(colorize(self.error_color, line))
- else:
- write(line)
- else:
- for ttype, value in tokensource:
- write("%s\t%r\n" % (ttype, value))
- flush()
-
-
-TESTCASE_BEFORE = u'''\
+from pygments.token import Token
+from pygments.console import colorize
+
+__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
+
+
+class NullFormatter(Formatter):
+ """
+ Output the text unchanged without any formatting.
+ """
+ name = 'Text only'
+ aliases = ['text', 'null']
+ filenames = ['*.txt']
+
+ def format(self, tokensource, outfile):
+ enc = self.encoding
+ for ttype, value in tokensource:
+ if enc:
+ outfile.write(value.encode(enc))
+ else:
+ outfile.write(value)
+
+
+class RawTokenFormatter(Formatter):
+ r"""
+ Format tokens as a raw representation for storing token streams.
+
+ The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
+ be converted to a token stream with the `RawTokenLexer`, described in the
+ :doc:`lexer list <lexers>`.
+
+ Only two options are accepted:
+
+ `compress`
+ If set to ``'gz'`` or ``'bz2'``, compress the output with the given
+ compression algorithm after encoding (default: ``''``).
+ `error_color`
+ If set to a color name, highlight error tokens using that color. If
+ set but with no value, defaults to ``'red'``.
+
+ .. versionadded:: 0.11
+
+ """
+ name = 'Raw tokens'
+ aliases = ['raw', 'tokens']
+ filenames = ['*.raw']
+
+ unicodeoutput = False
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ # We ignore self.encoding if it is set, since it gets set for lexer
+ # and formatter if given with -Oencoding on the command line.
+ # The RawTokenFormatter outputs only ASCII. Override here.
+ self.encoding = 'ascii' # let pygments.format() do the right thing
+ self.compress = get_choice_opt(options, 'compress',
+ ['', 'none', 'gz', 'bz2'], '')
+ self.error_color = options.get('error_color', None)
+ if self.error_color is True:
+ self.error_color = 'red'
+ if self.error_color is not None:
+ try:
+ colorize(self.error_color, '')
+ except KeyError:
+ raise ValueError("Invalid color %r specified" %
+ self.error_color)
+
+ def format(self, tokensource, outfile):
+ try:
+ outfile.write(b'')
+ except TypeError:
+ raise TypeError('The raw tokens formatter needs a binary '
+ 'output file')
+ if self.compress == 'gz':
+ import gzip
+ outfile = gzip.GzipFile('', 'wb', 9, outfile)
+
+ def write(text):
+ outfile.write(text.encode())
+ flush = outfile.flush
+ elif self.compress == 'bz2':
+ import bz2
+ compressor = bz2.BZ2Compressor(9)
+
+ def write(text):
+ outfile.write(compressor.compress(text.encode()))
+
+ def flush():
+ outfile.write(compressor.flush())
+ outfile.flush()
+ else:
+ def write(text):
+ outfile.write(text.encode())
+ flush = outfile.flush
+
+ if self.error_color:
+ for ttype, value in tokensource:
+ line = "%s\t%r\n" % (ttype, value)
+ if ttype is Token.Error:
+ write(colorize(self.error_color, line))
+ else:
+ write(line)
+ else:
+ for ttype, value in tokensource:
+ write("%s\t%r\n" % (ttype, value))
+ flush()
+
+
+TESTCASE_BEFORE = u'''\
def testNeedsName(lexer):
- fragment = %r
- tokens = [
-'''
-TESTCASE_AFTER = u'''\
- ]
+ fragment = %r
+ tokens = [
+'''
+TESTCASE_AFTER = u'''\
+ ]
assert list(lexer.get_tokens(fragment)) == tokens
-'''
-
-
-class TestcaseFormatter(Formatter):
- """
- Format tokens as appropriate for a new testcase.
-
- .. versionadded:: 2.0
- """
- name = 'Testcase'
- aliases = ['testcase']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- if self.encoding is not None and self.encoding != 'utf-8':
- raise ValueError("Only None and utf-8 are allowed encodings.")
-
- def format(self, tokensource, outfile):
- indentation = ' ' * 12
- rawbuf = []
- outbuf = []
- for ttype, value in tokensource:
- rawbuf.append(value)
- outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
-
- before = TESTCASE_BEFORE % (u''.join(rawbuf),)
- during = u''.join(outbuf)
- after = TESTCASE_AFTER
- if self.encoding is None:
- outfile.write(before + during + after)
- else:
- outfile.write(before.encode('utf-8'))
- outfile.write(during.encode('utf-8'))
- outfile.write(after.encode('utf-8'))
- outfile.flush()
+'''
+
+
+class TestcaseFormatter(Formatter):
+ """
+ Format tokens as appropriate for a new testcase.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Testcase'
+ aliases = ['testcase']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ if self.encoding is not None and self.encoding != 'utf-8':
+ raise ValueError("Only None and utf-8 are allowed encodings.")
+
+ def format(self, tokensource, outfile):
+ indentation = ' ' * 12
+ rawbuf = []
+ outbuf = []
+ for ttype, value in tokensource:
+ rawbuf.append(value)
+ outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
+
+ before = TESTCASE_BEFORE % (u''.join(rawbuf),)
+ during = u''.join(outbuf)
+ after = TESTCASE_AFTER
+ if self.encoding is None:
+ outfile.write(before + during + after)
+ else:
+ outfile.write(before.encode('utf-8'))
+ outfile.write(during.encode('utf-8'))
+ outfile.write(after.encode('utf-8'))
+ outfile.flush()
diff --git a/contrib/python/Pygments/py2/pygments/formatters/rtf.py b/contrib/python/Pygments/py2/pygments/formatters/rtf.py
index 1246db2a73..13eced3e30 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/rtf.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/rtf.py
@@ -1,147 +1,147 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.rtf
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- A formatter that generates RTF files.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.rtf
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ A formatter that generates RTF files.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.util import get_int_opt, _surrogatepair
-
-
-__all__ = ['RtfFormatter']
-
-
-class RtfFormatter(Formatter):
- """
- Format tokens as RTF markup. This formatter automatically outputs full RTF
- documents with color information and other useful stuff. Perfect for Copy and
- Paste into Microsoft(R) Word(R) documents.
-
- Please note that ``encoding`` and ``outencoding`` options are ignored.
- The RTF format is ASCII natively, but handles unicode characters correctly
- thanks to escape sequences.
-
- .. versionadded:: 0.6
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `fontface`
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
+from pygments.util import get_int_opt, _surrogatepair
+
+
+__all__ = ['RtfFormatter']
+
+
+class RtfFormatter(Formatter):
+ """
+ Format tokens as RTF markup. This formatter automatically outputs full RTF
+ documents with color information and other useful stuff. Perfect for Copy and
+ Paste into Microsoft(R) Word(R) documents.
+
+ Please note that ``encoding`` and ``outencoding`` options are ignored.
+ The RTF format is ASCII natively, but handles unicode characters correctly
+ thanks to escape sequences.
+
+ .. versionadded:: 0.6
+
+ Additional options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+
+ `fontface`
The used font family, for example ``Bitstream Vera Sans``. Defaults to
- some generic font which is supposed to have fixed width.
-
- `fontsize`
- Size of the font used. Size is specified in half points. The
- default is 24 half-points, giving a size 12 font.
-
- .. versionadded:: 2.0
- """
- name = 'RTF'
- aliases = ['rtf']
- filenames = ['*.rtf']
-
- def __init__(self, **options):
- r"""
- Additional options accepted:
-
- ``fontface``
- Name of the font used. Could for example be ``'Courier New'``
- to further specify the default which is ``'\fmodern'``. The RTF
- specification claims that ``\fmodern`` are "Fixed-pitch serif
- and sans serif fonts". Hope every RTF implementation thinks
- the same about modern...
-
- """
- Formatter.__init__(self, **options)
- self.fontface = options.get('fontface') or ''
- self.fontsize = get_int_opt(options, 'fontsize', 0)
-
- def _escape(self, text):
- return text.replace(u'\\', u'\\\\') \
- .replace(u'{', u'\\{') \
- .replace(u'}', u'\\}')
-
- def _escape_text(self, text):
+ some generic font which is supposed to have fixed width.
+
+ `fontsize`
+ Size of the font used. Size is specified in half points. The
+ default is 24 half-points, giving a size 12 font.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RTF'
+ aliases = ['rtf']
+ filenames = ['*.rtf']
+
+ def __init__(self, **options):
+ r"""
+ Additional options accepted:
+
+ ``fontface``
+ Name of the font used. Could for example be ``'Courier New'``
+ to further specify the default which is ``'\fmodern'``. The RTF
+ specification claims that ``\fmodern`` are "Fixed-pitch serif
+ and sans serif fonts". Hope every RTF implementation thinks
+ the same about modern...
+
+ """
+ Formatter.__init__(self, **options)
+ self.fontface = options.get('fontface') or ''
+ self.fontsize = get_int_opt(options, 'fontsize', 0)
+
+ def _escape(self, text):
+ return text.replace(u'\\', u'\\\\') \
+ .replace(u'{', u'\\{') \
+ .replace(u'}', u'\\}')
+
+ def _escape_text(self, text):
# empty strings, should give a small performance improvement
- if not text:
- return u''
-
- # escape text
- text = self._escape(text)
-
- buf = []
- for c in text:
- cn = ord(c)
- if cn < (2**7):
- # ASCII character
- buf.append(str(c))
- elif (2**7) <= cn < (2**16):
- # single unicode escape sequence
- buf.append(u'{\\u%d}' % cn)
- elif (2**16) <= cn:
- # RTF limits unicode to 16 bits.
- # Force surrogate pairs
- buf.append(u'{\\u%d}{\\u%d}' % _surrogatepair(cn))
-
- return u''.join(buf).replace(u'\n', u'\\par\n')
-
- def format_unencoded(self, tokensource, outfile):
- # rtf 1.8 header
- outfile.write(u'{\\rtf1\\ansi\\uc0\\deff0'
- u'{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
- u'{\\colortbl;' % (self.fontface and
- u' ' + self._escape(self.fontface) or
- u''))
-
- # convert colors and save them in a mapping to access them later.
- color_mapping = {}
- offset = 1
- for _, style in self.style:
- for color in style['color'], style['bgcolor'], style['border']:
- if color and color not in color_mapping:
- color_mapping[color] = offset
- outfile.write(u'\\red%d\\green%d\\blue%d;' % (
- int(color[0:2], 16),
- int(color[2:4], 16),
- int(color[4:6], 16)
- ))
- offset += 1
- outfile.write(u'}\\f0 ')
- if self.fontsize:
- outfile.write(u'\\fs%d' % (self.fontsize))
-
- # highlight stream
- for ttype, value in tokensource:
- while not self.style.styles_token(ttype) and ttype.parent:
- ttype = ttype.parent
- style = self.style.style_for_token(ttype)
- buf = []
- if style['bgcolor']:
- buf.append(u'\\cb%d' % color_mapping[style['bgcolor']])
- if style['color']:
- buf.append(u'\\cf%d' % color_mapping[style['color']])
- if style['bold']:
- buf.append(u'\\b')
- if style['italic']:
- buf.append(u'\\i')
- if style['underline']:
- buf.append(u'\\ul')
- if style['border']:
- buf.append(u'\\chbrdr\\chcfpat%d' %
- color_mapping[style['border']])
- start = u''.join(buf)
- if start:
- outfile.write(u'{%s ' % start)
- outfile.write(self._escape_text(value))
- if start:
- outfile.write(u'}')
-
- outfile.write(u'}')
+ if not text:
+ return u''
+
+ # escape text
+ text = self._escape(text)
+
+ buf = []
+ for c in text:
+ cn = ord(c)
+ if cn < (2**7):
+ # ASCII character
+ buf.append(str(c))
+ elif (2**7) <= cn < (2**16):
+ # single unicode escape sequence
+ buf.append(u'{\\u%d}' % cn)
+ elif (2**16) <= cn:
+ # RTF limits unicode to 16 bits.
+ # Force surrogate pairs
+ buf.append(u'{\\u%d}{\\u%d}' % _surrogatepair(cn))
+
+ return u''.join(buf).replace(u'\n', u'\\par\n')
+
+ def format_unencoded(self, tokensource, outfile):
+ # rtf 1.8 header
+ outfile.write(u'{\\rtf1\\ansi\\uc0\\deff0'
+ u'{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
+ u'{\\colortbl;' % (self.fontface and
+ u' ' + self._escape(self.fontface) or
+ u''))
+
+ # convert colors and save them in a mapping to access them later.
+ color_mapping = {}
+ offset = 1
+ for _, style in self.style:
+ for color in style['color'], style['bgcolor'], style['border']:
+ if color and color not in color_mapping:
+ color_mapping[color] = offset
+ outfile.write(u'\\red%d\\green%d\\blue%d;' % (
+ int(color[0:2], 16),
+ int(color[2:4], 16),
+ int(color[4:6], 16)
+ ))
+ offset += 1
+ outfile.write(u'}\\f0 ')
+ if self.fontsize:
+ outfile.write(u'\\fs%d' % (self.fontsize))
+
+ # highlight stream
+ for ttype, value in tokensource:
+ while not self.style.styles_token(ttype) and ttype.parent:
+ ttype = ttype.parent
+ style = self.style.style_for_token(ttype)
+ buf = []
+ if style['bgcolor']:
+ buf.append(u'\\cb%d' % color_mapping[style['bgcolor']])
+ if style['color']:
+ buf.append(u'\\cf%d' % color_mapping[style['color']])
+ if style['bold']:
+ buf.append(u'\\b')
+ if style['italic']:
+ buf.append(u'\\i')
+ if style['underline']:
+ buf.append(u'\\ul')
+ if style['border']:
+ buf.append(u'\\chbrdr\\chcfpat%d' %
+ color_mapping[style['border']])
+ start = u''.join(buf)
+ if start:
+ outfile.write(u'{%s ' % start)
+ outfile.write(self._escape_text(value))
+ if start:
+ outfile.write(u'}')
+
+ outfile.write(u'}')
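A corresponding sketch for the RtfFormatter (example option values, nothing here comes from the diff itself). fontsize is in half-points, so 24 gives a 12pt font:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import RtfFormatter

rtf = highlight('x = 1\n', PythonLexer(),
                RtfFormatter(fontface='Courier New', fontsize=24))
with open('snippet.rtf', 'w') as f:   # the output is plain ASCII with RTF escapes
    f.write(rtf)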
diff --git a/contrib/python/Pygments/py2/pygments/formatters/svg.py b/contrib/python/Pygments/py2/pygments/formatters/svg.py
index ccfd2b3fff..6125a403f5 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/svg.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/svg.py
@@ -1,153 +1,153 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.svg
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for SVG output.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.svg
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for SVG output.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt
-
-__all__ = ['SvgFormatter']
-
-
-def escape_html(text):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.replace('&', '&amp;'). \
- replace('<', '&lt;'). \
- replace('>', '&gt;'). \
- replace('"', '&quot;'). \
- replace("'", '&#39;')
-
-
-class2style = {}
-
-class SvgFormatter(Formatter):
- """
- Format tokens as an SVG graphics file. This formatter is still experimental.
- Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
- coordinates containing ``<tspan>`` elements with the individual token styles.
-
- By default, this formatter outputs a full SVG document including doctype
- declaration and the ``<svg>`` root element.
-
- .. versionadded:: 0.9
-
- Additional options accepted:
-
- `nowrap`
- Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
- don't add a XML declaration and a doctype. If true, the `fontfamily`
- and `fontsize` options are ignored. Defaults to ``False``.
-
- `fontfamily`
- The value to give the wrapping ``<g>`` element's ``font-family``
- attribute, defaults to ``"monospace"``.
-
- `fontsize`
- The value to give the wrapping ``<g>`` element's ``font-size``
- attribute, defaults to ``"14px"``.
-
- `xoffset`
- Starting offset in X direction, defaults to ``0``.
-
- `yoffset`
- Starting offset in Y direction, defaults to the font size if it is given
- in pixels, or ``20`` else. (This is necessary since text coordinates
- refer to the text baseline, not the top edge.)
-
- `ystep`
- Offset to add to the Y coordinate for each subsequent line. This should
- roughly be the text size plus 5. It defaults to that value if the text
- size is given in pixels, or ``25`` else.
-
- `spacehack`
- Convert spaces in the source to ``&#160;``, which are non-breaking
- spaces. SVG provides the ``xml:space`` attribute to control how
- whitespace inside tags is handled; in theory, the ``preserve`` value
- could be used to keep all whitespace as-is. However, many current SVG
- viewers don't obey that rule, so this option is provided as a workaround
- and defaults to ``True``.
- """
- name = 'SVG'
- aliases = ['svg']
- filenames = ['*.svg']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.fontfamily = options.get('fontfamily', 'monospace')
- self.fontsize = options.get('fontsize', '14px')
- self.xoffset = get_int_opt(options, 'xoffset', 0)
- fs = self.fontsize.strip()
- if fs.endswith('px'): fs = fs[:-2].strip()
- try:
- int_fs = int(fs)
- except:
- int_fs = 20
- self.yoffset = get_int_opt(options, 'yoffset', int_fs)
- self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
- self.spacehack = get_bool_opt(options, 'spacehack', True)
- self._stylecache = {}
-
- def format_unencoded(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- For our implementation we put all lines in their own 'line group'.
- """
- x = self.xoffset
- y = self.yoffset
- if not self.nowrap:
- if self.encoding:
- outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
- self.encoding)
- else:
- outfile.write('<?xml version="1.0"?>\n')
- outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
- '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
- 'svg10.dtd">\n')
- outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
- outfile.write('<g font-family="%s" font-size="%s">\n' %
- (self.fontfamily, self.fontsize))
- outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (x, y))
- for ttype, value in tokensource:
- style = self._get_style(ttype)
- tspan = style and '<tspan' + style + '>' or ''
- tspanend = tspan and '</tspan>' or ''
- value = escape_html(value)
- if self.spacehack:
- value = value.expandtabs().replace(' ', '&#160;')
- parts = value.split('\n')
- for part in parts[:-1]:
- outfile.write(tspan + part + tspanend)
- y += self.ystep
- outfile.write('</text>\n<text x="%s" y="%s" '
- 'xml:space="preserve">' % (x, y))
- outfile.write(tspan + parts[-1] + tspanend)
- outfile.write('</text>')
-
- if not self.nowrap:
- outfile.write('</g></svg>\n')
-
- def _get_style(self, tokentype):
- if tokentype in self._stylecache:
- return self._stylecache[tokentype]
- otokentype = tokentype
- while not self.style.styles_token(tokentype):
- tokentype = tokentype.parent
- value = self.style.style_for_token(tokentype)
- result = ''
- if value['color']:
- result = ' fill="#' + value['color'] + '"'
- if value['bold']:
- result += ' font-weight="bold"'
- if value['italic']:
- result += ' font-style="italic"'
- self._stylecache[otokentype] = result
- return result
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
+from pygments.util import get_bool_opt, get_int_opt
+
+__all__ = ['SvgFormatter']
+
+
+def escape_html(text):
+ """Escape &, <, > as well as single and double quotes for HTML."""
+ return text.replace('&', '&amp;'). \
+ replace('<', '&lt;'). \
+ replace('>', '&gt;'). \
+ replace('"', '&quot;'). \
+ replace("'", '&#39;')
+
+
+class2style = {}
+
+class SvgFormatter(Formatter):
+ """
+ Format tokens as an SVG graphics file. This formatter is still experimental.
+ Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
+ coordinates containing ``<tspan>`` elements with the individual token styles.
+
+ By default, this formatter outputs a full SVG document including doctype
+ declaration and the ``<svg>`` root element.
+
+ .. versionadded:: 0.9
+
+ Additional options accepted:
+
+ `nowrap`
+ Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
+ don't add a XML declaration and a doctype. If true, the `fontfamily`
+ and `fontsize` options are ignored. Defaults to ``False``.
+
+ `fontfamily`
+ The value to give the wrapping ``<g>`` element's ``font-family``
+ attribute, defaults to ``"monospace"``.
+
+ `fontsize`
+ The value to give the wrapping ``<g>`` element's ``font-size``
+ attribute, defaults to ``"14px"``.
+
+ `xoffset`
+ Starting offset in X direction, defaults to ``0``.
+
+ `yoffset`
+ Starting offset in Y direction, defaults to the font size if it is given
+ in pixels, or ``20`` else. (This is necessary since text coordinates
+ refer to the text baseline, not the top edge.)
+
+ `ystep`
+ Offset to add to the Y coordinate for each subsequent line. This should
+ roughly be the text size plus 5. It defaults to that value if the text
+ size is given in pixels, or ``25`` else.
+
+ `spacehack`
+ Convert spaces in the source to ``&#160;``, which are non-breaking
+ spaces. SVG provides the ``xml:space`` attribute to control how
+ whitespace inside tags is handled; in theory, the ``preserve`` value
+ could be used to keep all whitespace as-is. However, many current SVG
+ viewers don't obey that rule, so this option is provided as a workaround
+ and defaults to ``True``.
+ """
+ name = 'SVG'
+ aliases = ['svg']
+ filenames = ['*.svg']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.nowrap = get_bool_opt(options, 'nowrap', False)
+ self.fontfamily = options.get('fontfamily', 'monospace')
+ self.fontsize = options.get('fontsize', '14px')
+ self.xoffset = get_int_opt(options, 'xoffset', 0)
+ fs = self.fontsize.strip()
+ if fs.endswith('px'): fs = fs[:-2].strip()
+ try:
+ int_fs = int(fs)
+ except:
+ int_fs = 20
+ self.yoffset = get_int_opt(options, 'yoffset', int_fs)
+ self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
+ self.spacehack = get_bool_opt(options, 'spacehack', True)
+ self._stylecache = {}
+
+ def format_unencoded(self, tokensource, outfile):
+ """
+ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
+ tuples and write it into ``outfile``.
+
+ For our implementation we put all lines in their own 'line group'.
+ """
+ x = self.xoffset
+ y = self.yoffset
+ if not self.nowrap:
+ if self.encoding:
+ outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
+ self.encoding)
+ else:
+ outfile.write('<?xml version="1.0"?>\n')
+ outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
+ '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
+ 'svg10.dtd">\n')
+ outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
+ outfile.write('<g font-family="%s" font-size="%s">\n' %
+ (self.fontfamily, self.fontsize))
+ outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (x, y))
+ for ttype, value in tokensource:
+ style = self._get_style(ttype)
+ tspan = style and '<tspan' + style + '>' or ''
+ tspanend = tspan and '</tspan>' or ''
+ value = escape_html(value)
+ if self.spacehack:
+ value = value.expandtabs().replace(' ', '&#160;')
+ parts = value.split('\n')
+ for part in parts[:-1]:
+ outfile.write(tspan + part + tspanend)
+ y += self.ystep
+ outfile.write('</text>\n<text x="%s" y="%s" '
+ 'xml:space="preserve">' % (x, y))
+ outfile.write(tspan + parts[-1] + tspanend)
+ outfile.write('</text>')
+
+ if not self.nowrap:
+ outfile.write('</g></svg>\n')
+
+ def _get_style(self, tokentype):
+ if tokentype in self._stylecache:
+ return self._stylecache[tokentype]
+ otokentype = tokentype
+ while not self.style.styles_token(tokentype):
+ tokentype = tokentype.parent
+ value = self.style.style_for_token(tokentype)
+ result = ''
+ if value['color']:
+ result = ' fill="#' + value['color'] + '"'
+ if value['bold']:
+ result += ' font-weight="bold"'
+ if value['italic']:
+ result += ' font-style="italic"'
+ self._stylecache[otokentype] = result
+ return result
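Likewise for the SvgFormatter; with nowrap left at its default the result is a complete SVG document, and a pixel fontsize lets yoffset/ystep be derived from it as described above (the option values below are only examples):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import SvgFormatter

svg = highlight('for i in range(3):\n    print(i)\n', PythonLexer(),
                SvgFormatter(fontfamily='monospace', fontsize='16px'))
with open('snippet.svg', 'w') as f:
    f.write(svg)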
diff --git a/contrib/python/Pygments/py2/pygments/formatters/terminal.py b/contrib/python/Pygments/py2/pygments/formatters/terminal.py
index e60bde912f..a9749e25e7 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/terminal.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/terminal.py
@@ -1,31 +1,31 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.terminal
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for terminal output with ANSI sequences.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.terminal
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for terminal output with ANSI sequences.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-
-from pygments.formatter import Formatter
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pygments.console import ansiformat
-from pygments.util import get_choice_opt
-
-
-__all__ = ['TerminalFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-TERMINAL_COLORS = {
- Token: ('', ''),
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import sys
+
+from pygments.formatter import Formatter
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Token, Whitespace
+from pygments.console import ansiformat
+from pygments.util import get_choice_opt
+
+
+__all__ = ['TerminalFormatter']
+
+
+#: Map token types to a tuple of color values for light and dark
+#: backgrounds.
+TERMINAL_COLORS = {
+ Token: ('', ''),
+
Whitespace: ('gray', 'brightblack'),
Comment: ('gray', 'brightblack'),
Comment.Preproc: ('cyan', 'brightcyan'),
@@ -44,93 +44,93 @@ TERMINAL_COLORS = {
Name.Tag: ('brightblue', 'brightblue'),
String: ('yellow', 'yellow'),
Number: ('blue', 'brightblue'),
-
+
Generic.Deleted: ('brightred', 'brightred'),
Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
+ Generic.Heading: ('**', '**'),
Generic.Subheading: ('*magenta*', '*brightmagenta*'),
- Generic.Prompt: ('**', '**'),
+ Generic.Prompt: ('**', '**'),
Generic.Error: ('brightred', 'brightred'),
-
+
Error: ('_brightred_', '_brightred_'),
-}
-
-
-class TerminalFormatter(Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a text console.
- Color sequences are terminated at newlines, so that paging the output
- works correctly.
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers on the terminal output as well
- (default: ``False`` = no line numbers).
- """
- name = 'Terminal'
- aliases = ['terminal', 'console']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def format(self, tokensource, outfile):
- # hack: if the output is a terminal and has an encoding set,
- # use that to avoid unicode encode problems
- if not self.encoding and hasattr(outfile, "encoding") and \
- hasattr(outfile, "isatty") and outfile.isatty() and \
- sys.version_info < (3,):
- self.encoding = outfile.encoding
- return Formatter.format(self, tokensource, outfile)
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
-
- def _get_color(self, ttype):
- # self.colorscheme is a dict containing usually generic types, so we
- # have to walk the tree of dots. The base Token type must be a key,
- # even if it's empty string, as in the default above.
- colors = self.colorscheme.get(ttype)
- while colors is None:
- ttype = ttype.parent
- colors = self.colorscheme.get(ttype)
- return colors[self.darkbg]
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- color = self._get_color(ttype)
-
- for line in value.splitlines(True):
- if color:
- outfile.write(ansiformat(color, line.rstrip('\n')))
- else:
- outfile.write(line.rstrip('\n'))
- if line.endswith('\n'):
- if self.linenos:
- self._write_lineno(outfile)
- else:
- outfile.write('\n')
-
- if self.linenos:
- outfile.write("\n")
+}
+
+
+class TerminalFormatter(Formatter):
+ r"""
+ Format tokens with ANSI color sequences, for output in a text console.
+ Color sequences are terminated at newlines, so that paging the output
+ works correctly.
+
+ The `get_style_defs()` method doesn't do anything special since there is
+ no support for common styles.
+
+ Options accepted:
+
+ `bg`
+ Set to ``"light"`` or ``"dark"`` depending on the terminal's background
+ (default: ``"light"``).
+
+ `colorscheme`
+ A dictionary mapping token types to (lightbg, darkbg) color names or
+ ``None`` (default: ``None`` = use builtin colorscheme).
+
+ `linenos`
+ Set to ``True`` to have line numbers on the terminal output as well
+ (default: ``False`` = no line numbers).
+ """
+ name = 'Terminal'
+ aliases = ['terminal', 'console']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.darkbg = get_choice_opt(options, 'bg',
+ ['light', 'dark'], 'light') == 'dark'
+ self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
+ self.linenos = options.get('linenos', False)
+ self._lineno = 0
+
+ def format(self, tokensource, outfile):
+ # hack: if the output is a terminal and has an encoding set,
+ # use that to avoid unicode encode problems
+ if not self.encoding and hasattr(outfile, "encoding") and \
+ hasattr(outfile, "isatty") and outfile.isatty() and \
+ sys.version_info < (3,):
+ self.encoding = outfile.encoding
+ return Formatter.format(self, tokensource, outfile)
+
+ def _write_lineno(self, outfile):
+ self._lineno += 1
+ outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
+
+ def _get_color(self, ttype):
+ # self.colorscheme is a dict containing usually generic types, so we
+ # have to walk the tree of dots. The base Token type must be a key,
+ # even if it's empty string, as in the default above.
+ colors = self.colorscheme.get(ttype)
+ while colors is None:
+ ttype = ttype.parent
+ colors = self.colorscheme.get(ttype)
+ return colors[self.darkbg]
+
+ def format_unencoded(self, tokensource, outfile):
+ if self.linenos:
+ self._write_lineno(outfile)
+
+ for ttype, value in tokensource:
+ color = self._get_color(ttype)
+
+ for line in value.splitlines(True):
+ if color:
+ outfile.write(ansiformat(color, line.rstrip('\n')))
+ else:
+ outfile.write(line.rstrip('\n'))
+ if line.endswith('\n'):
+ if self.linenos:
+ self._write_lineno(outfile)
+ else:
+ outfile.write('\n')
+
+ if self.linenos:
+ outfile.write("\n")
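For the TerminalFormatter, a minimal sketch; bg='dark' picks the dark-background column of TERMINAL_COLORS and linenos=True enables the 4-digit line prefix (the ANSI escapes only render in a real terminal):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

code = 'def add(a, b):\n    return a + b\n'
print(highlight(code, PythonLexer(), TerminalFormatter(bg='dark', linenos=True)))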
diff --git a/contrib/python/Pygments/py2/pygments/formatters/terminal256.py b/contrib/python/Pygments/py2/pygments/formatters/terminal256.py
index 43ec01c24b..ac5814b004 100644
--- a/contrib/python/Pygments/py2/pygments/formatters/terminal256.py
+++ b/contrib/python/Pygments/py2/pygments/formatters/terminal256.py
@@ -1,54 +1,54 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.formatters.terminal256
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for 256-color terminal output with ANSI sequences.
-
- RGB-to-XTERM color conversion routines adapted from xterm256-conv
- tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
- by Wolfgang Frisch.
-
- Formatter version 1.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.formatters.terminal256
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for 256-color terminal output with ANSI sequences.
+
+ RGB-to-XTERM color conversion routines adapted from xterm256-conv
+ tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
+ by Wolfgang Frisch.
+
+ Formatter version 1.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# TODO:
-# - Options to map style's bold/underline/italic/border attributes
-# to some ANSI attributes (something like 'italic=underline')
-# - An option to output "style RGB to xterm RGB/index" conversion table
-# - An option to indicate that we are running in "reverse background"
-# xterm. This means that default colors are white-on-black, not
-# black-on-white, so colors like "white background" need to be converted
-# to "white background, black foreground", etc...
-
-import sys
-
-from pygments.formatter import Formatter
+ :license: BSD, see LICENSE for details.
+"""
+
+# TODO:
+# - Options to map style's bold/underline/italic/border attributes
+# to some ANSI attributes (something like 'italic=underline')
+# - An option to output "style RGB to xterm RGB/index" conversion table
+# - An option to indicate that we are running in "reverse background"
+# xterm. This means that default colors are white-on-black, not
+# black-on-white, so colors like "white background" need to be converted
+# to "white background, black foreground", etc...
+
+import sys
+
+from pygments.formatter import Formatter
from pygments.console import codes
from pygments.style import ansicolors
-
-
-__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
-
-
-class EscapeSequence:
- def __init__(self, fg=None, bg=None, bold=False, underline=False):
- self.fg = fg
- self.bg = bg
- self.bold = bold
- self.underline = underline
-
- def escape(self, attrs):
- if len(attrs):
- return "\x1b[" + ";".join(attrs) + "m"
- return ""
-
- def color_string(self):
- attrs = []
- if self.fg is not None:
+
+
+__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
+
+
+class EscapeSequence:
+ def __init__(self, fg=None, bg=None, bold=False, underline=False):
+ self.fg = fg
+ self.bg = bg
+ self.bold = bold
+ self.underline = underline
+
+ def escape(self, attrs):
+ if len(attrs):
+ return "\x1b[" + ";".join(attrs) + "m"
+ return ""
+
+ def color_string(self):
+ attrs = []
+ if self.fg is not None:
if self.fg in ansicolors:
esc = codes[self.fg.replace('ansi','')]
if ';01m' in esc:
@@ -57,54 +57,54 @@ class EscapeSequence:
attrs.append(esc[2:4])
else:
attrs.extend(("38", "5", "%i" % self.fg))
- if self.bg is not None:
+ if self.bg is not None:
if self.bg in ansicolors:
esc = codes[self.bg.replace('ansi','')]
# extract fg color code, add 10 for bg.
attrs.append(str(int(esc[2:4])+10))
else:
attrs.extend(("48", "5", "%i" % self.bg))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
- return self.escape(attrs)
-
- def true_color_string(self):
- attrs = []
- if self.fg:
- attrs.extend(("38", "2", str(self.fg[0]), str(self.fg[1]), str(self.fg[2])))
- if self.bg:
- attrs.extend(("48", "2", str(self.bg[0]), str(self.bg[1]), str(self.bg[2])))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
- return self.escape(attrs)
-
- def reset_string(self):
- attrs = []
- if self.fg is not None:
- attrs.append("39")
- if self.bg is not None:
- attrs.append("49")
- if self.bold or self.underline:
- attrs.append("00")
- return self.escape(attrs)
-
-
-class Terminal256Formatter(Formatter):
- """
- Format tokens with ANSI color sequences, for output in a 256-color
- terminal or console. Like in `TerminalFormatter` color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- The formatter takes colors from a style defined by the `style` option
- and converts them to nearest ANSI 256-color escape sequences. Bold and
- underline attributes from the style are preserved (and displayed).
-
- .. versionadded:: 0.9
-
+ if self.bold:
+ attrs.append("01")
+ if self.underline:
+ attrs.append("04")
+ return self.escape(attrs)
+
+ def true_color_string(self):
+ attrs = []
+ if self.fg:
+ attrs.extend(("38", "2", str(self.fg[0]), str(self.fg[1]), str(self.fg[2])))
+ if self.bg:
+ attrs.extend(("48", "2", str(self.bg[0]), str(self.bg[1]), str(self.bg[2])))
+ if self.bold:
+ attrs.append("01")
+ if self.underline:
+ attrs.append("04")
+ return self.escape(attrs)
+
+ def reset_string(self):
+ attrs = []
+ if self.fg is not None:
+ attrs.append("39")
+ if self.bg is not None:
+ attrs.append("49")
+ if self.bold or self.underline:
+ attrs.append("00")
+ return self.escape(attrs)
+
+
+class Terminal256Formatter(Formatter):
+ """
+ Format tokens with ANSI color sequences, for output in a 256-color
+ terminal or console. Like in `TerminalFormatter` color sequences
+ are terminated at newlines, so that paging the output works correctly.
+
+ The formatter takes colors from a style defined by the `style` option
+ and converts them to nearest ANSI 256-color escape sequences. Bold and
+ underline attributes from the style are preserved (and displayed).
+
+ .. versionadded:: 0.9
+
.. versionchanged:: 2.2
If the used style defines foreground colors in the form ``#ansi*``, then
`Terminal256Formatter` will map these to non extended foreground color.
@@ -116,200 +116,200 @@ class Terminal256Formatter(Formatter):
See :ref:`this table <new-ansi-color-names>` for more information.
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
- """
- name = 'Terminal256'
- aliases = ['terminal256', 'console256', '256']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.xterm_colors = []
- self.best_match = {}
- self.style_string = {}
-
- self.usebold = 'nobold' not in options
- self.useunderline = 'nounderline' not in options
-
- self._build_color_table() # build an RGB-to-256 color conversion table
- self._setup_styles() # convert selected style's colors to term. colors
-
- def _build_color_table(self):
- # colors 0..15: 16 basic colors
-
- self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
- self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
- self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
- self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
- self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
- self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
- self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
- self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
- self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
- self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
- self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
- self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
- self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
- self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
- self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
- self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
-
- # colors 16..232: the 6x6x6 color cube
-
- valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
-
- for i in range(217):
- r = valuerange[(i // 36) % 6]
- g = valuerange[(i // 6) % 6]
- b = valuerange[i % 6]
- self.xterm_colors.append((r, g, b))
-
- # colors 233..253: grayscale
-
- for i in range(1, 22):
- v = 8 + i * 10
- self.xterm_colors.append((v, v, v))
-
- def _closest_color(self, r, g, b):
- distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
- match = 0
-
- for i in range(0, 254):
- values = self.xterm_colors[i]
-
- rd = r - values[0]
- gd = g - values[1]
- bd = b - values[2]
- d = rd*rd + gd*gd + bd*bd
-
- if d < distance:
- match = i
- distance = d
- return match
-
- def _color_index(self, color):
- index = self.best_match.get(color, None)
+ Options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+ """
+ name = 'Terminal256'
+ aliases = ['terminal256', 'console256', '256']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+
+ self.xterm_colors = []
+ self.best_match = {}
+ self.style_string = {}
+
+ self.usebold = 'nobold' not in options
+ self.useunderline = 'nounderline' not in options
+
+ self._build_color_table() # build an RGB-to-256 color conversion table
+ self._setup_styles() # convert selected style's colors to term. colors
+
+ def _build_color_table(self):
+ # colors 0..15: 16 basic colors
+
+ self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
+ self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
+ self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
+ self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
+ self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
+ self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
+ self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
+ self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
+ self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
+ self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
+ self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
+ self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
+ self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
+ self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
+ self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
+ self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
+
+ # colors 16..232: the 6x6x6 color cube
+
+ valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
+
+ for i in range(217):
+ r = valuerange[(i // 36) % 6]
+ g = valuerange[(i // 6) % 6]
+ b = valuerange[i % 6]
+ self.xterm_colors.append((r, g, b))
+
+ # colors 233..253: grayscale
+
+ for i in range(1, 22):
+ v = 8 + i * 10
+ self.xterm_colors.append((v, v, v))
+
+ def _closest_color(self, r, g, b):
+ distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
+ match = 0
+
+ for i in range(0, 254):
+ values = self.xterm_colors[i]
+
+ rd = r - values[0]
+ gd = g - values[1]
+ bd = b - values[2]
+ d = rd*rd + gd*gd + bd*bd
+
+ if d < distance:
+ match = i
+ distance = d
+ return match
+
+ def _color_index(self, color):
+ index = self.best_match.get(color, None)
if color in ansicolors:
# strip the `ansi/#ansi` part and look up code
index = color
self.best_match[color] = index
- if index is None:
- try:
- rgb = int(str(color), 16)
- except ValueError:
- rgb = 0
-
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- index = self._closest_color(r, g, b)
- self.best_match[color] = index
- return index
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
+ if index is None:
+ try:
+ rgb = int(str(color), 16)
+ except ValueError:
+ rgb = 0
+
+ r = (rgb >> 16) & 0xff
+ g = (rgb >> 8) & 0xff
+ b = rgb & 0xff
+ index = self._closest_color(r, g, b)
+ self.best_match[color] = index
+ return index
+
+ def _setup_styles(self):
+ for ttype, ndef in self.style:
+ escape = EscapeSequence()
# get foreground from ansicolor if set
if ndef['ansicolor']:
escape.fg = self._color_index(ndef['ansicolor'])
elif ndef['color']:
- escape.fg = self._color_index(ndef['color'])
+ escape.fg = self._color_index(ndef['color'])
if ndef['bgansicolor']:
escape.bg = self._color_index(ndef['bgansicolor'])
elif ndef['bgcolor']:
- escape.bg = self._color_index(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
- self.style_string[str(ttype)] = (escape.color_string(),
- escape.reset_string())
-
- def format(self, tokensource, outfile):
- # hack: if the output is a terminal and has an encoding set,
- # use that to avoid unicode encode problems
- if not self.encoding and hasattr(outfile, "encoding") and \
- hasattr(outfile, "isatty") and outfile.isatty() and \
- sys.version_info < (3,):
- self.encoding = outfile.encoding
- return Formatter.format(self, tokensource, outfile)
-
- def format_unencoded(self, tokensource, outfile):
- for ttype, value in tokensource:
- not_found = True
- while ttype and not_found:
- try:
- # outfile.write( "<" + str(ttype) + ">" )
- on, off = self.style_string[str(ttype)]
-
- # Like TerminalFormatter, add "reset colors" escape sequence
- # on newline.
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(on + line + off)
- outfile.write('\n')
- if spl[-1]:
- outfile.write(on + spl[-1] + off)
-
- not_found = False
- # outfile.write( '#' + str(ttype) + '#' )
-
- except KeyError:
- # ottype = ttype
- ttype = ttype[:-1]
- # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
-
- if not_found:
- outfile.write(value)
-
-
-class TerminalTrueColorFormatter(Terminal256Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a true-color
- terminal or console. Like in `TerminalFormatter` color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- .. versionadded:: 2.1
-
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
- """
- name = 'TerminalTrueColor'
- aliases = ['terminal16m', 'console16m', '16m']
- filenames = []
-
- def _build_color_table(self):
- pass
-
- def _color_tuple(self, color):
- try:
- rgb = int(str(color), 16)
- except ValueError:
- return None
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- return (r, g, b)
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
- if ndef['color']:
- escape.fg = self._color_tuple(ndef['color'])
- if ndef['bgcolor']:
- escape.bg = self._color_tuple(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
- self.style_string[str(ttype)] = (escape.true_color_string(),
- escape.reset_string())
+ escape.bg = self._color_index(ndef['bgcolor'])
+ if self.usebold and ndef['bold']:
+ escape.bold = True
+ if self.useunderline and ndef['underline']:
+ escape.underline = True
+ self.style_string[str(ttype)] = (escape.color_string(),
+ escape.reset_string())
+
+ def format(self, tokensource, outfile):
+ # hack: if the output is a terminal and has an encoding set,
+ # use that to avoid unicode encode problems
+ if not self.encoding and hasattr(outfile, "encoding") and \
+ hasattr(outfile, "isatty") and outfile.isatty() and \
+ sys.version_info < (3,):
+ self.encoding = outfile.encoding
+ return Formatter.format(self, tokensource, outfile)
+
+ def format_unencoded(self, tokensource, outfile):
+ for ttype, value in tokensource:
+ not_found = True
+ while ttype and not_found:
+ try:
+ # outfile.write( "<" + str(ttype) + ">" )
+ on, off = self.style_string[str(ttype)]
+
+ # Like TerminalFormatter, add "reset colors" escape sequence
+ # on newline.
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write(on + line + off)
+ outfile.write('\n')
+ if spl[-1]:
+ outfile.write(on + spl[-1] + off)
+
+ not_found = False
+ # outfile.write( '#' + str(ttype) + '#' )
+
+ except KeyError:
+ # ottype = ttype
+ ttype = ttype[:-1]
+ # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
+
+ if not_found:
+ outfile.write(value)
+
+
+class TerminalTrueColorFormatter(Terminal256Formatter):
+ r"""
+ Format tokens with ANSI color sequences, for output in a true-color
+ terminal or console. Like in `TerminalFormatter` color sequences
+ are terminated at newlines, so that paging the output works correctly.
+
+ .. versionadded:: 2.1
+
+ Options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+ """
+ name = 'TerminalTrueColor'
+ aliases = ['terminal16m', 'console16m', '16m']
+ filenames = []
+
+ def _build_color_table(self):
+ pass
+
+ def _color_tuple(self, color):
+ try:
+ rgb = int(str(color), 16)
+ except ValueError:
+ return None
+ r = (rgb >> 16) & 0xff
+ g = (rgb >> 8) & 0xff
+ b = rgb & 0xff
+ return (r, g, b)
+
+ def _setup_styles(self):
+ for ttype, ndef in self.style:
+ escape = EscapeSequence()
+ if ndef['color']:
+ escape.fg = self._color_tuple(ndef['color'])
+ if ndef['bgcolor']:
+ escape.bg = self._color_tuple(ndef['bgcolor'])
+ if self.usebold and ndef['bold']:
+ escape.bold = True
+ if self.useunderline and ndef['underline']:
+ escape.underline = True
+ self.style_string[str(ttype)] = (escape.true_color_string(),
+ escape.reset_string())
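And for the two formatters above; 'monokai' is just an example of a stock Pygments style name. Terminal256Formatter maps style colors to the nearest xterm-256 index, while the true-color subclass emits 24-bit sequences and skips the lookup table:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import Terminal256Formatter, TerminalTrueColorFormatter

code = 'x = {"key": [1, 2, 3]}\n'
print(highlight(code, PythonLexer(), Terminal256Formatter(style='monokai')))
print(highlight(code, PythonLexer(), TerminalTrueColorFormatter(style='monokai')))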
diff --git a/contrib/python/Pygments/py2/pygments/lexer.py b/contrib/python/Pygments/py2/pygments/lexer.py
index 56a7e1e8e3..3a4684afc2 100644
--- a/contrib/python/Pygments/py2/pygments/lexer.py
+++ b/contrib/python/Pygments/py2/pygments/lexer.py
@@ -1,651 +1,651 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexer
- ~~~~~~~~~~~~~~
-
- Base lexer classes.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexer
+ ~~~~~~~~~~~~~~
+
+ Base lexer classes.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-import re
-import sys
-import time
-
-from pygments.filter import apply_filters, Filter
-from pygments.filters import get_filter_by_name
-from pygments.token import Error, Text, Other, _TokenType
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- make_analysator, text_type, add_metaclass, iteritems, Future, guess_decode
-from pygments.regexopt import regex_opt
-
-__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
- 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
- 'default', 'words']
-
-
-_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
- (b'\xff\xfe\0\0', 'utf-32'),
- (b'\0\0\xfe\xff', 'utf-32be'),
- (b'\xff\xfe', 'utf-16'),
- (b'\xfe\xff', 'utf-16be')]
-
-_default_analyse = staticmethod(lambda x: 0.0)
-
-
-class LexerMeta(type):
- """
- This metaclass automagically converts ``analyse_text`` methods into
- static methods which always return float values.
- """
-
- def __new__(mcs, name, bases, d):
- if 'analyse_text' in d:
- d['analyse_text'] = make_analysator(d['analyse_text'])
- return type.__new__(mcs, name, bases, d)
-
-
-@add_metaclass(LexerMeta)
-class Lexer(object):
- """
- Lexer for a specific language.
-
- Basic options recognized:
- ``stripnl``
- Strip leading and trailing newlines from the input (default: True).
- ``stripall``
- Strip all leading and trailing whitespace from the input
- (default: False).
- ``ensurenl``
- Make sure that the input ends with a newline (default: True). This
- is required for some lexers that consume input linewise.
-
- .. versionadded:: 1.3
-
- ``tabsize``
- If given and greater than 0, expand tabs in the input (default: 0).
- ``encoding``
- If given, must be an encoding name. This encoding will be used to
- convert the input string to Unicode, if it is not already a Unicode
- string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
- Latin1 detection. Can also be ``'chardet'`` to use the chardet
- Latin1 detection). Can also be ``'chardet'`` to use the chardet
- ``inencoding``
- Overrides the ``encoding`` if given.
- """
-
- #: Name of the lexer
- name = None
-
- #: Shortcuts for the lexer
- aliases = []
-
- #: File name globs
- filenames = []
-
- #: Secondary file name globs
- alias_filenames = []
-
- #: MIME types
- mimetypes = []
-
- #: Priority, should multiple lexers match and no content is provided
- priority = 0
-
- def __init__(self, **options):
- self.options = options
- self.stripnl = get_bool_opt(options, 'stripnl', True)
- self.stripall = get_bool_opt(options, 'stripall', False)
- self.ensurenl = get_bool_opt(options, 'ensurenl', True)
- self.tabsize = get_int_opt(options, 'tabsize', 0)
- self.encoding = options.get('encoding', 'guess')
- self.encoding = options.get('inencoding') or self.encoding
- self.filters = []
- for filter_ in get_list_opt(options, 'filters', ()):
- self.add_filter(filter_)
-
- def __repr__(self):
- if self.options:
- return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
- self.options)
- else:
- return '<pygments.lexers.%s>' % self.__class__.__name__
-
- def add_filter(self, filter_, **options):
- """
- Add a new stream filter to this lexer.
- """
- if not isinstance(filter_, Filter):
- filter_ = get_filter_by_name(filter_, **options)
- self.filters.append(filter_)
-
- def analyse_text(text):
- """
- Has to return a float between ``0`` and ``1`` that indicates
- if a lexer wants to highlight this text. Used by ``guess_lexer``.
- If this method returns ``0`` it won't highlight it in any case, if
- it returns ``1`` highlighting with this lexer is guaranteed.
-
- The `LexerMeta` metaclass automatically wraps this function so
- that it works like a static method (no ``self`` or ``cls``
- parameter) and the return value is automatically converted to
- `float`. If the return value is an object that is boolean `False`
- it's the same as if the return values was ``0.0``.
- """
-
- def get_tokens(self, text, unfiltered=False):
- """
- Return an iterable of (tokentype, value) pairs generated from
- `text`. If `unfiltered` is set to `True`, the filtering mechanism
- is bypassed even if filters are defined.
-
- Also preprocess the text, i.e. expand tabs and strip it if
- wanted and applies registered filters.
- """
- if not isinstance(text, text_type):
- if self.encoding == 'guess':
- text, _ = guess_decode(text)
- elif self.encoding == 'chardet':
- try:
- import chardet
- except ImportError:
- raise ImportError('To enable chardet encoding guessing, '
- 'please install the chardet library '
- 'from http://chardet.feedparser.org/')
- # check for BOM first
- decoded = None
- for bom, encoding in _encoding_map:
- if text.startswith(bom):
- decoded = text[len(bom):].decode(encoding, 'replace')
- break
- # no BOM found, so use chardet
- if decoded is None:
- enc = chardet.detect(text[:1024]) # Guess using first 1KB
- decoded = text.decode(enc.get('encoding') or 'utf-8',
- 'replace')
- text = decoded
- else:
- text = text.decode(self.encoding)
- if text.startswith(u'\ufeff'):
- text = text[len(u'\ufeff'):]
- else:
- if text.startswith(u'\ufeff'):
- text = text[len(u'\ufeff'):]
-
- # text now *is* a unicode string
- text = text.replace('\r\n', '\n')
- text = text.replace('\r', '\n')
- if self.stripall:
- text = text.strip()
- elif self.stripnl:
- text = text.strip('\n')
- if self.tabsize > 0:
- text = text.expandtabs(self.tabsize)
- if self.ensurenl and not text.endswith('\n'):
- text += '\n'
-
- def streamer():
- for _, t, v in self.get_tokens_unprocessed(text):
- yield t, v
- stream = streamer()
- if not unfiltered:
- stream = apply_filters(stream, self.filters, self)
- return stream
-
- def get_tokens_unprocessed(self, text):
- """
- Return an iterable of (index, tokentype, value) pairs where "index"
- is the starting position of the token within the input text.
-
- In subclasses, implement this method as a generator to
- maximize effectiveness.
- """
- raise NotImplementedError
-
-
-class DelegatingLexer(Lexer):
- """
- This lexer takes two lexers as arguments. A root lexer and
- a language lexer. First everything is scanned using the language
- lexer, afterwards all ``Other`` tokens are lexed using the root
- lexer.
-
- The lexers from the ``template`` lexer package use this base lexer.
- """
-
- def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
- self.root_lexer = _root_lexer(**options)
- self.language_lexer = _language_lexer(**options)
- self.needle = _needle
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- buffered = ''
- insertions = []
- lng_buffer = []
- for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
- if t is self.needle:
- if lng_buffer:
- insertions.append((len(buffered), lng_buffer))
- lng_buffer = []
- buffered += v
- else:
- lng_buffer.append((i, t, v))
- if lng_buffer:
- insertions.append((len(buffered), lng_buffer))
- return do_insertions(insertions,
- self.root_lexer.get_tokens_unprocessed(buffered))
-
-
-# ------------------------------------------------------------------------------
-# RegexLexer and ExtendedRegexLexer
-#
-
-
-class include(str): # pylint: disable=invalid-name
- """
- Indicates that a state should include rules from another state.
- """
- pass
-
-
-class _inherit(object):
- """
- Indicates that a state should inherit from its superclass.
- """
- def __repr__(self):
- return 'inherit'
-
-inherit = _inherit() # pylint: disable=invalid-name
-
-
-class combined(tuple): # pylint: disable=invalid-name
- """
- Indicates a state combined from multiple states.
- """
-
- def __new__(cls, *args):
- return tuple.__new__(cls, args)
-
- def __init__(self, *args):
- # tuple.__init__ doesn't do anything
- pass
-
-
-class _PseudoMatch(object):
- """
- A pseudo match object constructed from a string.
- """
-
- def __init__(self, start, text):
- self._text = text
- self._start = start
-
- def start(self, arg=None):
- return self._start
-
- def end(self, arg=None):
- return self._start + len(self._text)
-
- def group(self, arg=None):
- if arg:
- raise IndexError('No such group')
- return self._text
-
- def groups(self):
- return (self._text,)
-
- def groupdict(self):
- return {}
-
-
-def bygroups(*args):
- """
- Callback that yields multiple actions for each group in the match.
- """
- def callback(lexer, match, ctx=None):
- for i, action in enumerate(args):
- if action is None:
- continue
- elif type(action) is _TokenType:
- data = match.group(i + 1)
- if data:
- yield match.start(i + 1), action, data
- else:
- data = match.group(i + 1)
- if data is not None:
- if ctx:
- ctx.pos = match.start(i + 1)
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+import re
+import sys
+import time
+
+from pygments.filter import apply_filters, Filter
+from pygments.filters import get_filter_by_name
+from pygments.token import Error, Text, Other, _TokenType
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
+ make_analysator, text_type, add_metaclass, iteritems, Future, guess_decode
+from pygments.regexopt import regex_opt
+
+__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
+ 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
+ 'default', 'words']
+
+
+_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
+ (b'\xff\xfe\0\0', 'utf-32'),
+ (b'\0\0\xfe\xff', 'utf-32be'),
+ (b'\xff\xfe', 'utf-16'),
+ (b'\xfe\xff', 'utf-16be')]
+
+_default_analyse = staticmethod(lambda x: 0.0)
+
+
+class LexerMeta(type):
+ """
+ This metaclass automagically converts ``analyse_text`` methods into
+ static methods which always return float values.
+ """
+
+ def __new__(mcs, name, bases, d):
+ if 'analyse_text' in d:
+ d['analyse_text'] = make_analysator(d['analyse_text'])
+ return type.__new__(mcs, name, bases, d)
+
+
+@add_metaclass(LexerMeta)
+class Lexer(object):
+ """
+ Lexer for a specific language.
+
+ Basic options recognized:
+ ``stripnl``
+ Strip leading and trailing newlines from the input (default: True).
+ ``stripall``
+ Strip all leading and trailing whitespace from the input
+ (default: False).
+ ``ensurenl``
+ Make sure that the input ends with a newline (default: True). This
+ is required for some lexers that consume input linewise.
+
+ .. versionadded:: 1.3
+
+ ``tabsize``
+ If given and greater than 0, expand tabs in the input (default: 0).
+ ``encoding``
+ If given, must be an encoding name. This encoding will be used to
+ convert the input string to Unicode, if it is not already a Unicode
+ string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
+        Latin1 detection). Can also be ``'chardet'`` to use the chardet
+ library, if it is installed.
+ ``inencoding``
+ Overrides the ``encoding`` if given.
+ """
+
+ #: Name of the lexer
+ name = None
+
+ #: Shortcuts for the lexer
+ aliases = []
+
+ #: File name globs
+ filenames = []
+
+ #: Secondary file name globs
+ alias_filenames = []
+
+ #: MIME types
+ mimetypes = []
+
+ #: Priority, should multiple lexers match and no content is provided
+ priority = 0
+
+ def __init__(self, **options):
+ self.options = options
+ self.stripnl = get_bool_opt(options, 'stripnl', True)
+ self.stripall = get_bool_opt(options, 'stripall', False)
+ self.ensurenl = get_bool_opt(options, 'ensurenl', True)
+ self.tabsize = get_int_opt(options, 'tabsize', 0)
+ self.encoding = options.get('encoding', 'guess')
+ self.encoding = options.get('inencoding') or self.encoding
+ self.filters = []
+ for filter_ in get_list_opt(options, 'filters', ()):
+ self.add_filter(filter_)
+
+ def __repr__(self):
+ if self.options:
+ return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
+ self.options)
+ else:
+ return '<pygments.lexers.%s>' % self.__class__.__name__
+
+ def add_filter(self, filter_, **options):
+ """
+ Add a new stream filter to this lexer.
+ """
+ if not isinstance(filter_, Filter):
+ filter_ = get_filter_by_name(filter_, **options)
+ self.filters.append(filter_)
+
+ def analyse_text(text):
+ """
+ Has to return a float between ``0`` and ``1`` that indicates
+ if a lexer wants to highlight this text. Used by ``guess_lexer``.
+        If this method returns ``0`` it won't highlight it in any case; if
+ it returns ``1`` highlighting with this lexer is guaranteed.
+
+ The `LexerMeta` metaclass automatically wraps this function so
+ that it works like a static method (no ``self`` or ``cls``
+ parameter) and the return value is automatically converted to
+ `float`. If the return value is an object that is boolean `False`
+        it's the same as if the return value was ``0.0``.
+ """
+
+ def get_tokens(self, text, unfiltered=False):
+ """
+ Return an iterable of (tokentype, value) pairs generated from
+ `text`. If `unfiltered` is set to `True`, the filtering mechanism
+ is bypassed even if filters are defined.
+
+        Also preprocesses the text, i.e. expands tabs and strips it if
+        wanted, and applies registered filters.
+ """
+ if not isinstance(text, text_type):
+ if self.encoding == 'guess':
+ text, _ = guess_decode(text)
+ elif self.encoding == 'chardet':
+ try:
+ import chardet
+ except ImportError:
+ raise ImportError('To enable chardet encoding guessing, '
+ 'please install the chardet library '
+ 'from http://chardet.feedparser.org/')
+ # check for BOM first
+ decoded = None
+ for bom, encoding in _encoding_map:
+ if text.startswith(bom):
+ decoded = text[len(bom):].decode(encoding, 'replace')
+ break
+ # no BOM found, so use chardet
+ if decoded is None:
+ enc = chardet.detect(text[:1024]) # Guess using first 1KB
+ decoded = text.decode(enc.get('encoding') or 'utf-8',
+ 'replace')
+ text = decoded
+ else:
+ text = text.decode(self.encoding)
+ if text.startswith(u'\ufeff'):
+ text = text[len(u'\ufeff'):]
+ else:
+ if text.startswith(u'\ufeff'):
+ text = text[len(u'\ufeff'):]
+
+ # text now *is* a unicode string
+ text = text.replace('\r\n', '\n')
+ text = text.replace('\r', '\n')
+ if self.stripall:
+ text = text.strip()
+ elif self.stripnl:
+ text = text.strip('\n')
+ if self.tabsize > 0:
+ text = text.expandtabs(self.tabsize)
+ if self.ensurenl and not text.endswith('\n'):
+ text += '\n'
+
+ def streamer():
+ for _, t, v in self.get_tokens_unprocessed(text):
+ yield t, v
+ stream = streamer()
+ if not unfiltered:
+ stream = apply_filters(stream, self.filters, self)
+ return stream
+
+ def get_tokens_unprocessed(self, text):
+ """
+ Return an iterable of (index, tokentype, value) pairs where "index"
+ is the starting position of the token within the input text.
+
+ In subclasses, implement this method as a generator to
+ maximize effectiveness.
+ """
+ raise NotImplementedError
+
+
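# Editor's illustrative sketch (not part of this diff or of the Pygments
# sources): the options documented above (stripnl, stripall, ensurenl,
# tabsize, encoding) are read in Lexer.__init__, and get_tokens() applies
# the preprocessing shown in this hunk before yielding (tokentype, value)
# pairs.  PythonLexer is the standard lexer shipped with Pygments.
from pygments.lexers import PythonLexer

lexer = PythonLexer(stripall=True, tabsize=4, encoding='utf-8')
# Bytes input is decoded with the configured encoding; tabs are expanded
# and '\r\n' is normalized to '\n' before tokenization.
for tokentype, value in lexer.get_tokens(b'\tprint("hi")\r\n'):
    print(tokentype, repr(value))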
+class DelegatingLexer(Lexer):
+ """
+    This lexer takes two lexers as arguments: a root lexer and
+ a language lexer. First everything is scanned using the language
+    lexer; afterwards, all ``Other`` tokens are lexed using the root
+ lexer.
+
+ The lexers from the ``template`` lexer package use this base lexer.
+ """
+
+ def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
+ self.root_lexer = _root_lexer(**options)
+ self.language_lexer = _language_lexer(**options)
+ self.needle = _needle
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ buffered = ''
+ insertions = []
+ lng_buffer = []
+ for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
+ if t is self.needle:
+ if lng_buffer:
+ insertions.append((len(buffered), lng_buffer))
+ lng_buffer = []
+ buffered += v
+ else:
+ lng_buffer.append((i, t, v))
+ if lng_buffer:
+ insertions.append((len(buffered), lng_buffer))
+ return do_insertions(insertions,
+ self.root_lexer.get_tokens_unprocessed(buffered))
+
+
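# Editor's illustrative sketch (not part of this diff): DelegatingLexer is
# meant to be subclassed.  The language lexer runs first, and every span it
# reports as ``Other`` is re-lexed by the root lexer, which is roughly how
# the template lexers are built.  MyRhtmlLexer is a hypothetical name.
from pygments.lexer import DelegatingLexer
from pygments.lexers.html import HtmlLexer
from pygments.lexers.templates import ErbLexer

class MyRhtmlLexer(DelegatingLexer):
    """ERB embedded in HTML -- hypothetical example."""
    name = 'My RHTML'
    aliases = ['my-rhtml']

    def __init__(self, **options):
        # ErbLexer emits Other for plain markup; HtmlLexer fills those gaps.
        DelegatingLexer.__init__(self, HtmlLexer, ErbLexer, **options)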
+# ------------------------------------------------------------------------------
+# RegexLexer and ExtendedRegexLexer
+#
+
+
+class include(str): # pylint: disable=invalid-name
+ """
+ Indicates that a state should include rules from another state.
+ """
+ pass
+
+
+class _inherit(object):
+ """
+    Indicates that a state should inherit from its superclass.
+ """
+ def __repr__(self):
+ return 'inherit'
+
+inherit = _inherit() # pylint: disable=invalid-name
+
+
+class combined(tuple): # pylint: disable=invalid-name
+ """
+ Indicates a state combined from multiple states.
+ """
+
+ def __new__(cls, *args):
+ return tuple.__new__(cls, args)
+
+ def __init__(self, *args):
+ # tuple.__init__ doesn't do anything
+ pass
+
+
+class _PseudoMatch(object):
+ """
+ A pseudo match object constructed from a string.
+ """
+
+ def __init__(self, start, text):
+ self._text = text
+ self._start = start
+
+ def start(self, arg=None):
+ return self._start
+
+ def end(self, arg=None):
+ return self._start + len(self._text)
+
+ def group(self, arg=None):
+ if arg:
+ raise IndexError('No such group')
+ return self._text
+
+ def groups(self):
+ return (self._text,)
+
+ def groupdict(self):
+ return {}
+
+
+def bygroups(*args):
+ """
+ Callback that yields multiple actions for each group in the match.
+ """
+ def callback(lexer, match, ctx=None):
+ for i, action in enumerate(args):
+ if action is None:
+ continue
+ elif type(action) is _TokenType:
+ data = match.group(i + 1)
+ if data:
+ yield match.start(i + 1), action, data
+ else:
+ data = match.group(i + 1)
+ if data is not None:
+ if ctx:
+ ctx.pos = match.start(i + 1)
for item in action(lexer,
_PseudoMatch(match.start(i + 1), data), ctx):
- if item:
- yield item
- if ctx:
- ctx.pos = match.end()
- return callback
-
-
-class _This(object):
- """
- Special singleton used for indicating the caller class.
- Used by ``using``.
- """
-this = _This()
-
-
-def using(_other, **kwargs):
- """
- Callback that processes the match with a different lexer.
-
- The keyword arguments are forwarded to the lexer, except `state` which
- is handled separately.
-
- `state` specifies the state that the new lexer will start in, and can
- be an enumerable such as ('root', 'inline', 'string') or a simple
- string which is assumed to be on top of the root state.
-
- Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
- """
- gt_kwargs = {}
- if 'state' in kwargs:
- s = kwargs.pop('state')
- if isinstance(s, (list, tuple)):
- gt_kwargs['stack'] = s
- else:
- gt_kwargs['stack'] = ('root', s)
-
- if _other is this:
- def callback(lexer, match, ctx=None):
- # if keyword arguments are given the callback
- # function has to create a new lexer instance
- if kwargs:
- # XXX: cache that somehow
- kwargs.update(lexer.options)
- lx = lexer.__class__(**kwargs)
- else:
- lx = lexer
- s = match.start()
- for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
- yield i + s, t, v
- if ctx:
- ctx.pos = match.end()
- else:
- def callback(lexer, match, ctx=None):
- # XXX: cache that somehow
- kwargs.update(lexer.options)
- lx = _other(**kwargs)
-
- s = match.start()
- for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
- yield i + s, t, v
- if ctx:
- ctx.pos = match.end()
- return callback
-
-
-class default:
- """
- Indicates a state or state action (e.g. #pop) to apply.
-    For example, default('#pop') is equivalent to ('', Token, '#pop').
- Note that state tuples may be used as well.
-
- .. versionadded:: 2.0
- """
- def __init__(self, state):
- self.state = state
-
-
-class words(Future):
- """
- Indicates a list of literal words that is transformed into an optimized
- regex that matches any of the words.
-
- .. versionadded:: 2.0
- """
- def __init__(self, words, prefix='', suffix=''):
- self.words = words
- self.prefix = prefix
- self.suffix = suffix
-
- def get(self):
- return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
-
-
-class RegexLexerMeta(LexerMeta):
- """
- Metaclass for RegexLexer, creates the self._tokens attribute from
- self.tokens on the first instantiation.
- """
-
- def _process_regex(cls, regex, rflags, state):
- """Preprocess the regular expression component of a token definition."""
- if isinstance(regex, Future):
- regex = regex.get()
- return re.compile(regex, rflags).match
-
- def _process_token(cls, token):
- """Preprocess the token component of a token definition."""
- assert type(token) is _TokenType or callable(token), \
- 'token type must be simple type or callable, not %r' % (token,)
- return token
-
- def _process_new_state(cls, new_state, unprocessed, processed):
- """Preprocess the state transition action of a token definition."""
- if isinstance(new_state, str):
- # an existing state
- if new_state == '#pop':
- return -1
- elif new_state in unprocessed:
- return (new_state,)
- elif new_state == '#push':
- return new_state
- elif new_state[:5] == '#pop:':
- return -int(new_state[5:])
- else:
- assert False, 'unknown new state %r' % new_state
- elif isinstance(new_state, combined):
- # combine a new state from existing ones
- tmp_state = '_tmp_%d' % cls._tmpname
- cls._tmpname += 1
- itokens = []
- for istate in new_state:
- assert istate != new_state, 'circular state ref %r' % istate
- itokens.extend(cls._process_state(unprocessed,
- processed, istate))
- processed[tmp_state] = itokens
- return (tmp_state,)
- elif isinstance(new_state, tuple):
- # push more than one state
- for istate in new_state:
- assert (istate in unprocessed or
- istate in ('#pop', '#push')), \
- 'unknown new state ' + istate
- return new_state
- else:
- assert False, 'unknown new state def %r' % new_state
-
- def _process_state(cls, unprocessed, processed, state):
- """Preprocess a single state definition."""
+ if item:
+ yield item
+ if ctx:
+ ctx.pos = match.end()
+ return callback
+
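# Editor's illustrative sketch (not part of this diff): bygroups() turns the
# capture groups of a single regex into separate tokens, one action per
# group.  AssignLexer is a hypothetical toy lexer.
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Name, Operator, String, Text

class AssignLexer(RegexLexer):
    name = 'Assign'
    tokens = {
        'root': [
            # group 1 -> variable, 2/4 -> whitespace, 3 -> '=', 5 -> string
            (r'(\w+)(\s*)(=)(\s*)("[^"]*")',
             bygroups(Name.Variable, Text, Operator, Text, String)),
            (r'\s+', Text),
        ],
    }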
+
+class _This(object):
+ """
+ Special singleton used for indicating the caller class.
+ Used by ``using``.
+ """
+this = _This()
+
+
+def using(_other, **kwargs):
+ """
+ Callback that processes the match with a different lexer.
+
+ The keyword arguments are forwarded to the lexer, except `state` which
+ is handled separately.
+
+ `state` specifies the state that the new lexer will start in, and can
+ be an enumerable such as ('root', 'inline', 'string') or a simple
+ string which is assumed to be on top of the root state.
+
+ Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
+ """
+ gt_kwargs = {}
+ if 'state' in kwargs:
+ s = kwargs.pop('state')
+ if isinstance(s, (list, tuple)):
+ gt_kwargs['stack'] = s
+ else:
+ gt_kwargs['stack'] = ('root', s)
+
+ if _other is this:
+ def callback(lexer, match, ctx=None):
+ # if keyword arguments are given the callback
+ # function has to create a new lexer instance
+ if kwargs:
+ # XXX: cache that somehow
+ kwargs.update(lexer.options)
+ lx = lexer.__class__(**kwargs)
+ else:
+ lx = lexer
+ s = match.start()
+ for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
+ yield i + s, t, v
+ if ctx:
+ ctx.pos = match.end()
+ else:
+ def callback(lexer, match, ctx=None):
+ # XXX: cache that somehow
+ kwargs.update(lexer.options)
+ lx = _other(**kwargs)
+
+ s = match.start()
+ for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
+ yield i + s, t, v
+ if ctx:
+ ctx.pos = match.end()
+ return callback
+
+
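# Editor's illustrative sketch (not part of this diff): using() builds a
# callback that re-lexes the matched text with another lexer (or with
# ``this``, the current lexer class).  ScriptIslandLexer is a hypothetical
# name; JavascriptLexer ships with Pygments.
from pygments.lexer import RegexLexer, bygroups, using
from pygments.lexers.javascript import JavascriptLexer
from pygments.token import Name, Text

class ScriptIslandLexer(RegexLexer):
    name = 'ScriptIsland'
    tokens = {
        'root': [
            # hand the script body to the JavaScript lexer
            (r'(<script>)(.*?)(</script>)',
             bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
            (r'[^<]+', Text),
            (r'<', Text),
        ],
    }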
+class default:
+ """
+ Indicates a state or state action (e.g. #pop) to apply.
+    For example, default('#pop') is equivalent to ('', Token, '#pop').
+ Note that state tuples may be used as well.
+
+ .. versionadded:: 2.0
+ """
+ def __init__(self, state):
+ self.state = state
+
+
+class words(Future):
+ """
+ Indicates a list of literal words that is transformed into an optimized
+ regex that matches any of the words.
+
+ .. versionadded:: 2.0
+ """
+ def __init__(self, words, prefix='', suffix=''):
+ self.words = words
+ self.prefix = prefix
+ self.suffix = suffix
+
+ def get(self):
+ return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
+
+
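# Editor's illustrative sketch (not part of this diff): words() expands to an
# optimized alternation over literal words, and default() contributes a rule
# that matches the empty string and only performs the state action.
# TinyLexer is a hypothetical toy lexer.
from pygments.lexer import RegexLexer, default, words
from pygments.token import Keyword, Text

class TinyLexer(RegexLexer):
    name = 'Tiny'
    tokens = {
        'root': [
            (words(('if', 'else', 'while'), prefix=r'\b', suffix=r'\b'),
             Keyword),
            (r'\s+', Text),
            default('fallback'),     # nothing matched: just push 'fallback'
        ],
        'fallback': [
            (r'.', Text, '#pop'),    # consume one character, then pop
        ],
    }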
+class RegexLexerMeta(LexerMeta):
+ """
+ Metaclass for RegexLexer, creates the self._tokens attribute from
+ self.tokens on the first instantiation.
+ """
+
+ def _process_regex(cls, regex, rflags, state):
+ """Preprocess the regular expression component of a token definition."""
+ if isinstance(regex, Future):
+ regex = regex.get()
+ return re.compile(regex, rflags).match
+
+ def _process_token(cls, token):
+ """Preprocess the token component of a token definition."""
+ assert type(token) is _TokenType or callable(token), \
+ 'token type must be simple type or callable, not %r' % (token,)
+ return token
+
+ def _process_new_state(cls, new_state, unprocessed, processed):
+ """Preprocess the state transition action of a token definition."""
+ if isinstance(new_state, str):
+ # an existing state
+ if new_state == '#pop':
+ return -1
+ elif new_state in unprocessed:
+ return (new_state,)
+ elif new_state == '#push':
+ return new_state
+ elif new_state[:5] == '#pop:':
+ return -int(new_state[5:])
+ else:
+ assert False, 'unknown new state %r' % new_state
+ elif isinstance(new_state, combined):
+ # combine a new state from existing ones
+ tmp_state = '_tmp_%d' % cls._tmpname
+ cls._tmpname += 1
+ itokens = []
+ for istate in new_state:
+ assert istate != new_state, 'circular state ref %r' % istate
+ itokens.extend(cls._process_state(unprocessed,
+ processed, istate))
+ processed[tmp_state] = itokens
+ return (tmp_state,)
+ elif isinstance(new_state, tuple):
+ # push more than one state
+ for istate in new_state:
+ assert (istate in unprocessed or
+ istate in ('#pop', '#push')), \
+ 'unknown new state ' + istate
+ return new_state
+ else:
+ assert False, 'unknown new state def %r' % new_state
+
+ def _process_state(cls, unprocessed, processed, state):
+ """Preprocess a single state definition."""
assert type(state) is str, "wrong state name %r" % state
- assert state[0] != '#', "invalid state name %r" % state
- if state in processed:
- return processed[state]
- tokens = processed[state] = []
- rflags = cls.flags
- for tdef in unprocessed[state]:
- if isinstance(tdef, include):
- # it's a state reference
- assert tdef != state, "circular state reference %r" % state
- tokens.extend(cls._process_state(unprocessed, processed,
- str(tdef)))
- continue
- if isinstance(tdef, _inherit):
- # should be processed already, but may not in the case of:
- # 1. the state has no counterpart in any parent
- # 2. the state includes more than one 'inherit'
- continue
- if isinstance(tdef, default):
- new_state = cls._process_new_state(tdef.state, unprocessed, processed)
- tokens.append((re.compile('').match, None, new_state))
- continue
-
- assert type(tdef) is tuple, "wrong rule def %r" % tdef
-
- try:
- rex = cls._process_regex(tdef[0], rflags, state)
- except Exception as err:
- raise ValueError("uncompilable regex %r in state %r of %r: %s" %
- (tdef[0], state, cls, err))
-
- token = cls._process_token(tdef[1])
-
- if len(tdef) == 2:
- new_state = None
- else:
- new_state = cls._process_new_state(tdef[2],
- unprocessed, processed)
-
- tokens.append((rex, token, new_state))
- return tokens
-
- def process_tokendef(cls, name, tokendefs=None):
- """Preprocess a dictionary of token definitions."""
- processed = cls._all_tokens[name] = {}
- tokendefs = tokendefs or cls.tokens[name]
- for state in list(tokendefs):
- cls._process_state(tokendefs, processed, state)
- return processed
-
- def get_tokendefs(cls):
- """
- Merge tokens from superclasses in MRO order, returning a single tokendef
- dictionary.
-
- Any state that is not defined by a subclass will be inherited
- automatically. States that *are* defined by subclasses will, by
- default, override that state in the superclass. If a subclass wishes to
- inherit definitions from a superclass, it can use the special value
- "inherit", which will cause the superclass' state definition to be
- included at that point in the state.
- """
- tokens = {}
- inheritable = {}
- for c in cls.__mro__:
- toks = c.__dict__.get('tokens', {})
-
- for state, items in iteritems(toks):
- curitems = tokens.get(state)
- if curitems is None:
- # N.b. because this is assigned by reference, sufficiently
- # deep hierarchies are processed incrementally (e.g. for
- # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
- # will not see any inherits in B).
- tokens[state] = items
- try:
- inherit_ndx = items.index(inherit)
- except ValueError:
- continue
- inheritable[state] = inherit_ndx
- continue
-
- inherit_ndx = inheritable.pop(state, None)
- if inherit_ndx is None:
- continue
-
- # Replace the "inherit" value with the items
- curitems[inherit_ndx:inherit_ndx+1] = items
- try:
- # N.b. this is the index in items (that is, the superclass
- # copy), so offset required when storing below.
- new_inh_ndx = items.index(inherit)
- except ValueError:
- pass
- else:
- inheritable[state] = inherit_ndx + new_inh_ndx
-
- return tokens
-
- def __call__(cls, *args, **kwds):
- """Instantiate cls after preprocessing its token definitions."""
- if '_tokens' not in cls.__dict__:
- cls._all_tokens = {}
- cls._tmpname = 0
- if hasattr(cls, 'token_variants') and cls.token_variants:
- # don't process yet
- pass
- else:
- cls._tokens = cls.process_tokendef('', cls.get_tokendefs())
-
- return type.__call__(cls, *args, **kwds)
-
-
-@add_metaclass(RegexLexerMeta)
-class RegexLexer(Lexer):
- """
- Base for simple stateful regular expression-based lexers.
- Simplifies the lexing process so that you need only
- provide a list of states and regular expressions.
- """
-
- #: Flags for compiling the regular expressions.
- #: Defaults to MULTILINE.
- flags = re.MULTILINE
-
- #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
- #:
- #: The initial state is 'root'.
- #: ``new_state`` can be omitted to signify no state transition.
- #: If it is a string, the state is pushed on the stack and changed.
- #: If it is a tuple of strings, all states are pushed on the stack and
- #: the current state will be the topmost.
- #: It can also be ``combined('state1', 'state2', ...)``
- #: to signify a new, anonymous state combined from the rules of two
- #: or more existing ones.
- #: Furthermore, it can be '#pop' to signify going back one step in
- #: the state stack, or '#push' to push the current state on the stack
- #: again.
- #:
- #: The tuple can also be replaced with ``include('state')``, in which
- #: case the rules from the state named by the string are included in the
- #: current one.
- tokens = {}
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- """
- Split ``text`` into (tokentype, text) pairs.
-
-        ``stack`` is the initial stack (default: ``['root']``)
- """
- pos = 0
- tokendefs = self._tokens
- statestack = list(stack)
- statetokens = tokendefs[statestack[-1]]
- while 1:
- for rexmatch, action, new_state in statetokens:
- m = rexmatch(text, pos)
- if m:
- if action is not None:
- if type(action) is _TokenType:
- yield pos, action, m.group()
- else:
- for item in action(self, m):
- yield item
- pos = m.end()
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
+ assert state[0] != '#', "invalid state name %r" % state
+ if state in processed:
+ return processed[state]
+ tokens = processed[state] = []
+ rflags = cls.flags
+ for tdef in unprocessed[state]:
+ if isinstance(tdef, include):
+ # it's a state reference
+ assert tdef != state, "circular state reference %r" % state
+ tokens.extend(cls._process_state(unprocessed, processed,
+ str(tdef)))
+ continue
+ if isinstance(tdef, _inherit):
+ # should be processed already, but may not in the case of:
+ # 1. the state has no counterpart in any parent
+ # 2. the state includes more than one 'inherit'
+ continue
+ if isinstance(tdef, default):
+ new_state = cls._process_new_state(tdef.state, unprocessed, processed)
+ tokens.append((re.compile('').match, None, new_state))
+ continue
+
+ assert type(tdef) is tuple, "wrong rule def %r" % tdef
+
+ try:
+ rex = cls._process_regex(tdef[0], rflags, state)
+ except Exception as err:
+ raise ValueError("uncompilable regex %r in state %r of %r: %s" %
+ (tdef[0], state, cls, err))
+
+ token = cls._process_token(tdef[1])
+
+ if len(tdef) == 2:
+ new_state = None
+ else:
+ new_state = cls._process_new_state(tdef[2],
+ unprocessed, processed)
+
+ tokens.append((rex, token, new_state))
+ return tokens
+
+ def process_tokendef(cls, name, tokendefs=None):
+ """Preprocess a dictionary of token definitions."""
+ processed = cls._all_tokens[name] = {}
+ tokendefs = tokendefs or cls.tokens[name]
+ for state in list(tokendefs):
+ cls._process_state(tokendefs, processed, state)
+ return processed
+
+ def get_tokendefs(cls):
+ """
+ Merge tokens from superclasses in MRO order, returning a single tokendef
+ dictionary.
+
+ Any state that is not defined by a subclass will be inherited
+ automatically. States that *are* defined by subclasses will, by
+ default, override that state in the superclass. If a subclass wishes to
+ inherit definitions from a superclass, it can use the special value
+ "inherit", which will cause the superclass' state definition to be
+ included at that point in the state.
+ """
+ tokens = {}
+ inheritable = {}
+ for c in cls.__mro__:
+ toks = c.__dict__.get('tokens', {})
+
+ for state, items in iteritems(toks):
+ curitems = tokens.get(state)
+ if curitems is None:
+ # N.b. because this is assigned by reference, sufficiently
+ # deep hierarchies are processed incrementally (e.g. for
+ # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
+ # will not see any inherits in B).
+ tokens[state] = items
+ try:
+ inherit_ndx = items.index(inherit)
+ except ValueError:
+ continue
+ inheritable[state] = inherit_ndx
+ continue
+
+ inherit_ndx = inheritable.pop(state, None)
+ if inherit_ndx is None:
+ continue
+
+ # Replace the "inherit" value with the items
+ curitems[inherit_ndx:inherit_ndx+1] = items
+ try:
+ # N.b. this is the index in items (that is, the superclass
+ # copy), so offset required when storing below.
+ new_inh_ndx = items.index(inherit)
+ except ValueError:
+ pass
+ else:
+ inheritable[state] = inherit_ndx + new_inh_ndx
+
+ return tokens
+
+ def __call__(cls, *args, **kwds):
+ """Instantiate cls after preprocessing its token definitions."""
+ if '_tokens' not in cls.__dict__:
+ cls._all_tokens = {}
+ cls._tmpname = 0
+ if hasattr(cls, 'token_variants') and cls.token_variants:
+ # don't process yet
+ pass
+ else:
+ cls._tokens = cls.process_tokendef('', cls.get_tokendefs())
+
+ return type.__call__(cls, *args, **kwds)
+
+
+@add_metaclass(RegexLexerMeta)
+class RegexLexer(Lexer):
+ """
+ Base for simple stateful regular expression-based lexers.
+ Simplifies the lexing process so that you need only
+ provide a list of states and regular expressions.
+ """
+
+ #: Flags for compiling the regular expressions.
+ #: Defaults to MULTILINE.
+ flags = re.MULTILINE
+
+ #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
+ #:
+ #: The initial state is 'root'.
+ #: ``new_state`` can be omitted to signify no state transition.
+ #: If it is a string, the state is pushed on the stack and changed.
+ #: If it is a tuple of strings, all states are pushed on the stack and
+ #: the current state will be the topmost.
+ #: It can also be ``combined('state1', 'state2', ...)``
+ #: to signify a new, anonymous state combined from the rules of two
+ #: or more existing ones.
+ #: Furthermore, it can be '#pop' to signify going back one step in
+ #: the state stack, or '#push' to push the current state on the stack
+ #: again.
+ #:
+ #: The tuple can also be replaced with ``include('state')``, in which
+ #: case the rules from the state named by the string are included in the
+ #: current one.
+ tokens = {}
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+
+        ``stack`` is the initial stack (default: ``['root']``)
+ """
+ pos = 0
+ tokendefs = self._tokens
+ statestack = list(stack)
+ statetokens = tokendefs[statestack[-1]]
+ while 1:
+ for rexmatch, action, new_state in statetokens:
+ m = rexmatch(text, pos)
+ if m:
+ if action is not None:
+ if type(action) is _TokenType:
+ yield pos, action, m.group()
+ else:
+ for item in action(self, m):
+ yield item
+ pos = m.end()
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ for state in new_state:
+ if state == '#pop':
if len(statestack) > 1:
statestack.pop()
- elif state == '#push':
- statestack.append(statestack[-1])
- else:
- statestack.append(state)
- elif isinstance(new_state, int):
+ elif state == '#push':
+ statestack.append(statestack[-1])
+ else:
+ statestack.append(state)
+ elif isinstance(new_state, int):
# pop, but keep at least one state on the stack
# (random code leading to unexpected pops should
# not allow exceptions)
@@ -653,229 +653,229 @@ class RegexLexer(Lexer):
del statestack[1:]
else:
del statestack[new_state:]
- elif new_state == '#push':
- statestack.append(statestack[-1])
- else:
- assert False, "wrong state def: %r" % new_state
- statetokens = tokendefs[statestack[-1]]
- break
- else:
- # We are here only if all state tokens have been considered
- # and there was not a match on any of them.
- try:
- if text[pos] == '\n':
- # at EOL, reset state to "root"
- statestack = ['root']
- statetokens = tokendefs['root']
- yield pos, Text, u'\n'
- pos += 1
- continue
- yield pos, Error, text[pos]
- pos += 1
- except IndexError:
- break
-
-
-class LexerContext(object):
- """
- A helper object that holds lexer position data.
- """
-
- def __init__(self, text, pos, stack=None, end=None):
- self.text = text
- self.pos = pos
- self.end = end or len(text) # end=0 not supported ;-)
- self.stack = stack or ['root']
-
- def __repr__(self):
- return 'LexerContext(%r, %r, %r)' % (
- self.text, self.pos, self.stack)
-
-
-class ExtendedRegexLexer(RegexLexer):
- """
- A RegexLexer that uses a context object to store its state.
- """
-
- def get_tokens_unprocessed(self, text=None, context=None):
- """
- Split ``text`` into (tokentype, text) pairs.
- If ``context`` is given, use this lexer context instead.
- """
- tokendefs = self._tokens
- if not context:
- ctx = LexerContext(text, 0)
- statetokens = tokendefs['root']
- else:
- ctx = context
- statetokens = tokendefs[ctx.stack[-1]]
- text = ctx.text
- while 1:
- for rexmatch, action, new_state in statetokens:
- m = rexmatch(text, ctx.pos, ctx.end)
- if m:
- if action is not None:
- if type(action) is _TokenType:
- yield ctx.pos, action, m.group()
- ctx.pos = m.end()
- else:
- for item in action(self, m, ctx):
- yield item
- if not new_state:
- # altered the state stack?
- statetokens = tokendefs[ctx.stack[-1]]
- # CAUTION: callback must set ctx.pos!
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
+ elif new_state == '#push':
+ statestack.append(statestack[-1])
+ else:
+ assert False, "wrong state def: %r" % new_state
+ statetokens = tokendefs[statestack[-1]]
+ break
+ else:
+ # We are here only if all state tokens have been considered
+ # and there was not a match on any of them.
+ try:
+ if text[pos] == '\n':
+ # at EOL, reset state to "root"
+ statestack = ['root']
+ statetokens = tokendefs['root']
+ yield pos, Text, u'\n'
+ pos += 1
+ continue
+ yield pos, Error, text[pos]
+ pos += 1
+ except IndexError:
+ break
+
+
+class LexerContext(object):
+ """
+ A helper object that holds lexer position data.
+ """
+
+ def __init__(self, text, pos, stack=None, end=None):
+ self.text = text
+ self.pos = pos
+ self.end = end or len(text) # end=0 not supported ;-)
+ self.stack = stack or ['root']
+
+ def __repr__(self):
+ return 'LexerContext(%r, %r, %r)' % (
+ self.text, self.pos, self.stack)
+
+
+class ExtendedRegexLexer(RegexLexer):
+ """
+ A RegexLexer that uses a context object to store its state.
+ """
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+ If ``context`` is given, use this lexer context instead.
+ """
+ tokendefs = self._tokens
+ if not context:
+ ctx = LexerContext(text, 0)
+ statetokens = tokendefs['root']
+ else:
+ ctx = context
+ statetokens = tokendefs[ctx.stack[-1]]
+ text = ctx.text
+ while 1:
+ for rexmatch, action, new_state in statetokens:
+ m = rexmatch(text, ctx.pos, ctx.end)
+ if m:
+ if action is not None:
+ if type(action) is _TokenType:
+ yield ctx.pos, action, m.group()
+ ctx.pos = m.end()
+ else:
+ for item in action(self, m, ctx):
+ yield item
+ if not new_state:
+ # altered the state stack?
+ statetokens = tokendefs[ctx.stack[-1]]
+ # CAUTION: callback must set ctx.pos!
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ for state in new_state:
+ if state == '#pop':
if len(ctx.stack) > 1:
ctx.stack.pop()
- elif state == '#push':
- ctx.stack.append(ctx.stack[-1])
- else:
- ctx.stack.append(state)
- elif isinstance(new_state, int):
+ elif state == '#push':
+ ctx.stack.append(ctx.stack[-1])
+ else:
+ ctx.stack.append(state)
+ elif isinstance(new_state, int):
# see RegexLexer for why this check is made
if abs(new_state) >= len(ctx.stack):
                                 del ctx.stack[1:]
else:
del ctx.stack[new_state:]
- elif new_state == '#push':
- ctx.stack.append(ctx.stack[-1])
- else:
- assert False, "wrong state def: %r" % new_state
- statetokens = tokendefs[ctx.stack[-1]]
- break
- else:
- try:
- if ctx.pos >= ctx.end:
- break
- if text[ctx.pos] == '\n':
- # at EOL, reset state to "root"
- ctx.stack = ['root']
- statetokens = tokendefs['root']
- yield ctx.pos, Text, u'\n'
- ctx.pos += 1
- continue
- yield ctx.pos, Error, text[ctx.pos]
- ctx.pos += 1
- except IndexError:
- break
-
-
-def do_insertions(insertions, tokens):
- """
- Helper for lexers which must combine the results of several
- sublexers.
-
- ``insertions`` is a list of ``(index, itokens)`` pairs.
- Each ``itokens`` iterable should be inserted at position
- ``index`` into the token stream given by the ``tokens``
- argument.
-
- The result is a combined token stream.
-
- TODO: clean up the code here.
- """
- insertions = iter(insertions)
- try:
- index, itokens = next(insertions)
- except StopIteration:
- # no insertions
- for item in tokens:
- yield item
- return
-
- realpos = None
- insleft = True
-
- # iterate over the token stream where we want to insert
- # the tokens from the insertion list.
- for i, t, v in tokens:
-        # first iteration: store the position of the first item
- if realpos is None:
- realpos = i
- oldi = 0
- while insleft and i + len(v) >= index:
- tmpval = v[oldi:index - i]
- yield realpos, t, tmpval
- realpos += len(tmpval)
- for it_index, it_token, it_value in itokens:
- yield realpos, it_token, it_value
- realpos += len(it_value)
- oldi = index - i
- try:
- index, itokens = next(insertions)
- except StopIteration:
- insleft = False
- break # not strictly necessary
- yield realpos, t, v[oldi:]
- realpos += len(v) - oldi
-
- # leftover tokens
- while insleft:
- # no normal tokens, set realpos to zero
- realpos = realpos or 0
- for p, t, v in itokens:
- yield realpos, t, v
- realpos += len(v)
- try:
- index, itokens = next(insertions)
- except StopIteration:
- insleft = False
- break # not strictly necessary
-
-
-class ProfilingRegexLexerMeta(RegexLexerMeta):
- """Metaclass for ProfilingRegexLexer, collects regex timing info."""
-
- def _process_regex(cls, regex, rflags, state):
- if isinstance(regex, words):
- rex = regex_opt(regex.words, prefix=regex.prefix,
- suffix=regex.suffix)
- else:
- rex = regex
- compiled = re.compile(rex, rflags)
-
- def match_func(text, pos, endpos=sys.maxsize):
- info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
- t0 = time.time()
- res = compiled.match(text, pos, endpos)
- t1 = time.time()
- info[0] += 1
- info[1] += t1 - t0
- return res
- return match_func
-
-
-@add_metaclass(ProfilingRegexLexerMeta)
-class ProfilingRegexLexer(RegexLexer):
- """Drop-in replacement for RegexLexer that does profiling of its regexes."""
-
- _prof_data = []
- _prof_sort_index = 4 # defaults to time per call
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- # this needs to be a stack, since using(this) will produce nested calls
- self.__class__._prof_data.append({})
- for tok in RegexLexer.get_tokens_unprocessed(self, text, stack):
- yield tok
- rawdata = self.__class__._prof_data.pop()
- data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
- n, 1000 * t, 1000 * t / n)
- for ((s, r), (n, t)) in rawdata.items()),
- key=lambda x: x[self._prof_sort_index],
- reverse=True)
- sum_total = sum(x[3] for x in data)
-
- print()
- print('Profiling result for %s lexing %d chars in %.3f ms' %
- (self.__class__.__name__, len(text), sum_total))
- print('=' * 110)
- print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
- print('-' * 110)
- for d in data:
- print('%-20s %-65s %5d %8.4f %8.4f' % d)
- print('=' * 110)
+ elif new_state == '#push':
+ ctx.stack.append(ctx.stack[-1])
+ else:
+ assert False, "wrong state def: %r" % new_state
+ statetokens = tokendefs[ctx.stack[-1]]
+ break
+ else:
+ try:
+ if ctx.pos >= ctx.end:
+ break
+ if text[ctx.pos] == '\n':
+ # at EOL, reset state to "root"
+ ctx.stack = ['root']
+ statetokens = tokendefs['root']
+ yield ctx.pos, Text, u'\n'
+ ctx.pos += 1
+ continue
+ yield ctx.pos, Error, text[ctx.pos]
+ ctx.pos += 1
+ except IndexError:
+ break
+
+
+def do_insertions(insertions, tokens):
+ """
+ Helper for lexers which must combine the results of several
+ sublexers.
+
+ ``insertions`` is a list of ``(index, itokens)`` pairs.
+ Each ``itokens`` iterable should be inserted at position
+ ``index`` into the token stream given by the ``tokens``
+ argument.
+
+ The result is a combined token stream.
+
+ TODO: clean up the code here.
+ """
+ insertions = iter(insertions)
+ try:
+ index, itokens = next(insertions)
+ except StopIteration:
+ # no insertions
+ for item in tokens:
+ yield item
+ return
+
+ realpos = None
+ insleft = True
+
+ # iterate over the token stream where we want to insert
+ # the tokens from the insertion list.
+ for i, t, v in tokens:
+        # first iteration: store the position of the first item
+ if realpos is None:
+ realpos = i
+ oldi = 0
+ while insleft and i + len(v) >= index:
+ tmpval = v[oldi:index - i]
+ yield realpos, t, tmpval
+ realpos += len(tmpval)
+ for it_index, it_token, it_value in itokens:
+ yield realpos, it_token, it_value
+ realpos += len(it_value)
+ oldi = index - i
+ try:
+ index, itokens = next(insertions)
+ except StopIteration:
+ insleft = False
+ break # not strictly necessary
+ yield realpos, t, v[oldi:]
+ realpos += len(v) - oldi
+
+ # leftover tokens
+ while insleft:
+ # no normal tokens, set realpos to zero
+ realpos = realpos or 0
+ for p, t, v in itokens:
+ yield realpos, t, v
+ realpos += len(v)
+ try:
+ index, itokens = next(insertions)
+ except StopIteration:
+ insleft = False
+ break # not strictly necessary
+
+
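# Editor's illustrative sketch (not part of this diff): do_insertions()
# splices pre-built token groups into a sublexer's stream, which is how the
# console/session lexers prepend prompt tokens to the lexed command.
from pygments.lexer import do_insertions
from pygments.lexers.python import PythonLexer
from pygments.token import Generic

line = '>>> print(1 + 1)\n'
prompt, code = line[:4], line[4:]
# insert the prompt token group at position 0 of the sublexer's stream
insertions = [(0, [(0, Generic.Prompt, prompt)])]
for index, token, value in do_insertions(
        insertions, PythonLexer().get_tokens_unprocessed(code)):
    print(index, token, repr(value))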
+class ProfilingRegexLexerMeta(RegexLexerMeta):
+ """Metaclass for ProfilingRegexLexer, collects regex timing info."""
+
+ def _process_regex(cls, regex, rflags, state):
+ if isinstance(regex, words):
+ rex = regex_opt(regex.words, prefix=regex.prefix,
+ suffix=regex.suffix)
+ else:
+ rex = regex
+ compiled = re.compile(rex, rflags)
+
+ def match_func(text, pos, endpos=sys.maxsize):
+ info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
+ t0 = time.time()
+ res = compiled.match(text, pos, endpos)
+ t1 = time.time()
+ info[0] += 1
+ info[1] += t1 - t0
+ return res
+ return match_func
+
+
+@add_metaclass(ProfilingRegexLexerMeta)
+class ProfilingRegexLexer(RegexLexer):
+ """Drop-in replacement for RegexLexer that does profiling of its regexes."""
+
+ _prof_data = []
+ _prof_sort_index = 4 # defaults to time per call
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ # this needs to be a stack, since using(this) will produce nested calls
+ self.__class__._prof_data.append({})
+ for tok in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ yield tok
+ rawdata = self.__class__._prof_data.pop()
+ data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
+ n, 1000 * t, 1000 * t / n)
+ for ((s, r), (n, t)) in rawdata.items()),
+ key=lambda x: x[self._prof_sort_index],
+ reverse=True)
+ sum_total = sum(x[3] for x in data)
+
+ print()
+ print('Profiling result for %s lexing %d chars in %.3f ms' %
+ (self.__class__.__name__, len(text), sum_total))
+ print('=' * 110)
+ print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
+ print('-' * 110)
+ for d in data:
+ print('%-20s %-65s %5d %8.4f %8.4f' % d)
+ print('=' * 110)
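The machinery restored above is easiest to see end to end. The following sketch (an editor's illustration, not part of this diff or of the Pygments sources) defines a minimal RegexLexer with an include()-shared state and a pushed string state; substituting ProfilingRegexLexer as the base class would additionally print per-regex timing after lexing.

from pygments.lexer import RegexLexer, include
from pygments.token import Comment, Name, String, Text

class MiniLexer(RegexLexer):          # hypothetical toy lexer
    name = 'Mini'
    aliases = ['mini']
    tokens = {
        'whitespace': [
            (r'\s+', Text),
            (r'#[^\n]*', Comment.Single),
        ],
        'root': [
            include('whitespace'),    # pull in the shared rules above
            (r'"', String, 'string'), # push the 'string' state
            (r'\w+', Name),
        ],
        'string': [
            (r'[^"\\]+', String),
            (r'\\.', String.Escape),
            (r'"', String, '#pop'),   # leave the 'string' state
        ],
    }

for pos, token, value in MiniLexer().get_tokens_unprocessed('say "hi"  # demo\n'):
    print(pos, token, repr(value))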
diff --git a/contrib/python/Pygments/py2/pygments/lexers/__init__.py b/contrib/python/Pygments/py2/pygments/lexers/__init__.py
index 100d807660..d8b9cbd37f 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/__init__.py
@@ -1,81 +1,81 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers
- ~~~~~~~~~~~~~~~
-
- Pygments lexers.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers
+ ~~~~~~~~~~~~~~~
+
+ Pygments lexers.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import types
-import fnmatch
-from os.path import basename
-
-from pygments.lexers._mapping import LEXERS
-from pygments.modeline import get_filetype_from_buffer
-from pygments.plugin import find_plugin_lexers
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import sys
+import types
+import fnmatch
+from os.path import basename
+
+from pygments.lexers._mapping import LEXERS
+from pygments.modeline import get_filetype_from_buffer
+from pygments.plugin import find_plugin_lexers
from pygments.util import ClassNotFound, itervalues, guess_decode, text_type
-
+
COMPAT = {
'Python3Lexer': 'PythonLexer',
'Python3TracebackLexer': 'PythonTracebackLexer',
}
-
-__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
+
+__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
-
-_lexer_cache = {}
-_pattern_cache = {}
-
-
-def _fn_matches(fn, glob):
- """Return whether the supplied file name fn matches pattern filename."""
- if glob not in _pattern_cache:
- pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
- return pattern.match(fn)
- return _pattern_cache[glob].match(fn)
-
-
-def _load_lexers(module_name):
- """Load a lexer (and all others in the module too)."""
- mod = __import__(module_name, None, None, ['__all__'])
- for lexer_name in mod.__all__:
- cls = getattr(mod, lexer_name)
- _lexer_cache[cls.name] = cls
-
-
-def get_all_lexers():
- """Return a generator of tuples in the form ``(name, aliases,
-    filenames, mimetypes)`` of all known lexers.
- """
- for item in itervalues(LEXERS):
- yield item[1:]
- for lexer in find_plugin_lexers():
- yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
-
-
-def find_lexer_class(name):
- """Lookup a lexer class by name.
-
- Return None if not found.
- """
- if name in _lexer_cache:
- return _lexer_cache[name]
- # lookup builtin lexers
- for module_name, lname, aliases, _, _ in itervalues(LEXERS):
- if name == lname:
- _load_lexers(module_name)
- return _lexer_cache[name]
- # continue with lexers from setuptools entrypoints
- for cls in find_plugin_lexers():
- if cls.name == name:
- return cls
-
-
+
+_lexer_cache = {}
+_pattern_cache = {}
+
+
+def _fn_matches(fn, glob):
+ """Return whether the supplied file name fn matches pattern filename."""
+ if glob not in _pattern_cache:
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
+
+
+def _load_lexers(module_name):
+ """Load a lexer (and all others in the module too)."""
+ mod = __import__(module_name, None, None, ['__all__'])
+ for lexer_name in mod.__all__:
+ cls = getattr(mod, lexer_name)
+ _lexer_cache[cls.name] = cls
+
+
+def get_all_lexers():
+ """Return a generator of tuples in the form ``(name, aliases,
+    filenames, mimetypes)`` of all known lexers.
+ """
+ for item in itervalues(LEXERS):
+ yield item[1:]
+ for lexer in find_plugin_lexers():
+ yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
+
+
+def find_lexer_class(name):
+ """Lookup a lexer class by name.
+
+ Return None if not found.
+ """
+ if name in _lexer_cache:
+ return _lexer_cache[name]
+ # lookup builtin lexers
+ for module_name, lname, aliases, _, _ in itervalues(LEXERS):
+ if name == lname:
+ _load_lexers(module_name)
+ return _lexer_cache[name]
+ # continue with lexers from setuptools entrypoints
+ for cls in find_plugin_lexers():
+ if cls.name == name:
+ return cls
+
+
def find_lexer_class_by_name(_alias):
"""Lookup a lexer class by alias.
@@ -98,27 +98,27 @@ def find_lexer_class_by_name(_alias):
raise ClassNotFound('no lexer for alias %r found' % _alias)
-def get_lexer_by_name(_alias, **options):
- """Get a lexer by an alias.
-
- Raises ClassNotFound if not found.
- """
- if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
-
- # lookup builtin lexers
- for module_name, name, aliases, _, _ in itervalues(LEXERS):
- if _alias.lower() in aliases:
- if name not in _lexer_cache:
- _load_lexers(module_name)
- return _lexer_cache[name](**options)
- # continue with lexers from setuptools entrypoints
- for cls in find_plugin_lexers():
- if _alias.lower() in cls.aliases:
- return cls(**options)
- raise ClassNotFound('no lexer for alias %r found' % _alias)
-
-
+def get_lexer_by_name(_alias, **options):
+ """Get a lexer by an alias.
+
+ Raises ClassNotFound if not found.
+ """
+ if not _alias:
+ raise ClassNotFound('no lexer for alias %r found' % _alias)
+
+ # lookup builtin lexers
+ for module_name, name, aliases, _, _ in itervalues(LEXERS):
+ if _alias.lower() in aliases:
+ if name not in _lexer_cache:
+ _load_lexers(module_name)
+ return _lexer_cache[name](**options)
+ # continue with lexers from setuptools entrypoints
+ for cls in find_plugin_lexers():
+ if _alias.lower() in cls.aliases:
+ return cls(**options)
+ raise ClassNotFound('no lexer for alias %r found' % _alias)
+
+
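# Editor's illustrative sketch (not part of this diff): get_lexer_by_name()
# resolves an alias first against the builtin LEXERS table and then against
# plugin entry points, instantiating the class with the given options.
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('python', stripall=True)
print(type(lexer).__name__)          # e.g. PythonLexer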
def load_lexer_from_file(filename, lexername="CustomLexer", **options):
"""Load a lexer from a file.
@@ -154,145 +154,145 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options):
raise ClassNotFound('error when loading custom lexer: %s' % err)
-def find_lexer_class_for_filename(_fn, code=None):
- """Get a lexer for a filename.
-
- If multiple lexers match the filename pattern, use ``analyse_text()`` to
- figure out which one is more appropriate.
-
- Returns None if not found.
- """
- matches = []
- fn = basename(_fn)
- for modname, name, _, filenames, _ in itervalues(LEXERS):
- for filename in filenames:
- if _fn_matches(fn, filename):
- if name not in _lexer_cache:
- _load_lexers(modname)
- matches.append((_lexer_cache[name], filename))
- for cls in find_plugin_lexers():
- for filename in cls.filenames:
- if _fn_matches(fn, filename):
- matches.append((cls, filename))
-
- if sys.version_info > (3,) and isinstance(code, bytes):
- # decode it, since all analyse_text functions expect unicode
- code = guess_decode(code)
-
- def get_rating(info):
- cls, filename = info
- # explicit patterns get a bonus
- bonus = '*' not in filename and 0.5 or 0
- # The class _always_ defines analyse_text because it's included in
- # the Lexer class. The default implementation returns None which
- # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
- # to find lexers which need it overridden.
- if code:
+def find_lexer_class_for_filename(_fn, code=None):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Returns None if not found.
+ """
+ matches = []
+ fn = basename(_fn)
+ for modname, name, _, filenames, _ in itervalues(LEXERS):
+ for filename in filenames:
+ if _fn_matches(fn, filename):
+ if name not in _lexer_cache:
+ _load_lexers(modname)
+ matches.append((_lexer_cache[name], filename))
+ for cls in find_plugin_lexers():
+ for filename in cls.filenames:
+ if _fn_matches(fn, filename):
+ matches.append((cls, filename))
+
+ if sys.version_info > (3,) and isinstance(code, bytes):
+ # decode it, since all analyse_text functions expect unicode
+ code = guess_decode(code)
+
+ def get_rating(info):
+ cls, filename = info
+ # explicit patterns get a bonus
+ bonus = '*' not in filename and 0.5 or 0
+ # The class _always_ defines analyse_text because it's included in
+ # the Lexer class. The default implementation returns None which
+ # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
+ # to find lexers which need it overridden.
+ if code:
return cls.analyse_text(code) + bonus, cls.__name__
return cls.priority + bonus, cls.__name__
-
- if matches:
- matches.sort(key=get_rating)
- # print "Possible lexers, after sort:", matches
- return matches[-1][0]
-
-
-def get_lexer_for_filename(_fn, code=None, **options):
- """Get a lexer for a filename.
-
- If multiple lexers match the filename pattern, use ``analyse_text()`` to
- figure out which one is more appropriate.
-
- Raises ClassNotFound if not found.
- """
- res = find_lexer_class_for_filename(_fn, code)
- if not res:
- raise ClassNotFound('no lexer for filename %r found' % _fn)
- return res(**options)
-
-
-def get_lexer_for_mimetype(_mime, **options):
- """Get a lexer for a mimetype.
-
- Raises ClassNotFound if not found.
- """
- for modname, name, _, _, mimetypes in itervalues(LEXERS):
- if _mime in mimetypes:
- if name not in _lexer_cache:
- _load_lexers(modname)
- return _lexer_cache[name](**options)
- for cls in find_plugin_lexers():
- if _mime in cls.mimetypes:
- return cls(**options)
- raise ClassNotFound('no lexer for mimetype %r found' % _mime)
-
-
-def _iter_lexerclasses(plugins=True):
- """Return an iterator over all lexer classes."""
- for key in sorted(LEXERS):
- module_name, name = LEXERS[key][:2]
- if name not in _lexer_cache:
- _load_lexers(module_name)
- yield _lexer_cache[name]
- if plugins:
- for lexer in find_plugin_lexers():
- yield lexer
-
-
-def guess_lexer_for_filename(_fn, _text, **options):
- """
-    Look up all lexers that handle the given filename as a primary
-    (``filenames``) or secondary (``alias_filenames``) pattern, then run a
-    text analysis for those lexers and choose the best result.
-
- usage::
-
- >>> from pygments.lexers import guess_lexer_for_filename
- >>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
- <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
- >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
- <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
- >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
- <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
- """
- fn = basename(_fn)
- primary = {}
- matching_lexers = set()
- for lexer in _iter_lexerclasses():
- for filename in lexer.filenames:
- if _fn_matches(fn, filename):
- matching_lexers.add(lexer)
- primary[lexer] = True
- for filename in lexer.alias_filenames:
- if _fn_matches(fn, filename):
- matching_lexers.add(lexer)
- primary[lexer] = False
- if not matching_lexers:
- raise ClassNotFound('no lexer for filename %r found' % fn)
- if len(matching_lexers) == 1:
- return matching_lexers.pop()(**options)
- result = []
- for lexer in matching_lexers:
- rv = lexer.analyse_text(_text)
- if rv == 1.0:
- return lexer(**options)
- result.append((rv, lexer))
-
- def type_sort(t):
- # sort by:
- # - analyse score
- # - is primary filename pattern?
- # - priority
- # - last resort: class name
- return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
- result.sort(key=type_sort)
-
- return result[-1][1](**options)
-
-
-def guess_lexer(_text, **options):
- """Guess a lexer by strong distinctions in the text (eg, shebang)."""
-
+
+ if matches:
+ matches.sort(key=get_rating)
+ # print "Possible lexers, after sort:", matches
+ return matches[-1][0]
+
+
+def get_lexer_for_filename(_fn, code=None, **options):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Raises ClassNotFound if not found.
+ """
+ res = find_lexer_class_for_filename(_fn, code)
+ if not res:
+ raise ClassNotFound('no lexer for filename %r found' % _fn)
+ return res(**options)
+
+
+def get_lexer_for_mimetype(_mime, **options):
+ """Get a lexer for a mimetype.
+
+ Raises ClassNotFound if not found.
+ """
+ for modname, name, _, _, mimetypes in itervalues(LEXERS):
+ if _mime in mimetypes:
+ if name not in _lexer_cache:
+ _load_lexers(modname)
+ return _lexer_cache[name](**options)
+ for cls in find_plugin_lexers():
+ if _mime in cls.mimetypes:
+ return cls(**options)
+ raise ClassNotFound('no lexer for mimetype %r found' % _mime)
+
+
+def _iter_lexerclasses(plugins=True):
+ """Return an iterator over all lexer classes."""
+ for key in sorted(LEXERS):
+ module_name, name = LEXERS[key][:2]
+ if name not in _lexer_cache:
+ _load_lexers(module_name)
+ yield _lexer_cache[name]
+ if plugins:
+ for lexer in find_plugin_lexers():
+ yield lexer
+
+
+def guess_lexer_for_filename(_fn, _text, **options):
+ """
+    Look up all lexers that handle the given filename as a primary
+    (``filenames``) or secondary (``alias_filenames``) pattern, then run a
+    text analysis for those lexers and choose the best result.
+
+ usage::
+
+ >>> from pygments.lexers import guess_lexer_for_filename
+ >>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
+ <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
+ >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
+ <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
+ >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
+ <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
+ """
+ fn = basename(_fn)
+ primary = {}
+ matching_lexers = set()
+ for lexer in _iter_lexerclasses():
+ for filename in lexer.filenames:
+ if _fn_matches(fn, filename):
+ matching_lexers.add(lexer)
+ primary[lexer] = True
+ for filename in lexer.alias_filenames:
+ if _fn_matches(fn, filename):
+ matching_lexers.add(lexer)
+ primary[lexer] = False
+ if not matching_lexers:
+ raise ClassNotFound('no lexer for filename %r found' % fn)
+ if len(matching_lexers) == 1:
+ return matching_lexers.pop()(**options)
+ result = []
+ for lexer in matching_lexers:
+ rv = lexer.analyse_text(_text)
+ if rv == 1.0:
+ return lexer(**options)
+ result.append((rv, lexer))
+
+ def type_sort(t):
+ # sort by:
+ # - analyse score
+ # - is primary filename pattern?
+ # - priority
+ # - last resort: class name
+ return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
+ result.sort(key=type_sort)
+
+ return result[-1][1](**options)
+
+
+def guess_lexer(_text, **options):
+ """Guess a lexer by strong distinctions in the text (eg, shebang)."""
+
if not isinstance(_text, text_type):
inencoding = options.get('inencoding', options.get('encoding'))
if inencoding:
@@ -300,44 +300,44 @@ def guess_lexer(_text, **options):
else:
_text, _ = guess_decode(_text)
- # try to get a vim modeline first
- ft = get_filetype_from_buffer(_text)
-
- if ft is not None:
- try:
- return get_lexer_by_name(ft, **options)
- except ClassNotFound:
- pass
-
- best_lexer = [0.0, None]
- for lexer in _iter_lexerclasses():
- rv = lexer.analyse_text(_text)
- if rv == 1.0:
- return lexer(**options)
- if rv > best_lexer[0]:
- best_lexer[:] = (rv, lexer)
- if not best_lexer[0] or best_lexer[1] is None:
- raise ClassNotFound('no lexer matching the text found')
- return best_lexer[1](**options)
-
-
-class _automodule(types.ModuleType):
- """Automatically import lexers."""
-
- def __getattr__(self, name):
- info = LEXERS.get(name)
- if info:
- _load_lexers(info[0])
- cls = _lexer_cache[info[1]]
- setattr(self, name, cls)
- return cls
+ # try to get a vim modeline first
+ ft = get_filetype_from_buffer(_text)
+
+ if ft is not None:
+ try:
+ return get_lexer_by_name(ft, **options)
+ except ClassNotFound:
+ pass
+
+ best_lexer = [0.0, None]
+ for lexer in _iter_lexerclasses():
+ rv = lexer.analyse_text(_text)
+ if rv == 1.0:
+ return lexer(**options)
+ if rv > best_lexer[0]:
+ best_lexer[:] = (rv, lexer)
+ if not best_lexer[0] or best_lexer[1] is None:
+ raise ClassNotFound('no lexer matching the text found')
+ return best_lexer[1](**options)
+
+
+class _automodule(types.ModuleType):
+ """Automatically import lexers."""
+
+ def __getattr__(self, name):
+ info = LEXERS.get(name)
+ if info:
+ _load_lexers(info[0])
+ cls = _lexer_cache[info[1]]
+ setattr(self, name, cls)
+ return cls
if name in COMPAT:
return getattr(self, COMPAT[name])
- raise AttributeError(name)
-
-
-oldmod = sys.modules[__name__]
-newmod = _automodule(__name__)
-newmod.__dict__.update(oldmod.__dict__)
-sys.modules[__name__] = newmod
-del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
+ raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
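
A minimal usage sketch of the lexer-guessing helpers restored above (stock Pygments API; the sample text and filename are illustrative and not part of this diff):

    # Pick a lexer for a templated HTML snippet; fall back to content-only
    # guessing when no filename pattern matches.
    from pygments.lexers import guess_lexer, guess_lexer_for_filename
    from pygments.util import ClassNotFound

    sample = '<h1>{{ title|e }}</h1>'
    try:
        # Filename patterns narrow the candidates; analyse_text() scores them,
        # and ties are broken by (score, primary pattern, priority, class name).
        lexer = guess_lexer_for_filename('hello.html', sample)
    except ClassNotFound:
        # No filename pattern matched at all -- guess from content alone,
        # which also honours a vim modeline if one is present.
        lexer = guess_lexer(sample)

    print(lexer.__class__.__name__)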
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py
index b76c22aba8..af52906355 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_asy_builtins.py
@@ -1,1645 +1,1645 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._asy_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the asy-function names and asy-variable names of
- Asymptote.
-
- Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
- TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
- for function and variable names.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._asy_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the asy-function names and asy-variable names of
+ Asymptote.
+
+ Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
+ TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
+ for function and variable names.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
ASYFUNCNAME = {
- 'AND',
- 'Arc',
- 'ArcArrow',
- 'ArcArrows',
- 'Arrow',
- 'Arrows',
- 'Automatic',
- 'AvantGarde',
- 'BBox',
- 'BWRainbow',
- 'BWRainbow2',
- 'Bar',
- 'Bars',
- 'BeginArcArrow',
- 'BeginArrow',
- 'BeginBar',
- 'BeginDotMargin',
- 'BeginMargin',
- 'BeginPenMargin',
- 'Blank',
- 'Bookman',
- 'Bottom',
- 'BottomTop',
- 'Bounds',
- 'Break',
- 'Broken',
- 'BrokenLog',
- 'Ceil',
- 'Circle',
- 'CircleBarIntervalMarker',
- 'Cos',
- 'Courier',
- 'CrossIntervalMarker',
- 'DefaultFormat',
- 'DefaultLogFormat',
- 'Degrees',
- 'Dir',
- 'DotMargin',
- 'DotMargins',
- 'Dotted',
- 'Draw',
- 'Drawline',
- 'Embed',
- 'EndArcArrow',
- 'EndArrow',
- 'EndBar',
- 'EndDotMargin',
- 'EndMargin',
- 'EndPenMargin',
- 'Fill',
- 'FillDraw',
- 'Floor',
- 'Format',
- 'Full',
- 'Gaussian',
- 'Gaussrand',
- 'Gaussrandpair',
- 'Gradient',
- 'Grayscale',
- 'Helvetica',
- 'Hermite',
- 'HookHead',
- 'InOutTicks',
- 'InTicks',
- 'J',
- 'Label',
- 'Landscape',
- 'Left',
- 'LeftRight',
- 'LeftTicks',
- 'Legend',
- 'Linear',
- 'Link',
- 'Log',
- 'LogFormat',
- 'Margin',
- 'Margins',
- 'Mark',
- 'MidArcArrow',
- 'MidArrow',
- 'NOT',
- 'NewCenturySchoolBook',
- 'NoBox',
- 'NoMargin',
- 'NoModifier',
- 'NoTicks',
- 'NoTicks3',
- 'NoZero',
- 'NoZeroFormat',
- 'None',
- 'OR',
- 'OmitFormat',
- 'OmitTick',
- 'OutTicks',
- 'Ox',
- 'Oy',
- 'Palatino',
- 'PaletteTicks',
- 'Pen',
- 'PenMargin',
- 'PenMargins',
- 'Pentype',
- 'Portrait',
- 'RadialShade',
- 'Rainbow',
- 'Range',
- 'Relative',
- 'Right',
- 'RightTicks',
- 'Rotate',
- 'Round',
- 'SQR',
- 'Scale',
- 'ScaleX',
- 'ScaleY',
- 'ScaleZ',
- 'Seascape',
- 'Shift',
- 'Sin',
- 'Slant',
- 'Spline',
- 'StickIntervalMarker',
- 'Straight',
- 'Symbol',
- 'Tan',
- 'TeXify',
- 'Ticks',
- 'Ticks3',
- 'TildeIntervalMarker',
- 'TimesRoman',
- 'Top',
- 'TrueMargin',
- 'UnFill',
- 'UpsideDown',
- 'Wheel',
- 'X',
- 'XEquals',
- 'XOR',
- 'XY',
- 'XYEquals',
- 'XYZero',
- 'XYgrid',
- 'XZEquals',
- 'XZZero',
- 'XZero',
- 'XZgrid',
- 'Y',
- 'YEquals',
- 'YXgrid',
- 'YZ',
- 'YZEquals',
- 'YZZero',
- 'YZero',
- 'YZgrid',
- 'Z',
- 'ZX',
- 'ZXgrid',
- 'ZYgrid',
- 'ZapfChancery',
- 'ZapfDingbats',
- '_cputime',
- '_draw',
- '_eval',
- '_image',
- '_labelpath',
- '_projection',
- '_strokepath',
- '_texpath',
- 'aCos',
- 'aSin',
- 'aTan',
- 'abort',
- 'abs',
- 'accel',
- 'acos',
- 'acosh',
- 'acot',
- 'acsc',
- 'add',
- 'addArrow',
- 'addMargins',
- 'addSaveFunction',
- 'addnode',
- 'addnodes',
- 'addpenarc',
- 'addpenline',
- 'addseg',
- 'adjust',
- 'alias',
- 'align',
- 'all',
- 'altitude',
- 'angabscissa',
- 'angle',
- 'angpoint',
- 'animate',
- 'annotate',
- 'anticomplementary',
- 'antipedal',
- 'apply',
- 'approximate',
- 'arc',
- 'arcarrowsize',
- 'arccircle',
- 'arcdir',
- 'arcfromcenter',
- 'arcfromfocus',
- 'arclength',
- 'arcnodesnumber',
- 'arcpoint',
- 'arcsubtended',
- 'arcsubtendedcenter',
- 'arctime',
- 'arctopath',
- 'array',
- 'arrow',
- 'arrow2',
- 'arrowbase',
- 'arrowbasepoints',
- 'arrowsize',
- 'asec',
- 'asin',
- 'asinh',
- 'ask',
- 'assert',
- 'asy',
- 'asycode',
- 'asydir',
- 'asyfigure',
- 'asyfilecode',
- 'asyinclude',
- 'asywrite',
- 'atan',
- 'atan2',
- 'atanh',
- 'atbreakpoint',
- 'atexit',
- 'atime',
- 'attach',
- 'attract',
- 'atupdate',
- 'autoformat',
- 'autoscale',
- 'autoscale3',
- 'axes',
- 'axes3',
- 'axialshade',
- 'axis',
- 'axiscoverage',
- 'azimuth',
- 'babel',
- 'background',
- 'bangles',
- 'bar',
- 'barmarksize',
- 'barsize',
- 'basealign',
- 'baseline',
- 'bbox',
- 'beep',
- 'begin',
- 'beginclip',
- 'begingroup',
- 'beginpoint',
- 'between',
- 'bevel',
- 'bezier',
- 'bezierP',
- 'bezierPP',
- 'bezierPPP',
- 'bezulate',
- 'bibliography',
- 'bibliographystyle',
- 'binarytree',
- 'binarytreeNode',
- 'binomial',
- 'binput',
- 'bins',
- 'bisector',
- 'bisectorpoint',
- 'blend',
- 'boutput',
- 'box',
- 'bqe',
- 'breakpoint',
- 'breakpoints',
- 'brick',
- 'buildRestoreDefaults',
- 'buildRestoreThunk',
- 'buildcycle',
- 'bulletcolor',
- 'canonical',
- 'canonicalcartesiansystem',
- 'cartesiansystem',
- 'case1',
- 'case2',
- 'case3',
- 'cbrt',
- 'cd',
- 'ceil',
- 'center',
- 'centerToFocus',
- 'centroid',
- 'cevian',
- 'change2',
- 'changecoordsys',
- 'checkSegment',
- 'checkconditionlength',
- 'checker',
- 'checklengths',
- 'checkposition',
- 'checktriangle',
- 'choose',
- 'circle',
- 'circlebarframe',
- 'circlemarkradius',
- 'circlenodesnumber',
- 'circumcenter',
- 'circumcircle',
- 'clamped',
- 'clear',
- 'clip',
- 'clipdraw',
- 'close',
- 'cmyk',
- 'code',
- 'colatitude',
- 'collect',
- 'collinear',
- 'color',
- 'colorless',
- 'colors',
- 'colorspace',
- 'comma',
- 'compassmark',
- 'complement',
- 'complementary',
- 'concat',
- 'concurrent',
- 'cone',
- 'conic',
- 'conicnodesnumber',
- 'conictype',
- 'conj',
- 'connect',
- 'containmentTree',
- 'contains',
- 'contour',
- 'contour3',
- 'controlSpecifier',
- 'convert',
- 'coordinates',
- 'coordsys',
- 'copy',
- 'cos',
- 'cosh',
- 'cot',
- 'countIntersections',
- 'cputime',
- 'crop',
- 'cropcode',
- 'cross',
- 'crossframe',
- 'crosshatch',
- 'crossmarksize',
- 'csc',
- 'cubicroots',
- 'curabscissa',
- 'curlSpecifier',
- 'curpoint',
- 'currentarrow',
- 'currentexitfunction',
- 'currentmomarrow',
- 'currentpolarconicroutine',
- 'curve',
- 'cut',
- 'cutafter',
- 'cutbefore',
- 'cyclic',
- 'cylinder',
- 'debugger',
- 'deconstruct',
- 'defaultdir',
- 'defaultformat',
- 'defaultpen',
- 'defined',
- 'degenerate',
- 'degrees',
- 'delete',
- 'deletepreamble',
- 'determinant',
- 'diagonal',
- 'diamond',
- 'diffdiv',
- 'dir',
- 'dirSpecifier',
- 'dirtime',
- 'display',
- 'distance',
- 'divisors',
- 'do_overpaint',
- 'dot',
- 'dotframe',
- 'dotsize',
- 'downcase',
- 'draw',
- 'drawAll',
- 'drawDoubleLine',
- 'drawFermion',
- 'drawGhost',
- 'drawGluon',
- 'drawMomArrow',
- 'drawPhoton',
- 'drawScalar',
- 'drawVertex',
- 'drawVertexBox',
- 'drawVertexBoxO',
- 'drawVertexBoxX',
- 'drawVertexO',
- 'drawVertexOX',
- 'drawVertexTriangle',
- 'drawVertexTriangleO',
- 'drawVertexX',
- 'drawarrow',
- 'drawarrow2',
- 'drawline',
- 'drawtick',
- 'duplicate',
- 'elle',
- 'ellipse',
- 'ellipsenodesnumber',
- 'embed',
- 'embed3',
- 'empty',
- 'enclose',
- 'end',
- 'endScript',
- 'endclip',
- 'endgroup',
- 'endl',
- 'endpoint',
- 'endpoints',
- 'eof',
- 'eol',
- 'equation',
- 'equations',
- 'erase',
- 'erasestep',
- 'erf',
- 'erfc',
- 'error',
- 'errorbar',
- 'errorbars',
- 'eval',
- 'excenter',
- 'excircle',
- 'exit',
- 'exitXasyMode',
- 'exitfunction',
- 'exp',
- 'expfactors',
- 'expi',
- 'expm1',
- 'exradius',
- 'extend',
- 'extension',
- 'extouch',
- 'fabs',
- 'factorial',
- 'fermat',
- 'fft',
- 'fhorner',
- 'figure',
- 'file',
- 'filecode',
- 'fill',
- 'filldraw',
- 'filloutside',
- 'fillrule',
- 'filltype',
- 'find',
- 'finite',
- 'finiteDifferenceJacobian',
- 'firstcut',
- 'firstframe',
- 'fit',
- 'fit2',
- 'fixedscaling',
- 'floor',
- 'flush',
- 'fmdefaults',
- 'fmod',
- 'focusToCenter',
- 'font',
- 'fontcommand',
- 'fontsize',
- 'foot',
- 'format',
- 'frac',
- 'frequency',
- 'fromCenter',
- 'fromFocus',
- 'fspline',
- 'functionshade',
- 'gamma',
- 'generate_random_backtrace',
- 'generateticks',
- 'gergonne',
- 'getc',
- 'getint',
- 'getpair',
- 'getreal',
- 'getstring',
- 'gettriple',
- 'gluon',
- 'gouraudshade',
- 'graph',
- 'graphic',
- 'gray',
- 'grestore',
- 'grid',
- 'grid3',
- 'gsave',
- 'halfbox',
- 'hatch',
- 'hdiffdiv',
- 'hermite',
- 'hex',
- 'histogram',
- 'history',
- 'hline',
- 'hprojection',
- 'hsv',
- 'hyperbola',
- 'hyperbolanodesnumber',
- 'hyperlink',
- 'hypot',
- 'identity',
- 'image',
- 'incenter',
- 'incentral',
- 'incircle',
- 'increasing',
- 'incrementposition',
- 'indexedTransform',
- 'indexedfigure',
- 'initXasyMode',
- 'initdefaults',
- 'input',
- 'inradius',
- 'insert',
- 'inside',
- 'integrate',
- 'interactive',
- 'interior',
- 'interp',
- 'interpolate',
- 'intersect',
- 'intersection',
- 'intersectionpoint',
- 'intersectionpoints',
- 'intersections',
- 'intouch',
- 'inverse',
- 'inversion',
- 'invisible',
- 'is3D',
- 'isDuplicate',
- 'isogonal',
- 'isogonalconjugate',
- 'isotomic',
- 'isotomicconjugate',
- 'isparabola',
- 'italic',
- 'item',
- 'key',
- 'kurtosis',
- 'kurtosisexcess',
- 'label',
- 'labelaxis',
- 'labelmargin',
- 'labelpath',
- 'labels',
- 'labeltick',
- 'labelx',
- 'labelx3',
- 'labely',
- 'labely3',
- 'labelz',
- 'labelz3',
- 'lastcut',
- 'latex',
- 'latitude',
- 'latticeshade',
- 'layer',
- 'layout',
- 'ldexp',
- 'leastsquares',
- 'legend',
- 'legenditem',
- 'length',
- 'lift',
- 'light',
- 'limits',
- 'line',
- 'linear',
- 'linecap',
- 'lineinversion',
- 'linejoin',
- 'linemargin',
- 'lineskip',
- 'linetype',
- 'linewidth',
- 'link',
- 'list',
- 'lm_enorm',
- 'lm_evaluate_default',
- 'lm_lmdif',
- 'lm_lmpar',
- 'lm_minimize',
- 'lm_print_default',
- 'lm_print_quiet',
- 'lm_qrfac',
- 'lm_qrsolv',
- 'locale',
- 'locate',
- 'locatefile',
- 'location',
- 'log',
- 'log10',
- 'log1p',
- 'logaxiscoverage',
- 'longitude',
- 'lookup',
- 'magnetize',
- 'makeNode',
- 'makedraw',
- 'makepen',
- 'map',
- 'margin',
- 'markangle',
- 'markangleradius',
- 'markanglespace',
- 'markarc',
- 'marker',
- 'markinterval',
- 'marknodes',
- 'markrightangle',
- 'markuniform',
- 'mass',
- 'masscenter',
- 'massformat',
- 'math',
- 'max',
- 'max3',
- 'maxbezier',
- 'maxbound',
- 'maxcoords',
- 'maxlength',
- 'maxratio',
- 'maxtimes',
- 'mean',
- 'medial',
- 'median',
- 'midpoint',
- 'min',
- 'min3',
- 'minbezier',
- 'minbound',
- 'minipage',
- 'minratio',
- 'mintimes',
- 'miterlimit',
- 'momArrowPath',
- 'momarrowsize',
- 'monotonic',
- 'multifigure',
- 'nativeformat',
- 'natural',
- 'needshipout',
- 'newl',
- 'newpage',
- 'newslide',
- 'newton',
- 'newtree',
- 'nextframe',
- 'nextnormal',
- 'nextpage',
- 'nib',
- 'nodabscissa',
- 'none',
- 'norm',
- 'normalvideo',
- 'notaknot',
- 'nowarn',
- 'numberpage',
- 'nurb',
- 'object',
- 'offset',
- 'onpath',
- 'opacity',
- 'opposite',
- 'orientation',
- 'orig_circlenodesnumber',
- 'orig_circlenodesnumber1',
- 'orig_draw',
- 'orig_ellipsenodesnumber',
- 'orig_ellipsenodesnumber1',
- 'orig_hyperbolanodesnumber',
- 'orig_parabolanodesnumber',
- 'origin',
- 'orthic',
- 'orthocentercenter',
- 'outformat',
- 'outline',
- 'outprefix',
- 'output',
- 'overloadedMessage',
- 'overwrite',
- 'pack',
- 'pad',
- 'pairs',
- 'palette',
- 'parabola',
- 'parabolanodesnumber',
- 'parallel',
- 'partialsum',
- 'path',
- 'path3',
- 'pattern',
- 'pause',
- 'pdf',
- 'pedal',
- 'periodic',
- 'perp',
- 'perpendicular',
- 'perpendicularmark',
- 'phantom',
- 'phi1',
- 'phi2',
- 'phi3',
- 'photon',
- 'piecewisestraight',
- 'point',
- 'polar',
- 'polarconicroutine',
- 'polargraph',
- 'polygon',
- 'postcontrol',
- 'postscript',
- 'pow10',
- 'ppoint',
- 'prc',
- 'prc0',
- 'precision',
- 'precontrol',
- 'prepend',
- 'print_random_addresses',
- 'project',
- 'projection',
- 'purge',
- 'pwhermite',
- 'quadrant',
- 'quadraticroots',
- 'quantize',
- 'quarticroots',
- 'quotient',
- 'radialshade',
- 'radians',
- 'radicalcenter',
- 'radicalline',
- 'radius',
- 'rand',
- 'randompath',
- 'rd',
- 'readline',
- 'realmult',
- 'realquarticroots',
- 'rectangle',
- 'rectangular',
- 'rectify',
- 'reflect',
- 'relabscissa',
- 'relative',
- 'relativedistance',
- 'reldir',
- 'relpoint',
- 'reltime',
- 'remainder',
- 'remark',
- 'removeDuplicates',
- 'rename',
- 'replace',
- 'report',
- 'resetdefaultpen',
- 'restore',
- 'restoredefaults',
- 'reverse',
- 'reversevideo',
- 'rf',
- 'rfind',
- 'rgb',
- 'rgba',
- 'rgbint',
- 'rms',
- 'rotate',
- 'rotateO',
- 'rotation',
- 'round',
- 'roundbox',
- 'roundedpath',
- 'roundrectangle',
- 'samecoordsys',
- 'sameside',
- 'sample',
- 'save',
- 'savedefaults',
- 'saveline',
- 'scale',
- 'scale3',
- 'scaleO',
- 'scaleT',
- 'scaleless',
- 'scientific',
- 'search',
- 'searchtree',
- 'sec',
- 'secondaryX',
- 'secondaryY',
- 'seconds',
- 'section',
- 'sector',
- 'seek',
- 'seekeof',
- 'segment',
- 'sequence',
- 'setpens',
- 'sgn',
- 'sgnd',
- 'sharpangle',
- 'sharpdegrees',
- 'shift',
- 'shiftless',
- 'shipout',
- 'shipout3',
- 'show',
- 'side',
- 'simeq',
- 'simpson',
- 'sin',
- 'single',
- 'sinh',
- 'size',
- 'size3',
- 'skewness',
- 'skip',
- 'slant',
- 'sleep',
- 'slope',
- 'slopefield',
- 'solve',
- 'solveBVP',
- 'sort',
- 'sourceline',
- 'sphere',
- 'split',
- 'sqrt',
- 'square',
- 'srand',
- 'standardizecoordsys',
- 'startScript',
- 'startTrembling',
- 'stdev',
- 'step',
- 'stickframe',
- 'stickmarksize',
- 'stickmarkspace',
- 'stop',
- 'straight',
- 'straightness',
- 'string',
- 'stripdirectory',
- 'stripextension',
- 'stripfile',
- 'strokepath',
- 'subdivide',
- 'subitem',
- 'subpath',
- 'substr',
- 'sum',
- 'surface',
- 'symmedial',
- 'symmedian',
- 'system',
- 'tab',
- 'tableau',
- 'tan',
- 'tangent',
- 'tangential',
- 'tangents',
- 'tanh',
- 'tell',
- 'tensionSpecifier',
- 'tensorshade',
- 'tex',
- 'texcolor',
- 'texify',
- 'texpath',
- 'texpreamble',
- 'texreset',
- 'texshipout',
- 'texsize',
- 'textpath',
- 'thick',
- 'thin',
- 'tick',
- 'tickMax',
- 'tickMax3',
- 'tickMin',
- 'tickMin3',
- 'ticklabelshift',
- 'ticklocate',
- 'tildeframe',
- 'tildemarksize',
- 'tile',
- 'tiling',
- 'time',
- 'times',
- 'title',
- 'titlepage',
- 'topbox',
- 'transform',
- 'transformation',
- 'transpose',
- 'tremble',
- 'trembleFuzz',
- 'tremble_circlenodesnumber',
- 'tremble_circlenodesnumber1',
- 'tremble_draw',
- 'tremble_ellipsenodesnumber',
- 'tremble_ellipsenodesnumber1',
- 'tremble_hyperbolanodesnumber',
- 'tremble_marknodes',
- 'tremble_markuniform',
- 'tremble_parabolanodesnumber',
- 'triangle',
- 'triangleAbc',
- 'triangleabc',
- 'triangulate',
- 'tricoef',
- 'tridiagonal',
- 'trilinear',
- 'trim',
- 'trueMagnetize',
- 'truepoint',
- 'tube',
- 'uncycle',
- 'unfill',
- 'uniform',
- 'unit',
- 'unitrand',
- 'unitsize',
- 'unityroot',
- 'unstraighten',
- 'upcase',
- 'updatefunction',
- 'uperiodic',
- 'upscale',
- 'uptodate',
- 'usepackage',
- 'usersetting',
- 'usetypescript',
- 'usleep',
- 'value',
- 'variance',
- 'variancebiased',
- 'vbox',
- 'vector',
- 'vectorfield',
- 'verbatim',
- 'view',
- 'vline',
- 'vperiodic',
- 'vprojection',
- 'warn',
- 'warning',
- 'windingnumber',
- 'write',
- 'xaxis',
- 'xaxis3',
- 'xaxis3At',
- 'xaxisAt',
- 'xequals',
- 'xinput',
- 'xlimits',
- 'xoutput',
- 'xpart',
- 'xscale',
- 'xscaleO',
- 'xtick',
- 'xtick3',
- 'xtrans',
- 'yaxis',
- 'yaxis3',
- 'yaxis3At',
- 'yaxisAt',
- 'yequals',
- 'ylimits',
- 'ypart',
- 'yscale',
- 'yscaleO',
- 'ytick',
- 'ytick3',
- 'ytrans',
- 'zaxis3',
- 'zaxis3At',
- 'zero',
- 'zero3',
- 'zlimits',
- 'zpart',
- 'ztick',
- 'ztick3',
- 'ztrans'
+ 'AND',
+ 'Arc',
+ 'ArcArrow',
+ 'ArcArrows',
+ 'Arrow',
+ 'Arrows',
+ 'Automatic',
+ 'AvantGarde',
+ 'BBox',
+ 'BWRainbow',
+ 'BWRainbow2',
+ 'Bar',
+ 'Bars',
+ 'BeginArcArrow',
+ 'BeginArrow',
+ 'BeginBar',
+ 'BeginDotMargin',
+ 'BeginMargin',
+ 'BeginPenMargin',
+ 'Blank',
+ 'Bookman',
+ 'Bottom',
+ 'BottomTop',
+ 'Bounds',
+ 'Break',
+ 'Broken',
+ 'BrokenLog',
+ 'Ceil',
+ 'Circle',
+ 'CircleBarIntervalMarker',
+ 'Cos',
+ 'Courier',
+ 'CrossIntervalMarker',
+ 'DefaultFormat',
+ 'DefaultLogFormat',
+ 'Degrees',
+ 'Dir',
+ 'DotMargin',
+ 'DotMargins',
+ 'Dotted',
+ 'Draw',
+ 'Drawline',
+ 'Embed',
+ 'EndArcArrow',
+ 'EndArrow',
+ 'EndBar',
+ 'EndDotMargin',
+ 'EndMargin',
+ 'EndPenMargin',
+ 'Fill',
+ 'FillDraw',
+ 'Floor',
+ 'Format',
+ 'Full',
+ 'Gaussian',
+ 'Gaussrand',
+ 'Gaussrandpair',
+ 'Gradient',
+ 'Grayscale',
+ 'Helvetica',
+ 'Hermite',
+ 'HookHead',
+ 'InOutTicks',
+ 'InTicks',
+ 'J',
+ 'Label',
+ 'Landscape',
+ 'Left',
+ 'LeftRight',
+ 'LeftTicks',
+ 'Legend',
+ 'Linear',
+ 'Link',
+ 'Log',
+ 'LogFormat',
+ 'Margin',
+ 'Margins',
+ 'Mark',
+ 'MidArcArrow',
+ 'MidArrow',
+ 'NOT',
+ 'NewCenturySchoolBook',
+ 'NoBox',
+ 'NoMargin',
+ 'NoModifier',
+ 'NoTicks',
+ 'NoTicks3',
+ 'NoZero',
+ 'NoZeroFormat',
+ 'None',
+ 'OR',
+ 'OmitFormat',
+ 'OmitTick',
+ 'OutTicks',
+ 'Ox',
+ 'Oy',
+ 'Palatino',
+ 'PaletteTicks',
+ 'Pen',
+ 'PenMargin',
+ 'PenMargins',
+ 'Pentype',
+ 'Portrait',
+ 'RadialShade',
+ 'Rainbow',
+ 'Range',
+ 'Relative',
+ 'Right',
+ 'RightTicks',
+ 'Rotate',
+ 'Round',
+ 'SQR',
+ 'Scale',
+ 'ScaleX',
+ 'ScaleY',
+ 'ScaleZ',
+ 'Seascape',
+ 'Shift',
+ 'Sin',
+ 'Slant',
+ 'Spline',
+ 'StickIntervalMarker',
+ 'Straight',
+ 'Symbol',
+ 'Tan',
+ 'TeXify',
+ 'Ticks',
+ 'Ticks3',
+ 'TildeIntervalMarker',
+ 'TimesRoman',
+ 'Top',
+ 'TrueMargin',
+ 'UnFill',
+ 'UpsideDown',
+ 'Wheel',
+ 'X',
+ 'XEquals',
+ 'XOR',
+ 'XY',
+ 'XYEquals',
+ 'XYZero',
+ 'XYgrid',
+ 'XZEquals',
+ 'XZZero',
+ 'XZero',
+ 'XZgrid',
+ 'Y',
+ 'YEquals',
+ 'YXgrid',
+ 'YZ',
+ 'YZEquals',
+ 'YZZero',
+ 'YZero',
+ 'YZgrid',
+ 'Z',
+ 'ZX',
+ 'ZXgrid',
+ 'ZYgrid',
+ 'ZapfChancery',
+ 'ZapfDingbats',
+ '_cputime',
+ '_draw',
+ '_eval',
+ '_image',
+ '_labelpath',
+ '_projection',
+ '_strokepath',
+ '_texpath',
+ 'aCos',
+ 'aSin',
+ 'aTan',
+ 'abort',
+ 'abs',
+ 'accel',
+ 'acos',
+ 'acosh',
+ 'acot',
+ 'acsc',
+ 'add',
+ 'addArrow',
+ 'addMargins',
+ 'addSaveFunction',
+ 'addnode',
+ 'addnodes',
+ 'addpenarc',
+ 'addpenline',
+ 'addseg',
+ 'adjust',
+ 'alias',
+ 'align',
+ 'all',
+ 'altitude',
+ 'angabscissa',
+ 'angle',
+ 'angpoint',
+ 'animate',
+ 'annotate',
+ 'anticomplementary',
+ 'antipedal',
+ 'apply',
+ 'approximate',
+ 'arc',
+ 'arcarrowsize',
+ 'arccircle',
+ 'arcdir',
+ 'arcfromcenter',
+ 'arcfromfocus',
+ 'arclength',
+ 'arcnodesnumber',
+ 'arcpoint',
+ 'arcsubtended',
+ 'arcsubtendedcenter',
+ 'arctime',
+ 'arctopath',
+ 'array',
+ 'arrow',
+ 'arrow2',
+ 'arrowbase',
+ 'arrowbasepoints',
+ 'arrowsize',
+ 'asec',
+ 'asin',
+ 'asinh',
+ 'ask',
+ 'assert',
+ 'asy',
+ 'asycode',
+ 'asydir',
+ 'asyfigure',
+ 'asyfilecode',
+ 'asyinclude',
+ 'asywrite',
+ 'atan',
+ 'atan2',
+ 'atanh',
+ 'atbreakpoint',
+ 'atexit',
+ 'atime',
+ 'attach',
+ 'attract',
+ 'atupdate',
+ 'autoformat',
+ 'autoscale',
+ 'autoscale3',
+ 'axes',
+ 'axes3',
+ 'axialshade',
+ 'axis',
+ 'axiscoverage',
+ 'azimuth',
+ 'babel',
+ 'background',
+ 'bangles',
+ 'bar',
+ 'barmarksize',
+ 'barsize',
+ 'basealign',
+ 'baseline',
+ 'bbox',
+ 'beep',
+ 'begin',
+ 'beginclip',
+ 'begingroup',
+ 'beginpoint',
+ 'between',
+ 'bevel',
+ 'bezier',
+ 'bezierP',
+ 'bezierPP',
+ 'bezierPPP',
+ 'bezulate',
+ 'bibliography',
+ 'bibliographystyle',
+ 'binarytree',
+ 'binarytreeNode',
+ 'binomial',
+ 'binput',
+ 'bins',
+ 'bisector',
+ 'bisectorpoint',
+ 'blend',
+ 'boutput',
+ 'box',
+ 'bqe',
+ 'breakpoint',
+ 'breakpoints',
+ 'brick',
+ 'buildRestoreDefaults',
+ 'buildRestoreThunk',
+ 'buildcycle',
+ 'bulletcolor',
+ 'canonical',
+ 'canonicalcartesiansystem',
+ 'cartesiansystem',
+ 'case1',
+ 'case2',
+ 'case3',
+ 'cbrt',
+ 'cd',
+ 'ceil',
+ 'center',
+ 'centerToFocus',
+ 'centroid',
+ 'cevian',
+ 'change2',
+ 'changecoordsys',
+ 'checkSegment',
+ 'checkconditionlength',
+ 'checker',
+ 'checklengths',
+ 'checkposition',
+ 'checktriangle',
+ 'choose',
+ 'circle',
+ 'circlebarframe',
+ 'circlemarkradius',
+ 'circlenodesnumber',
+ 'circumcenter',
+ 'circumcircle',
+ 'clamped',
+ 'clear',
+ 'clip',
+ 'clipdraw',
+ 'close',
+ 'cmyk',
+ 'code',
+ 'colatitude',
+ 'collect',
+ 'collinear',
+ 'color',
+ 'colorless',
+ 'colors',
+ 'colorspace',
+ 'comma',
+ 'compassmark',
+ 'complement',
+ 'complementary',
+ 'concat',
+ 'concurrent',
+ 'cone',
+ 'conic',
+ 'conicnodesnumber',
+ 'conictype',
+ 'conj',
+ 'connect',
+ 'containmentTree',
+ 'contains',
+ 'contour',
+ 'contour3',
+ 'controlSpecifier',
+ 'convert',
+ 'coordinates',
+ 'coordsys',
+ 'copy',
+ 'cos',
+ 'cosh',
+ 'cot',
+ 'countIntersections',
+ 'cputime',
+ 'crop',
+ 'cropcode',
+ 'cross',
+ 'crossframe',
+ 'crosshatch',
+ 'crossmarksize',
+ 'csc',
+ 'cubicroots',
+ 'curabscissa',
+ 'curlSpecifier',
+ 'curpoint',
+ 'currentarrow',
+ 'currentexitfunction',
+ 'currentmomarrow',
+ 'currentpolarconicroutine',
+ 'curve',
+ 'cut',
+ 'cutafter',
+ 'cutbefore',
+ 'cyclic',
+ 'cylinder',
+ 'debugger',
+ 'deconstruct',
+ 'defaultdir',
+ 'defaultformat',
+ 'defaultpen',
+ 'defined',
+ 'degenerate',
+ 'degrees',
+ 'delete',
+ 'deletepreamble',
+ 'determinant',
+ 'diagonal',
+ 'diamond',
+ 'diffdiv',
+ 'dir',
+ 'dirSpecifier',
+ 'dirtime',
+ 'display',
+ 'distance',
+ 'divisors',
+ 'do_overpaint',
+ 'dot',
+ 'dotframe',
+ 'dotsize',
+ 'downcase',
+ 'draw',
+ 'drawAll',
+ 'drawDoubleLine',
+ 'drawFermion',
+ 'drawGhost',
+ 'drawGluon',
+ 'drawMomArrow',
+ 'drawPhoton',
+ 'drawScalar',
+ 'drawVertex',
+ 'drawVertexBox',
+ 'drawVertexBoxO',
+ 'drawVertexBoxX',
+ 'drawVertexO',
+ 'drawVertexOX',
+ 'drawVertexTriangle',
+ 'drawVertexTriangleO',
+ 'drawVertexX',
+ 'drawarrow',
+ 'drawarrow2',
+ 'drawline',
+ 'drawtick',
+ 'duplicate',
+ 'elle',
+ 'ellipse',
+ 'ellipsenodesnumber',
+ 'embed',
+ 'embed3',
+ 'empty',
+ 'enclose',
+ 'end',
+ 'endScript',
+ 'endclip',
+ 'endgroup',
+ 'endl',
+ 'endpoint',
+ 'endpoints',
+ 'eof',
+ 'eol',
+ 'equation',
+ 'equations',
+ 'erase',
+ 'erasestep',
+ 'erf',
+ 'erfc',
+ 'error',
+ 'errorbar',
+ 'errorbars',
+ 'eval',
+ 'excenter',
+ 'excircle',
+ 'exit',
+ 'exitXasyMode',
+ 'exitfunction',
+ 'exp',
+ 'expfactors',
+ 'expi',
+ 'expm1',
+ 'exradius',
+ 'extend',
+ 'extension',
+ 'extouch',
+ 'fabs',
+ 'factorial',
+ 'fermat',
+ 'fft',
+ 'fhorner',
+ 'figure',
+ 'file',
+ 'filecode',
+ 'fill',
+ 'filldraw',
+ 'filloutside',
+ 'fillrule',
+ 'filltype',
+ 'find',
+ 'finite',
+ 'finiteDifferenceJacobian',
+ 'firstcut',
+ 'firstframe',
+ 'fit',
+ 'fit2',
+ 'fixedscaling',
+ 'floor',
+ 'flush',
+ 'fmdefaults',
+ 'fmod',
+ 'focusToCenter',
+ 'font',
+ 'fontcommand',
+ 'fontsize',
+ 'foot',
+ 'format',
+ 'frac',
+ 'frequency',
+ 'fromCenter',
+ 'fromFocus',
+ 'fspline',
+ 'functionshade',
+ 'gamma',
+ 'generate_random_backtrace',
+ 'generateticks',
+ 'gergonne',
+ 'getc',
+ 'getint',
+ 'getpair',
+ 'getreal',
+ 'getstring',
+ 'gettriple',
+ 'gluon',
+ 'gouraudshade',
+ 'graph',
+ 'graphic',
+ 'gray',
+ 'grestore',
+ 'grid',
+ 'grid3',
+ 'gsave',
+ 'halfbox',
+ 'hatch',
+ 'hdiffdiv',
+ 'hermite',
+ 'hex',
+ 'histogram',
+ 'history',
+ 'hline',
+ 'hprojection',
+ 'hsv',
+ 'hyperbola',
+ 'hyperbolanodesnumber',
+ 'hyperlink',
+ 'hypot',
+ 'identity',
+ 'image',
+ 'incenter',
+ 'incentral',
+ 'incircle',
+ 'increasing',
+ 'incrementposition',
+ 'indexedTransform',
+ 'indexedfigure',
+ 'initXasyMode',
+ 'initdefaults',
+ 'input',
+ 'inradius',
+ 'insert',
+ 'inside',
+ 'integrate',
+ 'interactive',
+ 'interior',
+ 'interp',
+ 'interpolate',
+ 'intersect',
+ 'intersection',
+ 'intersectionpoint',
+ 'intersectionpoints',
+ 'intersections',
+ 'intouch',
+ 'inverse',
+ 'inversion',
+ 'invisible',
+ 'is3D',
+ 'isDuplicate',
+ 'isogonal',
+ 'isogonalconjugate',
+ 'isotomic',
+ 'isotomicconjugate',
+ 'isparabola',
+ 'italic',
+ 'item',
+ 'key',
+ 'kurtosis',
+ 'kurtosisexcess',
+ 'label',
+ 'labelaxis',
+ 'labelmargin',
+ 'labelpath',
+ 'labels',
+ 'labeltick',
+ 'labelx',
+ 'labelx3',
+ 'labely',
+ 'labely3',
+ 'labelz',
+ 'labelz3',
+ 'lastcut',
+ 'latex',
+ 'latitude',
+ 'latticeshade',
+ 'layer',
+ 'layout',
+ 'ldexp',
+ 'leastsquares',
+ 'legend',
+ 'legenditem',
+ 'length',
+ 'lift',
+ 'light',
+ 'limits',
+ 'line',
+ 'linear',
+ 'linecap',
+ 'lineinversion',
+ 'linejoin',
+ 'linemargin',
+ 'lineskip',
+ 'linetype',
+ 'linewidth',
+ 'link',
+ 'list',
+ 'lm_enorm',
+ 'lm_evaluate_default',
+ 'lm_lmdif',
+ 'lm_lmpar',
+ 'lm_minimize',
+ 'lm_print_default',
+ 'lm_print_quiet',
+ 'lm_qrfac',
+ 'lm_qrsolv',
+ 'locale',
+ 'locate',
+ 'locatefile',
+ 'location',
+ 'log',
+ 'log10',
+ 'log1p',
+ 'logaxiscoverage',
+ 'longitude',
+ 'lookup',
+ 'magnetize',
+ 'makeNode',
+ 'makedraw',
+ 'makepen',
+ 'map',
+ 'margin',
+ 'markangle',
+ 'markangleradius',
+ 'markanglespace',
+ 'markarc',
+ 'marker',
+ 'markinterval',
+ 'marknodes',
+ 'markrightangle',
+ 'markuniform',
+ 'mass',
+ 'masscenter',
+ 'massformat',
+ 'math',
+ 'max',
+ 'max3',
+ 'maxbezier',
+ 'maxbound',
+ 'maxcoords',
+ 'maxlength',
+ 'maxratio',
+ 'maxtimes',
+ 'mean',
+ 'medial',
+ 'median',
+ 'midpoint',
+ 'min',
+ 'min3',
+ 'minbezier',
+ 'minbound',
+ 'minipage',
+ 'minratio',
+ 'mintimes',
+ 'miterlimit',
+ 'momArrowPath',
+ 'momarrowsize',
+ 'monotonic',
+ 'multifigure',
+ 'nativeformat',
+ 'natural',
+ 'needshipout',
+ 'newl',
+ 'newpage',
+ 'newslide',
+ 'newton',
+ 'newtree',
+ 'nextframe',
+ 'nextnormal',
+ 'nextpage',
+ 'nib',
+ 'nodabscissa',
+ 'none',
+ 'norm',
+ 'normalvideo',
+ 'notaknot',
+ 'nowarn',
+ 'numberpage',
+ 'nurb',
+ 'object',
+ 'offset',
+ 'onpath',
+ 'opacity',
+ 'opposite',
+ 'orientation',
+ 'orig_circlenodesnumber',
+ 'orig_circlenodesnumber1',
+ 'orig_draw',
+ 'orig_ellipsenodesnumber',
+ 'orig_ellipsenodesnumber1',
+ 'orig_hyperbolanodesnumber',
+ 'orig_parabolanodesnumber',
+ 'origin',
+ 'orthic',
+ 'orthocentercenter',
+ 'outformat',
+ 'outline',
+ 'outprefix',
+ 'output',
+ 'overloadedMessage',
+ 'overwrite',
+ 'pack',
+ 'pad',
+ 'pairs',
+ 'palette',
+ 'parabola',
+ 'parabolanodesnumber',
+ 'parallel',
+ 'partialsum',
+ 'path',
+ 'path3',
+ 'pattern',
+ 'pause',
+ 'pdf',
+ 'pedal',
+ 'periodic',
+ 'perp',
+ 'perpendicular',
+ 'perpendicularmark',
+ 'phantom',
+ 'phi1',
+ 'phi2',
+ 'phi3',
+ 'photon',
+ 'piecewisestraight',
+ 'point',
+ 'polar',
+ 'polarconicroutine',
+ 'polargraph',
+ 'polygon',
+ 'postcontrol',
+ 'postscript',
+ 'pow10',
+ 'ppoint',
+ 'prc',
+ 'prc0',
+ 'precision',
+ 'precontrol',
+ 'prepend',
+ 'print_random_addresses',
+ 'project',
+ 'projection',
+ 'purge',
+ 'pwhermite',
+ 'quadrant',
+ 'quadraticroots',
+ 'quantize',
+ 'quarticroots',
+ 'quotient',
+ 'radialshade',
+ 'radians',
+ 'radicalcenter',
+ 'radicalline',
+ 'radius',
+ 'rand',
+ 'randompath',
+ 'rd',
+ 'readline',
+ 'realmult',
+ 'realquarticroots',
+ 'rectangle',
+ 'rectangular',
+ 'rectify',
+ 'reflect',
+ 'relabscissa',
+ 'relative',
+ 'relativedistance',
+ 'reldir',
+ 'relpoint',
+ 'reltime',
+ 'remainder',
+ 'remark',
+ 'removeDuplicates',
+ 'rename',
+ 'replace',
+ 'report',
+ 'resetdefaultpen',
+ 'restore',
+ 'restoredefaults',
+ 'reverse',
+ 'reversevideo',
+ 'rf',
+ 'rfind',
+ 'rgb',
+ 'rgba',
+ 'rgbint',
+ 'rms',
+ 'rotate',
+ 'rotateO',
+ 'rotation',
+ 'round',
+ 'roundbox',
+ 'roundedpath',
+ 'roundrectangle',
+ 'samecoordsys',
+ 'sameside',
+ 'sample',
+ 'save',
+ 'savedefaults',
+ 'saveline',
+ 'scale',
+ 'scale3',
+ 'scaleO',
+ 'scaleT',
+ 'scaleless',
+ 'scientific',
+ 'search',
+ 'searchtree',
+ 'sec',
+ 'secondaryX',
+ 'secondaryY',
+ 'seconds',
+ 'section',
+ 'sector',
+ 'seek',
+ 'seekeof',
+ 'segment',
+ 'sequence',
+ 'setpens',
+ 'sgn',
+ 'sgnd',
+ 'sharpangle',
+ 'sharpdegrees',
+ 'shift',
+ 'shiftless',
+ 'shipout',
+ 'shipout3',
+ 'show',
+ 'side',
+ 'simeq',
+ 'simpson',
+ 'sin',
+ 'single',
+ 'sinh',
+ 'size',
+ 'size3',
+ 'skewness',
+ 'skip',
+ 'slant',
+ 'sleep',
+ 'slope',
+ 'slopefield',
+ 'solve',
+ 'solveBVP',
+ 'sort',
+ 'sourceline',
+ 'sphere',
+ 'split',
+ 'sqrt',
+ 'square',
+ 'srand',
+ 'standardizecoordsys',
+ 'startScript',
+ 'startTrembling',
+ 'stdev',
+ 'step',
+ 'stickframe',
+ 'stickmarksize',
+ 'stickmarkspace',
+ 'stop',
+ 'straight',
+ 'straightness',
+ 'string',
+ 'stripdirectory',
+ 'stripextension',
+ 'stripfile',
+ 'strokepath',
+ 'subdivide',
+ 'subitem',
+ 'subpath',
+ 'substr',
+ 'sum',
+ 'surface',
+ 'symmedial',
+ 'symmedian',
+ 'system',
+ 'tab',
+ 'tableau',
+ 'tan',
+ 'tangent',
+ 'tangential',
+ 'tangents',
+ 'tanh',
+ 'tell',
+ 'tensionSpecifier',
+ 'tensorshade',
+ 'tex',
+ 'texcolor',
+ 'texify',
+ 'texpath',
+ 'texpreamble',
+ 'texreset',
+ 'texshipout',
+ 'texsize',
+ 'textpath',
+ 'thick',
+ 'thin',
+ 'tick',
+ 'tickMax',
+ 'tickMax3',
+ 'tickMin',
+ 'tickMin3',
+ 'ticklabelshift',
+ 'ticklocate',
+ 'tildeframe',
+ 'tildemarksize',
+ 'tile',
+ 'tiling',
+ 'time',
+ 'times',
+ 'title',
+ 'titlepage',
+ 'topbox',
+ 'transform',
+ 'transformation',
+ 'transpose',
+ 'tremble',
+ 'trembleFuzz',
+ 'tremble_circlenodesnumber',
+ 'tremble_circlenodesnumber1',
+ 'tremble_draw',
+ 'tremble_ellipsenodesnumber',
+ 'tremble_ellipsenodesnumber1',
+ 'tremble_hyperbolanodesnumber',
+ 'tremble_marknodes',
+ 'tremble_markuniform',
+ 'tremble_parabolanodesnumber',
+ 'triangle',
+ 'triangleAbc',
+ 'triangleabc',
+ 'triangulate',
+ 'tricoef',
+ 'tridiagonal',
+ 'trilinear',
+ 'trim',
+ 'trueMagnetize',
+ 'truepoint',
+ 'tube',
+ 'uncycle',
+ 'unfill',
+ 'uniform',
+ 'unit',
+ 'unitrand',
+ 'unitsize',
+ 'unityroot',
+ 'unstraighten',
+ 'upcase',
+ 'updatefunction',
+ 'uperiodic',
+ 'upscale',
+ 'uptodate',
+ 'usepackage',
+ 'usersetting',
+ 'usetypescript',
+ 'usleep',
+ 'value',
+ 'variance',
+ 'variancebiased',
+ 'vbox',
+ 'vector',
+ 'vectorfield',
+ 'verbatim',
+ 'view',
+ 'vline',
+ 'vperiodic',
+ 'vprojection',
+ 'warn',
+ 'warning',
+ 'windingnumber',
+ 'write',
+ 'xaxis',
+ 'xaxis3',
+ 'xaxis3At',
+ 'xaxisAt',
+ 'xequals',
+ 'xinput',
+ 'xlimits',
+ 'xoutput',
+ 'xpart',
+ 'xscale',
+ 'xscaleO',
+ 'xtick',
+ 'xtick3',
+ 'xtrans',
+ 'yaxis',
+ 'yaxis3',
+ 'yaxis3At',
+ 'yaxisAt',
+ 'yequals',
+ 'ylimits',
+ 'ypart',
+ 'yscale',
+ 'yscaleO',
+ 'ytick',
+ 'ytick3',
+ 'ytrans',
+ 'zaxis3',
+ 'zaxis3At',
+ 'zero',
+ 'zero3',
+ 'zlimits',
+ 'zpart',
+ 'ztick',
+ 'ztick3',
+ 'ztrans'
}
-
+
ASYVARNAME = {
- 'AliceBlue',
- 'Align',
- 'Allow',
- 'AntiqueWhite',
- 'Apricot',
- 'Aqua',
- 'Aquamarine',
- 'Aspect',
- 'Azure',
- 'BeginPoint',
- 'Beige',
- 'Bisque',
- 'Bittersweet',
- 'Black',
- 'BlanchedAlmond',
- 'Blue',
- 'BlueGreen',
- 'BlueViolet',
- 'Both',
- 'Break',
- 'BrickRed',
- 'Brown',
- 'BurlyWood',
- 'BurntOrange',
- 'CCW',
- 'CW',
- 'CadetBlue',
- 'CarnationPink',
- 'Center',
- 'Centered',
- 'Cerulean',
- 'Chartreuse',
- 'Chocolate',
- 'Coeff',
- 'Coral',
- 'CornflowerBlue',
- 'Cornsilk',
- 'Crimson',
- 'Crop',
- 'Cyan',
- 'Dandelion',
- 'DarkBlue',
- 'DarkCyan',
- 'DarkGoldenrod',
- 'DarkGray',
- 'DarkGreen',
- 'DarkKhaki',
- 'DarkMagenta',
- 'DarkOliveGreen',
- 'DarkOrange',
- 'DarkOrchid',
- 'DarkRed',
- 'DarkSalmon',
- 'DarkSeaGreen',
- 'DarkSlateBlue',
- 'DarkSlateGray',
- 'DarkTurquoise',
- 'DarkViolet',
- 'DeepPink',
- 'DeepSkyBlue',
- 'DefaultHead',
- 'DimGray',
- 'DodgerBlue',
- 'Dotted',
- 'Draw',
- 'E',
- 'ENE',
- 'EPS',
- 'ESE',
- 'E_Euler',
- 'E_PC',
- 'E_RK2',
- 'E_RK3BS',
- 'Emerald',
- 'EndPoint',
- 'Euler',
- 'Fill',
- 'FillDraw',
- 'FireBrick',
- 'FloralWhite',
- 'ForestGreen',
- 'Fuchsia',
- 'Gainsboro',
- 'GhostWhite',
- 'Gold',
- 'Goldenrod',
- 'Gray',
- 'Green',
- 'GreenYellow',
- 'Honeydew',
- 'HookHead',
- 'Horizontal',
- 'HotPink',
- 'I',
- 'IgnoreAspect',
- 'IndianRed',
- 'Indigo',
- 'Ivory',
- 'JOIN_IN',
- 'JOIN_OUT',
- 'JungleGreen',
- 'Khaki',
- 'LM_DWARF',
- 'LM_MACHEP',
- 'LM_SQRT_DWARF',
- 'LM_SQRT_GIANT',
- 'LM_USERTOL',
- 'Label',
- 'Lavender',
- 'LavenderBlush',
- 'LawnGreen',
- 'LeftJustified',
- 'LeftSide',
- 'LemonChiffon',
- 'LightBlue',
- 'LightCoral',
- 'LightCyan',
- 'LightGoldenrodYellow',
- 'LightGreen',
- 'LightGrey',
- 'LightPink',
- 'LightSalmon',
- 'LightSeaGreen',
- 'LightSkyBlue',
- 'LightSlateGray',
- 'LightSteelBlue',
- 'LightYellow',
- 'Lime',
- 'LimeGreen',
- 'Linear',
- 'Linen',
- 'Log',
- 'Logarithmic',
- 'Magenta',
- 'Mahogany',
- 'Mark',
- 'MarkFill',
- 'Maroon',
- 'Max',
- 'MediumAquamarine',
- 'MediumBlue',
- 'MediumOrchid',
- 'MediumPurple',
- 'MediumSeaGreen',
- 'MediumSlateBlue',
- 'MediumSpringGreen',
- 'MediumTurquoise',
- 'MediumVioletRed',
- 'Melon',
- 'MidPoint',
- 'MidnightBlue',
- 'Min',
- 'MintCream',
- 'MistyRose',
- 'Moccasin',
- 'Move',
- 'MoveQuiet',
- 'Mulberry',
- 'N',
- 'NE',
- 'NNE',
- 'NNW',
- 'NW',
- 'NavajoWhite',
- 'Navy',
- 'NavyBlue',
- 'NoAlign',
- 'NoCrop',
- 'NoFill',
- 'NoSide',
- 'OldLace',
- 'Olive',
- 'OliveDrab',
- 'OliveGreen',
- 'Orange',
- 'OrangeRed',
- 'Orchid',
- 'Ox',
- 'Oy',
- 'PC',
- 'PaleGoldenrod',
- 'PaleGreen',
- 'PaleTurquoise',
- 'PaleVioletRed',
- 'PapayaWhip',
- 'Peach',
- 'PeachPuff',
- 'Periwinkle',
- 'Peru',
- 'PineGreen',
- 'Pink',
- 'Plum',
- 'PowderBlue',
- 'ProcessBlue',
- 'Purple',
- 'RK2',
- 'RK3',
- 'RK3BS',
- 'RK4',
- 'RK5',
- 'RK5DP',
- 'RK5F',
- 'RawSienna',
- 'Red',
- 'RedOrange',
- 'RedViolet',
- 'Rhodamine',
- 'RightJustified',
- 'RightSide',
- 'RosyBrown',
- 'RoyalBlue',
- 'RoyalPurple',
- 'RubineRed',
- 'S',
- 'SE',
- 'SSE',
- 'SSW',
- 'SW',
- 'SaddleBrown',
- 'Salmon',
- 'SandyBrown',
- 'SeaGreen',
- 'Seashell',
- 'Sepia',
- 'Sienna',
- 'Silver',
- 'SimpleHead',
- 'SkyBlue',
- 'SlateBlue',
- 'SlateGray',
- 'Snow',
- 'SpringGreen',
- 'SteelBlue',
- 'Suppress',
- 'SuppressQuiet',
- 'Tan',
- 'TeXHead',
- 'Teal',
- 'TealBlue',
- 'Thistle',
- 'Ticksize',
- 'Tomato',
- 'Turquoise',
- 'UnFill',
- 'VERSION',
- 'Value',
- 'Vertical',
- 'Violet',
- 'VioletRed',
- 'W',
- 'WNW',
- 'WSW',
- 'Wheat',
- 'White',
- 'WhiteSmoke',
- 'WildStrawberry',
- 'XYAlign',
- 'YAlign',
- 'Yellow',
- 'YellowGreen',
- 'YellowOrange',
- 'addpenarc',
- 'addpenline',
- 'align',
- 'allowstepping',
- 'angularsystem',
- 'animationdelay',
- 'appendsuffix',
- 'arcarrowangle',
- 'arcarrowfactor',
- 'arrow2sizelimit',
- 'arrowangle',
- 'arrowbarb',
- 'arrowdir',
- 'arrowfactor',
- 'arrowhookfactor',
- 'arrowlength',
- 'arrowsizelimit',
- 'arrowtexfactor',
- 'authorpen',
- 'axis',
- 'axiscoverage',
- 'axislabelfactor',
- 'background',
- 'backgroundcolor',
- 'backgroundpen',
- 'barfactor',
- 'barmarksizefactor',
- 'basealign',
- 'baselinetemplate',
- 'beveljoin',
- 'bigvertexpen',
- 'bigvertexsize',
- 'black',
- 'blue',
- 'bm',
- 'bottom',
- 'bp',
- 'brown',
- 'bullet',
- 'byfoci',
- 'byvertices',
- 'camerafactor',
- 'chartreuse',
- 'circlemarkradiusfactor',
- 'circlenodesnumberfactor',
- 'circleprecision',
- 'circlescale',
- 'cm',
- 'codefile',
- 'codepen',
- 'codeskip',
- 'colorPen',
- 'coloredNodes',
- 'coloredSegments',
- 'conditionlength',
- 'conicnodesfactor',
- 'count',
- 'cputimeformat',
- 'crossmarksizefactor',
- 'currentcoordsys',
- 'currentlight',
- 'currentpatterns',
- 'currentpen',
- 'currentpicture',
- 'currentposition',
- 'currentprojection',
- 'curvilinearsystem',
- 'cuttings',
- 'cyan',
- 'darkblue',
- 'darkbrown',
- 'darkcyan',
- 'darkgray',
- 'darkgreen',
- 'darkgrey',
- 'darkmagenta',
- 'darkolive',
- 'darkred',
- 'dashdotted',
- 'dashed',
- 'datepen',
- 'dateskip',
- 'debuggerlines',
- 'debugging',
- 'deepblue',
- 'deepcyan',
- 'deepgray',
- 'deepgreen',
- 'deepgrey',
- 'deepmagenta',
- 'deepred',
- 'default',
- 'defaultControl',
- 'defaultS',
- 'defaultbackpen',
- 'defaultcoordsys',
- 'defaultfilename',
- 'defaultformat',
- 'defaultmassformat',
- 'defaultpen',
- 'diagnostics',
- 'differentlengths',
- 'dot',
- 'dotfactor',
- 'dotframe',
- 'dotted',
- 'doublelinepen',
- 'doublelinespacing',
- 'down',
- 'duplicateFuzz',
- 'ellipsenodesnumberfactor',
- 'eps',
- 'epsgeo',
- 'epsilon',
- 'evenodd',
- 'extendcap',
- 'fermionpen',
- 'figureborder',
- 'figuremattpen',
- 'firstnode',
- 'firststep',
- 'foregroundcolor',
- 'fuchsia',
- 'fuzz',
- 'gapfactor',
- 'ghostpen',
- 'gluonamplitude',
- 'gluonpen',
- 'gluonratio',
- 'gray',
- 'green',
- 'grey',
- 'hatchepsilon',
- 'havepagenumber',
- 'heavyblue',
- 'heavycyan',
- 'heavygray',
- 'heavygreen',
- 'heavygrey',
- 'heavymagenta',
- 'heavyred',
- 'hline',
- 'hwratio',
- 'hyperbolanodesnumberfactor',
- 'identity4',
- 'ignore',
- 'inXasyMode',
- 'inch',
- 'inches',
- 'includegraphicscommand',
- 'inf',
- 'infinity',
- 'institutionpen',
- 'intMax',
- 'intMin',
- 'invert',
- 'invisible',
- 'itempen',
- 'itemskip',
- 'itemstep',
- 'labelmargin',
- 'landscape',
- 'lastnode',
- 'left',
- 'legendhskip',
- 'legendlinelength',
- 'legendmargin',
- 'legendmarkersize',
- 'legendmaxrelativewidth',
- 'legendvskip',
- 'lightblue',
- 'lightcyan',
- 'lightgray',
- 'lightgreen',
- 'lightgrey',
- 'lightmagenta',
- 'lightolive',
- 'lightred',
- 'lightyellow',
- 'linemargin',
- 'lm_infmsg',
- 'lm_shortmsg',
- 'longdashdotted',
- 'longdashed',
- 'magenta',
- 'magneticPoints',
- 'magneticRadius',
- 'mantissaBits',
- 'markangleradius',
- 'markangleradiusfactor',
- 'markanglespace',
- 'markanglespacefactor',
- 'mediumblue',
- 'mediumcyan',
- 'mediumgray',
- 'mediumgreen',
- 'mediumgrey',
- 'mediummagenta',
- 'mediumred',
- 'mediumyellow',
- 'middle',
- 'minDistDefault',
- 'minblockheight',
- 'minblockwidth',
- 'mincirclediameter',
- 'minipagemargin',
- 'minipagewidth',
- 'minvertexangle',
- 'miterjoin',
- 'mm',
- 'momarrowfactor',
- 'momarrowlength',
- 'momarrowmargin',
- 'momarrowoffset',
- 'momarrowpen',
- 'monoPen',
- 'morepoints',
- 'nCircle',
- 'newbulletcolor',
- 'ngraph',
- 'nil',
- 'nmesh',
- 'nobasealign',
- 'nodeMarginDefault',
- 'nodesystem',
- 'nomarker',
- 'nopoint',
- 'noprimary',
- 'nullpath',
- 'nullpen',
- 'numarray',
- 'ocgindex',
- 'oldbulletcolor',
- 'olive',
- 'orange',
- 'origin',
- 'overpaint',
- 'page',
- 'pageheight',
- 'pagemargin',
- 'pagenumberalign',
- 'pagenumberpen',
- 'pagenumberposition',
- 'pagewidth',
- 'paleblue',
- 'palecyan',
- 'palegray',
- 'palegreen',
- 'palegrey',
- 'palemagenta',
- 'palered',
- 'paleyellow',
- 'parabolanodesnumberfactor',
- 'perpfactor',
- 'phi',
- 'photonamplitude',
- 'photonpen',
- 'photonratio',
- 'pi',
- 'pink',
- 'plain',
- 'plus',
- 'preamblenodes',
- 'pt',
- 'purple',
- 'r3',
- 'r4a',
- 'r4b',
- 'randMax',
- 'realDigits',
- 'realEpsilon',
- 'realMax',
- 'realMin',
- 'red',
- 'relativesystem',
- 'reverse',
- 'right',
- 'roundcap',
- 'roundjoin',
- 'royalblue',
- 'salmon',
- 'saveFunctions',
- 'scalarpen',
- 'sequencereal',
- 'settings',
- 'shipped',
- 'signedtrailingzero',
- 'solid',
- 'springgreen',
- 'sqrtEpsilon',
- 'squarecap',
- 'squarepen',
- 'startposition',
- 'stdin',
- 'stdout',
- 'stepfactor',
- 'stepfraction',
- 'steppagenumberpen',
- 'stepping',
- 'stickframe',
- 'stickmarksizefactor',
- 'stickmarkspacefactor',
- 'textpen',
- 'ticksize',
- 'tildeframe',
- 'tildemarksizefactor',
- 'tinv',
- 'titlealign',
- 'titlepagepen',
- 'titlepageposition',
- 'titlepen',
- 'titleskip',
- 'top',
- 'trailingzero',
- 'treeLevelStep',
- 'treeMinNodeWidth',
- 'treeNodeStep',
- 'trembleAngle',
- 'trembleFrequency',
- 'trembleRandom',
- 'tremblingMode',
- 'undefined',
- 'unitcircle',
- 'unitsquare',
- 'up',
- 'urlpen',
- 'urlskip',
- 'version',
- 'vertexpen',
- 'vertexsize',
- 'viewportmargin',
- 'viewportsize',
- 'vline',
- 'white',
- 'wye',
- 'xformStack',
- 'yellow',
- 'ylabelwidth',
- 'zerotickfuzz',
- 'zerowinding'
+ 'AliceBlue',
+ 'Align',
+ 'Allow',
+ 'AntiqueWhite',
+ 'Apricot',
+ 'Aqua',
+ 'Aquamarine',
+ 'Aspect',
+ 'Azure',
+ 'BeginPoint',
+ 'Beige',
+ 'Bisque',
+ 'Bittersweet',
+ 'Black',
+ 'BlanchedAlmond',
+ 'Blue',
+ 'BlueGreen',
+ 'BlueViolet',
+ 'Both',
+ 'Break',
+ 'BrickRed',
+ 'Brown',
+ 'BurlyWood',
+ 'BurntOrange',
+ 'CCW',
+ 'CW',
+ 'CadetBlue',
+ 'CarnationPink',
+ 'Center',
+ 'Centered',
+ 'Cerulean',
+ 'Chartreuse',
+ 'Chocolate',
+ 'Coeff',
+ 'Coral',
+ 'CornflowerBlue',
+ 'Cornsilk',
+ 'Crimson',
+ 'Crop',
+ 'Cyan',
+ 'Dandelion',
+ 'DarkBlue',
+ 'DarkCyan',
+ 'DarkGoldenrod',
+ 'DarkGray',
+ 'DarkGreen',
+ 'DarkKhaki',
+ 'DarkMagenta',
+ 'DarkOliveGreen',
+ 'DarkOrange',
+ 'DarkOrchid',
+ 'DarkRed',
+ 'DarkSalmon',
+ 'DarkSeaGreen',
+ 'DarkSlateBlue',
+ 'DarkSlateGray',
+ 'DarkTurquoise',
+ 'DarkViolet',
+ 'DeepPink',
+ 'DeepSkyBlue',
+ 'DefaultHead',
+ 'DimGray',
+ 'DodgerBlue',
+ 'Dotted',
+ 'Draw',
+ 'E',
+ 'ENE',
+ 'EPS',
+ 'ESE',
+ 'E_Euler',
+ 'E_PC',
+ 'E_RK2',
+ 'E_RK3BS',
+ 'Emerald',
+ 'EndPoint',
+ 'Euler',
+ 'Fill',
+ 'FillDraw',
+ 'FireBrick',
+ 'FloralWhite',
+ 'ForestGreen',
+ 'Fuchsia',
+ 'Gainsboro',
+ 'GhostWhite',
+ 'Gold',
+ 'Goldenrod',
+ 'Gray',
+ 'Green',
+ 'GreenYellow',
+ 'Honeydew',
+ 'HookHead',
+ 'Horizontal',
+ 'HotPink',
+ 'I',
+ 'IgnoreAspect',
+ 'IndianRed',
+ 'Indigo',
+ 'Ivory',
+ 'JOIN_IN',
+ 'JOIN_OUT',
+ 'JungleGreen',
+ 'Khaki',
+ 'LM_DWARF',
+ 'LM_MACHEP',
+ 'LM_SQRT_DWARF',
+ 'LM_SQRT_GIANT',
+ 'LM_USERTOL',
+ 'Label',
+ 'Lavender',
+ 'LavenderBlush',
+ 'LawnGreen',
+ 'LeftJustified',
+ 'LeftSide',
+ 'LemonChiffon',
+ 'LightBlue',
+ 'LightCoral',
+ 'LightCyan',
+ 'LightGoldenrodYellow',
+ 'LightGreen',
+ 'LightGrey',
+ 'LightPink',
+ 'LightSalmon',
+ 'LightSeaGreen',
+ 'LightSkyBlue',
+ 'LightSlateGray',
+ 'LightSteelBlue',
+ 'LightYellow',
+ 'Lime',
+ 'LimeGreen',
+ 'Linear',
+ 'Linen',
+ 'Log',
+ 'Logarithmic',
+ 'Magenta',
+ 'Mahogany',
+ 'Mark',
+ 'MarkFill',
+ 'Maroon',
+ 'Max',
+ 'MediumAquamarine',
+ 'MediumBlue',
+ 'MediumOrchid',
+ 'MediumPurple',
+ 'MediumSeaGreen',
+ 'MediumSlateBlue',
+ 'MediumSpringGreen',
+ 'MediumTurquoise',
+ 'MediumVioletRed',
+ 'Melon',
+ 'MidPoint',
+ 'MidnightBlue',
+ 'Min',
+ 'MintCream',
+ 'MistyRose',
+ 'Moccasin',
+ 'Move',
+ 'MoveQuiet',
+ 'Mulberry',
+ 'N',
+ 'NE',
+ 'NNE',
+ 'NNW',
+ 'NW',
+ 'NavajoWhite',
+ 'Navy',
+ 'NavyBlue',
+ 'NoAlign',
+ 'NoCrop',
+ 'NoFill',
+ 'NoSide',
+ 'OldLace',
+ 'Olive',
+ 'OliveDrab',
+ 'OliveGreen',
+ 'Orange',
+ 'OrangeRed',
+ 'Orchid',
+ 'Ox',
+ 'Oy',
+ 'PC',
+ 'PaleGoldenrod',
+ 'PaleGreen',
+ 'PaleTurquoise',
+ 'PaleVioletRed',
+ 'PapayaWhip',
+ 'Peach',
+ 'PeachPuff',
+ 'Periwinkle',
+ 'Peru',
+ 'PineGreen',
+ 'Pink',
+ 'Plum',
+ 'PowderBlue',
+ 'ProcessBlue',
+ 'Purple',
+ 'RK2',
+ 'RK3',
+ 'RK3BS',
+ 'RK4',
+ 'RK5',
+ 'RK5DP',
+ 'RK5F',
+ 'RawSienna',
+ 'Red',
+ 'RedOrange',
+ 'RedViolet',
+ 'Rhodamine',
+ 'RightJustified',
+ 'RightSide',
+ 'RosyBrown',
+ 'RoyalBlue',
+ 'RoyalPurple',
+ 'RubineRed',
+ 'S',
+ 'SE',
+ 'SSE',
+ 'SSW',
+ 'SW',
+ 'SaddleBrown',
+ 'Salmon',
+ 'SandyBrown',
+ 'SeaGreen',
+ 'Seashell',
+ 'Sepia',
+ 'Sienna',
+ 'Silver',
+ 'SimpleHead',
+ 'SkyBlue',
+ 'SlateBlue',
+ 'SlateGray',
+ 'Snow',
+ 'SpringGreen',
+ 'SteelBlue',
+ 'Suppress',
+ 'SuppressQuiet',
+ 'Tan',
+ 'TeXHead',
+ 'Teal',
+ 'TealBlue',
+ 'Thistle',
+ 'Ticksize',
+ 'Tomato',
+ 'Turquoise',
+ 'UnFill',
+ 'VERSION',
+ 'Value',
+ 'Vertical',
+ 'Violet',
+ 'VioletRed',
+ 'W',
+ 'WNW',
+ 'WSW',
+ 'Wheat',
+ 'White',
+ 'WhiteSmoke',
+ 'WildStrawberry',
+ 'XYAlign',
+ 'YAlign',
+ 'Yellow',
+ 'YellowGreen',
+ 'YellowOrange',
+ 'addpenarc',
+ 'addpenline',
+ 'align',
+ 'allowstepping',
+ 'angularsystem',
+ 'animationdelay',
+ 'appendsuffix',
+ 'arcarrowangle',
+ 'arcarrowfactor',
+ 'arrow2sizelimit',
+ 'arrowangle',
+ 'arrowbarb',
+ 'arrowdir',
+ 'arrowfactor',
+ 'arrowhookfactor',
+ 'arrowlength',
+ 'arrowsizelimit',
+ 'arrowtexfactor',
+ 'authorpen',
+ 'axis',
+ 'axiscoverage',
+ 'axislabelfactor',
+ 'background',
+ 'backgroundcolor',
+ 'backgroundpen',
+ 'barfactor',
+ 'barmarksizefactor',
+ 'basealign',
+ 'baselinetemplate',
+ 'beveljoin',
+ 'bigvertexpen',
+ 'bigvertexsize',
+ 'black',
+ 'blue',
+ 'bm',
+ 'bottom',
+ 'bp',
+ 'brown',
+ 'bullet',
+ 'byfoci',
+ 'byvertices',
+ 'camerafactor',
+ 'chartreuse',
+ 'circlemarkradiusfactor',
+ 'circlenodesnumberfactor',
+ 'circleprecision',
+ 'circlescale',
+ 'cm',
+ 'codefile',
+ 'codepen',
+ 'codeskip',
+ 'colorPen',
+ 'coloredNodes',
+ 'coloredSegments',
+ 'conditionlength',
+ 'conicnodesfactor',
+ 'count',
+ 'cputimeformat',
+ 'crossmarksizefactor',
+ 'currentcoordsys',
+ 'currentlight',
+ 'currentpatterns',
+ 'currentpen',
+ 'currentpicture',
+ 'currentposition',
+ 'currentprojection',
+ 'curvilinearsystem',
+ 'cuttings',
+ 'cyan',
+ 'darkblue',
+ 'darkbrown',
+ 'darkcyan',
+ 'darkgray',
+ 'darkgreen',
+ 'darkgrey',
+ 'darkmagenta',
+ 'darkolive',
+ 'darkred',
+ 'dashdotted',
+ 'dashed',
+ 'datepen',
+ 'dateskip',
+ 'debuggerlines',
+ 'debugging',
+ 'deepblue',
+ 'deepcyan',
+ 'deepgray',
+ 'deepgreen',
+ 'deepgrey',
+ 'deepmagenta',
+ 'deepred',
+ 'default',
+ 'defaultControl',
+ 'defaultS',
+ 'defaultbackpen',
+ 'defaultcoordsys',
+ 'defaultfilename',
+ 'defaultformat',
+ 'defaultmassformat',
+ 'defaultpen',
+ 'diagnostics',
+ 'differentlengths',
+ 'dot',
+ 'dotfactor',
+ 'dotframe',
+ 'dotted',
+ 'doublelinepen',
+ 'doublelinespacing',
+ 'down',
+ 'duplicateFuzz',
+ 'ellipsenodesnumberfactor',
+ 'eps',
+ 'epsgeo',
+ 'epsilon',
+ 'evenodd',
+ 'extendcap',
+ 'fermionpen',
+ 'figureborder',
+ 'figuremattpen',
+ 'firstnode',
+ 'firststep',
+ 'foregroundcolor',
+ 'fuchsia',
+ 'fuzz',
+ 'gapfactor',
+ 'ghostpen',
+ 'gluonamplitude',
+ 'gluonpen',
+ 'gluonratio',
+ 'gray',
+ 'green',
+ 'grey',
+ 'hatchepsilon',
+ 'havepagenumber',
+ 'heavyblue',
+ 'heavycyan',
+ 'heavygray',
+ 'heavygreen',
+ 'heavygrey',
+ 'heavymagenta',
+ 'heavyred',
+ 'hline',
+ 'hwratio',
+ 'hyperbolanodesnumberfactor',
+ 'identity4',
+ 'ignore',
+ 'inXasyMode',
+ 'inch',
+ 'inches',
+ 'includegraphicscommand',
+ 'inf',
+ 'infinity',
+ 'institutionpen',
+ 'intMax',
+ 'intMin',
+ 'invert',
+ 'invisible',
+ 'itempen',
+ 'itemskip',
+ 'itemstep',
+ 'labelmargin',
+ 'landscape',
+ 'lastnode',
+ 'left',
+ 'legendhskip',
+ 'legendlinelength',
+ 'legendmargin',
+ 'legendmarkersize',
+ 'legendmaxrelativewidth',
+ 'legendvskip',
+ 'lightblue',
+ 'lightcyan',
+ 'lightgray',
+ 'lightgreen',
+ 'lightgrey',
+ 'lightmagenta',
+ 'lightolive',
+ 'lightred',
+ 'lightyellow',
+ 'linemargin',
+ 'lm_infmsg',
+ 'lm_shortmsg',
+ 'longdashdotted',
+ 'longdashed',
+ 'magenta',
+ 'magneticPoints',
+ 'magneticRadius',
+ 'mantissaBits',
+ 'markangleradius',
+ 'markangleradiusfactor',
+ 'markanglespace',
+ 'markanglespacefactor',
+ 'mediumblue',
+ 'mediumcyan',
+ 'mediumgray',
+ 'mediumgreen',
+ 'mediumgrey',
+ 'mediummagenta',
+ 'mediumred',
+ 'mediumyellow',
+ 'middle',
+ 'minDistDefault',
+ 'minblockheight',
+ 'minblockwidth',
+ 'mincirclediameter',
+ 'minipagemargin',
+ 'minipagewidth',
+ 'minvertexangle',
+ 'miterjoin',
+ 'mm',
+ 'momarrowfactor',
+ 'momarrowlength',
+ 'momarrowmargin',
+ 'momarrowoffset',
+ 'momarrowpen',
+ 'monoPen',
+ 'morepoints',
+ 'nCircle',
+ 'newbulletcolor',
+ 'ngraph',
+ 'nil',
+ 'nmesh',
+ 'nobasealign',
+ 'nodeMarginDefault',
+ 'nodesystem',
+ 'nomarker',
+ 'nopoint',
+ 'noprimary',
+ 'nullpath',
+ 'nullpen',
+ 'numarray',
+ 'ocgindex',
+ 'oldbulletcolor',
+ 'olive',
+ 'orange',
+ 'origin',
+ 'overpaint',
+ 'page',
+ 'pageheight',
+ 'pagemargin',
+ 'pagenumberalign',
+ 'pagenumberpen',
+ 'pagenumberposition',
+ 'pagewidth',
+ 'paleblue',
+ 'palecyan',
+ 'palegray',
+ 'palegreen',
+ 'palegrey',
+ 'palemagenta',
+ 'palered',
+ 'paleyellow',
+ 'parabolanodesnumberfactor',
+ 'perpfactor',
+ 'phi',
+ 'photonamplitude',
+ 'photonpen',
+ 'photonratio',
+ 'pi',
+ 'pink',
+ 'plain',
+ 'plus',
+ 'preamblenodes',
+ 'pt',
+ 'purple',
+ 'r3',
+ 'r4a',
+ 'r4b',
+ 'randMax',
+ 'realDigits',
+ 'realEpsilon',
+ 'realMax',
+ 'realMin',
+ 'red',
+ 'relativesystem',
+ 'reverse',
+ 'right',
+ 'roundcap',
+ 'roundjoin',
+ 'royalblue',
+ 'salmon',
+ 'saveFunctions',
+ 'scalarpen',
+ 'sequencereal',
+ 'settings',
+ 'shipped',
+ 'signedtrailingzero',
+ 'solid',
+ 'springgreen',
+ 'sqrtEpsilon',
+ 'squarecap',
+ 'squarepen',
+ 'startposition',
+ 'stdin',
+ 'stdout',
+ 'stepfactor',
+ 'stepfraction',
+ 'steppagenumberpen',
+ 'stepping',
+ 'stickframe',
+ 'stickmarksizefactor',
+ 'stickmarkspacefactor',
+ 'textpen',
+ 'ticksize',
+ 'tildeframe',
+ 'tildemarksizefactor',
+ 'tinv',
+ 'titlealign',
+ 'titlepagepen',
+ 'titlepageposition',
+ 'titlepen',
+ 'titleskip',
+ 'top',
+ 'trailingzero',
+ 'treeLevelStep',
+ 'treeMinNodeWidth',
+ 'treeNodeStep',
+ 'trembleAngle',
+ 'trembleFrequency',
+ 'trembleRandom',
+ 'tremblingMode',
+ 'undefined',
+ 'unitcircle',
+ 'unitsquare',
+ 'up',
+ 'urlpen',
+ 'urlskip',
+ 'version',
+ 'vertexpen',
+ 'vertexsize',
+ 'viewportmargin',
+ 'viewportsize',
+ 'vline',
+ 'white',
+ 'wye',
+ 'xformStack',
+ 'yellow',
+ 'ylabelwidth',
+ 'zerotickfuzz',
+ 'zerowinding'
}
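
Sets like ASYFUNCNAME and ASYVARNAME are plain membership tables; the usual Pygments pattern is to tokenize identifiers generically and then re-tag the ones found in such a set. A hedged sketch of that pattern follows (a toy lexer written for illustration, not the actual AsymptoteLexer code):

    from pygments.lexer import RegexLexer
    from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
    from pygments.token import Name, Text

    class TinyAsyNameLexer(RegexLexer):
        """Toy lexer: re-tag bare identifiers against the builtin sets."""
        name = 'TinyAsyName'
        tokens = {
            'root': [
                (r'[A-Za-z_][A-Za-z0-9_]*', Name),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }

        def get_tokens_unprocessed(self, text):
            for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in ASYFUNCNAME:
                    yield index, Name.Builtin, value       # builtin function
                elif token is Name and value in ASYVARNAME:
                    yield index, Name.Variable, value      # builtin variable
                else:
                    yield index, token, value

    # 'draw' is in ASYFUNCNAME, 'unitcircle' in ASYVARNAME
    print(list(TinyAsyNameLexer().get_tokens('draw(unitcircle);')))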
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py
index 7722e81f53..1318551242 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_cl_builtins.py
@@ -1,232 +1,232 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._cl_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- ANSI Common Lisp builtins.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._cl_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ ANSI Common Lisp builtins.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
BUILTIN_FUNCTIONS = { # 638 functions
- '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
- 'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
- 'adjustable-array-p', 'adjust-array', 'allocate-instance',
- 'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
- 'apropos-list', 'aref', 'arithmetic-error-operands',
- 'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
- 'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
- 'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
- 'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
- 'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
- 'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
- 'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
- 'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
- 'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
- 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
- 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
- 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
- 'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
- 'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
- 'characterp', 'char-code', 'char-downcase', 'char-equal',
- 'char-greaterp', 'char-int', 'char-lessp', 'char-name',
- 'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
- 'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
- 'close', 'clrhash', 'code-char', 'coerce', 'compile',
- 'compiled-function-p', 'compile-file', 'compile-file-pathname',
- 'compiler-macro-function', 'complement', 'complex', 'complexp',
- 'compute-applicable-methods', 'compute-restarts', 'concatenate',
- 'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
- 'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
- 'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
- 'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
- 'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
- 'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
- 'delete-package', 'denominator', 'deposit-field', 'describe',
- 'describe-object', 'digit-char', 'digit-char-p', 'directory',
- 'directory-namestring', 'disassemble', 'documentation', 'dpb',
- 'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
- 'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
- 'enough-namestring', 'ensure-directories-exist',
- 'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
- 'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
- 'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
- 'file-error-pathname', 'file-length', 'file-namestring',
- 'file-position', 'file-string-length', 'file-write-date',
- 'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
- 'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
- 'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
- 'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
- 'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
- 'fround', 'ftruncate', 'funcall', 'function-keywords',
- 'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
- 'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
- 'gethash', 'get-internal-real-time', 'get-internal-run-time',
- 'get-macro-character', 'get-output-stream-string', 'get-properties',
- 'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
- 'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
- 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
- 'host-namestring', 'identity', 'imagpart', 'import',
- 'initialize-instance', 'input-stream-p', 'inspect',
- 'integer-decode-float', 'integer-length', 'integerp',
- 'interactive-stream-p', 'intern', 'intersection',
- 'invalid-method-error', 'invoke-debugger', 'invoke-restart',
- 'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
- 'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
- 'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
- 'listen', 'list-length', 'listp', 'load',
- 'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
- 'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
- 'logical-pathname-translations', 'logior', 'lognand', 'lognor',
- 'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
- 'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
- 'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
- 'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
- 'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
- 'make-instance', 'make-instances-obsolete', 'make-list',
- 'make-load-form', 'make-load-form-saving-slots', 'make-package',
- 'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
- 'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
- 'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
- 'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
- 'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
- 'merge', 'merge-pathnames', 'method-combination-error',
- 'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
- 'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
- 'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
- 'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
- 'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
- 'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
- 'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
- 'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
- 'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
- 'package-name', 'package-nicknames', 'packagep',
- 'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
- 'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
- 'pathname-device', 'pathname-directory', 'pathname-host',
- 'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
- 'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
- 'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
- 'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
- 'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
- 'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
- 'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
- 'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
- 'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
- 'read-from-string', 'read-line', 'read-preserving-whitespace',
- 'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
- 'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
- 'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
- 'remprop', 'rename-file', 'rename-package', 'replace', 'require',
- 'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
- 'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
- 'search', 'second', 'set', 'set-difference',
- 'set-dispatch-macro-character', 'set-exclusive-or',
- 'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
- 'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
- 'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
- 'simple-condition-format-arguments', 'simple-condition-format-control',
- 'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
- 'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
- 'slot-unbound', 'slot-value', 'software-type', 'software-version',
- 'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
- 'standard-char-p', 'store-value', 'stream-element-type',
- 'stream-error-stream', 'stream-external-format', 'streamp', 'string',
- 'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
- 'string-capitalize', 'string-downcase', 'string-equal',
- 'string-greaterp', 'string-left-trim', 'string-lessp',
- 'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
- 'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
- 'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
- 'substitute', 'substitute-if', 'substitute-if-not', 'subtypep','svref',
- 'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
- 'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
- 'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
- 'translate-logical-pathname', 'translate-pathname', 'tree-equal',
- 'truename', 'truncate', 'two-way-stream-input-stream',
- 'two-way-stream-output-stream', 'type-error-datum',
- 'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
- 'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
- 'update-instance-for-different-class',
- 'update-instance-for-redefined-class', 'upgraded-array-element-type',
- 'upgraded-complex-part-type', 'upper-case-p', 'use-package',
- 'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
- 'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
- 'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
- 'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
- 'y-or-n-p', 'zerop',
+ '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
+ 'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
+ 'adjustable-array-p', 'adjust-array', 'allocate-instance',
+ 'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
+ 'apropos-list', 'aref', 'arithmetic-error-operands',
+ 'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
+ 'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
+ 'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
+ 'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
+ 'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
+ 'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
+ 'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
+ 'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
+ 'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
+ 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
+ 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
+ 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
+ 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
+ 'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
+ 'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
+ 'characterp', 'char-code', 'char-downcase', 'char-equal',
+ 'char-greaterp', 'char-int', 'char-lessp', 'char-name',
+ 'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
+ 'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
+ 'close', 'clrhash', 'code-char', 'coerce', 'compile',
+ 'compiled-function-p', 'compile-file', 'compile-file-pathname',
+ 'compiler-macro-function', 'complement', 'complex', 'complexp',
+ 'compute-applicable-methods', 'compute-restarts', 'concatenate',
+ 'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
+ 'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
+ 'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
+ 'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
+ 'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
+ 'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
+ 'delete-package', 'denominator', 'deposit-field', 'describe',
+ 'describe-object', 'digit-char', 'digit-char-p', 'directory',
+ 'directory-namestring', 'disassemble', 'documentation', 'dpb',
+ 'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
+ 'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
+ 'enough-namestring', 'ensure-directories-exist',
+ 'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
+ 'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
+ 'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
+ 'file-error-pathname', 'file-length', 'file-namestring',
+ 'file-position', 'file-string-length', 'file-write-date',
+ 'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
+ 'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
+ 'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
+ 'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
+ 'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
+ 'fround', 'ftruncate', 'funcall', 'function-keywords',
+ 'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
+ 'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
+ 'gethash', 'get-internal-real-time', 'get-internal-run-time',
+ 'get-macro-character', 'get-output-stream-string', 'get-properties',
+ 'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
+ 'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
+ 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
+ 'host-namestring', 'identity', 'imagpart', 'import',
+ 'initialize-instance', 'input-stream-p', 'inspect',
+ 'integer-decode-float', 'integer-length', 'integerp',
+ 'interactive-stream-p', 'intern', 'intersection',
+ 'invalid-method-error', 'invoke-debugger', 'invoke-restart',
+ 'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
+ 'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
+ 'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
+ 'listen', 'list-length', 'listp', 'load',
+ 'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
+ 'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
+ 'logical-pathname-translations', 'logior', 'lognand', 'lognor',
+ 'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
+ 'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
+ 'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
+ 'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
+ 'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
+ 'make-instance', 'make-instances-obsolete', 'make-list',
+ 'make-load-form', 'make-load-form-saving-slots', 'make-package',
+ 'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
+ 'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
+ 'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
+ 'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
+ 'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
+ 'merge', 'merge-pathnames', 'method-combination-error',
+ 'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
+ 'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
+ 'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
+ 'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
+ 'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
+ 'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
+ 'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
+ 'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
+ 'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
+ 'package-name', 'package-nicknames', 'packagep',
+ 'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
+ 'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
+ 'pathname-device', 'pathname-directory', 'pathname-host',
+ 'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
+ 'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
+ 'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
+ 'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
+ 'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
+ 'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
+ 'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
+ 'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
+ 'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
+ 'read-from-string', 'read-line', 'read-preserving-whitespace',
+ 'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
+ 'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
+ 'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
+ 'remprop', 'rename-file', 'rename-package', 'replace', 'require',
+ 'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
+ 'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
+ 'search', 'second', 'set', 'set-difference',
+ 'set-dispatch-macro-character', 'set-exclusive-or',
+ 'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
+ 'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
+ 'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
+ 'simple-condition-format-arguments', 'simple-condition-format-control',
+ 'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
+ 'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
+ 'slot-unbound', 'slot-value', 'software-type', 'software-version',
+ 'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
+ 'standard-char-p', 'store-value', 'stream-element-type',
+ 'stream-error-stream', 'stream-external-format', 'streamp', 'string',
+ 'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
+ 'string-capitalize', 'string-downcase', 'string-equal',
+ 'string-greaterp', 'string-left-trim', 'string-lessp',
+ 'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
+ 'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
+ 'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
+ 'substitute', 'substitute-if', 'substitute-if-not', 'subtypep','svref',
+ 'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
+ 'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
+ 'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
+ 'translate-logical-pathname', 'translate-pathname', 'tree-equal',
+ 'truename', 'truncate', 'two-way-stream-input-stream',
+ 'two-way-stream-output-stream', 'type-error-datum',
+ 'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
+ 'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
+ 'update-instance-for-different-class',
+ 'update-instance-for-redefined-class', 'upgraded-array-element-type',
+ 'upgraded-complex-part-type', 'upper-case-p', 'use-package',
+ 'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
+ 'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
+ 'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
+ 'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
+ 'y-or-n-p', 'zerop',
}
-
+
SPECIAL_FORMS = {
- 'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
- 'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
- 'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
- 'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
- 'unwind-protect',
+ 'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
+ 'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
+ 'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
+ 'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
+ 'unwind-protect',
}
-
+
MACROS = {
- 'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
- 'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
- 'define-compiler-macro', 'define-condition', 'define-method-combination',
- 'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
- 'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
- 'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
- 'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
- 'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
- 'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
- 'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
- 'multiple-value-setq', 'nth-value', 'or', 'pop',
- 'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
- 'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
- 'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
- 'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
- 'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
- 'with-condition-restarts', 'with-hash-table-iterator',
- 'with-input-from-string', 'with-open-file', 'with-open-stream',
- 'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
- 'with-slots', 'with-standard-io-syntax',
+ 'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
+ 'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
+ 'define-compiler-macro', 'define-condition', 'define-method-combination',
+ 'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
+ 'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
+ 'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
+ 'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
+ 'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
+ 'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
+ 'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
+ 'multiple-value-setq', 'nth-value', 'or', 'pop',
+ 'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
+ 'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
+ 'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
+ 'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
+ 'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
+ 'with-condition-restarts', 'with-hash-table-iterator',
+ 'with-input-from-string', 'with-open-file', 'with-open-stream',
+ 'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
+ 'with-slots', 'with-standard-io-syntax',
}
-
+
LAMBDA_LIST_KEYWORDS = {
- '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
- '&rest', '&whole',
+ '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
+ '&rest', '&whole',
}
-
+
DECLARATIONS = {
- 'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
- 'ignorable', 'notinline', 'type',
+ 'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
+ 'ignorable', 'notinline', 'type',
}
-
+
BUILTIN_TYPES = {
- 'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
- 'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
- 'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
- 'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
- 'simple-vector', 'standard-char', 'unsigned-byte',
-
- # Condition Types
- 'arithmetic-error', 'cell-error', 'condition', 'control-error',
- 'division-by-zero', 'end-of-file', 'error', 'file-error',
- 'floating-point-inexact', 'floating-point-overflow',
- 'floating-point-underflow', 'floating-point-invalid-operation',
- 'parse-error', 'package-error', 'print-not-readable', 'program-error',
- 'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
- 'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
- 'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
- 'undefined-function', 'warning',
+ 'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
+ 'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
+ 'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
+ 'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
+ 'simple-vector', 'standard-char', 'unsigned-byte',
+
+ # Condition Types
+ 'arithmetic-error', 'cell-error', 'condition', 'control-error',
+ 'division-by-zero', 'end-of-file', 'error', 'file-error',
+ 'floating-point-inexact', 'floating-point-overflow',
+ 'floating-point-underflow', 'floating-point-invalid-operation',
+ 'parse-error', 'package-error', 'print-not-readable', 'program-error',
+ 'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
+ 'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
+ 'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
+ 'undefined-function', 'warning',
}
-
+
BUILTIN_CLASSES = {
- 'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
- 'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
- 'file-stream', 'float', 'function', 'generic-function', 'hash-table',
- 'integer', 'list', 'logical-pathname', 'method-combination', 'method',
- 'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
- 'real', 'random-state', 'restart', 'sequence', 'standard-class',
- 'standard-generic-function', 'standard-method', 'standard-object',
- 'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
- 'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
+ 'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
+ 'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
+ 'file-stream', 'float', 'function', 'generic-function', 'hash-table',
+ 'integer', 'list', 'logical-pathname', 'method-combination', 'method',
+ 'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
+ 'real', 'random-state', 'restart', 'sequence', 'standard-class',
+ 'standard-generic-function', 'standard-method', 'standard-object',
+ 'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
+ 'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
}
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py
index 2cf4443851..4b148f0514 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_cocoa_builtins.py
@@ -1,73 +1,73 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._cocoa_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file defines a set of types used across Cocoa frameworks from Apple.
- There is a list of @interfaces, @protocols and some other (structs, unions)
-
- File may be also used as standalone generator for aboves.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._cocoa_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file defines a set of types used across Cocoa frameworks from Apple.
+ There is a list of @interfaces, @protocols and some other (structs, unions)
+
+ File may be also used as standalone generator for aboves.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
COCOA_INTERFACES = {'UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'PKPayment', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'PKPaymentSummaryItem', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'HKWorkoutType', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 
'GKLeaderboardSet', 'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'PKShippingMethod', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'HKCorrelationType', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'UICollectionViewLayoutAttributes', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'HMUser', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSHTTPCookie', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 
'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'UIFontDescriptor', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'PKPaymentPass', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'HKWorkout', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 
'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'CIQRCodeFeature', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'PKPaymentRequest', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 
'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'PKPaymentToken', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'PKPaymentAuthorizationViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'HKWorkoutEvent', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 
'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'UIKeyCommand', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 
'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'}
COCOA_PROTOCOLS = {'SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'PKPaymentAuthorizationViewControllerDelegate', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 
'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'}
COCOA_PRIMITIVES = {'ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 
'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 
'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'}
-
-if __name__ == '__main__': # pragma: no cover
- import os
- import re
-
- FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.1.sdk/System/Library/Frameworks/'
- frameworks = os.listdir(FRAMEWORKS_PATH)
-
- all_interfaces = set()
- all_protocols = set()
- all_primitives = set()
- for framework in frameworks:
- frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
- if not os.path.exists(frameworkHeadersDir):
- continue
-
- headerFilenames = os.listdir(frameworkHeadersDir)
-
- for f in headerFilenames:
- if not f.endswith('.h'):
- continue
-
- headerFilePath = frameworkHeadersDir + f
+
+if __name__ == '__main__': # pragma: no cover
+ import os
+ import re
+
+ FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.1.sdk/System/Library/Frameworks/'
+ frameworks = os.listdir(FRAMEWORKS_PATH)
+
+ all_interfaces = set()
+ all_protocols = set()
+ all_primitives = set()
+ for framework in frameworks:
+ frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
+ if not os.path.exists(frameworkHeadersDir):
+ continue
+
+ headerFilenames = os.listdir(frameworkHeadersDir)
+
+ for f in headerFilenames:
+ if not f.endswith('.h'):
+ continue
+
+ headerFilePath = frameworkHeadersDir + f
with open(headerFilePath) as f:
content = f.read()
res = re.findall(r'(?<=@interface )\w+', content)
- for r in res:
- all_interfaces.add(r)
-
+ for r in res:
+ all_interfaces.add(r)
+
res = re.findall(r'(?<=@protocol )\w+', content)
- for r in res:
- all_protocols.add(r)
-
+ for r in res:
+ all_protocols.add(r)
+
res = re.findall(r'(?<=typedef enum )\w+', content)
- for r in res:
- all_primitives.add(r)
-
+ for r in res:
+ all_primitives.add(r)
+
res = re.findall(r'(?<=typedef struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
+ for r in res:
+ all_primitives.add(r)
+
res = re.findall(r'(?<=typedef const struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
-
- print("ALL interfaces: \n")
- print(all_interfaces)
-
- print("\nALL protocols: \n")
- print(all_protocols)
-
- print("\nALL primitives: \n")
- print(all_primitives)
+ for r in res:
+ all_primitives.add(r)
+
+
+ print("ALL interfaces: \n")
+ print(all_interfaces)
+
+ print("\nALL protocols: \n")
+ print(all_protocols)
+
+ print("\nALL primitives: \n")
+ print(all_primitives)
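The restored script above regenerates the Cocoa builtin name sets by scanning Objective-C headers from an iOS SDK with look-behind regular expressions. A minimal, self-contained sketch of the same idea is shown here for reference; it is not part of the vendored Pygments source, and the SDK path, the use of os.walk, and the summary print are illustrative assumptions (the original hard-codes an iPhoneOS8.1 SDK and lists only the top-level Headers directories).

    # Sketch: harvest @interface / @protocol / typedef names from SDK headers.
    # The SDK path below is an assumption; adjust it to the local Xcode install.
    import os
    import re

    SDK_FRAMEWORKS = ('/Applications/Xcode.app/Contents/Developer/Platforms/'
                      'iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/'
                      'System/Library/Frameworks/')

    interfaces, protocols, primitives = set(), set(), set()

    for root, _dirs, files in os.walk(SDK_FRAMEWORKS):
        for name in files:
            if not name.endswith('.h'):
                continue
            # Read each header, ignoring undecodable bytes in vendor headers.
            path = os.path.join(root, name)
            with open(path, encoding='utf-8', errors='ignore') as header:
                content = header.read()
            # Same look-behind patterns as the generator script in the diff.
            interfaces.update(re.findall(r'(?<=@interface )\w+', content))
            protocols.update(re.findall(r'(?<=@protocol )\w+', content))
            for pattern in (r'(?<=typedef enum )\w+',
                            r'(?<=typedef struct )\w+',
                            r'(?<=typedef const struct )\w+'):
                primitives.update(re.findall(pattern, content))

    print(len(interfaces), len(protocols), len(primitives))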
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py
index 72e1fe39e9..10ce2a6a1e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_csound_builtins.py
@@ -1,12 +1,12 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._csound_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._csound_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
# Opcodes in Csound 6.13.0 using:
# python3 -c "
# import re
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_lasso_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_lasso_builtins.py
index 1d2719da13..e895f64ec0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_lasso_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_lasso_builtins.py
@@ -1,5327 +1,5327 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._lasso_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Built-in Lasso types, traits, methods, and members.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._lasso_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Built-in Lasso types, traits, methods, and members.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-BUILTINS = {
- 'Types': (
- 'array',
- 'atbegin',
- 'boolean',
- 'bson_iter',
- 'bson',
- 'bytes_document_body',
- 'bytes',
- 'cache_server_element',
- 'cache_server',
- 'capture',
- 'client_address',
- 'client_ip',
- 'component_container',
- 'component_render_state',
- 'component',
- 'curl',
- 'curltoken',
- 'currency',
- 'custom',
- 'data_document',
- 'database_registry',
- 'date',
- 'dateandtime',
- 'dbgp_packet',
- 'dbgp_server',
- 'debugging_stack',
- 'decimal',
- 'delve',
- 'dir',
- 'dirdesc',
- 'dns_response',
- 'document_base',
- 'document_body',
- 'document_header',
- 'dsinfo',
- 'duration',
- 'eacher',
- 'email_compose',
- 'email_parse',
- 'email_pop',
- 'email_queue_impl_base',
- 'email_queue_impl',
- 'email_smtp',
- 'email_stage_impl_base',
- 'email_stage_impl',
- 'fastcgi_each_fcgi_param',
- 'fastcgi_server',
- 'fcgi_record',
- 'fcgi_request',
- 'file',
- 'filedesc',
- 'filemaker_datasource',
- 'generateforeachkeyed',
- 'generateforeachunkeyed',
- 'generateseries',
- 'hash_map',
- 'html_atomic_element',
- 'html_attr',
- 'html_base',
- 'html_binary',
- 'html_br',
- 'html_cdata',
- 'html_container_element',
- 'html_div',
- 'html_document_body',
- 'html_document_head',
- 'html_eol',
- 'html_fieldset',
- 'html_form',
- 'html_h1',
- 'html_h2',
- 'html_h3',
- 'html_h4',
- 'html_h5',
- 'html_h6',
- 'html_hr',
- 'html_img',
- 'html_input',
- 'html_json',
- 'html_label',
- 'html_legend',
- 'html_link',
- 'html_meta',
- 'html_object',
- 'html_option',
- 'html_raw',
- 'html_script',
- 'html_select',
- 'html_span',
- 'html_style',
- 'html_table',
- 'html_td',
- 'html_text',
- 'html_th',
- 'html_tr',
- 'http_document_header',
- 'http_document',
- 'http_error',
- 'http_header_field',
- 'http_server_connection_handler_globals',
- 'http_server_connection_handler',
- 'http_server_request_logger_thread',
- 'http_server_web_connection',
- 'http_server',
- 'image',
- 'include_cache',
- 'inline_type',
- 'integer',
- 'java_jnienv',
- 'jbyte',
- 'jbytearray',
- 'jchar',
- 'jchararray',
- 'jfieldid',
- 'jfloat',
- 'jint',
- 'jmethodid',
- 'jobject',
- 'jshort',
- 'json_decode',
- 'json_encode',
- 'json_literal',
- 'json_object',
- 'keyword',
- 'lassoapp_compiledsrc_appsource',
- 'lassoapp_compiledsrc_fileresource',
- 'lassoapp_content_rep_halt',
- 'lassoapp_dirsrc_appsource',
- 'lassoapp_dirsrc_fileresource',
- 'lassoapp_installer',
- 'lassoapp_livesrc_appsource',
- 'lassoapp_livesrc_fileresource',
- 'lassoapp_long_expiring_bytes',
- 'lassoapp_manualsrc_appsource',
- 'lassoapp_zip_file_server',
- 'lassoapp_zipsrc_appsource',
- 'lassoapp_zipsrc_fileresource',
- 'ldap',
- 'library_thread_loader',
- 'list_node',
- 'list',
- 'locale',
- 'log_impl_base',
- 'log_impl',
- 'magick_image',
- 'map_node',
- 'map',
- 'memberstream',
- 'memory_session_driver_impl_entry',
- 'memory_session_driver_impl',
- 'memory_session_driver',
- 'mime_reader',
- 'mongo_client',
- 'mongo_collection',
- 'mongo_cursor',
- 'mustache_ctx',
- 'mysql_session_driver_impl',
- 'mysql_session_driver',
- 'net_named_pipe',
- 'net_tcp_ssl',
- 'net_tcp',
- 'net_udp_packet',
- 'net_udp',
- 'null',
- 'odbc_session_driver_impl',
- 'odbc_session_driver',
- 'opaque',
- 'os_process',
- 'pair_compare',
- 'pair',
- 'pairup',
- 'pdf_barcode',
- 'pdf_chunk',
- 'pdf_color',
- 'pdf_doc',
- 'pdf_font',
- 'pdf_hyphenator',
- 'pdf_image',
- 'pdf_list',
- 'pdf_paragraph',
- 'pdf_phrase',
- 'pdf_read',
- 'pdf_table',
- 'pdf_text',
- 'pdf_typebase',
- 'percent',
- 'portal_impl',
- 'queriable_groupby',
- 'queriable_grouping',
- 'queriable_groupjoin',
- 'queriable_join',
- 'queriable_orderby',
- 'queriable_orderbydescending',
- 'queriable_select',
- 'queriable_selectmany',
- 'queriable_skip',
- 'queriable_take',
- 'queriable_thenby',
- 'queriable_thenbydescending',
- 'queriable_where',
- 'queue',
- 'raw_document_body',
- 'regexp',
- 'repeat',
- 'scientific',
- 'security_registry',
- 'serialization_element',
- 'serialization_object_identity_compare',
- 'serialization_reader',
- 'serialization_writer_ref',
- 'serialization_writer_standin',
- 'serialization_writer',
- 'session_delete_expired_thread',
- 'set',
- 'signature',
- 'sourcefile',
- 'sqlite_column',
- 'sqlite_currentrow',
- 'sqlite_db',
- 'sqlite_results',
- 'sqlite_session_driver_impl_entry',
- 'sqlite_session_driver_impl',
- 'sqlite_session_driver',
- 'sqlite_table',
- 'sqlite3_stmt',
- 'sqlite3',
- 'staticarray',
- 'string',
- 'sys_process',
- 'tag',
- 'text_document',
- 'tie',
- 'timeonly',
- 'trait',
- 'tree_base',
- 'tree_node',
- 'tree_nullnode',
- 'ucal',
- 'usgcpu',
- 'usgvm',
- 'void',
- 'web_error_atend',
- 'web_node_base',
- 'web_node_content_representation_css_specialized',
- 'web_node_content_representation_html_specialized',
- 'web_node_content_representation_js_specialized',
- 'web_node_content_representation_xhr_container',
- 'web_node_echo',
- 'web_node_root',
- 'web_request_impl',
- 'web_request',
- 'web_response_impl',
- 'web_response',
- 'web_router',
- 'websocket_handler',
- 'worker_pool',
- 'xml_attr',
- 'xml_cdatasection',
- 'xml_characterdata',
- 'xml_comment',
- 'xml_document',
- 'xml_documentfragment',
- 'xml_documenttype',
- 'xml_domimplementation',
- 'xml_element',
- 'xml_entity',
- 'xml_entityreference',
- 'xml_namednodemap_attr',
- 'xml_namednodemap_ht',
- 'xml_namednodemap',
- 'xml_node',
- 'xml_nodelist',
- 'xml_notation',
- 'xml_processinginstruction',
- 'xml_text',
- 'xmlstream',
- 'zip_file_impl',
- 'zip_file',
- 'zip_impl',
- 'zip',
- ),
- 'Traits': (
- 'any',
- 'formattingbase',
- 'html_attributed',
- 'html_element_coreattrs',
- 'html_element_eventsattrs',
- 'html_element_i18nattrs',
- 'lassoapp_capabilities',
- 'lassoapp_resource',
- 'lassoapp_source',
- 'queriable_asstring',
- 'session_driver',
- 'trait_array',
- 'trait_asstring',
- 'trait_backcontractible',
- 'trait_backended',
- 'trait_backexpandable',
- 'trait_close',
- 'trait_contractible',
- 'trait_decompose_assignment',
- 'trait_doubleended',
- 'trait_each_sub',
- 'trait_encodeurl',
- 'trait_endedfullymutable',
- 'trait_expandable',
- 'trait_file',
- 'trait_finite',
- 'trait_finiteforeach',
- 'trait_foreach',
- 'trait_foreachtextelement',
- 'trait_frontcontractible',
- 'trait_frontended',
- 'trait_frontexpandable',
- 'trait_fullymutable',
- 'trait_generator',
- 'trait_generatorcentric',
- 'trait_hashable',
- 'trait_json_serialize',
- 'trait_keyed',
- 'trait_keyedfinite',
- 'trait_keyedforeach',
- 'trait_keyedmutable',
- 'trait_list',
- 'trait_map',
- 'trait_net',
- 'trait_pathcomponents',
- 'trait_positionallykeyed',
- 'trait_positionallysearchable',
- 'trait_queriable',
- 'trait_queriablelambda',
- 'trait_readbytes',
- 'trait_readstring',
- 'trait_scalar',
- 'trait_searchable',
- 'trait_serializable',
- 'trait_setencoding',
- 'trait_setoperations',
- 'trait_stack',
- 'trait_treenode',
- 'trait_writebytes',
- 'trait_writestring',
- 'trait_xml_elementcompat',
- 'trait_xml_nodecompat',
- 'web_connection',
- 'web_node_container',
- 'web_node_content_css_specialized',
- 'web_node_content_document',
- 'web_node_content_html_specialized',
- 'web_node_content_js_specialized',
- 'web_node_content_json_specialized',
- 'web_node_content_representation',
- 'web_node_content',
- 'web_node_postable',
- 'web_node',
- ),
- 'Unbound Methods': (
- 'abort_clear',
- 'abort_now',
- 'abort',
- 'action_param',
- 'action_params',
- 'action_statement',
- 'admin_authorization',
- 'admin_currentgroups',
- 'admin_currentuserid',
- 'admin_currentusername',
- 'admin_getpref',
- 'admin_initialize',
- 'admin_lassoservicepath',
- 'admin_removepref',
- 'admin_setpref',
- 'admin_userexists',
- 'all',
- 'auth_admin',
- 'auth_check',
- 'auth_custom',
- 'auth_group',
- 'auth_prompt',
- 'auth_user',
- 'bom_utf16be',
- 'bom_utf16le',
- 'bom_utf32be',
- 'bom_utf32le',
- 'bom_utf8',
- 'bw',
- 'capture_nearestloopabort',
- 'capture_nearestloopcontinue',
- 'capture_nearestloopcount',
- 'checked',
- 'cipher_decrypt_private',
- 'cipher_decrypt_public',
- 'cipher_decrypt',
- 'cipher_digest',
- 'cipher_encrypt_private',
- 'cipher_encrypt_public',
- 'cipher_encrypt',
- 'cipher_generate_key',
- 'cipher_hmac',
- 'cipher_keylength',
- 'cipher_list',
- 'cipher_open',
- 'cipher_seal',
- 'cipher_sign',
- 'cipher_verify',
- 'client_addr',
- 'client_authorization',
- 'client_browser',
- 'client_contentlength',
- 'client_contenttype',
- 'client_cookielist',
- 'client_cookies',
- 'client_encoding',
- 'client_formmethod',
- 'client_getargs',
- 'client_getparam',
- 'client_getparams',
- 'client_headers',
- 'client_integertoip',
- 'client_iptointeger',
- 'client_password',
- 'client_postargs',
- 'client_postparam',
- 'client_postparams',
- 'client_type',
- 'client_url',
- 'client_username',
- 'cn',
- 'column_name',
- 'column_names',
- 'column_type',
- 'column',
- 'compress',
- 'content_addheader',
- 'content_body',
- 'content_encoding',
- 'content_header',
- 'content_replaceheader',
- 'content_type',
- 'cookie_set',
- 'cookie',
- 'curl_easy_cleanup',
- 'curl_easy_duphandle',
- 'curl_easy_getinfo',
- 'curl_easy_init',
- 'curl_easy_reset',
- 'curl_easy_setopt',
- 'curl_easy_strerror',
- 'curl_getdate',
- 'curl_http_version_1_0',
- 'curl_http_version_1_1',
- 'curl_http_version_none',
- 'curl_ipresolve_v4',
- 'curl_ipresolve_v6',
- 'curl_ipresolve_whatever',
- 'curl_multi_perform',
- 'curl_multi_result',
- 'curl_netrc_ignored',
- 'curl_netrc_optional',
- 'curl_netrc_required',
+ :license: BSD, see LICENSE for details.
+"""
+
+BUILTINS = {
+ 'Types': (
+ 'array',
+ 'atbegin',
+ 'boolean',
+ 'bson_iter',
+ 'bson',
+ 'bytes_document_body',
+ 'bytes',
+ 'cache_server_element',
+ 'cache_server',
+ 'capture',
+ 'client_address',
+ 'client_ip',
+ 'component_container',
+ 'component_render_state',
+ 'component',
+ 'curl',
+ 'curltoken',
+ 'currency',
+ 'custom',
+ 'data_document',
+ 'database_registry',
+ 'date',
+ 'dateandtime',
+ 'dbgp_packet',
+ 'dbgp_server',
+ 'debugging_stack',
+ 'decimal',
+ 'delve',
+ 'dir',
+ 'dirdesc',
+ 'dns_response',
+ 'document_base',
+ 'document_body',
+ 'document_header',
+ 'dsinfo',
+ 'duration',
+ 'eacher',
+ 'email_compose',
+ 'email_parse',
+ 'email_pop',
+ 'email_queue_impl_base',
+ 'email_queue_impl',
+ 'email_smtp',
+ 'email_stage_impl_base',
+ 'email_stage_impl',
+ 'fastcgi_each_fcgi_param',
+ 'fastcgi_server',
+ 'fcgi_record',
+ 'fcgi_request',
+ 'file',
+ 'filedesc',
+ 'filemaker_datasource',
+ 'generateforeachkeyed',
+ 'generateforeachunkeyed',
+ 'generateseries',
+ 'hash_map',
+ 'html_atomic_element',
+ 'html_attr',
+ 'html_base',
+ 'html_binary',
+ 'html_br',
+ 'html_cdata',
+ 'html_container_element',
+ 'html_div',
+ 'html_document_body',
+ 'html_document_head',
+ 'html_eol',
+ 'html_fieldset',
+ 'html_form',
+ 'html_h1',
+ 'html_h2',
+ 'html_h3',
+ 'html_h4',
+ 'html_h5',
+ 'html_h6',
+ 'html_hr',
+ 'html_img',
+ 'html_input',
+ 'html_json',
+ 'html_label',
+ 'html_legend',
+ 'html_link',
+ 'html_meta',
+ 'html_object',
+ 'html_option',
+ 'html_raw',
+ 'html_script',
+ 'html_select',
+ 'html_span',
+ 'html_style',
+ 'html_table',
+ 'html_td',
+ 'html_text',
+ 'html_th',
+ 'html_tr',
+ 'http_document_header',
+ 'http_document',
+ 'http_error',
+ 'http_header_field',
+ 'http_server_connection_handler_globals',
+ 'http_server_connection_handler',
+ 'http_server_request_logger_thread',
+ 'http_server_web_connection',
+ 'http_server',
+ 'image',
+ 'include_cache',
+ 'inline_type',
+ 'integer',
+ 'java_jnienv',
+ 'jbyte',
+ 'jbytearray',
+ 'jchar',
+ 'jchararray',
+ 'jfieldid',
+ 'jfloat',
+ 'jint',
+ 'jmethodid',
+ 'jobject',
+ 'jshort',
+ 'json_decode',
+ 'json_encode',
+ 'json_literal',
+ 'json_object',
+ 'keyword',
+ 'lassoapp_compiledsrc_appsource',
+ 'lassoapp_compiledsrc_fileresource',
+ 'lassoapp_content_rep_halt',
+ 'lassoapp_dirsrc_appsource',
+ 'lassoapp_dirsrc_fileresource',
+ 'lassoapp_installer',
+ 'lassoapp_livesrc_appsource',
+ 'lassoapp_livesrc_fileresource',
+ 'lassoapp_long_expiring_bytes',
+ 'lassoapp_manualsrc_appsource',
+ 'lassoapp_zip_file_server',
+ 'lassoapp_zipsrc_appsource',
+ 'lassoapp_zipsrc_fileresource',
+ 'ldap',
+ 'library_thread_loader',
+ 'list_node',
+ 'list',
+ 'locale',
+ 'log_impl_base',
+ 'log_impl',
+ 'magick_image',
+ 'map_node',
+ 'map',
+ 'memberstream',
+ 'memory_session_driver_impl_entry',
+ 'memory_session_driver_impl',
+ 'memory_session_driver',
+ 'mime_reader',
+ 'mongo_client',
+ 'mongo_collection',
+ 'mongo_cursor',
+ 'mustache_ctx',
+ 'mysql_session_driver_impl',
+ 'mysql_session_driver',
+ 'net_named_pipe',
+ 'net_tcp_ssl',
+ 'net_tcp',
+ 'net_udp_packet',
+ 'net_udp',
+ 'null',
+ 'odbc_session_driver_impl',
+ 'odbc_session_driver',
+ 'opaque',
+ 'os_process',
+ 'pair_compare',
+ 'pair',
+ 'pairup',
+ 'pdf_barcode',
+ 'pdf_chunk',
+ 'pdf_color',
+ 'pdf_doc',
+ 'pdf_font',
+ 'pdf_hyphenator',
+ 'pdf_image',
+ 'pdf_list',
+ 'pdf_paragraph',
+ 'pdf_phrase',
+ 'pdf_read',
+ 'pdf_table',
+ 'pdf_text',
+ 'pdf_typebase',
+ 'percent',
+ 'portal_impl',
+ 'queriable_groupby',
+ 'queriable_grouping',
+ 'queriable_groupjoin',
+ 'queriable_join',
+ 'queriable_orderby',
+ 'queriable_orderbydescending',
+ 'queriable_select',
+ 'queriable_selectmany',
+ 'queriable_skip',
+ 'queriable_take',
+ 'queriable_thenby',
+ 'queriable_thenbydescending',
+ 'queriable_where',
+ 'queue',
+ 'raw_document_body',
+ 'regexp',
+ 'repeat',
+ 'scientific',
+ 'security_registry',
+ 'serialization_element',
+ 'serialization_object_identity_compare',
+ 'serialization_reader',
+ 'serialization_writer_ref',
+ 'serialization_writer_standin',
+ 'serialization_writer',
+ 'session_delete_expired_thread',
+ 'set',
+ 'signature',
+ 'sourcefile',
+ 'sqlite_column',
+ 'sqlite_currentrow',
+ 'sqlite_db',
+ 'sqlite_results',
+ 'sqlite_session_driver_impl_entry',
+ 'sqlite_session_driver_impl',
+ 'sqlite_session_driver',
+ 'sqlite_table',
+ 'sqlite3_stmt',
+ 'sqlite3',
+ 'staticarray',
+ 'string',
+ 'sys_process',
+ 'tag',
+ 'text_document',
+ 'tie',
+ 'timeonly',
+ 'trait',
+ 'tree_base',
+ 'tree_node',
+ 'tree_nullnode',
+ 'ucal',
+ 'usgcpu',
+ 'usgvm',
+ 'void',
+ 'web_error_atend',
+ 'web_node_base',
+ 'web_node_content_representation_css_specialized',
+ 'web_node_content_representation_html_specialized',
+ 'web_node_content_representation_js_specialized',
+ 'web_node_content_representation_xhr_container',
+ 'web_node_echo',
+ 'web_node_root',
+ 'web_request_impl',
+ 'web_request',
+ 'web_response_impl',
+ 'web_response',
+ 'web_router',
+ 'websocket_handler',
+ 'worker_pool',
+ 'xml_attr',
+ 'xml_cdatasection',
+ 'xml_characterdata',
+ 'xml_comment',
+ 'xml_document',
+ 'xml_documentfragment',
+ 'xml_documenttype',
+ 'xml_domimplementation',
+ 'xml_element',
+ 'xml_entity',
+ 'xml_entityreference',
+ 'xml_namednodemap_attr',
+ 'xml_namednodemap_ht',
+ 'xml_namednodemap',
+ 'xml_node',
+ 'xml_nodelist',
+ 'xml_notation',
+ 'xml_processinginstruction',
+ 'xml_text',
+ 'xmlstream',
+ 'zip_file_impl',
+ 'zip_file',
+ 'zip_impl',
+ 'zip',
+ ),
+ 'Traits': (
+ 'any',
+ 'formattingbase',
+ 'html_attributed',
+ 'html_element_coreattrs',
+ 'html_element_eventsattrs',
+ 'html_element_i18nattrs',
+ 'lassoapp_capabilities',
+ 'lassoapp_resource',
+ 'lassoapp_source',
+ 'queriable_asstring',
+ 'session_driver',
+ 'trait_array',
+ 'trait_asstring',
+ 'trait_backcontractible',
+ 'trait_backended',
+ 'trait_backexpandable',
+ 'trait_close',
+ 'trait_contractible',
+ 'trait_decompose_assignment',
+ 'trait_doubleended',
+ 'trait_each_sub',
+ 'trait_encodeurl',
+ 'trait_endedfullymutable',
+ 'trait_expandable',
+ 'trait_file',
+ 'trait_finite',
+ 'trait_finiteforeach',
+ 'trait_foreach',
+ 'trait_foreachtextelement',
+ 'trait_frontcontractible',
+ 'trait_frontended',
+ 'trait_frontexpandable',
+ 'trait_fullymutable',
+ 'trait_generator',
+ 'trait_generatorcentric',
+ 'trait_hashable',
+ 'trait_json_serialize',
+ 'trait_keyed',
+ 'trait_keyedfinite',
+ 'trait_keyedforeach',
+ 'trait_keyedmutable',
+ 'trait_list',
+ 'trait_map',
+ 'trait_net',
+ 'trait_pathcomponents',
+ 'trait_positionallykeyed',
+ 'trait_positionallysearchable',
+ 'trait_queriable',
+ 'trait_queriablelambda',
+ 'trait_readbytes',
+ 'trait_readstring',
+ 'trait_scalar',
+ 'trait_searchable',
+ 'trait_serializable',
+ 'trait_setencoding',
+ 'trait_setoperations',
+ 'trait_stack',
+ 'trait_treenode',
+ 'trait_writebytes',
+ 'trait_writestring',
+ 'trait_xml_elementcompat',
+ 'trait_xml_nodecompat',
+ 'web_connection',
+ 'web_node_container',
+ 'web_node_content_css_specialized',
+ 'web_node_content_document',
+ 'web_node_content_html_specialized',
+ 'web_node_content_js_specialized',
+ 'web_node_content_json_specialized',
+ 'web_node_content_representation',
+ 'web_node_content',
+ 'web_node_postable',
+ 'web_node',
+ ),
+ 'Unbound Methods': (
+ 'abort_clear',
+ 'abort_now',
+ 'abort',
+ 'action_param',
+ 'action_params',
+ 'action_statement',
+ 'admin_authorization',
+ 'admin_currentgroups',
+ 'admin_currentuserid',
+ 'admin_currentusername',
+ 'admin_getpref',
+ 'admin_initialize',
+ 'admin_lassoservicepath',
+ 'admin_removepref',
+ 'admin_setpref',
+ 'admin_userexists',
+ 'all',
+ 'auth_admin',
+ 'auth_check',
+ 'auth_custom',
+ 'auth_group',
+ 'auth_prompt',
+ 'auth_user',
+ 'bom_utf16be',
+ 'bom_utf16le',
+ 'bom_utf32be',
+ 'bom_utf32le',
+ 'bom_utf8',
+ 'bw',
+ 'capture_nearestloopabort',
+ 'capture_nearestloopcontinue',
+ 'capture_nearestloopcount',
+ 'checked',
+ 'cipher_decrypt_private',
+ 'cipher_decrypt_public',
+ 'cipher_decrypt',
+ 'cipher_digest',
+ 'cipher_encrypt_private',
+ 'cipher_encrypt_public',
+ 'cipher_encrypt',
+ 'cipher_generate_key',
+ 'cipher_hmac',
+ 'cipher_keylength',
+ 'cipher_list',
+ 'cipher_open',
+ 'cipher_seal',
+ 'cipher_sign',
+ 'cipher_verify',
+ 'client_addr',
+ 'client_authorization',
+ 'client_browser',
+ 'client_contentlength',
+ 'client_contenttype',
+ 'client_cookielist',
+ 'client_cookies',
+ 'client_encoding',
+ 'client_formmethod',
+ 'client_getargs',
+ 'client_getparam',
+ 'client_getparams',
+ 'client_headers',
+ 'client_integertoip',
+ 'client_iptointeger',
+ 'client_password',
+ 'client_postargs',
+ 'client_postparam',
+ 'client_postparams',
+ 'client_type',
+ 'client_url',
+ 'client_username',
+ 'cn',
+ 'column_name',
+ 'column_names',
+ 'column_type',
+ 'column',
+ 'compress',
+ 'content_addheader',
+ 'content_body',
+ 'content_encoding',
+ 'content_header',
+ 'content_replaceheader',
+ 'content_type',
+ 'cookie_set',
+ 'cookie',
+ 'curl_easy_cleanup',
+ 'curl_easy_duphandle',
+ 'curl_easy_getinfo',
+ 'curl_easy_init',
+ 'curl_easy_reset',
+ 'curl_easy_setopt',
+ 'curl_easy_strerror',
+ 'curl_getdate',
+ 'curl_http_version_1_0',
+ 'curl_http_version_1_1',
+ 'curl_http_version_none',
+ 'curl_ipresolve_v4',
+ 'curl_ipresolve_v6',
+ 'curl_ipresolve_whatever',
+ 'curl_multi_perform',
+ 'curl_multi_result',
+ 'curl_netrc_ignored',
+ 'curl_netrc_optional',
+ 'curl_netrc_required',
'curl_sslversion_default',
'curl_sslversion_sslv2',
'curl_sslversion_sslv3',
'curl_sslversion_tlsv1',
- 'curl_version_asynchdns',
- 'curl_version_debug',
- 'curl_version_gssnegotiate',
- 'curl_version_idn',
- 'curl_version_info',
- 'curl_version_ipv6',
- 'curl_version_kerberos4',
- 'curl_version_largefile',
- 'curl_version_libz',
- 'curl_version_ntlm',
- 'curl_version_spnego',
- 'curl_version_ssl',
- 'curl_version',
- 'curlauth_any',
- 'curlauth_anysafe',
- 'curlauth_basic',
- 'curlauth_digest',
- 'curlauth_gssnegotiate',
- 'curlauth_none',
- 'curlauth_ntlm',
- 'curle_aborted_by_callback',
- 'curle_bad_calling_order',
- 'curle_bad_content_encoding',
- 'curle_bad_download_resume',
- 'curle_bad_function_argument',
- 'curle_bad_password_entered',
- 'curle_couldnt_connect',
- 'curle_couldnt_resolve_host',
- 'curle_couldnt_resolve_proxy',
- 'curle_failed_init',
- 'curle_file_couldnt_read_file',
- 'curle_filesize_exceeded',
- 'curle_ftp_access_denied',
- 'curle_ftp_cant_get_host',
- 'curle_ftp_cant_reconnect',
- 'curle_ftp_couldnt_get_size',
- 'curle_ftp_couldnt_retr_file',
- 'curle_ftp_couldnt_set_ascii',
- 'curle_ftp_couldnt_set_binary',
- 'curle_ftp_couldnt_use_rest',
- 'curle_ftp_port_failed',
- 'curle_ftp_quote_error',
- 'curle_ftp_ssl_failed',
- 'curle_ftp_user_password_incorrect',
- 'curle_ftp_weird_227_format',
- 'curle_ftp_weird_pass_reply',
- 'curle_ftp_weird_pasv_reply',
- 'curle_ftp_weird_server_reply',
- 'curle_ftp_weird_user_reply',
- 'curle_ftp_write_error',
- 'curle_function_not_found',
- 'curle_got_nothing',
- 'curle_http_post_error',
- 'curle_http_range_error',
- 'curle_http_returned_error',
- 'curle_interface_failed',
- 'curle_ldap_cannot_bind',
- 'curle_ldap_invalid_url',
- 'curle_ldap_search_failed',
- 'curle_library_not_found',
- 'curle_login_denied',
- 'curle_malformat_user',
- 'curle_obsolete',
- 'curle_ok',
- 'curle_operation_timeouted',
- 'curle_out_of_memory',
- 'curle_partial_file',
- 'curle_read_error',
- 'curle_recv_error',
- 'curle_send_error',
- 'curle_send_fail_rewind',
- 'curle_share_in_use',
- 'curle_ssl_cacert',
- 'curle_ssl_certproblem',
- 'curle_ssl_cipher',
- 'curle_ssl_connect_error',
- 'curle_ssl_engine_initfailed',
- 'curle_ssl_engine_notfound',
- 'curle_ssl_engine_setfailed',
- 'curle_ssl_peer_certificate',
- 'curle_telnet_option_syntax',
- 'curle_too_many_redirects',
- 'curle_unknown_telnet_option',
- 'curle_unsupported_protocol',
- 'curle_url_malformat_user',
- 'curle_url_malformat',
- 'curle_write_error',
- 'curlftpauth_default',
- 'curlftpauth_ssl',
- 'curlftpauth_tls',
- 'curlftpssl_all',
- 'curlftpssl_control',
- 'curlftpssl_last',
- 'curlftpssl_none',
- 'curlftpssl_try',
- 'curlinfo_connect_time',
- 'curlinfo_content_length_download',
- 'curlinfo_content_length_upload',
- 'curlinfo_content_type',
- 'curlinfo_effective_url',
- 'curlinfo_filetime',
- 'curlinfo_header_size',
- 'curlinfo_http_connectcode',
- 'curlinfo_httpauth_avail',
- 'curlinfo_namelookup_time',
- 'curlinfo_num_connects',
- 'curlinfo_os_errno',
- 'curlinfo_pretransfer_time',
- 'curlinfo_proxyauth_avail',
- 'curlinfo_redirect_count',
- 'curlinfo_redirect_time',
- 'curlinfo_request_size',
- 'curlinfo_response_code',
- 'curlinfo_size_download',
- 'curlinfo_size_upload',
- 'curlinfo_speed_download',
- 'curlinfo_speed_upload',
- 'curlinfo_ssl_engines',
- 'curlinfo_ssl_verifyresult',
- 'curlinfo_starttransfer_time',
- 'curlinfo_total_time',
- 'curlmsg_done',
- 'curlopt_autoreferer',
- 'curlopt_buffersize',
- 'curlopt_cainfo',
- 'curlopt_capath',
- 'curlopt_connecttimeout',
- 'curlopt_cookie',
- 'curlopt_cookiefile',
- 'curlopt_cookiejar',
- 'curlopt_cookiesession',
- 'curlopt_crlf',
- 'curlopt_customrequest',
- 'curlopt_dns_use_global_cache',
- 'curlopt_egdsocket',
- 'curlopt_encoding',
- 'curlopt_failonerror',
- 'curlopt_filetime',
- 'curlopt_followlocation',
- 'curlopt_forbid_reuse',
- 'curlopt_fresh_connect',
- 'curlopt_ftp_account',
- 'curlopt_ftp_create_missing_dirs',
- 'curlopt_ftp_response_timeout',
- 'curlopt_ftp_ssl',
- 'curlopt_ftp_use_eprt',
- 'curlopt_ftp_use_epsv',
- 'curlopt_ftpappend',
- 'curlopt_ftplistonly',
- 'curlopt_ftpport',
- 'curlopt_ftpsslauth',
- 'curlopt_header',
- 'curlopt_http_version',
- 'curlopt_http200aliases',
- 'curlopt_httpauth',
- 'curlopt_httpget',
- 'curlopt_httpheader',
- 'curlopt_httppost',
- 'curlopt_httpproxytunnel',
- 'curlopt_infilesize_large',
- 'curlopt_infilesize',
- 'curlopt_interface',
- 'curlopt_ipresolve',
- 'curlopt_krb4level',
- 'curlopt_low_speed_limit',
- 'curlopt_low_speed_time',
- 'curlopt_mail_from',
- 'curlopt_mail_rcpt',
- 'curlopt_maxconnects',
- 'curlopt_maxfilesize_large',
- 'curlopt_maxfilesize',
- 'curlopt_maxredirs',
- 'curlopt_netrc_file',
- 'curlopt_netrc',
- 'curlopt_nobody',
- 'curlopt_noprogress',
- 'curlopt_port',
- 'curlopt_post',
- 'curlopt_postfields',
- 'curlopt_postfieldsize_large',
- 'curlopt_postfieldsize',
- 'curlopt_postquote',
- 'curlopt_prequote',
- 'curlopt_proxy',
- 'curlopt_proxyauth',
- 'curlopt_proxyport',
- 'curlopt_proxytype',
- 'curlopt_proxyuserpwd',
- 'curlopt_put',
- 'curlopt_quote',
- 'curlopt_random_file',
- 'curlopt_range',
- 'curlopt_readdata',
- 'curlopt_referer',
- 'curlopt_resume_from_large',
- 'curlopt_resume_from',
- 'curlopt_ssl_cipher_list',
- 'curlopt_ssl_verifyhost',
- 'curlopt_ssl_verifypeer',
- 'curlopt_sslcert',
- 'curlopt_sslcerttype',
- 'curlopt_sslengine_default',
- 'curlopt_sslengine',
- 'curlopt_sslkey',
- 'curlopt_sslkeypasswd',
- 'curlopt_sslkeytype',
- 'curlopt_sslversion',
- 'curlopt_tcp_nodelay',
- 'curlopt_timecondition',
- 'curlopt_timeout',
- 'curlopt_timevalue',
- 'curlopt_transfertext',
- 'curlopt_unrestricted_auth',
- 'curlopt_upload',
- 'curlopt_url',
- 'curlopt_use_ssl',
- 'curlopt_useragent',
- 'curlopt_userpwd',
- 'curlopt_verbose',
- 'curlopt_writedata',
- 'curlproxy_http',
- 'curlproxy_socks4',
- 'curlproxy_socks5',
- 'database_adddefaultsqlitehost',
- 'database_database',
- 'database_initialize',
- 'database_name',
- 'database_qs',
- 'database_table_database_tables',
- 'database_table_datasource_databases',
- 'database_table_datasource_hosts',
- 'database_table_datasources',
- 'database_table_table_fields',
- 'database_util_cleanpath',
- 'dbgp_stop_stack_name',
- 'debugging_break',
- 'debugging_breakpoint_get',
- 'debugging_breakpoint_list',
- 'debugging_breakpoint_remove',
- 'debugging_breakpoint_set',
- 'debugging_breakpoint_update',
- 'debugging_context_locals',
- 'debugging_context_self',
- 'debugging_context_vars',
- 'debugging_detach',
- 'debugging_enabled',
- 'debugging_get_context',
- 'debugging_get_stack',
- 'debugging_run',
- 'debugging_step_in',
- 'debugging_step_out',
- 'debugging_step_over',
- 'debugging_stop',
- 'debugging_terminate',
- 'decimal_random',
- 'decompress',
- 'decrypt_blowfish',
- 'define_atbegin',
- 'define_atend',
- 'dns_default',
- 'dns_lookup',
- 'document',
- 'email_attachment_mime_type',
- 'email_batch',
- 'email_digestchallenge',
- 'email_digestresponse',
- 'email_extract',
- 'email_findemails',
- 'email_fix_address_list',
- 'email_fix_address',
- 'email_fs_error_clean',
- 'email_immediate',
- 'email_initialize',
- 'email_merge',
- 'email_mxlookup',
- 'email_pop_priv_extract',
- 'email_pop_priv_quote',
- 'email_pop_priv_substring',
- 'email_queue',
- 'email_result',
- 'email_safeemail',
- 'email_send',
- 'email_status',
- 'email_token',
- 'email_translatebreakstocrlf',
- 'encode_qheader',
- 'encoding_iso88591',
- 'encoding_utf8',
- 'encrypt_blowfish',
- 'encrypt_crammd5',
- 'encrypt_hmac',
- 'encrypt_md5',
- 'eol',
- 'eq',
- 'error_code_aborted',
- 'error_code_dividebyzero',
- 'error_code_filenotfound',
- 'error_code_invalidparameter',
- 'error_code_methodnotfound',
- 'error_code_networkerror',
- 'error_code_noerror',
- 'error_code_resnotfound',
- 'error_code_runtimeassertion',
- 'error_code',
- 'error_msg_aborted',
- 'error_msg_dividebyzero',
- 'error_msg_filenotfound',
- 'error_msg_invalidparameter',
- 'error_msg_methodnotfound',
- 'error_msg_networkerror',
- 'error_msg_noerror',
- 'error_msg_resnotfound',
- 'error_msg_runtimeassertion',
- 'error_msg',
- 'error_obj',
- 'error_pop',
- 'error_push',
- 'error_reset',
- 'error_stack',
- 'escape_tag',
- 'evdns_resolve_ipv4',
- 'evdns_resolve_ipv6',
- 'evdns_resolve_reverse_ipv6',
- 'evdns_resolve_reverse',
- 'ew',
- 'fail_if',
- 'fail_ifnot',
- 'fail_now',
- 'fail',
- 'failure_clear',
- 'fastcgi_createfcgirequest',
- 'fastcgi_handlecon',
- 'fastcgi_handlereq',
- 'fastcgi_initialize',
- 'fastcgi_initiate_request',
- 'fcgi_abort_request',
- 'fcgi_authorize',
- 'fcgi_begin_request',
- 'fcgi_bodychunksize',
- 'fcgi_cant_mpx_conn',
- 'fcgi_data',
- 'fcgi_end_request',
- 'fcgi_filter',
- 'fcgi_get_values_result',
- 'fcgi_get_values',
- 'fcgi_keep_conn',
- 'fcgi_makeendrequestbody',
- 'fcgi_makestdoutbody',
- 'fcgi_max_conns',
- 'fcgi_max_reqs',
- 'fcgi_mpxs_conns',
- 'fcgi_null_request_id',
- 'fcgi_overloaded',
- 'fcgi_params',
- 'fcgi_read_timeout_seconds',
- 'fcgi_readparam',
- 'fcgi_request_complete',
- 'fcgi_responder',
- 'fcgi_stderr',
- 'fcgi_stdin',
- 'fcgi_stdout',
- 'fcgi_unknown_role',
- 'fcgi_unknown_type',
- 'fcgi_version_1',
- 'fcgi_x_stdin',
- 'field_name',
- 'field_names',
- 'field',
- 'file_copybuffersize',
- 'file_defaultencoding',
- 'file_forceroot',
- 'file_modechar',
- 'file_modeline',
- 'file_stderr',
- 'file_stdin',
- 'file_stdout',
- 'file_tempfile',
- 'filemakerds_initialize',
- 'filemakerds',
- 'found_count',
- 'ft',
- 'ftp_deletefile',
- 'ftp_getdata',
- 'ftp_getfile',
- 'ftp_getlisting',
- 'ftp_putdata',
- 'ftp_putfile',
- 'full',
- 'generateforeach',
- 'gt',
- 'gte',
- 'handle_failure',
- 'handle',
- 'hash_primes',
- 'html_comment',
- 'http_char_colon',
- 'http_char_cr',
- 'http_char_htab',
- 'http_char_lf',
- 'http_char_question',
- 'http_char_space',
- 'http_default_files',
- 'http_read_headers',
- 'http_read_timeout_secs',
- 'http_server_apps_path',
- 'http_server_request_logger',
- 'if_empty',
- 'if_false',
- 'if_null',
- 'if_true',
- 'include_cache_compare',
- 'include_currentpath',
- 'include_filepath',
- 'include_localpath',
- 'include_once',
- 'include_path',
- 'include_raw',
- 'include_url',
- 'include',
- 'includes',
- 'inline_colinfo_name_pos',
- 'inline_colinfo_type_pos',
- 'inline_colinfo_valuelist_pos',
- 'inline_columninfo_pos',
- 'inline_foundcount_pos',
- 'inline_namedget',
- 'inline_namedput',
- 'inline_resultrows_pos',
- 'inline_scopeget',
- 'inline_scopepop',
- 'inline_scopepush',
- 'inline',
- 'integer_bitor',
- 'integer_random',
- 'io_dir_dt_blk',
- 'io_dir_dt_chr',
- 'io_dir_dt_dir',
- 'io_dir_dt_fifo',
- 'io_dir_dt_lnk',
- 'io_dir_dt_reg',
- 'io_dir_dt_sock',
- 'io_dir_dt_unknown',
- 'io_dir_dt_wht',
- 'io_file_access',
- 'io_file_chdir',
- 'io_file_chmod',
- 'io_file_chown',
- 'io_file_dirname',
- 'io_file_f_dupfd',
- 'io_file_f_getfd',
- 'io_file_f_getfl',
- 'io_file_f_getlk',
- 'io_file_f_rdlck',
- 'io_file_f_setfd',
- 'io_file_f_setfl',
- 'io_file_f_setlk',
- 'io_file_f_setlkw',
- 'io_file_f_test',
- 'io_file_f_tlock',
- 'io_file_f_ulock',
- 'io_file_f_unlck',
- 'io_file_f_wrlck',
- 'io_file_fd_cloexec',
- 'io_file_fioasync',
- 'io_file_fioclex',
- 'io_file_fiodtype',
- 'io_file_fiogetown',
- 'io_file_fionbio',
- 'io_file_fionclex',
- 'io_file_fionread',
- 'io_file_fiosetown',
- 'io_file_getcwd',
- 'io_file_lchown',
- 'io_file_link',
- 'io_file_lockf',
- 'io_file_lstat_atime',
- 'io_file_lstat_mode',
- 'io_file_lstat_mtime',
- 'io_file_lstat_size',
- 'io_file_mkdir',
- 'io_file_mkfifo',
- 'io_file_mkstemp',
- 'io_file_o_append',
- 'io_file_o_async',
- 'io_file_o_creat',
- 'io_file_o_excl',
- 'io_file_o_exlock',
- 'io_file_o_fsync',
- 'io_file_o_nofollow',
- 'io_file_o_nonblock',
- 'io_file_o_rdonly',
- 'io_file_o_rdwr',
- 'io_file_o_shlock',
- 'io_file_o_sync',
- 'io_file_o_trunc',
- 'io_file_o_wronly',
- 'io_file_pipe',
- 'io_file_readlink',
- 'io_file_realpath',
- 'io_file_remove',
- 'io_file_rename',
- 'io_file_rmdir',
- 'io_file_s_ifblk',
- 'io_file_s_ifchr',
- 'io_file_s_ifdir',
- 'io_file_s_ififo',
- 'io_file_s_iflnk',
- 'io_file_s_ifmt',
- 'io_file_s_ifreg',
- 'io_file_s_ifsock',
- 'io_file_s_irgrp',
- 'io_file_s_iroth',
- 'io_file_s_irusr',
- 'io_file_s_irwxg',
- 'io_file_s_irwxo',
- 'io_file_s_irwxu',
- 'io_file_s_isgid',
- 'io_file_s_isuid',
- 'io_file_s_isvtx',
- 'io_file_s_iwgrp',
- 'io_file_s_iwoth',
- 'io_file_s_iwusr',
- 'io_file_s_ixgrp',
- 'io_file_s_ixoth',
- 'io_file_s_ixusr',
- 'io_file_seek_cur',
- 'io_file_seek_end',
- 'io_file_seek_set',
- 'io_file_stat_atime',
- 'io_file_stat_mode',
- 'io_file_stat_mtime',
- 'io_file_stat_size',
- 'io_file_stderr',
- 'io_file_stdin',
- 'io_file_stdout',
- 'io_file_symlink',
- 'io_file_tempnam',
- 'io_file_truncate',
- 'io_file_umask',
- 'io_file_unlink',
- 'io_net_accept',
- 'io_net_af_inet',
- 'io_net_af_inet6',
- 'io_net_af_unix',
- 'io_net_bind',
- 'io_net_connect',
- 'io_net_getpeername',
- 'io_net_getsockname',
- 'io_net_ipproto_ip',
- 'io_net_ipproto_udp',
- 'io_net_listen',
- 'io_net_msg_oob',
- 'io_net_msg_peek',
- 'io_net_msg_waitall',
- 'io_net_recv',
- 'io_net_recvfrom',
- 'io_net_send',
- 'io_net_sendto',
- 'io_net_shut_rd',
- 'io_net_shut_rdwr',
- 'io_net_shut_wr',
- 'io_net_shutdown',
- 'io_net_so_acceptconn',
- 'io_net_so_broadcast',
- 'io_net_so_debug',
- 'io_net_so_dontroute',
- 'io_net_so_error',
- 'io_net_so_keepalive',
- 'io_net_so_linger',
- 'io_net_so_oobinline',
- 'io_net_so_rcvbuf',
- 'io_net_so_rcvlowat',
- 'io_net_so_rcvtimeo',
- 'io_net_so_reuseaddr',
- 'io_net_so_sndbuf',
- 'io_net_so_sndlowat',
- 'io_net_so_sndtimeo',
- 'io_net_so_timestamp',
- 'io_net_so_type',
- 'io_net_so_useloopback',
- 'io_net_sock_dgram',
- 'io_net_sock_raw',
- 'io_net_sock_rdm',
- 'io_net_sock_seqpacket',
- 'io_net_sock_stream',
- 'io_net_socket',
- 'io_net_sol_socket',
- 'io_net_ssl_accept',
- 'io_net_ssl_begin',
- 'io_net_ssl_connect',
- 'io_net_ssl_end',
- 'io_net_ssl_error',
- 'io_net_ssl_errorstring',
- 'io_net_ssl_funcerrorstring',
- 'io_net_ssl_liberrorstring',
- 'io_net_ssl_read',
- 'io_net_ssl_reasonerrorstring',
- 'io_net_ssl_setacceptstate',
- 'io_net_ssl_setconnectstate',
- 'io_net_ssl_setverifylocations',
- 'io_net_ssl_shutdown',
- 'io_net_ssl_usecertificatechainfile',
- 'io_net_ssl_useprivatekeyfile',
- 'io_net_ssl_write',
- 'java_jvm_create',
- 'java_jvm_getenv',
- 'jdbc_initialize',
- 'json_back_slash',
- 'json_back_space',
- 'json_close_array',
- 'json_close_object',
- 'json_colon',
- 'json_comma',
- 'json_consume_array',
- 'json_consume_object',
- 'json_consume_string',
- 'json_consume_token',
- 'json_cr',
- 'json_debug',
- 'json_deserialize',
- 'json_e_lower',
- 'json_e_upper',
- 'json_f_lower',
- 'json_form_feed',
- 'json_forward_slash',
- 'json_lf',
- 'json_n_lower',
- 'json_negative',
- 'json_open_array',
- 'json_open_object',
- 'json_period',
+ 'curl_version_asynchdns',
+ 'curl_version_debug',
+ 'curl_version_gssnegotiate',
+ 'curl_version_idn',
+ 'curl_version_info',
+ 'curl_version_ipv6',
+ 'curl_version_kerberos4',
+ 'curl_version_largefile',
+ 'curl_version_libz',
+ 'curl_version_ntlm',
+ 'curl_version_spnego',
+ 'curl_version_ssl',
+ 'curl_version',
+ 'curlauth_any',
+ 'curlauth_anysafe',
+ 'curlauth_basic',
+ 'curlauth_digest',
+ 'curlauth_gssnegotiate',
+ 'curlauth_none',
+ 'curlauth_ntlm',
+ 'curle_aborted_by_callback',
+ 'curle_bad_calling_order',
+ 'curle_bad_content_encoding',
+ 'curle_bad_download_resume',
+ 'curle_bad_function_argument',
+ 'curle_bad_password_entered',
+ 'curle_couldnt_connect',
+ 'curle_couldnt_resolve_host',
+ 'curle_couldnt_resolve_proxy',
+ 'curle_failed_init',
+ 'curle_file_couldnt_read_file',
+ 'curle_filesize_exceeded',
+ 'curle_ftp_access_denied',
+ 'curle_ftp_cant_get_host',
+ 'curle_ftp_cant_reconnect',
+ 'curle_ftp_couldnt_get_size',
+ 'curle_ftp_couldnt_retr_file',
+ 'curle_ftp_couldnt_set_ascii',
+ 'curle_ftp_couldnt_set_binary',
+ 'curle_ftp_couldnt_use_rest',
+ 'curle_ftp_port_failed',
+ 'curle_ftp_quote_error',
+ 'curle_ftp_ssl_failed',
+ 'curle_ftp_user_password_incorrect',
+ 'curle_ftp_weird_227_format',
+ 'curle_ftp_weird_pass_reply',
+ 'curle_ftp_weird_pasv_reply',
+ 'curle_ftp_weird_server_reply',
+ 'curle_ftp_weird_user_reply',
+ 'curle_ftp_write_error',
+ 'curle_function_not_found',
+ 'curle_got_nothing',
+ 'curle_http_post_error',
+ 'curle_http_range_error',
+ 'curle_http_returned_error',
+ 'curle_interface_failed',
+ 'curle_ldap_cannot_bind',
+ 'curle_ldap_invalid_url',
+ 'curle_ldap_search_failed',
+ 'curle_library_not_found',
+ 'curle_login_denied',
+ 'curle_malformat_user',
+ 'curle_obsolete',
+ 'curle_ok',
+ 'curle_operation_timeouted',
+ 'curle_out_of_memory',
+ 'curle_partial_file',
+ 'curle_read_error',
+ 'curle_recv_error',
+ 'curle_send_error',
+ 'curle_send_fail_rewind',
+ 'curle_share_in_use',
+ 'curle_ssl_cacert',
+ 'curle_ssl_certproblem',
+ 'curle_ssl_cipher',
+ 'curle_ssl_connect_error',
+ 'curle_ssl_engine_initfailed',
+ 'curle_ssl_engine_notfound',
+ 'curle_ssl_engine_setfailed',
+ 'curle_ssl_peer_certificate',
+ 'curle_telnet_option_syntax',
+ 'curle_too_many_redirects',
+ 'curle_unknown_telnet_option',
+ 'curle_unsupported_protocol',
+ 'curle_url_malformat_user',
+ 'curle_url_malformat',
+ 'curle_write_error',
+ 'curlftpauth_default',
+ 'curlftpauth_ssl',
+ 'curlftpauth_tls',
+ 'curlftpssl_all',
+ 'curlftpssl_control',
+ 'curlftpssl_last',
+ 'curlftpssl_none',
+ 'curlftpssl_try',
+ 'curlinfo_connect_time',
+ 'curlinfo_content_length_download',
+ 'curlinfo_content_length_upload',
+ 'curlinfo_content_type',
+ 'curlinfo_effective_url',
+ 'curlinfo_filetime',
+ 'curlinfo_header_size',
+ 'curlinfo_http_connectcode',
+ 'curlinfo_httpauth_avail',
+ 'curlinfo_namelookup_time',
+ 'curlinfo_num_connects',
+ 'curlinfo_os_errno',
+ 'curlinfo_pretransfer_time',
+ 'curlinfo_proxyauth_avail',
+ 'curlinfo_redirect_count',
+ 'curlinfo_redirect_time',
+ 'curlinfo_request_size',
+ 'curlinfo_response_code',
+ 'curlinfo_size_download',
+ 'curlinfo_size_upload',
+ 'curlinfo_speed_download',
+ 'curlinfo_speed_upload',
+ 'curlinfo_ssl_engines',
+ 'curlinfo_ssl_verifyresult',
+ 'curlinfo_starttransfer_time',
+ 'curlinfo_total_time',
+ 'curlmsg_done',
+ 'curlopt_autoreferer',
+ 'curlopt_buffersize',
+ 'curlopt_cainfo',
+ 'curlopt_capath',
+ 'curlopt_connecttimeout',
+ 'curlopt_cookie',
+ 'curlopt_cookiefile',
+ 'curlopt_cookiejar',
+ 'curlopt_cookiesession',
+ 'curlopt_crlf',
+ 'curlopt_customrequest',
+ 'curlopt_dns_use_global_cache',
+ 'curlopt_egdsocket',
+ 'curlopt_encoding',
+ 'curlopt_failonerror',
+ 'curlopt_filetime',
+ 'curlopt_followlocation',
+ 'curlopt_forbid_reuse',
+ 'curlopt_fresh_connect',
+ 'curlopt_ftp_account',
+ 'curlopt_ftp_create_missing_dirs',
+ 'curlopt_ftp_response_timeout',
+ 'curlopt_ftp_ssl',
+ 'curlopt_ftp_use_eprt',
+ 'curlopt_ftp_use_epsv',
+ 'curlopt_ftpappend',
+ 'curlopt_ftplistonly',
+ 'curlopt_ftpport',
+ 'curlopt_ftpsslauth',
+ 'curlopt_header',
+ 'curlopt_http_version',
+ 'curlopt_http200aliases',
+ 'curlopt_httpauth',
+ 'curlopt_httpget',
+ 'curlopt_httpheader',
+ 'curlopt_httppost',
+ 'curlopt_httpproxytunnel',
+ 'curlopt_infilesize_large',
+ 'curlopt_infilesize',
+ 'curlopt_interface',
+ 'curlopt_ipresolve',
+ 'curlopt_krb4level',
+ 'curlopt_low_speed_limit',
+ 'curlopt_low_speed_time',
+ 'curlopt_mail_from',
+ 'curlopt_mail_rcpt',
+ 'curlopt_maxconnects',
+ 'curlopt_maxfilesize_large',
+ 'curlopt_maxfilesize',
+ 'curlopt_maxredirs',
+ 'curlopt_netrc_file',
+ 'curlopt_netrc',
+ 'curlopt_nobody',
+ 'curlopt_noprogress',
+ 'curlopt_port',
+ 'curlopt_post',
+ 'curlopt_postfields',
+ 'curlopt_postfieldsize_large',
+ 'curlopt_postfieldsize',
+ 'curlopt_postquote',
+ 'curlopt_prequote',
+ 'curlopt_proxy',
+ 'curlopt_proxyauth',
+ 'curlopt_proxyport',
+ 'curlopt_proxytype',
+ 'curlopt_proxyuserpwd',
+ 'curlopt_put',
+ 'curlopt_quote',
+ 'curlopt_random_file',
+ 'curlopt_range',
+ 'curlopt_readdata',
+ 'curlopt_referer',
+ 'curlopt_resume_from_large',
+ 'curlopt_resume_from',
+ 'curlopt_ssl_cipher_list',
+ 'curlopt_ssl_verifyhost',
+ 'curlopt_ssl_verifypeer',
+ 'curlopt_sslcert',
+ 'curlopt_sslcerttype',
+ 'curlopt_sslengine_default',
+ 'curlopt_sslengine',
+ 'curlopt_sslkey',
+ 'curlopt_sslkeypasswd',
+ 'curlopt_sslkeytype',
+ 'curlopt_sslversion',
+ 'curlopt_tcp_nodelay',
+ 'curlopt_timecondition',
+ 'curlopt_timeout',
+ 'curlopt_timevalue',
+ 'curlopt_transfertext',
+ 'curlopt_unrestricted_auth',
+ 'curlopt_upload',
+ 'curlopt_url',
+ 'curlopt_use_ssl',
+ 'curlopt_useragent',
+ 'curlopt_userpwd',
+ 'curlopt_verbose',
+ 'curlopt_writedata',
+ 'curlproxy_http',
+ 'curlproxy_socks4',
+ 'curlproxy_socks5',
+ 'database_adddefaultsqlitehost',
+ 'database_database',
+ 'database_initialize',
+ 'database_name',
+ 'database_qs',
+ 'database_table_database_tables',
+ 'database_table_datasource_databases',
+ 'database_table_datasource_hosts',
+ 'database_table_datasources',
+ 'database_table_table_fields',
+ 'database_util_cleanpath',
+ 'dbgp_stop_stack_name',
+ 'debugging_break',
+ 'debugging_breakpoint_get',
+ 'debugging_breakpoint_list',
+ 'debugging_breakpoint_remove',
+ 'debugging_breakpoint_set',
+ 'debugging_breakpoint_update',
+ 'debugging_context_locals',
+ 'debugging_context_self',
+ 'debugging_context_vars',
+ 'debugging_detach',
+ 'debugging_enabled',
+ 'debugging_get_context',
+ 'debugging_get_stack',
+ 'debugging_run',
+ 'debugging_step_in',
+ 'debugging_step_out',
+ 'debugging_step_over',
+ 'debugging_stop',
+ 'debugging_terminate',
+ 'decimal_random',
+ 'decompress',
+ 'decrypt_blowfish',
+ 'define_atbegin',
+ 'define_atend',
+ 'dns_default',
+ 'dns_lookup',
+ 'document',
+ 'email_attachment_mime_type',
+ 'email_batch',
+ 'email_digestchallenge',
+ 'email_digestresponse',
+ 'email_extract',
+ 'email_findemails',
+ 'email_fix_address_list',
+ 'email_fix_address',
+ 'email_fs_error_clean',
+ 'email_immediate',
+ 'email_initialize',
+ 'email_merge',
+ 'email_mxlookup',
+ 'email_pop_priv_extract',
+ 'email_pop_priv_quote',
+ 'email_pop_priv_substring',
+ 'email_queue',
+ 'email_result',
+ 'email_safeemail',
+ 'email_send',
+ 'email_status',
+ 'email_token',
+ 'email_translatebreakstocrlf',
+ 'encode_qheader',
+ 'encoding_iso88591',
+ 'encoding_utf8',
+ 'encrypt_blowfish',
+ 'encrypt_crammd5',
+ 'encrypt_hmac',
+ 'encrypt_md5',
+ 'eol',
+ 'eq',
+ 'error_code_aborted',
+ 'error_code_dividebyzero',
+ 'error_code_filenotfound',
+ 'error_code_invalidparameter',
+ 'error_code_methodnotfound',
+ 'error_code_networkerror',
+ 'error_code_noerror',
+ 'error_code_resnotfound',
+ 'error_code_runtimeassertion',
+ 'error_code',
+ 'error_msg_aborted',
+ 'error_msg_dividebyzero',
+ 'error_msg_filenotfound',
+ 'error_msg_invalidparameter',
+ 'error_msg_methodnotfound',
+ 'error_msg_networkerror',
+ 'error_msg_noerror',
+ 'error_msg_resnotfound',
+ 'error_msg_runtimeassertion',
+ 'error_msg',
+ 'error_obj',
+ 'error_pop',
+ 'error_push',
+ 'error_reset',
+ 'error_stack',
+ 'escape_tag',
+ 'evdns_resolve_ipv4',
+ 'evdns_resolve_ipv6',
+ 'evdns_resolve_reverse_ipv6',
+ 'evdns_resolve_reverse',
+ 'ew',
+ 'fail_if',
+ 'fail_ifnot',
+ 'fail_now',
+ 'fail',
+ 'failure_clear',
+ 'fastcgi_createfcgirequest',
+ 'fastcgi_handlecon',
+ 'fastcgi_handlereq',
+ 'fastcgi_initialize',
+ 'fastcgi_initiate_request',
+ 'fcgi_abort_request',
+ 'fcgi_authorize',
+ 'fcgi_begin_request',
+ 'fcgi_bodychunksize',
+ 'fcgi_cant_mpx_conn',
+ 'fcgi_data',
+ 'fcgi_end_request',
+ 'fcgi_filter',
+ 'fcgi_get_values_result',
+ 'fcgi_get_values',
+ 'fcgi_keep_conn',
+ 'fcgi_makeendrequestbody',
+ 'fcgi_makestdoutbody',
+ 'fcgi_max_conns',
+ 'fcgi_max_reqs',
+ 'fcgi_mpxs_conns',
+ 'fcgi_null_request_id',
+ 'fcgi_overloaded',
+ 'fcgi_params',
+ 'fcgi_read_timeout_seconds',
+ 'fcgi_readparam',
+ 'fcgi_request_complete',
+ 'fcgi_responder',
+ 'fcgi_stderr',
+ 'fcgi_stdin',
+ 'fcgi_stdout',
+ 'fcgi_unknown_role',
+ 'fcgi_unknown_type',
+ 'fcgi_version_1',
+ 'fcgi_x_stdin',
+ 'field_name',
+ 'field_names',
+ 'field',
+ 'file_copybuffersize',
+ 'file_defaultencoding',
+ 'file_forceroot',
+ 'file_modechar',
+ 'file_modeline',
+ 'file_stderr',
+ 'file_stdin',
+ 'file_stdout',
+ 'file_tempfile',
+ 'filemakerds_initialize',
+ 'filemakerds',
+ 'found_count',
+ 'ft',
+ 'ftp_deletefile',
+ 'ftp_getdata',
+ 'ftp_getfile',
+ 'ftp_getlisting',
+ 'ftp_putdata',
+ 'ftp_putfile',
+ 'full',
+ 'generateforeach',
+ 'gt',
+ 'gte',
+ 'handle_failure',
+ 'handle',
+ 'hash_primes',
+ 'html_comment',
+ 'http_char_colon',
+ 'http_char_cr',
+ 'http_char_htab',
+ 'http_char_lf',
+ 'http_char_question',
+ 'http_char_space',
+ 'http_default_files',
+ 'http_read_headers',
+ 'http_read_timeout_secs',
+ 'http_server_apps_path',
+ 'http_server_request_logger',
+ 'if_empty',
+ 'if_false',
+ 'if_null',
+ 'if_true',
+ 'include_cache_compare',
+ 'include_currentpath',
+ 'include_filepath',
+ 'include_localpath',
+ 'include_once',
+ 'include_path',
+ 'include_raw',
+ 'include_url',
+ 'include',
+ 'includes',
+ 'inline_colinfo_name_pos',
+ 'inline_colinfo_type_pos',
+ 'inline_colinfo_valuelist_pos',
+ 'inline_columninfo_pos',
+ 'inline_foundcount_pos',
+ 'inline_namedget',
+ 'inline_namedput',
+ 'inline_resultrows_pos',
+ 'inline_scopeget',
+ 'inline_scopepop',
+ 'inline_scopepush',
+ 'inline',
+ 'integer_bitor',
+ 'integer_random',
+ 'io_dir_dt_blk',
+ 'io_dir_dt_chr',
+ 'io_dir_dt_dir',
+ 'io_dir_dt_fifo',
+ 'io_dir_dt_lnk',
+ 'io_dir_dt_reg',
+ 'io_dir_dt_sock',
+ 'io_dir_dt_unknown',
+ 'io_dir_dt_wht',
+ 'io_file_access',
+ 'io_file_chdir',
+ 'io_file_chmod',
+ 'io_file_chown',
+ 'io_file_dirname',
+ 'io_file_f_dupfd',
+ 'io_file_f_getfd',
+ 'io_file_f_getfl',
+ 'io_file_f_getlk',
+ 'io_file_f_rdlck',
+ 'io_file_f_setfd',
+ 'io_file_f_setfl',
+ 'io_file_f_setlk',
+ 'io_file_f_setlkw',
+ 'io_file_f_test',
+ 'io_file_f_tlock',
+ 'io_file_f_ulock',
+ 'io_file_f_unlck',
+ 'io_file_f_wrlck',
+ 'io_file_fd_cloexec',
+ 'io_file_fioasync',
+ 'io_file_fioclex',
+ 'io_file_fiodtype',
+ 'io_file_fiogetown',
+ 'io_file_fionbio',
+ 'io_file_fionclex',
+ 'io_file_fionread',
+ 'io_file_fiosetown',
+ 'io_file_getcwd',
+ 'io_file_lchown',
+ 'io_file_link',
+ 'io_file_lockf',
+ 'io_file_lstat_atime',
+ 'io_file_lstat_mode',
+ 'io_file_lstat_mtime',
+ 'io_file_lstat_size',
+ 'io_file_mkdir',
+ 'io_file_mkfifo',
+ 'io_file_mkstemp',
+ 'io_file_o_append',
+ 'io_file_o_async',
+ 'io_file_o_creat',
+ 'io_file_o_excl',
+ 'io_file_o_exlock',
+ 'io_file_o_fsync',
+ 'io_file_o_nofollow',
+ 'io_file_o_nonblock',
+ 'io_file_o_rdonly',
+ 'io_file_o_rdwr',
+ 'io_file_o_shlock',
+ 'io_file_o_sync',
+ 'io_file_o_trunc',
+ 'io_file_o_wronly',
+ 'io_file_pipe',
+ 'io_file_readlink',
+ 'io_file_realpath',
+ 'io_file_remove',
+ 'io_file_rename',
+ 'io_file_rmdir',
+ 'io_file_s_ifblk',
+ 'io_file_s_ifchr',
+ 'io_file_s_ifdir',
+ 'io_file_s_ififo',
+ 'io_file_s_iflnk',
+ 'io_file_s_ifmt',
+ 'io_file_s_ifreg',
+ 'io_file_s_ifsock',
+ 'io_file_s_irgrp',
+ 'io_file_s_iroth',
+ 'io_file_s_irusr',
+ 'io_file_s_irwxg',
+ 'io_file_s_irwxo',
+ 'io_file_s_irwxu',
+ 'io_file_s_isgid',
+ 'io_file_s_isuid',
+ 'io_file_s_isvtx',
+ 'io_file_s_iwgrp',
+ 'io_file_s_iwoth',
+ 'io_file_s_iwusr',
+ 'io_file_s_ixgrp',
+ 'io_file_s_ixoth',
+ 'io_file_s_ixusr',
+ 'io_file_seek_cur',
+ 'io_file_seek_end',
+ 'io_file_seek_set',
+ 'io_file_stat_atime',
+ 'io_file_stat_mode',
+ 'io_file_stat_mtime',
+ 'io_file_stat_size',
+ 'io_file_stderr',
+ 'io_file_stdin',
+ 'io_file_stdout',
+ 'io_file_symlink',
+ 'io_file_tempnam',
+ 'io_file_truncate',
+ 'io_file_umask',
+ 'io_file_unlink',
+ 'io_net_accept',
+ 'io_net_af_inet',
+ 'io_net_af_inet6',
+ 'io_net_af_unix',
+ 'io_net_bind',
+ 'io_net_connect',
+ 'io_net_getpeername',
+ 'io_net_getsockname',
+ 'io_net_ipproto_ip',
+ 'io_net_ipproto_udp',
+ 'io_net_listen',
+ 'io_net_msg_oob',
+ 'io_net_msg_peek',
+ 'io_net_msg_waitall',
+ 'io_net_recv',
+ 'io_net_recvfrom',
+ 'io_net_send',
+ 'io_net_sendto',
+ 'io_net_shut_rd',
+ 'io_net_shut_rdwr',
+ 'io_net_shut_wr',
+ 'io_net_shutdown',
+ 'io_net_so_acceptconn',
+ 'io_net_so_broadcast',
+ 'io_net_so_debug',
+ 'io_net_so_dontroute',
+ 'io_net_so_error',
+ 'io_net_so_keepalive',
+ 'io_net_so_linger',
+ 'io_net_so_oobinline',
+ 'io_net_so_rcvbuf',
+ 'io_net_so_rcvlowat',
+ 'io_net_so_rcvtimeo',
+ 'io_net_so_reuseaddr',
+ 'io_net_so_sndbuf',
+ 'io_net_so_sndlowat',
+ 'io_net_so_sndtimeo',
+ 'io_net_so_timestamp',
+ 'io_net_so_type',
+ 'io_net_so_useloopback',
+ 'io_net_sock_dgram',
+ 'io_net_sock_raw',
+ 'io_net_sock_rdm',
+ 'io_net_sock_seqpacket',
+ 'io_net_sock_stream',
+ 'io_net_socket',
+ 'io_net_sol_socket',
+ 'io_net_ssl_accept',
+ 'io_net_ssl_begin',
+ 'io_net_ssl_connect',
+ 'io_net_ssl_end',
+ 'io_net_ssl_error',
+ 'io_net_ssl_errorstring',
+ 'io_net_ssl_funcerrorstring',
+ 'io_net_ssl_liberrorstring',
+ 'io_net_ssl_read',
+ 'io_net_ssl_reasonerrorstring',
+ 'io_net_ssl_setacceptstate',
+ 'io_net_ssl_setconnectstate',
+ 'io_net_ssl_setverifylocations',
+ 'io_net_ssl_shutdown',
+ 'io_net_ssl_usecertificatechainfile',
+ 'io_net_ssl_useprivatekeyfile',
+ 'io_net_ssl_write',
+ 'java_jvm_create',
+ 'java_jvm_getenv',
+ 'jdbc_initialize',
+ 'json_back_slash',
+ 'json_back_space',
+ 'json_close_array',
+ 'json_close_object',
+ 'json_colon',
+ 'json_comma',
+ 'json_consume_array',
+ 'json_consume_object',
+ 'json_consume_string',
+ 'json_consume_token',
+ 'json_cr',
+ 'json_debug',
+ 'json_deserialize',
+ 'json_e_lower',
+ 'json_e_upper',
+ 'json_f_lower',
+ 'json_form_feed',
+ 'json_forward_slash',
+ 'json_lf',
+ 'json_n_lower',
+ 'json_negative',
+ 'json_open_array',
+ 'json_open_object',
+ 'json_period',
'json_positive',
- 'json_quote_double',
- 'json_rpccall',
- 'json_serialize',
- 'json_t_lower',
- 'json_tab',
- 'json_white_space',
- 'keycolumn_name',
- 'keycolumn_value',
- 'keyfield_name',
- 'keyfield_value',
- 'lasso_currentaction',
- 'lasso_errorreporting',
- 'lasso_executiontimelimit',
- 'lasso_methodexists',
- 'lasso_tagexists',
- 'lasso_uniqueid',
- 'lasso_version',
- 'lassoapp_current_app',
- 'lassoapp_current_include',
- 'lassoapp_do_with_include',
- 'lassoapp_exists',
- 'lassoapp_find_missing_file',
- 'lassoapp_format_mod_date',
- 'lassoapp_get_capabilities_name',
- 'lassoapp_include_current',
- 'lassoapp_include',
- 'lassoapp_initialize_db',
- 'lassoapp_initialize',
- 'lassoapp_invoke_resource',
- 'lassoapp_issourcefileextension',
- 'lassoapp_link',
- 'lassoapp_load_module',
- 'lassoapp_mime_get',
- 'lassoapp_mime_type_appcache',
- 'lassoapp_mime_type_css',
- 'lassoapp_mime_type_csv',
- 'lassoapp_mime_type_doc',
- 'lassoapp_mime_type_docx',
- 'lassoapp_mime_type_eof',
- 'lassoapp_mime_type_eot',
- 'lassoapp_mime_type_gif',
- 'lassoapp_mime_type_html',
- 'lassoapp_mime_type_ico',
- 'lassoapp_mime_type_jpg',
- 'lassoapp_mime_type_js',
- 'lassoapp_mime_type_lasso',
- 'lassoapp_mime_type_map',
- 'lassoapp_mime_type_pdf',
- 'lassoapp_mime_type_png',
- 'lassoapp_mime_type_ppt',
- 'lassoapp_mime_type_rss',
- 'lassoapp_mime_type_svg',
- 'lassoapp_mime_type_swf',
- 'lassoapp_mime_type_tif',
- 'lassoapp_mime_type_ttf',
- 'lassoapp_mime_type_txt',
- 'lassoapp_mime_type_woff',
- 'lassoapp_mime_type_xaml',
- 'lassoapp_mime_type_xap',
- 'lassoapp_mime_type_xbap',
- 'lassoapp_mime_type_xhr',
- 'lassoapp_mime_type_xml',
- 'lassoapp_mime_type_zip',
- 'lassoapp_path_to_method_name',
- 'lassoapp_settingsdb',
- 'layout_name',
- 'lcapi_datasourceadd',
- 'lcapi_datasourcecloseconnection',
- 'lcapi_datasourcedelete',
- 'lcapi_datasourceduplicate',
- 'lcapi_datasourceexecsql',
- 'lcapi_datasourcefindall',
- 'lcapi_datasourceimage',
- 'lcapi_datasourceinfo',
- 'lcapi_datasourceinit',
- 'lcapi_datasourcematchesname',
- 'lcapi_datasourcenames',
- 'lcapi_datasourcenothing',
- 'lcapi_datasourceopand',
- 'lcapi_datasourceopany',
- 'lcapi_datasourceopbw',
- 'lcapi_datasourceopct',
- 'lcapi_datasourceopeq',
- 'lcapi_datasourceopew',
- 'lcapi_datasourceopft',
- 'lcapi_datasourceopgt',
- 'lcapi_datasourceopgteq',
- 'lcapi_datasourceopin',
- 'lcapi_datasourceoplt',
- 'lcapi_datasourceoplteq',
- 'lcapi_datasourceopnbw',
- 'lcapi_datasourceopnct',
- 'lcapi_datasourceopneq',
- 'lcapi_datasourceopnew',
- 'lcapi_datasourceopnin',
- 'lcapi_datasourceopno',
- 'lcapi_datasourceopnot',
- 'lcapi_datasourceopnrx',
- 'lcapi_datasourceopor',
- 'lcapi_datasourceoprx',
- 'lcapi_datasourcepreparesql',
- 'lcapi_datasourceprotectionnone',
- 'lcapi_datasourceprotectionreadonly',
- 'lcapi_datasourcerandom',
- 'lcapi_datasourceschemanames',
- 'lcapi_datasourcescripts',
- 'lcapi_datasourcesearch',
- 'lcapi_datasourcesortascending',
- 'lcapi_datasourcesortcustom',
- 'lcapi_datasourcesortdescending',
- 'lcapi_datasourcetablenames',
- 'lcapi_datasourceterm',
- 'lcapi_datasourcetickle',
- 'lcapi_datasourcetypeblob',
- 'lcapi_datasourcetypeboolean',
- 'lcapi_datasourcetypedate',
- 'lcapi_datasourcetypedecimal',
- 'lcapi_datasourcetypeinteger',
- 'lcapi_datasourcetypestring',
- 'lcapi_datasourceunpreparesql',
- 'lcapi_datasourceupdate',
- 'lcapi_fourchartointeger',
- 'lcapi_listdatasources',
- 'lcapi_loadmodule',
- 'lcapi_loadmodules',
- 'lcapi_updatedatasourceslist',
- 'ldap_scope_base',
+ 'json_quote_double',
+ 'json_rpccall',
+ 'json_serialize',
+ 'json_t_lower',
+ 'json_tab',
+ 'json_white_space',
+ 'keycolumn_name',
+ 'keycolumn_value',
+ 'keyfield_name',
+ 'keyfield_value',
+ 'lasso_currentaction',
+ 'lasso_errorreporting',
+ 'lasso_executiontimelimit',
+ 'lasso_methodexists',
+ 'lasso_tagexists',
+ 'lasso_uniqueid',
+ 'lasso_version',
+ 'lassoapp_current_app',
+ 'lassoapp_current_include',
+ 'lassoapp_do_with_include',
+ 'lassoapp_exists',
+ 'lassoapp_find_missing_file',
+ 'lassoapp_format_mod_date',
+ 'lassoapp_get_capabilities_name',
+ 'lassoapp_include_current',
+ 'lassoapp_include',
+ 'lassoapp_initialize_db',
+ 'lassoapp_initialize',
+ 'lassoapp_invoke_resource',
+ 'lassoapp_issourcefileextension',
+ 'lassoapp_link',
+ 'lassoapp_load_module',
+ 'lassoapp_mime_get',
+ 'lassoapp_mime_type_appcache',
+ 'lassoapp_mime_type_css',
+ 'lassoapp_mime_type_csv',
+ 'lassoapp_mime_type_doc',
+ 'lassoapp_mime_type_docx',
+ 'lassoapp_mime_type_eof',
+ 'lassoapp_mime_type_eot',
+ 'lassoapp_mime_type_gif',
+ 'lassoapp_mime_type_html',
+ 'lassoapp_mime_type_ico',
+ 'lassoapp_mime_type_jpg',
+ 'lassoapp_mime_type_js',
+ 'lassoapp_mime_type_lasso',
+ 'lassoapp_mime_type_map',
+ 'lassoapp_mime_type_pdf',
+ 'lassoapp_mime_type_png',
+ 'lassoapp_mime_type_ppt',
+ 'lassoapp_mime_type_rss',
+ 'lassoapp_mime_type_svg',
+ 'lassoapp_mime_type_swf',
+ 'lassoapp_mime_type_tif',
+ 'lassoapp_mime_type_ttf',
+ 'lassoapp_mime_type_txt',
+ 'lassoapp_mime_type_woff',
+ 'lassoapp_mime_type_xaml',
+ 'lassoapp_mime_type_xap',
+ 'lassoapp_mime_type_xbap',
+ 'lassoapp_mime_type_xhr',
+ 'lassoapp_mime_type_xml',
+ 'lassoapp_mime_type_zip',
+ 'lassoapp_path_to_method_name',
+ 'lassoapp_settingsdb',
+ 'layout_name',
+ 'lcapi_datasourceadd',
+ 'lcapi_datasourcecloseconnection',
+ 'lcapi_datasourcedelete',
+ 'lcapi_datasourceduplicate',
+ 'lcapi_datasourceexecsql',
+ 'lcapi_datasourcefindall',
+ 'lcapi_datasourceimage',
+ 'lcapi_datasourceinfo',
+ 'lcapi_datasourceinit',
+ 'lcapi_datasourcematchesname',
+ 'lcapi_datasourcenames',
+ 'lcapi_datasourcenothing',
+ 'lcapi_datasourceopand',
+ 'lcapi_datasourceopany',
+ 'lcapi_datasourceopbw',
+ 'lcapi_datasourceopct',
+ 'lcapi_datasourceopeq',
+ 'lcapi_datasourceopew',
+ 'lcapi_datasourceopft',
+ 'lcapi_datasourceopgt',
+ 'lcapi_datasourceopgteq',
+ 'lcapi_datasourceopin',
+ 'lcapi_datasourceoplt',
+ 'lcapi_datasourceoplteq',
+ 'lcapi_datasourceopnbw',
+ 'lcapi_datasourceopnct',
+ 'lcapi_datasourceopneq',
+ 'lcapi_datasourceopnew',
+ 'lcapi_datasourceopnin',
+ 'lcapi_datasourceopno',
+ 'lcapi_datasourceopnot',
+ 'lcapi_datasourceopnrx',
+ 'lcapi_datasourceopor',
+ 'lcapi_datasourceoprx',
+ 'lcapi_datasourcepreparesql',
+ 'lcapi_datasourceprotectionnone',
+ 'lcapi_datasourceprotectionreadonly',
+ 'lcapi_datasourcerandom',
+ 'lcapi_datasourceschemanames',
+ 'lcapi_datasourcescripts',
+ 'lcapi_datasourcesearch',
+ 'lcapi_datasourcesortascending',
+ 'lcapi_datasourcesortcustom',
+ 'lcapi_datasourcesortdescending',
+ 'lcapi_datasourcetablenames',
+ 'lcapi_datasourceterm',
+ 'lcapi_datasourcetickle',
+ 'lcapi_datasourcetypeblob',
+ 'lcapi_datasourcetypeboolean',
+ 'lcapi_datasourcetypedate',
+ 'lcapi_datasourcetypedecimal',
+ 'lcapi_datasourcetypeinteger',
+ 'lcapi_datasourcetypestring',
+ 'lcapi_datasourceunpreparesql',
+ 'lcapi_datasourceupdate',
+ 'lcapi_fourchartointeger',
+ 'lcapi_listdatasources',
+ 'lcapi_loadmodule',
+ 'lcapi_loadmodules',
+ 'lcapi_updatedatasourceslist',
+ 'ldap_scope_base',
'ldap_scope_children',
- 'ldap_scope_onelevel',
- 'ldap_scope_subtree',
- 'library_once',
- 'library',
- 'ljapi_initialize',
- 'locale_availablelocales',
- 'locale_canada',
- 'locale_canadafrench',
- 'locale_china',
- 'locale_chinese',
- 'locale_default',
- 'locale_english',
- 'locale_format_style_date_time',
- 'locale_format_style_default',
- 'locale_format_style_full',
- 'locale_format_style_long',
- 'locale_format_style_medium',
- 'locale_format_style_none',
- 'locale_format_style_short',
- 'locale_format',
- 'locale_france',
- 'locale_french',
- 'locale_german',
- 'locale_germany',
- 'locale_isocountries',
- 'locale_isolanguages',
- 'locale_italian',
- 'locale_italy',
- 'locale_japan',
- 'locale_japanese',
- 'locale_korea',
- 'locale_korean',
- 'locale_prc',
- 'locale_setdefault',
- 'locale_simplifiedchinese',
- 'locale_taiwan',
- 'locale_traditionalchinese',
- 'locale_uk',
- 'locale_us',
- 'log_always',
- 'log_critical',
- 'log_deprecated',
- 'log_destination_console',
- 'log_destination_database',
- 'log_destination_file',
- 'log_detail',
- 'log_initialize',
- 'log_level_critical',
- 'log_level_deprecated',
- 'log_level_detail',
- 'log_level_sql',
- 'log_level_warning',
- 'log_max_file_size',
- 'log_setdestination',
- 'log_sql',
- 'log_trim_file_size',
- 'log_warning',
- 'log',
- 'loop_abort',
- 'loop_continue',
- 'loop_count',
- 'loop_key_pop',
- 'loop_key_push',
- 'loop_key',
- 'loop_pop',
- 'loop_push',
- 'loop_value_pop',
- 'loop_value_push',
- 'loop_value',
- 'loop',
- 'lt',
- 'lte',
- 'main_thread_only',
- 'max',
- 'maxrecords_value',
- 'median',
- 'method_name',
- 'micros',
- 'millis',
- 'min',
- 'minimal',
- 'mongo_insert_continue_on_error',
- 'mongo_insert_no_validate',
- 'mongo_insert_none',
- 'mongo_query_await_data',
- 'mongo_query_exhaust',
- 'mongo_query_no_cursor_timeout',
- 'mongo_query_none',
- 'mongo_query_oplog_replay',
- 'mongo_query_partial',
- 'mongo_query_slave_ok',
- 'mongo_query_tailable_cursor',
- 'mongo_remove_none',
- 'mongo_remove_single_remove',
- 'mongo_update_multi_update',
- 'mongo_update_no_validate',
- 'mongo_update_none',
- 'mongo_update_upsert',
- 'mustache_compile_file',
- 'mustache_compile_string',
- 'mustache_include',
- 'mysqlds',
- 'namespace_global',
- 'namespace_import',
- 'namespace_using',
- 'nbw',
- 'ncn',
- 'neq',
- 'net_connectinprogress',
- 'net_connectok',
- 'net_typessl',
- 'net_typessltcp',
- 'net_typessludp',
- 'net_typetcp',
- 'net_typeudp',
- 'net_waitread',
- 'net_waittimeout',
- 'net_waitwrite',
- 'new',
- 'none',
- 'nrx',
- 'nslookup',
- 'odbc_session_driver_mssql',
- 'odbc',
- 'output_none',
- 'output',
- 'pdf_package',
- 'pdf_rectangle',
- 'pdf_serve',
- 'pi',
- 'portal',
- 'postgresql',
- 'process',
- 'protect_now',
- 'protect',
- 'queriable_average',
- 'queriable_defaultcompare',
- 'queriable_do',
- 'queriable_internal_combinebindings',
- 'queriable_max',
- 'queriable_min',
- 'queriable_qsort',
- 'queriable_reversecompare',
- 'queriable_sum',
- 'random_seed',
- 'range',
- 'records_array',
- 'records_map',
- 'records',
- 'redirect_url',
- 'referer_url',
- 'referrer_url',
- 'register_thread',
- 'register',
- 'response_filepath',
- 'response_localpath',
- 'response_path',
- 'response_realm',
- 'response_root',
- 'resultset_count',
- 'resultset',
- 'resultsets',
- 'rows_array',
- 'rows_impl',
- 'rows',
- 'rx',
- 'schema_name',
- 'security_database',
- 'security_default_realm',
- 'security_initialize',
- 'security_table_groups',
- 'security_table_ug_map',
- 'security_table_users',
- 'selected',
- 'series',
- 'server_admin',
- 'server_ip',
- 'server_name',
- 'server_port',
- 'server_protocol',
- 'server_push',
- 'server_signature',
- 'server_software',
- 'session_abort',
- 'session_addvar',
- 'session_decorate',
- 'session_deleteexpired',
- 'session_end',
- 'session_getdefaultdriver',
- 'session_id',
- 'session_initialize',
- 'session_removevar',
- 'session_result',
- 'session_setdefaultdriver',
- 'session_start',
- 'shown_count',
- 'shown_first',
- 'shown_last',
- 'site_id',
- 'site_name',
- 'skiprecords_value',
- 'sleep',
- 'split_thread',
- 'sqlite_abort',
- 'sqlite_auth',
- 'sqlite_blob',
- 'sqlite_busy',
- 'sqlite_cantopen',
- 'sqlite_constraint',
- 'sqlite_corrupt',
- 'sqlite_createdb',
- 'sqlite_done',
- 'sqlite_empty',
- 'sqlite_error',
- 'sqlite_float',
- 'sqlite_format',
- 'sqlite_full',
- 'sqlite_integer',
- 'sqlite_internal',
- 'sqlite_interrupt',
- 'sqlite_ioerr',
- 'sqlite_locked',
- 'sqlite_mismatch',
- 'sqlite_misuse',
- 'sqlite_nolfs',
- 'sqlite_nomem',
- 'sqlite_notadb',
- 'sqlite_notfound',
- 'sqlite_null',
- 'sqlite_ok',
- 'sqlite_perm',
- 'sqlite_protocol',
- 'sqlite_range',
- 'sqlite_readonly',
- 'sqlite_row',
- 'sqlite_schema',
- 'sqlite_setsleepmillis',
- 'sqlite_setsleeptries',
- 'sqlite_text',
- 'sqlite_toobig',
- 'sqliteconnector',
- 'staticarray_join',
- 'stdout',
- 'stdoutnl',
- 'string_validcharset',
- 'suspend',
- 'sys_appspath',
- 'sys_chroot',
- 'sys_clock',
- 'sys_clockspersec',
- 'sys_credits',
- 'sys_databasespath',
- 'sys_detach_exec',
- 'sys_difftime',
- 'sys_dll_ext',
- 'sys_drand48',
- 'sys_environ',
- 'sys_eol',
- 'sys_erand48',
- 'sys_errno',
- 'sys_exec_pid_to_os_pid',
- 'sys_exec',
- 'sys_exit',
- 'sys_fork',
- 'sys_garbagecollect',
- 'sys_getbytessincegc',
- 'sys_getchar',
- 'sys_getegid',
- 'sys_getenv',
- 'sys_geteuid',
- 'sys_getgid',
- 'sys_getgrnam',
- 'sys_getheapfreebytes',
- 'sys_getheapsize',
- 'sys_getlogin',
- 'sys_getpid',
- 'sys_getppid',
- 'sys_getpwnam',
- 'sys_getpwuid',
- 'sys_getstartclock',
- 'sys_getthreadcount',
- 'sys_getuid',
- 'sys_growheapby',
- 'sys_homepath',
- 'sys_is_full_path',
- 'sys_is_windows',
- 'sys_isfullpath',
- 'sys_iswindows',
- 'sys_iterate',
- 'sys_jrand48',
- 'sys_kill_exec',
- 'sys_kill',
- 'sys_lcong48',
- 'sys_librariespath',
- 'sys_listtraits',
- 'sys_listtypes',
- 'sys_listunboundmethods',
- 'sys_loadlibrary',
- 'sys_lrand48',
- 'sys_masterhomepath',
- 'sys_mrand48',
- 'sys_nrand48',
- 'sys_pid_exec',
- 'sys_pointersize',
- 'sys_rand',
- 'sys_random',
- 'sys_seed48',
- 'sys_setenv',
- 'sys_setgid',
- 'sys_setsid',
- 'sys_setuid',
- 'sys_sigabrt',
- 'sys_sigalrm',
- 'sys_sigbus',
- 'sys_sigchld',
- 'sys_sigcont',
- 'sys_sigfpe',
- 'sys_sighup',
- 'sys_sigill',
- 'sys_sigint',
- 'sys_sigkill',
- 'sys_sigpipe',
- 'sys_sigprof',
- 'sys_sigquit',
- 'sys_sigsegv',
- 'sys_sigstop',
- 'sys_sigsys',
- 'sys_sigterm',
- 'sys_sigtrap',
- 'sys_sigtstp',
- 'sys_sigttin',
- 'sys_sigttou',
- 'sys_sigurg',
- 'sys_sigusr1',
- 'sys_sigusr2',
- 'sys_sigvtalrm',
- 'sys_sigxcpu',
- 'sys_sigxfsz',
- 'sys_srand',
- 'sys_srand48',
- 'sys_srandom',
- 'sys_strerror',
- 'sys_supportpath',
- 'sys_test_exec',
- 'sys_time',
- 'sys_uname',
- 'sys_unsetenv',
- 'sys_usercapimodulepath',
- 'sys_userstartuppath',
- 'sys_version',
- 'sys_wait_exec',
- 'sys_waitpid',
- 'sys_wcontinued',
- 'sys_while',
- 'sys_wnohang',
- 'sys_wuntraced',
- 'table_name',
- 'tag_exists',
- 'tag_name',
- 'thread_var_get',
- 'thread_var_pop',
- 'thread_var_push',
- 'threadvar_find',
- 'threadvar_get',
- 'threadvar_set_asrt',
- 'threadvar_set',
- 'timer',
- 'token_value',
- 'treemap',
- 'u_lb_alphabetic',
- 'u_lb_ambiguous',
- 'u_lb_break_after',
- 'u_lb_break_before',
- 'u_lb_break_both',
- 'u_lb_break_symbols',
- 'u_lb_carriage_return',
- 'u_lb_close_punctuation',
- 'u_lb_combining_mark',
- 'u_lb_complex_context',
- 'u_lb_contingent_break',
- 'u_lb_exclamation',
- 'u_lb_glue',
- 'u_lb_h2',
- 'u_lb_h3',
- 'u_lb_hyphen',
- 'u_lb_ideographic',
- 'u_lb_infix_numeric',
- 'u_lb_inseparable',
- 'u_lb_jl',
- 'u_lb_jt',
- 'u_lb_jv',
- 'u_lb_line_feed',
- 'u_lb_mandatory_break',
- 'u_lb_next_line',
- 'u_lb_nonstarter',
- 'u_lb_numeric',
- 'u_lb_open_punctuation',
- 'u_lb_postfix_numeric',
- 'u_lb_prefix_numeric',
- 'u_lb_quotation',
- 'u_lb_space',
- 'u_lb_surrogate',
- 'u_lb_unknown',
- 'u_lb_word_joiner',
- 'u_lb_zwspace',
- 'u_nt_decimal',
- 'u_nt_digit',
- 'u_nt_none',
- 'u_nt_numeric',
- 'u_sb_aterm',
- 'u_sb_close',
- 'u_sb_format',
- 'u_sb_lower',
- 'u_sb_numeric',
- 'u_sb_oletter',
- 'u_sb_other',
- 'u_sb_sep',
- 'u_sb_sp',
- 'u_sb_sterm',
- 'u_sb_upper',
- 'u_wb_aletter',
- 'u_wb_extendnumlet',
- 'u_wb_format',
- 'u_wb_katakana',
- 'u_wb_midletter',
- 'u_wb_midnum',
- 'u_wb_numeric',
- 'u_wb_other',
- 'ucal_ampm',
- 'ucal_dayofmonth',
- 'ucal_dayofweek',
- 'ucal_dayofweekinmonth',
- 'ucal_dayofyear',
- 'ucal_daysinfirstweek',
- 'ucal_dowlocal',
- 'ucal_dstoffset',
- 'ucal_era',
- 'ucal_extendedyear',
- 'ucal_firstdayofweek',
- 'ucal_hour',
- 'ucal_hourofday',
- 'ucal_julianday',
- 'ucal_lenient',
- 'ucal_listtimezones',
- 'ucal_millisecond',
- 'ucal_millisecondsinday',
- 'ucal_minute',
- 'ucal_month',
- 'ucal_second',
- 'ucal_weekofmonth',
- 'ucal_weekofyear',
- 'ucal_year',
- 'ucal_yearwoy',
- 'ucal_zoneoffset',
- 'uchar_age',
- 'uchar_alphabetic',
- 'uchar_ascii_hex_digit',
- 'uchar_bidi_class',
- 'uchar_bidi_control',
- 'uchar_bidi_mirrored',
- 'uchar_bidi_mirroring_glyph',
- 'uchar_block',
- 'uchar_canonical_combining_class',
- 'uchar_case_folding',
- 'uchar_case_sensitive',
- 'uchar_dash',
- 'uchar_decomposition_type',
- 'uchar_default_ignorable_code_point',
- 'uchar_deprecated',
- 'uchar_diacritic',
- 'uchar_east_asian_width',
- 'uchar_extender',
- 'uchar_full_composition_exclusion',
- 'uchar_general_category_mask',
- 'uchar_general_category',
- 'uchar_grapheme_base',
- 'uchar_grapheme_cluster_break',
- 'uchar_grapheme_extend',
- 'uchar_grapheme_link',
- 'uchar_hangul_syllable_type',
- 'uchar_hex_digit',
- 'uchar_hyphen',
- 'uchar_id_continue',
- 'uchar_ideographic',
- 'uchar_ids_binary_operator',
- 'uchar_ids_trinary_operator',
- 'uchar_iso_comment',
- 'uchar_join_control',
- 'uchar_joining_group',
- 'uchar_joining_type',
- 'uchar_lead_canonical_combining_class',
- 'uchar_line_break',
- 'uchar_logical_order_exception',
- 'uchar_lowercase_mapping',
- 'uchar_lowercase',
- 'uchar_math',
- 'uchar_name',
- 'uchar_nfc_inert',
- 'uchar_nfc_quick_check',
- 'uchar_nfd_inert',
- 'uchar_nfd_quick_check',
- 'uchar_nfkc_inert',
- 'uchar_nfkc_quick_check',
- 'uchar_nfkd_inert',
- 'uchar_nfkd_quick_check',
- 'uchar_noncharacter_code_point',
- 'uchar_numeric_type',
- 'uchar_numeric_value',
- 'uchar_pattern_syntax',
- 'uchar_pattern_white_space',
- 'uchar_posix_alnum',
- 'uchar_posix_blank',
- 'uchar_posix_graph',
- 'uchar_posix_print',
- 'uchar_posix_xdigit',
- 'uchar_quotation_mark',
- 'uchar_radical',
- 'uchar_s_term',
- 'uchar_script',
- 'uchar_segment_starter',
- 'uchar_sentence_break',
- 'uchar_simple_case_folding',
- 'uchar_simple_lowercase_mapping',
- 'uchar_simple_titlecase_mapping',
- 'uchar_simple_uppercase_mapping',
- 'uchar_soft_dotted',
- 'uchar_terminal_punctuation',
- 'uchar_titlecase_mapping',
- 'uchar_trail_canonical_combining_class',
- 'uchar_unicode_1_name',
- 'uchar_unified_ideograph',
- 'uchar_uppercase_mapping',
- 'uchar_uppercase',
- 'uchar_variation_selector',
- 'uchar_white_space',
- 'uchar_word_break',
- 'uchar_xid_continue',
- 'uncompress',
- 'usage',
- 'uuid_compare',
- 'uuid_copy',
- 'uuid_generate_random',
- 'uuid_generate_time',
- 'uuid_generate',
- 'uuid_is_null',
- 'uuid_parse',
- 'uuid_unparse_lower',
- 'uuid_unparse_upper',
- 'uuid_unparse',
- 'value_list',
- 'value_listitem',
- 'valuelistitem',
- 'var_keys',
- 'var_values',
- 'wap_isenabled',
- 'wap_maxbuttons',
- 'wap_maxcolumns',
- 'wap_maxhorzpixels',
- 'wap_maxrows',
- 'wap_maxvertpixels',
- 'web_handlefcgirequest',
- 'web_node_content_representation_css',
- 'web_node_content_representation_html',
- 'web_node_content_representation_js',
- 'web_node_content_representation_xhr',
- 'web_node_forpath',
- 'web_nodes_initialize',
- 'web_nodes_normalizeextension',
- 'web_nodes_processcontentnode',
- 'web_nodes_requesthandler',
- 'web_response_nodesentry',
- 'web_router_database',
- 'web_router_initialize',
- 'websocket_handler_timeout',
- 'wexitstatus',
- 'wifcontinued',
- 'wifexited',
- 'wifsignaled',
- 'wifstopped',
- 'wstopsig',
- 'wtermsig',
- 'xml_transform',
- 'xml',
- 'zip_add_dir',
- 'zip_add',
- 'zip_checkcons',
- 'zip_close',
- 'zip_cm_bzip2',
- 'zip_cm_default',
- 'zip_cm_deflate',
- 'zip_cm_deflate64',
- 'zip_cm_implode',
- 'zip_cm_pkware_implode',
- 'zip_cm_reduce_1',
- 'zip_cm_reduce_2',
- 'zip_cm_reduce_3',
- 'zip_cm_reduce_4',
- 'zip_cm_shrink',
- 'zip_cm_store',
- 'zip_create',
- 'zip_delete',
- 'zip_em_3des_112',
- 'zip_em_3des_168',
- 'zip_em_aes_128',
- 'zip_em_aes_192',
- 'zip_em_aes_256',
- 'zip_em_des',
- 'zip_em_none',
- 'zip_em_rc2_old',
- 'zip_em_rc2',
- 'zip_em_rc4',
- 'zip_em_trad_pkware',
- 'zip_em_unknown',
- 'zip_er_changed',
- 'zip_er_close',
- 'zip_er_compnotsupp',
- 'zip_er_crc',
- 'zip_er_deleted',
- 'zip_er_eof',
- 'zip_er_exists',
- 'zip_er_incons',
- 'zip_er_internal',
- 'zip_er_inval',
- 'zip_er_memory',
- 'zip_er_multidisk',
- 'zip_er_noent',
- 'zip_er_nozip',
- 'zip_er_ok',
- 'zip_er_open',
- 'zip_er_read',
- 'zip_er_remove',
- 'zip_er_rename',
- 'zip_er_seek',
- 'zip_er_tmpopen',
- 'zip_er_write',
- 'zip_er_zipclosed',
- 'zip_er_zlib',
- 'zip_error_get_sys_type',
- 'zip_error_get',
- 'zip_error_to_str',
- 'zip_et_none',
- 'zip_et_sys',
- 'zip_et_zlib',
- 'zip_excl',
- 'zip_fclose',
- 'zip_file_error_get',
- 'zip_file_strerror',
- 'zip_fl_compressed',
- 'zip_fl_nocase',
- 'zip_fl_nodir',
- 'zip_fl_unchanged',
- 'zip_fopen_index',
- 'zip_fopen',
- 'zip_fread',
- 'zip_get_archive_comment',
- 'zip_get_file_comment',
- 'zip_get_name',
- 'zip_get_num_files',
- 'zip_name_locate',
- 'zip_open',
- 'zip_rename',
- 'zip_replace',
- 'zip_set_archive_comment',
- 'zip_set_file_comment',
- 'zip_stat_index',
- 'zip_stat',
- 'zip_strerror',
- 'zip_unchange_all',
- 'zip_unchange_archive',
- 'zip_unchange',
- 'zlib_version',
- ),
- 'Lasso 8 Tags': (
- '__char',
- '__sync_timestamp__',
- '_admin_addgroup',
- '_admin_adduser',
- '_admin_defaultconnector',
- '_admin_defaultconnectornames',
- '_admin_defaultdatabase',
- '_admin_defaultfield',
- '_admin_defaultgroup',
- '_admin_defaulthost',
- '_admin_defaulttable',
- '_admin_defaultuser',
- '_admin_deleteconnector',
- '_admin_deletedatabase',
- '_admin_deletefield',
- '_admin_deletegroup',
- '_admin_deletehost',
- '_admin_deletetable',
- '_admin_deleteuser',
- '_admin_duplicategroup',
- '_admin_internaldatabase',
- '_admin_listconnectors',
- '_admin_listdatabases',
- '_admin_listfields',
- '_admin_listgroups',
- '_admin_listhosts',
- '_admin_listtables',
- '_admin_listusers',
- '_admin_refreshconnector',
- '_admin_refreshsecurity',
- '_admin_servicepath',
- '_admin_updateconnector',
- '_admin_updatedatabase',
- '_admin_updatefield',
- '_admin_updategroup',
- '_admin_updatehost',
- '_admin_updatetable',
- '_admin_updateuser',
- '_chartfx_activation_string',
- '_chartfx_getchallengestring',
- '_chop_args',
- '_chop_mimes',
- '_client_addr_old',
- '_client_address_old',
- '_client_ip_old',
- '_database_names',
- '_datasource_reload',
- '_date_current',
- '_date_format',
- '_date_msec',
- '_date_parse',
- '_execution_timelimit',
- '_file_chmod',
- '_initialize',
- '_jdbc_acceptsurl',
- '_jdbc_debug',
- '_jdbc_deletehost',
- '_jdbc_driverclasses',
- '_jdbc_driverinfo',
- '_jdbc_metainfo',
- '_jdbc_propertyinfo',
- '_jdbc_setdriver',
- '_lasso_param',
- '_log_helper',
- '_proc_noparam',
- '_proc_withparam',
- '_recursion_limit',
- '_request_param',
- '_security_binaryexpiration',
- '_security_flushcaches',
- '_security_isserialized',
- '_security_serialexpiration',
- '_srand',
- '_strict_literals',
- '_substring',
- '_xmlrpc_exconverter',
- '_xmlrpc_inconverter',
- '_xmlrpc_xmlinconverter',
- 'abort',
- 'action_addinfo',
- 'action_addrecord',
- 'action_param',
- 'action_params',
- 'action_setfoundcount',
- 'action_setrecordid',
- 'action_settotalcount',
- 'action_statement',
- 'admin_allowedfileroots',
- 'admin_changeuser',
- 'admin_createuser',
- 'admin_currentgroups',
- 'admin_currentuserid',
- 'admin_currentusername',
- 'admin_getpref',
- 'admin_groupassignuser',
- 'admin_grouplistusers',
- 'admin_groupremoveuser',
- 'admin_lassoservicepath',
- 'admin_listgroups',
- 'admin_refreshlicensing',
- 'admin_refreshsecurity',
- 'admin_reloaddatasource',
- 'admin_removepref',
- 'admin_setpref',
- 'admin_userexists',
- 'admin_userlistgroups',
- 'all',
- 'and',
- 'array',
- 'array_iterator',
- 'auth',
- 'auth_admin',
- 'auth_auth',
- 'auth_custom',
- 'auth_group',
- 'auth_prompt',
- 'auth_user',
- 'base64',
- 'bean',
- 'bigint',
- 'bom_utf16be',
- 'bom_utf16le',
- 'bom_utf32be',
- 'bom_utf32le',
- 'bom_utf8',
- 'boolean',
- 'bw',
- 'bytes',
- 'cache',
- 'cache_delete',
- 'cache_empty',
- 'cache_exists',
- 'cache_fetch',
- 'cache_internal',
- 'cache_maintenance',
- 'cache_object',
- 'cache_preferences',
- 'cache_store',
- 'case',
- 'chartfx',
- 'chartfx_records',
- 'chartfx_serve',
- 'checked',
- 'choice_list',
- 'choice_listitem',
- 'choicelistitem',
- 'cipher_decrypt',
- 'cipher_digest',
- 'cipher_encrypt',
- 'cipher_hmac',
- 'cipher_keylength',
- 'cipher_list',
- 'click_text',
- 'client_addr',
- 'client_address',
- 'client_authorization',
- 'client_browser',
- 'client_contentlength',
- 'client_contenttype',
- 'client_cookielist',
- 'client_cookies',
- 'client_encoding',
- 'client_formmethod',
- 'client_getargs',
- 'client_getparams',
- 'client_headers',
- 'client_ip',
- 'client_ipfrominteger',
- 'client_iptointeger',
- 'client_password',
- 'client_postargs',
- 'client_postparams',
- 'client_type',
- 'client_url',
- 'client_username',
- 'cn',
- 'column',
- 'column_name',
- 'column_names',
- 'compare_beginswith',
- 'compare_contains',
- 'compare_endswith',
- 'compare_equalto',
- 'compare_greaterthan',
- 'compare_greaterthanorequals',
- 'compare_greaterthanorequls',
- 'compare_lessthan',
- 'compare_lessthanorequals',
- 'compare_notbeginswith',
- 'compare_notcontains',
- 'compare_notendswith',
- 'compare_notequalto',
- 'compare_notregexp',
- 'compare_regexp',
- 'compare_strictequalto',
- 'compare_strictnotequalto',
- 'compiler_removecacheddoc',
- 'compiler_setdefaultparserflags',
- 'compress',
- 'content_body',
- 'content_encoding',
- 'content_header',
- 'content_type',
- 'cookie',
- 'cookie_set',
- 'curl_ftp_getfile',
- 'curl_ftp_getlisting',
- 'curl_ftp_putfile',
- 'curl_include_url',
- 'currency',
- 'database_changecolumn',
- 'database_changefield',
- 'database_createcolumn',
- 'database_createfield',
- 'database_createtable',
- 'database_fmcontainer',
- 'database_hostinfo',
- 'database_inline',
- 'database_name',
- 'database_nameitem',
- 'database_names',
- 'database_realname',
- 'database_removecolumn',
- 'database_removefield',
- 'database_removetable',
- 'database_repeating',
- 'database_repeating_valueitem',
- 'database_repeatingvalueitem',
- 'database_schemanameitem',
- 'database_schemanames',
- 'database_tablecolumn',
- 'database_tablenameitem',
- 'database_tablenames',
- 'datasource_name',
- 'datasource_register',
- 'date',
- 'date__date_current',
- 'date__date_format',
- 'date__date_msec',
- 'date__date_parse',
- 'date_add',
- 'date_date',
- 'date_difference',
- 'date_duration',
- 'date_format',
- 'date_getcurrentdate',
- 'date_getday',
- 'date_getdayofweek',
- 'date_gethour',
- 'date_getlocaltimezone',
- 'date_getminute',
- 'date_getmonth',
- 'date_getsecond',
- 'date_gettime',
- 'date_getyear',
- 'date_gmttolocal',
- 'date_localtogmt',
- 'date_maximum',
- 'date_minimum',
- 'date_msec',
- 'date_setformat',
- 'date_subtract',
- 'db_layoutnameitem',
- 'db_layoutnames',
- 'db_nameitem',
- 'db_names',
- 'db_tablenameitem',
- 'db_tablenames',
- 'dbi_column_names',
- 'dbi_field_names',
- 'decimal',
- 'decimal_setglobaldefaultprecision',
- 'decode_base64',
- 'decode_bheader',
- 'decode_hex',
- 'decode_html',
- 'decode_json',
- 'decode_qheader',
- 'decode_quotedprintable',
- 'decode_quotedprintablebytes',
- 'decode_url',
- 'decode_xml',
- 'decompress',
- 'decrypt_blowfish',
- 'decrypt_blowfish2',
- 'default',
- 'define_atbegin',
- 'define_atend',
- 'define_constant',
- 'define_prototype',
- 'define_tag',
- 'define_tagp',
- 'define_type',
- 'define_typep',
- 'deserialize',
- 'directory_directorynameitem',
- 'directory_lister',
- 'directory_nameitem',
- 'directorynameitem',
- 'dns_default',
- 'dns_lookup',
- 'dns_response',
- 'duration',
- 'else',
- 'email_batch',
- 'email_compose',
- 'email_digestchallenge',
- 'email_digestresponse',
- 'email_extract',
- 'email_findemails',
- 'email_immediate',
- 'email_merge',
- 'email_mxerror',
- 'email_mxlookup',
- 'email_parse',
- 'email_pop',
- 'email_queue',
- 'email_result',
- 'email_safeemail',
- 'email_send',
- 'email_smtp',
- 'email_status',
- 'email_token',
- 'email_translatebreakstocrlf',
- 'encode_base64',
- 'encode_bheader',
- 'encode_break',
- 'encode_breaks',
- 'encode_crc32',
- 'encode_hex',
- 'encode_html',
- 'encode_htmltoxml',
- 'encode_json',
- 'encode_qheader',
- 'encode_quotedprintable',
- 'encode_quotedprintablebytes',
- 'encode_set',
- 'encode_smart',
- 'encode_sql',
- 'encode_sql92',
- 'encode_stricturl',
- 'encode_url',
- 'encode_xml',
- 'encrypt_blowfish',
- 'encrypt_blowfish2',
- 'encrypt_crammd5',
- 'encrypt_hmac',
- 'encrypt_md5',
- 'eq',
- 'error_adderror',
- 'error_code',
- 'error_code_aborted',
- 'error_code_assert',
- 'error_code_bof',
- 'error_code_connectioninvalid',
- 'error_code_couldnotclosefile',
- 'error_code_couldnotcreateoropenfile',
- 'error_code_couldnotdeletefile',
- 'error_code_couldnotdisposememory',
- 'error_code_couldnotlockmemory',
- 'error_code_couldnotreadfromfile',
- 'error_code_couldnotunlockmemory',
- 'error_code_couldnotwritetofile',
- 'error_code_criterianotmet',
- 'error_code_datasourceerror',
- 'error_code_directoryfull',
- 'error_code_diskfull',
- 'error_code_dividebyzero',
- 'error_code_eof',
- 'error_code_failure',
- 'error_code_fieldrestriction',
- 'error_code_file',
- 'error_code_filealreadyexists',
- 'error_code_filecorrupt',
- 'error_code_fileinvalid',
- 'error_code_fileinvalidaccessmode',
- 'error_code_fileisclosed',
- 'error_code_fileisopen',
- 'error_code_filelocked',
- 'error_code_filenotfound',
- 'error_code_fileunlocked',
- 'error_code_httpfilenotfound',
- 'error_code_illegalinstruction',
- 'error_code_illegaluseoffrozeninstance',
- 'error_code_invaliddatabase',
- 'error_code_invalidfilename',
- 'error_code_invalidmemoryobject',
- 'error_code_invalidparameter',
- 'error_code_invalidpassword',
- 'error_code_invalidpathname',
- 'error_code_invalidusername',
- 'error_code_ioerror',
- 'error_code_loopaborted',
- 'error_code_memory',
- 'error_code_network',
- 'error_code_nilpointer',
- 'error_code_noerr',
- 'error_code_nopermission',
- 'error_code_outofmemory',
- 'error_code_outofstackspace',
- 'error_code_overflow',
- 'error_code_postconditionfailed',
- 'error_code_preconditionfailed',
- 'error_code_resnotfound',
- 'error_code_resource',
- 'error_code_streamreaderror',
- 'error_code_streamwriteerror',
- 'error_code_syntaxerror',
- 'error_code_tagnotfound',
- 'error_code_unknownerror',
- 'error_code_varnotfound',
- 'error_code_volumedoesnotexist',
- 'error_code_webactionnotsupported',
- 'error_code_webadderror',
- 'error_code_webdeleteerror',
- 'error_code_webmodulenotfound',
- 'error_code_webnosuchobject',
- 'error_code_webrepeatingrelatedfield',
- 'error_code_webrequiredfieldmissing',
- 'error_code_webtimeout',
- 'error_code_webupdateerror',
- 'error_columnrestriction',
- 'error_currenterror',
- 'error_databaseconnectionunavailable',
- 'error_databasetimeout',
- 'error_deleteerror',
- 'error_fieldrestriction',
- 'error_filenotfound',
- 'error_invaliddatabase',
- 'error_invalidpassword',
- 'error_invalidusername',
- 'error_modulenotfound',
- 'error_msg',
- 'error_msg_aborted',
- 'error_msg_assert',
- 'error_msg_bof',
- 'error_msg_connectioninvalid',
- 'error_msg_couldnotclosefile',
- 'error_msg_couldnotcreateoropenfile',
- 'error_msg_couldnotdeletefile',
- 'error_msg_couldnotdisposememory',
- 'error_msg_couldnotlockmemory',
- 'error_msg_couldnotreadfromfile',
- 'error_msg_couldnotunlockmemory',
- 'error_msg_couldnotwritetofile',
- 'error_msg_criterianotmet',
- 'error_msg_datasourceerror',
- 'error_msg_directoryfull',
- 'error_msg_diskfull',
- 'error_msg_dividebyzero',
- 'error_msg_eof',
- 'error_msg_failure',
- 'error_msg_fieldrestriction',
- 'error_msg_file',
- 'error_msg_filealreadyexists',
- 'error_msg_filecorrupt',
- 'error_msg_fileinvalid',
- 'error_msg_fileinvalidaccessmode',
- 'error_msg_fileisclosed',
- 'error_msg_fileisopen',
- 'error_msg_filelocked',
- 'error_msg_filenotfound',
- 'error_msg_fileunlocked',
- 'error_msg_httpfilenotfound',
- 'error_msg_illegalinstruction',
- 'error_msg_illegaluseoffrozeninstance',
- 'error_msg_invaliddatabase',
- 'error_msg_invalidfilename',
- 'error_msg_invalidmemoryobject',
- 'error_msg_invalidparameter',
- 'error_msg_invalidpassword',
- 'error_msg_invalidpathname',
- 'error_msg_invalidusername',
- 'error_msg_ioerror',
- 'error_msg_loopaborted',
- 'error_msg_memory',
- 'error_msg_network',
- 'error_msg_nilpointer',
- 'error_msg_noerr',
- 'error_msg_nopermission',
- 'error_msg_outofmemory',
- 'error_msg_outofstackspace',
- 'error_msg_overflow',
- 'error_msg_postconditionfailed',
- 'error_msg_preconditionfailed',
- 'error_msg_resnotfound',
- 'error_msg_resource',
- 'error_msg_streamreaderror',
- 'error_msg_streamwriteerror',
- 'error_msg_syntaxerror',
- 'error_msg_tagnotfound',
- 'error_msg_unknownerror',
- 'error_msg_varnotfound',
- 'error_msg_volumedoesnotexist',
- 'error_msg_webactionnotsupported',
- 'error_msg_webadderror',
- 'error_msg_webdeleteerror',
- 'error_msg_webmodulenotfound',
- 'error_msg_webnosuchobject',
- 'error_msg_webrepeatingrelatedfield',
- 'error_msg_webrequiredfieldmissing',
- 'error_msg_webtimeout',
- 'error_msg_webupdateerror',
- 'error_noerror',
- 'error_nopermission',
- 'error_norecordsfound',
- 'error_outofmemory',
- 'error_pop',
- 'error_push',
- 'error_reqcolumnmissing',
- 'error_reqfieldmissing',
- 'error_requiredcolumnmissing',
- 'error_requiredfieldmissing',
- 'error_reset',
- 'error_seterrorcode',
- 'error_seterrormessage',
- 'error_updateerror',
- 'euro',
- 'event_schedule',
- 'ew',
- 'fail',
- 'fail_if',
- 'false',
- 'field',
- 'field_name',
- 'field_names',
- 'file',
- 'file_autoresolvefullpaths',
- 'file_chmod',
- 'file_control',
- 'file_copy',
- 'file_create',
- 'file_creationdate',
- 'file_currenterror',
- 'file_delete',
- 'file_exists',
- 'file_getlinecount',
- 'file_getsize',
- 'file_isdirectory',
- 'file_listdirectory',
- 'file_moddate',
- 'file_modechar',
- 'file_modeline',
- 'file_move',
- 'file_openread',
- 'file_openreadwrite',
- 'file_openwrite',
- 'file_openwriteappend',
- 'file_openwritetruncate',
- 'file_probeeol',
- 'file_processuploads',
- 'file_read',
- 'file_readline',
- 'file_rename',
- 'file_serve',
- 'file_setsize',
- 'file_stream',
- 'file_streamcopy',
- 'file_uploads',
- 'file_waitread',
- 'file_waittimeout',
- 'file_waitwrite',
- 'file_write',
- 'find_soap_ops',
- 'form_param',
- 'found_count',
- 'ft',
- 'ftp_getfile',
- 'ftp_getlisting',
- 'ftp_putfile',
- 'full',
- 'global',
- 'global_defined',
- 'global_remove',
- 'global_reset',
- 'globals',
- 'gt',
- 'gte',
- 'handle',
- 'handle_error',
- 'header',
- 'html_comment',
- 'http_getfile',
- 'ical_alarm',
- 'ical_attribute',
- 'ical_calendar',
- 'ical_daylight',
- 'ical_event',
- 'ical_freebusy',
- 'ical_item',
- 'ical_journal',
- 'ical_parse',
- 'ical_standard',
- 'ical_timezone',
- 'ical_todo',
- 'if',
- 'if_empty',
- 'if_false',
- 'if_null',
- 'if_true',
- 'image',
- 'image_url',
- 'img',
- 'include',
- 'include_cgi',
- 'include_currentpath',
- 'include_once',
- 'include_raw',
- 'include_url',
- 'inline',
- 'integer',
- 'iterate',
- 'iterator',
- 'java',
- 'java_bean',
- 'json_records',
- 'json_rpccall',
- 'keycolumn_name',
- 'keycolumn_value',
- 'keyfield_name',
- 'keyfield_value',
- 'lasso_comment',
- 'lasso_currentaction',
- 'lasso_datasourceis',
- 'lasso_datasourceis4d',
- 'lasso_datasourceisfilemaker',
- 'lasso_datasourceisfilemaker7',
- 'lasso_datasourceisfilemaker9',
- 'lasso_datasourceisfilemakersa',
- 'lasso_datasourceisjdbc',
- 'lasso_datasourceislassomysql',
- 'lasso_datasourceismysql',
- 'lasso_datasourceisodbc',
- 'lasso_datasourceisopenbase',
- 'lasso_datasourceisoracle',
- 'lasso_datasourceispostgresql',
- 'lasso_datasourceisspotlight',
- 'lasso_datasourceissqlite',
- 'lasso_datasourceissqlserver',
- 'lasso_datasourcemodulename',
- 'lasso_datatype',
- 'lasso_disableondemand',
- 'lasso_errorreporting',
- 'lasso_executiontimelimit',
- 'lasso_parser',
- 'lasso_process',
- 'lasso_sessionid',
- 'lasso_siteid',
- 'lasso_siteisrunning',
- 'lasso_sitename',
- 'lasso_siterestart',
- 'lasso_sitestart',
- 'lasso_sitestop',
- 'lasso_tagexists',
- 'lasso_tagmodulename',
- 'lasso_uniqueid',
- 'lasso_updatecheck',
- 'lasso_uptime',
- 'lasso_version',
- 'lassoapp_create',
- 'lassoapp_dump',
- 'lassoapp_flattendir',
- 'lassoapp_getappdata',
- 'lassoapp_link',
- 'lassoapp_list',
- 'lassoapp_process',
- 'lassoapp_unitize',
- 'layout_name',
- 'ldap',
- 'ldap_scope_base',
- 'ldap_scope_onelevel',
- 'ldap_scope_subtree',
- 'ldml',
- 'ldml_ldml',
- 'library',
- 'library_once',
- 'link',
- 'link_currentaction',
- 'link_currentactionparams',
- 'link_currentactionurl',
- 'link_currentgroup',
- 'link_currentgroupparams',
- 'link_currentgroupurl',
- 'link_currentrecord',
- 'link_currentrecordparams',
- 'link_currentrecordurl',
- 'link_currentsearch',
- 'link_currentsearchparams',
- 'link_currentsearchurl',
- 'link_detail',
- 'link_detailparams',
- 'link_detailurl',
- 'link_firstgroup',
- 'link_firstgroupparams',
- 'link_firstgroupurl',
- 'link_firstrecord',
- 'link_firstrecordparams',
- 'link_firstrecordurl',
- 'link_lastgroup',
- 'link_lastgroupparams',
- 'link_lastgroupurl',
- 'link_lastrecord',
- 'link_lastrecordparams',
- 'link_lastrecordurl',
- 'link_nextgroup',
- 'link_nextgroupparams',
- 'link_nextgroupurl',
- 'link_nextrecord',
- 'link_nextrecordparams',
- 'link_nextrecordurl',
- 'link_params',
- 'link_prevgroup',
- 'link_prevgroupparams',
- 'link_prevgroupurl',
- 'link_prevrecord',
- 'link_prevrecordparams',
- 'link_prevrecordurl',
- 'link_setformat',
- 'link_url',
- 'list',
- 'list_additem',
- 'list_fromlist',
- 'list_fromstring',
- 'list_getitem',
- 'list_itemcount',
- 'list_iterator',
- 'list_removeitem',
- 'list_replaceitem',
- 'list_reverseiterator',
- 'list_tostring',
- 'literal',
- 'ljax_end',
- 'ljax_hastarget',
- 'ljax_include',
- 'ljax_start',
- 'ljax_target',
- 'local',
- 'local_defined',
- 'local_remove',
- 'local_reset',
- 'locale_format',
- 'locals',
- 'log',
- 'log_always',
- 'log_critical',
- 'log_deprecated',
- 'log_destination_console',
- 'log_destination_database',
- 'log_destination_file',
- 'log_detail',
- 'log_level_critical',
- 'log_level_deprecated',
- 'log_level_detail',
- 'log_level_sql',
- 'log_level_warning',
- 'log_setdestination',
- 'log_sql',
- 'log_warning',
- 'logicalop_value',
- 'logicaloperator_value',
- 'loop',
- 'loop_abort',
- 'loop_continue',
- 'loop_count',
- 'lt',
- 'lte',
- 'magick_image',
- 'map',
- 'map_iterator',
- 'match_comparator',
- 'match_notrange',
- 'match_notregexp',
- 'match_range',
- 'match_regexp',
- 'math_abs',
- 'math_acos',
- 'math_add',
- 'math_asin',
- 'math_atan',
- 'math_atan2',
- 'math_ceil',
- 'math_converteuro',
- 'math_cos',
- 'math_div',
- 'math_exp',
- 'math_floor',
- 'math_internal_rand',
- 'math_internal_randmax',
- 'math_internal_srand',
- 'math_ln',
- 'math_log',
- 'math_log10',
- 'math_max',
- 'math_min',
- 'math_mod',
- 'math_mult',
- 'math_pow',
- 'math_random',
- 'math_range',
- 'math_rint',
- 'math_roman',
- 'math_round',
- 'math_sin',
- 'math_sqrt',
- 'math_sub',
- 'math_tan',
- 'maxrecords_value',
- 'memory_session_driver',
- 'mime_type',
- 'minimal',
- 'misc__srand',
- 'misc_randomnumber',
- 'misc_roman',
- 'misc_valid_creditcard',
- 'mysql_session_driver',
- 'named_param',
- 'namespace_current',
- 'namespace_delimiter',
- 'namespace_exists',
- 'namespace_file_fullpathexists',
- 'namespace_global',
- 'namespace_import',
- 'namespace_load',
- 'namespace_page',
- 'namespace_unload',
- 'namespace_using',
- 'neq',
- 'net',
- 'net_connectinprogress',
- 'net_connectok',
- 'net_typessl',
- 'net_typessltcp',
- 'net_typessludp',
- 'net_typetcp',
- 'net_typeudp',
- 'net_waitread',
- 'net_waittimeout',
- 'net_waitwrite',
- 'no_default_output',
- 'none',
- 'noprocess',
- 'not',
- 'nrx',
- 'nslookup',
- 'null',
- 'object',
- 'once',
- 'oneoff',
- 'op_logicalvalue',
- 'operator_logicalvalue',
- 'option',
- 'or',
- 'os_process',
- 'output',
- 'output_none',
- 'pair',
- 'params_up',
- 'pdf_barcode',
- 'pdf_color',
- 'pdf_doc',
- 'pdf_font',
- 'pdf_image',
- 'pdf_list',
- 'pdf_read',
- 'pdf_serve',
- 'pdf_table',
- 'pdf_text',
- 'percent',
- 'portal',
- 'postcondition',
- 'precondition',
- 'prettyprintingnsmap',
- 'prettyprintingtypemap',
- 'priorityqueue',
- 'private',
- 'proc_convert',
- 'proc_convertbody',
- 'proc_convertone',
- 'proc_extract',
- 'proc_extractone',
- 'proc_find',
- 'proc_first',
- 'proc_foreach',
- 'proc_get',
- 'proc_join',
- 'proc_lasso',
- 'proc_last',
- 'proc_map_entry',
- 'proc_null',
- 'proc_regexp',
- 'proc_xml',
- 'proc_xslt',
- 'process',
- 'protect',
- 'queue',
- 'rand',
- 'randomnumber',
- 'raw',
- 'recid_value',
- 'record_count',
- 'recordcount',
- 'recordid_value',
- 'records',
- 'records_array',
- 'records_map',
- 'redirect_url',
- 'reference',
- 'referer',
- 'referer_url',
- 'referrer',
- 'referrer_url',
- 'regexp',
- 'repeating',
- 'repeating_valueitem',
- 'repeatingvalueitem',
- 'repetition',
- 'req_column',
- 'req_field',
- 'required_column',
- 'required_field',
- 'response_fileexists',
- 'response_filepath',
- 'response_localpath',
- 'response_path',
- 'response_realm',
- 'resultset',
- 'resultset_count',
- 'return',
- 'return_value',
- 'reverseiterator',
- 'roman',
- 'row_count',
- 'rows',
- 'rows_array',
- 'run_children',
- 'rx',
- 'schema_name',
- 'scientific',
- 'search_args',
- 'search_arguments',
- 'search_columnitem',
- 'search_fielditem',
- 'search_operatoritem',
- 'search_opitem',
- 'search_valueitem',
- 'searchfielditem',
- 'searchoperatoritem',
- 'searchopitem',
- 'searchvalueitem',
- 'select',
- 'selected',
- 'self',
- 'serialize',
- 'series',
- 'server_date',
- 'server_day',
- 'server_ip',
- 'server_name',
- 'server_port',
- 'server_push',
- 'server_siteisrunning',
- 'server_sitestart',
- 'server_sitestop',
- 'server_time',
- 'session_abort',
- 'session_addoutputfilter',
- 'session_addvar',
- 'session_addvariable',
- 'session_deleteexpired',
- 'session_driver',
- 'session_end',
- 'session_id',
- 'session_removevar',
- 'session_removevariable',
- 'session_result',
- 'session_setdriver',
- 'session_start',
- 'set',
- 'set_iterator',
- 'set_reverseiterator',
- 'shown_count',
- 'shown_first',
- 'shown_last',
- 'site_atbegin',
- 'site_id',
- 'site_name',
- 'site_restart',
- 'skiprecords_value',
- 'sleep',
- 'soap_convertpartstopairs',
- 'soap_definetag',
- 'soap_info',
- 'soap_lastrequest',
- 'soap_lastresponse',
- 'soap_stub',
- 'sort_args',
- 'sort_arguments',
- 'sort_columnitem',
- 'sort_fielditem',
- 'sort_orderitem',
- 'sortcolumnitem',
- 'sortfielditem',
- 'sortorderitem',
- 'sqlite_createdb',
- 'sqlite_session_driver',
- 'sqlite_setsleepmillis',
- 'sqlite_setsleeptries',
- 'srand',
- 'stack',
- 'stock_quote',
- 'string',
- 'string_charfromname',
- 'string_concatenate',
- 'string_countfields',
- 'string_endswith',
- 'string_extract',
- 'string_findposition',
- 'string_findregexp',
- 'string_fordigit',
- 'string_getfield',
- 'string_getunicodeversion',
- 'string_insert',
- 'string_isalpha',
- 'string_isalphanumeric',
- 'string_isdigit',
- 'string_ishexdigit',
- 'string_islower',
- 'string_isnumeric',
- 'string_ispunctuation',
- 'string_isspace',
- 'string_isupper',
- 'string_length',
- 'string_lowercase',
- 'string_remove',
- 'string_removeleading',
- 'string_removetrailing',
- 'string_replace',
- 'string_replaceregexp',
- 'string_todecimal',
- 'string_tointeger',
- 'string_uppercase',
- 'string_validcharset',
- 'table_name',
- 'table_realname',
- 'tag',
- 'tag_name',
- 'tags',
- 'tags_find',
- 'tags_list',
- 'tcp_close',
- 'tcp_open',
- 'tcp_send',
- 'tcp_tcp_close',
- 'tcp_tcp_open',
- 'tcp_tcp_send',
- 'thread_abort',
- 'thread_atomic',
- 'thread_event',
- 'thread_exists',
- 'thread_getcurrentid',
- 'thread_getpriority',
- 'thread_info',
- 'thread_list',
- 'thread_lock',
- 'thread_pipe',
- 'thread_priority_default',
- 'thread_priority_high',
- 'thread_priority_low',
- 'thread_rwlock',
- 'thread_semaphore',
- 'thread_setpriority',
- 'token_value',
- 'total_records',
- 'treemap',
- 'treemap_iterator',
- 'true',
- 'url_rewrite',
- 'valid_creditcard',
- 'valid_date',
- 'valid_email',
- 'valid_url',
- 'value_list',
- 'value_listitem',
- 'valuelistitem',
- 'var',
- 'var_defined',
- 'var_remove',
- 'var_reset',
- 'var_set',
- 'variable',
- 'variable_defined',
- 'variable_set',
- 'variables',
- 'variant_count',
- 'vars',
- 'wap_isenabled',
- 'wap_maxbuttons',
- 'wap_maxcolumns',
- 'wap_maxhorzpixels',
- 'wap_maxrows',
- 'wap_maxvertpixels',
- 'while',
- 'wsdl_extract',
- 'wsdl_getbinding',
- 'wsdl_getbindingforoperation',
- 'wsdl_getbindingoperations',
- 'wsdl_getmessagenamed',
- 'wsdl_getmessageparts',
- 'wsdl_getmessagetriofromporttype',
- 'wsdl_getopbodystyle',
- 'wsdl_getopbodyuse',
- 'wsdl_getoperation',
- 'wsdl_getoplocation',
- 'wsdl_getopmessagetypes',
- 'wsdl_getopsoapaction',
- 'wsdl_getportaddress',
- 'wsdl_getportsforservice',
- 'wsdl_getporttype',
- 'wsdl_getporttypeoperation',
- 'wsdl_getservicedocumentation',
- 'wsdl_getservices',
- 'wsdl_gettargetnamespace',
- 'wsdl_issoapoperation',
- 'wsdl_listoperations',
- 'wsdl_maketest',
- 'xml',
- 'xml_extract',
- 'xml_rpc',
- 'xml_rpccall',
- 'xml_rw',
- 'xml_serve',
- 'xml_transform',
- 'xml_xml',
- 'xml_xmlstream',
- 'xmlstream',
- 'xsd_attribute',
- 'xsd_blankarraybase',
- 'xsd_blankbase',
- 'xsd_buildtype',
- 'xsd_cache',
- 'xsd_checkcardinality',
- 'xsd_continueall',
- 'xsd_continueannotation',
- 'xsd_continueany',
- 'xsd_continueanyattribute',
- 'xsd_continueattribute',
- 'xsd_continueattributegroup',
- 'xsd_continuechoice',
- 'xsd_continuecomplexcontent',
- 'xsd_continuecomplextype',
- 'xsd_continuedocumentation',
- 'xsd_continueextension',
- 'xsd_continuegroup',
- 'xsd_continuekey',
- 'xsd_continuelist',
- 'xsd_continuerestriction',
- 'xsd_continuesequence',
- 'xsd_continuesimplecontent',
- 'xsd_continuesimpletype',
- 'xsd_continueunion',
- 'xsd_deserialize',
- 'xsd_fullyqualifyname',
- 'xsd_generate',
- 'xsd_generateblankfromtype',
- 'xsd_generateblanksimpletype',
- 'xsd_generatetype',
- 'xsd_getschematype',
- 'xsd_issimpletype',
- 'xsd_loadschema',
- 'xsd_lookupnamespaceuri',
- 'xsd_lookuptype',
- 'xsd_processany',
- 'xsd_processattribute',
- 'xsd_processattributegroup',
- 'xsd_processcomplextype',
- 'xsd_processelement',
- 'xsd_processgroup',
- 'xsd_processimport',
- 'xsd_processinclude',
- 'xsd_processschema',
- 'xsd_processsimpletype',
- 'xsd_ref',
- 'xsd_type',
- )
-}
-MEMBERS = {
- 'Member Methods': (
- 'abort',
- 'abs',
- 'accept_charset',
- 'accept',
- 'acceptconnections',
- 'acceptdeserializedelement',
- 'acceptnossl',
- 'acceptpost',
- 'accesskey',
- 'acos',
- 'acosh',
- 'action',
- 'actionparams',
- 'active_tick',
- 'add',
- 'addatend',
- 'addattachment',
- 'addbarcode',
- 'addchapter',
- 'addcheckbox',
- 'addcolumninfo',
- 'addcombobox',
- 'addcomment',
- 'addcomponent',
- 'addcomponents',
- 'addcss',
- 'adddatabasetable',
- 'adddatasource',
- 'adddatasourcedatabase',
- 'adddatasourcehost',
- 'adddir',
- 'adddirpath',
- 'addendjs',
- 'addendjstext',
- 'adderror',
- 'addfavicon',
- 'addfile',
- 'addgroup',
- 'addheader',
- 'addhiddenfield',
- 'addhtmlpart',
- 'addimage',
- 'addjavascript',
- 'addjs',
- 'addjstext',
- 'addlist',
- 'addmathfunctions',
- 'addmember',
- 'addoneheaderline',
- 'addpage',
- 'addparagraph',
- 'addpart',
- 'addpasswordfield',
- 'addphrase',
- 'addpostdispatch',
- 'addpredispatch',
- 'addradiobutton',
- 'addradiogroup',
- 'addresetbutton',
- 'addrow',
- 'addsection',
- 'addselectlist',
- 'addset',
- 'addsubmitbutton',
- 'addsubnode',
- 'addtable',
- 'addtask',
- 'addtext',
- 'addtextarea',
- 'addtextfield',
- 'addtextpart',
- 'addtobuffer',
- 'addtrait',
- 'adduser',
- 'addusertogroup',
- 'addwarning',
- 'addzip',
- 'allocobject',
- 'am',
- 'ampm',
- 'annotate',
- 'answer',
- 'apop',
- 'append',
- 'appendarray',
- 'appendarraybegin',
- 'appendarrayend',
- 'appendbool',
- 'appendbytes',
- 'appendchar',
- 'appendchild',
- 'appendcolon',
- 'appendcomma',
- 'appenddata',
- 'appenddatetime',
- 'appenddbpointer',
- 'appenddecimal',
- 'appenddocument',
- 'appendimagetolist',
- 'appendinteger',
- 'appendnowutc',
- 'appendnull',
- 'appendoid',
- 'appendregex',
- 'appendreplacement',
- 'appendstring',
- 'appendtail',
- 'appendtime',
- 'applyheatcolors',
- 'appmessage',
- 'appname',
- 'appprefix',
- 'appstatus',
- 'arc',
- 'archive',
- 'arguments',
- 'argumentvalue',
- 'asarray',
- 'asarraystring',
- 'asasync',
- 'asbytes',
- 'ascopy',
- 'ascopydeep',
- 'asdecimal',
- 'asgenerator',
- 'asin',
- 'asinh',
- 'asinteger',
- 'askeyedgenerator',
- 'aslazystring',
- 'aslist',
- 'asraw',
- 'asstaticarray',
- 'asstring',
- 'asstringhex',
- 'asstringoct',
- 'asxml',
- 'atan',
- 'atan2',
- 'atanh',
- 'atend',
- 'atends',
- 'atime',
- 'attributecount',
- 'attributes',
- 'attrs',
- 'auth',
- 'authenticate',
- 'authorize',
- 'autocollectbuffer',
- 'average',
- 'back',
- 'basename',
- 'basepaths',
- 'baseuri',
- 'bcc',
- 'beginssl',
- 'beginswith',
- 'begintls',
- 'bestcharset',
- 'bind_blob',
- 'bind_double',
- 'bind_int',
- 'bind_null',
- 'bind_parameter_index',
- 'bind_text',
- 'bind',
- 'bindcount',
- 'bindone',
- 'bindparam',
- 'bitand',
- 'bitclear',
- 'bitflip',
- 'bitformat',
- 'bitnot',
- 'bitor',
- 'bitset',
- 'bitshiftleft',
- 'bitshiftright',
- 'bittest',
- 'bitxor',
- 'blur',
- 'body',
- 'bodybytes',
- 'boundary',
- 'bptoxml',
- 'bptypetostr',
- 'bucketnumber',
- 'buff',
- 'buildquery',
- 'businessdaysbetween',
- 'by',
- 'bytes',
- 'cachedappprefix',
- 'cachedroot',
- 'callboolean',
- 'callbooleanmethod',
- 'callbytemethod',
- 'callcharmethod',
- 'calldoublemethod',
- 'calledname',
- 'callfirst',
- 'callfloat',
- 'callfloatmethod',
- 'callint',
- 'callintmethod',
- 'calllongmethod',
- 'callnonvirtualbooleanmethod',
- 'callnonvirtualbytemethod',
- 'callnonvirtualcharmethod',
- 'callnonvirtualdoublemethod',
- 'callnonvirtualfloatmethod',
- 'callnonvirtualintmethod',
- 'callnonvirtuallongmethod',
- 'callnonvirtualobjectmethod',
- 'callnonvirtualshortmethod',
- 'callnonvirtualvoidmethod',
- 'callobject',
- 'callobjectmethod',
- 'callshortmethod',
- 'callsite_col',
- 'callsite_file',
- 'callsite_line',
- 'callstack',
- 'callstaticboolean',
- 'callstaticbooleanmethod',
- 'callstaticbytemethod',
- 'callstaticcharmethod',
- 'callstaticdoublemethod',
- 'callstaticfloatmethod',
- 'callstaticint',
- 'callstaticintmethod',
- 'callstaticlongmethod',
- 'callstaticobject',
- 'callstaticobjectmethod',
- 'callstaticshortmethod',
- 'callstaticstring',
- 'callstaticvoidmethod',
- 'callstring',
- 'callvoid',
- 'callvoidmethod',
- 'cancel',
- 'cap',
- 'capa',
- 'capabilities',
- 'capi',
- 'cbrt',
- 'cc',
- 'ceil',
- 'chardigitvalue',
- 'charname',
- 'charset',
- 'chartype',
- 'checkdebugging',
- 'checked',
- 'checkuser',
- 'childnodes',
- 'chk',
- 'chmod',
- 'choosecolumntype',
- 'chown',
- 'chunked',
- 'circle',
- 'class',
- 'classid',
- 'clear',
- 'clonenode',
- 'close',
- 'closepath',
- 'closeprepared',
- 'closewrite',
- 'code',
- 'codebase',
- 'codetype',
- 'colmap',
- 'colorspace',
- 'column_blob',
- 'column_count',
- 'column_decltype',
- 'column_double',
- 'column_int64',
- 'column_name',
- 'column_text',
- 'column_type',
- 'command',
- 'comments',
- 'compare',
- 'comparecodepointorder',
- 'componentdelimiter',
- 'components',
- 'composite',
- 'compress',
- 'concat',
- 'condtoint',
- 'configureds',
- 'configuredskeys',
- 'connect',
- 'connection',
- 'connectionhandler',
- 'connhandler',
- 'consume_domain',
- 'consume_label',
- 'consume_message',
- 'consume_rdata',
- 'consume_string',
- 'contains',
- 'content_disposition',
- 'content_transfer_encoding',
- 'content_type',
- 'content',
- 'contentlength',
- 'contents',
- 'contenttype',
- 'continuation',
- 'continuationpacket',
- 'continuationpoint',
- 'continuationstack',
- 'continue',
- 'contrast',
- 'conventionaltop',
- 'convert',
- 'cookie',
- 'cookies',
- 'cookiesarray',
- 'cookiesary',
- 'copyto',
- 'cos',
- 'cosh',
- 'count',
- 'countkeys',
- 'country',
- 'countusersbygroup',
- 'crc',
- 'create',
- 'createattribute',
- 'createattributens',
- 'createcdatasection',
- 'createcomment',
- 'createdocument',
- 'createdocumentfragment',
- 'createdocumenttype',
- 'createelement',
- 'createelementns',
- 'createentityreference',
- 'createindex',
- 'createprocessinginstruction',
- 'createtable',
- 'createtextnode',
- 'criteria',
- 'crop',
- 'csscontent',
- 'curl',
- 'current',
- 'currentfile',
- 'curveto',
- 'd',
- 'data',
- 'databasecolumnnames',
- 'databasecolumns',
- 'databasemap',
- 'databasename',
- 'datasourcecolumnnames',
- 'datasourcecolumns',
- 'datasourcemap',
- 'date',
- 'day',
- 'dayofmonth',
- 'dayofweek',
- 'dayofweekinmonth',
- 'dayofyear',
- 'days',
- 'daysbetween',
- 'db',
- 'dbtablestable',
- 'debug',
- 'declare',
- 'decodebase64',
- 'decodehex',
- 'decodehtml',
- 'decodeqp',
- 'decodeurl',
- 'decodexml',
- 'decompose',
- 'decomposeassignment',
- 'defaultcontentrepresentation',
- 'defer',
- 'deg2rad',
- 'dele',
- 'delete',
- 'deletedata',
- 'deleteglobalref',
- 'deletelocalref',
- 'delim',
- 'depth',
- 'dereferencepointer',
- 'describe',
- 'description',
- 'deserialize',
- 'detach',
- 'detectcharset',
- 'didinclude',
- 'difference',
- 'digit',
- 'dir',
- 'displaycountry',
- 'displaylanguage',
- 'displayname',
- 'displayscript',
- 'displayvariant',
- 'div',
- 'dns_response',
- 'do',
- 'doatbegins',
- 'doatends',
- 'doccomment',
- 'doclose',
- 'doctype',
- 'document',
- 'documentelement',
- 'documentroot',
- 'domainbody',
- 'done',
- 'dosessions',
- 'dowithclose',
- 'dowlocal',
- 'download',
- 'drawtext',
- 'drop',
- 'dropindex',
- 'dsdbtable',
- 'dshoststable',
- 'dsinfo',
- 'dst',
- 'dstable',
- 'dstoffset',
- 'dtdid',
- 'dup',
- 'dup2',
- 'each',
- 'eachbyte',
- 'eachcharacter',
- 'eachchild',
- 'eachcomponent',
- 'eachdir',
- 'eachdirpath',
- 'eachdirpathrecursive',
- 'eachentry',
- 'eachfile',
- 'eachfilename',
- 'eachfilepath',
- 'eachfilepathrecursive',
- 'eachkey',
- 'eachline',
- 'eachlinebreak',
- 'eachmatch',
- 'eachnode',
- 'eachpair',
- 'eachpath',
- 'eachpathrecursive',
- 'eachrow',
- 'eachsub',
- 'eachword',
- 'eachwordbreak',
- 'element',
- 'eligiblepath',
- 'eligiblepaths',
- 'encodebase64',
- 'encodehex',
- 'encodehtml',
- 'encodehtmltoxml',
- 'encodemd5',
- 'encodepassword',
- 'encodeqp',
- 'encodesql',
- 'encodesql92',
- 'encodeurl',
- 'encodevalue',
- 'encodexml',
- 'encoding',
- 'enctype',
- 'end',
- 'endjs',
- 'endssl',
- 'endswith',
- 'endtls',
- 'enhance',
- 'ensurestopped',
- 'entities',
- 'entry',
- 'env',
- 'equals',
- 'era',
- 'erf',
- 'erfc',
- 'err',
- 'errcode',
- 'errmsg',
- 'error',
- 'errors',
- 'errstack',
- 'escape_member',
- 'establisherrorstate',
- 'exceptioncheck',
- 'exceptionclear',
- 'exceptiondescribe',
- 'exceptionoccurred',
- 'exchange',
- 'execinits',
- 'execinstalls',
- 'execute',
- 'executelazy',
- 'executenow',
- 'exists',
- 'exit',
- 'exitcode',
- 'exp',
- 'expire',
- 'expireminutes',
- 'expiresminutes',
- 'expm1',
- 'export16bits',
- 'export32bits',
- 'export64bits',
- 'export8bits',
- 'exportas',
- 'exportbytes',
- 'exportfdf',
- 'exportpointerbits',
- 'exportsigned16bits',
- 'exportsigned32bits',
- 'exportsigned64bits',
- 'exportsigned8bits',
- 'exportstring',
- 'expose',
- 'extendedyear',
- 'extensiondelimiter',
- 'extensions',
- 'extract',
- 'extractfast',
- 'extractfastone',
- 'extractimage',
- 'extractone',
- 'f',
- 'fabs',
- 'fail',
- 'failnoconnectionhandler',
- 'family',
- 'fatalerror',
- 'fcgireq',
- 'fchdir',
- 'fchmod',
- 'fchown',
- 'fd',
- 'features',
- 'fetchdata',
- 'fieldnames',
- 'fieldposition',
- 'fieldstable',
- 'fieldtype',
- 'fieldvalue',
- 'file',
- 'filename',
- 'filenames',
- 'filequeue',
- 'fileuploads',
- 'fileuploadsary',
- 'filterinputcolumn',
- 'finalize',
- 'find',
- 'findall',
- 'findandmodify',
- 'findbucket',
- 'findcase',
- 'findclass',
- 'findcount',
- 'finddescendant',
- 'findfirst',
- 'findinclude',
- 'findinctx',
- 'findindex',
- 'findlast',
- 'findpattern',
- 'findposition',
- 'findsymbols',
- 'first',
- 'firstchild',
- 'firstcomponent',
- 'firstdayofweek',
- 'firstnode',
- 'fixformat',
- 'flags',
- 'fliph',
- 'flipv',
- 'floor',
- 'flush',
- 'foldcase',
- 'foo',
- 'for',
- 'forcedrowid',
- 'foreach',
- 'foreachaccept',
- 'foreachbyte',
- 'foreachcharacter',
- 'foreachchild',
- 'foreachday',
- 'foreachentry',
- 'foreachfile',
- 'foreachfilename',
- 'foreachkey',
- 'foreachline',
- 'foreachlinebreak',
- 'foreachmatch',
- 'foreachnode',
- 'foreachpair',
- 'foreachpathcomponent',
- 'foreachrow',
- 'foreachspool',
- 'foreachsub',
- 'foreachwordbreak',
- 'form',
- 'format',
- 'formatas',
- 'formatcontextelement',
- 'formatcontextelements',
- 'formatnumber',
- 'free',
- 'frexp',
- 'from',
- 'fromname',
- 'fromport',
- 'fromreflectedfield',
- 'fromreflectedmethod',
- 'front',
- 'fsync',
- 'ftpdeletefile',
- 'ftpgetlisting',
- 'ftruncate',
- 'fullpath',
- 'fx',
- 'gamma',
- 'gatewayinterface',
- 'gen',
- 'generatechecksum',
- 'get',
- 'getabswidth',
- 'getalignment',
- 'getappsource',
- 'getarraylength',
- 'getattr',
- 'getattribute',
- 'getattributenamespace',
- 'getattributenode',
- 'getattributenodens',
- 'getattributens',
- 'getbarheight',
- 'getbarmultiplier',
- 'getbarwidth',
- 'getbaseline',
- 'getbold',
- 'getbooleanarrayelements',
- 'getbooleanarrayregion',
- 'getbooleanfield',
- 'getbordercolor',
- 'getborderwidth',
- 'getbytearrayelements',
- 'getbytearrayregion',
- 'getbytefield',
- 'getchararrayelements',
- 'getchararrayregion',
- 'getcharfield',
- 'getclass',
- 'getcode',
- 'getcolor',
- 'getcolumn',
- 'getcolumncount',
- 'getcolumns',
- 'getdatabasebyalias',
- 'getdatabasebyid',
- 'getdatabasebyname',
- 'getdatabasehost',
- 'getdatabasetable',
- 'getdatabasetablebyalias',
- 'getdatabasetablebyid',
- 'getdatabasetablepart',
- 'getdatasource',
- 'getdatasourcedatabase',
- 'getdatasourcedatabasebyid',
- 'getdatasourcehost',
- 'getdatasourceid',
- 'getdatasourcename',
- 'getdefaultstorage',
- 'getdoublearrayelements',
- 'getdoublearrayregion',
- 'getdoublefield',
- 'getelementbyid',
- 'getelementsbytagname',
- 'getelementsbytagnamens',
- 'getencoding',
- 'getface',
- 'getfield',
- 'getfieldid',
- 'getfile',
- 'getfloatarrayelements',
- 'getfloatarrayregion',
- 'getfloatfield',
- 'getfont',
- 'getformat',
- 'getfullfontname',
- 'getgroup',
- 'getgroupid',
- 'getheader',
- 'getheaders',
- 'gethostdatabase',
- 'gethtmlattr',
- 'gethtmlattrstring',
- 'getinclude',
- 'getintarrayelements',
- 'getintarrayregion',
- 'getintfield',
- 'getisocomment',
- 'getitalic',
- 'getlasterror',
- 'getlcapitype',
- 'getlibrary',
- 'getlongarrayelements',
- 'getlongarrayregion',
- 'getlongfield',
- 'getmargins',
- 'getmethodid',
- 'getmode',
- 'getnameditem',
- 'getnameditemns',
- 'getnode',
- 'getnumericvalue',
- 'getobjectarrayelement',
- 'getobjectclass',
- 'getobjectfield',
- 'getpadding',
- 'getpagenumber',
- 'getparts',
- 'getprefs',
- 'getpropertyvalue',
- 'getprowcount',
- 'getpsfontname',
- 'getrange',
- 'getrowcount',
- 'getset',
- 'getshortarrayelements',
- 'getshortarrayregion',
- 'getshortfield',
- 'getsize',
- 'getsortfieldspart',
- 'getspacing',
- 'getstaticbooleanfield',
- 'getstaticbytefield',
- 'getstaticcharfield',
- 'getstaticdoublefield',
- 'getstaticfieldid',
- 'getstaticfloatfield',
- 'getstaticintfield',
- 'getstaticlongfield',
- 'getstaticmethodid',
- 'getstaticobjectfield',
- 'getstaticshortfield',
- 'getstatus',
- 'getstringchars',
- 'getstringlength',
- 'getstyle',
- 'getsupportedencodings',
- 'gettablebyid',
- 'gettext',
- 'gettextalignment',
- 'gettextsize',
- 'gettrigger',
- 'gettype',
- 'getunderline',
- 'getuniquealiasname',
- 'getuser',
- 'getuserbykey',
- 'getuserid',
- 'getversion',
- 'getzipfilebytes',
- 'givenblock',
- 'gmt',
- 'gotconnection',
- 'gotfileupload',
- 'groupby',
- 'groupcolumns',
- 'groupcount',
- 'groupjoin',
- 'handlebreakpointget',
- 'handlebreakpointlist',
- 'handlebreakpointremove',
- 'handlebreakpointset',
- 'handlebreakpointupdate',
- 'handlecontextget',
- 'handlecontextnames',
- 'handlecontinuation',
- 'handledefinitionbody',
- 'handledefinitionhead',
- 'handledefinitionresource',
- 'handledevconnection',
- 'handleevalexpired',
- 'handlefeatureget',
- 'handlefeatureset',
- 'handlelassoappcontent',
- 'handlelassoappresponse',
- 'handlenested',
- 'handlenormalconnection',
- 'handlepop',
- 'handleresource',
- 'handlesource',
- 'handlestackget',
- 'handlestderr',
- 'handlestdin',
- 'handlestdout',
- 'handshake',
- 'hasattribute',
- 'hasattributens',
- 'hasattributes',
- 'hasbinaryproperty',
- 'haschildnodes',
- 'hasexpired',
- 'hasfeature',
- 'hasfield',
- 'hash',
- 'hashtmlattr',
- 'hasmethod',
- 'hastable',
- 'hastrailingcomponent',
- 'hasvalue',
- 'head',
- 'header',
- 'headerbytes',
- 'headers',
- 'headersarray',
- 'headersmap',
- 'height',
- 'histogram',
- 'home',
- 'host',
- 'hostcolumnnames',
- 'hostcolumnnames2',
- 'hostcolumns',
- 'hostcolumns2',
- 'hostdatasource',
- 'hostextra',
- 'hostid',
- 'hostisdynamic',
- 'hostmap',
- 'hostmap2',
- 'hostname',
- 'hostpassword',
- 'hostport',
- 'hostschema',
- 'hosttableencoding',
- 'hosttonet16',
- 'hosttonet32',
- 'hosttonet64',
- 'hostusername',
- 'hour',
- 'hourofampm',
- 'hourofday',
- 'hoursbetween',
- 'href',
- 'hreflang',
- 'htmlcontent',
- 'htmlizestacktrace',
- 'htmlizestacktracelink',
- 'httpaccept',
- 'httpacceptencoding',
- 'httpacceptlanguage',
- 'httpauthorization',
- 'httpcachecontrol',
- 'httpconnection',
- 'httpcookie',
- 'httpequiv',
- 'httphost',
- 'httpreferer',
- 'httpreferrer',
- 'httpuseragent',
- 'hypot',
- 'id',
- 'idealinmemory',
- 'idle',
- 'idmap',
- 'ifempty',
- 'ifkey',
- 'ifnotempty',
- 'ifnotkey',
- 'ignorecase',
- 'ilogb',
- 'imgptr',
- 'implementation',
- 'import16bits',
- 'import32bits',
- 'import64bits',
- 'import8bits',
- 'importas',
- 'importbytes',
- 'importfdf',
- 'importnode',
- 'importpointer',
- 'importstring',
- 'in',
- 'include',
- 'includebytes',
- 'includelibrary',
- 'includelibraryonce',
- 'includeonce',
- 'includes',
- 'includestack',
- 'indaylighttime',
- 'index',
- 'init',
- 'initialize',
- 'initrequest',
- 'inits',
- 'inneroncompare',
- 'input',
- 'inputcolumns',
- 'inputtype',
- 'insert',
- 'insertback',
- 'insertbefore',
- 'insertdata',
- 'insertfirst',
- 'insertfrom',
- 'insertfront',
- 'insertinternal',
- 'insertlast',
- 'insertpage',
- 'install',
- 'installs',
- 'integer',
- 'internalsubset',
- 'interrupt',
- 'intersection',
- 'inttocond',
- 'invoke',
- 'invokeautocollect',
- 'invokeuntil',
- 'invokewhile',
- 'ioctl',
- 'isa',
- 'isalive',
- 'isallof',
- 'isalnum',
- 'isalpha',
- 'isanyof',
- 'isbase',
- 'isblank',
- 'iscntrl',
- 'isdigit',
- 'isdir',
+ 'ldap_scope_onelevel',
+ 'ldap_scope_subtree',
+ 'library_once',
+ 'library',
+ 'ljapi_initialize',
+ 'locale_availablelocales',
+ 'locale_canada',
+ 'locale_canadafrench',
+ 'locale_china',
+ 'locale_chinese',
+ 'locale_default',
+ 'locale_english',
+ 'locale_format_style_date_time',
+ 'locale_format_style_default',
+ 'locale_format_style_full',
+ 'locale_format_style_long',
+ 'locale_format_style_medium',
+ 'locale_format_style_none',
+ 'locale_format_style_short',
+ 'locale_format',
+ 'locale_france',
+ 'locale_french',
+ 'locale_german',
+ 'locale_germany',
+ 'locale_isocountries',
+ 'locale_isolanguages',
+ 'locale_italian',
+ 'locale_italy',
+ 'locale_japan',
+ 'locale_japanese',
+ 'locale_korea',
+ 'locale_korean',
+ 'locale_prc',
+ 'locale_setdefault',
+ 'locale_simplifiedchinese',
+ 'locale_taiwan',
+ 'locale_traditionalchinese',
+ 'locale_uk',
+ 'locale_us',
+ 'log_always',
+ 'log_critical',
+ 'log_deprecated',
+ 'log_destination_console',
+ 'log_destination_database',
+ 'log_destination_file',
+ 'log_detail',
+ 'log_initialize',
+ 'log_level_critical',
+ 'log_level_deprecated',
+ 'log_level_detail',
+ 'log_level_sql',
+ 'log_level_warning',
+ 'log_max_file_size',
+ 'log_setdestination',
+ 'log_sql',
+ 'log_trim_file_size',
+ 'log_warning',
+ 'log',
+ 'loop_abort',
+ 'loop_continue',
+ 'loop_count',
+ 'loop_key_pop',
+ 'loop_key_push',
+ 'loop_key',
+ 'loop_pop',
+ 'loop_push',
+ 'loop_value_pop',
+ 'loop_value_push',
+ 'loop_value',
+ 'loop',
+ 'lt',
+ 'lte',
+ 'main_thread_only',
+ 'max',
+ 'maxrecords_value',
+ 'median',
+ 'method_name',
+ 'micros',
+ 'millis',
+ 'min',
+ 'minimal',
+ 'mongo_insert_continue_on_error',
+ 'mongo_insert_no_validate',
+ 'mongo_insert_none',
+ 'mongo_query_await_data',
+ 'mongo_query_exhaust',
+ 'mongo_query_no_cursor_timeout',
+ 'mongo_query_none',
+ 'mongo_query_oplog_replay',
+ 'mongo_query_partial',
+ 'mongo_query_slave_ok',
+ 'mongo_query_tailable_cursor',
+ 'mongo_remove_none',
+ 'mongo_remove_single_remove',
+ 'mongo_update_multi_update',
+ 'mongo_update_no_validate',
+ 'mongo_update_none',
+ 'mongo_update_upsert',
+ 'mustache_compile_file',
+ 'mustache_compile_string',
+ 'mustache_include',
+ 'mysqlds',
+ 'namespace_global',
+ 'namespace_import',
+ 'namespace_using',
+ 'nbw',
+ 'ncn',
+ 'neq',
+ 'net_connectinprogress',
+ 'net_connectok',
+ 'net_typessl',
+ 'net_typessltcp',
+ 'net_typessludp',
+ 'net_typetcp',
+ 'net_typeudp',
+ 'net_waitread',
+ 'net_waittimeout',
+ 'net_waitwrite',
+ 'new',
+ 'none',
+ 'nrx',
+ 'nslookup',
+ 'odbc_session_driver_mssql',
+ 'odbc',
+ 'output_none',
+ 'output',
+ 'pdf_package',
+ 'pdf_rectangle',
+ 'pdf_serve',
+ 'pi',
+ 'portal',
+ 'postgresql',
+ 'process',
+ 'protect_now',
+ 'protect',
+ 'queriable_average',
+ 'queriable_defaultcompare',
+ 'queriable_do',
+ 'queriable_internal_combinebindings',
+ 'queriable_max',
+ 'queriable_min',
+ 'queriable_qsort',
+ 'queriable_reversecompare',
+ 'queriable_sum',
+ 'random_seed',
+ 'range',
+ 'records_array',
+ 'records_map',
+ 'records',
+ 'redirect_url',
+ 'referer_url',
+ 'referrer_url',
+ 'register_thread',
+ 'register',
+ 'response_filepath',
+ 'response_localpath',
+ 'response_path',
+ 'response_realm',
+ 'response_root',
+ 'resultset_count',
+ 'resultset',
+ 'resultsets',
+ 'rows_array',
+ 'rows_impl',
+ 'rows',
+ 'rx',
+ 'schema_name',
+ 'security_database',
+ 'security_default_realm',
+ 'security_initialize',
+ 'security_table_groups',
+ 'security_table_ug_map',
+ 'security_table_users',
+ 'selected',
+ 'series',
+ 'server_admin',
+ 'server_ip',
+ 'server_name',
+ 'server_port',
+ 'server_protocol',
+ 'server_push',
+ 'server_signature',
+ 'server_software',
+ 'session_abort',
+ 'session_addvar',
+ 'session_decorate',
+ 'session_deleteexpired',
+ 'session_end',
+ 'session_getdefaultdriver',
+ 'session_id',
+ 'session_initialize',
+ 'session_removevar',
+ 'session_result',
+ 'session_setdefaultdriver',
+ 'session_start',
+ 'shown_count',
+ 'shown_first',
+ 'shown_last',
+ 'site_id',
+ 'site_name',
+ 'skiprecords_value',
+ 'sleep',
+ 'split_thread',
+ 'sqlite_abort',
+ 'sqlite_auth',
+ 'sqlite_blob',
+ 'sqlite_busy',
+ 'sqlite_cantopen',
+ 'sqlite_constraint',
+ 'sqlite_corrupt',
+ 'sqlite_createdb',
+ 'sqlite_done',
+ 'sqlite_empty',
+ 'sqlite_error',
+ 'sqlite_float',
+ 'sqlite_format',
+ 'sqlite_full',
+ 'sqlite_integer',
+ 'sqlite_internal',
+ 'sqlite_interrupt',
+ 'sqlite_ioerr',
+ 'sqlite_locked',
+ 'sqlite_mismatch',
+ 'sqlite_misuse',
+ 'sqlite_nolfs',
+ 'sqlite_nomem',
+ 'sqlite_notadb',
+ 'sqlite_notfound',
+ 'sqlite_null',
+ 'sqlite_ok',
+ 'sqlite_perm',
+ 'sqlite_protocol',
+ 'sqlite_range',
+ 'sqlite_readonly',
+ 'sqlite_row',
+ 'sqlite_schema',
+ 'sqlite_setsleepmillis',
+ 'sqlite_setsleeptries',
+ 'sqlite_text',
+ 'sqlite_toobig',
+ 'sqliteconnector',
+ 'staticarray_join',
+ 'stdout',
+ 'stdoutnl',
+ 'string_validcharset',
+ 'suspend',
+ 'sys_appspath',
+ 'sys_chroot',
+ 'sys_clock',
+ 'sys_clockspersec',
+ 'sys_credits',
+ 'sys_databasespath',
+ 'sys_detach_exec',
+ 'sys_difftime',
+ 'sys_dll_ext',
+ 'sys_drand48',
+ 'sys_environ',
+ 'sys_eol',
+ 'sys_erand48',
+ 'sys_errno',
+ 'sys_exec_pid_to_os_pid',
+ 'sys_exec',
+ 'sys_exit',
+ 'sys_fork',
+ 'sys_garbagecollect',
+ 'sys_getbytessincegc',
+ 'sys_getchar',
+ 'sys_getegid',
+ 'sys_getenv',
+ 'sys_geteuid',
+ 'sys_getgid',
+ 'sys_getgrnam',
+ 'sys_getheapfreebytes',
+ 'sys_getheapsize',
+ 'sys_getlogin',
+ 'sys_getpid',
+ 'sys_getppid',
+ 'sys_getpwnam',
+ 'sys_getpwuid',
+ 'sys_getstartclock',
+ 'sys_getthreadcount',
+ 'sys_getuid',
+ 'sys_growheapby',
+ 'sys_homepath',
+ 'sys_is_full_path',
+ 'sys_is_windows',
+ 'sys_isfullpath',
+ 'sys_iswindows',
+ 'sys_iterate',
+ 'sys_jrand48',
+ 'sys_kill_exec',
+ 'sys_kill',
+ 'sys_lcong48',
+ 'sys_librariespath',
+ 'sys_listtraits',
+ 'sys_listtypes',
+ 'sys_listunboundmethods',
+ 'sys_loadlibrary',
+ 'sys_lrand48',
+ 'sys_masterhomepath',
+ 'sys_mrand48',
+ 'sys_nrand48',
+ 'sys_pid_exec',
+ 'sys_pointersize',
+ 'sys_rand',
+ 'sys_random',
+ 'sys_seed48',
+ 'sys_setenv',
+ 'sys_setgid',
+ 'sys_setsid',
+ 'sys_setuid',
+ 'sys_sigabrt',
+ 'sys_sigalrm',
+ 'sys_sigbus',
+ 'sys_sigchld',
+ 'sys_sigcont',
+ 'sys_sigfpe',
+ 'sys_sighup',
+ 'sys_sigill',
+ 'sys_sigint',
+ 'sys_sigkill',
+ 'sys_sigpipe',
+ 'sys_sigprof',
+ 'sys_sigquit',
+ 'sys_sigsegv',
+ 'sys_sigstop',
+ 'sys_sigsys',
+ 'sys_sigterm',
+ 'sys_sigtrap',
+ 'sys_sigtstp',
+ 'sys_sigttin',
+ 'sys_sigttou',
+ 'sys_sigurg',
+ 'sys_sigusr1',
+ 'sys_sigusr2',
+ 'sys_sigvtalrm',
+ 'sys_sigxcpu',
+ 'sys_sigxfsz',
+ 'sys_srand',
+ 'sys_srand48',
+ 'sys_srandom',
+ 'sys_strerror',
+ 'sys_supportpath',
+ 'sys_test_exec',
+ 'sys_time',
+ 'sys_uname',
+ 'sys_unsetenv',
+ 'sys_usercapimodulepath',
+ 'sys_userstartuppath',
+ 'sys_version',
+ 'sys_wait_exec',
+ 'sys_waitpid',
+ 'sys_wcontinued',
+ 'sys_while',
+ 'sys_wnohang',
+ 'sys_wuntraced',
+ 'table_name',
+ 'tag_exists',
+ 'tag_name',
+ 'thread_var_get',
+ 'thread_var_pop',
+ 'thread_var_push',
+ 'threadvar_find',
+ 'threadvar_get',
+ 'threadvar_set_asrt',
+ 'threadvar_set',
+ 'timer',
+ 'token_value',
+ 'treemap',
+ 'u_lb_alphabetic',
+ 'u_lb_ambiguous',
+ 'u_lb_break_after',
+ 'u_lb_break_before',
+ 'u_lb_break_both',
+ 'u_lb_break_symbols',
+ 'u_lb_carriage_return',
+ 'u_lb_close_punctuation',
+ 'u_lb_combining_mark',
+ 'u_lb_complex_context',
+ 'u_lb_contingent_break',
+ 'u_lb_exclamation',
+ 'u_lb_glue',
+ 'u_lb_h2',
+ 'u_lb_h3',
+ 'u_lb_hyphen',
+ 'u_lb_ideographic',
+ 'u_lb_infix_numeric',
+ 'u_lb_inseparable',
+ 'u_lb_jl',
+ 'u_lb_jt',
+ 'u_lb_jv',
+ 'u_lb_line_feed',
+ 'u_lb_mandatory_break',
+ 'u_lb_next_line',
+ 'u_lb_nonstarter',
+ 'u_lb_numeric',
+ 'u_lb_open_punctuation',
+ 'u_lb_postfix_numeric',
+ 'u_lb_prefix_numeric',
+ 'u_lb_quotation',
+ 'u_lb_space',
+ 'u_lb_surrogate',
+ 'u_lb_unknown',
+ 'u_lb_word_joiner',
+ 'u_lb_zwspace',
+ 'u_nt_decimal',
+ 'u_nt_digit',
+ 'u_nt_none',
+ 'u_nt_numeric',
+ 'u_sb_aterm',
+ 'u_sb_close',
+ 'u_sb_format',
+ 'u_sb_lower',
+ 'u_sb_numeric',
+ 'u_sb_oletter',
+ 'u_sb_other',
+ 'u_sb_sep',
+ 'u_sb_sp',
+ 'u_sb_sterm',
+ 'u_sb_upper',
+ 'u_wb_aletter',
+ 'u_wb_extendnumlet',
+ 'u_wb_format',
+ 'u_wb_katakana',
+ 'u_wb_midletter',
+ 'u_wb_midnum',
+ 'u_wb_numeric',
+ 'u_wb_other',
+ 'ucal_ampm',
+ 'ucal_dayofmonth',
+ 'ucal_dayofweek',
+ 'ucal_dayofweekinmonth',
+ 'ucal_dayofyear',
+ 'ucal_daysinfirstweek',
+ 'ucal_dowlocal',
+ 'ucal_dstoffset',
+ 'ucal_era',
+ 'ucal_extendedyear',
+ 'ucal_firstdayofweek',
+ 'ucal_hour',
+ 'ucal_hourofday',
+ 'ucal_julianday',
+ 'ucal_lenient',
+ 'ucal_listtimezones',
+ 'ucal_millisecond',
+ 'ucal_millisecondsinday',
+ 'ucal_minute',
+ 'ucal_month',
+ 'ucal_second',
+ 'ucal_weekofmonth',
+ 'ucal_weekofyear',
+ 'ucal_year',
+ 'ucal_yearwoy',
+ 'ucal_zoneoffset',
+ 'uchar_age',
+ 'uchar_alphabetic',
+ 'uchar_ascii_hex_digit',
+ 'uchar_bidi_class',
+ 'uchar_bidi_control',
+ 'uchar_bidi_mirrored',
+ 'uchar_bidi_mirroring_glyph',
+ 'uchar_block',
+ 'uchar_canonical_combining_class',
+ 'uchar_case_folding',
+ 'uchar_case_sensitive',
+ 'uchar_dash',
+ 'uchar_decomposition_type',
+ 'uchar_default_ignorable_code_point',
+ 'uchar_deprecated',
+ 'uchar_diacritic',
+ 'uchar_east_asian_width',
+ 'uchar_extender',
+ 'uchar_full_composition_exclusion',
+ 'uchar_general_category_mask',
+ 'uchar_general_category',
+ 'uchar_grapheme_base',
+ 'uchar_grapheme_cluster_break',
+ 'uchar_grapheme_extend',
+ 'uchar_grapheme_link',
+ 'uchar_hangul_syllable_type',
+ 'uchar_hex_digit',
+ 'uchar_hyphen',
+ 'uchar_id_continue',
+ 'uchar_ideographic',
+ 'uchar_ids_binary_operator',
+ 'uchar_ids_trinary_operator',
+ 'uchar_iso_comment',
+ 'uchar_join_control',
+ 'uchar_joining_group',
+ 'uchar_joining_type',
+ 'uchar_lead_canonical_combining_class',
+ 'uchar_line_break',
+ 'uchar_logical_order_exception',
+ 'uchar_lowercase_mapping',
+ 'uchar_lowercase',
+ 'uchar_math',
+ 'uchar_name',
+ 'uchar_nfc_inert',
+ 'uchar_nfc_quick_check',
+ 'uchar_nfd_inert',
+ 'uchar_nfd_quick_check',
+ 'uchar_nfkc_inert',
+ 'uchar_nfkc_quick_check',
+ 'uchar_nfkd_inert',
+ 'uchar_nfkd_quick_check',
+ 'uchar_noncharacter_code_point',
+ 'uchar_numeric_type',
+ 'uchar_numeric_value',
+ 'uchar_pattern_syntax',
+ 'uchar_pattern_white_space',
+ 'uchar_posix_alnum',
+ 'uchar_posix_blank',
+ 'uchar_posix_graph',
+ 'uchar_posix_print',
+ 'uchar_posix_xdigit',
+ 'uchar_quotation_mark',
+ 'uchar_radical',
+ 'uchar_s_term',
+ 'uchar_script',
+ 'uchar_segment_starter',
+ 'uchar_sentence_break',
+ 'uchar_simple_case_folding',
+ 'uchar_simple_lowercase_mapping',
+ 'uchar_simple_titlecase_mapping',
+ 'uchar_simple_uppercase_mapping',
+ 'uchar_soft_dotted',
+ 'uchar_terminal_punctuation',
+ 'uchar_titlecase_mapping',
+ 'uchar_trail_canonical_combining_class',
+ 'uchar_unicode_1_name',
+ 'uchar_unified_ideograph',
+ 'uchar_uppercase_mapping',
+ 'uchar_uppercase',
+ 'uchar_variation_selector',
+ 'uchar_white_space',
+ 'uchar_word_break',
+ 'uchar_xid_continue',
+ 'uncompress',
+ 'usage',
+ 'uuid_compare',
+ 'uuid_copy',
+ 'uuid_generate_random',
+ 'uuid_generate_time',
+ 'uuid_generate',
+ 'uuid_is_null',
+ 'uuid_parse',
+ 'uuid_unparse_lower',
+ 'uuid_unparse_upper',
+ 'uuid_unparse',
+ 'value_list',
+ 'value_listitem',
+ 'valuelistitem',
+ 'var_keys',
+ 'var_values',
+ 'wap_isenabled',
+ 'wap_maxbuttons',
+ 'wap_maxcolumns',
+ 'wap_maxhorzpixels',
+ 'wap_maxrows',
+ 'wap_maxvertpixels',
+ 'web_handlefcgirequest',
+ 'web_node_content_representation_css',
+ 'web_node_content_representation_html',
+ 'web_node_content_representation_js',
+ 'web_node_content_representation_xhr',
+ 'web_node_forpath',
+ 'web_nodes_initialize',
+ 'web_nodes_normalizeextension',
+ 'web_nodes_processcontentnode',
+ 'web_nodes_requesthandler',
+ 'web_response_nodesentry',
+ 'web_router_database',
+ 'web_router_initialize',
+ 'websocket_handler_timeout',
+ 'wexitstatus',
+ 'wifcontinued',
+ 'wifexited',
+ 'wifsignaled',
+ 'wifstopped',
+ 'wstopsig',
+ 'wtermsig',
+ 'xml_transform',
+ 'xml',
+ 'zip_add_dir',
+ 'zip_add',
+ 'zip_checkcons',
+ 'zip_close',
+ 'zip_cm_bzip2',
+ 'zip_cm_default',
+ 'zip_cm_deflate',
+ 'zip_cm_deflate64',
+ 'zip_cm_implode',
+ 'zip_cm_pkware_implode',
+ 'zip_cm_reduce_1',
+ 'zip_cm_reduce_2',
+ 'zip_cm_reduce_3',
+ 'zip_cm_reduce_4',
+ 'zip_cm_shrink',
+ 'zip_cm_store',
+ 'zip_create',
+ 'zip_delete',
+ 'zip_em_3des_112',
+ 'zip_em_3des_168',
+ 'zip_em_aes_128',
+ 'zip_em_aes_192',
+ 'zip_em_aes_256',
+ 'zip_em_des',
+ 'zip_em_none',
+ 'zip_em_rc2_old',
+ 'zip_em_rc2',
+ 'zip_em_rc4',
+ 'zip_em_trad_pkware',
+ 'zip_em_unknown',
+ 'zip_er_changed',
+ 'zip_er_close',
+ 'zip_er_compnotsupp',
+ 'zip_er_crc',
+ 'zip_er_deleted',
+ 'zip_er_eof',
+ 'zip_er_exists',
+ 'zip_er_incons',
+ 'zip_er_internal',
+ 'zip_er_inval',
+ 'zip_er_memory',
+ 'zip_er_multidisk',
+ 'zip_er_noent',
+ 'zip_er_nozip',
+ 'zip_er_ok',
+ 'zip_er_open',
+ 'zip_er_read',
+ 'zip_er_remove',
+ 'zip_er_rename',
+ 'zip_er_seek',
+ 'zip_er_tmpopen',
+ 'zip_er_write',
+ 'zip_er_zipclosed',
+ 'zip_er_zlib',
+ 'zip_error_get_sys_type',
+ 'zip_error_get',
+ 'zip_error_to_str',
+ 'zip_et_none',
+ 'zip_et_sys',
+ 'zip_et_zlib',
+ 'zip_excl',
+ 'zip_fclose',
+ 'zip_file_error_get',
+ 'zip_file_strerror',
+ 'zip_fl_compressed',
+ 'zip_fl_nocase',
+ 'zip_fl_nodir',
+ 'zip_fl_unchanged',
+ 'zip_fopen_index',
+ 'zip_fopen',
+ 'zip_fread',
+ 'zip_get_archive_comment',
+ 'zip_get_file_comment',
+ 'zip_get_name',
+ 'zip_get_num_files',
+ 'zip_name_locate',
+ 'zip_open',
+ 'zip_rename',
+ 'zip_replace',
+ 'zip_set_archive_comment',
+ 'zip_set_file_comment',
+ 'zip_stat_index',
+ 'zip_stat',
+ 'zip_strerror',
+ 'zip_unchange_all',
+ 'zip_unchange_archive',
+ 'zip_unchange',
+ 'zlib_version',
+ ),
+ 'Lasso 8 Tags': (
+ '__char',
+ '__sync_timestamp__',
+ '_admin_addgroup',
+ '_admin_adduser',
+ '_admin_defaultconnector',
+ '_admin_defaultconnectornames',
+ '_admin_defaultdatabase',
+ '_admin_defaultfield',
+ '_admin_defaultgroup',
+ '_admin_defaulthost',
+ '_admin_defaulttable',
+ '_admin_defaultuser',
+ '_admin_deleteconnector',
+ '_admin_deletedatabase',
+ '_admin_deletefield',
+ '_admin_deletegroup',
+ '_admin_deletehost',
+ '_admin_deletetable',
+ '_admin_deleteuser',
+ '_admin_duplicategroup',
+ '_admin_internaldatabase',
+ '_admin_listconnectors',
+ '_admin_listdatabases',
+ '_admin_listfields',
+ '_admin_listgroups',
+ '_admin_listhosts',
+ '_admin_listtables',
+ '_admin_listusers',
+ '_admin_refreshconnector',
+ '_admin_refreshsecurity',
+ '_admin_servicepath',
+ '_admin_updateconnector',
+ '_admin_updatedatabase',
+ '_admin_updatefield',
+ '_admin_updategroup',
+ '_admin_updatehost',
+ '_admin_updatetable',
+ '_admin_updateuser',
+ '_chartfx_activation_string',
+ '_chartfx_getchallengestring',
+ '_chop_args',
+ '_chop_mimes',
+ '_client_addr_old',
+ '_client_address_old',
+ '_client_ip_old',
+ '_database_names',
+ '_datasource_reload',
+ '_date_current',
+ '_date_format',
+ '_date_msec',
+ '_date_parse',
+ '_execution_timelimit',
+ '_file_chmod',
+ '_initialize',
+ '_jdbc_acceptsurl',
+ '_jdbc_debug',
+ '_jdbc_deletehost',
+ '_jdbc_driverclasses',
+ '_jdbc_driverinfo',
+ '_jdbc_metainfo',
+ '_jdbc_propertyinfo',
+ '_jdbc_setdriver',
+ '_lasso_param',
+ '_log_helper',
+ '_proc_noparam',
+ '_proc_withparam',
+ '_recursion_limit',
+ '_request_param',
+ '_security_binaryexpiration',
+ '_security_flushcaches',
+ '_security_isserialized',
+ '_security_serialexpiration',
+ '_srand',
+ '_strict_literals',
+ '_substring',
+ '_xmlrpc_exconverter',
+ '_xmlrpc_inconverter',
+ '_xmlrpc_xmlinconverter',
+ 'abort',
+ 'action_addinfo',
+ 'action_addrecord',
+ 'action_param',
+ 'action_params',
+ 'action_setfoundcount',
+ 'action_setrecordid',
+ 'action_settotalcount',
+ 'action_statement',
+ 'admin_allowedfileroots',
+ 'admin_changeuser',
+ 'admin_createuser',
+ 'admin_currentgroups',
+ 'admin_currentuserid',
+ 'admin_currentusername',
+ 'admin_getpref',
+ 'admin_groupassignuser',
+ 'admin_grouplistusers',
+ 'admin_groupremoveuser',
+ 'admin_lassoservicepath',
+ 'admin_listgroups',
+ 'admin_refreshlicensing',
+ 'admin_refreshsecurity',
+ 'admin_reloaddatasource',
+ 'admin_removepref',
+ 'admin_setpref',
+ 'admin_userexists',
+ 'admin_userlistgroups',
+ 'all',
+ 'and',
+ 'array',
+ 'array_iterator',
+ 'auth',
+ 'auth_admin',
+ 'auth_auth',
+ 'auth_custom',
+ 'auth_group',
+ 'auth_prompt',
+ 'auth_user',
+ 'base64',
+ 'bean',
+ 'bigint',
+ 'bom_utf16be',
+ 'bom_utf16le',
+ 'bom_utf32be',
+ 'bom_utf32le',
+ 'bom_utf8',
+ 'boolean',
+ 'bw',
+ 'bytes',
+ 'cache',
+ 'cache_delete',
+ 'cache_empty',
+ 'cache_exists',
+ 'cache_fetch',
+ 'cache_internal',
+ 'cache_maintenance',
+ 'cache_object',
+ 'cache_preferences',
+ 'cache_store',
+ 'case',
+ 'chartfx',
+ 'chartfx_records',
+ 'chartfx_serve',
+ 'checked',
+ 'choice_list',
+ 'choice_listitem',
+ 'choicelistitem',
+ 'cipher_decrypt',
+ 'cipher_digest',
+ 'cipher_encrypt',
+ 'cipher_hmac',
+ 'cipher_keylength',
+ 'cipher_list',
+ 'click_text',
+ 'client_addr',
+ 'client_address',
+ 'client_authorization',
+ 'client_browser',
+ 'client_contentlength',
+ 'client_contenttype',
+ 'client_cookielist',
+ 'client_cookies',
+ 'client_encoding',
+ 'client_formmethod',
+ 'client_getargs',
+ 'client_getparams',
+ 'client_headers',
+ 'client_ip',
+ 'client_ipfrominteger',
+ 'client_iptointeger',
+ 'client_password',
+ 'client_postargs',
+ 'client_postparams',
+ 'client_type',
+ 'client_url',
+ 'client_username',
+ 'cn',
+ 'column',
+ 'column_name',
+ 'column_names',
+ 'compare_beginswith',
+ 'compare_contains',
+ 'compare_endswith',
+ 'compare_equalto',
+ 'compare_greaterthan',
+ 'compare_greaterthanorequals',
+ 'compare_greaterthanorequls',
+ 'compare_lessthan',
+ 'compare_lessthanorequals',
+ 'compare_notbeginswith',
+ 'compare_notcontains',
+ 'compare_notendswith',
+ 'compare_notequalto',
+ 'compare_notregexp',
+ 'compare_regexp',
+ 'compare_strictequalto',
+ 'compare_strictnotequalto',
+ 'compiler_removecacheddoc',
+ 'compiler_setdefaultparserflags',
+ 'compress',
+ 'content_body',
+ 'content_encoding',
+ 'content_header',
+ 'content_type',
+ 'cookie',
+ 'cookie_set',
+ 'curl_ftp_getfile',
+ 'curl_ftp_getlisting',
+ 'curl_ftp_putfile',
+ 'curl_include_url',
+ 'currency',
+ 'database_changecolumn',
+ 'database_changefield',
+ 'database_createcolumn',
+ 'database_createfield',
+ 'database_createtable',
+ 'database_fmcontainer',
+ 'database_hostinfo',
+ 'database_inline',
+ 'database_name',
+ 'database_nameitem',
+ 'database_names',
+ 'database_realname',
+ 'database_removecolumn',
+ 'database_removefield',
+ 'database_removetable',
+ 'database_repeating',
+ 'database_repeating_valueitem',
+ 'database_repeatingvalueitem',
+ 'database_schemanameitem',
+ 'database_schemanames',
+ 'database_tablecolumn',
+ 'database_tablenameitem',
+ 'database_tablenames',
+ 'datasource_name',
+ 'datasource_register',
+ 'date',
+ 'date__date_current',
+ 'date__date_format',
+ 'date__date_msec',
+ 'date__date_parse',
+ 'date_add',
+ 'date_date',
+ 'date_difference',
+ 'date_duration',
+ 'date_format',
+ 'date_getcurrentdate',
+ 'date_getday',
+ 'date_getdayofweek',
+ 'date_gethour',
+ 'date_getlocaltimezone',
+ 'date_getminute',
+ 'date_getmonth',
+ 'date_getsecond',
+ 'date_gettime',
+ 'date_getyear',
+ 'date_gmttolocal',
+ 'date_localtogmt',
+ 'date_maximum',
+ 'date_minimum',
+ 'date_msec',
+ 'date_setformat',
+ 'date_subtract',
+ 'db_layoutnameitem',
+ 'db_layoutnames',
+ 'db_nameitem',
+ 'db_names',
+ 'db_tablenameitem',
+ 'db_tablenames',
+ 'dbi_column_names',
+ 'dbi_field_names',
+ 'decimal',
+ 'decimal_setglobaldefaultprecision',
+ 'decode_base64',
+ 'decode_bheader',
+ 'decode_hex',
+ 'decode_html',
+ 'decode_json',
+ 'decode_qheader',
+ 'decode_quotedprintable',
+ 'decode_quotedprintablebytes',
+ 'decode_url',
+ 'decode_xml',
+ 'decompress',
+ 'decrypt_blowfish',
+ 'decrypt_blowfish2',
+ 'default',
+ 'define_atbegin',
+ 'define_atend',
+ 'define_constant',
+ 'define_prototype',
+ 'define_tag',
+ 'define_tagp',
+ 'define_type',
+ 'define_typep',
+ 'deserialize',
+ 'directory_directorynameitem',
+ 'directory_lister',
+ 'directory_nameitem',
+ 'directorynameitem',
+ 'dns_default',
+ 'dns_lookup',
+ 'dns_response',
+ 'duration',
+ 'else',
+ 'email_batch',
+ 'email_compose',
+ 'email_digestchallenge',
+ 'email_digestresponse',
+ 'email_extract',
+ 'email_findemails',
+ 'email_immediate',
+ 'email_merge',
+ 'email_mxerror',
+ 'email_mxlookup',
+ 'email_parse',
+ 'email_pop',
+ 'email_queue',
+ 'email_result',
+ 'email_safeemail',
+ 'email_send',
+ 'email_smtp',
+ 'email_status',
+ 'email_token',
+ 'email_translatebreakstocrlf',
+ 'encode_base64',
+ 'encode_bheader',
+ 'encode_break',
+ 'encode_breaks',
+ 'encode_crc32',
+ 'encode_hex',
+ 'encode_html',
+ 'encode_htmltoxml',
+ 'encode_json',
+ 'encode_qheader',
+ 'encode_quotedprintable',
+ 'encode_quotedprintablebytes',
+ 'encode_set',
+ 'encode_smart',
+ 'encode_sql',
+ 'encode_sql92',
+ 'encode_stricturl',
+ 'encode_url',
+ 'encode_xml',
+ 'encrypt_blowfish',
+ 'encrypt_blowfish2',
+ 'encrypt_crammd5',
+ 'encrypt_hmac',
+ 'encrypt_md5',
+ 'eq',
+ 'error_adderror',
+ 'error_code',
+ 'error_code_aborted',
+ 'error_code_assert',
+ 'error_code_bof',
+ 'error_code_connectioninvalid',
+ 'error_code_couldnotclosefile',
+ 'error_code_couldnotcreateoropenfile',
+ 'error_code_couldnotdeletefile',
+ 'error_code_couldnotdisposememory',
+ 'error_code_couldnotlockmemory',
+ 'error_code_couldnotreadfromfile',
+ 'error_code_couldnotunlockmemory',
+ 'error_code_couldnotwritetofile',
+ 'error_code_criterianotmet',
+ 'error_code_datasourceerror',
+ 'error_code_directoryfull',
+ 'error_code_diskfull',
+ 'error_code_dividebyzero',
+ 'error_code_eof',
+ 'error_code_failure',
+ 'error_code_fieldrestriction',
+ 'error_code_file',
+ 'error_code_filealreadyexists',
+ 'error_code_filecorrupt',
+ 'error_code_fileinvalid',
+ 'error_code_fileinvalidaccessmode',
+ 'error_code_fileisclosed',
+ 'error_code_fileisopen',
+ 'error_code_filelocked',
+ 'error_code_filenotfound',
+ 'error_code_fileunlocked',
+ 'error_code_httpfilenotfound',
+ 'error_code_illegalinstruction',
+ 'error_code_illegaluseoffrozeninstance',
+ 'error_code_invaliddatabase',
+ 'error_code_invalidfilename',
+ 'error_code_invalidmemoryobject',
+ 'error_code_invalidparameter',
+ 'error_code_invalidpassword',
+ 'error_code_invalidpathname',
+ 'error_code_invalidusername',
+ 'error_code_ioerror',
+ 'error_code_loopaborted',
+ 'error_code_memory',
+ 'error_code_network',
+ 'error_code_nilpointer',
+ 'error_code_noerr',
+ 'error_code_nopermission',
+ 'error_code_outofmemory',
+ 'error_code_outofstackspace',
+ 'error_code_overflow',
+ 'error_code_postconditionfailed',
+ 'error_code_preconditionfailed',
+ 'error_code_resnotfound',
+ 'error_code_resource',
+ 'error_code_streamreaderror',
+ 'error_code_streamwriteerror',
+ 'error_code_syntaxerror',
+ 'error_code_tagnotfound',
+ 'error_code_unknownerror',
+ 'error_code_varnotfound',
+ 'error_code_volumedoesnotexist',
+ 'error_code_webactionnotsupported',
+ 'error_code_webadderror',
+ 'error_code_webdeleteerror',
+ 'error_code_webmodulenotfound',
+ 'error_code_webnosuchobject',
+ 'error_code_webrepeatingrelatedfield',
+ 'error_code_webrequiredfieldmissing',
+ 'error_code_webtimeout',
+ 'error_code_webupdateerror',
+ 'error_columnrestriction',
+ 'error_currenterror',
+ 'error_databaseconnectionunavailable',
+ 'error_databasetimeout',
+ 'error_deleteerror',
+ 'error_fieldrestriction',
+ 'error_filenotfound',
+ 'error_invaliddatabase',
+ 'error_invalidpassword',
+ 'error_invalidusername',
+ 'error_modulenotfound',
+ 'error_msg',
+ 'error_msg_aborted',
+ 'error_msg_assert',
+ 'error_msg_bof',
+ 'error_msg_connectioninvalid',
+ 'error_msg_couldnotclosefile',
+ 'error_msg_couldnotcreateoropenfile',
+ 'error_msg_couldnotdeletefile',
+ 'error_msg_couldnotdisposememory',
+ 'error_msg_couldnotlockmemory',
+ 'error_msg_couldnotreadfromfile',
+ 'error_msg_couldnotunlockmemory',
+ 'error_msg_couldnotwritetofile',
+ 'error_msg_criterianotmet',
+ 'error_msg_datasourceerror',
+ 'error_msg_directoryfull',
+ 'error_msg_diskfull',
+ 'error_msg_dividebyzero',
+ 'error_msg_eof',
+ 'error_msg_failure',
+ 'error_msg_fieldrestriction',
+ 'error_msg_file',
+ 'error_msg_filealreadyexists',
+ 'error_msg_filecorrupt',
+ 'error_msg_fileinvalid',
+ 'error_msg_fileinvalidaccessmode',
+ 'error_msg_fileisclosed',
+ 'error_msg_fileisopen',
+ 'error_msg_filelocked',
+ 'error_msg_filenotfound',
+ 'error_msg_fileunlocked',
+ 'error_msg_httpfilenotfound',
+ 'error_msg_illegalinstruction',
+ 'error_msg_illegaluseoffrozeninstance',
+ 'error_msg_invaliddatabase',
+ 'error_msg_invalidfilename',
+ 'error_msg_invalidmemoryobject',
+ 'error_msg_invalidparameter',
+ 'error_msg_invalidpassword',
+ 'error_msg_invalidpathname',
+ 'error_msg_invalidusername',
+ 'error_msg_ioerror',
+ 'error_msg_loopaborted',
+ 'error_msg_memory',
+ 'error_msg_network',
+ 'error_msg_nilpointer',
+ 'error_msg_noerr',
+ 'error_msg_nopermission',
+ 'error_msg_outofmemory',
+ 'error_msg_outofstackspace',
+ 'error_msg_overflow',
+ 'error_msg_postconditionfailed',
+ 'error_msg_preconditionfailed',
+ 'error_msg_resnotfound',
+ 'error_msg_resource',
+ 'error_msg_streamreaderror',
+ 'error_msg_streamwriteerror',
+ 'error_msg_syntaxerror',
+ 'error_msg_tagnotfound',
+ 'error_msg_unknownerror',
+ 'error_msg_varnotfound',
+ 'error_msg_volumedoesnotexist',
+ 'error_msg_webactionnotsupported',
+ 'error_msg_webadderror',
+ 'error_msg_webdeleteerror',
+ 'error_msg_webmodulenotfound',
+ 'error_msg_webnosuchobject',
+ 'error_msg_webrepeatingrelatedfield',
+ 'error_msg_webrequiredfieldmissing',
+ 'error_msg_webtimeout',
+ 'error_msg_webupdateerror',
+ 'error_noerror',
+ 'error_nopermission',
+ 'error_norecordsfound',
+ 'error_outofmemory',
+ 'error_pop',
+ 'error_push',
+ 'error_reqcolumnmissing',
+ 'error_reqfieldmissing',
+ 'error_requiredcolumnmissing',
+ 'error_requiredfieldmissing',
+ 'error_reset',
+ 'error_seterrorcode',
+ 'error_seterrormessage',
+ 'error_updateerror',
+ 'euro',
+ 'event_schedule',
+ 'ew',
+ 'fail',
+ 'fail_if',
+ 'false',
+ 'field',
+ 'field_name',
+ 'field_names',
+ 'file',
+ 'file_autoresolvefullpaths',
+ 'file_chmod',
+ 'file_control',
+ 'file_copy',
+ 'file_create',
+ 'file_creationdate',
+ 'file_currenterror',
+ 'file_delete',
+ 'file_exists',
+ 'file_getlinecount',
+ 'file_getsize',
+ 'file_isdirectory',
+ 'file_listdirectory',
+ 'file_moddate',
+ 'file_modechar',
+ 'file_modeline',
+ 'file_move',
+ 'file_openread',
+ 'file_openreadwrite',
+ 'file_openwrite',
+ 'file_openwriteappend',
+ 'file_openwritetruncate',
+ 'file_probeeol',
+ 'file_processuploads',
+ 'file_read',
+ 'file_readline',
+ 'file_rename',
+ 'file_serve',
+ 'file_setsize',
+ 'file_stream',
+ 'file_streamcopy',
+ 'file_uploads',
+ 'file_waitread',
+ 'file_waittimeout',
+ 'file_waitwrite',
+ 'file_write',
+ 'find_soap_ops',
+ 'form_param',
+ 'found_count',
+ 'ft',
+ 'ftp_getfile',
+ 'ftp_getlisting',
+ 'ftp_putfile',
+ 'full',
+ 'global',
+ 'global_defined',
+ 'global_remove',
+ 'global_reset',
+ 'globals',
+ 'gt',
+ 'gte',
+ 'handle',
+ 'handle_error',
+ 'header',
+ 'html_comment',
+ 'http_getfile',
+ 'ical_alarm',
+ 'ical_attribute',
+ 'ical_calendar',
+ 'ical_daylight',
+ 'ical_event',
+ 'ical_freebusy',
+ 'ical_item',
+ 'ical_journal',
+ 'ical_parse',
+ 'ical_standard',
+ 'ical_timezone',
+ 'ical_todo',
+ 'if',
+ 'if_empty',
+ 'if_false',
+ 'if_null',
+ 'if_true',
+ 'image',
+ 'image_url',
+ 'img',
+ 'include',
+ 'include_cgi',
+ 'include_currentpath',
+ 'include_once',
+ 'include_raw',
+ 'include_url',
+ 'inline',
+ 'integer',
+ 'iterate',
+ 'iterator',
+ 'java',
+ 'java_bean',
+ 'json_records',
+ 'json_rpccall',
+ 'keycolumn_name',
+ 'keycolumn_value',
+ 'keyfield_name',
+ 'keyfield_value',
+ 'lasso_comment',
+ 'lasso_currentaction',
+ 'lasso_datasourceis',
+ 'lasso_datasourceis4d',
+ 'lasso_datasourceisfilemaker',
+ 'lasso_datasourceisfilemaker7',
+ 'lasso_datasourceisfilemaker9',
+ 'lasso_datasourceisfilemakersa',
+ 'lasso_datasourceisjdbc',
+ 'lasso_datasourceislassomysql',
+ 'lasso_datasourceismysql',
+ 'lasso_datasourceisodbc',
+ 'lasso_datasourceisopenbase',
+ 'lasso_datasourceisoracle',
+ 'lasso_datasourceispostgresql',
+ 'lasso_datasourceisspotlight',
+ 'lasso_datasourceissqlite',
+ 'lasso_datasourceissqlserver',
+ 'lasso_datasourcemodulename',
+ 'lasso_datatype',
+ 'lasso_disableondemand',
+ 'lasso_errorreporting',
+ 'lasso_executiontimelimit',
+ 'lasso_parser',
+ 'lasso_process',
+ 'lasso_sessionid',
+ 'lasso_siteid',
+ 'lasso_siteisrunning',
+ 'lasso_sitename',
+ 'lasso_siterestart',
+ 'lasso_sitestart',
+ 'lasso_sitestop',
+ 'lasso_tagexists',
+ 'lasso_tagmodulename',
+ 'lasso_uniqueid',
+ 'lasso_updatecheck',
+ 'lasso_uptime',
+ 'lasso_version',
+ 'lassoapp_create',
+ 'lassoapp_dump',
+ 'lassoapp_flattendir',
+ 'lassoapp_getappdata',
+ 'lassoapp_link',
+ 'lassoapp_list',
+ 'lassoapp_process',
+ 'lassoapp_unitize',
+ 'layout_name',
+ 'ldap',
+ 'ldap_scope_base',
+ 'ldap_scope_onelevel',
+ 'ldap_scope_subtree',
+ 'ldml',
+ 'ldml_ldml',
+ 'library',
+ 'library_once',
+ 'link',
+ 'link_currentaction',
+ 'link_currentactionparams',
+ 'link_currentactionurl',
+ 'link_currentgroup',
+ 'link_currentgroupparams',
+ 'link_currentgroupurl',
+ 'link_currentrecord',
+ 'link_currentrecordparams',
+ 'link_currentrecordurl',
+ 'link_currentsearch',
+ 'link_currentsearchparams',
+ 'link_currentsearchurl',
+ 'link_detail',
+ 'link_detailparams',
+ 'link_detailurl',
+ 'link_firstgroup',
+ 'link_firstgroupparams',
+ 'link_firstgroupurl',
+ 'link_firstrecord',
+ 'link_firstrecordparams',
+ 'link_firstrecordurl',
+ 'link_lastgroup',
+ 'link_lastgroupparams',
+ 'link_lastgroupurl',
+ 'link_lastrecord',
+ 'link_lastrecordparams',
+ 'link_lastrecordurl',
+ 'link_nextgroup',
+ 'link_nextgroupparams',
+ 'link_nextgroupurl',
+ 'link_nextrecord',
+ 'link_nextrecordparams',
+ 'link_nextrecordurl',
+ 'link_params',
+ 'link_prevgroup',
+ 'link_prevgroupparams',
+ 'link_prevgroupurl',
+ 'link_prevrecord',
+ 'link_prevrecordparams',
+ 'link_prevrecordurl',
+ 'link_setformat',
+ 'link_url',
+ 'list',
+ 'list_additem',
+ 'list_fromlist',
+ 'list_fromstring',
+ 'list_getitem',
+ 'list_itemcount',
+ 'list_iterator',
+ 'list_removeitem',
+ 'list_replaceitem',
+ 'list_reverseiterator',
+ 'list_tostring',
+ 'literal',
+ 'ljax_end',
+ 'ljax_hastarget',
+ 'ljax_include',
+ 'ljax_start',
+ 'ljax_target',
+ 'local',
+ 'local_defined',
+ 'local_remove',
+ 'local_reset',
+ 'locale_format',
+ 'locals',
+ 'log',
+ 'log_always',
+ 'log_critical',
+ 'log_deprecated',
+ 'log_destination_console',
+ 'log_destination_database',
+ 'log_destination_file',
+ 'log_detail',
+ 'log_level_critical',
+ 'log_level_deprecated',
+ 'log_level_detail',
+ 'log_level_sql',
+ 'log_level_warning',
+ 'log_setdestination',
+ 'log_sql',
+ 'log_warning',
+ 'logicalop_value',
+ 'logicaloperator_value',
+ 'loop',
+ 'loop_abort',
+ 'loop_continue',
+ 'loop_count',
+ 'lt',
+ 'lte',
+ 'magick_image',
+ 'map',
+ 'map_iterator',
+ 'match_comparator',
+ 'match_notrange',
+ 'match_notregexp',
+ 'match_range',
+ 'match_regexp',
+ 'math_abs',
+ 'math_acos',
+ 'math_add',
+ 'math_asin',
+ 'math_atan',
+ 'math_atan2',
+ 'math_ceil',
+ 'math_converteuro',
+ 'math_cos',
+ 'math_div',
+ 'math_exp',
+ 'math_floor',
+ 'math_internal_rand',
+ 'math_internal_randmax',
+ 'math_internal_srand',
+ 'math_ln',
+ 'math_log',
+ 'math_log10',
+ 'math_max',
+ 'math_min',
+ 'math_mod',
+ 'math_mult',
+ 'math_pow',
+ 'math_random',
+ 'math_range',
+ 'math_rint',
+ 'math_roman',
+ 'math_round',
+ 'math_sin',
+ 'math_sqrt',
+ 'math_sub',
+ 'math_tan',
+ 'maxrecords_value',
+ 'memory_session_driver',
+ 'mime_type',
+ 'minimal',
+ 'misc__srand',
+ 'misc_randomnumber',
+ 'misc_roman',
+ 'misc_valid_creditcard',
+ 'mysql_session_driver',
+ 'named_param',
+ 'namespace_current',
+ 'namespace_delimiter',
+ 'namespace_exists',
+ 'namespace_file_fullpathexists',
+ 'namespace_global',
+ 'namespace_import',
+ 'namespace_load',
+ 'namespace_page',
+ 'namespace_unload',
+ 'namespace_using',
+ 'neq',
+ 'net',
+ 'net_connectinprogress',
+ 'net_connectok',
+ 'net_typessl',
+ 'net_typessltcp',
+ 'net_typessludp',
+ 'net_typetcp',
+ 'net_typeudp',
+ 'net_waitread',
+ 'net_waittimeout',
+ 'net_waitwrite',
+ 'no_default_output',
+ 'none',
+ 'noprocess',
+ 'not',
+ 'nrx',
+ 'nslookup',
+ 'null',
+ 'object',
+ 'once',
+ 'oneoff',
+ 'op_logicalvalue',
+ 'operator_logicalvalue',
+ 'option',
+ 'or',
+ 'os_process',
+ 'output',
+ 'output_none',
+ 'pair',
+ 'params_up',
+ 'pdf_barcode',
+ 'pdf_color',
+ 'pdf_doc',
+ 'pdf_font',
+ 'pdf_image',
+ 'pdf_list',
+ 'pdf_read',
+ 'pdf_serve',
+ 'pdf_table',
+ 'pdf_text',
+ 'percent',
+ 'portal',
+ 'postcondition',
+ 'precondition',
+ 'prettyprintingnsmap',
+ 'prettyprintingtypemap',
+ 'priorityqueue',
+ 'private',
+ 'proc_convert',
+ 'proc_convertbody',
+ 'proc_convertone',
+ 'proc_extract',
+ 'proc_extractone',
+ 'proc_find',
+ 'proc_first',
+ 'proc_foreach',
+ 'proc_get',
+ 'proc_join',
+ 'proc_lasso',
+ 'proc_last',
+ 'proc_map_entry',
+ 'proc_null',
+ 'proc_regexp',
+ 'proc_xml',
+ 'proc_xslt',
+ 'process',
+ 'protect',
+ 'queue',
+ 'rand',
+ 'randomnumber',
+ 'raw',
+ 'recid_value',
+ 'record_count',
+ 'recordcount',
+ 'recordid_value',
+ 'records',
+ 'records_array',
+ 'records_map',
+ 'redirect_url',
+ 'reference',
+ 'referer',
+ 'referer_url',
+ 'referrer',
+ 'referrer_url',
+ 'regexp',
+ 'repeating',
+ 'repeating_valueitem',
+ 'repeatingvalueitem',
+ 'repetition',
+ 'req_column',
+ 'req_field',
+ 'required_column',
+ 'required_field',
+ 'response_fileexists',
+ 'response_filepath',
+ 'response_localpath',
+ 'response_path',
+ 'response_realm',
+ 'resultset',
+ 'resultset_count',
+ 'return',
+ 'return_value',
+ 'reverseiterator',
+ 'roman',
+ 'row_count',
+ 'rows',
+ 'rows_array',
+ 'run_children',
+ 'rx',
+ 'schema_name',
+ 'scientific',
+ 'search_args',
+ 'search_arguments',
+ 'search_columnitem',
+ 'search_fielditem',
+ 'search_operatoritem',
+ 'search_opitem',
+ 'search_valueitem',
+ 'searchfielditem',
+ 'searchoperatoritem',
+ 'searchopitem',
+ 'searchvalueitem',
+ 'select',
+ 'selected',
+ 'self',
+ 'serialize',
+ 'series',
+ 'server_date',
+ 'server_day',
+ 'server_ip',
+ 'server_name',
+ 'server_port',
+ 'server_push',
+ 'server_siteisrunning',
+ 'server_sitestart',
+ 'server_sitestop',
+ 'server_time',
+ 'session_abort',
+ 'session_addoutputfilter',
+ 'session_addvar',
+ 'session_addvariable',
+ 'session_deleteexpired',
+ 'session_driver',
+ 'session_end',
+ 'session_id',
+ 'session_removevar',
+ 'session_removevariable',
+ 'session_result',
+ 'session_setdriver',
+ 'session_start',
+ 'set',
+ 'set_iterator',
+ 'set_reverseiterator',
+ 'shown_count',
+ 'shown_first',
+ 'shown_last',
+ 'site_atbegin',
+ 'site_id',
+ 'site_name',
+ 'site_restart',
+ 'skiprecords_value',
+ 'sleep',
+ 'soap_convertpartstopairs',
+ 'soap_definetag',
+ 'soap_info',
+ 'soap_lastrequest',
+ 'soap_lastresponse',
+ 'soap_stub',
+ 'sort_args',
+ 'sort_arguments',
+ 'sort_columnitem',
+ 'sort_fielditem',
+ 'sort_orderitem',
+ 'sortcolumnitem',
+ 'sortfielditem',
+ 'sortorderitem',
+ 'sqlite_createdb',
+ 'sqlite_session_driver',
+ 'sqlite_setsleepmillis',
+ 'sqlite_setsleeptries',
+ 'srand',
+ 'stack',
+ 'stock_quote',
+ 'string',
+ 'string_charfromname',
+ 'string_concatenate',
+ 'string_countfields',
+ 'string_endswith',
+ 'string_extract',
+ 'string_findposition',
+ 'string_findregexp',
+ 'string_fordigit',
+ 'string_getfield',
+ 'string_getunicodeversion',
+ 'string_insert',
+ 'string_isalpha',
+ 'string_isalphanumeric',
+ 'string_isdigit',
+ 'string_ishexdigit',
+ 'string_islower',
+ 'string_isnumeric',
+ 'string_ispunctuation',
+ 'string_isspace',
+ 'string_isupper',
+ 'string_length',
+ 'string_lowercase',
+ 'string_remove',
+ 'string_removeleading',
+ 'string_removetrailing',
+ 'string_replace',
+ 'string_replaceregexp',
+ 'string_todecimal',
+ 'string_tointeger',
+ 'string_uppercase',
+ 'string_validcharset',
+ 'table_name',
+ 'table_realname',
+ 'tag',
+ 'tag_name',
+ 'tags',
+ 'tags_find',
+ 'tags_list',
+ 'tcp_close',
+ 'tcp_open',
+ 'tcp_send',
+ 'tcp_tcp_close',
+ 'tcp_tcp_open',
+ 'tcp_tcp_send',
+ 'thread_abort',
+ 'thread_atomic',
+ 'thread_event',
+ 'thread_exists',
+ 'thread_getcurrentid',
+ 'thread_getpriority',
+ 'thread_info',
+ 'thread_list',
+ 'thread_lock',
+ 'thread_pipe',
+ 'thread_priority_default',
+ 'thread_priority_high',
+ 'thread_priority_low',
+ 'thread_rwlock',
+ 'thread_semaphore',
+ 'thread_setpriority',
+ 'token_value',
+ 'total_records',
+ 'treemap',
+ 'treemap_iterator',
+ 'true',
+ 'url_rewrite',
+ 'valid_creditcard',
+ 'valid_date',
+ 'valid_email',
+ 'valid_url',
+ 'value_list',
+ 'value_listitem',
+ 'valuelistitem',
+ 'var',
+ 'var_defined',
+ 'var_remove',
+ 'var_reset',
+ 'var_set',
+ 'variable',
+ 'variable_defined',
+ 'variable_set',
+ 'variables',
+ 'variant_count',
+ 'vars',
+ 'wap_isenabled',
+ 'wap_maxbuttons',
+ 'wap_maxcolumns',
+ 'wap_maxhorzpixels',
+ 'wap_maxrows',
+ 'wap_maxvertpixels',
+ 'while',
+ 'wsdl_extract',
+ 'wsdl_getbinding',
+ 'wsdl_getbindingforoperation',
+ 'wsdl_getbindingoperations',
+ 'wsdl_getmessagenamed',
+ 'wsdl_getmessageparts',
+ 'wsdl_getmessagetriofromporttype',
+ 'wsdl_getopbodystyle',
+ 'wsdl_getopbodyuse',
+ 'wsdl_getoperation',
+ 'wsdl_getoplocation',
+ 'wsdl_getopmessagetypes',
+ 'wsdl_getopsoapaction',
+ 'wsdl_getportaddress',
+ 'wsdl_getportsforservice',
+ 'wsdl_getporttype',
+ 'wsdl_getporttypeoperation',
+ 'wsdl_getservicedocumentation',
+ 'wsdl_getservices',
+ 'wsdl_gettargetnamespace',
+ 'wsdl_issoapoperation',
+ 'wsdl_listoperations',
+ 'wsdl_maketest',
+ 'xml',
+ 'xml_extract',
+ 'xml_rpc',
+ 'xml_rpccall',
+ 'xml_rw',
+ 'xml_serve',
+ 'xml_transform',
+ 'xml_xml',
+ 'xml_xmlstream',
+ 'xmlstream',
+ 'xsd_attribute',
+ 'xsd_blankarraybase',
+ 'xsd_blankbase',
+ 'xsd_buildtype',
+ 'xsd_cache',
+ 'xsd_checkcardinality',
+ 'xsd_continueall',
+ 'xsd_continueannotation',
+ 'xsd_continueany',
+ 'xsd_continueanyattribute',
+ 'xsd_continueattribute',
+ 'xsd_continueattributegroup',
+ 'xsd_continuechoice',
+ 'xsd_continuecomplexcontent',
+ 'xsd_continuecomplextype',
+ 'xsd_continuedocumentation',
+ 'xsd_continueextension',
+ 'xsd_continuegroup',
+ 'xsd_continuekey',
+ 'xsd_continuelist',
+ 'xsd_continuerestriction',
+ 'xsd_continuesequence',
+ 'xsd_continuesimplecontent',
+ 'xsd_continuesimpletype',
+ 'xsd_continueunion',
+ 'xsd_deserialize',
+ 'xsd_fullyqualifyname',
+ 'xsd_generate',
+ 'xsd_generateblankfromtype',
+ 'xsd_generateblanksimpletype',
+ 'xsd_generatetype',
+ 'xsd_getschematype',
+ 'xsd_issimpletype',
+ 'xsd_loadschema',
+ 'xsd_lookupnamespaceuri',
+ 'xsd_lookuptype',
+ 'xsd_processany',
+ 'xsd_processattribute',
+ 'xsd_processattributegroup',
+ 'xsd_processcomplextype',
+ 'xsd_processelement',
+ 'xsd_processgroup',
+ 'xsd_processimport',
+ 'xsd_processinclude',
+ 'xsd_processschema',
+ 'xsd_processsimpletype',
+ 'xsd_ref',
+ 'xsd_type',
+ )
+}
+MEMBERS = {
+ 'Member Methods': (
+ 'abort',
+ 'abs',
+ 'accept_charset',
+ 'accept',
+ 'acceptconnections',
+ 'acceptdeserializedelement',
+ 'acceptnossl',
+ 'acceptpost',
+ 'accesskey',
+ 'acos',
+ 'acosh',
+ 'action',
+ 'actionparams',
+ 'active_tick',
+ 'add',
+ 'addatend',
+ 'addattachment',
+ 'addbarcode',
+ 'addchapter',
+ 'addcheckbox',
+ 'addcolumninfo',
+ 'addcombobox',
+ 'addcomment',
+ 'addcomponent',
+ 'addcomponents',
+ 'addcss',
+ 'adddatabasetable',
+ 'adddatasource',
+ 'adddatasourcedatabase',
+ 'adddatasourcehost',
+ 'adddir',
+ 'adddirpath',
+ 'addendjs',
+ 'addendjstext',
+ 'adderror',
+ 'addfavicon',
+ 'addfile',
+ 'addgroup',
+ 'addheader',
+ 'addhiddenfield',
+ 'addhtmlpart',
+ 'addimage',
+ 'addjavascript',
+ 'addjs',
+ 'addjstext',
+ 'addlist',
+ 'addmathfunctions',
+ 'addmember',
+ 'addoneheaderline',
+ 'addpage',
+ 'addparagraph',
+ 'addpart',
+ 'addpasswordfield',
+ 'addphrase',
+ 'addpostdispatch',
+ 'addpredispatch',
+ 'addradiobutton',
+ 'addradiogroup',
+ 'addresetbutton',
+ 'addrow',
+ 'addsection',
+ 'addselectlist',
+ 'addset',
+ 'addsubmitbutton',
+ 'addsubnode',
+ 'addtable',
+ 'addtask',
+ 'addtext',
+ 'addtextarea',
+ 'addtextfield',
+ 'addtextpart',
+ 'addtobuffer',
+ 'addtrait',
+ 'adduser',
+ 'addusertogroup',
+ 'addwarning',
+ 'addzip',
+ 'allocobject',
+ 'am',
+ 'ampm',
+ 'annotate',
+ 'answer',
+ 'apop',
+ 'append',
+ 'appendarray',
+ 'appendarraybegin',
+ 'appendarrayend',
+ 'appendbool',
+ 'appendbytes',
+ 'appendchar',
+ 'appendchild',
+ 'appendcolon',
+ 'appendcomma',
+ 'appenddata',
+ 'appenddatetime',
+ 'appenddbpointer',
+ 'appenddecimal',
+ 'appenddocument',
+ 'appendimagetolist',
+ 'appendinteger',
+ 'appendnowutc',
+ 'appendnull',
+ 'appendoid',
+ 'appendregex',
+ 'appendreplacement',
+ 'appendstring',
+ 'appendtail',
+ 'appendtime',
+ 'applyheatcolors',
+ 'appmessage',
+ 'appname',
+ 'appprefix',
+ 'appstatus',
+ 'arc',
+ 'archive',
+ 'arguments',
+ 'argumentvalue',
+ 'asarray',
+ 'asarraystring',
+ 'asasync',
+ 'asbytes',
+ 'ascopy',
+ 'ascopydeep',
+ 'asdecimal',
+ 'asgenerator',
+ 'asin',
+ 'asinh',
+ 'asinteger',
+ 'askeyedgenerator',
+ 'aslazystring',
+ 'aslist',
+ 'asraw',
+ 'asstaticarray',
+ 'asstring',
+ 'asstringhex',
+ 'asstringoct',
+ 'asxml',
+ 'atan',
+ 'atan2',
+ 'atanh',
+ 'atend',
+ 'atends',
+ 'atime',
+ 'attributecount',
+ 'attributes',
+ 'attrs',
+ 'auth',
+ 'authenticate',
+ 'authorize',
+ 'autocollectbuffer',
+ 'average',
+ 'back',
+ 'basename',
+ 'basepaths',
+ 'baseuri',
+ 'bcc',
+ 'beginssl',
+ 'beginswith',
+ 'begintls',
+ 'bestcharset',
+ 'bind_blob',
+ 'bind_double',
+ 'bind_int',
+ 'bind_null',
+ 'bind_parameter_index',
+ 'bind_text',
+ 'bind',
+ 'bindcount',
+ 'bindone',
+ 'bindparam',
+ 'bitand',
+ 'bitclear',
+ 'bitflip',
+ 'bitformat',
+ 'bitnot',
+ 'bitor',
+ 'bitset',
+ 'bitshiftleft',
+ 'bitshiftright',
+ 'bittest',
+ 'bitxor',
+ 'blur',
+ 'body',
+ 'bodybytes',
+ 'boundary',
+ 'bptoxml',
+ 'bptypetostr',
+ 'bucketnumber',
+ 'buff',
+ 'buildquery',
+ 'businessdaysbetween',
+ 'by',
+ 'bytes',
+ 'cachedappprefix',
+ 'cachedroot',
+ 'callboolean',
+ 'callbooleanmethod',
+ 'callbytemethod',
+ 'callcharmethod',
+ 'calldoublemethod',
+ 'calledname',
+ 'callfirst',
+ 'callfloat',
+ 'callfloatmethod',
+ 'callint',
+ 'callintmethod',
+ 'calllongmethod',
+ 'callnonvirtualbooleanmethod',
+ 'callnonvirtualbytemethod',
+ 'callnonvirtualcharmethod',
+ 'callnonvirtualdoublemethod',
+ 'callnonvirtualfloatmethod',
+ 'callnonvirtualintmethod',
+ 'callnonvirtuallongmethod',
+ 'callnonvirtualobjectmethod',
+ 'callnonvirtualshortmethod',
+ 'callnonvirtualvoidmethod',
+ 'callobject',
+ 'callobjectmethod',
+ 'callshortmethod',
+ 'callsite_col',
+ 'callsite_file',
+ 'callsite_line',
+ 'callstack',
+ 'callstaticboolean',
+ 'callstaticbooleanmethod',
+ 'callstaticbytemethod',
+ 'callstaticcharmethod',
+ 'callstaticdoublemethod',
+ 'callstaticfloatmethod',
+ 'callstaticint',
+ 'callstaticintmethod',
+ 'callstaticlongmethod',
+ 'callstaticobject',
+ 'callstaticobjectmethod',
+ 'callstaticshortmethod',
+ 'callstaticstring',
+ 'callstaticvoidmethod',
+ 'callstring',
+ 'callvoid',
+ 'callvoidmethod',
+ 'cancel',
+ 'cap',
+ 'capa',
+ 'capabilities',
+ 'capi',
+ 'cbrt',
+ 'cc',
+ 'ceil',
+ 'chardigitvalue',
+ 'charname',
+ 'charset',
+ 'chartype',
+ 'checkdebugging',
+ 'checked',
+ 'checkuser',
+ 'childnodes',
+ 'chk',
+ 'chmod',
+ 'choosecolumntype',
+ 'chown',
+ 'chunked',
+ 'circle',
+ 'class',
+ 'classid',
+ 'clear',
+ 'clonenode',
+ 'close',
+ 'closepath',
+ 'closeprepared',
+ 'closewrite',
+ 'code',
+ 'codebase',
+ 'codetype',
+ 'colmap',
+ 'colorspace',
+ 'column_blob',
+ 'column_count',
+ 'column_decltype',
+ 'column_double',
+ 'column_int64',
+ 'column_name',
+ 'column_text',
+ 'column_type',
+ 'command',
+ 'comments',
+ 'compare',
+ 'comparecodepointorder',
+ 'componentdelimiter',
+ 'components',
+ 'composite',
+ 'compress',
+ 'concat',
+ 'condtoint',
+ 'configureds',
+ 'configuredskeys',
+ 'connect',
+ 'connection',
+ 'connectionhandler',
+ 'connhandler',
+ 'consume_domain',
+ 'consume_label',
+ 'consume_message',
+ 'consume_rdata',
+ 'consume_string',
+ 'contains',
+ 'content_disposition',
+ 'content_transfer_encoding',
+ 'content_type',
+ 'content',
+ 'contentlength',
+ 'contents',
+ 'contenttype',
+ 'continuation',
+ 'continuationpacket',
+ 'continuationpoint',
+ 'continuationstack',
+ 'continue',
+ 'contrast',
+ 'conventionaltop',
+ 'convert',
+ 'cookie',
+ 'cookies',
+ 'cookiesarray',
+ 'cookiesary',
+ 'copyto',
+ 'cos',
+ 'cosh',
+ 'count',
+ 'countkeys',
+ 'country',
+ 'countusersbygroup',
+ 'crc',
+ 'create',
+ 'createattribute',
+ 'createattributens',
+ 'createcdatasection',
+ 'createcomment',
+ 'createdocument',
+ 'createdocumentfragment',
+ 'createdocumenttype',
+ 'createelement',
+ 'createelementns',
+ 'createentityreference',
+ 'createindex',
+ 'createprocessinginstruction',
+ 'createtable',
+ 'createtextnode',
+ 'criteria',
+ 'crop',
+ 'csscontent',
+ 'curl',
+ 'current',
+ 'currentfile',
+ 'curveto',
+ 'd',
+ 'data',
+ 'databasecolumnnames',
+ 'databasecolumns',
+ 'databasemap',
+ 'databasename',
+ 'datasourcecolumnnames',
+ 'datasourcecolumns',
+ 'datasourcemap',
+ 'date',
+ 'day',
+ 'dayofmonth',
+ 'dayofweek',
+ 'dayofweekinmonth',
+ 'dayofyear',
+ 'days',
+ 'daysbetween',
+ 'db',
+ 'dbtablestable',
+ 'debug',
+ 'declare',
+ 'decodebase64',
+ 'decodehex',
+ 'decodehtml',
+ 'decodeqp',
+ 'decodeurl',
+ 'decodexml',
+ 'decompose',
+ 'decomposeassignment',
+ 'defaultcontentrepresentation',
+ 'defer',
+ 'deg2rad',
+ 'dele',
+ 'delete',
+ 'deletedata',
+ 'deleteglobalref',
+ 'deletelocalref',
+ 'delim',
+ 'depth',
+ 'dereferencepointer',
+ 'describe',
+ 'description',
+ 'deserialize',
+ 'detach',
+ 'detectcharset',
+ 'didinclude',
+ 'difference',
+ 'digit',
+ 'dir',
+ 'displaycountry',
+ 'displaylanguage',
+ 'displayname',
+ 'displayscript',
+ 'displayvariant',
+ 'div',
+ 'dns_response',
+ 'do',
+ 'doatbegins',
+ 'doatends',
+ 'doccomment',
+ 'doclose',
+ 'doctype',
+ 'document',
+ 'documentelement',
+ 'documentroot',
+ 'domainbody',
+ 'done',
+ 'dosessions',
+ 'dowithclose',
+ 'dowlocal',
+ 'download',
+ 'drawtext',
+ 'drop',
+ 'dropindex',
+ 'dsdbtable',
+ 'dshoststable',
+ 'dsinfo',
+ 'dst',
+ 'dstable',
+ 'dstoffset',
+ 'dtdid',
+ 'dup',
+ 'dup2',
+ 'each',
+ 'eachbyte',
+ 'eachcharacter',
+ 'eachchild',
+ 'eachcomponent',
+ 'eachdir',
+ 'eachdirpath',
+ 'eachdirpathrecursive',
+ 'eachentry',
+ 'eachfile',
+ 'eachfilename',
+ 'eachfilepath',
+ 'eachfilepathrecursive',
+ 'eachkey',
+ 'eachline',
+ 'eachlinebreak',
+ 'eachmatch',
+ 'eachnode',
+ 'eachpair',
+ 'eachpath',
+ 'eachpathrecursive',
+ 'eachrow',
+ 'eachsub',
+ 'eachword',
+ 'eachwordbreak',
+ 'element',
+ 'eligiblepath',
+ 'eligiblepaths',
+ 'encodebase64',
+ 'encodehex',
+ 'encodehtml',
+ 'encodehtmltoxml',
+ 'encodemd5',
+ 'encodepassword',
+ 'encodeqp',
+ 'encodesql',
+ 'encodesql92',
+ 'encodeurl',
+ 'encodevalue',
+ 'encodexml',
+ 'encoding',
+ 'enctype',
+ 'end',
+ 'endjs',
+ 'endssl',
+ 'endswith',
+ 'endtls',
+ 'enhance',
+ 'ensurestopped',
+ 'entities',
+ 'entry',
+ 'env',
+ 'equals',
+ 'era',
+ 'erf',
+ 'erfc',
+ 'err',
+ 'errcode',
+ 'errmsg',
+ 'error',
+ 'errors',
+ 'errstack',
+ 'escape_member',
+ 'establisherrorstate',
+ 'exceptioncheck',
+ 'exceptionclear',
+ 'exceptiondescribe',
+ 'exceptionoccurred',
+ 'exchange',
+ 'execinits',
+ 'execinstalls',
+ 'execute',
+ 'executelazy',
+ 'executenow',
+ 'exists',
+ 'exit',
+ 'exitcode',
+ 'exp',
+ 'expire',
+ 'expireminutes',
+ 'expiresminutes',
+ 'expm1',
+ 'export16bits',
+ 'export32bits',
+ 'export64bits',
+ 'export8bits',
+ 'exportas',
+ 'exportbytes',
+ 'exportfdf',
+ 'exportpointerbits',
+ 'exportsigned16bits',
+ 'exportsigned32bits',
+ 'exportsigned64bits',
+ 'exportsigned8bits',
+ 'exportstring',
+ 'expose',
+ 'extendedyear',
+ 'extensiondelimiter',
+ 'extensions',
+ 'extract',
+ 'extractfast',
+ 'extractfastone',
+ 'extractimage',
+ 'extractone',
+ 'f',
+ 'fabs',
+ 'fail',
+ 'failnoconnectionhandler',
+ 'family',
+ 'fatalerror',
+ 'fcgireq',
+ 'fchdir',
+ 'fchmod',
+ 'fchown',
+ 'fd',
+ 'features',
+ 'fetchdata',
+ 'fieldnames',
+ 'fieldposition',
+ 'fieldstable',
+ 'fieldtype',
+ 'fieldvalue',
+ 'file',
+ 'filename',
+ 'filenames',
+ 'filequeue',
+ 'fileuploads',
+ 'fileuploadsary',
+ 'filterinputcolumn',
+ 'finalize',
+ 'find',
+ 'findall',
+ 'findandmodify',
+ 'findbucket',
+ 'findcase',
+ 'findclass',
+ 'findcount',
+ 'finddescendant',
+ 'findfirst',
+ 'findinclude',
+ 'findinctx',
+ 'findindex',
+ 'findlast',
+ 'findpattern',
+ 'findposition',
+ 'findsymbols',
+ 'first',
+ 'firstchild',
+ 'firstcomponent',
+ 'firstdayofweek',
+ 'firstnode',
+ 'fixformat',
+ 'flags',
+ 'fliph',
+ 'flipv',
+ 'floor',
+ 'flush',
+ 'foldcase',
+ 'foo',
+ 'for',
+ 'forcedrowid',
+ 'foreach',
+ 'foreachaccept',
+ 'foreachbyte',
+ 'foreachcharacter',
+ 'foreachchild',
+ 'foreachday',
+ 'foreachentry',
+ 'foreachfile',
+ 'foreachfilename',
+ 'foreachkey',
+ 'foreachline',
+ 'foreachlinebreak',
+ 'foreachmatch',
+ 'foreachnode',
+ 'foreachpair',
+ 'foreachpathcomponent',
+ 'foreachrow',
+ 'foreachspool',
+ 'foreachsub',
+ 'foreachwordbreak',
+ 'form',
+ 'format',
+ 'formatas',
+ 'formatcontextelement',
+ 'formatcontextelements',
+ 'formatnumber',
+ 'free',
+ 'frexp',
+ 'from',
+ 'fromname',
+ 'fromport',
+ 'fromreflectedfield',
+ 'fromreflectedmethod',
+ 'front',
+ 'fsync',
+ 'ftpdeletefile',
+ 'ftpgetlisting',
+ 'ftruncate',
+ 'fullpath',
+ 'fx',
+ 'gamma',
+ 'gatewayinterface',
+ 'gen',
+ 'generatechecksum',
+ 'get',
+ 'getabswidth',
+ 'getalignment',
+ 'getappsource',
+ 'getarraylength',
+ 'getattr',
+ 'getattribute',
+ 'getattributenamespace',
+ 'getattributenode',
+ 'getattributenodens',
+ 'getattributens',
+ 'getbarheight',
+ 'getbarmultiplier',
+ 'getbarwidth',
+ 'getbaseline',
+ 'getbold',
+ 'getbooleanarrayelements',
+ 'getbooleanarrayregion',
+ 'getbooleanfield',
+ 'getbordercolor',
+ 'getborderwidth',
+ 'getbytearrayelements',
+ 'getbytearrayregion',
+ 'getbytefield',
+ 'getchararrayelements',
+ 'getchararrayregion',
+ 'getcharfield',
+ 'getclass',
+ 'getcode',
+ 'getcolor',
+ 'getcolumn',
+ 'getcolumncount',
+ 'getcolumns',
+ 'getdatabasebyalias',
+ 'getdatabasebyid',
+ 'getdatabasebyname',
+ 'getdatabasehost',
+ 'getdatabasetable',
+ 'getdatabasetablebyalias',
+ 'getdatabasetablebyid',
+ 'getdatabasetablepart',
+ 'getdatasource',
+ 'getdatasourcedatabase',
+ 'getdatasourcedatabasebyid',
+ 'getdatasourcehost',
+ 'getdatasourceid',
+ 'getdatasourcename',
+ 'getdefaultstorage',
+ 'getdoublearrayelements',
+ 'getdoublearrayregion',
+ 'getdoublefield',
+ 'getelementbyid',
+ 'getelementsbytagname',
+ 'getelementsbytagnamens',
+ 'getencoding',
+ 'getface',
+ 'getfield',
+ 'getfieldid',
+ 'getfile',
+ 'getfloatarrayelements',
+ 'getfloatarrayregion',
+ 'getfloatfield',
+ 'getfont',
+ 'getformat',
+ 'getfullfontname',
+ 'getgroup',
+ 'getgroupid',
+ 'getheader',
+ 'getheaders',
+ 'gethostdatabase',
+ 'gethtmlattr',
+ 'gethtmlattrstring',
+ 'getinclude',
+ 'getintarrayelements',
+ 'getintarrayregion',
+ 'getintfield',
+ 'getisocomment',
+ 'getitalic',
+ 'getlasterror',
+ 'getlcapitype',
+ 'getlibrary',
+ 'getlongarrayelements',
+ 'getlongarrayregion',
+ 'getlongfield',
+ 'getmargins',
+ 'getmethodid',
+ 'getmode',
+ 'getnameditem',
+ 'getnameditemns',
+ 'getnode',
+ 'getnumericvalue',
+ 'getobjectarrayelement',
+ 'getobjectclass',
+ 'getobjectfield',
+ 'getpadding',
+ 'getpagenumber',
+ 'getparts',
+ 'getprefs',
+ 'getpropertyvalue',
+ 'getprowcount',
+ 'getpsfontname',
+ 'getrange',
+ 'getrowcount',
+ 'getset',
+ 'getshortarrayelements',
+ 'getshortarrayregion',
+ 'getshortfield',
+ 'getsize',
+ 'getsortfieldspart',
+ 'getspacing',
+ 'getstaticbooleanfield',
+ 'getstaticbytefield',
+ 'getstaticcharfield',
+ 'getstaticdoublefield',
+ 'getstaticfieldid',
+ 'getstaticfloatfield',
+ 'getstaticintfield',
+ 'getstaticlongfield',
+ 'getstaticmethodid',
+ 'getstaticobjectfield',
+ 'getstaticshortfield',
+ 'getstatus',
+ 'getstringchars',
+ 'getstringlength',
+ 'getstyle',
+ 'getsupportedencodings',
+ 'gettablebyid',
+ 'gettext',
+ 'gettextalignment',
+ 'gettextsize',
+ 'gettrigger',
+ 'gettype',
+ 'getunderline',
+ 'getuniquealiasname',
+ 'getuser',
+ 'getuserbykey',
+ 'getuserid',
+ 'getversion',
+ 'getzipfilebytes',
+ 'givenblock',
+ 'gmt',
+ 'gotconnection',
+ 'gotfileupload',
+ 'groupby',
+ 'groupcolumns',
+ 'groupcount',
+ 'groupjoin',
+ 'handlebreakpointget',
+ 'handlebreakpointlist',
+ 'handlebreakpointremove',
+ 'handlebreakpointset',
+ 'handlebreakpointupdate',
+ 'handlecontextget',
+ 'handlecontextnames',
+ 'handlecontinuation',
+ 'handledefinitionbody',
+ 'handledefinitionhead',
+ 'handledefinitionresource',
+ 'handledevconnection',
+ 'handleevalexpired',
+ 'handlefeatureget',
+ 'handlefeatureset',
+ 'handlelassoappcontent',
+ 'handlelassoappresponse',
+ 'handlenested',
+ 'handlenormalconnection',
+ 'handlepop',
+ 'handleresource',
+ 'handlesource',
+ 'handlestackget',
+ 'handlestderr',
+ 'handlestdin',
+ 'handlestdout',
+ 'handshake',
+ 'hasattribute',
+ 'hasattributens',
+ 'hasattributes',
+ 'hasbinaryproperty',
+ 'haschildnodes',
+ 'hasexpired',
+ 'hasfeature',
+ 'hasfield',
+ 'hash',
+ 'hashtmlattr',
+ 'hasmethod',
+ 'hastable',
+ 'hastrailingcomponent',
+ 'hasvalue',
+ 'head',
+ 'header',
+ 'headerbytes',
+ 'headers',
+ 'headersarray',
+ 'headersmap',
+ 'height',
+ 'histogram',
+ 'home',
+ 'host',
+ 'hostcolumnnames',
+ 'hostcolumnnames2',
+ 'hostcolumns',
+ 'hostcolumns2',
+ 'hostdatasource',
+ 'hostextra',
+ 'hostid',
+ 'hostisdynamic',
+ 'hostmap',
+ 'hostmap2',
+ 'hostname',
+ 'hostpassword',
+ 'hostport',
+ 'hostschema',
+ 'hosttableencoding',
+ 'hosttonet16',
+ 'hosttonet32',
+ 'hosttonet64',
+ 'hostusername',
+ 'hour',
+ 'hourofampm',
+ 'hourofday',
+ 'hoursbetween',
+ 'href',
+ 'hreflang',
+ 'htmlcontent',
+ 'htmlizestacktrace',
+ 'htmlizestacktracelink',
+ 'httpaccept',
+ 'httpacceptencoding',
+ 'httpacceptlanguage',
+ 'httpauthorization',
+ 'httpcachecontrol',
+ 'httpconnection',
+ 'httpcookie',
+ 'httpequiv',
+ 'httphost',
+ 'httpreferer',
+ 'httpreferrer',
+ 'httpuseragent',
+ 'hypot',
+ 'id',
+ 'idealinmemory',
+ 'idle',
+ 'idmap',
+ 'ifempty',
+ 'ifkey',
+ 'ifnotempty',
+ 'ifnotkey',
+ 'ignorecase',
+ 'ilogb',
+ 'imgptr',
+ 'implementation',
+ 'import16bits',
+ 'import32bits',
+ 'import64bits',
+ 'import8bits',
+ 'importas',
+ 'importbytes',
+ 'importfdf',
+ 'importnode',
+ 'importpointer',
+ 'importstring',
+ 'in',
+ 'include',
+ 'includebytes',
+ 'includelibrary',
+ 'includelibraryonce',
+ 'includeonce',
+ 'includes',
+ 'includestack',
+ 'indaylighttime',
+ 'index',
+ 'init',
+ 'initialize',
+ 'initrequest',
+ 'inits',
+ 'inneroncompare',
+ 'input',
+ 'inputcolumns',
+ 'inputtype',
+ 'insert',
+ 'insertback',
+ 'insertbefore',
+ 'insertdata',
+ 'insertfirst',
+ 'insertfrom',
+ 'insertfront',
+ 'insertinternal',
+ 'insertlast',
+ 'insertpage',
+ 'install',
+ 'installs',
+ 'integer',
+ 'internalsubset',
+ 'interrupt',
+ 'intersection',
+ 'inttocond',
+ 'invoke',
+ 'invokeautocollect',
+ 'invokeuntil',
+ 'invokewhile',
+ 'ioctl',
+ 'isa',
+ 'isalive',
+ 'isallof',
+ 'isalnum',
+ 'isalpha',
+ 'isanyof',
+ 'isbase',
+ 'isblank',
+ 'iscntrl',
+ 'isdigit',
+ 'isdir',
'isdirectory',
- 'isempty',
- 'isemptyelement',
- 'isfirststep',
- 'isfullpath',
- 'isgraph',
- 'ishttps',
- 'isidle',
- 'isinstanceof',
- 'islink',
- 'islower',
- 'ismultipart',
- 'isnan',
- 'isnota',
- 'isnotempty',
- 'isnothing',
- 'iso3country',
- 'iso3language',
- 'isopen',
- 'isprint',
- 'ispunct',
- 'issameobject',
- 'isset',
- 'issourcefile',
- 'isspace',
- 'isssl',
- 'issupported',
- 'istitle',
- 'istruetype',
- 'istype',
- 'isualphabetic',
- 'isulowercase',
- 'isupper',
- 'isuuppercase',
- 'isuwhitespace',
- 'isvalid',
- 'iswhitespace',
- 'isxdigit',
- 'isxhr',
- 'item',
- 'j0',
- 'j1',
- 'javascript',
- 'jbarcode',
- 'jcolor',
- 'jfont',
- 'jimage',
- 'jlist',
- 'jn',
- 'jobjectisa',
- 'join',
- 'jread',
- 'jscontent',
- 'jsonfornode',
- 'jsonhtml',
- 'jsonisleaf',
- 'jsonlabel',
- 'jtable',
- 'jtext',
- 'julianday',
- 'kernel',
- 'key',
- 'keycolumns',
- 'keys',
- 'keywords',
- 'kill',
- 'label',
- 'lang',
- 'language',
- 'last_insert_rowid',
- 'last',
- 'lastaccessdate',
- 'lastaccesstime',
- 'lastchild',
- 'lastcomponent',
- 'lasterror',
- 'lastinsertid',
- 'lastnode',
- 'lastpoint',
- 'lasttouched',
- 'lazyvalue',
- 'ldexp',
- 'leaveopen',
- 'left',
- 'length',
- 'lgamma',
- 'line',
- 'linediffers',
- 'linkto',
- 'linktype',
- 'list',
- 'listactivedatasources',
- 'listalldatabases',
- 'listalltables',
- 'listdatabasetables',
- 'listdatasourcedatabases',
- 'listdatasourcehosts',
- 'listdatasources',
- 'listen',
- 'listgroups',
- 'listgroupsbyuser',
- 'listhostdatabases',
- 'listhosts',
- 'listmethods',
- 'listnode',
- 'listusers',
- 'listusersbygroup',
- 'loadcerts',
- 'loaddatasourcehostinfo',
- 'loaddatasourceinfo',
- 'loadlibrary',
- 'localaddress',
- 'localname',
- 'locals',
- 'lock',
- 'log',
- 'log10',
- 'log1p',
- 'logb',
- 'lookupnamespace',
- 'lop',
- 'lowagiefont',
- 'lowercase',
- 'makecolor',
- 'makecolumnlist',
- 'makecolumnmap',
- 'makecookieyumyum',
- 'makefullpath',
- 'makeinheritedcopy',
- 'makenonrelative',
- 'makeurl',
- 'map',
- 'marker',
- 'matches',
- 'matchesstart',
- 'matchposition',
- 'matchstring',
- 'matchtriggers',
- 'max',
- 'maxinmemory',
- 'maxlength',
- 'maxrows',
- 'maxworkers',
- 'maybeslash',
- 'maybevalue',
- 'md5hex',
- 'media',
- 'members',
- 'merge',
- 'meta',
- 'method',
- 'methodname',
- 'millisecond',
- 'millisecondsinday',
- 'mime_boundary',
- 'mime_contenttype',
- 'mime_hdrs',
- 'mime',
- 'mimes',
- 'min',
- 'minute',
- 'minutesbetween',
- 'moddatestr',
- 'mode',
- 'modf',
- 'modificationdate',
- 'modificationtime',
- 'modulate',
- 'monitorenter',
- 'monitorexit',
- 'month',
- 'moveto',
- 'movetoattribute',
- 'movetoattributenamespace',
- 'movetoelement',
- 'movetofirstattribute',
- 'movetonextattribute',
- 'msg',
- 'mtime',
- 'multiple',
- 'n',
- 'name',
- 'named',
- 'namespaceuri',
- 'needinitialization',
- 'net',
- 'nettohost16',
- 'nettohost32',
- 'nettohost64',
- 'new',
- 'newbooleanarray',
- 'newbytearray',
- 'newchararray',
- 'newdoublearray',
- 'newfloatarray',
- 'newglobalref',
- 'newintarray',
- 'newlongarray',
- 'newobject',
- 'newobjectarray',
- 'newshortarray',
- 'newstring',
- 'next',
- 'nextafter',
- 'nextnode',
- 'nextprime',
- 'nextprune',
- 'nextprunedelta',
- 'nextsibling',
- 'nodeforpath',
- 'nodelist',
- 'nodename',
- 'nodetype',
- 'nodevalue',
- 'noop',
- 'normalize',
- 'notationname',
- 'notations',
- 'novaluelists',
- 'numsets',
- 'object',
- 'objects',
- 'objecttype',
- 'onclick',
- 'oncompare',
- 'oncomparestrict',
- 'onconvert',
- 'oncreate',
- 'ondblclick',
- 'onkeydown',
- 'onkeypress',
- 'onkeyup',
- 'onmousedown',
- 'onmousemove',
- 'onmouseout',
- 'onmouseover',
- 'onmouseup',
- 'onreset',
- 'onsubmit',
- 'ontop',
- 'open',
- 'openappend',
- 'openread',
- 'opentruncate',
- 'openwith',
- 'openwrite',
- 'openwriteonly',
- 'orderby',
- 'orderbydescending',
- 'out',
- 'output',
- 'outputencoding',
- 'ownerdocument',
- 'ownerelement',
- 'padleading',
- 'padtrailing',
- 'padzero',
- 'pagecount',
- 'pagerotation',
- 'pagesize',
- 'param',
- 'paramdescs',
- 'params',
- 'parent',
- 'parentdir',
- 'parentnode',
- 'parse_body',
- 'parse_boundary',
- 'parse_charset',
- 'parse_content_disposition',
- 'parse_content_transfer_encoding',
- 'parse_content_type',
- 'parse_hdrs',
- 'parse_mode',
- 'parse_msg',
- 'parse_parts',
- 'parse_rawhdrs',
- 'parse',
- 'parseas',
- 'parsedocument',
- 'parsenumber',
- 'parseoneheaderline',
- 'pass',
- 'path',
- 'pathinfo',
- 'pathtouri',
- 'pathtranslated',
- 'pause',
- 'payload',
- 'pdifference',
- 'perform',
- 'performonce',
- 'perms',
- 'pid',
- 'pixel',
- 'pm',
- 'polldbg',
- 'pollide',
- 'pop_capa',
- 'pop_cmd',
- 'pop_debug',
- 'pop_err',
- 'pop_get',
- 'pop_ids',
- 'pop_index',
- 'pop_log',
- 'pop_mode',
- 'pop_net',
- 'pop_res',
- 'pop_server',
- 'pop_timeout',
- 'pop_token',
- 'pop',
- 'popctx',
- 'popinclude',
- 'populate',
- 'port',
- 'position',
- 'postdispatch',
- 'postparam',
- 'postparams',
- 'postparamsary',
- 'poststring',
- 'pow',
- 'predispatch',
- 'prefix',
- 'preflight',
- 'prepare',
- 'prepared',
- 'pretty',
- 'prev',
- 'previoussibling',
- 'printsimplemsg',
- 'private_compare',
- 'private_find',
- 'private_findlast',
- 'private_merge',
- 'private_rebalanceforinsert',
- 'private_rebalanceforremove',
- 'private_replaceall',
- 'private_replacefirst',
- 'private_rotateleft',
- 'private_rotateright',
- 'private_setrange',
- 'private_split',
- 'probemimetype',
- 'provides',
- 'proxying',
- 'prune',
- 'publicid',
- 'pullhttpheader',
- 'pullmimepost',
- 'pulloneheaderline',
- 'pullpost',
- 'pullrawpost',
- 'pullrawpostchunks',
- 'pullrequest',
- 'pullrequestline',
- 'push',
- 'pushctx',
- 'pushinclude',
- 'qdarray',
- 'qdcount',
- 'queryparam',
- 'queryparams',
- 'queryparamsary',
- 'querystring',
- 'queue_maintenance',
- 'queue_messages',
- 'queue_status',
- 'queue',
- 'quit',
- 'r',
- 'raw',
- 'rawcontent',
- 'rawdiff',
- 'rawheader',
- 'rawheaders',
- 'rawinvokable',
- 'read',
- 'readattributevalue',
- 'readbytes',
- 'readbytesfully',
- 'readdestinations',
- 'readerror',
- 'readidobjects',
- 'readline',
- 'readmessage',
- 'readnumber',
- 'readobject',
- 'readobjecttcp',
- 'readpacket',
- 'readsomebytes',
- 'readstring',
- 'ready',
- 'realdoc',
- 'realpath',
- 'receivefd',
- 'recipients',
- 'recover',
- 'rect',
- 'rectype',
- 'red',
- 'redirectto',
- 'referrals',
- 'refid',
- 'refobj',
- 'refresh',
- 'rel',
- 'remainder',
- 'remoteaddr',
- 'remoteaddress',
- 'remoteport',
- 'remove',
- 'removeall',
- 'removeattribute',
- 'removeattributenode',
- 'removeattributens',
- 'removeback',
- 'removechild',
- 'removedatabasetable',
- 'removedatasource',
- 'removedatasourcedatabase',
- 'removedatasourcehost',
- 'removefield',
- 'removefirst',
- 'removefront',
- 'removegroup',
- 'removelast',
- 'removeleading',
- 'removenameditem',
- 'removenameditemns',
- 'removenode',
- 'removesubnode',
- 'removetrailing',
- 'removeuser',
- 'removeuserfromallgroups',
- 'removeuserfromgroup',
- 'rename',
- 'renderbytes',
- 'renderdocumentbytes',
- 'renderstring',
- 'replace',
- 'replaceall',
- 'replacechild',
- 'replacedata',
- 'replacefirst',
- 'replaceheader',
- 'replacepattern',
- 'representnode',
- 'representnoderesult',
- 'reqid',
- 'requestid',
- 'requestmethod',
- 'requestparams',
- 'requesturi',
- 'requires',
- 'reserve',
- 'reset',
- 'resize',
- 'resolutionh',
- 'resolutionv',
- 'resolvelinks',
- 'resourcedata',
- 'resourceinvokable',
- 'resourcename',
- 'resources',
- 'respond',
- 'restart',
- 'restname',
- 'result',
- 'results',
- 'resume',
- 'retr',
- 'retrieve',
- 'returncolumns',
- 'returntype',
- 'rev',
- 'reverse',
- 'rewind',
- 'right',
- 'rint',
- 'roll',
- 'root',
- 'rootmap',
- 'rotate',
- 'route',
- 'rowsfound',
- 'rset',
- 'rule',
- 'rules',
- 'run',
- 'running',
- 'runonce',
- 's',
- 'sa',
- 'safeexport8bits',
- 'sameas',
- 'save',
- 'savedata',
- 'scalb',
- 'scale',
- 'scanfordatasource',
- 'scantasks',
- 'scanworkers',
- 'schemaname',
- 'scheme',
- 'script',
- 'scriptextensions',
- 'scriptfilename',
- 'scriptname',
- 'scripttype',
- 'scripturi',
- 'scripturl',
- 'scrubkeywords',
- 'search',
- 'searchinbucket',
- 'searchurl',
- 'second',
- 'secondsbetween',
- 'seek',
- 'select',
- 'selected',
- 'selectmany',
- 'self',
- 'send',
- 'sendchunk',
- 'sendfd',
- 'sendfile',
- 'sendpacket',
- 'sendresponse',
- 'separator',
- 'serializationelements',
- 'serialize',
- 'serveraddr',
- 'serveradmin',
- 'servername',
- 'serverport',
- 'serverprotocol',
- 'serversignature',
- 'serversoftware',
- 'sessionsdump',
- 'sessionsmap',
- 'set',
- 'setalignment',
- 'setattr',
- 'setattribute',
- 'setattributenode',
- 'setattributenodens',
- 'setattributens',
- 'setbarheight',
- 'setbarmultiplier',
- 'setbarwidth',
- 'setbaseline',
- 'setbold',
- 'setbooleanarrayregion',
- 'setbooleanfield',
- 'setbordercolor',
- 'setborderwidth',
- 'setbytearrayregion',
- 'setbytefield',
- 'setchararrayregion',
- 'setcharfield',
- 'setcode',
- 'setcolor',
- 'setcolorspace',
- 'setcookie',
- 'setcwd',
- 'setdefaultstorage',
- 'setdestination',
- 'setdoublearrayregion',
- 'setdoublefield',
- 'setencoding',
- 'setface',
- 'setfieldvalue',
- 'setfindpattern',
- 'setfloatarrayregion',
- 'setfloatfield',
- 'setfont',
- 'setformat',
- 'setgeneratechecksum',
- 'setheaders',
- 'sethtmlattr',
- 'setignorecase',
- 'setinput',
- 'setintarrayregion',
- 'setintfield',
- 'setitalic',
- 'setlinewidth',
- 'setlongarrayregion',
- 'setlongfield',
- 'setmarker',
- 'setmaxfilesize',
- 'setmode',
- 'setname',
- 'setnameditem',
- 'setnameditemns',
- 'setobjectarrayelement',
- 'setobjectfield',
- 'setpadding',
- 'setpagenumber',
- 'setpagerange',
- 'setposition',
- 'setrange',
- 'setreplacepattern',
- 'setshortarrayregion',
- 'setshortfield',
- 'setshowchecksum',
- 'setsize',
- 'setspacing',
- 'setstaticbooleanfield',
- 'setstaticbytefield',
- 'setstaticcharfield',
- 'setstaticdoublefield',
- 'setstaticfloatfield',
- 'setstaticintfield',
- 'setstaticlongfield',
- 'setstaticobjectfield',
- 'setstaticshortfield',
- 'setstatus',
- 'settextalignment',
- 'settextsize',
- 'settimezone',
- 'settrait',
- 'setunderline',
- 'sharpen',
- 'shouldabort',
- 'shouldclose',
- 'showchecksum',
- 'showcode39startstop',
- 'showeanguardbars',
- 'shutdownrd',
- 'shutdownrdwr',
- 'shutdownwr',
- 'sin',
- 'sinh',
- 'size',
- 'skip',
- 'skiprows',
- 'sort',
- 'sortcolumns',
- 'source',
- 'sourcecolumn',
- 'sourcefile',
- 'sourceline',
- 'specified',
- 'split',
- 'splitconnection',
- 'splitdebuggingthread',
- 'splitextension',
- 'splittext',
- 'splitthread',
- 'splittoprivatedev',
- 'splituppath',
- 'sql',
- 'sqlite3',
- 'sqrt',
- 'src',
- 'srcpath',
- 'sslerrfail',
- 'stack',
- 'standby',
- 'start',
- 'startone',
- 'startup',
- 'stat',
- 'statement',
- 'statementonly',
- 'stats',
- 'status',
- 'statuscode',
- 'statusmsg',
- 'stdin',
- 'step',
- 'stls',
- 'stop',
- 'stoprunning',
- 'storedata',
- 'stripfirstcomponent',
- 'striplastcomponent',
- 'style',
- 'styletype',
- 'sub',
- 'subject',
- 'subnode',
- 'subnodes',
- 'substringdata',
- 'subtract',
- 'subtraits',
- 'sum',
- 'supportscontentrepresentation',
- 'swapbytes',
- 'systemid',
- 't',
- 'tabindex',
- 'table',
- 'tablecolumnnames',
- 'tablecolumns',
- 'tablehascolumn',
- 'tableizestacktrace',
- 'tableizestacktracelink',
- 'tablemap',
- 'tablename',
- 'tables',
- 'tabs',
- 'tabstr',
- 'tag',
- 'tagname',
- 'take',
- 'tan',
- 'tanh',
- 'target',
- 'tasks',
- 'tb',
- 'tell',
- 'testexitcode',
- 'testlock',
- 'textwidth',
- 'thenby',
- 'thenbydescending',
- 'threadreaddesc',
- 'throw',
- 'thrownew',
- 'time',
- 'timezone',
- 'title',
- 'titlecase',
- 'to',
- 'token',
- 'tolower',
- 'top',
- 'toreflectedfield',
- 'toreflectedmethod',
- 'total_changes',
- 'totitle',
- 'touch',
- 'toupper',
- 'toxmlstring',
- 'trace',
- 'trackingid',
- 'trait',
- 'transform',
- 'trigger',
- 'trim',
- 'trunk',
- 'tryfinderrorfile',
- 'trylock',
- 'tryreadobject',
- 'type',
- 'typename',
- 'uidl',
- 'uncompress',
- 'unescape',
- 'union',
- 'uniqueid',
- 'unlock',
- 'unspool',
- 'up',
- 'update',
- 'updategroup',
- 'upload',
- 'uppercase',
- 'url',
- 'used',
- 'usemap',
- 'user',
- 'usercolumns',
- 'valid',
- 'validate',
- 'validatesessionstable',
- 'value',
- 'values',
- 'valuetype',
- 'variant',
- 'version',
- 'wait',
- 'waitforcompletion',
- 'warnings',
- 'week',
- 'weekofmonth',
- 'weekofyear',
- 'where',
- 'width',
- 'workers',
- 'workinginputcolumns',
- 'workingkeycolumns',
- 'workingkeyfield_name',
- 'workingreturncolumns',
- 'workingsortcolumns',
- 'write',
- 'writebodybytes',
- 'writebytes',
- 'writeheader',
- 'writeheaderbytes',
- 'writeheaderline',
- 'writeid',
- 'writemessage',
- 'writeobject',
- 'writeobjecttcp',
- 'writestring',
- 'wroteheaders',
- 'xhtml',
- 'xmllang',
- 'y0',
- 'y1',
- 'year',
- 'yearwoy',
- 'yn',
- 'z',
- 'zip',
- 'zipfile',
- 'zipfilename',
- 'zipname',
- 'zips',
- 'zoneoffset',
- ),
- 'Lasso 8 Member Tags': (
- 'accept',
- 'add',
- 'addattachment',
- 'addattribute',
- 'addbarcode',
- 'addchapter',
- 'addcheckbox',
- 'addchild',
- 'addcombobox',
- 'addcomment',
- 'addcontent',
- 'addhiddenfield',
- 'addhtmlpart',
- 'addimage',
- 'addjavascript',
- 'addlist',
- 'addnamespace',
- 'addnextsibling',
- 'addpage',
- 'addparagraph',
- 'addparenttype',
- 'addpart',
- 'addpasswordfield',
- 'addphrase',
- 'addprevsibling',
- 'addradiobutton',
- 'addradiogroup',
- 'addresetbutton',
- 'addsection',
- 'addselectlist',
- 'addsibling',
- 'addsubmitbutton',
- 'addtable',
- 'addtext',
- 'addtextarea',
- 'addtextfield',
- 'addtextpart',
- 'alarms',
- 'annotate',
- 'answer',
- 'append',
- 'appendreplacement',
- 'appendtail',
- 'arc',
- 'asasync',
- 'astype',
- 'atbegin',
- 'atbottom',
- 'atend',
- 'atfarleft',
- 'atfarright',
- 'attop',
- 'attributecount',
- 'attributes',
- 'authenticate',
- 'authorize',
- 'backward',
- 'baseuri',
- 'bcc',
- 'beanproperties',
- 'beginswith',
- 'bind',
- 'bitand',
- 'bitclear',
- 'bitflip',
- 'bitformat',
- 'bitnot',
- 'bitor',
- 'bitset',
- 'bitshiftleft',
- 'bitshiftright',
- 'bittest',
- 'bitxor',
- 'blur',
- 'body',
- 'boundary',
- 'bytes',
- 'call',
- 'cancel',
- 'capabilities',
- 'cc',
- 'chardigitvalue',
- 'charname',
- 'charset',
- 'chartype',
- 'children',
- 'circle',
- 'close',
- 'closepath',
- 'closewrite',
- 'code',
- 'colorspace',
- 'command',
- 'comments',
- 'compare',
- 'comparecodepointorder',
- 'compile',
- 'composite',
- 'connect',
- 'contains',
- 'content_disposition',
- 'content_transfer_encoding',
- 'content_type',
- 'contents',
- 'contrast',
- 'convert',
- 'crop',
- 'curveto',
- 'data',
- 'date',
- 'day',
- 'daylights',
- 'dayofweek',
- 'dayofyear',
- 'decrement',
- 'delete',
- 'depth',
- 'describe',
- 'description',
- 'deserialize',
- 'detach',
- 'detachreference',
- 'difference',
- 'digit',
- 'document',
- 'down',
- 'drawtext',
- 'dst',
- 'dump',
- 'endswith',
- 'enhance',
- 'equals',
- 'errors',
- 'eval',
- 'events',
- 'execute',
- 'export16bits',
- 'export32bits',
- 'export64bits',
- 'export8bits',
- 'exportfdf',
- 'exportstring',
- 'extract',
- 'extractone',
- 'fieldnames',
- 'fieldtype',
- 'fieldvalue',
- 'file',
- 'find',
- 'findindex',
- 'findnamespace',
- 'findnamespacebyhref',
- 'findpattern',
- 'findposition',
- 'first',
- 'firstchild',
- 'fliph',
- 'flipv',
- 'flush',
- 'foldcase',
- 'foreach',
- 'format',
- 'forward',
- 'freebusies',
- 'freezetype',
- 'freezevalue',
- 'from',
- 'fulltype',
- 'generatechecksum',
- 'get',
- 'getabswidth',
- 'getalignment',
- 'getattribute',
- 'getattributenamespace',
- 'getbarheight',
- 'getbarmultiplier',
- 'getbarwidth',
- 'getbaseline',
- 'getbordercolor',
- 'getborderwidth',
- 'getcode',
- 'getcolor',
- 'getcolumncount',
- 'getencoding',
- 'getface',
- 'getfont',
- 'getformat',
- 'getfullfontname',
- 'getheaders',
- 'getmargins',
- 'getmethod',
- 'getnumericvalue',
- 'getpadding',
- 'getpagenumber',
- 'getparams',
- 'getproperty',
- 'getpsfontname',
- 'getrange',
- 'getrowcount',
- 'getsize',
- 'getspacing',
- 'getsupportedencodings',
- 'gettextalignment',
- 'gettextsize',
- 'gettype',
- 'gmt',
- 'groupcount',
- 'hasattribute',
- 'haschildren',
- 'hasvalue',
- 'header',
- 'headers',
- 'height',
- 'histogram',
- 'hosttonet16',
- 'hosttonet32',
- 'hour',
- 'id',
- 'ignorecase',
- 'import16bits',
- 'import32bits',
- 'import64bits',
- 'import8bits',
- 'importfdf',
- 'importstring',
- 'increment',
- 'input',
- 'insert',
- 'insertatcurrent',
- 'insertfirst',
- 'insertfrom',
- 'insertlast',
- 'insertpage',
- 'integer',
- 'intersection',
- 'invoke',
- 'isa',
- 'isalnum',
- 'isalpha',
- 'isbase',
- 'iscntrl',
- 'isdigit',
- 'isemptyelement',
- 'islower',
- 'isopen',
- 'isprint',
- 'isspace',
- 'istitle',
- 'istruetype',
- 'isualphabetic',
- 'isulowercase',
- 'isupper',
- 'isuuppercase',
- 'isuwhitespace',
- 'iswhitespace',
- 'iterator',
- 'javascript',
- 'join',
- 'journals',
- 'key',
- 'keys',
- 'last',
- 'lastchild',
- 'lasterror',
- 'left',
- 'length',
- 'line',
- 'listen',
- 'localaddress',
- 'localname',
- 'lock',
- 'lookupnamespace',
- 'lowercase',
- 'marker',
- 'matches',
- 'matchesstart',
- 'matchposition',
- 'matchstring',
- 'merge',
- 'millisecond',
- 'minute',
- 'mode',
- 'modulate',
- 'month',
- 'moveto',
- 'movetoattributenamespace',
- 'movetoelement',
- 'movetofirstattribute',
- 'movetonextattribute',
- 'name',
- 'namespaces',
- 'namespaceuri',
- 'nettohost16',
- 'nettohost32',
- 'newchild',
- 'next',
- 'nextsibling',
- 'nodetype',
- 'open',
- 'output',
- 'padleading',
- 'padtrailing',
- 'pagecount',
- 'pagesize',
- 'paraminfo',
- 'params',
- 'parent',
- 'path',
- 'pixel',
- 'position',
- 'prefix',
- 'previoussibling',
- 'properties',
- 'rawheaders',
- 'read',
- 'readattributevalue',
- 'readerror',
- 'readfrom',
- 'readline',
- 'readlock',
- 'readstring',
- 'readunlock',
- 'recipients',
- 'rect',
- 'refcount',
- 'referrals',
- 'remoteaddress',
- 'remove',
- 'removeall',
- 'removeattribute',
- 'removechild',
- 'removecurrent',
- 'removefirst',
- 'removelast',
- 'removeleading',
- 'removenamespace',
- 'removetrailing',
- 'render',
- 'replace',
- 'replaceall',
- 'replacefirst',
- 'replacepattern',
- 'replacewith',
- 'reserve',
- 'reset',
- 'resolutionh',
- 'resolutionv',
- 'response',
- 'results',
- 'retrieve',
- 'returntype',
- 'reverse',
- 'reverseiterator',
- 'right',
- 'rotate',
- 'run',
- 'save',
- 'scale',
- 'search',
- 'second',
- 'send',
- 'serialize',
- 'set',
- 'setalignment',
- 'setbarheight',
- 'setbarmultiplier',
- 'setbarwidth',
- 'setbaseline',
- 'setblocking',
- 'setbordercolor',
- 'setborderwidth',
- 'setbytes',
- 'setcode',
- 'setcolor',
- 'setcolorspace',
- 'setdatatype',
- 'setencoding',
- 'setface',
- 'setfieldvalue',
- 'setfont',
- 'setformat',
- 'setgeneratechecksum',
- 'setheight',
- 'setlassodata',
- 'setlinewidth',
- 'setmarker',
- 'setmode',
- 'setname',
- 'setpadding',
- 'setpagenumber',
- 'setpagerange',
- 'setposition',
- 'setproperty',
- 'setrange',
- 'setshowchecksum',
- 'setsize',
- 'setspacing',
- 'settemplate',
- 'settemplatestr',
- 'settextalignment',
- 'settextdata',
- 'settextsize',
- 'settype',
- 'setunderline',
- 'setwidth',
- 'setxmldata',
- 'sharpen',
- 'showchecksum',
- 'showcode39startstop',
- 'showeanguardbars',
- 'signal',
- 'signalall',
- 'size',
- 'smooth',
- 'sort',
- 'sortwith',
- 'split',
- 'standards',
- 'steal',
- 'subject',
- 'substring',
- 'subtract',
- 'swapbytes',
- 'textwidth',
- 'time',
- 'timezones',
- 'titlecase',
- 'to',
- 'todos',
- 'tolower',
- 'totitle',
- 'toupper',
- 'transform',
- 'trim',
- 'type',
- 'unescape',
- 'union',
- 'uniqueid',
- 'unlock',
- 'unserialize',
- 'up',
- 'uppercase',
- 'value',
- 'values',
- 'valuetype',
- 'wait',
- 'waskeyword',
- 'week',
- 'width',
- 'write',
- 'writelock',
- 'writeto',
- 'writeunlock',
- 'xmllang',
- 'xmlschematype',
- 'year',
- )
-}
+ 'isempty',
+ 'isemptyelement',
+ 'isfirststep',
+ 'isfullpath',
+ 'isgraph',
+ 'ishttps',
+ 'isidle',
+ 'isinstanceof',
+ 'islink',
+ 'islower',
+ 'ismultipart',
+ 'isnan',
+ 'isnota',
+ 'isnotempty',
+ 'isnothing',
+ 'iso3country',
+ 'iso3language',
+ 'isopen',
+ 'isprint',
+ 'ispunct',
+ 'issameobject',
+ 'isset',
+ 'issourcefile',
+ 'isspace',
+ 'isssl',
+ 'issupported',
+ 'istitle',
+ 'istruetype',
+ 'istype',
+ 'isualphabetic',
+ 'isulowercase',
+ 'isupper',
+ 'isuuppercase',
+ 'isuwhitespace',
+ 'isvalid',
+ 'iswhitespace',
+ 'isxdigit',
+ 'isxhr',
+ 'item',
+ 'j0',
+ 'j1',
+ 'javascript',
+ 'jbarcode',
+ 'jcolor',
+ 'jfont',
+ 'jimage',
+ 'jlist',
+ 'jn',
+ 'jobjectisa',
+ 'join',
+ 'jread',
+ 'jscontent',
+ 'jsonfornode',
+ 'jsonhtml',
+ 'jsonisleaf',
+ 'jsonlabel',
+ 'jtable',
+ 'jtext',
+ 'julianday',
+ 'kernel',
+ 'key',
+ 'keycolumns',
+ 'keys',
+ 'keywords',
+ 'kill',
+ 'label',
+ 'lang',
+ 'language',
+ 'last_insert_rowid',
+ 'last',
+ 'lastaccessdate',
+ 'lastaccesstime',
+ 'lastchild',
+ 'lastcomponent',
+ 'lasterror',
+ 'lastinsertid',
+ 'lastnode',
+ 'lastpoint',
+ 'lasttouched',
+ 'lazyvalue',
+ 'ldexp',
+ 'leaveopen',
+ 'left',
+ 'length',
+ 'lgamma',
+ 'line',
+ 'linediffers',
+ 'linkto',
+ 'linktype',
+ 'list',
+ 'listactivedatasources',
+ 'listalldatabases',
+ 'listalltables',
+ 'listdatabasetables',
+ 'listdatasourcedatabases',
+ 'listdatasourcehosts',
+ 'listdatasources',
+ 'listen',
+ 'listgroups',
+ 'listgroupsbyuser',
+ 'listhostdatabases',
+ 'listhosts',
+ 'listmethods',
+ 'listnode',
+ 'listusers',
+ 'listusersbygroup',
+ 'loadcerts',
+ 'loaddatasourcehostinfo',
+ 'loaddatasourceinfo',
+ 'loadlibrary',
+ 'localaddress',
+ 'localname',
+ 'locals',
+ 'lock',
+ 'log',
+ 'log10',
+ 'log1p',
+ 'logb',
+ 'lookupnamespace',
+ 'lop',
+ 'lowagiefont',
+ 'lowercase',
+ 'makecolor',
+ 'makecolumnlist',
+ 'makecolumnmap',
+ 'makecookieyumyum',
+ 'makefullpath',
+ 'makeinheritedcopy',
+ 'makenonrelative',
+ 'makeurl',
+ 'map',
+ 'marker',
+ 'matches',
+ 'matchesstart',
+ 'matchposition',
+ 'matchstring',
+ 'matchtriggers',
+ 'max',
+ 'maxinmemory',
+ 'maxlength',
+ 'maxrows',
+ 'maxworkers',
+ 'maybeslash',
+ 'maybevalue',
+ 'md5hex',
+ 'media',
+ 'members',
+ 'merge',
+ 'meta',
+ 'method',
+ 'methodname',
+ 'millisecond',
+ 'millisecondsinday',
+ 'mime_boundary',
+ 'mime_contenttype',
+ 'mime_hdrs',
+ 'mime',
+ 'mimes',
+ 'min',
+ 'minute',
+ 'minutesbetween',
+ 'moddatestr',
+ 'mode',
+ 'modf',
+ 'modificationdate',
+ 'modificationtime',
+ 'modulate',
+ 'monitorenter',
+ 'monitorexit',
+ 'month',
+ 'moveto',
+ 'movetoattribute',
+ 'movetoattributenamespace',
+ 'movetoelement',
+ 'movetofirstattribute',
+ 'movetonextattribute',
+ 'msg',
+ 'mtime',
+ 'multiple',
+ 'n',
+ 'name',
+ 'named',
+ 'namespaceuri',
+ 'needinitialization',
+ 'net',
+ 'nettohost16',
+ 'nettohost32',
+ 'nettohost64',
+ 'new',
+ 'newbooleanarray',
+ 'newbytearray',
+ 'newchararray',
+ 'newdoublearray',
+ 'newfloatarray',
+ 'newglobalref',
+ 'newintarray',
+ 'newlongarray',
+ 'newobject',
+ 'newobjectarray',
+ 'newshortarray',
+ 'newstring',
+ 'next',
+ 'nextafter',
+ 'nextnode',
+ 'nextprime',
+ 'nextprune',
+ 'nextprunedelta',
+ 'nextsibling',
+ 'nodeforpath',
+ 'nodelist',
+ 'nodename',
+ 'nodetype',
+ 'nodevalue',
+ 'noop',
+ 'normalize',
+ 'notationname',
+ 'notations',
+ 'novaluelists',
+ 'numsets',
+ 'object',
+ 'objects',
+ 'objecttype',
+ 'onclick',
+ 'oncompare',
+ 'oncomparestrict',
+ 'onconvert',
+ 'oncreate',
+ 'ondblclick',
+ 'onkeydown',
+ 'onkeypress',
+ 'onkeyup',
+ 'onmousedown',
+ 'onmousemove',
+ 'onmouseout',
+ 'onmouseover',
+ 'onmouseup',
+ 'onreset',
+ 'onsubmit',
+ 'ontop',
+ 'open',
+ 'openappend',
+ 'openread',
+ 'opentruncate',
+ 'openwith',
+ 'openwrite',
+ 'openwriteonly',
+ 'orderby',
+ 'orderbydescending',
+ 'out',
+ 'output',
+ 'outputencoding',
+ 'ownerdocument',
+ 'ownerelement',
+ 'padleading',
+ 'padtrailing',
+ 'padzero',
+ 'pagecount',
+ 'pagerotation',
+ 'pagesize',
+ 'param',
+ 'paramdescs',
+ 'params',
+ 'parent',
+ 'parentdir',
+ 'parentnode',
+ 'parse_body',
+ 'parse_boundary',
+ 'parse_charset',
+ 'parse_content_disposition',
+ 'parse_content_transfer_encoding',
+ 'parse_content_type',
+ 'parse_hdrs',
+ 'parse_mode',
+ 'parse_msg',
+ 'parse_parts',
+ 'parse_rawhdrs',
+ 'parse',
+ 'parseas',
+ 'parsedocument',
+ 'parsenumber',
+ 'parseoneheaderline',
+ 'pass',
+ 'path',
+ 'pathinfo',
+ 'pathtouri',
+ 'pathtranslated',
+ 'pause',
+ 'payload',
+ 'pdifference',
+ 'perform',
+ 'performonce',
+ 'perms',
+ 'pid',
+ 'pixel',
+ 'pm',
+ 'polldbg',
+ 'pollide',
+ 'pop_capa',
+ 'pop_cmd',
+ 'pop_debug',
+ 'pop_err',
+ 'pop_get',
+ 'pop_ids',
+ 'pop_index',
+ 'pop_log',
+ 'pop_mode',
+ 'pop_net',
+ 'pop_res',
+ 'pop_server',
+ 'pop_timeout',
+ 'pop_token',
+ 'pop',
+ 'popctx',
+ 'popinclude',
+ 'populate',
+ 'port',
+ 'position',
+ 'postdispatch',
+ 'postparam',
+ 'postparams',
+ 'postparamsary',
+ 'poststring',
+ 'pow',
+ 'predispatch',
+ 'prefix',
+ 'preflight',
+ 'prepare',
+ 'prepared',
+ 'pretty',
+ 'prev',
+ 'previoussibling',
+ 'printsimplemsg',
+ 'private_compare',
+ 'private_find',
+ 'private_findlast',
+ 'private_merge',
+ 'private_rebalanceforinsert',
+ 'private_rebalanceforremove',
+ 'private_replaceall',
+ 'private_replacefirst',
+ 'private_rotateleft',
+ 'private_rotateright',
+ 'private_setrange',
+ 'private_split',
+ 'probemimetype',
+ 'provides',
+ 'proxying',
+ 'prune',
+ 'publicid',
+ 'pullhttpheader',
+ 'pullmimepost',
+ 'pulloneheaderline',
+ 'pullpost',
+ 'pullrawpost',
+ 'pullrawpostchunks',
+ 'pullrequest',
+ 'pullrequestline',
+ 'push',
+ 'pushctx',
+ 'pushinclude',
+ 'qdarray',
+ 'qdcount',
+ 'queryparam',
+ 'queryparams',
+ 'queryparamsary',
+ 'querystring',
+ 'queue_maintenance',
+ 'queue_messages',
+ 'queue_status',
+ 'queue',
+ 'quit',
+ 'r',
+ 'raw',
+ 'rawcontent',
+ 'rawdiff',
+ 'rawheader',
+ 'rawheaders',
+ 'rawinvokable',
+ 'read',
+ 'readattributevalue',
+ 'readbytes',
+ 'readbytesfully',
+ 'readdestinations',
+ 'readerror',
+ 'readidobjects',
+ 'readline',
+ 'readmessage',
+ 'readnumber',
+ 'readobject',
+ 'readobjecttcp',
+ 'readpacket',
+ 'readsomebytes',
+ 'readstring',
+ 'ready',
+ 'realdoc',
+ 'realpath',
+ 'receivefd',
+ 'recipients',
+ 'recover',
+ 'rect',
+ 'rectype',
+ 'red',
+ 'redirectto',
+ 'referrals',
+ 'refid',
+ 'refobj',
+ 'refresh',
+ 'rel',
+ 'remainder',
+ 'remoteaddr',
+ 'remoteaddress',
+ 'remoteport',
+ 'remove',
+ 'removeall',
+ 'removeattribute',
+ 'removeattributenode',
+ 'removeattributens',
+ 'removeback',
+ 'removechild',
+ 'removedatabasetable',
+ 'removedatasource',
+ 'removedatasourcedatabase',
+ 'removedatasourcehost',
+ 'removefield',
+ 'removefirst',
+ 'removefront',
+ 'removegroup',
+ 'removelast',
+ 'removeleading',
+ 'removenameditem',
+ 'removenameditemns',
+ 'removenode',
+ 'removesubnode',
+ 'removetrailing',
+ 'removeuser',
+ 'removeuserfromallgroups',
+ 'removeuserfromgroup',
+ 'rename',
+ 'renderbytes',
+ 'renderdocumentbytes',
+ 'renderstring',
+ 'replace',
+ 'replaceall',
+ 'replacechild',
+ 'replacedata',
+ 'replacefirst',
+ 'replaceheader',
+ 'replacepattern',
+ 'representnode',
+ 'representnoderesult',
+ 'reqid',
+ 'requestid',
+ 'requestmethod',
+ 'requestparams',
+ 'requesturi',
+ 'requires',
+ 'reserve',
+ 'reset',
+ 'resize',
+ 'resolutionh',
+ 'resolutionv',
+ 'resolvelinks',
+ 'resourcedata',
+ 'resourceinvokable',
+ 'resourcename',
+ 'resources',
+ 'respond',
+ 'restart',
+ 'restname',
+ 'result',
+ 'results',
+ 'resume',
+ 'retr',
+ 'retrieve',
+ 'returncolumns',
+ 'returntype',
+ 'rev',
+ 'reverse',
+ 'rewind',
+ 'right',
+ 'rint',
+ 'roll',
+ 'root',
+ 'rootmap',
+ 'rotate',
+ 'route',
+ 'rowsfound',
+ 'rset',
+ 'rule',
+ 'rules',
+ 'run',
+ 'running',
+ 'runonce',
+ 's',
+ 'sa',
+ 'safeexport8bits',
+ 'sameas',
+ 'save',
+ 'savedata',
+ 'scalb',
+ 'scale',
+ 'scanfordatasource',
+ 'scantasks',
+ 'scanworkers',
+ 'schemaname',
+ 'scheme',
+ 'script',
+ 'scriptextensions',
+ 'scriptfilename',
+ 'scriptname',
+ 'scripttype',
+ 'scripturi',
+ 'scripturl',
+ 'scrubkeywords',
+ 'search',
+ 'searchinbucket',
+ 'searchurl',
+ 'second',
+ 'secondsbetween',
+ 'seek',
+ 'select',
+ 'selected',
+ 'selectmany',
+ 'self',
+ 'send',
+ 'sendchunk',
+ 'sendfd',
+ 'sendfile',
+ 'sendpacket',
+ 'sendresponse',
+ 'separator',
+ 'serializationelements',
+ 'serialize',
+ 'serveraddr',
+ 'serveradmin',
+ 'servername',
+ 'serverport',
+ 'serverprotocol',
+ 'serversignature',
+ 'serversoftware',
+ 'sessionsdump',
+ 'sessionsmap',
+ 'set',
+ 'setalignment',
+ 'setattr',
+ 'setattribute',
+ 'setattributenode',
+ 'setattributenodens',
+ 'setattributens',
+ 'setbarheight',
+ 'setbarmultiplier',
+ 'setbarwidth',
+ 'setbaseline',
+ 'setbold',
+ 'setbooleanarrayregion',
+ 'setbooleanfield',
+ 'setbordercolor',
+ 'setborderwidth',
+ 'setbytearrayregion',
+ 'setbytefield',
+ 'setchararrayregion',
+ 'setcharfield',
+ 'setcode',
+ 'setcolor',
+ 'setcolorspace',
+ 'setcookie',
+ 'setcwd',
+ 'setdefaultstorage',
+ 'setdestination',
+ 'setdoublearrayregion',
+ 'setdoublefield',
+ 'setencoding',
+ 'setface',
+ 'setfieldvalue',
+ 'setfindpattern',
+ 'setfloatarrayregion',
+ 'setfloatfield',
+ 'setfont',
+ 'setformat',
+ 'setgeneratechecksum',
+ 'setheaders',
+ 'sethtmlattr',
+ 'setignorecase',
+ 'setinput',
+ 'setintarrayregion',
+ 'setintfield',
+ 'setitalic',
+ 'setlinewidth',
+ 'setlongarrayregion',
+ 'setlongfield',
+ 'setmarker',
+ 'setmaxfilesize',
+ 'setmode',
+ 'setname',
+ 'setnameditem',
+ 'setnameditemns',
+ 'setobjectarrayelement',
+ 'setobjectfield',
+ 'setpadding',
+ 'setpagenumber',
+ 'setpagerange',
+ 'setposition',
+ 'setrange',
+ 'setreplacepattern',
+ 'setshortarrayregion',
+ 'setshortfield',
+ 'setshowchecksum',
+ 'setsize',
+ 'setspacing',
+ 'setstaticbooleanfield',
+ 'setstaticbytefield',
+ 'setstaticcharfield',
+ 'setstaticdoublefield',
+ 'setstaticfloatfield',
+ 'setstaticintfield',
+ 'setstaticlongfield',
+ 'setstaticobjectfield',
+ 'setstaticshortfield',
+ 'setstatus',
+ 'settextalignment',
+ 'settextsize',
+ 'settimezone',
+ 'settrait',
+ 'setunderline',
+ 'sharpen',
+ 'shouldabort',
+ 'shouldclose',
+ 'showchecksum',
+ 'showcode39startstop',
+ 'showeanguardbars',
+ 'shutdownrd',
+ 'shutdownrdwr',
+ 'shutdownwr',
+ 'sin',
+ 'sinh',
+ 'size',
+ 'skip',
+ 'skiprows',
+ 'sort',
+ 'sortcolumns',
+ 'source',
+ 'sourcecolumn',
+ 'sourcefile',
+ 'sourceline',
+ 'specified',
+ 'split',
+ 'splitconnection',
+ 'splitdebuggingthread',
+ 'splitextension',
+ 'splittext',
+ 'splitthread',
+ 'splittoprivatedev',
+ 'splituppath',
+ 'sql',
+ 'sqlite3',
+ 'sqrt',
+ 'src',
+ 'srcpath',
+ 'sslerrfail',
+ 'stack',
+ 'standby',
+ 'start',
+ 'startone',
+ 'startup',
+ 'stat',
+ 'statement',
+ 'statementonly',
+ 'stats',
+ 'status',
+ 'statuscode',
+ 'statusmsg',
+ 'stdin',
+ 'step',
+ 'stls',
+ 'stop',
+ 'stoprunning',
+ 'storedata',
+ 'stripfirstcomponent',
+ 'striplastcomponent',
+ 'style',
+ 'styletype',
+ 'sub',
+ 'subject',
+ 'subnode',
+ 'subnodes',
+ 'substringdata',
+ 'subtract',
+ 'subtraits',
+ 'sum',
+ 'supportscontentrepresentation',
+ 'swapbytes',
+ 'systemid',
+ 't',
+ 'tabindex',
+ 'table',
+ 'tablecolumnnames',
+ 'tablecolumns',
+ 'tablehascolumn',
+ 'tableizestacktrace',
+ 'tableizestacktracelink',
+ 'tablemap',
+ 'tablename',
+ 'tables',
+ 'tabs',
+ 'tabstr',
+ 'tag',
+ 'tagname',
+ 'take',
+ 'tan',
+ 'tanh',
+ 'target',
+ 'tasks',
+ 'tb',
+ 'tell',
+ 'testexitcode',
+ 'testlock',
+ 'textwidth',
+ 'thenby',
+ 'thenbydescending',
+ 'threadreaddesc',
+ 'throw',
+ 'thrownew',
+ 'time',
+ 'timezone',
+ 'title',
+ 'titlecase',
+ 'to',
+ 'token',
+ 'tolower',
+ 'top',
+ 'toreflectedfield',
+ 'toreflectedmethod',
+ 'total_changes',
+ 'totitle',
+ 'touch',
+ 'toupper',
+ 'toxmlstring',
+ 'trace',
+ 'trackingid',
+ 'trait',
+ 'transform',
+ 'trigger',
+ 'trim',
+ 'trunk',
+ 'tryfinderrorfile',
+ 'trylock',
+ 'tryreadobject',
+ 'type',
+ 'typename',
+ 'uidl',
+ 'uncompress',
+ 'unescape',
+ 'union',
+ 'uniqueid',
+ 'unlock',
+ 'unspool',
+ 'up',
+ 'update',
+ 'updategroup',
+ 'upload',
+ 'uppercase',
+ 'url',
+ 'used',
+ 'usemap',
+ 'user',
+ 'usercolumns',
+ 'valid',
+ 'validate',
+ 'validatesessionstable',
+ 'value',
+ 'values',
+ 'valuetype',
+ 'variant',
+ 'version',
+ 'wait',
+ 'waitforcompletion',
+ 'warnings',
+ 'week',
+ 'weekofmonth',
+ 'weekofyear',
+ 'where',
+ 'width',
+ 'workers',
+ 'workinginputcolumns',
+ 'workingkeycolumns',
+ 'workingkeyfield_name',
+ 'workingreturncolumns',
+ 'workingsortcolumns',
+ 'write',
+ 'writebodybytes',
+ 'writebytes',
+ 'writeheader',
+ 'writeheaderbytes',
+ 'writeheaderline',
+ 'writeid',
+ 'writemessage',
+ 'writeobject',
+ 'writeobjecttcp',
+ 'writestring',
+ 'wroteheaders',
+ 'xhtml',
+ 'xmllang',
+ 'y0',
+ 'y1',
+ 'year',
+ 'yearwoy',
+ 'yn',
+ 'z',
+ 'zip',
+ 'zipfile',
+ 'zipfilename',
+ 'zipname',
+ 'zips',
+ 'zoneoffset',
+ ),
+ 'Lasso 8 Member Tags': (
+ 'accept',
+ 'add',
+ 'addattachment',
+ 'addattribute',
+ 'addbarcode',
+ 'addchapter',
+ 'addcheckbox',
+ 'addchild',
+ 'addcombobox',
+ 'addcomment',
+ 'addcontent',
+ 'addhiddenfield',
+ 'addhtmlpart',
+ 'addimage',
+ 'addjavascript',
+ 'addlist',
+ 'addnamespace',
+ 'addnextsibling',
+ 'addpage',
+ 'addparagraph',
+ 'addparenttype',
+ 'addpart',
+ 'addpasswordfield',
+ 'addphrase',
+ 'addprevsibling',
+ 'addradiobutton',
+ 'addradiogroup',
+ 'addresetbutton',
+ 'addsection',
+ 'addselectlist',
+ 'addsibling',
+ 'addsubmitbutton',
+ 'addtable',
+ 'addtext',
+ 'addtextarea',
+ 'addtextfield',
+ 'addtextpart',
+ 'alarms',
+ 'annotate',
+ 'answer',
+ 'append',
+ 'appendreplacement',
+ 'appendtail',
+ 'arc',
+ 'asasync',
+ 'astype',
+ 'atbegin',
+ 'atbottom',
+ 'atend',
+ 'atfarleft',
+ 'atfarright',
+ 'attop',
+ 'attributecount',
+ 'attributes',
+ 'authenticate',
+ 'authorize',
+ 'backward',
+ 'baseuri',
+ 'bcc',
+ 'beanproperties',
+ 'beginswith',
+ 'bind',
+ 'bitand',
+ 'bitclear',
+ 'bitflip',
+ 'bitformat',
+ 'bitnot',
+ 'bitor',
+ 'bitset',
+ 'bitshiftleft',
+ 'bitshiftright',
+ 'bittest',
+ 'bitxor',
+ 'blur',
+ 'body',
+ 'boundary',
+ 'bytes',
+ 'call',
+ 'cancel',
+ 'capabilities',
+ 'cc',
+ 'chardigitvalue',
+ 'charname',
+ 'charset',
+ 'chartype',
+ 'children',
+ 'circle',
+ 'close',
+ 'closepath',
+ 'closewrite',
+ 'code',
+ 'colorspace',
+ 'command',
+ 'comments',
+ 'compare',
+ 'comparecodepointorder',
+ 'compile',
+ 'composite',
+ 'connect',
+ 'contains',
+ 'content_disposition',
+ 'content_transfer_encoding',
+ 'content_type',
+ 'contents',
+ 'contrast',
+ 'convert',
+ 'crop',
+ 'curveto',
+ 'data',
+ 'date',
+ 'day',
+ 'daylights',
+ 'dayofweek',
+ 'dayofyear',
+ 'decrement',
+ 'delete',
+ 'depth',
+ 'describe',
+ 'description',
+ 'deserialize',
+ 'detach',
+ 'detachreference',
+ 'difference',
+ 'digit',
+ 'document',
+ 'down',
+ 'drawtext',
+ 'dst',
+ 'dump',
+ 'endswith',
+ 'enhance',
+ 'equals',
+ 'errors',
+ 'eval',
+ 'events',
+ 'execute',
+ 'export16bits',
+ 'export32bits',
+ 'export64bits',
+ 'export8bits',
+ 'exportfdf',
+ 'exportstring',
+ 'extract',
+ 'extractone',
+ 'fieldnames',
+ 'fieldtype',
+ 'fieldvalue',
+ 'file',
+ 'find',
+ 'findindex',
+ 'findnamespace',
+ 'findnamespacebyhref',
+ 'findpattern',
+ 'findposition',
+ 'first',
+ 'firstchild',
+ 'fliph',
+ 'flipv',
+ 'flush',
+ 'foldcase',
+ 'foreach',
+ 'format',
+ 'forward',
+ 'freebusies',
+ 'freezetype',
+ 'freezevalue',
+ 'from',
+ 'fulltype',
+ 'generatechecksum',
+ 'get',
+ 'getabswidth',
+ 'getalignment',
+ 'getattribute',
+ 'getattributenamespace',
+ 'getbarheight',
+ 'getbarmultiplier',
+ 'getbarwidth',
+ 'getbaseline',
+ 'getbordercolor',
+ 'getborderwidth',
+ 'getcode',
+ 'getcolor',
+ 'getcolumncount',
+ 'getencoding',
+ 'getface',
+ 'getfont',
+ 'getformat',
+ 'getfullfontname',
+ 'getheaders',
+ 'getmargins',
+ 'getmethod',
+ 'getnumericvalue',
+ 'getpadding',
+ 'getpagenumber',
+ 'getparams',
+ 'getproperty',
+ 'getpsfontname',
+ 'getrange',
+ 'getrowcount',
+ 'getsize',
+ 'getspacing',
+ 'getsupportedencodings',
+ 'gettextalignment',
+ 'gettextsize',
+ 'gettype',
+ 'gmt',
+ 'groupcount',
+ 'hasattribute',
+ 'haschildren',
+ 'hasvalue',
+ 'header',
+ 'headers',
+ 'height',
+ 'histogram',
+ 'hosttonet16',
+ 'hosttonet32',
+ 'hour',
+ 'id',
+ 'ignorecase',
+ 'import16bits',
+ 'import32bits',
+ 'import64bits',
+ 'import8bits',
+ 'importfdf',
+ 'importstring',
+ 'increment',
+ 'input',
+ 'insert',
+ 'insertatcurrent',
+ 'insertfirst',
+ 'insertfrom',
+ 'insertlast',
+ 'insertpage',
+ 'integer',
+ 'intersection',
+ 'invoke',
+ 'isa',
+ 'isalnum',
+ 'isalpha',
+ 'isbase',
+ 'iscntrl',
+ 'isdigit',
+ 'isemptyelement',
+ 'islower',
+ 'isopen',
+ 'isprint',
+ 'isspace',
+ 'istitle',
+ 'istruetype',
+ 'isualphabetic',
+ 'isulowercase',
+ 'isupper',
+ 'isuuppercase',
+ 'isuwhitespace',
+ 'iswhitespace',
+ 'iterator',
+ 'javascript',
+ 'join',
+ 'journals',
+ 'key',
+ 'keys',
+ 'last',
+ 'lastchild',
+ 'lasterror',
+ 'left',
+ 'length',
+ 'line',
+ 'listen',
+ 'localaddress',
+ 'localname',
+ 'lock',
+ 'lookupnamespace',
+ 'lowercase',
+ 'marker',
+ 'matches',
+ 'matchesstart',
+ 'matchposition',
+ 'matchstring',
+ 'merge',
+ 'millisecond',
+ 'minute',
+ 'mode',
+ 'modulate',
+ 'month',
+ 'moveto',
+ 'movetoattributenamespace',
+ 'movetoelement',
+ 'movetofirstattribute',
+ 'movetonextattribute',
+ 'name',
+ 'namespaces',
+ 'namespaceuri',
+ 'nettohost16',
+ 'nettohost32',
+ 'newchild',
+ 'next',
+ 'nextsibling',
+ 'nodetype',
+ 'open',
+ 'output',
+ 'padleading',
+ 'padtrailing',
+ 'pagecount',
+ 'pagesize',
+ 'paraminfo',
+ 'params',
+ 'parent',
+ 'path',
+ 'pixel',
+ 'position',
+ 'prefix',
+ 'previoussibling',
+ 'properties',
+ 'rawheaders',
+ 'read',
+ 'readattributevalue',
+ 'readerror',
+ 'readfrom',
+ 'readline',
+ 'readlock',
+ 'readstring',
+ 'readunlock',
+ 'recipients',
+ 'rect',
+ 'refcount',
+ 'referrals',
+ 'remoteaddress',
+ 'remove',
+ 'removeall',
+ 'removeattribute',
+ 'removechild',
+ 'removecurrent',
+ 'removefirst',
+ 'removelast',
+ 'removeleading',
+ 'removenamespace',
+ 'removetrailing',
+ 'render',
+ 'replace',
+ 'replaceall',
+ 'replacefirst',
+ 'replacepattern',
+ 'replacewith',
+ 'reserve',
+ 'reset',
+ 'resolutionh',
+ 'resolutionv',
+ 'response',
+ 'results',
+ 'retrieve',
+ 'returntype',
+ 'reverse',
+ 'reverseiterator',
+ 'right',
+ 'rotate',
+ 'run',
+ 'save',
+ 'scale',
+ 'search',
+ 'second',
+ 'send',
+ 'serialize',
+ 'set',
+ 'setalignment',
+ 'setbarheight',
+ 'setbarmultiplier',
+ 'setbarwidth',
+ 'setbaseline',
+ 'setblocking',
+ 'setbordercolor',
+ 'setborderwidth',
+ 'setbytes',
+ 'setcode',
+ 'setcolor',
+ 'setcolorspace',
+ 'setdatatype',
+ 'setencoding',
+ 'setface',
+ 'setfieldvalue',
+ 'setfont',
+ 'setformat',
+ 'setgeneratechecksum',
+ 'setheight',
+ 'setlassodata',
+ 'setlinewidth',
+ 'setmarker',
+ 'setmode',
+ 'setname',
+ 'setpadding',
+ 'setpagenumber',
+ 'setpagerange',
+ 'setposition',
+ 'setproperty',
+ 'setrange',
+ 'setshowchecksum',
+ 'setsize',
+ 'setspacing',
+ 'settemplate',
+ 'settemplatestr',
+ 'settextalignment',
+ 'settextdata',
+ 'settextsize',
+ 'settype',
+ 'setunderline',
+ 'setwidth',
+ 'setxmldata',
+ 'sharpen',
+ 'showchecksum',
+ 'showcode39startstop',
+ 'showeanguardbars',
+ 'signal',
+ 'signalall',
+ 'size',
+ 'smooth',
+ 'sort',
+ 'sortwith',
+ 'split',
+ 'standards',
+ 'steal',
+ 'subject',
+ 'substring',
+ 'subtract',
+ 'swapbytes',
+ 'textwidth',
+ 'time',
+ 'timezones',
+ 'titlecase',
+ 'to',
+ 'todos',
+ 'tolower',
+ 'totitle',
+ 'toupper',
+ 'transform',
+ 'trim',
+ 'type',
+ 'unescape',
+ 'union',
+ 'uniqueid',
+ 'unlock',
+ 'unserialize',
+ 'up',
+ 'uppercase',
+ 'value',
+ 'values',
+ 'valuetype',
+ 'wait',
+ 'waskeyword',
+ 'week',
+ 'width',
+ 'write',
+ 'writelock',
+ 'writeto',
+ 'writeunlock',
+ 'xmllang',
+ 'xmlschematype',
+ 'year',
+ )
+}
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py
index ca3acb1c10..64753dad6d 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_lua_builtins.py
@@ -1,44 +1,44 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._lua_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names and modules of lua functions
- It is able to re-generate itself, but for adding new functions you
- probably have to add some callbacks (see function module_callbacks).
-
- Do not edit the MODULES dict by hand.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._lua_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names and modules of lua functions
+ It is able to re-generate itself, but for adding new functions you
+ probably have to add some callbacks (see function module_callbacks).
+
+ Do not edit the MODULES dict by hand.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-MODULES = {'basic': ('_G',
- '_VERSION',
- 'assert',
- 'collectgarbage',
- 'dofile',
- 'error',
- 'getmetatable',
- 'ipairs',
- 'load',
- 'loadfile',
- 'next',
- 'pairs',
- 'pcall',
- 'print',
- 'rawequal',
- 'rawget',
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+MODULES = {'basic': ('_G',
+ '_VERSION',
+ 'assert',
+ 'collectgarbage',
+ 'dofile',
+ 'error',
+ 'getmetatable',
+ 'ipairs',
+ 'load',
+ 'loadfile',
+ 'next',
+ 'pairs',
+ 'pcall',
+ 'print',
+ 'rawequal',
+ 'rawget',
'rawlen',
- 'rawset',
- 'select',
- 'setmetatable',
- 'tonumber',
- 'tostring',
- 'type',
- 'xpcall'),
+ 'rawset',
+ 'select',
+ 'setmetatable',
+ 'tonumber',
+ 'tostring',
+ 'type',
+ 'xpcall'),
'bit32': ('bit32.arshift',
'bit32.band',
'bit32.bnot',
@@ -51,119 +51,119 @@ MODULES = {'basic': ('_G',
'bit32.replace',
'bit32.rrotate',
'bit32.rshift'),
- 'coroutine': ('coroutine.create',
+ 'coroutine': ('coroutine.create',
'coroutine.isyieldable',
- 'coroutine.resume',
- 'coroutine.running',
- 'coroutine.status',
- 'coroutine.wrap',
- 'coroutine.yield'),
- 'debug': ('debug.debug',
- 'debug.gethook',
- 'debug.getinfo',
- 'debug.getlocal',
- 'debug.getmetatable',
- 'debug.getregistry',
- 'debug.getupvalue',
+ 'coroutine.resume',
+ 'coroutine.running',
+ 'coroutine.status',
+ 'coroutine.wrap',
+ 'coroutine.yield'),
+ 'debug': ('debug.debug',
+ 'debug.gethook',
+ 'debug.getinfo',
+ 'debug.getlocal',
+ 'debug.getmetatable',
+ 'debug.getregistry',
+ 'debug.getupvalue',
'debug.getuservalue',
- 'debug.sethook',
- 'debug.setlocal',
- 'debug.setmetatable',
- 'debug.setupvalue',
+ 'debug.sethook',
+ 'debug.setlocal',
+ 'debug.setmetatable',
+ 'debug.setupvalue',
'debug.setuservalue',
'debug.traceback',
'debug.upvalueid',
'debug.upvaluejoin'),
- 'io': ('io.close',
- 'io.flush',
- 'io.input',
- 'io.lines',
- 'io.open',
- 'io.output',
- 'io.popen',
- 'io.read',
+ 'io': ('io.close',
+ 'io.flush',
+ 'io.input',
+ 'io.lines',
+ 'io.open',
+ 'io.output',
+ 'io.popen',
+ 'io.read',
'io.stderr',
'io.stdin',
'io.stdout',
- 'io.tmpfile',
- 'io.type',
- 'io.write'),
- 'math': ('math.abs',
- 'math.acos',
- 'math.asin',
+ 'io.tmpfile',
+ 'io.type',
+ 'io.write'),
+ 'math': ('math.abs',
+ 'math.acos',
+ 'math.asin',
'math.atan',
- 'math.atan2',
- 'math.ceil',
+ 'math.atan2',
+ 'math.ceil',
'math.cos',
- 'math.cosh',
- 'math.deg',
- 'math.exp',
- 'math.floor',
- 'math.fmod',
- 'math.frexp',
- 'math.huge',
- 'math.ldexp',
- 'math.log',
- 'math.max',
+ 'math.cosh',
+ 'math.deg',
+ 'math.exp',
+ 'math.floor',
+ 'math.fmod',
+ 'math.frexp',
+ 'math.huge',
+ 'math.ldexp',
+ 'math.log',
+ 'math.max',
'math.maxinteger',
- 'math.min',
+ 'math.min',
'math.mininteger',
- 'math.modf',
- 'math.pi',
- 'math.pow',
- 'math.rad',
- 'math.random',
- 'math.randomseed',
+ 'math.modf',
+ 'math.pi',
+ 'math.pow',
+ 'math.rad',
+ 'math.random',
+ 'math.randomseed',
'math.sin',
- 'math.sinh',
- 'math.sqrt',
+ 'math.sinh',
+ 'math.sqrt',
'math.tan',
- 'math.tanh',
+ 'math.tanh',
'math.tointeger',
'math.type',
'math.ult'),
'modules': ('package.config',
- 'package.cpath',
- 'package.loaded',
- 'package.loadlib',
- 'package.path',
- 'package.preload',
+ 'package.cpath',
+ 'package.loaded',
+ 'package.loadlib',
+ 'package.path',
+ 'package.preload',
'package.searchers',
'package.searchpath',
'require'),
- 'os': ('os.clock',
- 'os.date',
- 'os.difftime',
- 'os.execute',
- 'os.exit',
- 'os.getenv',
- 'os.remove',
- 'os.rename',
- 'os.setlocale',
- 'os.time',
- 'os.tmpname'),
- 'string': ('string.byte',
- 'string.char',
- 'string.dump',
- 'string.find',
- 'string.format',
- 'string.gmatch',
- 'string.gsub',
- 'string.len',
- 'string.lower',
- 'string.match',
+ 'os': ('os.clock',
+ 'os.date',
+ 'os.difftime',
+ 'os.execute',
+ 'os.exit',
+ 'os.getenv',
+ 'os.remove',
+ 'os.rename',
+ 'os.setlocale',
+ 'os.time',
+ 'os.tmpname'),
+ 'string': ('string.byte',
+ 'string.char',
+ 'string.dump',
+ 'string.find',
+ 'string.format',
+ 'string.gmatch',
+ 'string.gsub',
+ 'string.len',
+ 'string.lower',
+ 'string.match',
'string.pack',
'string.packsize',
- 'string.rep',
- 'string.reverse',
- 'string.sub',
+ 'string.rep',
+ 'string.reverse',
+ 'string.sub',
'string.unpack',
- 'string.upper'),
- 'table': ('table.concat',
- 'table.insert',
+ 'string.upper'),
+ 'table': ('table.concat',
+ 'table.insert',
'table.move',
'table.pack',
- 'table.remove',
+ 'table.remove',
'table.sort',
'table.unpack'),
'utf8': ('utf8.char',
@@ -172,9 +172,9 @@ MODULES = {'basic': ('_G',
'utf8.codes',
'utf8.len',
'utf8.offset')}
-
-if __name__ == '__main__': # pragma: no cover
- import re
+
+if __name__ == '__main__': # pragma: no cover
+ import re
import sys
# urllib ends up wanting to import a module called 'math' -- if
@@ -183,96 +183,96 @@ if __name__ == '__main__': # pragma: no cover
if sys.path[i].endswith('/lexers'):
del sys.path[i]
- try:
- from urllib import urlopen
- except ImportError:
- from urllib.request import urlopen
- import pprint
-
- # you can't generally find out what module a function belongs to if you
- # have only its name. Because of this, here are some callback functions
-    # that recognize if a given function belongs to a specific module
- def module_callbacks():
- def is_in_coroutine_module(name):
- return name.startswith('coroutine.')
-
- def is_in_modules_module(name):
- if name in ['require', 'module'] or name.startswith('package'):
- return True
- else:
- return False
-
- def is_in_string_module(name):
- return name.startswith('string.')
-
- def is_in_table_module(name):
- return name.startswith('table.')
-
- def is_in_math_module(name):
- return name.startswith('math')
-
- def is_in_io_module(name):
- return name.startswith('io.')
-
- def is_in_os_module(name):
- return name.startswith('os.')
-
- def is_in_debug_module(name):
- return name.startswith('debug.')
-
- return {'coroutine': is_in_coroutine_module,
- 'modules': is_in_modules_module,
- 'string': is_in_string_module,
- 'table': is_in_table_module,
- 'math': is_in_math_module,
- 'io': is_in_io_module,
- 'os': is_in_os_module,
- 'debug': is_in_debug_module}
-
-
-
- def get_newest_version():
- f = urlopen('http://www.lua.org/manual/')
+ try:
+ from urllib import urlopen
+ except ImportError:
+ from urllib.request import urlopen
+ import pprint
+
+ # you can't generally find out what module a function belongs to if you
+ # have only its name. Because of this, here are some callback functions
+    # that recognize if a given function belongs to a specific module
+ def module_callbacks():
+ def is_in_coroutine_module(name):
+ return name.startswith('coroutine.')
+
+ def is_in_modules_module(name):
+ if name in ['require', 'module'] or name.startswith('package'):
+ return True
+ else:
+ return False
+
+ def is_in_string_module(name):
+ return name.startswith('string.')
+
+ def is_in_table_module(name):
+ return name.startswith('table.')
+
+ def is_in_math_module(name):
+ return name.startswith('math')
+
+ def is_in_io_module(name):
+ return name.startswith('io.')
+
+ def is_in_os_module(name):
+ return name.startswith('os.')
+
+ def is_in_debug_module(name):
+ return name.startswith('debug.')
+
+ return {'coroutine': is_in_coroutine_module,
+ 'modules': is_in_modules_module,
+ 'string': is_in_string_module,
+ 'table': is_in_table_module,
+ 'math': is_in_math_module,
+ 'io': is_in_io_module,
+ 'os': is_in_os_module,
+ 'debug': is_in_debug_module}
+
+
+
+ def get_newest_version():
+ f = urlopen('http://www.lua.org/manual/')
r = re.compile(r'^<A HREF="(\d\.\d)/">(Lua )?\1</A>')
- for line in f:
- m = r.match(line)
- if m is not None:
- return m.groups()[0]
-
- def get_lua_functions(version):
- f = urlopen('http://www.lua.org/manual/%s/' % version)
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ return m.groups()[0]
+
+ def get_lua_functions(version):
+ f = urlopen('http://www.lua.org/manual/%s/' % version)
r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
- functions = []
- for line in f:
- m = r.match(line)
- if m is not None:
- functions.append(m.groups()[0])
- return functions
-
- def get_function_module(name):
- for mod, cb in module_callbacks().items():
- if cb(name):
- return mod
- if '.' in name:
- return name.split('.')[0]
- else:
- return 'basic'
-
- def regenerate(filename, modules):
- with open(filename) as fp:
- content = fp.read()
-
- header = content[:content.find('MODULES = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
-
- with open(filename, 'w') as fp:
- fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
- fp.write(footer)
-
- def run():
- version = get_newest_version()
+ functions = []
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ functions.append(m.groups()[0])
+ return functions
+
+ def get_function_module(name):
+ for mod, cb in module_callbacks().items():
+ if cb(name):
+ return mod
+ if '.' in name:
+ return name.split('.')[0]
+ else:
+ return 'basic'
+
+ def regenerate(filename, modules):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('MODULES = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(footer)
+
+ def run():
+ version = get_newest_version()
functions = set()
for v in ('5.2', version):
print('> Downloading function index for Lua %s' % v)
@@ -280,16 +280,16 @@ if __name__ == '__main__': # pragma: no cover
print('> %d functions found, %d new:' %
(len(f), len(set(f) - functions)))
functions |= set(f)
-
+
functions = sorted(functions)
- modules = {}
- for full_function_name in functions:
- print('>> %s' % full_function_name)
- m = get_function_module(full_function_name)
- modules.setdefault(m, []).append(full_function_name)
+ modules = {}
+ for full_function_name in functions:
+ print('>> %s' % full_function_name)
+ m = get_function_module(full_function_name)
+ modules.setdefault(m, []).append(full_function_name)
modules = {k: tuple(v) for k, v in modules.iteritems()}
-
- regenerate(__file__, modules)
-
- run()
+
+ regenerate(__file__, modules)
+
+ run()
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_mapping.py b/contrib/python/Pygments/py2/pygments/lexers/_mapping.py
index acb71ad94b..8eb0577535 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_mapping.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_mapping.py
@@ -1,529 +1,529 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._mapping
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer mapping definitions. This file is generated by itself. Every time
- you change something on a builtin lexer definition, run this script from
- the lexers folder to update it.
-
- Do not alter the LEXERS dictionary by hand.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._mapping
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer mapping definitions. This file is generated by itself. Every time
+ you change something on a builtin lexer definition, run this script from
+ the lexers folder to update it.
+
+ Do not alter the LEXERS dictionary by hand.
+
:copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-LEXERS = {
- 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
- 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
- 'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
- 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
- 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
- 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
- 'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
- 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+LEXERS = {
+ 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
+ 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
+ 'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
+ 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
+ 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
+ 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
+ 'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
+ 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
- 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
- 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
+ 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
+ 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
- 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
- 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
- 'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
- 'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
- 'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
- 'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
- 'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
- 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
- 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
- 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
- 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
- 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
- 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
- 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
+ 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
+ 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
+ 'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
+ 'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
+ 'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
+ 'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
+ 'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
+ 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
+ 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
+ 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
+ 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
+ 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
+ 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
+ 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
- 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
- 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
- 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
+ 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
+ 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
+ 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
- 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
- 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
+ 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
+ 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
- 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
+ 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
- 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
- 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
- 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+ 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
+ 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
+ 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)),
- 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
- 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
- 'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
+ 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
+ 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
+ 'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
'BoaLexer': ('pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()),
- 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
+ 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
'BoogieLexer': ('pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()),
- 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
- 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
- 'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
- 'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
- 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
- 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
- 'CPSALexer': ('pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()),
- 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
- 'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
- 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
- 'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
+ 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
+ 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
+ 'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
+ 'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
+ 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
+ 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
+ 'CPSALexer': ('pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()),
+ 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
+ 'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
+ 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
+ 'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
'CapnProtoLexer': ('pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
- 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
- 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
- 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
- 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
- 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
+ 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
+ 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
+ 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
+ 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
+ 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
- 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
- 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
- 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
- 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
- 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
- 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
+ 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
+ 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
+ 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
+ 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
+ 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
+ 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
'CleanLexer': ('pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()),
- 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
- 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
- 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
- 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
- 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
- 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
- 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
- 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
- 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
- 'ComponentPascalLexer': ('pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)),
- 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
- 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
- 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
- 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
- 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
- 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
+ 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
+ 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
+ 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
+ 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
+ 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
+ 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
+ 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
+ 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
+ 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
+ 'ComponentPascalLexer': ('pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)),
+ 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
+ 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
+ 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
+ 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
+ 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
+ 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
- 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
+ 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
- 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
- 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
- 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
- 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
- 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
- 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
- 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
- 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
- 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
- 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
- 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
- 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
- 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
- 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
+ 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
+ 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
+ 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
+ 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
+ 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
+ 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
+ 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
+ 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
+ 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
+ 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
+ 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
+ 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
+ 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
+ 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
- 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
+ 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
- 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
- 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
- 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
- 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
- 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
- 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
- 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
- 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
- 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
- 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
- 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
- 'EarlGreyLexer': ('pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)),
- 'EasytrieveLexer': ('pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)),
- 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
- 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
- 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
- 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
- 'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
+ 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
+ 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
+ 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
+ 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
+ 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
+ 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
+ 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
+ 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
+ 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
+ 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
+ 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
+ 'EarlGreyLexer': ('pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)),
+ 'EasytrieveLexer': ('pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)),
+ 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
+ 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
+ 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
+ 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
+ 'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
- 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
- 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
- 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
- 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
- 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
- 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
- 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
+ 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
+ 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
+ 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
+ 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
+ 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
+ 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
+ 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
- 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
- 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
- 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
- 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
+ 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
+ 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
+ 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
+ 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
'FennelLexer': ('pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()),
- 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
+ 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
'FloScriptLexer': ('pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()),
'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
- 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
- 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
- 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
+ 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
+ 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
+ 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
'FreeFemLexer': ('pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)),
- 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
- 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
- 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
- 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
- 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
- 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
- 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
- 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
- 'GoLexer': ('pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
- 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
- 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
- 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
- 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
- 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
- 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
+ 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
+ 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
+ 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
+ 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
+ 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
+ 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
+ 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
+ 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
+ 'GoLexer': ('pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
+ 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
+ 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
+ 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
+ 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
+ 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
+ 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
- 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
- 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
- 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
- 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
- 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
- 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
+ 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
+ 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
+ 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
+ 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
+ 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
+ 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()),
- 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
- 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
- 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
- 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
- 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
- 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
- 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
- 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
- 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
- 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
+ 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
+ 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
+ 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
+ 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
+ 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
+ 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
+ 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
+ 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
+ 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+ 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
'IconLexer': ('pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
- 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
- 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
- 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
- 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
- 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
- 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf'), ('text/x-ini', 'text/inf')),
- 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
- 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
- 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
- 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
- 'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
- 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
- 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
- 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
- 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
- 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
- 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
- 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
- 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
- 'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
+ 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
+ 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
+ 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
+ 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
+ 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
+ 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf'), ('text/x-ini', 'text/inf')),
+ 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
+ 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
+ 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
+ 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
+ 'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
+ 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
+ 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
+ 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
+ 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
+ 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
+ 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
+ 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
+ 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
+ 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
+ 'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', ('json-object',), (), ('application/json-object',)),
- 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
- 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)),
- 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
- 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
- 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
+ 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
+ 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)),
+ 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
+ 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
+ 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle', 'juttle'), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
- 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
- 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
- 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
- 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
- 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
- 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
- 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
- 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
- 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
- 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
- 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
- 'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
- 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
- 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
- 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
- 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
- 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)),
- 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
- 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)),
- 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
- 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
- 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
- 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
- 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
+ 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
+ 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
+ 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
+ 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
+ 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
+ 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
+ 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
+ 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
+ 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
+ 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
+ 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
+ 'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
+ 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
+ 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
+ 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
+ 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
+ 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)),
+ 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
+ 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)),
+ 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
+ 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
+ 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
+ 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
+ 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
- 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
- 'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
- 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
- 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
- 'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
- 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
- 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
- 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
- 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+ 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
+ 'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
+ 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
+ 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
+ 'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
+ 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
+ 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
+ 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
+ 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
'MarkdownLexer': ('pygments.lexers.markup', 'markdown', ('md',), ('*.md',), ('text/x-markdown',)),
- 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
- 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
- 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
- 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
- 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
- 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
- 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
- 'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
- 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
- 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
+ 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
+ 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
+ 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
+ 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
+ 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
+ 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
+ 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
+ 'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
+ 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
+ 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
- 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
- 'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
- 'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
- 'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
- 'MozPreprocPercentLexer': ('pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
- 'MozPreprocXulLexer': ('pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
- 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
- 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
- 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
- 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
- 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
- 'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
- 'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
- 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
- 'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
- 'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
+ 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
+ 'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
+ 'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
+ 'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
+ 'MozPreprocPercentLexer': ('pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
+ 'MozPreprocXulLexer': ('pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
+ 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
+ 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
+ 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
+ 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
+ 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
+ 'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
+ 'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
+ 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
+ 'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
+ 'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
'NCLLexer': ('pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
- 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
- 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
- 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
- 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
- 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
+ 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
+ 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
+ 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
+ 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
+ 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
- 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
+ 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nim', 'nimrod'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
- 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
- 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
+ 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
+ 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
- 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
- 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
- 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
- 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
- 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
- 'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
- 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
- 'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
- 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
- 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
- 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
- 'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
- 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
- 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
- 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
- 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')),
- 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')),
- 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
- 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
- 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
- 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
- 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
+ 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
+ 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
+ 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
+ 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
+ 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
+ 'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
+ 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
+ 'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
+ 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
+ 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
+ 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+ 'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
+ 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
+ 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
+ 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
+ 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')),
+ 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')),
+ 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
+ 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
+ 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
+ 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
+ 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
'PonyLexer': ('pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
- 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
- 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
- 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
- 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
- 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
- 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()),
- 'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
- 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
- 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
- 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
+ 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
+ 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
+ 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
+ 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
+ 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
+ 'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()),
+ 'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
+ 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+ 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
+ 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
- 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
- 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
+ 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
+ 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
- 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
+ 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
- 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
- 'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
- 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
- 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
+ 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
+ 'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
+ 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
+ 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rnc', 'rng-compact'), ('*.rnc',), ()),
- 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
- 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
- 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
- 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
- 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
- 'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
- 'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
- 'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
- 'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
- 'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
- 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
- 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
- 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
- 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
- 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
- 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
+ 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
+ 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
+ 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
+ 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
+ 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
+ 'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
+ 'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
+ 'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
+ 'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
+ 'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
+ 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
+ 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
+ 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
+ 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
+ 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
+ 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resource', 'resourcebundle'), (), ()),
- 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
- 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
- 'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
- 'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
+ 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
+ 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
+ 'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
+ 'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
- 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
- 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
- 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
- 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()),
- 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
- 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
+ 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
+ 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
+ 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
+ 'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()),
+ 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
+ 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')),
'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust',)),
'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
- 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
- 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
+ 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
+ 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
'SarlLexer': ('pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)),
- 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
- 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
- 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
+ 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
+ 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
+ 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
- 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
- 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
- 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
+ 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
+ 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
+ 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
- 'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
+ 'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sl',), ()),
- 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
+ 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
'SlurmBashLexer': ('pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()),
- 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
- 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
+ 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
+ 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
- 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
- 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
+ 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
+ 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
- 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
- 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
- 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
- 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
- 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
- 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
- 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
- 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
+ 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
+ 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
+ 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
+ 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
+ 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
+ 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
+ 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
+ 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
- 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
- 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
- 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
- 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
- 'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
+ 'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
+ 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
+ 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
+ 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
+ 'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml',), ()),
- 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
+ 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
- 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
- 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
- 'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
- 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
+ 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
+ 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
+ 'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
+ 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('ttl', 'teraterm', 'teratermmacro'), ('*.ttl',), ('text/x-teratermmacro',)),
- 'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
- 'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
- 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
- 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
- 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
- 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
- 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
+ 'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
+ 'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
+ 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
+ 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
+ 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
+ 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
+ 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
- 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
- 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
- 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
- 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
+ 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
+ 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
+ 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
+ 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)),
'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
- 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
+ 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
'VBScriptLexer': ('pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()),
'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
- 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
- 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
- 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
- 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
- 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
- 'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
- 'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
- 'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
- 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
- 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
- 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
+ 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
+ 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
+ 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
+ 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
+ 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
+ 'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
+ 'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
+ 'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
+ 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
+ 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
+ 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
- 'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
- 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
- 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
- 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
- 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
- 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
- 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
+ 'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
+ 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
+ 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
+ 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
+ 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
+ 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
+ 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
'XorgLexer': ('pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()),
- 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
- 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
+ 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
+ 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
- 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
- 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
+ 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
+ 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
- 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
+ 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
-}
-
-if __name__ == '__main__': # pragma: no cover
- import sys
- import os
-
- # lookup lexers
- found_lexers = []
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
- for root, dirs, files in os.walk('.'):
- for filename in files:
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.lexers%s.%s' % (
- root[1:].replace('/', '.'), filename[:-3])
- print(module_name)
- module = __import__(module_name, None, None, [''])
- for lexer_name in module.__all__:
- lexer = getattr(module, lexer_name)
- found_lexers.append(
- '%r: %r' % (lexer_name,
- (module_name,
- lexer.name,
- tuple(lexer.aliases),
- tuple(lexer.filenames),
- tuple(lexer.mimetypes))))
- # sort them to make the diff minimal
- found_lexers.sort()
-
- # extract useful sourcecode from this file
- with open(__file__) as fp:
- content = fp.read()
- # Replace CRLF line endings with LF (for checkouts made on Windows).
- #
- # Note that contributors should normally keep the LF line endings of the
- # master repository, for example by using some kind of automatic EOL
- # management, like the `EolExtension
- # <https://www.mercurial-scm.org/wiki/EolExtension>`.
- content = content.replace("\r\n", "\n")
- header = content[:content.find('LEXERS = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- # write new file
+}
+
+if __name__ == '__main__': # pragma: no cover
+ import sys
+ import os
+
+ # lookup lexers
+ found_lexers = []
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.lexers%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for lexer_name in module.__all__:
+ lexer = getattr(module, lexer_name)
+ found_lexers.append(
+ '%r: %r' % (lexer_name,
+ (module_name,
+ lexer.name,
+ tuple(lexer.aliases),
+ tuple(lexer.filenames),
+ tuple(lexer.mimetypes))))
+ # sort them to make the diff minimal
+ found_lexers.sort()
+
+ # extract useful sourcecode from this file
+ with open(__file__) as fp:
+ content = fp.read()
+ # Replace CRLF line endings with LF (for checkouts made on Windows).
+ #
+ # Note that contributors should normally keep the LF line endings of the
+ # master repository, for example by using some kind of automatic EOL
+ # management, like the `EolExtension
+ # <https://www.mercurial-scm.org/wiki/EolExtension>`.
+ content = content.replace("\r\n", "\n")
+ header = content[:content.find('LEXERS = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ # write new file
with open(__file__, 'w') as fp:
- fp.write(header)
- fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
- fp.write(footer)
-
- print ('=== %d lexers processed.' % len(found_lexers))
+ fp.write(header)
+ fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
+ fp.write(footer)
+
+ print ('=== %d lexers processed.' % len(found_lexers))
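
Usage note: the LEXERS table restored in the hunk above maps each lexer class
name to a (module path, display name, aliases, filename globs, mimetypes)
tuple, and the trailing __main__ block regenerates the table by walking the
lexer modules. A minimal sketch of how that table is consumed through the
public pygments.lexers helpers (assumes an installed Pygments; the sample
alias and filename are taken from entries in the hunk above, everything else
is illustrative only, not part of this commit):

    # Resolve lexers against the LEXERS mapping via the public helpers.
    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    lexer = get_lexer_by_name('mysql')         # found via the MySqlLexer entry's aliases
    print(lexer.name, lexer.aliases, lexer.mimetypes)

    lexer = get_lexer_for_filename('main.rs')  # matched via RustLexer's '*.rs' glob
    print(lexer.name)
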
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py
index e59fd910d1..e2d293ad68 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_mql_builtins.py
@@ -1,1172 +1,1172 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._mql_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtins for the MqlLexer.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._mql_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtins for the MqlLexer.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-types = (
- 'AccountBalance',
- 'AccountCompany',
- 'AccountCredit',
- 'AccountCurrency',
- 'AccountEquity',
- 'AccountFreeMarginCheck',
- 'AccountFreeMarginMode',
- 'AccountFreeMargin',
- 'AccountInfoDouble',
- 'AccountInfoInteger',
- 'AccountInfoString',
- 'AccountLeverage',
- 'AccountMargin',
- 'AccountName',
- 'AccountNumber',
- 'AccountProfit',
- 'AccountServer',
- 'AccountStopoutLevel',
- 'AccountStopoutMode',
- 'Alert',
- 'ArrayBsearch',
- 'ArrayCompare',
- 'ArrayCopyRates',
- 'ArrayCopySeries',
- 'ArrayCopy',
- 'ArrayDimension',
- 'ArrayFill',
- 'ArrayFree',
- 'ArrayGetAsSeries',
- 'ArrayInitialize',
- 'ArrayIsDynamic',
- 'ArrayIsSeries',
- 'ArrayMaximum',
- 'ArrayMinimum',
- 'ArrayRange',
- 'ArrayResize',
- 'ArraySetAsSeries',
- 'ArraySize',
- 'ArraySort',
- 'CharArrayToString',
- 'CharToString',
- 'CharToStr',
- 'CheckPointer',
- 'ColorToARGB',
- 'ColorToString',
- 'Comment',
- 'CopyClose',
- 'CopyHigh',
- 'CopyLow',
- 'CopyOpen',
- 'CopyRates',
- 'CopyRealVolume',
- 'CopySpread',
- 'CopyTickVolume',
- 'CopyTime',
- 'DayOfWeek',
- 'DayOfYear',
- 'Day',
- 'DebugBreak',
- 'Digits',
- 'DoubleToString',
- 'DoubleToStr',
- 'EnumToString',
- 'EventChartCustom',
- 'EventKillTimer',
- 'EventSetMillisecondTimer',
- 'EventSetTimer',
- 'ExpertRemove',
- 'FileClose',
- 'FileCopy',
- 'FileDelete',
- 'FileFindClose',
- 'FileFindFirst',
- 'FileFindNext',
- 'FileFlush',
- 'FileGetInteger',
- 'FileIsEnding',
- 'FileIsExist',
- 'FileIsLineEnding',
- 'FileMove',
- 'FileOpenHistory',
- 'FileOpen',
- 'FileReadArray',
- 'FileReadBool',
- 'FileReadDatetime',
- 'FileReadDouble',
- 'FileReadFloat',
- 'FileReadInteger',
- 'FileReadLong',
- 'FileReadNumber',
- 'FileReadString',
- 'FileReadStruct',
- 'FileSeek',
- 'FileSize',
- 'FileTell',
- 'FileWriteArray',
- 'FileWriteDouble',
- 'FileWriteFloat',
- 'FileWriteInteger',
- 'FileWriteLong',
- 'FileWriteString',
- 'FileWriteStruct',
- 'FileWrite',
- 'FolderClean',
- 'FolderCreate',
- 'FolderDelete',
- 'GetLastError',
- 'GetPointer',
- 'GetTickCount',
- 'GlobalVariableCheck',
- 'GlobalVariableDel',
- 'GlobalVariableGet',
- 'GlobalVariableName',
- 'GlobalVariableSetOnCondition',
- 'GlobalVariableSet',
- 'GlobalVariableTemp',
- 'GlobalVariableTime',
- 'GlobalVariablesDeleteAll',
- 'GlobalVariablesFlush',
- 'GlobalVariablesTotal',
- 'HideTestIndicators',
- 'Hour',
- 'IndicatorBuffers',
- 'IndicatorCounted',
- 'IndicatorDigits',
- 'IndicatorSetDouble',
- 'IndicatorSetInteger',
- 'IndicatorSetString',
- 'IndicatorShortName',
- 'IntegerToString',
- 'IsConnected',
- 'IsDemo',
- 'IsDllsAllowed',
- 'IsExpertEnabled',
- 'IsLibrariesAllowed',
- 'IsOptimization',
- 'IsStopped',
- 'IsTesting',
- 'IsTradeAllowed',
- 'IsTradeContextBusy',
- 'IsVisualMode',
- 'MQLInfoInteger',
- 'MQLInfoString',
- 'MarketInfo',
- 'MathAbs',
- 'MathArccos',
- 'MathArcsin',
- 'MathArctan',
- 'MathCeil',
- 'MathCos',
- 'MathExp',
- 'MathFloor',
- 'MathIsValidNumber',
- 'MathLog',
- 'MathMax',
- 'MathMin',
- 'MathMod',
- 'MathPow',
- 'MathRand',
- 'MathRound',
- 'MathSin',
- 'MathSqrt',
- 'MathSrand',
- 'MathTan',
- 'MessageBox',
- 'Minute',
- 'Month',
- 'NormalizeDouble',
- 'ObjectCreate',
- 'ObjectDelete',
- 'ObjectDescription',
- 'ObjectFind',
- 'ObjectGetDouble',
- 'ObjectGetFiboDescription',
- 'ObjectGetInteger',
- 'ObjectGetShiftByValue',
- 'ObjectGetString',
- 'ObjectGetTimeByValue',
- 'ObjectGetValueByShift',
- 'ObjectGetValueByTime',
- 'ObjectGet',
- 'ObjectMove',
- 'ObjectName',
- 'ObjectSetDouble',
- 'ObjectSetFiboDescription',
- 'ObjectSetInteger',
- 'ObjectSetString',
- 'ObjectSetText',
- 'ObjectSet',
- 'ObjectType',
- 'ObjectsDeleteAll',
- 'ObjectsTotal',
- 'OrderCloseBy',
- 'OrderClosePrice',
- 'OrderCloseTime',
- 'OrderClose',
- 'OrderComment',
- 'OrderCommission',
- 'OrderDelete',
- 'OrderExpiration',
- 'OrderLots',
- 'OrderMagicNumber',
- 'OrderModify',
- 'OrderOpenPrice',
- 'OrderOpenTime',
- 'OrderPrint',
- 'OrderProfit',
- 'OrderSelect',
- 'OrderSend',
- 'OrderStopLoss',
- 'OrderSwap',
- 'OrderSymbol',
- 'OrderTakeProfit',
- 'OrderTicket',
- 'OrderType',
- 'OrdersHistoryTotal',
- 'OrdersTotal',
- 'PeriodSeconds',
- 'Period',
- 'PlaySound',
- 'Point',
- 'PrintFormat',
- 'Print',
- 'RefreshRates',
- 'ResetLastError',
- 'ResourceCreate',
- 'ResourceFree',
- 'ResourceReadImage',
- 'ResourceSave',
- 'Seconds',
- 'SendFTP',
- 'SendMail',
- 'SendNotification',
- 'SeriesInfoInteger',
- 'SetIndexArrow',
- 'SetIndexBuffer',
- 'SetIndexDrawBegin',
- 'SetIndexEmptyValue',
- 'SetIndexLabel',
- 'SetIndexShift',
- 'SetIndexStyle',
- 'SetLevelStyle',
- 'SetLevelValue',
- 'ShortArrayToString',
- 'ShortToString',
- 'Sleep',
- 'StrToDouble',
- 'StrToInteger',
- 'StrToTime',
- 'StringAdd',
- 'StringBufferLen',
- 'StringCompare',
- 'StringConcatenate',
- 'StringFill',
- 'StringFind',
- 'StringFormat',
- 'StringGetCharacter',
- 'StringGetChar',
- 'StringInit',
- 'StringLen',
- 'StringReplace',
- 'StringSetCharacter',
- 'StringSetChar',
- 'StringSplit',
- 'StringSubstr',
- 'StringToCharArray',
- 'StringToColor',
- 'StringToDouble',
- 'StringToInteger',
- 'StringToLower',
- 'StringToShortArray',
- 'StringToTime',
- 'StringToUpper',
- 'StringTrimLeft',
- 'StringTrimRight',
- 'StructToTime',
- 'SymbolInfoDouble',
- 'SymbolInfoInteger',
- 'SymbolInfoSessionQuote',
- 'SymbolInfoSessionTrade',
- 'SymbolInfoString',
- 'SymbolInfoTick',
- 'SymbolIsSynchronized',
- 'SymbolName',
- 'SymbolSelect',
- 'SymbolsTotal',
- 'Symbol',
- 'TerminalClose',
- 'TerminalCompany',
- 'TerminalName',
- 'TerminalPath',
- 'TesterStatistics',
- 'TextGetSize',
- 'TextOut',
- 'TextSetFont',
- 'TimeCurrent',
- 'TimeDayOfWeek',
- 'TimeDayOfYear',
- 'TimeDaylightSavings',
- 'TimeDay',
- 'TimeGMTOffset',
- 'TimeGMT',
- 'TimeHour',
- 'TimeLocal',
- 'TimeMinute',
- 'TimeMonth',
- 'TimeSeconds',
- 'TimeToString',
- 'TimeToStruct',
- 'TimeToStr',
- 'TimeTradeServer',
- 'TimeYear',
- 'UninitializeReason',
- 'WindowBarsPerChart',
- 'WindowExpertName',
- 'WindowFind',
- 'WindowFirstVisibleBar',
- 'WindowHandle',
- 'WindowIsVisible',
- 'WindowOnDropped',
- 'WindowPriceMax',
- 'WindowPriceMin',
- 'WindowPriceOnDropped',
- 'WindowRedraw',
- 'WindowScreenShot',
- 'WindowTimeOnDropped',
- 'WindowXOnDropped',
- 'WindowYOnDropped',
- 'WindowsTotal',
- 'Year',
- 'ZeroMemory',
- 'iAC',
- 'iADX',
- 'iAD',
- 'iAO',
- 'iATR',
- 'iAlligator',
- 'iBWMFI',
- 'iBandsOnArray',
- 'iBands',
- 'iBarShift',
- 'iBars',
- 'iBearsPower',
- 'iBullsPower',
- 'iCCIOnArray',
- 'iCCI',
- 'iClose',
- 'iCustom',
- 'iDeMarker',
- 'iEnvelopesOnArray',
- 'iEnvelopes',
- 'iForce',
- 'iFractals',
- 'iGator',
- 'iHighest',
- 'iHigh',
- 'iIchimoku',
- 'iLowest',
- 'iLow',
- 'iMACD',
- 'iMAOnArray',
- 'iMA',
- 'iMFI',
- 'iMomentumOnArray',
- 'iMomentum',
- 'iOBV',
- 'iOpen',
- 'iOsMA',
- 'iRSIOnArray',
- 'iRSI',
- 'iRVI',
- 'iSAR',
- 'iStdDevOnArray',
- 'iStdDev',
- 'iStochastic',
- 'iTime',
- 'iVolume',
- 'iWPR',
-)
-
-constants = (
- 'ACCOUNT_BALANCE',
- 'ACCOUNT_COMPANY',
- 'ACCOUNT_CREDIT',
- 'ACCOUNT_CURRENCY',
- 'ACCOUNT_EQUITY',
- 'ACCOUNT_FREEMARGIN',
- 'ACCOUNT_LEVERAGE',
- 'ACCOUNT_LIMIT_ORDERS',
- 'ACCOUNT_LOGIN',
- 'ACCOUNT_MARGIN',
- 'ACCOUNT_MARGIN_LEVEL',
- 'ACCOUNT_MARGIN_SO_CALL',
- 'ACCOUNT_MARGIN_SO_MODE',
- 'ACCOUNT_MARGIN_SO_SO',
- 'ACCOUNT_NAME',
- 'ACCOUNT_PROFIT',
- 'ACCOUNT_SERVER',
- 'ACCOUNT_STOPOUT_MODE_MONEY',
- 'ACCOUNT_STOPOUT_MODE_PERCENT',
- 'ACCOUNT_TRADE_ALLOWED',
- 'ACCOUNT_TRADE_EXPERT',
- 'ACCOUNT_TRADE_MODE',
- 'ACCOUNT_TRADE_MODE_CONTEST',
- 'ACCOUNT_TRADE_MODE_DEMO',
- 'ACCOUNT_TRADE_MODE_REAL',
- 'ALIGN_CENTER',
- 'ALIGN_LEFT',
- 'ALIGN_RIGHT',
- 'ANCHOR_BOTTOM',
- 'ANCHOR_CENTER',
- 'ANCHOR_LEFT',
- 'ANCHOR_LEFT_LOWER',
- 'ANCHOR_LEFT_UPPER',
- 'ANCHOR_LOWER',
- 'ANCHOR_RIGHT',
- 'ANCHOR_RIGHT_LOWER',
- 'ANCHOR_RIGHT_UPPER',
- 'ANCHOR_TOP',
- 'ANCHOR_UPPER',
- 'BORDER_FLAT',
- 'BORDER_RAISED',
- 'BORDER_SUNKEN',
- 'CHARTEVENT_CHART_CHANGE',
- 'CHARTEVENT_CLICK',
- 'CHARTEVENT_CUSTOM',
- 'CHARTEVENT_CUSTOM_LAST',
- 'CHARTEVENT_KEYDOWN',
- 'CHARTEVENT_MOUSE_MOVE',
- 'CHARTEVENT_OBJECT_CHANGE',
- 'CHARTEVENT_OBJECT_CLICK',
- 'CHARTEVENT_OBJECT_CREATE',
- 'CHARTEVENT_OBJECT_DELETE',
- 'CHARTEVENT_OBJECT_DRAG',
- 'CHARTEVENT_OBJECT_ENDEDIT',
- 'CHARTS_MAX',
- 'CHART_AUTOSCROLL',
- 'CHART_BARS',
- 'CHART_BEGIN',
- 'CHART_BRING_TO_TOP',
- 'CHART_CANDLES',
- 'CHART_COLOR_ASK',
- 'CHART_COLOR_BACKGROUND',
- 'CHART_COLOR_BID',
- 'CHART_COLOR_CANDLE_BEAR',
- 'CHART_COLOR_CANDLE_BULL',
- 'CHART_COLOR_CHART_DOWN',
- 'CHART_COLOR_CHART_LINE',
- 'CHART_COLOR_CHART_UP',
- 'CHART_COLOR_FOREGROUND',
- 'CHART_COLOR_GRID',
- 'CHART_COLOR_LAST',
- 'CHART_COLOR_STOP_LEVEL',
- 'CHART_COLOR_VOLUME',
- 'CHART_COMMENT',
- 'CHART_CURRENT_POS',
- 'CHART_DRAG_TRADE_LEVELS',
- 'CHART_END',
- 'CHART_EVENT_MOUSE_MOVE',
- 'CHART_EVENT_OBJECT_CREATE',
- 'CHART_EVENT_OBJECT_DELETE',
- 'CHART_FIRST_VISIBLE_BAR',
- 'CHART_FIXED_MAX',
- 'CHART_FIXED_MIN',
- 'CHART_FIXED_POSITION',
- 'CHART_FOREGROUND',
- 'CHART_HEIGHT_IN_PIXELS',
- 'CHART_IS_OBJECT',
- 'CHART_LINE',
- 'CHART_MODE',
- 'CHART_MOUSE_SCROLL',
- 'CHART_POINTS_PER_BAR',
- 'CHART_PRICE_MAX',
- 'CHART_PRICE_MIN',
- 'CHART_SCALEFIX',
- 'CHART_SCALEFIX_11',
- 'CHART_SCALE',
- 'CHART_SCALE_PT_PER_BAR',
- 'CHART_SHIFT',
- 'CHART_SHIFT_SIZE',
- 'CHART_SHOW_ASK_LINE',
- 'CHART_SHOW_BID_LINE',
- 'CHART_SHOW_DATE_SCALE',
- 'CHART_SHOW_GRID',
- 'CHART_SHOW_LAST_LINE',
- 'CHART_SHOW_OBJECT_DESCR',
- 'CHART_SHOW_OHLC',
- 'CHART_SHOW_PERIOD_SEP',
- 'CHART_SHOW_PRICE_SCALE',
- 'CHART_SHOW_TRADE_LEVELS',
- 'CHART_SHOW_VOLUMES',
- 'CHART_VISIBLE_BARS',
- 'CHART_VOLUME_HIDE',
- 'CHART_VOLUME_REAL',
- 'CHART_VOLUME_TICK',
- 'CHART_WIDTH_IN_BARS',
- 'CHART_WIDTH_IN_PIXELS',
- 'CHART_WINDOWS_TOTAL',
- 'CHART_WINDOW_HANDLE',
- 'CHART_WINDOW_IS_VISIBLE',
- 'CHART_WINDOW_YDISTANCE',
- 'CHAR_MAX',
- 'CHAR_MIN',
- 'CLR_NONE',
- 'CORNER_LEFT_LOWER',
- 'CORNER_LEFT_UPPER',
- 'CORNER_RIGHT_LOWER',
- 'CORNER_RIGHT_UPPER',
- 'CP_ACP',
- 'CP_MACCP',
- 'CP_OEMCP',
- 'CP_SYMBOL',
- 'CP_THREAD_ACP',
- 'CP_UTF7',
- 'CP_UTF8',
- 'DBL_DIG',
- 'DBL_EPSILON',
- 'DBL_MANT_DIG',
- 'DBL_MAX',
- 'DBL_MAX_10_EXP',
- 'DBL_MAX_EXP',
- 'DBL_MIN',
- 'DBL_MIN_10_EXP',
- 'DBL_MIN_EXP',
- 'DRAW_ARROW',
- 'DRAW_FILLING',
- 'DRAW_HISTOGRAM',
- 'DRAW_LINE',
- 'DRAW_NONE',
- 'DRAW_SECTION',
- 'DRAW_ZIGZAG',
- 'EMPTY',
- 'EMPTY_VALUE',
- 'ERR_ACCOUNT_DISABLED',
- 'ERR_BROKER_BUSY',
- 'ERR_COMMON_ERROR',
- 'ERR_INVALID_ACCOUNT',
- 'ERR_INVALID_PRICE',
- 'ERR_INVALID_STOPS',
- 'ERR_INVALID_TRADE_PARAMETERS',
- 'ERR_INVALID_TRADE_VOLUME',
- 'ERR_LONG_POSITIONS_ONLY_ALLOWED',
- 'ERR_MALFUNCTIONAL_TRADE',
- 'ERR_MARKET_CLOSED',
- 'ERR_NOT_ENOUGH_MONEY',
- 'ERR_NOT_ENOUGH_RIGHTS',
- 'ERR_NO_CONNECTION',
- 'ERR_NO_ERROR',
- 'ERR_NO_RESULT',
- 'ERR_OFF_QUOTES',
- 'ERR_OLD_VERSION',
- 'ERR_ORDER_LOCKED',
- 'ERR_PRICE_CHANGED',
- 'ERR_REQUOTE',
- 'ERR_SERVER_BUSY',
- 'ERR_TOO_FREQUENT_REQUESTS',
- 'ERR_TOO_MANY_REQUESTS',
- 'ERR_TRADE_CONTEXT_BUSY',
- 'ERR_TRADE_DISABLED',
- 'ERR_TRADE_EXPIRATION_DENIED',
- 'ERR_TRADE_HEDGE_PROHIBITED',
- 'ERR_TRADE_MODIFY_DENIED',
- 'ERR_TRADE_PROHIBITED_BY_FIFO',
- 'ERR_TRADE_TIMEOUT',
- 'ERR_TRADE_TOO_MANY_ORDERS',
- 'FILE_ACCESS_DATE',
- 'FILE_ANSI',
- 'FILE_BIN',
- 'FILE_COMMON',
- 'FILE_CREATE_DATE',
- 'FILE_CSV',
- 'FILE_END',
- 'FILE_EXISTS',
- 'FILE_IS_ANSI',
- 'FILE_IS_BINARY',
- 'FILE_IS_COMMON',
- 'FILE_IS_CSV',
- 'FILE_IS_READABLE',
- 'FILE_IS_TEXT',
- 'FILE_IS_WRITABLE',
- 'FILE_LINE_END',
- 'FILE_MODIFY_DATE',
- 'FILE_POSITION',
- 'FILE_READ',
- 'FILE_REWRITE',
- 'FILE_SHARE_READ',
- 'FILE_SHARE_WRITE',
- 'FILE_SIZE',
- 'FILE_TXT',
- 'FILE_UNICODE',
- 'FILE_WRITE',
- 'FLT_DIG',
- 'FLT_EPSILON',
- 'FLT_MANT_DIG',
- 'FLT_MAX',
- 'FLT_MAX_10_EXP',
- 'FLT_MAX_EXP',
- 'FLT_MIN',
- 'FLT_MIN_10_EXP',
- 'FLT_MIN_EXP',
- 'FRIDAY',
- 'GANN_DOWN_TREND',
- 'GANN_UP_TREND',
- 'IDABORT',
- 'IDCANCEL',
- 'IDCONTINUE',
- 'IDIGNORE',
- 'IDNO',
- 'IDOK',
- 'IDRETRY',
- 'IDTRYAGAIN',
- 'IDYES',
- 'INDICATOR_CALCULATIONS',
- 'INDICATOR_COLOR_INDEX',
- 'INDICATOR_DATA',
- 'INDICATOR_DIGITS',
- 'INDICATOR_HEIGHT',
- 'INDICATOR_LEVELCOLOR',
- 'INDICATOR_LEVELSTYLE',
- 'INDICATOR_LEVELS',
- 'INDICATOR_LEVELTEXT',
- 'INDICATOR_LEVELVALUE',
- 'INDICATOR_LEVELWIDTH',
- 'INDICATOR_MAXIMUM',
- 'INDICATOR_MINIMUM',
- 'INDICATOR_SHORTNAME',
- 'INT_MAX',
- 'INT_MIN',
- 'INVALID_HANDLE',
- 'IS_DEBUG_MODE',
- 'IS_PROFILE_MODE',
- 'LICENSE_DEMO',
- 'LICENSE_FREE',
- 'LICENSE_FULL',
- 'LICENSE_TIME',
- 'LONG_MAX',
- 'LONG_MIN',
- 'MB_ABORTRETRYIGNORE',
- 'MB_CANCELTRYCONTINUE',
- 'MB_DEFBUTTON1',
- 'MB_DEFBUTTON2',
- 'MB_DEFBUTTON3',
- 'MB_DEFBUTTON4',
- 'MB_ICONASTERISK',
- 'MB_ICONERROR',
- 'MB_ICONEXCLAMATION',
- 'MB_ICONHAND',
- 'MB_ICONINFORMATION',
- 'MB_ICONQUESTION',
- 'MB_ICONSTOP',
- 'MB_ICONWARNING',
- 'MB_OKCANCEL',
- 'MB_OK',
- 'MB_RETRYCANCEL',
- 'MB_YESNOCANCEL',
- 'MB_YESNO',
- 'MODE_ASK',
- 'MODE_BID',
- 'MODE_CHINKOUSPAN',
- 'MODE_CLOSE',
- 'MODE_DIGITS',
- 'MODE_EMA',
- 'MODE_EXPIRATION',
- 'MODE_FREEZELEVEL',
- 'MODE_GATORJAW',
- 'MODE_GATORLIPS',
- 'MODE_GATORTEETH',
- 'MODE_HIGH',
- 'MODE_KIJUNSEN',
- 'MODE_LOTSIZE',
- 'MODE_LOTSTEP',
- 'MODE_LOWER',
- 'MODE_LOW',
- 'MODE_LWMA',
- 'MODE_MAIN',
- 'MODE_MARGINCALCMODE',
- 'MODE_MARGINHEDGED',
- 'MODE_MARGININIT',
- 'MODE_MARGINMAINTENANCE',
- 'MODE_MARGINREQUIRED',
- 'MODE_MAXLOT',
- 'MODE_MINLOT',
- 'MODE_MINUSDI',
- 'MODE_OPEN',
- 'MODE_PLUSDI',
- 'MODE_POINT',
- 'MODE_PROFITCALCMODE',
- 'MODE_SENKOUSPANA',
- 'MODE_SENKOUSPANB',
- 'MODE_SIGNAL',
- 'MODE_SMA',
- 'MODE_SMMA',
- 'MODE_SPREAD',
- 'MODE_STARTING',
- 'MODE_STOPLEVEL',
- 'MODE_SWAPLONG',
- 'MODE_SWAPSHORT',
- 'MODE_SWAPTYPE',
- 'MODE_TENKANSEN',
- 'MODE_TICKSIZE',
- 'MODE_TICKVALUE',
- 'MODE_TIME',
- 'MODE_TRADEALLOWED',
- 'MODE_UPPER',
- 'MODE_VOLUME',
- 'MONDAY',
- 'MQL_DEBUG',
- 'MQL_DLLS_ALLOWED',
- 'MQL_FRAME_MODE',
- 'MQL_LICENSE_TYPE',
- 'MQL_OPTIMIZATION',
- 'MQL_PROFILER',
- 'MQL_PROGRAM_NAME',
- 'MQL_PROGRAM_PATH',
- 'MQL_PROGRAM_TYPE',
- 'MQL_TESTER',
- 'MQL_TRADE_ALLOWED',
- 'MQL_VISUAL_MODE',
- 'M_1_PI',
- 'M_2_PI',
- 'M_2_SQRTPI',
- 'M_E',
- 'M_LN2',
- 'M_LN10',
- 'M_LOG2E',
- 'M_LOG10E',
- 'M_PI',
- 'M_PI_2',
- 'M_PI_4',
- 'M_SQRT1_2',
- 'M_SQRT2',
- 'NULL',
- 'OBJPROP_ALIGN',
- 'OBJPROP_ANCHOR',
- 'OBJPROP_ANGLE',
- 'OBJPROP_ARROWCODE',
- 'OBJPROP_BACK',
- 'OBJPROP_BGCOLOR',
- 'OBJPROP_BMPFILE',
- 'OBJPROP_BORDER_COLOR',
- 'OBJPROP_BORDER_TYPE',
- 'OBJPROP_CHART_ID',
- 'OBJPROP_CHART_SCALE',
- 'OBJPROP_COLOR',
- 'OBJPROP_CORNER',
- 'OBJPROP_CREATETIME',
- 'OBJPROP_DATE_SCALE',
- 'OBJPROP_DEVIATION',
- 'OBJPROP_DRAWLINES',
- 'OBJPROP_ELLIPSE',
- 'OBJPROP_FIBOLEVELS',
- 'OBJPROP_FILL',
- 'OBJPROP_FIRSTLEVEL',
- 'OBJPROP_FONTSIZE',
- 'OBJPROP_FONT',
- 'OBJPROP_HIDDEN',
- 'OBJPROP_LEVELCOLOR',
- 'OBJPROP_LEVELSTYLE',
- 'OBJPROP_LEVELS',
- 'OBJPROP_LEVELTEXT',
- 'OBJPROP_LEVELVALUE',
- 'OBJPROP_LEVELWIDTH',
- 'OBJPROP_NAME',
- 'OBJPROP_PERIOD',
- 'OBJPROP_PRICE1',
- 'OBJPROP_PRICE2',
- 'OBJPROP_PRICE3',
- 'OBJPROP_PRICE',
- 'OBJPROP_PRICE_SCALE',
- 'OBJPROP_RAY',
- 'OBJPROP_RAY_RIGHT',
- 'OBJPROP_READONLY',
- 'OBJPROP_SCALE',
- 'OBJPROP_SELECTABLE',
- 'OBJPROP_SELECTED',
- 'OBJPROP_STATE',
- 'OBJPROP_STYLE',
- 'OBJPROP_SYMBOL',
- 'OBJPROP_TEXT',
- 'OBJPROP_TIME1',
- 'OBJPROP_TIME2',
- 'OBJPROP_TIME3',
- 'OBJPROP_TIMEFRAMES',
- 'OBJPROP_TIME',
- 'OBJPROP_TOOLTIP',
- 'OBJPROP_TYPE',
- 'OBJPROP_WIDTH',
- 'OBJPROP_XDISTANCE',
- 'OBJPROP_XOFFSET',
- 'OBJPROP_XSIZE',
- 'OBJPROP_YDISTANCE',
- 'OBJPROP_YOFFSET',
- 'OBJPROP_YSIZE',
- 'OBJPROP_ZORDER',
- 'OBJ_ALL_PERIODS',
- 'OBJ_ARROW',
- 'OBJ_ARROW_BUY',
- 'OBJ_ARROW_CHECK',
- 'OBJ_ARROW_DOWN',
- 'OBJ_ARROW_LEFT_PRICE',
- 'OBJ_ARROW_RIGHT_PRICE',
- 'OBJ_ARROW_SELL',
- 'OBJ_ARROW_STOP',
- 'OBJ_ARROW_THUMB_DOWN',
- 'OBJ_ARROW_THUMB_UP',
- 'OBJ_ARROW_UP',
- 'OBJ_BITMAP',
- 'OBJ_BITMAP_LABEL',
- 'OBJ_BUTTON',
- 'OBJ_CHANNEL',
- 'OBJ_CYCLES',
- 'OBJ_EDIT',
- 'OBJ_ELLIPSE',
- 'OBJ_EVENT',
- 'OBJ_EXPANSION',
- 'OBJ_FIBOARC',
- 'OBJ_FIBOCHANNEL',
- 'OBJ_FIBOFAN',
- 'OBJ_FIBOTIMES',
- 'OBJ_FIBO',
- 'OBJ_GANNFAN',
- 'OBJ_GANNGRID',
- 'OBJ_GANNLINE',
- 'OBJ_HLINE',
- 'OBJ_LABEL',
- 'OBJ_NO_PERIODS',
- 'OBJ_PERIOD_D1',
- 'OBJ_PERIOD_H1',
- 'OBJ_PERIOD_H4',
- 'OBJ_PERIOD_M1',
- 'OBJ_PERIOD_M5',
- 'OBJ_PERIOD_M15',
- 'OBJ_PERIOD_M30',
- 'OBJ_PERIOD_MN1',
- 'OBJ_PERIOD_W1',
- 'OBJ_PITCHFORK',
- 'OBJ_RECTANGLE',
- 'OBJ_RECTANGLE_LABEL',
- 'OBJ_REGRESSION',
- 'OBJ_STDDEVCHANNEL',
- 'OBJ_TEXT',
- 'OBJ_TRENDBYANGLE',
- 'OBJ_TREND',
- 'OBJ_TRIANGLE',
- 'OBJ_VLINE',
- 'OP_BUYLIMIT',
- 'OP_BUYSTOP',
- 'OP_BUY',
- 'OP_SELLLIMIT',
- 'OP_SELLSTOP',
- 'OP_SELL',
- 'PERIOD_CURRENT',
- 'PERIOD_D1',
- 'PERIOD_H1',
- 'PERIOD_H2',
- 'PERIOD_H3',
- 'PERIOD_H4',
- 'PERIOD_H6',
- 'PERIOD_H8',
- 'PERIOD_H12',
- 'PERIOD_M1',
- 'PERIOD_M2',
- 'PERIOD_M3',
- 'PERIOD_M4',
- 'PERIOD_M5',
- 'PERIOD_M6',
- 'PERIOD_M10',
- 'PERIOD_M12',
- 'PERIOD_M15',
- 'PERIOD_M20',
- 'PERIOD_M30',
- 'PERIOD_MN1',
- 'PERIOD_W1',
- 'POINTER_AUTOMATIC',
- 'POINTER_DYNAMIC',
+ :license: BSD, see LICENSE for details.
+"""
+types = (
+ 'AccountBalance',
+ 'AccountCompany',
+ 'AccountCredit',
+ 'AccountCurrency',
+ 'AccountEquity',
+ 'AccountFreeMarginCheck',
+ 'AccountFreeMarginMode',
+ 'AccountFreeMargin',
+ 'AccountInfoDouble',
+ 'AccountInfoInteger',
+ 'AccountInfoString',
+ 'AccountLeverage',
+ 'AccountMargin',
+ 'AccountName',
+ 'AccountNumber',
+ 'AccountProfit',
+ 'AccountServer',
+ 'AccountStopoutLevel',
+ 'AccountStopoutMode',
+ 'Alert',
+ 'ArrayBsearch',
+ 'ArrayCompare',
+ 'ArrayCopyRates',
+ 'ArrayCopySeries',
+ 'ArrayCopy',
+ 'ArrayDimension',
+ 'ArrayFill',
+ 'ArrayFree',
+ 'ArrayGetAsSeries',
+ 'ArrayInitialize',
+ 'ArrayIsDynamic',
+ 'ArrayIsSeries',
+ 'ArrayMaximum',
+ 'ArrayMinimum',
+ 'ArrayRange',
+ 'ArrayResize',
+ 'ArraySetAsSeries',
+ 'ArraySize',
+ 'ArraySort',
+ 'CharArrayToString',
+ 'CharToString',
+ 'CharToStr',
+ 'CheckPointer',
+ 'ColorToARGB',
+ 'ColorToString',
+ 'Comment',
+ 'CopyClose',
+ 'CopyHigh',
+ 'CopyLow',
+ 'CopyOpen',
+ 'CopyRates',
+ 'CopyRealVolume',
+ 'CopySpread',
+ 'CopyTickVolume',
+ 'CopyTime',
+ 'DayOfWeek',
+ 'DayOfYear',
+ 'Day',
+ 'DebugBreak',
+ 'Digits',
+ 'DoubleToString',
+ 'DoubleToStr',
+ 'EnumToString',
+ 'EventChartCustom',
+ 'EventKillTimer',
+ 'EventSetMillisecondTimer',
+ 'EventSetTimer',
+ 'ExpertRemove',
+ 'FileClose',
+ 'FileCopy',
+ 'FileDelete',
+ 'FileFindClose',
+ 'FileFindFirst',
+ 'FileFindNext',
+ 'FileFlush',
+ 'FileGetInteger',
+ 'FileIsEnding',
+ 'FileIsExist',
+ 'FileIsLineEnding',
+ 'FileMove',
+ 'FileOpenHistory',
+ 'FileOpen',
+ 'FileReadArray',
+ 'FileReadBool',
+ 'FileReadDatetime',
+ 'FileReadDouble',
+ 'FileReadFloat',
+ 'FileReadInteger',
+ 'FileReadLong',
+ 'FileReadNumber',
+ 'FileReadString',
+ 'FileReadStruct',
+ 'FileSeek',
+ 'FileSize',
+ 'FileTell',
+ 'FileWriteArray',
+ 'FileWriteDouble',
+ 'FileWriteFloat',
+ 'FileWriteInteger',
+ 'FileWriteLong',
+ 'FileWriteString',
+ 'FileWriteStruct',
+ 'FileWrite',
+ 'FolderClean',
+ 'FolderCreate',
+ 'FolderDelete',
+ 'GetLastError',
+ 'GetPointer',
+ 'GetTickCount',
+ 'GlobalVariableCheck',
+ 'GlobalVariableDel',
+ 'GlobalVariableGet',
+ 'GlobalVariableName',
+ 'GlobalVariableSetOnCondition',
+ 'GlobalVariableSet',
+ 'GlobalVariableTemp',
+ 'GlobalVariableTime',
+ 'GlobalVariablesDeleteAll',
+ 'GlobalVariablesFlush',
+ 'GlobalVariablesTotal',
+ 'HideTestIndicators',
+ 'Hour',
+ 'IndicatorBuffers',
+ 'IndicatorCounted',
+ 'IndicatorDigits',
+ 'IndicatorSetDouble',
+ 'IndicatorSetInteger',
+ 'IndicatorSetString',
+ 'IndicatorShortName',
+ 'IntegerToString',
+ 'IsConnected',
+ 'IsDemo',
+ 'IsDllsAllowed',
+ 'IsExpertEnabled',
+ 'IsLibrariesAllowed',
+ 'IsOptimization',
+ 'IsStopped',
+ 'IsTesting',
+ 'IsTradeAllowed',
+ 'IsTradeContextBusy',
+ 'IsVisualMode',
+ 'MQLInfoInteger',
+ 'MQLInfoString',
+ 'MarketInfo',
+ 'MathAbs',
+ 'MathArccos',
+ 'MathArcsin',
+ 'MathArctan',
+ 'MathCeil',
+ 'MathCos',
+ 'MathExp',
+ 'MathFloor',
+ 'MathIsValidNumber',
+ 'MathLog',
+ 'MathMax',
+ 'MathMin',
+ 'MathMod',
+ 'MathPow',
+ 'MathRand',
+ 'MathRound',
+ 'MathSin',
+ 'MathSqrt',
+ 'MathSrand',
+ 'MathTan',
+ 'MessageBox',
+ 'Minute',
+ 'Month',
+ 'NormalizeDouble',
+ 'ObjectCreate',
+ 'ObjectDelete',
+ 'ObjectDescription',
+ 'ObjectFind',
+ 'ObjectGetDouble',
+ 'ObjectGetFiboDescription',
+ 'ObjectGetInteger',
+ 'ObjectGetShiftByValue',
+ 'ObjectGetString',
+ 'ObjectGetTimeByValue',
+ 'ObjectGetValueByShift',
+ 'ObjectGetValueByTime',
+ 'ObjectGet',
+ 'ObjectMove',
+ 'ObjectName',
+ 'ObjectSetDouble',
+ 'ObjectSetFiboDescription',
+ 'ObjectSetInteger',
+ 'ObjectSetString',
+ 'ObjectSetText',
+ 'ObjectSet',
+ 'ObjectType',
+ 'ObjectsDeleteAll',
+ 'ObjectsTotal',
+ 'OrderCloseBy',
+ 'OrderClosePrice',
+ 'OrderCloseTime',
+ 'OrderClose',
+ 'OrderComment',
+ 'OrderCommission',
+ 'OrderDelete',
+ 'OrderExpiration',
+ 'OrderLots',
+ 'OrderMagicNumber',
+ 'OrderModify',
+ 'OrderOpenPrice',
+ 'OrderOpenTime',
+ 'OrderPrint',
+ 'OrderProfit',
+ 'OrderSelect',
+ 'OrderSend',
+ 'OrderStopLoss',
+ 'OrderSwap',
+ 'OrderSymbol',
+ 'OrderTakeProfit',
+ 'OrderTicket',
+ 'OrderType',
+ 'OrdersHistoryTotal',
+ 'OrdersTotal',
+ 'PeriodSeconds',
+ 'Period',
+ 'PlaySound',
+ 'Point',
+ 'PrintFormat',
+ 'Print',
+ 'RefreshRates',
+ 'ResetLastError',
+ 'ResourceCreate',
+ 'ResourceFree',
+ 'ResourceReadImage',
+ 'ResourceSave',
+ 'Seconds',
+ 'SendFTP',
+ 'SendMail',
+ 'SendNotification',
+ 'SeriesInfoInteger',
+ 'SetIndexArrow',
+ 'SetIndexBuffer',
+ 'SetIndexDrawBegin',
+ 'SetIndexEmptyValue',
+ 'SetIndexLabel',
+ 'SetIndexShift',
+ 'SetIndexStyle',
+ 'SetLevelStyle',
+ 'SetLevelValue',
+ 'ShortArrayToString',
+ 'ShortToString',
+ 'Sleep',
+ 'StrToDouble',
+ 'StrToInteger',
+ 'StrToTime',
+ 'StringAdd',
+ 'StringBufferLen',
+ 'StringCompare',
+ 'StringConcatenate',
+ 'StringFill',
+ 'StringFind',
+ 'StringFormat',
+ 'StringGetCharacter',
+ 'StringGetChar',
+ 'StringInit',
+ 'StringLen',
+ 'StringReplace',
+ 'StringSetCharacter',
+ 'StringSetChar',
+ 'StringSplit',
+ 'StringSubstr',
+ 'StringToCharArray',
+ 'StringToColor',
+ 'StringToDouble',
+ 'StringToInteger',
+ 'StringToLower',
+ 'StringToShortArray',
+ 'StringToTime',
+ 'StringToUpper',
+ 'StringTrimLeft',
+ 'StringTrimRight',
+ 'StructToTime',
+ 'SymbolInfoDouble',
+ 'SymbolInfoInteger',
+ 'SymbolInfoSessionQuote',
+ 'SymbolInfoSessionTrade',
+ 'SymbolInfoString',
+ 'SymbolInfoTick',
+ 'SymbolIsSynchronized',
+ 'SymbolName',
+ 'SymbolSelect',
+ 'SymbolsTotal',
+ 'Symbol',
+ 'TerminalClose',
+ 'TerminalCompany',
+ 'TerminalName',
+ 'TerminalPath',
+ 'TesterStatistics',
+ 'TextGetSize',
+ 'TextOut',
+ 'TextSetFont',
+ 'TimeCurrent',
+ 'TimeDayOfWeek',
+ 'TimeDayOfYear',
+ 'TimeDaylightSavings',
+ 'TimeDay',
+ 'TimeGMTOffset',
+ 'TimeGMT',
+ 'TimeHour',
+ 'TimeLocal',
+ 'TimeMinute',
+ 'TimeMonth',
+ 'TimeSeconds',
+ 'TimeToString',
+ 'TimeToStruct',
+ 'TimeToStr',
+ 'TimeTradeServer',
+ 'TimeYear',
+ 'UninitializeReason',
+ 'WindowBarsPerChart',
+ 'WindowExpertName',
+ 'WindowFind',
+ 'WindowFirstVisibleBar',
+ 'WindowHandle',
+ 'WindowIsVisible',
+ 'WindowOnDropped',
+ 'WindowPriceMax',
+ 'WindowPriceMin',
+ 'WindowPriceOnDropped',
+ 'WindowRedraw',
+ 'WindowScreenShot',
+ 'WindowTimeOnDropped',
+ 'WindowXOnDropped',
+ 'WindowYOnDropped',
+ 'WindowsTotal',
+ 'Year',
+ 'ZeroMemory',
+ 'iAC',
+ 'iADX',
+ 'iAD',
+ 'iAO',
+ 'iATR',
+ 'iAlligator',
+ 'iBWMFI',
+ 'iBandsOnArray',
+ 'iBands',
+ 'iBarShift',
+ 'iBars',
+ 'iBearsPower',
+ 'iBullsPower',
+ 'iCCIOnArray',
+ 'iCCI',
+ 'iClose',
+ 'iCustom',
+ 'iDeMarker',
+ 'iEnvelopesOnArray',
+ 'iEnvelopes',
+ 'iForce',
+ 'iFractals',
+ 'iGator',
+ 'iHighest',
+ 'iHigh',
+ 'iIchimoku',
+ 'iLowest',
+ 'iLow',
+ 'iMACD',
+ 'iMAOnArray',
+ 'iMA',
+ 'iMFI',
+ 'iMomentumOnArray',
+ 'iMomentum',
+ 'iOBV',
+ 'iOpen',
+ 'iOsMA',
+ 'iRSIOnArray',
+ 'iRSI',
+ 'iRVI',
+ 'iSAR',
+ 'iStdDevOnArray',
+ 'iStdDev',
+ 'iStochastic',
+ 'iTime',
+ 'iVolume',
+ 'iWPR',
+)
+
+constants = (
+ 'ACCOUNT_BALANCE',
+ 'ACCOUNT_COMPANY',
+ 'ACCOUNT_CREDIT',
+ 'ACCOUNT_CURRENCY',
+ 'ACCOUNT_EQUITY',
+ 'ACCOUNT_FREEMARGIN',
+ 'ACCOUNT_LEVERAGE',
+ 'ACCOUNT_LIMIT_ORDERS',
+ 'ACCOUNT_LOGIN',
+ 'ACCOUNT_MARGIN',
+ 'ACCOUNT_MARGIN_LEVEL',
+ 'ACCOUNT_MARGIN_SO_CALL',
+ 'ACCOUNT_MARGIN_SO_MODE',
+ 'ACCOUNT_MARGIN_SO_SO',
+ 'ACCOUNT_NAME',
+ 'ACCOUNT_PROFIT',
+ 'ACCOUNT_SERVER',
+ 'ACCOUNT_STOPOUT_MODE_MONEY',
+ 'ACCOUNT_STOPOUT_MODE_PERCENT',
+ 'ACCOUNT_TRADE_ALLOWED',
+ 'ACCOUNT_TRADE_EXPERT',
+ 'ACCOUNT_TRADE_MODE',
+ 'ACCOUNT_TRADE_MODE_CONTEST',
+ 'ACCOUNT_TRADE_MODE_DEMO',
+ 'ACCOUNT_TRADE_MODE_REAL',
+ 'ALIGN_CENTER',
+ 'ALIGN_LEFT',
+ 'ALIGN_RIGHT',
+ 'ANCHOR_BOTTOM',
+ 'ANCHOR_CENTER',
+ 'ANCHOR_LEFT',
+ 'ANCHOR_LEFT_LOWER',
+ 'ANCHOR_LEFT_UPPER',
+ 'ANCHOR_LOWER',
+ 'ANCHOR_RIGHT',
+ 'ANCHOR_RIGHT_LOWER',
+ 'ANCHOR_RIGHT_UPPER',
+ 'ANCHOR_TOP',
+ 'ANCHOR_UPPER',
+ 'BORDER_FLAT',
+ 'BORDER_RAISED',
+ 'BORDER_SUNKEN',
+ 'CHARTEVENT_CHART_CHANGE',
+ 'CHARTEVENT_CLICK',
+ 'CHARTEVENT_CUSTOM',
+ 'CHARTEVENT_CUSTOM_LAST',
+ 'CHARTEVENT_KEYDOWN',
+ 'CHARTEVENT_MOUSE_MOVE',
+ 'CHARTEVENT_OBJECT_CHANGE',
+ 'CHARTEVENT_OBJECT_CLICK',
+ 'CHARTEVENT_OBJECT_CREATE',
+ 'CHARTEVENT_OBJECT_DELETE',
+ 'CHARTEVENT_OBJECT_DRAG',
+ 'CHARTEVENT_OBJECT_ENDEDIT',
+ 'CHARTS_MAX',
+ 'CHART_AUTOSCROLL',
+ 'CHART_BARS',
+ 'CHART_BEGIN',
+ 'CHART_BRING_TO_TOP',
+ 'CHART_CANDLES',
+ 'CHART_COLOR_ASK',
+ 'CHART_COLOR_BACKGROUND',
+ 'CHART_COLOR_BID',
+ 'CHART_COLOR_CANDLE_BEAR',
+ 'CHART_COLOR_CANDLE_BULL',
+ 'CHART_COLOR_CHART_DOWN',
+ 'CHART_COLOR_CHART_LINE',
+ 'CHART_COLOR_CHART_UP',
+ 'CHART_COLOR_FOREGROUND',
+ 'CHART_COLOR_GRID',
+ 'CHART_COLOR_LAST',
+ 'CHART_COLOR_STOP_LEVEL',
+ 'CHART_COLOR_VOLUME',
+ 'CHART_COMMENT',
+ 'CHART_CURRENT_POS',
+ 'CHART_DRAG_TRADE_LEVELS',
+ 'CHART_END',
+ 'CHART_EVENT_MOUSE_MOVE',
+ 'CHART_EVENT_OBJECT_CREATE',
+ 'CHART_EVENT_OBJECT_DELETE',
+ 'CHART_FIRST_VISIBLE_BAR',
+ 'CHART_FIXED_MAX',
+ 'CHART_FIXED_MIN',
+ 'CHART_FIXED_POSITION',
+ 'CHART_FOREGROUND',
+ 'CHART_HEIGHT_IN_PIXELS',
+ 'CHART_IS_OBJECT',
+ 'CHART_LINE',
+ 'CHART_MODE',
+ 'CHART_MOUSE_SCROLL',
+ 'CHART_POINTS_PER_BAR',
+ 'CHART_PRICE_MAX',
+ 'CHART_PRICE_MIN',
+ 'CHART_SCALEFIX',
+ 'CHART_SCALEFIX_11',
+ 'CHART_SCALE',
+ 'CHART_SCALE_PT_PER_BAR',
+ 'CHART_SHIFT',
+ 'CHART_SHIFT_SIZE',
+ 'CHART_SHOW_ASK_LINE',
+ 'CHART_SHOW_BID_LINE',
+ 'CHART_SHOW_DATE_SCALE',
+ 'CHART_SHOW_GRID',
+ 'CHART_SHOW_LAST_LINE',
+ 'CHART_SHOW_OBJECT_DESCR',
+ 'CHART_SHOW_OHLC',
+ 'CHART_SHOW_PERIOD_SEP',
+ 'CHART_SHOW_PRICE_SCALE',
+ 'CHART_SHOW_TRADE_LEVELS',
+ 'CHART_SHOW_VOLUMES',
+ 'CHART_VISIBLE_BARS',
+ 'CHART_VOLUME_HIDE',
+ 'CHART_VOLUME_REAL',
+ 'CHART_VOLUME_TICK',
+ 'CHART_WIDTH_IN_BARS',
+ 'CHART_WIDTH_IN_PIXELS',
+ 'CHART_WINDOWS_TOTAL',
+ 'CHART_WINDOW_HANDLE',
+ 'CHART_WINDOW_IS_VISIBLE',
+ 'CHART_WINDOW_YDISTANCE',
+ 'CHAR_MAX',
+ 'CHAR_MIN',
+ 'CLR_NONE',
+ 'CORNER_LEFT_LOWER',
+ 'CORNER_LEFT_UPPER',
+ 'CORNER_RIGHT_LOWER',
+ 'CORNER_RIGHT_UPPER',
+ 'CP_ACP',
+ 'CP_MACCP',
+ 'CP_OEMCP',
+ 'CP_SYMBOL',
+ 'CP_THREAD_ACP',
+ 'CP_UTF7',
+ 'CP_UTF8',
+ 'DBL_DIG',
+ 'DBL_EPSILON',
+ 'DBL_MANT_DIG',
+ 'DBL_MAX',
+ 'DBL_MAX_10_EXP',
+ 'DBL_MAX_EXP',
+ 'DBL_MIN',
+ 'DBL_MIN_10_EXP',
+ 'DBL_MIN_EXP',
+ 'DRAW_ARROW',
+ 'DRAW_FILLING',
+ 'DRAW_HISTOGRAM',
+ 'DRAW_LINE',
+ 'DRAW_NONE',
+ 'DRAW_SECTION',
+ 'DRAW_ZIGZAG',
+ 'EMPTY',
+ 'EMPTY_VALUE',
+ 'ERR_ACCOUNT_DISABLED',
+ 'ERR_BROKER_BUSY',
+ 'ERR_COMMON_ERROR',
+ 'ERR_INVALID_ACCOUNT',
+ 'ERR_INVALID_PRICE',
+ 'ERR_INVALID_STOPS',
+ 'ERR_INVALID_TRADE_PARAMETERS',
+ 'ERR_INVALID_TRADE_VOLUME',
+ 'ERR_LONG_POSITIONS_ONLY_ALLOWED',
+ 'ERR_MALFUNCTIONAL_TRADE',
+ 'ERR_MARKET_CLOSED',
+ 'ERR_NOT_ENOUGH_MONEY',
+ 'ERR_NOT_ENOUGH_RIGHTS',
+ 'ERR_NO_CONNECTION',
+ 'ERR_NO_ERROR',
+ 'ERR_NO_RESULT',
+ 'ERR_OFF_QUOTES',
+ 'ERR_OLD_VERSION',
+ 'ERR_ORDER_LOCKED',
+ 'ERR_PRICE_CHANGED',
+ 'ERR_REQUOTE',
+ 'ERR_SERVER_BUSY',
+ 'ERR_TOO_FREQUENT_REQUESTS',
+ 'ERR_TOO_MANY_REQUESTS',
+ 'ERR_TRADE_CONTEXT_BUSY',
+ 'ERR_TRADE_DISABLED',
+ 'ERR_TRADE_EXPIRATION_DENIED',
+ 'ERR_TRADE_HEDGE_PROHIBITED',
+ 'ERR_TRADE_MODIFY_DENIED',
+ 'ERR_TRADE_PROHIBITED_BY_FIFO',
+ 'ERR_TRADE_TIMEOUT',
+ 'ERR_TRADE_TOO_MANY_ORDERS',
+ 'FILE_ACCESS_DATE',
+ 'FILE_ANSI',
+ 'FILE_BIN',
+ 'FILE_COMMON',
+ 'FILE_CREATE_DATE',
+ 'FILE_CSV',
+ 'FILE_END',
+ 'FILE_EXISTS',
+ 'FILE_IS_ANSI',
+ 'FILE_IS_BINARY',
+ 'FILE_IS_COMMON',
+ 'FILE_IS_CSV',
+ 'FILE_IS_READABLE',
+ 'FILE_IS_TEXT',
+ 'FILE_IS_WRITABLE',
+ 'FILE_LINE_END',
+ 'FILE_MODIFY_DATE',
+ 'FILE_POSITION',
+ 'FILE_READ',
+ 'FILE_REWRITE',
+ 'FILE_SHARE_READ',
+ 'FILE_SHARE_WRITE',
+ 'FILE_SIZE',
+ 'FILE_TXT',
+ 'FILE_UNICODE',
+ 'FILE_WRITE',
+ 'FLT_DIG',
+ 'FLT_EPSILON',
+ 'FLT_MANT_DIG',
+ 'FLT_MAX',
+ 'FLT_MAX_10_EXP',
+ 'FLT_MAX_EXP',
+ 'FLT_MIN',
+ 'FLT_MIN_10_EXP',
+ 'FLT_MIN_EXP',
+ 'FRIDAY',
+ 'GANN_DOWN_TREND',
+ 'GANN_UP_TREND',
+ 'IDABORT',
+ 'IDCANCEL',
+ 'IDCONTINUE',
+ 'IDIGNORE',
+ 'IDNO',
+ 'IDOK',
+ 'IDRETRY',
+ 'IDTRYAGAIN',
+ 'IDYES',
+ 'INDICATOR_CALCULATIONS',
+ 'INDICATOR_COLOR_INDEX',
+ 'INDICATOR_DATA',
+ 'INDICATOR_DIGITS',
+ 'INDICATOR_HEIGHT',
+ 'INDICATOR_LEVELCOLOR',
+ 'INDICATOR_LEVELSTYLE',
+ 'INDICATOR_LEVELS',
+ 'INDICATOR_LEVELTEXT',
+ 'INDICATOR_LEVELVALUE',
+ 'INDICATOR_LEVELWIDTH',
+ 'INDICATOR_MAXIMUM',
+ 'INDICATOR_MINIMUM',
+ 'INDICATOR_SHORTNAME',
+ 'INT_MAX',
+ 'INT_MIN',
+ 'INVALID_HANDLE',
+ 'IS_DEBUG_MODE',
+ 'IS_PROFILE_MODE',
+ 'LICENSE_DEMO',
+ 'LICENSE_FREE',
+ 'LICENSE_FULL',
+ 'LICENSE_TIME',
+ 'LONG_MAX',
+ 'LONG_MIN',
+ 'MB_ABORTRETRYIGNORE',
+ 'MB_CANCELTRYCONTINUE',
+ 'MB_DEFBUTTON1',
+ 'MB_DEFBUTTON2',
+ 'MB_DEFBUTTON3',
+ 'MB_DEFBUTTON4',
+ 'MB_ICONASTERISK',
+ 'MB_ICONERROR',
+ 'MB_ICONEXCLAMATION',
+ 'MB_ICONHAND',
+ 'MB_ICONINFORMATION',
+ 'MB_ICONQUESTION',
+ 'MB_ICONSTOP',
+ 'MB_ICONWARNING',
+ 'MB_OKCANCEL',
+ 'MB_OK',
+ 'MB_RETRYCANCEL',
+ 'MB_YESNOCANCEL',
+ 'MB_YESNO',
+ 'MODE_ASK',
+ 'MODE_BID',
+ 'MODE_CHINKOUSPAN',
+ 'MODE_CLOSE',
+ 'MODE_DIGITS',
+ 'MODE_EMA',
+ 'MODE_EXPIRATION',
+ 'MODE_FREEZELEVEL',
+ 'MODE_GATORJAW',
+ 'MODE_GATORLIPS',
+ 'MODE_GATORTEETH',
+ 'MODE_HIGH',
+ 'MODE_KIJUNSEN',
+ 'MODE_LOTSIZE',
+ 'MODE_LOTSTEP',
+ 'MODE_LOWER',
+ 'MODE_LOW',
+ 'MODE_LWMA',
+ 'MODE_MAIN',
+ 'MODE_MARGINCALCMODE',
+ 'MODE_MARGINHEDGED',
+ 'MODE_MARGININIT',
+ 'MODE_MARGINMAINTENANCE',
+ 'MODE_MARGINREQUIRED',
+ 'MODE_MAXLOT',
+ 'MODE_MINLOT',
+ 'MODE_MINUSDI',
+ 'MODE_OPEN',
+ 'MODE_PLUSDI',
+ 'MODE_POINT',
+ 'MODE_PROFITCALCMODE',
+ 'MODE_SENKOUSPANA',
+ 'MODE_SENKOUSPANB',
+ 'MODE_SIGNAL',
+ 'MODE_SMA',
+ 'MODE_SMMA',
+ 'MODE_SPREAD',
+ 'MODE_STARTING',
+ 'MODE_STOPLEVEL',
+ 'MODE_SWAPLONG',
+ 'MODE_SWAPSHORT',
+ 'MODE_SWAPTYPE',
+ 'MODE_TENKANSEN',
+ 'MODE_TICKSIZE',
+ 'MODE_TICKVALUE',
+ 'MODE_TIME',
+ 'MODE_TRADEALLOWED',
+ 'MODE_UPPER',
+ 'MODE_VOLUME',
+ 'MONDAY',
+ 'MQL_DEBUG',
+ 'MQL_DLLS_ALLOWED',
+ 'MQL_FRAME_MODE',
+ 'MQL_LICENSE_TYPE',
+ 'MQL_OPTIMIZATION',
+ 'MQL_PROFILER',
+ 'MQL_PROGRAM_NAME',
+ 'MQL_PROGRAM_PATH',
+ 'MQL_PROGRAM_TYPE',
+ 'MQL_TESTER',
+ 'MQL_TRADE_ALLOWED',
+ 'MQL_VISUAL_MODE',
+ 'M_1_PI',
+ 'M_2_PI',
+ 'M_2_SQRTPI',
+ 'M_E',
+ 'M_LN2',
+ 'M_LN10',
+ 'M_LOG2E',
+ 'M_LOG10E',
+ 'M_PI',
+ 'M_PI_2',
+ 'M_PI_4',
+ 'M_SQRT1_2',
+ 'M_SQRT2',
+ 'NULL',
+ 'OBJPROP_ALIGN',
+ 'OBJPROP_ANCHOR',
+ 'OBJPROP_ANGLE',
+ 'OBJPROP_ARROWCODE',
+ 'OBJPROP_BACK',
+ 'OBJPROP_BGCOLOR',
+ 'OBJPROP_BMPFILE',
+ 'OBJPROP_BORDER_COLOR',
+ 'OBJPROP_BORDER_TYPE',
+ 'OBJPROP_CHART_ID',
+ 'OBJPROP_CHART_SCALE',
+ 'OBJPROP_COLOR',
+ 'OBJPROP_CORNER',
+ 'OBJPROP_CREATETIME',
+ 'OBJPROP_DATE_SCALE',
+ 'OBJPROP_DEVIATION',
+ 'OBJPROP_DRAWLINES',
+ 'OBJPROP_ELLIPSE',
+ 'OBJPROP_FIBOLEVELS',
+ 'OBJPROP_FILL',
+ 'OBJPROP_FIRSTLEVEL',
+ 'OBJPROP_FONTSIZE',
+ 'OBJPROP_FONT',
+ 'OBJPROP_HIDDEN',
+ 'OBJPROP_LEVELCOLOR',
+ 'OBJPROP_LEVELSTYLE',
+ 'OBJPROP_LEVELS',
+ 'OBJPROP_LEVELTEXT',
+ 'OBJPROP_LEVELVALUE',
+ 'OBJPROP_LEVELWIDTH',
+ 'OBJPROP_NAME',
+ 'OBJPROP_PERIOD',
+ 'OBJPROP_PRICE1',
+ 'OBJPROP_PRICE2',
+ 'OBJPROP_PRICE3',
+ 'OBJPROP_PRICE',
+ 'OBJPROP_PRICE_SCALE',
+ 'OBJPROP_RAY',
+ 'OBJPROP_RAY_RIGHT',
+ 'OBJPROP_READONLY',
+ 'OBJPROP_SCALE',
+ 'OBJPROP_SELECTABLE',
+ 'OBJPROP_SELECTED',
+ 'OBJPROP_STATE',
+ 'OBJPROP_STYLE',
+ 'OBJPROP_SYMBOL',
+ 'OBJPROP_TEXT',
+ 'OBJPROP_TIME1',
+ 'OBJPROP_TIME2',
+ 'OBJPROP_TIME3',
+ 'OBJPROP_TIMEFRAMES',
+ 'OBJPROP_TIME',
+ 'OBJPROP_TOOLTIP',
+ 'OBJPROP_TYPE',
+ 'OBJPROP_WIDTH',
+ 'OBJPROP_XDISTANCE',
+ 'OBJPROP_XOFFSET',
+ 'OBJPROP_XSIZE',
+ 'OBJPROP_YDISTANCE',
+ 'OBJPROP_YOFFSET',
+ 'OBJPROP_YSIZE',
+ 'OBJPROP_ZORDER',
+ 'OBJ_ALL_PERIODS',
+ 'OBJ_ARROW',
+ 'OBJ_ARROW_BUY',
+ 'OBJ_ARROW_CHECK',
+ 'OBJ_ARROW_DOWN',
+ 'OBJ_ARROW_LEFT_PRICE',
+ 'OBJ_ARROW_RIGHT_PRICE',
+ 'OBJ_ARROW_SELL',
+ 'OBJ_ARROW_STOP',
+ 'OBJ_ARROW_THUMB_DOWN',
+ 'OBJ_ARROW_THUMB_UP',
+ 'OBJ_ARROW_UP',
+ 'OBJ_BITMAP',
+ 'OBJ_BITMAP_LABEL',
+ 'OBJ_BUTTON',
+ 'OBJ_CHANNEL',
+ 'OBJ_CYCLES',
+ 'OBJ_EDIT',
+ 'OBJ_ELLIPSE',
+ 'OBJ_EVENT',
+ 'OBJ_EXPANSION',
+ 'OBJ_FIBOARC',
+ 'OBJ_FIBOCHANNEL',
+ 'OBJ_FIBOFAN',
+ 'OBJ_FIBOTIMES',
+ 'OBJ_FIBO',
+ 'OBJ_GANNFAN',
+ 'OBJ_GANNGRID',
+ 'OBJ_GANNLINE',
+ 'OBJ_HLINE',
+ 'OBJ_LABEL',
+ 'OBJ_NO_PERIODS',
+ 'OBJ_PERIOD_D1',
+ 'OBJ_PERIOD_H1',
+ 'OBJ_PERIOD_H4',
+ 'OBJ_PERIOD_M1',
+ 'OBJ_PERIOD_M5',
+ 'OBJ_PERIOD_M15',
+ 'OBJ_PERIOD_M30',
+ 'OBJ_PERIOD_MN1',
+ 'OBJ_PERIOD_W1',
+ 'OBJ_PITCHFORK',
+ 'OBJ_RECTANGLE',
+ 'OBJ_RECTANGLE_LABEL',
+ 'OBJ_REGRESSION',
+ 'OBJ_STDDEVCHANNEL',
+ 'OBJ_TEXT',
+ 'OBJ_TRENDBYANGLE',
+ 'OBJ_TREND',
+ 'OBJ_TRIANGLE',
+ 'OBJ_VLINE',
+ 'OP_BUYLIMIT',
+ 'OP_BUYSTOP',
+ 'OP_BUY',
+ 'OP_SELLLIMIT',
+ 'OP_SELLSTOP',
+ 'OP_SELL',
+ 'PERIOD_CURRENT',
+ 'PERIOD_D1',
+ 'PERIOD_H1',
+ 'PERIOD_H2',
+ 'PERIOD_H3',
+ 'PERIOD_H4',
+ 'PERIOD_H6',
+ 'PERIOD_H8',
+ 'PERIOD_H12',
+ 'PERIOD_M1',
+ 'PERIOD_M2',
+ 'PERIOD_M3',
+ 'PERIOD_M4',
+ 'PERIOD_M5',
+ 'PERIOD_M6',
+ 'PERIOD_M10',
+ 'PERIOD_M12',
+ 'PERIOD_M15',
+ 'PERIOD_M20',
+ 'PERIOD_M30',
+ 'PERIOD_MN1',
+ 'PERIOD_W1',
+ 'POINTER_AUTOMATIC',
+ 'POINTER_DYNAMIC',
'POINTER_INVALID',
- 'PRICE_CLOSE',
- 'PRICE_HIGH',
- 'PRICE_LOW',
- 'PRICE_MEDIAN',
- 'PRICE_OPEN',
- 'PRICE_TYPICAL',
- 'PRICE_WEIGHTED',
- 'PROGRAM_EXPERT',
- 'PROGRAM_INDICATOR',
- 'PROGRAM_SCRIPT',
- 'REASON_ACCOUNT',
- 'REASON_CHARTCHANGE',
- 'REASON_CHARTCLOSE',
- 'REASON_CLOSE',
- 'REASON_INITFAILED',
- 'REASON_PARAMETERS',
- 'REASON_PROGRAM'
- 'REASON_RECOMPILE',
- 'REASON_REMOVE',
- 'REASON_TEMPLATE',
- 'SATURDAY',
- 'SEEK_CUR',
- 'SEEK_END',
- 'SEEK_SET',
- 'SERIES_BARS_COUNT',
- 'SERIES_FIRSTDATE',
- 'SERIES_LASTBAR_DATE',
- 'SERIES_SERVER_FIRSTDATE',
- 'SERIES_SYNCHRONIZED',
- 'SERIES_TERMINAL_FIRSTDATE',
- 'SHORT_MAX',
- 'SHORT_MIN',
- 'STAT_BALANCEDD_PERCENT',
- 'STAT_BALANCEMIN',
- 'STAT_BALANCE_DDREL_PERCENT',
- 'STAT_BALANCE_DD',
- 'STAT_BALANCE_DD_RELATIVE',
- 'STAT_CONLOSSMAX',
- 'STAT_CONLOSSMAX_TRADES',
- 'STAT_CONPROFITMAX',
- 'STAT_CONPROFITMAX_TRADES',
- 'STAT_CUSTOM_ONTESTER',
- 'STAT_DEALS',
- 'STAT_EQUITYDD_PERCENT',
- 'STAT_EQUITYMIN',
- 'STAT_EQUITY_DDREL_PERCENT',
- 'STAT_EQUITY_DD',
- 'STAT_EQUITY_DD_RELATIVE',
- 'STAT_EXPECTED_PAYOFF',
- 'STAT_GROSS_LOSS',
- 'STAT_GROSS_PROFIT',
- 'STAT_INITIAL_DEPOSIT',
- 'STAT_LONG_TRADES',
- 'STAT_LOSSTRADES_AVGCON',
- 'STAT_LOSS_TRADES',
- 'STAT_MAX_CONLOSSES',
- 'STAT_MAX_CONLOSS_TRADES',
- 'STAT_MAX_CONPROFIT_TRADES',
- 'STAT_MAX_CONWINS',
- 'STAT_MAX_LOSSTRADE',
- 'STAT_MAX_PROFITTRADE',
- 'STAT_MIN_MARGINLEVEL',
- 'STAT_PROFITTRADES_AVGCON',
- 'STAT_PROFIT',
- 'STAT_PROFIT_FACTOR',
- 'STAT_PROFIT_LONGTRADES',
- 'STAT_PROFIT_SHORTTRADES',
- 'STAT_PROFIT_TRADES',
- 'STAT_RECOVERY_FACTOR',
- 'STAT_SHARPE_RATIO',
- 'STAT_SHORT_TRADES',
- 'STAT_TRADES',
- 'STAT_WITHDRAWAL',
- 'STO_CLOSECLOSE',
- 'STO_LOWHIGH',
- 'STYLE_DASHDOTDOT',
- 'STYLE_DASHDOT',
- 'STYLE_DASH',
- 'STYLE_DOT',
- 'STYLE_SOLID',
- 'SUNDAY',
- 'SYMBOL_ARROWDOWN',
- 'SYMBOL_ARROWUP',
- 'SYMBOL_CHECKSIGN',
- 'SYMBOL_LEFTPRICE',
- 'SYMBOL_RIGHTPRICE',
- 'SYMBOL_STOPSIGN',
- 'SYMBOL_THUMBSDOWN',
- 'SYMBOL_THUMBSUP',
- 'TERMINAL_BUILD',
- 'TERMINAL_CODEPAGE',
- 'TERMINAL_COMMONDATA_PATH',
- 'TERMINAL_COMPANY',
- 'TERMINAL_CONNECTED',
- 'TERMINAL_CPU_CORES',
- 'TERMINAL_DATA_PATH',
- 'TERMINAL_DISK_SPACE',
- 'TERMINAL_DLLS_ALLOWED',
- 'TERMINAL_EMAIL_ENABLED',
- 'TERMINAL_FTP_ENABLED',
- 'TERMINAL_LANGUAGE',
- 'TERMINAL_MAXBARS',
- 'TERMINAL_MEMORY_AVAILABLE',
- 'TERMINAL_MEMORY_PHYSICAL',
- 'TERMINAL_MEMORY_TOTAL',
- 'TERMINAL_MEMORY_USED',
- 'TERMINAL_NAME',
- 'TERMINAL_OPENCL_SUPPORT',
- 'TERMINAL_PATH',
- 'TERMINAL_TRADE_ALLOWED',
- 'TERMINAL_X64',
- 'THURSDAY',
- 'TRADE_ACTION_DEAL',
- 'TRADE_ACTION_MODIFY',
- 'TRADE_ACTION_PENDING',
- 'TRADE_ACTION_REMOVE',
- 'TRADE_ACTION_SLTP',
- 'TUESDAY',
- 'UCHAR_MAX',
- 'UINT_MAX',
- 'ULONG_MAX',
- 'USHORT_MAX',
- 'VOLUME_REAL',
- 'VOLUME_TICK',
- 'WEDNESDAY',
- 'WHOLE_ARRAY',
- 'WRONG_VALUE',
- 'clrNONE',
- '__DATETIME__',
- '__DATE__',
- '__FILE__',
- '__FUNCSIG__',
- '__FUNCTION__',
- '__LINE__',
- '__MQL4BUILD__',
- '__MQLBUILD__',
- '__PATH__',
-)
-
-colors = (
- 'AliceBlue',
- 'AntiqueWhite',
- 'Aquamarine',
- 'Aqua',
- 'Beige',
- 'Bisque',
- 'Black',
- 'BlanchedAlmond',
- 'BlueViolet',
- 'Blue',
- 'Brown',
- 'BurlyWood',
- 'CadetBlue',
- 'Chartreuse',
- 'Chocolate',
- 'Coral',
- 'CornflowerBlue',
- 'Cornsilk',
- 'Crimson',
- 'DarkBlue',
- 'DarkGoldenrod',
- 'DarkGray',
- 'DarkGreen',
- 'DarkKhaki',
- 'DarkOliveGreen',
- 'DarkOrange',
- 'DarkOrchid',
- 'DarkSalmon',
- 'DarkSeaGreen',
- 'DarkSlateBlue',
- 'DarkSlateGray',
- 'DarkTurquoise',
- 'DarkViolet',
- 'DeepPink',
- 'DeepSkyBlue',
- 'DimGray',
- 'DodgerBlue',
- 'FireBrick',
- 'ForestGreen',
- 'Gainsboro',
- 'Goldenrod',
- 'Gold',
- 'Gray',
- 'GreenYellow',
- 'Green',
- 'Honeydew',
- 'HotPink',
- 'IndianRed',
- 'Indigo',
- 'Ivory',
- 'Khaki',
- 'LavenderBlush',
- 'Lavender',
- 'LawnGreen',
- 'LemonChiffon',
- 'LightBlue',
- 'LightCoral',
- 'LightCyan',
- 'LightGoldenrod',
- 'LightGray',
- 'LightGreen',
- 'LightPink',
- 'LightSalmon',
- 'LightSeaGreen',
- 'LightSkyBlue',
- 'LightSlateGray',
- 'LightSteelBlue',
- 'LightYellow',
- 'LimeGreen',
- 'Lime',
- 'Linen',
- 'Magenta',
- 'Maroon',
- 'MediumAquamarine',
- 'MediumBlue',
- 'MediumOrchid',
- 'MediumPurple',
- 'MediumSeaGreen',
- 'MediumSlateBlue',
- 'MediumSpringGreen',
- 'MediumTurquoise',
- 'MediumVioletRed',
- 'MidnightBlue',
- 'MintCream',
- 'MistyRose',
- 'Moccasin',
- 'NavajoWhite',
- 'Navy',
- 'OldLace',
- 'OliveDrab',
- 'Olive',
- 'OrangeRed',
- 'Orange',
- 'Orchid',
- 'PaleGoldenrod',
- 'PaleGreen',
- 'PaleTurquoise',
- 'PaleVioletRed',
- 'PapayaWhip',
- 'PeachPuff',
- 'Peru',
- 'Pink',
- 'Plum',
- 'PowderBlue',
- 'Purple',
- 'Red',
- 'RosyBrown',
- 'RoyalBlue',
- 'SaddleBrown',
- 'Salmon',
- 'SandyBrown',
- 'SeaGreen',
- 'Seashell',
- 'Sienna',
- 'Silver',
- 'SkyBlue',
- 'SlateBlue',
- 'SlateGray',
- 'Snow',
- 'SpringGreen',
- 'SteelBlue',
- 'Tan',
- 'Teal',
- 'Thistle',
- 'Tomato',
- 'Turquoise',
- 'Violet',
- 'Wheat',
- 'WhiteSmoke',
- 'White',
- 'YellowGreen',
- 'Yellow',
-)
-
-keywords = (
- 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
- '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
- 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
- 'Volume',
-)
-c_types = (
- 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
- 'color', 'long', 'ulong', 'datetime', 'float', 'double',
- 'string',
-)
+ 'PRICE_CLOSE',
+ 'PRICE_HIGH',
+ 'PRICE_LOW',
+ 'PRICE_MEDIAN',
+ 'PRICE_OPEN',
+ 'PRICE_TYPICAL',
+ 'PRICE_WEIGHTED',
+ 'PROGRAM_EXPERT',
+ 'PROGRAM_INDICATOR',
+ 'PROGRAM_SCRIPT',
+ 'REASON_ACCOUNT',
+ 'REASON_CHARTCHANGE',
+ 'REASON_CHARTCLOSE',
+ 'REASON_CLOSE',
+ 'REASON_INITFAILED',
+ 'REASON_PARAMETERS',
+ 'REASON_PROGRAM'
+ 'REASON_RECOMPILE',
+ 'REASON_REMOVE',
+ 'REASON_TEMPLATE',
+ 'SATURDAY',
+ 'SEEK_CUR',
+ 'SEEK_END',
+ 'SEEK_SET',
+ 'SERIES_BARS_COUNT',
+ 'SERIES_FIRSTDATE',
+ 'SERIES_LASTBAR_DATE',
+ 'SERIES_SERVER_FIRSTDATE',
+ 'SERIES_SYNCHRONIZED',
+ 'SERIES_TERMINAL_FIRSTDATE',
+ 'SHORT_MAX',
+ 'SHORT_MIN',
+ 'STAT_BALANCEDD_PERCENT',
+ 'STAT_BALANCEMIN',
+ 'STAT_BALANCE_DDREL_PERCENT',
+ 'STAT_BALANCE_DD',
+ 'STAT_BALANCE_DD_RELATIVE',
+ 'STAT_CONLOSSMAX',
+ 'STAT_CONLOSSMAX_TRADES',
+ 'STAT_CONPROFITMAX',
+ 'STAT_CONPROFITMAX_TRADES',
+ 'STAT_CUSTOM_ONTESTER',
+ 'STAT_DEALS',
+ 'STAT_EQUITYDD_PERCENT',
+ 'STAT_EQUITYMIN',
+ 'STAT_EQUITY_DDREL_PERCENT',
+ 'STAT_EQUITY_DD',
+ 'STAT_EQUITY_DD_RELATIVE',
+ 'STAT_EXPECTED_PAYOFF',
+ 'STAT_GROSS_LOSS',
+ 'STAT_GROSS_PROFIT',
+ 'STAT_INITIAL_DEPOSIT',
+ 'STAT_LONG_TRADES',
+ 'STAT_LOSSTRADES_AVGCON',
+ 'STAT_LOSS_TRADES',
+ 'STAT_MAX_CONLOSSES',
+ 'STAT_MAX_CONLOSS_TRADES',
+ 'STAT_MAX_CONPROFIT_TRADES',
+ 'STAT_MAX_CONWINS',
+ 'STAT_MAX_LOSSTRADE',
+ 'STAT_MAX_PROFITTRADE',
+ 'STAT_MIN_MARGINLEVEL',
+ 'STAT_PROFITTRADES_AVGCON',
+ 'STAT_PROFIT',
+ 'STAT_PROFIT_FACTOR',
+ 'STAT_PROFIT_LONGTRADES',
+ 'STAT_PROFIT_SHORTTRADES',
+ 'STAT_PROFIT_TRADES',
+ 'STAT_RECOVERY_FACTOR',
+ 'STAT_SHARPE_RATIO',
+ 'STAT_SHORT_TRADES',
+ 'STAT_TRADES',
+ 'STAT_WITHDRAWAL',
+ 'STO_CLOSECLOSE',
+ 'STO_LOWHIGH',
+ 'STYLE_DASHDOTDOT',
+ 'STYLE_DASHDOT',
+ 'STYLE_DASH',
+ 'STYLE_DOT',
+ 'STYLE_SOLID',
+ 'SUNDAY',
+ 'SYMBOL_ARROWDOWN',
+ 'SYMBOL_ARROWUP',
+ 'SYMBOL_CHECKSIGN',
+ 'SYMBOL_LEFTPRICE',
+ 'SYMBOL_RIGHTPRICE',
+ 'SYMBOL_STOPSIGN',
+ 'SYMBOL_THUMBSDOWN',
+ 'SYMBOL_THUMBSUP',
+ 'TERMINAL_BUILD',
+ 'TERMINAL_CODEPAGE',
+ 'TERMINAL_COMMONDATA_PATH',
+ 'TERMINAL_COMPANY',
+ 'TERMINAL_CONNECTED',
+ 'TERMINAL_CPU_CORES',
+ 'TERMINAL_DATA_PATH',
+ 'TERMINAL_DISK_SPACE',
+ 'TERMINAL_DLLS_ALLOWED',
+ 'TERMINAL_EMAIL_ENABLED',
+ 'TERMINAL_FTP_ENABLED',
+ 'TERMINAL_LANGUAGE',
+ 'TERMINAL_MAXBARS',
+ 'TERMINAL_MEMORY_AVAILABLE',
+ 'TERMINAL_MEMORY_PHYSICAL',
+ 'TERMINAL_MEMORY_TOTAL',
+ 'TERMINAL_MEMORY_USED',
+ 'TERMINAL_NAME',
+ 'TERMINAL_OPENCL_SUPPORT',
+ 'TERMINAL_PATH',
+ 'TERMINAL_TRADE_ALLOWED',
+ 'TERMINAL_X64',
+ 'THURSDAY',
+ 'TRADE_ACTION_DEAL',
+ 'TRADE_ACTION_MODIFY',
+ 'TRADE_ACTION_PENDING',
+ 'TRADE_ACTION_REMOVE',
+ 'TRADE_ACTION_SLTP',
+ 'TUESDAY',
+ 'UCHAR_MAX',
+ 'UINT_MAX',
+ 'ULONG_MAX',
+ 'USHORT_MAX',
+ 'VOLUME_REAL',
+ 'VOLUME_TICK',
+ 'WEDNESDAY',
+ 'WHOLE_ARRAY',
+ 'WRONG_VALUE',
+ 'clrNONE',
+ '__DATETIME__',
+ '__DATE__',
+ '__FILE__',
+ '__FUNCSIG__',
+ '__FUNCTION__',
+ '__LINE__',
+ '__MQL4BUILD__',
+ '__MQLBUILD__',
+ '__PATH__',
+)
+
+colors = (
+ 'AliceBlue',
+ 'AntiqueWhite',
+ 'Aquamarine',
+ 'Aqua',
+ 'Beige',
+ 'Bisque',
+ 'Black',
+ 'BlanchedAlmond',
+ 'BlueViolet',
+ 'Blue',
+ 'Brown',
+ 'BurlyWood',
+ 'CadetBlue',
+ 'Chartreuse',
+ 'Chocolate',
+ 'Coral',
+ 'CornflowerBlue',
+ 'Cornsilk',
+ 'Crimson',
+ 'DarkBlue',
+ 'DarkGoldenrod',
+ 'DarkGray',
+ 'DarkGreen',
+ 'DarkKhaki',
+ 'DarkOliveGreen',
+ 'DarkOrange',
+ 'DarkOrchid',
+ 'DarkSalmon',
+ 'DarkSeaGreen',
+ 'DarkSlateBlue',
+ 'DarkSlateGray',
+ 'DarkTurquoise',
+ 'DarkViolet',
+ 'DeepPink',
+ 'DeepSkyBlue',
+ 'DimGray',
+ 'DodgerBlue',
+ 'FireBrick',
+ 'ForestGreen',
+ 'Gainsboro',
+ 'Goldenrod',
+ 'Gold',
+ 'Gray',
+ 'GreenYellow',
+ 'Green',
+ 'Honeydew',
+ 'HotPink',
+ 'IndianRed',
+ 'Indigo',
+ 'Ivory',
+ 'Khaki',
+ 'LavenderBlush',
+ 'Lavender',
+ 'LawnGreen',
+ 'LemonChiffon',
+ 'LightBlue',
+ 'LightCoral',
+ 'LightCyan',
+ 'LightGoldenrod',
+ 'LightGray',
+ 'LightGreen',
+ 'LightPink',
+ 'LightSalmon',
+ 'LightSeaGreen',
+ 'LightSkyBlue',
+ 'LightSlateGray',
+ 'LightSteelBlue',
+ 'LightYellow',
+ 'LimeGreen',
+ 'Lime',
+ 'Linen',
+ 'Magenta',
+ 'Maroon',
+ 'MediumAquamarine',
+ 'MediumBlue',
+ 'MediumOrchid',
+ 'MediumPurple',
+ 'MediumSeaGreen',
+ 'MediumSlateBlue',
+ 'MediumSpringGreen',
+ 'MediumTurquoise',
+ 'MediumVioletRed',
+ 'MidnightBlue',
+ 'MintCream',
+ 'MistyRose',
+ 'Moccasin',
+ 'NavajoWhite',
+ 'Navy',
+ 'OldLace',
+ 'OliveDrab',
+ 'Olive',
+ 'OrangeRed',
+ 'Orange',
+ 'Orchid',
+ 'PaleGoldenrod',
+ 'PaleGreen',
+ 'PaleTurquoise',
+ 'PaleVioletRed',
+ 'PapayaWhip',
+ 'PeachPuff',
+ 'Peru',
+ 'Pink',
+ 'Plum',
+ 'PowderBlue',
+ 'Purple',
+ 'Red',
+ 'RosyBrown',
+ 'RoyalBlue',
+ 'SaddleBrown',
+ 'Salmon',
+ 'SandyBrown',
+ 'SeaGreen',
+ 'Seashell',
+ 'Sienna',
+ 'Silver',
+ 'SkyBlue',
+ 'SlateBlue',
+ 'SlateGray',
+ 'Snow',
+ 'SpringGreen',
+ 'SteelBlue',
+ 'Tan',
+ 'Teal',
+ 'Thistle',
+ 'Tomato',
+ 'Turquoise',
+ 'Violet',
+ 'Wheat',
+ 'WhiteSmoke',
+ 'White',
+ 'YellowGreen',
+ 'Yellow',
+)
+
+keywords = (
+ 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
+ '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
+ 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
+ 'Volume',
+)
+c_types = (
+ 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
+ 'color', 'long', 'ulong', 'datetime', 'float', 'double',
+ 'string',
+)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_openedge_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_openedge_builtins.py
index 09587e0043..9b4b5c1e4e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_openedge_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_openedge_builtins.py
@@ -1,2547 +1,2547 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._openedge_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the OpenEdgeLexer.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._openedge_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtin list for the OpenEdgeLexer.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-OPENEDGEKEYWORDS = (
- 'ABSOLUTE',
- 'ABS',
- 'ABSO',
- 'ABSOL',
- 'ABSOLU',
- 'ABSOLUT',
- 'ACCELERATOR',
- 'ACCUMULATE',
- 'ACCUM',
- 'ACCUMU',
- 'ACCUMUL',
- 'ACCUMULA',
- 'ACCUMULAT',
- 'ACTIVE-FORM',
- 'ACTIVE-WINDOW',
- 'ADD',
- 'ADD-BUFFER',
- 'ADD-CALC-COLUMN',
- 'ADD-COLUMNS-FROM',
- 'ADD-EVENTS-PROCEDURE',
- 'ADD-FIELDS-FROM',
- 'ADD-FIRST',
- 'ADD-INDEX-FIELD',
- 'ADD-LAST',
- 'ADD-LIKE-COLUMN',
- 'ADD-LIKE-FIELD',
- 'ADD-LIKE-INDEX',
- 'ADD-NEW-FIELD',
- 'ADD-NEW-INDEX',
- 'ADD-SCHEMA-LOCATION',
- 'ADD-SUPER-PROCEDURE',
- 'ADM-DATA',
- 'ADVISE',
- 'ALERT-BOX',
- 'ALIAS',
- 'ALL',
- 'ALLOW-COLUMN-SEARCHING',
- 'ALLOW-REPLICATION',
- 'ALTER',
- 'ALWAYS-ON-TOP',
- 'AMBIGUOUS',
- 'AMBIG',
- 'AMBIGU',
- 'AMBIGUO',
- 'AMBIGUOU',
- 'ANALYZE',
- 'ANALYZ',
- 'AND',
- 'ANSI-ONLY',
- 'ANY',
- 'ANYWHERE',
- 'APPEND',
- 'APPL-ALERT-BOXES',
- 'APPL-ALERT',
- 'APPL-ALERT-',
- 'APPL-ALERT-B',
- 'APPL-ALERT-BO',
- 'APPL-ALERT-BOX',
- 'APPL-ALERT-BOXE',
- 'APPL-CONTEXT-ID',
- 'APPLICATION',
- 'APPLY',
- 'APPSERVER-INFO',
- 'APPSERVER-PASSWORD',
- 'APPSERVER-USERID',
- 'ARRAY-MESSAGE',
- 'AS',
- 'ASC',
- 'ASCENDING',
- 'ASCE',
- 'ASCEN',
- 'ASCEND',
- 'ASCENDI',
- 'ASCENDIN',
- 'ASK-OVERWRITE',
- 'ASSEMBLY',
- 'ASSIGN',
- 'ASYNCHRONOUS',
- 'ASYNC-REQUEST-COUNT',
- 'ASYNC-REQUEST-HANDLE',
- 'AT',
- 'ATTACHED-PAIRLIST',
- 'ATTR-SPACE',
- 'ATTR',
- 'ATTRI',
- 'ATTRIB',
- 'ATTRIBU',
- 'ATTRIBUT',
- 'AUDIT-CONTROL',
- 'AUDIT-ENABLED',
- 'AUDIT-EVENT-CONTEXT',
- 'AUDIT-POLICY',
- 'AUTHENTICATION-FAILED',
- 'AUTHORIZATION',
- 'AUTO-COMPLETION',
- 'AUTO-COMP',
- 'AUTO-COMPL',
- 'AUTO-COMPLE',
- 'AUTO-COMPLET',
- 'AUTO-COMPLETI',
- 'AUTO-COMPLETIO',
- 'AUTO-ENDKEY',
- 'AUTO-END-KEY',
- 'AUTO-GO',
- 'AUTO-INDENT',
- 'AUTO-IND',
- 'AUTO-INDE',
- 'AUTO-INDEN',
- 'AUTOMATIC',
- 'AUTO-RESIZE',
- 'AUTO-RETURN',
- 'AUTO-RET',
- 'AUTO-RETU',
- 'AUTO-RETUR',
- 'AUTO-SYNCHRONIZE',
- 'AUTO-ZAP',
- 'AUTO-Z',
- 'AUTO-ZA',
- 'AVAILABLE',
- 'AVAIL',
- 'AVAILA',
- 'AVAILAB',
- 'AVAILABL',
- 'AVAILABLE-FORMATS',
- 'AVERAGE',
- 'AVE',
- 'AVER',
- 'AVERA',
- 'AVERAG',
- 'AVG',
- 'BACKGROUND',
- 'BACK',
- 'BACKG',
- 'BACKGR',
- 'BACKGRO',
- 'BACKGROU',
- 'BACKGROUN',
- 'BACKWARDS',
- 'BACKWARD',
- 'BASE64-DECODE',
- 'BASE64-ENCODE',
- 'BASE-ADE',
- 'BASE-KEY',
- 'BATCH-MODE',
- 'BATCH',
- 'BATCH-',
- 'BATCH-M',
- 'BATCH-MO',
- 'BATCH-MOD',
- 'BATCH-SIZE',
- 'BEFORE-HIDE',
- 'BEFORE-H',
- 'BEFORE-HI',
- 'BEFORE-HID',
- 'BEGIN-EVENT-GROUP',
- 'BEGINS',
- 'BELL',
- 'BETWEEN',
- 'BGCOLOR',
- 'BGC',
- 'BGCO',
- 'BGCOL',
- 'BGCOLO',
- 'BIG-ENDIAN',
- 'BINARY',
- 'BIND',
- 'BIND-WHERE',
- 'BLANK',
- 'BLOCK-ITERATION-DISPLAY',
- 'BORDER-BOTTOM-CHARS',
- 'BORDER-B',
- 'BORDER-BO',
- 'BORDER-BOT',
- 'BORDER-BOTT',
- 'BORDER-BOTTO',
- 'BORDER-BOTTOM-PIXELS',
- 'BORDER-BOTTOM-P',
- 'BORDER-BOTTOM-PI',
- 'BORDER-BOTTOM-PIX',
- 'BORDER-BOTTOM-PIXE',
- 'BORDER-BOTTOM-PIXEL',
- 'BORDER-LEFT-CHARS',
- 'BORDER-L',
- 'BORDER-LE',
- 'BORDER-LEF',
- 'BORDER-LEFT',
- 'BORDER-LEFT-',
- 'BORDER-LEFT-C',
- 'BORDER-LEFT-CH',
- 'BORDER-LEFT-CHA',
- 'BORDER-LEFT-CHAR',
- 'BORDER-LEFT-PIXELS',
- 'BORDER-LEFT-P',
- 'BORDER-LEFT-PI',
- 'BORDER-LEFT-PIX',
- 'BORDER-LEFT-PIXE',
- 'BORDER-LEFT-PIXEL',
- 'BORDER-RIGHT-CHARS',
- 'BORDER-R',
- 'BORDER-RI',
- 'BORDER-RIG',
- 'BORDER-RIGH',
- 'BORDER-RIGHT',
- 'BORDER-RIGHT-',
- 'BORDER-RIGHT-C',
- 'BORDER-RIGHT-CH',
- 'BORDER-RIGHT-CHA',
- 'BORDER-RIGHT-CHAR',
- 'BORDER-RIGHT-PIXELS',
- 'BORDER-RIGHT-P',
- 'BORDER-RIGHT-PI',
- 'BORDER-RIGHT-PIX',
- 'BORDER-RIGHT-PIXE',
- 'BORDER-RIGHT-PIXEL',
- 'BORDER-TOP-CHARS',
- 'BORDER-T',
- 'BORDER-TO',
- 'BORDER-TOP',
- 'BORDER-TOP-',
- 'BORDER-TOP-C',
- 'BORDER-TOP-CH',
- 'BORDER-TOP-CHA',
- 'BORDER-TOP-CHAR',
- 'BORDER-TOP-PIXELS',
- 'BORDER-TOP-P',
- 'BORDER-TOP-PI',
- 'BORDER-TOP-PIX',
- 'BORDER-TOP-PIXE',
- 'BORDER-TOP-PIXEL',
- 'BOX',
- 'BOX-SELECTABLE',
- 'BOX-SELECT',
- 'BOX-SELECTA',
- 'BOX-SELECTAB',
- 'BOX-SELECTABL',
- 'BREAK',
- 'BROWSE',
- 'BUFFER',
- 'BUFFER-CHARS',
- 'BUFFER-COMPARE',
- 'BUFFER-COPY',
- 'BUFFER-CREATE',
- 'BUFFER-DELETE',
- 'BUFFER-FIELD',
- 'BUFFER-HANDLE',
- 'BUFFER-LINES',
- 'BUFFER-NAME',
- 'BUFFER-RELEASE',
- 'BUFFER-VALUE',
- 'BUTTON',
- 'BUTTONS',
- 'BY',
- 'BY-POINTER',
- 'BY-VARIANT-POINTER',
- 'CACHE',
- 'CACHE-SIZE',
- 'CALL',
- 'CALL-NAME',
- 'CALL-TYPE',
- 'CANCEL-BREAK',
- 'CANCEL-BUTTON',
- 'CAN-CREATE',
- 'CAN-DELETE',
- 'CAN-DO',
- 'CAN-FIND',
- 'CAN-QUERY',
- 'CAN-READ',
- 'CAN-SET',
- 'CAN-WRITE',
- 'CAPS',
- 'CAREFUL-PAINT',
- 'CASE',
- 'CASE-SENSITIVE',
- 'CASE-SEN',
- 'CASE-SENS',
- 'CASE-SENSI',
- 'CASE-SENSIT',
- 'CASE-SENSITI',
- 'CASE-SENSITIV',
- 'CAST',
- 'CATCH',
- 'CDECL',
- 'CENTERED',
- 'CENTER',
- 'CENTERE',
- 'CHAINED',
- 'CHARACTER_LENGTH',
- 'CHARSET',
- 'CHECK',
- 'CHECKED',
- 'CHOOSE',
- 'CHR',
- 'CLASS',
- 'CLASS-TYPE',
- 'CLEAR',
- 'CLEAR-APPL-CONTEXT',
- 'CLEAR-LOG',
- 'CLEAR-SELECTION',
- 'CLEAR-SELECT',
- 'CLEAR-SELECTI',
- 'CLEAR-SELECTIO',
- 'CLEAR-SORT-ARROWS',
- 'CLEAR-SORT-ARROW',
- 'CLIENT-CONNECTION-ID',
- 'CLIENT-PRINCIPAL',
- 'CLIENT-TTY',
- 'CLIENT-TYPE',
- 'CLIENT-WORKSTATION',
- 'CLIPBOARD',
- 'CLOSE',
- 'CLOSE-LOG',
- 'CODE',
- 'CODEBASE-LOCATOR',
- 'CODEPAGE',
- 'CODEPAGE-CONVERT',
- 'COLLATE',
- 'COL-OF',
- 'COLON',
- 'COLON-ALIGNED',
- 'COLON-ALIGN',
- 'COLON-ALIGNE',
- 'COLOR',
- 'COLOR-TABLE',
- 'COLUMN',
- 'COL',
- 'COLU',
- 'COLUM',
- 'COLUMN-BGCOLOR',
- 'COLUMN-DCOLOR',
- 'COLUMN-FGCOLOR',
- 'COLUMN-FONT',
- 'COLUMN-LABEL',
- 'COLUMN-LAB',
- 'COLUMN-LABE',
- 'COLUMN-MOVABLE',
- 'COLUMN-OF',
- 'COLUMN-PFCOLOR',
- 'COLUMN-READ-ONLY',
- 'COLUMN-RESIZABLE',
- 'COLUMNS',
- 'COLUMN-SCROLLING',
- 'COMBO-BOX',
- 'COMMAND',
- 'COMPARES',
- 'COMPILE',
- 'COMPILER',
- 'COMPLETE',
- 'COM-SELF',
- 'CONFIG-NAME',
- 'CONNECT',
- 'CONNECTED',
- 'CONSTRUCTOR',
- 'CONTAINS',
- 'CONTENTS',
- 'CONTEXT',
- 'CONTEXT-HELP',
- 'CONTEXT-HELP-FILE',
- 'CONTEXT-HELP-ID',
- 'CONTEXT-POPUP',
- 'CONTROL',
- 'CONTROL-BOX',
- 'CONTROL-FRAME',
- 'CONVERT',
- 'CONVERT-3D-COLORS',
- 'CONVERT-TO-OFFSET',
- 'CONVERT-TO-OFFS',
- 'CONVERT-TO-OFFSE',
- 'COPY-DATASET',
- 'COPY-LOB',
- 'COPY-SAX-ATTRIBUTES',
- 'COPY-TEMP-TABLE',
- 'COUNT',
- 'COUNT-OF',
- 'CPCASE',
- 'CPCOLL',
- 'CPINTERNAL',
- 'CPLOG',
- 'CPPRINT',
- 'CPRCODEIN',
- 'CPRCODEOUT',
- 'CPSTREAM',
- 'CPTERM',
- 'CRC-VALUE',
- 'CREATE',
- 'CREATE-LIKE',
- 'CREATE-LIKE-SEQUENTIAL',
- 'CREATE-NODE-NAMESPACE',
- 'CREATE-RESULT-LIST-ENTRY',
- 'CREATE-TEST-FILE',
- 'CURRENT',
- 'CURRENT_DATE',
- 'CURRENT-CHANGED',
- 'CURRENT-COLUMN',
- 'CURRENT-ENVIRONMENT',
- 'CURRENT-ENV',
- 'CURRENT-ENVI',
- 'CURRENT-ENVIR',
- 'CURRENT-ENVIRO',
- 'CURRENT-ENVIRON',
- 'CURRENT-ENVIRONM',
- 'CURRENT-ENVIRONME',
- 'CURRENT-ENVIRONMEN',
- 'CURRENT-ITERATION',
- 'CURRENT-LANGUAGE',
- 'CURRENT-LANG',
- 'CURRENT-LANGU',
- 'CURRENT-LANGUA',
- 'CURRENT-LANGUAG',
- 'CURRENT-QUERY',
- 'CURRENT-RESULT-ROW',
- 'CURRENT-ROW-MODIFIED',
- 'CURRENT-VALUE',
- 'CURRENT-WINDOW',
- 'CURSOR',
- 'CURS',
- 'CURSO',
- 'CURSOR-CHAR',
- 'CURSOR-LINE',
- 'CURSOR-OFFSET',
- 'DATABASE',
- 'DATA-BIND',
- 'DATA-ENTRY-RETURN',
- 'DATA-ENTRY-RET',
- 'DATA-ENTRY-RETU',
- 'DATA-ENTRY-RETUR',
- 'DATA-RELATION',
- 'DATA-REL',
- 'DATA-RELA',
- 'DATA-RELAT',
- 'DATA-RELATI',
- 'DATA-RELATIO',
- 'DATASERVERS',
- 'DATASET',
- 'DATASET-HANDLE',
- 'DATA-SOURCE',
- 'DATA-SOURCE-COMPLETE-MAP',
- 'DATA-SOURCE-MODIFIED',
- 'DATA-SOURCE-ROWID',
- 'DATA-TYPE',
- 'DATA-T',
- 'DATA-TY',
- 'DATA-TYP',
- 'DATE-FORMAT',
- 'DATE-F',
- 'DATE-FO',
- 'DATE-FOR',
- 'DATE-FORM',
- 'DATE-FORMA',
- 'DAY',
- 'DBCODEPAGE',
- 'DBCOLLATION',
- 'DBNAME',
- 'DBPARAM',
- 'DB-REFERENCES',
- 'DBRESTRICTIONS',
- 'DBREST',
- 'DBRESTR',
- 'DBRESTRI',
- 'DBRESTRIC',
- 'DBRESTRICT',
- 'DBRESTRICTI',
- 'DBRESTRICTIO',
- 'DBRESTRICTION',
- 'DBTASKID',
- 'DBTYPE',
- 'DBVERSION',
- 'DBVERS',
- 'DBVERSI',
- 'DBVERSIO',
- 'DCOLOR',
- 'DDE',
- 'DDE-ERROR',
- 'DDE-ID',
- 'DDE-I',
- 'DDE-ITEM',
- 'DDE-NAME',
- 'DDE-TOPIC',
- 'DEBLANK',
- 'DEBUG',
- 'DEBU',
- 'DEBUG-ALERT',
- 'DEBUGGER',
- 'DEBUG-LIST',
- 'DECIMALS',
- 'DECLARE',
- 'DECLARE-NAMESPACE',
- 'DECRYPT',
- 'DEFAULT',
- 'DEFAULT-BUFFER-HANDLE',
- 'DEFAULT-BUTTON',
- 'DEFAUT-B',
- 'DEFAUT-BU',
- 'DEFAUT-BUT',
- 'DEFAUT-BUTT',
- 'DEFAUT-BUTTO',
- 'DEFAULT-COMMIT',
- 'DEFAULT-EXTENSION',
- 'DEFAULT-EX',
- 'DEFAULT-EXT',
- 'DEFAULT-EXTE',
- 'DEFAULT-EXTEN',
- 'DEFAULT-EXTENS',
- 'DEFAULT-EXTENSI',
- 'DEFAULT-EXTENSIO',
- 'DEFAULT-NOXLATE',
- 'DEFAULT-NOXL',
- 'DEFAULT-NOXLA',
- 'DEFAULT-NOXLAT',
- 'DEFAULT-VALUE',
- 'DEFAULT-WINDOW',
- 'DEFINED',
- 'DEFINE-USER-EVENT-MANAGER',
- 'DELETE',
- 'DEL',
- 'DELE',
- 'DELET',
- 'DELETE-CHARACTER',
- 'DELETE-CHAR',
- 'DELETE-CHARA',
- 'DELETE-CHARAC',
- 'DELETE-CHARACT',
- 'DELETE-CHARACTE',
- 'DELETE-CURRENT-ROW',
- 'DELETE-LINE',
- 'DELETE-RESULT-LIST-ENTRY',
- 'DELETE-SELECTED-ROW',
- 'DELETE-SELECTED-ROWS',
- 'DELIMITER',
- 'DESC',
- 'DESCENDING',
- 'DESCE',
- 'DESCEN',
- 'DESCEND',
- 'DESCENDI',
- 'DESCENDIN',
- 'DESELECT-FOCUSED-ROW',
- 'DESELECTION',
- 'DESELECT-ROWS',
- 'DESELECT-SELECTED-ROW',
- 'DESTRUCTOR',
- 'DIALOG-BOX',
- 'DICTIONARY',
- 'DICT',
- 'DICTI',
- 'DICTIO',
- 'DICTION',
- 'DICTIONA',
- 'DICTIONAR',
- 'DIR',
- 'DISABLE',
- 'DISABLE-AUTO-ZAP',
- 'DISABLED',
- 'DISABLE-DUMP-TRIGGERS',
- 'DISABLE-LOAD-TRIGGERS',
- 'DISCONNECT',
- 'DISCON',
- 'DISCONN',
- 'DISCONNE',
- 'DISCONNEC',
- 'DISP',
- 'DISPLAY',
- 'DISPL',
- 'DISPLA',
- 'DISPLAY-MESSAGE',
- 'DISPLAY-TYPE',
- 'DISPLAY-T',
- 'DISPLAY-TY',
- 'DISPLAY-TYP',
- 'DISTINCT',
- 'DO',
- 'DOMAIN-DESCRIPTION',
- 'DOMAIN-NAME',
- 'DOMAIN-TYPE',
- 'DOS',
- 'DOUBLE',
- 'DOWN',
- 'DRAG-ENABLED',
- 'DROP',
- 'DROP-DOWN',
- 'DROP-DOWN-LIST',
- 'DROP-FILE-NOTIFY',
- 'DROP-TARGET',
- 'DUMP',
- 'DYNAMIC',
- 'DYNAMIC-FUNCTION',
- 'EACH',
- 'ECHO',
- 'EDGE-CHARS',
- 'EDGE',
- 'EDGE-',
- 'EDGE-C',
- 'EDGE-CH',
- 'EDGE-CHA',
- 'EDGE-CHAR',
- 'EDGE-PIXELS',
- 'EDGE-P',
- 'EDGE-PI',
- 'EDGE-PIX',
- 'EDGE-PIXE',
- 'EDGE-PIXEL',
- 'EDIT-CAN-PASTE',
- 'EDIT-CAN-UNDO',
- 'EDIT-CLEAR',
- 'EDIT-COPY',
- 'EDIT-CUT',
- 'EDITING',
- 'EDITOR',
- 'EDIT-PASTE',
- 'EDIT-UNDO',
- 'ELSE',
- 'EMPTY',
- 'EMPTY-TEMP-TABLE',
- 'ENABLE',
- 'ENABLED-FIELDS',
- 'ENCODE',
- 'ENCRYPT',
- 'ENCRYPT-AUDIT-MAC-KEY',
- 'ENCRYPTION-SALT',
- 'END',
- 'END-DOCUMENT',
- 'END-ELEMENT',
- 'END-EVENT-GROUP',
- 'END-FILE-DROP',
- 'ENDKEY',
- 'END-KEY',
- 'END-MOVE',
- 'END-RESIZE',
- 'END-ROW-RESIZE',
- 'END-USER-PROMPT',
- 'ENTERED',
- 'ENTRY',
- 'EQ',
- 'ERROR',
- 'ERROR-COLUMN',
- 'ERROR-COL',
- 'ERROR-COLU',
- 'ERROR-COLUM',
- 'ERROR-ROW',
- 'ERROR-STACK-TRACE',
- 'ERROR-STATUS',
- 'ERROR-STAT',
- 'ERROR-STATU',
- 'ESCAPE',
- 'ETIME',
- 'EVENT-GROUP-ID',
- 'EVENT-PROCEDURE',
- 'EVENT-PROCEDURE-CONTEXT',
- 'EVENTS',
- 'EVENT',
- 'EVENT-TYPE',
- 'EVENT-T',
- 'EVENT-TY',
- 'EVENT-TYP',
- 'EXCEPT',
- 'EXCLUSIVE-ID',
- 'EXCLUSIVE-LOCK',
- 'EXCLUSIVE',
- 'EXCLUSIVE-',
- 'EXCLUSIVE-L',
- 'EXCLUSIVE-LO',
- 'EXCLUSIVE-LOC',
- 'EXCLUSIVE-WEB-USER',
- 'EXECUTE',
- 'EXISTS',
- 'EXP',
- 'EXPAND',
- 'EXPANDABLE',
- 'EXPLICIT',
- 'EXPORT',
- 'EXPORT-PRINCIPAL',
- 'EXTENDED',
- 'EXTENT',
- 'EXTERNAL',
- 'FALSE',
- 'FETCH',
- 'FETCH-SELECTED-ROW',
- 'FGCOLOR',
- 'FGC',
- 'FGCO',
- 'FGCOL',
- 'FGCOLO',
- 'FIELD',
- 'FIELDS',
- 'FILE',
- 'FILE-CREATE-DATE',
- 'FILE-CREATE-TIME',
- 'FILE-INFORMATION',
- 'FILE-INFO',
- 'FILE-INFOR',
- 'FILE-INFORM',
- 'FILE-INFORMA',
- 'FILE-INFORMAT',
- 'FILE-INFORMATI',
- 'FILE-INFORMATIO',
- 'FILE-MOD-DATE',
- 'FILE-MOD-TIME',
- 'FILENAME',
- 'FILE-NAME',
- 'FILE-OFFSET',
- 'FILE-OFF',
- 'FILE-OFFS',
- 'FILE-OFFSE',
- 'FILE-SIZE',
- 'FILE-TYPE',
- 'FILL',
- 'FILLED',
- 'FILL-IN',
- 'FILTERS',
- 'FINAL',
- 'FINALLY',
- 'FIND',
- 'FIND-BY-ROWID',
- 'FIND-CASE-SENSITIVE',
- 'FIND-CURRENT',
- 'FINDER',
- 'FIND-FIRST',
- 'FIND-GLOBAL',
- 'FIND-LAST',
- 'FIND-NEXT-OCCURRENCE',
- 'FIND-PREV-OCCURRENCE',
- 'FIND-SELECT',
- 'FIND-UNIQUE',
- 'FIND-WRAP-AROUND',
- 'FIRST',
- 'FIRST-ASYNCH-REQUEST',
- 'FIRST-CHILD',
- 'FIRST-COLUMN',
- 'FIRST-FORM',
- 'FIRST-OBJECT',
- 'FIRST-OF',
- 'FIRST-PROCEDURE',
- 'FIRST-PROC',
- 'FIRST-PROCE',
- 'FIRST-PROCED',
- 'FIRST-PROCEDU',
- 'FIRST-PROCEDUR',
- 'FIRST-SERVER',
- 'FIRST-TAB-ITEM',
- 'FIRST-TAB-I',
- 'FIRST-TAB-IT',
- 'FIRST-TAB-ITE',
- 'FIT-LAST-COLUMN',
- 'FIXED-ONLY',
- 'FLAT-BUTTON',
- 'FLOAT',
- 'FOCUS',
- 'FOCUSED-ROW',
- 'FOCUSED-ROW-SELECTED',
- 'FONT',
- 'FONT-TABLE',
- 'FOR',
- 'FORCE-FILE',
- 'FOREGROUND',
- 'FORE',
- 'FOREG',
- 'FOREGR',
- 'FOREGRO',
- 'FOREGROU',
- 'FOREGROUN',
- 'FORM',
- 'FORMAT',
- 'FORMA',
- 'FORMATTED',
- 'FORMATTE',
- 'FORM-LONG-INPUT',
- 'FORWARD',
- 'FORWARDS',
- 'FRAGMENT',
- 'FRAGMEN',
- 'FRAME',
- 'FRAM',
- 'FRAME-COL',
- 'FRAME-DB',
- 'FRAME-DOWN',
- 'FRAME-FIELD',
- 'FRAME-FILE',
- 'FRAME-INDEX',
- 'FRAME-INDE',
- 'FRAME-LINE',
- 'FRAME-NAME',
- 'FRAME-ROW',
- 'FRAME-SPACING',
- 'FRAME-SPA',
- 'FRAME-SPAC',
- 'FRAME-SPACI',
- 'FRAME-SPACIN',
- 'FRAME-VALUE',
- 'FRAME-VAL',
- 'FRAME-VALU',
- 'FRAME-X',
- 'FRAME-Y',
- 'FREQUENCY',
- 'FROM',
- 'FROM-CHARS',
- 'FROM-C',
- 'FROM-CH',
- 'FROM-CHA',
- 'FROM-CHAR',
- 'FROM-CURRENT',
- 'FROM-CUR',
- 'FROM-CURR',
- 'FROM-CURRE',
- 'FROM-CURREN',
- 'FROM-PIXELS',
- 'FROM-P',
- 'FROM-PI',
- 'FROM-PIX',
- 'FROM-PIXE',
- 'FROM-PIXEL',
- 'FULL-HEIGHT-CHARS',
- 'FULL-HEIGHT',
- 'FULL-HEIGHT-',
- 'FULL-HEIGHT-C',
- 'FULL-HEIGHT-CH',
- 'FULL-HEIGHT-CHA',
- 'FULL-HEIGHT-CHAR',
- 'FULL-HEIGHT-PIXELS',
- 'FULL-HEIGHT-P',
- 'FULL-HEIGHT-PI',
- 'FULL-HEIGHT-PIX',
- 'FULL-HEIGHT-PIXE',
- 'FULL-HEIGHT-PIXEL',
- 'FULL-PATHNAME',
- 'FULL-PATHN',
- 'FULL-PATHNA',
- 'FULL-PATHNAM',
- 'FULL-WIDTH-CHARS',
- 'FULL-WIDTH',
- 'FULL-WIDTH-',
- 'FULL-WIDTH-C',
- 'FULL-WIDTH-CH',
- 'FULL-WIDTH-CHA',
- 'FULL-WIDTH-CHAR',
- 'FULL-WIDTH-PIXELS',
- 'FULL-WIDTH-P',
- 'FULL-WIDTH-PI',
- 'FULL-WIDTH-PIX',
- 'FULL-WIDTH-PIXE',
- 'FULL-WIDTH-PIXEL',
- 'FUNCTION',
- 'FUNCTION-CALL-TYPE',
- 'GATEWAYS',
- 'GATEWAY',
- 'GE',
- 'GENERATE-MD5',
- 'GENERATE-PBE-KEY',
- 'GENERATE-PBE-SALT',
- 'GENERATE-RANDOM-KEY',
- 'GENERATE-UUID',
- 'GET',
- 'GET-ATTR-CALL-TYPE',
- 'GET-ATTRIBUTE-NODE',
- 'GET-BINARY-DATA',
- 'GET-BLUE-VALUE',
- 'GET-BLUE',
- 'GET-BLUE-',
- 'GET-BLUE-V',
- 'GET-BLUE-VA',
- 'GET-BLUE-VAL',
- 'GET-BLUE-VALU',
- 'GET-BROWSE-COLUMN',
- 'GET-BUFFER-HANDLEGETBYTE',
- 'GET-BYTE',
- 'GET-CALLBACK-PROC-CONTEXT',
- 'GET-CALLBACK-PROC-NAME',
- 'GET-CGI-LIST',
- 'GET-CGI-LONG-VALUE',
- 'GET-CGI-VALUE',
- 'GET-CODEPAGES',
- 'GET-COLLATIONS',
- 'GET-CONFIG-VALUE',
- 'GET-CURRENT',
- 'GET-DOUBLE',
- 'GET-DROPPED-FILE',
- 'GET-DYNAMIC',
- 'GET-ERROR-COLUMN',
- 'GET-ERROR-ROW',
- 'GET-FILE',
- 'GET-FILE-NAME',
- 'GET-FILE-OFFSET',
- 'GET-FILE-OFFSE',
- 'GET-FIRST',
- 'GET-FLOAT',
- 'GET-GREEN-VALUE',
- 'GET-GREEN',
- 'GET-GREEN-',
- 'GET-GREEN-V',
- 'GET-GREEN-VA',
- 'GET-GREEN-VAL',
- 'GET-GREEN-VALU',
- 'GET-INDEX-BY-NAMESPACE-NAME',
- 'GET-INDEX-BY-QNAME',
- 'GET-INT64',
- 'GET-ITERATION',
- 'GET-KEY-VALUE',
- 'GET-KEY-VAL',
- 'GET-KEY-VALU',
- 'GET-LAST',
- 'GET-LOCALNAME-BY-INDEX',
- 'GET-LONG',
- 'GET-MESSAGE',
- 'GET-NEXT',
- 'GET-NUMBER',
- 'GET-POINTER-VALUE',
- 'GET-PREV',
- 'GET-PRINTERS',
- 'GET-PROPERTY',
- 'GET-QNAME-BY-INDEX',
- 'GET-RED-VALUE',
- 'GET-RED',
- 'GET-RED-',
- 'GET-RED-V',
- 'GET-RED-VA',
- 'GET-RED-VAL',
- 'GET-RED-VALU',
- 'GET-REPOSITIONED-ROW',
- 'GET-RGB-VALUE',
- 'GET-SELECTED-WIDGET',
- 'GET-SELECTED',
- 'GET-SELECTED-',
- 'GET-SELECTED-W',
- 'GET-SELECTED-WI',
- 'GET-SELECTED-WID',
- 'GET-SELECTED-WIDG',
- 'GET-SELECTED-WIDGE',
- 'GET-SHORT',
- 'GET-SIGNATURE',
- 'GET-SIZE',
- 'GET-STRING',
- 'GET-TAB-ITEM',
- 'GET-TEXT-HEIGHT-CHARS',
- 'GET-TEXT-HEIGHT',
- 'GET-TEXT-HEIGHT-',
- 'GET-TEXT-HEIGHT-C',
- 'GET-TEXT-HEIGHT-CH',
- 'GET-TEXT-HEIGHT-CHA',
- 'GET-TEXT-HEIGHT-CHAR',
- 'GET-TEXT-HEIGHT-PIXELS',
- 'GET-TEXT-HEIGHT-P',
- 'GET-TEXT-HEIGHT-PI',
- 'GET-TEXT-HEIGHT-PIX',
- 'GET-TEXT-HEIGHT-PIXE',
- 'GET-TEXT-HEIGHT-PIXEL',
- 'GET-TEXT-WIDTH-CHARS',
- 'GET-TEXT-WIDTH',
- 'GET-TEXT-WIDTH-',
- 'GET-TEXT-WIDTH-C',
- 'GET-TEXT-WIDTH-CH',
- 'GET-TEXT-WIDTH-CHA',
- 'GET-TEXT-WIDTH-CHAR',
- 'GET-TEXT-WIDTH-PIXELS',
- 'GET-TEXT-WIDTH-P',
- 'GET-TEXT-WIDTH-PI',
- 'GET-TEXT-WIDTH-PIX',
- 'GET-TEXT-WIDTH-PIXE',
- 'GET-TEXT-WIDTH-PIXEL',
- 'GET-TYPE-BY-INDEX',
- 'GET-TYPE-BY-NAMESPACE-NAME',
- 'GET-TYPE-BY-QNAME',
- 'GET-UNSIGNED-LONG',
- 'GET-UNSIGNED-SHORT',
- 'GET-URI-BY-INDEX',
- 'GET-VALUE-BY-INDEX',
- 'GET-VALUE-BY-NAMESPACE-NAME',
- 'GET-VALUE-BY-QNAME',
- 'GET-WAIT-STATE',
- 'GLOBAL',
- 'GO-ON',
- 'GO-PENDING',
- 'GO-PEND',
- 'GO-PENDI',
- 'GO-PENDIN',
- 'GRANT',
- 'GRAPHIC-EDGE',
- 'GRAPHIC-E',
- 'GRAPHIC-ED',
- 'GRAPHIC-EDG',
- 'GRID-FACTOR-HORIZONTAL',
- 'GRID-FACTOR-H',
- 'GRID-FACTOR-HO',
- 'GRID-FACTOR-HOR',
- 'GRID-FACTOR-HORI',
- 'GRID-FACTOR-HORIZ',
- 'GRID-FACTOR-HORIZO',
- 'GRID-FACTOR-HORIZON',
- 'GRID-FACTOR-HORIZONT',
- 'GRID-FACTOR-HORIZONTA',
- 'GRID-FACTOR-VERTICAL',
- 'GRID-FACTOR-V',
- 'GRID-FACTOR-VE',
- 'GRID-FACTOR-VER',
- 'GRID-FACTOR-VERT',
- 'GRID-FACTOR-VERTI',
- 'GRID-FACTOR-VERTIC',
- 'GRID-FACTOR-VERTICA',
- 'GRID-SNAP',
- 'GRID-UNIT-HEIGHT-CHARS',
- 'GRID-UNIT-HEIGHT',
- 'GRID-UNIT-HEIGHT-',
- 'GRID-UNIT-HEIGHT-C',
- 'GRID-UNIT-HEIGHT-CH',
- 'GRID-UNIT-HEIGHT-CHA',
- 'GRID-UNIT-HEIGHT-PIXELS',
- 'GRID-UNIT-HEIGHT-P',
- 'GRID-UNIT-HEIGHT-PI',
- 'GRID-UNIT-HEIGHT-PIX',
- 'GRID-UNIT-HEIGHT-PIXE',
- 'GRID-UNIT-HEIGHT-PIXEL',
- 'GRID-UNIT-WIDTH-CHARS',
- 'GRID-UNIT-WIDTH',
- 'GRID-UNIT-WIDTH-',
- 'GRID-UNIT-WIDTH-C',
- 'GRID-UNIT-WIDTH-CH',
- 'GRID-UNIT-WIDTH-CHA',
- 'GRID-UNIT-WIDTH-CHAR',
- 'GRID-UNIT-WIDTH-PIXELS',
- 'GRID-UNIT-WIDTH-P',
- 'GRID-UNIT-WIDTH-PI',
- 'GRID-UNIT-WIDTH-PIX',
- 'GRID-UNIT-WIDTH-PIXE',
- 'GRID-UNIT-WIDTH-PIXEL',
- 'GRID-VISIBLE',
- 'GROUP',
- 'GT',
- 'GUID',
- 'HANDLER',
- 'HAS-RECORDS',
- 'HAVING',
- 'HEADER',
- 'HEIGHT-CHARS',
- 'HEIGHT',
- 'HEIGHT-',
- 'HEIGHT-C',
- 'HEIGHT-CH',
- 'HEIGHT-CHA',
- 'HEIGHT-CHAR',
- 'HEIGHT-PIXELS',
- 'HEIGHT-P',
- 'HEIGHT-PI',
- 'HEIGHT-PIX',
- 'HEIGHT-PIXE',
- 'HEIGHT-PIXEL',
- 'HELP',
- 'HEX-DECODE',
- 'HEX-ENCODE',
- 'HIDDEN',
- 'HIDE',
- 'HORIZONTAL',
- 'HORI',
- 'HORIZ',
- 'HORIZO',
- 'HORIZON',
- 'HORIZONT',
- 'HORIZONTA',
- 'HOST-BYTE-ORDER',
- 'HTML-CHARSET',
- 'HTML-END-OF-LINE',
- 'HTML-END-OF-PAGE',
- 'HTML-FRAME-BEGIN',
- 'HTML-FRAME-END',
- 'HTML-HEADER-BEGIN',
- 'HTML-HEADER-END',
- 'HTML-TITLE-BEGIN',
- 'HTML-TITLE-END',
- 'HWND',
- 'ICON',
- 'IF',
- 'IMAGE',
- 'IMAGE-DOWN',
- 'IMAGE-INSENSITIVE',
- 'IMAGE-SIZE',
- 'IMAGE-SIZE-CHARS',
- 'IMAGE-SIZE-C',
- 'IMAGE-SIZE-CH',
- 'IMAGE-SIZE-CHA',
- 'IMAGE-SIZE-CHAR',
- 'IMAGE-SIZE-PIXELS',
- 'IMAGE-SIZE-P',
- 'IMAGE-SIZE-PI',
- 'IMAGE-SIZE-PIX',
- 'IMAGE-SIZE-PIXE',
- 'IMAGE-SIZE-PIXEL',
- 'IMAGE-UP',
- 'IMMEDIATE-DISPLAY',
- 'IMPLEMENTS',
- 'IMPORT',
- 'IMPORT-PRINCIPAL',
- 'IN',
- 'INCREMENT-EXCLUSIVE-ID',
- 'INDEX',
- 'INDEXED-REPOSITION',
- 'INDEX-HINT',
- 'INDEX-INFORMATION',
- 'INDICATOR',
- 'INFORMATION',
- 'INFO',
- 'INFOR',
- 'INFORM',
- 'INFORMA',
- 'INFORMAT',
- 'INFORMATI',
- 'INFORMATIO',
- 'IN-HANDLE',
- 'INHERIT-BGCOLOR',
- 'INHERIT-BGC',
- 'INHERIT-BGCO',
- 'INHERIT-BGCOL',
- 'INHERIT-BGCOLO',
- 'INHERIT-FGCOLOR',
- 'INHERIT-FGC',
- 'INHERIT-FGCO',
- 'INHERIT-FGCOL',
- 'INHERIT-FGCOLO',
- 'INHERITS',
- 'INITIAL',
- 'INIT',
- 'INITI',
- 'INITIA',
- 'INITIAL-DIR',
- 'INITIAL-FILTER',
- 'INITIALIZE-DOCUMENT-TYPE',
- 'INITIATE',
- 'INNER-CHARS',
- 'INNER-LINES',
- 'INPUT',
- 'INPUT-OUTPUT',
- 'INPUT-O',
- 'INPUT-OU',
- 'INPUT-OUT',
- 'INPUT-OUTP',
- 'INPUT-OUTPU',
- 'INPUT-VALUE',
- 'INSERT',
- 'INSERT-ATTRIBUTE',
- 'INSERT-BACKTAB',
- 'INSERT-B',
- 'INSERT-BA',
- 'INSERT-BAC',
- 'INSERT-BACK',
- 'INSERT-BACKT',
- 'INSERT-BACKTA',
- 'INSERT-FILE',
- 'INSERT-ROW',
- 'INSERT-STRING',
- 'INSERT-TAB',
- 'INSERT-T',
- 'INSERT-TA',
- 'INTERFACE',
- 'INTERNAL-ENTRIES',
- 'INTO',
- 'INVOKE',
- 'IS',
- 'IS-ATTR-SPACE',
- 'IS-ATTR',
- 'IS-ATTR-',
- 'IS-ATTR-S',
- 'IS-ATTR-SP',
- 'IS-ATTR-SPA',
- 'IS-ATTR-SPAC',
- 'IS-CLASS',
- 'IS-CLAS',
- 'IS-LEAD-BYTE',
- 'IS-OPEN',
- 'IS-PARAMETER-SET',
- 'IS-ROW-SELECTED',
- 'IS-SELECTED',
- 'ITEM',
- 'ITEMS-PER-ROW',
- 'JOIN',
- 'JOIN-BY-SQLDB',
- 'KBLABEL',
- 'KEEP-CONNECTION-OPEN',
- 'KEEP-FRAME-Z-ORDER',
- 'KEEP-FRAME-Z',
- 'KEEP-FRAME-Z-',
- 'KEEP-FRAME-Z-O',
- 'KEEP-FRAME-Z-OR',
- 'KEEP-FRAME-Z-ORD',
- 'KEEP-FRAME-Z-ORDE',
- 'KEEP-MESSAGES',
- 'KEEP-SECURITY-CACHE',
- 'KEEP-TAB-ORDER',
- 'KEY',
- 'KEYCODE',
- 'KEY-CODE',
- 'KEYFUNCTION',
- 'KEYFUNC',
- 'KEYFUNCT',
- 'KEYFUNCTI',
- 'KEYFUNCTIO',
- 'KEY-FUNCTION',
- 'KEY-FUNC',
- 'KEY-FUNCT',
- 'KEY-FUNCTI',
- 'KEY-FUNCTIO',
- 'KEYLABEL',
- 'KEY-LABEL',
- 'KEYS',
- 'KEYWORD',
- 'KEYWORD-ALL',
- 'LABEL',
- 'LABEL-BGCOLOR',
- 'LABEL-BGC',
- 'LABEL-BGCO',
- 'LABEL-BGCOL',
- 'LABEL-BGCOLO',
- 'LABEL-DCOLOR',
- 'LABEL-DC',
- 'LABEL-DCO',
- 'LABEL-DCOL',
- 'LABEL-DCOLO',
- 'LABEL-FGCOLOR',
- 'LABEL-FGC',
- 'LABEL-FGCO',
- 'LABEL-FGCOL',
- 'LABEL-FGCOLO',
- 'LABEL-FONT',
- 'LABEL-PFCOLOR',
- 'LABEL-PFC',
- 'LABEL-PFCO',
- 'LABEL-PFCOL',
- 'LABEL-PFCOLO',
- 'LABELS',
- 'LANDSCAPE',
- 'LANGUAGES',
- 'LANGUAGE',
- 'LARGE',
- 'LARGE-TO-SMALL',
- 'LAST',
- 'LAST-ASYNCH-REQUEST',
- 'LAST-BATCH',
- 'LAST-CHILD',
- 'LAST-EVENT',
- 'LAST-EVEN',
- 'LAST-FORM',
- 'LASTKEY',
- 'LAST-KEY',
- 'LAST-OBJECT',
- 'LAST-OF',
- 'LAST-PROCEDURE',
- 'LAST-PROCE',
- 'LAST-PROCED',
- 'LAST-PROCEDU',
- 'LAST-PROCEDUR',
- 'LAST-SERVER',
- 'LAST-TAB-ITEM',
- 'LAST-TAB-I',
- 'LAST-TAB-IT',
- 'LAST-TAB-ITE',
- 'LC',
- 'LDBNAME',
- 'LE',
- 'LEAVE',
- 'LEFT-ALIGNED',
- 'LEFT-ALIGN',
- 'LEFT-ALIGNE',
- 'LEFT-TRIM',
- 'LENGTH',
- 'LIBRARY',
- 'LIKE',
- 'LIKE-SEQUENTIAL',
- 'LINE',
- 'LINE-COUNTER',
- 'LINE-COUNT',
- 'LINE-COUNTE',
- 'LIST-EVENTS',
- 'LISTING',
- 'LISTI',
- 'LISTIN',
- 'LIST-ITEM-PAIRS',
- 'LIST-ITEMS',
- 'LIST-PROPERTY-NAMES',
- 'LIST-QUERY-ATTRS',
- 'LIST-SET-ATTRS',
- 'LIST-WIDGETS',
- 'LITERAL-QUESTION',
- 'LITTLE-ENDIAN',
- 'LOAD',
- 'LOAD-DOMAINS',
- 'LOAD-ICON',
- 'LOAD-IMAGE',
- 'LOAD-IMAGE-DOWN',
- 'LOAD-IMAGE-INSENSITIVE',
- 'LOAD-IMAGE-UP',
- 'LOAD-MOUSE-POINTER',
- 'LOAD-MOUSE-P',
- 'LOAD-MOUSE-PO',
- 'LOAD-MOUSE-POI',
- 'LOAD-MOUSE-POIN',
- 'LOAD-MOUSE-POINT',
- 'LOAD-MOUSE-POINTE',
- 'LOAD-PICTURE',
- 'LOAD-SMALL-ICON',
- 'LOCAL-NAME',
- 'LOCATOR-COLUMN-NUMBER',
- 'LOCATOR-LINE-NUMBER',
- 'LOCATOR-PUBLIC-ID',
- 'LOCATOR-SYSTEM-ID',
- 'LOCATOR-TYPE',
- 'LOCKED',
- 'LOCK-REGISTRATION',
- 'LOG',
- 'LOG-AUDIT-EVENT',
- 'LOGIN-EXPIRATION-TIMESTAMP',
- 'LOGIN-HOST',
- 'LOGIN-STATE',
- 'LOG-MANAGER',
- 'LOGOUT',
- 'LOOKAHEAD',
- 'LOOKUP',
- 'LT',
- 'MACHINE-CLASS',
- 'MANDATORY',
- 'MANUAL-HIGHLIGHT',
- 'MAP',
- 'MARGIN-EXTRA',
- 'MARGIN-HEIGHT-CHARS',
- 'MARGIN-HEIGHT',
- 'MARGIN-HEIGHT-',
- 'MARGIN-HEIGHT-C',
- 'MARGIN-HEIGHT-CH',
- 'MARGIN-HEIGHT-CHA',
- 'MARGIN-HEIGHT-CHAR',
- 'MARGIN-HEIGHT-PIXELS',
- 'MARGIN-HEIGHT-P',
- 'MARGIN-HEIGHT-PI',
- 'MARGIN-HEIGHT-PIX',
- 'MARGIN-HEIGHT-PIXE',
- 'MARGIN-HEIGHT-PIXEL',
- 'MARGIN-WIDTH-CHARS',
- 'MARGIN-WIDTH',
- 'MARGIN-WIDTH-',
- 'MARGIN-WIDTH-C',
- 'MARGIN-WIDTH-CH',
- 'MARGIN-WIDTH-CHA',
- 'MARGIN-WIDTH-CHAR',
- 'MARGIN-WIDTH-PIXELS',
- 'MARGIN-WIDTH-P',
- 'MARGIN-WIDTH-PI',
- 'MARGIN-WIDTH-PIX',
- 'MARGIN-WIDTH-PIXE',
- 'MARGIN-WIDTH-PIXEL',
- 'MARK-NEW',
- 'MARK-ROW-STATE',
- 'MATCHES',
- 'MAX-BUTTON',
- 'MAX-CHARS',
- 'MAX-DATA-GUESS',
- 'MAX-HEIGHT',
- 'MAX-HEIGHT-CHARS',
- 'MAX-HEIGHT-C',
- 'MAX-HEIGHT-CH',
- 'MAX-HEIGHT-CHA',
- 'MAX-HEIGHT-CHAR',
- 'MAX-HEIGHT-PIXELS',
- 'MAX-HEIGHT-P',
- 'MAX-HEIGHT-PI',
- 'MAX-HEIGHT-PIX',
- 'MAX-HEIGHT-PIXE',
- 'MAX-HEIGHT-PIXEL',
- 'MAXIMIZE',
- 'MAXIMUM',
- 'MAX',
- 'MAXI',
- 'MAXIM',
- 'MAXIMU',
- 'MAXIMUM-LEVEL',
- 'MAX-ROWS',
- 'MAX-SIZE',
- 'MAX-VALUE',
- 'MAX-VAL',
- 'MAX-VALU',
- 'MAX-WIDTH-CHARS',
- 'MAX-WIDTH',
- 'MAX-WIDTH-',
- 'MAX-WIDTH-C',
- 'MAX-WIDTH-CH',
- 'MAX-WIDTH-CHA',
- 'MAX-WIDTH-CHAR',
- 'MAX-WIDTH-PIXELS',
- 'MAX-WIDTH-P',
- 'MAX-WIDTH-PI',
- 'MAX-WIDTH-PIX',
- 'MAX-WIDTH-PIXE',
- 'MAX-WIDTH-PIXEL',
- 'MD5-DIGEST',
- 'MEMBER',
- 'MEMPTR-TO-NODE-VALUE',
- 'MENU',
- 'MENUBAR',
- 'MENU-BAR',
- 'MENU-ITEM',
- 'MENU-KEY',
- 'MENU-K',
- 'MENU-KE',
- 'MENU-MOUSE',
- 'MENU-M',
- 'MENU-MO',
- 'MENU-MOU',
- 'MENU-MOUS',
- 'MERGE-BY-FIELD',
- 'MESSAGE',
- 'MESSAGE-AREA',
- 'MESSAGE-AREA-FONT',
- 'MESSAGE-LINES',
- 'METHOD',
- 'MIN-BUTTON',
- 'MIN-COLUMN-WIDTH-CHARS',
- 'MIN-COLUMN-WIDTH-C',
- 'MIN-COLUMN-WIDTH-CH',
- 'MIN-COLUMN-WIDTH-CHA',
- 'MIN-COLUMN-WIDTH-CHAR',
- 'MIN-COLUMN-WIDTH-PIXELS',
- 'MIN-COLUMN-WIDTH-P',
- 'MIN-COLUMN-WIDTH-PI',
- 'MIN-COLUMN-WIDTH-PIX',
- 'MIN-COLUMN-WIDTH-PIXE',
- 'MIN-COLUMN-WIDTH-PIXEL',
- 'MIN-HEIGHT-CHARS',
- 'MIN-HEIGHT',
- 'MIN-HEIGHT-',
- 'MIN-HEIGHT-C',
- 'MIN-HEIGHT-CH',
- 'MIN-HEIGHT-CHA',
- 'MIN-HEIGHT-CHAR',
- 'MIN-HEIGHT-PIXELS',
- 'MIN-HEIGHT-P',
- 'MIN-HEIGHT-PI',
- 'MIN-HEIGHT-PIX',
- 'MIN-HEIGHT-PIXE',
- 'MIN-HEIGHT-PIXEL',
- 'MINIMUM',
- 'MIN',
- 'MINI',
- 'MINIM',
- 'MINIMU',
- 'MIN-SIZE',
- 'MIN-VALUE',
- 'MIN-VAL',
- 'MIN-VALU',
- 'MIN-WIDTH-CHARS',
- 'MIN-WIDTH',
- 'MIN-WIDTH-',
- 'MIN-WIDTH-C',
- 'MIN-WIDTH-CH',
- 'MIN-WIDTH-CHA',
- 'MIN-WIDTH-CHAR',
- 'MIN-WIDTH-PIXELS',
- 'MIN-WIDTH-P',
- 'MIN-WIDTH-PI',
- 'MIN-WIDTH-PIX',
- 'MIN-WIDTH-PIXE',
- 'MIN-WIDTH-PIXEL',
- 'MODIFIED',
- 'MODULO',
- 'MOD',
- 'MODU',
- 'MODUL',
- 'MONTH',
- 'MOUSE',
- 'MOUSE-POINTER',
- 'MOUSE-P',
- 'MOUSE-PO',
- 'MOUSE-POI',
- 'MOUSE-POIN',
- 'MOUSE-POINT',
- 'MOUSE-POINTE',
- 'MOVABLE',
- 'MOVE-AFTER-TAB-ITEM',
- 'MOVE-AFTER',
- 'MOVE-AFTER-',
- 'MOVE-AFTER-T',
- 'MOVE-AFTER-TA',
- 'MOVE-AFTER-TAB',
- 'MOVE-AFTER-TAB-',
- 'MOVE-AFTER-TAB-I',
- 'MOVE-AFTER-TAB-IT',
- 'MOVE-AFTER-TAB-ITE',
- 'MOVE-BEFORE-TAB-ITEM',
- 'MOVE-BEFOR',
- 'MOVE-BEFORE',
- 'MOVE-BEFORE-',
- 'MOVE-BEFORE-T',
- 'MOVE-BEFORE-TA',
- 'MOVE-BEFORE-TAB',
- 'MOVE-BEFORE-TAB-',
- 'MOVE-BEFORE-TAB-I',
- 'MOVE-BEFORE-TAB-IT',
- 'MOVE-BEFORE-TAB-ITE',
- 'MOVE-COLUMN',
- 'MOVE-COL',
- 'MOVE-COLU',
- 'MOVE-COLUM',
- 'MOVE-TO-BOTTOM',
- 'MOVE-TO-B',
- 'MOVE-TO-BO',
- 'MOVE-TO-BOT',
- 'MOVE-TO-BOTT',
- 'MOVE-TO-BOTTO',
- 'MOVE-TO-EOF',
- 'MOVE-TO-TOP',
- 'MOVE-TO-T',
- 'MOVE-TO-TO',
- 'MPE',
- 'MULTI-COMPILE',
- 'MULTIPLE',
- 'MULTIPLE-KEY',
- 'MULTITASKING-INTERVAL',
- 'MUST-EXIST',
- 'NAME',
- 'NAMESPACE-PREFIX',
- 'NAMESPACE-URI',
- 'NATIVE',
- 'NE',
- 'NEEDS-APPSERVER-PROMPT',
- 'NEEDS-PROMPT',
- 'NEW',
- 'NEW-INSTANCE',
- 'NEW-ROW',
- 'NEXT',
- 'NEXT-COLUMN',
- 'NEXT-PROMPT',
- 'NEXT-ROWID',
- 'NEXT-SIBLING',
- 'NEXT-TAB-ITEM',
- 'NEXT-TAB-I',
- 'NEXT-TAB-IT',
- 'NEXT-TAB-ITE',
- 'NEXT-VALUE',
- 'NO',
- 'NO-APPLY',
- 'NO-ARRAY-MESSAGE',
- 'NO-ASSIGN',
- 'NO-ATTR-LIST',
- 'NO-ATTR',
- 'NO-ATTR-',
- 'NO-ATTR-L',
- 'NO-ATTR-LI',
- 'NO-ATTR-LIS',
- 'NO-ATTR-SPACE',
- 'NO-ATTR-S',
- 'NO-ATTR-SP',
- 'NO-ATTR-SPA',
- 'NO-ATTR-SPAC',
- 'NO-AUTO-VALIDATE',
- 'NO-BIND-WHERE',
- 'NO-BOX',
- 'NO-CONSOLE',
- 'NO-CONVERT',
- 'NO-CONVERT-3D-COLORS',
- 'NO-CURRENT-VALUE',
- 'NO-DEBUG',
- 'NODE-VALUE-TO-MEMPTR',
- 'NO-DRAG',
- 'NO-ECHO',
- 'NO-EMPTY-SPACE',
- 'NO-ERROR',
- 'NO-FILL',
- 'NO-F',
- 'NO-FI',
- 'NO-FIL',
- 'NO-FOCUS',
- 'NO-HELP',
- 'NO-HIDE',
- 'NO-INDEX-HINT',
- 'NO-INHERIT-BGCOLOR',
- 'NO-INHERIT-BGC',
- 'NO-INHERIT-BGCO',
- 'NO-INHERIT-FGCOLOR',
- 'NO-INHERIT-FGC',
- 'NO-INHERIT-FGCO',
- 'NO-INHERIT-FGCOL',
- 'NO-INHERIT-FGCOLO',
- 'NO-JOIN-BY-SQLDB',
- 'NO-LABELS',
- 'NO-LABE',
- 'NO-LOBS',
- 'NO-LOCK',
- 'NO-LOOKAHEAD',
- 'NO-MAP',
- 'NO-MESSAGE',
- 'NO-MES',
- 'NO-MESS',
- 'NO-MESSA',
- 'NO-MESSAG',
- 'NONAMESPACE-SCHEMA-LOCATION',
- 'NONE',
- 'NO-PAUSE',
- 'NO-PREFETCH',
- 'NO-PREFE',
- 'NO-PREFET',
- 'NO-PREFETC',
- 'NORMALIZE',
- 'NO-ROW-MARKERS',
- 'NO-SCROLLBAR-VERTICAL',
- 'NO-SEPARATE-CONNECTION',
- 'NO-SEPARATORS',
- 'NOT',
- 'NO-TAB-STOP',
- 'NOT-ACTIVE',
- 'NO-UNDERLINE',
- 'NO-UND',
- 'NO-UNDE',
- 'NO-UNDER',
- 'NO-UNDERL',
- 'NO-UNDERLI',
- 'NO-UNDERLIN',
- 'NO-UNDO',
- 'NO-VALIDATE',
- 'NO-VAL',
- 'NO-VALI',
- 'NO-VALID',
- 'NO-VALIDA',
- 'NO-VALIDAT',
- 'NOW',
- 'NO-WAIT',
- 'NO-WORD-WRAP',
- 'NULL',
- 'NUM-ALIASES',
- 'NUM-ALI',
- 'NUM-ALIA',
- 'NUM-ALIAS',
- 'NUM-ALIASE',
- 'NUM-BUFFERS',
- 'NUM-BUTTONS',
- 'NUM-BUT',
- 'NUM-BUTT',
- 'NUM-BUTTO',
- 'NUM-BUTTON',
- 'NUM-COLUMNS',
- 'NUM-COL',
- 'NUM-COLU',
- 'NUM-COLUM',
- 'NUM-COLUMN',
- 'NUM-COPIES',
- 'NUM-DBS',
- 'NUM-DROPPED-FILES',
- 'NUM-ENTRIES',
- 'NUMERIC',
- 'NUMERIC-FORMAT',
- 'NUMERIC-F',
- 'NUMERIC-FO',
- 'NUMERIC-FOR',
- 'NUMERIC-FORM',
- 'NUMERIC-FORMA',
- 'NUM-FIELDS',
- 'NUM-FORMATS',
- 'NUM-ITEMS',
- 'NUM-ITERATIONS',
- 'NUM-LINES',
- 'NUM-LOCKED-COLUMNS',
- 'NUM-LOCKED-COL',
- 'NUM-LOCKED-COLU',
- 'NUM-LOCKED-COLUM',
- 'NUM-LOCKED-COLUMN',
- 'NUM-MESSAGES',
- 'NUM-PARAMETERS',
- 'NUM-REFERENCES',
- 'NUM-REPLACED',
- 'NUM-RESULTS',
- 'NUM-SELECTED-ROWS',
- 'NUM-SELECTED-WIDGETS',
- 'NUM-SELECTED',
- 'NUM-SELECTED-',
- 'NUM-SELECTED-W',
- 'NUM-SELECTED-WI',
- 'NUM-SELECTED-WID',
- 'NUM-SELECTED-WIDG',
- 'NUM-SELECTED-WIDGE',
- 'NUM-SELECTED-WIDGET',
- 'NUM-TABS',
- 'NUM-TO-RETAIN',
- 'NUM-VISIBLE-COLUMNS',
- 'OCTET-LENGTH',
- 'OF',
- 'OFF',
- 'OK',
- 'OK-CANCEL',
- 'OLD',
- 'ON',
- 'ON-FRAME-BORDER',
- 'ON-FRAME',
- 'ON-FRAME-',
- 'ON-FRAME-B',
- 'ON-FRAME-BO',
- 'ON-FRAME-BOR',
- 'ON-FRAME-BORD',
- 'ON-FRAME-BORDE',
- 'OPEN',
- 'OPSYS',
- 'OPTION',
- 'OR',
- 'ORDERED-JOIN',
- 'ORDINAL',
- 'OS-APPEND',
- 'OS-COMMAND',
- 'OS-COPY',
- 'OS-CREATE-DIR',
- 'OS-DELETE',
- 'OS-DIR',
- 'OS-DRIVES',
- 'OS-DRIVE',
- 'OS-ERROR',
- 'OS-GETENV',
- 'OS-RENAME',
- 'OTHERWISE',
- 'OUTPUT',
- 'OVERLAY',
- 'OVERRIDE',
- 'OWNER',
- 'PAGE',
- 'PAGE-BOTTOM',
- 'PAGE-BOT',
- 'PAGE-BOTT',
- 'PAGE-BOTTO',
- 'PAGED',
- 'PAGE-NUMBER',
- 'PAGE-NUM',
- 'PAGE-NUMB',
- 'PAGE-NUMBE',
- 'PAGE-SIZE',
- 'PAGE-TOP',
- 'PAGE-WIDTH',
- 'PAGE-WID',
- 'PAGE-WIDT',
- 'PARAMETER',
- 'PARAM',
- 'PARAME',
- 'PARAMET',
- 'PARAMETE',
- 'PARENT',
- 'PARSE-STATUS',
- 'PARTIAL-KEY',
- 'PASCAL',
- 'PASSWORD-FIELD',
- 'PATHNAME',
- 'PAUSE',
- 'PBE-HASH-ALGORITHM',
- 'PBE-HASH-ALG',
- 'PBE-HASH-ALGO',
- 'PBE-HASH-ALGOR',
- 'PBE-HASH-ALGORI',
- 'PBE-HASH-ALGORIT',
- 'PBE-HASH-ALGORITH',
- 'PBE-KEY-ROUNDS',
- 'PDBNAME',
- 'PERSISTENT',
- 'PERSIST',
- 'PERSISTE',
- 'PERSISTEN',
- 'PERSISTENT-CACHE-DISABLED',
- 'PFCOLOR',
- 'PFC',
- 'PFCO',
- 'PFCOL',
- 'PFCOLO',
- 'PIXELS',
- 'PIXELS-PER-COLUMN',
- 'PIXELS-PER-COL',
- 'PIXELS-PER-COLU',
- 'PIXELS-PER-COLUM',
- 'PIXELS-PER-ROW',
- 'POPUP-MENU',
- 'POPUP-M',
- 'POPUP-ME',
- 'POPUP-MEN',
- 'POPUP-ONLY',
- 'POPUP-O',
- 'POPUP-ON',
- 'POPUP-ONL',
- 'PORTRAIT',
- 'POSITION',
- 'PRECISION',
- 'PREFER-DATASET',
- 'PREPARED',
- 'PREPARE-STRING',
- 'PREPROCESS',
- 'PREPROC',
- 'PREPROCE',
- 'PREPROCES',
- 'PRESELECT',
- 'PRESEL',
- 'PRESELE',
- 'PRESELEC',
- 'PREV',
- 'PREV-COLUMN',
- 'PREV-SIBLING',
- 'PREV-TAB-ITEM',
- 'PREV-TAB-I',
- 'PREV-TAB-IT',
- 'PREV-TAB-ITE',
- 'PRIMARY',
- 'PRINTER',
- 'PRINTER-CONTROL-HANDLE',
- 'PRINTER-HDC',
- 'PRINTER-NAME',
- 'PRINTER-PORT',
- 'PRINTER-SETUP',
- 'PRIVATE',
- 'PRIVATE-DATA',
- 'PRIVATE-D',
- 'PRIVATE-DA',
- 'PRIVATE-DAT',
- 'PRIVILEGES',
- 'PROCEDURE',
- 'PROCE',
- 'PROCED',
- 'PROCEDU',
- 'PROCEDUR',
- 'PROCEDURE-CALL-TYPE',
- 'PROCESS',
- 'PROC-HANDLE',
- 'PROC-HA',
- 'PROC-HAN',
- 'PROC-HAND',
- 'PROC-HANDL',
- 'PROC-STATUS',
- 'PROC-ST',
- 'PROC-STA',
- 'PROC-STAT',
- 'PROC-STATU',
- 'proc-text',
- 'proc-text-buffe',
- 'PROFILER',
- 'PROGRAM-NAME',
- 'PROGRESS',
- 'PROGRESS-SOURCE',
- 'PROGRESS-S',
- 'PROGRESS-SO',
- 'PROGRESS-SOU',
- 'PROGRESS-SOUR',
- 'PROGRESS-SOURC',
- 'PROMPT',
- 'PROMPT-FOR',
- 'PROMPT-F',
- 'PROMPT-FO',
- 'PROMSGS',
- 'PROPATH',
- 'PROPERTY',
- 'PROTECTED',
- 'PROVERSION',
- 'PROVERS',
- 'PROVERSI',
- 'PROVERSIO',
- 'PROXY',
- 'PROXY-PASSWORD',
- 'PROXY-USERID',
- 'PUBLIC',
- 'PUBLIC-ID',
- 'PUBLISH',
- 'PUBLISHED-EVENTS',
- 'PUT',
- 'PUTBYTE',
- 'PUT-BYTE',
- 'PUT-DOUBLE',
- 'PUT-FLOAT',
- 'PUT-INT64',
- 'PUT-KEY-VALUE',
- 'PUT-KEY-VAL',
- 'PUT-KEY-VALU',
- 'PUT-LONG',
- 'PUT-SHORT',
- 'PUT-STRING',
- 'PUT-UNSIGNED-LONG',
- 'QUERY',
- 'QUERY-CLOSE',
- 'QUERY-OFF-END',
- 'QUERY-OPEN',
- 'QUERY-PREPARE',
- 'QUERY-TUNING',
- 'QUESTION',
- 'QUIT',
- 'QUOTER',
- 'RADIO-BUTTONS',
- 'RADIO-SET',
- 'RANDOM',
- 'RAW-TRANSFER',
- 'RCODE-INFORMATION',
- 'RCODE-INFO',
- 'RCODE-INFOR',
- 'RCODE-INFORM',
- 'RCODE-INFORMA',
- 'RCODE-INFORMAT',
- 'RCODE-INFORMATI',
- 'RCODE-INFORMATIO',
- 'READ-AVAILABLE',
- 'READ-EXACT-NUM',
- 'READ-FILE',
- 'READKEY',
- 'READ-ONLY',
- 'READ-XML',
- 'READ-XMLSCHEMA',
- 'REAL',
- 'RECORD-LENGTH',
- 'RECTANGLE',
- 'RECT',
- 'RECTA',
- 'RECTAN',
- 'RECTANG',
- 'RECTANGL',
- 'RECURSIVE',
- 'REFERENCE-ONLY',
- 'REFRESH',
- 'REFRESHABLE',
- 'REFRESH-AUDIT-POLICY',
- 'REGISTER-DOMAIN',
- 'RELEASE',
- 'REMOTE',
- 'REMOVE-EVENTS-PROCEDURE',
- 'REMOVE-SUPER-PROCEDURE',
- 'REPEAT',
- 'REPLACE',
- 'REPLACE-SELECTION-TEXT',
- 'REPOSITION',
- 'REPOSITION-BACKWARD',
- 'REPOSITION-FORWARD',
- 'REPOSITION-MODE',
- 'REPOSITION-TO-ROW',
- 'REPOSITION-TO-ROWID',
- 'REQUEST',
- 'RESET',
- 'RESIZABLE',
- 'RESIZA',
- 'RESIZAB',
- 'RESIZABL',
- 'RESIZE',
- 'RESTART-ROW',
- 'RESTART-ROWID',
- 'RETAIN',
- 'RETAIN-SHAPE',
- 'RETRY',
- 'RETRY-CANCEL',
- 'RETURN',
- 'RETURN-INSERTED',
- 'RETURN-INS',
- 'RETURN-INSE',
- 'RETURN-INSER',
- 'RETURN-INSERT',
- 'RETURN-INSERTE',
- 'RETURNS',
- 'RETURN-TO-START-DIR',
- 'RETURN-TO-START-DI',
- 'RETURN-VALUE',
- 'RETURN-VAL',
- 'RETURN-VALU',
- 'RETURN-VALUE-DATA-TYPE',
- 'REVERSE-FROM',
- 'REVERT',
- 'REVOKE',
- 'RGB-VALUE',
- 'RIGHT-ALIGNED',
- 'RETURN-ALIGN',
- 'RETURN-ALIGNE',
- 'RIGHT-TRIM',
- 'R-INDEX',
- 'ROLES',
- 'ROUND',
- 'ROUTINE-LEVEL',
- 'ROW',
- 'ROW-HEIGHT-CHARS',
- 'ROW-HEIGHT-PIXELS',
- 'ROW-MARKERS',
- 'ROW-OF',
- 'ROW-RESIZABLE',
- 'RULE',
- 'RUN',
- 'RUN-PROCEDURE',
- 'SAVE',
- 'SAVE-AS',
- 'SAVE-FILE',
- 'SAX-COMPLETE',
- 'SAX-COMPLE',
- 'SAX-COMPLET',
- 'SAX-PARSE',
- 'SAX-PARSE-FIRST',
- 'SAX-PARSE-NEXT',
- 'SAX-PARSER-ERROR',
- 'SAX-RUNNING',
- 'SAX-UNINITIALIZED',
- 'SAX-WRITE-BEGIN',
- 'SAX-WRITE-COMPLETE',
- 'SAX-WRITE-CONTENT',
- 'SAX-WRITE-ELEMENT',
- 'SAX-WRITE-ERROR',
- 'SAX-WRITE-IDLE',
- 'SAX-WRITER',
- 'SAX-WRITE-TAG',
- 'SCHEMA',
- 'SCHEMA-LOCATION',
- 'SCHEMA-MARSHAL',
- 'SCHEMA-PATH',
- 'SCREEN',
- 'SCREEN-IO',
- 'SCREEN-LINES',
- 'SCREEN-VALUE',
- 'SCREEN-VAL',
- 'SCREEN-VALU',
- 'SCROLL',
- 'SCROLLABLE',
- 'SCROLLBAR-HORIZONTAL',
- 'SCROLLBAR-H',
- 'SCROLLBAR-HO',
- 'SCROLLBAR-HOR',
- 'SCROLLBAR-HORI',
- 'SCROLLBAR-HORIZ',
- 'SCROLLBAR-HORIZO',
- 'SCROLLBAR-HORIZON',
- 'SCROLLBAR-HORIZONT',
- 'SCROLLBAR-HORIZONTA',
- 'SCROLL-BARS',
- 'SCROLLBAR-VERTICAL',
- 'SCROLLBAR-V',
- 'SCROLLBAR-VE',
- 'SCROLLBAR-VER',
- 'SCROLLBAR-VERT',
- 'SCROLLBAR-VERTI',
- 'SCROLLBAR-VERTIC',
- 'SCROLLBAR-VERTICA',
- 'SCROLL-DELTA',
- 'SCROLLED-ROW-POSITION',
- 'SCROLLED-ROW-POS',
- 'SCROLLED-ROW-POSI',
- 'SCROLLED-ROW-POSIT',
- 'SCROLLED-ROW-POSITI',
- 'SCROLLED-ROW-POSITIO',
- 'SCROLLING',
- 'SCROLL-OFFSET',
- 'SCROLL-TO-CURRENT-ROW',
- 'SCROLL-TO-ITEM',
- 'SCROLL-TO-I',
- 'SCROLL-TO-IT',
- 'SCROLL-TO-ITE',
- 'SCROLL-TO-SELECTED-ROW',
- 'SDBNAME',
- 'SEAL',
- 'SEAL-TIMESTAMP',
- 'SEARCH',
- 'SEARCH-SELF',
- 'SEARCH-TARGET',
- 'SECTION',
- 'SECURITY-POLICY',
- 'SEEK',
- 'SELECT',
- 'SELECTABLE',
- 'SELECT-ALL',
- 'SELECTED',
- 'SELECT-FOCUSED-ROW',
- 'SELECTION',
- 'SELECTION-END',
- 'SELECTION-LIST',
- 'SELECTION-START',
- 'SELECTION-TEXT',
- 'SELECT-NEXT-ROW',
- 'SELECT-PREV-ROW',
- 'SELECT-ROW',
- 'SELF',
- 'SEND',
- 'send-sql-statement',
- 'send-sql',
- 'SENSITIVE',
- 'SEPARATE-CONNECTION',
- 'SEPARATOR-FGCOLOR',
- 'SEPARATORS',
- 'SERVER',
- 'SERVER-CONNECTION-BOUND',
- 'SERVER-CONNECTION-BOUND-REQUEST',
- 'SERVER-CONNECTION-CONTEXT',
- 'SERVER-CONNECTION-ID',
- 'SERVER-OPERATING-MODE',
- 'SESSION',
- 'SESSION-ID',
- 'SET',
- 'SET-APPL-CONTEXT',
- 'SET-ATTR-CALL-TYPE',
- 'SET-ATTRIBUTE-NODE',
- 'SET-BLUE-VALUE',
- 'SET-BLUE',
- 'SET-BLUE-',
- 'SET-BLUE-V',
- 'SET-BLUE-VA',
- 'SET-BLUE-VAL',
- 'SET-BLUE-VALU',
- 'SET-BREAK',
- 'SET-BUFFERS',
- 'SET-CALLBACK',
- 'SET-CLIENT',
- 'SET-COMMIT',
- 'SET-CONTENTS',
- 'SET-CURRENT-VALUE',
- 'SET-DB-CLIENT',
- 'SET-DYNAMIC',
- 'SET-EVENT-MANAGER-OPTION',
- 'SET-GREEN-VALUE',
- 'SET-GREEN',
- 'SET-GREEN-',
- 'SET-GREEN-V',
- 'SET-GREEN-VA',
- 'SET-GREEN-VAL',
- 'SET-GREEN-VALU',
- 'SET-INPUT-SOURCE',
- 'SET-OPTION',
- 'SET-OUTPUT-DESTINATION',
- 'SET-PARAMETER',
- 'SET-POINTER-VALUE',
- 'SET-PROPERTY',
- 'SET-RED-VALUE',
- 'SET-RED',
- 'SET-RED-',
- 'SET-RED-V',
- 'SET-RED-VA',
- 'SET-RED-VAL',
- 'SET-RED-VALU',
- 'SET-REPOSITIONED-ROW',
- 'SET-RGB-VALUE',
- 'SET-ROLLBACK',
- 'SET-SELECTION',
- 'SET-SIZE',
- 'SET-SORT-ARROW',
- 'SETUSERID',
- 'SETUSER',
- 'SETUSERI',
- 'SET-WAIT-STATE',
- 'SHA1-DIGEST',
- 'SHARED',
- 'SHARE-LOCK',
- 'SHARE',
- 'SHARE-',
- 'SHARE-L',
- 'SHARE-LO',
- 'SHARE-LOC',
- 'SHOW-IN-TASKBAR',
- 'SHOW-STATS',
- 'SHOW-STAT',
- 'SIDE-LABEL-HANDLE',
- 'SIDE-LABEL-H',
- 'SIDE-LABEL-HA',
- 'SIDE-LABEL-HAN',
- 'SIDE-LABEL-HAND',
- 'SIDE-LABEL-HANDL',
- 'SIDE-LABELS',
- 'SIDE-LAB',
- 'SIDE-LABE',
- 'SIDE-LABEL',
- 'SILENT',
- 'SIMPLE',
- 'SINGLE',
- 'SIZE',
- 'SIZE-CHARS',
- 'SIZE-C',
- 'SIZE-CH',
- 'SIZE-CHA',
- 'SIZE-CHAR',
- 'SIZE-PIXELS',
- 'SIZE-P',
- 'SIZE-PI',
- 'SIZE-PIX',
- 'SIZE-PIXE',
- 'SIZE-PIXEL',
- 'SKIP',
- 'SKIP-DELETED-RECORD',
- 'SLIDER',
- 'SMALL-ICON',
- 'SMALLINT',
- 'SMALL-TITLE',
- 'SOME',
- 'SORT',
- 'SORT-ASCENDING',
- 'SORT-NUMBER',
- 'SOURCE',
- 'SOURCE-PROCEDURE',
- 'SPACE',
- 'SQL',
- 'SQRT',
- 'SSL-SERVER-NAME',
- 'STANDALONE',
- 'START',
- 'START-DOCUMENT',
- 'START-ELEMENT',
- 'START-MOVE',
- 'START-RESIZE',
- 'START-ROW-RESIZE',
- 'STATE-DETAIL',
- 'STATIC',
- 'STATUS',
- 'STATUS-AREA',
- 'STATUS-AREA-FONT',
- 'STDCALL',
- 'STOP',
- 'STOP-PARSING',
- 'STOPPED',
- 'STOPPE',
- 'STORED-PROCEDURE',
- 'STORED-PROC',
- 'STORED-PROCE',
- 'STORED-PROCED',
- 'STORED-PROCEDU',
- 'STORED-PROCEDUR',
- 'STREAM',
- 'STREAM-HANDLE',
- 'STREAM-IO',
- 'STRETCH-TO-FIT',
- 'STRICT',
- 'STRING',
- 'STRING-VALUE',
- 'STRING-XREF',
- 'SUB-AVERAGE',
- 'SUB-AVE',
- 'SUB-AVER',
- 'SUB-AVERA',
- 'SUB-AVERAG',
- 'SUB-COUNT',
- 'SUB-MAXIMUM',
- 'SUM-MAX',
- 'SUM-MAXI',
- 'SUM-MAXIM',
- 'SUM-MAXIMU',
- 'SUB-MENU',
- 'SUBSUB-',
- 'SUB-MIN',
- 'SUBSCRIBE',
- 'SUBSTITUTE',
- 'SUBST',
- 'SUBSTI',
- 'SUBSTIT',
- 'SUBSTITU',
- 'SUBSTITUT',
- 'SUBSTRING',
- 'SUBSTR',
- 'SUBSTRI',
- 'SUBSTRIN',
- 'SUB-TOTAL',
- 'SUBTYPE',
- 'SUM',
- 'SUPER',
- 'SUPER-PROCEDURES',
- 'SUPPRESS-NAMESPACE-PROCESSING',
- 'SUPPRESS-WARNINGS',
- 'SUPPRESS-W',
- 'SUPPRESS-WA',
- 'SUPPRESS-WAR',
- 'SUPPRESS-WARN',
- 'SUPPRESS-WARNI',
- 'SUPPRESS-WARNIN',
- 'SUPPRESS-WARNING',
- 'SYMMETRIC-ENCRYPTION-ALGORITHM',
- 'SYMMETRIC-ENCRYPTION-IV',
- 'SYMMETRIC-ENCRYPTION-KEY',
- 'SYMMETRIC-SUPPORT',
- 'SYSTEM-ALERT-BOXES',
- 'SYSTEM-ALERT',
- 'SYSTEM-ALERT-',
- 'SYSTEM-ALERT-B',
- 'SYSTEM-ALERT-BO',
- 'SYSTEM-ALERT-BOX',
- 'SYSTEM-ALERT-BOXE',
- 'SYSTEM-DIALOG',
- 'SYSTEM-HELP',
- 'SYSTEM-ID',
- 'TABLE',
- 'TABLE-HANDLE',
- 'TABLE-NUMBER',
- 'TAB-POSITION',
- 'TAB-STOP',
- 'TARGET',
- 'TARGET-PROCEDURE',
- 'TEMP-DIRECTORY',
- 'TEMP-DIR',
- 'TEMP-DIRE',
- 'TEMP-DIREC',
- 'TEMP-DIRECT',
- 'TEMP-DIRECTO',
- 'TEMP-DIRECTOR',
- 'TEMP-TABLE',
- 'TEMP-TABLE-PREPARE',
- 'TERM',
- 'TERMINAL',
- 'TERMI',
- 'TERMIN',
- 'TERMINA',
- 'TERMINATE',
- 'TEXT',
- 'TEXT-CURSOR',
- 'TEXT-SEG-GROW',
- 'TEXT-SELECTED',
- 'THEN',
- 'THIS-OBJECT',
- 'THIS-PROCEDURE',
- 'THREE-D',
- 'THROW',
- 'THROUGH',
- 'THRU',
- 'TIC-MARKS',
- 'TIME',
- 'TIME-SOURCE',
- 'TITLE',
- 'TITLE-BGCOLOR',
- 'TITLE-BGC',
- 'TITLE-BGCO',
- 'TITLE-BGCOL',
- 'TITLE-BGCOLO',
- 'TITLE-DCOLOR',
- 'TITLE-DC',
- 'TITLE-DCO',
- 'TITLE-DCOL',
- 'TITLE-DCOLO',
- 'TITLE-FGCOLOR',
- 'TITLE-FGC',
- 'TITLE-FGCO',
- 'TITLE-FGCOL',
- 'TITLE-FGCOLO',
- 'TITLE-FONT',
- 'TITLE-FO',
- 'TITLE-FON',
- 'TO',
- 'TODAY',
- 'TOGGLE-BOX',
- 'TOOLTIP',
- 'TOOLTIPS',
- 'TOPIC',
- 'TOP-NAV-QUERY',
- 'TOP-ONLY',
- 'TO-ROWID',
- 'TOTAL',
- 'TRAILING',
- 'TRANS',
- 'TRANSACTION',
- 'TRANSACTION-MODE',
- 'TRANS-INIT-PROCEDURE',
- 'TRANSPARENT',
- 'TRIGGER',
- 'TRIGGERS',
- 'TRIM',
- 'TRUE',
- 'TRUNCATE',
- 'TRUNC',
- 'TRUNCA',
- 'TRUNCAT',
- 'TYPE',
- 'TYPE-OF',
- 'UNBOX',
- 'UNBUFFERED',
- 'UNBUFF',
- 'UNBUFFE',
- 'UNBUFFER',
- 'UNBUFFERE',
- 'UNDERLINE',
- 'UNDERL',
- 'UNDERLI',
- 'UNDERLIN',
- 'UNDO',
- 'UNFORMATTED',
- 'UNFORM',
- 'UNFORMA',
- 'UNFORMAT',
- 'UNFORMATT',
- 'UNFORMATTE',
- 'UNION',
- 'UNIQUE',
- 'UNIQUE-ID',
- 'UNIQUE-MATCH',
- 'UNIX',
- 'UNLESS-HIDDEN',
- 'UNLOAD',
- 'UNSIGNED-LONG',
- 'UNSUBSCRIBE',
- 'UP',
- 'UPDATE',
- 'UPDATE-ATTRIBUTE',
- 'URL',
- 'URL-DECODE',
- 'URL-ENCODE',
- 'URL-PASSWORD',
- 'URL-USERID',
- 'USE',
- 'USE-DICT-EXPS',
- 'USE-FILENAME',
- 'USE-INDEX',
- 'USER',
- 'USE-REVVIDEO',
- 'USERID',
- 'USER-ID',
- 'USE-TEXT',
- 'USE-UNDERLINE',
- 'USE-WIDGET-POOL',
- 'USING',
- 'V6DISPLAY',
- 'V6FRAME',
- 'VALIDATE',
- 'VALIDATE-EXPRESSION',
- 'VALIDATE-MESSAGE',
- 'VALIDATE-SEAL',
- 'VALIDATION-ENABLED',
- 'VALID-EVENT',
- 'VALID-HANDLE',
- 'VALID-OBJECT',
- 'VALUE',
- 'VALUE-CHANGED',
- 'VALUES',
- 'VARIABLE',
- 'VAR',
- 'VARI',
- 'VARIA',
- 'VARIAB',
- 'VARIABL',
- 'VERBOSE',
- 'VERSION',
- 'VERTICAL',
- 'VERT',
- 'VERTI',
- 'VERTIC',
- 'VERTICA',
- 'VIEW',
- 'VIEW-AS',
- 'VIEW-FIRST-COLUMN-ON-REOPEN',
- 'VIRTUAL-HEIGHT-CHARS',
- 'VIRTUAL-HEIGHT',
- 'VIRTUAL-HEIGHT-',
- 'VIRTUAL-HEIGHT-C',
- 'VIRTUAL-HEIGHT-CH',
- 'VIRTUAL-HEIGHT-CHA',
- 'VIRTUAL-HEIGHT-CHAR',
- 'VIRTUAL-HEIGHT-PIXELS',
- 'VIRTUAL-HEIGHT-P',
- 'VIRTUAL-HEIGHT-PI',
- 'VIRTUAL-HEIGHT-PIX',
- 'VIRTUAL-HEIGHT-PIXE',
- 'VIRTUAL-HEIGHT-PIXEL',
- 'VIRTUAL-WIDTH-CHARS',
- 'VIRTUAL-WIDTH',
- 'VIRTUAL-WIDTH-',
- 'VIRTUAL-WIDTH-C',
- 'VIRTUAL-WIDTH-CH',
- 'VIRTUAL-WIDTH-CHA',
- 'VIRTUAL-WIDTH-CHAR',
- 'VIRTUAL-WIDTH-PIXELS',
- 'VIRTUAL-WIDTH-P',
- 'VIRTUAL-WIDTH-PI',
- 'VIRTUAL-WIDTH-PIX',
- 'VIRTUAL-WIDTH-PIXE',
- 'VIRTUAL-WIDTH-PIXEL',
- 'VISIBLE',
- 'VOID',
- 'WAIT',
- 'WAIT-FOR',
- 'WARNING',
- 'WEB-CONTEXT',
- 'WEEKDAY',
- 'WHEN',
- 'WHERE',
- 'WHILE',
- 'WIDGET',
- 'WIDGET-ENTER',
- 'WIDGET-E',
- 'WIDGET-EN',
- 'WIDGET-ENT',
- 'WIDGET-ENTE',
- 'WIDGET-ID',
- 'WIDGET-LEAVE',
- 'WIDGET-L',
- 'WIDGET-LE',
- 'WIDGET-LEA',
- 'WIDGET-LEAV',
- 'WIDGET-POOL',
- 'WIDTH-CHARS',
- 'WIDTH',
- 'WIDTH-',
- 'WIDTH-C',
- 'WIDTH-CH',
- 'WIDTH-CHA',
- 'WIDTH-CHAR',
- 'WIDTH-PIXELS',
- 'WIDTH-P',
- 'WIDTH-PI',
- 'WIDTH-PIX',
- 'WIDTH-PIXE',
- 'WIDTH-PIXEL',
- 'WINDOW',
- 'WINDOW-MAXIMIZED',
- 'WINDOW-MAXIM',
- 'WINDOW-MAXIMI',
- 'WINDOW-MAXIMIZ',
- 'WINDOW-MAXIMIZE',
- 'WINDOW-MINIMIZED',
- 'WINDOW-MINIM',
- 'WINDOW-MINIMI',
- 'WINDOW-MINIMIZ',
- 'WINDOW-MINIMIZE',
- 'WINDOW-NAME',
- 'WINDOW-NORMAL',
- 'WINDOW-STATE',
- 'WINDOW-STA',
- 'WINDOW-STAT',
- 'WINDOW-SYSTEM',
- 'WITH',
- 'WORD-INDEX',
- 'WORD-WRAP',
- 'WORK-AREA-HEIGHT-PIXELS',
- 'WORK-AREA-WIDTH-PIXELS',
- 'WORK-AREA-X',
- 'WORK-AREA-Y',
- 'WORKFILE',
- 'WORK-TABLE',
- 'WORK-TAB',
- 'WORK-TABL',
- 'WRITE',
- 'WRITE-CDATA',
- 'WRITE-CHARACTERS',
- 'WRITE-COMMENT',
- 'WRITE-DATA-ELEMENT',
- 'WRITE-EMPTY-ELEMENT',
- 'WRITE-ENTITY-REF',
- 'WRITE-EXTERNAL-DTD',
- 'WRITE-FRAGMENT',
- 'WRITE-MESSAGE',
- 'WRITE-PROCESSING-INSTRUCTION',
- 'WRITE-STATUS',
- 'WRITE-XML',
- 'WRITE-XMLSCHEMA',
- 'X',
- 'XCODE',
- 'XML-DATA-TYPE',
- 'XML-NODE-TYPE',
- 'XML-SCHEMA-PATH',
- 'XML-SUPPRESS-NAMESPACE-PROCESSING',
- 'X-OF',
- 'XREF',
- 'XREF-XML',
- 'Y',
- 'YEAR',
- 'YEAR-OFFSET',
- 'YES',
- 'YES-NO',
- 'YES-NO-CANCEL',
- 'Y-OF'
-)
+ :license: BSD, see LICENSE for details.
+"""
+
+OPENEDGEKEYWORDS = (
+ 'ABSOLUTE',
+ 'ABS',
+ 'ABSO',
+ 'ABSOL',
+ 'ABSOLU',
+ 'ABSOLUT',
+ 'ACCELERATOR',
+ 'ACCUMULATE',
+ 'ACCUM',
+ 'ACCUMU',
+ 'ACCUMUL',
+ 'ACCUMULA',
+ 'ACCUMULAT',
+ 'ACTIVE-FORM',
+ 'ACTIVE-WINDOW',
+ 'ADD',
+ 'ADD-BUFFER',
+ 'ADD-CALC-COLUMN',
+ 'ADD-COLUMNS-FROM',
+ 'ADD-EVENTS-PROCEDURE',
+ 'ADD-FIELDS-FROM',
+ 'ADD-FIRST',
+ 'ADD-INDEX-FIELD',
+ 'ADD-LAST',
+ 'ADD-LIKE-COLUMN',
+ 'ADD-LIKE-FIELD',
+ 'ADD-LIKE-INDEX',
+ 'ADD-NEW-FIELD',
+ 'ADD-NEW-INDEX',
+ 'ADD-SCHEMA-LOCATION',
+ 'ADD-SUPER-PROCEDURE',
+ 'ADM-DATA',
+ 'ADVISE',
+ 'ALERT-BOX',
+ 'ALIAS',
+ 'ALL',
+ 'ALLOW-COLUMN-SEARCHING',
+ 'ALLOW-REPLICATION',
+ 'ALTER',
+ 'ALWAYS-ON-TOP',
+ 'AMBIGUOUS',
+ 'AMBIG',
+ 'AMBIGU',
+ 'AMBIGUO',
+ 'AMBIGUOU',
+ 'ANALYZE',
+ 'ANALYZ',
+ 'AND',
+ 'ANSI-ONLY',
+ 'ANY',
+ 'ANYWHERE',
+ 'APPEND',
+ 'APPL-ALERT-BOXES',
+ 'APPL-ALERT',
+ 'APPL-ALERT-',
+ 'APPL-ALERT-B',
+ 'APPL-ALERT-BO',
+ 'APPL-ALERT-BOX',
+ 'APPL-ALERT-BOXE',
+ 'APPL-CONTEXT-ID',
+ 'APPLICATION',
+ 'APPLY',
+ 'APPSERVER-INFO',
+ 'APPSERVER-PASSWORD',
+ 'APPSERVER-USERID',
+ 'ARRAY-MESSAGE',
+ 'AS',
+ 'ASC',
+ 'ASCENDING',
+ 'ASCE',
+ 'ASCEN',
+ 'ASCEND',
+ 'ASCENDI',
+ 'ASCENDIN',
+ 'ASK-OVERWRITE',
+ 'ASSEMBLY',
+ 'ASSIGN',
+ 'ASYNCHRONOUS',
+ 'ASYNC-REQUEST-COUNT',
+ 'ASYNC-REQUEST-HANDLE',
+ 'AT',
+ 'ATTACHED-PAIRLIST',
+ 'ATTR-SPACE',
+ 'ATTR',
+ 'ATTRI',
+ 'ATTRIB',
+ 'ATTRIBU',
+ 'ATTRIBUT',
+ 'AUDIT-CONTROL',
+ 'AUDIT-ENABLED',
+ 'AUDIT-EVENT-CONTEXT',
+ 'AUDIT-POLICY',
+ 'AUTHENTICATION-FAILED',
+ 'AUTHORIZATION',
+ 'AUTO-COMPLETION',
+ 'AUTO-COMP',
+ 'AUTO-COMPL',
+ 'AUTO-COMPLE',
+ 'AUTO-COMPLET',
+ 'AUTO-COMPLETI',
+ 'AUTO-COMPLETIO',
+ 'AUTO-ENDKEY',
+ 'AUTO-END-KEY',
+ 'AUTO-GO',
+ 'AUTO-INDENT',
+ 'AUTO-IND',
+ 'AUTO-INDE',
+ 'AUTO-INDEN',
+ 'AUTOMATIC',
+ 'AUTO-RESIZE',
+ 'AUTO-RETURN',
+ 'AUTO-RET',
+ 'AUTO-RETU',
+ 'AUTO-RETUR',
+ 'AUTO-SYNCHRONIZE',
+ 'AUTO-ZAP',
+ 'AUTO-Z',
+ 'AUTO-ZA',
+ 'AVAILABLE',
+ 'AVAIL',
+ 'AVAILA',
+ 'AVAILAB',
+ 'AVAILABL',
+ 'AVAILABLE-FORMATS',
+ 'AVERAGE',
+ 'AVE',
+ 'AVER',
+ 'AVERA',
+ 'AVERAG',
+ 'AVG',
+ 'BACKGROUND',
+ 'BACK',
+ 'BACKG',
+ 'BACKGR',
+ 'BACKGRO',
+ 'BACKGROU',
+ 'BACKGROUN',
+ 'BACKWARDS',
+ 'BACKWARD',
+ 'BASE64-DECODE',
+ 'BASE64-ENCODE',
+ 'BASE-ADE',
+ 'BASE-KEY',
+ 'BATCH-MODE',
+ 'BATCH',
+ 'BATCH-',
+ 'BATCH-M',
+ 'BATCH-MO',
+ 'BATCH-MOD',
+ 'BATCH-SIZE',
+ 'BEFORE-HIDE',
+ 'BEFORE-H',
+ 'BEFORE-HI',
+ 'BEFORE-HID',
+ 'BEGIN-EVENT-GROUP',
+ 'BEGINS',
+ 'BELL',
+ 'BETWEEN',
+ 'BGCOLOR',
+ 'BGC',
+ 'BGCO',
+ 'BGCOL',
+ 'BGCOLO',
+ 'BIG-ENDIAN',
+ 'BINARY',
+ 'BIND',
+ 'BIND-WHERE',
+ 'BLANK',
+ 'BLOCK-ITERATION-DISPLAY',
+ 'BORDER-BOTTOM-CHARS',
+ 'BORDER-B',
+ 'BORDER-BO',
+ 'BORDER-BOT',
+ 'BORDER-BOTT',
+ 'BORDER-BOTTO',
+ 'BORDER-BOTTOM-PIXELS',
+ 'BORDER-BOTTOM-P',
+ 'BORDER-BOTTOM-PI',
+ 'BORDER-BOTTOM-PIX',
+ 'BORDER-BOTTOM-PIXE',
+ 'BORDER-BOTTOM-PIXEL',
+ 'BORDER-LEFT-CHARS',
+ 'BORDER-L',
+ 'BORDER-LE',
+ 'BORDER-LEF',
+ 'BORDER-LEFT',
+ 'BORDER-LEFT-',
+ 'BORDER-LEFT-C',
+ 'BORDER-LEFT-CH',
+ 'BORDER-LEFT-CHA',
+ 'BORDER-LEFT-CHAR',
+ 'BORDER-LEFT-PIXELS',
+ 'BORDER-LEFT-P',
+ 'BORDER-LEFT-PI',
+ 'BORDER-LEFT-PIX',
+ 'BORDER-LEFT-PIXE',
+ 'BORDER-LEFT-PIXEL',
+ 'BORDER-RIGHT-CHARS',
+ 'BORDER-R',
+ 'BORDER-RI',
+ 'BORDER-RIG',
+ 'BORDER-RIGH',
+ 'BORDER-RIGHT',
+ 'BORDER-RIGHT-',
+ 'BORDER-RIGHT-C',
+ 'BORDER-RIGHT-CH',
+ 'BORDER-RIGHT-CHA',
+ 'BORDER-RIGHT-CHAR',
+ 'BORDER-RIGHT-PIXELS',
+ 'BORDER-RIGHT-P',
+ 'BORDER-RIGHT-PI',
+ 'BORDER-RIGHT-PIX',
+ 'BORDER-RIGHT-PIXE',
+ 'BORDER-RIGHT-PIXEL',
+ 'BORDER-TOP-CHARS',
+ 'BORDER-T',
+ 'BORDER-TO',
+ 'BORDER-TOP',
+ 'BORDER-TOP-',
+ 'BORDER-TOP-C',
+ 'BORDER-TOP-CH',
+ 'BORDER-TOP-CHA',
+ 'BORDER-TOP-CHAR',
+ 'BORDER-TOP-PIXELS',
+ 'BORDER-TOP-P',
+ 'BORDER-TOP-PI',
+ 'BORDER-TOP-PIX',
+ 'BORDER-TOP-PIXE',
+ 'BORDER-TOP-PIXEL',
+ 'BOX',
+ 'BOX-SELECTABLE',
+ 'BOX-SELECT',
+ 'BOX-SELECTA',
+ 'BOX-SELECTAB',
+ 'BOX-SELECTABL',
+ 'BREAK',
+ 'BROWSE',
+ 'BUFFER',
+ 'BUFFER-CHARS',
+ 'BUFFER-COMPARE',
+ 'BUFFER-COPY',
+ 'BUFFER-CREATE',
+ 'BUFFER-DELETE',
+ 'BUFFER-FIELD',
+ 'BUFFER-HANDLE',
+ 'BUFFER-LINES',
+ 'BUFFER-NAME',
+ 'BUFFER-RELEASE',
+ 'BUFFER-VALUE',
+ 'BUTTON',
+ 'BUTTONS',
+ 'BY',
+ 'BY-POINTER',
+ 'BY-VARIANT-POINTER',
+ 'CACHE',
+ 'CACHE-SIZE',
+ 'CALL',
+ 'CALL-NAME',
+ 'CALL-TYPE',
+ 'CANCEL-BREAK',
+ 'CANCEL-BUTTON',
+ 'CAN-CREATE',
+ 'CAN-DELETE',
+ 'CAN-DO',
+ 'CAN-FIND',
+ 'CAN-QUERY',
+ 'CAN-READ',
+ 'CAN-SET',
+ 'CAN-WRITE',
+ 'CAPS',
+ 'CAREFUL-PAINT',
+ 'CASE',
+ 'CASE-SENSITIVE',
+ 'CASE-SEN',
+ 'CASE-SENS',
+ 'CASE-SENSI',
+ 'CASE-SENSIT',
+ 'CASE-SENSITI',
+ 'CASE-SENSITIV',
+ 'CAST',
+ 'CATCH',
+ 'CDECL',
+ 'CENTERED',
+ 'CENTER',
+ 'CENTERE',
+ 'CHAINED',
+ 'CHARACTER_LENGTH',
+ 'CHARSET',
+ 'CHECK',
+ 'CHECKED',
+ 'CHOOSE',
+ 'CHR',
+ 'CLASS',
+ 'CLASS-TYPE',
+ 'CLEAR',
+ 'CLEAR-APPL-CONTEXT',
+ 'CLEAR-LOG',
+ 'CLEAR-SELECTION',
+ 'CLEAR-SELECT',
+ 'CLEAR-SELECTI',
+ 'CLEAR-SELECTIO',
+ 'CLEAR-SORT-ARROWS',
+ 'CLEAR-SORT-ARROW',
+ 'CLIENT-CONNECTION-ID',
+ 'CLIENT-PRINCIPAL',
+ 'CLIENT-TTY',
+ 'CLIENT-TYPE',
+ 'CLIENT-WORKSTATION',
+ 'CLIPBOARD',
+ 'CLOSE',
+ 'CLOSE-LOG',
+ 'CODE',
+ 'CODEBASE-LOCATOR',
+ 'CODEPAGE',
+ 'CODEPAGE-CONVERT',
+ 'COLLATE',
+ 'COL-OF',
+ 'COLON',
+ 'COLON-ALIGNED',
+ 'COLON-ALIGN',
+ 'COLON-ALIGNE',
+ 'COLOR',
+ 'COLOR-TABLE',
+ 'COLUMN',
+ 'COL',
+ 'COLU',
+ 'COLUM',
+ 'COLUMN-BGCOLOR',
+ 'COLUMN-DCOLOR',
+ 'COLUMN-FGCOLOR',
+ 'COLUMN-FONT',
+ 'COLUMN-LABEL',
+ 'COLUMN-LAB',
+ 'COLUMN-LABE',
+ 'COLUMN-MOVABLE',
+ 'COLUMN-OF',
+ 'COLUMN-PFCOLOR',
+ 'COLUMN-READ-ONLY',
+ 'COLUMN-RESIZABLE',
+ 'COLUMNS',
+ 'COLUMN-SCROLLING',
+ 'COMBO-BOX',
+ 'COMMAND',
+ 'COMPARES',
+ 'COMPILE',
+ 'COMPILER',
+ 'COMPLETE',
+ 'COM-SELF',
+ 'CONFIG-NAME',
+ 'CONNECT',
+ 'CONNECTED',
+ 'CONSTRUCTOR',
+ 'CONTAINS',
+ 'CONTENTS',
+ 'CONTEXT',
+ 'CONTEXT-HELP',
+ 'CONTEXT-HELP-FILE',
+ 'CONTEXT-HELP-ID',
+ 'CONTEXT-POPUP',
+ 'CONTROL',
+ 'CONTROL-BOX',
+ 'CONTROL-FRAME',
+ 'CONVERT',
+ 'CONVERT-3D-COLORS',
+ 'CONVERT-TO-OFFSET',
+ 'CONVERT-TO-OFFS',
+ 'CONVERT-TO-OFFSE',
+ 'COPY-DATASET',
+ 'COPY-LOB',
+ 'COPY-SAX-ATTRIBUTES',
+ 'COPY-TEMP-TABLE',
+ 'COUNT',
+ 'COUNT-OF',
+ 'CPCASE',
+ 'CPCOLL',
+ 'CPINTERNAL',
+ 'CPLOG',
+ 'CPPRINT',
+ 'CPRCODEIN',
+ 'CPRCODEOUT',
+ 'CPSTREAM',
+ 'CPTERM',
+ 'CRC-VALUE',
+ 'CREATE',
+ 'CREATE-LIKE',
+ 'CREATE-LIKE-SEQUENTIAL',
+ 'CREATE-NODE-NAMESPACE',
+ 'CREATE-RESULT-LIST-ENTRY',
+ 'CREATE-TEST-FILE',
+ 'CURRENT',
+ 'CURRENT_DATE',
+ 'CURRENT-CHANGED',
+ 'CURRENT-COLUMN',
+ 'CURRENT-ENVIRONMENT',
+ 'CURRENT-ENV',
+ 'CURRENT-ENVI',
+ 'CURRENT-ENVIR',
+ 'CURRENT-ENVIRO',
+ 'CURRENT-ENVIRON',
+ 'CURRENT-ENVIRONM',
+ 'CURRENT-ENVIRONME',
+ 'CURRENT-ENVIRONMEN',
+ 'CURRENT-ITERATION',
+ 'CURRENT-LANGUAGE',
+ 'CURRENT-LANG',
+ 'CURRENT-LANGU',
+ 'CURRENT-LANGUA',
+ 'CURRENT-LANGUAG',
+ 'CURRENT-QUERY',
+ 'CURRENT-RESULT-ROW',
+ 'CURRENT-ROW-MODIFIED',
+ 'CURRENT-VALUE',
+ 'CURRENT-WINDOW',
+ 'CURSOR',
+ 'CURS',
+ 'CURSO',
+ 'CURSOR-CHAR',
+ 'CURSOR-LINE',
+ 'CURSOR-OFFSET',
+ 'DATABASE',
+ 'DATA-BIND',
+ 'DATA-ENTRY-RETURN',
+ 'DATA-ENTRY-RET',
+ 'DATA-ENTRY-RETU',
+ 'DATA-ENTRY-RETUR',
+ 'DATA-RELATION',
+ 'DATA-REL',
+ 'DATA-RELA',
+ 'DATA-RELAT',
+ 'DATA-RELATI',
+ 'DATA-RELATIO',
+ 'DATASERVERS',
+ 'DATASET',
+ 'DATASET-HANDLE',
+ 'DATA-SOURCE',
+ 'DATA-SOURCE-COMPLETE-MAP',
+ 'DATA-SOURCE-MODIFIED',
+ 'DATA-SOURCE-ROWID',
+ 'DATA-TYPE',
+ 'DATA-T',
+ 'DATA-TY',
+ 'DATA-TYP',
+ 'DATE-FORMAT',
+ 'DATE-F',
+ 'DATE-FO',
+ 'DATE-FOR',
+ 'DATE-FORM',
+ 'DATE-FORMA',
+ 'DAY',
+ 'DBCODEPAGE',
+ 'DBCOLLATION',
+ 'DBNAME',
+ 'DBPARAM',
+ 'DB-REFERENCES',
+ 'DBRESTRICTIONS',
+ 'DBREST',
+ 'DBRESTR',
+ 'DBRESTRI',
+ 'DBRESTRIC',
+ 'DBRESTRICT',
+ 'DBRESTRICTI',
+ 'DBRESTRICTIO',
+ 'DBRESTRICTION',
+ 'DBTASKID',
+ 'DBTYPE',
+ 'DBVERSION',
+ 'DBVERS',
+ 'DBVERSI',
+ 'DBVERSIO',
+ 'DCOLOR',
+ 'DDE',
+ 'DDE-ERROR',
+ 'DDE-ID',
+ 'DDE-I',
+ 'DDE-ITEM',
+ 'DDE-NAME',
+ 'DDE-TOPIC',
+ 'DEBLANK',
+ 'DEBUG',
+ 'DEBU',
+ 'DEBUG-ALERT',
+ 'DEBUGGER',
+ 'DEBUG-LIST',
+ 'DECIMALS',
+ 'DECLARE',
+ 'DECLARE-NAMESPACE',
+ 'DECRYPT',
+ 'DEFAULT',
+ 'DEFAULT-BUFFER-HANDLE',
+ 'DEFAULT-BUTTON',
+ 'DEFAUT-B',
+ 'DEFAUT-BU',
+ 'DEFAUT-BUT',
+ 'DEFAUT-BUTT',
+ 'DEFAUT-BUTTO',
+ 'DEFAULT-COMMIT',
+ 'DEFAULT-EXTENSION',
+ 'DEFAULT-EX',
+ 'DEFAULT-EXT',
+ 'DEFAULT-EXTE',
+ 'DEFAULT-EXTEN',
+ 'DEFAULT-EXTENS',
+ 'DEFAULT-EXTENSI',
+ 'DEFAULT-EXTENSIO',
+ 'DEFAULT-NOXLATE',
+ 'DEFAULT-NOXL',
+ 'DEFAULT-NOXLA',
+ 'DEFAULT-NOXLAT',
+ 'DEFAULT-VALUE',
+ 'DEFAULT-WINDOW',
+ 'DEFINED',
+ 'DEFINE-USER-EVENT-MANAGER',
+ 'DELETE',
+ 'DEL',
+ 'DELE',
+ 'DELET',
+ 'DELETE-CHARACTER',
+ 'DELETE-CHAR',
+ 'DELETE-CHARA',
+ 'DELETE-CHARAC',
+ 'DELETE-CHARACT',
+ 'DELETE-CHARACTE',
+ 'DELETE-CURRENT-ROW',
+ 'DELETE-LINE',
+ 'DELETE-RESULT-LIST-ENTRY',
+ 'DELETE-SELECTED-ROW',
+ 'DELETE-SELECTED-ROWS',
+ 'DELIMITER',
+ 'DESC',
+ 'DESCENDING',
+ 'DESCE',
+ 'DESCEN',
+ 'DESCEND',
+ 'DESCENDI',
+ 'DESCENDIN',
+ 'DESELECT-FOCUSED-ROW',
+ 'DESELECTION',
+ 'DESELECT-ROWS',
+ 'DESELECT-SELECTED-ROW',
+ 'DESTRUCTOR',
+ 'DIALOG-BOX',
+ 'DICTIONARY',
+ 'DICT',
+ 'DICTI',
+ 'DICTIO',
+ 'DICTION',
+ 'DICTIONA',
+ 'DICTIONAR',
+ 'DIR',
+ 'DISABLE',
+ 'DISABLE-AUTO-ZAP',
+ 'DISABLED',
+ 'DISABLE-DUMP-TRIGGERS',
+ 'DISABLE-LOAD-TRIGGERS',
+ 'DISCONNECT',
+ 'DISCON',
+ 'DISCONN',
+ 'DISCONNE',
+ 'DISCONNEC',
+ 'DISP',
+ 'DISPLAY',
+ 'DISPL',
+ 'DISPLA',
+ 'DISPLAY-MESSAGE',
+ 'DISPLAY-TYPE',
+ 'DISPLAY-T',
+ 'DISPLAY-TY',
+ 'DISPLAY-TYP',
+ 'DISTINCT',
+ 'DO',
+ 'DOMAIN-DESCRIPTION',
+ 'DOMAIN-NAME',
+ 'DOMAIN-TYPE',
+ 'DOS',
+ 'DOUBLE',
+ 'DOWN',
+ 'DRAG-ENABLED',
+ 'DROP',
+ 'DROP-DOWN',
+ 'DROP-DOWN-LIST',
+ 'DROP-FILE-NOTIFY',
+ 'DROP-TARGET',
+ 'DUMP',
+ 'DYNAMIC',
+ 'DYNAMIC-FUNCTION',
+ 'EACH',
+ 'ECHO',
+ 'EDGE-CHARS',
+ 'EDGE',
+ 'EDGE-',
+ 'EDGE-C',
+ 'EDGE-CH',
+ 'EDGE-CHA',
+ 'EDGE-CHAR',
+ 'EDGE-PIXELS',
+ 'EDGE-P',
+ 'EDGE-PI',
+ 'EDGE-PIX',
+ 'EDGE-PIXE',
+ 'EDGE-PIXEL',
+ 'EDIT-CAN-PASTE',
+ 'EDIT-CAN-UNDO',
+ 'EDIT-CLEAR',
+ 'EDIT-COPY',
+ 'EDIT-CUT',
+ 'EDITING',
+ 'EDITOR',
+ 'EDIT-PASTE',
+ 'EDIT-UNDO',
+ 'ELSE',
+ 'EMPTY',
+ 'EMPTY-TEMP-TABLE',
+ 'ENABLE',
+ 'ENABLED-FIELDS',
+ 'ENCODE',
+ 'ENCRYPT',
+ 'ENCRYPT-AUDIT-MAC-KEY',
+ 'ENCRYPTION-SALT',
+ 'END',
+ 'END-DOCUMENT',
+ 'END-ELEMENT',
+ 'END-EVENT-GROUP',
+ 'END-FILE-DROP',
+ 'ENDKEY',
+ 'END-KEY',
+ 'END-MOVE',
+ 'END-RESIZE',
+ 'END-ROW-RESIZE',
+ 'END-USER-PROMPT',
+ 'ENTERED',
+ 'ENTRY',
+ 'EQ',
+ 'ERROR',
+ 'ERROR-COLUMN',
+ 'ERROR-COL',
+ 'ERROR-COLU',
+ 'ERROR-COLUM',
+ 'ERROR-ROW',
+ 'ERROR-STACK-TRACE',
+ 'ERROR-STATUS',
+ 'ERROR-STAT',
+ 'ERROR-STATU',
+ 'ESCAPE',
+ 'ETIME',
+ 'EVENT-GROUP-ID',
+ 'EVENT-PROCEDURE',
+ 'EVENT-PROCEDURE-CONTEXT',
+ 'EVENTS',
+ 'EVENT',
+ 'EVENT-TYPE',
+ 'EVENT-T',
+ 'EVENT-TY',
+ 'EVENT-TYP',
+ 'EXCEPT',
+ 'EXCLUSIVE-ID',
+ 'EXCLUSIVE-LOCK',
+ 'EXCLUSIVE',
+ 'EXCLUSIVE-',
+ 'EXCLUSIVE-L',
+ 'EXCLUSIVE-LO',
+ 'EXCLUSIVE-LOC',
+ 'EXCLUSIVE-WEB-USER',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXP',
+ 'EXPAND',
+ 'EXPANDABLE',
+ 'EXPLICIT',
+ 'EXPORT',
+ 'EXPORT-PRINCIPAL',
+ 'EXTENDED',
+ 'EXTENT',
+ 'EXTERNAL',
+ 'FALSE',
+ 'FETCH',
+ 'FETCH-SELECTED-ROW',
+ 'FGCOLOR',
+ 'FGC',
+ 'FGCO',
+ 'FGCOL',
+ 'FGCOLO',
+ 'FIELD',
+ 'FIELDS',
+ 'FILE',
+ 'FILE-CREATE-DATE',
+ 'FILE-CREATE-TIME',
+ 'FILE-INFORMATION',
+ 'FILE-INFO',
+ 'FILE-INFOR',
+ 'FILE-INFORM',
+ 'FILE-INFORMA',
+ 'FILE-INFORMAT',
+ 'FILE-INFORMATI',
+ 'FILE-INFORMATIO',
+ 'FILE-MOD-DATE',
+ 'FILE-MOD-TIME',
+ 'FILENAME',
+ 'FILE-NAME',
+ 'FILE-OFFSET',
+ 'FILE-OFF',
+ 'FILE-OFFS',
+ 'FILE-OFFSE',
+ 'FILE-SIZE',
+ 'FILE-TYPE',
+ 'FILL',
+ 'FILLED',
+ 'FILL-IN',
+ 'FILTERS',
+ 'FINAL',
+ 'FINALLY',
+ 'FIND',
+ 'FIND-BY-ROWID',
+ 'FIND-CASE-SENSITIVE',
+ 'FIND-CURRENT',
+ 'FINDER',
+ 'FIND-FIRST',
+ 'FIND-GLOBAL',
+ 'FIND-LAST',
+ 'FIND-NEXT-OCCURRENCE',
+ 'FIND-PREV-OCCURRENCE',
+ 'FIND-SELECT',
+ 'FIND-UNIQUE',
+ 'FIND-WRAP-AROUND',
+ 'FIRST',
+ 'FIRST-ASYNCH-REQUEST',
+ 'FIRST-CHILD',
+ 'FIRST-COLUMN',
+ 'FIRST-FORM',
+ 'FIRST-OBJECT',
+ 'FIRST-OF',
+ 'FIRST-PROCEDURE',
+ 'FIRST-PROC',
+ 'FIRST-PROCE',
+ 'FIRST-PROCED',
+ 'FIRST-PROCEDU',
+ 'FIRST-PROCEDUR',
+ 'FIRST-SERVER',
+ 'FIRST-TAB-ITEM',
+ 'FIRST-TAB-I',
+ 'FIRST-TAB-IT',
+ 'FIRST-TAB-ITE',
+ 'FIT-LAST-COLUMN',
+ 'FIXED-ONLY',
+ 'FLAT-BUTTON',
+ 'FLOAT',
+ 'FOCUS',
+ 'FOCUSED-ROW',
+ 'FOCUSED-ROW-SELECTED',
+ 'FONT',
+ 'FONT-TABLE',
+ 'FOR',
+ 'FORCE-FILE',
+ 'FOREGROUND',
+ 'FORE',
+ 'FOREG',
+ 'FOREGR',
+ 'FOREGRO',
+ 'FOREGROU',
+ 'FOREGROUN',
+ 'FORM',
+ 'FORMAT',
+ 'FORMA',
+ 'FORMATTED',
+ 'FORMATTE',
+ 'FORM-LONG-INPUT',
+ 'FORWARD',
+ 'FORWARDS',
+ 'FRAGMENT',
+ 'FRAGMEN',
+ 'FRAME',
+ 'FRAM',
+ 'FRAME-COL',
+ 'FRAME-DB',
+ 'FRAME-DOWN',
+ 'FRAME-FIELD',
+ 'FRAME-FILE',
+ 'FRAME-INDEX',
+ 'FRAME-INDE',
+ 'FRAME-LINE',
+ 'FRAME-NAME',
+ 'FRAME-ROW',
+ 'FRAME-SPACING',
+ 'FRAME-SPA',
+ 'FRAME-SPAC',
+ 'FRAME-SPACI',
+ 'FRAME-SPACIN',
+ 'FRAME-VALUE',
+ 'FRAME-VAL',
+ 'FRAME-VALU',
+ 'FRAME-X',
+ 'FRAME-Y',
+ 'FREQUENCY',
+ 'FROM',
+ 'FROM-CHARS',
+ 'FROM-C',
+ 'FROM-CH',
+ 'FROM-CHA',
+ 'FROM-CHAR',
+ 'FROM-CURRENT',
+ 'FROM-CUR',
+ 'FROM-CURR',
+ 'FROM-CURRE',
+ 'FROM-CURREN',
+ 'FROM-PIXELS',
+ 'FROM-P',
+ 'FROM-PI',
+ 'FROM-PIX',
+ 'FROM-PIXE',
+ 'FROM-PIXEL',
+ 'FULL-HEIGHT-CHARS',
+ 'FULL-HEIGHT',
+ 'FULL-HEIGHT-',
+ 'FULL-HEIGHT-C',
+ 'FULL-HEIGHT-CH',
+ 'FULL-HEIGHT-CHA',
+ 'FULL-HEIGHT-CHAR',
+ 'FULL-HEIGHT-PIXELS',
+ 'FULL-HEIGHT-P',
+ 'FULL-HEIGHT-PI',
+ 'FULL-HEIGHT-PIX',
+ 'FULL-HEIGHT-PIXE',
+ 'FULL-HEIGHT-PIXEL',
+ 'FULL-PATHNAME',
+ 'FULL-PATHN',
+ 'FULL-PATHNA',
+ 'FULL-PATHNAM',
+ 'FULL-WIDTH-CHARS',
+ 'FULL-WIDTH',
+ 'FULL-WIDTH-',
+ 'FULL-WIDTH-C',
+ 'FULL-WIDTH-CH',
+ 'FULL-WIDTH-CHA',
+ 'FULL-WIDTH-CHAR',
+ 'FULL-WIDTH-PIXELS',
+ 'FULL-WIDTH-P',
+ 'FULL-WIDTH-PI',
+ 'FULL-WIDTH-PIX',
+ 'FULL-WIDTH-PIXE',
+ 'FULL-WIDTH-PIXEL',
+ 'FUNCTION',
+ 'FUNCTION-CALL-TYPE',
+ 'GATEWAYS',
+ 'GATEWAY',
+ 'GE',
+ 'GENERATE-MD5',
+ 'GENERATE-PBE-KEY',
+ 'GENERATE-PBE-SALT',
+ 'GENERATE-RANDOM-KEY',
+ 'GENERATE-UUID',
+ 'GET',
+ 'GET-ATTR-CALL-TYPE',
+ 'GET-ATTRIBUTE-NODE',
+ 'GET-BINARY-DATA',
+ 'GET-BLUE-VALUE',
+ 'GET-BLUE',
+ 'GET-BLUE-',
+ 'GET-BLUE-V',
+ 'GET-BLUE-VA',
+ 'GET-BLUE-VAL',
+ 'GET-BLUE-VALU',
+ 'GET-BROWSE-COLUMN',
+ 'GET-BUFFER-HANDLEGETBYTE',
+ 'GET-BYTE',
+ 'GET-CALLBACK-PROC-CONTEXT',
+ 'GET-CALLBACK-PROC-NAME',
+ 'GET-CGI-LIST',
+ 'GET-CGI-LONG-VALUE',
+ 'GET-CGI-VALUE',
+ 'GET-CODEPAGES',
+ 'GET-COLLATIONS',
+ 'GET-CONFIG-VALUE',
+ 'GET-CURRENT',
+ 'GET-DOUBLE',
+ 'GET-DROPPED-FILE',
+ 'GET-DYNAMIC',
+ 'GET-ERROR-COLUMN',
+ 'GET-ERROR-ROW',
+ 'GET-FILE',
+ 'GET-FILE-NAME',
+ 'GET-FILE-OFFSET',
+ 'GET-FILE-OFFSE',
+ 'GET-FIRST',
+ 'GET-FLOAT',
+ 'GET-GREEN-VALUE',
+ 'GET-GREEN',
+ 'GET-GREEN-',
+ 'GET-GREEN-V',
+ 'GET-GREEN-VA',
+ 'GET-GREEN-VAL',
+ 'GET-GREEN-VALU',
+ 'GET-INDEX-BY-NAMESPACE-NAME',
+ 'GET-INDEX-BY-QNAME',
+ 'GET-INT64',
+ 'GET-ITERATION',
+ 'GET-KEY-VALUE',
+ 'GET-KEY-VAL',
+ 'GET-KEY-VALU',
+ 'GET-LAST',
+ 'GET-LOCALNAME-BY-INDEX',
+ 'GET-LONG',
+ 'GET-MESSAGE',
+ 'GET-NEXT',
+ 'GET-NUMBER',
+ 'GET-POINTER-VALUE',
+ 'GET-PREV',
+ 'GET-PRINTERS',
+ 'GET-PROPERTY',
+ 'GET-QNAME-BY-INDEX',
+ 'GET-RED-VALUE',
+ 'GET-RED',
+ 'GET-RED-',
+ 'GET-RED-V',
+ 'GET-RED-VA',
+ 'GET-RED-VAL',
+ 'GET-RED-VALU',
+ 'GET-REPOSITIONED-ROW',
+ 'GET-RGB-VALUE',
+ 'GET-SELECTED-WIDGET',
+ 'GET-SELECTED',
+ 'GET-SELECTED-',
+ 'GET-SELECTED-W',
+ 'GET-SELECTED-WI',
+ 'GET-SELECTED-WID',
+ 'GET-SELECTED-WIDG',
+ 'GET-SELECTED-WIDGE',
+ 'GET-SHORT',
+ 'GET-SIGNATURE',
+ 'GET-SIZE',
+ 'GET-STRING',
+ 'GET-TAB-ITEM',
+ 'GET-TEXT-HEIGHT-CHARS',
+ 'GET-TEXT-HEIGHT',
+ 'GET-TEXT-HEIGHT-',
+ 'GET-TEXT-HEIGHT-C',
+ 'GET-TEXT-HEIGHT-CH',
+ 'GET-TEXT-HEIGHT-CHA',
+ 'GET-TEXT-HEIGHT-CHAR',
+ 'GET-TEXT-HEIGHT-PIXELS',
+ 'GET-TEXT-HEIGHT-P',
+ 'GET-TEXT-HEIGHT-PI',
+ 'GET-TEXT-HEIGHT-PIX',
+ 'GET-TEXT-HEIGHT-PIXE',
+ 'GET-TEXT-HEIGHT-PIXEL',
+ 'GET-TEXT-WIDTH-CHARS',
+ 'GET-TEXT-WIDTH',
+ 'GET-TEXT-WIDTH-',
+ 'GET-TEXT-WIDTH-C',
+ 'GET-TEXT-WIDTH-CH',
+ 'GET-TEXT-WIDTH-CHA',
+ 'GET-TEXT-WIDTH-CHAR',
+ 'GET-TEXT-WIDTH-PIXELS',
+ 'GET-TEXT-WIDTH-P',
+ 'GET-TEXT-WIDTH-PI',
+ 'GET-TEXT-WIDTH-PIX',
+ 'GET-TEXT-WIDTH-PIXE',
+ 'GET-TEXT-WIDTH-PIXEL',
+ 'GET-TYPE-BY-INDEX',
+ 'GET-TYPE-BY-NAMESPACE-NAME',
+ 'GET-TYPE-BY-QNAME',
+ 'GET-UNSIGNED-LONG',
+ 'GET-UNSIGNED-SHORT',
+ 'GET-URI-BY-INDEX',
+ 'GET-VALUE-BY-INDEX',
+ 'GET-VALUE-BY-NAMESPACE-NAME',
+ 'GET-VALUE-BY-QNAME',
+ 'GET-WAIT-STATE',
+ 'GLOBAL',
+ 'GO-ON',
+ 'GO-PENDING',
+ 'GO-PEND',
+ 'GO-PENDI',
+ 'GO-PENDIN',
+ 'GRANT',
+ 'GRAPHIC-EDGE',
+ 'GRAPHIC-E',
+ 'GRAPHIC-ED',
+ 'GRAPHIC-EDG',
+ 'GRID-FACTOR-HORIZONTAL',
+ 'GRID-FACTOR-H',
+ 'GRID-FACTOR-HO',
+ 'GRID-FACTOR-HOR',
+ 'GRID-FACTOR-HORI',
+ 'GRID-FACTOR-HORIZ',
+ 'GRID-FACTOR-HORIZO',
+ 'GRID-FACTOR-HORIZON',
+ 'GRID-FACTOR-HORIZONT',
+ 'GRID-FACTOR-HORIZONTA',
+ 'GRID-FACTOR-VERTICAL',
+ 'GRID-FACTOR-V',
+ 'GRID-FACTOR-VE',
+ 'GRID-FACTOR-VER',
+ 'GRID-FACTOR-VERT',
+ 'GRID-FACTOR-VERTI',
+ 'GRID-FACTOR-VERTIC',
+ 'GRID-FACTOR-VERTICA',
+ 'GRID-SNAP',
+ 'GRID-UNIT-HEIGHT-CHARS',
+ 'GRID-UNIT-HEIGHT',
+ 'GRID-UNIT-HEIGHT-',
+ 'GRID-UNIT-HEIGHT-C',
+ 'GRID-UNIT-HEIGHT-CH',
+ 'GRID-UNIT-HEIGHT-CHA',
+ 'GRID-UNIT-HEIGHT-PIXELS',
+ 'GRID-UNIT-HEIGHT-P',
+ 'GRID-UNIT-HEIGHT-PI',
+ 'GRID-UNIT-HEIGHT-PIX',
+ 'GRID-UNIT-HEIGHT-PIXE',
+ 'GRID-UNIT-HEIGHT-PIXEL',
+ 'GRID-UNIT-WIDTH-CHARS',
+ 'GRID-UNIT-WIDTH',
+ 'GRID-UNIT-WIDTH-',
+ 'GRID-UNIT-WIDTH-C',
+ 'GRID-UNIT-WIDTH-CH',
+ 'GRID-UNIT-WIDTH-CHA',
+ 'GRID-UNIT-WIDTH-CHAR',
+ 'GRID-UNIT-WIDTH-PIXELS',
+ 'GRID-UNIT-WIDTH-P',
+ 'GRID-UNIT-WIDTH-PI',
+ 'GRID-UNIT-WIDTH-PIX',
+ 'GRID-UNIT-WIDTH-PIXE',
+ 'GRID-UNIT-WIDTH-PIXEL',
+ 'GRID-VISIBLE',
+ 'GROUP',
+ 'GT',
+ 'GUID',
+ 'HANDLER',
+ 'HAS-RECORDS',
+ 'HAVING',
+ 'HEADER',
+ 'HEIGHT-CHARS',
+ 'HEIGHT',
+ 'HEIGHT-',
+ 'HEIGHT-C',
+ 'HEIGHT-CH',
+ 'HEIGHT-CHA',
+ 'HEIGHT-CHAR',
+ 'HEIGHT-PIXELS',
+ 'HEIGHT-P',
+ 'HEIGHT-PI',
+ 'HEIGHT-PIX',
+ 'HEIGHT-PIXE',
+ 'HEIGHT-PIXEL',
+ 'HELP',
+ 'HEX-DECODE',
+ 'HEX-ENCODE',
+ 'HIDDEN',
+ 'HIDE',
+ 'HORIZONTAL',
+ 'HORI',
+ 'HORIZ',
+ 'HORIZO',
+ 'HORIZON',
+ 'HORIZONT',
+ 'HORIZONTA',
+ 'HOST-BYTE-ORDER',
+ 'HTML-CHARSET',
+ 'HTML-END-OF-LINE',
+ 'HTML-END-OF-PAGE',
+ 'HTML-FRAME-BEGIN',
+ 'HTML-FRAME-END',
+ 'HTML-HEADER-BEGIN',
+ 'HTML-HEADER-END',
+ 'HTML-TITLE-BEGIN',
+ 'HTML-TITLE-END',
+ 'HWND',
+ 'ICON',
+ 'IF',
+ 'IMAGE',
+ 'IMAGE-DOWN',
+ 'IMAGE-INSENSITIVE',
+ 'IMAGE-SIZE',
+ 'IMAGE-SIZE-CHARS',
+ 'IMAGE-SIZE-C',
+ 'IMAGE-SIZE-CH',
+ 'IMAGE-SIZE-CHA',
+ 'IMAGE-SIZE-CHAR',
+ 'IMAGE-SIZE-PIXELS',
+ 'IMAGE-SIZE-P',
+ 'IMAGE-SIZE-PI',
+ 'IMAGE-SIZE-PIX',
+ 'IMAGE-SIZE-PIXE',
+ 'IMAGE-SIZE-PIXEL',
+ 'IMAGE-UP',
+ 'IMMEDIATE-DISPLAY',
+ 'IMPLEMENTS',
+ 'IMPORT',
+ 'IMPORT-PRINCIPAL',
+ 'IN',
+ 'INCREMENT-EXCLUSIVE-ID',
+ 'INDEX',
+ 'INDEXED-REPOSITION',
+ 'INDEX-HINT',
+ 'INDEX-INFORMATION',
+ 'INDICATOR',
+ 'INFORMATION',
+ 'INFO',
+ 'INFOR',
+ 'INFORM',
+ 'INFORMA',
+ 'INFORMAT',
+ 'INFORMATI',
+ 'INFORMATIO',
+ 'IN-HANDLE',
+ 'INHERIT-BGCOLOR',
+ 'INHERIT-BGC',
+ 'INHERIT-BGCO',
+ 'INHERIT-BGCOL',
+ 'INHERIT-BGCOLO',
+ 'INHERIT-FGCOLOR',
+ 'INHERIT-FGC',
+ 'INHERIT-FGCO',
+ 'INHERIT-FGCOL',
+ 'INHERIT-FGCOLO',
+ 'INHERITS',
+ 'INITIAL',
+ 'INIT',
+ 'INITI',
+ 'INITIA',
+ 'INITIAL-DIR',
+ 'INITIAL-FILTER',
+ 'INITIALIZE-DOCUMENT-TYPE',
+ 'INITIATE',
+ 'INNER-CHARS',
+ 'INNER-LINES',
+ 'INPUT',
+ 'INPUT-OUTPUT',
+ 'INPUT-O',
+ 'INPUT-OU',
+ 'INPUT-OUT',
+ 'INPUT-OUTP',
+ 'INPUT-OUTPU',
+ 'INPUT-VALUE',
+ 'INSERT',
+ 'INSERT-ATTRIBUTE',
+ 'INSERT-BACKTAB',
+ 'INSERT-B',
+ 'INSERT-BA',
+ 'INSERT-BAC',
+ 'INSERT-BACK',
+ 'INSERT-BACKT',
+ 'INSERT-BACKTA',
+ 'INSERT-FILE',
+ 'INSERT-ROW',
+ 'INSERT-STRING',
+ 'INSERT-TAB',
+ 'INSERT-T',
+ 'INSERT-TA',
+ 'INTERFACE',
+ 'INTERNAL-ENTRIES',
+ 'INTO',
+ 'INVOKE',
+ 'IS',
+ 'IS-ATTR-SPACE',
+ 'IS-ATTR',
+ 'IS-ATTR-',
+ 'IS-ATTR-S',
+ 'IS-ATTR-SP',
+ 'IS-ATTR-SPA',
+ 'IS-ATTR-SPAC',
+ 'IS-CLASS',
+ 'IS-CLAS',
+ 'IS-LEAD-BYTE',
+ 'IS-OPEN',
+ 'IS-PARAMETER-SET',
+ 'IS-ROW-SELECTED',
+ 'IS-SELECTED',
+ 'ITEM',
+ 'ITEMS-PER-ROW',
+ 'JOIN',
+ 'JOIN-BY-SQLDB',
+ 'KBLABEL',
+ 'KEEP-CONNECTION-OPEN',
+ 'KEEP-FRAME-Z-ORDER',
+ 'KEEP-FRAME-Z',
+ 'KEEP-FRAME-Z-',
+ 'KEEP-FRAME-Z-O',
+ 'KEEP-FRAME-Z-OR',
+ 'KEEP-FRAME-Z-ORD',
+ 'KEEP-FRAME-Z-ORDE',
+ 'KEEP-MESSAGES',
+ 'KEEP-SECURITY-CACHE',
+ 'KEEP-TAB-ORDER',
+ 'KEY',
+ 'KEYCODE',
+ 'KEY-CODE',
+ 'KEYFUNCTION',
+ 'KEYFUNC',
+ 'KEYFUNCT',
+ 'KEYFUNCTI',
+ 'KEYFUNCTIO',
+ 'KEY-FUNCTION',
+ 'KEY-FUNC',
+ 'KEY-FUNCT',
+ 'KEY-FUNCTI',
+ 'KEY-FUNCTIO',
+ 'KEYLABEL',
+ 'KEY-LABEL',
+ 'KEYS',
+ 'KEYWORD',
+ 'KEYWORD-ALL',
+ 'LABEL',
+ 'LABEL-BGCOLOR',
+ 'LABEL-BGC',
+ 'LABEL-BGCO',
+ 'LABEL-BGCOL',
+ 'LABEL-BGCOLO',
+ 'LABEL-DCOLOR',
+ 'LABEL-DC',
+ 'LABEL-DCO',
+ 'LABEL-DCOL',
+ 'LABEL-DCOLO',
+ 'LABEL-FGCOLOR',
+ 'LABEL-FGC',
+ 'LABEL-FGCO',
+ 'LABEL-FGCOL',
+ 'LABEL-FGCOLO',
+ 'LABEL-FONT',
+ 'LABEL-PFCOLOR',
+ 'LABEL-PFC',
+ 'LABEL-PFCO',
+ 'LABEL-PFCOL',
+ 'LABEL-PFCOLO',
+ 'LABELS',
+ 'LANDSCAPE',
+ 'LANGUAGES',
+ 'LANGUAGE',
+ 'LARGE',
+ 'LARGE-TO-SMALL',
+ 'LAST',
+ 'LAST-ASYNCH-REQUEST',
+ 'LAST-BATCH',
+ 'LAST-CHILD',
+ 'LAST-EVENT',
+ 'LAST-EVEN',
+ 'LAST-FORM',
+ 'LASTKEY',
+ 'LAST-KEY',
+ 'LAST-OBJECT',
+ 'LAST-OF',
+ 'LAST-PROCEDURE',
+ 'LAST-PROCE',
+ 'LAST-PROCED',
+ 'LAST-PROCEDU',
+ 'LAST-PROCEDUR',
+ 'LAST-SERVER',
+ 'LAST-TAB-ITEM',
+ 'LAST-TAB-I',
+ 'LAST-TAB-IT',
+ 'LAST-TAB-ITE',
+ 'LC',
+ 'LDBNAME',
+ 'LE',
+ 'LEAVE',
+ 'LEFT-ALIGNED',
+ 'LEFT-ALIGN',
+ 'LEFT-ALIGNE',
+ 'LEFT-TRIM',
+ 'LENGTH',
+ 'LIBRARY',
+ 'LIKE',
+ 'LIKE-SEQUENTIAL',
+ 'LINE',
+ 'LINE-COUNTER',
+ 'LINE-COUNT',
+ 'LINE-COUNTE',
+ 'LIST-EVENTS',
+ 'LISTING',
+ 'LISTI',
+ 'LISTIN',
+ 'LIST-ITEM-PAIRS',
+ 'LIST-ITEMS',
+ 'LIST-PROPERTY-NAMES',
+ 'LIST-QUERY-ATTRS',
+ 'LIST-SET-ATTRS',
+ 'LIST-WIDGETS',
+ 'LITERAL-QUESTION',
+ 'LITTLE-ENDIAN',
+ 'LOAD',
+ 'LOAD-DOMAINS',
+ 'LOAD-ICON',
+ 'LOAD-IMAGE',
+ 'LOAD-IMAGE-DOWN',
+ 'LOAD-IMAGE-INSENSITIVE',
+ 'LOAD-IMAGE-UP',
+ 'LOAD-MOUSE-POINTER',
+ 'LOAD-MOUSE-P',
+ 'LOAD-MOUSE-PO',
+ 'LOAD-MOUSE-POI',
+ 'LOAD-MOUSE-POIN',
+ 'LOAD-MOUSE-POINT',
+ 'LOAD-MOUSE-POINTE',
+ 'LOAD-PICTURE',
+ 'LOAD-SMALL-ICON',
+ 'LOCAL-NAME',
+ 'LOCATOR-COLUMN-NUMBER',
+ 'LOCATOR-LINE-NUMBER',
+ 'LOCATOR-PUBLIC-ID',
+ 'LOCATOR-SYSTEM-ID',
+ 'LOCATOR-TYPE',
+ 'LOCKED',
+ 'LOCK-REGISTRATION',
+ 'LOG',
+ 'LOG-AUDIT-EVENT',
+ 'LOGIN-EXPIRATION-TIMESTAMP',
+ 'LOGIN-HOST',
+ 'LOGIN-STATE',
+ 'LOG-MANAGER',
+ 'LOGOUT',
+ 'LOOKAHEAD',
+ 'LOOKUP',
+ 'LT',
+ 'MACHINE-CLASS',
+ 'MANDATORY',
+ 'MANUAL-HIGHLIGHT',
+ 'MAP',
+ 'MARGIN-EXTRA',
+ 'MARGIN-HEIGHT-CHARS',
+ 'MARGIN-HEIGHT',
+ 'MARGIN-HEIGHT-',
+ 'MARGIN-HEIGHT-C',
+ 'MARGIN-HEIGHT-CH',
+ 'MARGIN-HEIGHT-CHA',
+ 'MARGIN-HEIGHT-CHAR',
+ 'MARGIN-HEIGHT-PIXELS',
+ 'MARGIN-HEIGHT-P',
+ 'MARGIN-HEIGHT-PI',
+ 'MARGIN-HEIGHT-PIX',
+ 'MARGIN-HEIGHT-PIXE',
+ 'MARGIN-HEIGHT-PIXEL',
+ 'MARGIN-WIDTH-CHARS',
+ 'MARGIN-WIDTH',
+ 'MARGIN-WIDTH-',
+ 'MARGIN-WIDTH-C',
+ 'MARGIN-WIDTH-CH',
+ 'MARGIN-WIDTH-CHA',
+ 'MARGIN-WIDTH-CHAR',
+ 'MARGIN-WIDTH-PIXELS',
+ 'MARGIN-WIDTH-P',
+ 'MARGIN-WIDTH-PI',
+ 'MARGIN-WIDTH-PIX',
+ 'MARGIN-WIDTH-PIXE',
+ 'MARGIN-WIDTH-PIXEL',
+ 'MARK-NEW',
+ 'MARK-ROW-STATE',
+ 'MATCHES',
+ 'MAX-BUTTON',
+ 'MAX-CHARS',
+ 'MAX-DATA-GUESS',
+ 'MAX-HEIGHT',
+ 'MAX-HEIGHT-CHARS',
+ 'MAX-HEIGHT-C',
+ 'MAX-HEIGHT-CH',
+ 'MAX-HEIGHT-CHA',
+ 'MAX-HEIGHT-CHAR',
+ 'MAX-HEIGHT-PIXELS',
+ 'MAX-HEIGHT-P',
+ 'MAX-HEIGHT-PI',
+ 'MAX-HEIGHT-PIX',
+ 'MAX-HEIGHT-PIXE',
+ 'MAX-HEIGHT-PIXEL',
+ 'MAXIMIZE',
+ 'MAXIMUM',
+ 'MAX',
+ 'MAXI',
+ 'MAXIM',
+ 'MAXIMU',
+ 'MAXIMUM-LEVEL',
+ 'MAX-ROWS',
+ 'MAX-SIZE',
+ 'MAX-VALUE',
+ 'MAX-VAL',
+ 'MAX-VALU',
+ 'MAX-WIDTH-CHARS',
+ 'MAX-WIDTH',
+ 'MAX-WIDTH-',
+ 'MAX-WIDTH-C',
+ 'MAX-WIDTH-CH',
+ 'MAX-WIDTH-CHA',
+ 'MAX-WIDTH-CHAR',
+ 'MAX-WIDTH-PIXELS',
+ 'MAX-WIDTH-P',
+ 'MAX-WIDTH-PI',
+ 'MAX-WIDTH-PIX',
+ 'MAX-WIDTH-PIXE',
+ 'MAX-WIDTH-PIXEL',
+ 'MD5-DIGEST',
+ 'MEMBER',
+ 'MEMPTR-TO-NODE-VALUE',
+ 'MENU',
+ 'MENUBAR',
+ 'MENU-BAR',
+ 'MENU-ITEM',
+ 'MENU-KEY',
+ 'MENU-K',
+ 'MENU-KE',
+ 'MENU-MOUSE',
+ 'MENU-M',
+ 'MENU-MO',
+ 'MENU-MOU',
+ 'MENU-MOUS',
+ 'MERGE-BY-FIELD',
+ 'MESSAGE',
+ 'MESSAGE-AREA',
+ 'MESSAGE-AREA-FONT',
+ 'MESSAGE-LINES',
+ 'METHOD',
+ 'MIN-BUTTON',
+ 'MIN-COLUMN-WIDTH-CHARS',
+ 'MIN-COLUMN-WIDTH-C',
+ 'MIN-COLUMN-WIDTH-CH',
+ 'MIN-COLUMN-WIDTH-CHA',
+ 'MIN-COLUMN-WIDTH-CHAR',
+ 'MIN-COLUMN-WIDTH-PIXELS',
+ 'MIN-COLUMN-WIDTH-P',
+ 'MIN-COLUMN-WIDTH-PI',
+ 'MIN-COLUMN-WIDTH-PIX',
+ 'MIN-COLUMN-WIDTH-PIXE',
+ 'MIN-COLUMN-WIDTH-PIXEL',
+ 'MIN-HEIGHT-CHARS',
+ 'MIN-HEIGHT',
+ 'MIN-HEIGHT-',
+ 'MIN-HEIGHT-C',
+ 'MIN-HEIGHT-CH',
+ 'MIN-HEIGHT-CHA',
+ 'MIN-HEIGHT-CHAR',
+ 'MIN-HEIGHT-PIXELS',
+ 'MIN-HEIGHT-P',
+ 'MIN-HEIGHT-PI',
+ 'MIN-HEIGHT-PIX',
+ 'MIN-HEIGHT-PIXE',
+ 'MIN-HEIGHT-PIXEL',
+ 'MINIMUM',
+ 'MIN',
+ 'MINI',
+ 'MINIM',
+ 'MINIMU',
+ 'MIN-SIZE',
+ 'MIN-VALUE',
+ 'MIN-VAL',
+ 'MIN-VALU',
+ 'MIN-WIDTH-CHARS',
+ 'MIN-WIDTH',
+ 'MIN-WIDTH-',
+ 'MIN-WIDTH-C',
+ 'MIN-WIDTH-CH',
+ 'MIN-WIDTH-CHA',
+ 'MIN-WIDTH-CHAR',
+ 'MIN-WIDTH-PIXELS',
+ 'MIN-WIDTH-P',
+ 'MIN-WIDTH-PI',
+ 'MIN-WIDTH-PIX',
+ 'MIN-WIDTH-PIXE',
+ 'MIN-WIDTH-PIXEL',
+ 'MODIFIED',
+ 'MODULO',
+ 'MOD',
+ 'MODU',
+ 'MODUL',
+ 'MONTH',
+ 'MOUSE',
+ 'MOUSE-POINTER',
+ 'MOUSE-P',
+ 'MOUSE-PO',
+ 'MOUSE-POI',
+ 'MOUSE-POIN',
+ 'MOUSE-POINT',
+ 'MOUSE-POINTE',
+ 'MOVABLE',
+ 'MOVE-AFTER-TAB-ITEM',
+ 'MOVE-AFTER',
+ 'MOVE-AFTER-',
+ 'MOVE-AFTER-T',
+ 'MOVE-AFTER-TA',
+ 'MOVE-AFTER-TAB',
+ 'MOVE-AFTER-TAB-',
+ 'MOVE-AFTER-TAB-I',
+ 'MOVE-AFTER-TAB-IT',
+ 'MOVE-AFTER-TAB-ITE',
+ 'MOVE-BEFORE-TAB-ITEM',
+ 'MOVE-BEFOR',
+ 'MOVE-BEFORE',
+ 'MOVE-BEFORE-',
+ 'MOVE-BEFORE-T',
+ 'MOVE-BEFORE-TA',
+ 'MOVE-BEFORE-TAB',
+ 'MOVE-BEFORE-TAB-',
+ 'MOVE-BEFORE-TAB-I',
+ 'MOVE-BEFORE-TAB-IT',
+ 'MOVE-BEFORE-TAB-ITE',
+ 'MOVE-COLUMN',
+ 'MOVE-COL',
+ 'MOVE-COLU',
+ 'MOVE-COLUM',
+ 'MOVE-TO-BOTTOM',
+ 'MOVE-TO-B',
+ 'MOVE-TO-BO',
+ 'MOVE-TO-BOT',
+ 'MOVE-TO-BOTT',
+ 'MOVE-TO-BOTTO',
+ 'MOVE-TO-EOF',
+ 'MOVE-TO-TOP',
+ 'MOVE-TO-T',
+ 'MOVE-TO-TO',
+ 'MPE',
+ 'MULTI-COMPILE',
+ 'MULTIPLE',
+ 'MULTIPLE-KEY',
+ 'MULTITASKING-INTERVAL',
+ 'MUST-EXIST',
+ 'NAME',
+ 'NAMESPACE-PREFIX',
+ 'NAMESPACE-URI',
+ 'NATIVE',
+ 'NE',
+ 'NEEDS-APPSERVER-PROMPT',
+ 'NEEDS-PROMPT',
+ 'NEW',
+ 'NEW-INSTANCE',
+ 'NEW-ROW',
+ 'NEXT',
+ 'NEXT-COLUMN',
+ 'NEXT-PROMPT',
+ 'NEXT-ROWID',
+ 'NEXT-SIBLING',
+ 'NEXT-TAB-ITEM',
+ 'NEXT-TAB-I',
+ 'NEXT-TAB-IT',
+ 'NEXT-TAB-ITE',
+ 'NEXT-VALUE',
+ 'NO',
+ 'NO-APPLY',
+ 'NO-ARRAY-MESSAGE',
+ 'NO-ASSIGN',
+ 'NO-ATTR-LIST',
+ 'NO-ATTR',
+ 'NO-ATTR-',
+ 'NO-ATTR-L',
+ 'NO-ATTR-LI',
+ 'NO-ATTR-LIS',
+ 'NO-ATTR-SPACE',
+ 'NO-ATTR-S',
+ 'NO-ATTR-SP',
+ 'NO-ATTR-SPA',
+ 'NO-ATTR-SPAC',
+ 'NO-AUTO-VALIDATE',
+ 'NO-BIND-WHERE',
+ 'NO-BOX',
+ 'NO-CONSOLE',
+ 'NO-CONVERT',
+ 'NO-CONVERT-3D-COLORS',
+ 'NO-CURRENT-VALUE',
+ 'NO-DEBUG',
+ 'NODE-VALUE-TO-MEMPTR',
+ 'NO-DRAG',
+ 'NO-ECHO',
+ 'NO-EMPTY-SPACE',
+ 'NO-ERROR',
+ 'NO-FILL',
+ 'NO-F',
+ 'NO-FI',
+ 'NO-FIL',
+ 'NO-FOCUS',
+ 'NO-HELP',
+ 'NO-HIDE',
+ 'NO-INDEX-HINT',
+ 'NO-INHERIT-BGCOLOR',
+ 'NO-INHERIT-BGC',
+ 'NO-INHERIT-BGCO',
+ 'NO-INHERIT-FGCOLOR',
+ 'NO-INHERIT-FGC',
+ 'NO-INHERIT-FGCO',
+ 'NO-INHERIT-FGCOL',
+ 'NO-INHERIT-FGCOLO',
+ 'NO-JOIN-BY-SQLDB',
+ 'NO-LABELS',
+ 'NO-LABE',
+ 'NO-LOBS',
+ 'NO-LOCK',
+ 'NO-LOOKAHEAD',
+ 'NO-MAP',
+ 'NO-MESSAGE',
+ 'NO-MES',
+ 'NO-MESS',
+ 'NO-MESSA',
+ 'NO-MESSAG',
+ 'NONAMESPACE-SCHEMA-LOCATION',
+ 'NONE',
+ 'NO-PAUSE',
+ 'NO-PREFETCH',
+ 'NO-PREFE',
+ 'NO-PREFET',
+ 'NO-PREFETC',
+ 'NORMALIZE',
+ 'NO-ROW-MARKERS',
+ 'NO-SCROLLBAR-VERTICAL',
+ 'NO-SEPARATE-CONNECTION',
+ 'NO-SEPARATORS',
+ 'NOT',
+ 'NO-TAB-STOP',
+ 'NOT-ACTIVE',
+ 'NO-UNDERLINE',
+ 'NO-UND',
+ 'NO-UNDE',
+ 'NO-UNDER',
+ 'NO-UNDERL',
+ 'NO-UNDERLI',
+ 'NO-UNDERLIN',
+ 'NO-UNDO',
+ 'NO-VALIDATE',
+ 'NO-VAL',
+ 'NO-VALI',
+ 'NO-VALID',
+ 'NO-VALIDA',
+ 'NO-VALIDAT',
+ 'NOW',
+ 'NO-WAIT',
+ 'NO-WORD-WRAP',
+ 'NULL',
+ 'NUM-ALIASES',
+ 'NUM-ALI',
+ 'NUM-ALIA',
+ 'NUM-ALIAS',
+ 'NUM-ALIASE',
+ 'NUM-BUFFERS',
+ 'NUM-BUTTONS',
+ 'NUM-BUT',
+ 'NUM-BUTT',
+ 'NUM-BUTTO',
+ 'NUM-BUTTON',
+ 'NUM-COLUMNS',
+ 'NUM-COL',
+ 'NUM-COLU',
+ 'NUM-COLUM',
+ 'NUM-COLUMN',
+ 'NUM-COPIES',
+ 'NUM-DBS',
+ 'NUM-DROPPED-FILES',
+ 'NUM-ENTRIES',
+ 'NUMERIC',
+ 'NUMERIC-FORMAT',
+ 'NUMERIC-F',
+ 'NUMERIC-FO',
+ 'NUMERIC-FOR',
+ 'NUMERIC-FORM',
+ 'NUMERIC-FORMA',
+ 'NUM-FIELDS',
+ 'NUM-FORMATS',
+ 'NUM-ITEMS',
+ 'NUM-ITERATIONS',
+ 'NUM-LINES',
+ 'NUM-LOCKED-COLUMNS',
+ 'NUM-LOCKED-COL',
+ 'NUM-LOCKED-COLU',
+ 'NUM-LOCKED-COLUM',
+ 'NUM-LOCKED-COLUMN',
+ 'NUM-MESSAGES',
+ 'NUM-PARAMETERS',
+ 'NUM-REFERENCES',
+ 'NUM-REPLACED',
+ 'NUM-RESULTS',
+ 'NUM-SELECTED-ROWS',
+ 'NUM-SELECTED-WIDGETS',
+ 'NUM-SELECTED',
+ 'NUM-SELECTED-',
+ 'NUM-SELECTED-W',
+ 'NUM-SELECTED-WI',
+ 'NUM-SELECTED-WID',
+ 'NUM-SELECTED-WIDG',
+ 'NUM-SELECTED-WIDGE',
+ 'NUM-SELECTED-WIDGET',
+ 'NUM-TABS',
+ 'NUM-TO-RETAIN',
+ 'NUM-VISIBLE-COLUMNS',
+ 'OCTET-LENGTH',
+ 'OF',
+ 'OFF',
+ 'OK',
+ 'OK-CANCEL',
+ 'OLD',
+ 'ON',
+ 'ON-FRAME-BORDER',
+ 'ON-FRAME',
+ 'ON-FRAME-',
+ 'ON-FRAME-B',
+ 'ON-FRAME-BO',
+ 'ON-FRAME-BOR',
+ 'ON-FRAME-BORD',
+ 'ON-FRAME-BORDE',
+ 'OPEN',
+ 'OPSYS',
+ 'OPTION',
+ 'OR',
+ 'ORDERED-JOIN',
+ 'ORDINAL',
+ 'OS-APPEND',
+ 'OS-COMMAND',
+ 'OS-COPY',
+ 'OS-CREATE-DIR',
+ 'OS-DELETE',
+ 'OS-DIR',
+ 'OS-DRIVES',
+ 'OS-DRIVE',
+ 'OS-ERROR',
+ 'OS-GETENV',
+ 'OS-RENAME',
+ 'OTHERWISE',
+ 'OUTPUT',
+ 'OVERLAY',
+ 'OVERRIDE',
+ 'OWNER',
+ 'PAGE',
+ 'PAGE-BOTTOM',
+ 'PAGE-BOT',
+ 'PAGE-BOTT',
+ 'PAGE-BOTTO',
+ 'PAGED',
+ 'PAGE-NUMBER',
+ 'PAGE-NUM',
+ 'PAGE-NUMB',
+ 'PAGE-NUMBE',
+ 'PAGE-SIZE',
+ 'PAGE-TOP',
+ 'PAGE-WIDTH',
+ 'PAGE-WID',
+ 'PAGE-WIDT',
+ 'PARAMETER',
+ 'PARAM',
+ 'PARAME',
+ 'PARAMET',
+ 'PARAMETE',
+ 'PARENT',
+ 'PARSE-STATUS',
+ 'PARTIAL-KEY',
+ 'PASCAL',
+ 'PASSWORD-FIELD',
+ 'PATHNAME',
+ 'PAUSE',
+ 'PBE-HASH-ALGORITHM',
+ 'PBE-HASH-ALG',
+ 'PBE-HASH-ALGO',
+ 'PBE-HASH-ALGOR',
+ 'PBE-HASH-ALGORI',
+ 'PBE-HASH-ALGORIT',
+ 'PBE-HASH-ALGORITH',
+ 'PBE-KEY-ROUNDS',
+ 'PDBNAME',
+ 'PERSISTENT',
+ 'PERSIST',
+ 'PERSISTE',
+ 'PERSISTEN',
+ 'PERSISTENT-CACHE-DISABLED',
+ 'PFCOLOR',
+ 'PFC',
+ 'PFCO',
+ 'PFCOL',
+ 'PFCOLO',
+ 'PIXELS',
+ 'PIXELS-PER-COLUMN',
+ 'PIXELS-PER-COL',
+ 'PIXELS-PER-COLU',
+ 'PIXELS-PER-COLUM',
+ 'PIXELS-PER-ROW',
+ 'POPUP-MENU',
+ 'POPUP-M',
+ 'POPUP-ME',
+ 'POPUP-MEN',
+ 'POPUP-ONLY',
+ 'POPUP-O',
+ 'POPUP-ON',
+ 'POPUP-ONL',
+ 'PORTRAIT',
+ 'POSITION',
+ 'PRECISION',
+ 'PREFER-DATASET',
+ 'PREPARED',
+ 'PREPARE-STRING',
+ 'PREPROCESS',
+ 'PREPROC',
+ 'PREPROCE',
+ 'PREPROCES',
+ 'PRESELECT',
+ 'PRESEL',
+ 'PRESELE',
+ 'PRESELEC',
+ 'PREV',
+ 'PREV-COLUMN',
+ 'PREV-SIBLING',
+ 'PREV-TAB-ITEM',
+ 'PREV-TAB-I',
+ 'PREV-TAB-IT',
+ 'PREV-TAB-ITE',
+ 'PRIMARY',
+ 'PRINTER',
+ 'PRINTER-CONTROL-HANDLE',
+ 'PRINTER-HDC',
+ 'PRINTER-NAME',
+ 'PRINTER-PORT',
+ 'PRINTER-SETUP',
+ 'PRIVATE',
+ 'PRIVATE-DATA',
+ 'PRIVATE-D',
+ 'PRIVATE-DA',
+ 'PRIVATE-DAT',
+ 'PRIVILEGES',
+ 'PROCEDURE',
+ 'PROCE',
+ 'PROCED',
+ 'PROCEDU',
+ 'PROCEDUR',
+ 'PROCEDURE-CALL-TYPE',
+ 'PROCESS',
+ 'PROC-HANDLE',
+ 'PROC-HA',
+ 'PROC-HAN',
+ 'PROC-HAND',
+ 'PROC-HANDL',
+ 'PROC-STATUS',
+ 'PROC-ST',
+ 'PROC-STA',
+ 'PROC-STAT',
+ 'PROC-STATU',
+ 'proc-text',
+ 'proc-text-buffe',
+ 'PROFILER',
+ 'PROGRAM-NAME',
+ 'PROGRESS',
+ 'PROGRESS-SOURCE',
+ 'PROGRESS-S',
+ 'PROGRESS-SO',
+ 'PROGRESS-SOU',
+ 'PROGRESS-SOUR',
+ 'PROGRESS-SOURC',
+ 'PROMPT',
+ 'PROMPT-FOR',
+ 'PROMPT-F',
+ 'PROMPT-FO',
+ 'PROMSGS',
+ 'PROPATH',
+ 'PROPERTY',
+ 'PROTECTED',
+ 'PROVERSION',
+ 'PROVERS',
+ 'PROVERSI',
+ 'PROVERSIO',
+ 'PROXY',
+ 'PROXY-PASSWORD',
+ 'PROXY-USERID',
+ 'PUBLIC',
+ 'PUBLIC-ID',
+ 'PUBLISH',
+ 'PUBLISHED-EVENTS',
+ 'PUT',
+ 'PUTBYTE',
+ 'PUT-BYTE',
+ 'PUT-DOUBLE',
+ 'PUT-FLOAT',
+ 'PUT-INT64',
+ 'PUT-KEY-VALUE',
+ 'PUT-KEY-VAL',
+ 'PUT-KEY-VALU',
+ 'PUT-LONG',
+ 'PUT-SHORT',
+ 'PUT-STRING',
+ 'PUT-UNSIGNED-LONG',
+ 'QUERY',
+ 'QUERY-CLOSE',
+ 'QUERY-OFF-END',
+ 'QUERY-OPEN',
+ 'QUERY-PREPARE',
+ 'QUERY-TUNING',
+ 'QUESTION',
+ 'QUIT',
+ 'QUOTER',
+ 'RADIO-BUTTONS',
+ 'RADIO-SET',
+ 'RANDOM',
+ 'RAW-TRANSFER',
+ 'RCODE-INFORMATION',
+ 'RCODE-INFO',
+ 'RCODE-INFOR',
+ 'RCODE-INFORM',
+ 'RCODE-INFORMA',
+ 'RCODE-INFORMAT',
+ 'RCODE-INFORMATI',
+ 'RCODE-INFORMATIO',
+ 'READ-AVAILABLE',
+ 'READ-EXACT-NUM',
+ 'READ-FILE',
+ 'READKEY',
+ 'READ-ONLY',
+ 'READ-XML',
+ 'READ-XMLSCHEMA',
+ 'REAL',
+ 'RECORD-LENGTH',
+ 'RECTANGLE',
+ 'RECT',
+ 'RECTA',
+ 'RECTAN',
+ 'RECTANG',
+ 'RECTANGL',
+ 'RECURSIVE',
+ 'REFERENCE-ONLY',
+ 'REFRESH',
+ 'REFRESHABLE',
+ 'REFRESH-AUDIT-POLICY',
+ 'REGISTER-DOMAIN',
+ 'RELEASE',
+ 'REMOTE',
+ 'REMOVE-EVENTS-PROCEDURE',
+ 'REMOVE-SUPER-PROCEDURE',
+ 'REPEAT',
+ 'REPLACE',
+ 'REPLACE-SELECTION-TEXT',
+ 'REPOSITION',
+ 'REPOSITION-BACKWARD',
+ 'REPOSITION-FORWARD',
+ 'REPOSITION-MODE',
+ 'REPOSITION-TO-ROW',
+ 'REPOSITION-TO-ROWID',
+ 'REQUEST',
+ 'RESET',
+ 'RESIZABLE',
+ 'RESIZA',
+ 'RESIZAB',
+ 'RESIZABL',
+ 'RESIZE',
+ 'RESTART-ROW',
+ 'RESTART-ROWID',
+ 'RETAIN',
+ 'RETAIN-SHAPE',
+ 'RETRY',
+ 'RETRY-CANCEL',
+ 'RETURN',
+ 'RETURN-INSERTED',
+ 'RETURN-INS',
+ 'RETURN-INSE',
+ 'RETURN-INSER',
+ 'RETURN-INSERT',
+ 'RETURN-INSERTE',
+ 'RETURNS',
+ 'RETURN-TO-START-DIR',
+ 'RETURN-TO-START-DI',
+ 'RETURN-VALUE',
+ 'RETURN-VAL',
+ 'RETURN-VALU',
+ 'RETURN-VALUE-DATA-TYPE',
+ 'REVERSE-FROM',
+ 'REVERT',
+ 'REVOKE',
+ 'RGB-VALUE',
+ 'RIGHT-ALIGNED',
+ 'RETURN-ALIGN',
+ 'RETURN-ALIGNE',
+ 'RIGHT-TRIM',
+ 'R-INDEX',
+ 'ROLES',
+ 'ROUND',
+ 'ROUTINE-LEVEL',
+ 'ROW',
+ 'ROW-HEIGHT-CHARS',
+ 'ROW-HEIGHT-PIXELS',
+ 'ROW-MARKERS',
+ 'ROW-OF',
+ 'ROW-RESIZABLE',
+ 'RULE',
+ 'RUN',
+ 'RUN-PROCEDURE',
+ 'SAVE',
+ 'SAVE-AS',
+ 'SAVE-FILE',
+ 'SAX-COMPLETE',
+ 'SAX-COMPLE',
+ 'SAX-COMPLET',
+ 'SAX-PARSE',
+ 'SAX-PARSE-FIRST',
+ 'SAX-PARSE-NEXT',
+ 'SAX-PARSER-ERROR',
+ 'SAX-RUNNING',
+ 'SAX-UNINITIALIZED',
+ 'SAX-WRITE-BEGIN',
+ 'SAX-WRITE-COMPLETE',
+ 'SAX-WRITE-CONTENT',
+ 'SAX-WRITE-ELEMENT',
+ 'SAX-WRITE-ERROR',
+ 'SAX-WRITE-IDLE',
+ 'SAX-WRITER',
+ 'SAX-WRITE-TAG',
+ 'SCHEMA',
+ 'SCHEMA-LOCATION',
+ 'SCHEMA-MARSHAL',
+ 'SCHEMA-PATH',
+ 'SCREEN',
+ 'SCREEN-IO',
+ 'SCREEN-LINES',
+ 'SCREEN-VALUE',
+ 'SCREEN-VAL',
+ 'SCREEN-VALU',
+ 'SCROLL',
+ 'SCROLLABLE',
+ 'SCROLLBAR-HORIZONTAL',
+ 'SCROLLBAR-H',
+ 'SCROLLBAR-HO',
+ 'SCROLLBAR-HOR',
+ 'SCROLLBAR-HORI',
+ 'SCROLLBAR-HORIZ',
+ 'SCROLLBAR-HORIZO',
+ 'SCROLLBAR-HORIZON',
+ 'SCROLLBAR-HORIZONT',
+ 'SCROLLBAR-HORIZONTA',
+ 'SCROLL-BARS',
+ 'SCROLLBAR-VERTICAL',
+ 'SCROLLBAR-V',
+ 'SCROLLBAR-VE',
+ 'SCROLLBAR-VER',
+ 'SCROLLBAR-VERT',
+ 'SCROLLBAR-VERTI',
+ 'SCROLLBAR-VERTIC',
+ 'SCROLLBAR-VERTICA',
+ 'SCROLL-DELTA',
+ 'SCROLLED-ROW-POSITION',
+ 'SCROLLED-ROW-POS',
+ 'SCROLLED-ROW-POSI',
+ 'SCROLLED-ROW-POSIT',
+ 'SCROLLED-ROW-POSITI',
+ 'SCROLLED-ROW-POSITIO',
+ 'SCROLLING',
+ 'SCROLL-OFFSET',
+ 'SCROLL-TO-CURRENT-ROW',
+ 'SCROLL-TO-ITEM',
+ 'SCROLL-TO-I',
+ 'SCROLL-TO-IT',
+ 'SCROLL-TO-ITE',
+ 'SCROLL-TO-SELECTED-ROW',
+ 'SDBNAME',
+ 'SEAL',
+ 'SEAL-TIMESTAMP',
+ 'SEARCH',
+ 'SEARCH-SELF',
+ 'SEARCH-TARGET',
+ 'SECTION',
+ 'SECURITY-POLICY',
+ 'SEEK',
+ 'SELECT',
+ 'SELECTABLE',
+ 'SELECT-ALL',
+ 'SELECTED',
+ 'SELECT-FOCUSED-ROW',
+ 'SELECTION',
+ 'SELECTION-END',
+ 'SELECTION-LIST',
+ 'SELECTION-START',
+ 'SELECTION-TEXT',
+ 'SELECT-NEXT-ROW',
+ 'SELECT-PREV-ROW',
+ 'SELECT-ROW',
+ 'SELF',
+ 'SEND',
+ 'send-sql-statement',
+ 'send-sql',
+ 'SENSITIVE',
+ 'SEPARATE-CONNECTION',
+ 'SEPARATOR-FGCOLOR',
+ 'SEPARATORS',
+ 'SERVER',
+ 'SERVER-CONNECTION-BOUND',
+ 'SERVER-CONNECTION-BOUND-REQUEST',
+ 'SERVER-CONNECTION-CONTEXT',
+ 'SERVER-CONNECTION-ID',
+ 'SERVER-OPERATING-MODE',
+ 'SESSION',
+ 'SESSION-ID',
+ 'SET',
+ 'SET-APPL-CONTEXT',
+ 'SET-ATTR-CALL-TYPE',
+ 'SET-ATTRIBUTE-NODE',
+ 'SET-BLUE-VALUE',
+ 'SET-BLUE',
+ 'SET-BLUE-',
+ 'SET-BLUE-V',
+ 'SET-BLUE-VA',
+ 'SET-BLUE-VAL',
+ 'SET-BLUE-VALU',
+ 'SET-BREAK',
+ 'SET-BUFFERS',
+ 'SET-CALLBACK',
+ 'SET-CLIENT',
+ 'SET-COMMIT',
+ 'SET-CONTENTS',
+ 'SET-CURRENT-VALUE',
+ 'SET-DB-CLIENT',
+ 'SET-DYNAMIC',
+ 'SET-EVENT-MANAGER-OPTION',
+ 'SET-GREEN-VALUE',
+ 'SET-GREEN',
+ 'SET-GREEN-',
+ 'SET-GREEN-V',
+ 'SET-GREEN-VA',
+ 'SET-GREEN-VAL',
+ 'SET-GREEN-VALU',
+ 'SET-INPUT-SOURCE',
+ 'SET-OPTION',
+ 'SET-OUTPUT-DESTINATION',
+ 'SET-PARAMETER',
+ 'SET-POINTER-VALUE',
+ 'SET-PROPERTY',
+ 'SET-RED-VALUE',
+ 'SET-RED',
+ 'SET-RED-',
+ 'SET-RED-V',
+ 'SET-RED-VA',
+ 'SET-RED-VAL',
+ 'SET-RED-VALU',
+ 'SET-REPOSITIONED-ROW',
+ 'SET-RGB-VALUE',
+ 'SET-ROLLBACK',
+ 'SET-SELECTION',
+ 'SET-SIZE',
+ 'SET-SORT-ARROW',
+ 'SETUSERID',
+ 'SETUSER',
+ 'SETUSERI',
+ 'SET-WAIT-STATE',
+ 'SHA1-DIGEST',
+ 'SHARED',
+ 'SHARE-LOCK',
+ 'SHARE',
+ 'SHARE-',
+ 'SHARE-L',
+ 'SHARE-LO',
+ 'SHARE-LOC',
+ 'SHOW-IN-TASKBAR',
+ 'SHOW-STATS',
+ 'SHOW-STAT',
+ 'SIDE-LABEL-HANDLE',
+ 'SIDE-LABEL-H',
+ 'SIDE-LABEL-HA',
+ 'SIDE-LABEL-HAN',
+ 'SIDE-LABEL-HAND',
+ 'SIDE-LABEL-HANDL',
+ 'SIDE-LABELS',
+ 'SIDE-LAB',
+ 'SIDE-LABE',
+ 'SIDE-LABEL',
+ 'SILENT',
+ 'SIMPLE',
+ 'SINGLE',
+ 'SIZE',
+ 'SIZE-CHARS',
+ 'SIZE-C',
+ 'SIZE-CH',
+ 'SIZE-CHA',
+ 'SIZE-CHAR',
+ 'SIZE-PIXELS',
+ 'SIZE-P',
+ 'SIZE-PI',
+ 'SIZE-PIX',
+ 'SIZE-PIXE',
+ 'SIZE-PIXEL',
+ 'SKIP',
+ 'SKIP-DELETED-RECORD',
+ 'SLIDER',
+ 'SMALL-ICON',
+ 'SMALLINT',
+ 'SMALL-TITLE',
+ 'SOME',
+ 'SORT',
+ 'SORT-ASCENDING',
+ 'SORT-NUMBER',
+ 'SOURCE',
+ 'SOURCE-PROCEDURE',
+ 'SPACE',
+ 'SQL',
+ 'SQRT',
+ 'SSL-SERVER-NAME',
+ 'STANDALONE',
+ 'START',
+ 'START-DOCUMENT',
+ 'START-ELEMENT',
+ 'START-MOVE',
+ 'START-RESIZE',
+ 'START-ROW-RESIZE',
+ 'STATE-DETAIL',
+ 'STATIC',
+ 'STATUS',
+ 'STATUS-AREA',
+ 'STATUS-AREA-FONT',
+ 'STDCALL',
+ 'STOP',
+ 'STOP-PARSING',
+ 'STOPPED',
+ 'STOPPE',
+ 'STORED-PROCEDURE',
+ 'STORED-PROC',
+ 'STORED-PROCE',
+ 'STORED-PROCED',
+ 'STORED-PROCEDU',
+ 'STORED-PROCEDUR',
+ 'STREAM',
+ 'STREAM-HANDLE',
+ 'STREAM-IO',
+ 'STRETCH-TO-FIT',
+ 'STRICT',
+ 'STRING',
+ 'STRING-VALUE',
+ 'STRING-XREF',
+ 'SUB-AVERAGE',
+ 'SUB-AVE',
+ 'SUB-AVER',
+ 'SUB-AVERA',
+ 'SUB-AVERAG',
+ 'SUB-COUNT',
+ 'SUB-MAXIMUM',
+ 'SUM-MAX',
+ 'SUM-MAXI',
+ 'SUM-MAXIM',
+ 'SUM-MAXIMU',
+ 'SUB-MENU',
+ 'SUBSUB-',
+ 'SUB-MIN',
+ 'SUBSCRIBE',
+ 'SUBSTITUTE',
+ 'SUBST',
+ 'SUBSTI',
+ 'SUBSTIT',
+ 'SUBSTITU',
+ 'SUBSTITUT',
+ 'SUBSTRING',
+ 'SUBSTR',
+ 'SUBSTRI',
+ 'SUBSTRIN',
+ 'SUB-TOTAL',
+ 'SUBTYPE',
+ 'SUM',
+ 'SUPER',
+ 'SUPER-PROCEDURES',
+ 'SUPPRESS-NAMESPACE-PROCESSING',
+ 'SUPPRESS-WARNINGS',
+ 'SUPPRESS-W',
+ 'SUPPRESS-WA',
+ 'SUPPRESS-WAR',
+ 'SUPPRESS-WARN',
+ 'SUPPRESS-WARNI',
+ 'SUPPRESS-WARNIN',
+ 'SUPPRESS-WARNING',
+ 'SYMMETRIC-ENCRYPTION-ALGORITHM',
+ 'SYMMETRIC-ENCRYPTION-IV',
+ 'SYMMETRIC-ENCRYPTION-KEY',
+ 'SYMMETRIC-SUPPORT',
+ 'SYSTEM-ALERT-BOXES',
+ 'SYSTEM-ALERT',
+ 'SYSTEM-ALERT-',
+ 'SYSTEM-ALERT-B',
+ 'SYSTEM-ALERT-BO',
+ 'SYSTEM-ALERT-BOX',
+ 'SYSTEM-ALERT-BOXE',
+ 'SYSTEM-DIALOG',
+ 'SYSTEM-HELP',
+ 'SYSTEM-ID',
+ 'TABLE',
+ 'TABLE-HANDLE',
+ 'TABLE-NUMBER',
+ 'TAB-POSITION',
+ 'TAB-STOP',
+ 'TARGET',
+ 'TARGET-PROCEDURE',
+ 'TEMP-DIRECTORY',
+ 'TEMP-DIR',
+ 'TEMP-DIRE',
+ 'TEMP-DIREC',
+ 'TEMP-DIRECT',
+ 'TEMP-DIRECTO',
+ 'TEMP-DIRECTOR',
+ 'TEMP-TABLE',
+ 'TEMP-TABLE-PREPARE',
+ 'TERM',
+ 'TERMINAL',
+ 'TERMI',
+ 'TERMIN',
+ 'TERMINA',
+ 'TERMINATE',
+ 'TEXT',
+ 'TEXT-CURSOR',
+ 'TEXT-SEG-GROW',
+ 'TEXT-SELECTED',
+ 'THEN',
+ 'THIS-OBJECT',
+ 'THIS-PROCEDURE',
+ 'THREE-D',
+ 'THROW',
+ 'THROUGH',
+ 'THRU',
+ 'TIC-MARKS',
+ 'TIME',
+ 'TIME-SOURCE',
+ 'TITLE',
+ 'TITLE-BGCOLOR',
+ 'TITLE-BGC',
+ 'TITLE-BGCO',
+ 'TITLE-BGCOL',
+ 'TITLE-BGCOLO',
+ 'TITLE-DCOLOR',
+ 'TITLE-DC',
+ 'TITLE-DCO',
+ 'TITLE-DCOL',
+ 'TITLE-DCOLO',
+ 'TITLE-FGCOLOR',
+ 'TITLE-FGC',
+ 'TITLE-FGCO',
+ 'TITLE-FGCOL',
+ 'TITLE-FGCOLO',
+ 'TITLE-FONT',
+ 'TITLE-FO',
+ 'TITLE-FON',
+ 'TO',
+ 'TODAY',
+ 'TOGGLE-BOX',
+ 'TOOLTIP',
+ 'TOOLTIPS',
+ 'TOPIC',
+ 'TOP-NAV-QUERY',
+ 'TOP-ONLY',
+ 'TO-ROWID',
+ 'TOTAL',
+ 'TRAILING',
+ 'TRANS',
+ 'TRANSACTION',
+ 'TRANSACTION-MODE',
+ 'TRANS-INIT-PROCEDURE',
+ 'TRANSPARENT',
+ 'TRIGGER',
+ 'TRIGGERS',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNCATE',
+ 'TRUNC',
+ 'TRUNCA',
+ 'TRUNCAT',
+ 'TYPE',
+ 'TYPE-OF',
+ 'UNBOX',
+ 'UNBUFFERED',
+ 'UNBUFF',
+ 'UNBUFFE',
+ 'UNBUFFER',
+ 'UNBUFFERE',
+ 'UNDERLINE',
+ 'UNDERL',
+ 'UNDERLI',
+ 'UNDERLIN',
+ 'UNDO',
+ 'UNFORMATTED',
+ 'UNFORM',
+ 'UNFORMA',
+ 'UNFORMAT',
+ 'UNFORMATT',
+ 'UNFORMATTE',
+ 'UNION',
+ 'UNIQUE',
+ 'UNIQUE-ID',
+ 'UNIQUE-MATCH',
+ 'UNIX',
+ 'UNLESS-HIDDEN',
+ 'UNLOAD',
+ 'UNSIGNED-LONG',
+ 'UNSUBSCRIBE',
+ 'UP',
+ 'UPDATE',
+ 'UPDATE-ATTRIBUTE',
+ 'URL',
+ 'URL-DECODE',
+ 'URL-ENCODE',
+ 'URL-PASSWORD',
+ 'URL-USERID',
+ 'USE',
+ 'USE-DICT-EXPS',
+ 'USE-FILENAME',
+ 'USE-INDEX',
+ 'USER',
+ 'USE-REVVIDEO',
+ 'USERID',
+ 'USER-ID',
+ 'USE-TEXT',
+ 'USE-UNDERLINE',
+ 'USE-WIDGET-POOL',
+ 'USING',
+ 'V6DISPLAY',
+ 'V6FRAME',
+ 'VALIDATE',
+ 'VALIDATE-EXPRESSION',
+ 'VALIDATE-MESSAGE',
+ 'VALIDATE-SEAL',
+ 'VALIDATION-ENABLED',
+ 'VALID-EVENT',
+ 'VALID-HANDLE',
+ 'VALID-OBJECT',
+ 'VALUE',
+ 'VALUE-CHANGED',
+ 'VALUES',
+ 'VARIABLE',
+ 'VAR',
+ 'VARI',
+ 'VARIA',
+ 'VARIAB',
+ 'VARIABL',
+ 'VERBOSE',
+ 'VERSION',
+ 'VERTICAL',
+ 'VERT',
+ 'VERTI',
+ 'VERTIC',
+ 'VERTICA',
+ 'VIEW',
+ 'VIEW-AS',
+ 'VIEW-FIRST-COLUMN-ON-REOPEN',
+ 'VIRTUAL-HEIGHT-CHARS',
+ 'VIRTUAL-HEIGHT',
+ 'VIRTUAL-HEIGHT-',
+ 'VIRTUAL-HEIGHT-C',
+ 'VIRTUAL-HEIGHT-CH',
+ 'VIRTUAL-HEIGHT-CHA',
+ 'VIRTUAL-HEIGHT-CHAR',
+ 'VIRTUAL-HEIGHT-PIXELS',
+ 'VIRTUAL-HEIGHT-P',
+ 'VIRTUAL-HEIGHT-PI',
+ 'VIRTUAL-HEIGHT-PIX',
+ 'VIRTUAL-HEIGHT-PIXE',
+ 'VIRTUAL-HEIGHT-PIXEL',
+ 'VIRTUAL-WIDTH-CHARS',
+ 'VIRTUAL-WIDTH',
+ 'VIRTUAL-WIDTH-',
+ 'VIRTUAL-WIDTH-C',
+ 'VIRTUAL-WIDTH-CH',
+ 'VIRTUAL-WIDTH-CHA',
+ 'VIRTUAL-WIDTH-CHAR',
+ 'VIRTUAL-WIDTH-PIXELS',
+ 'VIRTUAL-WIDTH-P',
+ 'VIRTUAL-WIDTH-PI',
+ 'VIRTUAL-WIDTH-PIX',
+ 'VIRTUAL-WIDTH-PIXE',
+ 'VIRTUAL-WIDTH-PIXEL',
+ 'VISIBLE',
+ 'VOID',
+ 'WAIT',
+ 'WAIT-FOR',
+ 'WARNING',
+ 'WEB-CONTEXT',
+ 'WEEKDAY',
+ 'WHEN',
+ 'WHERE',
+ 'WHILE',
+ 'WIDGET',
+ 'WIDGET-ENTER',
+ 'WIDGET-E',
+ 'WIDGET-EN',
+ 'WIDGET-ENT',
+ 'WIDGET-ENTE',
+ 'WIDGET-ID',
+ 'WIDGET-LEAVE',
+ 'WIDGET-L',
+ 'WIDGET-LE',
+ 'WIDGET-LEA',
+ 'WIDGET-LEAV',
+ 'WIDGET-POOL',
+ 'WIDTH-CHARS',
+ 'WIDTH',
+ 'WIDTH-',
+ 'WIDTH-C',
+ 'WIDTH-CH',
+ 'WIDTH-CHA',
+ 'WIDTH-CHAR',
+ 'WIDTH-PIXELS',
+ 'WIDTH-P',
+ 'WIDTH-PI',
+ 'WIDTH-PIX',
+ 'WIDTH-PIXE',
+ 'WIDTH-PIXEL',
+ 'WINDOW',
+ 'WINDOW-MAXIMIZED',
+ 'WINDOW-MAXIM',
+ 'WINDOW-MAXIMI',
+ 'WINDOW-MAXIMIZ',
+ 'WINDOW-MAXIMIZE',
+ 'WINDOW-MINIMIZED',
+ 'WINDOW-MINIM',
+ 'WINDOW-MINIMI',
+ 'WINDOW-MINIMIZ',
+ 'WINDOW-MINIMIZE',
+ 'WINDOW-NAME',
+ 'WINDOW-NORMAL',
+ 'WINDOW-STATE',
+ 'WINDOW-STA',
+ 'WINDOW-STAT',
+ 'WINDOW-SYSTEM',
+ 'WITH',
+ 'WORD-INDEX',
+ 'WORD-WRAP',
+ 'WORK-AREA-HEIGHT-PIXELS',
+ 'WORK-AREA-WIDTH-PIXELS',
+ 'WORK-AREA-X',
+ 'WORK-AREA-Y',
+ 'WORKFILE',
+ 'WORK-TABLE',
+ 'WORK-TAB',
+ 'WORK-TABL',
+ 'WRITE',
+ 'WRITE-CDATA',
+ 'WRITE-CHARACTERS',
+ 'WRITE-COMMENT',
+ 'WRITE-DATA-ELEMENT',
+ 'WRITE-EMPTY-ELEMENT',
+ 'WRITE-ENTITY-REF',
+ 'WRITE-EXTERNAL-DTD',
+ 'WRITE-FRAGMENT',
+ 'WRITE-MESSAGE',
+ 'WRITE-PROCESSING-INSTRUCTION',
+ 'WRITE-STATUS',
+ 'WRITE-XML',
+ 'WRITE-XMLSCHEMA',
+ 'X',
+ 'XCODE',
+ 'XML-DATA-TYPE',
+ 'XML-NODE-TYPE',
+ 'XML-SCHEMA-PATH',
+ 'XML-SUPPRESS-NAMESPACE-PROCESSING',
+ 'X-OF',
+ 'XREF',
+ 'XREF-XML',
+ 'Y',
+ 'YEAR',
+ 'YEAR-OFFSET',
+ 'YES',
+ 'YES-NO',
+ 'YES-NO-CANCEL',
+ 'Y-OF'
+)
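
The closing parenthesis above ends the restored OpenEdge builtin-keyword tuple before the next file's diff begins. As a hedged aside on how such a tuple is normally consumed (a minimal sketch under assumptions, not code from this commit: the module path, the tuple name OPENEDGEKEYWORDS, and the lexer class below are illustrative, not taken verbatim from the repository), a Pygments-style lexer typically folds the whole list into a single token rule with the words() helper:

    # Illustrative sketch only; names marked "assumed" are not from this diff.
    import re

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Text
    # Assumed module/tuple name for the keyword list restored above.
    from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS


    class MiniOpenEdgeLexer(RegexLexer):
        """Toy lexer: highlights only the builtin keywords and whitespace."""
        name = 'MiniOpenEdge'
        # OpenEdge keywords are case-insensitive, so match them that way too.
        flags = re.IGNORECASE | re.MULTILINE
        tokens = {
            'root': [
                # words() escapes every entry and joins the tuple into one
                # alternation; the \b suffix stops prefixes of longer
                # identifiers from being highlighted.
                (words(OPENEDGEKEYWORDS, suffix=r'\b'), Keyword.Builtin),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }

The design point of words() is that the several-thousand-entry tuple is compiled into one optimized regular expression once, when the lexer class is built, rather than being scanned entry by entry for every token.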
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_php_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_php_builtins.py
index 44ef20530e..c6e017ca7e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_php_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_php_builtins.py
@@ -1,4703 +1,4703 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._php_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file loads the function names and their modules from the
- php webpage and generates itself.
-
- Do not alter the MODULES dict by hand!
-
- WARNING: the generation transfers quite much data over your
- internet connection. don't run that at home, use
- a server ;-)
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._php_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file loads the function names and their modules from the
+ php webpage and generates itself.
+
+ Do not alter the MODULES dict by hand!
+
+ WARNING: the generation transfers quite much data over your
+ internet connection. don't run that at home, use
+ a server ;-)
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-MODULES = {'.NET': ('dotnet_load',),
- 'APC': ('apc_add',
- 'apc_bin_dump',
- 'apc_bin_dumpfile',
- 'apc_bin_load',
- 'apc_bin_loadfile',
- 'apc_cache_info',
- 'apc_cas',
- 'apc_clear_cache',
- 'apc_compile_file',
- 'apc_dec',
- 'apc_define_constants',
- 'apc_delete_file',
- 'apc_delete',
- 'apc_exists',
- 'apc_fetch',
- 'apc_inc',
- 'apc_load_constants',
- 'apc_sma_info',
- 'apc_store'),
- 'APD': ('apd_breakpoint',
- 'apd_callstack',
- 'apd_clunk',
- 'apd_continue',
- 'apd_croak',
- 'apd_dump_function_table',
- 'apd_dump_persistent_resources',
- 'apd_dump_regular_resources',
- 'apd_echo',
- 'apd_get_active_symbols',
- 'apd_set_pprof_trace',
- 'apd_set_session_trace_socket',
- 'apd_set_session_trace',
- 'apd_set_session',
- 'override_function',
- 'rename_function'),
- 'Aliases and deprecated Mysqli': ('mysqli_bind_param',
- 'mysqli_bind_result',
- 'mysqli_client_encoding',
- 'mysqli_connect',
- 'mysqli_disable_rpl_parse',
- 'mysqli_enable_reads_from_master',
- 'mysqli_enable_rpl_parse',
- 'mysqli_escape_string',
- 'mysqli_execute',
- 'mysqli_fetch',
- 'mysqli_get_cache_stats',
- 'mysqli_get_metadata',
- 'mysqli_master_query',
- 'mysqli_param_count',
- 'mysqli_report',
- 'mysqli_rpl_parse_enabled',
- 'mysqli_rpl_probe',
- 'mysqli_send_long_data',
- 'mysqli_slave_query'),
- 'Apache': ('apache_child_terminate',
- 'apache_get_modules',
- 'apache_get_version',
- 'apache_getenv',
- 'apache_lookup_uri',
- 'apache_note',
- 'apache_request_headers',
- 'apache_reset_timeout',
- 'apache_response_headers',
- 'apache_setenv',
- 'getallheaders',
- 'virtual'),
- 'Array': ('array_change_key_case',
- 'array_chunk',
- 'array_column',
- 'array_combine',
- 'array_count_values',
- 'array_diff_assoc',
- 'array_diff_key',
- 'array_diff_uassoc',
- 'array_diff_ukey',
- 'array_diff',
- 'array_fill_keys',
- 'array_fill',
- 'array_filter',
- 'array_flip',
- 'array_intersect_assoc',
- 'array_intersect_key',
- 'array_intersect_uassoc',
- 'array_intersect_ukey',
- 'array_intersect',
- 'array_key_exists',
- 'array_keys',
- 'array_map',
- 'array_merge_recursive',
- 'array_merge',
- 'array_multisort',
- 'array_pad',
- 'array_pop',
- 'array_product',
- 'array_push',
- 'array_rand',
- 'array_reduce',
- 'array_replace_recursive',
- 'array_replace',
- 'array_reverse',
- 'array_search',
- 'array_shift',
- 'array_slice',
- 'array_splice',
- 'array_sum',
- 'array_udiff_assoc',
- 'array_udiff_uassoc',
- 'array_udiff',
- 'array_uintersect_assoc',
- 'array_uintersect_uassoc',
- 'array_uintersect',
- 'array_unique',
- 'array_unshift',
- 'array_values',
- 'array_walk_recursive',
- 'array_walk',
- 'array',
- 'arsort',
- 'asort',
- 'compact',
- 'count',
- 'current',
- 'each',
- 'end',
- 'extract',
- 'in_array',
- 'key_exists',
- 'key',
- 'krsort',
- 'ksort',
- 'list',
- 'natcasesort',
- 'natsort',
- 'next',
- 'pos',
- 'prev',
- 'range',
- 'reset',
- 'rsort',
- 'shuffle',
- 'sizeof',
- 'sort',
- 'uasort',
- 'uksort',
- 'usort'),
- 'BBCode': ('bbcode_add_element',
- 'bbcode_add_smiley',
- 'bbcode_create',
- 'bbcode_destroy',
- 'bbcode_parse',
- 'bbcode_set_arg_parser',
- 'bbcode_set_flags'),
- 'BC Math': ('bcadd',
- 'bccomp',
- 'bcdiv',
- 'bcmod',
- 'bcmul',
- 'bcpow',
- 'bcpowmod',
- 'bcscale',
- 'bcsqrt',
- 'bcsub'),
- 'Blenc': ('blenc_encrypt',),
- 'Bzip2': ('bzclose',
- 'bzcompress',
- 'bzdecompress',
- 'bzerrno',
- 'bzerror',
- 'bzerrstr',
- 'bzflush',
- 'bzopen',
- 'bzread',
- 'bzwrite'),
- 'COM': ('com_addref',
- 'com_create_guid',
- 'com_event_sink',
- 'com_get_active_object',
- 'com_get',
- 'com_invoke',
- 'com_isenum',
- 'com_load_typelib',
- 'com_load',
- 'com_message_pump',
- 'com_print_typeinfo',
- 'com_propget',
- 'com_propput',
- 'com_propset',
- 'com_release',
- 'com_set',
- 'variant_abs',
- 'variant_add',
- 'variant_and',
- 'variant_cast',
- 'variant_cat',
- 'variant_cmp',
- 'variant_date_from_timestamp',
- 'variant_date_to_timestamp',
- 'variant_div',
- 'variant_eqv',
- 'variant_fix',
- 'variant_get_type',
- 'variant_idiv',
- 'variant_imp',
- 'variant_int',
- 'variant_mod',
- 'variant_mul',
- 'variant_neg',
- 'variant_not',
- 'variant_or',
- 'variant_pow',
- 'variant_round',
- 'variant_set_type',
- 'variant_set',
- 'variant_sub',
- 'variant_xor'),
- 'CUBRID': ('cubrid_bind',
- 'cubrid_close_prepare',
- 'cubrid_close_request',
- 'cubrid_col_get',
- 'cubrid_col_size',
- 'cubrid_column_names',
- 'cubrid_column_types',
- 'cubrid_commit',
- 'cubrid_connect_with_url',
- 'cubrid_connect',
- 'cubrid_current_oid',
- 'cubrid_disconnect',
- 'cubrid_drop',
- 'cubrid_error_code_facility',
- 'cubrid_error_code',
- 'cubrid_error_msg',
- 'cubrid_execute',
- 'cubrid_fetch',
- 'cubrid_free_result',
- 'cubrid_get_autocommit',
- 'cubrid_get_charset',
- 'cubrid_get_class_name',
- 'cubrid_get_client_info',
- 'cubrid_get_db_parameter',
- 'cubrid_get_query_timeout',
- 'cubrid_get_server_info',
- 'cubrid_get',
- 'cubrid_insert_id',
- 'cubrid_is_instance',
- 'cubrid_lob_close',
- 'cubrid_lob_export',
- 'cubrid_lob_get',
- 'cubrid_lob_send',
- 'cubrid_lob_size',
- 'cubrid_lob2_bind',
- 'cubrid_lob2_close',
- 'cubrid_lob2_export',
- 'cubrid_lob2_import',
- 'cubrid_lob2_new',
- 'cubrid_lob2_read',
- 'cubrid_lob2_seek64',
- 'cubrid_lob2_seek',
- 'cubrid_lob2_size64',
- 'cubrid_lob2_size',
- 'cubrid_lob2_tell64',
- 'cubrid_lob2_tell',
- 'cubrid_lob2_write',
- 'cubrid_lock_read',
- 'cubrid_lock_write',
- 'cubrid_move_cursor',
- 'cubrid_next_result',
- 'cubrid_num_cols',
- 'cubrid_num_rows',
- 'cubrid_pconnect_with_url',
- 'cubrid_pconnect',
- 'cubrid_prepare',
- 'cubrid_put',
- 'cubrid_rollback',
- 'cubrid_schema',
- 'cubrid_seq_drop',
- 'cubrid_seq_insert',
- 'cubrid_seq_put',
- 'cubrid_set_add',
- 'cubrid_set_autocommit',
- 'cubrid_set_db_parameter',
- 'cubrid_set_drop',
- 'cubrid_set_query_timeout',
- 'cubrid_version'),
- 'Cairo': ('cairo_create',
- 'cairo_font_face_get_type',
- 'cairo_font_face_status',
- 'cairo_font_options_create',
- 'cairo_font_options_equal',
- 'cairo_font_options_get_antialias',
- 'cairo_font_options_get_hint_metrics',
- 'cairo_font_options_get_hint_style',
- 'cairo_font_options_get_subpixel_order',
- 'cairo_font_options_hash',
- 'cairo_font_options_merge',
- 'cairo_font_options_set_antialias',
- 'cairo_font_options_set_hint_metrics',
- 'cairo_font_options_set_hint_style',
- 'cairo_font_options_set_subpixel_order',
- 'cairo_font_options_status',
- 'cairo_format_stride_for_width',
- 'cairo_image_surface_create_for_data',
- 'cairo_image_surface_create_from_png',
- 'cairo_image_surface_create',
- 'cairo_image_surface_get_data',
- 'cairo_image_surface_get_format',
- 'cairo_image_surface_get_height',
- 'cairo_image_surface_get_stride',
- 'cairo_image_surface_get_width',
- 'cairo_matrix_create_scale',
- 'cairo_matrix_create_translate',
- 'cairo_matrix_invert',
- 'cairo_matrix_multiply',
- 'cairo_matrix_rotate',
- 'cairo_matrix_transform_distance',
- 'cairo_matrix_transform_point',
- 'cairo_matrix_translate',
- 'cairo_pattern_add_color_stop_rgb',
- 'cairo_pattern_add_color_stop_rgba',
- 'cairo_pattern_create_for_surface',
- 'cairo_pattern_create_linear',
- 'cairo_pattern_create_radial',
- 'cairo_pattern_create_rgb',
- 'cairo_pattern_create_rgba',
- 'cairo_pattern_get_color_stop_count',
- 'cairo_pattern_get_color_stop_rgba',
- 'cairo_pattern_get_extend',
- 'cairo_pattern_get_filter',
- 'cairo_pattern_get_linear_points',
- 'cairo_pattern_get_matrix',
- 'cairo_pattern_get_radial_circles',
- 'cairo_pattern_get_rgba',
- 'cairo_pattern_get_surface',
- 'cairo_pattern_get_type',
- 'cairo_pattern_set_extend',
- 'cairo_pattern_set_filter',
- 'cairo_pattern_set_matrix',
- 'cairo_pattern_status',
- 'cairo_pdf_surface_create',
- 'cairo_pdf_surface_set_size',
- 'cairo_ps_get_levels',
- 'cairo_ps_level_to_string',
- 'cairo_ps_surface_create',
- 'cairo_ps_surface_dsc_begin_page_setup',
- 'cairo_ps_surface_dsc_begin_setup',
- 'cairo_ps_surface_dsc_comment',
- 'cairo_ps_surface_get_eps',
- 'cairo_ps_surface_restrict_to_level',
- 'cairo_ps_surface_set_eps',
- 'cairo_ps_surface_set_size',
- 'cairo_scaled_font_create',
- 'cairo_scaled_font_extents',
- 'cairo_scaled_font_get_ctm',
- 'cairo_scaled_font_get_font_face',
- 'cairo_scaled_font_get_font_matrix',
- 'cairo_scaled_font_get_font_options',
- 'cairo_scaled_font_get_scale_matrix',
- 'cairo_scaled_font_get_type',
- 'cairo_scaled_font_glyph_extents',
- 'cairo_scaled_font_status',
- 'cairo_scaled_font_text_extents',
- 'cairo_surface_copy_page',
- 'cairo_surface_create_similar',
- 'cairo_surface_finish',
- 'cairo_surface_flush',
- 'cairo_surface_get_content',
- 'cairo_surface_get_device_offset',
- 'cairo_surface_get_font_options',
- 'cairo_surface_get_type',
- 'cairo_surface_mark_dirty_rectangle',
- 'cairo_surface_mark_dirty',
- 'cairo_surface_set_device_offset',
- 'cairo_surface_set_fallback_resolution',
- 'cairo_surface_show_page',
- 'cairo_surface_status',
- 'cairo_surface_write_to_png',
- 'cairo_svg_surface_create',
- 'cairo_svg_surface_restrict_to_version',
- 'cairo_svg_version_to_string'),
- 'Calendar': ('cal_days_in_month',
- 'cal_from_jd',
- 'cal_info',
- 'cal_to_jd',
- 'easter_date',
- 'easter_days',
- 'FrenchToJD',
- 'GregorianToJD',
- 'JDDayOfWeek',
- 'JDMonthName',
- 'JDToFrench',
- 'JDToGregorian',
- 'jdtojewish',
- 'JDToJulian',
- 'jdtounix',
- 'JewishToJD',
- 'JulianToJD',
- 'unixtojd'),
- 'Classes/Object': ('__autoload',
- 'call_user_method_array',
- 'call_user_method',
- 'class_alias',
- 'class_exists',
- 'get_called_class',
- 'get_class_methods',
- 'get_class_vars',
- 'get_class',
- 'get_declared_classes',
- 'get_declared_interfaces',
- 'get_declared_traits',
- 'get_object_vars',
- 'get_parent_class',
- 'interface_exists',
- 'is_a',
- 'is_subclass_of',
- 'method_exists',
- 'property_exists',
- 'trait_exists'),
- 'Classkit': ('classkit_import',
- 'classkit_method_add',
- 'classkit_method_copy',
- 'classkit_method_redefine',
- 'classkit_method_remove',
- 'classkit_method_rename'),
- 'Crack': ('crack_check',
- 'crack_closedict',
- 'crack_getlastmessage',
- 'crack_opendict'),
- 'Ctype': ('ctype_alnum',
- 'ctype_alpha',
- 'ctype_cntrl',
- 'ctype_digit',
- 'ctype_graph',
- 'ctype_lower',
- 'ctype_print',
- 'ctype_punct',
- 'ctype_space',
- 'ctype_upper',
- 'ctype_xdigit'),
- 'Cyrus': ('cyrus_authenticate',
- 'cyrus_bind',
- 'cyrus_close',
- 'cyrus_connect',
- 'cyrus_query',
- 'cyrus_unbind'),
- 'DB++': ('dbplus_add',
- 'dbplus_aql',
- 'dbplus_chdir',
- 'dbplus_close',
- 'dbplus_curr',
- 'dbplus_errcode',
- 'dbplus_errno',
- 'dbplus_find',
- 'dbplus_first',
- 'dbplus_flush',
- 'dbplus_freealllocks',
- 'dbplus_freelock',
- 'dbplus_freerlocks',
- 'dbplus_getlock',
- 'dbplus_getunique',
- 'dbplus_info',
- 'dbplus_last',
- 'dbplus_lockrel',
- 'dbplus_next',
- 'dbplus_open',
- 'dbplus_prev',
- 'dbplus_rchperm',
- 'dbplus_rcreate',
- 'dbplus_rcrtexact',
- 'dbplus_rcrtlike',
- 'dbplus_resolve',
- 'dbplus_restorepos',
- 'dbplus_rkeys',
- 'dbplus_ropen',
- 'dbplus_rquery',
- 'dbplus_rrename',
- 'dbplus_rsecindex',
- 'dbplus_runlink',
- 'dbplus_rzap',
- 'dbplus_savepos',
- 'dbplus_setindex',
- 'dbplus_setindexbynumber',
- 'dbplus_sql',
- 'dbplus_tcl',
- 'dbplus_tremove',
- 'dbplus_undo',
- 'dbplus_undoprepare',
- 'dbplus_unlockrel',
- 'dbplus_unselect',
- 'dbplus_update',
- 'dbplus_xlockrel',
- 'dbplus_xunlockrel'),
- 'DBA': ('dba_close',
- 'dba_delete',
- 'dba_exists',
- 'dba_fetch',
- 'dba_firstkey',
- 'dba_handlers',
- 'dba_insert',
- 'dba_key_split',
- 'dba_list',
- 'dba_nextkey',
- 'dba_open',
- 'dba_optimize',
- 'dba_popen',
- 'dba_replace',
- 'dba_sync'),
- 'DOM': ('dom_import_simplexml',),
- 'Date/Time': ('checkdate',
- 'date_add',
- 'date_create_from_format',
- 'date_create_immutable_from_format',
- 'date_create_immutable',
- 'date_create',
- 'date_date_set',
- 'date_default_timezone_get',
- 'date_default_timezone_set',
- 'date_diff',
- 'date_format',
- 'date_get_last_errors',
- 'date_interval_create_from_date_string',
- 'date_interval_format',
- 'date_isodate_set',
- 'date_modify',
- 'date_offset_get',
- 'date_parse_from_format',
- 'date_parse',
- 'date_sub',
- 'date_sun_info',
- 'date_sunrise',
- 'date_sunset',
- 'date_time_set',
- 'date_timestamp_get',
- 'date_timestamp_set',
- 'date_timezone_get',
- 'date_timezone_set',
- 'date',
- 'getdate',
- 'gettimeofday',
- 'gmdate',
- 'gmmktime',
- 'gmstrftime',
- 'idate',
- 'localtime',
- 'microtime',
- 'mktime',
- 'strftime',
- 'strptime',
- 'strtotime',
- 'time',
- 'timezone_abbreviations_list',
- 'timezone_identifiers_list',
- 'timezone_location_get',
- 'timezone_name_from_abbr',
- 'timezone_name_get',
- 'timezone_offset_get',
- 'timezone_open',
- 'timezone_transitions_get',
- 'timezone_version_get'),
- 'Direct IO': ('dio_close',
- 'dio_fcntl',
- 'dio_open',
- 'dio_read',
- 'dio_seek',
- 'dio_stat',
- 'dio_tcsetattr',
- 'dio_truncate',
- 'dio_write'),
- 'Directory': ('chdir',
- 'chroot',
- 'closedir',
- 'dir',
- 'getcwd',
- 'opendir',
- 'readdir',
- 'rewinddir',
- 'scandir'),
- 'Eio': ('eio_busy',
- 'eio_cancel',
- 'eio_chmod',
- 'eio_chown',
- 'eio_close',
- 'eio_custom',
- 'eio_dup2',
- 'eio_event_loop',
- 'eio_fallocate',
- 'eio_fchmod',
- 'eio_fchown',
- 'eio_fdatasync',
- 'eio_fstat',
- 'eio_fstatvfs',
- 'eio_fsync',
- 'eio_ftruncate',
- 'eio_futime',
- 'eio_get_event_stream',
- 'eio_get_last_error',
- 'eio_grp_add',
- 'eio_grp_cancel',
- 'eio_grp_limit',
- 'eio_grp',
- 'eio_init',
- 'eio_link',
- 'eio_lstat',
- 'eio_mkdir',
- 'eio_mknod',
- 'eio_nop',
- 'eio_npending',
- 'eio_nready',
- 'eio_nreqs',
- 'eio_nthreads',
- 'eio_open',
- 'eio_poll',
- 'eio_read',
- 'eio_readahead',
- 'eio_readdir',
- 'eio_readlink',
- 'eio_realpath',
- 'eio_rename',
- 'eio_rmdir',
- 'eio_seek',
- 'eio_sendfile',
- 'eio_set_max_idle',
- 'eio_set_max_parallel',
- 'eio_set_max_poll_reqs',
- 'eio_set_max_poll_time',
- 'eio_set_min_parallel',
- 'eio_stat',
- 'eio_statvfs',
- 'eio_symlink',
- 'eio_sync_file_range',
- 'eio_sync',
- 'eio_syncfs',
- 'eio_truncate',
- 'eio_unlink',
- 'eio_utime',
- 'eio_write'),
- 'Enchant': ('enchant_broker_describe',
- 'enchant_broker_dict_exists',
- 'enchant_broker_free_dict',
- 'enchant_broker_free',
- 'enchant_broker_get_error',
- 'enchant_broker_init',
- 'enchant_broker_list_dicts',
- 'enchant_broker_request_dict',
- 'enchant_broker_request_pwl_dict',
- 'enchant_broker_set_ordering',
- 'enchant_dict_add_to_personal',
- 'enchant_dict_add_to_session',
- 'enchant_dict_check',
- 'enchant_dict_describe',
- 'enchant_dict_get_error',
- 'enchant_dict_is_in_session',
- 'enchant_dict_quick_check',
- 'enchant_dict_store_replacement',
- 'enchant_dict_suggest'),
- 'Error Handling': ('debug_backtrace',
- 'debug_print_backtrace',
- 'error_get_last',
- 'error_log',
- 'error_reporting',
- 'restore_error_handler',
- 'restore_exception_handler',
- 'set_error_handler',
- 'set_exception_handler',
- 'trigger_error',
- 'user_error'),
- 'Exif': ('exif_imagetype',
- 'exif_read_data',
- 'exif_tagname',
- 'exif_thumbnail',
- 'read_exif_data'),
- 'Expect': ('expect_expectl', 'expect_popen'),
- 'FAM': ('fam_cancel_monitor',
- 'fam_close',
- 'fam_monitor_collection',
- 'fam_monitor_directory',
- 'fam_monitor_file',
- 'fam_next_event',
- 'fam_open',
- 'fam_pending',
- 'fam_resume_monitor',
- 'fam_suspend_monitor'),
- 'FDF': ('fdf_add_doc_javascript',
- 'fdf_add_template',
- 'fdf_close',
- 'fdf_create',
- 'fdf_enum_values',
- 'fdf_errno',
- 'fdf_error',
- 'fdf_get_ap',
- 'fdf_get_attachment',
- 'fdf_get_encoding',
- 'fdf_get_file',
- 'fdf_get_flags',
- 'fdf_get_opt',
- 'fdf_get_status',
- 'fdf_get_value',
- 'fdf_get_version',
- 'fdf_header',
- 'fdf_next_field_name',
- 'fdf_open_string',
- 'fdf_open',
- 'fdf_remove_item',
- 'fdf_save_string',
- 'fdf_save',
- 'fdf_set_ap',
- 'fdf_set_encoding',
- 'fdf_set_file',
- 'fdf_set_flags',
- 'fdf_set_javascript_action',
- 'fdf_set_on_import_javascript',
- 'fdf_set_opt',
- 'fdf_set_status',
- 'fdf_set_submit_form_action',
- 'fdf_set_target_frame',
- 'fdf_set_value',
- 'fdf_set_version'),
- 'FPM': ('fastcgi_finish_request',),
- 'FTP': ('ftp_alloc',
- 'ftp_cdup',
- 'ftp_chdir',
- 'ftp_chmod',
- 'ftp_close',
- 'ftp_connect',
- 'ftp_delete',
- 'ftp_exec',
- 'ftp_fget',
- 'ftp_fput',
- 'ftp_get_option',
- 'ftp_get',
- 'ftp_login',
- 'ftp_mdtm',
- 'ftp_mkdir',
- 'ftp_nb_continue',
- 'ftp_nb_fget',
- 'ftp_nb_fput',
- 'ftp_nb_get',
- 'ftp_nb_put',
- 'ftp_nlist',
- 'ftp_pasv',
- 'ftp_put',
- 'ftp_pwd',
- 'ftp_quit',
- 'ftp_raw',
- 'ftp_rawlist',
- 'ftp_rename',
- 'ftp_rmdir',
- 'ftp_set_option',
- 'ftp_site',
- 'ftp_size',
- 'ftp_ssl_connect',
- 'ftp_systype'),
- 'Fann': ('fann_cascadetrain_on_data',
- 'fann_cascadetrain_on_file',
- 'fann_clear_scaling_params',
- 'fann_copy',
- 'fann_create_from_file',
- 'fann_create_shortcut_array',
- 'fann_create_shortcut',
- 'fann_create_sparse_array',
- 'fann_create_sparse',
- 'fann_create_standard_array',
- 'fann_create_standard',
- 'fann_create_train_from_callback',
- 'fann_create_train',
- 'fann_descale_input',
- 'fann_descale_output',
- 'fann_descale_train',
- 'fann_destroy_train',
- 'fann_destroy',
- 'fann_duplicate_train_data',
- 'fann_get_activation_function',
- 'fann_get_activation_steepness',
- 'fann_get_bias_array',
- 'fann_get_bit_fail_limit',
- 'fann_get_bit_fail',
- 'fann_get_cascade_activation_functions_count',
- 'fann_get_cascade_activation_functions',
- 'fann_get_cascade_activation_steepnesses_count',
- 'fann_get_cascade_activation_steepnesses',
- 'fann_get_cascade_candidate_change_fraction',
- 'fann_get_cascade_candidate_limit',
- 'fann_get_cascade_candidate_stagnation_epochs',
- 'fann_get_cascade_max_cand_epochs',
- 'fann_get_cascade_max_out_epochs',
- 'fann_get_cascade_min_cand_epochs',
- 'fann_get_cascade_min_out_epochs',
- 'fann_get_cascade_num_candidate_groups',
- 'fann_get_cascade_num_candidates',
- 'fann_get_cascade_output_change_fraction',
- 'fann_get_cascade_output_stagnation_epochs',
- 'fann_get_cascade_weight_multiplier',
- 'fann_get_connection_array',
- 'fann_get_connection_rate',
- 'fann_get_errno',
- 'fann_get_errstr',
- 'fann_get_layer_array',
- 'fann_get_learning_momentum',
- 'fann_get_learning_rate',
- 'fann_get_MSE',
- 'fann_get_network_type',
- 'fann_get_num_input',
- 'fann_get_num_layers',
- 'fann_get_num_output',
- 'fann_get_quickprop_decay',
- 'fann_get_quickprop_mu',
- 'fann_get_rprop_decrease_factor',
- 'fann_get_rprop_delta_max',
- 'fann_get_rprop_delta_min',
- 'fann_get_rprop_delta_zero',
- 'fann_get_rprop_increase_factor',
- 'fann_get_sarprop_step_error_shift',
- 'fann_get_sarprop_step_error_threshold_factor',
- 'fann_get_sarprop_temperature',
- 'fann_get_sarprop_weight_decay_shift',
- 'fann_get_total_connections',
- 'fann_get_total_neurons',
- 'fann_get_train_error_function',
- 'fann_get_train_stop_function',
- 'fann_get_training_algorithm',
- 'fann_init_weights',
- 'fann_length_train_data',
- 'fann_merge_train_data',
- 'fann_num_input_train_data',
- 'fann_num_output_train_data',
- 'fann_print_error',
- 'fann_randomize_weights',
- 'fann_read_train_from_file',
- 'fann_reset_errno',
- 'fann_reset_errstr',
- 'fann_reset_MSE',
- 'fann_run',
- 'fann_save_train',
- 'fann_save',
- 'fann_scale_input_train_data',
- 'fann_scale_input',
- 'fann_scale_output_train_data',
- 'fann_scale_output',
- 'fann_scale_train_data',
- 'fann_scale_train',
- 'fann_set_activation_function_hidden',
- 'fann_set_activation_function_layer',
- 'fann_set_activation_function_output',
- 'fann_set_activation_function',
- 'fann_set_activation_steepness_hidden',
- 'fann_set_activation_steepness_layer',
- 'fann_set_activation_steepness_output',
- 'fann_set_activation_steepness',
- 'fann_set_bit_fail_limit',
- 'fann_set_callback',
- 'fann_set_cascade_activation_functions',
- 'fann_set_cascade_activation_steepnesses',
- 'fann_set_cascade_candidate_change_fraction',
- 'fann_set_cascade_candidate_limit',
- 'fann_set_cascade_candidate_stagnation_epochs',
- 'fann_set_cascade_max_cand_epochs',
- 'fann_set_cascade_max_out_epochs',
- 'fann_set_cascade_min_cand_epochs',
- 'fann_set_cascade_min_out_epochs',
- 'fann_set_cascade_num_candidate_groups',
- 'fann_set_cascade_output_change_fraction',
- 'fann_set_cascade_output_stagnation_epochs',
- 'fann_set_cascade_weight_multiplier',
- 'fann_set_error_log',
- 'fann_set_input_scaling_params',
- 'fann_set_learning_momentum',
- 'fann_set_learning_rate',
- 'fann_set_output_scaling_params',
- 'fann_set_quickprop_decay',
- 'fann_set_quickprop_mu',
- 'fann_set_rprop_decrease_factor',
- 'fann_set_rprop_delta_max',
- 'fann_set_rprop_delta_min',
- 'fann_set_rprop_delta_zero',
- 'fann_set_rprop_increase_factor',
- 'fann_set_sarprop_step_error_shift',
- 'fann_set_sarprop_step_error_threshold_factor',
- 'fann_set_sarprop_temperature',
- 'fann_set_sarprop_weight_decay_shift',
- 'fann_set_scaling_params',
- 'fann_set_train_error_function',
- 'fann_set_train_stop_function',
- 'fann_set_training_algorithm',
- 'fann_set_weight_array',
- 'fann_set_weight',
- 'fann_shuffle_train_data',
- 'fann_subset_train_data',
- 'fann_test_data',
- 'fann_test',
- 'fann_train_epoch',
- 'fann_train_on_data',
- 'fann_train_on_file',
- 'fann_train'),
- 'Fileinfo': ('finfo_buffer',
- 'finfo_close',
- 'finfo_file',
- 'finfo_open',
- 'finfo_set_flags',
- 'mime_content_type'),
- 'Filesystem': ('basename',
- 'chgrp',
- 'chmod',
- 'chown',
- 'clearstatcache',
- 'copy',
- 'dirname',
- 'disk_free_space',
- 'disk_total_space',
- 'diskfreespace',
- 'fclose',
- 'feof',
- 'fflush',
- 'fgetc',
- 'fgetcsv',
- 'fgets',
- 'fgetss',
- 'file_exists',
- 'file_get_contents',
- 'file_put_contents',
- 'file',
- 'fileatime',
- 'filectime',
- 'filegroup',
- 'fileinode',
- 'filemtime',
- 'fileowner',
- 'fileperms',
- 'filesize',
- 'filetype',
- 'flock',
- 'fnmatch',
- 'fopen',
- 'fpassthru',
- 'fputcsv',
- 'fputs',
- 'fread',
- 'fscanf',
- 'fseek',
- 'fstat',
- 'ftell',
- 'ftruncate',
- 'fwrite',
- 'glob',
- 'is_dir',
- 'is_executable',
- 'is_file',
- 'is_link',
- 'is_readable',
- 'is_uploaded_file',
- 'is_writable',
- 'is_writeable',
- 'lchgrp',
- 'lchown',
- 'link',
- 'linkinfo',
- 'lstat',
- 'mkdir',
- 'move_uploaded_file',
- 'parse_ini_file',
- 'parse_ini_string',
- 'pathinfo',
- 'pclose',
- 'popen',
- 'readfile',
- 'readlink',
- 'realpath_cache_get',
- 'realpath_cache_size',
- 'realpath',
- 'rename',
- 'rewind',
- 'rmdir',
- 'set_file_buffer',
- 'stat',
- 'symlink',
- 'tempnam',
- 'tmpfile',
- 'touch',
- 'umask',
- 'unlink'),
- 'Filter': ('filter_has_var',
- 'filter_id',
- 'filter_input_array',
- 'filter_input',
- 'filter_list',
- 'filter_var_array',
- 'filter_var'),
- 'Firebird/InterBase': ('ibase_add_user',
- 'ibase_affected_rows',
- 'ibase_backup',
- 'ibase_blob_add',
- 'ibase_blob_cancel',
- 'ibase_blob_close',
- 'ibase_blob_create',
- 'ibase_blob_echo',
- 'ibase_blob_get',
- 'ibase_blob_import',
- 'ibase_blob_info',
- 'ibase_blob_open',
- 'ibase_close',
- 'ibase_commit_ret',
- 'ibase_commit',
- 'ibase_connect',
- 'ibase_db_info',
- 'ibase_delete_user',
- 'ibase_drop_db',
- 'ibase_errcode',
- 'ibase_errmsg',
- 'ibase_execute',
- 'ibase_fetch_assoc',
- 'ibase_fetch_object',
- 'ibase_fetch_row',
- 'ibase_field_info',
- 'ibase_free_event_handler',
- 'ibase_free_query',
- 'ibase_free_result',
- 'ibase_gen_id',
- 'ibase_maintain_db',
- 'ibase_modify_user',
- 'ibase_name_result',
- 'ibase_num_fields',
- 'ibase_num_params',
- 'ibase_param_info',
- 'ibase_pconnect',
- 'ibase_prepare',
- 'ibase_query',
- 'ibase_restore',
- 'ibase_rollback_ret',
- 'ibase_rollback',
- 'ibase_server_info',
- 'ibase_service_attach',
- 'ibase_service_detach',
- 'ibase_set_event_handler',
- 'ibase_trans',
- 'ibase_wait_event'),
- 'FriBiDi': ('fribidi_log2vis',),
- 'FrontBase': ('fbsql_affected_rows',
- 'fbsql_autocommit',
- 'fbsql_blob_size',
- 'fbsql_change_user',
- 'fbsql_clob_size',
- 'fbsql_close',
- 'fbsql_commit',
- 'fbsql_connect',
- 'fbsql_create_blob',
- 'fbsql_create_clob',
- 'fbsql_create_db',
- 'fbsql_data_seek',
- 'fbsql_database_password',
- 'fbsql_database',
- 'fbsql_db_query',
- 'fbsql_db_status',
- 'fbsql_drop_db',
- 'fbsql_errno',
- 'fbsql_error',
- 'fbsql_fetch_array',
- 'fbsql_fetch_assoc',
- 'fbsql_fetch_field',
- 'fbsql_fetch_lengths',
- 'fbsql_fetch_object',
- 'fbsql_fetch_row',
- 'fbsql_field_flags',
- 'fbsql_field_len',
- 'fbsql_field_name',
- 'fbsql_field_seek',
- 'fbsql_field_table',
- 'fbsql_field_type',
- 'fbsql_free_result',
- 'fbsql_get_autostart_info',
- 'fbsql_hostname',
- 'fbsql_insert_id',
- 'fbsql_list_dbs',
- 'fbsql_list_fields',
- 'fbsql_list_tables',
- 'fbsql_next_result',
- 'fbsql_num_fields',
- 'fbsql_num_rows',
- 'fbsql_password',
- 'fbsql_pconnect',
- 'fbsql_query',
- 'fbsql_read_blob',
- 'fbsql_read_clob',
- 'fbsql_result',
- 'fbsql_rollback',
- 'fbsql_rows_fetched',
- 'fbsql_select_db',
- 'fbsql_set_characterset',
- 'fbsql_set_lob_mode',
- 'fbsql_set_password',
- 'fbsql_set_transaction',
- 'fbsql_start_db',
- 'fbsql_stop_db',
- 'fbsql_table_name',
- 'fbsql_tablename',
- 'fbsql_username',
- 'fbsql_warnings'),
- 'Function handling': ('call_user_func_array',
- 'call_user_func',
- 'create_function',
- 'forward_static_call_array',
- 'forward_static_call',
- 'func_get_arg',
- 'func_get_args',
- 'func_num_args',
- 'function_exists',
- 'get_defined_functions',
- 'register_shutdown_function',
- 'register_tick_function',
- 'unregister_tick_function'),
- 'GD and Image': ('gd_info',
- 'getimagesize',
- 'getimagesizefromstring',
- 'image_type_to_extension',
- 'image_type_to_mime_type',
- 'image2wbmp',
- 'imageaffine',
- 'imageaffinematrixconcat',
- 'imageaffinematrixget',
- 'imagealphablending',
- 'imageantialias',
- 'imagearc',
- 'imagechar',
- 'imagecharup',
- 'imagecolorallocate',
- 'imagecolorallocatealpha',
- 'imagecolorat',
- 'imagecolorclosest',
- 'imagecolorclosestalpha',
- 'imagecolorclosesthwb',
- 'imagecolordeallocate',
- 'imagecolorexact',
- 'imagecolorexactalpha',
- 'imagecolormatch',
- 'imagecolorresolve',
- 'imagecolorresolvealpha',
- 'imagecolorset',
- 'imagecolorsforindex',
- 'imagecolorstotal',
- 'imagecolortransparent',
- 'imageconvolution',
- 'imagecopy',
- 'imagecopymerge',
- 'imagecopymergegray',
- 'imagecopyresampled',
- 'imagecopyresized',
- 'imagecreate',
- 'imagecreatefromgd2',
- 'imagecreatefromgd2part',
- 'imagecreatefromgd',
- 'imagecreatefromgif',
- 'imagecreatefromjpeg',
- 'imagecreatefrompng',
- 'imagecreatefromstring',
- 'imagecreatefromwbmp',
- 'imagecreatefromwebp',
- 'imagecreatefromxbm',
- 'imagecreatefromxpm',
- 'imagecreatetruecolor',
- 'imagecrop',
- 'imagecropauto',
- 'imagedashedline',
- 'imagedestroy',
- 'imageellipse',
- 'imagefill',
- 'imagefilledarc',
- 'imagefilledellipse',
- 'imagefilledpolygon',
- 'imagefilledrectangle',
- 'imagefilltoborder',
- 'imagefilter',
- 'imageflip',
- 'imagefontheight',
- 'imagefontwidth',
- 'imageftbbox',
- 'imagefttext',
- 'imagegammacorrect',
- 'imagegd2',
- 'imagegd',
- 'imagegif',
- 'imagegrabscreen',
- 'imagegrabwindow',
- 'imageinterlace',
- 'imageistruecolor',
- 'imagejpeg',
- 'imagelayereffect',
- 'imageline',
- 'imageloadfont',
- 'imagepalettecopy',
- 'imagepalettetotruecolor',
- 'imagepng',
- 'imagepolygon',
- 'imagepsbbox',
- 'imagepsencodefont',
- 'imagepsextendfont',
- 'imagepsfreefont',
- 'imagepsloadfont',
- 'imagepsslantfont',
- 'imagepstext',
- 'imagerectangle',
- 'imagerotate',
- 'imagesavealpha',
- 'imagescale',
- 'imagesetbrush',
- 'imagesetinterpolation',
- 'imagesetpixel',
- 'imagesetstyle',
- 'imagesetthickness',
- 'imagesettile',
- 'imagestring',
- 'imagestringup',
- 'imagesx',
- 'imagesy',
- 'imagetruecolortopalette',
- 'imagettfbbox',
- 'imagettftext',
- 'imagetypes',
- 'imagewbmp',
- 'imagewebp',
- 'imagexbm',
- 'iptcembed',
- 'iptcparse',
- 'jpeg2wbmp',
- 'png2wbmp'),
- 'GMP': ('gmp_abs',
- 'gmp_add',
- 'gmp_and',
- 'gmp_clrbit',
- 'gmp_cmp',
- 'gmp_com',
- 'gmp_div_q',
- 'gmp_div_qr',
- 'gmp_div_r',
- 'gmp_div',
- 'gmp_divexact',
- 'gmp_fact',
- 'gmp_gcd',
- 'gmp_gcdext',
- 'gmp_hamdist',
- 'gmp_init',
- 'gmp_intval',
- 'gmp_invert',
- 'gmp_jacobi',
- 'gmp_legendre',
- 'gmp_mod',
- 'gmp_mul',
- 'gmp_neg',
- 'gmp_nextprime',
- 'gmp_or',
- 'gmp_perfect_square',
- 'gmp_popcount',
- 'gmp_pow',
- 'gmp_powm',
- 'gmp_prob_prime',
- 'gmp_random',
- 'gmp_scan0',
- 'gmp_scan1',
- 'gmp_setbit',
- 'gmp_sign',
- 'gmp_sqrt',
- 'gmp_sqrtrem',
- 'gmp_strval',
- 'gmp_sub',
- 'gmp_testbit',
- 'gmp_xor'),
- 'GeoIP': ('geoip_asnum_by_name',
- 'geoip_continent_code_by_name',
- 'geoip_country_code_by_name',
- 'geoip_country_code3_by_name',
- 'geoip_country_name_by_name',
- 'geoip_database_info',
- 'geoip_db_avail',
- 'geoip_db_filename',
- 'geoip_db_get_all_info',
- 'geoip_domain_by_name',
- 'geoip_id_by_name',
- 'geoip_isp_by_name',
- 'geoip_netspeedcell_by_name',
- 'geoip_org_by_name',
- 'geoip_record_by_name',
- 'geoip_region_by_name',
- 'geoip_region_name_by_code',
- 'geoip_setup_custom_directory',
- 'geoip_time_zone_by_country_and_region'),
- 'Gettext': ('bind_textdomain_codeset',
- 'bindtextdomain',
- 'dcgettext',
- 'dcngettext',
- 'dgettext',
- 'dngettext',
- 'gettext',
- 'ngettext',
- 'textdomain'),
- 'GnuPG': ('gnupg_adddecryptkey',
- 'gnupg_addencryptkey',
- 'gnupg_addsignkey',
- 'gnupg_cleardecryptkeys',
- 'gnupg_clearencryptkeys',
- 'gnupg_clearsignkeys',
- 'gnupg_decrypt',
- 'gnupg_decryptverify',
- 'gnupg_encrypt',
- 'gnupg_encryptsign',
- 'gnupg_export',
- 'gnupg_geterror',
- 'gnupg_getprotocol',
- 'gnupg_import',
- 'gnupg_init',
- 'gnupg_keyinfo',
- 'gnupg_setarmor',
- 'gnupg_seterrormode',
- 'gnupg_setsignmode',
- 'gnupg_sign',
- 'gnupg_verify'),
- 'Gopher': ('gopher_parsedir',),
- 'Grapheme': ('grapheme_extract',
- 'grapheme_stripos',
- 'grapheme_stristr',
- 'grapheme_strlen',
- 'grapheme_strpos',
- 'grapheme_strripos',
- 'grapheme_strrpos',
- 'grapheme_strstr',
- 'grapheme_substr'),
- 'Gupnp': ('gupnp_context_get_host_ip',
- 'gupnp_context_get_port',
- 'gupnp_context_get_subscription_timeout',
- 'gupnp_context_host_path',
- 'gupnp_context_new',
- 'gupnp_context_set_subscription_timeout',
- 'gupnp_context_timeout_add',
- 'gupnp_context_unhost_path',
- 'gupnp_control_point_browse_start',
- 'gupnp_control_point_browse_stop',
- 'gupnp_control_point_callback_set',
- 'gupnp_control_point_new',
- 'gupnp_device_action_callback_set',
- 'gupnp_device_info_get_service',
- 'gupnp_device_info_get',
- 'gupnp_root_device_get_available',
- 'gupnp_root_device_get_relative_location',
- 'gupnp_root_device_new',
- 'gupnp_root_device_set_available',
- 'gupnp_root_device_start',
- 'gupnp_root_device_stop',
- 'gupnp_service_action_get',
- 'gupnp_service_action_return_error',
- 'gupnp_service_action_return',
- 'gupnp_service_action_set',
- 'gupnp_service_freeze_notify',
- 'gupnp_service_info_get_introspection',
- 'gupnp_service_info_get',
- 'gupnp_service_introspection_get_state_variable',
- 'gupnp_service_notify',
- 'gupnp_service_proxy_action_get',
- 'gupnp_service_proxy_action_set',
- 'gupnp_service_proxy_add_notify',
- 'gupnp_service_proxy_callback_set',
- 'gupnp_service_proxy_get_subscribed',
- 'gupnp_service_proxy_remove_notify',
- 'gupnp_service_proxy_set_subscribed',
- 'gupnp_service_thaw_notify'),
- 'HTTP': ('http_cache_etag',
- 'http_cache_last_modified',
- 'http_chunked_decode',
- 'http_deflate',
- 'http_inflate',
- 'http_build_cookie',
- 'http_date',
- 'http_get_request_body_stream',
- 'http_get_request_body',
- 'http_get_request_headers',
- 'http_match_etag',
- 'http_match_modified',
- 'http_match_request_header',
- 'http_support',
- 'http_negotiate_charset',
- 'http_negotiate_content_type',
- 'http_negotiate_language',
- 'ob_deflatehandler',
- 'ob_etaghandler',
- 'ob_inflatehandler',
- 'http_parse_cookie',
- 'http_parse_headers',
- 'http_parse_message',
- 'http_parse_params',
- 'http_persistent_handles_clean',
- 'http_persistent_handles_count',
- 'http_persistent_handles_ident',
- 'http_get',
- 'http_head',
- 'http_post_data',
- 'http_post_fields',
- 'http_put_data',
- 'http_put_file',
- 'http_put_stream',
- 'http_request_body_encode',
- 'http_request_method_exists',
- 'http_request_method_name',
- 'http_request_method_register',
- 'http_request_method_unregister',
- 'http_request',
- 'http_redirect',
- 'http_send_content_disposition',
- 'http_send_content_type',
- 'http_send_data',
- 'http_send_file',
- 'http_send_last_modified',
- 'http_send_status',
- 'http_send_stream',
- 'http_throttle',
- 'http_build_str',
- 'http_build_url'),
- 'Hash': ('hash_algos',
- 'hash_copy',
- 'hash_file',
- 'hash_final',
- 'hash_hmac_file',
- 'hash_hmac',
- 'hash_init',
- 'hash_pbkdf2',
- 'hash_update_file',
- 'hash_update_stream',
- 'hash_update',
- 'hash'),
- 'Hyperwave': ('hw_Array2Objrec',
- 'hw_changeobject',
- 'hw_Children',
- 'hw_ChildrenObj',
- 'hw_Close',
- 'hw_Connect',
- 'hw_connection_info',
- 'hw_cp',
- 'hw_Deleteobject',
- 'hw_DocByAnchor',
- 'hw_DocByAnchorObj',
- 'hw_Document_Attributes',
- 'hw_Document_BodyTag',
- 'hw_Document_Content',
- 'hw_Document_SetContent',
- 'hw_Document_Size',
- 'hw_dummy',
- 'hw_EditText',
- 'hw_Error',
- 'hw_ErrorMsg',
- 'hw_Free_Document',
- 'hw_GetAnchors',
- 'hw_GetAnchorsObj',
- 'hw_GetAndLock',
- 'hw_GetChildColl',
- 'hw_GetChildCollObj',
- 'hw_GetChildDocColl',
- 'hw_GetChildDocCollObj',
- 'hw_GetObject',
- 'hw_GetObjectByQuery',
- 'hw_GetObjectByQueryColl',
- 'hw_GetObjectByQueryCollObj',
- 'hw_GetObjectByQueryObj',
- 'hw_GetParents',
- 'hw_GetParentsObj',
- 'hw_getrellink',
- 'hw_GetRemote',
- 'hw_getremotechildren',
- 'hw_GetSrcByDestObj',
- 'hw_GetText',
- 'hw_getusername',
- 'hw_Identify',
- 'hw_InCollections',
- 'hw_Info',
- 'hw_InsColl',
- 'hw_InsDoc',
- 'hw_insertanchors',
- 'hw_InsertDocument',
- 'hw_InsertObject',
- 'hw_mapid',
- 'hw_Modifyobject',
- 'hw_mv',
- 'hw_New_Document',
- 'hw_objrec2array',
- 'hw_Output_Document',
- 'hw_pConnect',
- 'hw_PipeDocument',
- 'hw_Root',
- 'hw_setlinkroot',
- 'hw_stat',
- 'hw_Unlock',
- 'hw_Who'),
- 'Hyperwave API': ('hwapi_attribute_new',
- 'hwapi_content_new',
- 'hwapi_hgcsp',
- 'hwapi_object_new'),
- 'IBM DB2': ('db2_autocommit',
- 'db2_bind_param',
- 'db2_client_info',
- 'db2_close',
- 'db2_column_privileges',
- 'db2_columns',
- 'db2_commit',
- 'db2_conn_error',
- 'db2_conn_errormsg',
- 'db2_connect',
- 'db2_cursor_type',
- 'db2_escape_string',
- 'db2_exec',
- 'db2_execute',
- 'db2_fetch_array',
- 'db2_fetch_assoc',
- 'db2_fetch_both',
- 'db2_fetch_object',
- 'db2_fetch_row',
- 'db2_field_display_size',
- 'db2_field_name',
- 'db2_field_num',
- 'db2_field_precision',
- 'db2_field_scale',
- 'db2_field_type',
- 'db2_field_width',
- 'db2_foreign_keys',
- 'db2_free_result',
- 'db2_free_stmt',
- 'db2_get_option',
- 'db2_last_insert_id',
- 'db2_lob_read',
- 'db2_next_result',
- 'db2_num_fields',
- 'db2_num_rows',
- 'db2_pclose',
- 'db2_pconnect',
- 'db2_prepare',
- 'db2_primary_keys',
- 'db2_procedure_columns',
- 'db2_procedures',
- 'db2_result',
- 'db2_rollback',
- 'db2_server_info',
- 'db2_set_option',
- 'db2_special_columns',
- 'db2_statistics',
- 'db2_stmt_error',
- 'db2_stmt_errormsg',
- 'db2_table_privileges',
- 'db2_tables'),
- 'ID3': ('id3_get_frame_long_name',
- 'id3_get_frame_short_name',
- 'id3_get_genre_id',
- 'id3_get_genre_list',
- 'id3_get_genre_name',
- 'id3_get_tag',
- 'id3_get_version',
- 'id3_remove_tag',
- 'id3_set_tag'),
- 'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'),
- 'IIS': ('iis_add_server',
- 'iis_get_dir_security',
- 'iis_get_script_map',
- 'iis_get_server_by_comment',
- 'iis_get_server_by_path',
- 'iis_get_server_rights',
- 'iis_get_service_state',
- 'iis_remove_server',
- 'iis_set_app_settings',
- 'iis_set_dir_security',
- 'iis_set_script_map',
- 'iis_set_server_rights',
- 'iis_start_server',
- 'iis_start_service',
- 'iis_stop_server',
- 'iis_stop_service'),
- 'IMAP': ('imap_8bit',
- 'imap_alerts',
- 'imap_append',
- 'imap_base64',
- 'imap_binary',
- 'imap_body',
- 'imap_bodystruct',
- 'imap_check',
- 'imap_clearflag_full',
- 'imap_close',
- 'imap_create',
- 'imap_createmailbox',
- 'imap_delete',
- 'imap_deletemailbox',
- 'imap_errors',
- 'imap_expunge',
- 'imap_fetch_overview',
- 'imap_fetchbody',
- 'imap_fetchheader',
- 'imap_fetchmime',
- 'imap_fetchstructure',
- 'imap_fetchtext',
- 'imap_gc',
- 'imap_get_quota',
- 'imap_get_quotaroot',
- 'imap_getacl',
- 'imap_getmailboxes',
- 'imap_getsubscribed',
- 'imap_header',
- 'imap_headerinfo',
- 'imap_headers',
- 'imap_last_error',
- 'imap_list',
- 'imap_listmailbox',
- 'imap_listscan',
- 'imap_listsubscribed',
- 'imap_lsub',
- 'imap_mail_compose',
- 'imap_mail_copy',
- 'imap_mail_move',
- 'imap_mail',
- 'imap_mailboxmsginfo',
- 'imap_mime_header_decode',
- 'imap_msgno',
- 'imap_num_msg',
- 'imap_num_recent',
- 'imap_open',
- 'imap_ping',
- 'imap_qprint',
- 'imap_rename',
- 'imap_renamemailbox',
- 'imap_reopen',
- 'imap_rfc822_parse_adrlist',
- 'imap_rfc822_parse_headers',
- 'imap_rfc822_write_address',
- 'imap_savebody',
- 'imap_scan',
- 'imap_scanmailbox',
- 'imap_search',
- 'imap_set_quota',
- 'imap_setacl',
- 'imap_setflag_full',
- 'imap_sort',
- 'imap_status',
- 'imap_subscribe',
- 'imap_thread',
- 'imap_timeout',
- 'imap_uid',
- 'imap_undelete',
- 'imap_unsubscribe',
- 'imap_utf7_decode',
- 'imap_utf7_encode',
- 'imap_utf8'),
- 'Informix': ('ifx_affected_rows',
- 'ifx_blobinfile_mode',
- 'ifx_byteasvarchar',
- 'ifx_close',
- 'ifx_connect',
- 'ifx_copy_blob',
- 'ifx_create_blob',
- 'ifx_create_char',
- 'ifx_do',
- 'ifx_error',
- 'ifx_errormsg',
- 'ifx_fetch_row',
- 'ifx_fieldproperties',
- 'ifx_fieldtypes',
- 'ifx_free_blob',
- 'ifx_free_char',
- 'ifx_free_result',
- 'ifx_get_blob',
- 'ifx_get_char',
- 'ifx_getsqlca',
- 'ifx_htmltbl_result',
- 'ifx_nullformat',
- 'ifx_num_fields',
- 'ifx_num_rows',
- 'ifx_pconnect',
- 'ifx_prepare',
- 'ifx_query',
- 'ifx_textasvarchar',
- 'ifx_update_blob',
- 'ifx_update_char',
- 'ifxus_close_slob',
- 'ifxus_create_slob',
- 'ifxus_free_slob',
- 'ifxus_open_slob',
- 'ifxus_read_slob',
- 'ifxus_seek_slob',
- 'ifxus_tell_slob',
- 'ifxus_write_slob'),
- 'Ingres': ('ingres_autocommit_state',
- 'ingres_autocommit',
- 'ingres_charset',
- 'ingres_close',
- 'ingres_commit',
- 'ingres_connect',
- 'ingres_cursor',
- 'ingres_errno',
- 'ingres_error',
- 'ingres_errsqlstate',
- 'ingres_escape_string',
- 'ingres_execute',
- 'ingres_fetch_array',
- 'ingres_fetch_assoc',
- 'ingres_fetch_object',
- 'ingres_fetch_proc_return',
- 'ingres_fetch_row',
- 'ingres_field_length',
- 'ingres_field_name',
- 'ingres_field_nullable',
- 'ingres_field_precision',
- 'ingres_field_scale',
- 'ingres_field_type',
- 'ingres_free_result',
- 'ingres_next_error',
- 'ingres_num_fields',
- 'ingres_num_rows',
- 'ingres_pconnect',
- 'ingres_prepare',
- 'ingres_query',
- 'ingres_result_seek',
- 'ingres_rollback',
- 'ingres_set_environment',
- 'ingres_unbuffered_query'),
- 'Inotify': ('inotify_add_watch',
- 'inotify_init',
- 'inotify_queue_len',
- 'inotify_read',
- 'inotify_rm_watch'),
- 'JSON': ('json_decode',
- 'json_encode',
- 'json_last_error_msg',
- 'json_last_error'),
- 'Java': ('java_last_exception_clear', 'java_last_exception_get'),
- 'Judy': ('judy_type', 'judy_version'),
- 'KADM5': ('kadm5_chpass_principal',
- 'kadm5_create_principal',
- 'kadm5_delete_principal',
- 'kadm5_destroy',
- 'kadm5_flush',
- 'kadm5_get_policies',
- 'kadm5_get_principal',
- 'kadm5_get_principals',
- 'kadm5_init_with_password',
- 'kadm5_modify_principal'),
- 'LDAP': ('ldap_8859_to_t61',
- 'ldap_add',
- 'ldap_bind',
- 'ldap_close',
- 'ldap_compare',
- 'ldap_connect',
- 'ldap_control_paged_result_response',
- 'ldap_control_paged_result',
- 'ldap_count_entries',
- 'ldap_delete',
- 'ldap_dn2ufn',
- 'ldap_err2str',
- 'ldap_errno',
- 'ldap_error',
- 'ldap_explode_dn',
- 'ldap_first_attribute',
- 'ldap_first_entry',
- 'ldap_first_reference',
- 'ldap_free_result',
- 'ldap_get_attributes',
- 'ldap_get_dn',
- 'ldap_get_entries',
- 'ldap_get_option',
- 'ldap_get_values_len',
- 'ldap_get_values',
- 'ldap_list',
- 'ldap_mod_add',
- 'ldap_mod_del',
- 'ldap_mod_replace',
- 'ldap_modify',
- 'ldap_next_attribute',
- 'ldap_next_entry',
- 'ldap_next_reference',
- 'ldap_parse_reference',
- 'ldap_parse_result',
- 'ldap_read',
- 'ldap_rename',
- 'ldap_sasl_bind',
- 'ldap_search',
- 'ldap_set_option',
- 'ldap_set_rebind_proc',
- 'ldap_sort',
- 'ldap_start_tls',
- 'ldap_t61_to_8859',
- 'ldap_unbind'),
- 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
- 'Libevent': ('event_add',
- 'event_base_free',
- 'event_base_loop',
- 'event_base_loopbreak',
- 'event_base_loopexit',
- 'event_base_new',
- 'event_base_priority_init',
- 'event_base_set',
- 'event_buffer_base_set',
- 'event_buffer_disable',
- 'event_buffer_enable',
- 'event_buffer_fd_set',
- 'event_buffer_free',
- 'event_buffer_new',
- 'event_buffer_priority_set',
- 'event_buffer_read',
- 'event_buffer_set_callback',
- 'event_buffer_timeout_set',
- 'event_buffer_watermark_set',
- 'event_buffer_write',
- 'event_del',
- 'event_free',
- 'event_new',
- 'event_set'),
- 'Lotus Notes': ('notes_body',
- 'notes_copy_db',
- 'notes_create_db',
- 'notes_create_note',
- 'notes_drop_db',
- 'notes_find_note',
- 'notes_header_info',
- 'notes_list_msgs',
- 'notes_mark_read',
- 'notes_mark_unread',
- 'notes_nav_create',
- 'notes_search',
- 'notes_unread',
- 'notes_version'),
- 'MCVE': ('m_checkstatus',
- 'm_completeauthorizations',
- 'm_connect',
- 'm_connectionerror',
- 'm_deletetrans',
- 'm_destroyconn',
- 'm_destroyengine',
- 'm_getcell',
- 'm_getcellbynum',
- 'm_getcommadelimited',
- 'm_getheader',
- 'm_initconn',
- 'm_initengine',
- 'm_iscommadelimited',
- 'm_maxconntimeout',
- 'm_monitor',
- 'm_numcolumns',
- 'm_numrows',
- 'm_parsecommadelimited',
- 'm_responsekeys',
- 'm_responseparam',
- 'm_returnstatus',
- 'm_setblocking',
- 'm_setdropfile',
- 'm_setip',
- 'm_setssl_cafile',
- 'm_setssl_files',
- 'm_setssl',
- 'm_settimeout',
- 'm_sslcert_gen_hash',
- 'm_transactionssent',
- 'm_transinqueue',
- 'm_transkeyval',
- 'm_transnew',
- 'm_transsend',
- 'm_uwait',
- 'm_validateidentifier',
- 'm_verifyconnection',
- 'm_verifysslcert'),
- 'Mail': ('ezmlm_hash', 'mail'),
- 'Mailparse': ('mailparse_determine_best_xfer_encoding',
- 'mailparse_msg_create',
- 'mailparse_msg_extract_part_file',
- 'mailparse_msg_extract_part',
- 'mailparse_msg_extract_whole_part_file',
- 'mailparse_msg_free',
- 'mailparse_msg_get_part_data',
- 'mailparse_msg_get_part',
- 'mailparse_msg_get_structure',
- 'mailparse_msg_parse_file',
- 'mailparse_msg_parse',
- 'mailparse_rfc822_parse_addresses',
- 'mailparse_stream_encode',
- 'mailparse_uudecode_all'),
- 'Math': ('abs',
- 'acos',
- 'acosh',
- 'asin',
- 'asinh',
- 'atan2',
- 'atan',
- 'atanh',
- 'base_convert',
- 'bindec',
- 'ceil',
- 'cos',
- 'cosh',
- 'decbin',
- 'dechex',
- 'decoct',
- 'deg2rad',
- 'exp',
- 'expm1',
- 'floor',
- 'fmod',
- 'getrandmax',
- 'hexdec',
- 'hypot',
- 'is_finite',
- 'is_infinite',
- 'is_nan',
- 'lcg_value',
- 'log10',
- 'log1p',
- 'log',
- 'max',
- 'min',
- 'mt_getrandmax',
- 'mt_rand',
- 'mt_srand',
- 'octdec',
- 'pi',
- 'pow',
- 'rad2deg',
- 'rand',
- 'round',
- 'sin',
- 'sinh',
- 'sqrt',
- 'srand',
- 'tan',
- 'tanh'),
- 'MaxDB': ('maxdb_affected_rows',
- 'maxdb_autocommit',
- 'maxdb_bind_param',
- 'maxdb_bind_result',
- 'maxdb_change_user',
- 'maxdb_character_set_name',
- 'maxdb_client_encoding',
- 'maxdb_close_long_data',
- 'maxdb_close',
- 'maxdb_commit',
- 'maxdb_connect_errno',
- 'maxdb_connect_error',
- 'maxdb_connect',
- 'maxdb_data_seek',
- 'maxdb_debug',
- 'maxdb_disable_reads_from_master',
- 'maxdb_disable_rpl_parse',
- 'maxdb_dump_debug_info',
- 'maxdb_embedded_connect',
- 'maxdb_enable_reads_from_master',
- 'maxdb_enable_rpl_parse',
- 'maxdb_errno',
- 'maxdb_error',
- 'maxdb_escape_string',
- 'maxdb_execute',
- 'maxdb_fetch_array',
- 'maxdb_fetch_assoc',
- 'maxdb_fetch_field_direct',
- 'maxdb_fetch_field',
- 'maxdb_fetch_fields',
- 'maxdb_fetch_lengths',
- 'maxdb_fetch_object',
- 'maxdb_fetch_row',
- 'maxdb_fetch',
- 'maxdb_field_count',
- 'maxdb_field_seek',
- 'maxdb_field_tell',
- 'maxdb_free_result',
- 'maxdb_get_client_info',
- 'maxdb_get_client_version',
- 'maxdb_get_host_info',
- 'maxdb_get_metadata',
- 'maxdb_get_proto_info',
- 'maxdb_get_server_info',
- 'maxdb_get_server_version',
- 'maxdb_info',
- 'maxdb_init',
- 'maxdb_insert_id',
- 'maxdb_kill',
- 'maxdb_master_query',
- 'maxdb_more_results',
- 'maxdb_multi_query',
- 'maxdb_next_result',
- 'maxdb_num_fields',
- 'maxdb_num_rows',
- 'maxdb_options',
- 'maxdb_param_count',
- 'maxdb_ping',
- 'maxdb_prepare',
- 'maxdb_query',
- 'maxdb_real_connect',
- 'maxdb_real_escape_string',
- 'maxdb_real_query',
- 'maxdb_report',
- 'maxdb_rollback',
- 'maxdb_rpl_parse_enabled',
- 'maxdb_rpl_probe',
- 'maxdb_rpl_query_type',
- 'maxdb_select_db',
- 'maxdb_send_long_data',
- 'maxdb_send_query',
- 'maxdb_server_end',
- 'maxdb_server_init',
- 'maxdb_set_opt',
- 'maxdb_sqlstate',
- 'maxdb_ssl_set',
- 'maxdb_stat',
- 'maxdb_stmt_affected_rows',
- 'maxdb_stmt_bind_param',
- 'maxdb_stmt_bind_result',
- 'maxdb_stmt_close_long_data',
- 'maxdb_stmt_close',
- 'maxdb_stmt_data_seek',
- 'maxdb_stmt_errno',
- 'maxdb_stmt_error',
- 'maxdb_stmt_execute',
- 'maxdb_stmt_fetch',
- 'maxdb_stmt_free_result',
- 'maxdb_stmt_init',
- 'maxdb_stmt_num_rows',
- 'maxdb_stmt_param_count',
- 'maxdb_stmt_prepare',
- 'maxdb_stmt_reset',
- 'maxdb_stmt_result_metadata',
- 'maxdb_stmt_send_long_data',
- 'maxdb_stmt_sqlstate',
- 'maxdb_stmt_store_result',
- 'maxdb_store_result',
- 'maxdb_thread_id',
- 'maxdb_thread_safe',
- 'maxdb_use_result',
- 'maxdb_warning_count'),
- 'Mcrypt': ('mcrypt_cbc',
- 'mcrypt_cfb',
- 'mcrypt_create_iv',
- 'mcrypt_decrypt',
- 'mcrypt_ecb',
- 'mcrypt_enc_get_algorithms_name',
- 'mcrypt_enc_get_block_size',
- 'mcrypt_enc_get_iv_size',
- 'mcrypt_enc_get_key_size',
- 'mcrypt_enc_get_modes_name',
- 'mcrypt_enc_get_supported_key_sizes',
- 'mcrypt_enc_is_block_algorithm_mode',
- 'mcrypt_enc_is_block_algorithm',
- 'mcrypt_enc_is_block_mode',
- 'mcrypt_enc_self_test',
- 'mcrypt_encrypt',
- 'mcrypt_generic_deinit',
- 'mcrypt_generic_end',
- 'mcrypt_generic_init',
- 'mcrypt_generic',
- 'mcrypt_get_block_size',
- 'mcrypt_get_cipher_name',
- 'mcrypt_get_iv_size',
- 'mcrypt_get_key_size',
- 'mcrypt_list_algorithms',
- 'mcrypt_list_modes',
- 'mcrypt_module_close',
- 'mcrypt_module_get_algo_block_size',
- 'mcrypt_module_get_algo_key_size',
- 'mcrypt_module_get_supported_key_sizes',
- 'mcrypt_module_is_block_algorithm_mode',
- 'mcrypt_module_is_block_algorithm',
- 'mcrypt_module_is_block_mode',
- 'mcrypt_module_open',
- 'mcrypt_module_self_test',
- 'mcrypt_ofb',
- 'mdecrypt_generic'),
- 'Memcache': ('memcache_debug',),
- 'Mhash': ('mhash_count',
- 'mhash_get_block_size',
- 'mhash_get_hash_name',
- 'mhash_keygen_s2k',
- 'mhash'),
- 'Ming': ('ming_keypress',
- 'ming_setcubicthreshold',
- 'ming_setscale',
- 'ming_setswfcompression',
- 'ming_useconstants',
- 'ming_useswfversion'),
- 'Misc.': ('connection_aborted',
- 'connection_status',
- 'connection_timeout',
- 'constant',
- 'define',
- 'defined',
- 'die',
- 'eval',
- 'exit',
- 'get_browser',
- '__halt_compiler',
- 'highlight_file',
- 'highlight_string',
- 'ignore_user_abort',
- 'pack',
- 'php_check_syntax',
- 'php_strip_whitespace',
- 'show_source',
- 'sleep',
- 'sys_getloadavg',
- 'time_nanosleep',
- 'time_sleep_until',
- 'uniqid',
- 'unpack',
- 'usleep'),
- 'Mongo': ('bson_decode', 'bson_encode'),
- 'Msession': ('msession_connect',
- 'msession_count',
- 'msession_create',
- 'msession_destroy',
- 'msession_disconnect',
- 'msession_find',
- 'msession_get_array',
- 'msession_get_data',
- 'msession_get',
- 'msession_inc',
- 'msession_list',
- 'msession_listvar',
- 'msession_lock',
- 'msession_plugin',
- 'msession_randstr',
- 'msession_set_array',
- 'msession_set_data',
- 'msession_set',
- 'msession_timeout',
- 'msession_uniq',
- 'msession_unlock'),
- 'Mssql': ('mssql_bind',
- 'mssql_close',
- 'mssql_connect',
- 'mssql_data_seek',
- 'mssql_execute',
- 'mssql_fetch_array',
- 'mssql_fetch_assoc',
- 'mssql_fetch_batch',
- 'mssql_fetch_field',
- 'mssql_fetch_object',
- 'mssql_fetch_row',
- 'mssql_field_length',
- 'mssql_field_name',
- 'mssql_field_seek',
- 'mssql_field_type',
- 'mssql_free_result',
- 'mssql_free_statement',
- 'mssql_get_last_message',
- 'mssql_guid_string',
- 'mssql_init',
- 'mssql_min_error_severity',
- 'mssql_min_message_severity',
- 'mssql_next_result',
- 'mssql_num_fields',
- 'mssql_num_rows',
- 'mssql_pconnect',
- 'mssql_query',
- 'mssql_result',
- 'mssql_rows_affected',
- 'mssql_select_db'),
- 'Multibyte String': ('mb_check_encoding',
- 'mb_convert_case',
- 'mb_convert_encoding',
- 'mb_convert_kana',
- 'mb_convert_variables',
- 'mb_decode_mimeheader',
- 'mb_decode_numericentity',
- 'mb_detect_encoding',
- 'mb_detect_order',
- 'mb_encode_mimeheader',
- 'mb_encode_numericentity',
- 'mb_encoding_aliases',
- 'mb_ereg_match',
- 'mb_ereg_replace_callback',
- 'mb_ereg_replace',
- 'mb_ereg_search_getpos',
- 'mb_ereg_search_getregs',
- 'mb_ereg_search_init',
- 'mb_ereg_search_pos',
- 'mb_ereg_search_regs',
- 'mb_ereg_search_setpos',
- 'mb_ereg_search',
- 'mb_ereg',
- 'mb_eregi_replace',
- 'mb_eregi',
- 'mb_get_info',
- 'mb_http_input',
- 'mb_http_output',
- 'mb_internal_encoding',
- 'mb_language',
- 'mb_list_encodings',
- 'mb_output_handler',
- 'mb_parse_str',
- 'mb_preferred_mime_name',
- 'mb_regex_encoding',
- 'mb_regex_set_options',
- 'mb_send_mail',
- 'mb_split',
- 'mb_strcut',
- 'mb_strimwidth',
- 'mb_stripos',
- 'mb_stristr',
- 'mb_strlen',
- 'mb_strpos',
- 'mb_strrchr',
- 'mb_strrichr',
- 'mb_strripos',
- 'mb_strrpos',
- 'mb_strstr',
- 'mb_strtolower',
- 'mb_strtoupper',
- 'mb_strwidth',
- 'mb_substitute_character',
- 'mb_substr_count',
- 'mb_substr'),
- 'MySQL': ('mysql_affected_rows',
- 'mysql_client_encoding',
- 'mysql_close',
- 'mysql_connect',
- 'mysql_create_db',
- 'mysql_data_seek',
- 'mysql_db_name',
- 'mysql_db_query',
- 'mysql_drop_db',
- 'mysql_errno',
- 'mysql_error',
- 'mysql_escape_string',
- 'mysql_fetch_array',
- 'mysql_fetch_assoc',
- 'mysql_fetch_field',
- 'mysql_fetch_lengths',
- 'mysql_fetch_object',
- 'mysql_fetch_row',
- 'mysql_field_flags',
- 'mysql_field_len',
- 'mysql_field_name',
- 'mysql_field_seek',
- 'mysql_field_table',
- 'mysql_field_type',
- 'mysql_free_result',
- 'mysql_get_client_info',
- 'mysql_get_host_info',
- 'mysql_get_proto_info',
- 'mysql_get_server_info',
- 'mysql_info',
- 'mysql_insert_id',
- 'mysql_list_dbs',
- 'mysql_list_fields',
- 'mysql_list_processes',
- 'mysql_list_tables',
- 'mysql_num_fields',
- 'mysql_num_rows',
- 'mysql_pconnect',
- 'mysql_ping',
- 'mysql_query',
- 'mysql_real_escape_string',
- 'mysql_result',
- 'mysql_select_db',
- 'mysql_set_charset',
- 'mysql_stat',
- 'mysql_tablename',
- 'mysql_thread_id',
- 'mysql_unbuffered_query'),
- 'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'),
- 'Mysqlnd_ms': ('mysqlnd_ms_dump_servers',
- 'mysqlnd_ms_fabric_select_global',
- 'mysqlnd_ms_fabric_select_shard',
- 'mysqlnd_ms_get_last_gtid',
- 'mysqlnd_ms_get_last_used_connection',
- 'mysqlnd_ms_get_stats',
- 'mysqlnd_ms_match_wild',
- 'mysqlnd_ms_query_is_select',
- 'mysqlnd_ms_set_qos',
- 'mysqlnd_ms_set_user_pick_server'),
- 'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd',
- 'mysqlnd_uh_set_connection_proxy',
- 'mysqlnd_uh_set_statement_proxy'),
- 'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'),
- 'Ncurses': ('ncurses_addch',
- 'ncurses_addchnstr',
- 'ncurses_addchstr',
- 'ncurses_addnstr',
- 'ncurses_addstr',
- 'ncurses_assume_default_colors',
- 'ncurses_attroff',
- 'ncurses_attron',
- 'ncurses_attrset',
- 'ncurses_baudrate',
- 'ncurses_beep',
- 'ncurses_bkgd',
- 'ncurses_bkgdset',
- 'ncurses_border',
- 'ncurses_bottom_panel',
- 'ncurses_can_change_color',
- 'ncurses_cbreak',
- 'ncurses_clear',
- 'ncurses_clrtobot',
- 'ncurses_clrtoeol',
- 'ncurses_color_content',
- 'ncurses_color_set',
- 'ncurses_curs_set',
- 'ncurses_def_prog_mode',
- 'ncurses_def_shell_mode',
- 'ncurses_define_key',
- 'ncurses_del_panel',
- 'ncurses_delay_output',
- 'ncurses_delch',
- 'ncurses_deleteln',
- 'ncurses_delwin',
- 'ncurses_doupdate',
- 'ncurses_echo',
- 'ncurses_echochar',
- 'ncurses_end',
- 'ncurses_erase',
- 'ncurses_erasechar',
- 'ncurses_filter',
- 'ncurses_flash',
- 'ncurses_flushinp',
- 'ncurses_getch',
- 'ncurses_getmaxyx',
- 'ncurses_getmouse',
- 'ncurses_getyx',
- 'ncurses_halfdelay',
- 'ncurses_has_colors',
- 'ncurses_has_ic',
- 'ncurses_has_il',
- 'ncurses_has_key',
- 'ncurses_hide_panel',
- 'ncurses_hline',
- 'ncurses_inch',
- 'ncurses_init_color',
- 'ncurses_init_pair',
- 'ncurses_init',
- 'ncurses_insch',
- 'ncurses_insdelln',
- 'ncurses_insertln',
- 'ncurses_insstr',
- 'ncurses_instr',
- 'ncurses_isendwin',
- 'ncurses_keyok',
- 'ncurses_keypad',
- 'ncurses_killchar',
- 'ncurses_longname',
- 'ncurses_meta',
- 'ncurses_mouse_trafo',
- 'ncurses_mouseinterval',
- 'ncurses_mousemask',
- 'ncurses_move_panel',
- 'ncurses_move',
- 'ncurses_mvaddch',
- 'ncurses_mvaddchnstr',
- 'ncurses_mvaddchstr',
- 'ncurses_mvaddnstr',
- 'ncurses_mvaddstr',
- 'ncurses_mvcur',
- 'ncurses_mvdelch',
- 'ncurses_mvgetch',
- 'ncurses_mvhline',
- 'ncurses_mvinch',
- 'ncurses_mvvline',
- 'ncurses_mvwaddstr',
- 'ncurses_napms',
- 'ncurses_new_panel',
- 'ncurses_newpad',
- 'ncurses_newwin',
- 'ncurses_nl',
- 'ncurses_nocbreak',
- 'ncurses_noecho',
- 'ncurses_nonl',
- 'ncurses_noqiflush',
- 'ncurses_noraw',
- 'ncurses_pair_content',
- 'ncurses_panel_above',
- 'ncurses_panel_below',
- 'ncurses_panel_window',
- 'ncurses_pnoutrefresh',
- 'ncurses_prefresh',
- 'ncurses_putp',
- 'ncurses_qiflush',
- 'ncurses_raw',
- 'ncurses_refresh',
- 'ncurses_replace_panel',
- 'ncurses_reset_prog_mode',
- 'ncurses_reset_shell_mode',
- 'ncurses_resetty',
- 'ncurses_savetty',
- 'ncurses_scr_dump',
- 'ncurses_scr_init',
- 'ncurses_scr_restore',
- 'ncurses_scr_set',
- 'ncurses_scrl',
- 'ncurses_show_panel',
- 'ncurses_slk_attr',
- 'ncurses_slk_attroff',
- 'ncurses_slk_attron',
- 'ncurses_slk_attrset',
- 'ncurses_slk_clear',
- 'ncurses_slk_color',
- 'ncurses_slk_init',
- 'ncurses_slk_noutrefresh',
- 'ncurses_slk_refresh',
- 'ncurses_slk_restore',
- 'ncurses_slk_set',
- 'ncurses_slk_touch',
- 'ncurses_standend',
- 'ncurses_standout',
- 'ncurses_start_color',
- 'ncurses_termattrs',
- 'ncurses_termname',
- 'ncurses_timeout',
- 'ncurses_top_panel',
- 'ncurses_typeahead',
- 'ncurses_ungetch',
- 'ncurses_ungetmouse',
- 'ncurses_update_panels',
- 'ncurses_use_default_colors',
- 'ncurses_use_env',
- 'ncurses_use_extended_names',
- 'ncurses_vidattr',
- 'ncurses_vline',
- 'ncurses_waddch',
- 'ncurses_waddstr',
- 'ncurses_wattroff',
- 'ncurses_wattron',
- 'ncurses_wattrset',
- 'ncurses_wborder',
- 'ncurses_wclear',
- 'ncurses_wcolor_set',
- 'ncurses_werase',
- 'ncurses_wgetch',
- 'ncurses_whline',
- 'ncurses_wmouse_trafo',
- 'ncurses_wmove',
- 'ncurses_wnoutrefresh',
- 'ncurses_wrefresh',
- 'ncurses_wstandend',
- 'ncurses_wstandout',
- 'ncurses_wvline'),
- 'Network': ('checkdnsrr',
- 'closelog',
- 'define_syslog_variables',
- 'dns_check_record',
- 'dns_get_mx',
- 'dns_get_record',
- 'fsockopen',
- 'gethostbyaddr',
- 'gethostbyname',
- 'gethostbynamel',
- 'gethostname',
- 'getmxrr',
- 'getprotobyname',
- 'getprotobynumber',
- 'getservbyname',
- 'getservbyport',
- 'header_register_callback',
- 'header_remove',
- 'header',
- 'headers_list',
- 'headers_sent',
- 'http_response_code',
- 'inet_ntop',
- 'inet_pton',
- 'ip2long',
- 'long2ip',
- 'openlog',
- 'pfsockopen',
- 'setcookie',
- 'setrawcookie',
- 'socket_get_status',
- 'socket_set_blocking',
- 'socket_set_timeout',
- 'syslog'),
- 'Newt': ('newt_bell',
- 'newt_button_bar',
- 'newt_button',
- 'newt_centered_window',
- 'newt_checkbox_get_value',
- 'newt_checkbox_set_flags',
- 'newt_checkbox_set_value',
- 'newt_checkbox_tree_add_item',
- 'newt_checkbox_tree_find_item',
- 'newt_checkbox_tree_get_current',
- 'newt_checkbox_tree_get_entry_value',
- 'newt_checkbox_tree_get_multi_selection',
- 'newt_checkbox_tree_get_selection',
- 'newt_checkbox_tree_multi',
- 'newt_checkbox_tree_set_current',
- 'newt_checkbox_tree_set_entry_value',
- 'newt_checkbox_tree_set_entry',
- 'newt_checkbox_tree_set_width',
- 'newt_checkbox_tree',
- 'newt_checkbox',
- 'newt_clear_key_buffer',
- 'newt_cls',
- 'newt_compact_button',
- 'newt_component_add_callback',
- 'newt_component_takes_focus',
- 'newt_create_grid',
- 'newt_cursor_off',
- 'newt_cursor_on',
- 'newt_delay',
- 'newt_draw_form',
- 'newt_draw_root_text',
- 'newt_entry_get_value',
- 'newt_entry_set_filter',
- 'newt_entry_set_flags',
- 'newt_entry_set',
- 'newt_entry',
- 'newt_finished',
- 'newt_form_add_component',
- 'newt_form_add_components',
- 'newt_form_add_hot_key',
- 'newt_form_destroy',
- 'newt_form_get_current',
- 'newt_form_run',
- 'newt_form_set_background',
- 'newt_form_set_height',
- 'newt_form_set_size',
- 'newt_form_set_timer',
- 'newt_form_set_width',
- 'newt_form_watch_fd',
- 'newt_form',
- 'newt_get_screen_size',
- 'newt_grid_add_components_to_form',
- 'newt_grid_basic_window',
- 'newt_grid_free',
- 'newt_grid_get_size',
- 'newt_grid_h_close_stacked',
- 'newt_grid_h_stacked',
- 'newt_grid_place',
- 'newt_grid_set_field',
- 'newt_grid_simple_window',
- 'newt_grid_v_close_stacked',
- 'newt_grid_v_stacked',
- 'newt_grid_wrapped_window_at',
- 'newt_grid_wrapped_window',
- 'newt_init',
- 'newt_label_set_text',
- 'newt_label',
- 'newt_listbox_append_entry',
- 'newt_listbox_clear_selection',
- 'newt_listbox_clear',
- 'newt_listbox_delete_entry',
- 'newt_listbox_get_current',
- 'newt_listbox_get_selection',
- 'newt_listbox_insert_entry',
- 'newt_listbox_item_count',
- 'newt_listbox_select_item',
- 'newt_listbox_set_current_by_key',
- 'newt_listbox_set_current',
- 'newt_listbox_set_data',
- 'newt_listbox_set_entry',
- 'newt_listbox_set_width',
- 'newt_listbox',
- 'newt_listitem_get_data',
- 'newt_listitem_set',
- 'newt_listitem',
- 'newt_open_window',
- 'newt_pop_help_line',
- 'newt_pop_window',
- 'newt_push_help_line',
- 'newt_radio_get_current',
- 'newt_radiobutton',
- 'newt_redraw_help_line',
- 'newt_reflow_text',
- 'newt_refresh',
- 'newt_resize_screen',
- 'newt_resume',
- 'newt_run_form',
- 'newt_scale_set',
- 'newt_scale',
- 'newt_scrollbar_set',
- 'newt_set_help_callback',
- 'newt_set_suspend_callback',
- 'newt_suspend',
- 'newt_textbox_get_num_lines',
- 'newt_textbox_reflowed',
- 'newt_textbox_set_height',
- 'newt_textbox_set_text',
- 'newt_textbox',
- 'newt_vertical_scrollbar',
- 'newt_wait_for_key',
- 'newt_win_choice',
- 'newt_win_entries',
- 'newt_win_menu',
- 'newt_win_message',
- 'newt_win_messagev',
- 'newt_win_ternary'),
- 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
- 'OCI8': ('oci_bind_array_by_name',
- 'oci_bind_by_name',
- 'oci_cancel',
- 'oci_client_version',
- 'oci_close',
- 'oci_commit',
- 'oci_connect',
- 'oci_define_by_name',
- 'oci_error',
- 'oci_execute',
- 'oci_fetch_all',
- 'oci_fetch_array',
- 'oci_fetch_assoc',
- 'oci_fetch_object',
- 'oci_fetch_row',
- 'oci_fetch',
- 'oci_field_is_null',
- 'oci_field_name',
- 'oci_field_precision',
- 'oci_field_scale',
- 'oci_field_size',
- 'oci_field_type_raw',
- 'oci_field_type',
- 'oci_free_descriptor',
- 'oci_free_statement',
- 'oci_get_implicit_resultset',
- 'oci_internal_debug',
- 'oci_lob_copy',
- 'oci_lob_is_equal',
- 'oci_new_collection',
- 'oci_new_connect',
- 'oci_new_cursor',
- 'oci_new_descriptor',
- 'oci_num_fields',
- 'oci_num_rows',
- 'oci_parse',
- 'oci_password_change',
- 'oci_pconnect',
- 'oci_result',
- 'oci_rollback',
- 'oci_server_version',
- 'oci_set_action',
- 'oci_set_client_identifier',
- 'oci_set_client_info',
- 'oci_set_edition',
- 'oci_set_module_name',
- 'oci_set_prefetch',
- 'oci_statement_type'),
- 'ODBC': ('odbc_autocommit',
- 'odbc_binmode',
- 'odbc_close_all',
- 'odbc_close',
- 'odbc_columnprivileges',
- 'odbc_columns',
- 'odbc_commit',
- 'odbc_connect',
- 'odbc_cursor',
- 'odbc_data_source',
- 'odbc_do',
- 'odbc_error',
- 'odbc_errormsg',
- 'odbc_exec',
- 'odbc_execute',
- 'odbc_fetch_array',
- 'odbc_fetch_into',
- 'odbc_fetch_object',
- 'odbc_fetch_row',
- 'odbc_field_len',
- 'odbc_field_name',
- 'odbc_field_num',
- 'odbc_field_precision',
- 'odbc_field_scale',
- 'odbc_field_type',
- 'odbc_foreignkeys',
- 'odbc_free_result',
- 'odbc_gettypeinfo',
- 'odbc_longreadlen',
- 'odbc_next_result',
- 'odbc_num_fields',
- 'odbc_num_rows',
- 'odbc_pconnect',
- 'odbc_prepare',
- 'odbc_primarykeys',
- 'odbc_procedurecolumns',
- 'odbc_procedures',
- 'odbc_result_all',
- 'odbc_result',
- 'odbc_rollback',
- 'odbc_setoption',
- 'odbc_specialcolumns',
- 'odbc_statistics',
- 'odbc_tableprivileges',
- 'odbc_tables'),
- 'OPcache': ('opcache_compile_file',
- 'opcache_get_configuration',
- 'opcache_get_status',
- 'opcache_invalidate',
- 'opcache_reset'),
- 'Object Aggregation': ('aggregate_info',
- 'aggregate_methods_by_list',
- 'aggregate_methods_by_regexp',
- 'aggregate_methods',
- 'aggregate_properties_by_list',
- 'aggregate_properties_by_regexp',
- 'aggregate_properties',
- 'aggregate',
- 'aggregation_info',
- 'deaggregate'),
- 'OpenAL': ('openal_buffer_create',
- 'openal_buffer_data',
- 'openal_buffer_destroy',
- 'openal_buffer_get',
- 'openal_buffer_loadwav',
- 'openal_context_create',
- 'openal_context_current',
- 'openal_context_destroy',
- 'openal_context_process',
- 'openal_context_suspend',
- 'openal_device_close',
- 'openal_device_open',
- 'openal_listener_get',
- 'openal_listener_set',
- 'openal_source_create',
- 'openal_source_destroy',
- 'openal_source_get',
- 'openal_source_pause',
- 'openal_source_play',
- 'openal_source_rewind',
- 'openal_source_set',
- 'openal_source_stop',
- 'openal_stream'),
- 'OpenSSL': ('openssl_cipher_iv_length',
- 'openssl_csr_export_to_file',
- 'openssl_csr_export',
- 'openssl_csr_get_public_key',
- 'openssl_csr_get_subject',
- 'openssl_csr_new',
- 'openssl_csr_sign',
- 'openssl_decrypt',
- 'openssl_dh_compute_key',
- 'openssl_digest',
- 'openssl_encrypt',
- 'openssl_error_string',
- 'openssl_free_key',
- 'openssl_get_cipher_methods',
- 'openssl_get_md_methods',
- 'openssl_get_privatekey',
- 'openssl_get_publickey',
- 'openssl_open',
- 'openssl_pbkdf2',
- 'openssl_pkcs12_export_to_file',
- 'openssl_pkcs12_export',
- 'openssl_pkcs12_read',
- 'openssl_pkcs7_decrypt',
- 'openssl_pkcs7_encrypt',
- 'openssl_pkcs7_sign',
- 'openssl_pkcs7_verify',
- 'openssl_pkey_export_to_file',
- 'openssl_pkey_export',
- 'openssl_pkey_free',
- 'openssl_pkey_get_details',
- 'openssl_pkey_get_private',
- 'openssl_pkey_get_public',
- 'openssl_pkey_new',
- 'openssl_private_decrypt',
- 'openssl_private_encrypt',
- 'openssl_public_decrypt',
- 'openssl_public_encrypt',
- 'openssl_random_pseudo_bytes',
- 'openssl_seal',
- 'openssl_sign',
- 'openssl_spki_export_challenge',
- 'openssl_spki_export',
- 'openssl_spki_new',
- 'openssl_spki_verify',
- 'openssl_verify',
- 'openssl_x509_check_private_key',
- 'openssl_x509_checkpurpose',
- 'openssl_x509_export_to_file',
- 'openssl_x509_export',
- 'openssl_x509_free',
- 'openssl_x509_parse',
- 'openssl_x509_read'),
- 'Output Control': ('flush',
- 'ob_clean',
- 'ob_end_clean',
- 'ob_end_flush',
- 'ob_flush',
- 'ob_get_clean',
- 'ob_get_contents',
- 'ob_get_flush',
- 'ob_get_length',
- 'ob_get_level',
- 'ob_get_status',
- 'ob_gzhandler',
- 'ob_implicit_flush',
- 'ob_list_handlers',
- 'ob_start',
- 'output_add_rewrite_var',
- 'output_reset_rewrite_vars'),
- 'Ovrimos SQL': ('ovrimos_close',
- 'ovrimos_commit',
- 'ovrimos_connect',
- 'ovrimos_cursor',
- 'ovrimos_exec',
- 'ovrimos_execute',
- 'ovrimos_fetch_into',
- 'ovrimos_fetch_row',
- 'ovrimos_field_len',
- 'ovrimos_field_name',
- 'ovrimos_field_num',
- 'ovrimos_field_type',
- 'ovrimos_free_result',
- 'ovrimos_longreadlen',
- 'ovrimos_num_fields',
- 'ovrimos_num_rows',
- 'ovrimos_prepare',
- 'ovrimos_result_all',
- 'ovrimos_result',
- 'ovrimos_rollback'),
- 'PCNTL': ('pcntl_alarm',
- 'pcntl_errno',
- 'pcntl_exec',
- 'pcntl_fork',
- 'pcntl_get_last_error',
- 'pcntl_getpriority',
- 'pcntl_setpriority',
- 'pcntl_signal_dispatch',
- 'pcntl_signal',
- 'pcntl_sigprocmask',
- 'pcntl_sigtimedwait',
- 'pcntl_sigwaitinfo',
- 'pcntl_strerror',
- 'pcntl_wait',
- 'pcntl_waitpid',
- 'pcntl_wexitstatus',
- 'pcntl_wifexited',
- 'pcntl_wifsignaled',
- 'pcntl_wifstopped',
- 'pcntl_wstopsig',
- 'pcntl_wtermsig'),
- 'PCRE': ('preg_filter',
- 'preg_grep',
- 'preg_last_error',
- 'preg_match_all',
- 'preg_match',
- 'preg_quote',
- 'preg_replace_callback',
- 'preg_replace',
- 'preg_split'),
- 'PDF': ('PDF_activate_item',
- 'PDF_add_annotation',
- 'PDF_add_bookmark',
- 'PDF_add_launchlink',
- 'PDF_add_locallink',
- 'PDF_add_nameddest',
- 'PDF_add_note',
- 'PDF_add_outline',
- 'PDF_add_pdflink',
- 'PDF_add_table_cell',
- 'PDF_add_textflow',
- 'PDF_add_thumbnail',
- 'PDF_add_weblink',
- 'PDF_arc',
- 'PDF_arcn',
- 'PDF_attach_file',
- 'PDF_begin_document',
- 'PDF_begin_font',
- 'PDF_begin_glyph',
- 'PDF_begin_item',
- 'PDF_begin_layer',
- 'PDF_begin_page_ext',
- 'PDF_begin_page',
- 'PDF_begin_pattern',
- 'PDF_begin_template_ext',
- 'PDF_begin_template',
- 'PDF_circle',
- 'PDF_clip',
- 'PDF_close_image',
- 'PDF_close_pdi_page',
- 'PDF_close_pdi',
- 'PDF_close',
- 'PDF_closepath_fill_stroke',
- 'PDF_closepath_stroke',
- 'PDF_closepath',
- 'PDF_concat',
- 'PDF_continue_text',
- 'PDF_create_3dview',
- 'PDF_create_action',
- 'PDF_create_annotation',
- 'PDF_create_bookmark',
- 'PDF_create_field',
- 'PDF_create_fieldgroup',
- 'PDF_create_gstate',
- 'PDF_create_pvf',
- 'PDF_create_textflow',
- 'PDF_curveto',
- 'PDF_define_layer',
- 'PDF_delete_pvf',
- 'PDF_delete_table',
- 'PDF_delete_textflow',
- 'PDF_delete',
- 'PDF_encoding_set_char',
- 'PDF_end_document',
- 'PDF_end_font',
- 'PDF_end_glyph',
- 'PDF_end_item',
- 'PDF_end_layer',
- 'PDF_end_page_ext',
- 'PDF_end_page',
- 'PDF_end_pattern',
- 'PDF_end_template',
- 'PDF_endpath',
- 'PDF_fill_imageblock',
- 'PDF_fill_pdfblock',
- 'PDF_fill_stroke',
- 'PDF_fill_textblock',
- 'PDF_fill',
- 'PDF_findfont',
- 'PDF_fit_image',
- 'PDF_fit_pdi_page',
- 'PDF_fit_table',
- 'PDF_fit_textflow',
- 'PDF_fit_textline',
- 'PDF_get_apiname',
- 'PDF_get_buffer',
- 'PDF_get_errmsg',
- 'PDF_get_errnum',
- 'PDF_get_font',
- 'PDF_get_fontname',
- 'PDF_get_fontsize',
- 'PDF_get_image_height',
- 'PDF_get_image_width',
- 'PDF_get_majorversion',
- 'PDF_get_minorversion',
- 'PDF_get_parameter',
- 'PDF_get_pdi_parameter',
- 'PDF_get_pdi_value',
- 'PDF_get_value',
- 'PDF_info_font',
- 'PDF_info_matchbox',
- 'PDF_info_table',
- 'PDF_info_textflow',
- 'PDF_info_textline',
- 'PDF_initgraphics',
- 'PDF_lineto',
- 'PDF_load_3ddata',
- 'PDF_load_font',
- 'PDF_load_iccprofile',
- 'PDF_load_image',
- 'PDF_makespotcolor',
- 'PDF_moveto',
- 'PDF_new',
- 'PDF_open_ccitt',
- 'PDF_open_file',
- 'PDF_open_gif',
- 'PDF_open_image_file',
- 'PDF_open_image',
- 'PDF_open_jpeg',
- 'PDF_open_memory_image',
- 'PDF_open_pdi_document',
- 'PDF_open_pdi_page',
- 'PDF_open_pdi',
- 'PDF_open_tiff',
- 'PDF_pcos_get_number',
- 'PDF_pcos_get_stream',
- 'PDF_pcos_get_string',
- 'PDF_place_image',
- 'PDF_place_pdi_page',
- 'PDF_process_pdi',
- 'PDF_rect',
- 'PDF_restore',
- 'PDF_resume_page',
- 'PDF_rotate',
- 'PDF_save',
- 'PDF_scale',
- 'PDF_set_border_color',
- 'PDF_set_border_dash',
- 'PDF_set_border_style',
- 'PDF_set_char_spacing',
- 'PDF_set_duration',
- 'PDF_set_gstate',
- 'PDF_set_horiz_scaling',
- 'PDF_set_info_author',
- 'PDF_set_info_creator',
- 'PDF_set_info_keywords',
- 'PDF_set_info_subject',
- 'PDF_set_info_title',
- 'PDF_set_info',
- 'PDF_set_layer_dependency',
- 'PDF_set_leading',
- 'PDF_set_parameter',
- 'PDF_set_text_matrix',
- 'PDF_set_text_pos',
- 'PDF_set_text_rendering',
- 'PDF_set_text_rise',
- 'PDF_set_value',
- 'PDF_set_word_spacing',
- 'PDF_setcolor',
- 'PDF_setdash',
- 'PDF_setdashpattern',
- 'PDF_setflat',
- 'PDF_setfont',
- 'PDF_setgray_fill',
- 'PDF_setgray_stroke',
- 'PDF_setgray',
- 'PDF_setlinecap',
- 'PDF_setlinejoin',
- 'PDF_setlinewidth',
- 'PDF_setmatrix',
- 'PDF_setmiterlimit',
- 'PDF_setpolydash',
- 'PDF_setrgbcolor_fill',
- 'PDF_setrgbcolor_stroke',
- 'PDF_setrgbcolor',
- 'PDF_shading_pattern',
- 'PDF_shading',
- 'PDF_shfill',
- 'PDF_show_boxed',
- 'PDF_show_xy',
- 'PDF_show',
- 'PDF_skew',
- 'PDF_stringwidth',
- 'PDF_stroke',
- 'PDF_suspend_page',
- 'PDF_translate',
- 'PDF_utf16_to_utf8',
- 'PDF_utf32_to_utf16',
- 'PDF_utf8_to_utf16'),
- 'PHP Options/Info': ('assert_options',
- 'assert',
- 'cli_get_process_title',
- 'cli_set_process_title',
- 'dl',
- 'extension_loaded',
- 'gc_collect_cycles',
- 'gc_disable',
- 'gc_enable',
- 'gc_enabled',
- 'get_cfg_var',
- 'get_current_user',
- 'get_defined_constants',
- 'get_extension_funcs',
- 'get_include_path',
- 'get_included_files',
- 'get_loaded_extensions',
- 'get_magic_quotes_gpc',
- 'get_magic_quotes_runtime',
- 'get_required_files',
- 'getenv',
- 'getlastmod',
- 'getmygid',
- 'getmyinode',
- 'getmypid',
- 'getmyuid',
- 'getopt',
- 'getrusage',
- 'ini_alter',
- 'ini_get_all',
- 'ini_get',
- 'ini_restore',
- 'ini_set',
- 'magic_quotes_runtime',
- 'memory_get_peak_usage',
- 'memory_get_usage',
- 'php_ini_loaded_file',
- 'php_ini_scanned_files',
- 'php_logo_guid',
- 'php_sapi_name',
- 'php_uname',
- 'phpcredits',
- 'phpinfo',
- 'phpversion',
- 'putenv',
- 'restore_include_path',
- 'set_include_path',
- 'set_magic_quotes_runtime',
- 'set_time_limit',
- 'sys_get_temp_dir',
- 'version_compare',
- 'zend_logo_guid',
- 'zend_thread_id',
- 'zend_version'),
- 'POSIX': ('posix_access',
- 'posix_ctermid',
- 'posix_errno',
- 'posix_get_last_error',
- 'posix_getcwd',
- 'posix_getegid',
- 'posix_geteuid',
- 'posix_getgid',
- 'posix_getgrgid',
- 'posix_getgrnam',
- 'posix_getgroups',
- 'posix_getlogin',
- 'posix_getpgid',
- 'posix_getpgrp',
- 'posix_getpid',
- 'posix_getppid',
- 'posix_getpwnam',
- 'posix_getpwuid',
- 'posix_getrlimit',
- 'posix_getsid',
- 'posix_getuid',
- 'posix_initgroups',
- 'posix_isatty',
- 'posix_kill',
- 'posix_mkfifo',
- 'posix_mknod',
- 'posix_setegid',
- 'posix_seteuid',
- 'posix_setgid',
- 'posix_setpgid',
- 'posix_setsid',
- 'posix_setuid',
- 'posix_strerror',
- 'posix_times',
- 'posix_ttyname',
- 'posix_uname'),
- 'POSIX Regex': ('ereg_replace',
- 'ereg',
- 'eregi_replace',
- 'eregi',
- 'split',
- 'spliti',
- 'sql_regcase'),
- 'PS': ('ps_add_bookmark',
- 'ps_add_launchlink',
- 'ps_add_locallink',
- 'ps_add_note',
- 'ps_add_pdflink',
- 'ps_add_weblink',
- 'ps_arc',
- 'ps_arcn',
- 'ps_begin_page',
- 'ps_begin_pattern',
- 'ps_begin_template',
- 'ps_circle',
- 'ps_clip',
- 'ps_close_image',
- 'ps_close',
- 'ps_closepath_stroke',
- 'ps_closepath',
- 'ps_continue_text',
- 'ps_curveto',
- 'ps_delete',
- 'ps_end_page',
- 'ps_end_pattern',
- 'ps_end_template',
- 'ps_fill_stroke',
- 'ps_fill',
- 'ps_findfont',
- 'ps_get_buffer',
- 'ps_get_parameter',
- 'ps_get_value',
- 'ps_hyphenate',
- 'ps_include_file',
- 'ps_lineto',
- 'ps_makespotcolor',
- 'ps_moveto',
- 'ps_new',
- 'ps_open_file',
- 'ps_open_image_file',
- 'ps_open_image',
- 'ps_open_memory_image',
- 'ps_place_image',
- 'ps_rect',
- 'ps_restore',
- 'ps_rotate',
- 'ps_save',
- 'ps_scale',
- 'ps_set_border_color',
- 'ps_set_border_dash',
- 'ps_set_border_style',
- 'ps_set_info',
- 'ps_set_parameter',
- 'ps_set_text_pos',
- 'ps_set_value',
- 'ps_setcolor',
- 'ps_setdash',
- 'ps_setflat',
- 'ps_setfont',
- 'ps_setgray',
- 'ps_setlinecap',
- 'ps_setlinejoin',
- 'ps_setlinewidth',
- 'ps_setmiterlimit',
- 'ps_setoverprintmode',
- 'ps_setpolydash',
- 'ps_shading_pattern',
- 'ps_shading',
- 'ps_shfill',
- 'ps_show_boxed',
- 'ps_show_xy2',
- 'ps_show_xy',
- 'ps_show2',
- 'ps_show',
- 'ps_string_geometry',
- 'ps_stringwidth',
- 'ps_stroke',
- 'ps_symbol_name',
- 'ps_symbol_width',
- 'ps_symbol',
- 'ps_translate'),
- 'Paradox': ('px_close',
- 'px_create_fp',
- 'px_date2string',
- 'px_delete_record',
- 'px_delete',
- 'px_get_field',
- 'px_get_info',
- 'px_get_parameter',
- 'px_get_record',
- 'px_get_schema',
- 'px_get_value',
- 'px_insert_record',
- 'px_new',
- 'px_numfields',
- 'px_numrecords',
- 'px_open_fp',
- 'px_put_record',
- 'px_retrieve_record',
- 'px_set_blob_file',
- 'px_set_parameter',
- 'px_set_tablename',
- 'px_set_targetencoding',
- 'px_set_value',
- 'px_timestamp2string',
- 'px_update_record'),
- 'Parsekit': ('parsekit_compile_file',
- 'parsekit_compile_string',
- 'parsekit_func_arginfo'),
- 'Password Hashing': ('password_get_info',
- 'password_hash',
- 'password_needs_rehash',
- 'password_verify'),
- 'PostgreSQL': ('pg_affected_rows',
- 'pg_cancel_query',
- 'pg_client_encoding',
- 'pg_close',
- 'pg_connect',
- 'pg_connection_busy',
- 'pg_connection_reset',
- 'pg_connection_status',
- 'pg_convert',
- 'pg_copy_from',
- 'pg_copy_to',
- 'pg_dbname',
- 'pg_delete',
- 'pg_end_copy',
- 'pg_escape_bytea',
- 'pg_escape_identifier',
- 'pg_escape_literal',
- 'pg_escape_string',
- 'pg_execute',
- 'pg_fetch_all_columns',
- 'pg_fetch_all',
- 'pg_fetch_array',
- 'pg_fetch_assoc',
- 'pg_fetch_object',
- 'pg_fetch_result',
- 'pg_fetch_row',
- 'pg_field_is_null',
- 'pg_field_name',
- 'pg_field_num',
- 'pg_field_prtlen',
- 'pg_field_size',
- 'pg_field_table',
- 'pg_field_type_oid',
- 'pg_field_type',
- 'pg_free_result',
- 'pg_get_notify',
- 'pg_get_pid',
- 'pg_get_result',
- 'pg_host',
- 'pg_insert',
- 'pg_last_error',
- 'pg_last_notice',
- 'pg_last_oid',
- 'pg_lo_close',
- 'pg_lo_create',
- 'pg_lo_export',
- 'pg_lo_import',
- 'pg_lo_open',
- 'pg_lo_read_all',
- 'pg_lo_read',
- 'pg_lo_seek',
- 'pg_lo_tell',
- 'pg_lo_truncate',
- 'pg_lo_unlink',
- 'pg_lo_write',
- 'pg_meta_data',
- 'pg_num_fields',
- 'pg_num_rows',
- 'pg_options',
- 'pg_parameter_status',
- 'pg_pconnect',
- 'pg_ping',
- 'pg_port',
- 'pg_prepare',
- 'pg_put_line',
- 'pg_query_params',
- 'pg_query',
- 'pg_result_error_field',
- 'pg_result_error',
- 'pg_result_seek',
- 'pg_result_status',
- 'pg_select',
- 'pg_send_execute',
- 'pg_send_prepare',
- 'pg_send_query_params',
- 'pg_send_query',
- 'pg_set_client_encoding',
- 'pg_set_error_verbosity',
- 'pg_trace',
- 'pg_transaction_status',
- 'pg_tty',
- 'pg_unescape_bytea',
- 'pg_untrace',
- 'pg_update',
- 'pg_version'),
- 'Printer': ('printer_abort',
- 'printer_close',
- 'printer_create_brush',
- 'printer_create_dc',
- 'printer_create_font',
- 'printer_create_pen',
- 'printer_delete_brush',
- 'printer_delete_dc',
- 'printer_delete_font',
- 'printer_delete_pen',
- 'printer_draw_bmp',
- 'printer_draw_chord',
- 'printer_draw_elipse',
- 'printer_draw_line',
- 'printer_draw_pie',
- 'printer_draw_rectangle',
- 'printer_draw_roundrect',
- 'printer_draw_text',
- 'printer_end_doc',
- 'printer_end_page',
- 'printer_get_option',
- 'printer_list',
- 'printer_logical_fontheight',
- 'printer_open',
- 'printer_select_brush',
- 'printer_select_font',
- 'printer_select_pen',
- 'printer_set_option',
- 'printer_start_doc',
- 'printer_start_page',
- 'printer_write'),
- 'Proctitle': ('setproctitle', 'setthreadtitle'),
- 'Program execution': ('escapeshellarg',
- 'escapeshellcmd',
- 'exec',
- 'passthru',
- 'proc_close',
- 'proc_get_status',
- 'proc_nice',
- 'proc_open',
- 'proc_terminate',
- 'shell_exec',
- 'system'),
- 'Pspell': ('pspell_add_to_personal',
- 'pspell_add_to_session',
- 'pspell_check',
- 'pspell_clear_session',
- 'pspell_config_create',
- 'pspell_config_data_dir',
- 'pspell_config_dict_dir',
- 'pspell_config_ignore',
- 'pspell_config_mode',
- 'pspell_config_personal',
- 'pspell_config_repl',
- 'pspell_config_runtogether',
- 'pspell_config_save_repl',
- 'pspell_new_config',
- 'pspell_new_personal',
- 'pspell_new',
- 'pspell_save_wordlist',
- 'pspell_store_replacement',
- 'pspell_suggest'),
- 'RPM Reader': ('rpm_close',
- 'rpm_get_tag',
- 'rpm_is_valid',
- 'rpm_open',
- 'rpm_version'),
- 'RRD': ('rrd_create',
- 'rrd_error',
- 'rrd_fetch',
- 'rrd_first',
- 'rrd_graph',
- 'rrd_info',
- 'rrd_last',
- 'rrd_lastupdate',
- 'rrd_restore',
- 'rrd_tune',
- 'rrd_update',
- 'rrd_version',
- 'rrd_xport',
- 'rrdc_disconnect'),
- 'Radius': ('radius_acct_open',
- 'radius_add_server',
- 'radius_auth_open',
- 'radius_close',
- 'radius_config',
- 'radius_create_request',
- 'radius_cvt_addr',
- 'radius_cvt_int',
- 'radius_cvt_string',
- 'radius_demangle_mppe_key',
- 'radius_demangle',
- 'radius_get_attr',
- 'radius_get_tagged_attr_data',
- 'radius_get_tagged_attr_tag',
- 'radius_get_vendor_attr',
- 'radius_put_addr',
- 'radius_put_attr',
- 'radius_put_int',
- 'radius_put_string',
- 'radius_put_vendor_addr',
- 'radius_put_vendor_attr',
- 'radius_put_vendor_int',
- 'radius_put_vendor_string',
- 'radius_request_authenticator',
- 'radius_salt_encrypt_attr',
- 'radius_send_request',
- 'radius_server_secret',
- 'radius_strerror'),
- 'Rar': ('rar_wrapper_cache_stats',),
- 'Readline': ('readline_add_history',
- 'readline_callback_handler_install',
- 'readline_callback_handler_remove',
- 'readline_callback_read_char',
- 'readline_clear_history',
- 'readline_completion_function',
- 'readline_info',
- 'readline_list_history',
- 'readline_on_new_line',
- 'readline_read_history',
- 'readline_redisplay',
- 'readline_write_history',
- 'readline'),
- 'Recode': ('recode_file', 'recode_string', 'recode'),
- 'SNMP': ('snmp_get_quick_print',
- 'snmp_get_valueretrieval',
- 'snmp_read_mib',
- 'snmp_set_enum_print',
- 'snmp_set_oid_numeric_print',
- 'snmp_set_oid_output_format',
- 'snmp_set_quick_print',
- 'snmp_set_valueretrieval',
- 'snmp2_get',
- 'snmp2_getnext',
- 'snmp2_real_walk',
- 'snmp2_set',
- 'snmp2_walk',
- 'snmp3_get',
- 'snmp3_getnext',
- 'snmp3_real_walk',
- 'snmp3_set',
- 'snmp3_walk',
- 'snmpget',
- 'snmpgetnext',
- 'snmprealwalk',
- 'snmpset',
- 'snmpwalk',
- 'snmpwalkoid'),
- 'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
- 'SPL': ('class_implements',
- 'class_parents',
- 'class_uses',
- 'iterator_apply',
- 'iterator_count',
- 'iterator_to_array',
- 'spl_autoload_call',
- 'spl_autoload_extensions',
- 'spl_autoload_functions',
- 'spl_autoload_register',
- 'spl_autoload_unregister',
- 'spl_autoload',
- 'spl_classes',
- 'spl_object_hash'),
- 'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'),
- 'SQLSRV': ('sqlsrv_begin_transaction',
- 'sqlsrv_cancel',
- 'sqlsrv_client_info',
- 'sqlsrv_close',
- 'sqlsrv_commit',
- 'sqlsrv_configure',
- 'sqlsrv_connect',
- 'sqlsrv_errors',
- 'sqlsrv_execute',
- 'sqlsrv_fetch_array',
- 'sqlsrv_fetch_object',
- 'sqlsrv_fetch',
- 'sqlsrv_field_metadata',
- 'sqlsrv_free_stmt',
- 'sqlsrv_get_config',
- 'sqlsrv_get_field',
- 'sqlsrv_has_rows',
- 'sqlsrv_next_result',
- 'sqlsrv_num_fields',
- 'sqlsrv_num_rows',
- 'sqlsrv_prepare',
- 'sqlsrv_query',
- 'sqlsrv_rollback',
- 'sqlsrv_rows_affected',
- 'sqlsrv_send_stream_data',
- 'sqlsrv_server_info'),
- 'SQLite': ('sqlite_array_query',
- 'sqlite_busy_timeout',
- 'sqlite_changes',
- 'sqlite_close',
- 'sqlite_column',
- 'sqlite_create_aggregate',
- 'sqlite_create_function',
- 'sqlite_current',
- 'sqlite_error_string',
- 'sqlite_escape_string',
- 'sqlite_exec',
- 'sqlite_factory',
- 'sqlite_fetch_all',
- 'sqlite_fetch_array',
- 'sqlite_fetch_column_types',
- 'sqlite_fetch_object',
- 'sqlite_fetch_single',
- 'sqlite_fetch_string',
- 'sqlite_field_name',
- 'sqlite_has_more',
- 'sqlite_has_prev',
- 'sqlite_key',
- 'sqlite_last_error',
- 'sqlite_last_insert_rowid',
- 'sqlite_libencoding',
- 'sqlite_libversion',
- 'sqlite_next',
- 'sqlite_num_fields',
- 'sqlite_num_rows',
- 'sqlite_open',
- 'sqlite_popen',
- 'sqlite_prev',
- 'sqlite_query',
- 'sqlite_rewind',
- 'sqlite_seek',
- 'sqlite_single_query',
- 'sqlite_udf_decode_binary',
- 'sqlite_udf_encode_binary',
- 'sqlite_unbuffered_query',
- 'sqlite_valid'),
- 'SSH2': ('ssh2_auth_agent',
- 'ssh2_auth_hostbased_file',
- 'ssh2_auth_none',
- 'ssh2_auth_password',
- 'ssh2_auth_pubkey_file',
- 'ssh2_connect',
- 'ssh2_exec',
- 'ssh2_fetch_stream',
- 'ssh2_fingerprint',
- 'ssh2_methods_negotiated',
- 'ssh2_publickey_add',
- 'ssh2_publickey_init',
- 'ssh2_publickey_list',
- 'ssh2_publickey_remove',
- 'ssh2_scp_recv',
- 'ssh2_scp_send',
- 'ssh2_sftp_chmod',
- 'ssh2_sftp_lstat',
- 'ssh2_sftp_mkdir',
- 'ssh2_sftp_readlink',
- 'ssh2_sftp_realpath',
- 'ssh2_sftp_rename',
- 'ssh2_sftp_rmdir',
- 'ssh2_sftp_stat',
- 'ssh2_sftp_symlink',
- 'ssh2_sftp_unlink',
- 'ssh2_sftp',
- 'ssh2_shell',
- 'ssh2_tunnel'),
- 'SVN': ('svn_add',
- 'svn_auth_get_parameter',
- 'svn_auth_set_parameter',
- 'svn_blame',
- 'svn_cat',
- 'svn_checkout',
- 'svn_cleanup',
- 'svn_client_version',
- 'svn_commit',
- 'svn_delete',
- 'svn_diff',
- 'svn_export',
- 'svn_fs_abort_txn',
- 'svn_fs_apply_text',
- 'svn_fs_begin_txn2',
- 'svn_fs_change_node_prop',
- 'svn_fs_check_path',
- 'svn_fs_contents_changed',
- 'svn_fs_copy',
- 'svn_fs_delete',
- 'svn_fs_dir_entries',
- 'svn_fs_file_contents',
- 'svn_fs_file_length',
- 'svn_fs_is_dir',
- 'svn_fs_is_file',
- 'svn_fs_make_dir',
- 'svn_fs_make_file',
- 'svn_fs_node_created_rev',
- 'svn_fs_node_prop',
- 'svn_fs_props_changed',
- 'svn_fs_revision_prop',
- 'svn_fs_revision_root',
- 'svn_fs_txn_root',
- 'svn_fs_youngest_rev',
- 'svn_import',
- 'svn_log',
- 'svn_ls',
- 'svn_mkdir',
- 'svn_repos_create',
- 'svn_repos_fs_begin_txn_for_commit',
- 'svn_repos_fs_commit_txn',
- 'svn_repos_fs',
- 'svn_repos_hotcopy',
- 'svn_repos_open',
- 'svn_repos_recover',
- 'svn_revert',
- 'svn_status',
- 'svn_update'),
- 'SWF': ('swf_actiongeturl',
- 'swf_actiongotoframe',
- 'swf_actiongotolabel',
- 'swf_actionnextframe',
- 'swf_actionplay',
- 'swf_actionprevframe',
- 'swf_actionsettarget',
- 'swf_actionstop',
- 'swf_actiontogglequality',
- 'swf_actionwaitforframe',
- 'swf_addbuttonrecord',
- 'swf_addcolor',
- 'swf_closefile',
- 'swf_definebitmap',
- 'swf_definefont',
- 'swf_defineline',
- 'swf_definepoly',
- 'swf_definerect',
- 'swf_definetext',
- 'swf_endbutton',
- 'swf_enddoaction',
- 'swf_endshape',
- 'swf_endsymbol',
- 'swf_fontsize',
- 'swf_fontslant',
- 'swf_fonttracking',
- 'swf_getbitmapinfo',
- 'swf_getfontinfo',
- 'swf_getframe',
- 'swf_labelframe',
- 'swf_lookat',
- 'swf_modifyobject',
- 'swf_mulcolor',
- 'swf_nextid',
- 'swf_oncondition',
- 'swf_openfile',
- 'swf_ortho2',
- 'swf_ortho',
- 'swf_perspective',
- 'swf_placeobject',
- 'swf_polarview',
- 'swf_popmatrix',
- 'swf_posround',
- 'swf_pushmatrix',
- 'swf_removeobject',
- 'swf_rotate',
- 'swf_scale',
- 'swf_setfont',
- 'swf_setframe',
- 'swf_shapearc',
- 'swf_shapecurveto3',
- 'swf_shapecurveto',
- 'swf_shapefillbitmapclip',
- 'swf_shapefillbitmaptile',
- 'swf_shapefilloff',
- 'swf_shapefillsolid',
- 'swf_shapelinesolid',
- 'swf_shapelineto',
- 'swf_shapemoveto',
- 'swf_showframe',
- 'swf_startbutton',
- 'swf_startdoaction',
- 'swf_startshape',
- 'swf_startsymbol',
- 'swf_textwidth',
- 'swf_translate',
- 'swf_viewport'),
- 'Semaphore': ('ftok',
- 'msg_get_queue',
- 'msg_queue_exists',
- 'msg_receive',
- 'msg_remove_queue',
- 'msg_send',
- 'msg_set_queue',
- 'msg_stat_queue',
- 'sem_acquire',
- 'sem_get',
- 'sem_release',
- 'sem_remove',
- 'shm_attach',
- 'shm_detach',
- 'shm_get_var',
- 'shm_has_var',
- 'shm_put_var',
- 'shm_remove_var',
- 'shm_remove'),
- 'Session': ('session_cache_expire',
- 'session_cache_limiter',
- 'session_commit',
- 'session_decode',
- 'session_destroy',
- 'session_encode',
- 'session_get_cookie_params',
- 'session_id',
- 'session_is_registered',
- 'session_module_name',
- 'session_name',
- 'session_regenerate_id',
- 'session_register_shutdown',
- 'session_register',
- 'session_save_path',
- 'session_set_cookie_params',
- 'session_set_save_handler',
- 'session_start',
- 'session_status',
- 'session_unregister',
- 'session_unset',
- 'session_write_close'),
- 'Session PgSQL': ('session_pgsql_add_error',
- 'session_pgsql_get_error',
- 'session_pgsql_get_field',
- 'session_pgsql_reset',
- 'session_pgsql_set_field',
- 'session_pgsql_status'),
- 'Shared Memory': ('shmop_close',
- 'shmop_delete',
- 'shmop_open',
- 'shmop_read',
- 'shmop_size',
- 'shmop_write'),
- 'SimpleXML': ('simplexml_import_dom',
- 'simplexml_load_file',
- 'simplexml_load_string'),
- 'Socket': ('socket_accept',
- 'socket_bind',
- 'socket_clear_error',
- 'socket_close',
- 'socket_cmsg_space',
- 'socket_connect',
- 'socket_create_listen',
- 'socket_create_pair',
- 'socket_create',
- 'socket_get_option',
- 'socket_getpeername',
- 'socket_getsockname',
- 'socket_import_stream',
- 'socket_last_error',
- 'socket_listen',
- 'socket_read',
- 'socket_recv',
- 'socket_recvfrom',
- 'socket_recvmsg',
- 'socket_select',
- 'socket_send',
- 'socket_sendmsg',
- 'socket_sendto',
- 'socket_set_block',
- 'socket_set_nonblock',
- 'socket_set_option',
- 'socket_shutdown',
- 'socket_strerror',
- 'socket_write'),
- 'Solr': ('solr_get_version',),
- 'Statistic': ('stats_absolute_deviation',
- 'stats_cdf_beta',
- 'stats_cdf_binomial',
- 'stats_cdf_cauchy',
- 'stats_cdf_chisquare',
- 'stats_cdf_exponential',
- 'stats_cdf_f',
- 'stats_cdf_gamma',
- 'stats_cdf_laplace',
- 'stats_cdf_logistic',
- 'stats_cdf_negative_binomial',
- 'stats_cdf_noncentral_chisquare',
- 'stats_cdf_noncentral_f',
- 'stats_cdf_poisson',
- 'stats_cdf_t',
- 'stats_cdf_uniform',
- 'stats_cdf_weibull',
- 'stats_covariance',
- 'stats_den_uniform',
- 'stats_dens_beta',
- 'stats_dens_cauchy',
- 'stats_dens_chisquare',
- 'stats_dens_exponential',
- 'stats_dens_f',
- 'stats_dens_gamma',
- 'stats_dens_laplace',
- 'stats_dens_logistic',
- 'stats_dens_negative_binomial',
- 'stats_dens_normal',
- 'stats_dens_pmf_binomial',
- 'stats_dens_pmf_hypergeometric',
- 'stats_dens_pmf_poisson',
- 'stats_dens_t',
- 'stats_dens_weibull',
- 'stats_harmonic_mean',
- 'stats_kurtosis',
- 'stats_rand_gen_beta',
- 'stats_rand_gen_chisquare',
- 'stats_rand_gen_exponential',
- 'stats_rand_gen_f',
- 'stats_rand_gen_funiform',
- 'stats_rand_gen_gamma',
- 'stats_rand_gen_ibinomial_negative',
- 'stats_rand_gen_ibinomial',
- 'stats_rand_gen_int',
- 'stats_rand_gen_ipoisson',
- 'stats_rand_gen_iuniform',
- 'stats_rand_gen_noncenral_chisquare',
- 'stats_rand_gen_noncentral_f',
- 'stats_rand_gen_noncentral_t',
- 'stats_rand_gen_normal',
- 'stats_rand_gen_t',
- 'stats_rand_get_seeds',
- 'stats_rand_phrase_to_seeds',
- 'stats_rand_ranf',
- 'stats_rand_setall',
- 'stats_skew',
- 'stats_standard_deviation',
- 'stats_stat_binomial_coef',
- 'stats_stat_correlation',
- 'stats_stat_gennch',
- 'stats_stat_independent_t',
- 'stats_stat_innerproduct',
- 'stats_stat_noncentral_t',
- 'stats_stat_paired_t',
- 'stats_stat_percentile',
- 'stats_stat_powersum',
- 'stats_variance'),
- 'Stomp': ('stomp_connect_error', 'stomp_version'),
- 'Stream': ('set_socket_blocking',
- 'stream_bucket_append',
- 'stream_bucket_make_writeable',
- 'stream_bucket_new',
- 'stream_bucket_prepend',
- 'stream_context_create',
- 'stream_context_get_default',
- 'stream_context_get_options',
- 'stream_context_get_params',
- 'stream_context_set_default',
- 'stream_context_set_option',
- 'stream_context_set_params',
- 'stream_copy_to_stream',
- 'stream_encoding',
- 'stream_filter_append',
- 'stream_filter_prepend',
- 'stream_filter_register',
- 'stream_filter_remove',
- 'stream_get_contents',
- 'stream_get_filters',
- 'stream_get_line',
- 'stream_get_meta_data',
- 'stream_get_transports',
- 'stream_get_wrappers',
- 'stream_is_local',
- 'stream_notification_callback',
- 'stream_register_wrapper',
- 'stream_resolve_include_path',
- 'stream_select',
- 'stream_set_blocking',
- 'stream_set_chunk_size',
- 'stream_set_read_buffer',
- 'stream_set_timeout',
- 'stream_set_write_buffer',
- 'stream_socket_accept',
- 'stream_socket_client',
- 'stream_socket_enable_crypto',
- 'stream_socket_get_name',
- 'stream_socket_pair',
- 'stream_socket_recvfrom',
- 'stream_socket_sendto',
- 'stream_socket_server',
- 'stream_socket_shutdown',
- 'stream_supports_lock',
- 'stream_wrapper_register',
- 'stream_wrapper_restore',
- 'stream_wrapper_unregister'),
- 'String': ('addcslashes',
- 'addslashes',
- 'bin2hex',
- 'chop',
- 'chr',
- 'chunk_split',
- 'convert_cyr_string',
- 'convert_uudecode',
- 'convert_uuencode',
- 'count_chars',
- 'crc32',
- 'crypt',
- 'echo',
- 'explode',
- 'fprintf',
- 'get_html_translation_table',
- 'hebrev',
- 'hebrevc',
- 'hex2bin',
- 'html_entity_decode',
- 'htmlentities',
- 'htmlspecialchars_decode',
- 'htmlspecialchars',
- 'implode',
- 'join',
- 'lcfirst',
- 'levenshtein',
- 'localeconv',
- 'ltrim',
- 'md5_file',
- 'md5',
- 'metaphone',
- 'money_format',
- 'nl_langinfo',
- 'nl2br',
- 'number_format',
- 'ord',
- 'parse_str',
- 'print',
- 'printf',
- 'quoted_printable_decode',
- 'quoted_printable_encode',
- 'quotemeta',
- 'rtrim',
- 'setlocale',
- 'sha1_file',
- 'sha1',
- 'similar_text',
- 'soundex',
- 'sprintf',
- 'sscanf',
- 'str_getcsv',
- 'str_ireplace',
- 'str_pad',
- 'str_repeat',
- 'str_replace',
- 'str_rot13',
- 'str_shuffle',
- 'str_split',
- 'str_word_count',
- 'strcasecmp',
- 'strchr',
- 'strcmp',
- 'strcoll',
- 'strcspn',
- 'strip_tags',
- 'stripcslashes',
- 'stripos',
- 'stripslashes',
- 'stristr',
- 'strlen',
- 'strnatcasecmp',
- 'strnatcmp',
- 'strncasecmp',
- 'strncmp',
- 'strpbrk',
- 'strpos',
- 'strrchr',
- 'strrev',
- 'strripos',
- 'strrpos',
- 'strspn',
- 'strstr',
- 'strtok',
- 'strtolower',
- 'strtoupper',
- 'strtr',
- 'substr_compare',
- 'substr_count',
- 'substr_replace',
- 'substr',
- 'trim',
- 'ucfirst',
- 'ucwords',
- 'vfprintf',
- 'vprintf',
- 'vsprintf',
- 'wordwrap'),
- 'Sybase': ('sybase_affected_rows',
- 'sybase_close',
- 'sybase_connect',
- 'sybase_data_seek',
- 'sybase_deadlock_retry_count',
- 'sybase_fetch_array',
- 'sybase_fetch_assoc',
- 'sybase_fetch_field',
- 'sybase_fetch_object',
- 'sybase_fetch_row',
- 'sybase_field_seek',
- 'sybase_free_result',
- 'sybase_get_last_message',
- 'sybase_min_client_severity',
- 'sybase_min_error_severity',
- 'sybase_min_message_severity',
- 'sybase_min_server_severity',
- 'sybase_num_fields',
- 'sybase_num_rows',
- 'sybase_pconnect',
- 'sybase_query',
- 'sybase_result',
- 'sybase_select_db',
- 'sybase_set_message_handler',
- 'sybase_unbuffered_query'),
- 'TCP': ('tcpwrap_check',),
- 'Taint': ('is_tainted', 'taint', 'untaint'),
- 'Tidy': ('ob_tidyhandler',
- 'tidy_access_count',
- 'tidy_config_count',
- 'tidy_error_count',
- 'tidy_get_output',
- 'tidy_load_config',
- 'tidy_reset_config',
- 'tidy_save_config',
- 'tidy_set_encoding',
- 'tidy_setopt',
- 'tidy_warning_count'),
- 'Tokenizer': ('token_get_all', 'token_name'),
- 'Trader': ('trader_acos',
- 'trader_ad',
- 'trader_add',
- 'trader_adosc',
- 'trader_adx',
- 'trader_adxr',
- 'trader_apo',
- 'trader_aroon',
- 'trader_aroonosc',
- 'trader_asin',
- 'trader_atan',
- 'trader_atr',
- 'trader_avgprice',
- 'trader_bbands',
- 'trader_beta',
- 'trader_bop',
- 'trader_cci',
- 'trader_cdl2crows',
- 'trader_cdl3blackcrows',
- 'trader_cdl3inside',
- 'trader_cdl3linestrike',
- 'trader_cdl3outside',
- 'trader_cdl3starsinsouth',
- 'trader_cdl3whitesoldiers',
- 'trader_cdlabandonedbaby',
- 'trader_cdladvanceblock',
- 'trader_cdlbelthold',
- 'trader_cdlbreakaway',
- 'trader_cdlclosingmarubozu',
- 'trader_cdlconcealbabyswall',
- 'trader_cdlcounterattack',
- 'trader_cdldarkcloudcover',
- 'trader_cdldoji',
- 'trader_cdldojistar',
- 'trader_cdldragonflydoji',
- 'trader_cdlengulfing',
- 'trader_cdleveningdojistar',
- 'trader_cdleveningstar',
- 'trader_cdlgapsidesidewhite',
- 'trader_cdlgravestonedoji',
- 'trader_cdlhammer',
- 'trader_cdlhangingman',
- 'trader_cdlharami',
- 'trader_cdlharamicross',
- 'trader_cdlhighwave',
- 'trader_cdlhikkake',
- 'trader_cdlhikkakemod',
- 'trader_cdlhomingpigeon',
- 'trader_cdlidentical3crows',
- 'trader_cdlinneck',
- 'trader_cdlinvertedhammer',
- 'trader_cdlkicking',
- 'trader_cdlkickingbylength',
- 'trader_cdlladderbottom',
- 'trader_cdllongleggeddoji',
- 'trader_cdllongline',
- 'trader_cdlmarubozu',
- 'trader_cdlmatchinglow',
- 'trader_cdlmathold',
- 'trader_cdlmorningdojistar',
- 'trader_cdlmorningstar',
- 'trader_cdlonneck',
- 'trader_cdlpiercing',
- 'trader_cdlrickshawman',
- 'trader_cdlrisefall3methods',
- 'trader_cdlseparatinglines',
- 'trader_cdlshootingstar',
- 'trader_cdlshortline',
- 'trader_cdlspinningtop',
- 'trader_cdlstalledpattern',
- 'trader_cdlsticksandwich',
- 'trader_cdltakuri',
- 'trader_cdltasukigap',
- 'trader_cdlthrusting',
- 'trader_cdltristar',
- 'trader_cdlunique3river',
- 'trader_cdlupsidegap2crows',
- 'trader_cdlxsidegap3methods',
- 'trader_ceil',
- 'trader_cmo',
- 'trader_correl',
- 'trader_cos',
- 'trader_cosh',
- 'trader_dema',
- 'trader_div',
- 'trader_dx',
- 'trader_ema',
- 'trader_errno',
- 'trader_exp',
- 'trader_floor',
- 'trader_get_compat',
- 'trader_get_unstable_period',
- 'trader_ht_dcperiod',
- 'trader_ht_dcphase',
- 'trader_ht_phasor',
- 'trader_ht_sine',
- 'trader_ht_trendline',
- 'trader_ht_trendmode',
- 'trader_kama',
- 'trader_linearreg_angle',
- 'trader_linearreg_intercept',
- 'trader_linearreg_slope',
- 'trader_linearreg',
- 'trader_ln',
- 'trader_log10',
- 'trader_ma',
- 'trader_macd',
- 'trader_macdext',
- 'trader_macdfix',
- 'trader_mama',
- 'trader_mavp',
- 'trader_max',
- 'trader_maxindex',
- 'trader_medprice',
- 'trader_mfi',
- 'trader_midpoint',
- 'trader_midprice',
- 'trader_min',
- 'trader_minindex',
- 'trader_minmax',
- 'trader_minmaxindex',
- 'trader_minus_di',
- 'trader_minus_dm',
- 'trader_mom',
- 'trader_mult',
- 'trader_natr',
- 'trader_obv',
- 'trader_plus_di',
- 'trader_plus_dm',
- 'trader_ppo',
- 'trader_roc',
- 'trader_rocp',
- 'trader_rocr100',
- 'trader_rocr',
- 'trader_rsi',
- 'trader_sar',
- 'trader_sarext',
- 'trader_set_compat',
- 'trader_set_unstable_period',
- 'trader_sin',
- 'trader_sinh',
- 'trader_sma',
- 'trader_sqrt',
- 'trader_stddev',
- 'trader_stoch',
- 'trader_stochf',
- 'trader_stochrsi',
- 'trader_sub',
- 'trader_sum',
- 'trader_t3',
- 'trader_tan',
- 'trader_tanh',
- 'trader_tema',
- 'trader_trange',
- 'trader_trima',
- 'trader_trix',
- 'trader_tsf',
- 'trader_typprice',
- 'trader_ultosc',
- 'trader_var',
- 'trader_wclprice',
- 'trader_willr',
- 'trader_wma'),
- 'URL': ('base64_decode',
- 'base64_encode',
- 'get_headers',
- 'get_meta_tags',
- 'http_build_query',
- 'parse_url',
- 'rawurldecode',
- 'rawurlencode',
- 'urldecode',
- 'urlencode'),
- 'Uopz': ('uopz_backup',
- 'uopz_compose',
- 'uopz_copy',
- 'uopz_delete',
- 'uopz_extend',
- 'uopz_flags',
- 'uopz_function',
- 'uopz_implement',
- 'uopz_overload',
- 'uopz_redefine',
- 'uopz_rename',
- 'uopz_restore',
- 'uopz_undefine'),
- 'Variable handling': ('boolval',
- 'debug_zval_dump',
- 'doubleval',
- 'empty',
- 'floatval',
- 'get_defined_vars',
- 'get_resource_type',
- 'gettype',
- 'import_request_variables',
- 'intval',
- 'is_array',
- 'is_bool',
- 'is_callable',
- 'is_double',
- 'is_float',
- 'is_int',
- 'is_integer',
- 'is_long',
- 'is_null',
- 'is_numeric',
- 'is_object',
- 'is_real',
- 'is_resource',
- 'is_scalar',
- 'is_string',
- 'isset',
- 'print_r',
- 'serialize',
- 'settype',
- 'strval',
- 'unserialize',
- 'unset',
- 'var_dump',
- 'var_export'),
- 'W32api': ('w32api_deftype',
- 'w32api_init_dtype',
- 'w32api_invoke_function',
- 'w32api_register_function',
- 'w32api_set_call_method'),
- 'WDDX': ('wddx_add_vars',
- 'wddx_deserialize',
- 'wddx_packet_end',
- 'wddx_packet_start',
- 'wddx_serialize_value',
- 'wddx_serialize_vars'),
- 'WinCache': ('wincache_fcache_fileinfo',
- 'wincache_fcache_meminfo',
- 'wincache_lock',
- 'wincache_ocache_fileinfo',
- 'wincache_ocache_meminfo',
- 'wincache_refresh_if_changed',
- 'wincache_rplist_fileinfo',
- 'wincache_rplist_meminfo',
- 'wincache_scache_info',
- 'wincache_scache_meminfo',
- 'wincache_ucache_add',
- 'wincache_ucache_cas',
- 'wincache_ucache_clear',
- 'wincache_ucache_dec',
- 'wincache_ucache_delete',
- 'wincache_ucache_exists',
- 'wincache_ucache_get',
- 'wincache_ucache_inc',
- 'wincache_ucache_info',
- 'wincache_ucache_meminfo',
- 'wincache_ucache_set',
- 'wincache_unlock'),
- 'XML Parser': ('utf8_decode',
- 'utf8_encode',
- 'xml_error_string',
- 'xml_get_current_byte_index',
- 'xml_get_current_column_number',
- 'xml_get_current_line_number',
- 'xml_get_error_code',
- 'xml_parse_into_struct',
- 'xml_parse',
- 'xml_parser_create_ns',
- 'xml_parser_create',
- 'xml_parser_free',
- 'xml_parser_get_option',
- 'xml_parser_set_option',
- 'xml_set_character_data_handler',
- 'xml_set_default_handler',
- 'xml_set_element_handler',
- 'xml_set_end_namespace_decl_handler',
- 'xml_set_external_entity_ref_handler',
- 'xml_set_notation_decl_handler',
- 'xml_set_object',
- 'xml_set_processing_instruction_handler',
- 'xml_set_start_namespace_decl_handler',
- 'xml_set_unparsed_entity_decl_handler'),
- 'XML-RPC': ('xmlrpc_decode_request',
- 'xmlrpc_decode',
- 'xmlrpc_encode_request',
- 'xmlrpc_encode',
- 'xmlrpc_get_type',
- 'xmlrpc_is_fault',
- 'xmlrpc_parse_method_descriptions',
- 'xmlrpc_server_add_introspection_data',
- 'xmlrpc_server_call_method',
- 'xmlrpc_server_create',
- 'xmlrpc_server_destroy',
- 'xmlrpc_server_register_introspection_callback',
- 'xmlrpc_server_register_method',
- 'xmlrpc_set_type'),
- 'XSLT (PHP 4)': ('xslt_backend_info',
- 'xslt_backend_name',
- 'xslt_backend_version',
- 'xslt_create',
- 'xslt_errno',
- 'xslt_error',
- 'xslt_free',
- 'xslt_getopt',
- 'xslt_process',
- 'xslt_set_base',
- 'xslt_set_encoding',
- 'xslt_set_error_handler',
- 'xslt_set_log',
- 'xslt_set_object',
- 'xslt_set_sax_handler',
- 'xslt_set_sax_handlers',
- 'xslt_set_scheme_handler',
- 'xslt_set_scheme_handlers',
- 'xslt_setopt'),
- 'Xhprof': ('xhprof_disable',
- 'xhprof_enable',
- 'xhprof_sample_disable',
- 'xhprof_sample_enable'),
- 'YAZ': ('yaz_addinfo',
- 'yaz_ccl_conf',
- 'yaz_ccl_parse',
- 'yaz_close',
- 'yaz_connect',
- 'yaz_database',
- 'yaz_element',
- 'yaz_errno',
- 'yaz_error',
- 'yaz_es_result',
- 'yaz_es',
- 'yaz_get_option',
- 'yaz_hits',
- 'yaz_itemorder',
- 'yaz_present',
- 'yaz_range',
- 'yaz_record',
- 'yaz_scan_result',
- 'yaz_scan',
- 'yaz_schema',
- 'yaz_search',
- 'yaz_set_option',
- 'yaz_sort',
- 'yaz_syntax',
- 'yaz_wait'),
- 'YP/NIS': ('yp_all',
- 'yp_cat',
- 'yp_err_string',
- 'yp_errno',
- 'yp_first',
- 'yp_get_default_domain',
- 'yp_master',
- 'yp_match',
- 'yp_next',
- 'yp_order'),
- 'Yaml': ('yaml_emit_file',
- 'yaml_emit',
- 'yaml_parse_file',
- 'yaml_parse_url',
- 'yaml_parse'),
- 'Zip': ('zip_close',
- 'zip_entry_close',
- 'zip_entry_compressedsize',
- 'zip_entry_compressionmethod',
- 'zip_entry_filesize',
- 'zip_entry_name',
- 'zip_entry_open',
- 'zip_entry_read',
- 'zip_open',
- 'zip_read'),
- 'Zlib': ('gzclose',
- 'gzcompress',
- 'gzdecode',
- 'gzdeflate',
- 'gzencode',
- 'gzeof',
- 'gzfile',
- 'gzgetc',
- 'gzgets',
- 'gzgetss',
- 'gzinflate',
- 'gzopen',
- 'gzpassthru',
- 'gzputs',
- 'gzread',
- 'gzrewind',
- 'gzseek',
- 'gztell',
- 'gzuncompress',
- 'gzwrite',
- 'readgzfile',
- 'zlib_decode',
- 'zlib_encode',
- 'zlib_get_coding_type'),
- 'bcompiler': ('bcompiler_load_exe',
- 'bcompiler_load',
- 'bcompiler_parse_class',
- 'bcompiler_read',
- 'bcompiler_write_class',
- 'bcompiler_write_constant',
- 'bcompiler_write_exe_footer',
- 'bcompiler_write_file',
- 'bcompiler_write_footer',
- 'bcompiler_write_function',
- 'bcompiler_write_functions_from_file',
- 'bcompiler_write_header',
- 'bcompiler_write_included_filename'),
- 'cURL': ('curl_close',
- 'curl_copy_handle',
- 'curl_errno',
- 'curl_error',
- 'curl_escape',
- 'curl_exec',
- 'curl_file_create',
- 'curl_getinfo',
- 'curl_init',
- 'curl_multi_add_handle',
- 'curl_multi_close',
- 'curl_multi_exec',
- 'curl_multi_getcontent',
- 'curl_multi_info_read',
- 'curl_multi_init',
- 'curl_multi_remove_handle',
- 'curl_multi_select',
- 'curl_multi_setopt',
- 'curl_multi_strerror',
- 'curl_pause',
- 'curl_reset',
- 'curl_setopt_array',
- 'curl_setopt',
- 'curl_share_close',
- 'curl_share_init',
- 'curl_share_setopt',
- 'curl_strerror',
- 'curl_unescape',
- 'curl_version'),
- 'chdb': ('chdb_create',),
- 'dBase': ('dbase_add_record',
- 'dbase_close',
- 'dbase_create',
- 'dbase_delete_record',
- 'dbase_get_header_info',
- 'dbase_get_record_with_names',
- 'dbase_get_record',
- 'dbase_numfields',
- 'dbase_numrecords',
- 'dbase_open',
- 'dbase_pack',
- 'dbase_replace_record'),
- 'dbx': ('dbx_close',
- 'dbx_compare',
- 'dbx_connect',
- 'dbx_error',
- 'dbx_escape_string',
- 'dbx_fetch_row',
- 'dbx_query',
- 'dbx_sort'),
- 'filePro': ('filepro_fieldcount',
- 'filepro_fieldname',
- 'filepro_fieldtype',
- 'filepro_fieldwidth',
- 'filepro_retrieve',
- 'filepro_rowcount',
- 'filepro'),
- 'iconv': ('iconv_get_encoding',
- 'iconv_mime_decode_headers',
- 'iconv_mime_decode',
- 'iconv_mime_encode',
- 'iconv_set_encoding',
- 'iconv_strlen',
- 'iconv_strpos',
- 'iconv_strrpos',
- 'iconv_substr',
- 'iconv',
- 'ob_iconv_handler'),
- 'inclued': ('inclued_get_data',),
- 'intl': ('intl_error_name',
- 'intl_get_error_code',
- 'intl_get_error_message',
- 'intl_is_failure'),
- 'libxml': ('libxml_clear_errors',
- 'libxml_disable_entity_loader',
- 'libxml_get_errors',
- 'libxml_get_last_error',
- 'libxml_set_external_entity_loader',
- 'libxml_set_streams_context',
- 'libxml_use_internal_errors'),
- 'mSQL': ('msql_affected_rows',
- 'msql_close',
- 'msql_connect',
- 'msql_create_db',
- 'msql_createdb',
- 'msql_data_seek',
- 'msql_db_query',
- 'msql_dbname',
- 'msql_drop_db',
- 'msql_error',
- 'msql_fetch_array',
- 'msql_fetch_field',
- 'msql_fetch_object',
- 'msql_fetch_row',
- 'msql_field_flags',
- 'msql_field_len',
- 'msql_field_name',
- 'msql_field_seek',
- 'msql_field_table',
- 'msql_field_type',
- 'msql_fieldflags',
- 'msql_fieldlen',
- 'msql_fieldname',
- 'msql_fieldtable',
- 'msql_fieldtype',
- 'msql_free_result',
- 'msql_list_dbs',
- 'msql_list_fields',
- 'msql_list_tables',
- 'msql_num_fields',
- 'msql_num_rows',
- 'msql_numfields',
- 'msql_numrows',
- 'msql_pconnect',
- 'msql_query',
- 'msql_regcase',
- 'msql_result',
- 'msql_select_db',
- 'msql_tablename',
- 'msql'),
- 'mnoGoSearch': ('udm_add_search_limit',
- 'udm_alloc_agent_array',
- 'udm_alloc_agent',
- 'udm_api_version',
- 'udm_cat_list',
- 'udm_cat_path',
- 'udm_check_charset',
- 'udm_check_stored',
- 'udm_clear_search_limits',
- 'udm_close_stored',
- 'udm_crc32',
- 'udm_errno',
- 'udm_error',
- 'udm_find',
- 'udm_free_agent',
- 'udm_free_ispell_data',
- 'udm_free_res',
- 'udm_get_doc_count',
- 'udm_get_res_field',
- 'udm_get_res_param',
- 'udm_hash32',
- 'udm_load_ispell_data',
- 'udm_open_stored',
- 'udm_set_agent_param'),
- 'mqseries': ('mqseries_back',
- 'mqseries_begin',
- 'mqseries_close',
- 'mqseries_cmit',
- 'mqseries_conn',
- 'mqseries_connx',
- 'mqseries_disc',
- 'mqseries_get',
- 'mqseries_inq',
- 'mqseries_open',
- 'mqseries_put1',
- 'mqseries_put',
- 'mqseries_set',
- 'mqseries_strerror'),
- 'mysqlnd_qc': ('mysqlnd_qc_clear_cache',
- 'mysqlnd_qc_get_available_handlers',
- 'mysqlnd_qc_get_cache_info',
- 'mysqlnd_qc_get_core_stats',
- 'mysqlnd_qc_get_normalized_query_trace_log',
- 'mysqlnd_qc_get_query_trace_log',
- 'mysqlnd_qc_set_cache_condition',
- 'mysqlnd_qc_set_is_select',
- 'mysqlnd_qc_set_storage_handler',
- 'mysqlnd_qc_set_user_handlers'),
- 'qtdom': ('qdom_error', 'qdom_tree'),
- 'runkit': ('runkit_class_adopt',
- 'runkit_class_emancipate',
- 'runkit_constant_add',
- 'runkit_constant_redefine',
- 'runkit_constant_remove',
- 'runkit_function_add',
- 'runkit_function_copy',
- 'runkit_function_redefine',
- 'runkit_function_remove',
- 'runkit_function_rename',
- 'runkit_import',
- 'runkit_lint_file',
- 'runkit_lint',
- 'runkit_method_add',
- 'runkit_method_copy',
- 'runkit_method_redefine',
- 'runkit_method_remove',
- 'runkit_method_rename',
- 'runkit_return_value_used',
- 'runkit_sandbox_output_handler',
- 'runkit_superglobals'),
- 'ssdeep': ('ssdeep_fuzzy_compare',
- 'ssdeep_fuzzy_hash_filename',
- 'ssdeep_fuzzy_hash'),
- 'vpopmail': ('vpopmail_add_alias_domain_ex',
- 'vpopmail_add_alias_domain',
- 'vpopmail_add_domain_ex',
- 'vpopmail_add_domain',
- 'vpopmail_add_user',
- 'vpopmail_alias_add',
- 'vpopmail_alias_del_domain',
- 'vpopmail_alias_del',
- 'vpopmail_alias_get_all',
- 'vpopmail_alias_get',
- 'vpopmail_auth_user',
- 'vpopmail_del_domain_ex',
- 'vpopmail_del_domain',
- 'vpopmail_del_user',
- 'vpopmail_error',
- 'vpopmail_passwd',
- 'vpopmail_set_user_quota'),
- 'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'),
- 'win32service': ('win32_continue_service',
- 'win32_create_service',
- 'win32_delete_service',
- 'win32_get_last_control_message',
- 'win32_pause_service',
- 'win32_query_service_status',
- 'win32_set_service_status',
- 'win32_start_service_ctrl_dispatcher',
- 'win32_start_service',
- 'win32_stop_service'),
- 'xattr': ('xattr_get',
- 'xattr_list',
- 'xattr_remove',
- 'xattr_set',
- 'xattr_supported'),
- 'xdiff': ('xdiff_file_bdiff_size',
- 'xdiff_file_bdiff',
- 'xdiff_file_bpatch',
- 'xdiff_file_diff_binary',
- 'xdiff_file_diff',
- 'xdiff_file_merge3',
- 'xdiff_file_patch_binary',
- 'xdiff_file_patch',
- 'xdiff_file_rabdiff',
- 'xdiff_string_bdiff_size',
- 'xdiff_string_bdiff',
- 'xdiff_string_bpatch',
- 'xdiff_string_diff_binary',
- 'xdiff_string_diff',
- 'xdiff_string_merge3',
- 'xdiff_string_patch_binary',
- 'xdiff_string_patch',
- 'xdiff_string_rabdiff')}
-
-
-if __name__ == '__main__': # pragma: no cover
- import glob
- import os
- import pprint
- import re
- import shutil
- import tarfile
- try:
- from urllib import urlretrieve
- except ImportError:
- from urllib.request import urlretrieve
-
- PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
- PHP_MANUAL_DIR = './php-chunked-xhtml/'
- PHP_REFERENCE_GLOB = 'ref.*'
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+MODULES = {'.NET': ('dotnet_load',),
+ 'APC': ('apc_add',
+ 'apc_bin_dump',
+ 'apc_bin_dumpfile',
+ 'apc_bin_load',
+ 'apc_bin_loadfile',
+ 'apc_cache_info',
+ 'apc_cas',
+ 'apc_clear_cache',
+ 'apc_compile_file',
+ 'apc_dec',
+ 'apc_define_constants',
+ 'apc_delete_file',
+ 'apc_delete',
+ 'apc_exists',
+ 'apc_fetch',
+ 'apc_inc',
+ 'apc_load_constants',
+ 'apc_sma_info',
+ 'apc_store'),
+ 'APD': ('apd_breakpoint',
+ 'apd_callstack',
+ 'apd_clunk',
+ 'apd_continue',
+ 'apd_croak',
+ 'apd_dump_function_table',
+ 'apd_dump_persistent_resources',
+ 'apd_dump_regular_resources',
+ 'apd_echo',
+ 'apd_get_active_symbols',
+ 'apd_set_pprof_trace',
+ 'apd_set_session_trace_socket',
+ 'apd_set_session_trace',
+ 'apd_set_session',
+ 'override_function',
+ 'rename_function'),
+ 'Aliases and deprecated Mysqli': ('mysqli_bind_param',
+ 'mysqli_bind_result',
+ 'mysqli_client_encoding',
+ 'mysqli_connect',
+ 'mysqli_disable_rpl_parse',
+ 'mysqli_enable_reads_from_master',
+ 'mysqli_enable_rpl_parse',
+ 'mysqli_escape_string',
+ 'mysqli_execute',
+ 'mysqli_fetch',
+ 'mysqli_get_cache_stats',
+ 'mysqli_get_metadata',
+ 'mysqli_master_query',
+ 'mysqli_param_count',
+ 'mysqli_report',
+ 'mysqli_rpl_parse_enabled',
+ 'mysqli_rpl_probe',
+ 'mysqli_send_long_data',
+ 'mysqli_slave_query'),
+ 'Apache': ('apache_child_terminate',
+ 'apache_get_modules',
+ 'apache_get_version',
+ 'apache_getenv',
+ 'apache_lookup_uri',
+ 'apache_note',
+ 'apache_request_headers',
+ 'apache_reset_timeout',
+ 'apache_response_headers',
+ 'apache_setenv',
+ 'getallheaders',
+ 'virtual'),
+ 'Array': ('array_change_key_case',
+ 'array_chunk',
+ 'array_column',
+ 'array_combine',
+ 'array_count_values',
+ 'array_diff_assoc',
+ 'array_diff_key',
+ 'array_diff_uassoc',
+ 'array_diff_ukey',
+ 'array_diff',
+ 'array_fill_keys',
+ 'array_fill',
+ 'array_filter',
+ 'array_flip',
+ 'array_intersect_assoc',
+ 'array_intersect_key',
+ 'array_intersect_uassoc',
+ 'array_intersect_ukey',
+ 'array_intersect',
+ 'array_key_exists',
+ 'array_keys',
+ 'array_map',
+ 'array_merge_recursive',
+ 'array_merge',
+ 'array_multisort',
+ 'array_pad',
+ 'array_pop',
+ 'array_product',
+ 'array_push',
+ 'array_rand',
+ 'array_reduce',
+ 'array_replace_recursive',
+ 'array_replace',
+ 'array_reverse',
+ 'array_search',
+ 'array_shift',
+ 'array_slice',
+ 'array_splice',
+ 'array_sum',
+ 'array_udiff_assoc',
+ 'array_udiff_uassoc',
+ 'array_udiff',
+ 'array_uintersect_assoc',
+ 'array_uintersect_uassoc',
+ 'array_uintersect',
+ 'array_unique',
+ 'array_unshift',
+ 'array_values',
+ 'array_walk_recursive',
+ 'array_walk',
+ 'array',
+ 'arsort',
+ 'asort',
+ 'compact',
+ 'count',
+ 'current',
+ 'each',
+ 'end',
+ 'extract',
+ 'in_array',
+ 'key_exists',
+ 'key',
+ 'krsort',
+ 'ksort',
+ 'list',
+ 'natcasesort',
+ 'natsort',
+ 'next',
+ 'pos',
+ 'prev',
+ 'range',
+ 'reset',
+ 'rsort',
+ 'shuffle',
+ 'sizeof',
+ 'sort',
+ 'uasort',
+ 'uksort',
+ 'usort'),
+ 'BBCode': ('bbcode_add_element',
+ 'bbcode_add_smiley',
+ 'bbcode_create',
+ 'bbcode_destroy',
+ 'bbcode_parse',
+ 'bbcode_set_arg_parser',
+ 'bbcode_set_flags'),
+ 'BC Math': ('bcadd',
+ 'bccomp',
+ 'bcdiv',
+ 'bcmod',
+ 'bcmul',
+ 'bcpow',
+ 'bcpowmod',
+ 'bcscale',
+ 'bcsqrt',
+ 'bcsub'),
+ 'Blenc': ('blenc_encrypt',),
+ 'Bzip2': ('bzclose',
+ 'bzcompress',
+ 'bzdecompress',
+ 'bzerrno',
+ 'bzerror',
+ 'bzerrstr',
+ 'bzflush',
+ 'bzopen',
+ 'bzread',
+ 'bzwrite'),
+ 'COM': ('com_addref',
+ 'com_create_guid',
+ 'com_event_sink',
+ 'com_get_active_object',
+ 'com_get',
+ 'com_invoke',
+ 'com_isenum',
+ 'com_load_typelib',
+ 'com_load',
+ 'com_message_pump',
+ 'com_print_typeinfo',
+ 'com_propget',
+ 'com_propput',
+ 'com_propset',
+ 'com_release',
+ 'com_set',
+ 'variant_abs',
+ 'variant_add',
+ 'variant_and',
+ 'variant_cast',
+ 'variant_cat',
+ 'variant_cmp',
+ 'variant_date_from_timestamp',
+ 'variant_date_to_timestamp',
+ 'variant_div',
+ 'variant_eqv',
+ 'variant_fix',
+ 'variant_get_type',
+ 'variant_idiv',
+ 'variant_imp',
+ 'variant_int',
+ 'variant_mod',
+ 'variant_mul',
+ 'variant_neg',
+ 'variant_not',
+ 'variant_or',
+ 'variant_pow',
+ 'variant_round',
+ 'variant_set_type',
+ 'variant_set',
+ 'variant_sub',
+ 'variant_xor'),
+ 'CUBRID': ('cubrid_bind',
+ 'cubrid_close_prepare',
+ 'cubrid_close_request',
+ 'cubrid_col_get',
+ 'cubrid_col_size',
+ 'cubrid_column_names',
+ 'cubrid_column_types',
+ 'cubrid_commit',
+ 'cubrid_connect_with_url',
+ 'cubrid_connect',
+ 'cubrid_current_oid',
+ 'cubrid_disconnect',
+ 'cubrid_drop',
+ 'cubrid_error_code_facility',
+ 'cubrid_error_code',
+ 'cubrid_error_msg',
+ 'cubrid_execute',
+ 'cubrid_fetch',
+ 'cubrid_free_result',
+ 'cubrid_get_autocommit',
+ 'cubrid_get_charset',
+ 'cubrid_get_class_name',
+ 'cubrid_get_client_info',
+ 'cubrid_get_db_parameter',
+ 'cubrid_get_query_timeout',
+ 'cubrid_get_server_info',
+ 'cubrid_get',
+ 'cubrid_insert_id',
+ 'cubrid_is_instance',
+ 'cubrid_lob_close',
+ 'cubrid_lob_export',
+ 'cubrid_lob_get',
+ 'cubrid_lob_send',
+ 'cubrid_lob_size',
+ 'cubrid_lob2_bind',
+ 'cubrid_lob2_close',
+ 'cubrid_lob2_export',
+ 'cubrid_lob2_import',
+ 'cubrid_lob2_new',
+ 'cubrid_lob2_read',
+ 'cubrid_lob2_seek64',
+ 'cubrid_lob2_seek',
+ 'cubrid_lob2_size64',
+ 'cubrid_lob2_size',
+ 'cubrid_lob2_tell64',
+ 'cubrid_lob2_tell',
+ 'cubrid_lob2_write',
+ 'cubrid_lock_read',
+ 'cubrid_lock_write',
+ 'cubrid_move_cursor',
+ 'cubrid_next_result',
+ 'cubrid_num_cols',
+ 'cubrid_num_rows',
+ 'cubrid_pconnect_with_url',
+ 'cubrid_pconnect',
+ 'cubrid_prepare',
+ 'cubrid_put',
+ 'cubrid_rollback',
+ 'cubrid_schema',
+ 'cubrid_seq_drop',
+ 'cubrid_seq_insert',
+ 'cubrid_seq_put',
+ 'cubrid_set_add',
+ 'cubrid_set_autocommit',
+ 'cubrid_set_db_parameter',
+ 'cubrid_set_drop',
+ 'cubrid_set_query_timeout',
+ 'cubrid_version'),
+ 'Cairo': ('cairo_create',
+ 'cairo_font_face_get_type',
+ 'cairo_font_face_status',
+ 'cairo_font_options_create',
+ 'cairo_font_options_equal',
+ 'cairo_font_options_get_antialias',
+ 'cairo_font_options_get_hint_metrics',
+ 'cairo_font_options_get_hint_style',
+ 'cairo_font_options_get_subpixel_order',
+ 'cairo_font_options_hash',
+ 'cairo_font_options_merge',
+ 'cairo_font_options_set_antialias',
+ 'cairo_font_options_set_hint_metrics',
+ 'cairo_font_options_set_hint_style',
+ 'cairo_font_options_set_subpixel_order',
+ 'cairo_font_options_status',
+ 'cairo_format_stride_for_width',
+ 'cairo_image_surface_create_for_data',
+ 'cairo_image_surface_create_from_png',
+ 'cairo_image_surface_create',
+ 'cairo_image_surface_get_data',
+ 'cairo_image_surface_get_format',
+ 'cairo_image_surface_get_height',
+ 'cairo_image_surface_get_stride',
+ 'cairo_image_surface_get_width',
+ 'cairo_matrix_create_scale',
+ 'cairo_matrix_create_translate',
+ 'cairo_matrix_invert',
+ 'cairo_matrix_multiply',
+ 'cairo_matrix_rotate',
+ 'cairo_matrix_transform_distance',
+ 'cairo_matrix_transform_point',
+ 'cairo_matrix_translate',
+ 'cairo_pattern_add_color_stop_rgb',
+ 'cairo_pattern_add_color_stop_rgba',
+ 'cairo_pattern_create_for_surface',
+ 'cairo_pattern_create_linear',
+ 'cairo_pattern_create_radial',
+ 'cairo_pattern_create_rgb',
+ 'cairo_pattern_create_rgba',
+ 'cairo_pattern_get_color_stop_count',
+ 'cairo_pattern_get_color_stop_rgba',
+ 'cairo_pattern_get_extend',
+ 'cairo_pattern_get_filter',
+ 'cairo_pattern_get_linear_points',
+ 'cairo_pattern_get_matrix',
+ 'cairo_pattern_get_radial_circles',
+ 'cairo_pattern_get_rgba',
+ 'cairo_pattern_get_surface',
+ 'cairo_pattern_get_type',
+ 'cairo_pattern_set_extend',
+ 'cairo_pattern_set_filter',
+ 'cairo_pattern_set_matrix',
+ 'cairo_pattern_status',
+ 'cairo_pdf_surface_create',
+ 'cairo_pdf_surface_set_size',
+ 'cairo_ps_get_levels',
+ 'cairo_ps_level_to_string',
+ 'cairo_ps_surface_create',
+ 'cairo_ps_surface_dsc_begin_page_setup',
+ 'cairo_ps_surface_dsc_begin_setup',
+ 'cairo_ps_surface_dsc_comment',
+ 'cairo_ps_surface_get_eps',
+ 'cairo_ps_surface_restrict_to_level',
+ 'cairo_ps_surface_set_eps',
+ 'cairo_ps_surface_set_size',
+ 'cairo_scaled_font_create',
+ 'cairo_scaled_font_extents',
+ 'cairo_scaled_font_get_ctm',
+ 'cairo_scaled_font_get_font_face',
+ 'cairo_scaled_font_get_font_matrix',
+ 'cairo_scaled_font_get_font_options',
+ 'cairo_scaled_font_get_scale_matrix',
+ 'cairo_scaled_font_get_type',
+ 'cairo_scaled_font_glyph_extents',
+ 'cairo_scaled_font_status',
+ 'cairo_scaled_font_text_extents',
+ 'cairo_surface_copy_page',
+ 'cairo_surface_create_similar',
+ 'cairo_surface_finish',
+ 'cairo_surface_flush',
+ 'cairo_surface_get_content',
+ 'cairo_surface_get_device_offset',
+ 'cairo_surface_get_font_options',
+ 'cairo_surface_get_type',
+ 'cairo_surface_mark_dirty_rectangle',
+ 'cairo_surface_mark_dirty',
+ 'cairo_surface_set_device_offset',
+ 'cairo_surface_set_fallback_resolution',
+ 'cairo_surface_show_page',
+ 'cairo_surface_status',
+ 'cairo_surface_write_to_png',
+ 'cairo_svg_surface_create',
+ 'cairo_svg_surface_restrict_to_version',
+ 'cairo_svg_version_to_string'),
+ 'Calendar': ('cal_days_in_month',
+ 'cal_from_jd',
+ 'cal_info',
+ 'cal_to_jd',
+ 'easter_date',
+ 'easter_days',
+ 'FrenchToJD',
+ 'GregorianToJD',
+ 'JDDayOfWeek',
+ 'JDMonthName',
+ 'JDToFrench',
+ 'JDToGregorian',
+ 'jdtojewish',
+ 'JDToJulian',
+ 'jdtounix',
+ 'JewishToJD',
+ 'JulianToJD',
+ 'unixtojd'),
+ 'Classes/Object': ('__autoload',
+ 'call_user_method_array',
+ 'call_user_method',
+ 'class_alias',
+ 'class_exists',
+ 'get_called_class',
+ 'get_class_methods',
+ 'get_class_vars',
+ 'get_class',
+ 'get_declared_classes',
+ 'get_declared_interfaces',
+ 'get_declared_traits',
+ 'get_object_vars',
+ 'get_parent_class',
+ 'interface_exists',
+ 'is_a',
+ 'is_subclass_of',
+ 'method_exists',
+ 'property_exists',
+ 'trait_exists'),
+ 'Classkit': ('classkit_import',
+ 'classkit_method_add',
+ 'classkit_method_copy',
+ 'classkit_method_redefine',
+ 'classkit_method_remove',
+ 'classkit_method_rename'),
+ 'Crack': ('crack_check',
+ 'crack_closedict',
+ 'crack_getlastmessage',
+ 'crack_opendict'),
+ 'Ctype': ('ctype_alnum',
+ 'ctype_alpha',
+ 'ctype_cntrl',
+ 'ctype_digit',
+ 'ctype_graph',
+ 'ctype_lower',
+ 'ctype_print',
+ 'ctype_punct',
+ 'ctype_space',
+ 'ctype_upper',
+ 'ctype_xdigit'),
+ 'Cyrus': ('cyrus_authenticate',
+ 'cyrus_bind',
+ 'cyrus_close',
+ 'cyrus_connect',
+ 'cyrus_query',
+ 'cyrus_unbind'),
+ 'DB++': ('dbplus_add',
+ 'dbplus_aql',
+ 'dbplus_chdir',
+ 'dbplus_close',
+ 'dbplus_curr',
+ 'dbplus_errcode',
+ 'dbplus_errno',
+ 'dbplus_find',
+ 'dbplus_first',
+ 'dbplus_flush',
+ 'dbplus_freealllocks',
+ 'dbplus_freelock',
+ 'dbplus_freerlocks',
+ 'dbplus_getlock',
+ 'dbplus_getunique',
+ 'dbplus_info',
+ 'dbplus_last',
+ 'dbplus_lockrel',
+ 'dbplus_next',
+ 'dbplus_open',
+ 'dbplus_prev',
+ 'dbplus_rchperm',
+ 'dbplus_rcreate',
+ 'dbplus_rcrtexact',
+ 'dbplus_rcrtlike',
+ 'dbplus_resolve',
+ 'dbplus_restorepos',
+ 'dbplus_rkeys',
+ 'dbplus_ropen',
+ 'dbplus_rquery',
+ 'dbplus_rrename',
+ 'dbplus_rsecindex',
+ 'dbplus_runlink',
+ 'dbplus_rzap',
+ 'dbplus_savepos',
+ 'dbplus_setindex',
+ 'dbplus_setindexbynumber',
+ 'dbplus_sql',
+ 'dbplus_tcl',
+ 'dbplus_tremove',
+ 'dbplus_undo',
+ 'dbplus_undoprepare',
+ 'dbplus_unlockrel',
+ 'dbplus_unselect',
+ 'dbplus_update',
+ 'dbplus_xlockrel',
+ 'dbplus_xunlockrel'),
+ 'DBA': ('dba_close',
+ 'dba_delete',
+ 'dba_exists',
+ 'dba_fetch',
+ 'dba_firstkey',
+ 'dba_handlers',
+ 'dba_insert',
+ 'dba_key_split',
+ 'dba_list',
+ 'dba_nextkey',
+ 'dba_open',
+ 'dba_optimize',
+ 'dba_popen',
+ 'dba_replace',
+ 'dba_sync'),
+ 'DOM': ('dom_import_simplexml',),
+ 'Date/Time': ('checkdate',
+ 'date_add',
+ 'date_create_from_format',
+ 'date_create_immutable_from_format',
+ 'date_create_immutable',
+ 'date_create',
+ 'date_date_set',
+ 'date_default_timezone_get',
+ 'date_default_timezone_set',
+ 'date_diff',
+ 'date_format',
+ 'date_get_last_errors',
+ 'date_interval_create_from_date_string',
+ 'date_interval_format',
+ 'date_isodate_set',
+ 'date_modify',
+ 'date_offset_get',
+ 'date_parse_from_format',
+ 'date_parse',
+ 'date_sub',
+ 'date_sun_info',
+ 'date_sunrise',
+ 'date_sunset',
+ 'date_time_set',
+ 'date_timestamp_get',
+ 'date_timestamp_set',
+ 'date_timezone_get',
+ 'date_timezone_set',
+ 'date',
+ 'getdate',
+ 'gettimeofday',
+ 'gmdate',
+ 'gmmktime',
+ 'gmstrftime',
+ 'idate',
+ 'localtime',
+ 'microtime',
+ 'mktime',
+ 'strftime',
+ 'strptime',
+ 'strtotime',
+ 'time',
+ 'timezone_abbreviations_list',
+ 'timezone_identifiers_list',
+ 'timezone_location_get',
+ 'timezone_name_from_abbr',
+ 'timezone_name_get',
+ 'timezone_offset_get',
+ 'timezone_open',
+ 'timezone_transitions_get',
+ 'timezone_version_get'),
+ 'Direct IO': ('dio_close',
+ 'dio_fcntl',
+ 'dio_open',
+ 'dio_read',
+ 'dio_seek',
+ 'dio_stat',
+ 'dio_tcsetattr',
+ 'dio_truncate',
+ 'dio_write'),
+ 'Directory': ('chdir',
+ 'chroot',
+ 'closedir',
+ 'dir',
+ 'getcwd',
+ 'opendir',
+ 'readdir',
+ 'rewinddir',
+ 'scandir'),
+ 'Eio': ('eio_busy',
+ 'eio_cancel',
+ 'eio_chmod',
+ 'eio_chown',
+ 'eio_close',
+ 'eio_custom',
+ 'eio_dup2',
+ 'eio_event_loop',
+ 'eio_fallocate',
+ 'eio_fchmod',
+ 'eio_fchown',
+ 'eio_fdatasync',
+ 'eio_fstat',
+ 'eio_fstatvfs',
+ 'eio_fsync',
+ 'eio_ftruncate',
+ 'eio_futime',
+ 'eio_get_event_stream',
+ 'eio_get_last_error',
+ 'eio_grp_add',
+ 'eio_grp_cancel',
+ 'eio_grp_limit',
+ 'eio_grp',
+ 'eio_init',
+ 'eio_link',
+ 'eio_lstat',
+ 'eio_mkdir',
+ 'eio_mknod',
+ 'eio_nop',
+ 'eio_npending',
+ 'eio_nready',
+ 'eio_nreqs',
+ 'eio_nthreads',
+ 'eio_open',
+ 'eio_poll',
+ 'eio_read',
+ 'eio_readahead',
+ 'eio_readdir',
+ 'eio_readlink',
+ 'eio_realpath',
+ 'eio_rename',
+ 'eio_rmdir',
+ 'eio_seek',
+ 'eio_sendfile',
+ 'eio_set_max_idle',
+ 'eio_set_max_parallel',
+ 'eio_set_max_poll_reqs',
+ 'eio_set_max_poll_time',
+ 'eio_set_min_parallel',
+ 'eio_stat',
+ 'eio_statvfs',
+ 'eio_symlink',
+ 'eio_sync_file_range',
+ 'eio_sync',
+ 'eio_syncfs',
+ 'eio_truncate',
+ 'eio_unlink',
+ 'eio_utime',
+ 'eio_write'),
+ 'Enchant': ('enchant_broker_describe',
+ 'enchant_broker_dict_exists',
+ 'enchant_broker_free_dict',
+ 'enchant_broker_free',
+ 'enchant_broker_get_error',
+ 'enchant_broker_init',
+ 'enchant_broker_list_dicts',
+ 'enchant_broker_request_dict',
+ 'enchant_broker_request_pwl_dict',
+ 'enchant_broker_set_ordering',
+ 'enchant_dict_add_to_personal',
+ 'enchant_dict_add_to_session',
+ 'enchant_dict_check',
+ 'enchant_dict_describe',
+ 'enchant_dict_get_error',
+ 'enchant_dict_is_in_session',
+ 'enchant_dict_quick_check',
+ 'enchant_dict_store_replacement',
+ 'enchant_dict_suggest'),
+ 'Error Handling': ('debug_backtrace',
+ 'debug_print_backtrace',
+ 'error_get_last',
+ 'error_log',
+ 'error_reporting',
+ 'restore_error_handler',
+ 'restore_exception_handler',
+ 'set_error_handler',
+ 'set_exception_handler',
+ 'trigger_error',
+ 'user_error'),
+ 'Exif': ('exif_imagetype',
+ 'exif_read_data',
+ 'exif_tagname',
+ 'exif_thumbnail',
+ 'read_exif_data'),
+ 'Expect': ('expect_expectl', 'expect_popen'),
+ 'FAM': ('fam_cancel_monitor',
+ 'fam_close',
+ 'fam_monitor_collection',
+ 'fam_monitor_directory',
+ 'fam_monitor_file',
+ 'fam_next_event',
+ 'fam_open',
+ 'fam_pending',
+ 'fam_resume_monitor',
+ 'fam_suspend_monitor'),
+ 'FDF': ('fdf_add_doc_javascript',
+ 'fdf_add_template',
+ 'fdf_close',
+ 'fdf_create',
+ 'fdf_enum_values',
+ 'fdf_errno',
+ 'fdf_error',
+ 'fdf_get_ap',
+ 'fdf_get_attachment',
+ 'fdf_get_encoding',
+ 'fdf_get_file',
+ 'fdf_get_flags',
+ 'fdf_get_opt',
+ 'fdf_get_status',
+ 'fdf_get_value',
+ 'fdf_get_version',
+ 'fdf_header',
+ 'fdf_next_field_name',
+ 'fdf_open_string',
+ 'fdf_open',
+ 'fdf_remove_item',
+ 'fdf_save_string',
+ 'fdf_save',
+ 'fdf_set_ap',
+ 'fdf_set_encoding',
+ 'fdf_set_file',
+ 'fdf_set_flags',
+ 'fdf_set_javascript_action',
+ 'fdf_set_on_import_javascript',
+ 'fdf_set_opt',
+ 'fdf_set_status',
+ 'fdf_set_submit_form_action',
+ 'fdf_set_target_frame',
+ 'fdf_set_value',
+ 'fdf_set_version'),
+ 'FPM': ('fastcgi_finish_request',),
+ 'FTP': ('ftp_alloc',
+ 'ftp_cdup',
+ 'ftp_chdir',
+ 'ftp_chmod',
+ 'ftp_close',
+ 'ftp_connect',
+ 'ftp_delete',
+ 'ftp_exec',
+ 'ftp_fget',
+ 'ftp_fput',
+ 'ftp_get_option',
+ 'ftp_get',
+ 'ftp_login',
+ 'ftp_mdtm',
+ 'ftp_mkdir',
+ 'ftp_nb_continue',
+ 'ftp_nb_fget',
+ 'ftp_nb_fput',
+ 'ftp_nb_get',
+ 'ftp_nb_put',
+ 'ftp_nlist',
+ 'ftp_pasv',
+ 'ftp_put',
+ 'ftp_pwd',
+ 'ftp_quit',
+ 'ftp_raw',
+ 'ftp_rawlist',
+ 'ftp_rename',
+ 'ftp_rmdir',
+ 'ftp_set_option',
+ 'ftp_site',
+ 'ftp_size',
+ 'ftp_ssl_connect',
+ 'ftp_systype'),
+ 'Fann': ('fann_cascadetrain_on_data',
+ 'fann_cascadetrain_on_file',
+ 'fann_clear_scaling_params',
+ 'fann_copy',
+ 'fann_create_from_file',
+ 'fann_create_shortcut_array',
+ 'fann_create_shortcut',
+ 'fann_create_sparse_array',
+ 'fann_create_sparse',
+ 'fann_create_standard_array',
+ 'fann_create_standard',
+ 'fann_create_train_from_callback',
+ 'fann_create_train',
+ 'fann_descale_input',
+ 'fann_descale_output',
+ 'fann_descale_train',
+ 'fann_destroy_train',
+ 'fann_destroy',
+ 'fann_duplicate_train_data',
+ 'fann_get_activation_function',
+ 'fann_get_activation_steepness',
+ 'fann_get_bias_array',
+ 'fann_get_bit_fail_limit',
+ 'fann_get_bit_fail',
+ 'fann_get_cascade_activation_functions_count',
+ 'fann_get_cascade_activation_functions',
+ 'fann_get_cascade_activation_steepnesses_count',
+ 'fann_get_cascade_activation_steepnesses',
+ 'fann_get_cascade_candidate_change_fraction',
+ 'fann_get_cascade_candidate_limit',
+ 'fann_get_cascade_candidate_stagnation_epochs',
+ 'fann_get_cascade_max_cand_epochs',
+ 'fann_get_cascade_max_out_epochs',
+ 'fann_get_cascade_min_cand_epochs',
+ 'fann_get_cascade_min_out_epochs',
+ 'fann_get_cascade_num_candidate_groups',
+ 'fann_get_cascade_num_candidates',
+ 'fann_get_cascade_output_change_fraction',
+ 'fann_get_cascade_output_stagnation_epochs',
+ 'fann_get_cascade_weight_multiplier',
+ 'fann_get_connection_array',
+ 'fann_get_connection_rate',
+ 'fann_get_errno',
+ 'fann_get_errstr',
+ 'fann_get_layer_array',
+ 'fann_get_learning_momentum',
+ 'fann_get_learning_rate',
+ 'fann_get_MSE',
+ 'fann_get_network_type',
+ 'fann_get_num_input',
+ 'fann_get_num_layers',
+ 'fann_get_num_output',
+ 'fann_get_quickprop_decay',
+ 'fann_get_quickprop_mu',
+ 'fann_get_rprop_decrease_factor',
+ 'fann_get_rprop_delta_max',
+ 'fann_get_rprop_delta_min',
+ 'fann_get_rprop_delta_zero',
+ 'fann_get_rprop_increase_factor',
+ 'fann_get_sarprop_step_error_shift',
+ 'fann_get_sarprop_step_error_threshold_factor',
+ 'fann_get_sarprop_temperature',
+ 'fann_get_sarprop_weight_decay_shift',
+ 'fann_get_total_connections',
+ 'fann_get_total_neurons',
+ 'fann_get_train_error_function',
+ 'fann_get_train_stop_function',
+ 'fann_get_training_algorithm',
+ 'fann_init_weights',
+ 'fann_length_train_data',
+ 'fann_merge_train_data',
+ 'fann_num_input_train_data',
+ 'fann_num_output_train_data',
+ 'fann_print_error',
+ 'fann_randomize_weights',
+ 'fann_read_train_from_file',
+ 'fann_reset_errno',
+ 'fann_reset_errstr',
+ 'fann_reset_MSE',
+ 'fann_run',
+ 'fann_save_train',
+ 'fann_save',
+ 'fann_scale_input_train_data',
+ 'fann_scale_input',
+ 'fann_scale_output_train_data',
+ 'fann_scale_output',
+ 'fann_scale_train_data',
+ 'fann_scale_train',
+ 'fann_set_activation_function_hidden',
+ 'fann_set_activation_function_layer',
+ 'fann_set_activation_function_output',
+ 'fann_set_activation_function',
+ 'fann_set_activation_steepness_hidden',
+ 'fann_set_activation_steepness_layer',
+ 'fann_set_activation_steepness_output',
+ 'fann_set_activation_steepness',
+ 'fann_set_bit_fail_limit',
+ 'fann_set_callback',
+ 'fann_set_cascade_activation_functions',
+ 'fann_set_cascade_activation_steepnesses',
+ 'fann_set_cascade_candidate_change_fraction',
+ 'fann_set_cascade_candidate_limit',
+ 'fann_set_cascade_candidate_stagnation_epochs',
+ 'fann_set_cascade_max_cand_epochs',
+ 'fann_set_cascade_max_out_epochs',
+ 'fann_set_cascade_min_cand_epochs',
+ 'fann_set_cascade_min_out_epochs',
+ 'fann_set_cascade_num_candidate_groups',
+ 'fann_set_cascade_output_change_fraction',
+ 'fann_set_cascade_output_stagnation_epochs',
+ 'fann_set_cascade_weight_multiplier',
+ 'fann_set_error_log',
+ 'fann_set_input_scaling_params',
+ 'fann_set_learning_momentum',
+ 'fann_set_learning_rate',
+ 'fann_set_output_scaling_params',
+ 'fann_set_quickprop_decay',
+ 'fann_set_quickprop_mu',
+ 'fann_set_rprop_decrease_factor',
+ 'fann_set_rprop_delta_max',
+ 'fann_set_rprop_delta_min',
+ 'fann_set_rprop_delta_zero',
+ 'fann_set_rprop_increase_factor',
+ 'fann_set_sarprop_step_error_shift',
+ 'fann_set_sarprop_step_error_threshold_factor',
+ 'fann_set_sarprop_temperature',
+ 'fann_set_sarprop_weight_decay_shift',
+ 'fann_set_scaling_params',
+ 'fann_set_train_error_function',
+ 'fann_set_train_stop_function',
+ 'fann_set_training_algorithm',
+ 'fann_set_weight_array',
+ 'fann_set_weight',
+ 'fann_shuffle_train_data',
+ 'fann_subset_train_data',
+ 'fann_test_data',
+ 'fann_test',
+ 'fann_train_epoch',
+ 'fann_train_on_data',
+ 'fann_train_on_file',
+ 'fann_train'),
+ 'Fileinfo': ('finfo_buffer',
+ 'finfo_close',
+ 'finfo_file',
+ 'finfo_open',
+ 'finfo_set_flags',
+ 'mime_content_type'),
+ 'Filesystem': ('basename',
+ 'chgrp',
+ 'chmod',
+ 'chown',
+ 'clearstatcache',
+ 'copy',
+ 'dirname',
+ 'disk_free_space',
+ 'disk_total_space',
+ 'diskfreespace',
+ 'fclose',
+ 'feof',
+ 'fflush',
+ 'fgetc',
+ 'fgetcsv',
+ 'fgets',
+ 'fgetss',
+ 'file_exists',
+ 'file_get_contents',
+ 'file_put_contents',
+ 'file',
+ 'fileatime',
+ 'filectime',
+ 'filegroup',
+ 'fileinode',
+ 'filemtime',
+ 'fileowner',
+ 'fileperms',
+ 'filesize',
+ 'filetype',
+ 'flock',
+ 'fnmatch',
+ 'fopen',
+ 'fpassthru',
+ 'fputcsv',
+ 'fputs',
+ 'fread',
+ 'fscanf',
+ 'fseek',
+ 'fstat',
+ 'ftell',
+ 'ftruncate',
+ 'fwrite',
+ 'glob',
+ 'is_dir',
+ 'is_executable',
+ 'is_file',
+ 'is_link',
+ 'is_readable',
+ 'is_uploaded_file',
+ 'is_writable',
+ 'is_writeable',
+ 'lchgrp',
+ 'lchown',
+ 'link',
+ 'linkinfo',
+ 'lstat',
+ 'mkdir',
+ 'move_uploaded_file',
+ 'parse_ini_file',
+ 'parse_ini_string',
+ 'pathinfo',
+ 'pclose',
+ 'popen',
+ 'readfile',
+ 'readlink',
+ 'realpath_cache_get',
+ 'realpath_cache_size',
+ 'realpath',
+ 'rename',
+ 'rewind',
+ 'rmdir',
+ 'set_file_buffer',
+ 'stat',
+ 'symlink',
+ 'tempnam',
+ 'tmpfile',
+ 'touch',
+ 'umask',
+ 'unlink'),
+ 'Filter': ('filter_has_var',
+ 'filter_id',
+ 'filter_input_array',
+ 'filter_input',
+ 'filter_list',
+ 'filter_var_array',
+ 'filter_var'),
+ 'Firebird/InterBase': ('ibase_add_user',
+ 'ibase_affected_rows',
+ 'ibase_backup',
+ 'ibase_blob_add',
+ 'ibase_blob_cancel',
+ 'ibase_blob_close',
+ 'ibase_blob_create',
+ 'ibase_blob_echo',
+ 'ibase_blob_get',
+ 'ibase_blob_import',
+ 'ibase_blob_info',
+ 'ibase_blob_open',
+ 'ibase_close',
+ 'ibase_commit_ret',
+ 'ibase_commit',
+ 'ibase_connect',
+ 'ibase_db_info',
+ 'ibase_delete_user',
+ 'ibase_drop_db',
+ 'ibase_errcode',
+ 'ibase_errmsg',
+ 'ibase_execute',
+ 'ibase_fetch_assoc',
+ 'ibase_fetch_object',
+ 'ibase_fetch_row',
+ 'ibase_field_info',
+ 'ibase_free_event_handler',
+ 'ibase_free_query',
+ 'ibase_free_result',
+ 'ibase_gen_id',
+ 'ibase_maintain_db',
+ 'ibase_modify_user',
+ 'ibase_name_result',
+ 'ibase_num_fields',
+ 'ibase_num_params',
+ 'ibase_param_info',
+ 'ibase_pconnect',
+ 'ibase_prepare',
+ 'ibase_query',
+ 'ibase_restore',
+ 'ibase_rollback_ret',
+ 'ibase_rollback',
+ 'ibase_server_info',
+ 'ibase_service_attach',
+ 'ibase_service_detach',
+ 'ibase_set_event_handler',
+ 'ibase_trans',
+ 'ibase_wait_event'),
+ 'FriBiDi': ('fribidi_log2vis',),
+ 'FrontBase': ('fbsql_affected_rows',
+ 'fbsql_autocommit',
+ 'fbsql_blob_size',
+ 'fbsql_change_user',
+ 'fbsql_clob_size',
+ 'fbsql_close',
+ 'fbsql_commit',
+ 'fbsql_connect',
+ 'fbsql_create_blob',
+ 'fbsql_create_clob',
+ 'fbsql_create_db',
+ 'fbsql_data_seek',
+ 'fbsql_database_password',
+ 'fbsql_database',
+ 'fbsql_db_query',
+ 'fbsql_db_status',
+ 'fbsql_drop_db',
+ 'fbsql_errno',
+ 'fbsql_error',
+ 'fbsql_fetch_array',
+ 'fbsql_fetch_assoc',
+ 'fbsql_fetch_field',
+ 'fbsql_fetch_lengths',
+ 'fbsql_fetch_object',
+ 'fbsql_fetch_row',
+ 'fbsql_field_flags',
+ 'fbsql_field_len',
+ 'fbsql_field_name',
+ 'fbsql_field_seek',
+ 'fbsql_field_table',
+ 'fbsql_field_type',
+ 'fbsql_free_result',
+ 'fbsql_get_autostart_info',
+ 'fbsql_hostname',
+ 'fbsql_insert_id',
+ 'fbsql_list_dbs',
+ 'fbsql_list_fields',
+ 'fbsql_list_tables',
+ 'fbsql_next_result',
+ 'fbsql_num_fields',
+ 'fbsql_num_rows',
+ 'fbsql_password',
+ 'fbsql_pconnect',
+ 'fbsql_query',
+ 'fbsql_read_blob',
+ 'fbsql_read_clob',
+ 'fbsql_result',
+ 'fbsql_rollback',
+ 'fbsql_rows_fetched',
+ 'fbsql_select_db',
+ 'fbsql_set_characterset',
+ 'fbsql_set_lob_mode',
+ 'fbsql_set_password',
+ 'fbsql_set_transaction',
+ 'fbsql_start_db',
+ 'fbsql_stop_db',
+ 'fbsql_table_name',
+ 'fbsql_tablename',
+ 'fbsql_username',
+ 'fbsql_warnings'),
+ 'Function handling': ('call_user_func_array',
+ 'call_user_func',
+ 'create_function',
+ 'forward_static_call_array',
+ 'forward_static_call',
+ 'func_get_arg',
+ 'func_get_args',
+ 'func_num_args',
+ 'function_exists',
+ 'get_defined_functions',
+ 'register_shutdown_function',
+ 'register_tick_function',
+ 'unregister_tick_function'),
+ 'GD and Image': ('gd_info',
+ 'getimagesize',
+ 'getimagesizefromstring',
+ 'image_type_to_extension',
+ 'image_type_to_mime_type',
+ 'image2wbmp',
+ 'imageaffine',
+ 'imageaffinematrixconcat',
+ 'imageaffinematrixget',
+ 'imagealphablending',
+ 'imageantialias',
+ 'imagearc',
+ 'imagechar',
+ 'imagecharup',
+ 'imagecolorallocate',
+ 'imagecolorallocatealpha',
+ 'imagecolorat',
+ 'imagecolorclosest',
+ 'imagecolorclosestalpha',
+ 'imagecolorclosesthwb',
+ 'imagecolordeallocate',
+ 'imagecolorexact',
+ 'imagecolorexactalpha',
+ 'imagecolormatch',
+ 'imagecolorresolve',
+ 'imagecolorresolvealpha',
+ 'imagecolorset',
+ 'imagecolorsforindex',
+ 'imagecolorstotal',
+ 'imagecolortransparent',
+ 'imageconvolution',
+ 'imagecopy',
+ 'imagecopymerge',
+ 'imagecopymergegray',
+ 'imagecopyresampled',
+ 'imagecopyresized',
+ 'imagecreate',
+ 'imagecreatefromgd2',
+ 'imagecreatefromgd2part',
+ 'imagecreatefromgd',
+ 'imagecreatefromgif',
+ 'imagecreatefromjpeg',
+ 'imagecreatefrompng',
+ 'imagecreatefromstring',
+ 'imagecreatefromwbmp',
+ 'imagecreatefromwebp',
+ 'imagecreatefromxbm',
+ 'imagecreatefromxpm',
+ 'imagecreatetruecolor',
+ 'imagecrop',
+ 'imagecropauto',
+ 'imagedashedline',
+ 'imagedestroy',
+ 'imageellipse',
+ 'imagefill',
+ 'imagefilledarc',
+ 'imagefilledellipse',
+ 'imagefilledpolygon',
+ 'imagefilledrectangle',
+ 'imagefilltoborder',
+ 'imagefilter',
+ 'imageflip',
+ 'imagefontheight',
+ 'imagefontwidth',
+ 'imageftbbox',
+ 'imagefttext',
+ 'imagegammacorrect',
+ 'imagegd2',
+ 'imagegd',
+ 'imagegif',
+ 'imagegrabscreen',
+ 'imagegrabwindow',
+ 'imageinterlace',
+ 'imageistruecolor',
+ 'imagejpeg',
+ 'imagelayereffect',
+ 'imageline',
+ 'imageloadfont',
+ 'imagepalettecopy',
+ 'imagepalettetotruecolor',
+ 'imagepng',
+ 'imagepolygon',
+ 'imagepsbbox',
+ 'imagepsencodefont',
+ 'imagepsextendfont',
+ 'imagepsfreefont',
+ 'imagepsloadfont',
+ 'imagepsslantfont',
+ 'imagepstext',
+ 'imagerectangle',
+ 'imagerotate',
+ 'imagesavealpha',
+ 'imagescale',
+ 'imagesetbrush',
+ 'imagesetinterpolation',
+ 'imagesetpixel',
+ 'imagesetstyle',
+ 'imagesetthickness',
+ 'imagesettile',
+ 'imagestring',
+ 'imagestringup',
+ 'imagesx',
+ 'imagesy',
+ 'imagetruecolortopalette',
+ 'imagettfbbox',
+ 'imagettftext',
+ 'imagetypes',
+ 'imagewbmp',
+ 'imagewebp',
+ 'imagexbm',
+ 'iptcembed',
+ 'iptcparse',
+ 'jpeg2wbmp',
+ 'png2wbmp'),
+ 'GMP': ('gmp_abs',
+ 'gmp_add',
+ 'gmp_and',
+ 'gmp_clrbit',
+ 'gmp_cmp',
+ 'gmp_com',
+ 'gmp_div_q',
+ 'gmp_div_qr',
+ 'gmp_div_r',
+ 'gmp_div',
+ 'gmp_divexact',
+ 'gmp_fact',
+ 'gmp_gcd',
+ 'gmp_gcdext',
+ 'gmp_hamdist',
+ 'gmp_init',
+ 'gmp_intval',
+ 'gmp_invert',
+ 'gmp_jacobi',
+ 'gmp_legendre',
+ 'gmp_mod',
+ 'gmp_mul',
+ 'gmp_neg',
+ 'gmp_nextprime',
+ 'gmp_or',
+ 'gmp_perfect_square',
+ 'gmp_popcount',
+ 'gmp_pow',
+ 'gmp_powm',
+ 'gmp_prob_prime',
+ 'gmp_random',
+ 'gmp_scan0',
+ 'gmp_scan1',
+ 'gmp_setbit',
+ 'gmp_sign',
+ 'gmp_sqrt',
+ 'gmp_sqrtrem',
+ 'gmp_strval',
+ 'gmp_sub',
+ 'gmp_testbit',
+ 'gmp_xor'),
+ 'GeoIP': ('geoip_asnum_by_name',
+ 'geoip_continent_code_by_name',
+ 'geoip_country_code_by_name',
+ 'geoip_country_code3_by_name',
+ 'geoip_country_name_by_name',
+ 'geoip_database_info',
+ 'geoip_db_avail',
+ 'geoip_db_filename',
+ 'geoip_db_get_all_info',
+ 'geoip_domain_by_name',
+ 'geoip_id_by_name',
+ 'geoip_isp_by_name',
+ 'geoip_netspeedcell_by_name',
+ 'geoip_org_by_name',
+ 'geoip_record_by_name',
+ 'geoip_region_by_name',
+ 'geoip_region_name_by_code',
+ 'geoip_setup_custom_directory',
+ 'geoip_time_zone_by_country_and_region'),
+ 'Gettext': ('bind_textdomain_codeset',
+ 'bindtextdomain',
+ 'dcgettext',
+ 'dcngettext',
+ 'dgettext',
+ 'dngettext',
+ 'gettext',
+ 'ngettext',
+ 'textdomain'),
+ 'GnuPG': ('gnupg_adddecryptkey',
+ 'gnupg_addencryptkey',
+ 'gnupg_addsignkey',
+ 'gnupg_cleardecryptkeys',
+ 'gnupg_clearencryptkeys',
+ 'gnupg_clearsignkeys',
+ 'gnupg_decrypt',
+ 'gnupg_decryptverify',
+ 'gnupg_encrypt',
+ 'gnupg_encryptsign',
+ 'gnupg_export',
+ 'gnupg_geterror',
+ 'gnupg_getprotocol',
+ 'gnupg_import',
+ 'gnupg_init',
+ 'gnupg_keyinfo',
+ 'gnupg_setarmor',
+ 'gnupg_seterrormode',
+ 'gnupg_setsignmode',
+ 'gnupg_sign',
+ 'gnupg_verify'),
+ 'Gopher': ('gopher_parsedir',),
+ 'Grapheme': ('grapheme_extract',
+ 'grapheme_stripos',
+ 'grapheme_stristr',
+ 'grapheme_strlen',
+ 'grapheme_strpos',
+ 'grapheme_strripos',
+ 'grapheme_strrpos',
+ 'grapheme_strstr',
+ 'grapheme_substr'),
+ 'Gupnp': ('gupnp_context_get_host_ip',
+ 'gupnp_context_get_port',
+ 'gupnp_context_get_subscription_timeout',
+ 'gupnp_context_host_path',
+ 'gupnp_context_new',
+ 'gupnp_context_set_subscription_timeout',
+ 'gupnp_context_timeout_add',
+ 'gupnp_context_unhost_path',
+ 'gupnp_control_point_browse_start',
+ 'gupnp_control_point_browse_stop',
+ 'gupnp_control_point_callback_set',
+ 'gupnp_control_point_new',
+ 'gupnp_device_action_callback_set',
+ 'gupnp_device_info_get_service',
+ 'gupnp_device_info_get',
+ 'gupnp_root_device_get_available',
+ 'gupnp_root_device_get_relative_location',
+ 'gupnp_root_device_new',
+ 'gupnp_root_device_set_available',
+ 'gupnp_root_device_start',
+ 'gupnp_root_device_stop',
+ 'gupnp_service_action_get',
+ 'gupnp_service_action_return_error',
+ 'gupnp_service_action_return',
+ 'gupnp_service_action_set',
+ 'gupnp_service_freeze_notify',
+ 'gupnp_service_info_get_introspection',
+ 'gupnp_service_info_get',
+ 'gupnp_service_introspection_get_state_variable',
+ 'gupnp_service_notify',
+ 'gupnp_service_proxy_action_get',
+ 'gupnp_service_proxy_action_set',
+ 'gupnp_service_proxy_add_notify',
+ 'gupnp_service_proxy_callback_set',
+ 'gupnp_service_proxy_get_subscribed',
+ 'gupnp_service_proxy_remove_notify',
+ 'gupnp_service_proxy_set_subscribed',
+ 'gupnp_service_thaw_notify'),
+ 'HTTP': ('http_cache_etag',
+ 'http_cache_last_modified',
+ 'http_chunked_decode',
+ 'http_deflate',
+ 'http_inflate',
+ 'http_build_cookie',
+ 'http_date',
+ 'http_get_request_body_stream',
+ 'http_get_request_body',
+ 'http_get_request_headers',
+ 'http_match_etag',
+ 'http_match_modified',
+ 'http_match_request_header',
+ 'http_support',
+ 'http_negotiate_charset',
+ 'http_negotiate_content_type',
+ 'http_negotiate_language',
+ 'ob_deflatehandler',
+ 'ob_etaghandler',
+ 'ob_inflatehandler',
+ 'http_parse_cookie',
+ 'http_parse_headers',
+ 'http_parse_message',
+ 'http_parse_params',
+ 'http_persistent_handles_clean',
+ 'http_persistent_handles_count',
+ 'http_persistent_handles_ident',
+ 'http_get',
+ 'http_head',
+ 'http_post_data',
+ 'http_post_fields',
+ 'http_put_data',
+ 'http_put_file',
+ 'http_put_stream',
+ 'http_request_body_encode',
+ 'http_request_method_exists',
+ 'http_request_method_name',
+ 'http_request_method_register',
+ 'http_request_method_unregister',
+ 'http_request',
+ 'http_redirect',
+ 'http_send_content_disposition',
+ 'http_send_content_type',
+ 'http_send_data',
+ 'http_send_file',
+ 'http_send_last_modified',
+ 'http_send_status',
+ 'http_send_stream',
+ 'http_throttle',
+ 'http_build_str',
+ 'http_build_url'),
+ 'Hash': ('hash_algos',
+ 'hash_copy',
+ 'hash_file',
+ 'hash_final',
+ 'hash_hmac_file',
+ 'hash_hmac',
+ 'hash_init',
+ 'hash_pbkdf2',
+ 'hash_update_file',
+ 'hash_update_stream',
+ 'hash_update',
+ 'hash'),
+ 'Hyperwave': ('hw_Array2Objrec',
+ 'hw_changeobject',
+ 'hw_Children',
+ 'hw_ChildrenObj',
+ 'hw_Close',
+ 'hw_Connect',
+ 'hw_connection_info',
+ 'hw_cp',
+ 'hw_Deleteobject',
+ 'hw_DocByAnchor',
+ 'hw_DocByAnchorObj',
+ 'hw_Document_Attributes',
+ 'hw_Document_BodyTag',
+ 'hw_Document_Content',
+ 'hw_Document_SetContent',
+ 'hw_Document_Size',
+ 'hw_dummy',
+ 'hw_EditText',
+ 'hw_Error',
+ 'hw_ErrorMsg',
+ 'hw_Free_Document',
+ 'hw_GetAnchors',
+ 'hw_GetAnchorsObj',
+ 'hw_GetAndLock',
+ 'hw_GetChildColl',
+ 'hw_GetChildCollObj',
+ 'hw_GetChildDocColl',
+ 'hw_GetChildDocCollObj',
+ 'hw_GetObject',
+ 'hw_GetObjectByQuery',
+ 'hw_GetObjectByQueryColl',
+ 'hw_GetObjectByQueryCollObj',
+ 'hw_GetObjectByQueryObj',
+ 'hw_GetParents',
+ 'hw_GetParentsObj',
+ 'hw_getrellink',
+ 'hw_GetRemote',
+ 'hw_getremotechildren',
+ 'hw_GetSrcByDestObj',
+ 'hw_GetText',
+ 'hw_getusername',
+ 'hw_Identify',
+ 'hw_InCollections',
+ 'hw_Info',
+ 'hw_InsColl',
+ 'hw_InsDoc',
+ 'hw_insertanchors',
+ 'hw_InsertDocument',
+ 'hw_InsertObject',
+ 'hw_mapid',
+ 'hw_Modifyobject',
+ 'hw_mv',
+ 'hw_New_Document',
+ 'hw_objrec2array',
+ 'hw_Output_Document',
+ 'hw_pConnect',
+ 'hw_PipeDocument',
+ 'hw_Root',
+ 'hw_setlinkroot',
+ 'hw_stat',
+ 'hw_Unlock',
+ 'hw_Who'),
+ 'Hyperwave API': ('hwapi_attribute_new',
+ 'hwapi_content_new',
+ 'hwapi_hgcsp',
+ 'hwapi_object_new'),
+ 'IBM DB2': ('db2_autocommit',
+ 'db2_bind_param',
+ 'db2_client_info',
+ 'db2_close',
+ 'db2_column_privileges',
+ 'db2_columns',
+ 'db2_commit',
+ 'db2_conn_error',
+ 'db2_conn_errormsg',
+ 'db2_connect',
+ 'db2_cursor_type',
+ 'db2_escape_string',
+ 'db2_exec',
+ 'db2_execute',
+ 'db2_fetch_array',
+ 'db2_fetch_assoc',
+ 'db2_fetch_both',
+ 'db2_fetch_object',
+ 'db2_fetch_row',
+ 'db2_field_display_size',
+ 'db2_field_name',
+ 'db2_field_num',
+ 'db2_field_precision',
+ 'db2_field_scale',
+ 'db2_field_type',
+ 'db2_field_width',
+ 'db2_foreign_keys',
+ 'db2_free_result',
+ 'db2_free_stmt',
+ 'db2_get_option',
+ 'db2_last_insert_id',
+ 'db2_lob_read',
+ 'db2_next_result',
+ 'db2_num_fields',
+ 'db2_num_rows',
+ 'db2_pclose',
+ 'db2_pconnect',
+ 'db2_prepare',
+ 'db2_primary_keys',
+ 'db2_procedure_columns',
+ 'db2_procedures',
+ 'db2_result',
+ 'db2_rollback',
+ 'db2_server_info',
+ 'db2_set_option',
+ 'db2_special_columns',
+ 'db2_statistics',
+ 'db2_stmt_error',
+ 'db2_stmt_errormsg',
+ 'db2_table_privileges',
+ 'db2_tables'),
+ 'ID3': ('id3_get_frame_long_name',
+ 'id3_get_frame_short_name',
+ 'id3_get_genre_id',
+ 'id3_get_genre_list',
+ 'id3_get_genre_name',
+ 'id3_get_tag',
+ 'id3_get_version',
+ 'id3_remove_tag',
+ 'id3_set_tag'),
+ 'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'),
+ 'IIS': ('iis_add_server',
+ 'iis_get_dir_security',
+ 'iis_get_script_map',
+ 'iis_get_server_by_comment',
+ 'iis_get_server_by_path',
+ 'iis_get_server_rights',
+ 'iis_get_service_state',
+ 'iis_remove_server',
+ 'iis_set_app_settings',
+ 'iis_set_dir_security',
+ 'iis_set_script_map',
+ 'iis_set_server_rights',
+ 'iis_start_server',
+ 'iis_start_service',
+ 'iis_stop_server',
+ 'iis_stop_service'),
+ 'IMAP': ('imap_8bit',
+ 'imap_alerts',
+ 'imap_append',
+ 'imap_base64',
+ 'imap_binary',
+ 'imap_body',
+ 'imap_bodystruct',
+ 'imap_check',
+ 'imap_clearflag_full',
+ 'imap_close',
+ 'imap_create',
+ 'imap_createmailbox',
+ 'imap_delete',
+ 'imap_deletemailbox',
+ 'imap_errors',
+ 'imap_expunge',
+ 'imap_fetch_overview',
+ 'imap_fetchbody',
+ 'imap_fetchheader',
+ 'imap_fetchmime',
+ 'imap_fetchstructure',
+ 'imap_fetchtext',
+ 'imap_gc',
+ 'imap_get_quota',
+ 'imap_get_quotaroot',
+ 'imap_getacl',
+ 'imap_getmailboxes',
+ 'imap_getsubscribed',
+ 'imap_header',
+ 'imap_headerinfo',
+ 'imap_headers',
+ 'imap_last_error',
+ 'imap_list',
+ 'imap_listmailbox',
+ 'imap_listscan',
+ 'imap_listsubscribed',
+ 'imap_lsub',
+ 'imap_mail_compose',
+ 'imap_mail_copy',
+ 'imap_mail_move',
+ 'imap_mail',
+ 'imap_mailboxmsginfo',
+ 'imap_mime_header_decode',
+ 'imap_msgno',
+ 'imap_num_msg',
+ 'imap_num_recent',
+ 'imap_open',
+ 'imap_ping',
+ 'imap_qprint',
+ 'imap_rename',
+ 'imap_renamemailbox',
+ 'imap_reopen',
+ 'imap_rfc822_parse_adrlist',
+ 'imap_rfc822_parse_headers',
+ 'imap_rfc822_write_address',
+ 'imap_savebody',
+ 'imap_scan',
+ 'imap_scanmailbox',
+ 'imap_search',
+ 'imap_set_quota',
+ 'imap_setacl',
+ 'imap_setflag_full',
+ 'imap_sort',
+ 'imap_status',
+ 'imap_subscribe',
+ 'imap_thread',
+ 'imap_timeout',
+ 'imap_uid',
+ 'imap_undelete',
+ 'imap_unsubscribe',
+ 'imap_utf7_decode',
+ 'imap_utf7_encode',
+ 'imap_utf8'),
+ 'Informix': ('ifx_affected_rows',
+ 'ifx_blobinfile_mode',
+ 'ifx_byteasvarchar',
+ 'ifx_close',
+ 'ifx_connect',
+ 'ifx_copy_blob',
+ 'ifx_create_blob',
+ 'ifx_create_char',
+ 'ifx_do',
+ 'ifx_error',
+ 'ifx_errormsg',
+ 'ifx_fetch_row',
+ 'ifx_fieldproperties',
+ 'ifx_fieldtypes',
+ 'ifx_free_blob',
+ 'ifx_free_char',
+ 'ifx_free_result',
+ 'ifx_get_blob',
+ 'ifx_get_char',
+ 'ifx_getsqlca',
+ 'ifx_htmltbl_result',
+ 'ifx_nullformat',
+ 'ifx_num_fields',
+ 'ifx_num_rows',
+ 'ifx_pconnect',
+ 'ifx_prepare',
+ 'ifx_query',
+ 'ifx_textasvarchar',
+ 'ifx_update_blob',
+ 'ifx_update_char',
+ 'ifxus_close_slob',
+ 'ifxus_create_slob',
+ 'ifxus_free_slob',
+ 'ifxus_open_slob',
+ 'ifxus_read_slob',
+ 'ifxus_seek_slob',
+ 'ifxus_tell_slob',
+ 'ifxus_write_slob'),
+ 'Ingres': ('ingres_autocommit_state',
+ 'ingres_autocommit',
+ 'ingres_charset',
+ 'ingres_close',
+ 'ingres_commit',
+ 'ingres_connect',
+ 'ingres_cursor',
+ 'ingres_errno',
+ 'ingres_error',
+ 'ingres_errsqlstate',
+ 'ingres_escape_string',
+ 'ingres_execute',
+ 'ingres_fetch_array',
+ 'ingres_fetch_assoc',
+ 'ingres_fetch_object',
+ 'ingres_fetch_proc_return',
+ 'ingres_fetch_row',
+ 'ingres_field_length',
+ 'ingres_field_name',
+ 'ingres_field_nullable',
+ 'ingres_field_precision',
+ 'ingres_field_scale',
+ 'ingres_field_type',
+ 'ingres_free_result',
+ 'ingres_next_error',
+ 'ingres_num_fields',
+ 'ingres_num_rows',
+ 'ingres_pconnect',
+ 'ingres_prepare',
+ 'ingres_query',
+ 'ingres_result_seek',
+ 'ingres_rollback',
+ 'ingres_set_environment',
+ 'ingres_unbuffered_query'),
+ 'Inotify': ('inotify_add_watch',
+ 'inotify_init',
+ 'inotify_queue_len',
+ 'inotify_read',
+ 'inotify_rm_watch'),
+ 'JSON': ('json_decode',
+ 'json_encode',
+ 'json_last_error_msg',
+ 'json_last_error'),
+ 'Java': ('java_last_exception_clear', 'java_last_exception_get'),
+ 'Judy': ('judy_type', 'judy_version'),
+ 'KADM5': ('kadm5_chpass_principal',
+ 'kadm5_create_principal',
+ 'kadm5_delete_principal',
+ 'kadm5_destroy',
+ 'kadm5_flush',
+ 'kadm5_get_policies',
+ 'kadm5_get_principal',
+ 'kadm5_get_principals',
+ 'kadm5_init_with_password',
+ 'kadm5_modify_principal'),
+ 'LDAP': ('ldap_8859_to_t61',
+ 'ldap_add',
+ 'ldap_bind',
+ 'ldap_close',
+ 'ldap_compare',
+ 'ldap_connect',
+ 'ldap_control_paged_result_response',
+ 'ldap_control_paged_result',
+ 'ldap_count_entries',
+ 'ldap_delete',
+ 'ldap_dn2ufn',
+ 'ldap_err2str',
+ 'ldap_errno',
+ 'ldap_error',
+ 'ldap_explode_dn',
+ 'ldap_first_attribute',
+ 'ldap_first_entry',
+ 'ldap_first_reference',
+ 'ldap_free_result',
+ 'ldap_get_attributes',
+ 'ldap_get_dn',
+ 'ldap_get_entries',
+ 'ldap_get_option',
+ 'ldap_get_values_len',
+ 'ldap_get_values',
+ 'ldap_list',
+ 'ldap_mod_add',
+ 'ldap_mod_del',
+ 'ldap_mod_replace',
+ 'ldap_modify',
+ 'ldap_next_attribute',
+ 'ldap_next_entry',
+ 'ldap_next_reference',
+ 'ldap_parse_reference',
+ 'ldap_parse_result',
+ 'ldap_read',
+ 'ldap_rename',
+ 'ldap_sasl_bind',
+ 'ldap_search',
+ 'ldap_set_option',
+ 'ldap_set_rebind_proc',
+ 'ldap_sort',
+ 'ldap_start_tls',
+ 'ldap_t61_to_8859',
+ 'ldap_unbind'),
+ 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
+ 'Libevent': ('event_add',
+ 'event_base_free',
+ 'event_base_loop',
+ 'event_base_loopbreak',
+ 'event_base_loopexit',
+ 'event_base_new',
+ 'event_base_priority_init',
+ 'event_base_set',
+ 'event_buffer_base_set',
+ 'event_buffer_disable',
+ 'event_buffer_enable',
+ 'event_buffer_fd_set',
+ 'event_buffer_free',
+ 'event_buffer_new',
+ 'event_buffer_priority_set',
+ 'event_buffer_read',
+ 'event_buffer_set_callback',
+ 'event_buffer_timeout_set',
+ 'event_buffer_watermark_set',
+ 'event_buffer_write',
+ 'event_del',
+ 'event_free',
+ 'event_new',
+ 'event_set'),
+ 'Lotus Notes': ('notes_body',
+ 'notes_copy_db',
+ 'notes_create_db',
+ 'notes_create_note',
+ 'notes_drop_db',
+ 'notes_find_note',
+ 'notes_header_info',
+ 'notes_list_msgs',
+ 'notes_mark_read',
+ 'notes_mark_unread',
+ 'notes_nav_create',
+ 'notes_search',
+ 'notes_unread',
+ 'notes_version'),
+ 'MCVE': ('m_checkstatus',
+ 'm_completeauthorizations',
+ 'm_connect',
+ 'm_connectionerror',
+ 'm_deletetrans',
+ 'm_destroyconn',
+ 'm_destroyengine',
+ 'm_getcell',
+ 'm_getcellbynum',
+ 'm_getcommadelimited',
+ 'm_getheader',
+ 'm_initconn',
+ 'm_initengine',
+ 'm_iscommadelimited',
+ 'm_maxconntimeout',
+ 'm_monitor',
+ 'm_numcolumns',
+ 'm_numrows',
+ 'm_parsecommadelimited',
+ 'm_responsekeys',
+ 'm_responseparam',
+ 'm_returnstatus',
+ 'm_setblocking',
+ 'm_setdropfile',
+ 'm_setip',
+ 'm_setssl_cafile',
+ 'm_setssl_files',
+ 'm_setssl',
+ 'm_settimeout',
+ 'm_sslcert_gen_hash',
+ 'm_transactionssent',
+ 'm_transinqueue',
+ 'm_transkeyval',
+ 'm_transnew',
+ 'm_transsend',
+ 'm_uwait',
+ 'm_validateidentifier',
+ 'm_verifyconnection',
+ 'm_verifysslcert'),
+ 'Mail': ('ezmlm_hash', 'mail'),
+ 'Mailparse': ('mailparse_determine_best_xfer_encoding',
+ 'mailparse_msg_create',
+ 'mailparse_msg_extract_part_file',
+ 'mailparse_msg_extract_part',
+ 'mailparse_msg_extract_whole_part_file',
+ 'mailparse_msg_free',
+ 'mailparse_msg_get_part_data',
+ 'mailparse_msg_get_part',
+ 'mailparse_msg_get_structure',
+ 'mailparse_msg_parse_file',
+ 'mailparse_msg_parse',
+ 'mailparse_rfc822_parse_addresses',
+ 'mailparse_stream_encode',
+ 'mailparse_uudecode_all'),
+ 'Math': ('abs',
+ 'acos',
+ 'acosh',
+ 'asin',
+ 'asinh',
+ 'atan2',
+ 'atan',
+ 'atanh',
+ 'base_convert',
+ 'bindec',
+ 'ceil',
+ 'cos',
+ 'cosh',
+ 'decbin',
+ 'dechex',
+ 'decoct',
+ 'deg2rad',
+ 'exp',
+ 'expm1',
+ 'floor',
+ 'fmod',
+ 'getrandmax',
+ 'hexdec',
+ 'hypot',
+ 'is_finite',
+ 'is_infinite',
+ 'is_nan',
+ 'lcg_value',
+ 'log10',
+ 'log1p',
+ 'log',
+ 'max',
+ 'min',
+ 'mt_getrandmax',
+ 'mt_rand',
+ 'mt_srand',
+ 'octdec',
+ 'pi',
+ 'pow',
+ 'rad2deg',
+ 'rand',
+ 'round',
+ 'sin',
+ 'sinh',
+ 'sqrt',
+ 'srand',
+ 'tan',
+ 'tanh'),
+ 'MaxDB': ('maxdb_affected_rows',
+ 'maxdb_autocommit',
+ 'maxdb_bind_param',
+ 'maxdb_bind_result',
+ 'maxdb_change_user',
+ 'maxdb_character_set_name',
+ 'maxdb_client_encoding',
+ 'maxdb_close_long_data',
+ 'maxdb_close',
+ 'maxdb_commit',
+ 'maxdb_connect_errno',
+ 'maxdb_connect_error',
+ 'maxdb_connect',
+ 'maxdb_data_seek',
+ 'maxdb_debug',
+ 'maxdb_disable_reads_from_master',
+ 'maxdb_disable_rpl_parse',
+ 'maxdb_dump_debug_info',
+ 'maxdb_embedded_connect',
+ 'maxdb_enable_reads_from_master',
+ 'maxdb_enable_rpl_parse',
+ 'maxdb_errno',
+ 'maxdb_error',
+ 'maxdb_escape_string',
+ 'maxdb_execute',
+ 'maxdb_fetch_array',
+ 'maxdb_fetch_assoc',
+ 'maxdb_fetch_field_direct',
+ 'maxdb_fetch_field',
+ 'maxdb_fetch_fields',
+ 'maxdb_fetch_lengths',
+ 'maxdb_fetch_object',
+ 'maxdb_fetch_row',
+ 'maxdb_fetch',
+ 'maxdb_field_count',
+ 'maxdb_field_seek',
+ 'maxdb_field_tell',
+ 'maxdb_free_result',
+ 'maxdb_get_client_info',
+ 'maxdb_get_client_version',
+ 'maxdb_get_host_info',
+ 'maxdb_get_metadata',
+ 'maxdb_get_proto_info',
+ 'maxdb_get_server_info',
+ 'maxdb_get_server_version',
+ 'maxdb_info',
+ 'maxdb_init',
+ 'maxdb_insert_id',
+ 'maxdb_kill',
+ 'maxdb_master_query',
+ 'maxdb_more_results',
+ 'maxdb_multi_query',
+ 'maxdb_next_result',
+ 'maxdb_num_fields',
+ 'maxdb_num_rows',
+ 'maxdb_options',
+ 'maxdb_param_count',
+ 'maxdb_ping',
+ 'maxdb_prepare',
+ 'maxdb_query',
+ 'maxdb_real_connect',
+ 'maxdb_real_escape_string',
+ 'maxdb_real_query',
+ 'maxdb_report',
+ 'maxdb_rollback',
+ 'maxdb_rpl_parse_enabled',
+ 'maxdb_rpl_probe',
+ 'maxdb_rpl_query_type',
+ 'maxdb_select_db',
+ 'maxdb_send_long_data',
+ 'maxdb_send_query',
+ 'maxdb_server_end',
+ 'maxdb_server_init',
+ 'maxdb_set_opt',
+ 'maxdb_sqlstate',
+ 'maxdb_ssl_set',
+ 'maxdb_stat',
+ 'maxdb_stmt_affected_rows',
+ 'maxdb_stmt_bind_param',
+ 'maxdb_stmt_bind_result',
+ 'maxdb_stmt_close_long_data',
+ 'maxdb_stmt_close',
+ 'maxdb_stmt_data_seek',
+ 'maxdb_stmt_errno',
+ 'maxdb_stmt_error',
+ 'maxdb_stmt_execute',
+ 'maxdb_stmt_fetch',
+ 'maxdb_stmt_free_result',
+ 'maxdb_stmt_init',
+ 'maxdb_stmt_num_rows',
+ 'maxdb_stmt_param_count',
+ 'maxdb_stmt_prepare',
+ 'maxdb_stmt_reset',
+ 'maxdb_stmt_result_metadata',
+ 'maxdb_stmt_send_long_data',
+ 'maxdb_stmt_sqlstate',
+ 'maxdb_stmt_store_result',
+ 'maxdb_store_result',
+ 'maxdb_thread_id',
+ 'maxdb_thread_safe',
+ 'maxdb_use_result',
+ 'maxdb_warning_count'),
+ 'Mcrypt': ('mcrypt_cbc',
+ 'mcrypt_cfb',
+ 'mcrypt_create_iv',
+ 'mcrypt_decrypt',
+ 'mcrypt_ecb',
+ 'mcrypt_enc_get_algorithms_name',
+ 'mcrypt_enc_get_block_size',
+ 'mcrypt_enc_get_iv_size',
+ 'mcrypt_enc_get_key_size',
+ 'mcrypt_enc_get_modes_name',
+ 'mcrypt_enc_get_supported_key_sizes',
+ 'mcrypt_enc_is_block_algorithm_mode',
+ 'mcrypt_enc_is_block_algorithm',
+ 'mcrypt_enc_is_block_mode',
+ 'mcrypt_enc_self_test',
+ 'mcrypt_encrypt',
+ 'mcrypt_generic_deinit',
+ 'mcrypt_generic_end',
+ 'mcrypt_generic_init',
+ 'mcrypt_generic',
+ 'mcrypt_get_block_size',
+ 'mcrypt_get_cipher_name',
+ 'mcrypt_get_iv_size',
+ 'mcrypt_get_key_size',
+ 'mcrypt_list_algorithms',
+ 'mcrypt_list_modes',
+ 'mcrypt_module_close',
+ 'mcrypt_module_get_algo_block_size',
+ 'mcrypt_module_get_algo_key_size',
+ 'mcrypt_module_get_supported_key_sizes',
+ 'mcrypt_module_is_block_algorithm_mode',
+ 'mcrypt_module_is_block_algorithm',
+ 'mcrypt_module_is_block_mode',
+ 'mcrypt_module_open',
+ 'mcrypt_module_self_test',
+ 'mcrypt_ofb',
+ 'mdecrypt_generic'),
+ 'Memcache': ('memcache_debug',),
+ 'Mhash': ('mhash_count',
+ 'mhash_get_block_size',
+ 'mhash_get_hash_name',
+ 'mhash_keygen_s2k',
+ 'mhash'),
+ 'Ming': ('ming_keypress',
+ 'ming_setcubicthreshold',
+ 'ming_setscale',
+ 'ming_setswfcompression',
+ 'ming_useconstants',
+ 'ming_useswfversion'),
+ 'Misc.': ('connection_aborted',
+ 'connection_status',
+ 'connection_timeout',
+ 'constant',
+ 'define',
+ 'defined',
+ 'die',
+ 'eval',
+ 'exit',
+ 'get_browser',
+ '__halt_compiler',
+ 'highlight_file',
+ 'highlight_string',
+ 'ignore_user_abort',
+ 'pack',
+ 'php_check_syntax',
+ 'php_strip_whitespace',
+ 'show_source',
+ 'sleep',
+ 'sys_getloadavg',
+ 'time_nanosleep',
+ 'time_sleep_until',
+ 'uniqid',
+ 'unpack',
+ 'usleep'),
+ 'Mongo': ('bson_decode', 'bson_encode'),
+ 'Msession': ('msession_connect',
+ 'msession_count',
+ 'msession_create',
+ 'msession_destroy',
+ 'msession_disconnect',
+ 'msession_find',
+ 'msession_get_array',
+ 'msession_get_data',
+ 'msession_get',
+ 'msession_inc',
+ 'msession_list',
+ 'msession_listvar',
+ 'msession_lock',
+ 'msession_plugin',
+ 'msession_randstr',
+ 'msession_set_array',
+ 'msession_set_data',
+ 'msession_set',
+ 'msession_timeout',
+ 'msession_uniq',
+ 'msession_unlock'),
+ 'Mssql': ('mssql_bind',
+ 'mssql_close',
+ 'mssql_connect',
+ 'mssql_data_seek',
+ 'mssql_execute',
+ 'mssql_fetch_array',
+ 'mssql_fetch_assoc',
+ 'mssql_fetch_batch',
+ 'mssql_fetch_field',
+ 'mssql_fetch_object',
+ 'mssql_fetch_row',
+ 'mssql_field_length',
+ 'mssql_field_name',
+ 'mssql_field_seek',
+ 'mssql_field_type',
+ 'mssql_free_result',
+ 'mssql_free_statement',
+ 'mssql_get_last_message',
+ 'mssql_guid_string',
+ 'mssql_init',
+ 'mssql_min_error_severity',
+ 'mssql_min_message_severity',
+ 'mssql_next_result',
+ 'mssql_num_fields',
+ 'mssql_num_rows',
+ 'mssql_pconnect',
+ 'mssql_query',
+ 'mssql_result',
+ 'mssql_rows_affected',
+ 'mssql_select_db'),
+ 'Multibyte String': ('mb_check_encoding',
+ 'mb_convert_case',
+ 'mb_convert_encoding',
+ 'mb_convert_kana',
+ 'mb_convert_variables',
+ 'mb_decode_mimeheader',
+ 'mb_decode_numericentity',
+ 'mb_detect_encoding',
+ 'mb_detect_order',
+ 'mb_encode_mimeheader',
+ 'mb_encode_numericentity',
+ 'mb_encoding_aliases',
+ 'mb_ereg_match',
+ 'mb_ereg_replace_callback',
+ 'mb_ereg_replace',
+ 'mb_ereg_search_getpos',
+ 'mb_ereg_search_getregs',
+ 'mb_ereg_search_init',
+ 'mb_ereg_search_pos',
+ 'mb_ereg_search_regs',
+ 'mb_ereg_search_setpos',
+ 'mb_ereg_search',
+ 'mb_ereg',
+ 'mb_eregi_replace',
+ 'mb_eregi',
+ 'mb_get_info',
+ 'mb_http_input',
+ 'mb_http_output',
+ 'mb_internal_encoding',
+ 'mb_language',
+ 'mb_list_encodings',
+ 'mb_output_handler',
+ 'mb_parse_str',
+ 'mb_preferred_mime_name',
+ 'mb_regex_encoding',
+ 'mb_regex_set_options',
+ 'mb_send_mail',
+ 'mb_split',
+ 'mb_strcut',
+ 'mb_strimwidth',
+ 'mb_stripos',
+ 'mb_stristr',
+ 'mb_strlen',
+ 'mb_strpos',
+ 'mb_strrchr',
+ 'mb_strrichr',
+ 'mb_strripos',
+ 'mb_strrpos',
+ 'mb_strstr',
+ 'mb_strtolower',
+ 'mb_strtoupper',
+ 'mb_strwidth',
+ 'mb_substitute_character',
+ 'mb_substr_count',
+ 'mb_substr'),
+ 'MySQL': ('mysql_affected_rows',
+ 'mysql_client_encoding',
+ 'mysql_close',
+ 'mysql_connect',
+ 'mysql_create_db',
+ 'mysql_data_seek',
+ 'mysql_db_name',
+ 'mysql_db_query',
+ 'mysql_drop_db',
+ 'mysql_errno',
+ 'mysql_error',
+ 'mysql_escape_string',
+ 'mysql_fetch_array',
+ 'mysql_fetch_assoc',
+ 'mysql_fetch_field',
+ 'mysql_fetch_lengths',
+ 'mysql_fetch_object',
+ 'mysql_fetch_row',
+ 'mysql_field_flags',
+ 'mysql_field_len',
+ 'mysql_field_name',
+ 'mysql_field_seek',
+ 'mysql_field_table',
+ 'mysql_field_type',
+ 'mysql_free_result',
+ 'mysql_get_client_info',
+ 'mysql_get_host_info',
+ 'mysql_get_proto_info',
+ 'mysql_get_server_info',
+ 'mysql_info',
+ 'mysql_insert_id',
+ 'mysql_list_dbs',
+ 'mysql_list_fields',
+ 'mysql_list_processes',
+ 'mysql_list_tables',
+ 'mysql_num_fields',
+ 'mysql_num_rows',
+ 'mysql_pconnect',
+ 'mysql_ping',
+ 'mysql_query',
+ 'mysql_real_escape_string',
+ 'mysql_result',
+ 'mysql_select_db',
+ 'mysql_set_charset',
+ 'mysql_stat',
+ 'mysql_tablename',
+ 'mysql_thread_id',
+ 'mysql_unbuffered_query'),
+ 'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'),
+ 'Mysqlnd_ms': ('mysqlnd_ms_dump_servers',
+ 'mysqlnd_ms_fabric_select_global',
+ 'mysqlnd_ms_fabric_select_shard',
+ 'mysqlnd_ms_get_last_gtid',
+ 'mysqlnd_ms_get_last_used_connection',
+ 'mysqlnd_ms_get_stats',
+ 'mysqlnd_ms_match_wild',
+ 'mysqlnd_ms_query_is_select',
+ 'mysqlnd_ms_set_qos',
+ 'mysqlnd_ms_set_user_pick_server'),
+ 'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd',
+ 'mysqlnd_uh_set_connection_proxy',
+ 'mysqlnd_uh_set_statement_proxy'),
+ 'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'),
+ 'Ncurses': ('ncurses_addch',
+ 'ncurses_addchnstr',
+ 'ncurses_addchstr',
+ 'ncurses_addnstr',
+ 'ncurses_addstr',
+ 'ncurses_assume_default_colors',
+ 'ncurses_attroff',
+ 'ncurses_attron',
+ 'ncurses_attrset',
+ 'ncurses_baudrate',
+ 'ncurses_beep',
+ 'ncurses_bkgd',
+ 'ncurses_bkgdset',
+ 'ncurses_border',
+ 'ncurses_bottom_panel',
+ 'ncurses_can_change_color',
+ 'ncurses_cbreak',
+ 'ncurses_clear',
+ 'ncurses_clrtobot',
+ 'ncurses_clrtoeol',
+ 'ncurses_color_content',
+ 'ncurses_color_set',
+ 'ncurses_curs_set',
+ 'ncurses_def_prog_mode',
+ 'ncurses_def_shell_mode',
+ 'ncurses_define_key',
+ 'ncurses_del_panel',
+ 'ncurses_delay_output',
+ 'ncurses_delch',
+ 'ncurses_deleteln',
+ 'ncurses_delwin',
+ 'ncurses_doupdate',
+ 'ncurses_echo',
+ 'ncurses_echochar',
+ 'ncurses_end',
+ 'ncurses_erase',
+ 'ncurses_erasechar',
+ 'ncurses_filter',
+ 'ncurses_flash',
+ 'ncurses_flushinp',
+ 'ncurses_getch',
+ 'ncurses_getmaxyx',
+ 'ncurses_getmouse',
+ 'ncurses_getyx',
+ 'ncurses_halfdelay',
+ 'ncurses_has_colors',
+ 'ncurses_has_ic',
+ 'ncurses_has_il',
+ 'ncurses_has_key',
+ 'ncurses_hide_panel',
+ 'ncurses_hline',
+ 'ncurses_inch',
+ 'ncurses_init_color',
+ 'ncurses_init_pair',
+ 'ncurses_init',
+ 'ncurses_insch',
+ 'ncurses_insdelln',
+ 'ncurses_insertln',
+ 'ncurses_insstr',
+ 'ncurses_instr',
+ 'ncurses_isendwin',
+ 'ncurses_keyok',
+ 'ncurses_keypad',
+ 'ncurses_killchar',
+ 'ncurses_longname',
+ 'ncurses_meta',
+ 'ncurses_mouse_trafo',
+ 'ncurses_mouseinterval',
+ 'ncurses_mousemask',
+ 'ncurses_move_panel',
+ 'ncurses_move',
+ 'ncurses_mvaddch',
+ 'ncurses_mvaddchnstr',
+ 'ncurses_mvaddchstr',
+ 'ncurses_mvaddnstr',
+ 'ncurses_mvaddstr',
+ 'ncurses_mvcur',
+ 'ncurses_mvdelch',
+ 'ncurses_mvgetch',
+ 'ncurses_mvhline',
+ 'ncurses_mvinch',
+ 'ncurses_mvvline',
+ 'ncurses_mvwaddstr',
+ 'ncurses_napms',
+ 'ncurses_new_panel',
+ 'ncurses_newpad',
+ 'ncurses_newwin',
+ 'ncurses_nl',
+ 'ncurses_nocbreak',
+ 'ncurses_noecho',
+ 'ncurses_nonl',
+ 'ncurses_noqiflush',
+ 'ncurses_noraw',
+ 'ncurses_pair_content',
+ 'ncurses_panel_above',
+ 'ncurses_panel_below',
+ 'ncurses_panel_window',
+ 'ncurses_pnoutrefresh',
+ 'ncurses_prefresh',
+ 'ncurses_putp',
+ 'ncurses_qiflush',
+ 'ncurses_raw',
+ 'ncurses_refresh',
+ 'ncurses_replace_panel',
+ 'ncurses_reset_prog_mode',
+ 'ncurses_reset_shell_mode',
+ 'ncurses_resetty',
+ 'ncurses_savetty',
+ 'ncurses_scr_dump',
+ 'ncurses_scr_init',
+ 'ncurses_scr_restore',
+ 'ncurses_scr_set',
+ 'ncurses_scrl',
+ 'ncurses_show_panel',
+ 'ncurses_slk_attr',
+ 'ncurses_slk_attroff',
+ 'ncurses_slk_attron',
+ 'ncurses_slk_attrset',
+ 'ncurses_slk_clear',
+ 'ncurses_slk_color',
+ 'ncurses_slk_init',
+ 'ncurses_slk_noutrefresh',
+ 'ncurses_slk_refresh',
+ 'ncurses_slk_restore',
+ 'ncurses_slk_set',
+ 'ncurses_slk_touch',
+ 'ncurses_standend',
+ 'ncurses_standout',
+ 'ncurses_start_color',
+ 'ncurses_termattrs',
+ 'ncurses_termname',
+ 'ncurses_timeout',
+ 'ncurses_top_panel',
+ 'ncurses_typeahead',
+ 'ncurses_ungetch',
+ 'ncurses_ungetmouse',
+ 'ncurses_update_panels',
+ 'ncurses_use_default_colors',
+ 'ncurses_use_env',
+ 'ncurses_use_extended_names',
+ 'ncurses_vidattr',
+ 'ncurses_vline',
+ 'ncurses_waddch',
+ 'ncurses_waddstr',
+ 'ncurses_wattroff',
+ 'ncurses_wattron',
+ 'ncurses_wattrset',
+ 'ncurses_wborder',
+ 'ncurses_wclear',
+ 'ncurses_wcolor_set',
+ 'ncurses_werase',
+ 'ncurses_wgetch',
+ 'ncurses_whline',
+ 'ncurses_wmouse_trafo',
+ 'ncurses_wmove',
+ 'ncurses_wnoutrefresh',
+ 'ncurses_wrefresh',
+ 'ncurses_wstandend',
+ 'ncurses_wstandout',
+ 'ncurses_wvline'),
+ 'Network': ('checkdnsrr',
+ 'closelog',
+ 'define_syslog_variables',
+ 'dns_check_record',
+ 'dns_get_mx',
+ 'dns_get_record',
+ 'fsockopen',
+ 'gethostbyaddr',
+ 'gethostbyname',
+ 'gethostbynamel',
+ 'gethostname',
+ 'getmxrr',
+ 'getprotobyname',
+ 'getprotobynumber',
+ 'getservbyname',
+ 'getservbyport',
+ 'header_register_callback',
+ 'header_remove',
+ 'header',
+ 'headers_list',
+ 'headers_sent',
+ 'http_response_code',
+ 'inet_ntop',
+ 'inet_pton',
+ 'ip2long',
+ 'long2ip',
+ 'openlog',
+ 'pfsockopen',
+ 'setcookie',
+ 'setrawcookie',
+ 'socket_get_status',
+ 'socket_set_blocking',
+ 'socket_set_timeout',
+ 'syslog'),
+ 'Newt': ('newt_bell',
+ 'newt_button_bar',
+ 'newt_button',
+ 'newt_centered_window',
+ 'newt_checkbox_get_value',
+ 'newt_checkbox_set_flags',
+ 'newt_checkbox_set_value',
+ 'newt_checkbox_tree_add_item',
+ 'newt_checkbox_tree_find_item',
+ 'newt_checkbox_tree_get_current',
+ 'newt_checkbox_tree_get_entry_value',
+ 'newt_checkbox_tree_get_multi_selection',
+ 'newt_checkbox_tree_get_selection',
+ 'newt_checkbox_tree_multi',
+ 'newt_checkbox_tree_set_current',
+ 'newt_checkbox_tree_set_entry_value',
+ 'newt_checkbox_tree_set_entry',
+ 'newt_checkbox_tree_set_width',
+ 'newt_checkbox_tree',
+ 'newt_checkbox',
+ 'newt_clear_key_buffer',
+ 'newt_cls',
+ 'newt_compact_button',
+ 'newt_component_add_callback',
+ 'newt_component_takes_focus',
+ 'newt_create_grid',
+ 'newt_cursor_off',
+ 'newt_cursor_on',
+ 'newt_delay',
+ 'newt_draw_form',
+ 'newt_draw_root_text',
+ 'newt_entry_get_value',
+ 'newt_entry_set_filter',
+ 'newt_entry_set_flags',
+ 'newt_entry_set',
+ 'newt_entry',
+ 'newt_finished',
+ 'newt_form_add_component',
+ 'newt_form_add_components',
+ 'newt_form_add_hot_key',
+ 'newt_form_destroy',
+ 'newt_form_get_current',
+ 'newt_form_run',
+ 'newt_form_set_background',
+ 'newt_form_set_height',
+ 'newt_form_set_size',
+ 'newt_form_set_timer',
+ 'newt_form_set_width',
+ 'newt_form_watch_fd',
+ 'newt_form',
+ 'newt_get_screen_size',
+ 'newt_grid_add_components_to_form',
+ 'newt_grid_basic_window',
+ 'newt_grid_free',
+ 'newt_grid_get_size',
+ 'newt_grid_h_close_stacked',
+ 'newt_grid_h_stacked',
+ 'newt_grid_place',
+ 'newt_grid_set_field',
+ 'newt_grid_simple_window',
+ 'newt_grid_v_close_stacked',
+ 'newt_grid_v_stacked',
+ 'newt_grid_wrapped_window_at',
+ 'newt_grid_wrapped_window',
+ 'newt_init',
+ 'newt_label_set_text',
+ 'newt_label',
+ 'newt_listbox_append_entry',
+ 'newt_listbox_clear_selection',
+ 'newt_listbox_clear',
+ 'newt_listbox_delete_entry',
+ 'newt_listbox_get_current',
+ 'newt_listbox_get_selection',
+ 'newt_listbox_insert_entry',
+ 'newt_listbox_item_count',
+ 'newt_listbox_select_item',
+ 'newt_listbox_set_current_by_key',
+ 'newt_listbox_set_current',
+ 'newt_listbox_set_data',
+ 'newt_listbox_set_entry',
+ 'newt_listbox_set_width',
+ 'newt_listbox',
+ 'newt_listitem_get_data',
+ 'newt_listitem_set',
+ 'newt_listitem',
+ 'newt_open_window',
+ 'newt_pop_help_line',
+ 'newt_pop_window',
+ 'newt_push_help_line',
+ 'newt_radio_get_current',
+ 'newt_radiobutton',
+ 'newt_redraw_help_line',
+ 'newt_reflow_text',
+ 'newt_refresh',
+ 'newt_resize_screen',
+ 'newt_resume',
+ 'newt_run_form',
+ 'newt_scale_set',
+ 'newt_scale',
+ 'newt_scrollbar_set',
+ 'newt_set_help_callback',
+ 'newt_set_suspend_callback',
+ 'newt_suspend',
+ 'newt_textbox_get_num_lines',
+ 'newt_textbox_reflowed',
+ 'newt_textbox_set_height',
+ 'newt_textbox_set_text',
+ 'newt_textbox',
+ 'newt_vertical_scrollbar',
+ 'newt_wait_for_key',
+ 'newt_win_choice',
+ 'newt_win_entries',
+ 'newt_win_menu',
+ 'newt_win_message',
+ 'newt_win_messagev',
+ 'newt_win_ternary'),
+ 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
+ 'OCI8': ('oci_bind_array_by_name',
+ 'oci_bind_by_name',
+ 'oci_cancel',
+ 'oci_client_version',
+ 'oci_close',
+ 'oci_commit',
+ 'oci_connect',
+ 'oci_define_by_name',
+ 'oci_error',
+ 'oci_execute',
+ 'oci_fetch_all',
+ 'oci_fetch_array',
+ 'oci_fetch_assoc',
+ 'oci_fetch_object',
+ 'oci_fetch_row',
+ 'oci_fetch',
+ 'oci_field_is_null',
+ 'oci_field_name',
+ 'oci_field_precision',
+ 'oci_field_scale',
+ 'oci_field_size',
+ 'oci_field_type_raw',
+ 'oci_field_type',
+ 'oci_free_descriptor',
+ 'oci_free_statement',
+ 'oci_get_implicit_resultset',
+ 'oci_internal_debug',
+ 'oci_lob_copy',
+ 'oci_lob_is_equal',
+ 'oci_new_collection',
+ 'oci_new_connect',
+ 'oci_new_cursor',
+ 'oci_new_descriptor',
+ 'oci_num_fields',
+ 'oci_num_rows',
+ 'oci_parse',
+ 'oci_password_change',
+ 'oci_pconnect',
+ 'oci_result',
+ 'oci_rollback',
+ 'oci_server_version',
+ 'oci_set_action',
+ 'oci_set_client_identifier',
+ 'oci_set_client_info',
+ 'oci_set_edition',
+ 'oci_set_module_name',
+ 'oci_set_prefetch',
+ 'oci_statement_type'),
+ 'ODBC': ('odbc_autocommit',
+ 'odbc_binmode',
+ 'odbc_close_all',
+ 'odbc_close',
+ 'odbc_columnprivileges',
+ 'odbc_columns',
+ 'odbc_commit',
+ 'odbc_connect',
+ 'odbc_cursor',
+ 'odbc_data_source',
+ 'odbc_do',
+ 'odbc_error',
+ 'odbc_errormsg',
+ 'odbc_exec',
+ 'odbc_execute',
+ 'odbc_fetch_array',
+ 'odbc_fetch_into',
+ 'odbc_fetch_object',
+ 'odbc_fetch_row',
+ 'odbc_field_len',
+ 'odbc_field_name',
+ 'odbc_field_num',
+ 'odbc_field_precision',
+ 'odbc_field_scale',
+ 'odbc_field_type',
+ 'odbc_foreignkeys',
+ 'odbc_free_result',
+ 'odbc_gettypeinfo',
+ 'odbc_longreadlen',
+ 'odbc_next_result',
+ 'odbc_num_fields',
+ 'odbc_num_rows',
+ 'odbc_pconnect',
+ 'odbc_prepare',
+ 'odbc_primarykeys',
+ 'odbc_procedurecolumns',
+ 'odbc_procedures',
+ 'odbc_result_all',
+ 'odbc_result',
+ 'odbc_rollback',
+ 'odbc_setoption',
+ 'odbc_specialcolumns',
+ 'odbc_statistics',
+ 'odbc_tableprivileges',
+ 'odbc_tables'),
+ 'OPcache': ('opcache_compile_file',
+ 'opcache_get_configuration',
+ 'opcache_get_status',
+ 'opcache_invalidate',
+ 'opcache_reset'),
+ 'Object Aggregation': ('aggregate_info',
+ 'aggregate_methods_by_list',
+ 'aggregate_methods_by_regexp',
+ 'aggregate_methods',
+ 'aggregate_properties_by_list',
+ 'aggregate_properties_by_regexp',
+ 'aggregate_properties',
+ 'aggregate',
+ 'aggregation_info',
+ 'deaggregate'),
+ 'OpenAL': ('openal_buffer_create',
+ 'openal_buffer_data',
+ 'openal_buffer_destroy',
+ 'openal_buffer_get',
+ 'openal_buffer_loadwav',
+ 'openal_context_create',
+ 'openal_context_current',
+ 'openal_context_destroy',
+ 'openal_context_process',
+ 'openal_context_suspend',
+ 'openal_device_close',
+ 'openal_device_open',
+ 'openal_listener_get',
+ 'openal_listener_set',
+ 'openal_source_create',
+ 'openal_source_destroy',
+ 'openal_source_get',
+ 'openal_source_pause',
+ 'openal_source_play',
+ 'openal_source_rewind',
+ 'openal_source_set',
+ 'openal_source_stop',
+ 'openal_stream'),
+ 'OpenSSL': ('openssl_cipher_iv_length',
+ 'openssl_csr_export_to_file',
+ 'openssl_csr_export',
+ 'openssl_csr_get_public_key',
+ 'openssl_csr_get_subject',
+ 'openssl_csr_new',
+ 'openssl_csr_sign',
+ 'openssl_decrypt',
+ 'openssl_dh_compute_key',
+ 'openssl_digest',
+ 'openssl_encrypt',
+ 'openssl_error_string',
+ 'openssl_free_key',
+ 'openssl_get_cipher_methods',
+ 'openssl_get_md_methods',
+ 'openssl_get_privatekey',
+ 'openssl_get_publickey',
+ 'openssl_open',
+ 'openssl_pbkdf2',
+ 'openssl_pkcs12_export_to_file',
+ 'openssl_pkcs12_export',
+ 'openssl_pkcs12_read',
+ 'openssl_pkcs7_decrypt',
+ 'openssl_pkcs7_encrypt',
+ 'openssl_pkcs7_sign',
+ 'openssl_pkcs7_verify',
+ 'openssl_pkey_export_to_file',
+ 'openssl_pkey_export',
+ 'openssl_pkey_free',
+ 'openssl_pkey_get_details',
+ 'openssl_pkey_get_private',
+ 'openssl_pkey_get_public',
+ 'openssl_pkey_new',
+ 'openssl_private_decrypt',
+ 'openssl_private_encrypt',
+ 'openssl_public_decrypt',
+ 'openssl_public_encrypt',
+ 'openssl_random_pseudo_bytes',
+ 'openssl_seal',
+ 'openssl_sign',
+ 'openssl_spki_export_challenge',
+ 'openssl_spki_export',
+ 'openssl_spki_new',
+ 'openssl_spki_verify',
+ 'openssl_verify',
+ 'openssl_x509_check_private_key',
+ 'openssl_x509_checkpurpose',
+ 'openssl_x509_export_to_file',
+ 'openssl_x509_export',
+ 'openssl_x509_free',
+ 'openssl_x509_parse',
+ 'openssl_x509_read'),
+ 'Output Control': ('flush',
+ 'ob_clean',
+ 'ob_end_clean',
+ 'ob_end_flush',
+ 'ob_flush',
+ 'ob_get_clean',
+ 'ob_get_contents',
+ 'ob_get_flush',
+ 'ob_get_length',
+ 'ob_get_level',
+ 'ob_get_status',
+ 'ob_gzhandler',
+ 'ob_implicit_flush',
+ 'ob_list_handlers',
+ 'ob_start',
+ 'output_add_rewrite_var',
+ 'output_reset_rewrite_vars'),
+ 'Ovrimos SQL': ('ovrimos_close',
+ 'ovrimos_commit',
+ 'ovrimos_connect',
+ 'ovrimos_cursor',
+ 'ovrimos_exec',
+ 'ovrimos_execute',
+ 'ovrimos_fetch_into',
+ 'ovrimos_fetch_row',
+ 'ovrimos_field_len',
+ 'ovrimos_field_name',
+ 'ovrimos_field_num',
+ 'ovrimos_field_type',
+ 'ovrimos_free_result',
+ 'ovrimos_longreadlen',
+ 'ovrimos_num_fields',
+ 'ovrimos_num_rows',
+ 'ovrimos_prepare',
+ 'ovrimos_result_all',
+ 'ovrimos_result',
+ 'ovrimos_rollback'),
+ 'PCNTL': ('pcntl_alarm',
+ 'pcntl_errno',
+ 'pcntl_exec',
+ 'pcntl_fork',
+ 'pcntl_get_last_error',
+ 'pcntl_getpriority',
+ 'pcntl_setpriority',
+ 'pcntl_signal_dispatch',
+ 'pcntl_signal',
+ 'pcntl_sigprocmask',
+ 'pcntl_sigtimedwait',
+ 'pcntl_sigwaitinfo',
+ 'pcntl_strerror',
+ 'pcntl_wait',
+ 'pcntl_waitpid',
+ 'pcntl_wexitstatus',
+ 'pcntl_wifexited',
+ 'pcntl_wifsignaled',
+ 'pcntl_wifstopped',
+ 'pcntl_wstopsig',
+ 'pcntl_wtermsig'),
+ 'PCRE': ('preg_filter',
+ 'preg_grep',
+ 'preg_last_error',
+ 'preg_match_all',
+ 'preg_match',
+ 'preg_quote',
+ 'preg_replace_callback',
+ 'preg_replace',
+ 'preg_split'),
+ 'PDF': ('PDF_activate_item',
+ 'PDF_add_annotation',
+ 'PDF_add_bookmark',
+ 'PDF_add_launchlink',
+ 'PDF_add_locallink',
+ 'PDF_add_nameddest',
+ 'PDF_add_note',
+ 'PDF_add_outline',
+ 'PDF_add_pdflink',
+ 'PDF_add_table_cell',
+ 'PDF_add_textflow',
+ 'PDF_add_thumbnail',
+ 'PDF_add_weblink',
+ 'PDF_arc',
+ 'PDF_arcn',
+ 'PDF_attach_file',
+ 'PDF_begin_document',
+ 'PDF_begin_font',
+ 'PDF_begin_glyph',
+ 'PDF_begin_item',
+ 'PDF_begin_layer',
+ 'PDF_begin_page_ext',
+ 'PDF_begin_page',
+ 'PDF_begin_pattern',
+ 'PDF_begin_template_ext',
+ 'PDF_begin_template',
+ 'PDF_circle',
+ 'PDF_clip',
+ 'PDF_close_image',
+ 'PDF_close_pdi_page',
+ 'PDF_close_pdi',
+ 'PDF_close',
+ 'PDF_closepath_fill_stroke',
+ 'PDF_closepath_stroke',
+ 'PDF_closepath',
+ 'PDF_concat',
+ 'PDF_continue_text',
+ 'PDF_create_3dview',
+ 'PDF_create_action',
+ 'PDF_create_annotation',
+ 'PDF_create_bookmark',
+ 'PDF_create_field',
+ 'PDF_create_fieldgroup',
+ 'PDF_create_gstate',
+ 'PDF_create_pvf',
+ 'PDF_create_textflow',
+ 'PDF_curveto',
+ 'PDF_define_layer',
+ 'PDF_delete_pvf',
+ 'PDF_delete_table',
+ 'PDF_delete_textflow',
+ 'PDF_delete',
+ 'PDF_encoding_set_char',
+ 'PDF_end_document',
+ 'PDF_end_font',
+ 'PDF_end_glyph',
+ 'PDF_end_item',
+ 'PDF_end_layer',
+ 'PDF_end_page_ext',
+ 'PDF_end_page',
+ 'PDF_end_pattern',
+ 'PDF_end_template',
+ 'PDF_endpath',
+ 'PDF_fill_imageblock',
+ 'PDF_fill_pdfblock',
+ 'PDF_fill_stroke',
+ 'PDF_fill_textblock',
+ 'PDF_fill',
+ 'PDF_findfont',
+ 'PDF_fit_image',
+ 'PDF_fit_pdi_page',
+ 'PDF_fit_table',
+ 'PDF_fit_textflow',
+ 'PDF_fit_textline',
+ 'PDF_get_apiname',
+ 'PDF_get_buffer',
+ 'PDF_get_errmsg',
+ 'PDF_get_errnum',
+ 'PDF_get_font',
+ 'PDF_get_fontname',
+ 'PDF_get_fontsize',
+ 'PDF_get_image_height',
+ 'PDF_get_image_width',
+ 'PDF_get_majorversion',
+ 'PDF_get_minorversion',
+ 'PDF_get_parameter',
+ 'PDF_get_pdi_parameter',
+ 'PDF_get_pdi_value',
+ 'PDF_get_value',
+ 'PDF_info_font',
+ 'PDF_info_matchbox',
+ 'PDF_info_table',
+ 'PDF_info_textflow',
+ 'PDF_info_textline',
+ 'PDF_initgraphics',
+ 'PDF_lineto',
+ 'PDF_load_3ddata',
+ 'PDF_load_font',
+ 'PDF_load_iccprofile',
+ 'PDF_load_image',
+ 'PDF_makespotcolor',
+ 'PDF_moveto',
+ 'PDF_new',
+ 'PDF_open_ccitt',
+ 'PDF_open_file',
+ 'PDF_open_gif',
+ 'PDF_open_image_file',
+ 'PDF_open_image',
+ 'PDF_open_jpeg',
+ 'PDF_open_memory_image',
+ 'PDF_open_pdi_document',
+ 'PDF_open_pdi_page',
+ 'PDF_open_pdi',
+ 'PDF_open_tiff',
+ 'PDF_pcos_get_number',
+ 'PDF_pcos_get_stream',
+ 'PDF_pcos_get_string',
+ 'PDF_place_image',
+ 'PDF_place_pdi_page',
+ 'PDF_process_pdi',
+ 'PDF_rect',
+ 'PDF_restore',
+ 'PDF_resume_page',
+ 'PDF_rotate',
+ 'PDF_save',
+ 'PDF_scale',
+ 'PDF_set_border_color',
+ 'PDF_set_border_dash',
+ 'PDF_set_border_style',
+ 'PDF_set_char_spacing',
+ 'PDF_set_duration',
+ 'PDF_set_gstate',
+ 'PDF_set_horiz_scaling',
+ 'PDF_set_info_author',
+ 'PDF_set_info_creator',
+ 'PDF_set_info_keywords',
+ 'PDF_set_info_subject',
+ 'PDF_set_info_title',
+ 'PDF_set_info',
+ 'PDF_set_layer_dependency',
+ 'PDF_set_leading',
+ 'PDF_set_parameter',
+ 'PDF_set_text_matrix',
+ 'PDF_set_text_pos',
+ 'PDF_set_text_rendering',
+ 'PDF_set_text_rise',
+ 'PDF_set_value',
+ 'PDF_set_word_spacing',
+ 'PDF_setcolor',
+ 'PDF_setdash',
+ 'PDF_setdashpattern',
+ 'PDF_setflat',
+ 'PDF_setfont',
+ 'PDF_setgray_fill',
+ 'PDF_setgray_stroke',
+ 'PDF_setgray',
+ 'PDF_setlinecap',
+ 'PDF_setlinejoin',
+ 'PDF_setlinewidth',
+ 'PDF_setmatrix',
+ 'PDF_setmiterlimit',
+ 'PDF_setpolydash',
+ 'PDF_setrgbcolor_fill',
+ 'PDF_setrgbcolor_stroke',
+ 'PDF_setrgbcolor',
+ 'PDF_shading_pattern',
+ 'PDF_shading',
+ 'PDF_shfill',
+ 'PDF_show_boxed',
+ 'PDF_show_xy',
+ 'PDF_show',
+ 'PDF_skew',
+ 'PDF_stringwidth',
+ 'PDF_stroke',
+ 'PDF_suspend_page',
+ 'PDF_translate',
+ 'PDF_utf16_to_utf8',
+ 'PDF_utf32_to_utf16',
+ 'PDF_utf8_to_utf16'),
+ 'PHP Options/Info': ('assert_options',
+ 'assert',
+ 'cli_get_process_title',
+ 'cli_set_process_title',
+ 'dl',
+ 'extension_loaded',
+ 'gc_collect_cycles',
+ 'gc_disable',
+ 'gc_enable',
+ 'gc_enabled',
+ 'get_cfg_var',
+ 'get_current_user',
+ 'get_defined_constants',
+ 'get_extension_funcs',
+ 'get_include_path',
+ 'get_included_files',
+ 'get_loaded_extensions',
+ 'get_magic_quotes_gpc',
+ 'get_magic_quotes_runtime',
+ 'get_required_files',
+ 'getenv',
+ 'getlastmod',
+ 'getmygid',
+ 'getmyinode',
+ 'getmypid',
+ 'getmyuid',
+ 'getopt',
+ 'getrusage',
+ 'ini_alter',
+ 'ini_get_all',
+ 'ini_get',
+ 'ini_restore',
+ 'ini_set',
+ 'magic_quotes_runtime',
+ 'memory_get_peak_usage',
+ 'memory_get_usage',
+ 'php_ini_loaded_file',
+ 'php_ini_scanned_files',
+ 'php_logo_guid',
+ 'php_sapi_name',
+ 'php_uname',
+ 'phpcredits',
+ 'phpinfo',
+ 'phpversion',
+ 'putenv',
+ 'restore_include_path',
+ 'set_include_path',
+ 'set_magic_quotes_runtime',
+ 'set_time_limit',
+ 'sys_get_temp_dir',
+ 'version_compare',
+ 'zend_logo_guid',
+ 'zend_thread_id',
+ 'zend_version'),
+ 'POSIX': ('posix_access',
+ 'posix_ctermid',
+ 'posix_errno',
+ 'posix_get_last_error',
+ 'posix_getcwd',
+ 'posix_getegid',
+ 'posix_geteuid',
+ 'posix_getgid',
+ 'posix_getgrgid',
+ 'posix_getgrnam',
+ 'posix_getgroups',
+ 'posix_getlogin',
+ 'posix_getpgid',
+ 'posix_getpgrp',
+ 'posix_getpid',
+ 'posix_getppid',
+ 'posix_getpwnam',
+ 'posix_getpwuid',
+ 'posix_getrlimit',
+ 'posix_getsid',
+ 'posix_getuid',
+ 'posix_initgroups',
+ 'posix_isatty',
+ 'posix_kill',
+ 'posix_mkfifo',
+ 'posix_mknod',
+ 'posix_setegid',
+ 'posix_seteuid',
+ 'posix_setgid',
+ 'posix_setpgid',
+ 'posix_setsid',
+ 'posix_setuid',
+ 'posix_strerror',
+ 'posix_times',
+ 'posix_ttyname',
+ 'posix_uname'),
+ 'POSIX Regex': ('ereg_replace',
+ 'ereg',
+ 'eregi_replace',
+ 'eregi',
+ 'split',
+ 'spliti',
+ 'sql_regcase'),
+ 'PS': ('ps_add_bookmark',
+ 'ps_add_launchlink',
+ 'ps_add_locallink',
+ 'ps_add_note',
+ 'ps_add_pdflink',
+ 'ps_add_weblink',
+ 'ps_arc',
+ 'ps_arcn',
+ 'ps_begin_page',
+ 'ps_begin_pattern',
+ 'ps_begin_template',
+ 'ps_circle',
+ 'ps_clip',
+ 'ps_close_image',
+ 'ps_close',
+ 'ps_closepath_stroke',
+ 'ps_closepath',
+ 'ps_continue_text',
+ 'ps_curveto',
+ 'ps_delete',
+ 'ps_end_page',
+ 'ps_end_pattern',
+ 'ps_end_template',
+ 'ps_fill_stroke',
+ 'ps_fill',
+ 'ps_findfont',
+ 'ps_get_buffer',
+ 'ps_get_parameter',
+ 'ps_get_value',
+ 'ps_hyphenate',
+ 'ps_include_file',
+ 'ps_lineto',
+ 'ps_makespotcolor',
+ 'ps_moveto',
+ 'ps_new',
+ 'ps_open_file',
+ 'ps_open_image_file',
+ 'ps_open_image',
+ 'ps_open_memory_image',
+ 'ps_place_image',
+ 'ps_rect',
+ 'ps_restore',
+ 'ps_rotate',
+ 'ps_save',
+ 'ps_scale',
+ 'ps_set_border_color',
+ 'ps_set_border_dash',
+ 'ps_set_border_style',
+ 'ps_set_info',
+ 'ps_set_parameter',
+ 'ps_set_text_pos',
+ 'ps_set_value',
+ 'ps_setcolor',
+ 'ps_setdash',
+ 'ps_setflat',
+ 'ps_setfont',
+ 'ps_setgray',
+ 'ps_setlinecap',
+ 'ps_setlinejoin',
+ 'ps_setlinewidth',
+ 'ps_setmiterlimit',
+ 'ps_setoverprintmode',
+ 'ps_setpolydash',
+ 'ps_shading_pattern',
+ 'ps_shading',
+ 'ps_shfill',
+ 'ps_show_boxed',
+ 'ps_show_xy2',
+ 'ps_show_xy',
+ 'ps_show2',
+ 'ps_show',
+ 'ps_string_geometry',
+ 'ps_stringwidth',
+ 'ps_stroke',
+ 'ps_symbol_name',
+ 'ps_symbol_width',
+ 'ps_symbol',
+ 'ps_translate'),
+ 'Paradox': ('px_close',
+ 'px_create_fp',
+ 'px_date2string',
+ 'px_delete_record',
+ 'px_delete',
+ 'px_get_field',
+ 'px_get_info',
+ 'px_get_parameter',
+ 'px_get_record',
+ 'px_get_schema',
+ 'px_get_value',
+ 'px_insert_record',
+ 'px_new',
+ 'px_numfields',
+ 'px_numrecords',
+ 'px_open_fp',
+ 'px_put_record',
+ 'px_retrieve_record',
+ 'px_set_blob_file',
+ 'px_set_parameter',
+ 'px_set_tablename',
+ 'px_set_targetencoding',
+ 'px_set_value',
+ 'px_timestamp2string',
+ 'px_update_record'),
+ 'Parsekit': ('parsekit_compile_file',
+ 'parsekit_compile_string',
+ 'parsekit_func_arginfo'),
+ 'Password Hashing': ('password_get_info',
+ 'password_hash',
+ 'password_needs_rehash',
+ 'password_verify'),
+ 'PostgreSQL': ('pg_affected_rows',
+ 'pg_cancel_query',
+ 'pg_client_encoding',
+ 'pg_close',
+ 'pg_connect',
+ 'pg_connection_busy',
+ 'pg_connection_reset',
+ 'pg_connection_status',
+ 'pg_convert',
+ 'pg_copy_from',
+ 'pg_copy_to',
+ 'pg_dbname',
+ 'pg_delete',
+ 'pg_end_copy',
+ 'pg_escape_bytea',
+ 'pg_escape_identifier',
+ 'pg_escape_literal',
+ 'pg_escape_string',
+ 'pg_execute',
+ 'pg_fetch_all_columns',
+ 'pg_fetch_all',
+ 'pg_fetch_array',
+ 'pg_fetch_assoc',
+ 'pg_fetch_object',
+ 'pg_fetch_result',
+ 'pg_fetch_row',
+ 'pg_field_is_null',
+ 'pg_field_name',
+ 'pg_field_num',
+ 'pg_field_prtlen',
+ 'pg_field_size',
+ 'pg_field_table',
+ 'pg_field_type_oid',
+ 'pg_field_type',
+ 'pg_free_result',
+ 'pg_get_notify',
+ 'pg_get_pid',
+ 'pg_get_result',
+ 'pg_host',
+ 'pg_insert',
+ 'pg_last_error',
+ 'pg_last_notice',
+ 'pg_last_oid',
+ 'pg_lo_close',
+ 'pg_lo_create',
+ 'pg_lo_export',
+ 'pg_lo_import',
+ 'pg_lo_open',
+ 'pg_lo_read_all',
+ 'pg_lo_read',
+ 'pg_lo_seek',
+ 'pg_lo_tell',
+ 'pg_lo_truncate',
+ 'pg_lo_unlink',
+ 'pg_lo_write',
+ 'pg_meta_data',
+ 'pg_num_fields',
+ 'pg_num_rows',
+ 'pg_options',
+ 'pg_parameter_status',
+ 'pg_pconnect',
+ 'pg_ping',
+ 'pg_port',
+ 'pg_prepare',
+ 'pg_put_line',
+ 'pg_query_params',
+ 'pg_query',
+ 'pg_result_error_field',
+ 'pg_result_error',
+ 'pg_result_seek',
+ 'pg_result_status',
+ 'pg_select',
+ 'pg_send_execute',
+ 'pg_send_prepare',
+ 'pg_send_query_params',
+ 'pg_send_query',
+ 'pg_set_client_encoding',
+ 'pg_set_error_verbosity',
+ 'pg_trace',
+ 'pg_transaction_status',
+ 'pg_tty',
+ 'pg_unescape_bytea',
+ 'pg_untrace',
+ 'pg_update',
+ 'pg_version'),
+ 'Printer': ('printer_abort',
+ 'printer_close',
+ 'printer_create_brush',
+ 'printer_create_dc',
+ 'printer_create_font',
+ 'printer_create_pen',
+ 'printer_delete_brush',
+ 'printer_delete_dc',
+ 'printer_delete_font',
+ 'printer_delete_pen',
+ 'printer_draw_bmp',
+ 'printer_draw_chord',
+ 'printer_draw_elipse',
+ 'printer_draw_line',
+ 'printer_draw_pie',
+ 'printer_draw_rectangle',
+ 'printer_draw_roundrect',
+ 'printer_draw_text',
+ 'printer_end_doc',
+ 'printer_end_page',
+ 'printer_get_option',
+ 'printer_list',
+ 'printer_logical_fontheight',
+ 'printer_open',
+ 'printer_select_brush',
+ 'printer_select_font',
+ 'printer_select_pen',
+ 'printer_set_option',
+ 'printer_start_doc',
+ 'printer_start_page',
+ 'printer_write'),
+ 'Proctitle': ('setproctitle', 'setthreadtitle'),
+ 'Program execution': ('escapeshellarg',
+ 'escapeshellcmd',
+ 'exec',
+ 'passthru',
+ 'proc_close',
+ 'proc_get_status',
+ 'proc_nice',
+ 'proc_open',
+ 'proc_terminate',
+ 'shell_exec',
+ 'system'),
+ 'Pspell': ('pspell_add_to_personal',
+ 'pspell_add_to_session',
+ 'pspell_check',
+ 'pspell_clear_session',
+ 'pspell_config_create',
+ 'pspell_config_data_dir',
+ 'pspell_config_dict_dir',
+ 'pspell_config_ignore',
+ 'pspell_config_mode',
+ 'pspell_config_personal',
+ 'pspell_config_repl',
+ 'pspell_config_runtogether',
+ 'pspell_config_save_repl',
+ 'pspell_new_config',
+ 'pspell_new_personal',
+ 'pspell_new',
+ 'pspell_save_wordlist',
+ 'pspell_store_replacement',
+ 'pspell_suggest'),
+ 'RPM Reader': ('rpm_close',
+ 'rpm_get_tag',
+ 'rpm_is_valid',
+ 'rpm_open',
+ 'rpm_version'),
+ 'RRD': ('rrd_create',
+ 'rrd_error',
+ 'rrd_fetch',
+ 'rrd_first',
+ 'rrd_graph',
+ 'rrd_info',
+ 'rrd_last',
+ 'rrd_lastupdate',
+ 'rrd_restore',
+ 'rrd_tune',
+ 'rrd_update',
+ 'rrd_version',
+ 'rrd_xport',
+ 'rrdc_disconnect'),
+ 'Radius': ('radius_acct_open',
+ 'radius_add_server',
+ 'radius_auth_open',
+ 'radius_close',
+ 'radius_config',
+ 'radius_create_request',
+ 'radius_cvt_addr',
+ 'radius_cvt_int',
+ 'radius_cvt_string',
+ 'radius_demangle_mppe_key',
+ 'radius_demangle',
+ 'radius_get_attr',
+ 'radius_get_tagged_attr_data',
+ 'radius_get_tagged_attr_tag',
+ 'radius_get_vendor_attr',
+ 'radius_put_addr',
+ 'radius_put_attr',
+ 'radius_put_int',
+ 'radius_put_string',
+ 'radius_put_vendor_addr',
+ 'radius_put_vendor_attr',
+ 'radius_put_vendor_int',
+ 'radius_put_vendor_string',
+ 'radius_request_authenticator',
+ 'radius_salt_encrypt_attr',
+ 'radius_send_request',
+ 'radius_server_secret',
+ 'radius_strerror'),
+ 'Rar': ('rar_wrapper_cache_stats',),
+ 'Readline': ('readline_add_history',
+ 'readline_callback_handler_install',
+ 'readline_callback_handler_remove',
+ 'readline_callback_read_char',
+ 'readline_clear_history',
+ 'readline_completion_function',
+ 'readline_info',
+ 'readline_list_history',
+ 'readline_on_new_line',
+ 'readline_read_history',
+ 'readline_redisplay',
+ 'readline_write_history',
+ 'readline'),
+ 'Recode': ('recode_file', 'recode_string', 'recode'),
+ 'SNMP': ('snmp_get_quick_print',
+ 'snmp_get_valueretrieval',
+ 'snmp_read_mib',
+ 'snmp_set_enum_print',
+ 'snmp_set_oid_numeric_print',
+ 'snmp_set_oid_output_format',
+ 'snmp_set_quick_print',
+ 'snmp_set_valueretrieval',
+ 'snmp2_get',
+ 'snmp2_getnext',
+ 'snmp2_real_walk',
+ 'snmp2_set',
+ 'snmp2_walk',
+ 'snmp3_get',
+ 'snmp3_getnext',
+ 'snmp3_real_walk',
+ 'snmp3_set',
+ 'snmp3_walk',
+ 'snmpget',
+ 'snmpgetnext',
+ 'snmprealwalk',
+ 'snmpset',
+ 'snmpwalk',
+ 'snmpwalkoid'),
+ 'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
+ 'SPL': ('class_implements',
+ 'class_parents',
+ 'class_uses',
+ 'iterator_apply',
+ 'iterator_count',
+ 'iterator_to_array',
+ 'spl_autoload_call',
+ 'spl_autoload_extensions',
+ 'spl_autoload_functions',
+ 'spl_autoload_register',
+ 'spl_autoload_unregister',
+ 'spl_autoload',
+ 'spl_classes',
+ 'spl_object_hash'),
+ 'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'),
+ 'SQLSRV': ('sqlsrv_begin_transaction',
+ 'sqlsrv_cancel',
+ 'sqlsrv_client_info',
+ 'sqlsrv_close',
+ 'sqlsrv_commit',
+ 'sqlsrv_configure',
+ 'sqlsrv_connect',
+ 'sqlsrv_errors',
+ 'sqlsrv_execute',
+ 'sqlsrv_fetch_array',
+ 'sqlsrv_fetch_object',
+ 'sqlsrv_fetch',
+ 'sqlsrv_field_metadata',
+ 'sqlsrv_free_stmt',
+ 'sqlsrv_get_config',
+ 'sqlsrv_get_field',
+ 'sqlsrv_has_rows',
+ 'sqlsrv_next_result',
+ 'sqlsrv_num_fields',
+ 'sqlsrv_num_rows',
+ 'sqlsrv_prepare',
+ 'sqlsrv_query',
+ 'sqlsrv_rollback',
+ 'sqlsrv_rows_affected',
+ 'sqlsrv_send_stream_data',
+ 'sqlsrv_server_info'),
+ 'SQLite': ('sqlite_array_query',
+ 'sqlite_busy_timeout',
+ 'sqlite_changes',
+ 'sqlite_close',
+ 'sqlite_column',
+ 'sqlite_create_aggregate',
+ 'sqlite_create_function',
+ 'sqlite_current',
+ 'sqlite_error_string',
+ 'sqlite_escape_string',
+ 'sqlite_exec',
+ 'sqlite_factory',
+ 'sqlite_fetch_all',
+ 'sqlite_fetch_array',
+ 'sqlite_fetch_column_types',
+ 'sqlite_fetch_object',
+ 'sqlite_fetch_single',
+ 'sqlite_fetch_string',
+ 'sqlite_field_name',
+ 'sqlite_has_more',
+ 'sqlite_has_prev',
+ 'sqlite_key',
+ 'sqlite_last_error',
+ 'sqlite_last_insert_rowid',
+ 'sqlite_libencoding',
+ 'sqlite_libversion',
+ 'sqlite_next',
+ 'sqlite_num_fields',
+ 'sqlite_num_rows',
+ 'sqlite_open',
+ 'sqlite_popen',
+ 'sqlite_prev',
+ 'sqlite_query',
+ 'sqlite_rewind',
+ 'sqlite_seek',
+ 'sqlite_single_query',
+ 'sqlite_udf_decode_binary',
+ 'sqlite_udf_encode_binary',
+ 'sqlite_unbuffered_query',
+ 'sqlite_valid'),
+ 'SSH2': ('ssh2_auth_agent',
+ 'ssh2_auth_hostbased_file',
+ 'ssh2_auth_none',
+ 'ssh2_auth_password',
+ 'ssh2_auth_pubkey_file',
+ 'ssh2_connect',
+ 'ssh2_exec',
+ 'ssh2_fetch_stream',
+ 'ssh2_fingerprint',
+ 'ssh2_methods_negotiated',
+ 'ssh2_publickey_add',
+ 'ssh2_publickey_init',
+ 'ssh2_publickey_list',
+ 'ssh2_publickey_remove',
+ 'ssh2_scp_recv',
+ 'ssh2_scp_send',
+ 'ssh2_sftp_chmod',
+ 'ssh2_sftp_lstat',
+ 'ssh2_sftp_mkdir',
+ 'ssh2_sftp_readlink',
+ 'ssh2_sftp_realpath',
+ 'ssh2_sftp_rename',
+ 'ssh2_sftp_rmdir',
+ 'ssh2_sftp_stat',
+ 'ssh2_sftp_symlink',
+ 'ssh2_sftp_unlink',
+ 'ssh2_sftp',
+ 'ssh2_shell',
+ 'ssh2_tunnel'),
+ 'SVN': ('svn_add',
+ 'svn_auth_get_parameter',
+ 'svn_auth_set_parameter',
+ 'svn_blame',
+ 'svn_cat',
+ 'svn_checkout',
+ 'svn_cleanup',
+ 'svn_client_version',
+ 'svn_commit',
+ 'svn_delete',
+ 'svn_diff',
+ 'svn_export',
+ 'svn_fs_abort_txn',
+ 'svn_fs_apply_text',
+ 'svn_fs_begin_txn2',
+ 'svn_fs_change_node_prop',
+ 'svn_fs_check_path',
+ 'svn_fs_contents_changed',
+ 'svn_fs_copy',
+ 'svn_fs_delete',
+ 'svn_fs_dir_entries',
+ 'svn_fs_file_contents',
+ 'svn_fs_file_length',
+ 'svn_fs_is_dir',
+ 'svn_fs_is_file',
+ 'svn_fs_make_dir',
+ 'svn_fs_make_file',
+ 'svn_fs_node_created_rev',
+ 'svn_fs_node_prop',
+ 'svn_fs_props_changed',
+ 'svn_fs_revision_prop',
+ 'svn_fs_revision_root',
+ 'svn_fs_txn_root',
+ 'svn_fs_youngest_rev',
+ 'svn_import',
+ 'svn_log',
+ 'svn_ls',
+ 'svn_mkdir',
+ 'svn_repos_create',
+ 'svn_repos_fs_begin_txn_for_commit',
+ 'svn_repos_fs_commit_txn',
+ 'svn_repos_fs',
+ 'svn_repos_hotcopy',
+ 'svn_repos_open',
+ 'svn_repos_recover',
+ 'svn_revert',
+ 'svn_status',
+ 'svn_update'),
+ 'SWF': ('swf_actiongeturl',
+ 'swf_actiongotoframe',
+ 'swf_actiongotolabel',
+ 'swf_actionnextframe',
+ 'swf_actionplay',
+ 'swf_actionprevframe',
+ 'swf_actionsettarget',
+ 'swf_actionstop',
+ 'swf_actiontogglequality',
+ 'swf_actionwaitforframe',
+ 'swf_addbuttonrecord',
+ 'swf_addcolor',
+ 'swf_closefile',
+ 'swf_definebitmap',
+ 'swf_definefont',
+ 'swf_defineline',
+ 'swf_definepoly',
+ 'swf_definerect',
+ 'swf_definetext',
+ 'swf_endbutton',
+ 'swf_enddoaction',
+ 'swf_endshape',
+ 'swf_endsymbol',
+ 'swf_fontsize',
+ 'swf_fontslant',
+ 'swf_fonttracking',
+ 'swf_getbitmapinfo',
+ 'swf_getfontinfo',
+ 'swf_getframe',
+ 'swf_labelframe',
+ 'swf_lookat',
+ 'swf_modifyobject',
+ 'swf_mulcolor',
+ 'swf_nextid',
+ 'swf_oncondition',
+ 'swf_openfile',
+ 'swf_ortho2',
+ 'swf_ortho',
+ 'swf_perspective',
+ 'swf_placeobject',
+ 'swf_polarview',
+ 'swf_popmatrix',
+ 'swf_posround',
+ 'swf_pushmatrix',
+ 'swf_removeobject',
+ 'swf_rotate',
+ 'swf_scale',
+ 'swf_setfont',
+ 'swf_setframe',
+ 'swf_shapearc',
+ 'swf_shapecurveto3',
+ 'swf_shapecurveto',
+ 'swf_shapefillbitmapclip',
+ 'swf_shapefillbitmaptile',
+ 'swf_shapefilloff',
+ 'swf_shapefillsolid',
+ 'swf_shapelinesolid',
+ 'swf_shapelineto',
+ 'swf_shapemoveto',
+ 'swf_showframe',
+ 'swf_startbutton',
+ 'swf_startdoaction',
+ 'swf_startshape',
+ 'swf_startsymbol',
+ 'swf_textwidth',
+ 'swf_translate',
+ 'swf_viewport'),
+ 'Semaphore': ('ftok',
+ 'msg_get_queue',
+ 'msg_queue_exists',
+ 'msg_receive',
+ 'msg_remove_queue',
+ 'msg_send',
+ 'msg_set_queue',
+ 'msg_stat_queue',
+ 'sem_acquire',
+ 'sem_get',
+ 'sem_release',
+ 'sem_remove',
+ 'shm_attach',
+ 'shm_detach',
+ 'shm_get_var',
+ 'shm_has_var',
+ 'shm_put_var',
+ 'shm_remove_var',
+ 'shm_remove'),
+ 'Session': ('session_cache_expire',
+ 'session_cache_limiter',
+ 'session_commit',
+ 'session_decode',
+ 'session_destroy',
+ 'session_encode',
+ 'session_get_cookie_params',
+ 'session_id',
+ 'session_is_registered',
+ 'session_module_name',
+ 'session_name',
+ 'session_regenerate_id',
+ 'session_register_shutdown',
+ 'session_register',
+ 'session_save_path',
+ 'session_set_cookie_params',
+ 'session_set_save_handler',
+ 'session_start',
+ 'session_status',
+ 'session_unregister',
+ 'session_unset',
+ 'session_write_close'),
+ 'Session PgSQL': ('session_pgsql_add_error',
+ 'session_pgsql_get_error',
+ 'session_pgsql_get_field',
+ 'session_pgsql_reset',
+ 'session_pgsql_set_field',
+ 'session_pgsql_status'),
+ 'Shared Memory': ('shmop_close',
+ 'shmop_delete',
+ 'shmop_open',
+ 'shmop_read',
+ 'shmop_size',
+ 'shmop_write'),
+ 'SimpleXML': ('simplexml_import_dom',
+ 'simplexml_load_file',
+ 'simplexml_load_string'),
+ 'Socket': ('socket_accept',
+ 'socket_bind',
+ 'socket_clear_error',
+ 'socket_close',
+ 'socket_cmsg_space',
+ 'socket_connect',
+ 'socket_create_listen',
+ 'socket_create_pair',
+ 'socket_create',
+ 'socket_get_option',
+ 'socket_getpeername',
+ 'socket_getsockname',
+ 'socket_import_stream',
+ 'socket_last_error',
+ 'socket_listen',
+ 'socket_read',
+ 'socket_recv',
+ 'socket_recvfrom',
+ 'socket_recvmsg',
+ 'socket_select',
+ 'socket_send',
+ 'socket_sendmsg',
+ 'socket_sendto',
+ 'socket_set_block',
+ 'socket_set_nonblock',
+ 'socket_set_option',
+ 'socket_shutdown',
+ 'socket_strerror',
+ 'socket_write'),
+ 'Solr': ('solr_get_version',),
+ 'Statistic': ('stats_absolute_deviation',
+ 'stats_cdf_beta',
+ 'stats_cdf_binomial',
+ 'stats_cdf_cauchy',
+ 'stats_cdf_chisquare',
+ 'stats_cdf_exponential',
+ 'stats_cdf_f',
+ 'stats_cdf_gamma',
+ 'stats_cdf_laplace',
+ 'stats_cdf_logistic',
+ 'stats_cdf_negative_binomial',
+ 'stats_cdf_noncentral_chisquare',
+ 'stats_cdf_noncentral_f',
+ 'stats_cdf_poisson',
+ 'stats_cdf_t',
+ 'stats_cdf_uniform',
+ 'stats_cdf_weibull',
+ 'stats_covariance',
+ 'stats_den_uniform',
+ 'stats_dens_beta',
+ 'stats_dens_cauchy',
+ 'stats_dens_chisquare',
+ 'stats_dens_exponential',
+ 'stats_dens_f',
+ 'stats_dens_gamma',
+ 'stats_dens_laplace',
+ 'stats_dens_logistic',
+ 'stats_dens_negative_binomial',
+ 'stats_dens_normal',
+ 'stats_dens_pmf_binomial',
+ 'stats_dens_pmf_hypergeometric',
+ 'stats_dens_pmf_poisson',
+ 'stats_dens_t',
+ 'stats_dens_weibull',
+ 'stats_harmonic_mean',
+ 'stats_kurtosis',
+ 'stats_rand_gen_beta',
+ 'stats_rand_gen_chisquare',
+ 'stats_rand_gen_exponential',
+ 'stats_rand_gen_f',
+ 'stats_rand_gen_funiform',
+ 'stats_rand_gen_gamma',
+ 'stats_rand_gen_ibinomial_negative',
+ 'stats_rand_gen_ibinomial',
+ 'stats_rand_gen_int',
+ 'stats_rand_gen_ipoisson',
+ 'stats_rand_gen_iuniform',
+ 'stats_rand_gen_noncenral_chisquare',
+ 'stats_rand_gen_noncentral_f',
+ 'stats_rand_gen_noncentral_t',
+ 'stats_rand_gen_normal',
+ 'stats_rand_gen_t',
+ 'stats_rand_get_seeds',
+ 'stats_rand_phrase_to_seeds',
+ 'stats_rand_ranf',
+ 'stats_rand_setall',
+ 'stats_skew',
+ 'stats_standard_deviation',
+ 'stats_stat_binomial_coef',
+ 'stats_stat_correlation',
+ 'stats_stat_gennch',
+ 'stats_stat_independent_t',
+ 'stats_stat_innerproduct',
+ 'stats_stat_noncentral_t',
+ 'stats_stat_paired_t',
+ 'stats_stat_percentile',
+ 'stats_stat_powersum',
+ 'stats_variance'),
+ 'Stomp': ('stomp_connect_error', 'stomp_version'),
+ 'Stream': ('set_socket_blocking',
+ 'stream_bucket_append',
+ 'stream_bucket_make_writeable',
+ 'stream_bucket_new',
+ 'stream_bucket_prepend',
+ 'stream_context_create',
+ 'stream_context_get_default',
+ 'stream_context_get_options',
+ 'stream_context_get_params',
+ 'stream_context_set_default',
+ 'stream_context_set_option',
+ 'stream_context_set_params',
+ 'stream_copy_to_stream',
+ 'stream_encoding',
+ 'stream_filter_append',
+ 'stream_filter_prepend',
+ 'stream_filter_register',
+ 'stream_filter_remove',
+ 'stream_get_contents',
+ 'stream_get_filters',
+ 'stream_get_line',
+ 'stream_get_meta_data',
+ 'stream_get_transports',
+ 'stream_get_wrappers',
+ 'stream_is_local',
+ 'stream_notification_callback',
+ 'stream_register_wrapper',
+ 'stream_resolve_include_path',
+ 'stream_select',
+ 'stream_set_blocking',
+ 'stream_set_chunk_size',
+ 'stream_set_read_buffer',
+ 'stream_set_timeout',
+ 'stream_set_write_buffer',
+ 'stream_socket_accept',
+ 'stream_socket_client',
+ 'stream_socket_enable_crypto',
+ 'stream_socket_get_name',
+ 'stream_socket_pair',
+ 'stream_socket_recvfrom',
+ 'stream_socket_sendto',
+ 'stream_socket_server',
+ 'stream_socket_shutdown',
+ 'stream_supports_lock',
+ 'stream_wrapper_register',
+ 'stream_wrapper_restore',
+ 'stream_wrapper_unregister'),
+ 'String': ('addcslashes',
+ 'addslashes',
+ 'bin2hex',
+ 'chop',
+ 'chr',
+ 'chunk_split',
+ 'convert_cyr_string',
+ 'convert_uudecode',
+ 'convert_uuencode',
+ 'count_chars',
+ 'crc32',
+ 'crypt',
+ 'echo',
+ 'explode',
+ 'fprintf',
+ 'get_html_translation_table',
+ 'hebrev',
+ 'hebrevc',
+ 'hex2bin',
+ 'html_entity_decode',
+ 'htmlentities',
+ 'htmlspecialchars_decode',
+ 'htmlspecialchars',
+ 'implode',
+ 'join',
+ 'lcfirst',
+ 'levenshtein',
+ 'localeconv',
+ 'ltrim',
+ 'md5_file',
+ 'md5',
+ 'metaphone',
+ 'money_format',
+ 'nl_langinfo',
+ 'nl2br',
+ 'number_format',
+ 'ord',
+ 'parse_str',
+ 'print',
+ 'printf',
+ 'quoted_printable_decode',
+ 'quoted_printable_encode',
+ 'quotemeta',
+ 'rtrim',
+ 'setlocale',
+ 'sha1_file',
+ 'sha1',
+ 'similar_text',
+ 'soundex',
+ 'sprintf',
+ 'sscanf',
+ 'str_getcsv',
+ 'str_ireplace',
+ 'str_pad',
+ 'str_repeat',
+ 'str_replace',
+ 'str_rot13',
+ 'str_shuffle',
+ 'str_split',
+ 'str_word_count',
+ 'strcasecmp',
+ 'strchr',
+ 'strcmp',
+ 'strcoll',
+ 'strcspn',
+ 'strip_tags',
+ 'stripcslashes',
+ 'stripos',
+ 'stripslashes',
+ 'stristr',
+ 'strlen',
+ 'strnatcasecmp',
+ 'strnatcmp',
+ 'strncasecmp',
+ 'strncmp',
+ 'strpbrk',
+ 'strpos',
+ 'strrchr',
+ 'strrev',
+ 'strripos',
+ 'strrpos',
+ 'strspn',
+ 'strstr',
+ 'strtok',
+ 'strtolower',
+ 'strtoupper',
+ 'strtr',
+ 'substr_compare',
+ 'substr_count',
+ 'substr_replace',
+ 'substr',
+ 'trim',
+ 'ucfirst',
+ 'ucwords',
+ 'vfprintf',
+ 'vprintf',
+ 'vsprintf',
+ 'wordwrap'),
+ 'Sybase': ('sybase_affected_rows',
+ 'sybase_close',
+ 'sybase_connect',
+ 'sybase_data_seek',
+ 'sybase_deadlock_retry_count',
+ 'sybase_fetch_array',
+ 'sybase_fetch_assoc',
+ 'sybase_fetch_field',
+ 'sybase_fetch_object',
+ 'sybase_fetch_row',
+ 'sybase_field_seek',
+ 'sybase_free_result',
+ 'sybase_get_last_message',
+ 'sybase_min_client_severity',
+ 'sybase_min_error_severity',
+ 'sybase_min_message_severity',
+ 'sybase_min_server_severity',
+ 'sybase_num_fields',
+ 'sybase_num_rows',
+ 'sybase_pconnect',
+ 'sybase_query',
+ 'sybase_result',
+ 'sybase_select_db',
+ 'sybase_set_message_handler',
+ 'sybase_unbuffered_query'),
+ 'TCP': ('tcpwrap_check',),
+ 'Taint': ('is_tainted', 'taint', 'untaint'),
+ 'Tidy': ('ob_tidyhandler',
+ 'tidy_access_count',
+ 'tidy_config_count',
+ 'tidy_error_count',
+ 'tidy_get_output',
+ 'tidy_load_config',
+ 'tidy_reset_config',
+ 'tidy_save_config',
+ 'tidy_set_encoding',
+ 'tidy_setopt',
+ 'tidy_warning_count'),
+ 'Tokenizer': ('token_get_all', 'token_name'),
+ 'Trader': ('trader_acos',
+ 'trader_ad',
+ 'trader_add',
+ 'trader_adosc',
+ 'trader_adx',
+ 'trader_adxr',
+ 'trader_apo',
+ 'trader_aroon',
+ 'trader_aroonosc',
+ 'trader_asin',
+ 'trader_atan',
+ 'trader_atr',
+ 'trader_avgprice',
+ 'trader_bbands',
+ 'trader_beta',
+ 'trader_bop',
+ 'trader_cci',
+ 'trader_cdl2crows',
+ 'trader_cdl3blackcrows',
+ 'trader_cdl3inside',
+ 'trader_cdl3linestrike',
+ 'trader_cdl3outside',
+ 'trader_cdl3starsinsouth',
+ 'trader_cdl3whitesoldiers',
+ 'trader_cdlabandonedbaby',
+ 'trader_cdladvanceblock',
+ 'trader_cdlbelthold',
+ 'trader_cdlbreakaway',
+ 'trader_cdlclosingmarubozu',
+ 'trader_cdlconcealbabyswall',
+ 'trader_cdlcounterattack',
+ 'trader_cdldarkcloudcover',
+ 'trader_cdldoji',
+ 'trader_cdldojistar',
+ 'trader_cdldragonflydoji',
+ 'trader_cdlengulfing',
+ 'trader_cdleveningdojistar',
+ 'trader_cdleveningstar',
+ 'trader_cdlgapsidesidewhite',
+ 'trader_cdlgravestonedoji',
+ 'trader_cdlhammer',
+ 'trader_cdlhangingman',
+ 'trader_cdlharami',
+ 'trader_cdlharamicross',
+ 'trader_cdlhighwave',
+ 'trader_cdlhikkake',
+ 'trader_cdlhikkakemod',
+ 'trader_cdlhomingpigeon',
+ 'trader_cdlidentical3crows',
+ 'trader_cdlinneck',
+ 'trader_cdlinvertedhammer',
+ 'trader_cdlkicking',
+ 'trader_cdlkickingbylength',
+ 'trader_cdlladderbottom',
+ 'trader_cdllongleggeddoji',
+ 'trader_cdllongline',
+ 'trader_cdlmarubozu',
+ 'trader_cdlmatchinglow',
+ 'trader_cdlmathold',
+ 'trader_cdlmorningdojistar',
+ 'trader_cdlmorningstar',
+ 'trader_cdlonneck',
+ 'trader_cdlpiercing',
+ 'trader_cdlrickshawman',
+ 'trader_cdlrisefall3methods',
+ 'trader_cdlseparatinglines',
+ 'trader_cdlshootingstar',
+ 'trader_cdlshortline',
+ 'trader_cdlspinningtop',
+ 'trader_cdlstalledpattern',
+ 'trader_cdlsticksandwich',
+ 'trader_cdltakuri',
+ 'trader_cdltasukigap',
+ 'trader_cdlthrusting',
+ 'trader_cdltristar',
+ 'trader_cdlunique3river',
+ 'trader_cdlupsidegap2crows',
+ 'trader_cdlxsidegap3methods',
+ 'trader_ceil',
+ 'trader_cmo',
+ 'trader_correl',
+ 'trader_cos',
+ 'trader_cosh',
+ 'trader_dema',
+ 'trader_div',
+ 'trader_dx',
+ 'trader_ema',
+ 'trader_errno',
+ 'trader_exp',
+ 'trader_floor',
+ 'trader_get_compat',
+ 'trader_get_unstable_period',
+ 'trader_ht_dcperiod',
+ 'trader_ht_dcphase',
+ 'trader_ht_phasor',
+ 'trader_ht_sine',
+ 'trader_ht_trendline',
+ 'trader_ht_trendmode',
+ 'trader_kama',
+ 'trader_linearreg_angle',
+ 'trader_linearreg_intercept',
+ 'trader_linearreg_slope',
+ 'trader_linearreg',
+ 'trader_ln',
+ 'trader_log10',
+ 'trader_ma',
+ 'trader_macd',
+ 'trader_macdext',
+ 'trader_macdfix',
+ 'trader_mama',
+ 'trader_mavp',
+ 'trader_max',
+ 'trader_maxindex',
+ 'trader_medprice',
+ 'trader_mfi',
+ 'trader_midpoint',
+ 'trader_midprice',
+ 'trader_min',
+ 'trader_minindex',
+ 'trader_minmax',
+ 'trader_minmaxindex',
+ 'trader_minus_di',
+ 'trader_minus_dm',
+ 'trader_mom',
+ 'trader_mult',
+ 'trader_natr',
+ 'trader_obv',
+ 'trader_plus_di',
+ 'trader_plus_dm',
+ 'trader_ppo',
+ 'trader_roc',
+ 'trader_rocp',
+ 'trader_rocr100',
+ 'trader_rocr',
+ 'trader_rsi',
+ 'trader_sar',
+ 'trader_sarext',
+ 'trader_set_compat',
+ 'trader_set_unstable_period',
+ 'trader_sin',
+ 'trader_sinh',
+ 'trader_sma',
+ 'trader_sqrt',
+ 'trader_stddev',
+ 'trader_stoch',
+ 'trader_stochf',
+ 'trader_stochrsi',
+ 'trader_sub',
+ 'trader_sum',
+ 'trader_t3',
+ 'trader_tan',
+ 'trader_tanh',
+ 'trader_tema',
+ 'trader_trange',
+ 'trader_trima',
+ 'trader_trix',
+ 'trader_tsf',
+ 'trader_typprice',
+ 'trader_ultosc',
+ 'trader_var',
+ 'trader_wclprice',
+ 'trader_willr',
+ 'trader_wma'),
+ 'URL': ('base64_decode',
+ 'base64_encode',
+ 'get_headers',
+ 'get_meta_tags',
+ 'http_build_query',
+ 'parse_url',
+ 'rawurldecode',
+ 'rawurlencode',
+ 'urldecode',
+ 'urlencode'),
+ 'Uopz': ('uopz_backup',
+ 'uopz_compose',
+ 'uopz_copy',
+ 'uopz_delete',
+ 'uopz_extend',
+ 'uopz_flags',
+ 'uopz_function',
+ 'uopz_implement',
+ 'uopz_overload',
+ 'uopz_redefine',
+ 'uopz_rename',
+ 'uopz_restore',
+ 'uopz_undefine'),
+ 'Variable handling': ('boolval',
+ 'debug_zval_dump',
+ 'doubleval',
+ 'empty',
+ 'floatval',
+ 'get_defined_vars',
+ 'get_resource_type',
+ 'gettype',
+ 'import_request_variables',
+ 'intval',
+ 'is_array',
+ 'is_bool',
+ 'is_callable',
+ 'is_double',
+ 'is_float',
+ 'is_int',
+ 'is_integer',
+ 'is_long',
+ 'is_null',
+ 'is_numeric',
+ 'is_object',
+ 'is_real',
+ 'is_resource',
+ 'is_scalar',
+ 'is_string',
+ 'isset',
+ 'print_r',
+ 'serialize',
+ 'settype',
+ 'strval',
+ 'unserialize',
+ 'unset',
+ 'var_dump',
+ 'var_export'),
+ 'W32api': ('w32api_deftype',
+ 'w32api_init_dtype',
+ 'w32api_invoke_function',
+ 'w32api_register_function',
+ 'w32api_set_call_method'),
+ 'WDDX': ('wddx_add_vars',
+ 'wddx_deserialize',
+ 'wddx_packet_end',
+ 'wddx_packet_start',
+ 'wddx_serialize_value',
+ 'wddx_serialize_vars'),
+ 'WinCache': ('wincache_fcache_fileinfo',
+ 'wincache_fcache_meminfo',
+ 'wincache_lock',
+ 'wincache_ocache_fileinfo',
+ 'wincache_ocache_meminfo',
+ 'wincache_refresh_if_changed',
+ 'wincache_rplist_fileinfo',
+ 'wincache_rplist_meminfo',
+ 'wincache_scache_info',
+ 'wincache_scache_meminfo',
+ 'wincache_ucache_add',
+ 'wincache_ucache_cas',
+ 'wincache_ucache_clear',
+ 'wincache_ucache_dec',
+ 'wincache_ucache_delete',
+ 'wincache_ucache_exists',
+ 'wincache_ucache_get',
+ 'wincache_ucache_inc',
+ 'wincache_ucache_info',
+ 'wincache_ucache_meminfo',
+ 'wincache_ucache_set',
+ 'wincache_unlock'),
+ 'XML Parser': ('utf8_decode',
+ 'utf8_encode',
+ 'xml_error_string',
+ 'xml_get_current_byte_index',
+ 'xml_get_current_column_number',
+ 'xml_get_current_line_number',
+ 'xml_get_error_code',
+ 'xml_parse_into_struct',
+ 'xml_parse',
+ 'xml_parser_create_ns',
+ 'xml_parser_create',
+ 'xml_parser_free',
+ 'xml_parser_get_option',
+ 'xml_parser_set_option',
+ 'xml_set_character_data_handler',
+ 'xml_set_default_handler',
+ 'xml_set_element_handler',
+ 'xml_set_end_namespace_decl_handler',
+ 'xml_set_external_entity_ref_handler',
+ 'xml_set_notation_decl_handler',
+ 'xml_set_object',
+ 'xml_set_processing_instruction_handler',
+ 'xml_set_start_namespace_decl_handler',
+ 'xml_set_unparsed_entity_decl_handler'),
+ 'XML-RPC': ('xmlrpc_decode_request',
+ 'xmlrpc_decode',
+ 'xmlrpc_encode_request',
+ 'xmlrpc_encode',
+ 'xmlrpc_get_type',
+ 'xmlrpc_is_fault',
+ 'xmlrpc_parse_method_descriptions',
+ 'xmlrpc_server_add_introspection_data',
+ 'xmlrpc_server_call_method',
+ 'xmlrpc_server_create',
+ 'xmlrpc_server_destroy',
+ 'xmlrpc_server_register_introspection_callback',
+ 'xmlrpc_server_register_method',
+ 'xmlrpc_set_type'),
+ 'XSLT (PHP 4)': ('xslt_backend_info',
+ 'xslt_backend_name',
+ 'xslt_backend_version',
+ 'xslt_create',
+ 'xslt_errno',
+ 'xslt_error',
+ 'xslt_free',
+ 'xslt_getopt',
+ 'xslt_process',
+ 'xslt_set_base',
+ 'xslt_set_encoding',
+ 'xslt_set_error_handler',
+ 'xslt_set_log',
+ 'xslt_set_object',
+ 'xslt_set_sax_handler',
+ 'xslt_set_sax_handlers',
+ 'xslt_set_scheme_handler',
+ 'xslt_set_scheme_handlers',
+ 'xslt_setopt'),
+ 'Xhprof': ('xhprof_disable',
+ 'xhprof_enable',
+ 'xhprof_sample_disable',
+ 'xhprof_sample_enable'),
+ 'YAZ': ('yaz_addinfo',
+ 'yaz_ccl_conf',
+ 'yaz_ccl_parse',
+ 'yaz_close',
+ 'yaz_connect',
+ 'yaz_database',
+ 'yaz_element',
+ 'yaz_errno',
+ 'yaz_error',
+ 'yaz_es_result',
+ 'yaz_es',
+ 'yaz_get_option',
+ 'yaz_hits',
+ 'yaz_itemorder',
+ 'yaz_present',
+ 'yaz_range',
+ 'yaz_record',
+ 'yaz_scan_result',
+ 'yaz_scan',
+ 'yaz_schema',
+ 'yaz_search',
+ 'yaz_set_option',
+ 'yaz_sort',
+ 'yaz_syntax',
+ 'yaz_wait'),
+ 'YP/NIS': ('yp_all',
+ 'yp_cat',
+ 'yp_err_string',
+ 'yp_errno',
+ 'yp_first',
+ 'yp_get_default_domain',
+ 'yp_master',
+ 'yp_match',
+ 'yp_next',
+ 'yp_order'),
+ 'Yaml': ('yaml_emit_file',
+ 'yaml_emit',
+ 'yaml_parse_file',
+ 'yaml_parse_url',
+ 'yaml_parse'),
+ 'Zip': ('zip_close',
+ 'zip_entry_close',
+ 'zip_entry_compressedsize',
+ 'zip_entry_compressionmethod',
+ 'zip_entry_filesize',
+ 'zip_entry_name',
+ 'zip_entry_open',
+ 'zip_entry_read',
+ 'zip_open',
+ 'zip_read'),
+ 'Zlib': ('gzclose',
+ 'gzcompress',
+ 'gzdecode',
+ 'gzdeflate',
+ 'gzencode',
+ 'gzeof',
+ 'gzfile',
+ 'gzgetc',
+ 'gzgets',
+ 'gzgetss',
+ 'gzinflate',
+ 'gzopen',
+ 'gzpassthru',
+ 'gzputs',
+ 'gzread',
+ 'gzrewind',
+ 'gzseek',
+ 'gztell',
+ 'gzuncompress',
+ 'gzwrite',
+ 'readgzfile',
+ 'zlib_decode',
+ 'zlib_encode',
+ 'zlib_get_coding_type'),
+ 'bcompiler': ('bcompiler_load_exe',
+ 'bcompiler_load',
+ 'bcompiler_parse_class',
+ 'bcompiler_read',
+ 'bcompiler_write_class',
+ 'bcompiler_write_constant',
+ 'bcompiler_write_exe_footer',
+ 'bcompiler_write_file',
+ 'bcompiler_write_footer',
+ 'bcompiler_write_function',
+ 'bcompiler_write_functions_from_file',
+ 'bcompiler_write_header',
+ 'bcompiler_write_included_filename'),
+ 'cURL': ('curl_close',
+ 'curl_copy_handle',
+ 'curl_errno',
+ 'curl_error',
+ 'curl_escape',
+ 'curl_exec',
+ 'curl_file_create',
+ 'curl_getinfo',
+ 'curl_init',
+ 'curl_multi_add_handle',
+ 'curl_multi_close',
+ 'curl_multi_exec',
+ 'curl_multi_getcontent',
+ 'curl_multi_info_read',
+ 'curl_multi_init',
+ 'curl_multi_remove_handle',
+ 'curl_multi_select',
+ 'curl_multi_setopt',
+ 'curl_multi_strerror',
+ 'curl_pause',
+ 'curl_reset',
+ 'curl_setopt_array',
+ 'curl_setopt',
+ 'curl_share_close',
+ 'curl_share_init',
+ 'curl_share_setopt',
+ 'curl_strerror',
+ 'curl_unescape',
+ 'curl_version'),
+ 'chdb': ('chdb_create',),
+ 'dBase': ('dbase_add_record',
+ 'dbase_close',
+ 'dbase_create',
+ 'dbase_delete_record',
+ 'dbase_get_header_info',
+ 'dbase_get_record_with_names',
+ 'dbase_get_record',
+ 'dbase_numfields',
+ 'dbase_numrecords',
+ 'dbase_open',
+ 'dbase_pack',
+ 'dbase_replace_record'),
+ 'dbx': ('dbx_close',
+ 'dbx_compare',
+ 'dbx_connect',
+ 'dbx_error',
+ 'dbx_escape_string',
+ 'dbx_fetch_row',
+ 'dbx_query',
+ 'dbx_sort'),
+ 'filePro': ('filepro_fieldcount',
+ 'filepro_fieldname',
+ 'filepro_fieldtype',
+ 'filepro_fieldwidth',
+ 'filepro_retrieve',
+ 'filepro_rowcount',
+ 'filepro'),
+ 'iconv': ('iconv_get_encoding',
+ 'iconv_mime_decode_headers',
+ 'iconv_mime_decode',
+ 'iconv_mime_encode',
+ 'iconv_set_encoding',
+ 'iconv_strlen',
+ 'iconv_strpos',
+ 'iconv_strrpos',
+ 'iconv_substr',
+ 'iconv',
+ 'ob_iconv_handler'),
+ 'inclued': ('inclued_get_data',),
+ 'intl': ('intl_error_name',
+ 'intl_get_error_code',
+ 'intl_get_error_message',
+ 'intl_is_failure'),
+ 'libxml': ('libxml_clear_errors',
+ 'libxml_disable_entity_loader',
+ 'libxml_get_errors',
+ 'libxml_get_last_error',
+ 'libxml_set_external_entity_loader',
+ 'libxml_set_streams_context',
+ 'libxml_use_internal_errors'),
+ 'mSQL': ('msql_affected_rows',
+ 'msql_close',
+ 'msql_connect',
+ 'msql_create_db',
+ 'msql_createdb',
+ 'msql_data_seek',
+ 'msql_db_query',
+ 'msql_dbname',
+ 'msql_drop_db',
+ 'msql_error',
+ 'msql_fetch_array',
+ 'msql_fetch_field',
+ 'msql_fetch_object',
+ 'msql_fetch_row',
+ 'msql_field_flags',
+ 'msql_field_len',
+ 'msql_field_name',
+ 'msql_field_seek',
+ 'msql_field_table',
+ 'msql_field_type',
+ 'msql_fieldflags',
+ 'msql_fieldlen',
+ 'msql_fieldname',
+ 'msql_fieldtable',
+ 'msql_fieldtype',
+ 'msql_free_result',
+ 'msql_list_dbs',
+ 'msql_list_fields',
+ 'msql_list_tables',
+ 'msql_num_fields',
+ 'msql_num_rows',
+ 'msql_numfields',
+ 'msql_numrows',
+ 'msql_pconnect',
+ 'msql_query',
+ 'msql_regcase',
+ 'msql_result',
+ 'msql_select_db',
+ 'msql_tablename',
+ 'msql'),
+ 'mnoGoSearch': ('udm_add_search_limit',
+ 'udm_alloc_agent_array',
+ 'udm_alloc_agent',
+ 'udm_api_version',
+ 'udm_cat_list',
+ 'udm_cat_path',
+ 'udm_check_charset',
+ 'udm_check_stored',
+ 'udm_clear_search_limits',
+ 'udm_close_stored',
+ 'udm_crc32',
+ 'udm_errno',
+ 'udm_error',
+ 'udm_find',
+ 'udm_free_agent',
+ 'udm_free_ispell_data',
+ 'udm_free_res',
+ 'udm_get_doc_count',
+ 'udm_get_res_field',
+ 'udm_get_res_param',
+ 'udm_hash32',
+ 'udm_load_ispell_data',
+ 'udm_open_stored',
+ 'udm_set_agent_param'),
+ 'mqseries': ('mqseries_back',
+ 'mqseries_begin',
+ 'mqseries_close',
+ 'mqseries_cmit',
+ 'mqseries_conn',
+ 'mqseries_connx',
+ 'mqseries_disc',
+ 'mqseries_get',
+ 'mqseries_inq',
+ 'mqseries_open',
+ 'mqseries_put1',
+ 'mqseries_put',
+ 'mqseries_set',
+ 'mqseries_strerror'),
+ 'mysqlnd_qc': ('mysqlnd_qc_clear_cache',
+ 'mysqlnd_qc_get_available_handlers',
+ 'mysqlnd_qc_get_cache_info',
+ 'mysqlnd_qc_get_core_stats',
+ 'mysqlnd_qc_get_normalized_query_trace_log',
+ 'mysqlnd_qc_get_query_trace_log',
+ 'mysqlnd_qc_set_cache_condition',
+ 'mysqlnd_qc_set_is_select',
+ 'mysqlnd_qc_set_storage_handler',
+ 'mysqlnd_qc_set_user_handlers'),
+ 'qtdom': ('qdom_error', 'qdom_tree'),
+ 'runkit': ('runkit_class_adopt',
+ 'runkit_class_emancipate',
+ 'runkit_constant_add',
+ 'runkit_constant_redefine',
+ 'runkit_constant_remove',
+ 'runkit_function_add',
+ 'runkit_function_copy',
+ 'runkit_function_redefine',
+ 'runkit_function_remove',
+ 'runkit_function_rename',
+ 'runkit_import',
+ 'runkit_lint_file',
+ 'runkit_lint',
+ 'runkit_method_add',
+ 'runkit_method_copy',
+ 'runkit_method_redefine',
+ 'runkit_method_remove',
+ 'runkit_method_rename',
+ 'runkit_return_value_used',
+ 'runkit_sandbox_output_handler',
+ 'runkit_superglobals'),
+ 'ssdeep': ('ssdeep_fuzzy_compare',
+ 'ssdeep_fuzzy_hash_filename',
+ 'ssdeep_fuzzy_hash'),
+ 'vpopmail': ('vpopmail_add_alias_domain_ex',
+ 'vpopmail_add_alias_domain',
+ 'vpopmail_add_domain_ex',
+ 'vpopmail_add_domain',
+ 'vpopmail_add_user',
+ 'vpopmail_alias_add',
+ 'vpopmail_alias_del_domain',
+ 'vpopmail_alias_del',
+ 'vpopmail_alias_get_all',
+ 'vpopmail_alias_get',
+ 'vpopmail_auth_user',
+ 'vpopmail_del_domain_ex',
+ 'vpopmail_del_domain',
+ 'vpopmail_del_user',
+ 'vpopmail_error',
+ 'vpopmail_passwd',
+ 'vpopmail_set_user_quota'),
+ 'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'),
+ 'win32service': ('win32_continue_service',
+ 'win32_create_service',
+ 'win32_delete_service',
+ 'win32_get_last_control_message',
+ 'win32_pause_service',
+ 'win32_query_service_status',
+ 'win32_set_service_status',
+ 'win32_start_service_ctrl_dispatcher',
+ 'win32_start_service',
+ 'win32_stop_service'),
+ 'xattr': ('xattr_get',
+ 'xattr_list',
+ 'xattr_remove',
+ 'xattr_set',
+ 'xattr_supported'),
+ 'xdiff': ('xdiff_file_bdiff_size',
+ 'xdiff_file_bdiff',
+ 'xdiff_file_bpatch',
+ 'xdiff_file_diff_binary',
+ 'xdiff_file_diff',
+ 'xdiff_file_merge3',
+ 'xdiff_file_patch_binary',
+ 'xdiff_file_patch',
+ 'xdiff_file_rabdiff',
+ 'xdiff_string_bdiff_size',
+ 'xdiff_string_bdiff',
+ 'xdiff_string_bpatch',
+ 'xdiff_string_diff_binary',
+ 'xdiff_string_diff',
+ 'xdiff_string_merge3',
+ 'xdiff_string_patch_binary',
+ 'xdiff_string_patch',
+ 'xdiff_string_rabdiff')}
+
+
+if __name__ == '__main__': # pragma: no cover
+ import glob
+ import os
+ import pprint
+ import re
+ import shutil
+ import tarfile
+ try:
+ from urllib import urlretrieve
+ except ImportError:
+ from urllib.request import urlretrieve
+
+ PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
+ PHP_MANUAL_DIR = './php-chunked-xhtml/'
+ PHP_REFERENCE_GLOB = 'ref.*'
PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
- PHP_MODULE_RE = '<title>(.*?) Functions</title>'
-
- def get_php_functions():
- function_re = re.compile(PHP_FUNCTION_RE)
- module_re = re.compile(PHP_MODULE_RE)
- modules = {}
-
- for file in get_php_references():
- module = ''
+ PHP_MODULE_RE = '<title>(.*?) Functions</title>'
+
+ def get_php_functions():
+ function_re = re.compile(PHP_FUNCTION_RE)
+ module_re = re.compile(PHP_MODULE_RE)
+ modules = {}
+
+ for file in get_php_references():
+ module = ''
with open(file) as f:
for line in f:
if not module:
@@ -4705,52 +4705,52 @@ if __name__ == '__main__': # pragma: no cover
if search:
module = search.group(1)
modules[module] = []
-
+
elif 'href="function.' in line:
for match in function_re.finditer(line):
fn = match.group(1)
if '-&gt;' not in fn and '::' not in fn and fn not in modules[module]:
modules[module].append(fn)
-
- if module:
- # These are dummy manual pages, not actual functions
- if module == 'PHP Options/Info':
- modules[module].remove('main')
-
- if module == 'Filesystem':
- modules[module].remove('delete')
-
- if not modules[module]:
- del modules[module]
-
- return modules
-
- def get_php_references():
- download = urlretrieve(PHP_MANUAL_URL)
+
+ if module:
+ # These are dummy manual pages, not actual functions
+ if module == 'PHP Options/Info':
+ modules[module].remove('main')
+
+ if module == 'Filesystem':
+ modules[module].remove('delete')
+
+ if not modules[module]:
+ del modules[module]
+
+ return modules
+
+ def get_php_references():
+ download = urlretrieve(PHP_MANUAL_URL)
with tarfile.open(download[0]) as tar:
tar.extractall()
- for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
- yield file
- os.remove(download[0])
-
- def regenerate(filename, modules):
- with open(filename) as fp:
- content = fp.read()
-
- header = content[:content.find('MODULES = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- with open(filename, 'w') as fp:
- fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
- fp.write(footer)
-
- def run():
- print('>> Downloading Function Index')
- modules = get_php_functions()
- total = sum(len(v) for v in modules.values())
- print('%d functions found' % total)
- regenerate(__file__, modules)
- shutil.rmtree(PHP_MANUAL_DIR)
-
- run()
+ for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
+ yield file
+ os.remove(download[0])
+
+ def regenerate(filename, modules):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('MODULES = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(footer)
+
+ def run():
+ print('>> Downloading Function Index')
+ modules = get_php_functions()
+ total = sum(len(v) for v in modules.values())
+ print('%d functions found' % total)
+ regenerate(__file__, modules)
+ shutil.rmtree(PHP_MANUAL_DIR)
+
+ run()
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_postgres_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_postgres_builtins.py
index 21086722d4..b3578e5059 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_postgres_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_postgres_builtins.py
@@ -1,621 +1,621 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._postgres_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Self-updating data files for PostgreSQL lexer.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._postgres_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Self-updating data files for PostgreSQL lexer.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-# Autogenerated: please edit them if you like wasting your time.
-
-KEYWORDS = (
- 'ABORT',
- 'ABSOLUTE',
- 'ACCESS',
- 'ACTION',
- 'ADD',
- 'ADMIN',
- 'AFTER',
- 'AGGREGATE',
- 'ALL',
- 'ALSO',
- 'ALTER',
- 'ALWAYS',
- 'ANALYSE',
- 'ANALYZE',
- 'AND',
- 'ANY',
- 'ARRAY',
- 'AS',
- 'ASC',
- 'ASSERTION',
- 'ASSIGNMENT',
- 'ASYMMETRIC',
- 'AT',
- 'ATTRIBUTE',
- 'AUTHORIZATION',
- 'BACKWARD',
- 'BEFORE',
- 'BEGIN',
- 'BETWEEN',
- 'BIGINT',
- 'BINARY',
- 'BIT',
- 'BOOLEAN',
- 'BOTH',
- 'BY',
- 'CACHE',
- 'CALLED',
- 'CASCADE',
- 'CASCADED',
- 'CASE',
- 'CAST',
- 'CATALOG',
- 'CHAIN',
- 'CHAR',
- 'CHARACTER',
- 'CHARACTERISTICS',
- 'CHECK',
- 'CHECKPOINT',
- 'CLASS',
- 'CLOSE',
- 'CLUSTER',
- 'COALESCE',
- 'COLLATE',
- 'COLLATION',
- 'COLUMN',
- 'COMMENT',
- 'COMMENTS',
- 'COMMIT',
- 'COMMITTED',
- 'CONCURRENTLY',
- 'CONFIGURATION',
- 'CONNECTION',
- 'CONSTRAINT',
- 'CONSTRAINTS',
- 'CONTENT',
- 'CONTINUE',
- 'CONVERSION',
- 'COPY',
- 'COST',
- 'CREATE',
- 'CROSS',
- 'CSV',
- 'CURRENT',
- 'CURRENT_CATALOG',
- 'CURRENT_DATE',
- 'CURRENT_ROLE',
- 'CURRENT_SCHEMA',
- 'CURRENT_TIME',
- 'CURRENT_TIMESTAMP',
- 'CURRENT_USER',
- 'CURSOR',
- 'CYCLE',
- 'DATA',
- 'DATABASE',
- 'DAY',
- 'DEALLOCATE',
- 'DEC',
- 'DECIMAL',
- 'DECLARE',
- 'DEFAULT',
- 'DEFAULTS',
- 'DEFERRABLE',
- 'DEFERRED',
- 'DEFINER',
- 'DELETE',
- 'DELIMITER',
- 'DELIMITERS',
- 'DESC',
- 'DICTIONARY',
- 'DISABLE',
- 'DISCARD',
- 'DISTINCT',
- 'DO',
- 'DOCUMENT',
- 'DOMAIN',
- 'DOUBLE',
- 'DROP',
- 'EACH',
- 'ELSE',
- 'ENABLE',
- 'ENCODING',
- 'ENCRYPTED',
- 'END',
- 'ENUM',
- 'ESCAPE',
- 'EVENT',
- 'EXCEPT',
- 'EXCLUDE',
- 'EXCLUDING',
- 'EXCLUSIVE',
- 'EXECUTE',
- 'EXISTS',
- 'EXPLAIN',
- 'EXTENSION',
- 'EXTERNAL',
- 'EXTRACT',
- 'FALSE',
- 'FAMILY',
- 'FETCH',
- 'FILTER',
- 'FIRST',
- 'FLOAT',
- 'FOLLOWING',
- 'FOR',
- 'FORCE',
- 'FOREIGN',
- 'FORWARD',
- 'FREEZE',
- 'FROM',
- 'FULL',
- 'FUNCTION',
- 'FUNCTIONS',
- 'GLOBAL',
- 'GRANT',
- 'GRANTED',
- 'GREATEST',
- 'GROUP',
- 'HANDLER',
- 'HAVING',
- 'HEADER',
- 'HOLD',
- 'HOUR',
- 'IDENTITY',
- 'IF',
- 'ILIKE',
- 'IMMEDIATE',
- 'IMMUTABLE',
- 'IMPLICIT',
- 'IN',
- 'INCLUDING',
- 'INCREMENT',
- 'INDEX',
- 'INDEXES',
- 'INHERIT',
- 'INHERITS',
- 'INITIALLY',
- 'INLINE',
- 'INNER',
- 'INOUT',
- 'INPUT',
- 'INSENSITIVE',
- 'INSERT',
- 'INSTEAD',
- 'INT',
- 'INTEGER',
- 'INTERSECT',
- 'INTERVAL',
- 'INTO',
- 'INVOKER',
- 'IS',
- 'ISNULL',
- 'ISOLATION',
- 'JOIN',
- 'KEY',
- 'LABEL',
- 'LANGUAGE',
- 'LARGE',
- 'LAST',
- 'LATERAL',
- 'LC_COLLATE',
- 'LC_CTYPE',
- 'LEADING',
- 'LEAKPROOF',
- 'LEAST',
- 'LEFT',
- 'LEVEL',
- 'LIKE',
- 'LIMIT',
- 'LISTEN',
- 'LOAD',
- 'LOCAL',
- 'LOCALTIME',
- 'LOCALTIMESTAMP',
- 'LOCATION',
- 'LOCK',
- 'MAPPING',
- 'MATCH',
- 'MATERIALIZED',
- 'MAXVALUE',
- 'MINUTE',
- 'MINVALUE',
- 'MODE',
- 'MONTH',
- 'MOVE',
- 'NAME',
- 'NAMES',
- 'NATIONAL',
- 'NATURAL',
- 'NCHAR',
- 'NEXT',
- 'NO',
- 'NONE',
- 'NOT',
- 'NOTHING',
- 'NOTIFY',
- 'NOTNULL',
- 'NOWAIT',
- 'NULL',
- 'NULLIF',
- 'NULLS',
- 'NUMERIC',
- 'OBJECT',
- 'OF',
- 'OFF',
- 'OFFSET',
- 'OIDS',
- 'ON',
- 'ONLY',
- 'OPERATOR',
- 'OPTION',
- 'OPTIONS',
- 'OR',
- 'ORDER',
- 'ORDINALITY',
- 'OUT',
- 'OUTER',
- 'OVER',
- 'OVERLAPS',
- 'OVERLAY',
- 'OWNED',
- 'OWNER',
- 'PARSER',
- 'PARTIAL',
- 'PARTITION',
- 'PASSING',
- 'PASSWORD',
- 'PLACING',
- 'PLANS',
- 'POLICY',
- 'POSITION',
- 'PRECEDING',
- 'PRECISION',
- 'PREPARE',
- 'PREPARED',
- 'PRESERVE',
- 'PRIMARY',
- 'PRIOR',
- 'PRIVILEGES',
- 'PROCEDURAL',
- 'PROCEDURE',
- 'PROGRAM',
- 'QUOTE',
- 'RANGE',
- 'READ',
- 'REAL',
- 'REASSIGN',
- 'RECHECK',
- 'RECURSIVE',
- 'REF',
- 'REFERENCES',
- 'REFRESH',
- 'REINDEX',
- 'RELATIVE',
- 'RELEASE',
- 'RENAME',
- 'REPEATABLE',
- 'REPLACE',
- 'REPLICA',
- 'RESET',
- 'RESTART',
- 'RESTRICT',
- 'RETURNING',
- 'RETURNS',
- 'REVOKE',
- 'RIGHT',
- 'ROLE',
- 'ROLLBACK',
- 'ROW',
- 'ROWS',
- 'RULE',
- 'SAVEPOINT',
- 'SCHEMA',
- 'SCROLL',
- 'SEARCH',
- 'SECOND',
- 'SECURITY',
- 'SELECT',
- 'SEQUENCE',
- 'SEQUENCES',
- 'SERIALIZABLE',
- 'SERVER',
- 'SESSION',
- 'SESSION_USER',
- 'SET',
- 'SETOF',
- 'SHARE',
- 'SHOW',
- 'SIMILAR',
- 'SIMPLE',
- 'SMALLINT',
- 'SNAPSHOT',
- 'SOME',
- 'STABLE',
- 'STANDALONE',
- 'START',
- 'STATEMENT',
- 'STATISTICS',
- 'STDIN',
- 'STDOUT',
- 'STORAGE',
- 'STRICT',
- 'STRIP',
- 'SUBSTRING',
- 'SYMMETRIC',
- 'SYSID',
- 'SYSTEM',
- 'TABLE',
- 'TABLES',
- 'TABLESPACE',
- 'TEMP',
- 'TEMPLATE',
- 'TEMPORARY',
- 'TEXT',
- 'THEN',
- 'TIME',
- 'TIMESTAMP',
- 'TO',
- 'TRAILING',
- 'TRANSACTION',
- 'TREAT',
- 'TRIGGER',
- 'TRIM',
- 'TRUE',
- 'TRUNCATE',
- 'TRUSTED',
- 'TYPE',
- 'TYPES',
- 'UNBOUNDED',
- 'UNCOMMITTED',
- 'UNENCRYPTED',
- 'UNION',
- 'UNIQUE',
- 'UNKNOWN',
- 'UNLISTEN',
- 'UNLOGGED',
- 'UNTIL',
- 'UPDATE',
- 'USER',
- 'USING',
- 'VACUUM',
- 'VALID',
- 'VALIDATE',
- 'VALIDATOR',
- 'VALUE',
- 'VALUES',
- 'VARCHAR',
- 'VARIADIC',
- 'VARYING',
- 'VERBOSE',
- 'VERSION',
- 'VIEW',
- 'VIEWS',
- 'VOLATILE',
- 'WHEN',
- 'WHERE',
- 'WHITESPACE',
- 'WINDOW',
- 'WITH',
- 'WITHIN',
- 'WITHOUT',
- 'WORK',
- 'WRAPPER',
- 'WRITE',
- 'XML',
- 'XMLATTRIBUTES',
- 'XMLCONCAT',
- 'XMLELEMENT',
- 'XMLEXISTS',
- 'XMLFOREST',
- 'XMLPARSE',
- 'XMLPI',
- 'XMLROOT',
- 'XMLSERIALIZE',
- 'YEAR',
- 'YES',
- 'ZONE',
-)
-
-DATATYPES = (
- 'bigint',
- 'bigserial',
- 'bit',
- 'bit varying',
- 'bool',
- 'boolean',
- 'box',
- 'bytea',
- 'char',
- 'character',
- 'character varying',
- 'cidr',
- 'circle',
- 'date',
- 'decimal',
- 'double precision',
- 'float4',
- 'float8',
- 'inet',
- 'int',
- 'int2',
- 'int4',
- 'int8',
- 'integer',
- 'interval',
- 'json',
- 'jsonb',
- 'line',
- 'lseg',
- 'macaddr',
- 'money',
- 'numeric',
- 'path',
- 'pg_lsn',
- 'point',
- 'polygon',
- 'real',
- 'serial',
- 'serial2',
- 'serial4',
- 'serial8',
- 'smallint',
- 'smallserial',
- 'text',
- 'time',
- 'timestamp',
- 'timestamptz',
- 'timetz',
- 'tsquery',
- 'tsvector',
- 'txid_snapshot',
- 'uuid',
- 'varbit',
- 'varchar',
- 'with time zone',
- 'without time zone',
- 'xml',
-)
-
-PSEUDO_TYPES = (
- 'any',
- 'anyelement',
- 'anyarray',
- 'anynonarray',
- 'anyenum',
- 'anyrange',
- 'cstring',
- 'internal',
- 'language_handler',
- 'fdw_handler',
- 'record',
- 'trigger',
- 'void',
- 'opaque',
-)
-
-# Remove 'trigger' from types
-PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))))
-
-PLPGSQL_KEYWORDS = (
- 'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
- 'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
- 'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import re
- try:
- from urllib import urlopen
- except ImportError:
- from urllib.request import urlopen
-
- from pygments.util import format_lines
-
- # One man's constant is another man's variable.
- SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
- KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
- DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
-
- def update_myself():
- data_file = list(urlopen(DATATYPES_URL))
- datatypes = parse_datatypes(data_file)
- pseudos = parse_pseudos(data_file)
-
- keywords = parse_keywords(urlopen(KEYWORDS_URL))
- update_consts(__file__, 'DATATYPES', datatypes)
- update_consts(__file__, 'PSEUDO_TYPES', pseudos)
- update_consts(__file__, 'KEYWORDS', keywords)
-
- def parse_keywords(f):
- kw = []
- for m in re.finditer(
- r'\s*<entry><token>([^<]+)</token></entry>\s*'
- r'<entry>([^<]+)</entry>', f.read()):
- kw.append(m.group(1))
-
- if not kw:
- raise ValueError('no keyword found')
-
- kw.sort()
- return kw
-
- def parse_datatypes(f):
- dt = set()
- for line in f:
- if '<sect1' in line:
- break
- if '<entry><type>' not in line:
- continue
-
- # Parse a string such as
- # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
- # into types "time" and "without time zone"
-
- # remove all the tags
- line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
- line = re.sub("<[^>]+>", "", line)
-
- # Drop the parts containing braces
- for tmp in [t for tmp in line.split('[')
- for t in tmp.split(']') if "(" not in t]:
- for t in tmp.split(','):
- t = t.strip()
- if not t: continue
- dt.add(" ".join(t.split()))
-
- dt = list(dt)
- dt.sort()
- return dt
-
- def parse_pseudos(f):
- dt = []
- re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
- re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
- re_end = re.compile(r'\s*</table>')
-
- f = iter(f)
- for line in f:
- if re_start.match(line) is not None:
- break
- else:
- raise ValueError('pseudo datatypes table not found')
-
- for line in f:
- m = re_entry.match(line)
- if m is not None:
- dt.append(m.group(1))
-
- if re_end.match(line) is not None:
- break
- else:
- raise ValueError('end of pseudo datatypes table not found')
-
- if not dt:
- raise ValueError('pseudo datatypes not found')
-
- return dt
-
- def update_consts(filename, constname, content):
- with open(filename) as f:
- data = f.read()
-
- # Line to start/end inserting
- re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
- m = re_match.search(data)
- if not m:
- raise ValueError('Could not find existing definition for %s' %
- (constname,))
-
- new_block = format_lines(constname, content)
- data = data[:m.start()] + new_block + data[m.end():]
-
- with open(filename, 'w') as f:
- f.write(data)
-
- update_myself()
+ :license: BSD, see LICENSE for details.
+"""
+
+
+# Autogenerated: please edit them if you like wasting your time.
+
+KEYWORDS = (
+ 'ABORT',
+ 'ABSOLUTE',
+ 'ACCESS',
+ 'ACTION',
+ 'ADD',
+ 'ADMIN',
+ 'AFTER',
+ 'AGGREGATE',
+ 'ALL',
+ 'ALSO',
+ 'ALTER',
+ 'ALWAYS',
+ 'ANALYSE',
+ 'ANALYZE',
+ 'AND',
+ 'ANY',
+ 'ARRAY',
+ 'AS',
+ 'ASC',
+ 'ASSERTION',
+ 'ASSIGNMENT',
+ 'ASYMMETRIC',
+ 'AT',
+ 'ATTRIBUTE',
+ 'AUTHORIZATION',
+ 'BACKWARD',
+ 'BEFORE',
+ 'BEGIN',
+ 'BETWEEN',
+ 'BIGINT',
+ 'BINARY',
+ 'BIT',
+ 'BOOLEAN',
+ 'BOTH',
+ 'BY',
+ 'CACHE',
+ 'CALLED',
+ 'CASCADE',
+ 'CASCADED',
+ 'CASE',
+ 'CAST',
+ 'CATALOG',
+ 'CHAIN',
+ 'CHAR',
+ 'CHARACTER',
+ 'CHARACTERISTICS',
+ 'CHECK',
+ 'CHECKPOINT',
+ 'CLASS',
+ 'CLOSE',
+ 'CLUSTER',
+ 'COALESCE',
+ 'COLLATE',
+ 'COLLATION',
+ 'COLUMN',
+ 'COMMENT',
+ 'COMMENTS',
+ 'COMMIT',
+ 'COMMITTED',
+ 'CONCURRENTLY',
+ 'CONFIGURATION',
+ 'CONNECTION',
+ 'CONSTRAINT',
+ 'CONSTRAINTS',
+ 'CONTENT',
+ 'CONTINUE',
+ 'CONVERSION',
+ 'COPY',
+ 'COST',
+ 'CREATE',
+ 'CROSS',
+ 'CSV',
+ 'CURRENT',
+ 'CURRENT_CATALOG',
+ 'CURRENT_DATE',
+ 'CURRENT_ROLE',
+ 'CURRENT_SCHEMA',
+ 'CURRENT_TIME',
+ 'CURRENT_TIMESTAMP',
+ 'CURRENT_USER',
+ 'CURSOR',
+ 'CYCLE',
+ 'DATA',
+ 'DATABASE',
+ 'DAY',
+ 'DEALLOCATE',
+ 'DEC',
+ 'DECIMAL',
+ 'DECLARE',
+ 'DEFAULT',
+ 'DEFAULTS',
+ 'DEFERRABLE',
+ 'DEFERRED',
+ 'DEFINER',
+ 'DELETE',
+ 'DELIMITER',
+ 'DELIMITERS',
+ 'DESC',
+ 'DICTIONARY',
+ 'DISABLE',
+ 'DISCARD',
+ 'DISTINCT',
+ 'DO',
+ 'DOCUMENT',
+ 'DOMAIN',
+ 'DOUBLE',
+ 'DROP',
+ 'EACH',
+ 'ELSE',
+ 'ENABLE',
+ 'ENCODING',
+ 'ENCRYPTED',
+ 'END',
+ 'ENUM',
+ 'ESCAPE',
+ 'EVENT',
+ 'EXCEPT',
+ 'EXCLUDE',
+ 'EXCLUDING',
+ 'EXCLUSIVE',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXPLAIN',
+ 'EXTENSION',
+ 'EXTERNAL',
+ 'EXTRACT',
+ 'FALSE',
+ 'FAMILY',
+ 'FETCH',
+ 'FILTER',
+ 'FIRST',
+ 'FLOAT',
+ 'FOLLOWING',
+ 'FOR',
+ 'FORCE',
+ 'FOREIGN',
+ 'FORWARD',
+ 'FREEZE',
+ 'FROM',
+ 'FULL',
+ 'FUNCTION',
+ 'FUNCTIONS',
+ 'GLOBAL',
+ 'GRANT',
+ 'GRANTED',
+ 'GREATEST',
+ 'GROUP',
+ 'HANDLER',
+ 'HAVING',
+ 'HEADER',
+ 'HOLD',
+ 'HOUR',
+ 'IDENTITY',
+ 'IF',
+ 'ILIKE',
+ 'IMMEDIATE',
+ 'IMMUTABLE',
+ 'IMPLICIT',
+ 'IN',
+ 'INCLUDING',
+ 'INCREMENT',
+ 'INDEX',
+ 'INDEXES',
+ 'INHERIT',
+ 'INHERITS',
+ 'INITIALLY',
+ 'INLINE',
+ 'INNER',
+ 'INOUT',
+ 'INPUT',
+ 'INSENSITIVE',
+ 'INSERT',
+ 'INSTEAD',
+ 'INT',
+ 'INTEGER',
+ 'INTERSECT',
+ 'INTERVAL',
+ 'INTO',
+ 'INVOKER',
+ 'IS',
+ 'ISNULL',
+ 'ISOLATION',
+ 'JOIN',
+ 'KEY',
+ 'LABEL',
+ 'LANGUAGE',
+ 'LARGE',
+ 'LAST',
+ 'LATERAL',
+ 'LC_COLLATE',
+ 'LC_CTYPE',
+ 'LEADING',
+ 'LEAKPROOF',
+ 'LEAST',
+ 'LEFT',
+ 'LEVEL',
+ 'LIKE',
+ 'LIMIT',
+ 'LISTEN',
+ 'LOAD',
+ 'LOCAL',
+ 'LOCALTIME',
+ 'LOCALTIMESTAMP',
+ 'LOCATION',
+ 'LOCK',
+ 'MAPPING',
+ 'MATCH',
+ 'MATERIALIZED',
+ 'MAXVALUE',
+ 'MINUTE',
+ 'MINVALUE',
+ 'MODE',
+ 'MONTH',
+ 'MOVE',
+ 'NAME',
+ 'NAMES',
+ 'NATIONAL',
+ 'NATURAL',
+ 'NCHAR',
+ 'NEXT',
+ 'NO',
+ 'NONE',
+ 'NOT',
+ 'NOTHING',
+ 'NOTIFY',
+ 'NOTNULL',
+ 'NOWAIT',
+ 'NULL',
+ 'NULLIF',
+ 'NULLS',
+ 'NUMERIC',
+ 'OBJECT',
+ 'OF',
+ 'OFF',
+ 'OFFSET',
+ 'OIDS',
+ 'ON',
+ 'ONLY',
+ 'OPERATOR',
+ 'OPTION',
+ 'OPTIONS',
+ 'OR',
+ 'ORDER',
+ 'ORDINALITY',
+ 'OUT',
+ 'OUTER',
+ 'OVER',
+ 'OVERLAPS',
+ 'OVERLAY',
+ 'OWNED',
+ 'OWNER',
+ 'PARSER',
+ 'PARTIAL',
+ 'PARTITION',
+ 'PASSING',
+ 'PASSWORD',
+ 'PLACING',
+ 'PLANS',
+ 'POLICY',
+ 'POSITION',
+ 'PRECEDING',
+ 'PRECISION',
+ 'PREPARE',
+ 'PREPARED',
+ 'PRESERVE',
+ 'PRIMARY',
+ 'PRIOR',
+ 'PRIVILEGES',
+ 'PROCEDURAL',
+ 'PROCEDURE',
+ 'PROGRAM',
+ 'QUOTE',
+ 'RANGE',
+ 'READ',
+ 'REAL',
+ 'REASSIGN',
+ 'RECHECK',
+ 'RECURSIVE',
+ 'REF',
+ 'REFERENCES',
+ 'REFRESH',
+ 'REINDEX',
+ 'RELATIVE',
+ 'RELEASE',
+ 'RENAME',
+ 'REPEATABLE',
+ 'REPLACE',
+ 'REPLICA',
+ 'RESET',
+ 'RESTART',
+ 'RESTRICT',
+ 'RETURNING',
+ 'RETURNS',
+ 'REVOKE',
+ 'RIGHT',
+ 'ROLE',
+ 'ROLLBACK',
+ 'ROW',
+ 'ROWS',
+ 'RULE',
+ 'SAVEPOINT',
+ 'SCHEMA',
+ 'SCROLL',
+ 'SEARCH',
+ 'SECOND',
+ 'SECURITY',
+ 'SELECT',
+ 'SEQUENCE',
+ 'SEQUENCES',
+ 'SERIALIZABLE',
+ 'SERVER',
+ 'SESSION',
+ 'SESSION_USER',
+ 'SET',
+ 'SETOF',
+ 'SHARE',
+ 'SHOW',
+ 'SIMILAR',
+ 'SIMPLE',
+ 'SMALLINT',
+ 'SNAPSHOT',
+ 'SOME',
+ 'STABLE',
+ 'STANDALONE',
+ 'START',
+ 'STATEMENT',
+ 'STATISTICS',
+ 'STDIN',
+ 'STDOUT',
+ 'STORAGE',
+ 'STRICT',
+ 'STRIP',
+ 'SUBSTRING',
+ 'SYMMETRIC',
+ 'SYSID',
+ 'SYSTEM',
+ 'TABLE',
+ 'TABLES',
+ 'TABLESPACE',
+ 'TEMP',
+ 'TEMPLATE',
+ 'TEMPORARY',
+ 'TEXT',
+ 'THEN',
+ 'TIME',
+ 'TIMESTAMP',
+ 'TO',
+ 'TRAILING',
+ 'TRANSACTION',
+ 'TREAT',
+ 'TRIGGER',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNCATE',
+ 'TRUSTED',
+ 'TYPE',
+ 'TYPES',
+ 'UNBOUNDED',
+ 'UNCOMMITTED',
+ 'UNENCRYPTED',
+ 'UNION',
+ 'UNIQUE',
+ 'UNKNOWN',
+ 'UNLISTEN',
+ 'UNLOGGED',
+ 'UNTIL',
+ 'UPDATE',
+ 'USER',
+ 'USING',
+ 'VACUUM',
+ 'VALID',
+ 'VALIDATE',
+ 'VALIDATOR',
+ 'VALUE',
+ 'VALUES',
+ 'VARCHAR',
+ 'VARIADIC',
+ 'VARYING',
+ 'VERBOSE',
+ 'VERSION',
+ 'VIEW',
+ 'VIEWS',
+ 'VOLATILE',
+ 'WHEN',
+ 'WHERE',
+ 'WHITESPACE',
+ 'WINDOW',
+ 'WITH',
+ 'WITHIN',
+ 'WITHOUT',
+ 'WORK',
+ 'WRAPPER',
+ 'WRITE',
+ 'XML',
+ 'XMLATTRIBUTES',
+ 'XMLCONCAT',
+ 'XMLELEMENT',
+ 'XMLEXISTS',
+ 'XMLFOREST',
+ 'XMLPARSE',
+ 'XMLPI',
+ 'XMLROOT',
+ 'XMLSERIALIZE',
+ 'YEAR',
+ 'YES',
+ 'ZONE',
+)
+
+DATATYPES = (
+ 'bigint',
+ 'bigserial',
+ 'bit',
+ 'bit varying',
+ 'bool',
+ 'boolean',
+ 'box',
+ 'bytea',
+ 'char',
+ 'character',
+ 'character varying',
+ 'cidr',
+ 'circle',
+ 'date',
+ 'decimal',
+ 'double precision',
+ 'float4',
+ 'float8',
+ 'inet',
+ 'int',
+ 'int2',
+ 'int4',
+ 'int8',
+ 'integer',
+ 'interval',
+ 'json',
+ 'jsonb',
+ 'line',
+ 'lseg',
+ 'macaddr',
+ 'money',
+ 'numeric',
+ 'path',
+ 'pg_lsn',
+ 'point',
+ 'polygon',
+ 'real',
+ 'serial',
+ 'serial2',
+ 'serial4',
+ 'serial8',
+ 'smallint',
+ 'smallserial',
+ 'text',
+ 'time',
+ 'timestamp',
+ 'timestamptz',
+ 'timetz',
+ 'tsquery',
+ 'tsvector',
+ 'txid_snapshot',
+ 'uuid',
+ 'varbit',
+ 'varchar',
+ 'with time zone',
+ 'without time zone',
+ 'xml',
+)
+
+PSEUDO_TYPES = (
+ 'any',
+ 'anyelement',
+ 'anyarray',
+ 'anynonarray',
+ 'anyenum',
+ 'anyrange',
+ 'cstring',
+ 'internal',
+ 'language_handler',
+ 'fdw_handler',
+ 'record',
+ 'trigger',
+ 'void',
+ 'opaque',
+)
+
+# Remove 'trigger' from types
+PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))))
+
+PLPGSQL_KEYWORDS = (
+ 'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
+ 'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
+ 'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ try:
+ from urllib import urlopen
+ except ImportError:
+ from urllib.request import urlopen
+
+ from pygments.util import format_lines
+
+ # One man's constant is another man's variable.
+ SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
+ KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
+ DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
+
+ def update_myself():
+ data_file = list(urlopen(DATATYPES_URL))
+ datatypes = parse_datatypes(data_file)
+ pseudos = parse_pseudos(data_file)
+
+ keywords = parse_keywords(urlopen(KEYWORDS_URL))
+ update_consts(__file__, 'DATATYPES', datatypes)
+ update_consts(__file__, 'PSEUDO_TYPES', pseudos)
+ update_consts(__file__, 'KEYWORDS', keywords)
+
+ def parse_keywords(f):
+ kw = []
+ for m in re.finditer(
+ r'\s*<entry><token>([^<]+)</token></entry>\s*'
+ r'<entry>([^<]+)</entry>', f.read()):
+ kw.append(m.group(1))
+
+ if not kw:
+ raise ValueError('no keyword found')
+
+ kw.sort()
+ return kw
+
+ def parse_datatypes(f):
+ dt = set()
+ for line in f:
+ if '<sect1' in line:
+ break
+ if '<entry><type>' not in line:
+ continue
+
+ # Parse a string such as
+ # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
+ # into types "time" and "without time zone"
+
+ # remove all the tags
+ line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
+ line = re.sub("<[^>]+>", "", line)
+
+ # Drop the parts containing braces
+ for tmp in [t for tmp in line.split('[')
+ for t in tmp.split(']') if "(" not in t]:
+ for t in tmp.split(','):
+ t = t.strip()
+ if not t: continue
+ dt.add(" ".join(t.split()))
+
+ dt = list(dt)
+ dt.sort()
+ return dt
+
+ def parse_pseudos(f):
+ dt = []
+ re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
+ re_entry = re.compile(r'\s*<entry><type>([^<]+)</></entry>')
+ re_end = re.compile(r'\s*</table>')
+
+ f = iter(f)
+ for line in f:
+ if re_start.match(line) is not None:
+ break
+ else:
+ raise ValueError('pseudo datatypes table not found')
+
+ for line in f:
+ m = re_entry.match(line)
+ if m is not None:
+ dt.append(m.group(1))
+
+ if re_end.match(line) is not None:
+ break
+ else:
+ raise ValueError('end of pseudo datatypes table not found')
+
+ if not dt:
+ raise ValueError('pseudo datatypes not found')
+
+ return dt
+
+ def update_consts(filename, constname, content):
+ with open(filename) as f:
+ data = f.read()
+
+ # Line to start/end inserting
+ re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
+ m = re_match.search(data)
+ if not m:
+ raise ValueError('Could not find existing definition for %s' %
+ (constname,))
+
+ new_block = format_lines(constname, content)
+ data = data[:m.start()] + new_block + data[m.end():]
+
+ with open(filename, 'w') as f:
+ f.write(data)
+
+ update_myself()
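
Aside (not part of the diff): the `__main__` block above makes `_postgres_builtins.py` regenerate itself — `update_consts()` finds the existing `NAME = ( ... )` tuple in this very file with a multiline regex and splices in a block rebuilt by `pygments.util.format_lines()`. The following is a minimal standalone sketch of that splice step; the `splice_const` helper and the sample source string are illustrative assumptions, and the tuple layout is built by hand here rather than calling `format_lines()`.

    import re

    def splice_const(source, constname, values):
        # Same pattern idea as update_consts(): match "NAME = ( ... )" spanning lines.
        pattern = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
        if not pattern.search(source):
            raise ValueError('Could not find existing definition for %s' % constname)
        # Rebuild the tuple body; format_lines() in pygments.util emits a similar layout.
        body = '\n'.join("    '%s'," % v for v in values)
        new_block = '%s = (\n%s\n)' % (constname, body)
        # Lambda avoids backslash-escape surprises in the replacement text.
        return pattern.sub(lambda m: new_block, source, count=1)

    # Example: swap a two-item tuple for a regenerated three-item one.
    src = "KEYWORDS = (\n    'ABORT',\n    'ADD',\n)\n"
    print(splice_const(src, 'KEYWORDS', ['ABORT', 'ACCESS', 'ADD']))

Run against a file's full text (as update_consts() does with `open(filename).read()`), the returned string can simply be written back in place of the original.
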
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_scilab_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_scilab_builtins.py
index b26a00859d..9cb18e5775 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_scilab_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_scilab_builtins.py
@@ -1,3094 +1,3094 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._scilab_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the ScilabLexer.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._scilab_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtin list for the ScilabLexer.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Autogenerated
-
-commands_kw = (
- 'abort',
- 'apropos',
- 'break',
- 'case',
- 'catch',
- 'continue',
- 'do',
- 'else',
- 'elseif',
- 'end',
- 'endfunction',
- 'for',
- 'function',
- 'help',
- 'if',
- 'pause',
- 'quit',
- 'select',
- 'then',
- 'try',
- 'while',
-)
-
-functions_kw = (
- '!!_invoke_',
- '%H5Object_e',
- '%H5Object_fieldnames',
- '%H5Object_p',
- '%XMLAttr_6',
- '%XMLAttr_e',
- '%XMLAttr_i_XMLElem',
- '%XMLAttr_length',
- '%XMLAttr_p',
- '%XMLAttr_size',
- '%XMLDoc_6',
- '%XMLDoc_e',
- '%XMLDoc_i_XMLList',
- '%XMLDoc_p',
- '%XMLElem_6',
- '%XMLElem_e',
- '%XMLElem_i_XMLDoc',
- '%XMLElem_i_XMLElem',
- '%XMLElem_i_XMLList',
- '%XMLElem_p',
- '%XMLList_6',
- '%XMLList_e',
- '%XMLList_i_XMLElem',
- '%XMLList_i_XMLList',
- '%XMLList_length',
- '%XMLList_p',
- '%XMLList_size',
- '%XMLNs_6',
- '%XMLNs_e',
- '%XMLNs_i_XMLElem',
- '%XMLNs_p',
- '%XMLSet_6',
- '%XMLSet_e',
- '%XMLSet_length',
- '%XMLSet_p',
- '%XMLSet_size',
- '%XMLValid_p',
- '%_EClass_6',
- '%_EClass_e',
- '%_EClass_p',
- '%_EObj_0',
- '%_EObj_1__EObj',
- '%_EObj_1_b',
- '%_EObj_1_c',
- '%_EObj_1_i',
- '%_EObj_1_s',
- '%_EObj_2__EObj',
- '%_EObj_2_b',
- '%_EObj_2_c',
- '%_EObj_2_i',
- '%_EObj_2_s',
- '%_EObj_3__EObj',
- '%_EObj_3_b',
- '%_EObj_3_c',
- '%_EObj_3_i',
- '%_EObj_3_s',
- '%_EObj_4__EObj',
- '%_EObj_4_b',
- '%_EObj_4_c',
- '%_EObj_4_i',
- '%_EObj_4_s',
- '%_EObj_5',
- '%_EObj_6',
- '%_EObj_a__EObj',
- '%_EObj_a_b',
- '%_EObj_a_c',
- '%_EObj_a_i',
- '%_EObj_a_s',
- '%_EObj_d__EObj',
- '%_EObj_d_b',
- '%_EObj_d_c',
- '%_EObj_d_i',
- '%_EObj_d_s',
- '%_EObj_disp',
- '%_EObj_e',
- '%_EObj_g__EObj',
- '%_EObj_g_b',
- '%_EObj_g_c',
- '%_EObj_g_i',
- '%_EObj_g_s',
- '%_EObj_h__EObj',
- '%_EObj_h_b',
- '%_EObj_h_c',
- '%_EObj_h_i',
- '%_EObj_h_s',
- '%_EObj_i__EObj',
- '%_EObj_j__EObj',
- '%_EObj_j_b',
- '%_EObj_j_c',
- '%_EObj_j_i',
- '%_EObj_j_s',
- '%_EObj_k__EObj',
- '%_EObj_k_b',
- '%_EObj_k_c',
- '%_EObj_k_i',
- '%_EObj_k_s',
- '%_EObj_l__EObj',
- '%_EObj_l_b',
- '%_EObj_l_c',
- '%_EObj_l_i',
- '%_EObj_l_s',
- '%_EObj_m__EObj',
- '%_EObj_m_b',
- '%_EObj_m_c',
- '%_EObj_m_i',
- '%_EObj_m_s',
- '%_EObj_n__EObj',
- '%_EObj_n_b',
- '%_EObj_n_c',
- '%_EObj_n_i',
- '%_EObj_n_s',
- '%_EObj_o__EObj',
- '%_EObj_o_b',
- '%_EObj_o_c',
- '%_EObj_o_i',
- '%_EObj_o_s',
- '%_EObj_p',
- '%_EObj_p__EObj',
- '%_EObj_p_b',
- '%_EObj_p_c',
- '%_EObj_p_i',
- '%_EObj_p_s',
- '%_EObj_q__EObj',
- '%_EObj_q_b',
- '%_EObj_q_c',
- '%_EObj_q_i',
- '%_EObj_q_s',
- '%_EObj_r__EObj',
- '%_EObj_r_b',
- '%_EObj_r_c',
- '%_EObj_r_i',
- '%_EObj_r_s',
- '%_EObj_s__EObj',
- '%_EObj_s_b',
- '%_EObj_s_c',
- '%_EObj_s_i',
- '%_EObj_s_s',
- '%_EObj_t',
- '%_EObj_x__EObj',
- '%_EObj_x_b',
- '%_EObj_x_c',
- '%_EObj_x_i',
- '%_EObj_x_s',
- '%_EObj_y__EObj',
- '%_EObj_y_b',
- '%_EObj_y_c',
- '%_EObj_y_i',
- '%_EObj_y_s',
- '%_EObj_z__EObj',
- '%_EObj_z_b',
- '%_EObj_z_c',
- '%_EObj_z_i',
- '%_EObj_z_s',
- '%_eigs',
- '%_load',
- '%b_1__EObj',
- '%b_2__EObj',
- '%b_3__EObj',
- '%b_4__EObj',
- '%b_a__EObj',
- '%b_d__EObj',
- '%b_g__EObj',
- '%b_h__EObj',
- '%b_i_XMLList',
- '%b_i__EObj',
- '%b_j__EObj',
- '%b_k__EObj',
- '%b_l__EObj',
- '%b_m__EObj',
- '%b_n__EObj',
- '%b_o__EObj',
- '%b_p__EObj',
- '%b_q__EObj',
- '%b_r__EObj',
- '%b_s__EObj',
- '%b_x__EObj',
- '%b_y__EObj',
- '%b_z__EObj',
- '%c_1__EObj',
- '%c_2__EObj',
- '%c_3__EObj',
- '%c_4__EObj',
- '%c_a__EObj',
- '%c_d__EObj',
- '%c_g__EObj',
- '%c_h__EObj',
- '%c_i_XMLAttr',
- '%c_i_XMLDoc',
- '%c_i_XMLElem',
- '%c_i_XMLList',
- '%c_i__EObj',
- '%c_j__EObj',
- '%c_k__EObj',
- '%c_l__EObj',
- '%c_m__EObj',
- '%c_n__EObj',
- '%c_o__EObj',
- '%c_p__EObj',
- '%c_q__EObj',
- '%c_r__EObj',
- '%c_s__EObj',
- '%c_x__EObj',
- '%c_y__EObj',
- '%c_z__EObj',
- '%ce_i_XMLList',
- '%fptr_i_XMLList',
- '%h_i_XMLList',
- '%hm_i_XMLList',
- '%i_1__EObj',
- '%i_2__EObj',
- '%i_3__EObj',
- '%i_4__EObj',
- '%i_a__EObj',
- '%i_abs',
- '%i_cumprod',
- '%i_cumsum',
- '%i_d__EObj',
- '%i_diag',
- '%i_g__EObj',
- '%i_h__EObj',
- '%i_i_XMLList',
- '%i_i__EObj',
- '%i_j__EObj',
- '%i_k__EObj',
- '%i_l__EObj',
- '%i_m__EObj',
- '%i_matrix',
- '%i_max',
- '%i_maxi',
- '%i_min',
- '%i_mini',
- '%i_mput',
- '%i_n__EObj',
- '%i_o__EObj',
- '%i_p',
- '%i_p__EObj',
- '%i_prod',
- '%i_q__EObj',
- '%i_r__EObj',
- '%i_s__EObj',
- '%i_sum',
- '%i_tril',
- '%i_triu',
- '%i_x__EObj',
- '%i_y__EObj',
- '%i_z__EObj',
- '%ip_i_XMLList',
- '%l_i_XMLList',
- '%l_i__EObj',
- '%lss_i_XMLList',
- '%mc_i_XMLList',
- '%msp_full',
- '%msp_i_XMLList',
- '%msp_spget',
- '%p_i_XMLList',
- '%ptr_i_XMLList',
- '%r_i_XMLList',
- '%s_1__EObj',
- '%s_2__EObj',
- '%s_3__EObj',
- '%s_4__EObj',
- '%s_a__EObj',
- '%s_d__EObj',
- '%s_g__EObj',
- '%s_h__EObj',
- '%s_i_XMLList',
- '%s_i__EObj',
- '%s_j__EObj',
- '%s_k__EObj',
- '%s_l__EObj',
- '%s_m__EObj',
- '%s_n__EObj',
- '%s_o__EObj',
- '%s_p__EObj',
- '%s_q__EObj',
- '%s_r__EObj',
- '%s_s__EObj',
- '%s_x__EObj',
- '%s_y__EObj',
- '%s_z__EObj',
- '%sp_i_XMLList',
- '%spb_i_XMLList',
- '%st_i_XMLList',
- 'Calendar',
- 'ClipBoard',
- 'Matplot',
- 'Matplot1',
- 'PlaySound',
- 'TCL_DeleteInterp',
- 'TCL_DoOneEvent',
- 'TCL_EvalFile',
- 'TCL_EvalStr',
- 'TCL_ExistArray',
- 'TCL_ExistInterp',
- 'TCL_ExistVar',
- 'TCL_GetVar',
- 'TCL_GetVersion',
- 'TCL_SetVar',
- 'TCL_UnsetVar',
- 'TCL_UpVar',
- '_',
- '_code2str',
- '_d',
- '_str2code',
- 'about',
- 'abs',
- 'acos',
- 'addModulePreferences',
- 'addcolor',
- 'addf',
- 'addhistory',
- 'addinter',
- 'addlocalizationdomain',
- 'amell',
- 'and',
- 'argn',
- 'arl2_ius',
- 'ascii',
- 'asin',
- 'atan',
- 'backslash',
- 'balanc',
- 'banner',
- 'base2dec',
- 'basename',
- 'bdiag',
- 'beep',
- 'besselh',
- 'besseli',
- 'besselj',
- 'besselk',
- 'bessely',
- 'beta',
- 'bezout',
- 'bfinit',
- 'blkfc1i',
- 'blkslvi',
- 'bool2s',
- 'browsehistory',
- 'browsevar',
- 'bsplin3val',
- 'buildDoc',
- 'buildouttb',
- 'bvode',
- 'c_link',
- 'call',
- 'callblk',
- 'captions',
- 'cd',
- 'cdfbet',
- 'cdfbin',
- 'cdfchi',
- 'cdfchn',
- 'cdff',
- 'cdffnc',
- 'cdfgam',
- 'cdfnbn',
- 'cdfnor',
- 'cdfpoi',
- 'cdft',
- 'ceil',
- 'champ',
- 'champ1',
- 'chdir',
- 'chol',
- 'clc',
- 'clean',
- 'clear',
- 'clearfun',
- 'clearglobal',
- 'closeEditor',
- 'closeEditvar',
- 'closeXcos',
- 'code2str',
- 'coeff',
- 'color',
- 'comp',
- 'completion',
- 'conj',
- 'contour2di',
- 'contr',
- 'conv2',
- 'convstr',
- 'copy',
- 'copyfile',
- 'corr',
- 'cos',
- 'coserror',
- 'createdir',
- 'cshep2d',
- 'csvDefault',
- 'csvIsnum',
- 'csvRead',
- 'csvStringToDouble',
- 'csvTextScan',
- 'csvWrite',
- 'ctree2',
- 'ctree3',
- 'ctree4',
- 'cumprod',
- 'cumsum',
- 'curblock',
- 'curblockc',
- 'daskr',
- 'dasrt',
- 'dassl',
- 'data2sig',
- 'datatipCreate',
- 'datatipManagerMode',
- 'datatipMove',
- 'datatipRemove',
- 'datatipSetDisplay',
- 'datatipSetInterp',
- 'datatipSetOrientation',
- 'datatipSetStyle',
- 'datatipToggle',
- 'dawson',
- 'dct',
- 'debug',
- 'dec2base',
- 'deff',
- 'definedfields',
- 'degree',
- 'delbpt',
- 'delete',
- 'deletefile',
- 'delip',
- 'delmenu',
- 'det',
- 'dgettext',
- 'dhinf',
- 'diag',
- 'diary',
- 'diffobjs',
- 'disp',
- 'dispbpt',
- 'displayhistory',
- 'disposefftwlibrary',
- 'dlgamma',
- 'dnaupd',
- 'dneupd',
- 'double',
- 'drawaxis',
- 'drawlater',
- 'drawnow',
- 'driver',
- 'dsaupd',
- 'dsearch',
- 'dseupd',
- 'dst',
- 'duplicate',
- 'editvar',
- 'emptystr',
- 'end_scicosim',
- 'ereduc',
- 'erf',
- 'erfc',
- 'erfcx',
- 'erfi',
- 'errcatch',
- 'errclear',
- 'error',
- 'eval_cshep2d',
- 'exec',
- 'execstr',
- 'exists',
- 'exit',
- 'exp',
- 'expm',
- 'exportUI',
- 'export_to_hdf5',
- 'eye',
- 'fadj2sp',
- 'fec',
- 'feval',
- 'fft',
- 'fftw',
- 'fftw_flags',
- 'fftw_forget_wisdom',
- 'fftwlibraryisloaded',
- 'figure',
- 'file',
- 'filebrowser',
- 'fileext',
- 'fileinfo',
- 'fileparts',
- 'filesep',
- 'find',
- 'findBD',
- 'findfiles',
- 'fire_closing_finished',
- 'floor',
- 'format',
- 'fort',
- 'fprintfMat',
- 'freq',
- 'frexp',
- 'fromc',
- 'fromjava',
- 'fscanfMat',
- 'fsolve',
- 'fstair',
- 'full',
- 'fullpath',
- 'funcprot',
- 'funptr',
- 'gamma',
- 'gammaln',
- 'geom3d',
- 'get',
- 'getURL',
- 'get_absolute_file_path',
- 'get_fftw_wisdom',
- 'getblocklabel',
- 'getcallbackobject',
- 'getdate',
- 'getdebuginfo',
- 'getdefaultlanguage',
- 'getdrives',
- 'getdynlibext',
- 'getenv',
- 'getfield',
- 'gethistory',
- 'gethistoryfile',
- 'getinstalledlookandfeels',
- 'getio',
- 'getlanguage',
- 'getlongpathname',
- 'getlookandfeel',
- 'getmd5',
- 'getmemory',
- 'getmodules',
- 'getos',
- 'getpid',
- 'getrelativefilename',
- 'getscicosvars',
- 'getscilabmode',
- 'getshortpathname',
- 'gettext',
- 'getvariablesonstack',
- 'getversion',
- 'glist',
- 'global',
- 'glue',
- 'grand',
- 'graphicfunction',
- 'grayplot',
- 'grep',
- 'gsort',
- 'gstacksize',
- 'h5attr',
- 'h5close',
- 'h5cp',
- 'h5dataset',
- 'h5dump',
- 'h5exists',
- 'h5flush',
- 'h5get',
- 'h5group',
- 'h5isArray',
- 'h5isAttr',
- 'h5isCompound',
- 'h5isFile',
- 'h5isGroup',
- 'h5isList',
- 'h5isRef',
- 'h5isSet',
- 'h5isSpace',
- 'h5isType',
- 'h5isVlen',
- 'h5label',
- 'h5ln',
- 'h5ls',
- 'h5mount',
- 'h5mv',
- 'h5open',
- 'h5read',
- 'h5readattr',
- 'h5rm',
- 'h5umount',
- 'h5write',
- 'h5writeattr',
- 'havewindow',
- 'helpbrowser',
- 'hess',
- 'hinf',
- 'historymanager',
- 'historysize',
- 'host',
- 'htmlDump',
- 'htmlRead',
- 'htmlReadStr',
- 'htmlWrite',
- 'iconvert',
- 'ieee',
- 'ilib_verbose',
- 'imag',
- 'impl',
- 'import_from_hdf5',
- 'imult',
- 'inpnvi',
- 'int',
- 'int16',
- 'int2d',
- 'int32',
- 'int3d',
- 'int8',
- 'interp',
- 'interp2d',
- 'interp3d',
- 'intg',
- 'intppty',
- 'inttype',
- 'inv',
- 'invoke_lu',
- 'is_handle_valid',
- 'is_hdf5_file',
- 'isalphanum',
- 'isascii',
- 'isdef',
- 'isdigit',
- 'isdir',
- 'isequal',
- 'isequalbitwise',
- 'iserror',
- 'isfile',
- 'isglobal',
- 'isletter',
- 'isnum',
- 'isreal',
- 'iswaitingforinput',
- 'jallowClassReloading',
- 'jarray',
- 'jautoTranspose',
- 'jautoUnwrap',
- 'javaclasspath',
- 'javalibrarypath',
- 'jcast',
- 'jcompile',
- 'jconvMatrixMethod',
- 'jcreatejar',
- 'jdeff',
- 'jdisableTrace',
- 'jenableTrace',
- 'jexists',
- 'jgetclassname',
- 'jgetfield',
- 'jgetfields',
- 'jgetinfo',
- 'jgetmethods',
- 'jimport',
- 'jinvoke',
- 'jinvoke_db',
- 'jnewInstance',
- 'jremove',
- 'jsetfield',
- 'junwrap',
- 'junwraprem',
- 'jwrap',
- 'jwrapinfloat',
- 'kron',
- 'lasterror',
- 'ldiv',
- 'ldivf',
- 'legendre',
- 'length',
- 'lib',
- 'librarieslist',
- 'libraryinfo',
- 'light',
- 'linear_interpn',
- 'lines',
- 'link',
- 'linmeq',
- 'list',
- 'listvar_in_hdf5',
- 'load',
- 'loadGui',
- 'loadScicos',
- 'loadXcos',
- 'loadfftwlibrary',
- 'loadhistory',
- 'log',
- 'log1p',
- 'lsq',
- 'lsq_splin',
- 'lsqrsolve',
- 'lsslist',
- 'lstcat',
- 'lstsize',
- 'ltitr',
- 'lu',
- 'ludel',
- 'lufact',
- 'luget',
- 'lusolve',
- 'macr2lst',
- 'macr2tree',
- 'matfile_close',
- 'matfile_listvar',
- 'matfile_open',
- 'matfile_varreadnext',
- 'matfile_varwrite',
- 'matrix',
- 'max',
- 'maxfiles',
- 'mclearerr',
- 'mclose',
- 'meof',
- 'merror',
- 'messagebox',
- 'mfprintf',
- 'mfscanf',
- 'mget',
- 'mgeti',
- 'mgetl',
- 'mgetstr',
- 'min',
- 'mlist',
- 'mode',
- 'model2blk',
- 'mopen',
- 'move',
- 'movefile',
- 'mprintf',
- 'mput',
- 'mputl',
- 'mputstr',
- 'mscanf',
- 'mseek',
- 'msprintf',
- 'msscanf',
- 'mtell',
- 'mtlb_mode',
- 'mtlb_sparse',
- 'mucomp',
- 'mulf',
- 'name2rgb',
- 'nearfloat',
- 'newaxes',
- 'newest',
- 'newfun',
- 'nnz',
- 'norm',
- 'notify',
- 'number_properties',
- 'ode',
- 'odedc',
- 'ones',
- 'openged',
- 'opentk',
- 'optim',
- 'or',
- 'ordmmd',
- 'parallel_concurrency',
- 'parallel_run',
- 'param3d',
- 'param3d1',
- 'part',
- 'pathconvert',
- 'pathsep',
- 'phase_simulation',
- 'plot2d',
- 'plot2d1',
- 'plot2d2',
- 'plot2d3',
- 'plot2d4',
- 'plot3d',
- 'plot3d1',
- 'plotbrowser',
- 'pointer_xproperty',
- 'poly',
- 'ppol',
- 'pppdiv',
- 'predef',
- 'preferences',
- 'print',
- 'printf',
- 'printfigure',
- 'printsetupbox',
- 'prod',
- 'progressionbar',
- 'prompt',
- 'pwd',
- 'qld',
- 'qp_solve',
- 'qr',
- 'raise_window',
- 'rand',
- 'rankqr',
- 'rat',
- 'rcond',
- 'rdivf',
- 'read',
- 'read4b',
- 'read_csv',
- 'readb',
- 'readgateway',
- 'readmps',
- 'real',
- 'realtime',
- 'realtimeinit',
- 'regexp',
- 'relocate_handle',
- 'remez',
- 'removeModulePreferences',
- 'removedir',
- 'removelinehistory',
- 'res_with_prec',
- 'resethistory',
- 'residu',
- 'resume',
- 'return',
- 'ricc',
- 'rlist',
- 'roots',
- 'rotate_axes',
- 'round',
- 'rpem',
- 'rtitr',
- 'rubberbox',
- 'save',
- 'saveGui',
- 'saveafterncommands',
- 'saveconsecutivecommands',
- 'savehistory',
- 'schur',
- 'sci_haltscicos',
- 'sci_tree2',
- 'sci_tree3',
- 'sci_tree4',
- 'sciargs',
- 'scicos_debug',
- 'scicos_debug_count',
- 'scicos_time',
- 'scicosim',
- 'scinotes',
- 'sctree',
- 'semidef',
- 'set',
- 'set_blockerror',
- 'set_fftw_wisdom',
- 'set_xproperty',
- 'setbpt',
- 'setdefaultlanguage',
- 'setenv',
- 'setfield',
- 'sethistoryfile',
- 'setlanguage',
- 'setlookandfeel',
- 'setmenu',
- 'sfact',
- 'sfinit',
- 'show_window',
- 'sident',
- 'sig2data',
- 'sign',
- 'simp',
- 'simp_mode',
- 'sin',
- 'size',
- 'slash',
- 'sleep',
- 'sorder',
- 'sparse',
- 'spchol',
- 'spcompack',
- 'spec',
- 'spget',
- 'splin',
- 'splin2d',
- 'splin3d',
- 'splitURL',
- 'spones',
- 'sprintf',
- 'sqrt',
- 'stacksize',
- 'str2code',
- 'strcat',
- 'strchr',
- 'strcmp',
- 'strcspn',
- 'strindex',
- 'string',
- 'stringbox',
- 'stripblanks',
- 'strncpy',
- 'strrchr',
- 'strrev',
- 'strsplit',
- 'strspn',
- 'strstr',
- 'strsubst',
- 'strtod',
- 'strtok',
- 'subf',
- 'sum',
- 'svd',
- 'swap_handles',
- 'symfcti',
- 'syredi',
- 'system_getproperty',
- 'system_setproperty',
- 'ta2lpd',
- 'tan',
- 'taucs_chdel',
- 'taucs_chfact',
- 'taucs_chget',
- 'taucs_chinfo',
- 'taucs_chsolve',
- 'tempname',
- 'testmatrix',
- 'timer',
- 'tlist',
- 'tohome',
- 'tokens',
- 'toolbar',
- 'toprint',
- 'tr_zer',
- 'tril',
- 'triu',
- 'type',
- 'typename',
- 'uiDisplayTree',
- 'uicontextmenu',
- 'uicontrol',
- 'uigetcolor',
- 'uigetdir',
- 'uigetfile',
- 'uigetfont',
- 'uimenu',
- 'uint16',
- 'uint32',
- 'uint8',
- 'uipopup',
- 'uiputfile',
- 'uiwait',
- 'ulink',
- 'umf_ludel',
- 'umf_lufact',
- 'umf_luget',
- 'umf_luinfo',
- 'umf_lusolve',
- 'umfpack',
- 'unglue',
- 'unix',
- 'unsetmenu',
- 'unzoom',
- 'updatebrowsevar',
- 'usecanvas',
- 'useeditor',
- 'user',
- 'var2vec',
- 'varn',
- 'vec2var',
- 'waitbar',
- 'warnBlockByUID',
- 'warning',
- 'what',
- 'where',
- 'whereis',
- 'who',
- 'winsid',
- 'with_module',
- 'writb',
- 'write',
- 'write4b',
- 'write_csv',
- 'x_choose',
- 'x_choose_modeless',
- 'x_dialog',
- 'x_mdialog',
- 'xarc',
- 'xarcs',
- 'xarrows',
- 'xchange',
- 'xchoicesi',
- 'xclick',
- 'xcos',
- 'xcosAddToolsMenu',
- 'xcosConfigureXmlFile',
- 'xcosDiagramToScilab',
- 'xcosPalCategoryAdd',
- 'xcosPalDelete',
- 'xcosPalDisable',
- 'xcosPalEnable',
- 'xcosPalGenerateIcon',
- 'xcosPalGet',
- 'xcosPalLoad',
- 'xcosPalMove',
- 'xcosSimulationStarted',
- 'xcosUpdateBlock',
- 'xdel',
- 'xend',
- 'xfarc',
- 'xfarcs',
- 'xfpoly',
- 'xfpolys',
- 'xfrect',
- 'xget',
- 'xgetmouse',
- 'xgraduate',
- 'xgrid',
- 'xinit',
- 'xlfont',
- 'xls_open',
- 'xls_read',
- 'xmlAddNs',
- 'xmlAppend',
- 'xmlAsNumber',
- 'xmlAsText',
- 'xmlDTD',
- 'xmlDelete',
- 'xmlDocument',
- 'xmlDump',
- 'xmlElement',
- 'xmlFormat',
- 'xmlGetNsByHref',
- 'xmlGetNsByPrefix',
- 'xmlGetOpenDocs',
- 'xmlIsValidObject',
- 'xmlName',
- 'xmlNs',
- 'xmlRead',
- 'xmlReadStr',
- 'xmlRelaxNG',
- 'xmlRemove',
- 'xmlSchema',
- 'xmlSetAttributes',
- 'xmlValidate',
- 'xmlWrite',
- 'xmlXPath',
- 'xname',
- 'xpause',
- 'xpoly',
- 'xpolys',
- 'xrect',
- 'xrects',
- 'xs2bmp',
- 'xs2emf',
- 'xs2eps',
- 'xs2gif',
- 'xs2jpg',
- 'xs2pdf',
- 'xs2png',
- 'xs2ppm',
- 'xs2ps',
- 'xs2svg',
- 'xsegs',
- 'xset',
- 'xstring',
- 'xstringb',
- 'xtitle',
- 'zeros',
- 'znaupd',
- 'zneupd',
- 'zoom_rect',
-)
-
-macros_kw = (
- '!_deff_wrapper',
- '%0_i_st',
- '%3d_i_h',
- '%Block_xcosUpdateBlock',
- '%TNELDER_p',
- '%TNELDER_string',
- '%TNMPLOT_p',
- '%TNMPLOT_string',
- '%TOPTIM_p',
- '%TOPTIM_string',
- '%TSIMPLEX_p',
- '%TSIMPLEX_string',
- '%_EVoid_p',
- '%_gsort',
- '%_listvarinfile',
- '%_rlist',
- '%_save',
- '%_sodload',
- '%_strsplit',
- '%_unwrap',
- '%ar_p',
- '%asn',
- '%b_a_b',
- '%b_a_s',
- '%b_c_s',
- '%b_c_spb',
- '%b_cumprod',
- '%b_cumsum',
- '%b_d_s',
- '%b_diag',
- '%b_e',
- '%b_f_s',
- '%b_f_spb',
- '%b_g_s',
- '%b_g_spb',
- '%b_grand',
- '%b_h_s',
- '%b_h_spb',
- '%b_i_b',
- '%b_i_ce',
- '%b_i_h',
- '%b_i_hm',
- '%b_i_s',
- '%b_i_sp',
- '%b_i_spb',
- '%b_i_st',
- '%b_iconvert',
- '%b_l_b',
- '%b_l_s',
- '%b_m_b',
- '%b_m_s',
- '%b_matrix',
- '%b_n_hm',
- '%b_o_hm',
- '%b_p_s',
- '%b_prod',
- '%b_r_b',
- '%b_r_s',
- '%b_s_b',
- '%b_s_s',
- '%b_string',
- '%b_sum',
- '%b_tril',
- '%b_triu',
- '%b_x_b',
- '%b_x_s',
- '%bicg',
- '%bicgstab',
- '%c_a_c',
- '%c_b_c',
- '%c_b_s',
- '%c_diag',
- '%c_dsearch',
- '%c_e',
- '%c_eye',
- '%c_f_s',
- '%c_grand',
- '%c_i_c',
- '%c_i_ce',
- '%c_i_h',
- '%c_i_hm',
- '%c_i_lss',
- '%c_i_r',
- '%c_i_s',
- '%c_i_st',
- '%c_matrix',
- '%c_n_l',
- '%c_n_st',
- '%c_o_l',
- '%c_o_st',
- '%c_ones',
- '%c_rand',
- '%c_tril',
- '%c_triu',
- '%cblock_c_cblock',
- '%cblock_c_s',
- '%cblock_e',
- '%cblock_f_cblock',
- '%cblock_p',
- '%cblock_size',
- '%ce_6',
- '%ce_c_ce',
- '%ce_e',
- '%ce_f_ce',
- '%ce_i_ce',
- '%ce_i_s',
- '%ce_i_st',
- '%ce_matrix',
- '%ce_p',
- '%ce_size',
- '%ce_string',
- '%ce_t',
- '%cgs',
- '%champdat_i_h',
- '%choose',
- '%diagram_xcos',
- '%dir_p',
- '%fptr_i_st',
- '%grand_perm',
- '%grayplot_i_h',
- '%h_i_st',
- '%hmS_k_hmS_generic',
- '%hm_1_hm',
- '%hm_1_s',
- '%hm_2_hm',
- '%hm_2_s',
- '%hm_3_hm',
- '%hm_3_s',
- '%hm_4_hm',
- '%hm_4_s',
- '%hm_5',
- '%hm_a_hm',
- '%hm_a_r',
- '%hm_a_s',
- '%hm_abs',
- '%hm_and',
- '%hm_bool2s',
- '%hm_c_hm',
- '%hm_ceil',
- '%hm_conj',
- '%hm_cos',
- '%hm_cumprod',
- '%hm_cumsum',
- '%hm_d_hm',
- '%hm_d_s',
- '%hm_degree',
- '%hm_dsearch',
- '%hm_e',
- '%hm_exp',
- '%hm_eye',
- '%hm_f_hm',
- '%hm_find',
- '%hm_floor',
- '%hm_g_hm',
- '%hm_grand',
- '%hm_gsort',
- '%hm_h_hm',
- '%hm_i_b',
- '%hm_i_ce',
- '%hm_i_h',
- '%hm_i_hm',
- '%hm_i_i',
- '%hm_i_p',
- '%hm_i_r',
- '%hm_i_s',
- '%hm_i_st',
- '%hm_iconvert',
- '%hm_imag',
- '%hm_int',
- '%hm_isnan',
- '%hm_isreal',
- '%hm_j_hm',
- '%hm_j_s',
- '%hm_k_hm',
- '%hm_k_s',
- '%hm_log',
- '%hm_m_p',
- '%hm_m_r',
- '%hm_m_s',
- '%hm_matrix',
- '%hm_max',
- '%hm_mean',
- '%hm_median',
- '%hm_min',
- '%hm_n_b',
- '%hm_n_c',
- '%hm_n_hm',
- '%hm_n_i',
- '%hm_n_p',
- '%hm_n_s',
- '%hm_o_b',
- '%hm_o_c',
- '%hm_o_hm',
- '%hm_o_i',
- '%hm_o_p',
- '%hm_o_s',
- '%hm_ones',
- '%hm_or',
- '%hm_p',
- '%hm_prod',
- '%hm_q_hm',
- '%hm_r_s',
- '%hm_rand',
- '%hm_real',
- '%hm_round',
- '%hm_s',
- '%hm_s_hm',
- '%hm_s_r',
- '%hm_s_s',
- '%hm_sign',
- '%hm_sin',
- '%hm_size',
- '%hm_sqrt',
- '%hm_stdev',
- '%hm_string',
- '%hm_sum',
- '%hm_x_hm',
- '%hm_x_p',
- '%hm_x_s',
- '%hm_zeros',
- '%i_1_s',
- '%i_2_s',
- '%i_3_s',
- '%i_4_s',
- '%i_Matplot',
- '%i_a_i',
- '%i_a_s',
- '%i_and',
- '%i_ascii',
- '%i_b_s',
- '%i_bezout',
- '%i_champ',
- '%i_champ1',
- '%i_contour',
- '%i_contour2d',
- '%i_d_i',
- '%i_d_s',
- '%i_dsearch',
- '%i_e',
- '%i_fft',
- '%i_g_i',
- '%i_gcd',
- '%i_grand',
- '%i_h_i',
- '%i_i_ce',
- '%i_i_h',
- '%i_i_hm',
- '%i_i_i',
- '%i_i_s',
- '%i_i_st',
- '%i_j_i',
- '%i_j_s',
- '%i_l_s',
- '%i_lcm',
- '%i_length',
- '%i_m_i',
- '%i_m_s',
- '%i_mfprintf',
- '%i_mprintf',
- '%i_msprintf',
- '%i_n_s',
- '%i_o_s',
- '%i_or',
- '%i_p_i',
- '%i_p_s',
- '%i_plot2d',
- '%i_plot2d1',
- '%i_plot2d2',
- '%i_q_s',
- '%i_r_i',
- '%i_r_s',
- '%i_round',
- '%i_s_i',
- '%i_s_s',
- '%i_sign',
- '%i_string',
- '%i_x_i',
- '%i_x_s',
- '%ip_a_s',
- '%ip_i_st',
- '%ip_m_s',
- '%ip_n_ip',
- '%ip_o_ip',
- '%ip_p',
- '%ip_part',
- '%ip_s_s',
- '%ip_string',
- '%k',
- '%l_i_h',
- '%l_i_s',
- '%l_i_st',
- '%l_isequal',
- '%l_n_c',
- '%l_n_l',
- '%l_n_m',
- '%l_n_p',
- '%l_n_s',
- '%l_n_st',
- '%l_o_c',
- '%l_o_l',
- '%l_o_m',
- '%l_o_p',
- '%l_o_s',
- '%l_o_st',
- '%lss_a_lss',
- '%lss_a_p',
- '%lss_a_r',
- '%lss_a_s',
- '%lss_c_lss',
- '%lss_c_p',
- '%lss_c_r',
- '%lss_c_s',
- '%lss_e',
- '%lss_eye',
- '%lss_f_lss',
- '%lss_f_p',
- '%lss_f_r',
- '%lss_f_s',
- '%lss_i_ce',
- '%lss_i_lss',
- '%lss_i_p',
- '%lss_i_r',
- '%lss_i_s',
- '%lss_i_st',
- '%lss_inv',
- '%lss_l_lss',
- '%lss_l_p',
- '%lss_l_r',
- '%lss_l_s',
- '%lss_m_lss',
- '%lss_m_p',
- '%lss_m_r',
- '%lss_m_s',
- '%lss_n_lss',
- '%lss_n_p',
- '%lss_n_r',
- '%lss_n_s',
- '%lss_norm',
- '%lss_o_lss',
- '%lss_o_p',
- '%lss_o_r',
- '%lss_o_s',
- '%lss_ones',
- '%lss_r_lss',
- '%lss_r_p',
- '%lss_r_r',
- '%lss_r_s',
- '%lss_rand',
- '%lss_s',
- '%lss_s_lss',
- '%lss_s_p',
- '%lss_s_r',
- '%lss_s_s',
- '%lss_size',
- '%lss_t',
- '%lss_v_lss',
- '%lss_v_p',
- '%lss_v_r',
- '%lss_v_s',
- '%lt_i_s',
- '%m_n_l',
- '%m_o_l',
- '%mc_i_h',
- '%mc_i_s',
- '%mc_i_st',
- '%mc_n_st',
- '%mc_o_st',
- '%mc_string',
- '%mps_p',
- '%mps_string',
- '%msp_a_s',
- '%msp_abs',
- '%msp_e',
- '%msp_find',
- '%msp_i_s',
- '%msp_i_st',
- '%msp_length',
- '%msp_m_s',
- '%msp_maxi',
- '%msp_n_msp',
- '%msp_nnz',
- '%msp_o_msp',
- '%msp_p',
- '%msp_sparse',
- '%msp_spones',
- '%msp_t',
- '%p_a_lss',
- '%p_a_r',
- '%p_c_lss',
- '%p_c_r',
- '%p_cumprod',
- '%p_cumsum',
- '%p_d_p',
- '%p_d_r',
- '%p_d_s',
- '%p_det',
- '%p_e',
- '%p_f_lss',
- '%p_f_r',
- '%p_grand',
- '%p_i_ce',
- '%p_i_h',
- '%p_i_hm',
- '%p_i_lss',
- '%p_i_p',
- '%p_i_r',
- '%p_i_s',
- '%p_i_st',
- '%p_inv',
- '%p_j_s',
- '%p_k_p',
- '%p_k_r',
- '%p_k_s',
- '%p_l_lss',
- '%p_l_p',
- '%p_l_r',
- '%p_l_s',
- '%p_m_hm',
- '%p_m_lss',
- '%p_m_r',
- '%p_matrix',
- '%p_n_l',
- '%p_n_lss',
- '%p_n_r',
- '%p_o_l',
- '%p_o_lss',
- '%p_o_r',
- '%p_o_sp',
- '%p_p_s',
- '%p_part',
- '%p_prod',
- '%p_q_p',
- '%p_q_r',
- '%p_q_s',
- '%p_r_lss',
- '%p_r_p',
- '%p_r_r',
- '%p_r_s',
- '%p_s_lss',
- '%p_s_r',
- '%p_simp',
- '%p_string',
- '%p_sum',
- '%p_v_lss',
- '%p_v_p',
- '%p_v_r',
- '%p_v_s',
- '%p_x_hm',
- '%p_x_r',
- '%p_y_p',
- '%p_y_r',
- '%p_y_s',
- '%p_z_p',
- '%p_z_r',
- '%p_z_s',
- '%pcg',
- '%plist_p',
- '%plist_string',
- '%r_0',
- '%r_a_hm',
- '%r_a_lss',
- '%r_a_p',
- '%r_a_r',
- '%r_a_s',
- '%r_c_lss',
- '%r_c_p',
- '%r_c_r',
- '%r_c_s',
- '%r_clean',
- '%r_cumprod',
- '%r_cumsum',
- '%r_d_p',
- '%r_d_r',
- '%r_d_s',
- '%r_det',
- '%r_diag',
- '%r_e',
- '%r_eye',
- '%r_f_lss',
- '%r_f_p',
- '%r_f_r',
- '%r_f_s',
- '%r_i_ce',
- '%r_i_hm',
- '%r_i_lss',
- '%r_i_p',
- '%r_i_r',
- '%r_i_s',
- '%r_i_st',
- '%r_inv',
- '%r_j_s',
- '%r_k_p',
- '%r_k_r',
- '%r_k_s',
- '%r_l_lss',
- '%r_l_p',
- '%r_l_r',
- '%r_l_s',
- '%r_m_hm',
- '%r_m_lss',
- '%r_m_p',
- '%r_m_r',
- '%r_m_s',
- '%r_matrix',
- '%r_n_lss',
- '%r_n_p',
- '%r_n_r',
- '%r_n_s',
- '%r_norm',
- '%r_o_lss',
- '%r_o_p',
- '%r_o_r',
- '%r_o_s',
- '%r_ones',
- '%r_p',
- '%r_p_s',
- '%r_prod',
- '%r_q_p',
- '%r_q_r',
- '%r_q_s',
- '%r_r_lss',
- '%r_r_p',
- '%r_r_r',
- '%r_r_s',
- '%r_rand',
- '%r_s',
- '%r_s_hm',
- '%r_s_lss',
- '%r_s_p',
- '%r_s_r',
- '%r_s_s',
- '%r_simp',
- '%r_size',
- '%r_string',
- '%r_sum',
- '%r_t',
- '%r_tril',
- '%r_triu',
- '%r_v_lss',
- '%r_v_p',
- '%r_v_r',
- '%r_v_s',
- '%r_varn',
- '%r_x_p',
- '%r_x_r',
- '%r_x_s',
- '%r_y_p',
- '%r_y_r',
- '%r_y_s',
- '%r_z_p',
- '%r_z_r',
- '%r_z_s',
- '%s_1_hm',
- '%s_1_i',
- '%s_2_hm',
- '%s_2_i',
- '%s_3_hm',
- '%s_3_i',
- '%s_4_hm',
- '%s_4_i',
- '%s_5',
- '%s_a_b',
- '%s_a_hm',
- '%s_a_i',
- '%s_a_ip',
- '%s_a_lss',
- '%s_a_msp',
- '%s_a_r',
- '%s_a_sp',
- '%s_and',
- '%s_b_i',
- '%s_b_s',
- '%s_bezout',
- '%s_c_b',
- '%s_c_cblock',
- '%s_c_lss',
- '%s_c_r',
- '%s_c_sp',
- '%s_d_b',
- '%s_d_i',
- '%s_d_p',
- '%s_d_r',
- '%s_d_sp',
- '%s_e',
- '%s_f_b',
- '%s_f_cblock',
- '%s_f_lss',
- '%s_f_r',
- '%s_f_sp',
- '%s_g_b',
- '%s_g_s',
- '%s_gcd',
- '%s_grand',
- '%s_h_b',
- '%s_h_s',
- '%s_i_b',
- '%s_i_c',
- '%s_i_ce',
- '%s_i_h',
- '%s_i_hm',
- '%s_i_i',
- '%s_i_lss',
- '%s_i_p',
- '%s_i_r',
- '%s_i_s',
- '%s_i_sp',
- '%s_i_spb',
- '%s_i_st',
- '%s_j_i',
- '%s_k_hm',
- '%s_k_p',
- '%s_k_r',
- '%s_k_sp',
- '%s_l_b',
- '%s_l_hm',
- '%s_l_i',
- '%s_l_lss',
- '%s_l_p',
- '%s_l_r',
- '%s_l_s',
- '%s_l_sp',
- '%s_lcm',
- '%s_m_b',
- '%s_m_hm',
- '%s_m_i',
- '%s_m_ip',
- '%s_m_lss',
- '%s_m_msp',
- '%s_m_r',
- '%s_matrix',
- '%s_n_hm',
- '%s_n_i',
- '%s_n_l',
- '%s_n_lss',
- '%s_n_r',
- '%s_n_st',
- '%s_o_hm',
- '%s_o_i',
- '%s_o_l',
- '%s_o_lss',
- '%s_o_r',
- '%s_o_st',
- '%s_or',
- '%s_p_b',
- '%s_p_i',
- '%s_pow',
- '%s_q_hm',
- '%s_q_i',
- '%s_q_p',
- '%s_q_r',
- '%s_q_sp',
- '%s_r_b',
- '%s_r_i',
- '%s_r_lss',
- '%s_r_p',
- '%s_r_r',
- '%s_r_s',
- '%s_r_sp',
- '%s_s_b',
- '%s_s_hm',
- '%s_s_i',
- '%s_s_ip',
- '%s_s_lss',
- '%s_s_r',
- '%s_s_sp',
- '%s_simp',
- '%s_v_lss',
- '%s_v_p',
- '%s_v_r',
- '%s_v_s',
- '%s_x_b',
- '%s_x_hm',
- '%s_x_i',
- '%s_x_r',
- '%s_y_p',
- '%s_y_r',
- '%s_y_sp',
- '%s_z_p',
- '%s_z_r',
- '%s_z_sp',
- '%sn',
- '%sp_a_s',
- '%sp_a_sp',
- '%sp_and',
- '%sp_c_s',
- '%sp_ceil',
- '%sp_conj',
- '%sp_cos',
- '%sp_cumprod',
- '%sp_cumsum',
- '%sp_d_s',
- '%sp_d_sp',
- '%sp_det',
- '%sp_diag',
- '%sp_e',
- '%sp_exp',
- '%sp_f_s',
- '%sp_floor',
- '%sp_grand',
- '%sp_gsort',
- '%sp_i_ce',
- '%sp_i_h',
- '%sp_i_s',
- '%sp_i_sp',
- '%sp_i_st',
- '%sp_int',
- '%sp_inv',
- '%sp_k_s',
- '%sp_k_sp',
- '%sp_l_s',
- '%sp_l_sp',
- '%sp_length',
- '%sp_max',
- '%sp_min',
- '%sp_norm',
- '%sp_or',
- '%sp_p_s',
- '%sp_prod',
- '%sp_q_s',
- '%sp_q_sp',
- '%sp_r_s',
- '%sp_r_sp',
- '%sp_round',
- '%sp_s_s',
- '%sp_s_sp',
- '%sp_sin',
- '%sp_sqrt',
- '%sp_string',
- '%sp_sum',
- '%sp_tril',
- '%sp_triu',
- '%sp_y_s',
- '%sp_y_sp',
- '%sp_z_s',
- '%sp_z_sp',
- '%spb_and',
- '%spb_c_b',
- '%spb_cumprod',
- '%spb_cumsum',
- '%spb_diag',
- '%spb_e',
- '%spb_f_b',
- '%spb_g_b',
- '%spb_g_spb',
- '%spb_h_b',
- '%spb_h_spb',
- '%spb_i_b',
- '%spb_i_ce',
- '%spb_i_h',
- '%spb_i_st',
- '%spb_or',
- '%spb_prod',
- '%spb_sum',
- '%spb_tril',
- '%spb_triu',
- '%st_6',
- '%st_c_st',
- '%st_e',
- '%st_f_st',
- '%st_i_b',
- '%st_i_c',
- '%st_i_fptr',
- '%st_i_h',
- '%st_i_i',
- '%st_i_ip',
- '%st_i_lss',
- '%st_i_msp',
- '%st_i_p',
- '%st_i_r',
- '%st_i_s',
- '%st_i_sp',
- '%st_i_spb',
- '%st_i_st',
- '%st_matrix',
- '%st_n_c',
- '%st_n_l',
- '%st_n_mc',
- '%st_n_p',
- '%st_n_s',
- '%st_o_c',
- '%st_o_l',
- '%st_o_mc',
- '%st_o_p',
- '%st_o_s',
- '%st_o_tl',
- '%st_p',
- '%st_size',
- '%st_string',
- '%st_t',
- '%ticks_i_h',
- '%xls_e',
- '%xls_p',
- '%xlssheet_e',
- '%xlssheet_p',
- '%xlssheet_size',
- '%xlssheet_string',
- 'DominationRank',
- 'G_make',
- 'IsAScalar',
- 'NDcost',
- 'OS_Version',
- 'PlotSparse',
- 'ReadHBSparse',
- 'TCL_CreateSlave',
- 'abcd',
- 'abinv',
- 'accept_func_default',
- 'accept_func_vfsa',
- 'acf',
- 'acosd',
- 'acosh',
- 'acoshm',
- 'acosm',
- 'acot',
- 'acotd',
- 'acoth',
- 'acsc',
- 'acscd',
- 'acsch',
- 'add_demo',
- 'add_help_chapter',
- 'add_module_help_chapter',
- 'add_param',
- 'add_profiling',
- 'adj2sp',
- 'aff2ab',
- 'ana_style',
- 'analpf',
- 'analyze',
- 'aplat',
- 'arhnk',
- 'arl2',
- 'arma2p',
- 'arma2ss',
- 'armac',
- 'armax',
- 'armax1',
- 'arobasestring2strings',
- 'arsimul',
- 'ascii2string',
- 'asciimat',
- 'asec',
- 'asecd',
- 'asech',
- 'asind',
- 'asinh',
- 'asinhm',
- 'asinm',
- 'assert_checkalmostequal',
- 'assert_checkequal',
- 'assert_checkerror',
- 'assert_checkfalse',
- 'assert_checkfilesequal',
- 'assert_checktrue',
- 'assert_comparecomplex',
- 'assert_computedigits',
- 'assert_cond2reltol',
- 'assert_cond2reqdigits',
- 'assert_generror',
- 'atand',
- 'atanh',
- 'atanhm',
- 'atanm',
- 'atomsAutoload',
- 'atomsAutoloadAdd',
- 'atomsAutoloadDel',
- 'atomsAutoloadList',
- 'atomsCategoryList',
- 'atomsCheckModule',
- 'atomsDepTreeShow',
- 'atomsGetConfig',
- 'atomsGetInstalled',
- 'atomsGetInstalledPath',
- 'atomsGetLoaded',
- 'atomsGetLoadedPath',
- 'atomsInstall',
- 'atomsIsInstalled',
- 'atomsIsLoaded',
- 'atomsList',
- 'atomsLoad',
- 'atomsQuit',
- 'atomsRemove',
- 'atomsRepositoryAdd',
- 'atomsRepositoryDel',
- 'atomsRepositoryList',
- 'atomsRestoreConfig',
- 'atomsSaveConfig',
- 'atomsSearch',
- 'atomsSetConfig',
- 'atomsShow',
- 'atomsSystemInit',
- 'atomsSystemUpdate',
- 'atomsTest',
- 'atomsUpdate',
- 'atomsVersion',
- 'augment',
- 'auread',
- 'auwrite',
- 'balreal',
- 'bench_run',
- 'bilin',
- 'bilt',
- 'bin2dec',
- 'binomial',
- 'bitand',
- 'bitcmp',
- 'bitget',
- 'bitor',
- 'bitset',
- 'bitxor',
- 'black',
- 'blanks',
- 'bloc2exp',
- 'bloc2ss',
- 'block_parameter_error',
- 'bode',
- 'bode_asymp',
- 'bstap',
- 'buttmag',
- 'bvodeS',
- 'bytecode',
- 'bytecodewalk',
- 'cainv',
- 'calendar',
- 'calerf',
- 'calfrq',
- 'canon',
- 'casc',
- 'cat',
- 'cat_code',
- 'cb_m2sci_gui',
- 'ccontrg',
- 'cell',
- 'cell2mat',
- 'cellstr',
- 'center',
- 'cepstrum',
- 'cfspec',
- 'char',
- 'chart',
- 'cheb1mag',
- 'cheb2mag',
- 'check_gateways',
- 'check_modules_xml',
- 'check_versions',
- 'chepol',
- 'chfact',
- 'chsolve',
- 'classmarkov',
- 'clean_help',
- 'clock',
- 'cls2dls',
- 'cmb_lin',
- 'cmndred',
- 'cmoment',
- 'coding_ga_binary',
- 'coding_ga_identity',
- 'coff',
- 'coffg',
- 'colcomp',
- 'colcompr',
- 'colinout',
- 'colregul',
- 'companion',
- 'complex',
- 'compute_initial_temp',
- 'cond',
- 'cond2sp',
- 'condestsp',
- 'configure_msifort',
- 'configure_msvc',
- 'conjgrad',
- 'cont_frm',
- 'cont_mat',
- 'contrss',
- 'conv',
- 'convert_to_float',
- 'convertindex',
- 'convol',
- 'convol2d',
- 'copfac',
- 'correl',
- 'cosd',
- 'cosh',
- 'coshm',
- 'cosm',
- 'cotd',
- 'cotg',
- 'coth',
- 'cothm',
- 'cov',
- 'covar',
- 'createXConfiguration',
- 'createfun',
- 'createstruct',
- 'cross',
- 'crossover_ga_binary',
- 'crossover_ga_default',
- 'csc',
- 'cscd',
- 'csch',
- 'csgn',
- 'csim',
- 'cspect',
- 'ctr_gram',
- 'czt',
- 'dae',
- 'daeoptions',
- 'damp',
- 'datafit',
- 'date',
- 'datenum',
- 'datevec',
- 'dbphi',
- 'dcf',
- 'ddp',
- 'dec2bin',
- 'dec2hex',
- 'dec2oct',
- 'del_help_chapter',
- 'del_module_help_chapter',
- 'demo_begin',
- 'demo_choose',
- 'demo_compiler',
- 'demo_end',
- 'demo_file_choice',
- 'demo_folder_choice',
- 'demo_function_choice',
- 'demo_gui',
- 'demo_run',
- 'demo_viewCode',
- 'denom',
- 'derivat',
- 'derivative',
- 'des2ss',
- 'des2tf',
- 'detectmsifort64tools',
- 'detectmsvc64tools',
- 'determ',
- 'detr',
- 'detrend',
- 'devtools_run_builder',
- 'dhnorm',
- 'diff',
- 'diophant',
- 'dir',
- 'dirname',
- 'dispfiles',
- 'dllinfo',
- 'dscr',
- 'dsimul',
- 'dt_ility',
- 'dtsi',
- 'edit',
- 'edit_error',
- 'editor',
- 'eigenmarkov',
- 'eigs',
- 'ell1mag',
- 'enlarge_shape',
- 'entropy',
- 'eomday',
- 'epred',
- 'eqfir',
- 'eqiir',
- 'equil',
- 'equil1',
- 'erfinv',
- 'etime',
- 'eval',
- 'evans',
- 'evstr',
- 'example_run',
- 'expression2code',
- 'extract_help_examples',
- 'factor',
- 'factorial',
- 'factors',
- 'faurre',
- 'ffilt',
- 'fft2',
- 'fftshift',
- 'fieldnames',
- 'filt_sinc',
- 'filter',
- 'findABCD',
- 'findAC',
- 'findBDK',
- 'findR',
- 'find_freq',
- 'find_links',
- 'find_scicos_version',
- 'findm',
- 'findmsifortcompiler',
- 'findmsvccompiler',
- 'findx0BD',
- 'firstnonsingleton',
- 'fix',
- 'fixedpointgcd',
- 'flipdim',
- 'flts',
- 'fminsearch',
- 'formatBlackTip',
- 'formatBodeMagTip',
- 'formatBodePhaseTip',
- 'formatGainplotTip',
- 'formatHallModuleTip',
- 'formatHallPhaseTip',
- 'formatNicholsGainTip',
- 'formatNicholsPhaseTip',
- 'formatNyquistTip',
- 'formatPhaseplotTip',
- 'formatSgridDampingTip',
- 'formatSgridFreqTip',
- 'formatZgridDampingTip',
- 'formatZgridFreqTip',
- 'format_txt',
- 'fourplan',
- 'frep2tf',
- 'freson',
- 'frfit',
- 'frmag',
- 'fseek_origin',
- 'fsfirlin',
- 'fspec',
- 'fspecg',
- 'fstabst',
- 'ftest',
- 'ftuneq',
- 'fullfile',
- 'fullrf',
- 'fullrfk',
- 'fun2string',
- 'g_margin',
- 'gainplot',
- 'gamitg',
- 'gcare',
- 'gcd',
- 'gencompilationflags_unix',
- 'generateBlockImage',
- 'generateBlockImages',
- 'generic_i_ce',
- 'generic_i_h',
- 'generic_i_hm',
- 'generic_i_s',
- 'generic_i_st',
- 'genlib',
- 'genmarkov',
- 'geomean',
- 'getDiagramVersion',
- 'getModelicaPath',
- 'getPreferencesValue',
- 'get_file_path',
- 'get_function_path',
- 'get_param',
- 'get_profile',
- 'get_scicos_version',
- 'getd',
- 'getscilabkeywords',
- 'getshell',
- 'gettklib',
- 'gfare',
- 'gfrancis',
- 'givens',
- 'glever',
- 'gmres',
- 'group',
- 'gschur',
- 'gspec',
- 'gtild',
- 'h2norm',
- 'h_cl',
- 'h_inf',
- 'h_inf_st',
- 'h_norm',
- 'hallchart',
- 'halt',
- 'hank',
- 'hankelsv',
- 'harmean',
- 'haveacompiler',
- 'head_comments',
- 'help_from_sci',
- 'help_skeleton',
- 'hermit',
- 'hex2dec',
- 'hilb',
- 'hilbert',
- 'histc',
- 'horner',
- 'householder',
- 'hrmt',
- 'htrianr',
- 'hypermat',
- 'idct',
- 'idst',
- 'ifft',
- 'ifftshift',
- 'iir',
- 'iirgroup',
- 'iirlp',
- 'iirmod',
- 'ilib_build',
- 'ilib_build_jar',
- 'ilib_compile',
- 'ilib_for_link',
- 'ilib_gen_Make',
- 'ilib_gen_Make_unix',
- 'ilib_gen_cleaner',
- 'ilib_gen_gateway',
- 'ilib_gen_loader',
- 'ilib_include_flag',
- 'ilib_mex_build',
- 'im_inv',
- 'importScicosDiagram',
- 'importScicosPal',
- 'importXcosDiagram',
- 'imrep2ss',
- 'ind2sub',
- 'inistate',
- 'init_ga_default',
- 'init_param',
- 'initial_scicos_tables',
- 'input',
- 'instruction2code',
- 'intc',
- 'intdec',
- 'integrate',
- 'interp1',
- 'interpln',
- 'intersect',
- 'intl',
- 'intsplin',
- 'inttrap',
- 'inv_coeff',
- 'invr',
- 'invrs',
- 'invsyslin',
- 'iqr',
- 'isLeapYear',
- 'is_absolute_path',
- 'is_param',
- 'iscell',
- 'iscellstr',
- 'iscolumn',
- 'isempty',
- 'isfield',
- 'isinf',
- 'ismatrix',
- 'isnan',
- 'isrow',
- 'isscalar',
- 'issparse',
- 'issquare',
- 'isstruct',
- 'isvector',
- 'jmat',
- 'justify',
- 'kalm',
- 'karmarkar',
- 'kernel',
- 'kpure',
- 'krac2',
- 'kroneck',
- 'lattn',
- 'lattp',
- 'launchtest',
- 'lcf',
- 'lcm',
- 'lcmdiag',
- 'leastsq',
- 'leqe',
- 'leqr',
- 'lev',
- 'levin',
- 'lex_sort',
- 'lft',
- 'lin',
- 'lin2mu',
- 'lincos',
- 'lindquist',
- 'linf',
- 'linfn',
- 'linsolve',
- 'linspace',
- 'list2vec',
- 'list_param',
- 'listfiles',
- 'listfunctions',
- 'listvarinfile',
- 'lmisolver',
- 'lmitool',
- 'loadXcosLibs',
- 'loadmatfile',
- 'loadwave',
- 'log10',
- 'log2',
- 'logm',
- 'logspace',
- 'lqe',
- 'lqg',
- 'lqg2stan',
- 'lqg_ltr',
- 'lqr',
- 'ls',
- 'lyap',
- 'm2sci_gui',
- 'm_circle',
- 'macglov',
- 'macrovar',
- 'mad',
- 'makecell',
- 'manedit',
- 'mapsound',
- 'markp2ss',
- 'matfile2sci',
- 'mdelete',
- 'mean',
- 'meanf',
- 'median',
- 'members',
- 'mese',
- 'meshgrid',
- 'mfft',
- 'mfile2sci',
- 'minreal',
- 'minss',
- 'mkdir',
- 'modulo',
- 'moment',
- 'mrfit',
- 'msd',
- 'mstr2sci',
- 'mtlb',
- 'mtlb_0',
- 'mtlb_a',
- 'mtlb_all',
- 'mtlb_any',
- 'mtlb_axes',
- 'mtlb_axis',
- 'mtlb_beta',
- 'mtlb_box',
- 'mtlb_choices',
- 'mtlb_close',
- 'mtlb_colordef',
- 'mtlb_cond',
- 'mtlb_cov',
- 'mtlb_cumprod',
- 'mtlb_cumsum',
- 'mtlb_dec2hex',
- 'mtlb_delete',
- 'mtlb_diag',
- 'mtlb_diff',
- 'mtlb_dir',
- 'mtlb_double',
- 'mtlb_e',
- 'mtlb_echo',
- 'mtlb_error',
- 'mtlb_eval',
- 'mtlb_exist',
- 'mtlb_eye',
- 'mtlb_false',
- 'mtlb_fft',
- 'mtlb_fftshift',
- 'mtlb_filter',
- 'mtlb_find',
- 'mtlb_findstr',
- 'mtlb_fliplr',
- 'mtlb_fopen',
- 'mtlb_format',
- 'mtlb_fprintf',
- 'mtlb_fread',
- 'mtlb_fscanf',
- 'mtlb_full',
- 'mtlb_fwrite',
- 'mtlb_get',
- 'mtlb_grid',
- 'mtlb_hold',
- 'mtlb_i',
- 'mtlb_ifft',
- 'mtlb_image',
- 'mtlb_imp',
- 'mtlb_int16',
- 'mtlb_int32',
- 'mtlb_int8',
- 'mtlb_is',
- 'mtlb_isa',
- 'mtlb_isfield',
- 'mtlb_isletter',
- 'mtlb_isspace',
- 'mtlb_l',
- 'mtlb_legendre',
- 'mtlb_linspace',
- 'mtlb_logic',
- 'mtlb_logical',
- 'mtlb_loglog',
- 'mtlb_lower',
- 'mtlb_max',
- 'mtlb_mean',
- 'mtlb_median',
- 'mtlb_mesh',
- 'mtlb_meshdom',
- 'mtlb_min',
- 'mtlb_more',
- 'mtlb_num2str',
- 'mtlb_ones',
- 'mtlb_pcolor',
- 'mtlb_plot',
- 'mtlb_prod',
- 'mtlb_qr',
- 'mtlb_qz',
- 'mtlb_rand',
- 'mtlb_randn',
- 'mtlb_rcond',
- 'mtlb_realmax',
- 'mtlb_realmin',
- 'mtlb_s',
- 'mtlb_semilogx',
- 'mtlb_semilogy',
- 'mtlb_setstr',
- 'mtlb_size',
- 'mtlb_sort',
- 'mtlb_sortrows',
- 'mtlb_sprintf',
- 'mtlb_sscanf',
- 'mtlb_std',
- 'mtlb_strcmp',
- 'mtlb_strcmpi',
- 'mtlb_strfind',
- 'mtlb_strrep',
- 'mtlb_subplot',
- 'mtlb_sum',
- 'mtlb_t',
- 'mtlb_toeplitz',
- 'mtlb_tril',
- 'mtlb_triu',
- 'mtlb_true',
- 'mtlb_type',
- 'mtlb_uint16',
- 'mtlb_uint32',
- 'mtlb_uint8',
- 'mtlb_upper',
- 'mtlb_var',
- 'mtlb_zeros',
- 'mu2lin',
- 'mutation_ga_binary',
- 'mutation_ga_default',
- 'mvcorrel',
- 'mvvacov',
- 'nancumsum',
- 'nand2mean',
- 'nanmax',
- 'nanmean',
- 'nanmeanf',
- 'nanmedian',
- 'nanmin',
- 'nanreglin',
- 'nanstdev',
- 'nansum',
- 'narsimul',
- 'ndgrid',
- 'ndims',
- 'nehari',
- 'neigh_func_csa',
- 'neigh_func_default',
- 'neigh_func_fsa',
- 'neigh_func_vfsa',
- 'neldermead_cget',
- 'neldermead_configure',
- 'neldermead_costf',
- 'neldermead_defaultoutput',
- 'neldermead_destroy',
- 'neldermead_function',
- 'neldermead_get',
- 'neldermead_log',
- 'neldermead_new',
- 'neldermead_restart',
- 'neldermead_search',
- 'neldermead_updatesimp',
- 'nextpow2',
- 'nfreq',
- 'nicholschart',
- 'nlev',
- 'nmplot_cget',
- 'nmplot_configure',
- 'nmplot_contour',
- 'nmplot_destroy',
- 'nmplot_function',
- 'nmplot_get',
- 'nmplot_historyplot',
- 'nmplot_log',
- 'nmplot_new',
- 'nmplot_outputcmd',
- 'nmplot_restart',
- 'nmplot_search',
- 'nmplot_simplexhistory',
- 'noisegen',
- 'nonreg_test_run',
- 'now',
- 'nthroot',
- 'null',
- 'num2cell',
- 'numderivative',
- 'numdiff',
- 'numer',
- 'nyquist',
- 'nyquistfrequencybounds',
- 'obs_gram',
- 'obscont',
- 'observer',
- 'obsv_mat',
- 'obsvss',
- 'oct2dec',
- 'odeoptions',
- 'optim_ga',
- 'optim_moga',
- 'optim_nsga',
- 'optim_nsga2',
- 'optim_sa',
- 'optimbase_cget',
- 'optimbase_checkbounds',
- 'optimbase_checkcostfun',
- 'optimbase_checkx0',
- 'optimbase_configure',
- 'optimbase_destroy',
- 'optimbase_function',
- 'optimbase_get',
- 'optimbase_hasbounds',
- 'optimbase_hasconstraints',
- 'optimbase_hasnlcons',
- 'optimbase_histget',
- 'optimbase_histset',
- 'optimbase_incriter',
- 'optimbase_isfeasible',
- 'optimbase_isinbounds',
- 'optimbase_isinnonlincons',
- 'optimbase_log',
- 'optimbase_logshutdown',
- 'optimbase_logstartup',
- 'optimbase_new',
- 'optimbase_outputcmd',
- 'optimbase_outstruct',
- 'optimbase_proj2bnds',
- 'optimbase_set',
- 'optimbase_stoplog',
- 'optimbase_terminate',
- 'optimget',
- 'optimplotfunccount',
- 'optimplotfval',
- 'optimplotx',
- 'optimset',
- 'optimsimplex_center',
- 'optimsimplex_check',
- 'optimsimplex_compsomefv',
- 'optimsimplex_computefv',
- 'optimsimplex_deltafv',
- 'optimsimplex_deltafvmax',
- 'optimsimplex_destroy',
- 'optimsimplex_dirmat',
- 'optimsimplex_fvmean',
- 'optimsimplex_fvstdev',
- 'optimsimplex_fvvariance',
- 'optimsimplex_getall',
- 'optimsimplex_getallfv',
- 'optimsimplex_getallx',
- 'optimsimplex_getfv',
- 'optimsimplex_getn',
- 'optimsimplex_getnbve',
- 'optimsimplex_getve',
- 'optimsimplex_getx',
- 'optimsimplex_gradientfv',
- 'optimsimplex_log',
- 'optimsimplex_new',
- 'optimsimplex_reflect',
- 'optimsimplex_setall',
- 'optimsimplex_setallfv',
- 'optimsimplex_setallx',
- 'optimsimplex_setfv',
- 'optimsimplex_setn',
- 'optimsimplex_setnbve',
- 'optimsimplex_setve',
- 'optimsimplex_setx',
- 'optimsimplex_shrink',
- 'optimsimplex_size',
- 'optimsimplex_sort',
- 'optimsimplex_xbar',
- 'orth',
- 'output_ga_default',
- 'output_moga_default',
- 'output_nsga2_default',
- 'output_nsga_default',
- 'p_margin',
- 'pack',
- 'pareto_filter',
- 'parrot',
- 'pbig',
- 'pca',
- 'pcg',
- 'pdiv',
- 'pen2ea',
- 'pencan',
- 'pencost',
- 'penlaur',
- 'perctl',
- 'perl',
- 'perms',
- 'permute',
- 'pertrans',
- 'pfactors',
- 'pfss',
- 'phasemag',
- 'phaseplot',
- 'phc',
- 'pinv',
- 'playsnd',
- 'plotprofile',
- 'plzr',
- 'pmodulo',
- 'pol2des',
- 'pol2str',
- 'polar',
- 'polfact',
- 'prbs_a',
- 'prettyprint',
- 'primes',
- 'princomp',
- 'profile',
- 'proj',
- 'projsl',
- 'projspec',
- 'psmall',
- 'pspect',
- 'qmr',
- 'qpsolve',
- 'quart',
- 'quaskro',
- 'rafiter',
- 'randpencil',
- 'range',
- 'rank',
- 'readxls',
- 'recompilefunction',
- 'recons',
- 'reglin',
- 'regress',
- 'remezb',
- 'remove_param',
- 'remove_profiling',
- 'repfreq',
- 'replace_Ix_by_Fx',
- 'repmat',
- 'reset_profiling',
- 'resize_matrix',
- 'returntoscilab',
- 'rhs2code',
- 'ric_desc',
- 'riccati',
- 'rmdir',
- 'routh_t',
- 'rowcomp',
- 'rowcompr',
- 'rowinout',
- 'rowregul',
- 'rowshuff',
- 'rref',
- 'sample',
- 'samplef',
- 'samwr',
- 'savematfile',
- 'savewave',
- 'scanf',
- 'sci2exp',
- 'sciGUI_init',
- 'sci_sparse',
- 'scicos_getvalue',
- 'scicos_simulate',
- 'scicos_workspace_init',
- 'scisptdemo',
- 'scitest',
- 'sdiff',
- 'sec',
- 'secd',
- 'sech',
- 'selection_ga_elitist',
- 'selection_ga_random',
- 'sensi',
- 'setPreferencesValue',
- 'set_param',
- 'setdiff',
- 'sgrid',
- 'show_margins',
- 'show_pca',
- 'showprofile',
- 'signm',
- 'sinc',
- 'sincd',
- 'sind',
- 'sinh',
- 'sinhm',
- 'sinm',
- 'sm2des',
- 'sm2ss',
- 'smga',
- 'smooth',
- 'solve',
- 'sound',
- 'soundsec',
- 'sp2adj',
- 'spaninter',
- 'spanplus',
- 'spantwo',
- 'specfact',
- 'speye',
- 'sprand',
- 'spzeros',
- 'sqroot',
- 'sqrtm',
- 'squarewave',
- 'squeeze',
- 'srfaur',
- 'srkf',
- 'ss2des',
- 'ss2ss',
- 'ss2tf',
- 'sskf',
- 'ssprint',
- 'ssrand',
- 'st_deviation',
- 'st_i_generic',
- 'st_ility',
- 'stabil',
- 'statgain',
- 'stdev',
- 'stdevf',
- 'steadycos',
- 'strange',
- 'strcmpi',
- 'struct',
- 'sub2ind',
- 'sva',
- 'svplot',
- 'sylm',
- 'sylv',
- 'sysconv',
- 'sysdiag',
- 'sysfact',
- 'syslin',
- 'syssize',
- 'system',
- 'systmat',
- 'tabul',
- 'tand',
- 'tanh',
- 'tanhm',
- 'tanm',
- 'tbx_build_blocks',
- 'tbx_build_cleaner',
- 'tbx_build_gateway',
- 'tbx_build_gateway_clean',
- 'tbx_build_gateway_loader',
- 'tbx_build_help',
- 'tbx_build_help_loader',
- 'tbx_build_loader',
- 'tbx_build_localization',
- 'tbx_build_macros',
- 'tbx_build_pal_loader',
- 'tbx_build_src',
- 'tbx_builder',
- 'tbx_builder_gateway',
- 'tbx_builder_gateway_lang',
- 'tbx_builder_help',
- 'tbx_builder_help_lang',
- 'tbx_builder_macros',
- 'tbx_builder_src',
- 'tbx_builder_src_lang',
- 'tbx_generate_pofile',
- 'temp_law_csa',
- 'temp_law_default',
- 'temp_law_fsa',
- 'temp_law_huang',
- 'temp_law_vfsa',
- 'test_clean',
- 'test_on_columns',
- 'test_run',
- 'test_run_level',
- 'testexamples',
- 'tf2des',
- 'tf2ss',
- 'thrownan',
- 'tic',
- 'time_id',
- 'toc',
- 'toeplitz',
- 'tokenpos',
- 'toolboxes',
- 'trace',
- 'trans',
- 'translatepaths',
- 'tree2code',
- 'trfmod',
- 'trianfml',
- 'trimmean',
- 'trisolve',
- 'trzeros',
- 'typeof',
- 'ui_observer',
- 'union',
- 'unique',
- 'unit_test_run',
- 'unix_g',
- 'unix_s',
- 'unix_w',
- 'unix_x',
- 'unobs',
- 'unpack',
- 'unwrap',
- 'variance',
- 'variancef',
- 'vec2list',
- 'vectorfind',
- 'ver',
- 'warnobsolete',
- 'wavread',
- 'wavwrite',
- 'wcenter',
- 'weekday',
- 'wfir',
- 'wfir_gui',
- 'whereami',
- 'who_user',
- 'whos',
- 'wiener',
- 'wigner',
- 'window',
- 'winlist',
- 'with_javasci',
- 'with_macros_source',
- 'with_modelica_compiler',
- 'with_tk',
- 'xcorr',
- 'xcosBlockEval',
- 'xcosBlockInterface',
- 'xcosCodeGeneration',
- 'xcosConfigureModelica',
- 'xcosPal',
- 'xcosPalAdd',
- 'xcosPalAddBlock',
- 'xcosPalExport',
- 'xcosPalGenerateAllIcons',
- 'xcosShowBlockWarning',
- 'xcosValidateBlockSet',
- 'xcosValidateCompareBlock',
- 'xcos_compile',
- 'xcos_debug_gui',
- 'xcos_run',
- 'xcos_simulate',
- 'xcov',
- 'xmltochm',
- 'xmltoformat',
- 'xmltohtml',
- 'xmltojar',
- 'xmltopdf',
- 'xmltops',
- 'xmltoweb',
- 'yulewalk',
- 'zeropen',
- 'zgrid',
- 'zpbutt',
- 'zpch1',
- 'zpch2',
- 'zpell',
-)
-
-variables_kw = (
- '$',
- '%F',
- '%T',
- '%e',
- '%eps',
- '%f',
- '%fftw',
- '%gui',
- '%i',
- '%inf',
- '%io',
- '%modalWarning',
- '%nan',
- '%pi',
- '%s',
- '%t',
- '%tk',
- '%toolboxes',
- '%toolboxes_dir',
- '%z',
- 'PWD',
- 'SCI',
- 'SCIHOME',
- 'TMPDIR',
- 'arnoldilib',
- 'assertlib',
- 'atomslib',
- 'cacsdlib',
- 'compatibility_functilib',
- 'corelib',
- 'data_structureslib',
- 'demo_toolslib',
- 'development_toolslib',
- 'differential_equationlib',
- 'dynamic_linklib',
- 'elementary_functionslib',
- 'enull',
- 'evoid',
- 'external_objectslib',
- 'fd',
- 'fileiolib',
- 'functionslib',
- 'genetic_algorithmslib',
- 'helptoolslib',
- 'home',
- 'integerlib',
- 'interpolationlib',
- 'iolib',
- 'jnull',
- 'jvoid',
- 'linear_algebralib',
- 'm2scilib',
- 'matiolib',
- 'modules_managerlib',
- 'neldermeadlib',
- 'optimbaselib',
- 'optimizationlib',
- 'optimsimplexlib',
- 'output_streamlib',
- 'overloadinglib',
- 'parameterslib',
- 'polynomialslib',
- 'preferenceslib',
- 'randliblib',
- 'scicos_autolib',
- 'scicos_utilslib',
- 'scinoteslib',
- 'signal_processinglib',
- 'simulated_annealinglib',
- 'soundlib',
- 'sparselib',
- 'special_functionslib',
- 'spreadsheetlib',
- 'statisticslib',
- 'stringlib',
- 'tclscilib',
- 'timelib',
- 'umfpacklib',
- 'xcoslib',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import subprocess
- from pygments.util import format_lines, duplicates_removed
-
- mapping = {'variables': 'builtin'}
-
- def extract_completion(var_type):
- s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- output = s.communicate('''\
-fd = mopen("/dev/stderr", "wt");
-mputl(strcat(completion("", "%s"), "||"), fd);
-mclose(fd)\n''' % var_type)
- if '||' not in output[1]:
- raise Exception(output[0])
- # Invalid DISPLAY causes this to be output:
- text = output[1].strip()
- if text.startswith('Error: unable to open display \n'):
- text = text[len('Error: unable to open display \n'):]
- return text.split('||')
-
- new_data = {}
- seen = set() # only keep first type for a given word
- for t in ('functions', 'commands', 'macros', 'variables'):
- new_data[t] = duplicates_removed(extract_completion(t), seen)
- seen.update(set(new_data[t]))
-
-
- with open(__file__) as f:
- content = f.read()
-
- header = content[:content.find('# Autogenerated')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- with open(__file__, 'w') as f:
- f.write(header)
- f.write('# Autogenerated\n\n')
- for k, v in sorted(new_data.iteritems()):
- f.write(format_lines(k + '_kw', v) + '\n\n')
- f.write(footer)
+ :license: BSD, see LICENSE for details.
+"""
+
+# Autogenerated
+
+commands_kw = (
+ 'abort',
+ 'apropos',
+ 'break',
+ 'case',
+ 'catch',
+ 'continue',
+ 'do',
+ 'else',
+ 'elseif',
+ 'end',
+ 'endfunction',
+ 'for',
+ 'function',
+ 'help',
+ 'if',
+ 'pause',
+ 'quit',
+ 'select',
+ 'then',
+ 'try',
+ 'while',
+)
+
+functions_kw = (
+ '!!_invoke_',
+ '%H5Object_e',
+ '%H5Object_fieldnames',
+ '%H5Object_p',
+ '%XMLAttr_6',
+ '%XMLAttr_e',
+ '%XMLAttr_i_XMLElem',
+ '%XMLAttr_length',
+ '%XMLAttr_p',
+ '%XMLAttr_size',
+ '%XMLDoc_6',
+ '%XMLDoc_e',
+ '%XMLDoc_i_XMLList',
+ '%XMLDoc_p',
+ '%XMLElem_6',
+ '%XMLElem_e',
+ '%XMLElem_i_XMLDoc',
+ '%XMLElem_i_XMLElem',
+ '%XMLElem_i_XMLList',
+ '%XMLElem_p',
+ '%XMLList_6',
+ '%XMLList_e',
+ '%XMLList_i_XMLElem',
+ '%XMLList_i_XMLList',
+ '%XMLList_length',
+ '%XMLList_p',
+ '%XMLList_size',
+ '%XMLNs_6',
+ '%XMLNs_e',
+ '%XMLNs_i_XMLElem',
+ '%XMLNs_p',
+ '%XMLSet_6',
+ '%XMLSet_e',
+ '%XMLSet_length',
+ '%XMLSet_p',
+ '%XMLSet_size',
+ '%XMLValid_p',
+ '%_EClass_6',
+ '%_EClass_e',
+ '%_EClass_p',
+ '%_EObj_0',
+ '%_EObj_1__EObj',
+ '%_EObj_1_b',
+ '%_EObj_1_c',
+ '%_EObj_1_i',
+ '%_EObj_1_s',
+ '%_EObj_2__EObj',
+ '%_EObj_2_b',
+ '%_EObj_2_c',
+ '%_EObj_2_i',
+ '%_EObj_2_s',
+ '%_EObj_3__EObj',
+ '%_EObj_3_b',
+ '%_EObj_3_c',
+ '%_EObj_3_i',
+ '%_EObj_3_s',
+ '%_EObj_4__EObj',
+ '%_EObj_4_b',
+ '%_EObj_4_c',
+ '%_EObj_4_i',
+ '%_EObj_4_s',
+ '%_EObj_5',
+ '%_EObj_6',
+ '%_EObj_a__EObj',
+ '%_EObj_a_b',
+ '%_EObj_a_c',
+ '%_EObj_a_i',
+ '%_EObj_a_s',
+ '%_EObj_d__EObj',
+ '%_EObj_d_b',
+ '%_EObj_d_c',
+ '%_EObj_d_i',
+ '%_EObj_d_s',
+ '%_EObj_disp',
+ '%_EObj_e',
+ '%_EObj_g__EObj',
+ '%_EObj_g_b',
+ '%_EObj_g_c',
+ '%_EObj_g_i',
+ '%_EObj_g_s',
+ '%_EObj_h__EObj',
+ '%_EObj_h_b',
+ '%_EObj_h_c',
+ '%_EObj_h_i',
+ '%_EObj_h_s',
+ '%_EObj_i__EObj',
+ '%_EObj_j__EObj',
+ '%_EObj_j_b',
+ '%_EObj_j_c',
+ '%_EObj_j_i',
+ '%_EObj_j_s',
+ '%_EObj_k__EObj',
+ '%_EObj_k_b',
+ '%_EObj_k_c',
+ '%_EObj_k_i',
+ '%_EObj_k_s',
+ '%_EObj_l__EObj',
+ '%_EObj_l_b',
+ '%_EObj_l_c',
+ '%_EObj_l_i',
+ '%_EObj_l_s',
+ '%_EObj_m__EObj',
+ '%_EObj_m_b',
+ '%_EObj_m_c',
+ '%_EObj_m_i',
+ '%_EObj_m_s',
+ '%_EObj_n__EObj',
+ '%_EObj_n_b',
+ '%_EObj_n_c',
+ '%_EObj_n_i',
+ '%_EObj_n_s',
+ '%_EObj_o__EObj',
+ '%_EObj_o_b',
+ '%_EObj_o_c',
+ '%_EObj_o_i',
+ '%_EObj_o_s',
+ '%_EObj_p',
+ '%_EObj_p__EObj',
+ '%_EObj_p_b',
+ '%_EObj_p_c',
+ '%_EObj_p_i',
+ '%_EObj_p_s',
+ '%_EObj_q__EObj',
+ '%_EObj_q_b',
+ '%_EObj_q_c',
+ '%_EObj_q_i',
+ '%_EObj_q_s',
+ '%_EObj_r__EObj',
+ '%_EObj_r_b',
+ '%_EObj_r_c',
+ '%_EObj_r_i',
+ '%_EObj_r_s',
+ '%_EObj_s__EObj',
+ '%_EObj_s_b',
+ '%_EObj_s_c',
+ '%_EObj_s_i',
+ '%_EObj_s_s',
+ '%_EObj_t',
+ '%_EObj_x__EObj',
+ '%_EObj_x_b',
+ '%_EObj_x_c',
+ '%_EObj_x_i',
+ '%_EObj_x_s',
+ '%_EObj_y__EObj',
+ '%_EObj_y_b',
+ '%_EObj_y_c',
+ '%_EObj_y_i',
+ '%_EObj_y_s',
+ '%_EObj_z__EObj',
+ '%_EObj_z_b',
+ '%_EObj_z_c',
+ '%_EObj_z_i',
+ '%_EObj_z_s',
+ '%_eigs',
+ '%_load',
+ '%b_1__EObj',
+ '%b_2__EObj',
+ '%b_3__EObj',
+ '%b_4__EObj',
+ '%b_a__EObj',
+ '%b_d__EObj',
+ '%b_g__EObj',
+ '%b_h__EObj',
+ '%b_i_XMLList',
+ '%b_i__EObj',
+ '%b_j__EObj',
+ '%b_k__EObj',
+ '%b_l__EObj',
+ '%b_m__EObj',
+ '%b_n__EObj',
+ '%b_o__EObj',
+ '%b_p__EObj',
+ '%b_q__EObj',
+ '%b_r__EObj',
+ '%b_s__EObj',
+ '%b_x__EObj',
+ '%b_y__EObj',
+ '%b_z__EObj',
+ '%c_1__EObj',
+ '%c_2__EObj',
+ '%c_3__EObj',
+ '%c_4__EObj',
+ '%c_a__EObj',
+ '%c_d__EObj',
+ '%c_g__EObj',
+ '%c_h__EObj',
+ '%c_i_XMLAttr',
+ '%c_i_XMLDoc',
+ '%c_i_XMLElem',
+ '%c_i_XMLList',
+ '%c_i__EObj',
+ '%c_j__EObj',
+ '%c_k__EObj',
+ '%c_l__EObj',
+ '%c_m__EObj',
+ '%c_n__EObj',
+ '%c_o__EObj',
+ '%c_p__EObj',
+ '%c_q__EObj',
+ '%c_r__EObj',
+ '%c_s__EObj',
+ '%c_x__EObj',
+ '%c_y__EObj',
+ '%c_z__EObj',
+ '%ce_i_XMLList',
+ '%fptr_i_XMLList',
+ '%h_i_XMLList',
+ '%hm_i_XMLList',
+ '%i_1__EObj',
+ '%i_2__EObj',
+ '%i_3__EObj',
+ '%i_4__EObj',
+ '%i_a__EObj',
+ '%i_abs',
+ '%i_cumprod',
+ '%i_cumsum',
+ '%i_d__EObj',
+ '%i_diag',
+ '%i_g__EObj',
+ '%i_h__EObj',
+ '%i_i_XMLList',
+ '%i_i__EObj',
+ '%i_j__EObj',
+ '%i_k__EObj',
+ '%i_l__EObj',
+ '%i_m__EObj',
+ '%i_matrix',
+ '%i_max',
+ '%i_maxi',
+ '%i_min',
+ '%i_mini',
+ '%i_mput',
+ '%i_n__EObj',
+ '%i_o__EObj',
+ '%i_p',
+ '%i_p__EObj',
+ '%i_prod',
+ '%i_q__EObj',
+ '%i_r__EObj',
+ '%i_s__EObj',
+ '%i_sum',
+ '%i_tril',
+ '%i_triu',
+ '%i_x__EObj',
+ '%i_y__EObj',
+ '%i_z__EObj',
+ '%ip_i_XMLList',
+ '%l_i_XMLList',
+ '%l_i__EObj',
+ '%lss_i_XMLList',
+ '%mc_i_XMLList',
+ '%msp_full',
+ '%msp_i_XMLList',
+ '%msp_spget',
+ '%p_i_XMLList',
+ '%ptr_i_XMLList',
+ '%r_i_XMLList',
+ '%s_1__EObj',
+ '%s_2__EObj',
+ '%s_3__EObj',
+ '%s_4__EObj',
+ '%s_a__EObj',
+ '%s_d__EObj',
+ '%s_g__EObj',
+ '%s_h__EObj',
+ '%s_i_XMLList',
+ '%s_i__EObj',
+ '%s_j__EObj',
+ '%s_k__EObj',
+ '%s_l__EObj',
+ '%s_m__EObj',
+ '%s_n__EObj',
+ '%s_o__EObj',
+ '%s_p__EObj',
+ '%s_q__EObj',
+ '%s_r__EObj',
+ '%s_s__EObj',
+ '%s_x__EObj',
+ '%s_y__EObj',
+ '%s_z__EObj',
+ '%sp_i_XMLList',
+ '%spb_i_XMLList',
+ '%st_i_XMLList',
+ 'Calendar',
+ 'ClipBoard',
+ 'Matplot',
+ 'Matplot1',
+ 'PlaySound',
+ 'TCL_DeleteInterp',
+ 'TCL_DoOneEvent',
+ 'TCL_EvalFile',
+ 'TCL_EvalStr',
+ 'TCL_ExistArray',
+ 'TCL_ExistInterp',
+ 'TCL_ExistVar',
+ 'TCL_GetVar',
+ 'TCL_GetVersion',
+ 'TCL_SetVar',
+ 'TCL_UnsetVar',
+ 'TCL_UpVar',
+ '_',
+ '_code2str',
+ '_d',
+ '_str2code',
+ 'about',
+ 'abs',
+ 'acos',
+ 'addModulePreferences',
+ 'addcolor',
+ 'addf',
+ 'addhistory',
+ 'addinter',
+ 'addlocalizationdomain',
+ 'amell',
+ 'and',
+ 'argn',
+ 'arl2_ius',
+ 'ascii',
+ 'asin',
+ 'atan',
+ 'backslash',
+ 'balanc',
+ 'banner',
+ 'base2dec',
+ 'basename',
+ 'bdiag',
+ 'beep',
+ 'besselh',
+ 'besseli',
+ 'besselj',
+ 'besselk',
+ 'bessely',
+ 'beta',
+ 'bezout',
+ 'bfinit',
+ 'blkfc1i',
+ 'blkslvi',
+ 'bool2s',
+ 'browsehistory',
+ 'browsevar',
+ 'bsplin3val',
+ 'buildDoc',
+ 'buildouttb',
+ 'bvode',
+ 'c_link',
+ 'call',
+ 'callblk',
+ 'captions',
+ 'cd',
+ 'cdfbet',
+ 'cdfbin',
+ 'cdfchi',
+ 'cdfchn',
+ 'cdff',
+ 'cdffnc',
+ 'cdfgam',
+ 'cdfnbn',
+ 'cdfnor',
+ 'cdfpoi',
+ 'cdft',
+ 'ceil',
+ 'champ',
+ 'champ1',
+ 'chdir',
+ 'chol',
+ 'clc',
+ 'clean',
+ 'clear',
+ 'clearfun',
+ 'clearglobal',
+ 'closeEditor',
+ 'closeEditvar',
+ 'closeXcos',
+ 'code2str',
+ 'coeff',
+ 'color',
+ 'comp',
+ 'completion',
+ 'conj',
+ 'contour2di',
+ 'contr',
+ 'conv2',
+ 'convstr',
+ 'copy',
+ 'copyfile',
+ 'corr',
+ 'cos',
+ 'coserror',
+ 'createdir',
+ 'cshep2d',
+ 'csvDefault',
+ 'csvIsnum',
+ 'csvRead',
+ 'csvStringToDouble',
+ 'csvTextScan',
+ 'csvWrite',
+ 'ctree2',
+ 'ctree3',
+ 'ctree4',
+ 'cumprod',
+ 'cumsum',
+ 'curblock',
+ 'curblockc',
+ 'daskr',
+ 'dasrt',
+ 'dassl',
+ 'data2sig',
+ 'datatipCreate',
+ 'datatipManagerMode',
+ 'datatipMove',
+ 'datatipRemove',
+ 'datatipSetDisplay',
+ 'datatipSetInterp',
+ 'datatipSetOrientation',
+ 'datatipSetStyle',
+ 'datatipToggle',
+ 'dawson',
+ 'dct',
+ 'debug',
+ 'dec2base',
+ 'deff',
+ 'definedfields',
+ 'degree',
+ 'delbpt',
+ 'delete',
+ 'deletefile',
+ 'delip',
+ 'delmenu',
+ 'det',
+ 'dgettext',
+ 'dhinf',
+ 'diag',
+ 'diary',
+ 'diffobjs',
+ 'disp',
+ 'dispbpt',
+ 'displayhistory',
+ 'disposefftwlibrary',
+ 'dlgamma',
+ 'dnaupd',
+ 'dneupd',
+ 'double',
+ 'drawaxis',
+ 'drawlater',
+ 'drawnow',
+ 'driver',
+ 'dsaupd',
+ 'dsearch',
+ 'dseupd',
+ 'dst',
+ 'duplicate',
+ 'editvar',
+ 'emptystr',
+ 'end_scicosim',
+ 'ereduc',
+ 'erf',
+ 'erfc',
+ 'erfcx',
+ 'erfi',
+ 'errcatch',
+ 'errclear',
+ 'error',
+ 'eval_cshep2d',
+ 'exec',
+ 'execstr',
+ 'exists',
+ 'exit',
+ 'exp',
+ 'expm',
+ 'exportUI',
+ 'export_to_hdf5',
+ 'eye',
+ 'fadj2sp',
+ 'fec',
+ 'feval',
+ 'fft',
+ 'fftw',
+ 'fftw_flags',
+ 'fftw_forget_wisdom',
+ 'fftwlibraryisloaded',
+ 'figure',
+ 'file',
+ 'filebrowser',
+ 'fileext',
+ 'fileinfo',
+ 'fileparts',
+ 'filesep',
+ 'find',
+ 'findBD',
+ 'findfiles',
+ 'fire_closing_finished',
+ 'floor',
+ 'format',
+ 'fort',
+ 'fprintfMat',
+ 'freq',
+ 'frexp',
+ 'fromc',
+ 'fromjava',
+ 'fscanfMat',
+ 'fsolve',
+ 'fstair',
+ 'full',
+ 'fullpath',
+ 'funcprot',
+ 'funptr',
+ 'gamma',
+ 'gammaln',
+ 'geom3d',
+ 'get',
+ 'getURL',
+ 'get_absolute_file_path',
+ 'get_fftw_wisdom',
+ 'getblocklabel',
+ 'getcallbackobject',
+ 'getdate',
+ 'getdebuginfo',
+ 'getdefaultlanguage',
+ 'getdrives',
+ 'getdynlibext',
+ 'getenv',
+ 'getfield',
+ 'gethistory',
+ 'gethistoryfile',
+ 'getinstalledlookandfeels',
+ 'getio',
+ 'getlanguage',
+ 'getlongpathname',
+ 'getlookandfeel',
+ 'getmd5',
+ 'getmemory',
+ 'getmodules',
+ 'getos',
+ 'getpid',
+ 'getrelativefilename',
+ 'getscicosvars',
+ 'getscilabmode',
+ 'getshortpathname',
+ 'gettext',
+ 'getvariablesonstack',
+ 'getversion',
+ 'glist',
+ 'global',
+ 'glue',
+ 'grand',
+ 'graphicfunction',
+ 'grayplot',
+ 'grep',
+ 'gsort',
+ 'gstacksize',
+ 'h5attr',
+ 'h5close',
+ 'h5cp',
+ 'h5dataset',
+ 'h5dump',
+ 'h5exists',
+ 'h5flush',
+ 'h5get',
+ 'h5group',
+ 'h5isArray',
+ 'h5isAttr',
+ 'h5isCompound',
+ 'h5isFile',
+ 'h5isGroup',
+ 'h5isList',
+ 'h5isRef',
+ 'h5isSet',
+ 'h5isSpace',
+ 'h5isType',
+ 'h5isVlen',
+ 'h5label',
+ 'h5ln',
+ 'h5ls',
+ 'h5mount',
+ 'h5mv',
+ 'h5open',
+ 'h5read',
+ 'h5readattr',
+ 'h5rm',
+ 'h5umount',
+ 'h5write',
+ 'h5writeattr',
+ 'havewindow',
+ 'helpbrowser',
+ 'hess',
+ 'hinf',
+ 'historymanager',
+ 'historysize',
+ 'host',
+ 'htmlDump',
+ 'htmlRead',
+ 'htmlReadStr',
+ 'htmlWrite',
+ 'iconvert',
+ 'ieee',
+ 'ilib_verbose',
+ 'imag',
+ 'impl',
+ 'import_from_hdf5',
+ 'imult',
+ 'inpnvi',
+ 'int',
+ 'int16',
+ 'int2d',
+ 'int32',
+ 'int3d',
+ 'int8',
+ 'interp',
+ 'interp2d',
+ 'interp3d',
+ 'intg',
+ 'intppty',
+ 'inttype',
+ 'inv',
+ 'invoke_lu',
+ 'is_handle_valid',
+ 'is_hdf5_file',
+ 'isalphanum',
+ 'isascii',
+ 'isdef',
+ 'isdigit',
+ 'isdir',
+ 'isequal',
+ 'isequalbitwise',
+ 'iserror',
+ 'isfile',
+ 'isglobal',
+ 'isletter',
+ 'isnum',
+ 'isreal',
+ 'iswaitingforinput',
+ 'jallowClassReloading',
+ 'jarray',
+ 'jautoTranspose',
+ 'jautoUnwrap',
+ 'javaclasspath',
+ 'javalibrarypath',
+ 'jcast',
+ 'jcompile',
+ 'jconvMatrixMethod',
+ 'jcreatejar',
+ 'jdeff',
+ 'jdisableTrace',
+ 'jenableTrace',
+ 'jexists',
+ 'jgetclassname',
+ 'jgetfield',
+ 'jgetfields',
+ 'jgetinfo',
+ 'jgetmethods',
+ 'jimport',
+ 'jinvoke',
+ 'jinvoke_db',
+ 'jnewInstance',
+ 'jremove',
+ 'jsetfield',
+ 'junwrap',
+ 'junwraprem',
+ 'jwrap',
+ 'jwrapinfloat',
+ 'kron',
+ 'lasterror',
+ 'ldiv',
+ 'ldivf',
+ 'legendre',
+ 'length',
+ 'lib',
+ 'librarieslist',
+ 'libraryinfo',
+ 'light',
+ 'linear_interpn',
+ 'lines',
+ 'link',
+ 'linmeq',
+ 'list',
+ 'listvar_in_hdf5',
+ 'load',
+ 'loadGui',
+ 'loadScicos',
+ 'loadXcos',
+ 'loadfftwlibrary',
+ 'loadhistory',
+ 'log',
+ 'log1p',
+ 'lsq',
+ 'lsq_splin',
+ 'lsqrsolve',
+ 'lsslist',
+ 'lstcat',
+ 'lstsize',
+ 'ltitr',
+ 'lu',
+ 'ludel',
+ 'lufact',
+ 'luget',
+ 'lusolve',
+ 'macr2lst',
+ 'macr2tree',
+ 'matfile_close',
+ 'matfile_listvar',
+ 'matfile_open',
+ 'matfile_varreadnext',
+ 'matfile_varwrite',
+ 'matrix',
+ 'max',
+ 'maxfiles',
+ 'mclearerr',
+ 'mclose',
+ 'meof',
+ 'merror',
+ 'messagebox',
+ 'mfprintf',
+ 'mfscanf',
+ 'mget',
+ 'mgeti',
+ 'mgetl',
+ 'mgetstr',
+ 'min',
+ 'mlist',
+ 'mode',
+ 'model2blk',
+ 'mopen',
+ 'move',
+ 'movefile',
+ 'mprintf',
+ 'mput',
+ 'mputl',
+ 'mputstr',
+ 'mscanf',
+ 'mseek',
+ 'msprintf',
+ 'msscanf',
+ 'mtell',
+ 'mtlb_mode',
+ 'mtlb_sparse',
+ 'mucomp',
+ 'mulf',
+ 'name2rgb',
+ 'nearfloat',
+ 'newaxes',
+ 'newest',
+ 'newfun',
+ 'nnz',
+ 'norm',
+ 'notify',
+ 'number_properties',
+ 'ode',
+ 'odedc',
+ 'ones',
+ 'openged',
+ 'opentk',
+ 'optim',
+ 'or',
+ 'ordmmd',
+ 'parallel_concurrency',
+ 'parallel_run',
+ 'param3d',
+ 'param3d1',
+ 'part',
+ 'pathconvert',
+ 'pathsep',
+ 'phase_simulation',
+ 'plot2d',
+ 'plot2d1',
+ 'plot2d2',
+ 'plot2d3',
+ 'plot2d4',
+ 'plot3d',
+ 'plot3d1',
+ 'plotbrowser',
+ 'pointer_xproperty',
+ 'poly',
+ 'ppol',
+ 'pppdiv',
+ 'predef',
+ 'preferences',
+ 'print',
+ 'printf',
+ 'printfigure',
+ 'printsetupbox',
+ 'prod',
+ 'progressionbar',
+ 'prompt',
+ 'pwd',
+ 'qld',
+ 'qp_solve',
+ 'qr',
+ 'raise_window',
+ 'rand',
+ 'rankqr',
+ 'rat',
+ 'rcond',
+ 'rdivf',
+ 'read',
+ 'read4b',
+ 'read_csv',
+ 'readb',
+ 'readgateway',
+ 'readmps',
+ 'real',
+ 'realtime',
+ 'realtimeinit',
+ 'regexp',
+ 'relocate_handle',
+ 'remez',
+ 'removeModulePreferences',
+ 'removedir',
+ 'removelinehistory',
+ 'res_with_prec',
+ 'resethistory',
+ 'residu',
+ 'resume',
+ 'return',
+ 'ricc',
+ 'rlist',
+ 'roots',
+ 'rotate_axes',
+ 'round',
+ 'rpem',
+ 'rtitr',
+ 'rubberbox',
+ 'save',
+ 'saveGui',
+ 'saveafterncommands',
+ 'saveconsecutivecommands',
+ 'savehistory',
+ 'schur',
+ 'sci_haltscicos',
+ 'sci_tree2',
+ 'sci_tree3',
+ 'sci_tree4',
+ 'sciargs',
+ 'scicos_debug',
+ 'scicos_debug_count',
+ 'scicos_time',
+ 'scicosim',
+ 'scinotes',
+ 'sctree',
+ 'semidef',
+ 'set',
+ 'set_blockerror',
+ 'set_fftw_wisdom',
+ 'set_xproperty',
+ 'setbpt',
+ 'setdefaultlanguage',
+ 'setenv',
+ 'setfield',
+ 'sethistoryfile',
+ 'setlanguage',
+ 'setlookandfeel',
+ 'setmenu',
+ 'sfact',
+ 'sfinit',
+ 'show_window',
+ 'sident',
+ 'sig2data',
+ 'sign',
+ 'simp',
+ 'simp_mode',
+ 'sin',
+ 'size',
+ 'slash',
+ 'sleep',
+ 'sorder',
+ 'sparse',
+ 'spchol',
+ 'spcompack',
+ 'spec',
+ 'spget',
+ 'splin',
+ 'splin2d',
+ 'splin3d',
+ 'splitURL',
+ 'spones',
+ 'sprintf',
+ 'sqrt',
+ 'stacksize',
+ 'str2code',
+ 'strcat',
+ 'strchr',
+ 'strcmp',
+ 'strcspn',
+ 'strindex',
+ 'string',
+ 'stringbox',
+ 'stripblanks',
+ 'strncpy',
+ 'strrchr',
+ 'strrev',
+ 'strsplit',
+ 'strspn',
+ 'strstr',
+ 'strsubst',
+ 'strtod',
+ 'strtok',
+ 'subf',
+ 'sum',
+ 'svd',
+ 'swap_handles',
+ 'symfcti',
+ 'syredi',
+ 'system_getproperty',
+ 'system_setproperty',
+ 'ta2lpd',
+ 'tan',
+ 'taucs_chdel',
+ 'taucs_chfact',
+ 'taucs_chget',
+ 'taucs_chinfo',
+ 'taucs_chsolve',
+ 'tempname',
+ 'testmatrix',
+ 'timer',
+ 'tlist',
+ 'tohome',
+ 'tokens',
+ 'toolbar',
+ 'toprint',
+ 'tr_zer',
+ 'tril',
+ 'triu',
+ 'type',
+ 'typename',
+ 'uiDisplayTree',
+ 'uicontextmenu',
+ 'uicontrol',
+ 'uigetcolor',
+ 'uigetdir',
+ 'uigetfile',
+ 'uigetfont',
+ 'uimenu',
+ 'uint16',
+ 'uint32',
+ 'uint8',
+ 'uipopup',
+ 'uiputfile',
+ 'uiwait',
+ 'ulink',
+ 'umf_ludel',
+ 'umf_lufact',
+ 'umf_luget',
+ 'umf_luinfo',
+ 'umf_lusolve',
+ 'umfpack',
+ 'unglue',
+ 'unix',
+ 'unsetmenu',
+ 'unzoom',
+ 'updatebrowsevar',
+ 'usecanvas',
+ 'useeditor',
+ 'user',
+ 'var2vec',
+ 'varn',
+ 'vec2var',
+ 'waitbar',
+ 'warnBlockByUID',
+ 'warning',
+ 'what',
+ 'where',
+ 'whereis',
+ 'who',
+ 'winsid',
+ 'with_module',
+ 'writb',
+ 'write',
+ 'write4b',
+ 'write_csv',
+ 'x_choose',
+ 'x_choose_modeless',
+ 'x_dialog',
+ 'x_mdialog',
+ 'xarc',
+ 'xarcs',
+ 'xarrows',
+ 'xchange',
+ 'xchoicesi',
+ 'xclick',
+ 'xcos',
+ 'xcosAddToolsMenu',
+ 'xcosConfigureXmlFile',
+ 'xcosDiagramToScilab',
+ 'xcosPalCategoryAdd',
+ 'xcosPalDelete',
+ 'xcosPalDisable',
+ 'xcosPalEnable',
+ 'xcosPalGenerateIcon',
+ 'xcosPalGet',
+ 'xcosPalLoad',
+ 'xcosPalMove',
+ 'xcosSimulationStarted',
+ 'xcosUpdateBlock',
+ 'xdel',
+ 'xend',
+ 'xfarc',
+ 'xfarcs',
+ 'xfpoly',
+ 'xfpolys',
+ 'xfrect',
+ 'xget',
+ 'xgetmouse',
+ 'xgraduate',
+ 'xgrid',
+ 'xinit',
+ 'xlfont',
+ 'xls_open',
+ 'xls_read',
+ 'xmlAddNs',
+ 'xmlAppend',
+ 'xmlAsNumber',
+ 'xmlAsText',
+ 'xmlDTD',
+ 'xmlDelete',
+ 'xmlDocument',
+ 'xmlDump',
+ 'xmlElement',
+ 'xmlFormat',
+ 'xmlGetNsByHref',
+ 'xmlGetNsByPrefix',
+ 'xmlGetOpenDocs',
+ 'xmlIsValidObject',
+ 'xmlName',
+ 'xmlNs',
+ 'xmlRead',
+ 'xmlReadStr',
+ 'xmlRelaxNG',
+ 'xmlRemove',
+ 'xmlSchema',
+ 'xmlSetAttributes',
+ 'xmlValidate',
+ 'xmlWrite',
+ 'xmlXPath',
+ 'xname',
+ 'xpause',
+ 'xpoly',
+ 'xpolys',
+ 'xrect',
+ 'xrects',
+ 'xs2bmp',
+ 'xs2emf',
+ 'xs2eps',
+ 'xs2gif',
+ 'xs2jpg',
+ 'xs2pdf',
+ 'xs2png',
+ 'xs2ppm',
+ 'xs2ps',
+ 'xs2svg',
+ 'xsegs',
+ 'xset',
+ 'xstring',
+ 'xstringb',
+ 'xtitle',
+ 'zeros',
+ 'znaupd',
+ 'zneupd',
+ 'zoom_rect',
+)
+
+macros_kw = (
+ '!_deff_wrapper',
+ '%0_i_st',
+ '%3d_i_h',
+ '%Block_xcosUpdateBlock',
+ '%TNELDER_p',
+ '%TNELDER_string',
+ '%TNMPLOT_p',
+ '%TNMPLOT_string',
+ '%TOPTIM_p',
+ '%TOPTIM_string',
+ '%TSIMPLEX_p',
+ '%TSIMPLEX_string',
+ '%_EVoid_p',
+ '%_gsort',
+ '%_listvarinfile',
+ '%_rlist',
+ '%_save',
+ '%_sodload',
+ '%_strsplit',
+ '%_unwrap',
+ '%ar_p',
+ '%asn',
+ '%b_a_b',
+ '%b_a_s',
+ '%b_c_s',
+ '%b_c_spb',
+ '%b_cumprod',
+ '%b_cumsum',
+ '%b_d_s',
+ '%b_diag',
+ '%b_e',
+ '%b_f_s',
+ '%b_f_spb',
+ '%b_g_s',
+ '%b_g_spb',
+ '%b_grand',
+ '%b_h_s',
+ '%b_h_spb',
+ '%b_i_b',
+ '%b_i_ce',
+ '%b_i_h',
+ '%b_i_hm',
+ '%b_i_s',
+ '%b_i_sp',
+ '%b_i_spb',
+ '%b_i_st',
+ '%b_iconvert',
+ '%b_l_b',
+ '%b_l_s',
+ '%b_m_b',
+ '%b_m_s',
+ '%b_matrix',
+ '%b_n_hm',
+ '%b_o_hm',
+ '%b_p_s',
+ '%b_prod',
+ '%b_r_b',
+ '%b_r_s',
+ '%b_s_b',
+ '%b_s_s',
+ '%b_string',
+ '%b_sum',
+ '%b_tril',
+ '%b_triu',
+ '%b_x_b',
+ '%b_x_s',
+ '%bicg',
+ '%bicgstab',
+ '%c_a_c',
+ '%c_b_c',
+ '%c_b_s',
+ '%c_diag',
+ '%c_dsearch',
+ '%c_e',
+ '%c_eye',
+ '%c_f_s',
+ '%c_grand',
+ '%c_i_c',
+ '%c_i_ce',
+ '%c_i_h',
+ '%c_i_hm',
+ '%c_i_lss',
+ '%c_i_r',
+ '%c_i_s',
+ '%c_i_st',
+ '%c_matrix',
+ '%c_n_l',
+ '%c_n_st',
+ '%c_o_l',
+ '%c_o_st',
+ '%c_ones',
+ '%c_rand',
+ '%c_tril',
+ '%c_triu',
+ '%cblock_c_cblock',
+ '%cblock_c_s',
+ '%cblock_e',
+ '%cblock_f_cblock',
+ '%cblock_p',
+ '%cblock_size',
+ '%ce_6',
+ '%ce_c_ce',
+ '%ce_e',
+ '%ce_f_ce',
+ '%ce_i_ce',
+ '%ce_i_s',
+ '%ce_i_st',
+ '%ce_matrix',
+ '%ce_p',
+ '%ce_size',
+ '%ce_string',
+ '%ce_t',
+ '%cgs',
+ '%champdat_i_h',
+ '%choose',
+ '%diagram_xcos',
+ '%dir_p',
+ '%fptr_i_st',
+ '%grand_perm',
+ '%grayplot_i_h',
+ '%h_i_st',
+ '%hmS_k_hmS_generic',
+ '%hm_1_hm',
+ '%hm_1_s',
+ '%hm_2_hm',
+ '%hm_2_s',
+ '%hm_3_hm',
+ '%hm_3_s',
+ '%hm_4_hm',
+ '%hm_4_s',
+ '%hm_5',
+ '%hm_a_hm',
+ '%hm_a_r',
+ '%hm_a_s',
+ '%hm_abs',
+ '%hm_and',
+ '%hm_bool2s',
+ '%hm_c_hm',
+ '%hm_ceil',
+ '%hm_conj',
+ '%hm_cos',
+ '%hm_cumprod',
+ '%hm_cumsum',
+ '%hm_d_hm',
+ '%hm_d_s',
+ '%hm_degree',
+ '%hm_dsearch',
+ '%hm_e',
+ '%hm_exp',
+ '%hm_eye',
+ '%hm_f_hm',
+ '%hm_find',
+ '%hm_floor',
+ '%hm_g_hm',
+ '%hm_grand',
+ '%hm_gsort',
+ '%hm_h_hm',
+ '%hm_i_b',
+ '%hm_i_ce',
+ '%hm_i_h',
+ '%hm_i_hm',
+ '%hm_i_i',
+ '%hm_i_p',
+ '%hm_i_r',
+ '%hm_i_s',
+ '%hm_i_st',
+ '%hm_iconvert',
+ '%hm_imag',
+ '%hm_int',
+ '%hm_isnan',
+ '%hm_isreal',
+ '%hm_j_hm',
+ '%hm_j_s',
+ '%hm_k_hm',
+ '%hm_k_s',
+ '%hm_log',
+ '%hm_m_p',
+ '%hm_m_r',
+ '%hm_m_s',
+ '%hm_matrix',
+ '%hm_max',
+ '%hm_mean',
+ '%hm_median',
+ '%hm_min',
+ '%hm_n_b',
+ '%hm_n_c',
+ '%hm_n_hm',
+ '%hm_n_i',
+ '%hm_n_p',
+ '%hm_n_s',
+ '%hm_o_b',
+ '%hm_o_c',
+ '%hm_o_hm',
+ '%hm_o_i',
+ '%hm_o_p',
+ '%hm_o_s',
+ '%hm_ones',
+ '%hm_or',
+ '%hm_p',
+ '%hm_prod',
+ '%hm_q_hm',
+ '%hm_r_s',
+ '%hm_rand',
+ '%hm_real',
+ '%hm_round',
+ '%hm_s',
+ '%hm_s_hm',
+ '%hm_s_r',
+ '%hm_s_s',
+ '%hm_sign',
+ '%hm_sin',
+ '%hm_size',
+ '%hm_sqrt',
+ '%hm_stdev',
+ '%hm_string',
+ '%hm_sum',
+ '%hm_x_hm',
+ '%hm_x_p',
+ '%hm_x_s',
+ '%hm_zeros',
+ '%i_1_s',
+ '%i_2_s',
+ '%i_3_s',
+ '%i_4_s',
+ '%i_Matplot',
+ '%i_a_i',
+ '%i_a_s',
+ '%i_and',
+ '%i_ascii',
+ '%i_b_s',
+ '%i_bezout',
+ '%i_champ',
+ '%i_champ1',
+ '%i_contour',
+ '%i_contour2d',
+ '%i_d_i',
+ '%i_d_s',
+ '%i_dsearch',
+ '%i_e',
+ '%i_fft',
+ '%i_g_i',
+ '%i_gcd',
+ '%i_grand',
+ '%i_h_i',
+ '%i_i_ce',
+ '%i_i_h',
+ '%i_i_hm',
+ '%i_i_i',
+ '%i_i_s',
+ '%i_i_st',
+ '%i_j_i',
+ '%i_j_s',
+ '%i_l_s',
+ '%i_lcm',
+ '%i_length',
+ '%i_m_i',
+ '%i_m_s',
+ '%i_mfprintf',
+ '%i_mprintf',
+ '%i_msprintf',
+ '%i_n_s',
+ '%i_o_s',
+ '%i_or',
+ '%i_p_i',
+ '%i_p_s',
+ '%i_plot2d',
+ '%i_plot2d1',
+ '%i_plot2d2',
+ '%i_q_s',
+ '%i_r_i',
+ '%i_r_s',
+ '%i_round',
+ '%i_s_i',
+ '%i_s_s',
+ '%i_sign',
+ '%i_string',
+ '%i_x_i',
+ '%i_x_s',
+ '%ip_a_s',
+ '%ip_i_st',
+ '%ip_m_s',
+ '%ip_n_ip',
+ '%ip_o_ip',
+ '%ip_p',
+ '%ip_part',
+ '%ip_s_s',
+ '%ip_string',
+ '%k',
+ '%l_i_h',
+ '%l_i_s',
+ '%l_i_st',
+ '%l_isequal',
+ '%l_n_c',
+ '%l_n_l',
+ '%l_n_m',
+ '%l_n_p',
+ '%l_n_s',
+ '%l_n_st',
+ '%l_o_c',
+ '%l_o_l',
+ '%l_o_m',
+ '%l_o_p',
+ '%l_o_s',
+ '%l_o_st',
+ '%lss_a_lss',
+ '%lss_a_p',
+ '%lss_a_r',
+ '%lss_a_s',
+ '%lss_c_lss',
+ '%lss_c_p',
+ '%lss_c_r',
+ '%lss_c_s',
+ '%lss_e',
+ '%lss_eye',
+ '%lss_f_lss',
+ '%lss_f_p',
+ '%lss_f_r',
+ '%lss_f_s',
+ '%lss_i_ce',
+ '%lss_i_lss',
+ '%lss_i_p',
+ '%lss_i_r',
+ '%lss_i_s',
+ '%lss_i_st',
+ '%lss_inv',
+ '%lss_l_lss',
+ '%lss_l_p',
+ '%lss_l_r',
+ '%lss_l_s',
+ '%lss_m_lss',
+ '%lss_m_p',
+ '%lss_m_r',
+ '%lss_m_s',
+ '%lss_n_lss',
+ '%lss_n_p',
+ '%lss_n_r',
+ '%lss_n_s',
+ '%lss_norm',
+ '%lss_o_lss',
+ '%lss_o_p',
+ '%lss_o_r',
+ '%lss_o_s',
+ '%lss_ones',
+ '%lss_r_lss',
+ '%lss_r_p',
+ '%lss_r_r',
+ '%lss_r_s',
+ '%lss_rand',
+ '%lss_s',
+ '%lss_s_lss',
+ '%lss_s_p',
+ '%lss_s_r',
+ '%lss_s_s',
+ '%lss_size',
+ '%lss_t',
+ '%lss_v_lss',
+ '%lss_v_p',
+ '%lss_v_r',
+ '%lss_v_s',
+ '%lt_i_s',
+ '%m_n_l',
+ '%m_o_l',
+ '%mc_i_h',
+ '%mc_i_s',
+ '%mc_i_st',
+ '%mc_n_st',
+ '%mc_o_st',
+ '%mc_string',
+ '%mps_p',
+ '%mps_string',
+ '%msp_a_s',
+ '%msp_abs',
+ '%msp_e',
+ '%msp_find',
+ '%msp_i_s',
+ '%msp_i_st',
+ '%msp_length',
+ '%msp_m_s',
+ '%msp_maxi',
+ '%msp_n_msp',
+ '%msp_nnz',
+ '%msp_o_msp',
+ '%msp_p',
+ '%msp_sparse',
+ '%msp_spones',
+ '%msp_t',
+ '%p_a_lss',
+ '%p_a_r',
+ '%p_c_lss',
+ '%p_c_r',
+ '%p_cumprod',
+ '%p_cumsum',
+ '%p_d_p',
+ '%p_d_r',
+ '%p_d_s',
+ '%p_det',
+ '%p_e',
+ '%p_f_lss',
+ '%p_f_r',
+ '%p_grand',
+ '%p_i_ce',
+ '%p_i_h',
+ '%p_i_hm',
+ '%p_i_lss',
+ '%p_i_p',
+ '%p_i_r',
+ '%p_i_s',
+ '%p_i_st',
+ '%p_inv',
+ '%p_j_s',
+ '%p_k_p',
+ '%p_k_r',
+ '%p_k_s',
+ '%p_l_lss',
+ '%p_l_p',
+ '%p_l_r',
+ '%p_l_s',
+ '%p_m_hm',
+ '%p_m_lss',
+ '%p_m_r',
+ '%p_matrix',
+ '%p_n_l',
+ '%p_n_lss',
+ '%p_n_r',
+ '%p_o_l',
+ '%p_o_lss',
+ '%p_o_r',
+ '%p_o_sp',
+ '%p_p_s',
+ '%p_part',
+ '%p_prod',
+ '%p_q_p',
+ '%p_q_r',
+ '%p_q_s',
+ '%p_r_lss',
+ '%p_r_p',
+ '%p_r_r',
+ '%p_r_s',
+ '%p_s_lss',
+ '%p_s_r',
+ '%p_simp',
+ '%p_string',
+ '%p_sum',
+ '%p_v_lss',
+ '%p_v_p',
+ '%p_v_r',
+ '%p_v_s',
+ '%p_x_hm',
+ '%p_x_r',
+ '%p_y_p',
+ '%p_y_r',
+ '%p_y_s',
+ '%p_z_p',
+ '%p_z_r',
+ '%p_z_s',
+ '%pcg',
+ '%plist_p',
+ '%plist_string',
+ '%r_0',
+ '%r_a_hm',
+ '%r_a_lss',
+ '%r_a_p',
+ '%r_a_r',
+ '%r_a_s',
+ '%r_c_lss',
+ '%r_c_p',
+ '%r_c_r',
+ '%r_c_s',
+ '%r_clean',
+ '%r_cumprod',
+ '%r_cumsum',
+ '%r_d_p',
+ '%r_d_r',
+ '%r_d_s',
+ '%r_det',
+ '%r_diag',
+ '%r_e',
+ '%r_eye',
+ '%r_f_lss',
+ '%r_f_p',
+ '%r_f_r',
+ '%r_f_s',
+ '%r_i_ce',
+ '%r_i_hm',
+ '%r_i_lss',
+ '%r_i_p',
+ '%r_i_r',
+ '%r_i_s',
+ '%r_i_st',
+ '%r_inv',
+ '%r_j_s',
+ '%r_k_p',
+ '%r_k_r',
+ '%r_k_s',
+ '%r_l_lss',
+ '%r_l_p',
+ '%r_l_r',
+ '%r_l_s',
+ '%r_m_hm',
+ '%r_m_lss',
+ '%r_m_p',
+ '%r_m_r',
+ '%r_m_s',
+ '%r_matrix',
+ '%r_n_lss',
+ '%r_n_p',
+ '%r_n_r',
+ '%r_n_s',
+ '%r_norm',
+ '%r_o_lss',
+ '%r_o_p',
+ '%r_o_r',
+ '%r_o_s',
+ '%r_ones',
+ '%r_p',
+ '%r_p_s',
+ '%r_prod',
+ '%r_q_p',
+ '%r_q_r',
+ '%r_q_s',
+ '%r_r_lss',
+ '%r_r_p',
+ '%r_r_r',
+ '%r_r_s',
+ '%r_rand',
+ '%r_s',
+ '%r_s_hm',
+ '%r_s_lss',
+ '%r_s_p',
+ '%r_s_r',
+ '%r_s_s',
+ '%r_simp',
+ '%r_size',
+ '%r_string',
+ '%r_sum',
+ '%r_t',
+ '%r_tril',
+ '%r_triu',
+ '%r_v_lss',
+ '%r_v_p',
+ '%r_v_r',
+ '%r_v_s',
+ '%r_varn',
+ '%r_x_p',
+ '%r_x_r',
+ '%r_x_s',
+ '%r_y_p',
+ '%r_y_r',
+ '%r_y_s',
+ '%r_z_p',
+ '%r_z_r',
+ '%r_z_s',
+ '%s_1_hm',
+ '%s_1_i',
+ '%s_2_hm',
+ '%s_2_i',
+ '%s_3_hm',
+ '%s_3_i',
+ '%s_4_hm',
+ '%s_4_i',
+ '%s_5',
+ '%s_a_b',
+ '%s_a_hm',
+ '%s_a_i',
+ '%s_a_ip',
+ '%s_a_lss',
+ '%s_a_msp',
+ '%s_a_r',
+ '%s_a_sp',
+ '%s_and',
+ '%s_b_i',
+ '%s_b_s',
+ '%s_bezout',
+ '%s_c_b',
+ '%s_c_cblock',
+ '%s_c_lss',
+ '%s_c_r',
+ '%s_c_sp',
+ '%s_d_b',
+ '%s_d_i',
+ '%s_d_p',
+ '%s_d_r',
+ '%s_d_sp',
+ '%s_e',
+ '%s_f_b',
+ '%s_f_cblock',
+ '%s_f_lss',
+ '%s_f_r',
+ '%s_f_sp',
+ '%s_g_b',
+ '%s_g_s',
+ '%s_gcd',
+ '%s_grand',
+ '%s_h_b',
+ '%s_h_s',
+ '%s_i_b',
+ '%s_i_c',
+ '%s_i_ce',
+ '%s_i_h',
+ '%s_i_hm',
+ '%s_i_i',
+ '%s_i_lss',
+ '%s_i_p',
+ '%s_i_r',
+ '%s_i_s',
+ '%s_i_sp',
+ '%s_i_spb',
+ '%s_i_st',
+ '%s_j_i',
+ '%s_k_hm',
+ '%s_k_p',
+ '%s_k_r',
+ '%s_k_sp',
+ '%s_l_b',
+ '%s_l_hm',
+ '%s_l_i',
+ '%s_l_lss',
+ '%s_l_p',
+ '%s_l_r',
+ '%s_l_s',
+ '%s_l_sp',
+ '%s_lcm',
+ '%s_m_b',
+ '%s_m_hm',
+ '%s_m_i',
+ '%s_m_ip',
+ '%s_m_lss',
+ '%s_m_msp',
+ '%s_m_r',
+ '%s_matrix',
+ '%s_n_hm',
+ '%s_n_i',
+ '%s_n_l',
+ '%s_n_lss',
+ '%s_n_r',
+ '%s_n_st',
+ '%s_o_hm',
+ '%s_o_i',
+ '%s_o_l',
+ '%s_o_lss',
+ '%s_o_r',
+ '%s_o_st',
+ '%s_or',
+ '%s_p_b',
+ '%s_p_i',
+ '%s_pow',
+ '%s_q_hm',
+ '%s_q_i',
+ '%s_q_p',
+ '%s_q_r',
+ '%s_q_sp',
+ '%s_r_b',
+ '%s_r_i',
+ '%s_r_lss',
+ '%s_r_p',
+ '%s_r_r',
+ '%s_r_s',
+ '%s_r_sp',
+ '%s_s_b',
+ '%s_s_hm',
+ '%s_s_i',
+ '%s_s_ip',
+ '%s_s_lss',
+ '%s_s_r',
+ '%s_s_sp',
+ '%s_simp',
+ '%s_v_lss',
+ '%s_v_p',
+ '%s_v_r',
+ '%s_v_s',
+ '%s_x_b',
+ '%s_x_hm',
+ '%s_x_i',
+ '%s_x_r',
+ '%s_y_p',
+ '%s_y_r',
+ '%s_y_sp',
+ '%s_z_p',
+ '%s_z_r',
+ '%s_z_sp',
+ '%sn',
+ '%sp_a_s',
+ '%sp_a_sp',
+ '%sp_and',
+ '%sp_c_s',
+ '%sp_ceil',
+ '%sp_conj',
+ '%sp_cos',
+ '%sp_cumprod',
+ '%sp_cumsum',
+ '%sp_d_s',
+ '%sp_d_sp',
+ '%sp_det',
+ '%sp_diag',
+ '%sp_e',
+ '%sp_exp',
+ '%sp_f_s',
+ '%sp_floor',
+ '%sp_grand',
+ '%sp_gsort',
+ '%sp_i_ce',
+ '%sp_i_h',
+ '%sp_i_s',
+ '%sp_i_sp',
+ '%sp_i_st',
+ '%sp_int',
+ '%sp_inv',
+ '%sp_k_s',
+ '%sp_k_sp',
+ '%sp_l_s',
+ '%sp_l_sp',
+ '%sp_length',
+ '%sp_max',
+ '%sp_min',
+ '%sp_norm',
+ '%sp_or',
+ '%sp_p_s',
+ '%sp_prod',
+ '%sp_q_s',
+ '%sp_q_sp',
+ '%sp_r_s',
+ '%sp_r_sp',
+ '%sp_round',
+ '%sp_s_s',
+ '%sp_s_sp',
+ '%sp_sin',
+ '%sp_sqrt',
+ '%sp_string',
+ '%sp_sum',
+ '%sp_tril',
+ '%sp_triu',
+ '%sp_y_s',
+ '%sp_y_sp',
+ '%sp_z_s',
+ '%sp_z_sp',
+ '%spb_and',
+ '%spb_c_b',
+ '%spb_cumprod',
+ '%spb_cumsum',
+ '%spb_diag',
+ '%spb_e',
+ '%spb_f_b',
+ '%spb_g_b',
+ '%spb_g_spb',
+ '%spb_h_b',
+ '%spb_h_spb',
+ '%spb_i_b',
+ '%spb_i_ce',
+ '%spb_i_h',
+ '%spb_i_st',
+ '%spb_or',
+ '%spb_prod',
+ '%spb_sum',
+ '%spb_tril',
+ '%spb_triu',
+ '%st_6',
+ '%st_c_st',
+ '%st_e',
+ '%st_f_st',
+ '%st_i_b',
+ '%st_i_c',
+ '%st_i_fptr',
+ '%st_i_h',
+ '%st_i_i',
+ '%st_i_ip',
+ '%st_i_lss',
+ '%st_i_msp',
+ '%st_i_p',
+ '%st_i_r',
+ '%st_i_s',
+ '%st_i_sp',
+ '%st_i_spb',
+ '%st_i_st',
+ '%st_matrix',
+ '%st_n_c',
+ '%st_n_l',
+ '%st_n_mc',
+ '%st_n_p',
+ '%st_n_s',
+ '%st_o_c',
+ '%st_o_l',
+ '%st_o_mc',
+ '%st_o_p',
+ '%st_o_s',
+ '%st_o_tl',
+ '%st_p',
+ '%st_size',
+ '%st_string',
+ '%st_t',
+ '%ticks_i_h',
+ '%xls_e',
+ '%xls_p',
+ '%xlssheet_e',
+ '%xlssheet_p',
+ '%xlssheet_size',
+ '%xlssheet_string',
+ 'DominationRank',
+ 'G_make',
+ 'IsAScalar',
+ 'NDcost',
+ 'OS_Version',
+ 'PlotSparse',
+ 'ReadHBSparse',
+ 'TCL_CreateSlave',
+ 'abcd',
+ 'abinv',
+ 'accept_func_default',
+ 'accept_func_vfsa',
+ 'acf',
+ 'acosd',
+ 'acosh',
+ 'acoshm',
+ 'acosm',
+ 'acot',
+ 'acotd',
+ 'acoth',
+ 'acsc',
+ 'acscd',
+ 'acsch',
+ 'add_demo',
+ 'add_help_chapter',
+ 'add_module_help_chapter',
+ 'add_param',
+ 'add_profiling',
+ 'adj2sp',
+ 'aff2ab',
+ 'ana_style',
+ 'analpf',
+ 'analyze',
+ 'aplat',
+ 'arhnk',
+ 'arl2',
+ 'arma2p',
+ 'arma2ss',
+ 'armac',
+ 'armax',
+ 'armax1',
+ 'arobasestring2strings',
+ 'arsimul',
+ 'ascii2string',
+ 'asciimat',
+ 'asec',
+ 'asecd',
+ 'asech',
+ 'asind',
+ 'asinh',
+ 'asinhm',
+ 'asinm',
+ 'assert_checkalmostequal',
+ 'assert_checkequal',
+ 'assert_checkerror',
+ 'assert_checkfalse',
+ 'assert_checkfilesequal',
+ 'assert_checktrue',
+ 'assert_comparecomplex',
+ 'assert_computedigits',
+ 'assert_cond2reltol',
+ 'assert_cond2reqdigits',
+ 'assert_generror',
+ 'atand',
+ 'atanh',
+ 'atanhm',
+ 'atanm',
+ 'atomsAutoload',
+ 'atomsAutoloadAdd',
+ 'atomsAutoloadDel',
+ 'atomsAutoloadList',
+ 'atomsCategoryList',
+ 'atomsCheckModule',
+ 'atomsDepTreeShow',
+ 'atomsGetConfig',
+ 'atomsGetInstalled',
+ 'atomsGetInstalledPath',
+ 'atomsGetLoaded',
+ 'atomsGetLoadedPath',
+ 'atomsInstall',
+ 'atomsIsInstalled',
+ 'atomsIsLoaded',
+ 'atomsList',
+ 'atomsLoad',
+ 'atomsQuit',
+ 'atomsRemove',
+ 'atomsRepositoryAdd',
+ 'atomsRepositoryDel',
+ 'atomsRepositoryList',
+ 'atomsRestoreConfig',
+ 'atomsSaveConfig',
+ 'atomsSearch',
+ 'atomsSetConfig',
+ 'atomsShow',
+ 'atomsSystemInit',
+ 'atomsSystemUpdate',
+ 'atomsTest',
+ 'atomsUpdate',
+ 'atomsVersion',
+ 'augment',
+ 'auread',
+ 'auwrite',
+ 'balreal',
+ 'bench_run',
+ 'bilin',
+ 'bilt',
+ 'bin2dec',
+ 'binomial',
+ 'bitand',
+ 'bitcmp',
+ 'bitget',
+ 'bitor',
+ 'bitset',
+ 'bitxor',
+ 'black',
+ 'blanks',
+ 'bloc2exp',
+ 'bloc2ss',
+ 'block_parameter_error',
+ 'bode',
+ 'bode_asymp',
+ 'bstap',
+ 'buttmag',
+ 'bvodeS',
+ 'bytecode',
+ 'bytecodewalk',
+ 'cainv',
+ 'calendar',
+ 'calerf',
+ 'calfrq',
+ 'canon',
+ 'casc',
+ 'cat',
+ 'cat_code',
+ 'cb_m2sci_gui',
+ 'ccontrg',
+ 'cell',
+ 'cell2mat',
+ 'cellstr',
+ 'center',
+ 'cepstrum',
+ 'cfspec',
+ 'char',
+ 'chart',
+ 'cheb1mag',
+ 'cheb2mag',
+ 'check_gateways',
+ 'check_modules_xml',
+ 'check_versions',
+ 'chepol',
+ 'chfact',
+ 'chsolve',
+ 'classmarkov',
+ 'clean_help',
+ 'clock',
+ 'cls2dls',
+ 'cmb_lin',
+ 'cmndred',
+ 'cmoment',
+ 'coding_ga_binary',
+ 'coding_ga_identity',
+ 'coff',
+ 'coffg',
+ 'colcomp',
+ 'colcompr',
+ 'colinout',
+ 'colregul',
+ 'companion',
+ 'complex',
+ 'compute_initial_temp',
+ 'cond',
+ 'cond2sp',
+ 'condestsp',
+ 'configure_msifort',
+ 'configure_msvc',
+ 'conjgrad',
+ 'cont_frm',
+ 'cont_mat',
+ 'contrss',
+ 'conv',
+ 'convert_to_float',
+ 'convertindex',
+ 'convol',
+ 'convol2d',
+ 'copfac',
+ 'correl',
+ 'cosd',
+ 'cosh',
+ 'coshm',
+ 'cosm',
+ 'cotd',
+ 'cotg',
+ 'coth',
+ 'cothm',
+ 'cov',
+ 'covar',
+ 'createXConfiguration',
+ 'createfun',
+ 'createstruct',
+ 'cross',
+ 'crossover_ga_binary',
+ 'crossover_ga_default',
+ 'csc',
+ 'cscd',
+ 'csch',
+ 'csgn',
+ 'csim',
+ 'cspect',
+ 'ctr_gram',
+ 'czt',
+ 'dae',
+ 'daeoptions',
+ 'damp',
+ 'datafit',
+ 'date',
+ 'datenum',
+ 'datevec',
+ 'dbphi',
+ 'dcf',
+ 'ddp',
+ 'dec2bin',
+ 'dec2hex',
+ 'dec2oct',
+ 'del_help_chapter',
+ 'del_module_help_chapter',
+ 'demo_begin',
+ 'demo_choose',
+ 'demo_compiler',
+ 'demo_end',
+ 'demo_file_choice',
+ 'demo_folder_choice',
+ 'demo_function_choice',
+ 'demo_gui',
+ 'demo_run',
+ 'demo_viewCode',
+ 'denom',
+ 'derivat',
+ 'derivative',
+ 'des2ss',
+ 'des2tf',
+ 'detectmsifort64tools',
+ 'detectmsvc64tools',
+ 'determ',
+ 'detr',
+ 'detrend',
+ 'devtools_run_builder',
+ 'dhnorm',
+ 'diff',
+ 'diophant',
+ 'dir',
+ 'dirname',
+ 'dispfiles',
+ 'dllinfo',
+ 'dscr',
+ 'dsimul',
+ 'dt_ility',
+ 'dtsi',
+ 'edit',
+ 'edit_error',
+ 'editor',
+ 'eigenmarkov',
+ 'eigs',
+ 'ell1mag',
+ 'enlarge_shape',
+ 'entropy',
+ 'eomday',
+ 'epred',
+ 'eqfir',
+ 'eqiir',
+ 'equil',
+ 'equil1',
+ 'erfinv',
+ 'etime',
+ 'eval',
+ 'evans',
+ 'evstr',
+ 'example_run',
+ 'expression2code',
+ 'extract_help_examples',
+ 'factor',
+ 'factorial',
+ 'factors',
+ 'faurre',
+ 'ffilt',
+ 'fft2',
+ 'fftshift',
+ 'fieldnames',
+ 'filt_sinc',
+ 'filter',
+ 'findABCD',
+ 'findAC',
+ 'findBDK',
+ 'findR',
+ 'find_freq',
+ 'find_links',
+ 'find_scicos_version',
+ 'findm',
+ 'findmsifortcompiler',
+ 'findmsvccompiler',
+ 'findx0BD',
+ 'firstnonsingleton',
+ 'fix',
+ 'fixedpointgcd',
+ 'flipdim',
+ 'flts',
+ 'fminsearch',
+ 'formatBlackTip',
+ 'formatBodeMagTip',
+ 'formatBodePhaseTip',
+ 'formatGainplotTip',
+ 'formatHallModuleTip',
+ 'formatHallPhaseTip',
+ 'formatNicholsGainTip',
+ 'formatNicholsPhaseTip',
+ 'formatNyquistTip',
+ 'formatPhaseplotTip',
+ 'formatSgridDampingTip',
+ 'formatSgridFreqTip',
+ 'formatZgridDampingTip',
+ 'formatZgridFreqTip',
+ 'format_txt',
+ 'fourplan',
+ 'frep2tf',
+ 'freson',
+ 'frfit',
+ 'frmag',
+ 'fseek_origin',
+ 'fsfirlin',
+ 'fspec',
+ 'fspecg',
+ 'fstabst',
+ 'ftest',
+ 'ftuneq',
+ 'fullfile',
+ 'fullrf',
+ 'fullrfk',
+ 'fun2string',
+ 'g_margin',
+ 'gainplot',
+ 'gamitg',
+ 'gcare',
+ 'gcd',
+ 'gencompilationflags_unix',
+ 'generateBlockImage',
+ 'generateBlockImages',
+ 'generic_i_ce',
+ 'generic_i_h',
+ 'generic_i_hm',
+ 'generic_i_s',
+ 'generic_i_st',
+ 'genlib',
+ 'genmarkov',
+ 'geomean',
+ 'getDiagramVersion',
+ 'getModelicaPath',
+ 'getPreferencesValue',
+ 'get_file_path',
+ 'get_function_path',
+ 'get_param',
+ 'get_profile',
+ 'get_scicos_version',
+ 'getd',
+ 'getscilabkeywords',
+ 'getshell',
+ 'gettklib',
+ 'gfare',
+ 'gfrancis',
+ 'givens',
+ 'glever',
+ 'gmres',
+ 'group',
+ 'gschur',
+ 'gspec',
+ 'gtild',
+ 'h2norm',
+ 'h_cl',
+ 'h_inf',
+ 'h_inf_st',
+ 'h_norm',
+ 'hallchart',
+ 'halt',
+ 'hank',
+ 'hankelsv',
+ 'harmean',
+ 'haveacompiler',
+ 'head_comments',
+ 'help_from_sci',
+ 'help_skeleton',
+ 'hermit',
+ 'hex2dec',
+ 'hilb',
+ 'hilbert',
+ 'histc',
+ 'horner',
+ 'householder',
+ 'hrmt',
+ 'htrianr',
+ 'hypermat',
+ 'idct',
+ 'idst',
+ 'ifft',
+ 'ifftshift',
+ 'iir',
+ 'iirgroup',
+ 'iirlp',
+ 'iirmod',
+ 'ilib_build',
+ 'ilib_build_jar',
+ 'ilib_compile',
+ 'ilib_for_link',
+ 'ilib_gen_Make',
+ 'ilib_gen_Make_unix',
+ 'ilib_gen_cleaner',
+ 'ilib_gen_gateway',
+ 'ilib_gen_loader',
+ 'ilib_include_flag',
+ 'ilib_mex_build',
+ 'im_inv',
+ 'importScicosDiagram',
+ 'importScicosPal',
+ 'importXcosDiagram',
+ 'imrep2ss',
+ 'ind2sub',
+ 'inistate',
+ 'init_ga_default',
+ 'init_param',
+ 'initial_scicos_tables',
+ 'input',
+ 'instruction2code',
+ 'intc',
+ 'intdec',
+ 'integrate',
+ 'interp1',
+ 'interpln',
+ 'intersect',
+ 'intl',
+ 'intsplin',
+ 'inttrap',
+ 'inv_coeff',
+ 'invr',
+ 'invrs',
+ 'invsyslin',
+ 'iqr',
+ 'isLeapYear',
+ 'is_absolute_path',
+ 'is_param',
+ 'iscell',
+ 'iscellstr',
+ 'iscolumn',
+ 'isempty',
+ 'isfield',
+ 'isinf',
+ 'ismatrix',
+ 'isnan',
+ 'isrow',
+ 'isscalar',
+ 'issparse',
+ 'issquare',
+ 'isstruct',
+ 'isvector',
+ 'jmat',
+ 'justify',
+ 'kalm',
+ 'karmarkar',
+ 'kernel',
+ 'kpure',
+ 'krac2',
+ 'kroneck',
+ 'lattn',
+ 'lattp',
+ 'launchtest',
+ 'lcf',
+ 'lcm',
+ 'lcmdiag',
+ 'leastsq',
+ 'leqe',
+ 'leqr',
+ 'lev',
+ 'levin',
+ 'lex_sort',
+ 'lft',
+ 'lin',
+ 'lin2mu',
+ 'lincos',
+ 'lindquist',
+ 'linf',
+ 'linfn',
+ 'linsolve',
+ 'linspace',
+ 'list2vec',
+ 'list_param',
+ 'listfiles',
+ 'listfunctions',
+ 'listvarinfile',
+ 'lmisolver',
+ 'lmitool',
+ 'loadXcosLibs',
+ 'loadmatfile',
+ 'loadwave',
+ 'log10',
+ 'log2',
+ 'logm',
+ 'logspace',
+ 'lqe',
+ 'lqg',
+ 'lqg2stan',
+ 'lqg_ltr',
+ 'lqr',
+ 'ls',
+ 'lyap',
+ 'm2sci_gui',
+ 'm_circle',
+ 'macglov',
+ 'macrovar',
+ 'mad',
+ 'makecell',
+ 'manedit',
+ 'mapsound',
+ 'markp2ss',
+ 'matfile2sci',
+ 'mdelete',
+ 'mean',
+ 'meanf',
+ 'median',
+ 'members',
+ 'mese',
+ 'meshgrid',
+ 'mfft',
+ 'mfile2sci',
+ 'minreal',
+ 'minss',
+ 'mkdir',
+ 'modulo',
+ 'moment',
+ 'mrfit',
+ 'msd',
+ 'mstr2sci',
+ 'mtlb',
+ 'mtlb_0',
+ 'mtlb_a',
+ 'mtlb_all',
+ 'mtlb_any',
+ 'mtlb_axes',
+ 'mtlb_axis',
+ 'mtlb_beta',
+ 'mtlb_box',
+ 'mtlb_choices',
+ 'mtlb_close',
+ 'mtlb_colordef',
+ 'mtlb_cond',
+ 'mtlb_cov',
+ 'mtlb_cumprod',
+ 'mtlb_cumsum',
+ 'mtlb_dec2hex',
+ 'mtlb_delete',
+ 'mtlb_diag',
+ 'mtlb_diff',
+ 'mtlb_dir',
+ 'mtlb_double',
+ 'mtlb_e',
+ 'mtlb_echo',
+ 'mtlb_error',
+ 'mtlb_eval',
+ 'mtlb_exist',
+ 'mtlb_eye',
+ 'mtlb_false',
+ 'mtlb_fft',
+ 'mtlb_fftshift',
+ 'mtlb_filter',
+ 'mtlb_find',
+ 'mtlb_findstr',
+ 'mtlb_fliplr',
+ 'mtlb_fopen',
+ 'mtlb_format',
+ 'mtlb_fprintf',
+ 'mtlb_fread',
+ 'mtlb_fscanf',
+ 'mtlb_full',
+ 'mtlb_fwrite',
+ 'mtlb_get',
+ 'mtlb_grid',
+ 'mtlb_hold',
+ 'mtlb_i',
+ 'mtlb_ifft',
+ 'mtlb_image',
+ 'mtlb_imp',
+ 'mtlb_int16',
+ 'mtlb_int32',
+ 'mtlb_int8',
+ 'mtlb_is',
+ 'mtlb_isa',
+ 'mtlb_isfield',
+ 'mtlb_isletter',
+ 'mtlb_isspace',
+ 'mtlb_l',
+ 'mtlb_legendre',
+ 'mtlb_linspace',
+ 'mtlb_logic',
+ 'mtlb_logical',
+ 'mtlb_loglog',
+ 'mtlb_lower',
+ 'mtlb_max',
+ 'mtlb_mean',
+ 'mtlb_median',
+ 'mtlb_mesh',
+ 'mtlb_meshdom',
+ 'mtlb_min',
+ 'mtlb_more',
+ 'mtlb_num2str',
+ 'mtlb_ones',
+ 'mtlb_pcolor',
+ 'mtlb_plot',
+ 'mtlb_prod',
+ 'mtlb_qr',
+ 'mtlb_qz',
+ 'mtlb_rand',
+ 'mtlb_randn',
+ 'mtlb_rcond',
+ 'mtlb_realmax',
+ 'mtlb_realmin',
+ 'mtlb_s',
+ 'mtlb_semilogx',
+ 'mtlb_semilogy',
+ 'mtlb_setstr',
+ 'mtlb_size',
+ 'mtlb_sort',
+ 'mtlb_sortrows',
+ 'mtlb_sprintf',
+ 'mtlb_sscanf',
+ 'mtlb_std',
+ 'mtlb_strcmp',
+ 'mtlb_strcmpi',
+ 'mtlb_strfind',
+ 'mtlb_strrep',
+ 'mtlb_subplot',
+ 'mtlb_sum',
+ 'mtlb_t',
+ 'mtlb_toeplitz',
+ 'mtlb_tril',
+ 'mtlb_triu',
+ 'mtlb_true',
+ 'mtlb_type',
+ 'mtlb_uint16',
+ 'mtlb_uint32',
+ 'mtlb_uint8',
+ 'mtlb_upper',
+ 'mtlb_var',
+ 'mtlb_zeros',
+ 'mu2lin',
+ 'mutation_ga_binary',
+ 'mutation_ga_default',
+ 'mvcorrel',
+ 'mvvacov',
+ 'nancumsum',
+ 'nand2mean',
+ 'nanmax',
+ 'nanmean',
+ 'nanmeanf',
+ 'nanmedian',
+ 'nanmin',
+ 'nanreglin',
+ 'nanstdev',
+ 'nansum',
+ 'narsimul',
+ 'ndgrid',
+ 'ndims',
+ 'nehari',
+ 'neigh_func_csa',
+ 'neigh_func_default',
+ 'neigh_func_fsa',
+ 'neigh_func_vfsa',
+ 'neldermead_cget',
+ 'neldermead_configure',
+ 'neldermead_costf',
+ 'neldermead_defaultoutput',
+ 'neldermead_destroy',
+ 'neldermead_function',
+ 'neldermead_get',
+ 'neldermead_log',
+ 'neldermead_new',
+ 'neldermead_restart',
+ 'neldermead_search',
+ 'neldermead_updatesimp',
+ 'nextpow2',
+ 'nfreq',
+ 'nicholschart',
+ 'nlev',
+ 'nmplot_cget',
+ 'nmplot_configure',
+ 'nmplot_contour',
+ 'nmplot_destroy',
+ 'nmplot_function',
+ 'nmplot_get',
+ 'nmplot_historyplot',
+ 'nmplot_log',
+ 'nmplot_new',
+ 'nmplot_outputcmd',
+ 'nmplot_restart',
+ 'nmplot_search',
+ 'nmplot_simplexhistory',
+ 'noisegen',
+ 'nonreg_test_run',
+ 'now',
+ 'nthroot',
+ 'null',
+ 'num2cell',
+ 'numderivative',
+ 'numdiff',
+ 'numer',
+ 'nyquist',
+ 'nyquistfrequencybounds',
+ 'obs_gram',
+ 'obscont',
+ 'observer',
+ 'obsv_mat',
+ 'obsvss',
+ 'oct2dec',
+ 'odeoptions',
+ 'optim_ga',
+ 'optim_moga',
+ 'optim_nsga',
+ 'optim_nsga2',
+ 'optim_sa',
+ 'optimbase_cget',
+ 'optimbase_checkbounds',
+ 'optimbase_checkcostfun',
+ 'optimbase_checkx0',
+ 'optimbase_configure',
+ 'optimbase_destroy',
+ 'optimbase_function',
+ 'optimbase_get',
+ 'optimbase_hasbounds',
+ 'optimbase_hasconstraints',
+ 'optimbase_hasnlcons',
+ 'optimbase_histget',
+ 'optimbase_histset',
+ 'optimbase_incriter',
+ 'optimbase_isfeasible',
+ 'optimbase_isinbounds',
+ 'optimbase_isinnonlincons',
+ 'optimbase_log',
+ 'optimbase_logshutdown',
+ 'optimbase_logstartup',
+ 'optimbase_new',
+ 'optimbase_outputcmd',
+ 'optimbase_outstruct',
+ 'optimbase_proj2bnds',
+ 'optimbase_set',
+ 'optimbase_stoplog',
+ 'optimbase_terminate',
+ 'optimget',
+ 'optimplotfunccount',
+ 'optimplotfval',
+ 'optimplotx',
+ 'optimset',
+ 'optimsimplex_center',
+ 'optimsimplex_check',
+ 'optimsimplex_compsomefv',
+ 'optimsimplex_computefv',
+ 'optimsimplex_deltafv',
+ 'optimsimplex_deltafvmax',
+ 'optimsimplex_destroy',
+ 'optimsimplex_dirmat',
+ 'optimsimplex_fvmean',
+ 'optimsimplex_fvstdev',
+ 'optimsimplex_fvvariance',
+ 'optimsimplex_getall',
+ 'optimsimplex_getallfv',
+ 'optimsimplex_getallx',
+ 'optimsimplex_getfv',
+ 'optimsimplex_getn',
+ 'optimsimplex_getnbve',
+ 'optimsimplex_getve',
+ 'optimsimplex_getx',
+ 'optimsimplex_gradientfv',
+ 'optimsimplex_log',
+ 'optimsimplex_new',
+ 'optimsimplex_reflect',
+ 'optimsimplex_setall',
+ 'optimsimplex_setallfv',
+ 'optimsimplex_setallx',
+ 'optimsimplex_setfv',
+ 'optimsimplex_setn',
+ 'optimsimplex_setnbve',
+ 'optimsimplex_setve',
+ 'optimsimplex_setx',
+ 'optimsimplex_shrink',
+ 'optimsimplex_size',
+ 'optimsimplex_sort',
+ 'optimsimplex_xbar',
+ 'orth',
+ 'output_ga_default',
+ 'output_moga_default',
+ 'output_nsga2_default',
+ 'output_nsga_default',
+ 'p_margin',
+ 'pack',
+ 'pareto_filter',
+ 'parrot',
+ 'pbig',
+ 'pca',
+ 'pcg',
+ 'pdiv',
+ 'pen2ea',
+ 'pencan',
+ 'pencost',
+ 'penlaur',
+ 'perctl',
+ 'perl',
+ 'perms',
+ 'permute',
+ 'pertrans',
+ 'pfactors',
+ 'pfss',
+ 'phasemag',
+ 'phaseplot',
+ 'phc',
+ 'pinv',
+ 'playsnd',
+ 'plotprofile',
+ 'plzr',
+ 'pmodulo',
+ 'pol2des',
+ 'pol2str',
+ 'polar',
+ 'polfact',
+ 'prbs_a',
+ 'prettyprint',
+ 'primes',
+ 'princomp',
+ 'profile',
+ 'proj',
+ 'projsl',
+ 'projspec',
+ 'psmall',
+ 'pspect',
+ 'qmr',
+ 'qpsolve',
+ 'quart',
+ 'quaskro',
+ 'rafiter',
+ 'randpencil',
+ 'range',
+ 'rank',
+ 'readxls',
+ 'recompilefunction',
+ 'recons',
+ 'reglin',
+ 'regress',
+ 'remezb',
+ 'remove_param',
+ 'remove_profiling',
+ 'repfreq',
+ 'replace_Ix_by_Fx',
+ 'repmat',
+ 'reset_profiling',
+ 'resize_matrix',
+ 'returntoscilab',
+ 'rhs2code',
+ 'ric_desc',
+ 'riccati',
+ 'rmdir',
+ 'routh_t',
+ 'rowcomp',
+ 'rowcompr',
+ 'rowinout',
+ 'rowregul',
+ 'rowshuff',
+ 'rref',
+ 'sample',
+ 'samplef',
+ 'samwr',
+ 'savematfile',
+ 'savewave',
+ 'scanf',
+ 'sci2exp',
+ 'sciGUI_init',
+ 'sci_sparse',
+ 'scicos_getvalue',
+ 'scicos_simulate',
+ 'scicos_workspace_init',
+ 'scisptdemo',
+ 'scitest',
+ 'sdiff',
+ 'sec',
+ 'secd',
+ 'sech',
+ 'selection_ga_elitist',
+ 'selection_ga_random',
+ 'sensi',
+ 'setPreferencesValue',
+ 'set_param',
+ 'setdiff',
+ 'sgrid',
+ 'show_margins',
+ 'show_pca',
+ 'showprofile',
+ 'signm',
+ 'sinc',
+ 'sincd',
+ 'sind',
+ 'sinh',
+ 'sinhm',
+ 'sinm',
+ 'sm2des',
+ 'sm2ss',
+ 'smga',
+ 'smooth',
+ 'solve',
+ 'sound',
+ 'soundsec',
+ 'sp2adj',
+ 'spaninter',
+ 'spanplus',
+ 'spantwo',
+ 'specfact',
+ 'speye',
+ 'sprand',
+ 'spzeros',
+ 'sqroot',
+ 'sqrtm',
+ 'squarewave',
+ 'squeeze',
+ 'srfaur',
+ 'srkf',
+ 'ss2des',
+ 'ss2ss',
+ 'ss2tf',
+ 'sskf',
+ 'ssprint',
+ 'ssrand',
+ 'st_deviation',
+ 'st_i_generic',
+ 'st_ility',
+ 'stabil',
+ 'statgain',
+ 'stdev',
+ 'stdevf',
+ 'steadycos',
+ 'strange',
+ 'strcmpi',
+ 'struct',
+ 'sub2ind',
+ 'sva',
+ 'svplot',
+ 'sylm',
+ 'sylv',
+ 'sysconv',
+ 'sysdiag',
+ 'sysfact',
+ 'syslin',
+ 'syssize',
+ 'system',
+ 'systmat',
+ 'tabul',
+ 'tand',
+ 'tanh',
+ 'tanhm',
+ 'tanm',
+ 'tbx_build_blocks',
+ 'tbx_build_cleaner',
+ 'tbx_build_gateway',
+ 'tbx_build_gateway_clean',
+ 'tbx_build_gateway_loader',
+ 'tbx_build_help',
+ 'tbx_build_help_loader',
+ 'tbx_build_loader',
+ 'tbx_build_localization',
+ 'tbx_build_macros',
+ 'tbx_build_pal_loader',
+ 'tbx_build_src',
+ 'tbx_builder',
+ 'tbx_builder_gateway',
+ 'tbx_builder_gateway_lang',
+ 'tbx_builder_help',
+ 'tbx_builder_help_lang',
+ 'tbx_builder_macros',
+ 'tbx_builder_src',
+ 'tbx_builder_src_lang',
+ 'tbx_generate_pofile',
+ 'temp_law_csa',
+ 'temp_law_default',
+ 'temp_law_fsa',
+ 'temp_law_huang',
+ 'temp_law_vfsa',
+ 'test_clean',
+ 'test_on_columns',
+ 'test_run',
+ 'test_run_level',
+ 'testexamples',
+ 'tf2des',
+ 'tf2ss',
+ 'thrownan',
+ 'tic',
+ 'time_id',
+ 'toc',
+ 'toeplitz',
+ 'tokenpos',
+ 'toolboxes',
+ 'trace',
+ 'trans',
+ 'translatepaths',
+ 'tree2code',
+ 'trfmod',
+ 'trianfml',
+ 'trimmean',
+ 'trisolve',
+ 'trzeros',
+ 'typeof',
+ 'ui_observer',
+ 'union',
+ 'unique',
+ 'unit_test_run',
+ 'unix_g',
+ 'unix_s',
+ 'unix_w',
+ 'unix_x',
+ 'unobs',
+ 'unpack',
+ 'unwrap',
+ 'variance',
+ 'variancef',
+ 'vec2list',
+ 'vectorfind',
+ 'ver',
+ 'warnobsolete',
+ 'wavread',
+ 'wavwrite',
+ 'wcenter',
+ 'weekday',
+ 'wfir',
+ 'wfir_gui',
+ 'whereami',
+ 'who_user',
+ 'whos',
+ 'wiener',
+ 'wigner',
+ 'window',
+ 'winlist',
+ 'with_javasci',
+ 'with_macros_source',
+ 'with_modelica_compiler',
+ 'with_tk',
+ 'xcorr',
+ 'xcosBlockEval',
+ 'xcosBlockInterface',
+ 'xcosCodeGeneration',
+ 'xcosConfigureModelica',
+ 'xcosPal',
+ 'xcosPalAdd',
+ 'xcosPalAddBlock',
+ 'xcosPalExport',
+ 'xcosPalGenerateAllIcons',
+ 'xcosShowBlockWarning',
+ 'xcosValidateBlockSet',
+ 'xcosValidateCompareBlock',
+ 'xcos_compile',
+ 'xcos_debug_gui',
+ 'xcos_run',
+ 'xcos_simulate',
+ 'xcov',
+ 'xmltochm',
+ 'xmltoformat',
+ 'xmltohtml',
+ 'xmltojar',
+ 'xmltopdf',
+ 'xmltops',
+ 'xmltoweb',
+ 'yulewalk',
+ 'zeropen',
+ 'zgrid',
+ 'zpbutt',
+ 'zpch1',
+ 'zpch2',
+ 'zpell',
+)
+
+variables_kw = (
+ '$',
+ '%F',
+ '%T',
+ '%e',
+ '%eps',
+ '%f',
+ '%fftw',
+ '%gui',
+ '%i',
+ '%inf',
+ '%io',
+ '%modalWarning',
+ '%nan',
+ '%pi',
+ '%s',
+ '%t',
+ '%tk',
+ '%toolboxes',
+ '%toolboxes_dir',
+ '%z',
+ 'PWD',
+ 'SCI',
+ 'SCIHOME',
+ 'TMPDIR',
+ 'arnoldilib',
+ 'assertlib',
+ 'atomslib',
+ 'cacsdlib',
+ 'compatibility_functilib',
+ 'corelib',
+ 'data_structureslib',
+ 'demo_toolslib',
+ 'development_toolslib',
+ 'differential_equationlib',
+ 'dynamic_linklib',
+ 'elementary_functionslib',
+ 'enull',
+ 'evoid',
+ 'external_objectslib',
+ 'fd',
+ 'fileiolib',
+ 'functionslib',
+ 'genetic_algorithmslib',
+ 'helptoolslib',
+ 'home',
+ 'integerlib',
+ 'interpolationlib',
+ 'iolib',
+ 'jnull',
+ 'jvoid',
+ 'linear_algebralib',
+ 'm2scilib',
+ 'matiolib',
+ 'modules_managerlib',
+ 'neldermeadlib',
+ 'optimbaselib',
+ 'optimizationlib',
+ 'optimsimplexlib',
+ 'output_streamlib',
+ 'overloadinglib',
+ 'parameterslib',
+ 'polynomialslib',
+ 'preferenceslib',
+ 'randliblib',
+ 'scicos_autolib',
+ 'scicos_utilslib',
+ 'scinoteslib',
+ 'signal_processinglib',
+ 'simulated_annealinglib',
+ 'soundlib',
+ 'sparselib',
+ 'special_functionslib',
+ 'spreadsheetlib',
+ 'statisticslib',
+ 'stringlib',
+ 'tclscilib',
+ 'timelib',
+ 'umfpacklib',
+ 'xcoslib',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import subprocess
+ from pygments.util import format_lines, duplicates_removed
+
+ mapping = {'variables': 'builtin'}
+
+ def extract_completion(var_type):
+ s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output = s.communicate('''\
+fd = mopen("/dev/stderr", "wt");
+mputl(strcat(completion("", "%s"), "||"), fd);
+mclose(fd)\n''' % var_type)
+ if '||' not in output[1]:
+ raise Exception(output[0])
+ # Invalid DISPLAY causes this to be output:
+ text = output[1].strip()
+ if text.startswith('Error: unable to open display \n'):
+ text = text[len('Error: unable to open display \n'):]
+ return text.split('||')
+
+ new_data = {}
+ seen = set() # only keep first type for a given word
+ for t in ('functions', 'commands', 'macros', 'variables'):
+ new_data[t] = duplicates_removed(extract_completion(t), seen)
+ seen.update(set(new_data[t]))
+
+
+ with open(__file__) as f:
+ content = f.read()
+
+ header = content[:content.find('# Autogenerated')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ with open(__file__, 'w') as f:
+ f.write(header)
+ f.write('# Autogenerated\n\n')
+ for k, v in sorted(new_data.iteritems()):
+ f.write(format_lines(k + '_kw', v) + '\n\n')
+ f.write(footer)
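
Aside (not part of the patch): the generated tuples above — functions_kw, macros_kw, variables_kw — exist so that Pygments' ScilabLexer can tag Scilab builtins at highlight time. A minimal usage sketch, assuming only the standard public Pygments API (highlight, ScilabLexer, TerminalFormatter) and an installed Pygments:

    # Highlight a small Scilab snippet; names such as disp, rand, ones and %pi
    # come from the keyword tuples restored in this diff.
    from pygments import highlight
    from pygments.lexers import ScilabLexer
    from pygments.formatters import TerminalFormatter

    code = "disp(rand(3, 3)); x = %pi * ones(2, 2);"
    print(highlight(code, ScilabLexer(), TerminalFormatter()))
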
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_sourcemod_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_sourcemod_builtins.py
index fa9671678e..0ac4039b0e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_sourcemod_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_sourcemod_builtins.py
@@ -1,1163 +1,1163 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._sourcemod_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of SourceMod functions.
- It is able to re-generate itself.
-
- Do not edit the FUNCTIONS list by hand.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._sourcemod_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names of SourceMod functions.
+ It is able to re-generate itself.
+
+ Do not edit the FUNCTIONS list by hand.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-FUNCTIONS = (
- 'OnEntityCreated',
- 'OnEntityDestroyed',
- 'OnGetGameDescription',
- 'OnLevelInit',
- 'SDKHook',
- 'SDKHookEx',
- 'SDKUnhook',
- 'SDKHooks_TakeDamage',
- 'SDKHooks_DropWeapon',
- 'TopMenuHandler',
- 'CreateTopMenu',
- 'LoadTopMenuConfig',
- 'AddToTopMenu',
- 'GetTopMenuInfoString',
- 'GetTopMenuObjName',
- 'RemoveFromTopMenu',
- 'DisplayTopMenu',
- 'DisplayTopMenuCategory',
- 'FindTopMenuCategory',
- 'SetTopMenuTitleCaching',
- 'OnAdminMenuCreated',
- 'OnAdminMenuReady',
- 'GetAdminTopMenu',
- 'AddTargetsToMenu',
- 'AddTargetsToMenu2',
- 'RedisplayAdminMenu',
- 'TEHook',
- 'AddTempEntHook',
- 'RemoveTempEntHook',
- 'TE_Start',
- 'TE_IsValidProp',
- 'TE_WriteNum',
- 'TE_ReadNum',
- 'TE_WriteFloat',
- 'TE_ReadFloat',
- 'TE_WriteVector',
- 'TE_ReadVector',
- 'TE_WriteAngles',
- 'TE_WriteFloatArray',
- 'TE_Send',
- 'TE_WriteEncodedEnt',
- 'TE_SendToAll',
- 'TE_SendToClient',
- 'CreateKeyValues',
- 'KvSetString',
- 'KvSetNum',
- 'KvSetUInt64',
- 'KvSetFloat',
- 'KvSetColor',
- 'KvSetVector',
- 'KvGetString',
- 'KvGetNum',
- 'KvGetFloat',
- 'KvGetColor',
- 'KvGetUInt64',
- 'KvGetVector',
- 'KvJumpToKey',
- 'KvJumpToKeySymbol',
- 'KvGotoFirstSubKey',
- 'KvGotoNextKey',
- 'KvSavePosition',
- 'KvDeleteKey',
- 'KvDeleteThis',
- 'KvGoBack',
- 'KvRewind',
- 'KvGetSectionName',
- 'KvSetSectionName',
- 'KvGetDataType',
- 'KeyValuesToFile',
- 'FileToKeyValues',
- 'StringToKeyValues',
- 'KvSetEscapeSequences',
- 'KvNodesInStack',
- 'KvCopySubkeys',
- 'KvFindKeyById',
- 'KvGetNameSymbol',
- 'KvGetSectionSymbol',
- 'TE_SetupSparks',
- 'TE_SetupSmoke',
- 'TE_SetupDust',
- 'TE_SetupMuzzleFlash',
- 'TE_SetupMetalSparks',
- 'TE_SetupEnergySplash',
- 'TE_SetupArmorRicochet',
- 'TE_SetupGlowSprite',
- 'TE_SetupExplosion',
- 'TE_SetupBloodSprite',
- 'TE_SetupBeamRingPoint',
- 'TE_SetupBeamPoints',
- 'TE_SetupBeamLaser',
- 'TE_SetupBeamRing',
- 'TE_SetupBeamFollow',
- 'HookEvent',
- 'HookEventEx',
- 'UnhookEvent',
- 'CreateEvent',
- 'FireEvent',
- 'CancelCreatedEvent',
- 'GetEventBool',
- 'SetEventBool',
- 'GetEventInt',
- 'SetEventInt',
- 'GetEventFloat',
- 'SetEventFloat',
- 'GetEventString',
- 'SetEventString',
- 'GetEventName',
- 'SetEventBroadcast',
- 'GetUserMessageType',
- 'GetUserMessageId',
- 'GetUserMessageName',
- 'StartMessage',
- 'StartMessageEx',
- 'EndMessage',
- 'MsgHook',
- 'MsgPostHook',
- 'HookUserMessage',
- 'UnhookUserMessage',
- 'StartMessageAll',
- 'StartMessageOne',
- 'InactivateClient',
- 'ReconnectClient',
- 'GetMaxEntities',
- 'GetEntityCount',
- 'IsValidEntity',
- 'IsValidEdict',
- 'IsEntNetworkable',
- 'CreateEdict',
- 'RemoveEdict',
- 'GetEdictFlags',
- 'SetEdictFlags',
- 'GetEdictClassname',
- 'GetEntityNetClass',
- 'ChangeEdictState',
- 'GetEntData',
- 'SetEntData',
- 'GetEntDataFloat',
- 'SetEntDataFloat',
- 'GetEntDataEnt2',
- 'SetEntDataEnt2',
- 'GetEntDataVector',
- 'SetEntDataVector',
- 'GetEntDataString',
- 'SetEntDataString',
- 'FindSendPropOffs',
- 'FindSendPropInfo',
- 'FindDataMapOffs',
- 'FindDataMapInfo',
- 'GetEntSendPropOffs',
- 'GetEntProp',
- 'SetEntProp',
- 'GetEntPropFloat',
- 'SetEntPropFloat',
- 'GetEntPropEnt',
- 'SetEntPropEnt',
- 'GetEntPropVector',
- 'SetEntPropVector',
- 'GetEntPropString',
- 'SetEntPropString',
- 'GetEntPropArraySize',
- 'GetEntDataArray',
- 'SetEntDataArray',
- 'GetEntityAddress',
- 'GetEntityClassname',
- 'float',
- 'FloatMul',
- 'FloatDiv',
- 'FloatAdd',
- 'FloatSub',
- 'FloatFraction',
- 'RoundToZero',
- 'RoundToCeil',
- 'RoundToFloor',
- 'RoundToNearest',
- 'FloatCompare',
- 'SquareRoot',
- 'Pow',
- 'Exponential',
- 'Logarithm',
- 'Sine',
- 'Cosine',
- 'Tangent',
- 'FloatAbs',
- 'ArcTangent',
- 'ArcCosine',
- 'ArcSine',
- 'ArcTangent2',
- 'RoundFloat',
- 'operator%',
- 'DegToRad',
- 'RadToDeg',
- 'GetURandomInt',
- 'GetURandomFloat',
- 'SetURandomSeed',
- 'SetURandomSeedSimple',
- 'RemovePlayerItem',
- 'GivePlayerItem',
- 'GetPlayerWeaponSlot',
- 'IgniteEntity',
- 'ExtinguishEntity',
- 'TeleportEntity',
- 'ForcePlayerSuicide',
- 'SlapPlayer',
- 'FindEntityByClassname',
- 'GetClientEyeAngles',
- 'CreateEntityByName',
- 'DispatchSpawn',
- 'DispatchKeyValue',
- 'DispatchKeyValueFloat',
- 'DispatchKeyValueVector',
- 'GetClientAimTarget',
- 'GetTeamCount',
- 'GetTeamName',
- 'GetTeamScore',
- 'SetTeamScore',
- 'GetTeamClientCount',
- 'SetEntityModel',
- 'GetPlayerDecalFile',
- 'GetPlayerJingleFile',
- 'GetServerNetStats',
- 'EquipPlayerWeapon',
- 'ActivateEntity',
- 'SetClientInfo',
- 'GivePlayerAmmo',
- 'SetClientListeningFlags',
- 'GetClientListeningFlags',
- 'SetListenOverride',
- 'GetListenOverride',
- 'IsClientMuted',
- 'TR_GetPointContents',
- 'TR_GetPointContentsEnt',
- 'TR_TraceRay',
- 'TR_TraceHull',
- 'TR_TraceRayFilter',
- 'TR_TraceHullFilter',
- 'TR_TraceRayEx',
- 'TR_TraceHullEx',
- 'TR_TraceRayFilterEx',
- 'TR_TraceHullFilterEx',
- 'TR_GetFraction',
- 'TR_GetEndPosition',
- 'TR_GetEntityIndex',
- 'TR_DidHit',
- 'TR_GetHitGroup',
- 'TR_GetPlaneNormal',
- 'TR_PointOutsideWorld',
- 'SortIntegers',
- 'SortFloats',
- 'SortStrings',
- 'SortFunc1D',
- 'SortCustom1D',
- 'SortCustom2D',
- 'SortADTArray',
- 'SortFuncADTArray',
- 'SortADTArrayCustom',
- 'CompileRegex',
- 'MatchRegex',
- 'GetRegexSubString',
- 'SimpleRegexMatch',
- 'TF2_GetPlayerClass',
- 'TF2_SetPlayerClass',
- 'TF2_RemoveWeaponSlot',
- 'TF2_RemoveAllWeapons',
- 'TF2_IsPlayerInCondition',
- 'TF2_GetObjectType',
- 'TF2_GetObjectMode',
- 'NominateMap',
- 'RemoveNominationByMap',
- 'RemoveNominationByOwner',
- 'GetExcludeMapList',
- 'GetNominatedMapList',
- 'CanMapChooserStartVote',
- 'InitiateMapChooserVote',
- 'HasEndOfMapVoteFinished',
- 'EndOfMapVoteEnabled',
- 'OnNominationRemoved',
- 'OnMapVoteStarted',
- 'CreateTimer',
- 'KillTimer',
- 'TriggerTimer',
- 'GetTickedTime',
- 'GetMapTimeLeft',
- 'GetMapTimeLimit',
- 'ExtendMapTimeLimit',
- 'GetTickInterval',
- 'OnMapTimeLeftChanged',
- 'IsServerProcessing',
- 'CreateDataTimer',
- 'ByteCountToCells',
- 'CreateArray',
- 'ClearArray',
- 'CloneArray',
- 'ResizeArray',
- 'GetArraySize',
- 'PushArrayCell',
- 'PushArrayString',
- 'PushArrayArray',
- 'GetArrayCell',
- 'GetArrayString',
- 'GetArrayArray',
- 'SetArrayCell',
- 'SetArrayString',
- 'SetArrayArray',
- 'ShiftArrayUp',
- 'RemoveFromArray',
- 'SwapArrayItems',
- 'FindStringInArray',
- 'FindValueInArray',
- 'ProcessTargetString',
- 'ReplyToTargetError',
- 'MultiTargetFilter',
- 'AddMultiTargetFilter',
- 'RemoveMultiTargetFilter',
- 'OnBanClient',
- 'OnBanIdentity',
- 'OnRemoveBan',
- 'BanClient',
- 'BanIdentity',
- 'RemoveBan',
- 'CreateTrie',
- 'SetTrieValue',
- 'SetTrieArray',
- 'SetTrieString',
- 'GetTrieValue',
- 'GetTrieArray',
- 'GetTrieString',
- 'RemoveFromTrie',
- 'ClearTrie',
- 'GetTrieSize',
- 'GetFunctionByName',
- 'CreateGlobalForward',
- 'CreateForward',
- 'GetForwardFunctionCount',
- 'AddToForward',
- 'RemoveFromForward',
- 'RemoveAllFromForward',
- 'Call_StartForward',
- 'Call_StartFunction',
- 'Call_PushCell',
- 'Call_PushCellRef',
- 'Call_PushFloat',
- 'Call_PushFloatRef',
- 'Call_PushArray',
- 'Call_PushArrayEx',
- 'Call_PushString',
- 'Call_PushStringEx',
- 'Call_Finish',
- 'Call_Cancel',
- 'NativeCall',
- 'CreateNative',
- 'ThrowNativeError',
- 'GetNativeStringLength',
- 'GetNativeString',
- 'SetNativeString',
- 'GetNativeCell',
- 'GetNativeCellRef',
- 'SetNativeCellRef',
- 'GetNativeArray',
- 'SetNativeArray',
- 'FormatNativeString',
- 'RequestFrameCallback',
- 'RequestFrame',
- 'OnRebuildAdminCache',
- 'DumpAdminCache',
- 'AddCommandOverride',
- 'GetCommandOverride',
- 'UnsetCommandOverride',
- 'CreateAdmGroup',
- 'FindAdmGroup',
- 'SetAdmGroupAddFlag',
- 'GetAdmGroupAddFlag',
- 'GetAdmGroupAddFlags',
- 'SetAdmGroupImmuneFrom',
- 'GetAdmGroupImmuneCount',
- 'GetAdmGroupImmuneFrom',
- 'AddAdmGroupCmdOverride',
- 'GetAdmGroupCmdOverride',
- 'RegisterAuthIdentType',
- 'CreateAdmin',
- 'GetAdminUsername',
- 'BindAdminIdentity',
- 'SetAdminFlag',
- 'GetAdminFlag',
- 'GetAdminFlags',
- 'AdminInheritGroup',
- 'GetAdminGroupCount',
- 'GetAdminGroup',
- 'SetAdminPassword',
- 'GetAdminPassword',
- 'FindAdminByIdentity',
- 'RemoveAdmin',
- 'FlagBitsToBitArray',
- 'FlagBitArrayToBits',
- 'FlagArrayToBits',
- 'FlagBitsToArray',
- 'FindFlagByName',
- 'FindFlagByChar',
- 'FindFlagChar',
- 'ReadFlagString',
- 'CanAdminTarget',
- 'CreateAuthMethod',
- 'SetAdmGroupImmunityLevel',
- 'GetAdmGroupImmunityLevel',
- 'SetAdminImmunityLevel',
- 'GetAdminImmunityLevel',
- 'FlagToBit',
- 'BitToFlag',
- 'ServerCommand',
- 'ServerCommandEx',
- 'InsertServerCommand',
- 'ServerExecute',
- 'ClientCommand',
- 'FakeClientCommand',
- 'FakeClientCommandEx',
- 'PrintToServer',
- 'PrintToConsole',
- 'ReplyToCommand',
- 'GetCmdReplySource',
- 'SetCmdReplySource',
- 'IsChatTrigger',
- 'ShowActivity2',
- 'ShowActivity',
- 'ShowActivityEx',
- 'FormatActivitySource',
- 'SrvCmd',
- 'RegServerCmd',
- 'ConCmd',
- 'RegConsoleCmd',
- 'RegAdminCmd',
- 'GetCmdArgs',
- 'GetCmdArg',
- 'GetCmdArgString',
- 'CreateConVar',
- 'FindConVar',
- 'ConVarChanged',
- 'HookConVarChange',
- 'UnhookConVarChange',
- 'GetConVarBool',
- 'SetConVarBool',
- 'GetConVarInt',
- 'SetConVarInt',
- 'GetConVarFloat',
- 'SetConVarFloat',
- 'GetConVarString',
- 'SetConVarString',
- 'ResetConVar',
- 'GetConVarDefault',
- 'GetConVarFlags',
- 'SetConVarFlags',
- 'GetConVarBounds',
- 'SetConVarBounds',
- 'GetConVarName',
- 'QueryClientConVar',
- 'GetCommandIterator',
- 'ReadCommandIterator',
- 'CheckCommandAccess',
- 'CheckAccess',
- 'IsValidConVarChar',
- 'GetCommandFlags',
- 'SetCommandFlags',
- 'FindFirstConCommand',
- 'FindNextConCommand',
- 'SendConVarValue',
- 'AddServerTag',
- 'RemoveServerTag',
- 'CommandListener',
- 'AddCommandListener',
- 'RemoveCommandListener',
- 'CommandExists',
- 'OnClientSayCommand',
- 'OnClientSayCommand_Post',
- 'TF2_IgnitePlayer',
- 'TF2_RespawnPlayer',
- 'TF2_RegeneratePlayer',
- 'TF2_AddCondition',
- 'TF2_RemoveCondition',
- 'TF2_SetPlayerPowerPlay',
- 'TF2_DisguisePlayer',
- 'TF2_RemovePlayerDisguise',
- 'TF2_StunPlayer',
- 'TF2_MakeBleed',
- 'TF2_GetClass',
- 'TF2_CalcIsAttackCritical',
- 'TF2_OnIsHolidayActive',
- 'TF2_IsHolidayActive',
- 'TF2_IsPlayerInDuel',
- 'TF2_RemoveWearable',
- 'TF2_OnConditionAdded',
- 'TF2_OnConditionRemoved',
- 'TF2_OnWaitingForPlayersStart',
- 'TF2_OnWaitingForPlayersEnd',
- 'TF2_OnPlayerTeleport',
- 'SQL_Connect',
- 'SQL_DefConnect',
- 'SQL_ConnectCustom',
- 'SQLite_UseDatabase',
- 'SQL_CheckConfig',
- 'SQL_GetDriver',
- 'SQL_ReadDriver',
- 'SQL_GetDriverIdent',
- 'SQL_GetDriverProduct',
- 'SQL_SetCharset',
- 'SQL_GetAffectedRows',
- 'SQL_GetInsertId',
- 'SQL_GetError',
- 'SQL_EscapeString',
- 'SQL_QuoteString',
- 'SQL_FastQuery',
- 'SQL_Query',
- 'SQL_PrepareQuery',
- 'SQL_FetchMoreResults',
- 'SQL_HasResultSet',
- 'SQL_GetRowCount',
- 'SQL_GetFieldCount',
- 'SQL_FieldNumToName',
- 'SQL_FieldNameToNum',
- 'SQL_FetchRow',
- 'SQL_MoreRows',
- 'SQL_Rewind',
- 'SQL_FetchString',
- 'SQL_FetchFloat',
- 'SQL_FetchInt',
- 'SQL_IsFieldNull',
- 'SQL_FetchSize',
- 'SQL_BindParamInt',
- 'SQL_BindParamFloat',
- 'SQL_BindParamString',
- 'SQL_Execute',
- 'SQL_LockDatabase',
- 'SQL_UnlockDatabase',
- 'SQLTCallback',
- 'SQL_IsSameConnection',
- 'SQL_TConnect',
- 'SQL_TQuery',
- 'SQL_CreateTransaction',
- 'SQL_AddQuery',
- 'SQLTxnSuccess',
- 'SQLTxnFailure',
- 'SQL_ExecuteTransaction',
- 'CloseHandle',
- 'CloneHandle',
- 'MenuHandler',
- 'CreateMenu',
- 'DisplayMenu',
- 'DisplayMenuAtItem',
- 'AddMenuItem',
- 'InsertMenuItem',
- 'RemoveMenuItem',
- 'RemoveAllMenuItems',
- 'GetMenuItem',
- 'GetMenuSelectionPosition',
- 'GetMenuItemCount',
- 'SetMenuPagination',
- 'GetMenuPagination',
- 'GetMenuStyle',
- 'SetMenuTitle',
- 'GetMenuTitle',
- 'CreatePanelFromMenu',
- 'GetMenuExitButton',
- 'SetMenuExitButton',
- 'GetMenuExitBackButton',
- 'SetMenuExitBackButton',
- 'SetMenuNoVoteButton',
- 'CancelMenu',
- 'GetMenuOptionFlags',
- 'SetMenuOptionFlags',
- 'IsVoteInProgress',
- 'CancelVote',
- 'VoteMenu',
- 'VoteMenuToAll',
- 'VoteHandler',
- 'SetVoteResultCallback',
- 'CheckVoteDelay',
- 'IsClientInVotePool',
- 'RedrawClientVoteMenu',
- 'GetMenuStyleHandle',
- 'CreatePanel',
- 'CreateMenuEx',
- 'GetClientMenu',
- 'CancelClientMenu',
- 'GetMaxPageItems',
- 'GetPanelStyle',
- 'SetPanelTitle',
- 'DrawPanelItem',
- 'DrawPanelText',
- 'CanPanelDrawFlags',
- 'SetPanelKeys',
- 'SendPanelToClient',
- 'GetPanelTextRemaining',
- 'GetPanelCurrentKey',
- 'SetPanelCurrentKey',
- 'RedrawMenuItem',
- 'InternalShowMenu',
- 'GetMenuVoteInfo',
- 'IsNewVoteAllowed',
- 'PrefetchSound',
- 'EmitAmbientSound',
- 'FadeClientVolume',
- 'StopSound',
- 'EmitSound',
- 'EmitSentence',
- 'GetDistGainFromSoundLevel',
- 'AmbientSHook',
- 'NormalSHook',
- 'AddAmbientSoundHook',
- 'AddNormalSoundHook',
- 'RemoveAmbientSoundHook',
- 'RemoveNormalSoundHook',
- 'EmitSoundToClient',
- 'EmitSoundToAll',
- 'ATTN_TO_SNDLEVEL',
- 'GetGameSoundParams',
- 'EmitGameSound',
- 'EmitAmbientGameSound',
- 'EmitGameSoundToClient',
- 'EmitGameSoundToAll',
- 'PrecacheScriptSound',
- 'strlen',
- 'StrContains',
- 'strcmp',
- 'strncmp',
- 'StrEqual',
- 'strcopy',
- 'Format',
- 'FormatEx',
- 'VFormat',
- 'StringToInt',
- 'StringToIntEx',
- 'IntToString',
- 'StringToFloat',
- 'StringToFloatEx',
- 'FloatToString',
- 'BreakString',
- 'TrimString',
- 'SplitString',
- 'ReplaceString',
- 'ReplaceStringEx',
- 'GetCharBytes',
- 'IsCharAlpha',
- 'IsCharNumeric',
- 'IsCharSpace',
- 'IsCharMB',
- 'IsCharUpper',
- 'IsCharLower',
- 'StripQuotes',
- 'CharToUpper',
- 'CharToLower',
- 'FindCharInString',
- 'StrCat',
- 'ExplodeString',
- 'ImplodeStrings',
- 'GetVectorLength',
- 'GetVectorDistance',
- 'GetVectorDotProduct',
- 'GetVectorCrossProduct',
- 'NormalizeVector',
- 'GetAngleVectors',
- 'GetVectorAngles',
- 'GetVectorVectors',
- 'AddVectors',
- 'SubtractVectors',
- 'ScaleVector',
- 'NegateVector',
- 'MakeVectorFromPoints',
- 'BaseComm_IsClientGagged',
- 'BaseComm_IsClientMuted',
- 'BaseComm_SetClientGag',
- 'BaseComm_SetClientMute',
- 'FormatUserLogText',
- 'FindPluginByFile',
- 'FindTarget',
- 'AcceptEntityInput',
- 'SetVariantBool',
- 'SetVariantString',
- 'SetVariantInt',
- 'SetVariantFloat',
- 'SetVariantVector3D',
- 'SetVariantPosVector3D',
- 'SetVariantColor',
- 'SetVariantEntity',
- 'GameRules_GetProp',
- 'GameRules_SetProp',
- 'GameRules_GetPropFloat',
- 'GameRules_SetPropFloat',
- 'GameRules_GetPropEnt',
- 'GameRules_SetPropEnt',
- 'GameRules_GetPropVector',
- 'GameRules_SetPropVector',
- 'GameRules_GetPropString',
- 'GameRules_SetPropString',
- 'GameRules_GetRoundState',
- 'OnClientConnect',
- 'OnClientConnected',
- 'OnClientPutInServer',
- 'OnClientDisconnect',
- 'OnClientDisconnect_Post',
- 'OnClientCommand',
- 'OnClientSettingsChanged',
- 'OnClientAuthorized',
- 'OnClientPreAdminCheck',
- 'OnClientPostAdminFilter',
- 'OnClientPostAdminCheck',
- 'GetMaxClients',
- 'GetMaxHumanPlayers',
- 'GetClientCount',
- 'GetClientName',
- 'GetClientIP',
- 'GetClientAuthString',
- 'GetClientAuthId',
- 'GetSteamAccountID',
- 'GetClientUserId',
- 'IsClientConnected',
- 'IsClientInGame',
- 'IsClientInKickQueue',
- 'IsClientAuthorized',
- 'IsFakeClient',
- 'IsClientSourceTV',
- 'IsClientReplay',
- 'IsClientObserver',
- 'IsPlayerAlive',
- 'GetClientInfo',
- 'GetClientTeam',
- 'SetUserAdmin',
- 'GetUserAdmin',
- 'AddUserFlags',
- 'RemoveUserFlags',
- 'SetUserFlagBits',
- 'GetUserFlagBits',
- 'CanUserTarget',
- 'RunAdminCacheChecks',
- 'NotifyPostAdminCheck',
- 'CreateFakeClient',
- 'SetFakeClientConVar',
- 'GetClientHealth',
- 'GetClientModel',
- 'GetClientWeapon',
- 'GetClientMaxs',
- 'GetClientMins',
- 'GetClientAbsAngles',
- 'GetClientAbsOrigin',
- 'GetClientArmor',
- 'GetClientDeaths',
- 'GetClientFrags',
- 'GetClientDataRate',
- 'IsClientTimingOut',
- 'GetClientTime',
- 'GetClientLatency',
- 'GetClientAvgLatency',
- 'GetClientAvgLoss',
- 'GetClientAvgChoke',
- 'GetClientAvgData',
- 'GetClientAvgPackets',
- 'GetClientOfUserId',
- 'KickClient',
- 'KickClientEx',
- 'ChangeClientTeam',
- 'GetClientSerial',
- 'GetClientFromSerial',
- 'FindStringTable',
- 'GetNumStringTables',
- 'GetStringTableNumStrings',
- 'GetStringTableMaxStrings',
- 'GetStringTableName',
- 'FindStringIndex',
- 'ReadStringTable',
- 'GetStringTableDataLength',
- 'GetStringTableData',
- 'SetStringTableData',
- 'AddToStringTable',
- 'LockStringTables',
- 'AddFileToDownloadsTable',
- 'GetEntityFlags',
- 'SetEntityFlags',
- 'GetEntityMoveType',
- 'SetEntityMoveType',
- 'GetEntityRenderMode',
- 'SetEntityRenderMode',
- 'GetEntityRenderFx',
- 'SetEntityRenderFx',
- 'SetEntityRenderColor',
- 'GetEntityGravity',
- 'SetEntityGravity',
- 'SetEntityHealth',
- 'GetClientButtons',
- 'EntityOutput',
- 'HookEntityOutput',
- 'UnhookEntityOutput',
- 'HookSingleEntityOutput',
- 'UnhookSingleEntityOutput',
- 'SMC_CreateParser',
- 'SMC_ParseFile',
- 'SMC_GetErrorString',
- 'SMC_ParseStart',
- 'SMC_SetParseStart',
- 'SMC_ParseEnd',
- 'SMC_SetParseEnd',
- 'SMC_NewSection',
- 'SMC_KeyValue',
- 'SMC_EndSection',
- 'SMC_SetReaders',
- 'SMC_RawLine',
- 'SMC_SetRawLine',
- 'BfWriteBool',
- 'BfWriteByte',
- 'BfWriteChar',
- 'BfWriteShort',
- 'BfWriteWord',
- 'BfWriteNum',
- 'BfWriteFloat',
- 'BfWriteString',
- 'BfWriteEntity',
- 'BfWriteAngle',
- 'BfWriteCoord',
- 'BfWriteVecCoord',
- 'BfWriteVecNormal',
- 'BfWriteAngles',
- 'BfReadBool',
- 'BfReadByte',
- 'BfReadChar',
- 'BfReadShort',
- 'BfReadWord',
- 'BfReadNum',
- 'BfReadFloat',
- 'BfReadString',
- 'BfReadEntity',
- 'BfReadAngle',
- 'BfReadCoord',
- 'BfReadVecCoord',
- 'BfReadVecNormal',
- 'BfReadAngles',
- 'BfGetNumBytesLeft',
- 'CreateProfiler',
- 'StartProfiling',
- 'StopProfiling',
- 'GetProfilerTime',
- 'OnPluginStart',
- 'AskPluginLoad2',
- 'OnPluginEnd',
- 'OnPluginPauseChange',
- 'OnGameFrame',
- 'OnMapStart',
- 'OnMapEnd',
- 'OnConfigsExecuted',
- 'OnAutoConfigsBuffered',
- 'OnAllPluginsLoaded',
- 'GetMyHandle',
- 'GetPluginIterator',
- 'MorePlugins',
- 'ReadPlugin',
- 'GetPluginStatus',
- 'GetPluginFilename',
- 'IsPluginDebugging',
- 'GetPluginInfo',
- 'FindPluginByNumber',
- 'SetFailState',
- 'ThrowError',
- 'GetTime',
- 'FormatTime',
- 'LoadGameConfigFile',
- 'GameConfGetOffset',
- 'GameConfGetKeyValue',
- 'GameConfGetAddress',
- 'GetSysTickCount',
- 'AutoExecConfig',
- 'RegPluginLibrary',
- 'LibraryExists',
- 'GetExtensionFileStatus',
- 'OnLibraryAdded',
- 'OnLibraryRemoved',
- 'ReadMapList',
- 'SetMapListCompatBind',
- 'OnClientFloodCheck',
- 'OnClientFloodResult',
- 'CanTestFeatures',
- 'GetFeatureStatus',
- 'RequireFeature',
- 'LoadFromAddress',
- 'StoreToAddress',
- 'CreateStack',
- 'PushStackCell',
- 'PushStackString',
- 'PushStackArray',
- 'PopStackCell',
- 'PopStackString',
- 'PopStackArray',
- 'IsStackEmpty',
- 'PopStack',
- 'OnPlayerRunCmd',
- 'BuildPath',
- 'OpenDirectory',
- 'ReadDirEntry',
- 'OpenFile',
- 'DeleteFile',
- 'ReadFileLine',
- 'ReadFile',
- 'ReadFileString',
- 'WriteFile',
- 'WriteFileString',
- 'WriteFileLine',
- 'ReadFileCell',
- 'WriteFileCell',
- 'IsEndOfFile',
- 'FileSeek',
- 'FilePosition',
- 'FileExists',
- 'RenameFile',
- 'DirExists',
- 'FileSize',
- 'FlushFile',
- 'RemoveDir',
- 'CreateDirectory',
- 'GetFileTime',
- 'LogToOpenFile',
- 'LogToOpenFileEx',
- 'PbReadInt',
- 'PbReadFloat',
- 'PbReadBool',
- 'PbReadString',
- 'PbReadColor',
- 'PbReadAngle',
- 'PbReadVector',
- 'PbReadVector2D',
- 'PbGetRepeatedFieldCount',
- 'PbSetInt',
- 'PbSetFloat',
- 'PbSetBool',
- 'PbSetString',
- 'PbSetColor',
- 'PbSetAngle',
- 'PbSetVector',
- 'PbSetVector2D',
- 'PbAddInt',
- 'PbAddFloat',
- 'PbAddBool',
- 'PbAddString',
- 'PbAddColor',
- 'PbAddAngle',
- 'PbAddVector',
- 'PbAddVector2D',
- 'PbRemoveRepeatedFieldValue',
- 'PbReadMessage',
- 'PbReadRepeatedMessage',
- 'PbAddMessage',
- 'SetNextMap',
- 'GetNextMap',
- 'ForceChangeLevel',
- 'GetMapHistorySize',
- 'GetMapHistory',
- 'GeoipCode2',
- 'GeoipCode3',
- 'GeoipCountry',
- 'MarkNativeAsOptional',
- 'RegClientCookie',
- 'FindClientCookie',
- 'SetClientCookie',
- 'GetClientCookie',
- 'SetAuthIdCookie',
- 'AreClientCookiesCached',
- 'OnClientCookiesCached',
- 'CookieMenuHandler',
- 'SetCookiePrefabMenu',
- 'SetCookieMenuItem',
- 'ShowCookieMenu',
- 'GetCookieIterator',
- 'ReadCookieIterator',
- 'GetCookieAccess',
- 'GetClientCookieTime',
- 'LoadTranslations',
- 'SetGlobalTransTarget',
- 'GetClientLanguage',
- 'GetServerLanguage',
- 'GetLanguageCount',
- 'GetLanguageInfo',
- 'SetClientLanguage',
- 'GetLanguageByCode',
- 'GetLanguageByName',
- 'CS_OnBuyCommand',
- 'CS_OnCSWeaponDrop',
- 'CS_OnGetWeaponPrice',
- 'CS_OnTerminateRound',
- 'CS_RespawnPlayer',
- 'CS_SwitchTeam',
- 'CS_DropWeapon',
- 'CS_TerminateRound',
- 'CS_GetTranslatedWeaponAlias',
- 'CS_GetWeaponPrice',
- 'CS_GetClientClanTag',
- 'CS_SetClientClanTag',
- 'CS_GetTeamScore',
- 'CS_SetTeamScore',
- 'CS_GetMVPCount',
- 'CS_SetMVPCount',
- 'CS_GetClientContributionScore',
- 'CS_SetClientContributionScore',
- 'CS_GetClientAssists',
- 'CS_SetClientAssists',
- 'CS_AliasToWeaponID',
- 'CS_WeaponIDToAlias',
- 'CS_IsValidWeaponID',
- 'CS_UpdateClientModel',
- 'LogToGame',
- 'SetRandomSeed',
- 'GetRandomFloat',
- 'GetRandomInt',
- 'IsMapValid',
- 'IsDedicatedServer',
- 'GetEngineTime',
- 'GetGameTime',
- 'GetGameTickCount',
- 'GetGameDescription',
- 'GetGameFolderName',
- 'GetCurrentMap',
- 'PrecacheModel',
- 'PrecacheSentenceFile',
- 'PrecacheDecal',
- 'PrecacheGeneric',
- 'IsModelPrecached',
- 'IsDecalPrecached',
- 'IsGenericPrecached',
- 'PrecacheSound',
- 'IsSoundPrecached',
- 'CreateDialog',
- 'GetEngineVersion',
- 'PrintToChat',
- 'PrintToChatAll',
- 'PrintCenterText',
- 'PrintCenterTextAll',
- 'PrintHintText',
- 'PrintHintTextToAll',
- 'ShowVGUIPanel',
- 'CreateHudSynchronizer',
- 'SetHudTextParams',
- 'SetHudTextParamsEx',
- 'ShowSyncHudText',
- 'ClearSyncHud',
- 'ShowHudText',
- 'ShowMOTDPanel',
- 'DisplayAskConnectBox',
- 'EntIndexToEntRef',
- 'EntRefToEntIndex',
- 'MakeCompatEntRef',
- 'SetClientViewEntity',
- 'SetLightStyle',
- 'GetClientEyePosition',
- 'CreateDataPack',
- 'WritePackCell',
- 'WritePackFloat',
- 'WritePackString',
- 'ReadPackCell',
- 'ReadPackFloat',
- 'ReadPackString',
- 'ResetPack',
- 'GetPackPosition',
- 'SetPackPosition',
- 'IsPackReadable',
- 'LogMessage',
- 'LogToFile',
- 'LogToFileEx',
- 'LogAction',
- 'LogError',
- 'OnLogAction',
- 'GameLogHook',
- 'AddGameLogHook',
- 'RemoveGameLogHook',
- 'FindTeamByName',
- 'StartPrepSDKCall',
- 'PrepSDKCall_SetVirtual',
- 'PrepSDKCall_SetSignature',
- 'PrepSDKCall_SetAddress',
- 'PrepSDKCall_SetFromConf',
- 'PrepSDKCall_SetReturnInfo',
- 'PrepSDKCall_AddParameter',
- 'EndPrepSDKCall',
- 'SDKCall',
- 'GetPlayerResourceEntity',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import re
- import sys
- try:
- from urllib import FancyURLopener
- except ImportError:
- from urllib.request import FancyURLopener
-
- from pygments.util import format_lines
-
- # urllib ends up wanting to import a module called 'math' -- if
- # pygments/lexers is in the path, this ends badly.
- for i in range(len(sys.path)-1, -1, -1):
- if sys.path[i].endswith('/lexers'):
- del sys.path[i]
-
- class Opener(FancyURLopener):
- version = 'Mozilla/5.0 (Pygments Sourcemod Builtins Update)'
-
- opener = Opener()
-
- def get_version():
- f = opener.open('http://docs.sourcemod.net/api/index.php')
- r = re.compile(r'SourceMod v\.<b>([\d\.]+(?:-\w+)?)</td>')
- for line in f:
- m = r.search(line)
- if m is not None:
- return m.groups()[0]
- raise ValueError('No version in api docs')
-
- def get_sm_functions():
- f = opener.open('http://docs.sourcemod.net/api/SMfuncs.js')
- r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
- functions = []
- for line in f:
- m = r.match(line)
- if m is not None:
- functions.append(m.groups()[0])
- return functions
-
- def regenerate(filename, natives):
- with open(filename) as fp:
- content = fp.read()
-
- header = content[:content.find('FUNCTIONS = (')]
- footer = content[content.find("if __name__ == '__main__':")-1:]
-
-
- with open(filename, 'w') as fp:
- fp.write(header)
- fp.write(format_lines('FUNCTIONS', natives))
- fp.write(footer)
-
- def run():
- version = get_version()
- print('> Downloading function index for SourceMod %s' % version)
- functions = get_sm_functions()
- print('> %d functions found:' % len(functions))
-
- functionlist = []
- for full_function_name in functions:
- print('>> %s' % full_function_name)
- functionlist.append(full_function_name)
-
- regenerate(__file__, functionlist)
-
-
- run()
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+FUNCTIONS = (
+ 'OnEntityCreated',
+ 'OnEntityDestroyed',
+ 'OnGetGameDescription',
+ 'OnLevelInit',
+ 'SDKHook',
+ 'SDKHookEx',
+ 'SDKUnhook',
+ 'SDKHooks_TakeDamage',
+ 'SDKHooks_DropWeapon',
+ 'TopMenuHandler',
+ 'CreateTopMenu',
+ 'LoadTopMenuConfig',
+ 'AddToTopMenu',
+ 'GetTopMenuInfoString',
+ 'GetTopMenuObjName',
+ 'RemoveFromTopMenu',
+ 'DisplayTopMenu',
+ 'DisplayTopMenuCategory',
+ 'FindTopMenuCategory',
+ 'SetTopMenuTitleCaching',
+ 'OnAdminMenuCreated',
+ 'OnAdminMenuReady',
+ 'GetAdminTopMenu',
+ 'AddTargetsToMenu',
+ 'AddTargetsToMenu2',
+ 'RedisplayAdminMenu',
+ 'TEHook',
+ 'AddTempEntHook',
+ 'RemoveTempEntHook',
+ 'TE_Start',
+ 'TE_IsValidProp',
+ 'TE_WriteNum',
+ 'TE_ReadNum',
+ 'TE_WriteFloat',
+ 'TE_ReadFloat',
+ 'TE_WriteVector',
+ 'TE_ReadVector',
+ 'TE_WriteAngles',
+ 'TE_WriteFloatArray',
+ 'TE_Send',
+ 'TE_WriteEncodedEnt',
+ 'TE_SendToAll',
+ 'TE_SendToClient',
+ 'CreateKeyValues',
+ 'KvSetString',
+ 'KvSetNum',
+ 'KvSetUInt64',
+ 'KvSetFloat',
+ 'KvSetColor',
+ 'KvSetVector',
+ 'KvGetString',
+ 'KvGetNum',
+ 'KvGetFloat',
+ 'KvGetColor',
+ 'KvGetUInt64',
+ 'KvGetVector',
+ 'KvJumpToKey',
+ 'KvJumpToKeySymbol',
+ 'KvGotoFirstSubKey',
+ 'KvGotoNextKey',
+ 'KvSavePosition',
+ 'KvDeleteKey',
+ 'KvDeleteThis',
+ 'KvGoBack',
+ 'KvRewind',
+ 'KvGetSectionName',
+ 'KvSetSectionName',
+ 'KvGetDataType',
+ 'KeyValuesToFile',
+ 'FileToKeyValues',
+ 'StringToKeyValues',
+ 'KvSetEscapeSequences',
+ 'KvNodesInStack',
+ 'KvCopySubkeys',
+ 'KvFindKeyById',
+ 'KvGetNameSymbol',
+ 'KvGetSectionSymbol',
+ 'TE_SetupSparks',
+ 'TE_SetupSmoke',
+ 'TE_SetupDust',
+ 'TE_SetupMuzzleFlash',
+ 'TE_SetupMetalSparks',
+ 'TE_SetupEnergySplash',
+ 'TE_SetupArmorRicochet',
+ 'TE_SetupGlowSprite',
+ 'TE_SetupExplosion',
+ 'TE_SetupBloodSprite',
+ 'TE_SetupBeamRingPoint',
+ 'TE_SetupBeamPoints',
+ 'TE_SetupBeamLaser',
+ 'TE_SetupBeamRing',
+ 'TE_SetupBeamFollow',
+ 'HookEvent',
+ 'HookEventEx',
+ 'UnhookEvent',
+ 'CreateEvent',
+ 'FireEvent',
+ 'CancelCreatedEvent',
+ 'GetEventBool',
+ 'SetEventBool',
+ 'GetEventInt',
+ 'SetEventInt',
+ 'GetEventFloat',
+ 'SetEventFloat',
+ 'GetEventString',
+ 'SetEventString',
+ 'GetEventName',
+ 'SetEventBroadcast',
+ 'GetUserMessageType',
+ 'GetUserMessageId',
+ 'GetUserMessageName',
+ 'StartMessage',
+ 'StartMessageEx',
+ 'EndMessage',
+ 'MsgHook',
+ 'MsgPostHook',
+ 'HookUserMessage',
+ 'UnhookUserMessage',
+ 'StartMessageAll',
+ 'StartMessageOne',
+ 'InactivateClient',
+ 'ReconnectClient',
+ 'GetMaxEntities',
+ 'GetEntityCount',
+ 'IsValidEntity',
+ 'IsValidEdict',
+ 'IsEntNetworkable',
+ 'CreateEdict',
+ 'RemoveEdict',
+ 'GetEdictFlags',
+ 'SetEdictFlags',
+ 'GetEdictClassname',
+ 'GetEntityNetClass',
+ 'ChangeEdictState',
+ 'GetEntData',
+ 'SetEntData',
+ 'GetEntDataFloat',
+ 'SetEntDataFloat',
+ 'GetEntDataEnt2',
+ 'SetEntDataEnt2',
+ 'GetEntDataVector',
+ 'SetEntDataVector',
+ 'GetEntDataString',
+ 'SetEntDataString',
+ 'FindSendPropOffs',
+ 'FindSendPropInfo',
+ 'FindDataMapOffs',
+ 'FindDataMapInfo',
+ 'GetEntSendPropOffs',
+ 'GetEntProp',
+ 'SetEntProp',
+ 'GetEntPropFloat',
+ 'SetEntPropFloat',
+ 'GetEntPropEnt',
+ 'SetEntPropEnt',
+ 'GetEntPropVector',
+ 'SetEntPropVector',
+ 'GetEntPropString',
+ 'SetEntPropString',
+ 'GetEntPropArraySize',
+ 'GetEntDataArray',
+ 'SetEntDataArray',
+ 'GetEntityAddress',
+ 'GetEntityClassname',
+ 'float',
+ 'FloatMul',
+ 'FloatDiv',
+ 'FloatAdd',
+ 'FloatSub',
+ 'FloatFraction',
+ 'RoundToZero',
+ 'RoundToCeil',
+ 'RoundToFloor',
+ 'RoundToNearest',
+ 'FloatCompare',
+ 'SquareRoot',
+ 'Pow',
+ 'Exponential',
+ 'Logarithm',
+ 'Sine',
+ 'Cosine',
+ 'Tangent',
+ 'FloatAbs',
+ 'ArcTangent',
+ 'ArcCosine',
+ 'ArcSine',
+ 'ArcTangent2',
+ 'RoundFloat',
+ 'operator%',
+ 'DegToRad',
+ 'RadToDeg',
+ 'GetURandomInt',
+ 'GetURandomFloat',
+ 'SetURandomSeed',
+ 'SetURandomSeedSimple',
+ 'RemovePlayerItem',
+ 'GivePlayerItem',
+ 'GetPlayerWeaponSlot',
+ 'IgniteEntity',
+ 'ExtinguishEntity',
+ 'TeleportEntity',
+ 'ForcePlayerSuicide',
+ 'SlapPlayer',
+ 'FindEntityByClassname',
+ 'GetClientEyeAngles',
+ 'CreateEntityByName',
+ 'DispatchSpawn',
+ 'DispatchKeyValue',
+ 'DispatchKeyValueFloat',
+ 'DispatchKeyValueVector',
+ 'GetClientAimTarget',
+ 'GetTeamCount',
+ 'GetTeamName',
+ 'GetTeamScore',
+ 'SetTeamScore',
+ 'GetTeamClientCount',
+ 'SetEntityModel',
+ 'GetPlayerDecalFile',
+ 'GetPlayerJingleFile',
+ 'GetServerNetStats',
+ 'EquipPlayerWeapon',
+ 'ActivateEntity',
+ 'SetClientInfo',
+ 'GivePlayerAmmo',
+ 'SetClientListeningFlags',
+ 'GetClientListeningFlags',
+ 'SetListenOverride',
+ 'GetListenOverride',
+ 'IsClientMuted',
+ 'TR_GetPointContents',
+ 'TR_GetPointContentsEnt',
+ 'TR_TraceRay',
+ 'TR_TraceHull',
+ 'TR_TraceRayFilter',
+ 'TR_TraceHullFilter',
+ 'TR_TraceRayEx',
+ 'TR_TraceHullEx',
+ 'TR_TraceRayFilterEx',
+ 'TR_TraceHullFilterEx',
+ 'TR_GetFraction',
+ 'TR_GetEndPosition',
+ 'TR_GetEntityIndex',
+ 'TR_DidHit',
+ 'TR_GetHitGroup',
+ 'TR_GetPlaneNormal',
+ 'TR_PointOutsideWorld',
+ 'SortIntegers',
+ 'SortFloats',
+ 'SortStrings',
+ 'SortFunc1D',
+ 'SortCustom1D',
+ 'SortCustom2D',
+ 'SortADTArray',
+ 'SortFuncADTArray',
+ 'SortADTArrayCustom',
+ 'CompileRegex',
+ 'MatchRegex',
+ 'GetRegexSubString',
+ 'SimpleRegexMatch',
+ 'TF2_GetPlayerClass',
+ 'TF2_SetPlayerClass',
+ 'TF2_RemoveWeaponSlot',
+ 'TF2_RemoveAllWeapons',
+ 'TF2_IsPlayerInCondition',
+ 'TF2_GetObjectType',
+ 'TF2_GetObjectMode',
+ 'NominateMap',
+ 'RemoveNominationByMap',
+ 'RemoveNominationByOwner',
+ 'GetExcludeMapList',
+ 'GetNominatedMapList',
+ 'CanMapChooserStartVote',
+ 'InitiateMapChooserVote',
+ 'HasEndOfMapVoteFinished',
+ 'EndOfMapVoteEnabled',
+ 'OnNominationRemoved',
+ 'OnMapVoteStarted',
+ 'CreateTimer',
+ 'KillTimer',
+ 'TriggerTimer',
+ 'GetTickedTime',
+ 'GetMapTimeLeft',
+ 'GetMapTimeLimit',
+ 'ExtendMapTimeLimit',
+ 'GetTickInterval',
+ 'OnMapTimeLeftChanged',
+ 'IsServerProcessing',
+ 'CreateDataTimer',
+ 'ByteCountToCells',
+ 'CreateArray',
+ 'ClearArray',
+ 'CloneArray',
+ 'ResizeArray',
+ 'GetArraySize',
+ 'PushArrayCell',
+ 'PushArrayString',
+ 'PushArrayArray',
+ 'GetArrayCell',
+ 'GetArrayString',
+ 'GetArrayArray',
+ 'SetArrayCell',
+ 'SetArrayString',
+ 'SetArrayArray',
+ 'ShiftArrayUp',
+ 'RemoveFromArray',
+ 'SwapArrayItems',
+ 'FindStringInArray',
+ 'FindValueInArray',
+ 'ProcessTargetString',
+ 'ReplyToTargetError',
+ 'MultiTargetFilter',
+ 'AddMultiTargetFilter',
+ 'RemoveMultiTargetFilter',
+ 'OnBanClient',
+ 'OnBanIdentity',
+ 'OnRemoveBan',
+ 'BanClient',
+ 'BanIdentity',
+ 'RemoveBan',
+ 'CreateTrie',
+ 'SetTrieValue',
+ 'SetTrieArray',
+ 'SetTrieString',
+ 'GetTrieValue',
+ 'GetTrieArray',
+ 'GetTrieString',
+ 'RemoveFromTrie',
+ 'ClearTrie',
+ 'GetTrieSize',
+ 'GetFunctionByName',
+ 'CreateGlobalForward',
+ 'CreateForward',
+ 'GetForwardFunctionCount',
+ 'AddToForward',
+ 'RemoveFromForward',
+ 'RemoveAllFromForward',
+ 'Call_StartForward',
+ 'Call_StartFunction',
+ 'Call_PushCell',
+ 'Call_PushCellRef',
+ 'Call_PushFloat',
+ 'Call_PushFloatRef',
+ 'Call_PushArray',
+ 'Call_PushArrayEx',
+ 'Call_PushString',
+ 'Call_PushStringEx',
+ 'Call_Finish',
+ 'Call_Cancel',
+ 'NativeCall',
+ 'CreateNative',
+ 'ThrowNativeError',
+ 'GetNativeStringLength',
+ 'GetNativeString',
+ 'SetNativeString',
+ 'GetNativeCell',
+ 'GetNativeCellRef',
+ 'SetNativeCellRef',
+ 'GetNativeArray',
+ 'SetNativeArray',
+ 'FormatNativeString',
+ 'RequestFrameCallback',
+ 'RequestFrame',
+ 'OnRebuildAdminCache',
+ 'DumpAdminCache',
+ 'AddCommandOverride',
+ 'GetCommandOverride',
+ 'UnsetCommandOverride',
+ 'CreateAdmGroup',
+ 'FindAdmGroup',
+ 'SetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlags',
+ 'SetAdmGroupImmuneFrom',
+ 'GetAdmGroupImmuneCount',
+ 'GetAdmGroupImmuneFrom',
+ 'AddAdmGroupCmdOverride',
+ 'GetAdmGroupCmdOverride',
+ 'RegisterAuthIdentType',
+ 'CreateAdmin',
+ 'GetAdminUsername',
+ 'BindAdminIdentity',
+ 'SetAdminFlag',
+ 'GetAdminFlag',
+ 'GetAdminFlags',
+ 'AdminInheritGroup',
+ 'GetAdminGroupCount',
+ 'GetAdminGroup',
+ 'SetAdminPassword',
+ 'GetAdminPassword',
+ 'FindAdminByIdentity',
+ 'RemoveAdmin',
+ 'FlagBitsToBitArray',
+ 'FlagBitArrayToBits',
+ 'FlagArrayToBits',
+ 'FlagBitsToArray',
+ 'FindFlagByName',
+ 'FindFlagByChar',
+ 'FindFlagChar',
+ 'ReadFlagString',
+ 'CanAdminTarget',
+ 'CreateAuthMethod',
+ 'SetAdmGroupImmunityLevel',
+ 'GetAdmGroupImmunityLevel',
+ 'SetAdminImmunityLevel',
+ 'GetAdminImmunityLevel',
+ 'FlagToBit',
+ 'BitToFlag',
+ 'ServerCommand',
+ 'ServerCommandEx',
+ 'InsertServerCommand',
+ 'ServerExecute',
+ 'ClientCommand',
+ 'FakeClientCommand',
+ 'FakeClientCommandEx',
+ 'PrintToServer',
+ 'PrintToConsole',
+ 'ReplyToCommand',
+ 'GetCmdReplySource',
+ 'SetCmdReplySource',
+ 'IsChatTrigger',
+ 'ShowActivity2',
+ 'ShowActivity',
+ 'ShowActivityEx',
+ 'FormatActivitySource',
+ 'SrvCmd',
+ 'RegServerCmd',
+ 'ConCmd',
+ 'RegConsoleCmd',
+ 'RegAdminCmd',
+ 'GetCmdArgs',
+ 'GetCmdArg',
+ 'GetCmdArgString',
+ 'CreateConVar',
+ 'FindConVar',
+ 'ConVarChanged',
+ 'HookConVarChange',
+ 'UnhookConVarChange',
+ 'GetConVarBool',
+ 'SetConVarBool',
+ 'GetConVarInt',
+ 'SetConVarInt',
+ 'GetConVarFloat',
+ 'SetConVarFloat',
+ 'GetConVarString',
+ 'SetConVarString',
+ 'ResetConVar',
+ 'GetConVarDefault',
+ 'GetConVarFlags',
+ 'SetConVarFlags',
+ 'GetConVarBounds',
+ 'SetConVarBounds',
+ 'GetConVarName',
+ 'QueryClientConVar',
+ 'GetCommandIterator',
+ 'ReadCommandIterator',
+ 'CheckCommandAccess',
+ 'CheckAccess',
+ 'IsValidConVarChar',
+ 'GetCommandFlags',
+ 'SetCommandFlags',
+ 'FindFirstConCommand',
+ 'FindNextConCommand',
+ 'SendConVarValue',
+ 'AddServerTag',
+ 'RemoveServerTag',
+ 'CommandListener',
+ 'AddCommandListener',
+ 'RemoveCommandListener',
+ 'CommandExists',
+ 'OnClientSayCommand',
+ 'OnClientSayCommand_Post',
+ 'TF2_IgnitePlayer',
+ 'TF2_RespawnPlayer',
+ 'TF2_RegeneratePlayer',
+ 'TF2_AddCondition',
+ 'TF2_RemoveCondition',
+ 'TF2_SetPlayerPowerPlay',
+ 'TF2_DisguisePlayer',
+ 'TF2_RemovePlayerDisguise',
+ 'TF2_StunPlayer',
+ 'TF2_MakeBleed',
+ 'TF2_GetClass',
+ 'TF2_CalcIsAttackCritical',
+ 'TF2_OnIsHolidayActive',
+ 'TF2_IsHolidayActive',
+ 'TF2_IsPlayerInDuel',
+ 'TF2_RemoveWearable',
+ 'TF2_OnConditionAdded',
+ 'TF2_OnConditionRemoved',
+ 'TF2_OnWaitingForPlayersStart',
+ 'TF2_OnWaitingForPlayersEnd',
+ 'TF2_OnPlayerTeleport',
+ 'SQL_Connect',
+ 'SQL_DefConnect',
+ 'SQL_ConnectCustom',
+ 'SQLite_UseDatabase',
+ 'SQL_CheckConfig',
+ 'SQL_GetDriver',
+ 'SQL_ReadDriver',
+ 'SQL_GetDriverIdent',
+ 'SQL_GetDriverProduct',
+ 'SQL_SetCharset',
+ 'SQL_GetAffectedRows',
+ 'SQL_GetInsertId',
+ 'SQL_GetError',
+ 'SQL_EscapeString',
+ 'SQL_QuoteString',
+ 'SQL_FastQuery',
+ 'SQL_Query',
+ 'SQL_PrepareQuery',
+ 'SQL_FetchMoreResults',
+ 'SQL_HasResultSet',
+ 'SQL_GetRowCount',
+ 'SQL_GetFieldCount',
+ 'SQL_FieldNumToName',
+ 'SQL_FieldNameToNum',
+ 'SQL_FetchRow',
+ 'SQL_MoreRows',
+ 'SQL_Rewind',
+ 'SQL_FetchString',
+ 'SQL_FetchFloat',
+ 'SQL_FetchInt',
+ 'SQL_IsFieldNull',
+ 'SQL_FetchSize',
+ 'SQL_BindParamInt',
+ 'SQL_BindParamFloat',
+ 'SQL_BindParamString',
+ 'SQL_Execute',
+ 'SQL_LockDatabase',
+ 'SQL_UnlockDatabase',
+ 'SQLTCallback',
+ 'SQL_IsSameConnection',
+ 'SQL_TConnect',
+ 'SQL_TQuery',
+ 'SQL_CreateTransaction',
+ 'SQL_AddQuery',
+ 'SQLTxnSuccess',
+ 'SQLTxnFailure',
+ 'SQL_ExecuteTransaction',
+ 'CloseHandle',
+ 'CloneHandle',
+ 'MenuHandler',
+ 'CreateMenu',
+ 'DisplayMenu',
+ 'DisplayMenuAtItem',
+ 'AddMenuItem',
+ 'InsertMenuItem',
+ 'RemoveMenuItem',
+ 'RemoveAllMenuItems',
+ 'GetMenuItem',
+ 'GetMenuSelectionPosition',
+ 'GetMenuItemCount',
+ 'SetMenuPagination',
+ 'GetMenuPagination',
+ 'GetMenuStyle',
+ 'SetMenuTitle',
+ 'GetMenuTitle',
+ 'CreatePanelFromMenu',
+ 'GetMenuExitButton',
+ 'SetMenuExitButton',
+ 'GetMenuExitBackButton',
+ 'SetMenuExitBackButton',
+ 'SetMenuNoVoteButton',
+ 'CancelMenu',
+ 'GetMenuOptionFlags',
+ 'SetMenuOptionFlags',
+ 'IsVoteInProgress',
+ 'CancelVote',
+ 'VoteMenu',
+ 'VoteMenuToAll',
+ 'VoteHandler',
+ 'SetVoteResultCallback',
+ 'CheckVoteDelay',
+ 'IsClientInVotePool',
+ 'RedrawClientVoteMenu',
+ 'GetMenuStyleHandle',
+ 'CreatePanel',
+ 'CreateMenuEx',
+ 'GetClientMenu',
+ 'CancelClientMenu',
+ 'GetMaxPageItems',
+ 'GetPanelStyle',
+ 'SetPanelTitle',
+ 'DrawPanelItem',
+ 'DrawPanelText',
+ 'CanPanelDrawFlags',
+ 'SetPanelKeys',
+ 'SendPanelToClient',
+ 'GetPanelTextRemaining',
+ 'GetPanelCurrentKey',
+ 'SetPanelCurrentKey',
+ 'RedrawMenuItem',
+ 'InternalShowMenu',
+ 'GetMenuVoteInfo',
+ 'IsNewVoteAllowed',
+ 'PrefetchSound',
+ 'EmitAmbientSound',
+ 'FadeClientVolume',
+ 'StopSound',
+ 'EmitSound',
+ 'EmitSentence',
+ 'GetDistGainFromSoundLevel',
+ 'AmbientSHook',
+ 'NormalSHook',
+ 'AddAmbientSoundHook',
+ 'AddNormalSoundHook',
+ 'RemoveAmbientSoundHook',
+ 'RemoveNormalSoundHook',
+ 'EmitSoundToClient',
+ 'EmitSoundToAll',
+ 'ATTN_TO_SNDLEVEL',
+ 'GetGameSoundParams',
+ 'EmitGameSound',
+ 'EmitAmbientGameSound',
+ 'EmitGameSoundToClient',
+ 'EmitGameSoundToAll',
+ 'PrecacheScriptSound',
+ 'strlen',
+ 'StrContains',
+ 'strcmp',
+ 'strncmp',
+ 'StrEqual',
+ 'strcopy',
+ 'Format',
+ 'FormatEx',
+ 'VFormat',
+ 'StringToInt',
+ 'StringToIntEx',
+ 'IntToString',
+ 'StringToFloat',
+ 'StringToFloatEx',
+ 'FloatToString',
+ 'BreakString',
+ 'TrimString',
+ 'SplitString',
+ 'ReplaceString',
+ 'ReplaceStringEx',
+ 'GetCharBytes',
+ 'IsCharAlpha',
+ 'IsCharNumeric',
+ 'IsCharSpace',
+ 'IsCharMB',
+ 'IsCharUpper',
+ 'IsCharLower',
+ 'StripQuotes',
+ 'CharToUpper',
+ 'CharToLower',
+ 'FindCharInString',
+ 'StrCat',
+ 'ExplodeString',
+ 'ImplodeStrings',
+ 'GetVectorLength',
+ 'GetVectorDistance',
+ 'GetVectorDotProduct',
+ 'GetVectorCrossProduct',
+ 'NormalizeVector',
+ 'GetAngleVectors',
+ 'GetVectorAngles',
+ 'GetVectorVectors',
+ 'AddVectors',
+ 'SubtractVectors',
+ 'ScaleVector',
+ 'NegateVector',
+ 'MakeVectorFromPoints',
+ 'BaseComm_IsClientGagged',
+ 'BaseComm_IsClientMuted',
+ 'BaseComm_SetClientGag',
+ 'BaseComm_SetClientMute',
+ 'FormatUserLogText',
+ 'FindPluginByFile',
+ 'FindTarget',
+ 'AcceptEntityInput',
+ 'SetVariantBool',
+ 'SetVariantString',
+ 'SetVariantInt',
+ 'SetVariantFloat',
+ 'SetVariantVector3D',
+ 'SetVariantPosVector3D',
+ 'SetVariantColor',
+ 'SetVariantEntity',
+ 'GameRules_GetProp',
+ 'GameRules_SetProp',
+ 'GameRules_GetPropFloat',
+ 'GameRules_SetPropFloat',
+ 'GameRules_GetPropEnt',
+ 'GameRules_SetPropEnt',
+ 'GameRules_GetPropVector',
+ 'GameRules_SetPropVector',
+ 'GameRules_GetPropString',
+ 'GameRules_SetPropString',
+ 'GameRules_GetRoundState',
+ 'OnClientConnect',
+ 'OnClientConnected',
+ 'OnClientPutInServer',
+ 'OnClientDisconnect',
+ 'OnClientDisconnect_Post',
+ 'OnClientCommand',
+ 'OnClientSettingsChanged',
+ 'OnClientAuthorized',
+ 'OnClientPreAdminCheck',
+ 'OnClientPostAdminFilter',
+ 'OnClientPostAdminCheck',
+ 'GetMaxClients',
+ 'GetMaxHumanPlayers',
+ 'GetClientCount',
+ 'GetClientName',
+ 'GetClientIP',
+ 'GetClientAuthString',
+ 'GetClientAuthId',
+ 'GetSteamAccountID',
+ 'GetClientUserId',
+ 'IsClientConnected',
+ 'IsClientInGame',
+ 'IsClientInKickQueue',
+ 'IsClientAuthorized',
+ 'IsFakeClient',
+ 'IsClientSourceTV',
+ 'IsClientReplay',
+ 'IsClientObserver',
+ 'IsPlayerAlive',
+ 'GetClientInfo',
+ 'GetClientTeam',
+ 'SetUserAdmin',
+ 'GetUserAdmin',
+ 'AddUserFlags',
+ 'RemoveUserFlags',
+ 'SetUserFlagBits',
+ 'GetUserFlagBits',
+ 'CanUserTarget',
+ 'RunAdminCacheChecks',
+ 'NotifyPostAdminCheck',
+ 'CreateFakeClient',
+ 'SetFakeClientConVar',
+ 'GetClientHealth',
+ 'GetClientModel',
+ 'GetClientWeapon',
+ 'GetClientMaxs',
+ 'GetClientMins',
+ 'GetClientAbsAngles',
+ 'GetClientAbsOrigin',
+ 'GetClientArmor',
+ 'GetClientDeaths',
+ 'GetClientFrags',
+ 'GetClientDataRate',
+ 'IsClientTimingOut',
+ 'GetClientTime',
+ 'GetClientLatency',
+ 'GetClientAvgLatency',
+ 'GetClientAvgLoss',
+ 'GetClientAvgChoke',
+ 'GetClientAvgData',
+ 'GetClientAvgPackets',
+ 'GetClientOfUserId',
+ 'KickClient',
+ 'KickClientEx',
+ 'ChangeClientTeam',
+ 'GetClientSerial',
+ 'GetClientFromSerial',
+ 'FindStringTable',
+ 'GetNumStringTables',
+ 'GetStringTableNumStrings',
+ 'GetStringTableMaxStrings',
+ 'GetStringTableName',
+ 'FindStringIndex',
+ 'ReadStringTable',
+ 'GetStringTableDataLength',
+ 'GetStringTableData',
+ 'SetStringTableData',
+ 'AddToStringTable',
+ 'LockStringTables',
+ 'AddFileToDownloadsTable',
+ 'GetEntityFlags',
+ 'SetEntityFlags',
+ 'GetEntityMoveType',
+ 'SetEntityMoveType',
+ 'GetEntityRenderMode',
+ 'SetEntityRenderMode',
+ 'GetEntityRenderFx',
+ 'SetEntityRenderFx',
+ 'SetEntityRenderColor',
+ 'GetEntityGravity',
+ 'SetEntityGravity',
+ 'SetEntityHealth',
+ 'GetClientButtons',
+ 'EntityOutput',
+ 'HookEntityOutput',
+ 'UnhookEntityOutput',
+ 'HookSingleEntityOutput',
+ 'UnhookSingleEntityOutput',
+ 'SMC_CreateParser',
+ 'SMC_ParseFile',
+ 'SMC_GetErrorString',
+ 'SMC_ParseStart',
+ 'SMC_SetParseStart',
+ 'SMC_ParseEnd',
+ 'SMC_SetParseEnd',
+ 'SMC_NewSection',
+ 'SMC_KeyValue',
+ 'SMC_EndSection',
+ 'SMC_SetReaders',
+ 'SMC_RawLine',
+ 'SMC_SetRawLine',
+ 'BfWriteBool',
+ 'BfWriteByte',
+ 'BfWriteChar',
+ 'BfWriteShort',
+ 'BfWriteWord',
+ 'BfWriteNum',
+ 'BfWriteFloat',
+ 'BfWriteString',
+ 'BfWriteEntity',
+ 'BfWriteAngle',
+ 'BfWriteCoord',
+ 'BfWriteVecCoord',
+ 'BfWriteVecNormal',
+ 'BfWriteAngles',
+ 'BfReadBool',
+ 'BfReadByte',
+ 'BfReadChar',
+ 'BfReadShort',
+ 'BfReadWord',
+ 'BfReadNum',
+ 'BfReadFloat',
+ 'BfReadString',
+ 'BfReadEntity',
+ 'BfReadAngle',
+ 'BfReadCoord',
+ 'BfReadVecCoord',
+ 'BfReadVecNormal',
+ 'BfReadAngles',
+ 'BfGetNumBytesLeft',
+ 'CreateProfiler',
+ 'StartProfiling',
+ 'StopProfiling',
+ 'GetProfilerTime',
+ 'OnPluginStart',
+ 'AskPluginLoad2',
+ 'OnPluginEnd',
+ 'OnPluginPauseChange',
+ 'OnGameFrame',
+ 'OnMapStart',
+ 'OnMapEnd',
+ 'OnConfigsExecuted',
+ 'OnAutoConfigsBuffered',
+ 'OnAllPluginsLoaded',
+ 'GetMyHandle',
+ 'GetPluginIterator',
+ 'MorePlugins',
+ 'ReadPlugin',
+ 'GetPluginStatus',
+ 'GetPluginFilename',
+ 'IsPluginDebugging',
+ 'GetPluginInfo',
+ 'FindPluginByNumber',
+ 'SetFailState',
+ 'ThrowError',
+ 'GetTime',
+ 'FormatTime',
+ 'LoadGameConfigFile',
+ 'GameConfGetOffset',
+ 'GameConfGetKeyValue',
+ 'GameConfGetAddress',
+ 'GetSysTickCount',
+ 'AutoExecConfig',
+ 'RegPluginLibrary',
+ 'LibraryExists',
+ 'GetExtensionFileStatus',
+ 'OnLibraryAdded',
+ 'OnLibraryRemoved',
+ 'ReadMapList',
+ 'SetMapListCompatBind',
+ 'OnClientFloodCheck',
+ 'OnClientFloodResult',
+ 'CanTestFeatures',
+ 'GetFeatureStatus',
+ 'RequireFeature',
+ 'LoadFromAddress',
+ 'StoreToAddress',
+ 'CreateStack',
+ 'PushStackCell',
+ 'PushStackString',
+ 'PushStackArray',
+ 'PopStackCell',
+ 'PopStackString',
+ 'PopStackArray',
+ 'IsStackEmpty',
+ 'PopStack',
+ 'OnPlayerRunCmd',
+ 'BuildPath',
+ 'OpenDirectory',
+ 'ReadDirEntry',
+ 'OpenFile',
+ 'DeleteFile',
+ 'ReadFileLine',
+ 'ReadFile',
+ 'ReadFileString',
+ 'WriteFile',
+ 'WriteFileString',
+ 'WriteFileLine',
+ 'ReadFileCell',
+ 'WriteFileCell',
+ 'IsEndOfFile',
+ 'FileSeek',
+ 'FilePosition',
+ 'FileExists',
+ 'RenameFile',
+ 'DirExists',
+ 'FileSize',
+ 'FlushFile',
+ 'RemoveDir',
+ 'CreateDirectory',
+ 'GetFileTime',
+ 'LogToOpenFile',
+ 'LogToOpenFileEx',
+ 'PbReadInt',
+ 'PbReadFloat',
+ 'PbReadBool',
+ 'PbReadString',
+ 'PbReadColor',
+ 'PbReadAngle',
+ 'PbReadVector',
+ 'PbReadVector2D',
+ 'PbGetRepeatedFieldCount',
+ 'PbSetInt',
+ 'PbSetFloat',
+ 'PbSetBool',
+ 'PbSetString',
+ 'PbSetColor',
+ 'PbSetAngle',
+ 'PbSetVector',
+ 'PbSetVector2D',
+ 'PbAddInt',
+ 'PbAddFloat',
+ 'PbAddBool',
+ 'PbAddString',
+ 'PbAddColor',
+ 'PbAddAngle',
+ 'PbAddVector',
+ 'PbAddVector2D',
+ 'PbRemoveRepeatedFieldValue',
+ 'PbReadMessage',
+ 'PbReadRepeatedMessage',
+ 'PbAddMessage',
+ 'SetNextMap',
+ 'GetNextMap',
+ 'ForceChangeLevel',
+ 'GetMapHistorySize',
+ 'GetMapHistory',
+ 'GeoipCode2',
+ 'GeoipCode3',
+ 'GeoipCountry',
+ 'MarkNativeAsOptional',
+ 'RegClientCookie',
+ 'FindClientCookie',
+ 'SetClientCookie',
+ 'GetClientCookie',
+ 'SetAuthIdCookie',
+ 'AreClientCookiesCached',
+ 'OnClientCookiesCached',
+ 'CookieMenuHandler',
+ 'SetCookiePrefabMenu',
+ 'SetCookieMenuItem',
+ 'ShowCookieMenu',
+ 'GetCookieIterator',
+ 'ReadCookieIterator',
+ 'GetCookieAccess',
+ 'GetClientCookieTime',
+ 'LoadTranslations',
+ 'SetGlobalTransTarget',
+ 'GetClientLanguage',
+ 'GetServerLanguage',
+ 'GetLanguageCount',
+ 'GetLanguageInfo',
+ 'SetClientLanguage',
+ 'GetLanguageByCode',
+ 'GetLanguageByName',
+ 'CS_OnBuyCommand',
+ 'CS_OnCSWeaponDrop',
+ 'CS_OnGetWeaponPrice',
+ 'CS_OnTerminateRound',
+ 'CS_RespawnPlayer',
+ 'CS_SwitchTeam',
+ 'CS_DropWeapon',
+ 'CS_TerminateRound',
+ 'CS_GetTranslatedWeaponAlias',
+ 'CS_GetWeaponPrice',
+ 'CS_GetClientClanTag',
+ 'CS_SetClientClanTag',
+ 'CS_GetTeamScore',
+ 'CS_SetTeamScore',
+ 'CS_GetMVPCount',
+ 'CS_SetMVPCount',
+ 'CS_GetClientContributionScore',
+ 'CS_SetClientContributionScore',
+ 'CS_GetClientAssists',
+ 'CS_SetClientAssists',
+ 'CS_AliasToWeaponID',
+ 'CS_WeaponIDToAlias',
+ 'CS_IsValidWeaponID',
+ 'CS_UpdateClientModel',
+ 'LogToGame',
+ 'SetRandomSeed',
+ 'GetRandomFloat',
+ 'GetRandomInt',
+ 'IsMapValid',
+ 'IsDedicatedServer',
+ 'GetEngineTime',
+ 'GetGameTime',
+ 'GetGameTickCount',
+ 'GetGameDescription',
+ 'GetGameFolderName',
+ 'GetCurrentMap',
+ 'PrecacheModel',
+ 'PrecacheSentenceFile',
+ 'PrecacheDecal',
+ 'PrecacheGeneric',
+ 'IsModelPrecached',
+ 'IsDecalPrecached',
+ 'IsGenericPrecached',
+ 'PrecacheSound',
+ 'IsSoundPrecached',
+ 'CreateDialog',
+ 'GetEngineVersion',
+ 'PrintToChat',
+ 'PrintToChatAll',
+ 'PrintCenterText',
+ 'PrintCenterTextAll',
+ 'PrintHintText',
+ 'PrintHintTextToAll',
+ 'ShowVGUIPanel',
+ 'CreateHudSynchronizer',
+ 'SetHudTextParams',
+ 'SetHudTextParamsEx',
+ 'ShowSyncHudText',
+ 'ClearSyncHud',
+ 'ShowHudText',
+ 'ShowMOTDPanel',
+ 'DisplayAskConnectBox',
+ 'EntIndexToEntRef',
+ 'EntRefToEntIndex',
+ 'MakeCompatEntRef',
+ 'SetClientViewEntity',
+ 'SetLightStyle',
+ 'GetClientEyePosition',
+ 'CreateDataPack',
+ 'WritePackCell',
+ 'WritePackFloat',
+ 'WritePackString',
+ 'ReadPackCell',
+ 'ReadPackFloat',
+ 'ReadPackString',
+ 'ResetPack',
+ 'GetPackPosition',
+ 'SetPackPosition',
+ 'IsPackReadable',
+ 'LogMessage',
+ 'LogToFile',
+ 'LogToFileEx',
+ 'LogAction',
+ 'LogError',
+ 'OnLogAction',
+ 'GameLogHook',
+ 'AddGameLogHook',
+ 'RemoveGameLogHook',
+ 'FindTeamByName',
+ 'StartPrepSDKCall',
+ 'PrepSDKCall_SetVirtual',
+ 'PrepSDKCall_SetSignature',
+ 'PrepSDKCall_SetAddress',
+ 'PrepSDKCall_SetFromConf',
+ 'PrepSDKCall_SetReturnInfo',
+ 'PrepSDKCall_AddParameter',
+ 'EndPrepSDKCall',
+ 'SDKCall',
+ 'GetPlayerResourceEntity',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ import sys
+ try:
+ from urllib import FancyURLopener
+ except ImportError:
+ from urllib.request import FancyURLopener
+
+ from pygments.util import format_lines
+
+ # urllib ends up wanting to import a module called 'math' -- if
+ # pygments/lexers is in the path, this ends badly.
+ for i in range(len(sys.path)-1, -1, -1):
+ if sys.path[i].endswith('/lexers'):
+ del sys.path[i]
+
+ class Opener(FancyURLopener):
+ version = 'Mozilla/5.0 (Pygments Sourcemod Builtins Update)'
+
+ opener = Opener()
+
+ def get_version():
+ f = opener.open('http://docs.sourcemod.net/api/index.php')
+ r = re.compile(r'SourceMod v\.<b>([\d\.]+(?:-\w+)?)</td>')
+ for line in f:
+ m = r.search(line)
+ if m is not None:
+ return m.groups()[0]
+ raise ValueError('No version in api docs')
+
+ def get_sm_functions():
+ f = opener.open('http://docs.sourcemod.net/api/SMfuncs.js')
+ r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
+ functions = []
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ functions.append(m.groups()[0])
+ return functions
+
+ def regenerate(filename, natives):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('FUNCTIONS = (')]
+ footer = content[content.find("if __name__ == '__main__':")-1:]
+
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write(format_lines('FUNCTIONS', natives))
+ fp.write(footer)
+
+ def run():
+ version = get_version()
+ print('> Downloading function index for SourceMod %s' % version)
+ functions = get_sm_functions()
+ print('> %d functions found:' % len(functions))
+
+ functionlist = []
+ for full_function_name in functions:
+ print('>> %s' % full_function_name)
+ functionlist.append(full_function_name)
+
+ regenerate(__file__, functionlist)
+
+
+ run()
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_stan_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_stan_builtins.py
index e95f5b1e98..019dfd8b26 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_stan_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_stan_builtins.py
@@ -1,558 +1,558 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._stan_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of functions for Stan used by
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._stan_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer. This is for Stan language version 2.17.0.
-
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-KEYWORDS = (
+ :license: BSD, see LICENSE for details.
+"""
+
+KEYWORDS = (
'break',
'continue',
- 'else',
- 'for',
- 'if',
- 'in',
- 'print',
- 'reject',
- 'return',
+ 'else',
+ 'for',
+ 'if',
+ 'in',
+ 'print',
+ 'reject',
+ 'return',
'while',
-)
-
-TYPES = (
- 'cholesky_factor_corr',
- 'cholesky_factor_cov',
- 'corr_matrix',
- 'cov_matrix',
- 'int',
- 'matrix',
- 'ordered',
- 'positive_ordered',
- 'real',
- 'row_vector',
- 'simplex',
- 'unit_vector',
- 'vector',
+)
+
+TYPES = (
+ 'cholesky_factor_corr',
+ 'cholesky_factor_cov',
+ 'corr_matrix',
+ 'cov_matrix',
+ 'int',
+ 'matrix',
+ 'ordered',
+ 'positive_ordered',
+ 'real',
+ 'row_vector',
+ 'simplex',
+ 'unit_vector',
+ 'vector',
'void',
)
-
-FUNCTIONS = (
- 'abs',
- 'acos',
- 'acosh',
+
+FUNCTIONS = (
+ 'abs',
+ 'acos',
+ 'acosh',
'algebra_solver',
'append_array',
- 'append_col',
- 'append_row',
- 'asin',
- 'asinh',
- 'atan',
- 'atan2',
- 'atanh',
- 'bernoulli_cdf',
+ 'append_col',
+ 'append_row',
+ 'asin',
+ 'asinh',
+ 'atan',
+ 'atan2',
+ 'atanh',
+ 'bernoulli_cdf',
'bernoulli_lccdf',
'bernoulli_lcdf',
'bernoulli_logit_lpmf',
'bernoulli_logit_rng',
'bernoulli_lpmf',
- 'bernoulli_rng',
- 'bessel_first_kind',
- 'bessel_second_kind',
- 'beta_binomial_cdf',
+ 'bernoulli_rng',
+ 'bessel_first_kind',
+ 'bessel_second_kind',
+ 'beta_binomial_cdf',
'beta_binomial_lccdf',
'beta_binomial_lcdf',
'beta_binomial_lpmf',
- 'beta_binomial_rng',
- 'beta_cdf',
+ 'beta_binomial_rng',
+ 'beta_cdf',
'beta_lccdf',
'beta_lcdf',
'beta_lpdf',
- 'beta_rng',
- 'binary_log_loss',
- 'binomial_cdf',
- 'binomial_coefficient_log',
+ 'beta_rng',
+ 'binary_log_loss',
+ 'binomial_cdf',
+ 'binomial_coefficient_log',
'binomial_lccdf',
'binomial_lcdf',
'binomial_logit_lpmf',
'binomial_lpmf',
- 'binomial_rng',
- 'block',
+ 'binomial_rng',
+ 'block',
'categorical_logit_lpmf',
'categorical_logit_rng',
'categorical_lpmf',
- 'categorical_rng',
- 'cauchy_cdf',
+ 'categorical_rng',
+ 'cauchy_cdf',
'cauchy_lccdf',
'cauchy_lcdf',
'cauchy_lpdf',
- 'cauchy_rng',
- 'cbrt',
- 'ceil',
- 'chi_square_cdf',
+ 'cauchy_rng',
+ 'cbrt',
+ 'ceil',
+ 'chi_square_cdf',
'chi_square_lccdf',
'chi_square_lcdf',
'chi_square_lpdf',
- 'chi_square_rng',
- 'cholesky_decompose',
+ 'chi_square_rng',
+ 'cholesky_decompose',
'choose',
- 'col',
- 'cols',
- 'columns_dot_product',
- 'columns_dot_self',
- 'cos',
- 'cosh',
+ 'col',
+ 'cols',
+ 'columns_dot_product',
+ 'columns_dot_self',
+ 'cos',
+ 'cosh',
'cov_exp_quad',
- 'crossprod',
- 'csr_extract_u',
- 'csr_extract_v',
- 'csr_extract_w',
- 'csr_matrix_times_vector',
- 'csr_to_dense_matrix',
- 'cumulative_sum',
- 'determinant',
- 'diag_matrix',
- 'diag_post_multiply',
- 'diag_pre_multiply',
- 'diagonal',
- 'digamma',
- 'dims',
+ 'crossprod',
+ 'csr_extract_u',
+ 'csr_extract_v',
+ 'csr_extract_w',
+ 'csr_matrix_times_vector',
+ 'csr_to_dense_matrix',
+ 'cumulative_sum',
+ 'determinant',
+ 'diag_matrix',
+ 'diag_post_multiply',
+ 'diag_pre_multiply',
+ 'diagonal',
+ 'digamma',
+ 'dims',
'dirichlet_lpdf',
- 'dirichlet_rng',
- 'distance',
- 'dot_product',
- 'dot_self',
- 'double_exponential_cdf',
+ 'dirichlet_rng',
+ 'distance',
+ 'dot_product',
+ 'dot_self',
+ 'double_exponential_cdf',
'double_exponential_lccdf',
'double_exponential_lcdf',
'double_exponential_lpdf',
- 'double_exponential_rng',
- 'e',
- 'eigenvalues_sym',
- 'eigenvectors_sym',
- 'erf',
- 'erfc',
- 'exp',
- 'exp2',
- 'exp_mod_normal_cdf',
+ 'double_exponential_rng',
+ 'e',
+ 'eigenvalues_sym',
+ 'eigenvectors_sym',
+ 'erf',
+ 'erfc',
+ 'exp',
+ 'exp2',
+ 'exp_mod_normal_cdf',
'exp_mod_normal_lccdf',
'exp_mod_normal_lcdf',
'exp_mod_normal_lpdf',
- 'exp_mod_normal_rng',
- 'expm1',
- 'exponential_cdf',
+ 'exp_mod_normal_rng',
+ 'expm1',
+ 'exponential_cdf',
'exponential_lccdf',
'exponential_lcdf',
'exponential_lpdf',
- 'exponential_rng',
- 'fabs',
- 'falling_factorial',
- 'fdim',
- 'floor',
- 'fma',
- 'fmax',
- 'fmin',
- 'fmod',
- 'frechet_cdf',
+ 'exponential_rng',
+ 'fabs',
+ 'falling_factorial',
+ 'fdim',
+ 'floor',
+ 'fma',
+ 'fmax',
+ 'fmin',
+ 'fmod',
+ 'frechet_cdf',
'frechet_lccdf',
'frechet_lcdf',
'frechet_lpdf',
- 'frechet_rng',
- 'gamma_cdf',
+ 'frechet_rng',
+ 'gamma_cdf',
'gamma_lccdf',
'gamma_lcdf',
'gamma_lpdf',
- 'gamma_p',
- 'gamma_q',
- 'gamma_rng',
+ 'gamma_p',
+ 'gamma_q',
+ 'gamma_rng',
'gaussian_dlm_obs_lpdf',
- 'get_lp',
- 'gumbel_cdf',
+ 'get_lp',
+ 'gumbel_cdf',
'gumbel_lccdf',
'gumbel_lcdf',
'gumbel_lpdf',
- 'gumbel_rng',
- 'head',
+ 'gumbel_rng',
+ 'head',
'hypergeometric_lpmf',
- 'hypergeometric_rng',
- 'hypot',
+ 'hypergeometric_rng',
+ 'hypot',
'inc_beta',
- 'int_step',
+ 'int_step',
'integrate_ode',
'integrate_ode_bdf',
'integrate_ode_rk45',
- 'inv',
- 'inv_chi_square_cdf',
+ 'inv',
+ 'inv_chi_square_cdf',
'inv_chi_square_lccdf',
'inv_chi_square_lcdf',
'inv_chi_square_lpdf',
- 'inv_chi_square_rng',
- 'inv_cloglog',
- 'inv_gamma_cdf',
+ 'inv_chi_square_rng',
+ 'inv_cloglog',
+ 'inv_gamma_cdf',
'inv_gamma_lccdf',
'inv_gamma_lcdf',
'inv_gamma_lpdf',
- 'inv_gamma_rng',
- 'inv_logit',
+ 'inv_gamma_rng',
+ 'inv_logit',
'inv_Phi',
- 'inv_sqrt',
- 'inv_square',
+ 'inv_sqrt',
+ 'inv_square',
'inv_wishart_lpdf',
- 'inv_wishart_rng',
- 'inverse',
- 'inverse_spd',
- 'is_inf',
- 'is_nan',
- 'lbeta',
+ 'inv_wishart_rng',
+ 'inverse',
+ 'inverse_spd',
+ 'is_inf',
+ 'is_nan',
+ 'lbeta',
'lchoose',
- 'lgamma',
+ 'lgamma',
'lkj_corr_cholesky_lpdf',
- 'lkj_corr_cholesky_rng',
+ 'lkj_corr_cholesky_rng',
'lkj_corr_lpdf',
- 'lkj_corr_rng',
- 'lmgamma',
+ 'lkj_corr_rng',
+ 'lmgamma',
'lmultiply',
- 'log',
- 'log10',
- 'log1m',
- 'log1m_exp',
- 'log1m_inv_logit',
- 'log1p',
- 'log1p_exp',
- 'log2',
- 'log_determinant',
- 'log_diff_exp',
- 'log_falling_factorial',
- 'log_inv_logit',
- 'log_mix',
- 'log_rising_factorial',
- 'log_softmax',
- 'log_sum_exp',
- 'logistic_cdf',
+ 'log',
+ 'log10',
+ 'log1m',
+ 'log1m_exp',
+ 'log1m_inv_logit',
+ 'log1p',
+ 'log1p_exp',
+ 'log2',
+ 'log_determinant',
+ 'log_diff_exp',
+ 'log_falling_factorial',
+ 'log_inv_logit',
+ 'log_mix',
+ 'log_rising_factorial',
+ 'log_softmax',
+ 'log_sum_exp',
+ 'logistic_cdf',
'logistic_lccdf',
'logistic_lcdf',
'logistic_lpdf',
- 'logistic_rng',
- 'logit',
- 'lognormal_cdf',
+ 'logistic_rng',
+ 'logit',
+ 'lognormal_cdf',
'lognormal_lccdf',
'lognormal_lcdf',
'lognormal_lpdf',
- 'lognormal_rng',
- 'machine_precision',
+ 'lognormal_rng',
+ 'machine_precision',
'matrix_exp',
- 'max',
+ 'max',
'mdivide_left_spd',
- 'mdivide_left_tri_low',
+ 'mdivide_left_tri_low',
'mdivide_right_spd',
- 'mdivide_right_tri_low',
- 'mean',
- 'min',
- 'modified_bessel_first_kind',
- 'modified_bessel_second_kind',
+ 'mdivide_right_tri_low',
+ 'mean',
+ 'min',
+ 'modified_bessel_first_kind',
+ 'modified_bessel_second_kind',
'multi_gp_cholesky_lpdf',
'multi_gp_lpdf',
'multi_normal_cholesky_lpdf',
- 'multi_normal_cholesky_rng',
+ 'multi_normal_cholesky_rng',
'multi_normal_lpdf',
'multi_normal_prec_lpdf',
- 'multi_normal_rng',
+ 'multi_normal_rng',
'multi_student_t_lpdf',
- 'multi_student_t_rng',
+ 'multi_student_t_rng',
'multinomial_lpmf',
- 'multinomial_rng',
- 'multiply_log',
- 'multiply_lower_tri_self_transpose',
- 'neg_binomial_2_cdf',
+ 'multinomial_rng',
+ 'multiply_log',
+ 'multiply_lower_tri_self_transpose',
+ 'neg_binomial_2_cdf',
'neg_binomial_2_lccdf',
'neg_binomial_2_lcdf',
'neg_binomial_2_log_lpmf',
- 'neg_binomial_2_log_rng',
+ 'neg_binomial_2_log_rng',
'neg_binomial_2_lpmf',
- 'neg_binomial_2_rng',
- 'neg_binomial_cdf',
+ 'neg_binomial_2_rng',
+ 'neg_binomial_cdf',
'neg_binomial_lccdf',
'neg_binomial_lcdf',
'neg_binomial_lpmf',
- 'neg_binomial_rng',
- 'negative_infinity',
- 'normal_cdf',
+ 'neg_binomial_rng',
+ 'negative_infinity',
+ 'normal_cdf',
'normal_lccdf',
'normal_lcdf',
'normal_lpdf',
- 'normal_rng',
- 'not_a_number',
- 'num_elements',
+ 'normal_rng',
+ 'not_a_number',
+ 'num_elements',
'ordered_logistic_lpmf',
- 'ordered_logistic_rng',
- 'owens_t',
- 'pareto_cdf',
+ 'ordered_logistic_rng',
+ 'owens_t',
+ 'pareto_cdf',
'pareto_lccdf',
'pareto_lcdf',
'pareto_lpdf',
- 'pareto_rng',
- 'pareto_type_2_cdf',
+ 'pareto_rng',
+ 'pareto_type_2_cdf',
'pareto_type_2_lccdf',
'pareto_type_2_lcdf',
'pareto_type_2_lpdf',
- 'pareto_type_2_rng',
+ 'pareto_type_2_rng',
'Phi',
'Phi_approx',
- 'pi',
- 'poisson_cdf',
+ 'pi',
+ 'poisson_cdf',
'poisson_lccdf',
'poisson_lcdf',
'poisson_log_lpmf',
- 'poisson_log_rng',
+ 'poisson_log_rng',
'poisson_lpmf',
- 'poisson_rng',
- 'positive_infinity',
- 'pow',
+ 'poisson_rng',
+ 'positive_infinity',
+ 'pow',
'print',
- 'prod',
- 'qr_Q',
- 'qr_R',
- 'quad_form',
- 'quad_form_diag',
- 'quad_form_sym',
- 'rank',
- 'rayleigh_cdf',
+ 'prod',
+ 'qr_Q',
+ 'qr_R',
+ 'quad_form',
+ 'quad_form_diag',
+ 'quad_form_sym',
+ 'rank',
+ 'rayleigh_cdf',
'rayleigh_lccdf',
'rayleigh_lcdf',
'rayleigh_lpdf',
- 'rayleigh_rng',
+ 'rayleigh_rng',
'reject',
- 'rep_array',
- 'rep_matrix',
- 'rep_row_vector',
- 'rep_vector',
- 'rising_factorial',
- 'round',
- 'row',
- 'rows',
- 'rows_dot_product',
- 'rows_dot_self',
- 'scaled_inv_chi_square_cdf',
+ 'rep_array',
+ 'rep_matrix',
+ 'rep_row_vector',
+ 'rep_vector',
+ 'rising_factorial',
+ 'round',
+ 'row',
+ 'rows',
+ 'rows_dot_product',
+ 'rows_dot_self',
+ 'scaled_inv_chi_square_cdf',
'scaled_inv_chi_square_lccdf',
'scaled_inv_chi_square_lcdf',
'scaled_inv_chi_square_lpdf',
- 'scaled_inv_chi_square_rng',
- 'sd',
- 'segment',
- 'sin',
- 'singular_values',
- 'sinh',
- 'size',
- 'skew_normal_cdf',
+ 'scaled_inv_chi_square_rng',
+ 'sd',
+ 'segment',
+ 'sin',
+ 'singular_values',
+ 'sinh',
+ 'size',
+ 'skew_normal_cdf',
'skew_normal_lccdf',
'skew_normal_lcdf',
'skew_normal_lpdf',
- 'skew_normal_rng',
- 'softmax',
- 'sort_asc',
- 'sort_desc',
- 'sort_indices_asc',
- 'sort_indices_desc',
- 'sqrt',
- 'sqrt2',
- 'square',
- 'squared_distance',
- 'step',
- 'student_t_cdf',
+ 'skew_normal_rng',
+ 'softmax',
+ 'sort_asc',
+ 'sort_desc',
+ 'sort_indices_asc',
+ 'sort_indices_desc',
+ 'sqrt',
+ 'sqrt2',
+ 'square',
+ 'squared_distance',
+ 'step',
+ 'student_t_cdf',
'student_t_lccdf',
'student_t_lcdf',
'student_t_lpdf',
- 'student_t_rng',
- 'sub_col',
- 'sub_row',
- 'sum',
- 'tail',
- 'tan',
- 'tanh',
+ 'student_t_rng',
+ 'sub_col',
+ 'sub_row',
+ 'sum',
+ 'tail',
+ 'tan',
+ 'tanh',
'target',
- 'tcrossprod',
- 'tgamma',
- 'to_array_1d',
- 'to_array_2d',
- 'to_matrix',
- 'to_row_vector',
- 'to_vector',
- 'trace',
- 'trace_gen_quad_form',
- 'trace_quad_form',
- 'trigamma',
- 'trunc',
- 'uniform_cdf',
+ 'tcrossprod',
+ 'tgamma',
+ 'to_array_1d',
+ 'to_array_2d',
+ 'to_matrix',
+ 'to_row_vector',
+ 'to_vector',
+ 'trace',
+ 'trace_gen_quad_form',
+ 'trace_quad_form',
+ 'trigamma',
+ 'trunc',
+ 'uniform_cdf',
'uniform_lccdf',
'uniform_lcdf',
'uniform_lpdf',
- 'uniform_rng',
- 'variance',
+ 'uniform_rng',
+ 'variance',
'von_mises_lpdf',
- 'von_mises_rng',
- 'weibull_cdf',
+ 'von_mises_rng',
+ 'weibull_cdf',
'weibull_lccdf',
'weibull_lcdf',
'weibull_lpdf',
- 'weibull_rng',
+ 'weibull_rng',
'wiener_lpdf',
'wishart_lpdf',
'wishart_rng',
-)
-
-DISTRIBUTIONS = (
- 'bernoulli',
- 'bernoulli_logit',
- 'beta',
- 'beta_binomial',
- 'binomial',
- 'binomial_logit',
- 'categorical',
- 'categorical_logit',
- 'cauchy',
- 'chi_square',
- 'dirichlet',
- 'double_exponential',
- 'exp_mod_normal',
- 'exponential',
- 'frechet',
- 'gamma',
- 'gaussian_dlm_obs',
- 'gumbel',
- 'hypergeometric',
- 'inv_chi_square',
- 'inv_gamma',
- 'inv_wishart',
- 'lkj_corr',
- 'lkj_corr_cholesky',
- 'logistic',
- 'lognormal',
- 'multi_gp',
- 'multi_gp_cholesky',
- 'multi_normal',
- 'multi_normal_cholesky',
- 'multi_normal_prec',
- 'multi_student_t',
- 'multinomial',
- 'neg_binomial',
- 'neg_binomial_2',
- 'neg_binomial_2_log',
- 'normal',
- 'ordered_logistic',
- 'pareto',
- 'pareto_type_2',
- 'poisson',
- 'poisson_log',
- 'rayleigh',
- 'scaled_inv_chi_square',
- 'skew_normal',
- 'student_t',
- 'uniform',
- 'von_mises',
- 'weibull',
- 'wiener',
+)
+
+DISTRIBUTIONS = (
+ 'bernoulli',
+ 'bernoulli_logit',
+ 'beta',
+ 'beta_binomial',
+ 'binomial',
+ 'binomial_logit',
+ 'categorical',
+ 'categorical_logit',
+ 'cauchy',
+ 'chi_square',
+ 'dirichlet',
+ 'double_exponential',
+ 'exp_mod_normal',
+ 'exponential',
+ 'frechet',
+ 'gamma',
+ 'gaussian_dlm_obs',
+ 'gumbel',
+ 'hypergeometric',
+ 'inv_chi_square',
+ 'inv_gamma',
+ 'inv_wishart',
+ 'lkj_corr',
+ 'lkj_corr_cholesky',
+ 'logistic',
+ 'lognormal',
+ 'multi_gp',
+ 'multi_gp_cholesky',
+ 'multi_normal',
+ 'multi_normal_cholesky',
+ 'multi_normal_prec',
+ 'multi_student_t',
+ 'multinomial',
+ 'neg_binomial',
+ 'neg_binomial_2',
+ 'neg_binomial_2_log',
+ 'normal',
+ 'ordered_logistic',
+ 'pareto',
+ 'pareto_type_2',
+ 'poisson',
+ 'poisson_log',
+ 'rayleigh',
+ 'scaled_inv_chi_square',
+ 'skew_normal',
+ 'student_t',
+ 'uniform',
+ 'von_mises',
+ 'weibull',
+ 'wiener',
'wishart',
-)
-
-RESERVED = (
- 'alignas',
- 'alignof',
- 'and',
- 'and_eq',
- 'asm',
- 'auto',
- 'bitand',
- 'bitor',
- 'bool',
- 'break',
- 'case',
- 'catch',
- 'char',
- 'char16_t',
- 'char32_t',
- 'class',
- 'compl',
- 'const',
- 'const_cast',
- 'constexpr',
- 'continue',
- 'decltype',
- 'default',
- 'delete',
- 'do',
- 'double',
- 'dynamic_cast',
+)
+
+RESERVED = (
+ 'alignas',
+ 'alignof',
+ 'and',
+ 'and_eq',
+ 'asm',
+ 'auto',
+ 'bitand',
+ 'bitor',
+ 'bool',
+ 'break',
+ 'case',
+ 'catch',
+ 'char',
+ 'char16_t',
+ 'char32_t',
+ 'class',
+ 'compl',
+ 'const',
+ 'const_cast',
+ 'constexpr',
+ 'continue',
+ 'decltype',
+ 'default',
+ 'delete',
+ 'do',
+ 'double',
+ 'dynamic_cast',
'else',
- 'enum',
- 'explicit',
- 'export',
- 'extern',
- 'false',
- 'float',
+ 'enum',
+ 'explicit',
+ 'export',
+ 'extern',
+ 'false',
+ 'float',
'for',
- 'friend',
- 'fvar',
- 'goto',
+ 'friend',
+ 'fvar',
+ 'goto',
'if',
'in',
- 'inline',
- 'int',
- 'long',
+ 'inline',
+ 'int',
+ 'long',
'lp__',
- 'mutable',
- 'namespace',
- 'new',
- 'noexcept',
- 'not',
- 'not_eq',
- 'nullptr',
- 'operator',
- 'or',
- 'or_eq',
- 'private',
- 'protected',
- 'public',
- 'register',
- 'reinterpret_cast',
- 'repeat',
+ 'mutable',
+ 'namespace',
+ 'new',
+ 'noexcept',
+ 'not',
+ 'not_eq',
+ 'nullptr',
+ 'operator',
+ 'or',
+ 'or_eq',
+ 'private',
+ 'protected',
+ 'public',
+ 'register',
+ 'reinterpret_cast',
+ 'repeat',
'return',
- 'short',
- 'signed',
- 'sizeof',
+ 'short',
+ 'signed',
+ 'sizeof',
'STAN_MAJOR',
'STAN_MATH_MAJOR',
'STAN_MATH_MINOR',
'STAN_MATH_PATCH',
'STAN_MINOR',
'STAN_PATCH',
- 'static',
- 'static_assert',
- 'static_cast',
- 'struct',
- 'switch',
- 'template',
- 'then',
- 'this',
- 'thread_local',
- 'throw',
- 'true',
- 'try',
- 'typedef',
- 'typeid',
- 'typename',
- 'union',
- 'unsigned',
- 'until',
- 'using',
- 'var',
- 'virtual',
- 'void',
- 'volatile',
- 'wchar_t',
+ 'static',
+ 'static_assert',
+ 'static_cast',
+ 'struct',
+ 'switch',
+ 'template',
+ 'then',
+ 'this',
+ 'thread_local',
+ 'throw',
+ 'true',
+ 'try',
+ 'typedef',
+ 'typeid',
+ 'typename',
+ 'union',
+ 'unsigned',
+ 'until',
+ 'using',
+ 'var',
+ 'virtual',
+ 'void',
+ 'volatile',
+ 'wchar_t',
'while',
- 'xor',
+ 'xor',
'xor_eq',
-)
+)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/_vim_builtins.py b/contrib/python/Pygments/py2/pygments/lexers/_vim_builtins.py
index 39c9ed198d..d42b37e948 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/_vim_builtins.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/_vim_builtins.py
@@ -1,1939 +1,1939 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._vim_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file is autogenerated by scripts/get_vimkw.py
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._vim_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file is autogenerated by scripts/get_vimkw.py
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Split up in multiple functions so it's importable by jython, which has a
-# per-method size limit.
-
-def _getauto():
- var = (
- ('BufAdd','BufAdd'),
- ('BufCreate','BufCreate'),
- ('BufDelete','BufDelete'),
- ('BufEnter','BufEnter'),
- ('BufFilePost','BufFilePost'),
- ('BufFilePre','BufFilePre'),
- ('BufHidden','BufHidden'),
- ('BufLeave','BufLeave'),
- ('BufNew','BufNew'),
- ('BufNewFile','BufNewFile'),
- ('BufRead','BufRead'),
- ('BufReadCmd','BufReadCmd'),
- ('BufReadPost','BufReadPost'),
- ('BufReadPre','BufReadPre'),
- ('BufUnload','BufUnload'),
- ('BufWinEnter','BufWinEnter'),
- ('BufWinLeave','BufWinLeave'),
- ('BufWipeout','BufWipeout'),
- ('BufWrite','BufWrite'),
- ('BufWriteCmd','BufWriteCmd'),
- ('BufWritePost','BufWritePost'),
- ('BufWritePre','BufWritePre'),
- ('Cmd','Cmd'),
- ('CmdwinEnter','CmdwinEnter'),
- ('CmdwinLeave','CmdwinLeave'),
- ('ColorScheme','ColorScheme'),
- ('CompleteDone','CompleteDone'),
- ('CursorHold','CursorHold'),
- ('CursorHoldI','CursorHoldI'),
- ('CursorMoved','CursorMoved'),
- ('CursorMovedI','CursorMovedI'),
- ('EncodingChanged','EncodingChanged'),
- ('FileAppendCmd','FileAppendCmd'),
- ('FileAppendPost','FileAppendPost'),
- ('FileAppendPre','FileAppendPre'),
- ('FileChangedRO','FileChangedRO'),
- ('FileChangedShell','FileChangedShell'),
- ('FileChangedShellPost','FileChangedShellPost'),
- ('FileEncoding','FileEncoding'),
- ('FileReadCmd','FileReadCmd'),
- ('FileReadPost','FileReadPost'),
- ('FileReadPre','FileReadPre'),
- ('FileType','FileType'),
- ('FileWriteCmd','FileWriteCmd'),
- ('FileWritePost','FileWritePost'),
- ('FileWritePre','FileWritePre'),
- ('FilterReadPost','FilterReadPost'),
- ('FilterReadPre','FilterReadPre'),
- ('FilterWritePost','FilterWritePost'),
- ('FilterWritePre','FilterWritePre'),
- ('FocusGained','FocusGained'),
- ('FocusLost','FocusLost'),
- ('FuncUndefined','FuncUndefined'),
- ('GUIEnter','GUIEnter'),
- ('GUIFailed','GUIFailed'),
- ('InsertChange','InsertChange'),
- ('InsertCharPre','InsertCharPre'),
- ('InsertEnter','InsertEnter'),
- ('InsertLeave','InsertLeave'),
- ('MenuPopup','MenuPopup'),
- ('QuickFixCmdPost','QuickFixCmdPost'),
- ('QuickFixCmdPre','QuickFixCmdPre'),
- ('QuitPre','QuitPre'),
- ('RemoteReply','RemoteReply'),
- ('SessionLoadPost','SessionLoadPost'),
- ('ShellCmdPost','ShellCmdPost'),
- ('ShellFilterPost','ShellFilterPost'),
- ('SourceCmd','SourceCmd'),
- ('SourcePre','SourcePre'),
- ('SpellFileMissing','SpellFileMissing'),
- ('StdinReadPost','StdinReadPost'),
- ('StdinReadPre','StdinReadPre'),
- ('SwapExists','SwapExists'),
- ('Syntax','Syntax'),
- ('TabEnter','TabEnter'),
- ('TabLeave','TabLeave'),
- ('TermChanged','TermChanged'),
- ('TermResponse','TermResponse'),
- ('TextChanged','TextChanged'),
- ('TextChangedI','TextChangedI'),
- ('User','User'),
- ('UserGettingBored','UserGettingBored'),
- ('VimEnter','VimEnter'),
- ('VimLeave','VimLeave'),
- ('VimLeavePre','VimLeavePre'),
- ('VimResized','VimResized'),
- ('WinEnter','WinEnter'),
- ('WinLeave','WinLeave'),
- ('event','event'),
- )
- return var
-auto = _getauto()
-
-def _getcommand():
- var = (
- ('a','a'),
- ('ab','ab'),
- ('abc','abclear'),
- ('abo','aboveleft'),
- ('al','all'),
- ('ar','ar'),
- ('ar','args'),
- ('arga','argadd'),
- ('argd','argdelete'),
- ('argdo','argdo'),
- ('arge','argedit'),
- ('argg','argglobal'),
- ('argl','arglocal'),
- ('argu','argument'),
- ('as','ascii'),
- ('au','au'),
- ('b','buffer'),
- ('bN','bNext'),
- ('ba','ball'),
- ('bad','badd'),
- ('bd','bdelete'),
- ('bel','belowright'),
- ('bf','bfirst'),
- ('bl','blast'),
- ('bm','bmodified'),
- ('bn','bnext'),
- ('bo','botright'),
- ('bp','bprevious'),
- ('br','br'),
- ('br','brewind'),
- ('brea','break'),
- ('breaka','breakadd'),
- ('breakd','breakdel'),
- ('breakl','breaklist'),
- ('bro','browse'),
- ('bu','bu'),
- ('buf','buf'),
- ('bufdo','bufdo'),
- ('buffers','buffers'),
- ('bun','bunload'),
- ('bw','bwipeout'),
- ('c','c'),
- ('c','change'),
- ('cN','cN'),
- ('cN','cNext'),
- ('cNf','cNf'),
- ('cNf','cNfile'),
- ('cabc','cabclear'),
- ('cad','cad'),
- ('cad','caddexpr'),
- ('caddb','caddbuffer'),
- ('caddf','caddfile'),
- ('cal','call'),
- ('cat','catch'),
- ('cb','cbuffer'),
- ('cc','cc'),
- ('ccl','cclose'),
- ('cd','cd'),
- ('ce','center'),
- ('cex','cexpr'),
- ('cf','cfile'),
- ('cfir','cfirst'),
- ('cg','cgetfile'),
- ('cgetb','cgetbuffer'),
- ('cgete','cgetexpr'),
- ('changes','changes'),
- ('chd','chdir'),
- ('che','checkpath'),
- ('checkt','checktime'),
- ('cl','cl'),
- ('cl','clist'),
- ('cla','clast'),
- ('clo','close'),
- ('cmapc','cmapclear'),
- ('cn','cn'),
- ('cn','cnext'),
- ('cnew','cnewer'),
- ('cnf','cnf'),
- ('cnf','cnfile'),
- ('co','copy'),
- ('col','colder'),
- ('colo','colorscheme'),
- ('com','com'),
- ('comc','comclear'),
- ('comp','compiler'),
- ('con','con'),
- ('con','continue'),
- ('conf','confirm'),
- ('cope','copen'),
- ('cp','cprevious'),
- ('cpf','cpfile'),
- ('cq','cquit'),
- ('cr','crewind'),
- ('cs','cs'),
- ('cscope','cscope'),
- ('cstag','cstag'),
- ('cuna','cunabbrev'),
- ('cw','cwindow'),
- ('d','d'),
- ('d','delete'),
- ('de','de'),
- ('debug','debug'),
- ('debugg','debuggreedy'),
- ('del','del'),
- ('delc','delcommand'),
- ('delel','delel'),
- ('delep','delep'),
- ('deletel','deletel'),
- ('deletep','deletep'),
- ('deletl','deletl'),
- ('deletp','deletp'),
- ('delf','delf'),
- ('delf','delfunction'),
- ('dell','dell'),
- ('delm','delmarks'),
- ('delp','delp'),
- ('dep','dep'),
- ('di','di'),
- ('di','display'),
- ('diffg','diffget'),
- ('diffo','diffoff'),
- ('diffp','diffpatch'),
- ('diffpu','diffput'),
- ('diffs','diffsplit'),
- ('difft','diffthis'),
- ('diffu','diffupdate'),
- ('dig','dig'),
- ('dig','digraphs'),
- ('dir','dir'),
- ('dj','djump'),
- ('dl','dl'),
- ('dli','dlist'),
- ('do','do'),
- ('doau','doau'),
- ('dp','dp'),
- ('dr','drop'),
- ('ds','dsearch'),
- ('dsp','dsplit'),
- ('e','e'),
- ('e','edit'),
- ('ea','ea'),
- ('earlier','earlier'),
- ('ec','ec'),
- ('echoe','echoerr'),
- ('echom','echomsg'),
- ('echon','echon'),
- ('el','else'),
- ('elsei','elseif'),
- ('em','emenu'),
- ('en','en'),
- ('en','endif'),
- ('endf','endf'),
- ('endf','endfunction'),
- ('endfo','endfor'),
- ('endfun','endfun'),
- ('endt','endtry'),
- ('endw','endwhile'),
- ('ene','enew'),
- ('ex','ex'),
- ('exi','exit'),
- ('exu','exusage'),
- ('f','f'),
- ('f','file'),
- ('files','files'),
- ('filet','filet'),
- ('filetype','filetype'),
- ('fin','fin'),
- ('fin','find'),
- ('fina','finally'),
- ('fini','finish'),
- ('fir','first'),
- ('fix','fixdel'),
- ('fo','fold'),
- ('foldc','foldclose'),
- ('foldd','folddoopen'),
- ('folddoc','folddoclosed'),
- ('foldo','foldopen'),
- ('for','for'),
- ('fu','fu'),
- ('fu','function'),
- ('fun','fun'),
- ('g','g'),
- ('go','goto'),
- ('gr','grep'),
- ('grepa','grepadd'),
- ('gui','gui'),
- ('gvim','gvim'),
- ('h','h'),
- ('h','help'),
- ('ha','hardcopy'),
- ('helpf','helpfind'),
- ('helpg','helpgrep'),
- ('helpt','helptags'),
- ('hi','hi'),
- ('hid','hide'),
- ('his','history'),
- ('i','i'),
- ('ia','ia'),
- ('iabc','iabclear'),
- ('if','if'),
- ('ij','ijump'),
- ('il','ilist'),
- ('imapc','imapclear'),
- ('in','in'),
- ('intro','intro'),
- ('is','isearch'),
- ('isp','isplit'),
- ('iuna','iunabbrev'),
- ('j','join'),
- ('ju','jumps'),
- ('k','k'),
- ('kee','keepmarks'),
- ('keepa','keepa'),
- ('keepalt','keepalt'),
- ('keepj','keepjumps'),
- ('keepp','keeppatterns'),
- ('l','l'),
- ('l','list'),
- ('lN','lN'),
- ('lN','lNext'),
- ('lNf','lNf'),
- ('lNf','lNfile'),
- ('la','la'),
- ('la','last'),
- ('lad','lad'),
- ('lad','laddexpr'),
- ('laddb','laddbuffer'),
- ('laddf','laddfile'),
- ('lan','lan'),
- ('lan','language'),
- ('lat','lat'),
- ('later','later'),
- ('lb','lbuffer'),
- ('lc','lcd'),
- ('lch','lchdir'),
- ('lcl','lclose'),
- ('lcs','lcs'),
- ('lcscope','lcscope'),
- ('le','left'),
- ('lefta','leftabove'),
- ('lex','lexpr'),
- ('lf','lfile'),
- ('lfir','lfirst'),
- ('lg','lgetfile'),
- ('lgetb','lgetbuffer'),
- ('lgete','lgetexpr'),
- ('lgr','lgrep'),
- ('lgrepa','lgrepadd'),
- ('lh','lhelpgrep'),
- ('ll','ll'),
- ('lla','llast'),
- ('lli','llist'),
- ('lmak','lmake'),
- ('lmapc','lmapclear'),
- ('lne','lne'),
- ('lne','lnext'),
- ('lnew','lnewer'),
- ('lnf','lnf'),
- ('lnf','lnfile'),
- ('lo','lo'),
- ('lo','loadview'),
- ('loadk','loadk'),
- ('loadkeymap','loadkeymap'),
- ('loc','lockmarks'),
- ('lockv','lockvar'),
- ('lol','lolder'),
- ('lop','lopen'),
- ('lp','lprevious'),
- ('lpf','lpfile'),
- ('lr','lrewind'),
- ('ls','ls'),
- ('lt','ltag'),
- ('lua','lua'),
- ('luado','luado'),
- ('luafile','luafile'),
- ('lv','lvimgrep'),
- ('lvimgrepa','lvimgrepadd'),
- ('lw','lwindow'),
- ('m','move'),
- ('ma','ma'),
- ('ma','mark'),
- ('mak','make'),
- ('marks','marks'),
- ('mat','match'),
- ('menut','menut'),
- ('menut','menutranslate'),
- ('mes','mes'),
- ('messages','messages'),
- ('mk','mk'),
- ('mk','mkexrc'),
- ('mks','mksession'),
- ('mksp','mkspell'),
- ('mkv','mkv'),
- ('mkv','mkvimrc'),
- ('mkvie','mkview'),
- ('mo','mo'),
- ('mod','mode'),
- ('mz','mz'),
- ('mz','mzscheme'),
- ('mzf','mzfile'),
- ('n','n'),
- ('n','next'),
- ('nb','nbkey'),
- ('nbc','nbclose'),
- ('nbs','nbstart'),
- ('ne','ne'),
- ('new','new'),
- ('nmapc','nmapclear'),
- ('noa','noa'),
- ('noautocmd','noautocmd'),
- ('noh','nohlsearch'),
- ('nu','number'),
- ('o','o'),
- ('o','open'),
- ('ol','oldfiles'),
- ('omapc','omapclear'),
- ('on','only'),
- ('opt','options'),
- ('ownsyntax','ownsyntax'),
- ('p','p'),
- ('p','print'),
- ('pc','pclose'),
- ('pe','pe'),
- ('pe','perl'),
- ('ped','pedit'),
- ('perld','perldo'),
- ('po','pop'),
- ('popu','popu'),
- ('popu','popup'),
- ('pp','ppop'),
- ('pr','pr'),
- ('pre','preserve'),
- ('prev','previous'),
- ('pro','pro'),
- ('prof','profile'),
- ('profd','profdel'),
- ('promptf','promptfind'),
- ('promptr','promptrepl'),
- ('ps','psearch'),
- ('ptN','ptN'),
- ('ptN','ptNext'),
- ('pta','ptag'),
- ('ptf','ptfirst'),
- ('ptj','ptjump'),
- ('ptl','ptlast'),
- ('ptn','ptn'),
- ('ptn','ptnext'),
- ('ptp','ptprevious'),
- ('ptr','ptrewind'),
- ('pts','ptselect'),
- ('pu','put'),
- ('pw','pwd'),
- ('py','py'),
- ('py','python'),
- ('py3','py3'),
- ('py3','py3'),
- ('py3do','py3do'),
- ('pydo','pydo'),
- ('pyf','pyfile'),
- ('python3','python3'),
- ('q','q'),
- ('q','quit'),
- ('qa','qall'),
- ('quita','quitall'),
- ('r','r'),
- ('r','read'),
- ('re','re'),
- ('rec','recover'),
- ('red','red'),
- ('red','redo'),
- ('redi','redir'),
- ('redr','redraw'),
- ('redraws','redrawstatus'),
- ('reg','registers'),
- ('res','resize'),
- ('ret','retab'),
- ('retu','return'),
- ('rew','rewind'),
- ('ri','right'),
- ('rightb','rightbelow'),
- ('ru','ru'),
- ('ru','runtime'),
- ('rub','ruby'),
- ('rubyd','rubydo'),
- ('rubyf','rubyfile'),
- ('rundo','rundo'),
- ('rv','rviminfo'),
- ('sN','sNext'),
- ('sa','sargument'),
- ('sal','sall'),
- ('san','sandbox'),
- ('sav','saveas'),
- ('sb','sbuffer'),
- ('sbN','sbNext'),
- ('sba','sball'),
- ('sbf','sbfirst'),
- ('sbl','sblast'),
- ('sbm','sbmodified'),
- ('sbn','sbnext'),
- ('sbp','sbprevious'),
- ('sbr','sbrewind'),
- ('scrip','scrip'),
- ('scrip','scriptnames'),
- ('scripte','scriptencoding'),
- ('scs','scs'),
- ('scscope','scscope'),
- ('se','set'),
- ('setf','setfiletype'),
- ('setg','setglobal'),
- ('setl','setlocal'),
- ('sf','sfind'),
- ('sfir','sfirst'),
- ('sh','shell'),
- ('si','si'),
- ('sig','sig'),
- ('sign','sign'),
- ('sil','silent'),
- ('sim','simalt'),
- ('sl','sl'),
- ('sl','sleep'),
- ('sla','slast'),
- ('sm','smagic'),
- ('sm','smap'),
- ('sme','sme'),
- ('smenu','smenu'),
- ('sn','snext'),
- ('sni','sniff'),
- ('sno','snomagic'),
- ('snoreme','snoreme'),
- ('snoremenu','snoremenu'),
- ('so','so'),
- ('so','source'),
- ('sor','sort'),
- ('sp','split'),
- ('spe','spe'),
- ('spe','spellgood'),
- ('spelld','spelldump'),
- ('spelli','spellinfo'),
- ('spellr','spellrepall'),
- ('spellu','spellundo'),
- ('spellw','spellwrong'),
- ('spr','sprevious'),
- ('sre','srewind'),
- ('st','st'),
- ('st','stop'),
- ('sta','stag'),
- ('star','star'),
- ('star','startinsert'),
- ('start','start'),
- ('startg','startgreplace'),
- ('startr','startreplace'),
- ('stj','stjump'),
- ('stopi','stopinsert'),
- ('sts','stselect'),
- ('sun','sunhide'),
- ('sunme','sunme'),
- ('sunmenu','sunmenu'),
- ('sus','suspend'),
- ('sv','sview'),
- ('sw','swapname'),
- ('sy','sy'),
- ('syn','syn'),
- ('sync','sync'),
- ('syncbind','syncbind'),
- ('syntime','syntime'),
- ('t','t'),
- ('tN','tN'),
- ('tN','tNext'),
- ('ta','ta'),
- ('ta','tag'),
- ('tab','tab'),
- ('tabN','tabN'),
- ('tabN','tabNext'),
- ('tabc','tabclose'),
- ('tabd','tabdo'),
- ('tabe','tabedit'),
- ('tabf','tabfind'),
- ('tabfir','tabfirst'),
- ('tabl','tablast'),
- ('tabm','tabmove'),
- ('tabn','tabnext'),
- ('tabnew','tabnew'),
- ('tabo','tabonly'),
- ('tabp','tabprevious'),
- ('tabr','tabrewind'),
- ('tabs','tabs'),
- ('tags','tags'),
- ('tc','tcl'),
- ('tcld','tcldo'),
- ('tclf','tclfile'),
- ('te','tearoff'),
- ('tf','tfirst'),
- ('th','throw'),
- ('tj','tjump'),
- ('tl','tlast'),
- ('tm','tm'),
- ('tm','tmenu'),
- ('tn','tn'),
- ('tn','tnext'),
- ('to','topleft'),
- ('tp','tprevious'),
- ('tr','tr'),
- ('tr','trewind'),
- ('try','try'),
- ('ts','tselect'),
- ('tu','tu'),
- ('tu','tunmenu'),
- ('u','u'),
- ('u','undo'),
- ('un','un'),
- ('una','unabbreviate'),
- ('undoj','undojoin'),
- ('undol','undolist'),
- ('unh','unhide'),
- ('unl','unl'),
- ('unlo','unlockvar'),
- ('uns','unsilent'),
- ('up','update'),
- ('v','v'),
- ('ve','ve'),
- ('ve','version'),
- ('verb','verbose'),
- ('vert','vertical'),
- ('vi','vi'),
- ('vi','visual'),
- ('vie','view'),
- ('vim','vimgrep'),
- ('vimgrepa','vimgrepadd'),
- ('viu','viusage'),
- ('vmapc','vmapclear'),
- ('vne','vnew'),
- ('vs','vsplit'),
- ('w','w'),
- ('w','write'),
- ('wN','wNext'),
- ('wa','wall'),
- ('wh','while'),
- ('win','win'),
- ('win','winsize'),
- ('winc','wincmd'),
- ('windo','windo'),
- ('winp','winpos'),
- ('wn','wnext'),
- ('wp','wprevious'),
- ('wq','wq'),
- ('wqa','wqall'),
- ('ws','wsverb'),
- ('wundo','wundo'),
- ('wv','wviminfo'),
- ('x','x'),
- ('x','xit'),
- ('xa','xall'),
- ('xmapc','xmapclear'),
- ('xme','xme'),
- ('xmenu','xmenu'),
- ('xnoreme','xnoreme'),
- ('xnoremenu','xnoremenu'),
- ('xunme','xunme'),
- ('xunmenu','xunmenu'),
- ('xwininfo','xwininfo'),
- ('y','yank'),
- )
- return var
-command = _getcommand()
-
-def _getoption():
- var = (
- ('acd','acd'),
- ('ai','ai'),
- ('akm','akm'),
- ('al','al'),
- ('aleph','aleph'),
- ('allowrevins','allowrevins'),
- ('altkeymap','altkeymap'),
- ('ambiwidth','ambiwidth'),
- ('ambw','ambw'),
- ('anti','anti'),
- ('antialias','antialias'),
- ('ar','ar'),
- ('arab','arab'),
- ('arabic','arabic'),
- ('arabicshape','arabicshape'),
- ('ari','ari'),
- ('arshape','arshape'),
- ('autochdir','autochdir'),
- ('autoindent','autoindent'),
- ('autoread','autoread'),
- ('autowrite','autowrite'),
- ('autowriteall','autowriteall'),
- ('aw','aw'),
- ('awa','awa'),
- ('background','background'),
- ('backspace','backspace'),
- ('backup','backup'),
- ('backupcopy','backupcopy'),
- ('backupdir','backupdir'),
- ('backupext','backupext'),
- ('backupskip','backupskip'),
- ('balloondelay','balloondelay'),
- ('ballooneval','ballooneval'),
- ('balloonexpr','balloonexpr'),
- ('bdir','bdir'),
- ('bdlay','bdlay'),
- ('beval','beval'),
- ('bex','bex'),
- ('bexpr','bexpr'),
- ('bg','bg'),
- ('bh','bh'),
- ('bin','bin'),
- ('binary','binary'),
- ('biosk','biosk'),
- ('bioskey','bioskey'),
- ('bk','bk'),
- ('bkc','bkc'),
- ('bl','bl'),
- ('bomb','bomb'),
- ('breakat','breakat'),
- ('brk','brk'),
- ('browsedir','browsedir'),
- ('bs','bs'),
- ('bsdir','bsdir'),
- ('bsk','bsk'),
- ('bt','bt'),
- ('bufhidden','bufhidden'),
- ('buflisted','buflisted'),
- ('buftype','buftype'),
- ('casemap','casemap'),
- ('cb','cb'),
- ('cc','cc'),
- ('ccv','ccv'),
- ('cd','cd'),
- ('cdpath','cdpath'),
- ('cedit','cedit'),
- ('cf','cf'),
- ('cfu','cfu'),
- ('ch','ch'),
- ('charconvert','charconvert'),
- ('ci','ci'),
- ('cin','cin'),
- ('cindent','cindent'),
- ('cink','cink'),
- ('cinkeys','cinkeys'),
- ('cino','cino'),
- ('cinoptions','cinoptions'),
- ('cinw','cinw'),
- ('cinwords','cinwords'),
- ('clipboard','clipboard'),
- ('cmdheight','cmdheight'),
- ('cmdwinheight','cmdwinheight'),
- ('cmp','cmp'),
- ('cms','cms'),
- ('co','co'),
- ('cocu','cocu'),
- ('cole','cole'),
- ('colorcolumn','colorcolumn'),
- ('columns','columns'),
- ('com','com'),
- ('comments','comments'),
- ('commentstring','commentstring'),
- ('compatible','compatible'),
- ('complete','complete'),
- ('completefunc','completefunc'),
- ('completeopt','completeopt'),
- ('concealcursor','concealcursor'),
- ('conceallevel','conceallevel'),
- ('confirm','confirm'),
- ('consk','consk'),
- ('conskey','conskey'),
- ('copyindent','copyindent'),
- ('cot','cot'),
- ('cp','cp'),
- ('cpo','cpo'),
- ('cpoptions','cpoptions'),
- ('cpt','cpt'),
- ('crb','crb'),
- ('cryptmethod','cryptmethod'),
- ('cscopepathcomp','cscopepathcomp'),
- ('cscopeprg','cscopeprg'),
- ('cscopequickfix','cscopequickfix'),
- ('cscoperelative','cscoperelative'),
- ('cscopetag','cscopetag'),
- ('cscopetagorder','cscopetagorder'),
- ('cscopeverbose','cscopeverbose'),
- ('cspc','cspc'),
- ('csprg','csprg'),
- ('csqf','csqf'),
- ('csre','csre'),
- ('cst','cst'),
- ('csto','csto'),
- ('csverb','csverb'),
- ('cuc','cuc'),
- ('cul','cul'),
- ('cursorbind','cursorbind'),
- ('cursorcolumn','cursorcolumn'),
- ('cursorline','cursorline'),
- ('cwh','cwh'),
- ('debug','debug'),
- ('deco','deco'),
- ('def','def'),
- ('define','define'),
- ('delcombine','delcombine'),
- ('dex','dex'),
- ('dg','dg'),
- ('dict','dict'),
- ('dictionary','dictionary'),
- ('diff','diff'),
- ('diffexpr','diffexpr'),
- ('diffopt','diffopt'),
- ('digraph','digraph'),
- ('dip','dip'),
- ('dir','dir'),
- ('directory','directory'),
- ('display','display'),
- ('dy','dy'),
- ('ea','ea'),
- ('ead','ead'),
- ('eadirection','eadirection'),
- ('eb','eb'),
- ('ed','ed'),
- ('edcompatible','edcompatible'),
- ('ef','ef'),
- ('efm','efm'),
- ('ei','ei'),
- ('ek','ek'),
- ('enc','enc'),
- ('encoding','encoding'),
- ('endofline','endofline'),
- ('eol','eol'),
- ('ep','ep'),
- ('equalalways','equalalways'),
- ('equalprg','equalprg'),
- ('errorbells','errorbells'),
- ('errorfile','errorfile'),
- ('errorformat','errorformat'),
- ('esckeys','esckeys'),
- ('et','et'),
- ('eventignore','eventignore'),
- ('ex','ex'),
- ('expandtab','expandtab'),
- ('exrc','exrc'),
- ('fcl','fcl'),
- ('fcs','fcs'),
- ('fdc','fdc'),
- ('fde','fde'),
- ('fdi','fdi'),
- ('fdl','fdl'),
- ('fdls','fdls'),
- ('fdm','fdm'),
- ('fdn','fdn'),
- ('fdo','fdo'),
- ('fdt','fdt'),
- ('fen','fen'),
- ('fenc','fenc'),
- ('fencs','fencs'),
- ('fex','fex'),
- ('ff','ff'),
- ('ffs','ffs'),
- ('fic','fic'),
- ('fileencoding','fileencoding'),
- ('fileencodings','fileencodings'),
- ('fileformat','fileformat'),
- ('fileformats','fileformats'),
- ('fileignorecase','fileignorecase'),
- ('filetype','filetype'),
- ('fillchars','fillchars'),
- ('fk','fk'),
- ('fkmap','fkmap'),
- ('flp','flp'),
- ('fml','fml'),
- ('fmr','fmr'),
- ('fo','fo'),
- ('foldclose','foldclose'),
- ('foldcolumn','foldcolumn'),
- ('foldenable','foldenable'),
- ('foldexpr','foldexpr'),
- ('foldignore','foldignore'),
- ('foldlevel','foldlevel'),
- ('foldlevelstart','foldlevelstart'),
- ('foldmarker','foldmarker'),
- ('foldmethod','foldmethod'),
- ('foldminlines','foldminlines'),
- ('foldnestmax','foldnestmax'),
- ('foldopen','foldopen'),
- ('foldtext','foldtext'),
- ('formatexpr','formatexpr'),
- ('formatlistpat','formatlistpat'),
- ('formatoptions','formatoptions'),
- ('formatprg','formatprg'),
- ('fp','fp'),
- ('fs','fs'),
- ('fsync','fsync'),
- ('ft','ft'),
- ('gcr','gcr'),
- ('gd','gd'),
- ('gdefault','gdefault'),
- ('gfm','gfm'),
- ('gfn','gfn'),
- ('gfs','gfs'),
- ('gfw','gfw'),
- ('ghr','ghr'),
- ('go','go'),
- ('gp','gp'),
- ('grepformat','grepformat'),
- ('grepprg','grepprg'),
- ('gtl','gtl'),
- ('gtt','gtt'),
- ('guicursor','guicursor'),
- ('guifont','guifont'),
- ('guifontset','guifontset'),
- ('guifontwide','guifontwide'),
- ('guiheadroom','guiheadroom'),
- ('guioptions','guioptions'),
- ('guipty','guipty'),
- ('guitablabel','guitablabel'),
- ('guitabtooltip','guitabtooltip'),
- ('helpfile','helpfile'),
- ('helpheight','helpheight'),
- ('helplang','helplang'),
- ('hf','hf'),
- ('hh','hh'),
- ('hi','hi'),
- ('hid','hid'),
- ('hidden','hidden'),
- ('highlight','highlight'),
- ('history','history'),
- ('hk','hk'),
- ('hkmap','hkmap'),
- ('hkmapp','hkmapp'),
- ('hkp','hkp'),
- ('hl','hl'),
- ('hlg','hlg'),
- ('hls','hls'),
- ('hlsearch','hlsearch'),
- ('ic','ic'),
- ('icon','icon'),
- ('iconstring','iconstring'),
- ('ignorecase','ignorecase'),
- ('im','im'),
- ('imactivatefunc','imactivatefunc'),
- ('imactivatekey','imactivatekey'),
- ('imaf','imaf'),
- ('imak','imak'),
- ('imc','imc'),
- ('imcmdline','imcmdline'),
- ('imd','imd'),
- ('imdisable','imdisable'),
- ('imi','imi'),
- ('iminsert','iminsert'),
- ('ims','ims'),
- ('imsearch','imsearch'),
- ('imsf','imsf'),
- ('imstatusfunc','imstatusfunc'),
- ('inc','inc'),
- ('include','include'),
- ('includeexpr','includeexpr'),
- ('incsearch','incsearch'),
- ('inde','inde'),
- ('indentexpr','indentexpr'),
- ('indentkeys','indentkeys'),
- ('indk','indk'),
- ('inex','inex'),
- ('inf','inf'),
- ('infercase','infercase'),
- ('inoremap','inoremap'),
- ('insertmode','insertmode'),
- ('invacd','invacd'),
- ('invai','invai'),
- ('invakm','invakm'),
- ('invallowrevins','invallowrevins'),
- ('invaltkeymap','invaltkeymap'),
- ('invanti','invanti'),
- ('invantialias','invantialias'),
- ('invar','invar'),
- ('invarab','invarab'),
- ('invarabic','invarabic'),
- ('invarabicshape','invarabicshape'),
- ('invari','invari'),
- ('invarshape','invarshape'),
- ('invautochdir','invautochdir'),
- ('invautoindent','invautoindent'),
- ('invautoread','invautoread'),
- ('invautowrite','invautowrite'),
- ('invautowriteall','invautowriteall'),
- ('invaw','invaw'),
- ('invawa','invawa'),
- ('invbackup','invbackup'),
- ('invballooneval','invballooneval'),
- ('invbeval','invbeval'),
- ('invbin','invbin'),
- ('invbinary','invbinary'),
- ('invbiosk','invbiosk'),
- ('invbioskey','invbioskey'),
- ('invbk','invbk'),
- ('invbl','invbl'),
- ('invbomb','invbomb'),
- ('invbuflisted','invbuflisted'),
- ('invcf','invcf'),
- ('invci','invci'),
- ('invcin','invcin'),
- ('invcindent','invcindent'),
- ('invcompatible','invcompatible'),
- ('invconfirm','invconfirm'),
- ('invconsk','invconsk'),
- ('invconskey','invconskey'),
- ('invcopyindent','invcopyindent'),
- ('invcp','invcp'),
- ('invcrb','invcrb'),
- ('invcscoperelative','invcscoperelative'),
- ('invcscopetag','invcscopetag'),
- ('invcscopeverbose','invcscopeverbose'),
- ('invcsre','invcsre'),
- ('invcst','invcst'),
- ('invcsverb','invcsverb'),
- ('invcuc','invcuc'),
- ('invcul','invcul'),
- ('invcursorbind','invcursorbind'),
- ('invcursorcolumn','invcursorcolumn'),
- ('invcursorline','invcursorline'),
- ('invdeco','invdeco'),
- ('invdelcombine','invdelcombine'),
- ('invdg','invdg'),
- ('invdiff','invdiff'),
- ('invdigraph','invdigraph'),
- ('invea','invea'),
- ('inveb','inveb'),
- ('inved','inved'),
- ('invedcompatible','invedcompatible'),
- ('invek','invek'),
- ('invendofline','invendofline'),
- ('inveol','inveol'),
- ('invequalalways','invequalalways'),
- ('inverrorbells','inverrorbells'),
- ('invesckeys','invesckeys'),
- ('invet','invet'),
- ('invex','invex'),
- ('invexpandtab','invexpandtab'),
- ('invexrc','invexrc'),
- ('invfen','invfen'),
- ('invfic','invfic'),
- ('invfileignorecase','invfileignorecase'),
- ('invfk','invfk'),
- ('invfkmap','invfkmap'),
- ('invfoldenable','invfoldenable'),
- ('invgd','invgd'),
- ('invgdefault','invgdefault'),
- ('invguipty','invguipty'),
- ('invhid','invhid'),
- ('invhidden','invhidden'),
- ('invhk','invhk'),
- ('invhkmap','invhkmap'),
- ('invhkmapp','invhkmapp'),
- ('invhkp','invhkp'),
- ('invhls','invhls'),
- ('invhlsearch','invhlsearch'),
- ('invic','invic'),
- ('invicon','invicon'),
- ('invignorecase','invignorecase'),
- ('invim','invim'),
- ('invimc','invimc'),
- ('invimcmdline','invimcmdline'),
- ('invimd','invimd'),
- ('invimdisable','invimdisable'),
- ('invincsearch','invincsearch'),
- ('invinf','invinf'),
- ('invinfercase','invinfercase'),
- ('invinsertmode','invinsertmode'),
- ('invis','invis'),
- ('invjoinspaces','invjoinspaces'),
- ('invjs','invjs'),
- ('invlazyredraw','invlazyredraw'),
- ('invlbr','invlbr'),
- ('invlinebreak','invlinebreak'),
- ('invlisp','invlisp'),
- ('invlist','invlist'),
- ('invloadplugins','invloadplugins'),
- ('invlpl','invlpl'),
- ('invlz','invlz'),
- ('invma','invma'),
- ('invmacatsui','invmacatsui'),
- ('invmagic','invmagic'),
- ('invmh','invmh'),
- ('invml','invml'),
- ('invmod','invmod'),
- ('invmodeline','invmodeline'),
- ('invmodifiable','invmodifiable'),
- ('invmodified','invmodified'),
- ('invmore','invmore'),
- ('invmousef','invmousef'),
- ('invmousefocus','invmousefocus'),
- ('invmousehide','invmousehide'),
- ('invnu','invnu'),
- ('invnumber','invnumber'),
- ('invodev','invodev'),
- ('invopendevice','invopendevice'),
- ('invpaste','invpaste'),
- ('invpi','invpi'),
- ('invpreserveindent','invpreserveindent'),
- ('invpreviewwindow','invpreviewwindow'),
- ('invprompt','invprompt'),
- ('invpvw','invpvw'),
- ('invreadonly','invreadonly'),
- ('invrelativenumber','invrelativenumber'),
- ('invremap','invremap'),
- ('invrestorescreen','invrestorescreen'),
- ('invrevins','invrevins'),
- ('invri','invri'),
- ('invrightleft','invrightleft'),
- ('invrl','invrl'),
- ('invrnu','invrnu'),
- ('invro','invro'),
- ('invrs','invrs'),
- ('invru','invru'),
- ('invruler','invruler'),
- ('invsb','invsb'),
- ('invsc','invsc'),
- ('invscb','invscb'),
- ('invscrollbind','invscrollbind'),
- ('invscs','invscs'),
- ('invsecure','invsecure'),
- ('invsft','invsft'),
- ('invshellslash','invshellslash'),
- ('invshelltemp','invshelltemp'),
- ('invshiftround','invshiftround'),
- ('invshortname','invshortname'),
- ('invshowcmd','invshowcmd'),
- ('invshowfulltag','invshowfulltag'),
- ('invshowmatch','invshowmatch'),
- ('invshowmode','invshowmode'),
- ('invsi','invsi'),
- ('invsm','invsm'),
- ('invsmartcase','invsmartcase'),
- ('invsmartindent','invsmartindent'),
- ('invsmarttab','invsmarttab'),
- ('invsmd','invsmd'),
- ('invsn','invsn'),
- ('invsol','invsol'),
- ('invspell','invspell'),
- ('invsplitbelow','invsplitbelow'),
- ('invsplitright','invsplitright'),
- ('invspr','invspr'),
- ('invsr','invsr'),
- ('invssl','invssl'),
- ('invsta','invsta'),
- ('invstartofline','invstartofline'),
- ('invstmp','invstmp'),
- ('invswapfile','invswapfile'),
- ('invswf','invswf'),
- ('invta','invta'),
- ('invtagbsearch','invtagbsearch'),
- ('invtagrelative','invtagrelative'),
- ('invtagstack','invtagstack'),
- ('invtbi','invtbi'),
- ('invtbidi','invtbidi'),
- ('invtbs','invtbs'),
- ('invtermbidi','invtermbidi'),
- ('invterse','invterse'),
- ('invtextauto','invtextauto'),
- ('invtextmode','invtextmode'),
- ('invtf','invtf'),
- ('invtgst','invtgst'),
- ('invtildeop','invtildeop'),
- ('invtimeout','invtimeout'),
- ('invtitle','invtitle'),
- ('invto','invto'),
- ('invtop','invtop'),
- ('invtr','invtr'),
- ('invttimeout','invttimeout'),
- ('invttybuiltin','invttybuiltin'),
- ('invttyfast','invttyfast'),
- ('invtx','invtx'),
- ('invudf','invudf'),
- ('invundofile','invundofile'),
- ('invvb','invvb'),
- ('invvisualbell','invvisualbell'),
- ('invwa','invwa'),
- ('invwarn','invwarn'),
- ('invwb','invwb'),
- ('invweirdinvert','invweirdinvert'),
- ('invwfh','invwfh'),
- ('invwfw','invwfw'),
- ('invwic','invwic'),
- ('invwildignorecase','invwildignorecase'),
- ('invwildmenu','invwildmenu'),
- ('invwinfixheight','invwinfixheight'),
- ('invwinfixwidth','invwinfixwidth'),
- ('invwiv','invwiv'),
- ('invwmnu','invwmnu'),
- ('invwrap','invwrap'),
- ('invwrapscan','invwrapscan'),
- ('invwrite','invwrite'),
- ('invwriteany','invwriteany'),
- ('invwritebackup','invwritebackup'),
- ('invws','invws'),
- ('is','is'),
- ('isf','isf'),
- ('isfname','isfname'),
- ('isi','isi'),
- ('isident','isident'),
- ('isk','isk'),
- ('iskeyword','iskeyword'),
- ('isp','isp'),
- ('isprint','isprint'),
- ('joinspaces','joinspaces'),
- ('js','js'),
- ('key','key'),
- ('keymap','keymap'),
- ('keymodel','keymodel'),
- ('keywordprg','keywordprg'),
- ('km','km'),
- ('kmp','kmp'),
- ('kp','kp'),
- ('langmap','langmap'),
- ('langmenu','langmenu'),
- ('laststatus','laststatus'),
- ('lazyredraw','lazyredraw'),
- ('lbr','lbr'),
- ('lcs','lcs'),
- ('linebreak','linebreak'),
- ('lines','lines'),
- ('linespace','linespace'),
- ('lisp','lisp'),
- ('lispwords','lispwords'),
- ('list','list'),
- ('listchars','listchars'),
- ('lm','lm'),
- ('lmap','lmap'),
- ('loadplugins','loadplugins'),
- ('lpl','lpl'),
- ('ls','ls'),
- ('lsp','lsp'),
- ('lw','lw'),
- ('lz','lz'),
- ('ma','ma'),
- ('macatsui','macatsui'),
- ('magic','magic'),
- ('makeef','makeef'),
- ('makeprg','makeprg'),
- ('mat','mat'),
- ('matchpairs','matchpairs'),
- ('matchtime','matchtime'),
- ('maxcombine','maxcombine'),
- ('maxfuncdepth','maxfuncdepth'),
- ('maxmapdepth','maxmapdepth'),
- ('maxmem','maxmem'),
- ('maxmempattern','maxmempattern'),
- ('maxmemtot','maxmemtot'),
- ('mco','mco'),
- ('mef','mef'),
- ('menuitems','menuitems'),
- ('mfd','mfd'),
- ('mh','mh'),
- ('mis','mis'),
- ('mkspellmem','mkspellmem'),
- ('ml','ml'),
- ('mls','mls'),
- ('mm','mm'),
- ('mmd','mmd'),
- ('mmp','mmp'),
- ('mmt','mmt'),
- ('mod','mod'),
- ('modeline','modeline'),
- ('modelines','modelines'),
- ('modifiable','modifiable'),
- ('modified','modified'),
- ('more','more'),
- ('mouse','mouse'),
- ('mousef','mousef'),
- ('mousefocus','mousefocus'),
- ('mousehide','mousehide'),
- ('mousem','mousem'),
- ('mousemodel','mousemodel'),
- ('mouses','mouses'),
- ('mouseshape','mouseshape'),
- ('mouset','mouset'),
- ('mousetime','mousetime'),
- ('mp','mp'),
- ('mps','mps'),
- ('msm','msm'),
- ('mzq','mzq'),
- ('mzquantum','mzquantum'),
- ('nf','nf'),
- ('nnoremap','nnoremap'),
- ('noacd','noacd'),
- ('noai','noai'),
- ('noakm','noakm'),
- ('noallowrevins','noallowrevins'),
- ('noaltkeymap','noaltkeymap'),
- ('noanti','noanti'),
- ('noantialias','noantialias'),
- ('noar','noar'),
- ('noarab','noarab'),
- ('noarabic','noarabic'),
- ('noarabicshape','noarabicshape'),
- ('noari','noari'),
- ('noarshape','noarshape'),
- ('noautochdir','noautochdir'),
- ('noautoindent','noautoindent'),
- ('noautoread','noautoread'),
- ('noautowrite','noautowrite'),
- ('noautowriteall','noautowriteall'),
- ('noaw','noaw'),
- ('noawa','noawa'),
- ('nobackup','nobackup'),
- ('noballooneval','noballooneval'),
- ('nobeval','nobeval'),
- ('nobin','nobin'),
- ('nobinary','nobinary'),
- ('nobiosk','nobiosk'),
- ('nobioskey','nobioskey'),
- ('nobk','nobk'),
- ('nobl','nobl'),
- ('nobomb','nobomb'),
- ('nobuflisted','nobuflisted'),
- ('nocf','nocf'),
- ('noci','noci'),
- ('nocin','nocin'),
- ('nocindent','nocindent'),
- ('nocompatible','nocompatible'),
- ('noconfirm','noconfirm'),
- ('noconsk','noconsk'),
- ('noconskey','noconskey'),
- ('nocopyindent','nocopyindent'),
- ('nocp','nocp'),
- ('nocrb','nocrb'),
- ('nocscoperelative','nocscoperelative'),
- ('nocscopetag','nocscopetag'),
- ('nocscopeverbose','nocscopeverbose'),
- ('nocsre','nocsre'),
- ('nocst','nocst'),
- ('nocsverb','nocsverb'),
- ('nocuc','nocuc'),
- ('nocul','nocul'),
- ('nocursorbind','nocursorbind'),
- ('nocursorcolumn','nocursorcolumn'),
- ('nocursorline','nocursorline'),
- ('nodeco','nodeco'),
- ('nodelcombine','nodelcombine'),
- ('nodg','nodg'),
- ('nodiff','nodiff'),
- ('nodigraph','nodigraph'),
- ('noea','noea'),
- ('noeb','noeb'),
- ('noed','noed'),
- ('noedcompatible','noedcompatible'),
- ('noek','noek'),
- ('noendofline','noendofline'),
- ('noeol','noeol'),
- ('noequalalways','noequalalways'),
- ('noerrorbells','noerrorbells'),
- ('noesckeys','noesckeys'),
- ('noet','noet'),
- ('noex','noex'),
- ('noexpandtab','noexpandtab'),
- ('noexrc','noexrc'),
- ('nofen','nofen'),
- ('nofic','nofic'),
- ('nofileignorecase','nofileignorecase'),
- ('nofk','nofk'),
- ('nofkmap','nofkmap'),
- ('nofoldenable','nofoldenable'),
- ('nogd','nogd'),
- ('nogdefault','nogdefault'),
- ('noguipty','noguipty'),
- ('nohid','nohid'),
- ('nohidden','nohidden'),
- ('nohk','nohk'),
- ('nohkmap','nohkmap'),
- ('nohkmapp','nohkmapp'),
- ('nohkp','nohkp'),
- ('nohls','nohls'),
- ('nohlsearch','nohlsearch'),
- ('noic','noic'),
- ('noicon','noicon'),
- ('noignorecase','noignorecase'),
- ('noim','noim'),
- ('noimc','noimc'),
- ('noimcmdline','noimcmdline'),
- ('noimd','noimd'),
- ('noimdisable','noimdisable'),
- ('noincsearch','noincsearch'),
- ('noinf','noinf'),
- ('noinfercase','noinfercase'),
- ('noinsertmode','noinsertmode'),
- ('nois','nois'),
- ('nojoinspaces','nojoinspaces'),
- ('nojs','nojs'),
- ('nolazyredraw','nolazyredraw'),
- ('nolbr','nolbr'),
- ('nolinebreak','nolinebreak'),
- ('nolisp','nolisp'),
- ('nolist','nolist'),
- ('noloadplugins','noloadplugins'),
- ('nolpl','nolpl'),
- ('nolz','nolz'),
- ('noma','noma'),
- ('nomacatsui','nomacatsui'),
- ('nomagic','nomagic'),
- ('nomh','nomh'),
- ('noml','noml'),
- ('nomod','nomod'),
- ('nomodeline','nomodeline'),
- ('nomodifiable','nomodifiable'),
- ('nomodified','nomodified'),
- ('nomore','nomore'),
- ('nomousef','nomousef'),
- ('nomousefocus','nomousefocus'),
- ('nomousehide','nomousehide'),
- ('nonu','nonu'),
- ('nonumber','nonumber'),
- ('noodev','noodev'),
- ('noopendevice','noopendevice'),
- ('nopaste','nopaste'),
- ('nopi','nopi'),
- ('nopreserveindent','nopreserveindent'),
- ('nopreviewwindow','nopreviewwindow'),
- ('noprompt','noprompt'),
- ('nopvw','nopvw'),
- ('noreadonly','noreadonly'),
- ('norelativenumber','norelativenumber'),
- ('noremap','noremap'),
- ('norestorescreen','norestorescreen'),
- ('norevins','norevins'),
- ('nori','nori'),
- ('norightleft','norightleft'),
- ('norl','norl'),
- ('nornu','nornu'),
- ('noro','noro'),
- ('nors','nors'),
- ('noru','noru'),
- ('noruler','noruler'),
- ('nosb','nosb'),
- ('nosc','nosc'),
- ('noscb','noscb'),
- ('noscrollbind','noscrollbind'),
- ('noscs','noscs'),
- ('nosecure','nosecure'),
- ('nosft','nosft'),
- ('noshellslash','noshellslash'),
- ('noshelltemp','noshelltemp'),
- ('noshiftround','noshiftround'),
- ('noshortname','noshortname'),
- ('noshowcmd','noshowcmd'),
- ('noshowfulltag','noshowfulltag'),
- ('noshowmatch','noshowmatch'),
- ('noshowmode','noshowmode'),
- ('nosi','nosi'),
- ('nosm','nosm'),
- ('nosmartcase','nosmartcase'),
- ('nosmartindent','nosmartindent'),
- ('nosmarttab','nosmarttab'),
- ('nosmd','nosmd'),
- ('nosn','nosn'),
- ('nosol','nosol'),
- ('nospell','nospell'),
- ('nosplitbelow','nosplitbelow'),
- ('nosplitright','nosplitright'),
- ('nospr','nospr'),
- ('nosr','nosr'),
- ('nossl','nossl'),
- ('nosta','nosta'),
- ('nostartofline','nostartofline'),
- ('nostmp','nostmp'),
- ('noswapfile','noswapfile'),
- ('noswf','noswf'),
- ('nota','nota'),
- ('notagbsearch','notagbsearch'),
- ('notagrelative','notagrelative'),
- ('notagstack','notagstack'),
- ('notbi','notbi'),
- ('notbidi','notbidi'),
- ('notbs','notbs'),
- ('notermbidi','notermbidi'),
- ('noterse','noterse'),
- ('notextauto','notextauto'),
- ('notextmode','notextmode'),
- ('notf','notf'),
- ('notgst','notgst'),
- ('notildeop','notildeop'),
- ('notimeout','notimeout'),
- ('notitle','notitle'),
- ('noto','noto'),
- ('notop','notop'),
- ('notr','notr'),
- ('nottimeout','nottimeout'),
- ('nottybuiltin','nottybuiltin'),
- ('nottyfast','nottyfast'),
- ('notx','notx'),
- ('noudf','noudf'),
- ('noundofile','noundofile'),
- ('novb','novb'),
- ('novisualbell','novisualbell'),
- ('nowa','nowa'),
- ('nowarn','nowarn'),
- ('nowb','nowb'),
- ('noweirdinvert','noweirdinvert'),
- ('nowfh','nowfh'),
- ('nowfw','nowfw'),
- ('nowic','nowic'),
- ('nowildignorecase','nowildignorecase'),
- ('nowildmenu','nowildmenu'),
- ('nowinfixheight','nowinfixheight'),
- ('nowinfixwidth','nowinfixwidth'),
- ('nowiv','nowiv'),
- ('nowmnu','nowmnu'),
- ('nowrap','nowrap'),
- ('nowrapscan','nowrapscan'),
- ('nowrite','nowrite'),
- ('nowriteany','nowriteany'),
- ('nowritebackup','nowritebackup'),
- ('nows','nows'),
- ('nrformats','nrformats'),
- ('nu','nu'),
- ('number','number'),
- ('numberwidth','numberwidth'),
- ('nuw','nuw'),
- ('odev','odev'),
- ('oft','oft'),
- ('ofu','ofu'),
- ('omnifunc','omnifunc'),
- ('opendevice','opendevice'),
- ('operatorfunc','operatorfunc'),
- ('opfunc','opfunc'),
- ('osfiletype','osfiletype'),
- ('pa','pa'),
- ('para','para'),
- ('paragraphs','paragraphs'),
- ('paste','paste'),
- ('pastetoggle','pastetoggle'),
- ('patchexpr','patchexpr'),
- ('patchmode','patchmode'),
- ('path','path'),
- ('pdev','pdev'),
- ('penc','penc'),
- ('pex','pex'),
- ('pexpr','pexpr'),
- ('pfn','pfn'),
- ('ph','ph'),
- ('pheader','pheader'),
- ('pi','pi'),
- ('pm','pm'),
- ('pmbcs','pmbcs'),
- ('pmbfn','pmbfn'),
- ('popt','popt'),
- ('preserveindent','preserveindent'),
- ('previewheight','previewheight'),
- ('previewwindow','previewwindow'),
- ('printdevice','printdevice'),
- ('printencoding','printencoding'),
- ('printexpr','printexpr'),
- ('printfont','printfont'),
- ('printheader','printheader'),
- ('printmbcharset','printmbcharset'),
- ('printmbfont','printmbfont'),
- ('printoptions','printoptions'),
- ('prompt','prompt'),
- ('pt','pt'),
- ('pumheight','pumheight'),
- ('pvh','pvh'),
- ('pvw','pvw'),
- ('qe','qe'),
- ('quoteescape','quoteescape'),
- ('rdt','rdt'),
- ('re','re'),
- ('readonly','readonly'),
- ('redrawtime','redrawtime'),
- ('regexpengine','regexpengine'),
- ('relativenumber','relativenumber'),
- ('remap','remap'),
- ('report','report'),
- ('restorescreen','restorescreen'),
- ('revins','revins'),
- ('ri','ri'),
- ('rightleft','rightleft'),
- ('rightleftcmd','rightleftcmd'),
- ('rl','rl'),
- ('rlc','rlc'),
- ('rnu','rnu'),
- ('ro','ro'),
- ('rs','rs'),
- ('rtp','rtp'),
- ('ru','ru'),
- ('ruf','ruf'),
- ('ruler','ruler'),
- ('rulerformat','rulerformat'),
- ('runtimepath','runtimepath'),
- ('sb','sb'),
- ('sbo','sbo'),
- ('sbr','sbr'),
- ('sc','sc'),
- ('scb','scb'),
- ('scr','scr'),
- ('scroll','scroll'),
- ('scrollbind','scrollbind'),
- ('scrolljump','scrolljump'),
- ('scrolloff','scrolloff'),
- ('scrollopt','scrollopt'),
- ('scs','scs'),
- ('sect','sect'),
- ('sections','sections'),
- ('secure','secure'),
- ('sel','sel'),
- ('selection','selection'),
- ('selectmode','selectmode'),
- ('sessionoptions','sessionoptions'),
- ('sft','sft'),
- ('sh','sh'),
- ('shcf','shcf'),
- ('shell','shell'),
- ('shellcmdflag','shellcmdflag'),
- ('shellpipe','shellpipe'),
- ('shellquote','shellquote'),
- ('shellredir','shellredir'),
- ('shellslash','shellslash'),
- ('shelltemp','shelltemp'),
- ('shelltype','shelltype'),
- ('shellxescape','shellxescape'),
- ('shellxquote','shellxquote'),
- ('shiftround','shiftround'),
- ('shiftwidth','shiftwidth'),
- ('shm','shm'),
- ('shortmess','shortmess'),
- ('shortname','shortname'),
- ('showbreak','showbreak'),
- ('showcmd','showcmd'),
- ('showfulltag','showfulltag'),
- ('showmatch','showmatch'),
- ('showmode','showmode'),
- ('showtabline','showtabline'),
- ('shq','shq'),
- ('si','si'),
- ('sidescroll','sidescroll'),
- ('sidescrolloff','sidescrolloff'),
- ('siso','siso'),
- ('sj','sj'),
- ('slm','slm'),
- ('sm','sm'),
- ('smartcase','smartcase'),
- ('smartindent','smartindent'),
- ('smarttab','smarttab'),
- ('smc','smc'),
- ('smd','smd'),
- ('sn','sn'),
- ('so','so'),
- ('softtabstop','softtabstop'),
- ('sol','sol'),
- ('sp','sp'),
- ('spc','spc'),
- ('spell','spell'),
- ('spellcapcheck','spellcapcheck'),
- ('spellfile','spellfile'),
- ('spelllang','spelllang'),
- ('spellsuggest','spellsuggest'),
- ('spf','spf'),
- ('spl','spl'),
- ('splitbelow','splitbelow'),
- ('splitright','splitright'),
- ('spr','spr'),
- ('sps','sps'),
- ('sr','sr'),
- ('srr','srr'),
- ('ss','ss'),
- ('ssl','ssl'),
- ('ssop','ssop'),
- ('st','st'),
- ('sta','sta'),
- ('stal','stal'),
- ('startofline','startofline'),
- ('statusline','statusline'),
- ('stl','stl'),
- ('stmp','stmp'),
- ('sts','sts'),
- ('su','su'),
- ('sua','sua'),
- ('suffixes','suffixes'),
- ('suffixesadd','suffixesadd'),
- ('sw','sw'),
- ('swapfile','swapfile'),
- ('swapsync','swapsync'),
- ('swb','swb'),
- ('swf','swf'),
- ('switchbuf','switchbuf'),
- ('sws','sws'),
- ('sxe','sxe'),
- ('sxq','sxq'),
- ('syn','syn'),
- ('synmaxcol','synmaxcol'),
- ('syntax','syntax'),
- ('t_AB','t_AB'),
- ('t_AF','t_AF'),
- ('t_AL','t_AL'),
- ('t_CS','t_CS'),
- ('t_CV','t_CV'),
- ('t_Ce','t_Ce'),
- ('t_Co','t_Co'),
- ('t_Cs','t_Cs'),
- ('t_DL','t_DL'),
- ('t_EI','t_EI'),
- ('t_F1','t_F1'),
- ('t_F2','t_F2'),
- ('t_F3','t_F3'),
- ('t_F4','t_F4'),
- ('t_F5','t_F5'),
- ('t_F6','t_F6'),
- ('t_F7','t_F7'),
- ('t_F8','t_F8'),
- ('t_F9','t_F9'),
- ('t_IE','t_IE'),
- ('t_IS','t_IS'),
- ('t_K1','t_K1'),
- ('t_K3','t_K3'),
- ('t_K4','t_K4'),
- ('t_K5','t_K5'),
- ('t_K6','t_K6'),
- ('t_K7','t_K7'),
- ('t_K8','t_K8'),
- ('t_K9','t_K9'),
- ('t_KA','t_KA'),
- ('t_KB','t_KB'),
- ('t_KC','t_KC'),
- ('t_KD','t_KD'),
- ('t_KE','t_KE'),
- ('t_KF','t_KF'),
- ('t_KG','t_KG'),
- ('t_KH','t_KH'),
- ('t_KI','t_KI'),
- ('t_KJ','t_KJ'),
- ('t_KK','t_KK'),
- ('t_KL','t_KL'),
- ('t_RI','t_RI'),
- ('t_RV','t_RV'),
- ('t_SI','t_SI'),
- ('t_Sb','t_Sb'),
- ('t_Sf','t_Sf'),
- ('t_WP','t_WP'),
- ('t_WS','t_WS'),
- ('t_ZH','t_ZH'),
- ('t_ZR','t_ZR'),
- ('t_al','t_al'),
- ('t_bc','t_bc'),
- ('t_cd','t_cd'),
- ('t_ce','t_ce'),
- ('t_cl','t_cl'),
- ('t_cm','t_cm'),
- ('t_cs','t_cs'),
- ('t_da','t_da'),
- ('t_db','t_db'),
- ('t_dl','t_dl'),
- ('t_fs','t_fs'),
- ('t_k1','t_k1'),
- ('t_k2','t_k2'),
- ('t_k3','t_k3'),
- ('t_k4','t_k4'),
- ('t_k5','t_k5'),
- ('t_k6','t_k6'),
- ('t_k7','t_k7'),
- ('t_k8','t_k8'),
- ('t_k9','t_k9'),
- ('t_kB','t_kB'),
- ('t_kD','t_kD'),
- ('t_kI','t_kI'),
- ('t_kN','t_kN'),
- ('t_kP','t_kP'),
- ('t_kb','t_kb'),
- ('t_kd','t_kd'),
- ('t_ke','t_ke'),
- ('t_kh','t_kh'),
- ('t_kl','t_kl'),
- ('t_kr','t_kr'),
- ('t_ks','t_ks'),
- ('t_ku','t_ku'),
- ('t_le','t_le'),
- ('t_mb','t_mb'),
- ('t_md','t_md'),
- ('t_me','t_me'),
- ('t_mr','t_mr'),
- ('t_ms','t_ms'),
- ('t_nd','t_nd'),
- ('t_op','t_op'),
- ('t_se','t_se'),
- ('t_so','t_so'),
- ('t_sr','t_sr'),
- ('t_te','t_te'),
- ('t_ti','t_ti'),
- ('t_ts','t_ts'),
- ('t_u7','t_u7'),
- ('t_ue','t_ue'),
- ('t_us','t_us'),
- ('t_ut','t_ut'),
- ('t_vb','t_vb'),
- ('t_ve','t_ve'),
- ('t_vi','t_vi'),
- ('t_vs','t_vs'),
- ('t_xs','t_xs'),
- ('ta','ta'),
- ('tabline','tabline'),
- ('tabpagemax','tabpagemax'),
- ('tabstop','tabstop'),
- ('tag','tag'),
- ('tagbsearch','tagbsearch'),
- ('taglength','taglength'),
- ('tagrelative','tagrelative'),
- ('tags','tags'),
- ('tagstack','tagstack'),
- ('tal','tal'),
- ('tb','tb'),
- ('tbi','tbi'),
- ('tbidi','tbidi'),
- ('tbis','tbis'),
- ('tbs','tbs'),
- ('tenc','tenc'),
- ('term','term'),
- ('termbidi','termbidi'),
- ('termencoding','termencoding'),
- ('terse','terse'),
- ('textauto','textauto'),
- ('textmode','textmode'),
- ('textwidth','textwidth'),
- ('tf','tf'),
- ('tgst','tgst'),
- ('thesaurus','thesaurus'),
- ('tildeop','tildeop'),
- ('timeout','timeout'),
- ('timeoutlen','timeoutlen'),
- ('title','title'),
- ('titlelen','titlelen'),
- ('titleold','titleold'),
- ('titlestring','titlestring'),
- ('tl','tl'),
- ('tm','tm'),
- ('to','to'),
- ('toolbar','toolbar'),
- ('toolbariconsize','toolbariconsize'),
- ('top','top'),
- ('tpm','tpm'),
- ('tr','tr'),
- ('ts','ts'),
- ('tsl','tsl'),
- ('tsr','tsr'),
- ('ttimeout','ttimeout'),
- ('ttimeoutlen','ttimeoutlen'),
- ('ttm','ttm'),
- ('tty','tty'),
- ('ttybuiltin','ttybuiltin'),
- ('ttyfast','ttyfast'),
- ('ttym','ttym'),
- ('ttymouse','ttymouse'),
- ('ttyscroll','ttyscroll'),
- ('ttytype','ttytype'),
- ('tw','tw'),
- ('tx','tx'),
- ('uc','uc'),
- ('udf','udf'),
- ('udir','udir'),
- ('ul','ul'),
- ('undodir','undodir'),
- ('undofile','undofile'),
- ('undolevels','undolevels'),
- ('undoreload','undoreload'),
- ('updatecount','updatecount'),
- ('updatetime','updatetime'),
- ('ur','ur'),
- ('ut','ut'),
- ('vb','vb'),
- ('vbs','vbs'),
- ('vdir','vdir'),
- ('ve','ve'),
- ('verbose','verbose'),
- ('verbosefile','verbosefile'),
- ('vfile','vfile'),
- ('vi','vi'),
- ('viewdir','viewdir'),
- ('viewoptions','viewoptions'),
- ('viminfo','viminfo'),
- ('virtualedit','virtualedit'),
- ('visualbell','visualbell'),
- ('vnoremap','vnoremap'),
- ('vop','vop'),
- ('wa','wa'),
- ('wak','wak'),
- ('warn','warn'),
- ('wb','wb'),
- ('wc','wc'),
- ('wcm','wcm'),
- ('wd','wd'),
- ('weirdinvert','weirdinvert'),
- ('wfh','wfh'),
- ('wfw','wfw'),
- ('wh','wh'),
- ('whichwrap','whichwrap'),
- ('wi','wi'),
- ('wic','wic'),
- ('wig','wig'),
- ('wildchar','wildchar'),
- ('wildcharm','wildcharm'),
- ('wildignore','wildignore'),
- ('wildignorecase','wildignorecase'),
- ('wildmenu','wildmenu'),
- ('wildmode','wildmode'),
- ('wildoptions','wildoptions'),
- ('wim','wim'),
- ('winaltkeys','winaltkeys'),
- ('window','window'),
- ('winfixheight','winfixheight'),
- ('winfixwidth','winfixwidth'),
- ('winheight','winheight'),
- ('winminheight','winminheight'),
- ('winminwidth','winminwidth'),
- ('winwidth','winwidth'),
- ('wiv','wiv'),
- ('wiw','wiw'),
- ('wm','wm'),
- ('wmh','wmh'),
- ('wmnu','wmnu'),
- ('wmw','wmw'),
- ('wop','wop'),
- ('wrap','wrap'),
- ('wrapmargin','wrapmargin'),
- ('wrapscan','wrapscan'),
- ('write','write'),
- ('writeany','writeany'),
- ('writebackup','writebackup'),
- ('writedelay','writedelay'),
- ('ws','ws'),
- ('ww','ww'),
- )
- return var
-option = _getoption()
-
+ :license: BSD, see LICENSE for details.
+"""
+
+# Split up in multiple functions so it's importable by jython, which has a
+# per-method size limit.
+
+def _getauto():
+ var = (
+ ('BufAdd','BufAdd'),
+ ('BufCreate','BufCreate'),
+ ('BufDelete','BufDelete'),
+ ('BufEnter','BufEnter'),
+ ('BufFilePost','BufFilePost'),
+ ('BufFilePre','BufFilePre'),
+ ('BufHidden','BufHidden'),
+ ('BufLeave','BufLeave'),
+ ('BufNew','BufNew'),
+ ('BufNewFile','BufNewFile'),
+ ('BufRead','BufRead'),
+ ('BufReadCmd','BufReadCmd'),
+ ('BufReadPost','BufReadPost'),
+ ('BufReadPre','BufReadPre'),
+ ('BufUnload','BufUnload'),
+ ('BufWinEnter','BufWinEnter'),
+ ('BufWinLeave','BufWinLeave'),
+ ('BufWipeout','BufWipeout'),
+ ('BufWrite','BufWrite'),
+ ('BufWriteCmd','BufWriteCmd'),
+ ('BufWritePost','BufWritePost'),
+ ('BufWritePre','BufWritePre'),
+ ('Cmd','Cmd'),
+ ('CmdwinEnter','CmdwinEnter'),
+ ('CmdwinLeave','CmdwinLeave'),
+ ('ColorScheme','ColorScheme'),
+ ('CompleteDone','CompleteDone'),
+ ('CursorHold','CursorHold'),
+ ('CursorHoldI','CursorHoldI'),
+ ('CursorMoved','CursorMoved'),
+ ('CursorMovedI','CursorMovedI'),
+ ('EncodingChanged','EncodingChanged'),
+ ('FileAppendCmd','FileAppendCmd'),
+ ('FileAppendPost','FileAppendPost'),
+ ('FileAppendPre','FileAppendPre'),
+ ('FileChangedRO','FileChangedRO'),
+ ('FileChangedShell','FileChangedShell'),
+ ('FileChangedShellPost','FileChangedShellPost'),
+ ('FileEncoding','FileEncoding'),
+ ('FileReadCmd','FileReadCmd'),
+ ('FileReadPost','FileReadPost'),
+ ('FileReadPre','FileReadPre'),
+ ('FileType','FileType'),
+ ('FileWriteCmd','FileWriteCmd'),
+ ('FileWritePost','FileWritePost'),
+ ('FileWritePre','FileWritePre'),
+ ('FilterReadPost','FilterReadPost'),
+ ('FilterReadPre','FilterReadPre'),
+ ('FilterWritePost','FilterWritePost'),
+ ('FilterWritePre','FilterWritePre'),
+ ('FocusGained','FocusGained'),
+ ('FocusLost','FocusLost'),
+ ('FuncUndefined','FuncUndefined'),
+ ('GUIEnter','GUIEnter'),
+ ('GUIFailed','GUIFailed'),
+ ('InsertChange','InsertChange'),
+ ('InsertCharPre','InsertCharPre'),
+ ('InsertEnter','InsertEnter'),
+ ('InsertLeave','InsertLeave'),
+ ('MenuPopup','MenuPopup'),
+ ('QuickFixCmdPost','QuickFixCmdPost'),
+ ('QuickFixCmdPre','QuickFixCmdPre'),
+ ('QuitPre','QuitPre'),
+ ('RemoteReply','RemoteReply'),
+ ('SessionLoadPost','SessionLoadPost'),
+ ('ShellCmdPost','ShellCmdPost'),
+ ('ShellFilterPost','ShellFilterPost'),
+ ('SourceCmd','SourceCmd'),
+ ('SourcePre','SourcePre'),
+ ('SpellFileMissing','SpellFileMissing'),
+ ('StdinReadPost','StdinReadPost'),
+ ('StdinReadPre','StdinReadPre'),
+ ('SwapExists','SwapExists'),
+ ('Syntax','Syntax'),
+ ('TabEnter','TabEnter'),
+ ('TabLeave','TabLeave'),
+ ('TermChanged','TermChanged'),
+ ('TermResponse','TermResponse'),
+ ('TextChanged','TextChanged'),
+ ('TextChangedI','TextChangedI'),
+ ('User','User'),
+ ('UserGettingBored','UserGettingBored'),
+ ('VimEnter','VimEnter'),
+ ('VimLeave','VimLeave'),
+ ('VimLeavePre','VimLeavePre'),
+ ('VimResized','VimResized'),
+ ('WinEnter','WinEnter'),
+ ('WinLeave','WinLeave'),
+ ('event','event'),
+ )
+ return var
+auto = _getauto()
+
+def _getcommand():
+ var = (
+ ('a','a'),
+ ('ab','ab'),
+ ('abc','abclear'),
+ ('abo','aboveleft'),
+ ('al','all'),
+ ('ar','ar'),
+ ('ar','args'),
+ ('arga','argadd'),
+ ('argd','argdelete'),
+ ('argdo','argdo'),
+ ('arge','argedit'),
+ ('argg','argglobal'),
+ ('argl','arglocal'),
+ ('argu','argument'),
+ ('as','ascii'),
+ ('au','au'),
+ ('b','buffer'),
+ ('bN','bNext'),
+ ('ba','ball'),
+ ('bad','badd'),
+ ('bd','bdelete'),
+ ('bel','belowright'),
+ ('bf','bfirst'),
+ ('bl','blast'),
+ ('bm','bmodified'),
+ ('bn','bnext'),
+ ('bo','botright'),
+ ('bp','bprevious'),
+ ('br','br'),
+ ('br','brewind'),
+ ('brea','break'),
+ ('breaka','breakadd'),
+ ('breakd','breakdel'),
+ ('breakl','breaklist'),
+ ('bro','browse'),
+ ('bu','bu'),
+ ('buf','buf'),
+ ('bufdo','bufdo'),
+ ('buffers','buffers'),
+ ('bun','bunload'),
+ ('bw','bwipeout'),
+ ('c','c'),
+ ('c','change'),
+ ('cN','cN'),
+ ('cN','cNext'),
+ ('cNf','cNf'),
+ ('cNf','cNfile'),
+ ('cabc','cabclear'),
+ ('cad','cad'),
+ ('cad','caddexpr'),
+ ('caddb','caddbuffer'),
+ ('caddf','caddfile'),
+ ('cal','call'),
+ ('cat','catch'),
+ ('cb','cbuffer'),
+ ('cc','cc'),
+ ('ccl','cclose'),
+ ('cd','cd'),
+ ('ce','center'),
+ ('cex','cexpr'),
+ ('cf','cfile'),
+ ('cfir','cfirst'),
+ ('cg','cgetfile'),
+ ('cgetb','cgetbuffer'),
+ ('cgete','cgetexpr'),
+ ('changes','changes'),
+ ('chd','chdir'),
+ ('che','checkpath'),
+ ('checkt','checktime'),
+ ('cl','cl'),
+ ('cl','clist'),
+ ('cla','clast'),
+ ('clo','close'),
+ ('cmapc','cmapclear'),
+ ('cn','cn'),
+ ('cn','cnext'),
+ ('cnew','cnewer'),
+ ('cnf','cnf'),
+ ('cnf','cnfile'),
+ ('co','copy'),
+ ('col','colder'),
+ ('colo','colorscheme'),
+ ('com','com'),
+ ('comc','comclear'),
+ ('comp','compiler'),
+ ('con','con'),
+ ('con','continue'),
+ ('conf','confirm'),
+ ('cope','copen'),
+ ('cp','cprevious'),
+ ('cpf','cpfile'),
+ ('cq','cquit'),
+ ('cr','crewind'),
+ ('cs','cs'),
+ ('cscope','cscope'),
+ ('cstag','cstag'),
+ ('cuna','cunabbrev'),
+ ('cw','cwindow'),
+ ('d','d'),
+ ('d','delete'),
+ ('de','de'),
+ ('debug','debug'),
+ ('debugg','debuggreedy'),
+ ('del','del'),
+ ('delc','delcommand'),
+ ('delel','delel'),
+ ('delep','delep'),
+ ('deletel','deletel'),
+ ('deletep','deletep'),
+ ('deletl','deletl'),
+ ('deletp','deletp'),
+ ('delf','delf'),
+ ('delf','delfunction'),
+ ('dell','dell'),
+ ('delm','delmarks'),
+ ('delp','delp'),
+ ('dep','dep'),
+ ('di','di'),
+ ('di','display'),
+ ('diffg','diffget'),
+ ('diffo','diffoff'),
+ ('diffp','diffpatch'),
+ ('diffpu','diffput'),
+ ('diffs','diffsplit'),
+ ('difft','diffthis'),
+ ('diffu','diffupdate'),
+ ('dig','dig'),
+ ('dig','digraphs'),
+ ('dir','dir'),
+ ('dj','djump'),
+ ('dl','dl'),
+ ('dli','dlist'),
+ ('do','do'),
+ ('doau','doau'),
+ ('dp','dp'),
+ ('dr','drop'),
+ ('ds','dsearch'),
+ ('dsp','dsplit'),
+ ('e','e'),
+ ('e','edit'),
+ ('ea','ea'),
+ ('earlier','earlier'),
+ ('ec','ec'),
+ ('echoe','echoerr'),
+ ('echom','echomsg'),
+ ('echon','echon'),
+ ('el','else'),
+ ('elsei','elseif'),
+ ('em','emenu'),
+ ('en','en'),
+ ('en','endif'),
+ ('endf','endf'),
+ ('endf','endfunction'),
+ ('endfo','endfor'),
+ ('endfun','endfun'),
+ ('endt','endtry'),
+ ('endw','endwhile'),
+ ('ene','enew'),
+ ('ex','ex'),
+ ('exi','exit'),
+ ('exu','exusage'),
+ ('f','f'),
+ ('f','file'),
+ ('files','files'),
+ ('filet','filet'),
+ ('filetype','filetype'),
+ ('fin','fin'),
+ ('fin','find'),
+ ('fina','finally'),
+ ('fini','finish'),
+ ('fir','first'),
+ ('fix','fixdel'),
+ ('fo','fold'),
+ ('foldc','foldclose'),
+ ('foldd','folddoopen'),
+ ('folddoc','folddoclosed'),
+ ('foldo','foldopen'),
+ ('for','for'),
+ ('fu','fu'),
+ ('fu','function'),
+ ('fun','fun'),
+ ('g','g'),
+ ('go','goto'),
+ ('gr','grep'),
+ ('grepa','grepadd'),
+ ('gui','gui'),
+ ('gvim','gvim'),
+ ('h','h'),
+ ('h','help'),
+ ('ha','hardcopy'),
+ ('helpf','helpfind'),
+ ('helpg','helpgrep'),
+ ('helpt','helptags'),
+ ('hi','hi'),
+ ('hid','hide'),
+ ('his','history'),
+ ('i','i'),
+ ('ia','ia'),
+ ('iabc','iabclear'),
+ ('if','if'),
+ ('ij','ijump'),
+ ('il','ilist'),
+ ('imapc','imapclear'),
+ ('in','in'),
+ ('intro','intro'),
+ ('is','isearch'),
+ ('isp','isplit'),
+ ('iuna','iunabbrev'),
+ ('j','join'),
+ ('ju','jumps'),
+ ('k','k'),
+ ('kee','keepmarks'),
+ ('keepa','keepa'),
+ ('keepalt','keepalt'),
+ ('keepj','keepjumps'),
+ ('keepp','keeppatterns'),
+ ('l','l'),
+ ('l','list'),
+ ('lN','lN'),
+ ('lN','lNext'),
+ ('lNf','lNf'),
+ ('lNf','lNfile'),
+ ('la','la'),
+ ('la','last'),
+ ('lad','lad'),
+ ('lad','laddexpr'),
+ ('laddb','laddbuffer'),
+ ('laddf','laddfile'),
+ ('lan','lan'),
+ ('lan','language'),
+ ('lat','lat'),
+ ('later','later'),
+ ('lb','lbuffer'),
+ ('lc','lcd'),
+ ('lch','lchdir'),
+ ('lcl','lclose'),
+ ('lcs','lcs'),
+ ('lcscope','lcscope'),
+ ('le','left'),
+ ('lefta','leftabove'),
+ ('lex','lexpr'),
+ ('lf','lfile'),
+ ('lfir','lfirst'),
+ ('lg','lgetfile'),
+ ('lgetb','lgetbuffer'),
+ ('lgete','lgetexpr'),
+ ('lgr','lgrep'),
+ ('lgrepa','lgrepadd'),
+ ('lh','lhelpgrep'),
+ ('ll','ll'),
+ ('lla','llast'),
+ ('lli','llist'),
+ ('lmak','lmake'),
+ ('lmapc','lmapclear'),
+ ('lne','lne'),
+ ('lne','lnext'),
+ ('lnew','lnewer'),
+ ('lnf','lnf'),
+ ('lnf','lnfile'),
+ ('lo','lo'),
+ ('lo','loadview'),
+ ('loadk','loadk'),
+ ('loadkeymap','loadkeymap'),
+ ('loc','lockmarks'),
+ ('lockv','lockvar'),
+ ('lol','lolder'),
+ ('lop','lopen'),
+ ('lp','lprevious'),
+ ('lpf','lpfile'),
+ ('lr','lrewind'),
+ ('ls','ls'),
+ ('lt','ltag'),
+ ('lua','lua'),
+ ('luado','luado'),
+ ('luafile','luafile'),
+ ('lv','lvimgrep'),
+ ('lvimgrepa','lvimgrepadd'),
+ ('lw','lwindow'),
+ ('m','move'),
+ ('ma','ma'),
+ ('ma','mark'),
+ ('mak','make'),
+ ('marks','marks'),
+ ('mat','match'),
+ ('menut','menut'),
+ ('menut','menutranslate'),
+ ('mes','mes'),
+ ('messages','messages'),
+ ('mk','mk'),
+ ('mk','mkexrc'),
+ ('mks','mksession'),
+ ('mksp','mkspell'),
+ ('mkv','mkv'),
+ ('mkv','mkvimrc'),
+ ('mkvie','mkview'),
+ ('mo','mo'),
+ ('mod','mode'),
+ ('mz','mz'),
+ ('mz','mzscheme'),
+ ('mzf','mzfile'),
+ ('n','n'),
+ ('n','next'),
+ ('nb','nbkey'),
+ ('nbc','nbclose'),
+ ('nbs','nbstart'),
+ ('ne','ne'),
+ ('new','new'),
+ ('nmapc','nmapclear'),
+ ('noa','noa'),
+ ('noautocmd','noautocmd'),
+ ('noh','nohlsearch'),
+ ('nu','number'),
+ ('o','o'),
+ ('o','open'),
+ ('ol','oldfiles'),
+ ('omapc','omapclear'),
+ ('on','only'),
+ ('opt','options'),
+ ('ownsyntax','ownsyntax'),
+ ('p','p'),
+ ('p','print'),
+ ('pc','pclose'),
+ ('pe','pe'),
+ ('pe','perl'),
+ ('ped','pedit'),
+ ('perld','perldo'),
+ ('po','pop'),
+ ('popu','popu'),
+ ('popu','popup'),
+ ('pp','ppop'),
+ ('pr','pr'),
+ ('pre','preserve'),
+ ('prev','previous'),
+ ('pro','pro'),
+ ('prof','profile'),
+ ('profd','profdel'),
+ ('promptf','promptfind'),
+ ('promptr','promptrepl'),
+ ('ps','psearch'),
+ ('ptN','ptN'),
+ ('ptN','ptNext'),
+ ('pta','ptag'),
+ ('ptf','ptfirst'),
+ ('ptj','ptjump'),
+ ('ptl','ptlast'),
+ ('ptn','ptn'),
+ ('ptn','ptnext'),
+ ('ptp','ptprevious'),
+ ('ptr','ptrewind'),
+ ('pts','ptselect'),
+ ('pu','put'),
+ ('pw','pwd'),
+ ('py','py'),
+ ('py','python'),
+ ('py3','py3'),
+ ('py3','py3'),
+ ('py3do','py3do'),
+ ('pydo','pydo'),
+ ('pyf','pyfile'),
+ ('python3','python3'),
+ ('q','q'),
+ ('q','quit'),
+ ('qa','qall'),
+ ('quita','quitall'),
+ ('r','r'),
+ ('r','read'),
+ ('re','re'),
+ ('rec','recover'),
+ ('red','red'),
+ ('red','redo'),
+ ('redi','redir'),
+ ('redr','redraw'),
+ ('redraws','redrawstatus'),
+ ('reg','registers'),
+ ('res','resize'),
+ ('ret','retab'),
+ ('retu','return'),
+ ('rew','rewind'),
+ ('ri','right'),
+ ('rightb','rightbelow'),
+ ('ru','ru'),
+ ('ru','runtime'),
+ ('rub','ruby'),
+ ('rubyd','rubydo'),
+ ('rubyf','rubyfile'),
+ ('rundo','rundo'),
+ ('rv','rviminfo'),
+ ('sN','sNext'),
+ ('sa','sargument'),
+ ('sal','sall'),
+ ('san','sandbox'),
+ ('sav','saveas'),
+ ('sb','sbuffer'),
+ ('sbN','sbNext'),
+ ('sba','sball'),
+ ('sbf','sbfirst'),
+ ('sbl','sblast'),
+ ('sbm','sbmodified'),
+ ('sbn','sbnext'),
+ ('sbp','sbprevious'),
+ ('sbr','sbrewind'),
+ ('scrip','scrip'),
+ ('scrip','scriptnames'),
+ ('scripte','scriptencoding'),
+ ('scs','scs'),
+ ('scscope','scscope'),
+ ('se','set'),
+ ('setf','setfiletype'),
+ ('setg','setglobal'),
+ ('setl','setlocal'),
+ ('sf','sfind'),
+ ('sfir','sfirst'),
+ ('sh','shell'),
+ ('si','si'),
+ ('sig','sig'),
+ ('sign','sign'),
+ ('sil','silent'),
+ ('sim','simalt'),
+ ('sl','sl'),
+ ('sl','sleep'),
+ ('sla','slast'),
+ ('sm','smagic'),
+ ('sm','smap'),
+ ('sme','sme'),
+ ('smenu','smenu'),
+ ('sn','snext'),
+ ('sni','sniff'),
+ ('sno','snomagic'),
+ ('snoreme','snoreme'),
+ ('snoremenu','snoremenu'),
+ ('so','so'),
+ ('so','source'),
+ ('sor','sort'),
+ ('sp','split'),
+ ('spe','spe'),
+ ('spe','spellgood'),
+ ('spelld','spelldump'),
+ ('spelli','spellinfo'),
+ ('spellr','spellrepall'),
+ ('spellu','spellundo'),
+ ('spellw','spellwrong'),
+ ('spr','sprevious'),
+ ('sre','srewind'),
+ ('st','st'),
+ ('st','stop'),
+ ('sta','stag'),
+ ('star','star'),
+ ('star','startinsert'),
+ ('start','start'),
+ ('startg','startgreplace'),
+ ('startr','startreplace'),
+ ('stj','stjump'),
+ ('stopi','stopinsert'),
+ ('sts','stselect'),
+ ('sun','sunhide'),
+ ('sunme','sunme'),
+ ('sunmenu','sunmenu'),
+ ('sus','suspend'),
+ ('sv','sview'),
+ ('sw','swapname'),
+ ('sy','sy'),
+ ('syn','syn'),
+ ('sync','sync'),
+ ('syncbind','syncbind'),
+ ('syntime','syntime'),
+ ('t','t'),
+ ('tN','tN'),
+ ('tN','tNext'),
+ ('ta','ta'),
+ ('ta','tag'),
+ ('tab','tab'),
+ ('tabN','tabN'),
+ ('tabN','tabNext'),
+ ('tabc','tabclose'),
+ ('tabd','tabdo'),
+ ('tabe','tabedit'),
+ ('tabf','tabfind'),
+ ('tabfir','tabfirst'),
+ ('tabl','tablast'),
+ ('tabm','tabmove'),
+ ('tabn','tabnext'),
+ ('tabnew','tabnew'),
+ ('tabo','tabonly'),
+ ('tabp','tabprevious'),
+ ('tabr','tabrewind'),
+ ('tabs','tabs'),
+ ('tags','tags'),
+ ('tc','tcl'),
+ ('tcld','tcldo'),
+ ('tclf','tclfile'),
+ ('te','tearoff'),
+ ('tf','tfirst'),
+ ('th','throw'),
+ ('tj','tjump'),
+ ('tl','tlast'),
+ ('tm','tm'),
+ ('tm','tmenu'),
+ ('tn','tn'),
+ ('tn','tnext'),
+ ('to','topleft'),
+ ('tp','tprevious'),
+ ('tr','tr'),
+ ('tr','trewind'),
+ ('try','try'),
+ ('ts','tselect'),
+ ('tu','tu'),
+ ('tu','tunmenu'),
+ ('u','u'),
+ ('u','undo'),
+ ('un','un'),
+ ('una','unabbreviate'),
+ ('undoj','undojoin'),
+ ('undol','undolist'),
+ ('unh','unhide'),
+ ('unl','unl'),
+ ('unlo','unlockvar'),
+ ('uns','unsilent'),
+ ('up','update'),
+ ('v','v'),
+ ('ve','ve'),
+ ('ve','version'),
+ ('verb','verbose'),
+ ('vert','vertical'),
+ ('vi','vi'),
+ ('vi','visual'),
+ ('vie','view'),
+ ('vim','vimgrep'),
+ ('vimgrepa','vimgrepadd'),
+ ('viu','viusage'),
+ ('vmapc','vmapclear'),
+ ('vne','vnew'),
+ ('vs','vsplit'),
+ ('w','w'),
+ ('w','write'),
+ ('wN','wNext'),
+ ('wa','wall'),
+ ('wh','while'),
+ ('win','win'),
+ ('win','winsize'),
+ ('winc','wincmd'),
+ ('windo','windo'),
+ ('winp','winpos'),
+ ('wn','wnext'),
+ ('wp','wprevious'),
+ ('wq','wq'),
+ ('wqa','wqall'),
+ ('ws','wsverb'),
+ ('wundo','wundo'),
+ ('wv','wviminfo'),
+ ('x','x'),
+ ('x','xit'),
+ ('xa','xall'),
+ ('xmapc','xmapclear'),
+ ('xme','xme'),
+ ('xmenu','xmenu'),
+ ('xnoreme','xnoreme'),
+ ('xnoremenu','xnoremenu'),
+ ('xunme','xunme'),
+ ('xunmenu','xunmenu'),
+ ('xwininfo','xwininfo'),
+ ('y','yank'),
+ )
+ return var
+command = _getcommand()
+
+def _getoption():
+ var = (
+ ('acd','acd'),
+ ('ai','ai'),
+ ('akm','akm'),
+ ('al','al'),
+ ('aleph','aleph'),
+ ('allowrevins','allowrevins'),
+ ('altkeymap','altkeymap'),
+ ('ambiwidth','ambiwidth'),
+ ('ambw','ambw'),
+ ('anti','anti'),
+ ('antialias','antialias'),
+ ('ar','ar'),
+ ('arab','arab'),
+ ('arabic','arabic'),
+ ('arabicshape','arabicshape'),
+ ('ari','ari'),
+ ('arshape','arshape'),
+ ('autochdir','autochdir'),
+ ('autoindent','autoindent'),
+ ('autoread','autoread'),
+ ('autowrite','autowrite'),
+ ('autowriteall','autowriteall'),
+ ('aw','aw'),
+ ('awa','awa'),
+ ('background','background'),
+ ('backspace','backspace'),
+ ('backup','backup'),
+ ('backupcopy','backupcopy'),
+ ('backupdir','backupdir'),
+ ('backupext','backupext'),
+ ('backupskip','backupskip'),
+ ('balloondelay','balloondelay'),
+ ('ballooneval','ballooneval'),
+ ('balloonexpr','balloonexpr'),
+ ('bdir','bdir'),
+ ('bdlay','bdlay'),
+ ('beval','beval'),
+ ('bex','bex'),
+ ('bexpr','bexpr'),
+ ('bg','bg'),
+ ('bh','bh'),
+ ('bin','bin'),
+ ('binary','binary'),
+ ('biosk','biosk'),
+ ('bioskey','bioskey'),
+ ('bk','bk'),
+ ('bkc','bkc'),
+ ('bl','bl'),
+ ('bomb','bomb'),
+ ('breakat','breakat'),
+ ('brk','brk'),
+ ('browsedir','browsedir'),
+ ('bs','bs'),
+ ('bsdir','bsdir'),
+ ('bsk','bsk'),
+ ('bt','bt'),
+ ('bufhidden','bufhidden'),
+ ('buflisted','buflisted'),
+ ('buftype','buftype'),
+ ('casemap','casemap'),
+ ('cb','cb'),
+ ('cc','cc'),
+ ('ccv','ccv'),
+ ('cd','cd'),
+ ('cdpath','cdpath'),
+ ('cedit','cedit'),
+ ('cf','cf'),
+ ('cfu','cfu'),
+ ('ch','ch'),
+ ('charconvert','charconvert'),
+ ('ci','ci'),
+ ('cin','cin'),
+ ('cindent','cindent'),
+ ('cink','cink'),
+ ('cinkeys','cinkeys'),
+ ('cino','cino'),
+ ('cinoptions','cinoptions'),
+ ('cinw','cinw'),
+ ('cinwords','cinwords'),
+ ('clipboard','clipboard'),
+ ('cmdheight','cmdheight'),
+ ('cmdwinheight','cmdwinheight'),
+ ('cmp','cmp'),
+ ('cms','cms'),
+ ('co','co'),
+ ('cocu','cocu'),
+ ('cole','cole'),
+ ('colorcolumn','colorcolumn'),
+ ('columns','columns'),
+ ('com','com'),
+ ('comments','comments'),
+ ('commentstring','commentstring'),
+ ('compatible','compatible'),
+ ('complete','complete'),
+ ('completefunc','completefunc'),
+ ('completeopt','completeopt'),
+ ('concealcursor','concealcursor'),
+ ('conceallevel','conceallevel'),
+ ('confirm','confirm'),
+ ('consk','consk'),
+ ('conskey','conskey'),
+ ('copyindent','copyindent'),
+ ('cot','cot'),
+ ('cp','cp'),
+ ('cpo','cpo'),
+ ('cpoptions','cpoptions'),
+ ('cpt','cpt'),
+ ('crb','crb'),
+ ('cryptmethod','cryptmethod'),
+ ('cscopepathcomp','cscopepathcomp'),
+ ('cscopeprg','cscopeprg'),
+ ('cscopequickfix','cscopequickfix'),
+ ('cscoperelative','cscoperelative'),
+ ('cscopetag','cscopetag'),
+ ('cscopetagorder','cscopetagorder'),
+ ('cscopeverbose','cscopeverbose'),
+ ('cspc','cspc'),
+ ('csprg','csprg'),
+ ('csqf','csqf'),
+ ('csre','csre'),
+ ('cst','cst'),
+ ('csto','csto'),
+ ('csverb','csverb'),
+ ('cuc','cuc'),
+ ('cul','cul'),
+ ('cursorbind','cursorbind'),
+ ('cursorcolumn','cursorcolumn'),
+ ('cursorline','cursorline'),
+ ('cwh','cwh'),
+ ('debug','debug'),
+ ('deco','deco'),
+ ('def','def'),
+ ('define','define'),
+ ('delcombine','delcombine'),
+ ('dex','dex'),
+ ('dg','dg'),
+ ('dict','dict'),
+ ('dictionary','dictionary'),
+ ('diff','diff'),
+ ('diffexpr','diffexpr'),
+ ('diffopt','diffopt'),
+ ('digraph','digraph'),
+ ('dip','dip'),
+ ('dir','dir'),
+ ('directory','directory'),
+ ('display','display'),
+ ('dy','dy'),
+ ('ea','ea'),
+ ('ead','ead'),
+ ('eadirection','eadirection'),
+ ('eb','eb'),
+ ('ed','ed'),
+ ('edcompatible','edcompatible'),
+ ('ef','ef'),
+ ('efm','efm'),
+ ('ei','ei'),
+ ('ek','ek'),
+ ('enc','enc'),
+ ('encoding','encoding'),
+ ('endofline','endofline'),
+ ('eol','eol'),
+ ('ep','ep'),
+ ('equalalways','equalalways'),
+ ('equalprg','equalprg'),
+ ('errorbells','errorbells'),
+ ('errorfile','errorfile'),
+ ('errorformat','errorformat'),
+ ('esckeys','esckeys'),
+ ('et','et'),
+ ('eventignore','eventignore'),
+ ('ex','ex'),
+ ('expandtab','expandtab'),
+ ('exrc','exrc'),
+ ('fcl','fcl'),
+ ('fcs','fcs'),
+ ('fdc','fdc'),
+ ('fde','fde'),
+ ('fdi','fdi'),
+ ('fdl','fdl'),
+ ('fdls','fdls'),
+ ('fdm','fdm'),
+ ('fdn','fdn'),
+ ('fdo','fdo'),
+ ('fdt','fdt'),
+ ('fen','fen'),
+ ('fenc','fenc'),
+ ('fencs','fencs'),
+ ('fex','fex'),
+ ('ff','ff'),
+ ('ffs','ffs'),
+ ('fic','fic'),
+ ('fileencoding','fileencoding'),
+ ('fileencodings','fileencodings'),
+ ('fileformat','fileformat'),
+ ('fileformats','fileformats'),
+ ('fileignorecase','fileignorecase'),
+ ('filetype','filetype'),
+ ('fillchars','fillchars'),
+ ('fk','fk'),
+ ('fkmap','fkmap'),
+ ('flp','flp'),
+ ('fml','fml'),
+ ('fmr','fmr'),
+ ('fo','fo'),
+ ('foldclose','foldclose'),
+ ('foldcolumn','foldcolumn'),
+ ('foldenable','foldenable'),
+ ('foldexpr','foldexpr'),
+ ('foldignore','foldignore'),
+ ('foldlevel','foldlevel'),
+ ('foldlevelstart','foldlevelstart'),
+ ('foldmarker','foldmarker'),
+ ('foldmethod','foldmethod'),
+ ('foldminlines','foldminlines'),
+ ('foldnestmax','foldnestmax'),
+ ('foldopen','foldopen'),
+ ('foldtext','foldtext'),
+ ('formatexpr','formatexpr'),
+ ('formatlistpat','formatlistpat'),
+ ('formatoptions','formatoptions'),
+ ('formatprg','formatprg'),
+ ('fp','fp'),
+ ('fs','fs'),
+ ('fsync','fsync'),
+ ('ft','ft'),
+ ('gcr','gcr'),
+ ('gd','gd'),
+ ('gdefault','gdefault'),
+ ('gfm','gfm'),
+ ('gfn','gfn'),
+ ('gfs','gfs'),
+ ('gfw','gfw'),
+ ('ghr','ghr'),
+ ('go','go'),
+ ('gp','gp'),
+ ('grepformat','grepformat'),
+ ('grepprg','grepprg'),
+ ('gtl','gtl'),
+ ('gtt','gtt'),
+ ('guicursor','guicursor'),
+ ('guifont','guifont'),
+ ('guifontset','guifontset'),
+ ('guifontwide','guifontwide'),
+ ('guiheadroom','guiheadroom'),
+ ('guioptions','guioptions'),
+ ('guipty','guipty'),
+ ('guitablabel','guitablabel'),
+ ('guitabtooltip','guitabtooltip'),
+ ('helpfile','helpfile'),
+ ('helpheight','helpheight'),
+ ('helplang','helplang'),
+ ('hf','hf'),
+ ('hh','hh'),
+ ('hi','hi'),
+ ('hid','hid'),
+ ('hidden','hidden'),
+ ('highlight','highlight'),
+ ('history','history'),
+ ('hk','hk'),
+ ('hkmap','hkmap'),
+ ('hkmapp','hkmapp'),
+ ('hkp','hkp'),
+ ('hl','hl'),
+ ('hlg','hlg'),
+ ('hls','hls'),
+ ('hlsearch','hlsearch'),
+ ('ic','ic'),
+ ('icon','icon'),
+ ('iconstring','iconstring'),
+ ('ignorecase','ignorecase'),
+ ('im','im'),
+ ('imactivatefunc','imactivatefunc'),
+ ('imactivatekey','imactivatekey'),
+ ('imaf','imaf'),
+ ('imak','imak'),
+ ('imc','imc'),
+ ('imcmdline','imcmdline'),
+ ('imd','imd'),
+ ('imdisable','imdisable'),
+ ('imi','imi'),
+ ('iminsert','iminsert'),
+ ('ims','ims'),
+ ('imsearch','imsearch'),
+ ('imsf','imsf'),
+ ('imstatusfunc','imstatusfunc'),
+ ('inc','inc'),
+ ('include','include'),
+ ('includeexpr','includeexpr'),
+ ('incsearch','incsearch'),
+ ('inde','inde'),
+ ('indentexpr','indentexpr'),
+ ('indentkeys','indentkeys'),
+ ('indk','indk'),
+ ('inex','inex'),
+ ('inf','inf'),
+ ('infercase','infercase'),
+ ('inoremap','inoremap'),
+ ('insertmode','insertmode'),
+ ('invacd','invacd'),
+ ('invai','invai'),
+ ('invakm','invakm'),
+ ('invallowrevins','invallowrevins'),
+ ('invaltkeymap','invaltkeymap'),
+ ('invanti','invanti'),
+ ('invantialias','invantialias'),
+ ('invar','invar'),
+ ('invarab','invarab'),
+ ('invarabic','invarabic'),
+ ('invarabicshape','invarabicshape'),
+ ('invari','invari'),
+ ('invarshape','invarshape'),
+ ('invautochdir','invautochdir'),
+ ('invautoindent','invautoindent'),
+ ('invautoread','invautoread'),
+ ('invautowrite','invautowrite'),
+ ('invautowriteall','invautowriteall'),
+ ('invaw','invaw'),
+ ('invawa','invawa'),
+ ('invbackup','invbackup'),
+ ('invballooneval','invballooneval'),
+ ('invbeval','invbeval'),
+ ('invbin','invbin'),
+ ('invbinary','invbinary'),
+ ('invbiosk','invbiosk'),
+ ('invbioskey','invbioskey'),
+ ('invbk','invbk'),
+ ('invbl','invbl'),
+ ('invbomb','invbomb'),
+ ('invbuflisted','invbuflisted'),
+ ('invcf','invcf'),
+ ('invci','invci'),
+ ('invcin','invcin'),
+ ('invcindent','invcindent'),
+ ('invcompatible','invcompatible'),
+ ('invconfirm','invconfirm'),
+ ('invconsk','invconsk'),
+ ('invconskey','invconskey'),
+ ('invcopyindent','invcopyindent'),
+ ('invcp','invcp'),
+ ('invcrb','invcrb'),
+ ('invcscoperelative','invcscoperelative'),
+ ('invcscopetag','invcscopetag'),
+ ('invcscopeverbose','invcscopeverbose'),
+ ('invcsre','invcsre'),
+ ('invcst','invcst'),
+ ('invcsverb','invcsverb'),
+ ('invcuc','invcuc'),
+ ('invcul','invcul'),
+ ('invcursorbind','invcursorbind'),
+ ('invcursorcolumn','invcursorcolumn'),
+ ('invcursorline','invcursorline'),
+ ('invdeco','invdeco'),
+ ('invdelcombine','invdelcombine'),
+ ('invdg','invdg'),
+ ('invdiff','invdiff'),
+ ('invdigraph','invdigraph'),
+ ('invea','invea'),
+ ('inveb','inveb'),
+ ('inved','inved'),
+ ('invedcompatible','invedcompatible'),
+ ('invek','invek'),
+ ('invendofline','invendofline'),
+ ('inveol','inveol'),
+ ('invequalalways','invequalalways'),
+ ('inverrorbells','inverrorbells'),
+ ('invesckeys','invesckeys'),
+ ('invet','invet'),
+ ('invex','invex'),
+ ('invexpandtab','invexpandtab'),
+ ('invexrc','invexrc'),
+ ('invfen','invfen'),
+ ('invfic','invfic'),
+ ('invfileignorecase','invfileignorecase'),
+ ('invfk','invfk'),
+ ('invfkmap','invfkmap'),
+ ('invfoldenable','invfoldenable'),
+ ('invgd','invgd'),
+ ('invgdefault','invgdefault'),
+ ('invguipty','invguipty'),
+ ('invhid','invhid'),
+ ('invhidden','invhidden'),
+ ('invhk','invhk'),
+ ('invhkmap','invhkmap'),
+ ('invhkmapp','invhkmapp'),
+ ('invhkp','invhkp'),
+ ('invhls','invhls'),
+ ('invhlsearch','invhlsearch'),
+ ('invic','invic'),
+ ('invicon','invicon'),
+ ('invignorecase','invignorecase'),
+ ('invim','invim'),
+ ('invimc','invimc'),
+ ('invimcmdline','invimcmdline'),
+ ('invimd','invimd'),
+ ('invimdisable','invimdisable'),
+ ('invincsearch','invincsearch'),
+ ('invinf','invinf'),
+ ('invinfercase','invinfercase'),
+ ('invinsertmode','invinsertmode'),
+ ('invis','invis'),
+ ('invjoinspaces','invjoinspaces'),
+ ('invjs','invjs'),
+ ('invlazyredraw','invlazyredraw'),
+ ('invlbr','invlbr'),
+ ('invlinebreak','invlinebreak'),
+ ('invlisp','invlisp'),
+ ('invlist','invlist'),
+ ('invloadplugins','invloadplugins'),
+ ('invlpl','invlpl'),
+ ('invlz','invlz'),
+ ('invma','invma'),
+ ('invmacatsui','invmacatsui'),
+ ('invmagic','invmagic'),
+ ('invmh','invmh'),
+ ('invml','invml'),
+ ('invmod','invmod'),
+ ('invmodeline','invmodeline'),
+ ('invmodifiable','invmodifiable'),
+ ('invmodified','invmodified'),
+ ('invmore','invmore'),
+ ('invmousef','invmousef'),
+ ('invmousefocus','invmousefocus'),
+ ('invmousehide','invmousehide'),
+ ('invnu','invnu'),
+ ('invnumber','invnumber'),
+ ('invodev','invodev'),
+ ('invopendevice','invopendevice'),
+ ('invpaste','invpaste'),
+ ('invpi','invpi'),
+ ('invpreserveindent','invpreserveindent'),
+ ('invpreviewwindow','invpreviewwindow'),
+ ('invprompt','invprompt'),
+ ('invpvw','invpvw'),
+ ('invreadonly','invreadonly'),
+ ('invrelativenumber','invrelativenumber'),
+ ('invremap','invremap'),
+ ('invrestorescreen','invrestorescreen'),
+ ('invrevins','invrevins'),
+ ('invri','invri'),
+ ('invrightleft','invrightleft'),
+ ('invrl','invrl'),
+ ('invrnu','invrnu'),
+ ('invro','invro'),
+ ('invrs','invrs'),
+ ('invru','invru'),
+ ('invruler','invruler'),
+ ('invsb','invsb'),
+ ('invsc','invsc'),
+ ('invscb','invscb'),
+ ('invscrollbind','invscrollbind'),
+ ('invscs','invscs'),
+ ('invsecure','invsecure'),
+ ('invsft','invsft'),
+ ('invshellslash','invshellslash'),
+ ('invshelltemp','invshelltemp'),
+ ('invshiftround','invshiftround'),
+ ('invshortname','invshortname'),
+ ('invshowcmd','invshowcmd'),
+ ('invshowfulltag','invshowfulltag'),
+ ('invshowmatch','invshowmatch'),
+ ('invshowmode','invshowmode'),
+ ('invsi','invsi'),
+ ('invsm','invsm'),
+ ('invsmartcase','invsmartcase'),
+ ('invsmartindent','invsmartindent'),
+ ('invsmarttab','invsmarttab'),
+ ('invsmd','invsmd'),
+ ('invsn','invsn'),
+ ('invsol','invsol'),
+ ('invspell','invspell'),
+ ('invsplitbelow','invsplitbelow'),
+ ('invsplitright','invsplitright'),
+ ('invspr','invspr'),
+ ('invsr','invsr'),
+ ('invssl','invssl'),
+ ('invsta','invsta'),
+ ('invstartofline','invstartofline'),
+ ('invstmp','invstmp'),
+ ('invswapfile','invswapfile'),
+ ('invswf','invswf'),
+ ('invta','invta'),
+ ('invtagbsearch','invtagbsearch'),
+ ('invtagrelative','invtagrelative'),
+ ('invtagstack','invtagstack'),
+ ('invtbi','invtbi'),
+ ('invtbidi','invtbidi'),
+ ('invtbs','invtbs'),
+ ('invtermbidi','invtermbidi'),
+ ('invterse','invterse'),
+ ('invtextauto','invtextauto'),
+ ('invtextmode','invtextmode'),
+ ('invtf','invtf'),
+ ('invtgst','invtgst'),
+ ('invtildeop','invtildeop'),
+ ('invtimeout','invtimeout'),
+ ('invtitle','invtitle'),
+ ('invto','invto'),
+ ('invtop','invtop'),
+ ('invtr','invtr'),
+ ('invttimeout','invttimeout'),
+ ('invttybuiltin','invttybuiltin'),
+ ('invttyfast','invttyfast'),
+ ('invtx','invtx'),
+ ('invudf','invudf'),
+ ('invundofile','invundofile'),
+ ('invvb','invvb'),
+ ('invvisualbell','invvisualbell'),
+ ('invwa','invwa'),
+ ('invwarn','invwarn'),
+ ('invwb','invwb'),
+ ('invweirdinvert','invweirdinvert'),
+ ('invwfh','invwfh'),
+ ('invwfw','invwfw'),
+ ('invwic','invwic'),
+ ('invwildignorecase','invwildignorecase'),
+ ('invwildmenu','invwildmenu'),
+ ('invwinfixheight','invwinfixheight'),
+ ('invwinfixwidth','invwinfixwidth'),
+ ('invwiv','invwiv'),
+ ('invwmnu','invwmnu'),
+ ('invwrap','invwrap'),
+ ('invwrapscan','invwrapscan'),
+ ('invwrite','invwrite'),
+ ('invwriteany','invwriteany'),
+ ('invwritebackup','invwritebackup'),
+ ('invws','invws'),
+ ('is','is'),
+ ('isf','isf'),
+ ('isfname','isfname'),
+ ('isi','isi'),
+ ('isident','isident'),
+ ('isk','isk'),
+ ('iskeyword','iskeyword'),
+ ('isp','isp'),
+ ('isprint','isprint'),
+ ('joinspaces','joinspaces'),
+ ('js','js'),
+ ('key','key'),
+ ('keymap','keymap'),
+ ('keymodel','keymodel'),
+ ('keywordprg','keywordprg'),
+ ('km','km'),
+ ('kmp','kmp'),
+ ('kp','kp'),
+ ('langmap','langmap'),
+ ('langmenu','langmenu'),
+ ('laststatus','laststatus'),
+ ('lazyredraw','lazyredraw'),
+ ('lbr','lbr'),
+ ('lcs','lcs'),
+ ('linebreak','linebreak'),
+ ('lines','lines'),
+ ('linespace','linespace'),
+ ('lisp','lisp'),
+ ('lispwords','lispwords'),
+ ('list','list'),
+ ('listchars','listchars'),
+ ('lm','lm'),
+ ('lmap','lmap'),
+ ('loadplugins','loadplugins'),
+ ('lpl','lpl'),
+ ('ls','ls'),
+ ('lsp','lsp'),
+ ('lw','lw'),
+ ('lz','lz'),
+ ('ma','ma'),
+ ('macatsui','macatsui'),
+ ('magic','magic'),
+ ('makeef','makeef'),
+ ('makeprg','makeprg'),
+ ('mat','mat'),
+ ('matchpairs','matchpairs'),
+ ('matchtime','matchtime'),
+ ('maxcombine','maxcombine'),
+ ('maxfuncdepth','maxfuncdepth'),
+ ('maxmapdepth','maxmapdepth'),
+ ('maxmem','maxmem'),
+ ('maxmempattern','maxmempattern'),
+ ('maxmemtot','maxmemtot'),
+ ('mco','mco'),
+ ('mef','mef'),
+ ('menuitems','menuitems'),
+ ('mfd','mfd'),
+ ('mh','mh'),
+ ('mis','mis'),
+ ('mkspellmem','mkspellmem'),
+ ('ml','ml'),
+ ('mls','mls'),
+ ('mm','mm'),
+ ('mmd','mmd'),
+ ('mmp','mmp'),
+ ('mmt','mmt'),
+ ('mod','mod'),
+ ('modeline','modeline'),
+ ('modelines','modelines'),
+ ('modifiable','modifiable'),
+ ('modified','modified'),
+ ('more','more'),
+ ('mouse','mouse'),
+ ('mousef','mousef'),
+ ('mousefocus','mousefocus'),
+ ('mousehide','mousehide'),
+ ('mousem','mousem'),
+ ('mousemodel','mousemodel'),
+ ('mouses','mouses'),
+ ('mouseshape','mouseshape'),
+ ('mouset','mouset'),
+ ('mousetime','mousetime'),
+ ('mp','mp'),
+ ('mps','mps'),
+ ('msm','msm'),
+ ('mzq','mzq'),
+ ('mzquantum','mzquantum'),
+ ('nf','nf'),
+ ('nnoremap','nnoremap'),
+ ('noacd','noacd'),
+ ('noai','noai'),
+ ('noakm','noakm'),
+ ('noallowrevins','noallowrevins'),
+ ('noaltkeymap','noaltkeymap'),
+ ('noanti','noanti'),
+ ('noantialias','noantialias'),
+ ('noar','noar'),
+ ('noarab','noarab'),
+ ('noarabic','noarabic'),
+ ('noarabicshape','noarabicshape'),
+ ('noari','noari'),
+ ('noarshape','noarshape'),
+ ('noautochdir','noautochdir'),
+ ('noautoindent','noautoindent'),
+ ('noautoread','noautoread'),
+ ('noautowrite','noautowrite'),
+ ('noautowriteall','noautowriteall'),
+ ('noaw','noaw'),
+ ('noawa','noawa'),
+ ('nobackup','nobackup'),
+ ('noballooneval','noballooneval'),
+ ('nobeval','nobeval'),
+ ('nobin','nobin'),
+ ('nobinary','nobinary'),
+ ('nobiosk','nobiosk'),
+ ('nobioskey','nobioskey'),
+ ('nobk','nobk'),
+ ('nobl','nobl'),
+ ('nobomb','nobomb'),
+ ('nobuflisted','nobuflisted'),
+ ('nocf','nocf'),
+ ('noci','noci'),
+ ('nocin','nocin'),
+ ('nocindent','nocindent'),
+ ('nocompatible','nocompatible'),
+ ('noconfirm','noconfirm'),
+ ('noconsk','noconsk'),
+ ('noconskey','noconskey'),
+ ('nocopyindent','nocopyindent'),
+ ('nocp','nocp'),
+ ('nocrb','nocrb'),
+ ('nocscoperelative','nocscoperelative'),
+ ('nocscopetag','nocscopetag'),
+ ('nocscopeverbose','nocscopeverbose'),
+ ('nocsre','nocsre'),
+ ('nocst','nocst'),
+ ('nocsverb','nocsverb'),
+ ('nocuc','nocuc'),
+ ('nocul','nocul'),
+ ('nocursorbind','nocursorbind'),
+ ('nocursorcolumn','nocursorcolumn'),
+ ('nocursorline','nocursorline'),
+ ('nodeco','nodeco'),
+ ('nodelcombine','nodelcombine'),
+ ('nodg','nodg'),
+ ('nodiff','nodiff'),
+ ('nodigraph','nodigraph'),
+ ('noea','noea'),
+ ('noeb','noeb'),
+ ('noed','noed'),
+ ('noedcompatible','noedcompatible'),
+ ('noek','noek'),
+ ('noendofline','noendofline'),
+ ('noeol','noeol'),
+ ('noequalalways','noequalalways'),
+ ('noerrorbells','noerrorbells'),
+ ('noesckeys','noesckeys'),
+ ('noet','noet'),
+ ('noex','noex'),
+ ('noexpandtab','noexpandtab'),
+ ('noexrc','noexrc'),
+ ('nofen','nofen'),
+ ('nofic','nofic'),
+ ('nofileignorecase','nofileignorecase'),
+ ('nofk','nofk'),
+ ('nofkmap','nofkmap'),
+ ('nofoldenable','nofoldenable'),
+ ('nogd','nogd'),
+ ('nogdefault','nogdefault'),
+ ('noguipty','noguipty'),
+ ('nohid','nohid'),
+ ('nohidden','nohidden'),
+ ('nohk','nohk'),
+ ('nohkmap','nohkmap'),
+ ('nohkmapp','nohkmapp'),
+ ('nohkp','nohkp'),
+ ('nohls','nohls'),
+ ('nohlsearch','nohlsearch'),
+ ('noic','noic'),
+ ('noicon','noicon'),
+ ('noignorecase','noignorecase'),
+ ('noim','noim'),
+ ('noimc','noimc'),
+ ('noimcmdline','noimcmdline'),
+ ('noimd','noimd'),
+ ('noimdisable','noimdisable'),
+ ('noincsearch','noincsearch'),
+ ('noinf','noinf'),
+ ('noinfercase','noinfercase'),
+ ('noinsertmode','noinsertmode'),
+ ('nois','nois'),
+ ('nojoinspaces','nojoinspaces'),
+ ('nojs','nojs'),
+ ('nolazyredraw','nolazyredraw'),
+ ('nolbr','nolbr'),
+ ('nolinebreak','nolinebreak'),
+ ('nolisp','nolisp'),
+ ('nolist','nolist'),
+ ('noloadplugins','noloadplugins'),
+ ('nolpl','nolpl'),
+ ('nolz','nolz'),
+ ('noma','noma'),
+ ('nomacatsui','nomacatsui'),
+ ('nomagic','nomagic'),
+ ('nomh','nomh'),
+ ('noml','noml'),
+ ('nomod','nomod'),
+ ('nomodeline','nomodeline'),
+ ('nomodifiable','nomodifiable'),
+ ('nomodified','nomodified'),
+ ('nomore','nomore'),
+ ('nomousef','nomousef'),
+ ('nomousefocus','nomousefocus'),
+ ('nomousehide','nomousehide'),
+ ('nonu','nonu'),
+ ('nonumber','nonumber'),
+ ('noodev','noodev'),
+ ('noopendevice','noopendevice'),
+ ('nopaste','nopaste'),
+ ('nopi','nopi'),
+ ('nopreserveindent','nopreserveindent'),
+ ('nopreviewwindow','nopreviewwindow'),
+ ('noprompt','noprompt'),
+ ('nopvw','nopvw'),
+ ('noreadonly','noreadonly'),
+ ('norelativenumber','norelativenumber'),
+ ('noremap','noremap'),
+ ('norestorescreen','norestorescreen'),
+ ('norevins','norevins'),
+ ('nori','nori'),
+ ('norightleft','norightleft'),
+ ('norl','norl'),
+ ('nornu','nornu'),
+ ('noro','noro'),
+ ('nors','nors'),
+ ('noru','noru'),
+ ('noruler','noruler'),
+ ('nosb','nosb'),
+ ('nosc','nosc'),
+ ('noscb','noscb'),
+ ('noscrollbind','noscrollbind'),
+ ('noscs','noscs'),
+ ('nosecure','nosecure'),
+ ('nosft','nosft'),
+ ('noshellslash','noshellslash'),
+ ('noshelltemp','noshelltemp'),
+ ('noshiftround','noshiftround'),
+ ('noshortname','noshortname'),
+ ('noshowcmd','noshowcmd'),
+ ('noshowfulltag','noshowfulltag'),
+ ('noshowmatch','noshowmatch'),
+ ('noshowmode','noshowmode'),
+ ('nosi','nosi'),
+ ('nosm','nosm'),
+ ('nosmartcase','nosmartcase'),
+ ('nosmartindent','nosmartindent'),
+ ('nosmarttab','nosmarttab'),
+ ('nosmd','nosmd'),
+ ('nosn','nosn'),
+ ('nosol','nosol'),
+ ('nospell','nospell'),
+ ('nosplitbelow','nosplitbelow'),
+ ('nosplitright','nosplitright'),
+ ('nospr','nospr'),
+ ('nosr','nosr'),
+ ('nossl','nossl'),
+ ('nosta','nosta'),
+ ('nostartofline','nostartofline'),
+ ('nostmp','nostmp'),
+ ('noswapfile','noswapfile'),
+ ('noswf','noswf'),
+ ('nota','nota'),
+ ('notagbsearch','notagbsearch'),
+ ('notagrelative','notagrelative'),
+ ('notagstack','notagstack'),
+ ('notbi','notbi'),
+ ('notbidi','notbidi'),
+ ('notbs','notbs'),
+ ('notermbidi','notermbidi'),
+ ('noterse','noterse'),
+ ('notextauto','notextauto'),
+ ('notextmode','notextmode'),
+ ('notf','notf'),
+ ('notgst','notgst'),
+ ('notildeop','notildeop'),
+ ('notimeout','notimeout'),
+ ('notitle','notitle'),
+ ('noto','noto'),
+ ('notop','notop'),
+ ('notr','notr'),
+ ('nottimeout','nottimeout'),
+ ('nottybuiltin','nottybuiltin'),
+ ('nottyfast','nottyfast'),
+ ('notx','notx'),
+ ('noudf','noudf'),
+ ('noundofile','noundofile'),
+ ('novb','novb'),
+ ('novisualbell','novisualbell'),
+ ('nowa','nowa'),
+ ('nowarn','nowarn'),
+ ('nowb','nowb'),
+ ('noweirdinvert','noweirdinvert'),
+ ('nowfh','nowfh'),
+ ('nowfw','nowfw'),
+ ('nowic','nowic'),
+ ('nowildignorecase','nowildignorecase'),
+ ('nowildmenu','nowildmenu'),
+ ('nowinfixheight','nowinfixheight'),
+ ('nowinfixwidth','nowinfixwidth'),
+ ('nowiv','nowiv'),
+ ('nowmnu','nowmnu'),
+ ('nowrap','nowrap'),
+ ('nowrapscan','nowrapscan'),
+ ('nowrite','nowrite'),
+ ('nowriteany','nowriteany'),
+ ('nowritebackup','nowritebackup'),
+ ('nows','nows'),
+ ('nrformats','nrformats'),
+ ('nu','nu'),
+ ('number','number'),
+ ('numberwidth','numberwidth'),
+ ('nuw','nuw'),
+ ('odev','odev'),
+ ('oft','oft'),
+ ('ofu','ofu'),
+ ('omnifunc','omnifunc'),
+ ('opendevice','opendevice'),
+ ('operatorfunc','operatorfunc'),
+ ('opfunc','opfunc'),
+ ('osfiletype','osfiletype'),
+ ('pa','pa'),
+ ('para','para'),
+ ('paragraphs','paragraphs'),
+ ('paste','paste'),
+ ('pastetoggle','pastetoggle'),
+ ('patchexpr','patchexpr'),
+ ('patchmode','patchmode'),
+ ('path','path'),
+ ('pdev','pdev'),
+ ('penc','penc'),
+ ('pex','pex'),
+ ('pexpr','pexpr'),
+ ('pfn','pfn'),
+ ('ph','ph'),
+ ('pheader','pheader'),
+ ('pi','pi'),
+ ('pm','pm'),
+ ('pmbcs','pmbcs'),
+ ('pmbfn','pmbfn'),
+ ('popt','popt'),
+ ('preserveindent','preserveindent'),
+ ('previewheight','previewheight'),
+ ('previewwindow','previewwindow'),
+ ('printdevice','printdevice'),
+ ('printencoding','printencoding'),
+ ('printexpr','printexpr'),
+ ('printfont','printfont'),
+ ('printheader','printheader'),
+ ('printmbcharset','printmbcharset'),
+ ('printmbfont','printmbfont'),
+ ('printoptions','printoptions'),
+ ('prompt','prompt'),
+ ('pt','pt'),
+ ('pumheight','pumheight'),
+ ('pvh','pvh'),
+ ('pvw','pvw'),
+ ('qe','qe'),
+ ('quoteescape','quoteescape'),
+ ('rdt','rdt'),
+ ('re','re'),
+ ('readonly','readonly'),
+ ('redrawtime','redrawtime'),
+ ('regexpengine','regexpengine'),
+ ('relativenumber','relativenumber'),
+ ('remap','remap'),
+ ('report','report'),
+ ('restorescreen','restorescreen'),
+ ('revins','revins'),
+ ('ri','ri'),
+ ('rightleft','rightleft'),
+ ('rightleftcmd','rightleftcmd'),
+ ('rl','rl'),
+ ('rlc','rlc'),
+ ('rnu','rnu'),
+ ('ro','ro'),
+ ('rs','rs'),
+ ('rtp','rtp'),
+ ('ru','ru'),
+ ('ruf','ruf'),
+ ('ruler','ruler'),
+ ('rulerformat','rulerformat'),
+ ('runtimepath','runtimepath'),
+ ('sb','sb'),
+ ('sbo','sbo'),
+ ('sbr','sbr'),
+ ('sc','sc'),
+ ('scb','scb'),
+ ('scr','scr'),
+ ('scroll','scroll'),
+ ('scrollbind','scrollbind'),
+ ('scrolljump','scrolljump'),
+ ('scrolloff','scrolloff'),
+ ('scrollopt','scrollopt'),
+ ('scs','scs'),
+ ('sect','sect'),
+ ('sections','sections'),
+ ('secure','secure'),
+ ('sel','sel'),
+ ('selection','selection'),
+ ('selectmode','selectmode'),
+ ('sessionoptions','sessionoptions'),
+ ('sft','sft'),
+ ('sh','sh'),
+ ('shcf','shcf'),
+ ('shell','shell'),
+ ('shellcmdflag','shellcmdflag'),
+ ('shellpipe','shellpipe'),
+ ('shellquote','shellquote'),
+ ('shellredir','shellredir'),
+ ('shellslash','shellslash'),
+ ('shelltemp','shelltemp'),
+ ('shelltype','shelltype'),
+ ('shellxescape','shellxescape'),
+ ('shellxquote','shellxquote'),
+ ('shiftround','shiftround'),
+ ('shiftwidth','shiftwidth'),
+ ('shm','shm'),
+ ('shortmess','shortmess'),
+ ('shortname','shortname'),
+ ('showbreak','showbreak'),
+ ('showcmd','showcmd'),
+ ('showfulltag','showfulltag'),
+ ('showmatch','showmatch'),
+ ('showmode','showmode'),
+ ('showtabline','showtabline'),
+ ('shq','shq'),
+ ('si','si'),
+ ('sidescroll','sidescroll'),
+ ('sidescrolloff','sidescrolloff'),
+ ('siso','siso'),
+ ('sj','sj'),
+ ('slm','slm'),
+ ('sm','sm'),
+ ('smartcase','smartcase'),
+ ('smartindent','smartindent'),
+ ('smarttab','smarttab'),
+ ('smc','smc'),
+ ('smd','smd'),
+ ('sn','sn'),
+ ('so','so'),
+ ('softtabstop','softtabstop'),
+ ('sol','sol'),
+ ('sp','sp'),
+ ('spc','spc'),
+ ('spell','spell'),
+ ('spellcapcheck','spellcapcheck'),
+ ('spellfile','spellfile'),
+ ('spelllang','spelllang'),
+ ('spellsuggest','spellsuggest'),
+ ('spf','spf'),
+ ('spl','spl'),
+ ('splitbelow','splitbelow'),
+ ('splitright','splitright'),
+ ('spr','spr'),
+ ('sps','sps'),
+ ('sr','sr'),
+ ('srr','srr'),
+ ('ss','ss'),
+ ('ssl','ssl'),
+ ('ssop','ssop'),
+ ('st','st'),
+ ('sta','sta'),
+ ('stal','stal'),
+ ('startofline','startofline'),
+ ('statusline','statusline'),
+ ('stl','stl'),
+ ('stmp','stmp'),
+ ('sts','sts'),
+ ('su','su'),
+ ('sua','sua'),
+ ('suffixes','suffixes'),
+ ('suffixesadd','suffixesadd'),
+ ('sw','sw'),
+ ('swapfile','swapfile'),
+ ('swapsync','swapsync'),
+ ('swb','swb'),
+ ('swf','swf'),
+ ('switchbuf','switchbuf'),
+ ('sws','sws'),
+ ('sxe','sxe'),
+ ('sxq','sxq'),
+ ('syn','syn'),
+ ('synmaxcol','synmaxcol'),
+ ('syntax','syntax'),
+ ('t_AB','t_AB'),
+ ('t_AF','t_AF'),
+ ('t_AL','t_AL'),
+ ('t_CS','t_CS'),
+ ('t_CV','t_CV'),
+ ('t_Ce','t_Ce'),
+ ('t_Co','t_Co'),
+ ('t_Cs','t_Cs'),
+ ('t_DL','t_DL'),
+ ('t_EI','t_EI'),
+ ('t_F1','t_F1'),
+ ('t_F2','t_F2'),
+ ('t_F3','t_F3'),
+ ('t_F4','t_F4'),
+ ('t_F5','t_F5'),
+ ('t_F6','t_F6'),
+ ('t_F7','t_F7'),
+ ('t_F8','t_F8'),
+ ('t_F9','t_F9'),
+ ('t_IE','t_IE'),
+ ('t_IS','t_IS'),
+ ('t_K1','t_K1'),
+ ('t_K3','t_K3'),
+ ('t_K4','t_K4'),
+ ('t_K5','t_K5'),
+ ('t_K6','t_K6'),
+ ('t_K7','t_K7'),
+ ('t_K8','t_K8'),
+ ('t_K9','t_K9'),
+ ('t_KA','t_KA'),
+ ('t_KB','t_KB'),
+ ('t_KC','t_KC'),
+ ('t_KD','t_KD'),
+ ('t_KE','t_KE'),
+ ('t_KF','t_KF'),
+ ('t_KG','t_KG'),
+ ('t_KH','t_KH'),
+ ('t_KI','t_KI'),
+ ('t_KJ','t_KJ'),
+ ('t_KK','t_KK'),
+ ('t_KL','t_KL'),
+ ('t_RI','t_RI'),
+ ('t_RV','t_RV'),
+ ('t_SI','t_SI'),
+ ('t_Sb','t_Sb'),
+ ('t_Sf','t_Sf'),
+ ('t_WP','t_WP'),
+ ('t_WS','t_WS'),
+ ('t_ZH','t_ZH'),
+ ('t_ZR','t_ZR'),
+ ('t_al','t_al'),
+ ('t_bc','t_bc'),
+ ('t_cd','t_cd'),
+ ('t_ce','t_ce'),
+ ('t_cl','t_cl'),
+ ('t_cm','t_cm'),
+ ('t_cs','t_cs'),
+ ('t_da','t_da'),
+ ('t_db','t_db'),
+ ('t_dl','t_dl'),
+ ('t_fs','t_fs'),
+ ('t_k1','t_k1'),
+ ('t_k2','t_k2'),
+ ('t_k3','t_k3'),
+ ('t_k4','t_k4'),
+ ('t_k5','t_k5'),
+ ('t_k6','t_k6'),
+ ('t_k7','t_k7'),
+ ('t_k8','t_k8'),
+ ('t_k9','t_k9'),
+ ('t_kB','t_kB'),
+ ('t_kD','t_kD'),
+ ('t_kI','t_kI'),
+ ('t_kN','t_kN'),
+ ('t_kP','t_kP'),
+ ('t_kb','t_kb'),
+ ('t_kd','t_kd'),
+ ('t_ke','t_ke'),
+ ('t_kh','t_kh'),
+ ('t_kl','t_kl'),
+ ('t_kr','t_kr'),
+ ('t_ks','t_ks'),
+ ('t_ku','t_ku'),
+ ('t_le','t_le'),
+ ('t_mb','t_mb'),
+ ('t_md','t_md'),
+ ('t_me','t_me'),
+ ('t_mr','t_mr'),
+ ('t_ms','t_ms'),
+ ('t_nd','t_nd'),
+ ('t_op','t_op'),
+ ('t_se','t_se'),
+ ('t_so','t_so'),
+ ('t_sr','t_sr'),
+ ('t_te','t_te'),
+ ('t_ti','t_ti'),
+ ('t_ts','t_ts'),
+ ('t_u7','t_u7'),
+ ('t_ue','t_ue'),
+ ('t_us','t_us'),
+ ('t_ut','t_ut'),
+ ('t_vb','t_vb'),
+ ('t_ve','t_ve'),
+ ('t_vi','t_vi'),
+ ('t_vs','t_vs'),
+ ('t_xs','t_xs'),
+ ('ta','ta'),
+ ('tabline','tabline'),
+ ('tabpagemax','tabpagemax'),
+ ('tabstop','tabstop'),
+ ('tag','tag'),
+ ('tagbsearch','tagbsearch'),
+ ('taglength','taglength'),
+ ('tagrelative','tagrelative'),
+ ('tags','tags'),
+ ('tagstack','tagstack'),
+ ('tal','tal'),
+ ('tb','tb'),
+ ('tbi','tbi'),
+ ('tbidi','tbidi'),
+ ('tbis','tbis'),
+ ('tbs','tbs'),
+ ('tenc','tenc'),
+ ('term','term'),
+ ('termbidi','termbidi'),
+ ('termencoding','termencoding'),
+ ('terse','terse'),
+ ('textauto','textauto'),
+ ('textmode','textmode'),
+ ('textwidth','textwidth'),
+ ('tf','tf'),
+ ('tgst','tgst'),
+ ('thesaurus','thesaurus'),
+ ('tildeop','tildeop'),
+ ('timeout','timeout'),
+ ('timeoutlen','timeoutlen'),
+ ('title','title'),
+ ('titlelen','titlelen'),
+ ('titleold','titleold'),
+ ('titlestring','titlestring'),
+ ('tl','tl'),
+ ('tm','tm'),
+ ('to','to'),
+ ('toolbar','toolbar'),
+ ('toolbariconsize','toolbariconsize'),
+ ('top','top'),
+ ('tpm','tpm'),
+ ('tr','tr'),
+ ('ts','ts'),
+ ('tsl','tsl'),
+ ('tsr','tsr'),
+ ('ttimeout','ttimeout'),
+ ('ttimeoutlen','ttimeoutlen'),
+ ('ttm','ttm'),
+ ('tty','tty'),
+ ('ttybuiltin','ttybuiltin'),
+ ('ttyfast','ttyfast'),
+ ('ttym','ttym'),
+ ('ttymouse','ttymouse'),
+ ('ttyscroll','ttyscroll'),
+ ('ttytype','ttytype'),
+ ('tw','tw'),
+ ('tx','tx'),
+ ('uc','uc'),
+ ('udf','udf'),
+ ('udir','udir'),
+ ('ul','ul'),
+ ('undodir','undodir'),
+ ('undofile','undofile'),
+ ('undolevels','undolevels'),
+ ('undoreload','undoreload'),
+ ('updatecount','updatecount'),
+ ('updatetime','updatetime'),
+ ('ur','ur'),
+ ('ut','ut'),
+ ('vb','vb'),
+ ('vbs','vbs'),
+ ('vdir','vdir'),
+ ('ve','ve'),
+ ('verbose','verbose'),
+ ('verbosefile','verbosefile'),
+ ('vfile','vfile'),
+ ('vi','vi'),
+ ('viewdir','viewdir'),
+ ('viewoptions','viewoptions'),
+ ('viminfo','viminfo'),
+ ('virtualedit','virtualedit'),
+ ('visualbell','visualbell'),
+ ('vnoremap','vnoremap'),
+ ('vop','vop'),
+ ('wa','wa'),
+ ('wak','wak'),
+ ('warn','warn'),
+ ('wb','wb'),
+ ('wc','wc'),
+ ('wcm','wcm'),
+ ('wd','wd'),
+ ('weirdinvert','weirdinvert'),
+ ('wfh','wfh'),
+ ('wfw','wfw'),
+ ('wh','wh'),
+ ('whichwrap','whichwrap'),
+ ('wi','wi'),
+ ('wic','wic'),
+ ('wig','wig'),
+ ('wildchar','wildchar'),
+ ('wildcharm','wildcharm'),
+ ('wildignore','wildignore'),
+ ('wildignorecase','wildignorecase'),
+ ('wildmenu','wildmenu'),
+ ('wildmode','wildmode'),
+ ('wildoptions','wildoptions'),
+ ('wim','wim'),
+ ('winaltkeys','winaltkeys'),
+ ('window','window'),
+ ('winfixheight','winfixheight'),
+ ('winfixwidth','winfixwidth'),
+ ('winheight','winheight'),
+ ('winminheight','winminheight'),
+ ('winminwidth','winminwidth'),
+ ('winwidth','winwidth'),
+ ('wiv','wiv'),
+ ('wiw','wiw'),
+ ('wm','wm'),
+ ('wmh','wmh'),
+ ('wmnu','wmnu'),
+ ('wmw','wmw'),
+ ('wop','wop'),
+ ('wrap','wrap'),
+ ('wrapmargin','wrapmargin'),
+ ('wrapscan','wrapscan'),
+ ('write','write'),
+ ('writeany','writeany'),
+ ('writebackup','writebackup'),
+ ('writedelay','writedelay'),
+ ('ws','ws'),
+ ('ww','ww'),
+ )
+ return var
+option = _getoption()
+
diff --git a/contrib/python/Pygments/py2/pygments/lexers/actionscript.py b/contrib/python/Pygments/py2/pygments/lexers/actionscript.py
index f4b4964e57..27217e57f5 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/actionscript.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/actionscript.py
@@ -1,240 +1,240 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.actionscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for ActionScript and MXML.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.actionscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for ActionScript and MXML.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, using, this, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
-
-
-class ActionScriptLexer(RegexLexer):
- """
- For ActionScript source code.
-
- .. versionadded:: 0.9
- """
-
- name = 'ActionScript'
- aliases = ['as', 'actionscript']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript', 'text/x-actionscript',
- 'text/actionscript']
-
- flags = re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
- (r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
- (r'[{}\[\]();.]+', Punctuation),
- (words((
- 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
- 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
- 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
- 'switch'), suffix=r'\b'),
- Keyword),
- (words((
- 'class', 'public', 'final', 'internal', 'native', 'override', 'private',
- 'protected', 'static', 'import', 'extends', 'implements', 'interface',
- 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
- 'namespace', 'package', 'set'), suffix=r'\b'),
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
- Keyword.Constant),
- (words((
- 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
- 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
- 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
- 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
- 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
- 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
- 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
- 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
- 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject',
- 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
- 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
- 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
- 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
- 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
- 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
- 'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput'
- 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
- 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
- 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
- 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
- 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
- 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
- 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
- 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
- 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
- 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
- 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
- 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
- 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
- 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
- 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
- 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
- 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
- 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
- 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
- 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
- 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError',
- 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
- 'XMLUI'), suffix=r'\b'),
- Name.Builtin),
- (words((
- 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
- 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
- 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
- 'unescape'), suffix=r'\b'),
- Name.Function),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class ActionScript3Lexer(RegexLexer):
- """
- For ActionScript 3 source code.
-
- .. versionadded:: 0.11
- """
-
- name = 'ActionScript 3'
- aliases = ['as3', 'actionscript3']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
- 'text/actionscript3']
-
- identifier = r'[$a-zA-Z_]\w*'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, using, this, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
+
+
+class ActionScriptLexer(RegexLexer):
+ """
+ For ActionScript source code.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'ActionScript'
+ aliases = ['as', 'actionscript']
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript', 'text/x-actionscript',
+ 'text/actionscript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
+ (r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
+ (r'[{}\[\]();.]+', Punctuation),
+ (words((
+ 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
+ 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
+ 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
+ 'switch'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'class', 'public', 'final', 'internal', 'native', 'override', 'private',
+ 'protected', 'static', 'import', 'extends', 'implements', 'interface',
+ 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
+ 'namespace', 'package', 'set'), suffix=r'\b'),
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
+ Keyword.Constant),
+ (words((
+ 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
+ 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
+ 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
+ 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
+ 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
+ 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
+ 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
+ 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
+ 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject',
+ 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
+ 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
+ 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
+ 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
+ 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
+ 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
+ 'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput'
+ 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
+ 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
+ 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
+ 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
+ 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
+ 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
+ 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
+ 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
+ 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
+ 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
+ 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
+ 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
+ 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
+ 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
+ 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
+ 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
+ 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
+ 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
+ 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
+ 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
+ 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError',
+ 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
+ 'XMLUI'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
+ 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
+ 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
+ 'unescape'), suffix=r'\b'),
+ Name.Function),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class ActionScript3Lexer(RegexLexer):
+ """
+ For ActionScript 3 source code.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'ActionScript 3'
+ aliases = ['as3', 'actionscript3']
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
+ 'text/actionscript3']
+
+ identifier = r'[$a-zA-Z_]\w*'
typeidentifier = identifier + r'(?:\.<\w+>)?'
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(function\s+)(' + identifier + r')(\s*)(\()',
- bygroups(Keyword.Declaration, Name.Function, Text, Operator),
- 'funcparams'),
- (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r')',
- bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
- Keyword.Type)),
- (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
- bygroups(Keyword, Text, Name.Namespace, Text)),
- (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
- bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
- (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
- (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
- r'switch|import|include|as|is)\b',
- Keyword),
- (r'(class|public|final|internal|native|override|private|protected|'
- r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b',
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
- r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
- r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
- r'unescape)\b', Name.Function),
- (identifier, Name),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
- ],
- 'funcparams': [
- (r'\s+', Text),
- (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r'|\*)(\s*)',
- bygroups(Text, Punctuation, Name, Text, Operator, Text,
- Keyword.Type, Text), 'defval'),
- (r'\)', Operator, 'type')
- ],
- 'type': [
- (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
- bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
- (r'\s+', Text, '#pop:2'),
- default('#pop:2')
- ],
- 'defval': [
- (r'(=)(\s*)([^(),]+)(\s*)(,?)',
- bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
- (r',', Operator, '#pop'),
- default('#pop')
- ]
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
- return 0
-
-
-class MxmlLexer(RegexLexer):
- """
- For MXML markup.
- Nested AS3 in <script> tags is highlighted by the appropriate lexer.
-
- .. versionadded:: 1.1
- """
- flags = re.MULTILINE | re.DOTALL
- name = 'MXML'
- aliases = ['mxml']
- filenames = ['*.mxml']
- mimetimes = ['text/xml', 'application/xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
- bygroups(String, using(ActionScript3Lexer), String)),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
+
+ flags = re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(function\s+)(' + identifier + r')(\s*)(\()',
+ bygroups(Keyword.Declaration, Name.Function, Text, Operator),
+ 'funcparams'),
+ (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r')',
+ bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
+ Keyword.Type)),
+ (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
+ bygroups(Keyword, Text, Name.Namespace, Text)),
+ (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
+ bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
+ (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
+ (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
+ r'switch|import|include|as|is)\b',
+ Keyword),
+ (r'(class|public|final|internal|native|override|private|protected|'
+ r'static|import|extends|implements|interface|intrinsic|return|super|'
+ r'dynamic|function|const|get|namespace|package|set)\b',
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
+ r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
+ r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
+ r'unescape)\b', Name.Function),
+ (identifier, Name),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
+ ],
+ 'funcparams': [
+ (r'\s+', Text),
+ (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r'|\*)(\s*)',
+ bygroups(Text, Punctuation, Name, Text, Operator, Text,
+ Keyword.Type, Text), 'defval'),
+ (r'\)', Operator, 'type')
+ ],
+ 'type': [
+ (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
+ bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
+ (r'\s+', Text, '#pop:2'),
+ default('#pop:2')
+ ],
+ 'defval': [
+ (r'(=)(\s*)([^(),]+)(\s*)(,?)',
+ bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
+ (r',', Operator, '#pop'),
+ default('#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+ return 0
+
+
+class MxmlLexer(RegexLexer):
+ """
+ For MXML markup.
+ Nested AS3 in <script> tags is highlighted by the appropriate lexer.
+
+ .. versionadded:: 1.1
+ """
+ flags = re.MULTILINE | re.DOTALL
+ name = 'MXML'
+ aliases = ['mxml']
+ filenames = ['*.mxml']
+ mimetimes = ['text/xml', 'application/xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
+ bygroups(String, using(ActionScript3Lexer), String)),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
(r'\s+', Text),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/agile.py b/contrib/python/Pygments/py2/pygments/lexers/agile.py
index 0e726339e3..de4f12ebc0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/agile.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/agile.py
@@ -1,24 +1,24 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.agile
- ~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.agile
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.lisp import SchemeLexer
-from pygments.lexers.jvm import IokeLexer, ClojureLexer
-from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
- PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
-from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
-from pygments.lexers.perl import PerlLexer, Perl6Lexer
-from pygments.lexers.d import CrocLexer, MiniDLexer
-from pygments.lexers.iolang import IoLexer
-from pygments.lexers.tcl import TclLexer
-from pygments.lexers.factor import FactorLexer
-from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.lisp import SchemeLexer
+from pygments.lexers.jvm import IokeLexer, ClojureLexer
+from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
+ PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
+from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
+from pygments.lexers.perl import PerlLexer, Perl6Lexer
+from pygments.lexers.d import CrocLexer, MiniDLexer
+from pygments.lexers.iolang import IoLexer
+from pygments.lexers.tcl import TclLexer
+from pygments.lexers.factor import FactorLexer
+from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
+
+__all__ = []
diff --git a/contrib/python/Pygments/py2/pygments/lexers/algebra.py b/contrib/python/Pygments/py2/pygments/lexers/algebra.py
index ba2a61a971..3d7a1099dc 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/algebra.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/algebra.py
@@ -1,221 +1,221 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.algebra
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for computer algebra systems.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.algebra
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer algebra systems.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer', 'BCLexer']
-
-
-class GAPLexer(RegexLexer):
- """
- For `GAP <http://www.gap-system.org>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'GAP'
- aliases = ['gap']
- filenames = ['*.g', '*.gd', '*.gi', '*.gap']
-
- tokens = {
- 'root': [
- (r'#.*$', Comment.Single),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- if|then|elif|else|fi|
- for|while|do|od|
- repeat|until|
- break|continue|
- function|local|return|end|
- rec|
- quit|QUIT|
- IsBound|Unbind|
- TryNextMethod|
- Info|Assert
- )\b''', Keyword),
- (r'''(?x)\b(?:
- true|false|fail|infinity
- )\b''',
- Name.Constant),
- (r'''(?x)\b(?:
- (Declare|Install)([A-Z][A-Za-z]+)|
- BindGlobal|BIND_GLOBAL
- )\b''',
- Name.Builtin),
- (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
- (r'''(?x)\b(?:
- and|or|not|mod|in
- )\b''',
- Operator.Word),
- (r'''(?x)
- (?:\w+|`[^`]*`)
- (?:::\w+|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'.', Text)
- ],
- }
-
-
-class MathematicaLexer(RegexLexer):
- """
- Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Mathematica'
- aliases = ['mathematica', 'mma', 'nb']
- filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
- mimetypes = ['application/mathematica',
- 'application/vnd.wolfram.mathematica',
- 'application/vnd.wolfram.mathematica.package',
- 'application/vnd.wolfram.cdf']
-
- # http://reference.wolfram.com/mathematica/guide/Syntax.html
- operators = (
- ";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
- "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
- "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
- )
-
- punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
-
- def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
-
- tokens = {
- 'root': [
- (r'(?s)\(\*.*?\*\)', Comment),
-
- (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
- (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
- (r'#\d*', Name.Variable),
- (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer', 'BCLexer']
+
+
+class GAPLexer(RegexLexer):
+ """
+ For `GAP <http://www.gap-system.org>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'GAP'
+ aliases = ['gap']
+ filenames = ['*.g', '*.gd', '*.gi', '*.gap']
+
+ tokens = {
+ 'root': [
+ (r'#.*$', Comment.Single),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ if|then|elif|else|fi|
+ for|while|do|od|
+ repeat|until|
+ break|continue|
+ function|local|return|end|
+ rec|
+ quit|QUIT|
+ IsBound|Unbind|
+ TryNextMethod|
+ Info|Assert
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ true|false|fail|infinity
+ )\b''',
+ Name.Constant),
+ (r'''(?x)\b(?:
+ (Declare|Install)([A-Z][A-Za-z]+)|
+ BindGlobal|BIND_GLOBAL
+ )\b''',
+ Name.Builtin),
+ (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|mod|in
+ )\b''',
+ Operator.Word),
+ (r'''(?x)
+ (?:\w+|`[^`]*`)
+ (?:::\w+|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
+ (r'.', Text)
+ ],
+ }
+
+
+class MathematicaLexer(RegexLexer):
+ """
+ Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mathematica'
+ aliases = ['mathematica', 'mma', 'nb']
+ filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
+ mimetypes = ['application/mathematica',
+ 'application/vnd.wolfram.mathematica',
+ 'application/vnd.wolfram.mathematica.package',
+ 'application/vnd.wolfram.cdf']
+
+ # http://reference.wolfram.com/mathematica/guide/Syntax.html
+ operators = (
+ ";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
+ "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
+ "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
+ )
+
+ punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
+
+ def _multi_escape(entries):
+ return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
+
+ tokens = {
+ 'root': [
+ (r'(?s)\(\*.*?\*\)', Comment),
+
+ (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
+ (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
+ (r'#\d*', Name.Variable),
+ (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
+
(r'-?\d+\.\d*', Number.Float),
(r'-?\d*\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
-
- (words(operators), Operator),
- (words(punctuation), Punctuation),
- (r'".*?"', String),
- (r'\s+', Text.Whitespace),
- ],
- }
-
-
-class MuPADLexer(RegexLexer):
- """
- A `MuPAD <http://www.mupad.com>`_ lexer.
- Contributed by Christopher Creutzig <christopher@creutzig.de>.
-
- .. versionadded:: 0.8
- """
- name = 'MuPAD'
- aliases = ['mupad']
- filenames = ['*.mu']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- next|break|end|
- axiom|end_axiom|category|end_category|domain|end_domain|inherits|
- if|%if|then|elif|else|end_if|
- case|of|do|otherwise|end_case|
- while|end_while|
- repeat|until|end_repeat|
- for|from|to|downto|step|end_for|
- proc|local|option|save|begin|end_proc|
- delete|frame
- )\b''', Keyword),
- (r'''(?x)\b(?:
- DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
- DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
- DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
- DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
- )\b''', Name.Class),
- (r'''(?x)\b(?:
- PI|EULER|E|CATALAN|
- NIL|FAIL|undefined|infinity|
- TRUE|FALSE|UNKNOWN
- )\b''',
- Name.Constant),
- (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
- (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
- (r'''(?x)\b(?:
- and|or|not|xor|
- assuming|
- div|mod|
- union|minus|intersect|in|subset
- )\b''',
- Operator.Word),
- (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
- # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
- (r'''(?x)
- ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
- (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
- bygroups(Name.Function, Text, Punctuation)),
- (r'''(?x)
- (?:[a-zA-Z_#][\w#]*|`[^`]*`)
- (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'.', Text)
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
-
-
-class BCLexer(RegexLexer):
- """
- A `BC <https://www.gnu.org/software/bc/>`_ lexer.
-
- .. versionadded:: 2.1
- """
- name = 'BC'
- aliases = ['bc']
- filenames = ['*.bc']
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'[{}();,]', Punctuation),
- (words(('if', 'else', 'while', 'for', 'break', 'continue',
- 'halt', 'return', 'define', 'auto', 'print', 'read',
- 'length', 'scale', 'sqrt', 'limits', 'quit',
- 'warranty'), suffix=r'\b'), Keyword),
- (r'\+\+|--|\|\||&&|'
- r'([-<>+*%\^/!=])=?', Operator),
- # bc doesn't support exponential
- (r'[0-9]+(\.[0-9]*)?', Number),
- (r'\.[0-9]+', Number),
- (r'.', Text)
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
+
+ (words(operators), Operator),
+ (words(punctuation), Punctuation),
+ (r'".*?"', String),
+ (r'\s+', Text.Whitespace),
+ ],
+ }
+
+
+class MuPADLexer(RegexLexer):
+ """
+ A `MuPAD <http://www.mupad.com>`_ lexer.
+ Contributed by Christopher Creutzig <christopher@creutzig.de>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'MuPAD'
+ aliases = ['mupad']
+ filenames = ['*.mu']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ next|break|end|
+ axiom|end_axiom|category|end_category|domain|end_domain|inherits|
+ if|%if|then|elif|else|end_if|
+ case|of|do|otherwise|end_case|
+ while|end_while|
+ repeat|until|end_repeat|
+ for|from|to|downto|step|end_for|
+ proc|local|option|save|begin|end_proc|
+ delete|frame
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
+ DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
+ DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
+ DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
+ )\b''', Name.Class),
+ (r'''(?x)\b(?:
+ PI|EULER|E|CATALAN|
+ NIL|FAIL|undefined|infinity|
+ TRUE|FALSE|UNKNOWN
+ )\b''',
+ Name.Constant),
+ (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
+ (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|xor|
+ assuming|
+ div|mod|
+ union|minus|intersect|in|subset
+ )\b''',
+ Operator.Word),
+ (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
+ # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
+ (r'''(?x)
+ ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
+ (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'''(?x)
+ (?:[a-zA-Z_#][\w#]*|`[^`]*`)
+ (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
+ (r'.', Text)
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ }
+
+
+class BCLexer(RegexLexer):
+ """
+ A `BC <https://www.gnu.org/software/bc/>`_ lexer.
+
+ .. versionadded:: 2.1
+ """
+ name = 'BC'
+ aliases = ['bc']
+ filenames = ['*.bc']
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'[{}();,]', Punctuation),
+ (words(('if', 'else', 'while', 'for', 'break', 'continue',
+ 'halt', 'return', 'define', 'auto', 'print', 'read',
+ 'length', 'scale', 'sqrt', 'limits', 'quit',
+ 'warranty'), suffix=r'\b'), Keyword),
+ (r'\+\+|--|\|\||&&|'
+ r'([-<>+*%\^/!=])=?', Operator),
+ # bc doesn't support exponential
+ (r'[0-9]+(\.[0-9]*)?', Number),
+ (r'\.[0-9]+', Number),
+ (r'.', Text)
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ }
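
The Mathematica operator table, MuPADLexer and BCLexer restored above are plain RegexLexer subclasses, so they are reachable through the usual Pygments entry points. A minimal sketch, assuming a working Pygments installation; the MuPAD and bc snippets are illustrative only:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

# 'mupad' and 'bc' are the aliases declared on MuPADLexer and BCLexer above.
mupad_src = 'square := proc(n) begin n^2 end_proc;  // a one-line procedure'
print(highlight(mupad_src, get_lexer_by_name('mupad'), TerminalFormatter()))

# The raw token stream can be inspected directly as well.
for token_type, value in get_lexer_by_name('bc').get_tokens('define f(x) { return x * 2 }'):
    print(token_type, repr(value))
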
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ambient.py b/contrib/python/Pygments/py2/pygments/lexers/ambient.py
index 7d42d12ad6..7e1761702d 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ambient.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ambient.py
@@ -1,76 +1,76 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.ambient
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for AmbientTalk language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ambient
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for AmbientTalk language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['AmbientTalkLexer']
-
-
-class AmbientTalkLexer(RegexLexer):
- """
- Lexer for `AmbientTalk <https://code.google.com/p/ambienttalk>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'AmbientTalk'
- filenames = ['*.at']
- aliases = ['at', 'ambienttalk', 'ambienttalk/2']
- mimetypes = ['text/x-ambienttalk']
-
- flags = re.MULTILINE | re.DOTALL
-
- builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
- 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
- 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
- 'mirroredBy:', 'is:'))
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(def|deftype|import|alias|exclude)\b', Keyword),
- (builtin, Name.Builtin),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'\|', Punctuation, 'arglist'),
- (r'<:|[*^!%&<>+=,./?-]|:=', Operator),
- (r"`[a-zA-Z_]\w*", String.Symbol),
- (r"[a-zA-Z_]\w*:", Name.Function),
- (r"[{}()\[\];`]", Punctuation),
- (r'(self|super)\b', Name.Variable.Instance),
- (r"[a-zA-Z_]\w*", Name.Variable),
- (r"@[a-zA-Z_]\w*", Name.Class),
- (r"@\[", Name.Class, 'annotations'),
- include('numbers'),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'namespace': [
- (r'[a-zA-Z_]\w*\.', Name.Namespace),
- (r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
- (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
- ],
- 'annotations': [
- (r"(.*?)\]", Name.Class, '#pop')
- ],
- 'arglist': [
- (r'\|', Punctuation, '#pop'),
- (r'\s*(,)\s*', Punctuation),
- (r'[a-zA-Z_]\w*', Name.Variable),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['AmbientTalkLexer']
+
+
+class AmbientTalkLexer(RegexLexer):
+ """
+ Lexer for `AmbientTalk <https://code.google.com/p/ambienttalk>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'AmbientTalk'
+ filenames = ['*.at']
+ aliases = ['at', 'ambienttalk', 'ambienttalk/2']
+ mimetypes = ['text/x-ambienttalk']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
+ 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
+ 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
+ 'mirroredBy:', 'is:'))
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(def|deftype|import|alias|exclude)\b', Keyword),
+ (builtin, Name.Builtin),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\|', Punctuation, 'arglist'),
+ (r'<:|[*^!%&<>+=,./?-]|:=', Operator),
+ (r"`[a-zA-Z_]\w*", String.Symbol),
+ (r"[a-zA-Z_]\w*:", Name.Function),
+ (r"[{}()\[\];`]", Punctuation),
+ (r'(self|super)\b', Name.Variable.Instance),
+ (r"[a-zA-Z_]\w*", Name.Variable),
+ (r"@[a-zA-Z_]\w*", Name.Class),
+ (r"@\[", Name.Class, 'annotations'),
+ include('numbers'),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_]\w*\.', Name.Namespace),
+ (r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
+ (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
+ ],
+ 'annotations': [
+ (r"(.*?)\]", Name.Class, '#pop')
+ ],
+ 'arglist': [
+ (r'\|', Punctuation, '#pop'),
+ (r'\s*(,)\s*', Punctuation),
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ ],
+ }
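
The AmbientTalk rules above can be exercised directly via get_tokens(); a small sketch, with a purely illustrative AmbientTalk snippet:

from pygments.lexers.ambient import AmbientTalkLexer

sample = 'def counter := object: { def n := 0; def inc() { n := n + 1 } };'
for token_type, value in AmbientTalkLexer().get_tokens(sample):
    if value.strip():  # skip whitespace-only tokens for readability
        print(token_type, repr(value))
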
diff --git a/contrib/python/Pygments/py2/pygments/lexers/apl.py b/contrib/python/Pygments/py2/pygments/lexers/apl.py
index 2696250486..6b9140e808 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/apl.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/apl.py
@@ -1,101 +1,101 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.apl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for APL.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.apl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for APL.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['APLLexer']
-
-
-class APLLexer(RegexLexer):
- """
- A simple APL lexer.
-
- .. versionadded:: 2.0
- """
- name = 'APL'
- aliases = ['apl']
- filenames = ['*.apl']
-
- tokens = {
- 'root': [
- # Whitespace
- # ==========
- (r'\s+', Text),
- #
- # Comment
- # =======
- # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
- (u'[⍝#].*$', Comment.Single),
- #
- # Strings
- # =======
- (r'\'((\'\')|[^\'])*\'', String.Single),
- (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
- #
- # Punctuation
- # ===========
- # This token type is used for diamond and parenthesis
- # but not for bracket and ; (see below)
- (u'[⋄◇()]', Punctuation),
- #
- # Array indexing
- # ==============
- # Since this token type is very important in APL, it is not included in
- # the punctuation token type but rather in the following one
- (r'[\[\];]', String.Regex),
- #
- # Distinguished names
- # ===================
- # following IBM APL2 standard
- (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
- #
- # Labels
- # ======
- # following IBM APL2 standard
- # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
- #
- # Variables
- # =========
- # following IBM APL2 standard
- (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
- #
- # Numbers
- # =======
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['APLLexer']
+
+
+class APLLexer(RegexLexer):
+ """
+ A simple APL lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'APL'
+ aliases = ['apl']
+ filenames = ['*.apl']
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ # ==========
+ (r'\s+', Text),
+ #
+ # Comment
+ # =======
+ # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
+ (u'[⍝#].*$', Comment.Single),
+ #
+ # Strings
+ # =======
+ (r'\'((\'\')|[^\'])*\'', String.Single),
+ (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
+ #
+ # Punctuation
+ # ===========
+ # This token type is used for diamond and parenthesis
+ # but not for bracket and ; (see below)
+ (u'[⋄◇()]', Punctuation),
+ #
+ # Array indexing
+ # ==============
+ # Since this token type is very important in APL, it is not included in
+ # the punctuation token type but rather in the following one
+ (r'[\[\];]', String.Regex),
+ #
+ # Distinguished names
+ # ===================
+ # following IBM APL2 standard
+ (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
+ #
+ # Labels
+ # ======
+ # following IBM APL2 standard
+ # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
+ #
+ # Variables
+ # =========
+ # following IBM APL2 standard
+ (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
+ #
+ # Numbers
+ # =======
(u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
- Number),
- #
- # Operators
- # ==========
+ Number),
+ #
+ # Operators
+ # ==========
(u'[\\.\\\\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
(u'[+\\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
- Operator),
- #
- # Constant
- # ========
- (u'⍬', Name.Constant),
- #
- # Quad symbol
- # ===========
- (u'[⎕⍞]', Name.Variable.Global),
- #
- # Arrows left/right
- # =================
- (u'[←→]', Keyword.Declaration),
- #
- # D-Fn
- # ====
- (u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
- (r'[{}]', Keyword.Type),
- ],
- }
+ Operator),
+ #
+ # Constant
+ # ========
+ (u'⍬', Name.Constant),
+ #
+ # Quad symbol
+ # ===========
+ (u'[⎕⍞]', Name.Variable.Global),
+ #
+ # Arrows left/right
+ # =================
+ (u'[←→]', Keyword.Declaration),
+ #
+ # D-Fn
+ # ====
+ (u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
+ (r'[{}]', Keyword.Type),
+ ],
+ }
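
Because the APL lexer above is driven almost entirely by explicit Unicode character classes, tokenizing a one-liner is a convenient sanity check. A sketch, assuming a UTF-8 source file (the py2 tree still needs the coding cookie and the u-prefix):

# -*- coding: utf-8 -*-
from pygments.lexers.apl import APLLexer

expr = u'avg ← {(+/⍵)÷⍴⍵}  ⍝ mean of a vector'
for token_type, value in APLLexer().get_tokens(expr):
    print(token_type, repr(value))
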
diff --git a/contrib/python/Pygments/py2/pygments/lexers/archetype.py b/contrib/python/Pygments/py2/pygments/lexers/archetype.py
index 68ec5c0498..8cf6500845 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/archetype.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/archetype.py
@@ -1,318 +1,318 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.archetype
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Archetype-related syntaxes, including:
-
- - ODIN syntax <https://github.com/openEHR/odin>
- - ADL syntax <http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf>
- - cADL sub-syntax of ADL
-
- For uses of this syntax, see the openEHR archetypes <http://www.openEHR.org/ckm>
-
- Contributed by Thomas Beale <https://github.com/wolandscat>,
- <https://bitbucket.org/thomas_beale>.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.archetype
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Archetype-related syntaxes, including:
+
+ - ODIN syntax <https://github.com/openEHR/odin>
+ - ADL syntax <http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf>
+ - cADL sub-syntax of ADL
+
+ For uses of this syntax, see the openEHR archetypes <http://www.openEHR.org/ckm>
+
+ Contributed by Thomas Beale <https://github.com/wolandscat>,
+ <https://bitbucket.org/thomas_beale>.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, using, default
-from pygments.token import Text, Comment, Name, Literal, Number, String, \
- Punctuation, Keyword, Operator, Generic
-
-__all__ = ['OdinLexer', 'CadlLexer', 'AdlLexer']
-
-
-class AtomsLexer(RegexLexer):
- """
- Lexer for Values used in ADL and ODIN.
-
- .. versionadded:: 2.1
- """
-
- tokens = {
- # ----- pseudo-states for inclusion -----
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'[ \t]*--.*$', Comment),
- ],
- 'archetype_id': [
- (r'[ \t]*([a-zA-Z]\w+(\.[a-zA-Z]\w+)*::)?[a-zA-Z]\w+(-[a-zA-Z]\w+){2}'
- r'\.\w+[\w-]*\.v\d+(\.\d+){,2}((-[a-z]+)(\.\d+)?)?', Name.Decorator),
- ],
- 'date_constraints': [
- # ISO 8601-based date/time constraints
- (r'[Xx?YyMmDdHhSs\d]{2,4}([:-][Xx?YyMmDdHhSs\d]{2}){2}', Literal.Date),
- # ISO 8601-based duration constraints + optional trailing slash
- (r'(P[YyMmWwDd]+(T[HhMmSs]+)?|PT[HhMmSs]+)/?', Literal.Date),
- ],
- 'ordered_values': [
- # ISO 8601 date with optional 'T' ligature
- (r'\d{4}-\d{2}-\d{2}T?', Literal.Date),
- # ISO 8601 time
- (r'\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{4}|Z)?', Literal.Date),
- # ISO 8601 duration
- (r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
- r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
- (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'[+-]?(\d+)*\.\d+%?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[+-]?\d+%?', Number.Integer),
- ],
- 'values': [
- include('ordered_values'),
- (r'([Tt]rue|[Ff]alse)', Literal),
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'[a-z][a-z0-9+.-]*:', Literal, 'uri'),
- # term code
- (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator,
- Punctuation)),
- (r'\|', Punctuation, 'interval'),
- # list continuation
- (r'\.\.\.', Punctuation),
- ],
- 'constraint_values': [
- (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)',
- bygroups(Punctuation, Name.Decorator, Punctuation), 'adl14_code_constraint'),
- # ADL 1.4 ordinal constraint
- (r'(\d*)(\|)(\[\w[\w-]*::\w[\w-]*\])((?:[,;])?)',
- bygroups(Number, Punctuation, Name.Decorator, Punctuation)),
- include('date_constraints'),
- include('values'),
- ],
-
- # ----- real states -----
- 'string': [
- ('"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- # all other characters
- (r'[^\\"]+', String),
- # stray backslash
- (r'\\', String),
- ],
- 'uri': [
- # effective URI terminators
- (r'[,>\s]', Punctuation, '#pop'),
- (r'[^>\s,]+', Literal),
- ],
- 'interval': [
- (r'\|', Punctuation, '#pop'),
- include('ordered_values'),
- (r'\.\.', Punctuation),
- (r'[<>=] *', Punctuation),
- # handle +/-
- (r'\+/-', Punctuation),
- (r'\s+', Text),
- ],
- 'any_code': [
- include('archetype_id'),
- # if it is a code
- (r'[a-z_]\w*[0-9.]+(@[^\]]+)?', Name.Decorator),
- # if it is tuple with attribute names
- (r'[a-z_]\w*', Name.Class),
- # if it is an integer, i.e. Xpath child index
- (r'[0-9]+', Text),
- (r'\|', Punctuation, 'code_rubric'),
- (r'\]', Punctuation, '#pop'),
- # handle use_archetype statement
- (r'\s*,\s*', Punctuation),
- ],
- 'code_rubric': [
- (r'\|', Punctuation, '#pop'),
- (r'[^|]+', String),
- ],
- 'adl14_code_constraint': [
- (r'\]', Punctuation, '#pop'),
- (r'\|', Punctuation, 'code_rubric'),
- (r'(\w[\w-]*)([;,]?)', bygroups(Name.Decorator, Punctuation)),
- include('whitespace'),
- ],
- }
-
-
-class OdinLexer(AtomsLexer):
- """
- Lexer for ODIN syntax.
-
- .. versionadded:: 2.1
- """
- name = 'ODIN'
- aliases = ['odin']
- filenames = ['*.odin']
- mimetypes = ['text/odin']
-
- tokens = {
- 'path': [
- (r'>', Punctuation, '#pop'),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'/', Punctuation),
- (r'\[', Punctuation, 'key'),
- (r'\s*,\s*', Punctuation, '#pop'),
- (r'\s+', Text, '#pop'),
- ],
- 'key': [
- include('values'),
- (r'\]', Punctuation, '#pop'),
- ],
- 'type_cast': [
- (r'\)', Punctuation, '#pop'),
- (r'[^)]+', Name.Class),
- ],
- 'root': [
- include('whitespace'),
- (r'([Tt]rue|[Ff]alse)', Literal),
- include('values'),
- # x-ref path
- (r'/', Punctuation, 'path'),
- # x-ref path starting with key
- (r'\[', Punctuation, 'key'),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'=', Operator),
- (r'\(', Punctuation, 'type_cast'),
- (r',', Punctuation),
- (r'<', Punctuation),
- (r'>', Punctuation),
- (r';', Punctuation),
- ],
- }
-
-
-class CadlLexer(AtomsLexer):
- """
- Lexer for cADL syntax.
-
- .. versionadded:: 2.1
- """
- name = 'cADL'
- aliases = ['cadl']
- filenames = ['*.cadl']
-
- tokens = {
- 'path': [
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'/', Punctuation),
- (r'\[', Punctuation, 'any_code'),
- (r'\s+', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- (r'(cardinality|existence|occurrences|group|include|exclude|'
- r'allow_archetype|use_archetype|use_node)\W', Keyword.Type),
- (r'(and|or|not|there_exists|xor|implies|for_all)\W', Keyword.Type),
- (r'(after|before|closed)\W', Keyword.Type),
- (r'(not)\W', Operator),
- (r'(matches|is_in)\W', Operator),
- # is_in / not is_in char
- (u'(\u2208|\u2209)', Operator),
- # there_exists / not there_exists / for_all / and / or
- (u'(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\u223C)',
- Operator),
- # regex in slot or as string constraint
- (r'(\{)(\s*/[^}]+/\s*)(\})',
- bygroups(Punctuation, String.Regex, Punctuation)),
- # regex in slot or as string constraint
- (r'(\{)(\s*\^[^}]+\^\s*)(\})',
- bygroups(Punctuation, String.Regex, Punctuation)),
- (r'/', Punctuation, 'path'),
- # for cardinality etc
- (r'(\{)((?:\d+\.\.)?(?:\d+|\*))'
- r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})',
- bygroups(Punctuation, Number, Number, Punctuation)),
- # [{ is start of a tuple value
- (r'\[\{', Punctuation),
- (r'\}\]', Punctuation),
- (r'\{', Punctuation),
- (r'\}', Punctuation),
- include('constraint_values'),
- # type name
- (r'[A-Z]\w+(<[A-Z]\w+([A-Za-z_<>]*)>)?', Name.Class),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'\[', Punctuation, 'any_code'),
- (r'(~|//|\\\\|\+|-|/|\*|\^|!=|=|<=|>=|<|>]?)', Operator),
- (r'\(', Punctuation),
- (r'\)', Punctuation),
- # for lists of values
- (r',', Punctuation),
- (r'"', String, 'string'),
- # for assumed value
- (r';', Punctuation),
- ],
- }
-
-
-class AdlLexer(AtomsLexer):
- """
- Lexer for ADL syntax.
-
- .. versionadded:: 2.1
- """
-
- name = 'ADL'
- aliases = ['adl']
- filenames = ['*.adl', '*.adls', '*.adlf', '*.adlx']
-
- tokens = {
- 'whitespace': [
- # blank line ends
- (r'\s*\n', Text),
- # comment-only line
- (r'^[ \t]*--.*$', Comment),
- ],
- 'odin_section': [
- # repeating the following two rules from the root state enable multi-line
- # strings that start in the first column to be dealt with
- (r'^(language|description|ontology|terminology|annotations|'
- r'component_terminologies|revision_history)[ \t]*\n', Generic.Heading),
- (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
- (r'^([ \t]*|[ \t]+.*)\n', using(OdinLexer)),
- (r'^([^"]*")(>[ \t]*\n)', bygroups(String, Punctuation)),
- # template overlay delimiter
- (r'^----------*\n', Text, '#pop'),
- (r'^.*\n', String),
- default('#pop'),
- ],
- 'cadl_section': [
- (r'^([ \t]*|[ \t]+.*)\n', using(CadlLexer)),
- default('#pop'),
- ],
- 'rules_section': [
- (r'^[ \t]+.*\n', using(CadlLexer)),
- default('#pop'),
- ],
- 'metadata': [
- (r'\)', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'([Tt]rue|[Ff]alse)', Literal),
- # numbers and version ids
- (r'\d+(\.\d+)*', Literal),
- # Guids
- (r'(\d|[a-fA-F])+(-(\d|[a-fA-F])+){3,}', Literal),
- (r'\w+', Name.Class),
- (r'"', String, 'string'),
- (r'=', Operator),
- (r'[ \t]+', Text),
- default('#pop'),
- ],
- 'root': [
- (r'^(archetype|template_overlay|operational_template|template|'
- r'speciali[sz]e)', Generic.Heading),
- (r'^(language|description|ontology|terminology|annotations|'
- r'component_terminologies|revision_history)[ \t]*\n',
- Generic.Heading, 'odin_section'),
- (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
- (r'^(rules)[ \t]*\n', Generic.Heading, 'rules_section'),
- include('archetype_id'),
- (r'[ \t]*\(', Punctuation, 'metadata'),
- include('whitespace'),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, using, default
+from pygments.token import Text, Comment, Name, Literal, Number, String, \
+ Punctuation, Keyword, Operator, Generic
+
+__all__ = ['OdinLexer', 'CadlLexer', 'AdlLexer']
+
+
+class AtomsLexer(RegexLexer):
+ """
+ Lexer for Values used in ADL and ODIN.
+
+ .. versionadded:: 2.1
+ """
+
+ tokens = {
+ # ----- pseudo-states for inclusion -----
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'[ \t]*--.*$', Comment),
+ ],
+ 'archetype_id': [
+ (r'[ \t]*([a-zA-Z]\w+(\.[a-zA-Z]\w+)*::)?[a-zA-Z]\w+(-[a-zA-Z]\w+){2}'
+ r'\.\w+[\w-]*\.v\d+(\.\d+){,2}((-[a-z]+)(\.\d+)?)?', Name.Decorator),
+ ],
+ 'date_constraints': [
+ # ISO 8601-based date/time constraints
+ (r'[Xx?YyMmDdHhSs\d]{2,4}([:-][Xx?YyMmDdHhSs\d]{2}){2}', Literal.Date),
+ # ISO 8601-based duration constraints + optional trailing slash
+ (r'(P[YyMmWwDd]+(T[HhMmSs]+)?|PT[HhMmSs]+)/?', Literal.Date),
+ ],
+ 'ordered_values': [
+ # ISO 8601 date with optional 'T' ligature
+ (r'\d{4}-\d{2}-\d{2}T?', Literal.Date),
+ # ISO 8601 time
+ (r'\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{4}|Z)?', Literal.Date),
+ # ISO 8601 duration
+ (r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
+ r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
+ (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'[+-]?(\d+)*\.\d+%?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[+-]?\d+%?', Number.Integer),
+ ],
+ 'values': [
+ include('ordered_values'),
+ (r'([Tt]rue|[Ff]alse)', Literal),
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'[a-z][a-z0-9+.-]*:', Literal, 'uri'),
+ # term code
+ (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])',
+ bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator,
+ Punctuation)),
+ (r'\|', Punctuation, 'interval'),
+ # list continuation
+ (r'\.\.\.', Punctuation),
+ ],
+ 'constraint_values': [
+ (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)',
+ bygroups(Punctuation, Name.Decorator, Punctuation), 'adl14_code_constraint'),
+ # ADL 1.4 ordinal constraint
+ (r'(\d*)(\|)(\[\w[\w-]*::\w[\w-]*\])((?:[,;])?)',
+ bygroups(Number, Punctuation, Name.Decorator, Punctuation)),
+ include('date_constraints'),
+ include('values'),
+ ],
+
+ # ----- real states -----
+ 'string': [
+ ('"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ # all other characters
+ (r'[^\\"]+', String),
+ # stray backslash
+ (r'\\', String),
+ ],
+ 'uri': [
+ # effective URI terminators
+ (r'[,>\s]', Punctuation, '#pop'),
+ (r'[^>\s,]+', Literal),
+ ],
+ 'interval': [
+ (r'\|', Punctuation, '#pop'),
+ include('ordered_values'),
+ (r'\.\.', Punctuation),
+ (r'[<>=] *', Punctuation),
+ # handle +/-
+ (r'\+/-', Punctuation),
+ (r'\s+', Text),
+ ],
+ 'any_code': [
+ include('archetype_id'),
+ # if it is a code
+ (r'[a-z_]\w*[0-9.]+(@[^\]]+)?', Name.Decorator),
+ # if it is tuple with attribute names
+ (r'[a-z_]\w*', Name.Class),
+ # if it is an integer, i.e. Xpath child index
+ (r'[0-9]+', Text),
+ (r'\|', Punctuation, 'code_rubric'),
+ (r'\]', Punctuation, '#pop'),
+ # handle use_archetype statement
+ (r'\s*,\s*', Punctuation),
+ ],
+ 'code_rubric': [
+ (r'\|', Punctuation, '#pop'),
+ (r'[^|]+', String),
+ ],
+ 'adl14_code_constraint': [
+ (r'\]', Punctuation, '#pop'),
+ (r'\|', Punctuation, 'code_rubric'),
+ (r'(\w[\w-]*)([;,]?)', bygroups(Name.Decorator, Punctuation)),
+ include('whitespace'),
+ ],
+ }
+
+
+class OdinLexer(AtomsLexer):
+ """
+ Lexer for ODIN syntax.
+
+ .. versionadded:: 2.1
+ """
+ name = 'ODIN'
+ aliases = ['odin']
+ filenames = ['*.odin']
+ mimetypes = ['text/odin']
+
+ tokens = {
+ 'path': [
+ (r'>', Punctuation, '#pop'),
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'/', Punctuation),
+ (r'\[', Punctuation, 'key'),
+ (r'\s*,\s*', Punctuation, '#pop'),
+ (r'\s+', Text, '#pop'),
+ ],
+ 'key': [
+ include('values'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+ 'type_cast': [
+ (r'\)', Punctuation, '#pop'),
+ (r'[^)]+', Name.Class),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'([Tt]rue|[Ff]alse)', Literal),
+ include('values'),
+ # x-ref path
+ (r'/', Punctuation, 'path'),
+ # x-ref path starting with key
+ (r'\[', Punctuation, 'key'),
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'=', Operator),
+ (r'\(', Punctuation, 'type_cast'),
+ (r',', Punctuation),
+ (r'<', Punctuation),
+ (r'>', Punctuation),
+ (r';', Punctuation),
+ ],
+ }
+
+
+class CadlLexer(AtomsLexer):
+ """
+ Lexer for cADL syntax.
+
+ .. versionadded:: 2.1
+ """
+ name = 'cADL'
+ aliases = ['cadl']
+ filenames = ['*.cadl']
+
+ tokens = {
+ 'path': [
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'/', Punctuation),
+ (r'\[', Punctuation, 'any_code'),
+ (r'\s+', Punctuation, '#pop'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'(cardinality|existence|occurrences|group|include|exclude|'
+ r'allow_archetype|use_archetype|use_node)\W', Keyword.Type),
+ (r'(and|or|not|there_exists|xor|implies|for_all)\W', Keyword.Type),
+ (r'(after|before|closed)\W', Keyword.Type),
+ (r'(not)\W', Operator),
+ (r'(matches|is_in)\W', Operator),
+ # is_in / not is_in char
+ (u'(\u2208|\u2209)', Operator),
+ # there_exists / not there_exists / for_all / and / or
+ (u'(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\u223C)',
+ Operator),
+ # regex in slot or as string constraint
+ (r'(\{)(\s*/[^}]+/\s*)(\})',
+ bygroups(Punctuation, String.Regex, Punctuation)),
+ # regex in slot or as string constraint
+ (r'(\{)(\s*\^[^}]+\^\s*)(\})',
+ bygroups(Punctuation, String.Regex, Punctuation)),
+ (r'/', Punctuation, 'path'),
+ # for cardinality etc
+ (r'(\{)((?:\d+\.\.)?(?:\d+|\*))'
+ r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})',
+ bygroups(Punctuation, Number, Number, Punctuation)),
+ # [{ is start of a tuple value
+ (r'\[\{', Punctuation),
+ (r'\}\]', Punctuation),
+ (r'\{', Punctuation),
+ (r'\}', Punctuation),
+ include('constraint_values'),
+ # type name
+ (r'[A-Z]\w+(<[A-Z]\w+([A-Za-z_<>]*)>)?', Name.Class),
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'\[', Punctuation, 'any_code'),
+ (r'(~|//|\\\\|\+|-|/|\*|\^|!=|=|<=|>=|<|>]?)', Operator),
+ (r'\(', Punctuation),
+ (r'\)', Punctuation),
+ # for lists of values
+ (r',', Punctuation),
+ (r'"', String, 'string'),
+ # for assumed value
+ (r';', Punctuation),
+ ],
+ }
+
+
+class AdlLexer(AtomsLexer):
+ """
+ Lexer for ADL syntax.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'ADL'
+ aliases = ['adl']
+ filenames = ['*.adl', '*.adls', '*.adlf', '*.adlx']
+
+ tokens = {
+ 'whitespace': [
+ # blank line ends
+ (r'\s*\n', Text),
+ # comment-only line
+ (r'^[ \t]*--.*$', Comment),
+ ],
+ 'odin_section': [
+ # repeating the following two rules from the root state enable multi-line
+ # strings that start in the first column to be dealt with
+ (r'^(language|description|ontology|terminology|annotations|'
+ r'component_terminologies|revision_history)[ \t]*\n', Generic.Heading),
+ (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
+ (r'^([ \t]*|[ \t]+.*)\n', using(OdinLexer)),
+ (r'^([^"]*")(>[ \t]*\n)', bygroups(String, Punctuation)),
+ # template overlay delimiter
+ (r'^----------*\n', Text, '#pop'),
+ (r'^.*\n', String),
+ default('#pop'),
+ ],
+ 'cadl_section': [
+ (r'^([ \t]*|[ \t]+.*)\n', using(CadlLexer)),
+ default('#pop'),
+ ],
+ 'rules_section': [
+ (r'^[ \t]+.*\n', using(CadlLexer)),
+ default('#pop'),
+ ],
+ 'metadata': [
+ (r'\)', Punctuation, '#pop'),
+ (r';', Punctuation),
+ (r'([Tt]rue|[Ff]alse)', Literal),
+ # numbers and version ids
+ (r'\d+(\.\d+)*', Literal),
+ # Guids
+ (r'(\d|[a-fA-F])+(-(\d|[a-fA-F])+){3,}', Literal),
+ (r'\w+', Name.Class),
+ (r'"', String, 'string'),
+ (r'=', Operator),
+ (r'[ \t]+', Text),
+ default('#pop'),
+ ],
+ 'root': [
+ (r'^(archetype|template_overlay|operational_template|template|'
+ r'speciali[sz]e)', Generic.Heading),
+ (r'^(language|description|ontology|terminology|annotations|'
+ r'component_terminologies|revision_history)[ \t]*\n',
+ Generic.Heading, 'odin_section'),
+ (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
+ (r'^(rules)[ \t]*\n', Generic.Heading, 'rules_section'),
+ include('archetype_id'),
+ (r'[ \t]*\(', Punctuation, 'metadata'),
+ include('whitespace'),
+ ],
+ }
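
The three archetype lexers above advertise distinct filename patterns (*.odin, *.cadl, *.adl and variants), so filename-based dispatch is the natural way to pick one. A minimal sketch; the file names are placeholders:

from pygments.lexers import get_lexer_for_filename

# Dispatch is driven by the `filenames` attributes on OdinLexer,
# CadlLexer and AdlLexer above.
for fname in ('person.odin', 'blood_pressure.cadl', 'observation.adl'):
    print(fname, '->', get_lexer_for_filename(fname).name)
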
diff --git a/contrib/python/Pygments/py2/pygments/lexers/asm.py b/contrib/python/Pygments/py2/pygments/lexers/asm.py
index 32ac936127..386e93f1cb 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/asm.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/asm.py
@@ -1,206 +1,206 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.asm
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for assembly languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.asm
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for assembly languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, include, bygroups, using, words, \
DelegatingLexer
-from pygments.lexers.c_cpp import CppLexer, CLexer
-from pygments.lexers.d import DLexer
-from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
- Other, Keyword, Operator
-
-__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
+from pygments.lexers.c_cpp import CppLexer, CLexer
+from pygments.lexers.d import DLexer
+from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
+ Other, Keyword, Operator
+
+__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'NasmLexer',
'NasmObjdumpLexer', 'TasmLexer', 'Ca65Lexer', 'Dasm16Lexer']
-
-
-class GasLexer(RegexLexer):
- """
- For Gas (AT&T) assembly code.
- """
- name = 'GAS'
- aliases = ['gas', 'asm']
- filenames = ['*.s', '*.S']
- mimetypes = ['text/x-gas']
-
- #: optional Comment or Whitespace
- string = r'"(\\"|[^"])*"'
- char = r'[\w$.@-]'
+
+
+class GasLexer(RegexLexer):
+ """
+ For Gas (AT&T) assembly code.
+ """
+ name = 'GAS'
+ aliases = ['gas', 'asm']
+ filenames = ['*.s', '*.S']
+ mimetypes = ['text/x-gas']
+
+ #: optional Comment or Whitespace
+ string = r'"(\\"|[^"])*"'
+ char = r'[\w$.@-]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
- number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
+ number = r'(?:0[xX][a-zA-Z0-9]+|\d+)'
register = '%' + identifier
-
- tokens = {
- 'root': [
- include('whitespace'),
- (identifier + ':', Name.Label),
- (r'\.' + identifier, Name.Attribute, 'directive-args'),
- (r'lock|rep(n?z)?|data\d+', Name.Attribute),
- (identifier, Name.Function, 'instruction-args'),
- (r'[\r\n]+', Text)
- ],
- 'directive-args': [
- (identifier, Name.Constant),
- (string, String),
- ('@' + identifier, Name.Attribute),
- (number, Number.Integer),
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (identifier + ':', Name.Label),
+ (r'\.' + identifier, Name.Attribute, 'directive-args'),
+ (r'lock|rep(n?z)?|data\d+', Name.Attribute),
+ (identifier, Name.Function, 'instruction-args'),
+ (r'[\r\n]+', Text)
+ ],
+ 'directive-args': [
+ (identifier, Name.Constant),
+ (string, String),
+ ('@' + identifier, Name.Attribute),
+ (number, Number.Integer),
(register, Name.Variable),
- (r'[\r\n]+', Text, '#pop'),
+ (r'[\r\n]+', Text, '#pop'),
(r'[;#].*?\n', Comment, '#pop'),
-
- include('punctuation'),
- include('whitespace')
- ],
- 'instruction-args': [
- # For objdump-disassembled code, shouldn't occur in
- # actual assembler input
- ('([a-z0-9]+)( )(<)('+identifier+')(>)',
- bygroups(Number.Hex, Text, Punctuation, Name.Constant,
- Punctuation)),
- ('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
- bygroups(Number.Hex, Text, Punctuation, Name.Constant,
- Punctuation, Number.Integer, Punctuation)),
-
- # Address constants
- (identifier, Name.Constant),
- (number, Number.Integer),
- # Registers
+
+ include('punctuation'),
+ include('whitespace')
+ ],
+ 'instruction-args': [
+ # For objdump-disassembled code, shouldn't occur in
+ # actual assembler input
+ ('([a-z0-9]+)( )(<)('+identifier+')(>)',
+ bygroups(Number.Hex, Text, Punctuation, Name.Constant,
+ Punctuation)),
+ ('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
+ bygroups(Number.Hex, Text, Punctuation, Name.Constant,
+ Punctuation, Number.Integer, Punctuation)),
+
+ # Address constants
+ (identifier, Name.Constant),
+ (number, Number.Integer),
+ # Registers
(register, Name.Variable),
- # Numeric constants
- ('$'+number, Number.Integer),
- (r"$'(.|\\')'", String.Char),
- (r'[\r\n]+', Text, '#pop'),
+ # Numeric constants
+ ('$'+number, Number.Integer),
+ (r"$'(.|\\')'", String.Char),
+ (r'[\r\n]+', Text, '#pop'),
(r'[;#].*?\n', Comment, '#pop'),
- include('punctuation'),
- include('whitespace')
- ],
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
+ include('punctuation'),
+ include('whitespace')
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
(r'[;#].*?\n', Comment)
- ],
- 'punctuation': [
- (r'[-*,.()\[\]!:]+', Punctuation)
- ]
- }
-
- def analyse_text(text):
- if re.match(r'^\.(text|data|section)', text, re.M):
- return True
- elif re.match(r'^\.\w+', text, re.M):
- return 0.1
-
-
-def _objdump_lexer_tokens(asm_lexer):
- """
- Common objdump lexer tokens to wrap an ASM lexer.
- """
- hex_re = r'[0-9A-Za-z]'
- return {
- 'root': [
- # File name & format:
- ('(.*?)(:)( +file format )(.*?)$',
- bygroups(Name.Label, Punctuation, Text, String)),
- # Section header
- ('(Disassembly of section )(.*?)(:)$',
- bygroups(Text, Name.Label, Punctuation)),
- # Function labels
- # (With offset)
- ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
- bygroups(Number.Hex, Text, Punctuation, Name.Function,
- Punctuation, Number.Hex, Punctuation)),
- # (Without offset)
- ('('+hex_re+'+)( )(<)(.*?)(>:)$',
- bygroups(Number.Hex, Text, Punctuation, Name.Function,
- Punctuation)),
- # Code line with disassembled instructions
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
- bygroups(Text, Name.Label, Text, Number.Hex, Text,
- using(asm_lexer))),
- # Code line with ascii
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
- bygroups(Text, Name.Label, Text, Number.Hex, Text, String)),
- # Continued code line, only raw opcodes without disassembled
- # instruction
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
- bygroups(Text, Name.Label, Text, Number.Hex)),
- # Skipped a few bytes
- (r'\t\.\.\.$', Text),
- # Relocation line
- # (With offset)
- (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x'+hex_re+'+)$',
- bygroups(Text, Name.Label, Text, Name.Property, Text,
- Name.Constant, Punctuation, Number.Hex)),
- # (Without offset)
- (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)$',
- bygroups(Text, Name.Label, Text, Name.Property, Text,
- Name.Constant)),
- (r'[^\n]+\n', Other)
- ]
- }
-
-
-class ObjdumpLexer(RegexLexer):
- """
- For the output of 'objdump -dr'
- """
- name = 'objdump'
- aliases = ['objdump']
- filenames = ['*.objdump']
- mimetypes = ['text/x-objdump']
-
- tokens = _objdump_lexer_tokens(GasLexer)
-
-
-class DObjdumpLexer(DelegatingLexer):
- """
- For the output of 'objdump -Sr on compiled D files'
- """
- name = 'd-objdump'
- aliases = ['d-objdump']
- filenames = ['*.d-objdump']
- mimetypes = ['text/x-d-objdump']
-
- def __init__(self, **options):
- super(DObjdumpLexer, self).__init__(DLexer, ObjdumpLexer, **options)
-
-
-class CppObjdumpLexer(DelegatingLexer):
- """
- For the output of 'objdump -Sr on compiled C++ files'
- """
- name = 'cpp-objdump'
- aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
- filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
- mimetypes = ['text/x-cpp-objdump']
-
- def __init__(self, **options):
- super(CppObjdumpLexer, self).__init__(CppLexer, ObjdumpLexer, **options)
-
-
-class CObjdumpLexer(DelegatingLexer):
- """
- For the output of 'objdump -Sr on compiled C files'
- """
- name = 'c-objdump'
- aliases = ['c-objdump']
- filenames = ['*.c-objdump']
- mimetypes = ['text/x-c-objdump']
-
- def __init__(self, **options):
- super(CObjdumpLexer, self).__init__(CLexer, ObjdumpLexer, **options)
-
-
+ ],
+ 'punctuation': [
+ (r'[-*,.()\[\]!:]+', Punctuation)
+ ]
+ }
+
+ def analyse_text(text):
+ if re.match(r'^\.(text|data|section)', text, re.M):
+ return True
+ elif re.match(r'^\.\w+', text, re.M):
+ return 0.1
+
+
+def _objdump_lexer_tokens(asm_lexer):
+ """
+ Common objdump lexer tokens to wrap an ASM lexer.
+ """
+ hex_re = r'[0-9A-Za-z]'
+ return {
+ 'root': [
+ # File name & format:
+ ('(.*?)(:)( +file format )(.*?)$',
+ bygroups(Name.Label, Punctuation, Text, String)),
+ # Section header
+ ('(Disassembly of section )(.*?)(:)$',
+ bygroups(Text, Name.Label, Punctuation)),
+ # Function labels
+ # (With offset)
+ ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
+ bygroups(Number.Hex, Text, Punctuation, Name.Function,
+ Punctuation, Number.Hex, Punctuation)),
+ # (Without offset)
+ ('('+hex_re+'+)( )(<)(.*?)(>:)$',
+ bygroups(Number.Hex, Text, Punctuation, Name.Function,
+ Punctuation)),
+ # Code line with disassembled instructions
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
+ bygroups(Text, Name.Label, Text, Number.Hex, Text,
+ using(asm_lexer))),
+ # Code line with ascii
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
+ bygroups(Text, Name.Label, Text, Number.Hex, Text, String)),
+ # Continued code line, only raw opcodes without disassembled
+ # instruction
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
+ bygroups(Text, Name.Label, Text, Number.Hex)),
+ # Skipped a few bytes
+ (r'\t\.\.\.$', Text),
+ # Relocation line
+ # (With offset)
+ (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x'+hex_re+'+)$',
+ bygroups(Text, Name.Label, Text, Name.Property, Text,
+ Name.Constant, Punctuation, Number.Hex)),
+ # (Without offset)
+ (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)$',
+ bygroups(Text, Name.Label, Text, Name.Property, Text,
+ Name.Constant)),
+ (r'[^\n]+\n', Other)
+ ]
+ }
+
+
+class ObjdumpLexer(RegexLexer):
+ """
+ For the output of 'objdump -dr'
+ """
+ name = 'objdump'
+ aliases = ['objdump']
+ filenames = ['*.objdump']
+ mimetypes = ['text/x-objdump']
+
+ tokens = _objdump_lexer_tokens(GasLexer)
+
+
+class DObjdumpLexer(DelegatingLexer):
+ """
+ For the output of 'objdump -Sr on compiled D files'
+ """
+ name = 'd-objdump'
+ aliases = ['d-objdump']
+ filenames = ['*.d-objdump']
+ mimetypes = ['text/x-d-objdump']
+
+ def __init__(self, **options):
+ super(DObjdumpLexer, self).__init__(DLexer, ObjdumpLexer, **options)
+
+
+class CppObjdumpLexer(DelegatingLexer):
+ """
+ For the output of 'objdump -Sr on compiled C++ files'
+ """
+ name = 'cpp-objdump'
+ aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
+ filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
+ mimetypes = ['text/x-cpp-objdump']
+
+ def __init__(self, **options):
+ super(CppObjdumpLexer, self).__init__(CppLexer, ObjdumpLexer, **options)
+
+
+class CObjdumpLexer(DelegatingLexer):
+ """
+ For the output of 'objdump -Sr on compiled C files'
+ """
+ name = 'c-objdump'
+ aliases = ['c-objdump']
+ filenames = ['*.c-objdump']
+ mimetypes = ['text/x-c-objdump']
+
+ def __init__(self, **options):
+ super(CObjdumpLexer, self).__init__(CLexer, ObjdumpLexer, **options)
+
+
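
DObjdumpLexer, CppObjdumpLexer and CObjdumpLexer above all follow the same DelegatingLexer recipe: ObjdumpLexer handles the dump framing and leaves the interleaved source lines as Other tokens, which the wrapped source-language lexer then re-lexes. A hedged sketch of the same pattern applied to another language; RustObjdumpLexer is a made-up name and not part of Pygments:

from pygments.lexer import DelegatingLexer
from pygments.lexers.asm import ObjdumpLexer
from pygments.lexers.rust import RustLexer


class RustObjdumpLexer(DelegatingLexer):
    """Hypothetical wrapper for 'objdump -Sr' output of compiled Rust code."""
    name = 'rust-objdump'
    aliases = ['rust-objdump']

    def __init__(self, **options):
        # Same argument order as CObjdumpLexer above: the source-language
        # lexer first, ObjdumpLexer second.
        super(RustObjdumpLexer, self).__init__(RustLexer, ObjdumpLexer, **options)
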
class HsailLexer(RegexLexer):
"""
For HSAIL assembly code.
@@ -336,48 +336,48 @@ class HsailLexer(RegexLexer):
}
-class LlvmLexer(RegexLexer):
- """
- For LLVM assembly code.
- """
- name = 'LLVM'
- aliases = ['llvm']
- filenames = ['*.ll']
- mimetypes = ['text/x-llvm']
-
- #: optional Comment or Whitespace
- string = r'"[^"]*?"'
- identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Before keywords, because keywords are valid label names :(...
+class LlvmLexer(RegexLexer):
+ """
+ For LLVM assembly code.
+ """
+ name = 'LLVM'
+ aliases = ['llvm']
+ filenames = ['*.ll']
+ mimetypes = ['text/x-llvm']
+
+ #: optional Comment or Whitespace
+ string = r'"[^"]*?"'
+ identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # Before keywords, because keywords are valid label names :(...
(identifier + r'\s*:', Name.Label),
-
- include('keyword'),
-
- (r'%' + identifier, Name.Variable),
- (r'@' + identifier, Name.Variable.Global),
- (r'%\d+', Name.Variable.Anonymous),
- (r'@\d+', Name.Variable.Global),
- (r'#\d+', Name.Variable.Global),
- (r'!' + identifier, Name.Variable),
- (r'!\d+', Name.Variable.Anonymous),
- (r'c?' + string, String),
-
- (r'0[xX][a-fA-F0-9]+', Number),
- (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
-
- (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
- ],
- 'whitespace': [
- (r'(\n|\s)+', Text),
- (r';.*?\n', Comment)
- ],
- 'keyword': [
- # Regular keywords
+
+ include('keyword'),
+
+ (r'%' + identifier, Name.Variable),
+ (r'@' + identifier, Name.Variable.Global),
+ (r'%\d+', Name.Variable.Anonymous),
+ (r'@\d+', Name.Variable.Global),
+ (r'#\d+', Name.Variable.Global),
+ (r'!' + identifier, Name.Variable),
+ (r'!\d+', Name.Variable.Anonymous),
+ (r'c?' + string, String),
+
+ (r'0[xX][a-fA-F0-9]+', Number),
+ (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
+
+ (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
+ ],
+ 'whitespace': [
+ (r'(\n|\s)+', Text),
+ (r';.*?\n', Comment)
+ ],
+ 'keyword': [
+ # Regular keywords
(words((
'acq_rel', 'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
'aliasee', 'align', 'alignLog2', 'alignstack', 'alloca', 'allocsize', 'allOnes',
@@ -437,115 +437,115 @@ class LlvmLexer(RegexLexer):
'x86_regcallcc', 'x86_stdcallcc', 'x86_thiscallcc', 'x86_vectorcallcc', 'xchg',
'xor', 'zeroext', 'zeroinitializer', 'zext'),
suffix=r'\b'), Keyword),
-
- # Types
+
+ # Types
(words(('void', 'half', 'float', 'double', 'x86_fp80', 'fp128',
'ppc_fp128', 'label', 'metadata', 'token')), Keyword.Type),
-
- # Integer types
- (r'i[1-9]\d*', Keyword)
- ]
- }
-
-
-class NasmLexer(RegexLexer):
- """
- For Nasm (Intel) assembly code.
- """
- name = 'NASM'
- aliases = ['nasm']
- filenames = ['*.asm', '*.ASM']
- mimetypes = ['text/x-nasm']
-
+
+ # Integer types
+ (r'i[1-9]\d*', Keyword)
+ ]
+ }
+
+
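
A quick sanity check for the LLVM rules above is to tokenize a line of IR: '%'-prefixed names should come back as Name.Variable, the opcode and integer type as Keyword, and the trailing ';' comment as Comment. A small sketch with an illustrative IR fragment:

from pygments.lexers.asm import LlvmLexer

ir = '%sum = add i32 %a, %b    ; add two values'
for token_type, value in LlvmLexer().get_tokens(ir):
    print(token_type, repr(value))
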
+class NasmLexer(RegexLexer):
+ """
+ For Nasm (Intel) assembly code.
+ """
+ name = 'NASM'
+ aliases = ['nasm']
+ filenames = ['*.asm', '*.ASM']
+ mimetypes = ['text/x-nasm']
+
# Tasm uses the same file endings, but TASM is not as common as NASM, so
# we prioritize NASM higher by default
priority = 1.0
- identifier = r'[a-z$._?][\w$.?#@~]*'
- hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
- octn = r'[0-7]+q'
- binn = r'[01]+b'
- decn = r'[0-9]+'
- floatn = decn + r'\.e?' + decn
- string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
- declkw = r'(?:res|d)[bwdqt]|times'
- register = (r'r[0-9][0-5]?[bwd]|'
- r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
- r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
- wordop = r'seg|wrt|strict'
- type = r'byte|[dq]?word'
+ identifier = r'[a-z$._?][\w$.?#@~]*'
+ hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
+ octn = r'[0-7]+q'
+ binn = r'[01]+b'
+ decn = r'[0-9]+'
+ floatn = decn + r'\.e?' + decn
+ string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
+ declkw = r'(?:res|d)[bwdqt]|times'
+ register = (r'r[0-9][0-5]?[bwd]|'
+ r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
+ r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]')
+ wordop = r'seg|wrt|strict'
+ type = r'byte|[dq]?word'
# Directives must be followed by whitespace, otherwise CPU will match
# cpuid for instance.
directives = (r'(?:BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
- r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
+ r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
r'EXPORT|LIBRARY|MODULE)\s+')
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
- (r'^\s*%', Comment.Preproc, 'preproc'),
- include('whitespace'),
- (identifier + ':', Name.Label),
- (r'(%s)(\s+)(equ)' % identifier,
- bygroups(Name.Constant, Keyword.Declaration, Keyword.Declaration),
- 'instruction-args'),
- (directives, Keyword, 'instruction-args'),
- (declkw, Keyword.Declaration, 'instruction-args'),
- (identifier, Name.Function, 'instruction-args'),
- (r'[\r\n]+', Text)
- ],
- 'instruction-args': [
- (string, String),
- (hexn, Number.Hex),
- (octn, Number.Oct),
- (binn, Number.Bin),
- (floatn, Number.Float),
- (decn, Number.Integer),
- include('punctuation'),
- (register, Name.Builtin),
- (identifier, Name.Variable),
- (r'[\r\n]+', Text, '#pop'),
- include('whitespace')
- ],
- 'preproc': [
- (r'[^;\n]+', Comment.Preproc),
- (r';.*?\n', Comment.Single, '#pop'),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'whitespace': [
- (r'\n', Text),
- (r'[ \t]+', Text),
- (r';.*', Comment.Single)
- ],
- 'punctuation': [
- (r'[,():\[\]]+', Punctuation),
- (r'[&|^<>+*/%~-]+', Operator),
- (r'[$]+', Keyword.Constant),
- (wordop, Operator.Word),
- (type, Keyword.Type)
- ],
- }
-
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'^\s*%', Comment.Preproc, 'preproc'),
+ include('whitespace'),
+ (identifier + ':', Name.Label),
+ (r'(%s)(\s+)(equ)' % identifier,
+ bygroups(Name.Constant, Keyword.Declaration, Keyword.Declaration),
+ 'instruction-args'),
+ (directives, Keyword, 'instruction-args'),
+ (declkw, Keyword.Declaration, 'instruction-args'),
+ (identifier, Name.Function, 'instruction-args'),
+ (r'[\r\n]+', Text)
+ ],
+ 'instruction-args': [
+ (string, String),
+ (hexn, Number.Hex),
+ (octn, Number.Oct),
+ (binn, Number.Bin),
+ (floatn, Number.Float),
+ (decn, Number.Integer),
+ include('punctuation'),
+ (register, Name.Builtin),
+ (identifier, Name.Variable),
+ (r'[\r\n]+', Text, '#pop'),
+ include('whitespace')
+ ],
+ 'preproc': [
+ (r'[^;\n]+', Comment.Preproc),
+ (r';.*?\n', Comment.Single, '#pop'),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'[ \t]+', Text),
+ (r';.*', Comment.Single)
+ ],
+ 'punctuation': [
+ (r'[,():\[\]]+', Punctuation),
+ (r'[&|^<>+*/%~-]+', Operator),
+ (r'[$]+', Keyword.Constant),
+ (wordop, Operator.Word),
+ (type, Keyword.Type)
+ ],
+ }
+
def analyse_text(text):
# Probably TASM
if re.match(r'PROC', text, re.IGNORECASE):
return False
-
-
-class NasmObjdumpLexer(ObjdumpLexer):
- """
- For the output of 'objdump -d -M intel'.
-
- .. versionadded:: 2.0
- """
- name = 'objdump-nasm'
- aliases = ['objdump-nasm']
- filenames = ['*.objdump-intel']
- mimetypes = ['text/x-nasm-objdump']
-
- tokens = _objdump_lexer_tokens(NasmLexer)
-
-
+
+
+class NasmObjdumpLexer(ObjdumpLexer):
+ """
+ For the output of 'objdump -d -M intel'.
+
+ .. versionadded:: 2.0
+ """
+ name = 'objdump-nasm'
+ aliases = ['objdump-nasm']
+ filenames = ['*.objdump-intel']
+ mimetypes = ['text/x-nasm-objdump']
+
+ tokens = _objdump_lexer_tokens(NasmLexer)
+
+
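
NasmLexer and TasmLexer both claim *.asm/*.ASM, which is why the NASM code restored above carries priority = 1.0 and an analyse_text that bows out when the text looks like TASM (a leading PROC). A sketch of how that tie-break surfaces when resolving a file; the assembly snippet is illustrative:

from pygments.lexers import get_lexer_for_filename

nasm_src = 'section .text\nglobal _start\n_start:\n    mov eax, 1\n'
# Both NASM and TASM match *.asm; the priority/analyse_text pair above
# decides which lexer wins for this content.
print(get_lexer_for_filename('boot.asm', nasm_src).name)
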
class TasmLexer(RegexLexer):
"""
For Tasm (Turbo Assembler) assembly code.
@@ -631,43 +631,43 @@ class TasmLexer(RegexLexer):
return True
-class Ca65Lexer(RegexLexer):
- """
- For ca65 assembler sources.
-
- .. versionadded:: 1.6
- """
- name = 'ca65 assembler'
- aliases = ['ca65']
- filenames = ['*.s']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r';.*', Comment.Single),
- (r'\s+', Text),
- (r'[a-z_.@$][\w.@$]*:', Name.Label),
- (r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
- r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
- r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
- r'|bit)\b', Keyword),
- (r'\.\w+', Keyword.Pseudo),
- (r'[-+~*/^&|!<>=]', Operator),
- (r'"[^"\n]*.', String),
- (r"'[^'\n]*.", String.Char),
- (r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
- (r'\d+', Number.Integer),
- (r'%[01]+', Number.Bin),
- (r'[#,.:()=\[\]]', Punctuation),
- (r'[a-z_.@$][\w.@$]*', Name),
- ]
- }
-
- def analyse_text(self, text):
- # comments in GAS start with "#"
- if re.match(r'^\s*;', text, re.MULTILINE):
- return 0.9
+class Ca65Lexer(RegexLexer):
+ """
+ For ca65 assembler sources.
+
+ .. versionadded:: 1.6
+ """
+ name = 'ca65 assembler'
+ aliases = ['ca65']
+ filenames = ['*.s']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r';.*', Comment.Single),
+ (r'\s+', Text),
+ (r'[a-z_.@$][\w.@$]*:', Name.Label),
+ (r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
+ r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
+ r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
+ r'|bit)\b', Keyword),
+ (r'\.\w+', Keyword.Pseudo),
+ (r'[-+~*/^&|!<>=]', Operator),
+ (r'"[^"\n]*.', String),
+ (r"'[^'\n]*.", String.Char),
+ (r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'%[01]+', Number.Bin),
+ (r'[#,.:()=\[\]]', Punctuation),
+ (r'[a-z_.@$][\w.@$]*', Name),
+ ]
+ }
+
+ def analyse_text(self, text):
+ # comments in GAS start with "#"
+ if re.match(r'^\s*;', text, re.MULTILINE):
+ return 0.9
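
The ca65 lexer above ends with an analyse_text heuristic: a source whose lines start with ';' scores 0.9, a nod to GAS (which also claims *.s) where comments conventionally start with '#'. A minimal sketch that highlights a 6502 fragment with it; the assembly is only an example:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.asm import Ca65Lexer

code = '; clear the first byte of screen memory\nreset:  lda #$00\n        sta $0200\n'
print(highlight(code, Ca65Lexer(), TerminalFormatter()))
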
class Dasm16Lexer(RegexLexer):
diff --git a/contrib/python/Pygments/py2/pygments/lexers/automation.py b/contrib/python/Pygments/py2/pygments/lexers/automation.py
index 5f27b6c6f4..b3babe3df3 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/automation.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/automation.py
@@ -1,374 +1,374 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.automation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for automation scripting languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.automation
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for automation scripting languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, combined
-from pygments.token import Text, Comment, Operator, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['AutohotkeyLexer', 'AutoItLexer']
-
-
-class AutohotkeyLexer(RegexLexer):
- """
- For `autohotkey <http://www.autohotkey.com/>`_ source code.
-
- .. versionadded:: 1.4
- """
- name = 'autohotkey'
- aliases = ['ahk', 'autohotkey']
- filenames = ['*.ahk', '*.ahkl']
- mimetypes = ['text/x-autohotkey']
-
- tokens = {
- 'root': [
- (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
- (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, combined
+from pygments.token import Text, Comment, Operator, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['AutohotkeyLexer', 'AutoItLexer']
+
+
+class AutohotkeyLexer(RegexLexer):
+ """
+ For `autohotkey <http://www.autohotkey.com/>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'autohotkey'
+ aliases = ['ahk', 'autohotkey']
+ filenames = ['*.ahk', '*.ahkl']
+ mimetypes = ['text/x-autohotkey']
+
+ tokens = {
+ 'root': [
+ (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
+ (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
(r'\s+;.*?$', Comment.Single),
(r'^;.*?$', Comment.Single),
- (r'[]{}(),;[]', Punctuation),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInVariables'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([,%`abfnrtv\-+;])', String.Escape),
- include('garbage'),
- ],
- 'incomment': [
- (r'^\s*\*/', Comment.Multiline, '#pop'),
- (r'[^*/]', Comment.Multiline),
- (r'[*/]', Comment.Multiline)
- ],
- 'incontinuation': [
- (r'^\s*\)', Generic, '#pop'),
- (r'[^)]', Generic),
- (r'[)]', Generic),
- ],
- 'commands': [
- (r'(?i)^(\s*)(global|local|static|'
- r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
- r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
- r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
- r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
- r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
- r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
- r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
- r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
- r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
- r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
- r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
- r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
- r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
- r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
- r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
- r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
- r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
- r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
- r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
- r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
- r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
- r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
- r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
- r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
- r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
- r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
- r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
- r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
- r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
- r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
- r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
- r'SetBatchLines|SetCapslockState|SetControlDelay|'
- r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
- r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
- r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
- r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
- r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
- r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
- r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
- r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
- r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
- r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
- r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
- r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
- r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
- r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
- r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
- r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
- r'WinWait)\b', bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
- r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
- r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
- r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
- r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
- r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
- r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
- r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
- r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
- r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
- r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
- r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
- r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
- r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
- r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
- r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
- Name.Function),
- ],
- 'builtInVariables': [
- (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
- r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
- r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
- r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
- r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
- r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
- r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
- r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
- r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
- r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
- r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
- r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
- r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
- r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
- r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
- r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
- r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
- r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
- r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
- r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
- r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
- r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
- r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
- r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
- r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
- r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
- r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
- r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
- r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
- r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
- Name.Variable),
- ],
- 'labels': [
- # hotkeys and labels
- # technically, hotkey names are limited to named keys and buttons
- (r'(^\s*)([^:\s("]+?:{1,2})', bygroups(Text, Name.Label)),
- (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([,%`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- # (r'.', Text), # no cheating
- ],
- }
-
-
-class AutoItLexer(RegexLexer):
- """
- For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
-
- AutoIt is a freeware BASIC-like scripting language
-    designed for automating the Windows GUI and general scripting.
-
- .. versionadded:: 1.6
- """
- name = 'AutoIt'
- aliases = ['autoit']
- filenames = ['*.au3']
- mimetypes = ['text/x-autoit']
-
- # Keywords, functions, macros from au3.keywords.properties
-    # which can be found in the AutoIt installation directory, e.g.
- # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
-
- keywords = """\
- #include-once #include #endregion #forcedef #forceref #region
- and byref case continueloop dim do else elseif endfunc endif
- endselect exit exitloop for func global
- if local next not or return select step
- then to until wend while exit""".split()
-
- functions = """\
- abs acos adlibregister adlibunregister asc ascw asin assign atan
- autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
- binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
- blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
- consolewrite consolewriteerror controlclick controlcommand controldisable
- controlenable controlfocus controlgetfocus controlgethandle controlgetpos
- controlgettext controlhide controllistview controlmove controlsend
- controlsettext controlshow controltreeview cos dec dircopy dircreate
- dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
- dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
- dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
- drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
- drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
- drivespacetotal drivestatus envget envset envupdate eval execute exp
- filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
- filedelete fileexists filefindfirstfile filefindnextfile fileflush
- filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
- filegetshortname filegetsize filegettime filegetversion fileinstall filemove
- fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
- filesavedialog fileselectfolder filesetattrib filesetpos filesettime
- filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
- guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
- guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
- guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
- guictrlcreateinput guictrlcreatelabel guictrlcreatelist
- guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
- guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
- guictrlcreatepic guictrlcreateprogress guictrlcreateradio
- guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
- guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
- guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
- guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
- guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
- guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
- guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
- guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
- guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
- guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
- guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
- httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
- inetread inidelete iniread inireadsection inireadsectionnames
- inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
- isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
- isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
- mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
- number objcreate objcreateinterface objevent objevent objget objname
- onautoitexitregister onautoitexitunregister opt ping pixelchecksum
- pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
- processgetstats processlist processsetpriority processwait processwaitclose
- progressoff progresson progressset ptr random regdelete regenumkey
- regenumval regread regwrite round run runas runaswait runwait send
- sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
- sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
- sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
- string stringaddcr stringcompare stringformat stringfromasciiarray
- stringinstr stringisalnum stringisalpha stringisascii stringisdigit
- stringisfloat stringisint stringislower stringisspace stringisupper
- stringisxdigit stringleft stringlen stringlower stringmid stringregexp
- stringregexpreplace stringreplace stringright stringsplit stringstripcr
- stringstripws stringtoasciiarray stringtobinary stringtrimleft
- stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
- tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
- timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
- trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
- trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
- traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
- udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
- winactivate winactive winclose winexists winflash wingetcaretpos
- wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
- wingetstate wingettext wingettitle winkill winlist winmenuselectitem
- winminimizeall winminimizeallundo winmove winsetontop winsetstate
- winsettitle winsettrans winwait winwaitactive winwaitclose
- winwaitnotactive""".split()
-
- macros = """\
- @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
- @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
- @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
- @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
- @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
- @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
- @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
- @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
- @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
- @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
- @osversion @programfilesdir @programscommondir @programsdir @scriptdir
- @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
- @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
- @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
- @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
- @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
- @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
- @windowsdir @workingdir @yday @year""".split()
-
- tokens = {
- 'root': [
- (r';.*\n', Comment.Single),
- (r'(#comments-start|#cs)(.|\n)*?(#comments-end|#ce)',
- Comment.Multiline),
- (r'[\[\]{}(),;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
- (r'[$|@][a-zA-Z_]\w*', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInMarcros'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([,%`abfnrtv\-+;])', String.Escape),
- (r'_\n', Text), # Line continuation
- include('garbage'),
- ],
- 'commands': [
- (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
- bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(%s)\b' % '|'.join(functions),
- Name.Function),
- ],
- 'builtInMarcros': [
- (r'(?i)(%s)\b' % '|'.join(macros),
- Name.Variable.Global),
- ],
- 'labels': [
- # sendkeys
- (r'(^\s*)(\{\S+?\})', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([,%`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- ],
- }
+ (r'[]{}(),;[]', Punctuation),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+ include('builtInVariables'),
+ (r'"', String, combined('stringescape', 'dqs')),
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([,%`abfnrtv\-+;])', String.Escape),
+ include('garbage'),
+ ],
+ 'incomment': [
+ (r'^\s*\*/', Comment.Multiline, '#pop'),
+ (r'[^*/]', Comment.Multiline),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'incontinuation': [
+ (r'^\s*\)', Generic, '#pop'),
+ (r'[^)]', Generic),
+ (r'[)]', Generic),
+ ],
+ 'commands': [
+ (r'(?i)^(\s*)(global|local|static|'
+ r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
+ r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
+ r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
+ r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
+ r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
+ r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
+ r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
+ r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
+ r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
+ r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
+ r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
+ r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
+ r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
+ r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
+ r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
+ r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
+ r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
+ r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
+ r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
+ r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
+ r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
+ r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
+ r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
+ r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
+ r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
+ r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
+ r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
+ r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
+ r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
+ r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
+ r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
+ r'SetBatchLines|SetCapslockState|SetControlDelay|'
+ r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
+ r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
+ r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
+ r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
+ r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
+ r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
+ r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
+ r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
+ r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
+ r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
+ r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
+ r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
+ r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
+ r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
+ r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
+ r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
+ r'WinWait)\b', bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
+ r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
+ r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
+ r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
+ r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
+ r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
+ r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
+ r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
+ r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
+ r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
+ r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
+ r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
+ r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
+ r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
+ r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
+ r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
+ Name.Function),
+ ],
+ 'builtInVariables': [
+ (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
+ r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
+ r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
+ r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
+ r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
+ r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
+ r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
+ r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
+ r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
+ r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
+ r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
+ r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
+ r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
+ r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
+ r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
+ r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
+ r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
+ r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
+ r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
+ r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
+ r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
+ r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
+ r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
+ r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
+ r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
+ r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
+ r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
+ r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
+ r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
+ r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
+ Name.Variable),
+ ],
+ 'labels': [
+ # hotkeys and labels
+ # technically, hotkey names are limited to named keys and buttons
+ (r'(^\s*)([^:\s("]+?:{1,2})', bygroups(Text, Name.Label)),
+ (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([,%`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ # (r'.', Text), # no cheating
+ ],
+ }
+
+
+class AutoItLexer(RegexLexer):
+ """
+ For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
+
+ AutoIt is a freeware BASIC-like scripting language
+    designed for automating the Windows GUI and general scripting.
+
+ .. versionadded:: 1.6
+ """
+ name = 'AutoIt'
+ aliases = ['autoit']
+ filenames = ['*.au3']
+ mimetypes = ['text/x-autoit']
+
+ # Keywords, functions, macros from au3.keywords.properties
+    # which can be found in the AutoIt installation directory, e.g.
+ # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
+
+ keywords = """\
+ #include-once #include #endregion #forcedef #forceref #region
+ and byref case continueloop dim do else elseif endfunc endif
+ endselect exit exitloop for func global
+ if local next not or return select step
+ then to until wend while exit""".split()
+
+ functions = """\
+ abs acos adlibregister adlibunregister asc ascw asin assign atan
+ autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
+ binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
+ blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
+ consolewrite consolewriteerror controlclick controlcommand controldisable
+ controlenable controlfocus controlgetfocus controlgethandle controlgetpos
+ controlgettext controlhide controllistview controlmove controlsend
+ controlsettext controlshow controltreeview cos dec dircopy dircreate
+ dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
+ dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
+ dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
+ drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
+ drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
+ drivespacetotal drivestatus envget envset envupdate eval execute exp
+ filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
+ filedelete fileexists filefindfirstfile filefindnextfile fileflush
+ filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
+ filegetshortname filegetsize filegettime filegetversion fileinstall filemove
+ fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
+ filesavedialog fileselectfolder filesetattrib filesetpos filesettime
+ filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
+ guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
+ guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
+ guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
+ guictrlcreateinput guictrlcreatelabel guictrlcreatelist
+ guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
+ guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
+ guictrlcreatepic guictrlcreateprogress guictrlcreateradio
+ guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
+ guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
+ guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
+ guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
+ guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
+ guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
+ guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
+ guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
+ guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
+ guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
+ guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
+ httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
+ inetread inidelete iniread inireadsection inireadsectionnames
+ inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
+ isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
+ isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
+ mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
+ number objcreate objcreateinterface objevent objevent objget objname
+ onautoitexitregister onautoitexitunregister opt ping pixelchecksum
+ pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
+ processgetstats processlist processsetpriority processwait processwaitclose
+ progressoff progresson progressset ptr random regdelete regenumkey
+ regenumval regread regwrite round run runas runaswait runwait send
+ sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
+ sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
+ sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
+ string stringaddcr stringcompare stringformat stringfromasciiarray
+ stringinstr stringisalnum stringisalpha stringisascii stringisdigit
+ stringisfloat stringisint stringislower stringisspace stringisupper
+ stringisxdigit stringleft stringlen stringlower stringmid stringregexp
+ stringregexpreplace stringreplace stringright stringsplit stringstripcr
+ stringstripws stringtoasciiarray stringtobinary stringtrimleft
+ stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
+ tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
+ timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
+ trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
+ trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
+ traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
+ udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
+ winactivate winactive winclose winexists winflash wingetcaretpos
+ wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
+ wingetstate wingettext wingettitle winkill winlist winmenuselectitem
+ winminimizeall winminimizeallundo winmove winsetontop winsetstate
+ winsettitle winsettrans winwait winwaitactive winwaitclose
+ winwaitnotactive""".split()
+
+ macros = """\
+ @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
+ @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
+ @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
+ @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
+ @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
+ @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
+ @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
+ @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
+ @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
+ @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
+ @osversion @programfilesdir @programscommondir @programsdir @scriptdir
+ @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
+ @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
+ @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
+ @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
+ @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
+ @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
+ @windowsdir @workingdir @yday @year""".split()
+
+ tokens = {
+ 'root': [
+ (r';.*\n', Comment.Single),
+ (r'(#comments-start|#cs)(.|\n)*?(#comments-end|#ce)',
+ Comment.Multiline),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'[$|@][a-zA-Z_]\w*', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+ include('builtInMarcros'),
+ (r'"', String, combined('stringescape', 'dqs')),
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([,%`abfnrtv\-+;])', String.Escape),
+ (r'_\n', Text), # Line continuation
+ include('garbage'),
+ ],
+ 'commands': [
+ (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
+ bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(%s)\b' % '|'.join(functions),
+ Name.Function),
+ ],
+ 'builtInMarcros': [
+ (r'(?i)(%s)\b' % '|'.join(macros),
+ Name.Variable.Global),
+ ],
+ 'labels': [
+ # sendkeys
+ (r'(^\s*)(\{\S+?\})', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([,%`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ ],
+ }
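
    A minimal usage sketch (not part of this patch), assuming a standard
    Pygments installation: it drives the AutoItLexer restored above through the
    public highlight() API via its 'autoit' alias. The AutoIt sample line and
    the output file name are illustrative only.

        # Minimal sketch, assuming Pygments is importable as installed;
        # the sample source and the output path are made up for illustration.
        from pygments import highlight
        from pygments.formatters import HtmlFormatter
        from pygments.lexers import get_lexer_by_name

        source = 'MsgBox(0, "Greeting", "Hello from AutoIt")\n'

        lexer = get_lexer_by_name('autoit')   # alias declared by AutoItLexer
        html = highlight(source, lexer, HtmlFormatter(full=True))

        with open('autoit_sample.html', 'w') as fh:
            fh.write(html)
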
diff --git a/contrib/python/Pygments/py2/pygments/lexers/basic.py b/contrib/python/Pygments/py2/pygments/lexers/basic.py
index 372c8229d3..697429c631 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/basic.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/basic.py
@@ -1,506 +1,506 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.basic
- ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for BASIC-like languages (other than VB.net).
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.basic
+ ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for BASIC-like languages (other than VB.net).
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default, words, include
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words, include
from pygments.token import Comment, Error, Keyword, Name, Number, \
Punctuation, Operator, String, Text, Whitespace
from pygments.lexers import _vbscript_builtins
+
-
-__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
+__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
'QBasicLexer', 'VBScriptLexer', 'BBCBasicLexer']
-
-
-
-class BlitzMaxLexer(RegexLexer):
- """
- For `BlitzMax <http://blitzbasic.com>`_ source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'BlitzMax'
- aliases = ['blitzmax', 'bmax']
- filenames = ['*.bmx']
- mimetypes = ['text/x-bmx']
-
- bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
- bmax_sktypes = r'@{1,2}|[!#$%]'
- bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
- bmax_name = r'[a-z_]\w*'
- bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
- r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
- (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
- bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'[ \t]+', Text),
- (r'\.\.\n', Text), # Line continuation
- # Comments
- (r"'.*?\n", Comment.Single),
- (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]*(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
- (bmax_vopwords), Operator),
- (r'[(),.:\[\]]', Punctuation),
- (r'(?:#[\w \t]*)', Name.Label),
- (r'(?:\?[\w \t]*)', Comment.Preproc),
- # Identifiers
- (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
- (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
- (bmax_name, bmax_name),
- bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
- (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
- Operator, Text, Punctuation, Text,
- Keyword.Type, Name.Class, Text,
- Keyword.Type, Text, Punctuation)),
- (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
- Text, Punctuation, Text, Keyword.Type,
- Name.Class, Text, Keyword.Type)),
- (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- # Keywords
- (r'\b(Ptr)\b', Keyword.Type),
- (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
- (words((
- 'TNullMethodException', 'TNullFunctionException',
- 'TNullObjectException', 'TArrayBoundsException',
- 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception),
- (words((
- 'Strict', 'SuperStrict', 'Module', 'ModuleInfo',
- 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private',
- 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max',
- 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen',
- 'Framework', 'Include', 'Import', 'Extern', 'EndExtern',
- 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod',
- 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
- 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile',
- 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect',
- 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData',
- 'RestoreData'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- # Final resolve (for variable names and such)
- (r'(%s)' % (bmax_name), Name.Variable),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class BlitzBasicLexer(RegexLexer):
- """
- For `BlitzBasic <http://blitzbasic.com>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'BlitzBasic'
- aliases = ['blitzbasic', 'b3d', 'bplus']
- filenames = ['*.bb', '*.decls']
- mimetypes = ['text/x-bb']
-
- bb_sktypes = r'@{1,2}|[#$%]'
- bb_name = r'[a-z]\w*'
- bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
- (bb_name, bb_sktypes, bb_name)
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'[ \t]+', Text),
- # Comments
- (r";.*?\n", Comment.Single),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not',
- 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str',
- 'First', 'Last', 'Before', 'After'),
- prefix=r'\b', suffix=r'\b'),
- Operator),
- (r'([+\-*/~=<>^])', Operator),
- (r'[(),:\[\]\\]', Punctuation),
- (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
- # Identifiers
- (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Label)),
- (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
- bygroups(Operator, Text, Punctuation, Text, Name.Class)),
- (r'\b%s\b([ \t]*)(\()' % bb_var,
- bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation,
- Text, Name.Class, Text, Punctuation)),
- (r'\b(Function)\b([ \t]+)%s' % bb_var,
- bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type,
- Text, Punctuation, Text, Name.Class)),
- (r'\b(Type)([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- # Keywords
- (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
- (words((
- 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert',
- 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
- 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend',
- 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default',
- 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- # Final resolve (for variable names and such)
- # (r'(%s)' % (bb_name), Name.Variable),
- (bb_var, bygroups(Name.Variable, Text, Keyword.Type,
- Text, Punctuation, Text, Name.Class)),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class MonkeyLexer(RegexLexer):
- """
- For
- `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
- source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Monkey'
- aliases = ['monkey']
- filenames = ['*.monkey']
- mimetypes = ['text/x-monkey']
-
- name_variable = r'[a-z_]\w*'
- name_function = r'[A-Z]\w*'
- name_constant = r'[A-Z_][A-Z0-9_]*'
- name_class = r'[A-Z]\w*'
- name_module = r'[a-z0-9_]*'
-
- keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
- # ? == Bool // % == Int // # == Float // $ == String
- keyword_type_special = r'[?%#$]'
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- # Text
- (r'\s+', Text),
- # Comments
- (r"'.*", Comment),
- (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
- # preprocessor directives
- (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
- # preprocessor variable (any line starting with '#' that is not a directive)
- (r'^#', Comment.Preproc, 'variables'),
- # String
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-fA-Z]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Native data types
- (r'\b%s\b' % keyword_type, Keyword.Type),
- # Exception handling
- (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
- (r'Throwable', Name.Exception),
- # Builtins
- (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
- (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
- (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
- # Keywords
- (r'(?i)^(Import)(\s+)(.*)(\n)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text)),
- (r'(?i)^Strict\b.*\n', Keyword.Reserved),
- (r'(?i)(Const|Local|Global|Field)(\s+)',
- bygroups(Keyword.Declaration, Text), 'variables'),
- (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
- bygroups(Keyword.Reserved, Text), 'classname'),
- (r'(?i)(Function|Method)(\s+)',
- bygroups(Keyword.Reserved, Text), 'funcname'),
- (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
- r'Final|Abstract)\b', Keyword.Reserved),
- # Flow Control stuff
- (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
- r'Select|Case|Default|'
- r'While|Wend|'
- r'Repeat|Until|Forever|'
- r'For|To|Until|Step|EachIn|Next|'
- r'Exit|Continue)\s+', Keyword.Reserved),
- # not used yet
- (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
- # Array
- (r'[\[\]]', Punctuation),
- # Other
- (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
- (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
- (r'[(){}!#,.:]', Punctuation),
- # catch the rest
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_function, Name.Function),
- (r'%s\b' % name_variable, Name.Variable),
- ],
- 'funcname': [
- (r'(?i)%s\b' % name_function, Name.Function),
- (r':', Punctuation, 'classname'),
- (r'\s+', Text),
- (r'\(', Punctuation, 'variables'),
- (r'\)', Punctuation, '#pop')
- ],
- 'classname': [
- (r'%s\.' % name_module, Name.Namespace),
- (r'%s\b' % keyword_type, Keyword.Type),
- (r'%s\b' % name_class, Name.Class),
- # array (of given size)
- (r'(\[)(\s*)(\d*)(\s*)(\])',
- bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)),
- # generics
- (r'\s+(?!<)', Text, '#pop'),
- (r'<', Punctuation, '#push'),
- (r'>', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- default('#pop')
- ],
- 'variables': [
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_variable, Name.Variable),
- (r'%s' % keyword_type_special, Keyword.Type),
- (r'\s+', Text),
- (r':', Punctuation, 'classname'),
- (r',', Punctuation, '#push'),
- default('#pop')
- ],
- 'string': [
- (r'[^"~]+', String.Double),
- (r'~q|~n|~r|~t|~z|~~', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'comment': [
- (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
- (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
- (r'\n', Comment.Multiline),
- (r'.+', Comment.Multiline),
- ],
- }
-
-
-class CbmBasicV2Lexer(RegexLexer):
- """
- For CBM BASIC V2 sources.
-
- .. versionadded:: 1.6
- """
- name = 'CBM BASIC V2'
- aliases = ['cbmbas']
- filenames = ['*.bas']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'rem.*\n', Comment.Single),
- (r'\s+', Text),
- (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
- r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
- r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
- (r'data|restore|dim|let|def|fn', Keyword.Declaration),
- (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
- r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
- (r'[-+*/^<>=]', Operator),
- (r'not|and|or', Operator.Word),
- (r'"[^"\n]*.', String),
- (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
- (r'[(),:;]', Punctuation),
- (r'\w+[$%]?', Name),
- ]
- }
-
- def analyse_text(self, text):
- # if it starts with a line number, it shouldn't be a "modern" Basic
- # like VB.net
- if re.match(r'\d+', text):
- return 0.2
-
-
-class QBasicLexer(RegexLexer):
- """
- For
- `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
- source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'QBasic'
- aliases = ['qbasic', 'basic']
- filenames = ['*.BAS', '*.bas']
- mimetypes = ['text/basic']
-
- declarations = ('DATA', 'LET')
-
- functions = (
- 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
- 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
- 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
- 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
- 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
- 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
- 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
- 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
- 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
- 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
- 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
- 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
- 'VARPTR$', 'VARSEG'
- )
-
- metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC')
-
- operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR')
-
- statements = (
- 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
- 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
- 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
- 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
- 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
- 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
- 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
- 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
- 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
- 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
- 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
- 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
- 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
- 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
- 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
- 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
- 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
- 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
- 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
- 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
- )
-
- keywords = (
- 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
- 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
- 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
- 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
- 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
- )
-
- tokens = {
- 'root': [
- (r'\n+', Text),
- (r'\s+', Text.Whitespace),
- (r'^(\s*)(\d*)(\s*)(REM .*)$',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
- Comment.Single)),
- (r'^(\s*)(\d+)(\s*)',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
- (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
- (r'(?=[^"]*)\'.*$', Comment.Single),
- (r'"[^\n"]*"', String.Double),
- (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
- bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
- (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name)),
- (r'(DIM)(\s+)(SHARED)(\s+)([^\s(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name.Variable.Global)),
- (r'(DIM)(\s+)([^\s(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
- (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
- bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
- Operator)),
- (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- (r'(SUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- include('declarations'),
- include('functions'),
- include('metacommands'),
- include('operators'),
- include('statements'),
- include('keywords'),
- (r'[a-zA-Z_]\w*[$@#&!]', Name.Variable.Global),
- (r'[a-zA-Z_]\w*\:', Name.Label),
- (r'\-?\d*\.\d+[@|#]?', Number.Float),
- (r'\-?\d+[@|#]', Number.Float),
- (r'\-?\d+#?', Number.Integer.Long),
- (r'\-?\d+#?', Number.Integer),
- (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
- (r'[\[\]{}(),;]', Punctuation),
- (r'[\w]+', Name.Variable.Global),
- ],
- # can't use regular \b because of X$()
- # XXX: use words() here
- 'declarations': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
- Keyword.Declaration),
- ],
- 'functions': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
- Keyword.Reserved),
- ],
- 'metacommands': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
- Keyword.Constant),
- ],
- 'operators': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
- ],
- 'statements': [
- (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
- Keyword.Reserved),
- ],
- 'keywords': [
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- ],
- }
-
- def analyse_text(text):
- if '$DYNAMIC' in text or '$STATIC' in text:
- return 0.9
+
+
+
+class BlitzMaxLexer(RegexLexer):
+ """
+ For `BlitzMax <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'BlitzMax'
+ aliases = ['blitzmax', 'bmax']
+ filenames = ['*.bmx']
+ mimetypes = ['text/x-bmx']
+
+ bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
+ bmax_sktypes = r'@{1,2}|[!#$%]'
+ bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
+ bmax_name = r'[a-z_]\w*'
+ bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
+ r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
+ (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
+ bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
+ (r'[ \t]+', Text),
+ (r'\.\.\n', Text), # Line continuation
+ # Comments
+ (r"'.*?\n", Comment.Single),
+ (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]*(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
+ (bmax_vopwords), Operator),
+ (r'[(),.:\[\]]', Punctuation),
+ (r'(?:#[\w \t]*)', Name.Label),
+ (r'(?:\?[\w \t]*)', Comment.Preproc),
+ # Identifiers
+ (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
+ bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
+ (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
+ (bmax_name, bmax_name),
+ bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
+ (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
+ Operator, Text, Punctuation, Text,
+ Keyword.Type, Name.Class, Text,
+ Keyword.Type, Text, Punctuation)),
+ (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
+ Text, Punctuation, Text, Keyword.Type,
+ Name.Class, Text, Keyword.Type)),
+ (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ # Keywords
+ (r'\b(Ptr)\b', Keyword.Type),
+ (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
+ (words((
+ 'TNullMethodException', 'TNullFunctionException',
+ 'TNullObjectException', 'TArrayBoundsException',
+ 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception),
+ (words((
+ 'Strict', 'SuperStrict', 'Module', 'ModuleInfo',
+ 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private',
+ 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max',
+ 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen',
+ 'Framework', 'Include', 'Import', 'Extern', 'EndExtern',
+ 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod',
+ 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect',
+ 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData',
+ 'RestoreData'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ (r'(%s)' % (bmax_name), Name.Variable),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
+ (r'[^"]+', String.Double),
+ ],
+ }
+
+
+class BlitzBasicLexer(RegexLexer):
+ """
+ For `BlitzBasic <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'BlitzBasic'
+ aliases = ['blitzbasic', 'b3d', 'bplus']
+ filenames = ['*.bb', '*.decls']
+ mimetypes = ['text/x-bb']
+
+ bb_sktypes = r'@{1,2}|[#$%]'
+ bb_name = r'[a-z]\w*'
+ bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
+ (bb_name, bb_sktypes, bb_name)
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
+ (r'[ \t]+', Text),
+ # Comments
+ (r";.*?\n", Comment.Single),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not',
+ 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str',
+ 'First', 'Last', 'Before', 'After'),
+ prefix=r'\b', suffix=r'\b'),
+ Operator),
+ (r'([+\-*/~=<>^])', Operator),
+ (r'[(),:\[\]\\]', Punctuation),
+ (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
+ # Identifiers
+ (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Label)),
+ (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
+ bygroups(Operator, Text, Punctuation, Text, Name.Class)),
+ (r'\b%s\b([ \t]*)(\()' % bb_var,
+ bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation,
+ Text, Name.Class, Text, Punctuation)),
+ (r'\b(Function)\b([ \t]+)%s' % bb_var,
+ bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type,
+ Text, Punctuation, Text, Name.Class)),
+ (r'\b(Type)([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ # Keywords
+ (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
+ (words((
+ 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert',
+ 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default',
+ 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ # (r'(%s)' % (bb_name), Name.Variable),
+ (bb_var, bygroups(Name.Variable, Text, Keyword.Type,
+ Text, Punctuation, Text, Name.Class)),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
+ (r'[^"]+', String.Double),
+ ],
+ }
+
+
+class MonkeyLexer(RegexLexer):
+ """
+ For
+ `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Monkey'
+ aliases = ['monkey']
+ filenames = ['*.monkey']
+ mimetypes = ['text/x-monkey']
+
+ name_variable = r'[a-z_]\w*'
+ name_function = r'[A-Z]\w*'
+ name_constant = r'[A-Z_][A-Z0-9_]*'
+ name_class = r'[A-Z]\w*'
+ name_module = r'[a-z0-9_]*'
+
+ keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
+ # ? == Bool // % == Int // # == Float // $ == String
+ keyword_type_special = r'[?%#$]'
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ # Text
+ (r'\s+', Text),
+ # Comments
+ (r"'.*", Comment),
+ (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
+ # preprocessor directives
+ (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
+ # preprocessor variable (any line starting with '#' that is not a directive)
+ (r'^#', Comment.Preproc, 'variables'),
+ # String
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-fA-Z]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Native data types
+ (r'\b%s\b' % keyword_type, Keyword.Type),
+ # Exception handling
+ (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
+ (r'Throwable', Name.Exception),
+ # Builtins
+ (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
+ (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
+ (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
+ # Keywords
+ (r'(?i)^(Import)(\s+)(.*)(\n)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text)),
+ (r'(?i)^Strict\b.*\n', Keyword.Reserved),
+ (r'(?i)(Const|Local|Global|Field)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'variables'),
+ (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
+ bygroups(Keyword.Reserved, Text), 'classname'),
+ (r'(?i)(Function|Method)(\s+)',
+ bygroups(Keyword.Reserved, Text), 'funcname'),
+ (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
+ r'Final|Abstract)\b', Keyword.Reserved),
+ # Flow Control stuff
+ (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
+ r'Select|Case|Default|'
+ r'While|Wend|'
+ r'Repeat|Until|Forever|'
+ r'For|To|Until|Step|EachIn|Next|'
+ r'Exit|Continue)\s+', Keyword.Reserved),
+ # not used yet
+ (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
+ # Array
+ (r'[\[\]]', Punctuation),
+ # Other
+ (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
+ (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
+ (r'[(){}!#,.:]', Punctuation),
+ # catch the rest
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_function, Name.Function),
+ (r'%s\b' % name_variable, Name.Variable),
+ ],
+ 'funcname': [
+ (r'(?i)%s\b' % name_function, Name.Function),
+ (r':', Punctuation, 'classname'),
+ (r'\s+', Text),
+ (r'\(', Punctuation, 'variables'),
+ (r'\)', Punctuation, '#pop')
+ ],
+ 'classname': [
+ (r'%s\.' % name_module, Name.Namespace),
+ (r'%s\b' % keyword_type, Keyword.Type),
+ (r'%s\b' % name_class, Name.Class),
+ # array (of given size)
+ (r'(\[)(\s*)(\d*)(\s*)(\])',
+ bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)),
+ # generics
+ (r'\s+(?!<)', Text, '#pop'),
+ (r'<', Punctuation, '#push'),
+ (r'>', Punctuation, '#pop'),
+ (r'\n', Text, '#pop'),
+ default('#pop')
+ ],
+ 'variables': [
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_variable, Name.Variable),
+ (r'%s' % keyword_type_special, Keyword.Type),
+ (r'\s+', Text),
+ (r':', Punctuation, 'classname'),
+ (r',', Punctuation, '#push'),
+ default('#pop')
+ ],
+ 'string': [
+ (r'[^"~]+', String.Double),
+ (r'~q|~n|~r|~t|~z|~~', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'comment': [
+ (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
+ (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
+ (r'\n', Comment.Multiline),
+ (r'.+', Comment.Multiline),
+ ],
+ }
+
+
+class CbmBasicV2Lexer(RegexLexer):
+ """
+ For CBM BASIC V2 sources.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CBM BASIC V2'
+ aliases = ['cbmbas']
+ filenames = ['*.bas']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'rem.*\n', Comment.Single),
+ (r'\s+', Text),
+ (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
+ r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
+ r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
+ (r'data|restore|dim|let|def|fn', Keyword.Declaration),
+ (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
+ r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
+ (r'[-+*/^<>=]', Operator),
+ (r'not|and|or', Operator.Word),
+ (r'"[^"\n]*.', String),
+ (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
+ (r'[(),:;]', Punctuation),
+ (r'\w+[$%]?', Name),
+ ]
+ }
+
+ def analyse_text(self, text):
+ # if it starts with a line number, it shouldn't be a "modern" Basic
+ # like VB.net
+ if re.match(r'\d+', text):
+ return 0.2
+
+
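The analyse_text heuristic above plugs into Pygments' guessing machinery: guess_lexer calls analyse_text on every registered lexer and keeps the best score, so returning 0.2 for a leading line number only nudges the guess toward CBM BASIC rather than deciding it. A rough usage sketch; the actual result depends on how the other registered lexers score the same text:

    from pygments.lexers import guess_lexer

    listing = '10 PRINT "HELLO"\n20 GOTO 10\n'
    print(guess_lexer(listing).name)  # likely a BASIC dialect, but not guaranteed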
+class QBasicLexer(RegexLexer):
+ """
+ For
+ `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
+ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'QBasic'
+ aliases = ['qbasic', 'basic']
+ filenames = ['*.BAS', '*.bas']
+ mimetypes = ['text/basic']
+
+ declarations = ('DATA', 'LET')
+
+ functions = (
+ 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
+ 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
+ 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
+ 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
+ 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
+ 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
+ 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
+ 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
+ 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
+ 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
+ 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
+ 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
+ 'VARPTR$', 'VARSEG'
+ )
+
+ metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC')
+
+ operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR')
+
+ statements = (
+ 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
+ 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
+ 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
+ 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
+ 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
+ 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
+ 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
+ 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
+ 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
+ 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
+ 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
+ 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
+ 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
+ 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
+ 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
+ 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
+ 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
+ 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
+ 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
+ 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
+ )
+
+ keywords = (
+ 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
+ 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
+ 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
+ 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
+ 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\n+', Text),
+ (r'\s+', Text.Whitespace),
+ (r'^(\s*)(\d*)(\s*)(REM .*)$',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
+ Comment.Single)),
+ (r'^(\s*)(\d+)(\s*)',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
+ (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
+ (r'(?=[^"]*)\'.*$', Comment.Single),
+ (r'"[^\n"]*"', String.Double),
+ (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
+ (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name)),
+ (r'(DIM)(\s+)(SHARED)(\s+)([^\s(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name.Variable.Global)),
+ (r'(DIM)(\s+)([^\s(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
+ (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
+ bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
+ Operator)),
+ (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ (r'(SUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ include('declarations'),
+ include('functions'),
+ include('metacommands'),
+ include('operators'),
+ include('statements'),
+ include('keywords'),
+ (r'[a-zA-Z_]\w*[$@#&!]', Name.Variable.Global),
+ (r'[a-zA-Z_]\w*\:', Name.Label),
+ (r'\-?\d*\.\d+[@|#]?', Number.Float),
+ (r'\-?\d+[@|#]', Number.Float),
+ (r'\-?\d+#?', Number.Integer.Long),
+ (r'\-?\d+#?', Number.Integer),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'[\w]+', Name.Variable.Global),
+ ],
+ # can't use regular \b because of X$()
+ # XXX: use words() here
+ 'declarations': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
+ Keyword.Declaration),
+ ],
+ 'functions': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
+ Keyword.Reserved),
+ ],
+ 'metacommands': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
+ Keyword.Constant),
+ ],
+ 'operators': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
+ ],
+ 'statements': [
+ (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
+ Keyword.Reserved),
+ ],
+ 'keywords': [
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ ],
+ }
+
+ def analyse_text(text):
+ if '$DYNAMIC' in text or '$STATIC' in text:
+ return 0.9
class VBScriptLexer(RegexLexer):
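Regarding the '# XXX: use words() here' note inside QBasicLexer above: the words() helper from pygments.lexer escapes each entry and builds an optimized alternation, and the (?=\(|\b) suffix can be kept so that names ending in '$' (such as CHR$) still terminate correctly where a plain \b would not. A hypothetical sketch of the 'declarations' state written that way (not what this commit contains):

    from pygments.lexer import words
    from pygments.token import Keyword

    # Hypothetical rewrite of QBasicLexer's 'declarations' state.
    declarations_state = [
        (words(('DATA', 'LET'), suffix=r'(?=\(|\b)'), Keyword.Declaration),
    ]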
diff --git a/contrib/python/Pygments/py2/pygments/lexers/business.py b/contrib/python/Pygments/py2/pygments/lexers/business.py
index 37f161a036..284328713f 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/business.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/business.py
@@ -1,147 +1,147 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.business
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for "business-oriented" languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.business
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for "business-oriented" languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
-
-__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
- 'GoodDataCLLexer', 'MaqlLexer']
-
-
-class CobolLexer(RegexLexer):
- """
- Lexer for OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOL'
- aliases = ['cobol']
- filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
- mimetypes = ['text/x-cobol']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: by PICTURE and USAGE
- # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
- # Logical (?): NOT, AND, OR
-
- # Reserved words:
- # http://opencobol.add1tocobol.com/#reserved-words
- # Intrinsics:
- # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
-
- tokens = {
- 'root': [
- include('comment'),
- include('strings'),
- include('core'),
- include('nums'),
- (r'[a-z0-9]([\w\-]*[a-z0-9]+)?', Name.Variable),
- # (r'[\s]+', Text),
- (r'[ \t]+', Text),
- ],
- 'comment': [
- (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
- ],
- 'core': [
- # Figurative constants
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
+
+__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
+ 'GoodDataCLLexer', 'MaqlLexer']
+
+
+class CobolLexer(RegexLexer):
+ """
+ Lexer for OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOL'
+ aliases = ['cobol']
+ filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
+ mimetypes = ['text/x-cobol']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # Data Types: by PICTURE and USAGE
+ # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
+ # Logical (?): NOT, AND, OR
+
+ # Reserved words:
+ # http://opencobol.add1tocobol.com/#reserved-words
+ # Intrinsics:
+ # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('strings'),
+ include('core'),
+ include('nums'),
+ (r'[a-z0-9]([\w\-]*[a-z0-9]+)?', Name.Variable),
+ # (r'[\s]+', Text),
+ (r'[ \t]+', Text),
+ ],
+ 'comment': [
+ (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
+ ],
+ 'core': [
+ # Figurative constants
(r'(^|(?<=[^\w\-]))(ALL\s+)?'
- r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
+ r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
r'\s*($|(?=[^\w\-]))',
- Name.Constant),
-
-            # Reserved words: STATEMENTS and other bold-rendered keywords
- (words((
- 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
- 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
- 'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
- 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
- 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
- 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
- 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
- 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
- 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
- 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
- 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
- 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
- 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
- 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
- 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
+ Name.Constant),
+
+            # Reserved words: STATEMENTS and other bold-rendered keywords
+ (words((
+ 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
+ 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
+ 'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
+ 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
+ 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
+ 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
+ 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
+ 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
+ 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
+ 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
+ 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
+ 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
+ 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
+ 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
+ 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^\w\-]))',
suffix=r'\s*($|(?=[^\w\-]))'),
- Keyword.Reserved),
-
- # Reserved words
- (words((
- 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
- 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
-                'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
- 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
+ Keyword.Reserved),
+
+ # Reserved words
+ (words((
+ 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
+ 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
+                'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
+ 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC',
'AUTOTERMINATE', 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
- 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
+ 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL',
'COLLATING', 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE',
'COMMIT', 'COMMON', 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
- 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
+ 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE',
'DEBUGGING', 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
- 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
- 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
- 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
- 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
+ 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
+ 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
+ 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
+ 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
'EXCLUSIVE', 'EXTEND', 'EXTERNAL', 'FILE-ID', 'FILLER', 'FINAL',
'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL',
'FUNCTION', 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
- 'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
- 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
+ 'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
+ 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
'INITIAL', 'INITIALIZED', 'INPUT', 'INTO', 'INTRINSIC', 'INVALID',
'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
- 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
- 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
+ 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
+ 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE', 'MULTIPLE',
'NATIONAL', 'NATIONAL-EDITED', 'NATIVE', 'NEGATIVE', 'NEXT', 'NO',
'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC', 'NUMERIC-EDITED',
'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
- 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
- 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
- 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
- 'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
- 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
- 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
- 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
- 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
- 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
- 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
- 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
- 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
- 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
- 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
- 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUM',
- 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
- 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
- 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
- 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
- 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
- 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
+ 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
+ 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
+ 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
+ 'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
+ 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
+ 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
+ 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
+ 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
+ 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
+ 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
+ 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
+ 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
+ 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
+ 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
+ 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUM',
+ 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
+ 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
+ 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
+ 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
+ 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
+ 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
- Keyword.Pseudo),
-
- # inactive reserved words
- (words((
+ Keyword.Pseudo),
+
+ # inactive reserved words
+ (words((
'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE',
'B-AND', 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER',
'CF', 'CH', 'CHAIN', 'CLASS-ID', 'CLASSIFICATION', 'COMMUNICATION',
@@ -152,7 +152,7 @@ class CobolLexer(RegexLexer):
'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT', 'FUNCTION-POINTER',
'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY', 'INHERITS',
'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
- 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
+ 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE',
'NORMAL', 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE',
'PAGE-COUNTER', 'PF', 'PH', 'PROPERTY', 'PROTOTYPE', 'PURGE',
@@ -164,218 +164,218 @@ class CobolLexer(RegexLexer):
'TYPEDEF', 'UCS-4', 'UNIVERSAL', 'USER-DEFAULT', 'UTF-16', 'UTF-8',
'VAL-STATUS', 'VALID', 'VALIDATE', 'VALIDATE-STATUS'),
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
- Error),
-
- # Data Types
+ Error),
+
+ # Data Types
(r'(^|(?<=[^\w\-]))'
- r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
- r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
- r'BINARY-C-LONG|'
- r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
+ r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
+ r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
+ r'BINARY-C-LONG|'
+ r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
r'BINARY)\s*($|(?=[^\w\-]))', Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
-
- # (r'(::)', Keyword.Declaration),
-
- (r'([(),;:&%.])', Punctuation),
-
- # Intrinsics
+
+ # Operators
+ (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
+
+ # (r'(::)', Keyword.Declaration),
+
+ (r'([(),;:&%.])', Punctuation),
+
+ # Intrinsics
(r'(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
- r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
- r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
- r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
- r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
- r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
- r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
- r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
- r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
- r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
- r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
- r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
+ r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
+ r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
+ r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
+ r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
+ r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
+ r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
+ r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
+ r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
+ r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
+ r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
+ r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
r'($|(?=[^\w\-]))', Name.Function),
-
- # Booleans
+
+ # Booleans
(r'(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))', Name.Builtin),
- # Comparing Operators
+ # Comparing Operators
(r'(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|'
r'greater|less|than|not|and|or)\s*($|(?=[^\w\-]))', Operator.Word),
- ],
-
- # \"[^\"\n]*\"|\'[^\'\n]*\'
- 'strings': [
- # apparently strings can be delimited by EOL if they are continued
- # in the next line
- (r'"[^"\n]*("|\n)', String.Double),
- (r"'[^'\n]*('|\n)", String.Single),
- ],
-
- 'nums': [
- (r'\d+(\s*|\.$|$)', Number.Integer),
- (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
- ],
- }
-
-
-class CobolFreeformatLexer(CobolLexer):
- """
- Lexer for Free format OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOLFree'
- aliases = ['cobolfree']
- filenames = ['*.cbl', '*.CBL']
- mimetypes = []
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'comment': [
- (r'(\*>.*\n|^\w*\*.*$)', Comment),
- ],
- }
-
-
-class ABAPLexer(RegexLexer):
- """
- Lexer for ABAP, SAP's integrated language.
-
- .. versionadded:: 1.1
- """
- name = 'ABAP'
- aliases = ['abap']
- filenames = ['*.abap', '*.ABAP']
- mimetypes = ['text/x-abap']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'common': [
- (r'\s+', Text),
- (r'^\*.*$', Comment.Single),
- (r'\".*?\n', Comment.Single),
+ ],
+
+ # \"[^\"\n]*\"|\'[^\'\n]*\'
+ 'strings': [
+ # apparently strings can be delimited by EOL if they are continued
+ # in the next line
+ (r'"[^"\n]*("|\n)', String.Double),
+ (r"'[^'\n]*('|\n)", String.Single),
+ ],
+
+ 'nums': [
+ (r'\d+(\s*|\.$|$)', Number.Integer),
+ (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
+ ],
+ }
+
+
+class CobolFreeformatLexer(CobolLexer):
+ """
+ Lexer for Free format OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOLFree'
+ aliases = ['cobolfree']
+ filenames = ['*.cbl', '*.CBL']
+ mimetypes = []
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'comment': [
+ (r'(\*>.*\n|^\w*\*.*$)', Comment),
+ ],
+ }
+
+
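The CobolLexer rules above deliberately avoid \b: COBOL names may contain hyphens, so the prefix (^|(?<=[^\w\-])) and suffix \s*($|(?=[^\w\-])) treat '-' as part of a word and keep END from matching inside END-READ. A standalone check of that boundary trick with plain re (illustration only, not lexer code):

    import re

    boundary = r'(^|(?<=[^\w\-]))END\s*($|(?=[^\w\-]))'
    print(bool(re.search(boundary, 'END-READ.', re.IGNORECASE)))       # False: END-READ is one word
    print(bool(re.search(boundary, 'END PROGRAM X.', re.IGNORECASE)))  # True: standalone END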
+class ABAPLexer(RegexLexer):
+ """
+ Lexer for ABAP, SAP's integrated language.
+
+ .. versionadded:: 1.1
+ """
+ name = 'ABAP'
+ aliases = ['abap']
+ filenames = ['*.abap', '*.ABAP']
+ mimetypes = ['text/x-abap']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'common': [
+ (r'\s+', Text),
+ (r'^\*.*$', Comment.Single),
+ (r'\".*?\n', Comment.Single),
(r'##\w+', Comment.Special),
- ],
- 'variable-names': [
- (r'<\S+>', Name.Variable),
- (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
- ],
- 'root': [
- include('common'),
- # function calls
+ ],
+ 'variable-names': [
+ (r'<\S+>', Name.Variable),
+ (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
+ ],
+ 'root': [
+ include('common'),
+ # function calls
(r'CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)',
Keyword),
- (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
- r'TRANSACTION|TRANSFORMATION))\b',
- Keyword),
- (r'(FORM|PERFORM)(\s+)(\w+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(PERFORM)(\s+)(\()(\w+)(\))',
- bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation)),
- (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
- bygroups(Keyword, Text, Name.Function, Text, Keyword)),
-
- # method implementation
- (r'(METHOD)(\s+)([\w~]+)',
- bygroups(Keyword, Text, Name.Function)),
- # method calls
- (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
- bygroups(Text, Name.Variable, Operator, Name.Function)),
-            # calls to method names in functional (returning) style
- (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
-
+ (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
+ r'TRANSACTION|TRANSFORMATION))\b',
+ Keyword),
+ (r'(FORM|PERFORM)(\s+)(\w+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(PERFORM)(\s+)(\()(\w+)(\))',
+ bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation)),
+ (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
+ bygroups(Keyword, Text, Name.Function, Text, Keyword)),
+
+ # method implementation
+ (r'(METHOD)(\s+)([\w~]+)',
+ bygroups(Keyword, Text, Name.Function)),
+ # method calls
+ (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
+ bygroups(Text, Name.Variable, Operator, Name.Function)),
+ # call methodnames returning style
+            # calls to method names in functional (returning) style
+
# text elements
(r'(TEXT)(-)(\d{3})',
bygroups(Keyword, Punctuation, Number.Integer)),
(r'(TEXT)(-)(\w{3})',
bygroups(Keyword, Punctuation, Name.Variable)),
- # keywords with dashes in them.
- # these need to be first, because for instance the -ID part
- # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
- # first in the list of keywords.
- (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
- r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
- r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
- r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
- r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
- r'INTERFACE-POOL|INVERTED-DATE|'
- r'LOAD-OF-PROGRAM|LOG-POINT|'
- r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
- r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
- r'OUTPUT-LENGTH|PRINT-CONTROL|'
- r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
- r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
+ # keywords with dashes in them.
+ # these need to be first, because for instance the -ID part
+ # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
+ # first in the list of keywords.
+ (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
+ r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
+ r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
+ r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
+ r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
+ r'INTERFACE-POOL|INVERTED-DATE|'
+ r'LOAD-OF-PROGRAM|LOG-POINT|'
+ r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
+ r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
+ r'OUTPUT-LENGTH|PRINT-CONTROL|'
+ r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
+ r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
r'TYPE-POOL|TYPE-POOLS|NO-DISPLAY'
- r')\b', Keyword),
-
-            # keyword combinations
+ r')\b', Keyword),
+
+            # keyword combinations
(r'(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
r'(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
r'(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|'
- r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
- r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
- r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
- r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
- r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
- r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
- r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
- r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
- r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
- r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
-             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
- r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
- r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
- r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
- r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
- r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
- r'FREE\s(MEMORY|OBJECT)?|'
- r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
- r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
- r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
- r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
- r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
- r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
- r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
- r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
- r'SKIP|ULINE)|'
- r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
- r'TO LIST-PROCESSING|TO TRANSACTION)'
- r'(ENDING|STARTING)\s+AT|'
- r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
- r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
- r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
- r'(BEGIN|END)\s+OF|'
- r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
- r'COMPARING(\s+ALL\s+FIELDS)?|'
+ r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
+ r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
+ r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
+ r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
+ r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
+ r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
+ r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
+ r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
+ r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
+ r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
+             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
+ r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
+ r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
+ r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
+ r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
+ r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
+ r'FREE\s(MEMORY|OBJECT)?|'
+ r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
+ r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
+ r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
+ r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
+ r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
+ r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
+ r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
+ r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
+ r'SKIP|ULINE)|'
+ r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
+ r'TO LIST-PROCESSING|TO TRANSACTION)'
+ r'(ENDING|STARTING)\s+AT|'
+ r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
+ r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
+ r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
+ r'(BEGIN|END)\s+OF|'
+ r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
+ r'COMPARING(\s+ALL\s+FIELDS)?|'
r'(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|'
- r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
- r'END-OF-(DEFINITION|PAGE|SELECTION)|'
- r'WITH\s+FRAME(\s+TITLE)|'
+ r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
+ r'END-OF-(DEFINITION|PAGE|SELECTION)|'
+ r'WITH\s+FRAME(\s+TITLE)|'
r'(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|'
r'MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|'
r'(RESPECTING|IGNORING)\s+CASE|'
r'IN\s+UPDATE\s+TASK|'
r'(SOURCE|RESULT)\s+(XML)?|'
r'REFERENCE\s+INTO|'
-
-             # simple combinations
- r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
- r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
- r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
- r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
- r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
- r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
+
+             # simple combinations
+ r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
+ r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
+ r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
+ r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
+ r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
+ r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b', Keyword),
-
- # single word keywords.
+
+ # single word keywords.
(r'(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|'
r'ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|'
- r'BACK|BLOCK|BREAK-POINT|'
- r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
- r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
+ r'BACK|BLOCK|BREAK-POINT|'
+ r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
+ r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|'
r'DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
r'DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|'
@@ -383,12 +383,12 @@ class ABAPLexer(RegexLexer):
r'ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|'
r'ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|'
r'FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|'
- r'HIDE|'
- r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
- r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
+ r'HIDE|'
+ r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
+ r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
r'LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|'
- r'JOIN|'
- r'KEY|'
+ r'JOIN|'
+ r'KEY|'
r'NEXT|'
r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|'
r'NODES|NUMBER|'
@@ -400,213 +400,213 @@ class ABAPLexer(RegexLexer):
r'STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|'
r'TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|'
r'TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
- r'ULINE|UNDER|UNPACK|UPDATE|USING|'
+ r'ULINE|UNDER|UNPACK|UPDATE|USING|'
r'VALUE|VALUES|VIA|VARYING|VARY|'
r'WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b', Keyword),
-
- # builtins
- (r'(abs|acos|asin|atan|'
- r'boolc|boolx|bit_set|'
- r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
- r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
- r'count|count_any_of|count_any_not_of|'
- r'dbmaxlen|distance|'
- r'escape|exp|'
- r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
- r'insert|'
- r'lines|log|log10|'
- r'match|matches|'
- r'nmax|nmin|numofchar|'
- r'repeat|replace|rescale|reverse|round|'
- r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
- r'substring|substring_after|substring_from|substring_before|substring_to|'
- r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
- r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
-
- (r'&[0-9]', Name),
- (r'[0-9]+', Number.Integer),
-
- # operators which look like variable names before
- # parsing variable names.
+
+ # builtins
+ (r'(abs|acos|asin|atan|'
+ r'boolc|boolx|bit_set|'
+ r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
+ r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
+ r'count|count_any_of|count_any_not_of|'
+ r'dbmaxlen|distance|'
+ r'escape|exp|'
+ r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
+ r'insert|'
+ r'lines|log|log10|'
+ r'match|matches|'
+ r'nmax|nmin|numofchar|'
+ r'repeat|replace|rescale|reverse|round|'
+ r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
+ r'substring|substring_after|substring_from|substring_before|substring_to|'
+ r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
+ r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
+
+ (r'&[0-9]', Name),
+ (r'[0-9]+', Number.Integer),
+
+ # operators which look like variable names before
+ # parsing variable names.
(r'(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
- r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
+ r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator.Word),
-
- include('variable-names'),
-
+
+ include('variable-names'),
+
# standard operators after variable names,
- # because < and > are part of field symbols.
+ # because < and > are part of field symbols.
(r'[?*<>=\-+&]', Operator),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Single),
+ (r"'(''|[^'])*'", String.Single),
+ (r"`([^`])*`", String.Single),
(r"([|}])([^{}|]*?)([|{])",
bygroups(Punctuation, String.Single, Punctuation)),
(r'[/;:()\[\],.]', Punctuation),
(r'(!)(\w+)', bygroups(Operator, Name)),
- ],
- }
-
-
-class OpenEdgeLexer(RegexLexer):
- """
- Lexer for `OpenEdge ABL (formerly Progress)
- <http://web.progress.com/en/openedge/abl.html>`_ source code.
-
- .. versionadded:: 1.5
- """
- name = 'OpenEdge ABL'
- aliases = ['openedge', 'abl', 'progress']
- filenames = ['*.p', '*.cls']
- mimetypes = ['text/x-openedge', 'application/x-openedge']
-
+ ],
+ }
+
+
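The ordering comment in ABAPLexer above ('standard operators after variable names, because < and > are part of field symbols') is what keeps field symbols intact: include('variable-names') runs first, so <fs_line> is consumed as a single Name.Variable instead of being split apart by the generic operator rule. A quick check, assuming the 'abap' alias declared above:

    from pygments.lexers import get_lexer_by_name

    abap = get_lexer_by_name('abap')
    for token, value in abap.get_tokens('ASSIGN ls_line TO <fs_line>.'):
        print(token, repr(value))  # <fs_line> should appear as one Name.Variable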
+class OpenEdgeLexer(RegexLexer):
+ """
+ Lexer for `OpenEdge ABL (formerly Progress)
+ <http://web.progress.com/en/openedge/abl.html>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'OpenEdge ABL'
+ aliases = ['openedge', 'abl', 'progress']
+ filenames = ['*.p', '*.cls']
+ mimetypes = ['text/x-openedge', 'application/x-openedge']
+
types = (r'(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
- r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
- r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
- r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
+ r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
+ r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
+ r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))')
-
- keywords = words(OPENEDGEKEYWORDS,
+
+ keywords = words(OPENEDGEKEYWORDS,
prefix=r'(?i)(^|(?<=[^\w\-]))',
suffix=r'\s*($|(?=[^\w\-]))')
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'\{', Comment.Preproc, 'preprocessor'),
- (r'\s*&.*', Comment.Preproc),
- (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
- (types, Keyword.Type),
- (keywords, Name.Builtin),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Text),
- (r'[+*/=-]', Operator),
- (r'[.:()]', Punctuation),
- (r'.', Name.Variable), # Lazy catch-all
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'preprocessor': [
- (r'[^{}]', Comment.Preproc),
- (r'\{', Comment.Preproc, '#push'),
- (r'\}', Comment.Preproc, '#pop'),
- ],
- }
-
-
-class GoodDataCLLexer(RegexLexer):
- """
- Lexer for `GoodData-CL
- <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
-com/gooddata/processor/COMMANDS.txt>`_
- script files.
-
- .. versionadded:: 1.4
- """
-
- name = 'GoodData-CL'
- aliases = ['gooddata-cl']
- filenames = ['*.gdc']
- mimetypes = ['text/x-gooddata-cl']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # Comments
- (r'#.*', Comment.Single),
- # Function call
- (r'[a-z]\w*', Name.Function),
- # Argument list
- (r'\(', Punctuation, 'args-list'),
- # Punctuation
- (r';', Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'args-list': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'[a-z]\w*', Name.Variable),
- (r'=', Operator),
- (r'"', String, 'string-literal'),
- (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
- # Space is not significant
- (r'\s', Text)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', String, '#pop'),
- (r'[^\\"]+', String)
- ]
- }
-
-
-class MaqlLexer(RegexLexer):
- """
- Lexer for `GoodData MAQL
- <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
- scripts.
-
- .. versionadded:: 1.4
- """
-
- name = 'MAQL'
- aliases = ['maql']
- filenames = ['*.maql']
- mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # IDENTITY
- (r'IDENTIFIER\b', Name.Builtin),
- # IDENTIFIER
- (r'\{[^}]+\}', Name.Variable),
- # NUMBER
- (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
- # STRING
- (r'"', String, 'string-literal'),
- # RELATION
- (r'\<\>|\!\=', Operator),
- (r'\=|\>\=|\>|\<\=|\<', Operator),
- # :=
- (r'\:\=', Operator),
- # OBJECT
- (r'\[[^]]+\]', Name.Variable.Class),
- # keywords
- (words((
- 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
- 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
- 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
- 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
- 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
- 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'WHEN', 'NOT', 'ON',
- 'KEYS', 'KEY', 'FULLSET', 'PRIMARY', 'LABELS', 'LABEL',
- 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', 'ALTER', 'DROP',
- 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', 'DOUBLE', 'DATE',
- 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', 'DEFAULT', 'ORDER',
- 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', 'TEMPLATE', 'MODIFY'),
- suffix=r'\b'),
- Keyword),
- # FUNCNAME
- (r'[a-z]\w*\b', Name.Function),
- # Comments
- (r'#.*', Comment.Single),
- # Punctuation
- (r'[,;()]', Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', String, '#pop'),
- (r'[^\\"]+', String)
- ],
- }
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'\{', Comment.Preproc, 'preprocessor'),
+ (r'\s*&.*', Comment.Preproc),
+ (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
+ (types, Keyword.Type),
+ (keywords, Name.Builtin),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\s+', Text),
+ (r'[+*/=-]', Operator),
+ (r'[.:()]', Punctuation),
+ (r'.', Name.Variable), # Lazy catch-all
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'preprocessor': [
+ (r'[^{}]', Comment.Preproc),
+ (r'\{', Comment.Preproc, '#push'),
+ (r'\}', Comment.Preproc, '#pop'),
+ ],
+ }
+
+
+class GoodDataCLLexer(RegexLexer):
+ """
+ Lexer for `GoodData-CL
+ <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
+com/gooddata/processor/COMMANDS.txt>`_
+ script files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'GoodData-CL'
+ aliases = ['gooddata-cl']
+ filenames = ['*.gdc']
+ mimetypes = ['text/x-gooddata-cl']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Comments
+ (r'#.*', Comment.Single),
+ # Function call
+ (r'[a-z]\w*', Name.Function),
+ # Argument list
+ (r'\(', Punctuation, 'args-list'),
+ # Punctuation
+ (r';', Punctuation),
+ # Space is not significant
+ (r'\s+', Text)
+ ],
+ 'args-list': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'[a-z]\w*', Name.Variable),
+ (r'=', Operator),
+ (r'"', String, 'string-literal'),
+ (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
+ # Space is not significant
+ (r'\s', Text)
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ]
+ }
+
+
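The GoodDataCLLexer above is a small three-state machine: '(' pushes 'args-list', '"' pushes 'string-literal', and the matching ')' and '"' pop back, so string contents never leak into the argument tokens. A short sketch of driving it via the 'gooddata-cl' alias declared above:

    from pygments.lexers import get_lexer_by_name

    gdc = get_lexer_by_name('gooddata-cl')
    for token, value in gdc.get_tokens('CreateProject(name="demo");'):
        print(token, repr(value))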
+class MaqlLexer(RegexLexer):
+ """
+ Lexer for `GoodData MAQL
+ <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
+ scripts.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'MAQL'
+ aliases = ['maql']
+ filenames = ['*.maql']
+ mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # IDENTITY
+ (r'IDENTIFIER\b', Name.Builtin),
+ # IDENTIFIER
+ (r'\{[^}]+\}', Name.Variable),
+ # NUMBER
+ (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
+ # STRING
+ (r'"', String, 'string-literal'),
+ # RELATION
+ (r'\<\>|\!\=', Operator),
+ (r'\=|\>\=|\>|\<\=|\<', Operator),
+ # :=
+ (r'\:\=', Operator),
+ # OBJECT
+ (r'\[[^]]+\]', Name.Variable.Class),
+ # keywords
+ (words((
+ 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
+ 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
+ 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
+ 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
+ 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
+ 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'WHEN', 'NOT', 'ON',
+ 'KEYS', 'KEY', 'FULLSET', 'PRIMARY', 'LABELS', 'LABEL',
+ 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', 'ALTER', 'DROP',
+ 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', 'DOUBLE', 'DATE',
+ 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', 'DEFAULT', 'ORDER',
+ 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', 'TEMPLATE', 'MODIFY'),
+ suffix=r'\b'),
+ Keyword),
+ # FUNCNAME
+ (r'[a-z]\w*\b', Name.Function),
+ # Comments
+ (r'#.*', Comment.Single),
+ # Punctuation
+ (r'[,;()]', Punctuation),
+ # Space is not significant
+ (r'\s+', Text)
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ],
+ }
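In MaqlLexer above, rule order carries the meaning: the words(...) keyword rule is tried before the FUNCNAME catch-all [a-z]\w*\b, and flags = re.IGNORECASE makes both case-insensitive, so SELECT and select tokenize identically while unknown lowercase names fall through to Name.Function. Illustrative usage via the 'maql' alias declared above:

    from pygments.lexers import get_lexer_by_name

    maql = get_lexer_by_name('maql')
    for token, value in maql.get_tokens('SELECT COUNT({attr.invoice}) BY {attr.month};'):
        print(token, repr(value))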
diff --git a/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py b/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py
index 5d84a37758..299f8941e2 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/c_cpp.py
@@ -1,252 +1,252 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.c_cpp
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for C/C++ languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.c_cpp
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for C/C++ languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, inherit, default, words
-from pygments.util import get_bool_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['CLexer', 'CppLexer']
-
-
-class CFamilyLexer(RegexLexer):
- """
- For C family source code. This is used as a base class to avoid repetitious
- definitions.
- """
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- # The trailing ?, rather than *, avoids a geometric performance drop here.
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
-
- tokens = {
- 'whitespace': [
- # preprocessor directives: without whitespace
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, inherit, default, words
+from pygments.util import get_bool_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['CLexer', 'CppLexer']
+
+
+class CFamilyLexer(RegexLexer):
+ """
+ For C family source code. This is used as a base class to avoid repetitious
+ definitions.
+ """
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ # The trailing ?, rather than *, avoids a geometric performance drop here.
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
+
+ tokens = {
+ 'whitespace': [
+ # preprocessor directives: without whitespace
(r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^(' + _ws1 + r')(#if\s+0)',
- bygroups(using(this), Comment.Preproc), 'if0'),
- ('^(' + _ws1 + ')(#)',
- bygroups(using(this), Comment.Preproc), 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^(' + _ws1 + r')(#if\s+0)',
+ bygroups(using(this), Comment.Preproc), 'if0'),
+ ('^(' + _ws1 + ')(#)',
+ bygroups(using(this), Comment.Preproc), 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
(r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
             # Open until EOF, so no ending delimiter
(r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
- ],
- 'statements': [
+ ],
+ 'statements': [
(r'(L?)(")', bygroups(String.Affix, String), 'string'),
(r"(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')",
bygroups(String.Affix, String.Char, String.Char, String.Char)),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
(words(('asm', 'auto', 'break', 'case', 'const', 'continue',
'default', 'do', 'else', 'enum', 'extern', 'for', 'goto',
'if', 'register', 'restricted', 'return', 'sizeof',
'static', 'struct', 'switch', 'typedef', 'union',
'volatile', 'while'),
- suffix=r'\b'), Keyword),
- (r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b',
- Keyword.Type),
- (words(('inline', '_inline', '__inline', 'naked', 'restrict',
- 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved),
- # Vector intrinsics
- (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
- # Microsoft-isms
- (words((
- 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl',
- 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try',
- 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop',
- 'identifier', 'forceinline', 'assume'),
- prefix=r'__', suffix=r'\b'), Keyword.Reserved),
- (r'(true|false|NULL)\b', Name.Builtin),
- (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
+ suffix=r'\b'), Keyword),
+ (r'(bool|int|long|float|short|double|char|unsigned|signed|void)\b',
+ Keyword.Type),
+ (words(('inline', '_inline', '__inline', 'naked', 'restrict',
+ 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved),
+ # Vector intrinsics
+ (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
+ # Microsoft-isms
+ (words((
+ 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl',
+ 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try',
+ 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop',
+ 'identifier', 'forceinline', 'assume'),
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved),
+ (r'(true|false|NULL)\b', Name.Builtin),
+ (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'([^;{]*)(\{)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'([^;]*)(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'([^;{]*)(\{)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
+ (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'([^;]*)(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
(r'(include)(' + _ws1 + r')([^\n]+)',
bygroups(Comment.Preproc, Text, Comment.PreprocFile)),
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
stdlib_types = {
- 'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
- 'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
+ 'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
+ 'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
c99_types = {
- '_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
- 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
- 'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
- 'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
- 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
+ '_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
+ 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
+ 'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
+ 'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
+ 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
linux_types = {
- 'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
- 'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
+ 'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
+ 'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
-
- def __init__(self, **options):
- self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
- self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
- self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.stdlibhighlighting and value in self.stdlib_types:
- token = Keyword.Type
- elif self.c99highlighting and value in self.c99_types:
- token = Keyword.Type
- elif self.platformhighlighting and value in self.linux_types:
- token = Keyword.Type
- yield index, token, value
-
-
-class CLexer(CFamilyLexer):
- """
- For C source code with preprocessor directives.
- """
- name = 'C'
- aliases = ['c']
- filenames = ['*.c', '*.h', '*.idc']
- mimetypes = ['text/x-chdr', 'text/x-csrc']
- priority = 0.1
-
- def analyse_text(text):
+
+ def __init__(self, **options):
+ self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
+ self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
+ self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.stdlibhighlighting and value in self.stdlib_types:
+ token = Keyword.Type
+ elif self.c99highlighting and value in self.c99_types:
+ token = Keyword.Type
+ elif self.platformhighlighting and value in self.linux_types:
+ token = Keyword.Type
+ yield index, token, value
+
+
+class CLexer(CFamilyLexer):
+ """
+ For C source code with preprocessor directives.
+ """
+ name = 'C'
+ aliases = ['c']
+ filenames = ['*.c', '*.h', '*.idc']
+ mimetypes = ['text/x-chdr', 'text/x-csrc']
+ priority = 0.1
+
+ def analyse_text(text):
if re.search(r'^\s*#include [<"]', text, re.MULTILINE):
- return 0.1
+ return 0.1
if re.search(r'^\s*#ifn?def ', text, re.MULTILINE):
- return 0.1
-
-
-class CppLexer(CFamilyLexer):
- """
- For C++ source code with preprocessor directives.
- """
- name = 'C++'
- aliases = ['cpp', 'c++']
- filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
- '*.cc', '*.hh', '*.cxx', '*.hxx',
- '*.C', '*.H', '*.cp', '*.CPP']
- mimetypes = ['text/x-c++hdr', 'text/x-c++src']
- priority = 0.1
-
- tokens = {
- 'statements': [
- (words((
+ return 0.1
+
+
+class CppLexer(CFamilyLexer):
+ """
+ For C++ source code with preprocessor directives.
+ """
+ name = 'C++'
+ aliases = ['cpp', 'c++']
+ filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
+ '*.cc', '*.hh', '*.cxx', '*.hxx',
+ '*.C', '*.H', '*.cp', '*.CPP']
+ mimetypes = ['text/x-c++hdr', 'text/x-c++src']
+ priority = 0.1
+
+ tokens = {
+ 'statements': [
+ (words((
'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
- 'export', 'friend', 'mutable', 'namespace', 'new', 'operator',
- 'private', 'protected', 'public', 'reinterpret_cast',
- 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
- 'try', 'typeid', 'typename', 'using', 'virtual',
- 'constexpr', 'nullptr', 'decltype', 'thread_local',
- 'alignas', 'alignof', 'static_assert', 'noexcept', 'override',
- 'final'), suffix=r'\b'), Keyword),
- (r'char(16_t|32_t)\b', Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ 'export', 'friend', 'mutable', 'namespace', 'new', 'operator',
+ 'private', 'protected', 'public', 'reinterpret_cast',
+ 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
+ 'try', 'typeid', 'typename', 'using', 'virtual',
+ 'constexpr', 'nullptr', 'decltype', 'thread_local',
+ 'alignas', 'alignof', 'static_assert', 'noexcept', 'override',
+ 'final'), suffix=r'\b'), Keyword),
+ (r'char(16_t|32_t)\b', Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
# C++11 raw strings
(r'(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
bygroups(String.Affix, String, String.Delimiter, String.Delimiter,
String, String.Delimiter, String)),
# C++11 UTF-8/16/32 strings
(r'(u8|u|U)(")', bygroups(String.Affix, String), 'string'),
- inherit,
- ],
- 'root': [
- inherit,
- # C++ Microsoft-isms
- (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
- 'multiple_inheritance', 'interface', 'event'),
- prefix=r'__', suffix=r'\b'), Keyword.Reserved),
- # Offload C++ extensions, http://offload.codeplay.com/
- (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
- def analyse_text(text):
+ inherit,
+ ],
+ 'root': [
+ inherit,
+ # C++ Microsoft-isms
+ (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
+ 'multiple_inheritance', 'interface', 'event'),
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved),
+ # Offload C++ extensions, http://offload.codeplay.com/
+ (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
if re.search('#include <[a-z_]+>', text):
- return 0.2
- if re.search('using namespace ', text):
- return 0.4
+ return 0.2
+ if re.search('using namespace ', text):
+ return 0.4
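
The analyse_text() hooks restored above only come into play when Pygments has to guess a lexer from raw text. Below is a minimal sketch of how those scores get consumed, assuming the standard guess_lexer() API; the sample snippet and the "likely C++" outcome are illustrative and not part of this diff.

```python
from pygments.lexers import guess_lexer
from pygments.lexers.c_cpp import CppLexer

snippet = '#include <vector>\nusing namespace std;\nint main() { return 0; }\n'

# The '#include <...>' branch matches first, so CppLexer reports 0.2 here;
# a snippet containing only 'using namespace' would score 0.4.
print(CppLexer.analyse_text(snippet))
# guess_lexer() compares such scores across all registered lexers and picks
# the highest, so this snippet is likely (not guaranteed) to resolve to C++.
print(guess_lexer(snippet).name)
```
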
diff --git a/contrib/python/Pygments/py2/pygments/lexers/c_like.py b/contrib/python/Pygments/py2/pygments/lexers/c_like.py
index 82dee35ad3..a3a779386a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/c_like.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/c_like.py
@@ -1,296 +1,296 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.c_like
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for other C-like languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.c_like
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for other C-like languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
- default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers import _mql_builtins
-
-__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
+ default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers import _mql_builtins
+
+__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer']
-
-
-class PikeLexer(CppLexer):
- """
- For `Pike <http://pike.lysator.liu.se/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Pike'
- aliases = ['pike']
- filenames = ['*.pike', '*.pmod']
- mimetypes = ['text/x-pike']
-
- tokens = {
- 'statements': [
- (words((
- 'catch', 'new', 'private', 'protected', 'public', 'gauge',
- 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
- 'this', 'super', 'constant', 'final', 'static', 'import', 'use', 'extern',
- 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for',
- 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null',
- '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__',
- '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__',
- '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__',
- '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'),
- Keyword),
- (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
- r'array|multiset|program|function|lambda|mixed|'
- r'[a-z_][a-z0-9_]*_t)\b',
- Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'[~!%^&*+=|?:<>/@-]', Operator),
- inherit,
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
-
-class NesCLexer(CLexer):
- """
- For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
- directives.
-
- .. versionadded:: 2.0
- """
- name = 'nesC'
- aliases = ['nesc']
- filenames = ['*.nc']
- mimetypes = ['text/x-nescsrc']
-
- tokens = {
- 'statements': [
- (words((
- 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component',
- 'components', 'configuration', 'event', 'extends', 'generic',
- 'implementation', 'includes', 'interface', 'module', 'new', 'norace',
- 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'),
- Keyword),
- (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t',
- 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t',
- 'nx_uint64_t'), suffix=r'\b'),
- Keyword.Type),
- inherit,
- ],
- }
-
-
-class ClayLexer(RegexLexer):
- """
- For `Clay <http://claylabs.com/clay/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Clay'
- filenames = ['*.clay']
- aliases = ['clay']
- mimetypes = ['text/x-clay']
- tokens = {
- 'root': [
- (r'\s', Text),
+
+
+class PikeLexer(CppLexer):
+ """
+ For `Pike <http://pike.lysator.liu.se/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Pike'
+ aliases = ['pike']
+ filenames = ['*.pike', '*.pmod']
+ mimetypes = ['text/x-pike']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'catch', 'new', 'private', 'protected', 'public', 'gauge',
+ 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'constant', 'final', 'static', 'import', 'use', 'extern',
+ 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for',
+ 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null',
+ '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__',
+ '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__',
+ '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__',
+ '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'),
+ Keyword),
+ (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
+ r'array|multiset|program|function|lambda|mixed|'
+ r'[a-z_][a-z0-9_]*_t)\b',
+ Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'[~!%^&*+=|?:<>/@-]', Operator),
+ inherit,
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+
+class NesCLexer(CLexer):
+ """
+ For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
+ directives.
+
+ .. versionadded:: 2.0
+ """
+ name = 'nesC'
+ aliases = ['nesc']
+ filenames = ['*.nc']
+ mimetypes = ['text/x-nescsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component',
+ 'components', 'configuration', 'event', 'extends', 'generic',
+ 'implementation', 'includes', 'interface', 'module', 'new', 'norace',
+ 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'),
+ Keyword),
+ (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t',
+ 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t',
+ 'nx_uint64_t'), suffix=r'\b'),
+ Keyword.Type),
+ inherit,
+ ],
+ }
+
+
+class ClayLexer(RegexLexer):
+ """
+ For `Clay <http://claylabs.com/clay/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Clay'
+ filenames = ['*.clay']
+ aliases = ['clay']
+ mimetypes = ['text/x-clay']
+ tokens = {
+ 'root': [
+ (r'\s', Text),
(r'//.*?$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\b(public|private|import|as|record|variant|instance'
- r'|define|overload|default|external|alias'
- r'|rvalue|ref|forward|inline|noinline|forceinline'
- r'|enum|var|and|or|not|if|else|goto|return|while'
- r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
- r'|finally|onerror|staticassert|eval|when|newtype'
- r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
- r')\b', Keyword),
- (r'[~!%^&*+=|:<>/-]', Operator),
- (r'[#(){}\[\],;.]', Punctuation),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\b(true|false)\b', Name.Builtin),
- (r'(?i)[a-z_?][\w?]*', Name),
- (r'"""', String, 'tdqs'),
- (r'"', String, 'dqs'),
- ],
- 'strings': [
- (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
- (r'.', String),
- ],
- 'nl': [
- (r'\n', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings'),
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl'),
- ],
- }
-
-
-class ECLexer(CLexer):
- """
- For eC source code with preprocessor directives.
-
- .. versionadded:: 1.5
- """
- name = 'eC'
- aliases = ['ec']
- filenames = ['*.ec', '*.eh']
- mimetypes = ['text/x-echdr', 'text/x-ecsrc']
-
- tokens = {
- 'statements': [
- (words((
- 'virtual', 'class', 'private', 'public', 'property', 'import',
- 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get',
- 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass',
- '__on_register_module', 'namespace', 'using', 'typed_object',
- 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers',
- 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset',
- 'class_default_property', 'property_category', 'class_data',
- 'class_property', 'thisclass', 'dbtable', 'dbindex',
- 'database_open', 'dbfield'), suffix=r'\b'), Keyword),
- (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte',
- 'unichar', 'int64'), suffix=r'\b'),
- Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(null|value|this)\b', Name.Builtin),
- inherit,
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
-
-class ValaLexer(RegexLexer):
- """
- For Vala source code with preprocessor directives.
-
- .. versionadded:: 1.1
- """
- name = 'Vala'
- aliases = ['vala', 'vapi']
- filenames = ['*.vala', '*.vapi']
- mimetypes = ['text/x-vala']
-
- tokens = {
- 'whitespace': [
- (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- (r'[L@]?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'(?s)""".*?"""', String), # verbatim strings
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation)),
- # TODO: "correctly" parse complex code attributes
- (r'(\[)(CCode|(?:Integer|Floating)Type)',
- bygroups(Punctuation, Name.Decorator)),
- (r'[()\[\],.]', Punctuation),
- (words((
- 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue',
- 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for',
- 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params',
- 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try',
- 'typeof', 'while', 'yield'), suffix=r'\b'),
- Keyword),
- (words((
- 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern',
- 'inline', 'internal', 'override', 'owned', 'private', 'protected',
- 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned',
- 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'),
- Keyword.Declaration),
- (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text),
- 'namespace'),
- (r'(class|errordomain|interface|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- # void is an actual keyword, others are in glib-2.0.vapi
- (words((
- 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16',
- 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string',
- 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'ulong', 'unichar', 'ushort'), suffix=r'\b'),
- Keyword.Type),
- (r'(true|false|null)\b', Name.Builtin),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\b(public|private|import|as|record|variant|instance'
+ r'|define|overload|default|external|alias'
+ r'|rvalue|ref|forward|inline|noinline|forceinline'
+ r'|enum|var|and|or|not|if|else|goto|return|while'
+ r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
+ r'|finally|onerror|staticassert|eval|when|newtype'
+ r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
+ r')\b', Keyword),
+ (r'[~!%^&*+=|:<>/-]', Operator),
+ (r'[#(){}\[\],;.]', Punctuation),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\b(true|false)\b', Name.Builtin),
+ (r'(?i)[a-z_?][\w?]*', Name),
+ (r'"""', String, 'tdqs'),
+ (r'"', String, 'dqs'),
+ ],
+ 'strings': [
+ (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
+ (r'.', String),
+ ],
+ 'nl': [
+ (r'\n', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings'),
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl'),
+ ],
+ }
+
+
+class ECLexer(CLexer):
+ """
+ For eC source code with preprocessor directives.
+
+ .. versionadded:: 1.5
+ """
+ name = 'eC'
+ aliases = ['ec']
+ filenames = ['*.ec', '*.eh']
+ mimetypes = ['text/x-echdr', 'text/x-ecsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'virtual', 'class', 'private', 'public', 'property', 'import',
+ 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get',
+ 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass',
+ '__on_register_module', 'namespace', 'using', 'typed_object',
+ 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers',
+ 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset',
+ 'class_default_property', 'property_category', 'class_data',
+ 'class_property', 'thisclass', 'dbtable', 'dbindex',
+ 'database_open', 'dbfield'), suffix=r'\b'), Keyword),
+ (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte',
+ 'unichar', 'int64'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(null|value|this)\b', Name.Builtin),
+ inherit,
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+
+class ValaLexer(RegexLexer):
+ """
+ For Vala source code with preprocessor directives.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Vala'
+ aliases = ['vala', 'vapi']
+ filenames = ['*.vala', '*.vapi']
+ mimetypes = ['text/x-vala']
+
+ tokens = {
+ 'whitespace': [
+ (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ (r'[L@]?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'(?s)""".*?"""', String), # verbatim strings
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
+ bygroups(Punctuation, Name.Decorator, Punctuation)),
+ # TODO: "correctly" parse complex code attributes
+ (r'(\[)(CCode|(?:Integer|Floating)Type)',
+ bygroups(Punctuation, Name.Decorator)),
+ (r'[()\[\],.]', Punctuation),
+ (words((
+ 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue',
+ 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for',
+ 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params',
+ 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try',
+ 'typeof', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern',
+ 'inline', 'internal', 'override', 'owned', 'private', 'protected',
+ 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned',
+ 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'),
+ Keyword.Declaration),
+ (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'namespace'),
+ (r'(class|errordomain|interface|struct)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ # void is an actual keyword, others are in glib-2.0.vapi
+ (words((
+ 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16',
+ 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string',
+ 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'ulong', 'unichar', 'ushort'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false|null)\b', Name.Builtin),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'namespace': [
- (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
- ],
- }
-
-
-class CudaLexer(CLexer):
- """
- For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
- source.
-
- .. versionadded:: 1.6
- """
- name = 'CUDA'
- filenames = ['*.cu', '*.cuh']
- aliases = ['cuda', 'cu']
- mimetypes = ['text/x-cuda']
-
+ ],
+ 'root': [
+ include('whitespace'),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class CudaLexer(CLexer):
+ """
+ For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
+ source.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CUDA'
+ filenames = ['*.cu', '*.cuh']
+ aliases = ['cuda', 'cu']
+ mimetypes = ['text/x-cuda']
+
function_qualifiers = {'__device__', '__global__', '__host__',
'__noinline__', '__forceinline__'}
variable_qualifiers = {'__device__', '__constant__', '__shared__',
@@ -308,132 +308,132 @@ class CudaLexer(CLexer):
'__syncthreads', '__syncthreads_count', '__syncthreads_and',
'__syncthreads_or'}
execution_confs = {'<<<', '>>>'}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in CLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.variable_qualifiers:
- token = Keyword.Type
- elif value in self.vector_types:
- token = Keyword.Type
- elif value in self.variables:
- token = Name.Builtin
- elif value in self.execution_confs:
- token = Keyword.Pseudo
- elif value in self.function_qualifiers:
- token = Keyword.Reserved
- elif value in self.functions:
- token = Name.Function
- yield index, token, value
-
-
-class SwigLexer(CppLexer):
- """
- For `SWIG <http://www.swig.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'SWIG'
- aliases = ['swig']
- filenames = ['*.swg', '*.i']
- mimetypes = ['text/swig']
- priority = 0.04 # Lower than C/C++ and Objective C/C++
-
- tokens = {
- 'statements': [
- # SWIG directives
- (r'(%[a-z_][a-z0-9_]*)', Name.Function),
- # Special variables
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in CLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.variable_qualifiers:
+ token = Keyword.Type
+ elif value in self.vector_types:
+ token = Keyword.Type
+ elif value in self.variables:
+ token = Name.Builtin
+ elif value in self.execution_confs:
+ token = Keyword.Pseudo
+ elif value in self.function_qualifiers:
+ token = Keyword.Reserved
+ elif value in self.functions:
+ token = Name.Function
+ yield index, token, value
+
+
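
CudaLexer above shows the general extension pattern used throughout this file: run the parent lexer, then promote selected Name tokens. A hedged sketch of the same pattern for a hypothetical project-specific typedef set follows; the class name and the EXTRA_TYPES contents are assumptions, not Pygments code.

```python
from pygments.lexers.c_cpp import CLexer
from pygments.token import Name, Keyword

class MyTypesCLexer(CLexer):
    """C lexer that additionally highlights a project-specific typedef set."""
    EXTRA_TYPES = {'handle_t', 'status_t'}  # assumed typedefs, purely illustrative

    def get_tokens_unprocessed(self, text):
        for index, token, value in CLexer.get_tokens_unprocessed(self, text):
            # Same promotion CudaLexer performs for its vector_types and friends.
            if token is Name and value in self.EXTRA_TYPES:
                token = Keyword.Type
            yield index, token, value
```
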
+class SwigLexer(CppLexer):
+ """
+ For `SWIG <http://www.swig.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'SWIG'
+ aliases = ['swig']
+ filenames = ['*.swg', '*.i']
+ mimetypes = ['text/swig']
+ priority = 0.04 # Lower than C/C++ and Objective C/C++
+
+ tokens = {
+ 'statements': [
+ # SWIG directives
+ (r'(%[a-z_][a-z0-9_]*)', Name.Function),
+ # Special variables
(r'\$\**\&?\w+', Name),
- # Stringification / additional preprocessor directives
- (r'##*[a-zA-Z_]\w*', Comment.Preproc),
- inherit,
- ],
- }
-
- # This is a far from complete set of SWIG directives
+ # Stringification / additional preprocessor directives
+ (r'##*[a-zA-Z_]\w*', Comment.Preproc),
+ inherit,
+ ],
+ }
+
+ # This is a far from complete set of SWIG directives
swig_directives = {
- # Most common directives
- '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
- '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
- '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
- '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
- # Less common directives
- '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
- '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
- '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
- '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
- '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
- '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
- '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
- '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
- '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
- '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
- '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
- '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
+ # Most common directives
+ '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
+ '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
+ '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
+ '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
+ # Less common directives
+ '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
+ '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
+ '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
+ '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
+ '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
+ '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
+ '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
+ '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
+ '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
+ '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
+ '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
+ '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
'%warnfilter'}
-
- def analyse_text(text):
- rv = 0
- # Search for SWIG directives, which are conventionally at the beginning of
- # a line. The probability of them being within a line is low, so let another
- # lexer win in this case.
- matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
- for m in matches:
- if m in SwigLexer.swig_directives:
- rv = 0.98
- break
- else:
- rv = 0.91 # Fraction higher than MatlabLexer
- return rv
-
-
-class MqlLexer(CppLexer):
- """
- For `MQL4 <http://docs.mql4.com/>`_ and
- `MQL5 <http://www.mql5.com/en/docs>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'MQL'
- aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
- filenames = ['*.mq4', '*.mq5', '*.mqh']
- mimetypes = ['text/x-mql']
-
- tokens = {
- 'statements': [
- (words(_mql_builtins.keywords, suffix=r'\b'), Keyword),
- (words(_mql_builtins.c_types, suffix=r'\b'), Keyword.Type),
- (words(_mql_builtins.types, suffix=r'\b'), Name.Function),
- (words(_mql_builtins.constants, suffix=r'\b'), Name.Constant),
- (words(_mql_builtins.colors, prefix='(clr)?', suffix=r'\b'),
- Name.Constant),
- inherit,
- ],
- }
-
-
-class ArduinoLexer(CppLexer):
- """
- For `Arduino(tm) <https://arduino.cc/>`_ source.
-
- This is an extension of the CppLexer, as the Arduino® Language is a superset
- of C++
-
- .. versionadded:: 2.1
- """
-
- name = 'Arduino'
- aliases = ['arduino']
- filenames = ['*.ino']
- mimetypes = ['text/x-arduino']
-
- # Language sketch main structure functions
+
+ def analyse_text(text):
+ rv = 0
+ # Search for SWIG directives, which are conventionally at the beginning of
+ # a line. The probability of them being within a line is low, so let another
+ # lexer win in this case.
+ matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
+ for m in matches:
+ if m in SwigLexer.swig_directives:
+ rv = 0.98
+ break
+ else:
+ rv = 0.91 # Fraction higher than MatlabLexer
+ return rv
+
+
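
Because *.i files are shared with other tools, SwigLexer keeps a low priority and relies on the directive scan in analyse_text() above. An illustrative check of that scoring, with made-up input; the 0.98 and 0.0 values follow directly from the code.

```python
from pygments.lexers.c_like import SwigLexer

sample = '%module example\n%include "std_string.i"\n'
print(SwigLexer.analyse_text(sample))      # 0.98: '%module' is a known directive
print(SwigLexer.analyse_text('int x;\n'))  # 0.0: no '%...' directive lines at all
```
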
+class MqlLexer(CppLexer):
+ """
+ For `MQL4 <http://docs.mql4.com/>`_ and
+ `MQL5 <http://www.mql5.com/en/docs>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'MQL'
+ aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
+ filenames = ['*.mq4', '*.mq5', '*.mqh']
+ mimetypes = ['text/x-mql']
+
+ tokens = {
+ 'statements': [
+ (words(_mql_builtins.keywords, suffix=r'\b'), Keyword),
+ (words(_mql_builtins.c_types, suffix=r'\b'), Keyword.Type),
+ (words(_mql_builtins.types, suffix=r'\b'), Name.Function),
+ (words(_mql_builtins.constants, suffix=r'\b'), Name.Constant),
+ (words(_mql_builtins.colors, prefix='(clr)?', suffix=r'\b'),
+ Name.Constant),
+ inherit,
+ ],
+ }
+
+
+class ArduinoLexer(CppLexer):
+ """
+ For `Arduino(tm) <https://arduino.cc/>`_ source.
+
+ This is an extension of the CppLexer, as the Arduino® Language is a superset
+ of C++
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Arduino'
+ aliases = ['arduino']
+ filenames = ['*.ino']
+ mimetypes = ['text/x-arduino']
+
+ # Language sketch main structure functions
structure = {'setup', 'loop'}
-
+
# Language operators
operators = {'not', 'or', 'and', 'xor'}
-
+
# Language 'variables'
variables = {
'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL',
@@ -454,7 +454,7 @@ class ArduinoLexer(CppLexer):
'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
'atomic_llong', 'atomic_ullong', 'PROGMEM'}
- # Language shipped functions and class ( )
+ # Language shipped functions and class ( )
functions = {
'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer',
'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall',
@@ -518,15 +518,15 @@ class ArduinoLexer(CppLexer):
'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit',
'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase',
'isHexadecimalDigit'}
-
+
# do not highlight
suppress_highlight = {
'namespace', 'template', 'mutable', 'using', 'asm', 'typeid',
'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept',
'static_assert', 'thread_local', 'restrict'}
- def get_tokens_unprocessed(self, text):
- for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
if value in self.structure:
yield index, Name.Builtin, value
elif value in self.operators:
@@ -537,8 +537,8 @@ class ArduinoLexer(CppLexer):
yield index, Name, value
elif value in self.functions:
yield index, Name.Function, value
- else:
- yield index, token, value
+ else:
+ yield index, token, value
class CharmciLexer(CppLexer):
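
For completeness, a short assumed usage example for the lexers defined in c_like.py; the .ino snippet is made up, and it assumes pinMode/digitalWrite/delay are members of ArduinoLexer.functions (only part of that set is visible in this hunk).

```python
from pygments import highlight
from pygments.lexers.c_like import ArduinoLexer
from pygments.formatters import TerminalFormatter

sketch = (
    'void setup() { pinMode(13, OUTPUT); }\n'
    'void loop()  { digitalWrite(13, HIGH); delay(1000); }\n'
)
# 'setup'/'loop' are emitted as Name.Builtin (the structure set), the library
# calls as Name.Function; everything else falls through to plain CppLexer tokens.
print(highlight(sketch, ArduinoLexer(), TerminalFormatter()))
```
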
diff --git a/contrib/python/Pygments/py2/pygments/lexers/chapel.py b/contrib/python/Pygments/py2/pygments/lexers/chapel.py
index f2cd80dd9c..ead07a9637 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/chapel.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/chapel.py
@@ -1,47 +1,47 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.chapel
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Chapel language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.chapel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Chapel language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['ChapelLexer']
-
-
-class ChapelLexer(RegexLexer):
- """
- For `Chapel <http://chapel.cray.com/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Chapel'
- filenames = ['*.chpl']
- aliases = ['chapel', 'chpl']
- # mimetypes = ['text/x-chapel']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text),
-
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-
- (r'(config|const|in|inout|out|param|ref|type|var)\b',
- Keyword.Declaration),
- (r'(false|nil|true)\b', Keyword.Constant),
- (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b',
- Keyword.Type),
- (words((
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ChapelLexer']
+
+
+class ChapelLexer(RegexLexer):
+ """
+ For `Chapel <http://chapel.cray.com/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Chapel'
+ filenames = ['*.chpl']
+ aliases = ['chapel', 'chpl']
+ # mimetypes = ['text/x-chapel']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
+
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+
+ (r'(config|const|in|inout|out|param|ref|type|var)\b',
+ Keyword.Declaration),
+ (r'(false|nil|true)\b', Keyword.Constant),
+ (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b',
+ Keyword.Type),
+ (words((
'align', 'as', 'atomic',
'begin', 'borrowed', 'break', 'by',
'catch', 'cobegin', 'coforall', 'continue',
@@ -60,53 +60,53 @@ class ChapelLexer(RegexLexer):
'when', 'where', 'while', 'with',
'yield',
'zip'), suffix=r'\b'),
- Keyword),
+ Keyword),
(r'(iter)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
(r'(proc)((?:\s)+)', bygroups(Keyword, Text), 'procname'),
- (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
- 'classname'),
-
- # imaginary integers
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
-
- # reals cannot end with a period due to lexical ambiguity with
- # .. operator. See reference for rationale.
- (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
-
- # integer literals
- # -- binary
- (r'0[bB][01]+', Number.Bin),
- # -- hex
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- octal
- (r'0[oO][0-7]+', Number.Oct),
- # -- decimal
- (r'[0-9]+', Number.Integer),
-
- # strings
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'(\\\\|\\'|[^'])*'", String),
-
- # tokens
- (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
- r'<=>|<~>|\.\.|by|#|\.\.\.|'
- r'&&|\|\||!|&|\||\^|~|<<|>>|'
- r'==|!=|<=|>=|<|>|'
- r'[+\-*/%]|\*\*)', Operator),
- (r'[:;,.?()\[\]{}]', Punctuation),
-
- # identifiers
- (r'[a-zA-Z_][\w$]*', Name.Other),
- ],
- 'classname': [
- (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
- ],
- 'procname': [
+ (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
+ 'classname'),
+
+ # imaginary integers
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+
+ # reals cannot end with a period due to lexical ambiguity with
+ # .. operator. See reference for rationale.
+ (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
+
+ # integer literals
+ # -- binary
+ (r'0[bB][01]+', Number.Bin),
+ # -- hex
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- octal
+ (r'0[oO][0-7]+', Number.Oct),
+ # -- decimal
+ (r'[0-9]+', Number.Integer),
+
+ # strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
+
+ # tokens
+ (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
+ r'<=>|<~>|\.\.|by|#|\.\.\.|'
+ r'&&|\|\||!|&|\||\^|~|<<|>>|'
+ r'==|!=|<=|>=|<|>|'
+ r'[+\-*/%]|\*\*)', Operator),
+ (r'[:;,.?()\[\]{}]', Punctuation),
+
+ # identifiers
+ (r'[a-zA-Z_][\w$]*', Name.Other),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
+ ],
+ 'procname': [
(r'([a-zA-Z_][.\w$]*|\~[a-zA-Z_][.\w$]*|[+*/!~%<>=&^|\-]{1,2})',
Name.Function, '#pop'),
- ],
- }
+ ],
+ }
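
The 'procname' and 'classname' states in ChapelLexer each consume a single identifier and pop straight back to 'root'. A small sketch with assumed input that surfaces exactly those tokens:

```python
from pygments.lexers.chapel import ChapelLexer
from pygments.token import Name

code = 'proc fib(n: int): int { return n; }\nclass Point { var x: real; }\n'
for _, tok, val in ChapelLexer().get_tokens_unprocessed(code):
    if tok in (Name.Function, Name.Class):
        print(tok, repr(val))
# expected: Token.Name.Function 'fib' and Token.Name.Class 'Point'
```
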
diff --git a/contrib/python/Pygments/py2/pygments/lexers/compiled.py b/contrib/python/Pygments/py2/pygments/lexers/compiled.py
index 0dab602ee3..58e3f67f4c 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/compiled.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/compiled.py
@@ -1,34 +1,34 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.compiled
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.compiled
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.jvm import JavaLexer, ScalaLexer
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers.d import DLexer
-from pygments.lexers.objective import ObjectiveCLexer, \
- ObjectiveCppLexer, LogosLexer
-from pygments.lexers.go import GoLexer
-from pygments.lexers.rust import RustLexer
-from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
-from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
-from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
-from pygments.lexers.fortran import FortranLexer
-from pygments.lexers.prolog import PrologLexer
-from pygments.lexers.python import CythonLexer
-from pygments.lexers.graphics import GLShaderLexer
-from pygments.lexers.ml import OcamlLexer
-from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
-from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
-from pygments.lexers.ooc import OocLexer
-from pygments.lexers.felix import FelixLexer
-from pygments.lexers.nimrod import NimrodLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.jvm import JavaLexer, ScalaLexer
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.d import DLexer
+from pygments.lexers.objective import ObjectiveCLexer, \
+ ObjectiveCppLexer, LogosLexer
+from pygments.lexers.go import GoLexer
+from pygments.lexers.rust import RustLexer
+from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
+from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
+from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
+from pygments.lexers.fortran import FortranLexer
+from pygments.lexers.prolog import PrologLexer
+from pygments.lexers.python import CythonLexer
+from pygments.lexers.graphics import GLShaderLexer
+from pygments.lexers.ml import OcamlLexer
+from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
+from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
+from pygments.lexers.ooc import OocLexer
+from pygments.lexers.felix import FelixLexer
+from pygments.lexers.nimrod import NimrodLexer
from pygments.lexers.crystal import CrystalLexer
-
-__all__ = []
+
+__all__ = []
diff --git a/contrib/python/Pygments/py2/pygments/lexers/configs.py b/contrib/python/Pygments/py2/pygments/lexers/configs.py
index 0911b6e24c..69de6c2145 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/configs.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/configs.py
@@ -1,553 +1,553 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.configs
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for configuration file formats.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.configs
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for configuration file formats.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, default, words, bygroups, include, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace, Literal
-from pygments.lexers.shell import BashLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, default, words, bygroups, include, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace, Literal
+from pygments.lexers.shell import BashLexer
from pygments.lexers.data import JsonLexer
-
-__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
- 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
- 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
- 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
+
+__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
+ 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
+ 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
+ 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer']
-
-
-class IniLexer(RegexLexer):
- """
- Lexer for configuration files in INI style.
- """
-
- name = 'INI'
- aliases = ['ini', 'cfg', 'dosini']
- filenames = ['*.ini', '*.cfg', '*.inf']
- mimetypes = ['text/x-ini', 'text/inf']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'[;#].*', Comment.Single),
- (r'\[.*?\]$', Keyword),
- (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
+
+
+class IniLexer(RegexLexer):
+ """
+ Lexer for configuration files in INI style.
+ """
+
+ name = 'INI'
+ aliases = ['ini', 'cfg', 'dosini']
+ filenames = ['*.ini', '*.cfg', '*.inf']
+ mimetypes = ['text/x-ini', 'text/inf']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'[;#].*', Comment.Single),
+ (r'\[.*?\]$', Keyword),
+ (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
bygroups(Name.Attribute, Text, Operator, Text, String)),
# standalone option, supported by some INI parsers
(r'(.+?)$', Name.Attribute),
],
- }
-
- def analyse_text(text):
- npos = text.find('\n')
- if npos < 3:
- return False
- return text[0] == '[' and text[npos-1] == ']'
-
-
-class RegeditLexer(RegexLexer):
- """
- Lexer for `Windows Registry
- <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
- by regedit.
-
- .. versionadded:: 1.6
- """
-
- name = 'reg'
- aliases = ['registry']
- filenames = ['*.reg']
- mimetypes = ['text/x-windows-registry']
-
- tokens = {
- 'root': [
- (r'Windows Registry Editor.*', Text),
- (r'\s+', Text),
- (r'[;#].*', Comment.Single),
- (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
- bygroups(Keyword, Operator, Name.Builtin, Keyword)),
- # String keys, which obey somewhat normal escaping
- (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Text, Operator, Text),
- 'value'),
- # Bare keys (includes @)
- (r'(.*?)([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Text, Operator, Text),
- 'value'),
- ],
- 'value': [
- (r'-', Operator, '#pop'), # delete value
- (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
- bygroups(Name.Variable, Punctuation, Number), '#pop'),
- # As far as I know, .reg files do not support line continuation.
- (r'.+', String, '#pop'),
- default('#pop'),
- ]
- }
-
- def analyse_text(text):
- return text.startswith('Windows Registry Editor')
-
-
-class PropertiesLexer(RegexLexer):
- """
- Lexer for configuration files in Java's properties format.
-
+ }
+
+ def analyse_text(text):
+ npos = text.find('\n')
+ if npos < 3:
+ return False
+ return text[0] == '[' and text[npos-1] == ']'
+
+
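
IniLexer.analyse_text() above is a boolean heuristic (Pygments maps True/False to 1.0/0.0): the first line must be at least a few characters long and read as a [section] header. Illustrative, made-up inputs:

```python
from pygments.lexers.configs import IniLexer

print(IniLexer.analyse_text('[core]\neditor = vim\n'))   # 1.0: first line is a [section]
print(IniLexer.analyse_text('editor = vim\n[core]\n'))   # 0.0: first line is a key, not a section
```
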
+class RegeditLexer(RegexLexer):
+ """
+ Lexer for `Windows Registry
+ <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
+ by regedit.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'reg'
+ aliases = ['registry']
+ filenames = ['*.reg']
+ mimetypes = ['text/x-windows-registry']
+
+ tokens = {
+ 'root': [
+ (r'Windows Registry Editor.*', Text),
+ (r'\s+', Text),
+ (r'[;#].*', Comment.Single),
+ (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
+ bygroups(Keyword, Operator, Name.Builtin, Keyword)),
+ # String keys, which obey somewhat normal escaping
+ (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
+ bygroups(Name.Attribute, Text, Operator, Text),
+ 'value'),
+ # Bare keys (includes @)
+ (r'(.*?)([ \t]*)(=)([ \t]*)',
+ bygroups(Name.Attribute, Text, Operator, Text),
+ 'value'),
+ ],
+ 'value': [
+ (r'-', Operator, '#pop'), # delete value
+ (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
+ bygroups(Name.Variable, Punctuation, Number), '#pop'),
+ # As far as I know, .reg files do not support line continuation.
+ (r'.+', String, '#pop'),
+ default('#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith('Windows Registry Editor')
+
+
+class PropertiesLexer(RegexLexer):
+ """
+ Lexer for configuration files in Java's properties format.
+
Note: trailing whitespace counts as part of the value as per spec
- .. versionadded:: 1.4
- """
-
- name = 'Properties'
- aliases = ['properties', 'jproperties']
- filenames = ['*.properties']
- mimetypes = ['text/x-java-properties']
-
- tokens = {
- 'root': [
+ .. versionadded:: 1.4
+ """
+
+ name = 'Properties'
+ aliases = ['properties', 'jproperties']
+ filenames = ['*.properties']
+ mimetypes = ['text/x-java-properties']
+
+ tokens = {
+ 'root': [
(r'^(\w+)([ \t])(\w+\s*)$', bygroups(Name.Attribute, Text, String)),
(r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
(r'(^ *)([#!].*)', bygroups(Text, Comment)),
# More controversial comments
(r'(^ *)((?:;|//).*)', bygroups(Text, Comment)),
- (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
- bygroups(Name.Attribute, Text, Operator, Text, String)),
+ (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
+ bygroups(Name.Attribute, Text, Operator, Text, String)),
(r'\s', Text),
- ],
- }
-
-
-def _rx_indent(level):
- # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
- # Edit this if you are in an environment where KconfigLexer gets expanded
- # input (tabs expanded to spaces) and the expansion tab width is != 8,
- # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
- # Value range here is 2 <= {tab_width} <= 8.
- tab_width = 8
- # Regex matching a given indentation {level}, assuming that indentation is
- # a multiple of {tab_width}. In other cases there might be problems.
- if tab_width == 2:
- space_repeat = '+'
- else:
- space_repeat = '{1,%d}' % (tab_width - 1)
- if level == 1:
- level_repeat = ''
- else:
- level_repeat = '{%s}' % level
- return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
-
-
-class KconfigLexer(RegexLexer):
- """
- For Linux-style Kconfig files.
-
- .. versionadded:: 1.6
- """
-
- name = 'Kconfig'
- aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
- # Adjust this if new kconfig file names appear in your environment
- filenames = ['Kconfig', '*Config.in*', 'external.in*',
- 'standard-modules.in']
- mimetypes = ['text/x-kconfig']
- # No re.MULTILINE, indentation-aware help text needs line-by-line handling
- flags = 0
-
- def call_indent(level):
- # If indentation >= {level} is detected, enter state 'indent{level}'
- return (_rx_indent(level), String.Doc, 'indent%s' % level)
-
- def do_indent(level):
- # Print paragraphs of indentation level >= {level} as String.Doc,
- # ignoring blank lines. Then return to 'root' state.
- return [
- (_rx_indent(level), String.Doc),
- (r'\s*\n', Text),
- default('#pop:2')
- ]
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?\n', Comment.Single),
- (words((
- 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
- 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
- 'source', 'prompt', 'select', 'depends on', 'default',
- 'range', 'option'), suffix=r'\b'),
- Keyword),
- (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
- (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
- Name.Builtin),
- (r'[!=&|]', Operator),
- (r'[()]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r'\S+', Text),
- ],
- # Help text is indented, multi-line and ends when a lower indentation
- # level is detected.
- 'help': [
- # Skip blank lines after help token, if any
- (r'\s*\n', Text),
- # Determine the first help line's indentation level heuristically(!).
- # Attention: this is not perfect, but works for 99% of "normal"
- # indentation schemes up to a max. indentation level of 7.
- call_indent(7),
- call_indent(6),
- call_indent(5),
- call_indent(4),
- call_indent(3),
- call_indent(2),
- call_indent(1),
- default('#pop'), # for incomplete help sections without text
- ],
- # Handle text for indentation levels 7 to 1
- 'indent7': do_indent(7),
- 'indent6': do_indent(6),
- 'indent5': do_indent(5),
- 'indent4': do_indent(4),
- 'indent3': do_indent(3),
- 'indent2': do_indent(2),
- 'indent1': do_indent(1),
- }
-
-
-class Cfengine3Lexer(RegexLexer):
- """
- Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
-
- .. versionadded:: 1.5
- """
-
- name = 'CFEngine3'
- aliases = ['cfengine3', 'cf3']
- filenames = ['*.cf']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'#.*?\n', Comment),
- (r'(body)(\s+)(\S+)(\s+)(control)',
- bygroups(Keyword, Text, Keyword, Text, Keyword)),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
- bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
- 'arglist'),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
- bygroups(Punctuation, Name.Variable, Punctuation,
- Text, Keyword.Type, Text, Operator, Text)),
- (r'(\S+)(\s*)(=>)(\s*)',
- bygroups(Keyword.Reserved, Text, Operator, Text)),
- (r'"', String, 'string'),
- (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
- (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
- (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
- (r'@[{(][^)}]+[})]', Name.Variable),
- (r'[(){},;]', Punctuation),
- (r'=>', Operator),
- (r'->', Operator),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\w+', Name.Function),
- (r'\s+', Text),
- ],
- 'string': [
- (r'\$[{(]', String.Interpol, 'interpol'),
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'\n', String),
- (r'.', String),
- ],
- 'interpol': [
- (r'\$[{(]', String.Interpol, '#push'),
- (r'[})]', String.Interpol, '#pop'),
- (r'[^${()}]+', String.Interpol),
- ],
- 'arglist': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'\w+', Name.Variable),
- (r'\s+', Text),
- ],
- }
-
-
-class ApacheConfLexer(RegexLexer):
- """
- Lexer for configuration files following the Apache config file
- format.
-
- .. versionadded:: 0.6
- """
-
- name = 'ApacheConf'
- aliases = ['apacheconf', 'aconf', 'apache']
- filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
- mimetypes = ['text/x-apacheconf']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Text),
+ ],
+ }
+
+
+def _rx_indent(level):
+ # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
+ # Edit this if you are in an environment where KconfigLexer gets expanded
+ # input (tabs expanded to spaces) and the expansion tab width is != 8,
+ # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
+ # Value range here is 2 <= {tab_width} <= 8.
+ tab_width = 8
+ # Regex matching a given indentation {level}, assuming that indentation is
+ # a multiple of {tab_width}. In other cases there might be problems.
+ if tab_width == 2:
+ space_repeat = '+'
+ else:
+ space_repeat = '{1,%d}' % (tab_width - 1)
+ if level == 1:
+ level_repeat = ''
+ else:
+ level_repeat = '{%s}' % level
+ return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
+
+
+class KconfigLexer(RegexLexer):
+ """
+ For Linux-style Kconfig files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Kconfig'
+ aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
+ # Adjust this if new kconfig file names appear in your environment
+ filenames = ['Kconfig', '*Config.in*', 'external.in*',
+ 'standard-modules.in']
+ mimetypes = ['text/x-kconfig']
+ # No re.MULTILINE, indentation-aware help text needs line-by-line handling
+ flags = 0
+
+ def call_indent(level):
+ # If indentation >= {level} is detected, enter state 'indent{level}'
+ return (_rx_indent(level), String.Doc, 'indent%s' % level)
+
+ def do_indent(level):
+ # Print paragraphs of indentation level >= {level} as String.Doc,
+ # ignoring blank lines. Then return to 'root' state.
+ return [
+ (_rx_indent(level), String.Doc),
+ (r'\s*\n', Text),
+ default('#pop:2')
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?\n', Comment.Single),
+ (words((
+ 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
+ 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
+ 'source', 'prompt', 'select', 'depends on', 'default',
+ 'range', 'option'), suffix=r'\b'),
+ Keyword),
+ (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
+ (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
+ Name.Builtin),
+ (r'[!=&|]', Operator),
+ (r'[()]', Punctuation),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Double),
+ (r'\S+', Text),
+ ],
+ # Help text is indented, multi-line and ends when a lower indentation
+ # level is detected.
+ 'help': [
+ # Skip blank lines after help token, if any
+ (r'\s*\n', Text),
+ # Determine the first help line's indentation level heuristically(!).
+ # Attention: this is not perfect, but works for 99% of "normal"
+ # indentation schemes up to a max. indentation level of 7.
+ call_indent(7),
+ call_indent(6),
+ call_indent(5),
+ call_indent(4),
+ call_indent(3),
+ call_indent(2),
+ call_indent(1),
+ default('#pop'), # for incomplete help sections without text
+ ],
+ # Handle text for indentation levels 7 to 1
+ 'indent7': do_indent(7),
+ 'indent6': do_indent(6),
+ 'indent5': do_indent(5),
+ 'indent4': do_indent(4),
+ 'indent3': do_indent(3),
+ 'indent2': do_indent(2),
+ 'indent1': do_indent(1),
+ }
+
+
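
_rx_indent() above builds one regex per indentation level, treating a tab or eight spaces as one unit. A quick standalone check of what it generates with the default tab_width of 8 (importing the private helper directly is purely for illustration):

```python
from pygments.lexers.configs import _rx_indent

print(_rx_indent(1))   # (?:\t| {1,7}\t| {8}).*\n
print(_rx_indent(3))   # (?:\t| {1,7}\t| {8}){3}.*\n  -- three indentation units
```
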
+class Cfengine3Lexer(RegexLexer):
+ """
+ Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'CFEngine3'
+ aliases = ['cfengine3', 'cf3']
+ filenames = ['*.cf']
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'#.*?\n', Comment),
+ (r'(body)(\s+)(\S+)(\s+)(control)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword)),
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
+ 'arglist'),
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
+ bygroups(Punctuation, Name.Variable, Punctuation,
+ Text, Keyword.Type, Text, Operator, Text)),
+ (r'(\S+)(\s*)(=>)(\s*)',
+ bygroups(Keyword.Reserved, Text, Operator, Text)),
+ (r'"', String, 'string'),
+ (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
+ (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
+ (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
+ (r'@[{(][^)}]+[})]', Name.Variable),
+ (r'[(){},;]', Punctuation),
+ (r'=>', Operator),
+ (r'->', Operator),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\w+', Name.Function),
+ (r'\s+', Text),
+ ],
+ 'string': [
+ (r'\$[{(]', String.Interpol, 'interpol'),
+ (r'\\.', String.Escape),
+ (r'"', String, '#pop'),
+ (r'\n', String),
+ (r'.', String),
+ ],
+ 'interpol': [
+ (r'\$[{(]', String.Interpol, '#push'),
+ (r'[})]', String.Interpol, '#pop'),
+ (r'[^${()}]+', String.Interpol),
+ ],
+ 'arglist': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'\w+', Name.Variable),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class ApacheConfLexer(RegexLexer):
+ """
+ Lexer for configuration files following the Apache config file
+ format.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'ApacheConf'
+ aliases = ['apacheconf', 'aconf', 'apache']
+ filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
+ mimetypes = ['text/x-apacheconf']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'#(.*\\\n)+.*$|(#.*?)$', Comment),
(r'(<[^\s>]+)(?:(\s+)(.*))?(>)',
- bygroups(Name.Tag, Text, String, Name.Tag)),
- (r'([a-z]\w*)(\s+)',
- bygroups(Name.Builtin, Text), 'value'),
- (r'\.+', Text),
- ],
- 'value': [
- (r'\\\n', Text),
- (r'$', Text, '#pop'),
- (r'\\', Text),
- (r'[^\S\n]+', Text),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'\d+', Number),
- (r'/([a-z0-9][\w./-]+)', String.Other),
- (r'(on|off|none|any|all|double|email|dns|min|minimal|'
- r'os|productonly|full|emerg|alert|crit|error|warn|'
- r'notice|info|debug|registry|script|inetd|standalone|'
- r'user|group)\b', Keyword),
+ bygroups(Name.Tag, Text, String, Name.Tag)),
+ (r'([a-z]\w*)(\s+)',
+ bygroups(Name.Builtin, Text), 'value'),
+ (r'\.+', Text),
+ ],
+ 'value': [
+ (r'\\\n', Text),
+ (r'$', Text, '#pop'),
+ (r'\\', Text),
+ (r'[^\S\n]+', Text),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'\d+', Number),
+ (r'/([a-z0-9][\w./-]+)', String.Other),
+ (r'(on|off|none|any|all|double|email|dns|min|minimal|'
+ r'os|productonly|full|emerg|alert|crit|error|warn|'
+ r'notice|info|debug|registry|script|inetd|standalone|'
+ r'user|group)\b', Keyword),
(r'"([^"\\]*(?:\\(.|[\n])[^"\\]*)*)"', String.Double),
- (r'[^\s"\\]+', Text)
- ],
- }
-
-
-class SquidConfLexer(RegexLexer):
- """
- Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
-
- .. versionadded:: 0.9
- """
-
- name = 'SquidConf'
- aliases = ['squidconf', 'squid.conf', 'squid']
- filenames = ['squid.conf']
- mimetypes = ['text/x-squidconf']
- flags = re.IGNORECASE
-
- keywords = (
- "access_log", "acl", "always_direct", "announce_host",
- "announce_period", "announce_port", "announce_to", "anonymize_headers",
- "append_domain", "as_whois_server", "auth_param_basic",
- "authenticate_children", "authenticate_program", "authenticate_ttl",
- "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
- "cache_dir", "cache_dns_program", "cache_effective_group",
- "cache_effective_user", "cache_host", "cache_host_acl",
- "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
- "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
- "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
- "cache_stoplist_pattern", "cache_store_log", "cache_swap",
- "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
- "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
- "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
- "delay_initial_bucket_level", "delay_parameters", "delay_pools",
- "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
- "dns_testnames", "emulate_httpd_log", "err_html_text",
- "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
- "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
- "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
- "header_replace", "hierarchy_stoplist", "high_response_time_warning",
- "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
- "http_anonymizer", "httpd_accel", "httpd_accel_host",
- "httpd_accel_port", "httpd_accel_uses_host_header",
- "httpd_accel_with_proxy", "http_port", "http_reply_access",
- "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
- "ident_lookup", "ident_lookup_access", "ident_timeout",
- "incoming_http_average", "incoming_icp_average", "inside_firewall",
- "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
- "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
- "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
- "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
- "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
- "memory_pools_limit", "memory_replacement_policy", "mime_table",
- "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
- "minimum_object_size", "minimum_retry_timeout", "miss_access",
- "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
- "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
- "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
- "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
- "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
- "quick_abort", "quick_abort_max", "quick_abort_min",
- "quick_abort_pct", "range_offset_limit", "read_timeout",
- "redirect_children", "redirect_program",
- "redirect_rewrites_host_header", "reference_age",
- "refresh_pattern", "reload_into_ims", "request_body_max_size",
- "request_size", "request_timeout", "shutdown_lifetime",
- "single_parent_bypass", "siteselect_timeout", "snmp_access",
- "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
- "store_avg_object_size", "store_objects_per_bucket",
- "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
- "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
- "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
- "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
- "unlinkd_program", "uri_whitespace", "useragent_log",
- "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
- )
-
- opts = (
- "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
- "multicast-responder", "on", "off", "all", "deny", "allow", "via",
- "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
- "credentialsttl", "none", "disable", "offline_toggle", "diskd",
- )
-
- actions = (
- "shutdown", "info", "parameter", "server_list", "client_list",
- r'squid.conf',
- )
-
- actions_stats = (
- "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
- "redirector", "io", "reply_headers", "filedescriptors", "netdb",
- )
-
- actions_log = ("status", "enable", "disable", "clear")
-
- acls = (
- "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
- "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
- "dst", "time", "dstdomain", "ident", "snmp_community",
- )
-
- ip_re = (
- r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
- r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
- r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
- r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
- r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
- r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
- r'[1-9]?\d)){3}))'
- )
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#', Comment, 'comment'),
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
- # Actions
- (words(actions, prefix=r'\b', suffix=r'\b'), String),
- (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
- (words(actions_log, prefix=r'log/', suffix=r'='), String),
- (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
- (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
- (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
- (r'\S+', Text),
- ],
- 'comment': [
- (r'\s*TAG:.*', String.Escape, '#pop'),
- (r'.+', Comment, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class NginxConfLexer(RegexLexer):
- """
- Lexer for `Nginx <http://nginx.net/>`_ configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Nginx configuration file'
- aliases = ['nginx']
+ (r'[^\s"\\]+', Text)
+ ],
+ }
+
+
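A minimal usage sketch for the Apache config lexer above (the directives below are invented for illustration; only the 'apacheconf' alias and the token rules come from the class itself). A directive name is matched by the Name.Builtin rule and pushes the 'value' state, which runs to the end of the line.

from pygments import lex
from pygments.lexers import get_lexer_by_name

# Hypothetical .htaccess fragment; the directive names are only examples.
sample = 'RewriteEngine on\nErrorDocument 404 /errors/missing.html\n'
for token, text in lex(sample, get_lexer_by_name('apacheconf')):
    print(token, repr(text))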
+class SquidConfLexer(RegexLexer):
+ """
+ Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'SquidConf'
+ aliases = ['squidconf', 'squid.conf', 'squid']
+ filenames = ['squid.conf']
+ mimetypes = ['text/x-squidconf']
+ flags = re.IGNORECASE
+
+ keywords = (
+ "access_log", "acl", "always_direct", "announce_host",
+ "announce_period", "announce_port", "announce_to", "anonymize_headers",
+ "append_domain", "as_whois_server", "auth_param_basic",
+ "authenticate_children", "authenticate_program", "authenticate_ttl",
+ "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
+ "cache_dir", "cache_dns_program", "cache_effective_group",
+ "cache_effective_user", "cache_host", "cache_host_acl",
+ "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
+ "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
+ "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
+ "cache_stoplist_pattern", "cache_store_log", "cache_swap",
+ "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
+ "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
+ "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
+ "delay_initial_bucket_level", "delay_parameters", "delay_pools",
+ "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
+ "dns_testnames", "emulate_httpd_log", "err_html_text",
+ "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
+ "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
+ "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
+ "header_replace", "hierarchy_stoplist", "high_response_time_warning",
+ "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
+ "http_anonymizer", "httpd_accel", "httpd_accel_host",
+ "httpd_accel_port", "httpd_accel_uses_host_header",
+ "httpd_accel_with_proxy", "http_port", "http_reply_access",
+ "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
+ "ident_lookup", "ident_lookup_access", "ident_timeout",
+ "incoming_http_average", "incoming_icp_average", "inside_firewall",
+ "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
+ "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
+ "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
+ "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
+ "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
+ "memory_pools_limit", "memory_replacement_policy", "mime_table",
+ "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
+ "minimum_object_size", "minimum_retry_timeout", "miss_access",
+ "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
+ "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
+ "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
+ "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
+ "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
+ "quick_abort", "quick_abort_max", "quick_abort_min",
+ "quick_abort_pct", "range_offset_limit", "read_timeout",
+ "redirect_children", "redirect_program",
+ "redirect_rewrites_host_header", "reference_age",
+ "refresh_pattern", "reload_into_ims", "request_body_max_size",
+ "request_size", "request_timeout", "shutdown_lifetime",
+ "single_parent_bypass", "siteselect_timeout", "snmp_access",
+ "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
+ "store_avg_object_size", "store_objects_per_bucket",
+ "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
+ "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
+ "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
+ "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
+ "unlinkd_program", "uri_whitespace", "useragent_log",
+ "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
+ )
+
+ opts = (
+ "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
+ "multicast-responder", "on", "off", "all", "deny", "allow", "via",
+ "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
+ "credentialsttl", "none", "disable", "offline_toggle", "diskd",
+ )
+
+ actions = (
+ "shutdown", "info", "parameter", "server_list", "client_list",
+ r'squid.conf',
+ )
+
+ actions_stats = (
+ "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
+ "redirector", "io", "reply_headers", "filedescriptors", "netdb",
+ )
+
+ actions_log = ("status", "enable", "disable", "clear")
+
+ acls = (
+ "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
+ "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
+ "dst", "time", "dstdomain", "ident", "snmp_community",
+ )
+
+ ip_re = (
+ r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
+ r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
+ r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
+ r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
+ r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
+ r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
+ r'[1-9]?\d)){3}))'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'#', Comment, 'comment'),
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
+ # Actions
+ (words(actions, prefix=r'\b', suffix=r'\b'), String),
+ (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
+ (words(actions_log, prefix=r'log/', suffix=r'='), String),
+ (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
+ (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
+ (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
+ (r'\S+', Text),
+ ],
+ 'comment': [
+ (r'\s*TAG:.*', String.Escape, '#pop'),
+ (r'.+', Comment, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
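The keyword, option and ACL tuples above are compiled into single alternation regexes by the words() helper, and ip_re covers dotted-quad and IPv6 literals with an optional /prefix. A short sketch (the squid.conf lines are invented):

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = 'acl localnet src 192.168.0.0/16\nhttp_access allow localnet\n'
# 'acl' and 'http_access' sit in `keywords`, 'src' in `acls`, 'allow' in
# `opts`; the CIDR literal should be picked up by the ip_re rule.
for token, text in lex(sample, get_lexer_by_name('squidconf')):
    print(token, repr(text))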
+class NginxConfLexer(RegexLexer):
+ """
+ Lexer for `Nginx <http://nginx.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Nginx configuration file'
+ aliases = ['nginx']
filenames = ['nginx.conf']
- mimetypes = ['text/x-nginx-conf']
-
- tokens = {
- 'root': [
- (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
- (r'[^\s;#]+', Keyword, 'stmt'),
- include('base'),
- ],
- 'block': [
- (r'\}', Punctuation, '#pop:2'),
- (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
- include('base'),
- ],
- 'stmt': [
- (r'\{', Punctuation, 'block'),
- (r';', Punctuation, '#pop'),
- include('base'),
- ],
- 'base': [
- (r'#.*\n', Comment.Single),
- (r'on|off', Name.Constant),
- (r'\$[^\s;#()]+', Name.Variable),
- (r'([a-z0-9.-]+)(:)([0-9]+)',
- bygroups(Name, Punctuation, Number.Integer)),
- (r'[a-z-]+/[a-z-+]+', String), # mimetype
- # (r'[a-zA-Z._-]+', Keyword),
- (r'[0-9]+[km]?\b', Number.Integer),
- (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
- (r'[:=~]', Punctuation),
- (r'[^\s;#{}$]+', String), # catch all
- (r'/[^\s;#]*', Name), # pathname
- (r'\s+', Text),
- (r'[$;]', Text), # leftover characters
- ],
- }
-
-
-class LighttpdConfLexer(RegexLexer):
- """
- Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Lighttpd configuration file'
- aliases = ['lighty', 'lighttpd']
- filenames = []
- mimetypes = ['text/x-lighttpd-conf']
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment.Single),
- (r'/\S*', Name), # pathname
- (r'[a-zA-Z._-]+', Keyword),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'[0-9]+', Number),
- (r'=>|=~|\+=|==|=|\+', Operator),
- (r'\$[A-Z]+', Name.Builtin),
- (r'[(){}\[\],]', Punctuation),
- (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
- (r'\s+', Text),
- ],
-
- }
-
-
-class DockerLexer(RegexLexer):
- """
- Lexer for `Docker <http://docker.io>`_ configuration files.
-
- .. versionadded:: 2.0
- """
- name = 'Docker'
- aliases = ['docker', 'dockerfile']
- filenames = ['Dockerfile', '*.docker']
- mimetypes = ['text/x-dockerfile-config']
-
+ mimetypes = ['text/x-nginx-conf']
+
+ tokens = {
+ 'root': [
+ (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
+ (r'[^\s;#]+', Keyword, 'stmt'),
+ include('base'),
+ ],
+ 'block': [
+ (r'\}', Punctuation, '#pop:2'),
+ (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
+ include('base'),
+ ],
+ 'stmt': [
+ (r'\{', Punctuation, 'block'),
+ (r';', Punctuation, '#pop'),
+ include('base'),
+ ],
+ 'base': [
+ (r'#.*\n', Comment.Single),
+ (r'on|off', Name.Constant),
+ (r'\$[^\s;#()]+', Name.Variable),
+ (r'([a-z0-9.-]+)(:)([0-9]+)',
+ bygroups(Name, Punctuation, Number.Integer)),
+ (r'[a-z-]+/[a-z-+]+', String), # mimetype
+ # (r'[a-zA-Z._-]+', Keyword),
+ (r'[0-9]+[km]?\b', Number.Integer),
+ (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
+ (r'[:=~]', Punctuation),
+ (r'[^\s;#{}$]+', String), # catch all
+ (r'/[^\s;#]*', Name), # pathname
+ (r'\s+', Text),
+ (r'[$;]', Text), # leftover characters
+ ],
+ }
+
+
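The 'root'/'stmt'/'block' states above implement nginx's nesting: a directive name pushes 'stmt', an opening brace pushes 'block', ';' pops back out of 'stmt', and the closing brace's '#pop:2' leaves both 'block' and the 'stmt' that opened it. A sketch with an invented server block:

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = 'server {\n    listen 80;\n    server_name example.com;\n}\n'
# 'server' enters 'stmt', '{' enters 'block'; inner directives come out as
# Keyword.Namespace, each ';' pops its own 'stmt', and '}' pops two states.
for token, text in lex(sample, get_lexer_by_name('nginx')):
    print(token, repr(text))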
+class LighttpdConfLexer(RegexLexer):
+ """
+ Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Lighttpd configuration file'
+ aliases = ['lighty', 'lighttpd']
+ filenames = []
+ mimetypes = ['text/x-lighttpd-conf']
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment.Single),
+ (r'/\S*', Name), # pathname
+ (r'[a-zA-Z._-]+', Keyword),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'[0-9]+', Number),
+ (r'=>|=~|\+=|==|=|\+', Operator),
+ (r'\$[A-Z]+', Name.Builtin),
+ (r'[(){}\[\],]', Punctuation),
+ (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+ (r'\s+', Text),
+ ],
+
+ }
+
+
+class DockerLexer(RegexLexer):
+ """
+ Lexer for `Docker <http://docker.io>`_ configuration files.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Docker'
+ aliases = ['docker', 'dockerfile']
+ filenames = ['Dockerfile', '*.docker']
+ mimetypes = ['text/x-dockerfile-config']
+
_keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
_bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
_lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'#.*', Comment),
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'#.*', Comment),
(r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
bygroups(Keyword, Text, String, Text, Keyword, Text, String)),
(r'(ONBUILD)(%s)' % (_lb,), bygroups(Keyword, using(BashLexer))),
@@ -559,37 +559,37 @@ class DockerLexer(RegexLexer):
bygroups(Keyword, using(BashLexer))),
(r'(%s|VOLUME)\b(.*)' % (_keywords), bygroups(Keyword, String)),
(r'(%s)' % (_bash_keywords,), Keyword),
- (r'(.*\\\n)*.+', using(BashLexer)),
+ (r'(.*\\\n)*.+', using(BashLexer)),
]
- }
-
-
-class TerraformLexer(RegexLexer):
- """
- Lexer for `terraform .tf files <https://www.terraform.io/>`_.
-
- .. versionadded:: 2.1
- """
-
- name = 'Terraform'
- aliases = ['terraform', 'tf']
- filenames = ['*.tf']
- mimetypes = ['application/x-tf', 'application/x-terraform']
-
+ }
+
+
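The Docker lexer above mostly classifies the instruction keyword and hands the rest of the line to BashLexer via using(), so shell syntax inside RUN/CMD lines is highlighted as shell. A sketch with an invented Dockerfile:

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = 'FROM python:3\nRUN pip install pygments\n'
# The FROM line is handled by its dedicated bygroups rule; after the RUN
# keyword, the remainder falls through to the using(BashLexer) catch-all.
for token, text in lex(sample, get_lexer_by_name('docker')):
    print(token, repr(text))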
+class TerraformLexer(RegexLexer):
+ """
+ Lexer for `terraform .tf files <https://www.terraform.io/>`_.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Terraform'
+ aliases = ['terraform', 'tf']
+ filenames = ['*.tf']
+ mimetypes = ['application/x-tf', 'application/x-terraform']
+
embedded_keywords = ('ingress', 'egress', 'listener', 'default',
'connection', 'alias', 'terraform', 'tags', 'vars',
'config', 'lifecycle', 'timeouts')
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
include('string'),
include('punctuation'),
include('curly'),
include('basic'),
include('whitespace'),
(r'[0-9]+', Number),
- ],
- 'basic': [
+ ],
+ 'basic': [
(words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
(r'\s*/\*', Comment.Multiline, 'comment'),
(r'\s*#.*\n', Comment.Single),
@@ -600,250 +600,250 @@ class TerraformLexer(RegexLexer):
(words(embedded_keywords, prefix=r'\b', suffix=r'\b'),
Keyword.Declaration),
(r'\$\{', String.Interpol, 'var_builtin'),
- ],
- 'function': [
+ ],
+ 'function': [
(r'(\s+)(".*")(\s+)', bygroups(Text, String, Text)),
include('punctuation'),
include('curly'),
- ],
- 'var_builtin': [
- (r'\$\{', String.Interpol, '#push'),
- (words(('concat', 'file', 'join', 'lookup', 'element'),
- prefix=r'\b', suffix=r'\b'), Name.Builtin),
- include('string'),
- include('punctuation'),
- (r'\s+', Text),
- (r'\}', String.Interpol, '#pop'),
- ],
- 'string': [
- (r'(".*")', bygroups(String.Double)),
- ],
- 'punctuation': [
+ ],
+ 'var_builtin': [
+ (r'\$\{', String.Interpol, '#push'),
+ (words(('concat', 'file', 'join', 'lookup', 'element'),
+ prefix=r'\b', suffix=r'\b'), Name.Builtin),
+ include('string'),
+ include('punctuation'),
+ (r'\s+', Text),
+ (r'\}', String.Interpol, '#pop'),
+ ],
+ 'string': [
+ (r'(".*")', bygroups(String.Double)),
+ ],
+ 'punctuation': [
(r'[\[\](),.]', Punctuation),
- ],
- # Keep this separate from punctuation - we sometimes want to use different
- # Tokens for { }
- 'curly': [
- (r'\{', Text.Punctuation),
- (r'\}', Text.Punctuation),
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text),
- ],
- }
-
-
-class TermcapLexer(RegexLexer):
- """
- Lexer for termcap database source.
-
- This is very simple and minimal.
-
- .. versionadded:: 2.1
- """
- name = 'Termcap'
+ ],
+ # Keep this separate from punctuation - we sometimes want to use different
+ # Tokens for { }
+ 'curly': [
+ (r'\{', Text.Punctuation),
+ (r'\}', Text.Punctuation),
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
+ ],
+ }
+
+
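In the Terraform lexer above, block names listed in embedded_keywords are tagged Keyword.Declaration and braces go through the dedicated 'curly' state rather than 'punctuation'. A sketch with an invented .tf fragment:

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = 'resource "aws_instance" "web" {\n  tags {\n    Name = "web"\n  }\n}\n'
# 'tags' appears in embedded_keywords above, so it should come out as
# Keyword.Declaration; '{' and '}' are tokenized by the 'curly' state.
for token, text in lex(sample, get_lexer_by_name('terraform')):
    print(token, repr(text))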
+class TermcapLexer(RegexLexer):
+ """
+ Lexer for termcap database source.
+
+ This is very simple and minimal.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Termcap'
aliases = ['termcap']
filenames = ['termcap', 'termcap.src']
- mimetypes = []
-
- # NOTE:
- # * multiline with trailing backslash
- # * separator is ':'
- # * to embed colon as data, we must use \072
- # * space after separator is not allowed (maybe)
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
+ mimetypes = []
+
+ # NOTE:
+ # * multiline with trailing backslash
+ # * separator is ':'
+ # * to embed colon as data, we must use \072
+ # * space after separator is not allowed (maybe)
+ tokens = {
+ 'root': [
+ (r'^#.*$', Comment),
(r'^[^\s#:|]+', Name.Tag, 'names'),
- ],
- 'names': [
- (r'\n', Text, '#pop'),
- (r':', Punctuation, 'defs'),
- (r'\|', Punctuation),
+ ],
+ 'names': [
+ (r'\n', Text, '#pop'),
+ (r':', Punctuation, 'defs'),
+ (r'\|', Punctuation),
(r'[^:|]+', Name.Attribute),
- ],
- 'defs': [
- (r'\\\n[ \t]*', Text),
- (r'\n[ \t]*', Text, '#pop:2'),
- (r'(#)([0-9]+)', bygroups(Operator, Number)),
- (r'=', Operator, 'data'),
- (r':', Punctuation),
- (r'[^\s:=#]+', Name.Class),
- ],
- 'data': [
- (r'\\072', Literal),
- (r':', Punctuation, '#pop'),
- (r'[^:\\]+', Literal), # for performance
- (r'.', Literal),
- ],
- }
-
-
-class TerminfoLexer(RegexLexer):
- """
- Lexer for terminfo database source.
-
- This is very simple and minimal.
-
- .. versionadded:: 2.1
- """
- name = 'Terminfo'
+ ],
+ 'defs': [
+ (r'\\\n[ \t]*', Text),
+ (r'\n[ \t]*', Text, '#pop:2'),
+ (r'(#)([0-9]+)', bygroups(Operator, Number)),
+ (r'=', Operator, 'data'),
+ (r':', Punctuation),
+ (r'[^\s:=#]+', Name.Class),
+ ],
+ 'data': [
+ (r'\\072', Literal),
+ (r':', Punctuation, '#pop'),
+ (r'[^:\\]+', Literal), # for performance
+ (r'.', Literal),
+ ],
+ }
+
+
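Following the NOTE above: a termcap entry continues across lines with a trailing backslash, capabilities are ':'-separated, and numeric capabilities use '#'. A sketch with a tiny invented entry:

from pygments import lex
from pygments.lexers import get_lexer_by_name

# 'dumb|...' names the entry; the trailing backslash plus indented
# continuation is consumed by the r'\\\n[ \t]*' rule in the 'defs' state,
# and 'co#80' splits into an Operator '#' and a Number '80'.
sample = 'dumb|80-column dumb tty:\\\n\t:am:co#80:bs:\n'
for token, text in lex(sample, get_lexer_by_name('termcap')):
    print(token, repr(text))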
+class TerminfoLexer(RegexLexer):
+ """
+ Lexer for terminfo database source.
+
+ This is very simple and minimal.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Terminfo'
aliases = ['terminfo']
filenames = ['terminfo', 'terminfo.src']
- mimetypes = []
-
- # NOTE:
- # * multiline with leading whitespace
- # * separator is ','
- # * to embed comma as data, we can use \,
- # * space after separator is allowed
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
+ mimetypes = []
+
+ # NOTE:
+ # * multiline with leading whitespace
+ # * separator is ','
+ # * to embed comma as data, we can use \,
+ # * space after separator is allowed
+ tokens = {
+ 'root': [
+ (r'^#.*$', Comment),
(r'^[^\s#,|]+', Name.Tag, 'names'),
- ],
- 'names': [
- (r'\n', Text, '#pop'),
- (r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'),
- (r'\|', Punctuation),
+ ],
+ 'names': [
+ (r'\n', Text, '#pop'),
+ (r'(,)([ \t]*)', bygroups(Punctuation, Text), 'defs'),
+ (r'\|', Punctuation),
(r'[^,|]+', Name.Attribute),
- ],
- 'defs': [
- (r'\n[ \t]+', Text),
- (r'\n', Text, '#pop:2'),
- (r'(#)([0-9]+)', bygroups(Operator, Number)),
- (r'=', Operator, 'data'),
- (r'(,)([ \t]*)', bygroups(Punctuation, Text)),
- (r'[^\s,=#]+', Name.Class),
- ],
- 'data': [
- (r'\\[,\\]', Literal),
- (r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'),
- (r'[^\\,]+', Literal), # for performance
- (r'.', Literal),
- ],
- }
-
-
-class PkgConfigLexer(RegexLexer):
- """
- Lexer for `pkg-config
- <http://www.freedesktop.org/wiki/Software/pkg-config/>`_
- (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
-
- .. versionadded:: 2.1
- """
-
- name = 'PkgConfig'
+ ],
+ 'defs': [
+ (r'\n[ \t]+', Text),
+ (r'\n', Text, '#pop:2'),
+ (r'(#)([0-9]+)', bygroups(Operator, Number)),
+ (r'=', Operator, 'data'),
+ (r'(,)([ \t]*)', bygroups(Punctuation, Text)),
+ (r'[^\s,=#]+', Name.Class),
+ ],
+ 'data': [
+ (r'\\[,\\]', Literal),
+ (r'(,)([ \t]*)', bygroups(Punctuation, Text), '#pop'),
+ (r'[^\\,]+', Literal), # for performance
+ (r'.', Literal),
+ ],
+ }
+
+
+class PkgConfigLexer(RegexLexer):
+ """
+ Lexer for `pkg-config
+ <http://www.freedesktop.org/wiki/Software/pkg-config/>`_
+ (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'PkgConfig'
aliases = ['pkgconfig']
filenames = ['*.pc']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'#.*$', Comment.Single),
-
- # variable definitions
- (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),
-
- # keyword lines
- (r'^([\w.]+)(:)',
- bygroups(Name.Tag, Punctuation), 'spvalue'),
-
- # variable references
- include('interp'),
-
- # fallback
- (r'[^${}#=:\n.]+', Text),
- (r'.', Text),
- ],
- 'interp': [
- # you can escape literal "$" as "$$"
- (r'\$\$', Text),
-
- # variable references
- (r'\$\{', String.Interpol, 'curly'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r'\w+', Name.Attribute),
- ],
- 'spvalue': [
- include('interp'),
-
- (r'#.*$', Comment.Single, '#pop'),
- (r'\n', Text, '#pop'),
-
- # fallback
- (r'[^${}#\n]+', Text),
- (r'.', Text),
- ],
- }
-
-
-class PacmanConfLexer(RegexLexer):
- """
- Lexer for `pacman.conf
- <https://www.archlinux.org/pacman/pacman.conf.5.html>`_.
-
- Actually, IniLexer works almost fine for this format,
- but it yields error tokens because pacman.conf allows
- directives without an assignment, like:
-
- UseSyslog
- Color
- TotalDownload
- CheckSpace
- VerbosePkgLists
-
- These are flags to switch on.
-
- .. versionadded:: 2.1
- """
-
- name = 'PacmanConf'
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'#.*$', Comment.Single),
+
+ # variable definitions
+ (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),
+
+ # keyword lines
+ (r'^([\w.]+)(:)',
+ bygroups(Name.Tag, Punctuation), 'spvalue'),
+
+ # variable references
+ include('interp'),
+
+ # fallback
+ (r'[^${}#=:\n.]+', Text),
+ (r'.', Text),
+ ],
+ 'interp': [
+ # you can escape literal "$" as "$$"
+ (r'\$\$', Text),
+
+ # variable references
+ (r'\$\{', String.Interpol, 'curly'),
+ ],
+ 'curly': [
+ (r'\}', String.Interpol, '#pop'),
+ (r'\w+', Name.Attribute),
+ ],
+ 'spvalue': [
+ include('interp'),
+
+ (r'#.*$', Comment.Single, '#pop'),
+ (r'\n', Text, '#pop'),
+
+ # fallback
+ (r'[^${}#\n]+', Text),
+ (r'.', Text),
+ ],
+ }
+
+
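As the comments above note, '${var}' is a variable reference and a literal dollar sign is written '$$'. A sketch with an invented .pc file:

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = (
    'prefix=/usr\n'
    'libdir=${prefix}/lib\n'
    'Name: foo\n'
    'Cflags: -I${prefix}/include\n'
)
# 'prefix=' hits the variable-definition rule, 'Name:' the keyword-line rule
# (switching to 'spvalue'), and each ${...} goes through 'interp'/'curly'.
for token, text in lex(sample, get_lexer_by_name('pkgconfig')):
    print(token, repr(text))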
+class PacmanConfLexer(RegexLexer):
+ """
+ Lexer for `pacman.conf
+ <https://www.archlinux.org/pacman/pacman.conf.5.html>`_.
+
+ Actually, IniLexer works almost fine for this format,
+ but it yields error tokens because pacman.conf allows
+ directives without an assignment, like:
+
+ UseSyslog
+ Color
+ TotalDownload
+ CheckSpace
+ VerbosePkgLists
+
+ These are flags to switch on.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'PacmanConf'
aliases = ['pacmanconf']
filenames = ['pacman.conf']
- mimetypes = []
-
- tokens = {
- 'root': [
- # comment
- (r'#.*$', Comment.Single),
-
- # section header
- (r'^\s*\[.*?\]\s*$', Keyword),
-
- # variable definitions
- # (Leading space is allowed...)
- (r'(\w+)(\s*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
-
- # flags to on
- (r'^(\s*)(\w+)(\s*)$',
- bygroups(Text, Name.Attribute, Text)),
-
- # built-in special values
- (words((
- '$repo', # repository
- '$arch', # architecture
- '%o', # outfile
- '%u', # url
- ), suffix=r'\b'),
- Name.Variable),
-
- # fallback
- (r'.', Text),
- ],
- }
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ # comment
+ (r'#.*$', Comment.Single),
+
+ # section header
+ (r'^\s*\[.*?\]\s*$', Keyword),
+
+ # variable definitions
+ # (Leading space is allowed...)
+ (r'(\w+)(\s*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+
+ # flags to on
+ (r'^(\s*)(\w+)(\s*)$',
+ bygroups(Text, Name.Attribute, Text)),
+
+ # built-in special values
+ (words((
+ '$repo', # repository
+ '$arch', # architecture
+ '%o', # outfile
+ '%u', # url
+ ), suffix=r'\b'),
+ Name.Variable),
+
+ # fallback
+ (r'.', Text),
+ ],
+ }
class AugeasLexer(RegexLexer):
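As the PacmanConfLexer docstring above points out, plain INI lexing stumbles over pacman.conf's bare option flags; the flag rule '^(\s*)(\w+)(\s*)$' absorbs them instead. A sketch with an invented pacman.conf fragment:

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = (
    '[options]\n'
    'HoldPkg = pacman glibc\n'
    'Color\n'
    'CheckSpace\n'
    'Include = /etc/pacman.d/mirrorlist\n'
)
# The section header matches the Keyword rule, the '=' lines the assignment
# rule, and the bare 'Color'/'CheckSpace' lines the flags rule above.
for token, text in lex(sample, get_lexer_by_name('pacmanconf')):
    print(token, repr(text))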
diff --git a/contrib/python/Pygments/py2/pygments/lexers/console.py b/contrib/python/Pygments/py2/pygments/lexers/console.py
index ab93b7b8cd..dc47513ad0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/console.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/console.py
@@ -1,114 +1,114 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.console
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for misc console output.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.console
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc console output.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Generic, Comment, String, Text, Keyword, Name, \
- Punctuation, Number
-
-__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer']
-
-
-class VCTreeStatusLexer(RegexLexer):
- """
- For colorizing output of version control status commands, like "hg
- status" or "svn status".
-
- .. versionadded:: 2.0
- """
- name = 'VCTreeStatus'
- aliases = ['vctreestatus']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^A \+ C\s+', Generic.Error),
- (r'^A\s+\+?\s+', String),
- (r'^M\s+', Generic.Inserted),
- (r'^C\s+', Generic.Error),
- (r'^D\s+', Generic.Deleted),
- (r'^[?!]\s+', Comment.Preproc),
- (r' >\s+.*\n', Comment.Preproc),
- (r'.*\n', Text)
- ]
- }
-
-
-class PyPyLogLexer(RegexLexer):
- """
- Lexer for PyPy log files.
-
- .. versionadded:: 1.5
- """
- name = "PyPy Log"
- aliases = ["pypylog", "pypy"]
- filenames = ["*.pypylog"]
- mimetypes = ['application/x-pypylog']
-
- tokens = {
- "root": [
- (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"),
- (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"),
- include("extra-stuff"),
- ],
- "jit-log": [
- (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
- (r"^\+\d+: ", Comment),
- (r"--end of the loop--", Comment),
- (r"[ifp]\d+", Name),
- (r"ptr\d+", Name),
- (r"(\()(\w+(?:\.\w+)?)(\))",
- bygroups(Punctuation, Name.Builtin, Punctuation)),
- (r"[\[\]=,()]", Punctuation),
- (r"(\d+\.\d+|inf|-inf)", Number.Float),
- (r"-?\d+", Number.Integer),
- (r"'.*'", String),
- (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
- (r"<.*?>+", Name.Builtin),
- (r"(label|debug_merge_point|jump|finish)", Name.Class),
- (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
- r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
- r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
- r"int_is_true|"
- r"uint_floordiv|uint_ge|uint_lt|"
- r"float_add|float_sub|float_mul|float_truediv|float_neg|"
- r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
- r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
- r"cast_int_to_float|cast_float_to_int|"
- r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
- r"virtual_ref|mark_opaque_ptr|"
- r"call_may_force|call_assembler|call_loopinvariant|"
- r"call_release_gil|call_pure|call|"
- r"new_with_vtable|new_array|newstr|newunicode|new|"
- r"arraylen_gc|"
- r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
- r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
- r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
- r"getfield_raw|setfield_gc|setfield_raw|"
- r"strgetitem|strsetitem|strlen|copystrcontent|"
- r"unicodegetitem|unicodesetitem|unicodelen|"
- r"guard_true|guard_false|guard_value|guard_isnull|"
- r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
- r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
- Name.Builtin),
- include("extra-stuff"),
- ],
- "jit-backend-counts": [
- (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
- (r":", Punctuation),
- (r"\d+", Number),
- include("extra-stuff"),
- ],
- "extra-stuff": [
- (r"\s+", Text),
- (r"#.*?$", Comment),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Generic, Comment, String, Text, Keyword, Name, \
+ Punctuation, Number
+
+__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer']
+
+
+class VCTreeStatusLexer(RegexLexer):
+ """
+ For colorizing output of version control status commands, like "hg
+ status" or "svn status".
+
+ .. versionadded:: 2.0
+ """
+ name = 'VCTreeStatus'
+ aliases = ['vctreestatus']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^A \+ C\s+', Generic.Error),
+ (r'^A\s+\+?\s+', String),
+ (r'^M\s+', Generic.Inserted),
+ (r'^C\s+', Generic.Error),
+ (r'^D\s+', Generic.Deleted),
+ (r'^[?!]\s+', Comment.Preproc),
+ (r' >\s+.*\n', Comment.Preproc),
+ (r'.*\n', Text)
+ ]
+ }
+
+
+class PyPyLogLexer(RegexLexer):
+ """
+ Lexer for PyPy log files.
+
+ .. versionadded:: 1.5
+ """
+ name = "PyPy Log"
+ aliases = ["pypylog", "pypy"]
+ filenames = ["*.pypylog"]
+ mimetypes = ['application/x-pypylog']
+
+ tokens = {
+ "root": [
+ (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"),
+ (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"),
+ include("extra-stuff"),
+ ],
+ "jit-log": [
+ (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
+ (r"^\+\d+: ", Comment),
+ (r"--end of the loop--", Comment),
+ (r"[ifp]\d+", Name),
+ (r"ptr\d+", Name),
+ (r"(\()(\w+(?:\.\w+)?)(\))",
+ bygroups(Punctuation, Name.Builtin, Punctuation)),
+ (r"[\[\]=,()]", Punctuation),
+ (r"(\d+\.\d+|inf|-inf)", Number.Float),
+ (r"-?\d+", Number.Integer),
+ (r"'.*'", String),
+ (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
+ (r"<.*?>+", Name.Builtin),
+ (r"(label|debug_merge_point|jump|finish)", Name.Class),
+ (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
+ r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
+ r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
+ r"int_is_true|"
+ r"uint_floordiv|uint_ge|uint_lt|"
+ r"float_add|float_sub|float_mul|float_truediv|float_neg|"
+ r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
+ r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
+ r"cast_int_to_float|cast_float_to_int|"
+ r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
+ r"virtual_ref|mark_opaque_ptr|"
+ r"call_may_force|call_assembler|call_loopinvariant|"
+ r"call_release_gil|call_pure|call|"
+ r"new_with_vtable|new_array|newstr|newunicode|new|"
+ r"arraylen_gc|"
+ r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
+ r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
+ r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
+ r"getfield_raw|setfield_gc|setfield_raw|"
+ r"strgetitem|strsetitem|strlen|copystrcontent|"
+ r"unicodegetitem|unicodesetitem|unicodelen|"
+ r"guard_true|guard_false|guard_value|guard_isnull|"
+ r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
+ r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
+ Name.Builtin),
+ include("extra-stuff"),
+ ],
+ "jit-backend-counts": [
+ (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
+ (r":", Punctuation),
+ (r"\d+", Number),
+ include("extra-stuff"),
+ ],
+ "extra-stuff": [
+ (r"\s+", Text),
+ (r"#.*?$", Comment),
+ ],
+ }
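A short sketch for VCTreeStatusLexer above (the status lines are invented): per its rules, 'M'-prefixed lines come out as Generic.Inserted, 'A' lines (with an optional '+') as String, and '?'/'!' lines as Comment.Preproc, with everything else falling through to Text.

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = (
    'M       pygments/lexers/configs.py\n'
    'A  +    pygments/lexers/new.py\n'
    '?       notes.txt\n'
)
for token, text in lex(sample, get_lexer_by_name('vctreestatus')):
    print(token, repr(text))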
diff --git a/contrib/python/Pygments/py2/pygments/lexers/csound.py b/contrib/python/Pygments/py2/pygments/lexers/csound.py
index c35bd94b0e..278001b023 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/csound.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/csound.py
@@ -1,73 +1,73 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.csound
- ~~~~~~~~~~~~~~~~~~~~~~
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.csound
+ ~~~~~~~~~~~~~~~~~~~~~~
+
Lexers for Csound languages.
-
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import re
-
-from pygments.lexer import RegexLexer, bygroups, default, include, using, words
+
+from pygments.lexer import RegexLexer, bygroups, default, include, using, words
from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \
String, Text, Whitespace
from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.scripting import LuaLexer
-
-__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer', 'CsoundDocumentLexer']
-
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.scripting import LuaLexer
+
+__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer', 'CsoundDocumentLexer']
+
newline = (r'((?:(?:;|//).*)*)(\n)', bygroups(Comment.Single, Text))
-
-
-class CsoundLexer(RegexLexer):
- tokens = {
- 'whitespace': [
- (r'[ \t]+', Text),
+
+
+class CsoundLexer(RegexLexer):
+ tokens = {
+ 'whitespace': [
+ (r'[ \t]+', Text),
(r'/[*](?:.|\n)*?[*]/', Comment.Multiline),
(r'(?:;|//).*$', Comment.Single),
(r'(\\)(\n)', bygroups(Whitespace, Text))
- ],
-
+ ],
+
'preprocessor directives': [
(r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc),
(r'#includestr', Comment.Preproc, 'includestr directive'),
(r'#include', Comment.Preproc, 'include directive'),
(r'#[ \t]*define', Comment.Preproc, 'define directive'),
(r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive')
- ],
-
+ ],
+
'include directive': [
- include('whitespace'),
+ include('whitespace'),
(r'([^ \t]).*?\1', String, '#pop')
- ],
+ ],
'includestr directive': [
include('whitespace'),
(r'"', String, ('#pop', 'quoted string'))
],
-
+
'define directive': [
(r'\n', Text),
include('whitespace'),
(r'([A-Z_a-z]\w*)(\()', bygroups(Comment.Preproc, Punctuation),
('#pop', 'macro parameter name list')),
(r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'before macro body'))
- ],
+ ],
'macro parameter name list': [
- include('whitespace'),
+ include('whitespace'),
(r'[A-Z_a-z]\w*', Comment.Preproc),
(r"['#]", Punctuation),
(r'\)', Punctuation, ('#pop', 'before macro body'))
- ],
+ ],
'before macro body': [
(r'\n', Text),
- include('whitespace'),
+ include('whitespace'),
(r'#', Punctuation, ('#pop', 'macro body'))
- ],
+ ],
'macro body': [
(r'(?:\\(?!#)|[^#\\]|\n)+', Comment.Preproc),
(r'\\#', Comment.Preproc),
@@ -75,15 +75,15 @@ class CsoundLexer(RegexLexer):
],
'macro directive': [
- include('whitespace'),
+ include('whitespace'),
(r'[A-Z_a-z]\w*', Comment.Preproc, '#pop')
- ],
+ ],
'macro uses': [
(r'(\$[A-Z_a-z]\w*\.?)(\()', bygroups(Comment.Preproc, Punctuation),
'macro parameter value list'),
(r'\$[A-Z_a-z]\w*(?:\.|\b)', Comment.Preproc)
- ],
+ ],
'macro parameter value list': [
(r'(?:[^\'#"{()]|\{(?!\{))+', Comment.Preproc),
(r"['#]", Punctuation),
@@ -91,12 +91,12 @@ class CsoundLexer(RegexLexer):
(r'\{\{', String, 'macro parameter value braced string'),
(r'\(', Comment.Preproc, 'macro parameter value parenthetical'),
(r'\)', Punctuation, '#pop')
- ],
+ ],
'macro parameter value quoted string': [
(r"\\[#'()]", Comment.Preproc),
(r"[#'()]", Error),
include('quoted string')
- ],
+ ],
'macro parameter value braced string': [
(r"\\[#'()]", Comment.Preproc),
(r"[#'()]", Error),
@@ -128,26 +128,26 @@ class CsoundLexer(RegexLexer):
'braced string': [
# Do nothing. This must be defined in subclasses.
- ]
- }
-
-
-class CsoundScoreLexer(CsoundLexer):
- """
+ ]
+ }
+
+
+class CsoundScoreLexer(CsoundLexer):
+ """
For `Csound <https://csound.com>`_ scores.
-
- .. versionadded:: 2.1
- """
-
- name = 'Csound Score'
- aliases = ['csound-score', 'csound-sco']
- filenames = ['*.sco']
-
- tokens = {
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Csound Score'
+ aliases = ['csound-score', 'csound-sco']
+ filenames = ['*.sco']
+
+ tokens = {
'root': [
(r'\n', Text),
include('whitespace and macro uses'),
- include('preprocessor directives'),
+ include('preprocessor directives'),
(r'[abCdefiqstvxy]', Keyword),
# There is also a w statement that is generated internally and should not be
@@ -168,14 +168,14 @@ class CsoundScoreLexer(CsoundLexer):
(r'[()\[\]]', Punctuation),
(r'"', String, 'quoted string'),
(r'\{', Comment.Preproc, 'loop after left brace'),
- ],
-
+ ],
+
'mark statement': [
include('whitespace and macro uses'),
(r'[A-Z_a-z]\w*', Name.Label),
(r'\n', Text, '#pop')
- ],
-
+ ],
+
'loop after left brace': [
include('whitespace and macro uses'),
(r'\d+', Number.Integer, ('#pop', 'loop after repeat count')),
@@ -194,50 +194,50 @@ class CsoundScoreLexer(CsoundLexer):
'braced string': [
(r'\}\}', String, '#pop'),
(r'[^}]|\}(?!\})', String)
- ]
- }
-
-
-class CsoundOrchestraLexer(CsoundLexer):
- """
+ ]
+ }
+
+
+class CsoundOrchestraLexer(CsoundLexer):
+ """
For `Csound <https://csound.com>`_ orchestras.
-
- .. versionadded:: 2.1
- """
-
- name = 'Csound Orchestra'
- aliases = ['csound', 'csound-orc']
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Csound Orchestra'
+ aliases = ['csound', 'csound-orc']
filenames = ['*.orc', '*.udo']
-
- user_defined_opcodes = set()
-
- def opcode_name_callback(lexer, match):
- opcode = match.group(0)
- lexer.user_defined_opcodes.add(opcode)
- yield match.start(), Name.Function, opcode
-
- def name_callback(lexer, match):
+
+ user_defined_opcodes = set()
+
+ def opcode_name_callback(lexer, match):
+ opcode = match.group(0)
+ lexer.user_defined_opcodes.add(opcode)
+ yield match.start(), Name.Function, opcode
+
+ def name_callback(lexer, match):
type_annotation_token = Keyword.Type
name = match.group(1)
if name in OPCODES or name in DEPRECATED_OPCODES:
- yield match.start(), Name.Builtin, name
- elif name in lexer.user_defined_opcodes:
- yield match.start(), Name.Function, name
- else:
+ yield match.start(), Name.Builtin, name
+ elif name in lexer.user_defined_opcodes:
+ yield match.start(), Name.Function, name
+ else:
type_annotation_token = Name
name_match = re.search(r'^(g?[afikSw])(\w+)', name)
if name_match:
yield name_match.start(1), Keyword.Type, name_match.group(1)
yield name_match.start(2), Name, name_match.group(2)
- else:
- yield match.start(), Name, name
-
+ else:
+ yield match.start(), Name, name
+
if match.group(2):
yield match.start(2), Punctuation, match.group(2)
yield match.start(3), type_annotation_token, match.group(3)
- tokens = {
+ tokens = {
'root': [
(r'\n', Text),
@@ -251,8 +251,8 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'\b(?:end(?:in|op))\b', Keyword.Declaration),
include('partial statements')
- ],
-
+ ],
+
'partial statements': [
(r'\b(?:0dbfs|A4|k(?:r|smps)|nchnls(?:_i)?|sr)\b', Name.Variable.Global),
@@ -264,10 +264,10 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'"', String, 'quoted string'),
(r'\{\{', String, 'braced string'),
- (words((
- 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen',
- 'od', 'then', 'until', 'while',
- ), prefix=r'\b', suffix=r'\b'), Keyword),
+ (words((
+ 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen',
+ 'od', 'then', 'until', 'while',
+ ), prefix=r'\b', suffix=r'\b'), Keyword),
(words(('return', 'rireturn'), prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(r'\b[ik]?goto\b', Keyword, 'goto label'),
@@ -286,30 +286,30 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'\blua_(?:exec|opdef)\b', Name.Builtin, 'Lua opcode'),
(r'\bp\d+\b', Name.Variable.Instance),
(r'\b([A-Z_a-z]\w*)(?:(:)([A-Za-z]))?\b', name_callback)
- ],
-
+ ],
+
'instrument numbers and identifiers': [
include('whitespace and macro uses'),
(r'\d+|[A-Z_a-z]\w*', Name.Function),
(r'[+,]', Punctuation),
(r'\n', Text, '#pop')
- ],
-
+ ],
+
'after opcode keyword': [
include('whitespace and macro uses'),
(r'[A-Z_a-z]\w*', opcode_name_callback, ('#pop', 'opcode type signatures')),
(r'\n', Text, '#pop')
- ],
+ ],
'opcode type signatures': [
include('whitespace and macro uses'),
-
+
# https://github.com/csound/csound/search?q=XIDENT+path%3AEngine+filename%3Acsound_orc.lex
(r'0|[afijkKoOpPStV\[\]]+', Keyword.Type),
- (r',', Punctuation),
+ (r',', Punctuation),
(r'\n', Text, '#pop')
- ],
-
+ ],
+
'quoted string': [
(r'"', String, '#pop'),
(r'[^\\"$%)]+', String),
@@ -317,18 +317,18 @@ class CsoundOrchestraLexer(CsoundLexer):
include('escape sequences'),
include('format specifiers'),
(r'[\\$%)]', String)
- ],
+ ],
'braced string': [
(r'\}\}', String, '#pop'),
(r'(?:[^\\%)}]|\}(?!\}))+', String),
include('escape sequences'),
include('format specifiers'),
(r'[\\%)]', String)
- ],
+ ],
'escape sequences': [
# https://github.com/csound/csound/search?q=unquote_string+path%3AEngine+filename%3Acsound_orc_compile.c
(r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape)
- ],
+ ],
# Format specifiers are highlighted in all strings, even though only
# fprintks https://csound.com/docs/manual/fprintks.html
# fprints https://csound.com/docs/manual/fprints.html
@@ -347,119 +347,119 @@ class CsoundOrchestraLexer(CsoundLexer):
'format specifiers': [
(r'%[#0\- +]*\d*(?:\.\d+)?[diuoxXfFeEgGaAcs]', String.Interpol),
(r'%%', String.Escape)
- ],
-
+ ],
+
'goto argument': [
include('whitespace and macro uses'),
(r',', Punctuation, '#pop'),
include('partial statements')
],
- 'goto label': [
+ 'goto label': [
include('whitespace and macro uses'),
- (r'\w+', Name.Label, '#pop'),
- default('#pop')
- ],
-
+ (r'\w+', Name.Label, '#pop'),
+ default('#pop')
+ ],
+
'prints opcode': [
include('whitespace and macro uses'),
(r'"', String, 'prints quoted string'),
default('#pop')
- ],
+ ],
'prints quoted string': [
(r'\\\\[aAbBnNrRtT]', String.Escape),
(r'%[!nNrRtT]|[~^]{1,2}', String.Escape),
include('quoted string')
- ],
-
+ ],
+
'Csound score opcode': [
include('whitespace and macro uses'),
(r'"', String, 'quoted string'),
(r'\{\{', String, 'Csound score'),
(r'\n', Text, '#pop')
- ],
+ ],
'Csound score': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(CsoundScoreLexer))
- ],
-
+ ],
+
'Python opcode': [
include('whitespace and macro uses'),
(r'"', String, 'quoted string'),
(r'\{\{', String, 'Python'),
(r'\n', Text, '#pop')
- ],
+ ],
'Python': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(PythonLexer))
- ],
-
+ ],
+
'Lua opcode': [
include('whitespace and macro uses'),
(r'"', String, 'quoted string'),
(r'\{\{', String, 'Lua'),
(r'\n', Text, '#pop')
- ],
+ ],
'Lua': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(LuaLexer))
- ]
- }
-
-
-class CsoundDocumentLexer(RegexLexer):
- """
+ ]
+ }
+
+
+class CsoundDocumentLexer(RegexLexer):
+ """
For `Csound <https://csound.com>`_ documents.
-
+
.. versionadded:: 2.1
- """
-
- name = 'Csound Document'
- aliases = ['csound-document', 'csound-csd']
- filenames = ['*.csd']
-
- # These tokens are based on those in XmlLexer in pygments/lexers/html.py. Making
- # CsoundDocumentLexer a subclass of XmlLexer rather than RegexLexer may seem like a
- # better idea, since Csound Document files look like XML files. However, Csound
- # Documents can contain Csound comments (preceded by //, for example) before and
- # after the root element, unescaped bitwise AND & and less than < operators, etc. In
- # other words, while Csound Document files look like XML files, they may not actually
- # be XML files.
- tokens = {
- 'root': [
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ """
+
+ name = 'Csound Document'
+ aliases = ['csound-document', 'csound-csd']
+ filenames = ['*.csd']
+
+ # These tokens are based on those in XmlLexer in pygments/lexers/html.py. Making
+ # CsoundDocumentLexer a subclass of XmlLexer rather than RegexLexer may seem like a
+ # better idea, since Csound Document files look like XML files. However, Csound
+ # Documents can contain Csound comments (preceded by //, for example) before and
+ # after the root element, unescaped bitwise AND & and less than < operators, etc. In
+ # other words, while Csound Document files look like XML files, they may not actually
+ # be XML files.
+ tokens = {
+ 'root': [
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'(?:;|//).*$', Comment.Single),
(r'[^/;<]+|/(?!/)', Text),
- (r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')),
- (r'<\s*CsScore', Name.Tag, ('score', 'tag')),
+ (r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')),
+ (r'<\s*CsScore', Name.Tag, ('score', 'tag')),
(r'<\s*[Hh][Tt][Mm][Ll]', Name.Tag, ('HTML', 'tag')),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag)
- ],
-
- 'orchestra': [
- (r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer))
- ],
- 'score': [
- (r'<\s*/\s*CsScore\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer))
- ],
- 'HTML': [
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag)
+ ],
+
+ 'orchestra': [
+ (r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'),
+ (r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer))
+ ],
+ 'score': [
+ (r'<\s*/\s*CsScore\s*>', Name.Tag, '#pop'),
+ (r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer))
+ ],
+ 'HTML': [
(r'<\s*/\s*[Hh][Tt][Mm][Ll]\s*>', Name.Tag, '#pop'),
(r'(.|\n)+?(?=<\s*/\s*[Hh][Tt][Mm][Ll]\s*>)', using(HtmlLexer))
- ],
-
- 'tag': [
- (r'\s+', Text),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop')
- ],
- 'attr': [
- (r'\s+', Text),
- (r'".*?"', String, '#pop'),
- (r"'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop')
- ]
- }
+ ],
+
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop')
+ ],
+ 'attr': [
+ (r'\s+', Text),
+ (r'".*?"', String, '#pop'),
+ (r"'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop')
+ ]
+ }
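One detail from CsoundOrchestraLexer above worth a sketch: name_callback splits a leading type prefix (the 'g?[afikSw]' group) from the rest of an identifier, so a k-rate variable is emitted as Keyword.Type plus Name, while names found in OPCODES become Name.Builtin. The orchestra snippet below is invented:

from pygments import lex
from pygments.lexers import get_lexer_by_name

sample = (
    'instr 1\n'
    '  kfreq = 440\n'
    '  a1 oscili 0dbfs/4, kfreq, 1\n'
    '  out a1\n'
    'endin\n'
)
# 'kfreq' should split into Keyword.Type 'k' + Name 'freq'; 'oscili' and
# 'out' are standard opcodes, so name_callback yields Name.Builtin for them.
for token, text in lex(sample, get_lexer_by_name('csound')):
    print(token, repr(text))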
diff --git a/contrib/python/Pygments/py2/pygments/lexers/css.py b/contrib/python/Pygments/py2/pygments/lexers/css.py
index 4c77efe051..6bffe38bfc 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/css.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/css.py
@@ -1,26 +1,26 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.css
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for CSS and related stylesheet formats.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.css
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for CSS and related stylesheet formats.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import copy
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
- default, words, inherit
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.util import iteritems
-
-__all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer']
-
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import copy
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default, words, inherit
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import iteritems
+
+__all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer']
+
+
# List of vendor prefixes obtained from:
# https://www.w3.org/TR/CSS21/syndata.html#vendor-keyword-history
_vendor_prefixes = (
@@ -266,47 +266,47 @@ _all_units = _angle_units + _frequency_units + _length_units + \
_resolution_units + _time_units
-class CssLexer(RegexLexer):
- """
- For CSS (Cascading Style Sheets).
- """
-
- name = 'CSS'
- aliases = ['css']
- filenames = ['*.css']
- mimetypes = ['text/css']
-
- tokens = {
- 'root': [
- include('basics'),
- ],
- 'basics': [
- (r'\s+', Text),
- (r'/\*(?:.|\n)*?\*/', Comment),
- (r'\{', Punctuation, 'content'),
+class CssLexer(RegexLexer):
+ """
+ For CSS (Cascading Style Sheets).
+ """
+
+ name = 'CSS'
+ aliases = ['css']
+ filenames = ['*.css']
+ mimetypes = ['text/css']
+
+ tokens = {
+ 'root': [
+ include('basics'),
+ ],
+ 'basics': [
+ (r'\s+', Text),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\{', Punctuation, 'content'),
(r'(\:{1,2})([\w-]+)', bygroups(Punctuation, Name.Decorator)),
(r'(\.)([\w-]+)', bygroups(Punctuation, Name.Class)),
(r'(\#)([\w-]+)', bygroups(Punctuation, Name.Namespace)),
(r'(@)([\w-]+)', bygroups(Punctuation, Keyword), 'atrule'),
- (r'[\w-]+', Name.Tag),
- (r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single)
- ],
- 'atrule': [
- (r'\{', Punctuation, 'atcontent'),
- (r';', Punctuation, '#pop'),
- include('basics'),
- ],
- 'atcontent': [
- include('basics'),
- (r'\}', Punctuation, '#pop:2'),
- ],
- 'content': [
- (r'\s+', Text),
- (r'\}', Punctuation, '#pop'),
+ (r'[\w-]+', Name.Tag),
+ (r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single)
+ ],
+ 'atrule': [
+ (r'\{', Punctuation, 'atcontent'),
+ (r';', Punctuation, '#pop'),
+ include('basics'),
+ ],
+ 'atcontent': [
+ include('basics'),
+ (r'\}', Punctuation, '#pop:2'),
+ ],
+ 'content': [
+ (r'\s+', Text),
+ (r'\}', Punctuation, '#pop'),
(r';', Punctuation),
- (r'^@.*?$', Comment.Preproc),
+ (r'^@.*?$', Comment.Preproc),
(words(_vendor_prefixes,), Keyword.Pseudo),
(r'('+r'|'.join(_css_properties)+r')(\s*)(\:)',
@@ -329,15 +329,15 @@ class CssLexer(RegexLexer):
(words(_color_keywords, suffix=r'\b'), Keyword.Constant),
# for transition-property etc.
(words(_css_properties, suffix=r'\b'), Keyword),
- (r'\!important', Comment.Preproc),
- (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\!important', Comment.Preproc),
+ (r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
(r'[~^*!%&<>|+=@:./?-]+', Operator),
(r'[\[\](),]+', Punctuation),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[a-zA-Z_][\w-]*', Name),
(r';', Punctuation, '#pop'),
(r'\}', Punctuation, '#pop:2'),
@@ -383,310 +383,310 @@ class CssLexer(RegexLexer):
(r'%', Keyword.Type),
default('#pop'),
],
- }
-
-
-common_sass_tokens = {
- 'value': [
- (r'[ \t]+', Text),
- (r'[!$][\w-]+', Name.Variable),
- (r'url\(', String.Other, 'string-url'),
- (r'[a-z_-][\w-]*(?=\()', Name.Function),
+ }
+
+
+common_sass_tokens = {
+ 'value': [
+ (r'[ \t]+', Text),
+ (r'[!$][\w-]+', Name.Variable),
+ (r'url\(', String.Other, 'string-url'),
+ (r'[a-z_-][\w-]*(?=\()', Name.Function),
(words(_css_properties + (
- 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
- 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
- 'capitalize', 'center-left', 'center-right', 'center', 'circle',
- 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
- 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
- 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
- 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
- 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
- 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
- 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
- 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
- 'left-side', 'leftwards', 'level', 'lighter', 'line-through', 'list-item',
- 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
- 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
- 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
- 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
- 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
- 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
- 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
- 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
- 'slow', 'slower', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
- 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
- 'table-caption', 'table-cell', 'table-column', 'table-column-group',
- 'table-footer-group', 'table-header-group', 'table-row',
- 'table-row-group', 'text', 'text-bottom', 'text-top', 'thick', 'thin',
- 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
- 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
- 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
- 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
- Name.Constant),
+ 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
+ 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
+ 'capitalize', 'center-left', 'center-right', 'center', 'circle',
+ 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
+ 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
+ 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
+ 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
+ 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
+ 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
+ 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
+ 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
+ 'left-side', 'leftwards', 'level', 'lighter', 'line-through', 'list-item',
+ 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
+ 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
+ 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
+ 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
+ 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
+ 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
+ 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
+ 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
+ 'slow', 'slower', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
+ 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
+ 'table-caption', 'table-cell', 'table-column', 'table-column-group',
+ 'table-footer-group', 'table-header-group', 'table-row',
+ 'table-row-group', 'text', 'text-bottom', 'text-top', 'thick', 'thin',
+ 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
+ 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
+ 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
+ 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
+ Name.Constant),
(words(_color_keywords, suffix=r'\b'), Name.Entity),
- (words((
- 'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
- 'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
- Name.Builtin),
- (r'\!(important|default)', Name.Exception),
- (r'(true|false)', Name.Pseudo),
- (r'(and|or|not)', Operator.Word),
- (r'/\*', Comment.Multiline, 'inline-comment'),
- (r'//[^\n]*', Comment.Single),
- (r'\#[a-z0-9]{1,6}', Number.Hex),
- (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
- (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[~^*!&%<>|+=@:,./?-]+', Operator),
- (r'[\[\]()]+', Punctuation),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- (r'[a-z_-][\w-]*', Name),
- ],
-
- 'interpolation': [
- (r'\}', String.Interpol, '#pop'),
- include('value'),
- ],
-
- 'selector': [
- (r'[ \t]+', Text),
- (r'\:', Name.Decorator, 'pseudo-class'),
- (r'\.', Name.Class, 'class'),
- (r'\#', Name.Namespace, 'id'),
- (r'[\w-]+', Name.Tag),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'&', Keyword),
- (r'[~^*!&\[\]()<>|+=@:;,./?-]', Operator),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- ],
-
- 'string-double': [
- (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'"', String.Double, '#pop'),
- ],
-
- 'string-single': [
+ (words((
+ 'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
+ 'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
+ Name.Builtin),
+ (r'\!(important|default)', Name.Exception),
+ (r'(true|false)', Name.Pseudo),
+ (r'(and|or|not)', Operator.Word),
+ (r'/\*', Comment.Multiline, 'inline-comment'),
+ (r'//[^\n]*', Comment.Single),
+ (r'\#[a-z0-9]{1,6}', Number.Hex),
+ (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
+ (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[~^*!&%<>|+=@:,./?-]+', Operator),
+ (r'[\[\]()]+', Punctuation),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ (r'[a-z_-][\w-]*', Name),
+ ],
+
+ 'interpolation': [
+ (r'\}', String.Interpol, '#pop'),
+ include('value'),
+ ],
+
+ 'selector': [
+ (r'[ \t]+', Text),
+ (r'\:', Name.Decorator, 'pseudo-class'),
+ (r'\.', Name.Class, 'class'),
+ (r'\#', Name.Namespace, 'id'),
+ (r'[\w-]+', Name.Tag),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'&', Keyword),
+ (r'[~^*!&\[\]()<>|+=@:;,./?-]', Operator),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ ],
+
+ 'string-double': [
+ (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'"', String.Double, '#pop'),
+ ],
+
+ 'string-single': [
(r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Single),
- (r'#\{', String.Interpol, 'interpolation'),
+ (r'#\{', String.Interpol, 'interpolation'),
(r"'", String.Single, '#pop'),
- ],
-
- 'string-url': [
- (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'\)', String.Other, '#pop'),
- ],
-
- 'pseudo-class': [
- (r'[\w-]+', Name.Decorator),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'class': [
- (r'[\w-]+', Name.Class),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'id': [
- (r'[\w-]+', Name.Namespace),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'for': [
- (r'(from|to|through)', Operator.Word),
- include('value'),
- ],
-}
-
-
-def _indentation(lexer, match, ctx):
- indentation = match.group(0)
- yield match.start(), Text, indentation
- ctx.last_indentation = indentation
- ctx.pos = match.end()
-
- if hasattr(ctx, 'block_state') and ctx.block_state and \
- indentation.startswith(ctx.block_indentation) and \
- indentation != ctx.block_indentation:
- ctx.stack.append(ctx.block_state)
- else:
- ctx.block_state = None
- ctx.block_indentation = None
- ctx.stack.append('content')
-
-
-def _starts_block(token, state):
- def callback(lexer, match, ctx):
- yield match.start(), token, match.group(0)
-
- if hasattr(ctx, 'last_indentation'):
- ctx.block_indentation = ctx.last_indentation
- else:
- ctx.block_indentation = ''
-
- ctx.block_state = state
- ctx.pos = match.end()
-
- return callback
-
-
-class SassLexer(ExtendedRegexLexer):
- """
- For Sass stylesheets.
-
- .. versionadded:: 1.3
- """
-
- name = 'Sass'
- aliases = ['sass']
- filenames = ['*.sass']
- mimetypes = ['text/x-sass']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'content': [
- (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
- 'root'),
- (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
- 'root'),
- (r'@import', Keyword, 'import'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'=[\w-]+', Name.Function, 'value'),
- (r'\+[\w-]+', Name.Decorator, 'value'),
- (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
- bygroups(Name.Variable, Operator), 'value'),
- (r':', Name.Attribute, 'old-style-attr'),
- (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
- default('selector'),
- ],
-
- 'single-comment': [
- (r'.+', Comment.Single),
- (r'\n', Text, 'root'),
- ],
-
- 'multi-comment': [
- (r'.+', Comment.Multiline),
- (r'\n', Text, 'root'),
- ],
-
- 'import': [
- (r'[ \t]+', Text),
- (r'\S+', String),
- (r'\n', Text, 'root'),
- ],
-
- 'old-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[ \t]*=', Operator, 'value'),
- default('value'),
- ],
-
- 'new-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[ \t]*[=:]', Operator, 'value'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in iteritems(common_sass_tokens):
- tokens[group] = copy.copy(common)
- tokens['value'].append((r'\n', Text, 'root'))
- tokens['selector'].append((r'\n', Text, 'root'))
-
-
-class ScssLexer(RegexLexer):
- """
- For SCSS stylesheets.
- """
-
- name = 'SCSS'
- aliases = ['scss']
- filenames = ['*.scss']
- mimetypes = ['text/x-scss']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@import', Keyword, 'value'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'(@media)(\s+)', bygroups(Keyword, Text), 'value'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
- # TODO: broken, and prone to infinite loops.
+ ],
+
+ 'string-url': [
+ (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'\)', String.Other, '#pop'),
+ ],
+
+ 'pseudo-class': [
+ (r'[\w-]+', Name.Decorator),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'class': [
+ (r'[\w-]+', Name.Class),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'id': [
+ (r'[\w-]+', Name.Namespace),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'for': [
+ (r'(from|to|through)', Operator.Word),
+ include('value'),
+ ],
+}
+
+
+def _indentation(lexer, match, ctx):
+ indentation = match.group(0)
+ yield match.start(), Text, indentation
+ ctx.last_indentation = indentation
+ ctx.pos = match.end()
+
+ if hasattr(ctx, 'block_state') and ctx.block_state and \
+ indentation.startswith(ctx.block_indentation) and \
+ indentation != ctx.block_indentation:
+ ctx.stack.append(ctx.block_state)
+ else:
+ ctx.block_state = None
+ ctx.block_indentation = None
+ ctx.stack.append('content')
+
+
+def _starts_block(token, state):
+ def callback(lexer, match, ctx):
+ yield match.start(), token, match.group(0)
+
+ if hasattr(ctx, 'last_indentation'):
+ ctx.block_indentation = ctx.last_indentation
+ else:
+ ctx.block_indentation = ''
+
+ ctx.block_state = state
+ ctx.pos = match.end()
+
+ return callback
+
+
+class SassLexer(ExtendedRegexLexer):
+ """
+ For Sass stylesheets.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Sass'
+ aliases = ['sass']
+ filenames = ['*.sass']
+ mimetypes = ['text/x-sass']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'content': [
+ (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
+ 'root'),
+ (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
+ 'root'),
+ (r'@import', Keyword, 'import'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+ (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+ (r'@extend', Keyword, 'selector'),
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'=[\w-]+', Name.Function, 'value'),
+ (r'\+[\w-]+', Name.Decorator, 'value'),
+ (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
+ bygroups(Name.Variable, Operator), 'value'),
+ (r':', Name.Attribute, 'old-style-attr'),
+ (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
+ default('selector'),
+ ],
+
+ 'single-comment': [
+ (r'.+', Comment.Single),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'multi-comment': [
+ (r'.+', Comment.Multiline),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'import': [
+ (r'[ \t]+', Text),
+ (r'\S+', String),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'old-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[ \t]*=', Operator, 'value'),
+ default('value'),
+ ],
+
+ 'new-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[ \t]*[=:]', Operator, 'value'),
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
+ for group, common in iteritems(common_sass_tokens):
+ tokens[group] = copy.copy(common)
+ tokens['value'].append((r'\n', Text, 'root'))
+ tokens['selector'].append((r'\n', Text, 'root'))
+
+
+class ScssLexer(RegexLexer):
+ """
+ For SCSS stylesheets.
+ """
+
+ name = 'SCSS'
+ aliases = ['scss']
+ filenames = ['*.scss']
+ mimetypes = ['text/x-scss']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@import', Keyword, 'value'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+ (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+ (r'@extend', Keyword, 'selector'),
+ (r'(@media)(\s+)', bygroups(Keyword, Text), 'value'),
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
+ # TODO: broken, and prone to infinite loops.
# (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
# (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
- default('selector'),
- ],
-
- 'attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[ \t]*:', Operator, 'value'),
- default('#pop'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in iteritems(common_sass_tokens):
- tokens[group] = copy.copy(common)
- tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')])
- tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')])
-
-
-class LessCssLexer(CssLexer):
- """
- For `LESS <http://lesscss.org/>`_ stylesheets.
-
- .. versionadded:: 2.1
- """
-
- name = 'LessCss'
- aliases = ['less']
- filenames = ['*.less']
- mimetypes = ['text/x-less-css']
-
- tokens = {
- 'root': [
- (r'@\w+', Name.Variable),
- inherit,
- ],
- 'content': [
+ default('selector'),
+ ],
+
+ 'attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[ \t]*:', Operator, 'value'),
+ default('#pop'),
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
+ for group, common in iteritems(common_sass_tokens):
+ tokens[group] = copy.copy(common)
+ tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')])
+ tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, '#pop')])
+
+
+class LessCssLexer(CssLexer):
+ """
+ For `LESS <http://lesscss.org/>`_ stylesheets.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'LessCss'
+ aliases = ['less']
+ filenames = ['*.less']
+ mimetypes = ['text/x-less-css']
+
+ tokens = {
+ 'root': [
+ (r'@\w+', Name.Variable),
+ inherit,
+ ],
+ 'content': [
(r'\{', Punctuation, '#push'),
- inherit,
- ],
- }
+ inherit,
+ ],
+ }
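
The lexers defined in css.py above (SassLexer, ScssLexer, LessCssLexer) register the aliases 'sass', 'scss' and 'less'. A minimal usage sketch, assuming this vendored copy of Pygments is importable; the SCSS snippet and the variable names in it are illustrative only, not taken from the diff:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import TerminalFormatter

# Illustrative SCSS input exercising a variable, nesting and #{...}
# interpolation, i.e. the constructs the ScssLexer states above tokenize.
scss_source = """
$accent: #336699;
.button {
  color: $accent;
  .label-#{$suffix} { font-family: sans-serif; }
}
"""

# 'scss' resolves to ScssLexer through the alias declared in its class body.
print(highlight(scss_source, get_lexer_by_name('scss'), TerminalFormatter()))
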
diff --git a/contrib/python/Pygments/py2/pygments/lexers/d.py b/contrib/python/Pygments/py2/pygments/lexers/d.py
index b14f7dcd11..74c63f8cf0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/d.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/d.py
@@ -1,251 +1,251 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.d
- ~~~~~~~~~~~~~~~~~
-
- Lexers for D languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.d
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for D languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
-
-
-class DLexer(RegexLexer):
- """
- For D source.
-
- .. versionadded:: 1.2
- """
- name = 'D'
- filenames = ['*.d', '*.di']
- aliases = ['d']
- mimetypes = ['text/x-dsrc']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # (r'\\\n', Text), # line continuations
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nested_comment'),
- # Keywords
- (words((
- 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body',
- 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue',
- 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else',
- 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse',
- 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import',
- 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin',
- 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
- 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
- 'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
- 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof',
- 'union', 'unittest', 'version', 'volatile', 'while', 'with',
- '__gshared', '__traits', '__vector', '__parameters'),
- suffix=r'\b'),
- Keyword),
- (words((
- 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
- 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
- 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
- 'ushort', 'void', 'wchar'), suffix=r'\b'),
- Keyword.Type),
- (r'(false|true|null)\b', Keyword.Constant),
- (words((
- '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__', '__PRETTY_FUNCTION__',
- '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__',
- '__VERSION__'), suffix=r'\b'),
- Keyword.Pseudo),
- (r'macro\b', Keyword.Reserved),
- (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
- # FloatLiteral
- # -- HexFloat
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+', Number.Bin),
- # -- Octal
- (r'0[0-7_]+', Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
- String.Char),
- # StringLiteral
- # -- WysiwygString
- (r'r"[^"]*"[cwd]?', String),
- # -- AlternateWysiwygString
- (r'`[^`]*`[cwd]?', String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"[cwd]?', String),
- # -- EscapeSequence
- (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
- r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
- String),
- # -- HexString
- (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
- # -- DelimitedString
- (r'q"\[', String, 'delimited_bracket'),
- (r'q"\(', String, 'delimited_parenthesis'),
- (r'q"<', String, 'delimited_angle'),
- (r'q"\{', String, 'delimited_curly'),
- (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
- (r'q"(.).*?\1"', String),
- # -- TokenString
- (r'q\{', String, 'token_string'),
- # Attributes
- (r'@([a-zA-Z_]\w*)?', Name.Decorator),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
- r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
- r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- # Line
- (r'#line\s.*\n', Comment.Special),
- ],
- 'nested_comment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ],
- 'token_string': [
- (r'\{', Punctuation, 'token_string_nest'),
- (r'\}', String, '#pop'),
- include('root'),
- ],
- 'token_string_nest': [
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'delimited_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, 'delimited_inside_bracket'),
- (r'\]"', String, '#pop'),
- ],
- 'delimited_inside_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, '#push'),
- (r'\]', String, '#pop'),
- ],
- 'delimited_parenthesis': [
- (r'[^()]+', String),
- (r'\(', String, 'delimited_inside_parenthesis'),
- (r'\)"', String, '#pop'),
- ],
- 'delimited_inside_parenthesis': [
- (r'[^()]+', String),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
- 'delimited_angle': [
- (r'[^<>]+', String),
- (r'<', String, 'delimited_inside_angle'),
- (r'>"', String, '#pop'),
- ],
- 'delimited_inside_angle': [
- (r'[^<>]+', String),
- (r'<', String, '#push'),
- (r'>', String, '#pop'),
- ],
- 'delimited_curly': [
- (r'[^{}]+', String),
- (r'\{', String, 'delimited_inside_curly'),
- (r'\}"', String, '#pop'),
- ],
- 'delimited_inside_curly': [
- (r'[^{}]+', String),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- }
-
-
-class CrocLexer(RegexLexer):
- """
- For `Croc <http://jfbillingsley.com/croc>`_ source.
- """
- name = 'Croc'
- filenames = ['*.croc']
- aliases = ['croc']
- mimetypes = ['text/x-crocsrc']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'nestedcomment'),
- # Keywords
- (words((
- 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue',
- 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function',
- 'global', 'namespace', 'if', 'import', 'in', 'is', 'local',
- 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw',
- 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'),
- Keyword),
- (r'(false|true|null)\b', Keyword.Constant),
- # FloatLiteral
- (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
- Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[bB][01][01_]*', Number.Bin),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
- # -- Decimal
- (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
- String.Char),
- # StringLiteral
- # -- WysiwygString
- (r'@"(""|[^"])*"', String),
- (r'@`(``|[^`])*`', String),
- (r"@'(''|[^'])*'", String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
- r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
- r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'nestedcomment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
-
-class MiniDLexer(CrocLexer):
- """
- For MiniD source. MiniD is now known as Croc.
- """
- name = 'MiniD'
- filenames = [] # don't lex .md as MiniD, reserve for Markdown
- aliases = ['minid']
- mimetypes = ['text/x-minidsrc']
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
+
+
+class DLexer(RegexLexer):
+ """
+ For D source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'D'
+ filenames = ['*.d', '*.di']
+ aliases = ['d']
+ mimetypes = ['text/x-dsrc']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # (r'\\\n', Text), # line continuations
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nested_comment'),
+ # Keywords
+ (words((
+ 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body',
+ 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue',
+ 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else',
+ 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse',
+ 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import',
+ 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin',
+ 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
+ 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
+ 'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
+ 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof',
+ 'union', 'unittest', 'version', 'volatile', 'while', 'with',
+ '__gshared', '__traits', '__vector', '__parameters'),
+ suffix=r'\b'),
+ Keyword),
+ (words((
+ 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
+ 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
+ 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
+ 'ushort', 'void', 'wchar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(false|true|null)\b', Keyword.Constant),
+ (words((
+ '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__', '__PRETTY_FUNCTION__',
+ '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__',
+ '__VERSION__'), suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'macro\b', Keyword.Reserved),
+ (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
+ # FloatLiteral
+ # -- HexFloat
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+', Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+', Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'r"[^"]*"[cwd]?', String),
+ # -- AlternateWysiwygString
+ (r'`[^`]*`[cwd]?', String),
+ # -- DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"[cwd]?', String),
+ # -- EscapeSequence
+ (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
+ r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
+ String),
+ # -- HexString
+ (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
+ # -- DelimitedString
+ (r'q"\[', String, 'delimited_bracket'),
+ (r'q"\(', String, 'delimited_parenthesis'),
+ (r'q"<', String, 'delimited_angle'),
+ (r'q"\{', String, 'delimited_curly'),
+ (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
+ (r'q"(.).*?\1"', String),
+ # -- TokenString
+ (r'q\{', String, 'token_string'),
+ # Attributes
+ (r'@([a-zA-Z_]\w*)?', Name.Decorator),
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
+ r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
+ r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ # Line
+ (r'#line\s.*\n', Comment.Special),
+ ],
+ 'nested_comment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ],
+ 'token_string': [
+ (r'\{', Punctuation, 'token_string_nest'),
+ (r'\}', String, '#pop'),
+ include('root'),
+ ],
+ 'token_string_nest': [
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'delimited_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, 'delimited_inside_bracket'),
+ (r'\]"', String, '#pop'),
+ ],
+ 'delimited_inside_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, '#push'),
+ (r'\]', String, '#pop'),
+ ],
+ 'delimited_parenthesis': [
+ (r'[^()]+', String),
+ (r'\(', String, 'delimited_inside_parenthesis'),
+ (r'\)"', String, '#pop'),
+ ],
+ 'delimited_inside_parenthesis': [
+ (r'[^()]+', String),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+ 'delimited_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, 'delimited_inside_angle'),
+ (r'>"', String, '#pop'),
+ ],
+ 'delimited_inside_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, '#push'),
+ (r'>', String, '#pop'),
+ ],
+ 'delimited_curly': [
+ (r'[^{}]+', String),
+ (r'\{', String, 'delimited_inside_curly'),
+ (r'\}"', String, '#pop'),
+ ],
+ 'delimited_inside_curly': [
+ (r'[^{}]+', String),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ }
+
+
+class CrocLexer(RegexLexer):
+ """
+ For `Croc <http://jfbillingsley.com/croc>`_ source.
+ """
+ name = 'Croc'
+ filenames = ['*.croc']
+ aliases = ['croc']
+ mimetypes = ['text/x-crocsrc']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'nestedcomment'),
+ # Keywords
+ (words((
+ 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue',
+ 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function',
+ 'global', 'namespace', 'if', 'import', 'in', 'is', 'local',
+ 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw',
+ 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'),
+ Keyword),
+ (r'(false|true|null)\b', Keyword.Constant),
+ # FloatLiteral
+ (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
+ Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[bB][01][01_]*', Number.Bin),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
+ # -- Decimal
+ (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'@"(""|[^"])*"', String),
+ (r'@`(``|[^`])*`', String),
+ (r"@'(''|[^'])*'", String),
+ # -- DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
+ r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
+ r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'nestedcomment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+
+class MiniDLexer(CrocLexer):
+ """
+ For MiniD source. MiniD is now known as Croc.
+ """
+ name = 'MiniD'
+ filenames = [] # don't lex .md as MiniD, reserve for Markdown
+ aliases = ['minid']
+ mimetypes = ['text/x-minidsrc']
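
A small sketch of driving the DLexer defined in d.py above, assuming the vendored package is importable as pygments.lexers.d; the D source string is a made-up fragment:

from pygments.lexers.d import DLexer

# Made-up D fragment covering a keyword, a builtin type, a string literal
# and a single-line comment, all matched by rules in DLexer.tokens above.
d_source = 'int main() { string s = "hi"; return 0; } // entry point'

# get_tokens() yields (token_type, value) pairs produced by the rules above.
for token_type, value in DLexer().get_tokens(d_source):
    print(token_type, repr(value))
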
diff --git a/contrib/python/Pygments/py2/pygments/lexers/dalvik.py b/contrib/python/Pygments/py2/pygments/lexers/dalvik.py
index 0e39bb9657..880ad81ba4 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/dalvik.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/dalvik.py
@@ -1,125 +1,125 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.dalvik
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for Dalvik VM-related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dalvik
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexers for Dalvik VM-related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Keyword, Text, Comment, Name, String, Number, \
- Punctuation
-
-__all__ = ['SmaliLexer']
-
-
-class SmaliLexer(RegexLexer):
- """
- For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
- code.
-
- .. versionadded:: 1.6
- """
- name = 'Smali'
- aliases = ['smali']
- filenames = ['*.smali']
- mimetypes = ['text/smali']
-
- tokens = {
- 'root': [
- include('comment'),
- include('label'),
- include('field'),
- include('method'),
- include('class'),
- include('directive'),
- include('access-modifier'),
- include('instruction'),
- include('literal'),
- include('punctuation'),
- include('type'),
- include('whitespace')
- ],
- 'directive': [
- (r'^[ \t]*\.(class|super|implements|field|subannotation|annotation|'
- r'enum|method|registers|locals|array-data|packed-switch|'
- r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
- r'epilogue|source)', Keyword),
- (r'^[ \t]*\.end (field|subannotation|annotation|method|array-data|'
- 'packed-switch|sparse-switch|parameter|local)', Keyword),
- (r'^[ \t]*\.restart local', Keyword),
- ],
- 'access-modifier': [
- (r'(public|private|protected|static|final|synchronized|bridge|'
- r'varargs|native|abstract|strictfp|synthetic|constructor|'
- r'declared-synchronized|interface|enum|annotation|volatile|'
- r'transient)', Keyword),
- ],
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- ],
- 'instruction': [
- (r'\b[vp]\d+\b', Name.Builtin), # registers
- (r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
- ],
- 'literal': [
- (r'".*"', String),
- (r'0x[0-9A-Fa-f]+t?', Number.Hex),
- (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+L?', Number.Integer),
- ],
- 'field': [
- (r'(\$?\b)([\w$]*)(:)',
- bygroups(Punctuation, Name.Variable, Punctuation)),
- ],
- 'method': [
- (r'<(?:cl)?init>', Name.Function), # constructor
- (r'(\$?\b)([\w$]*)(\()',
- bygroups(Punctuation, Name.Function, Punctuation)),
- ],
- 'label': [
- (r':\w+', Name.Label),
- ],
- 'class': [
- # class names in the form Lcom/namespace/ClassName;
- # I only want to color the ClassName part, so the namespace part is
- # treated as 'Text'
- (r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
- bygroups(Keyword.Type, Text, Name.Class, Text)),
- ],
- 'punctuation': [
- (r'->', Punctuation),
- (r'[{},():=.-]', Punctuation),
- ],
- 'type': [
- (r'[ZBSCIJFDV\[]+', Keyword.Type),
- ],
- 'comment': [
- (r'#.*?\n', Comment),
- ],
- }
-
- def analyse_text(text):
- score = 0
- if re.search(r'^\s*\.class\s', text, re.MULTILINE):
- score += 0.5
- if re.search(r'\b((check-cast|instance-of|throw-verification-error'
- r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
- r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
- r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
- score += 0.3
- if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
- r'\b(array-data|class-change-error|declared-synchronized|'
- r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
- r'illegal-class-access|illegal-field-access|'
- r'illegal-method-access|instantiation-error|no-error|'
- r'no-such-class|no-such-field|no-such-method|'
- r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
- score += 0.6
- return score
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Keyword, Text, Comment, Name, String, Number, \
+ Punctuation
+
+__all__ = ['SmaliLexer']
+
+
+class SmaliLexer(RegexLexer):
+ """
+ For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
+ code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Smali'
+ aliases = ['smali']
+ filenames = ['*.smali']
+ mimetypes = ['text/smali']
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('label'),
+ include('field'),
+ include('method'),
+ include('class'),
+ include('directive'),
+ include('access-modifier'),
+ include('instruction'),
+ include('literal'),
+ include('punctuation'),
+ include('type'),
+ include('whitespace')
+ ],
+ 'directive': [
+ (r'^[ \t]*\.(class|super|implements|field|subannotation|annotation|'
+ r'enum|method|registers|locals|array-data|packed-switch|'
+ r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
+ r'epilogue|source)', Keyword),
+ (r'^[ \t]*\.end (field|subannotation|annotation|method|array-data|'
+ 'packed-switch|sparse-switch|parameter|local)', Keyword),
+ (r'^[ \t]*\.restart local', Keyword),
+ ],
+ 'access-modifier': [
+ (r'(public|private|protected|static|final|synchronized|bridge|'
+ r'varargs|native|abstract|strictfp|synthetic|constructor|'
+ r'declared-synchronized|interface|enum|annotation|volatile|'
+ r'transient)', Keyword),
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ ],
+ 'instruction': [
+ (r'\b[vp]\d+\b', Name.Builtin), # registers
+ (r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
+ ],
+ 'literal': [
+ (r'".*"', String),
+ (r'0x[0-9A-Fa-f]+t?', Number.Hex),
+ (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+L?', Number.Integer),
+ ],
+ 'field': [
+ (r'(\$?\b)([\w$]*)(:)',
+ bygroups(Punctuation, Name.Variable, Punctuation)),
+ ],
+ 'method': [
+ (r'<(?:cl)?init>', Name.Function), # constructor
+ (r'(\$?\b)([\w$]*)(\()',
+ bygroups(Punctuation, Name.Function, Punctuation)),
+ ],
+ 'label': [
+ (r':\w+', Name.Label),
+ ],
+ 'class': [
+ # class names in the form Lcom/namespace/ClassName;
+ # I only want to color the ClassName part, so the namespace part is
+ # treated as 'Text'
+ (r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
+ bygroups(Keyword.Type, Text, Name.Class, Text)),
+ ],
+ 'punctuation': [
+ (r'->', Punctuation),
+ (r'[{},():=.-]', Punctuation),
+ ],
+ 'type': [
+ (r'[ZBSCIJFDV\[]+', Keyword.Type),
+ ],
+ 'comment': [
+ (r'#.*?\n', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ score = 0
+ if re.search(r'^\s*\.class\s', text, re.MULTILINE):
+ score += 0.5
+ if re.search(r'\b((check-cast|instance-of|throw-verification-error'
+ r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
+ r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
+ r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
+ score += 0.3
+ if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
+ r'\b(array-data|class-change-error|declared-synchronized|'
+ r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
+ r'illegal-class-access|illegal-field-access|'
+ r'illegal-method-access|instantiation-error|no-error|'
+ r'no-such-class|no-such-field|no-such-method|'
+ r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
+ score += 0.6
+ return score
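
The analyse_text() heuristic in SmaliLexer above lets Pygments pick the lexer for unlabelled input. A hedged sketch, assuming the vendored package is importable; the Smali snippet is an invented example, not taken from the diff:

from pygments.lexers import guess_lexer

# Invented Smali fragment; the .class directive and the invoke-/return-
# instructions are what the scoring regexes in analyse_text() look for.
smali_snippet = '''\
.class public Lcom/example/Foo;
.super Ljava/lang/Object;

.method public constructor <init>()V
    .registers 1
    invoke-direct {p0}, Ljava/lang/Object;-><init>()V
    return-void
.end method
'''

lexer = guess_lexer(smali_snippet)
print(lexer.name)  # expected to be 'Smali' for input like this
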
diff --git a/contrib/python/Pygments/py2/pygments/lexers/data.py b/contrib/python/Pygments/py2/pygments/lexers/data.py
index 46ca734006..4c616987e9 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/data.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/data.py
@@ -1,519 +1,519 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.data
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for data file formats.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.data
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for data file formats.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
- include, bygroups, inherit
-from pygments.token import Text, Comment, Keyword, Name, String, Number, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
+ include, bygroups, inherit
+from pygments.token import Text, Comment, Keyword, Name, String, Number, \
Punctuation, Literal, Error
-
+
__all__ = ['YamlLexer', 'JsonLexer', 'JsonBareObjectLexer', 'JsonLdLexer']
-
-
-class YamlLexerContext(LexerContext):
- """Indentation context for the YAML lexer."""
-
- def __init__(self, *args, **kwds):
- super(YamlLexerContext, self).__init__(*args, **kwds)
- self.indent_stack = []
- self.indent = -1
- self.next_indent = 0
- self.block_scalar_indent = None
-
-
-class YamlLexer(ExtendedRegexLexer):
- """
- Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
- language.
-
- .. versionadded:: 0.11
- """
-
- name = 'YAML'
- aliases = ['yaml']
- filenames = ['*.yaml', '*.yml']
- mimetypes = ['text/x-yaml']
-
- def something(token_class):
- """Do not produce empty tokens."""
- def callback(lexer, match, context):
- text = match.group()
- if not text:
- return
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def reset_indent(token_class):
- """Reset the indentation levels."""
- def callback(lexer, match, context):
- text = match.group()
- context.indent_stack = []
- context.indent = -1
- context.next_indent = 0
- context.block_scalar_indent = None
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def save_indent(token_class, start=False):
- """Save a possible indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- extra = ''
- if start:
- context.next_indent = len(text)
- if context.next_indent < context.indent:
- while context.next_indent < context.indent:
- context.indent = context.indent_stack.pop()
- if context.next_indent > context.indent:
- extra = text[context.indent:]
- text = text[:context.indent]
- else:
- context.next_indent += len(text)
- if text:
- yield match.start(), token_class, text
- if extra:
- yield match.start()+len(text), token_class.Error, extra
- context.pos = match.end()
- return callback
-
- def set_indent(token_class, implicit=False):
- """Set the previously saved indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- if context.indent < context.next_indent:
- context.indent_stack.append(context.indent)
- context.indent = context.next_indent
- if not implicit:
- context.next_indent += len(text)
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def set_block_scalar_indent(token_class):
- """Set an explicit indentation level for a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- context.block_scalar_indent = None
- if not text:
- return
- increment = match.group(1)
- if increment:
- current_indent = max(context.indent, 0)
- increment = int(increment)
- context.block_scalar_indent = current_indent + increment
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_empty_line(indent_token_class, content_token_class):
- """Process an empty line in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if (context.block_scalar_indent is None or
- len(text) <= context.block_scalar_indent):
- if text:
- yield match.start(), indent_token_class, text
- else:
- indentation = text[:context.block_scalar_indent]
- content = text[context.block_scalar_indent:]
- yield match.start(), indent_token_class, indentation
- yield (match.start()+context.block_scalar_indent,
- content_token_class, content)
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_indent(token_class):
- """Process indentation spaces in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if context.block_scalar_indent is None:
- if len(text) <= max(context.indent, 0):
- context.stack.pop()
- context.stack.pop()
- return
- context.block_scalar_indent = len(text)
- else:
- if len(text) < context.block_scalar_indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_plain_scalar_indent(token_class):
- """Process indentation spaces in a plain scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if len(text) <= context.indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- tokens = {
- # the root rules
- 'root': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Text),
- # line breaks
- (r'\n+', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # the '%YAML' directive
- (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
- # the %TAG directive
- (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
- # document start and document end indicators
- (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
- 'block-line'),
- # indentation spaces
- (r'[ ]*(?!\s|$)', save_indent(Text, start=True),
- ('block-line', 'indentation')),
- ],
-
- # trailing whitespaces after directives or a block scalar indicator
- 'ignored-line': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # line break
- (r'\n', Text, '#pop:2'),
- ],
-
- # the %YAML directive
- 'yaml-directive': [
- # the version number
- (r'([ ]+)([0-9]+\.[0-9]+)',
- bygroups(Text, Number), 'ignored-line'),
- ],
-
+
+
+class YamlLexerContext(LexerContext):
+ """Indentation context for the YAML lexer."""
+
+ def __init__(self, *args, **kwds):
+ super(YamlLexerContext, self).__init__(*args, **kwds)
+ self.indent_stack = []
+ self.indent = -1
+ self.next_indent = 0
+ self.block_scalar_indent = None
+
+
+class YamlLexer(ExtendedRegexLexer):
+ """
+ Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
+ language.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'YAML'
+ aliases = ['yaml']
+ filenames = ['*.yaml', '*.yml']
+ mimetypes = ['text/x-yaml']
+
+ def something(token_class):
+ """Do not produce empty tokens."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if not text:
+ return
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def reset_indent(token_class):
+ """Reset the indentation levels."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.indent_stack = []
+ context.indent = -1
+ context.next_indent = 0
+ context.block_scalar_indent = None
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def save_indent(token_class, start=False):
+ """Save a possible indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ extra = ''
+ if start:
+ context.next_indent = len(text)
+ if context.next_indent < context.indent:
+ while context.next_indent < context.indent:
+ context.indent = context.indent_stack.pop()
+ if context.next_indent > context.indent:
+ extra = text[context.indent:]
+ text = text[:context.indent]
+ else:
+ context.next_indent += len(text)
+ if text:
+ yield match.start(), token_class, text
+ if extra:
+ yield match.start()+len(text), token_class.Error, extra
+ context.pos = match.end()
+ return callback
+
+ def set_indent(token_class, implicit=False):
+ """Set the previously saved indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.indent < context.next_indent:
+ context.indent_stack.append(context.indent)
+ context.indent = context.next_indent
+ if not implicit:
+ context.next_indent += len(text)
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def set_block_scalar_indent(token_class):
+ """Set an explicit indentation level for a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.block_scalar_indent = None
+ if not text:
+ return
+ increment = match.group(1)
+ if increment:
+ current_indent = max(context.indent, 0)
+ increment = int(increment)
+ context.block_scalar_indent = current_indent + increment
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_empty_line(indent_token_class, content_token_class):
+ """Process an empty line in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if (context.block_scalar_indent is None or
+ len(text) <= context.block_scalar_indent):
+ if text:
+ yield match.start(), indent_token_class, text
+ else:
+ indentation = text[:context.block_scalar_indent]
+ content = text[context.block_scalar_indent:]
+ yield match.start(), indent_token_class, indentation
+ yield (match.start()+context.block_scalar_indent,
+ content_token_class, content)
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_indent(token_class):
+ """Process indentation spaces in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.block_scalar_indent is None:
+ if len(text) <= max(context.indent, 0):
+ context.stack.pop()
+ context.stack.pop()
+ return
+ context.block_scalar_indent = len(text)
+ else:
+ if len(text) < context.block_scalar_indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_plain_scalar_indent(token_class):
+ """Process indentation spaces in a plain scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if len(text) <= context.indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ tokens = {
+ # the root rules
+ 'root': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text),
+ # line breaks
+ (r'\n+', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # the '%YAML' directive
+ (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
+ # the %TAG directive
+ (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
+ # document start and document end indicators
+ (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
+ 'block-line'),
+ # indentation spaces
+ (r'[ ]*(?!\s|$)', save_indent(Text, start=True),
+ ('block-line', 'indentation')),
+ ],
+
+ # trailing whitespaces after directives or a block scalar indicator
+ 'ignored-line': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # line break
+ (r'\n', Text, '#pop:2'),
+ ],
+
+ # the %YAML directive
+ 'yaml-directive': [
+ # the version number
+ (r'([ ]+)([0-9]+\.[0-9]+)',
+ bygroups(Text, Number), 'ignored-line'),
+ ],
+
# the %TAG directive
- 'tag-directive': [
- # a tag handle and the corresponding prefix
- (r'([ ]+)(!|![\w-]*!)'
- r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
- bygroups(Text, Keyword.Type, Text, Keyword.Type),
- 'ignored-line'),
- ],
-
- # block scalar indicators and indentation spaces
- 'indentation': [
- # trailing whitespaces are ignored
- (r'[ ]*$', something(Text), '#pop:2'),
+ 'tag-directive': [
+ # a tag handle and the corresponding prefix
+ (r'([ ]+)(!|![\w-]*!)'
+ r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
+ bygroups(Text, Keyword.Type, Text, Keyword.Type),
+ 'ignored-line'),
+ ],
+
+ # block scalar indicators and indentation spaces
+ 'indentation': [
+ # trailing whitespaces are ignored
+ (r'[ ]*$', something(Text), '#pop:2'),
# whitespaces preceding block collection indicators
- (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
- # block collection indicators
- (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
- # the beginning of a block line
- (r'[ ]*', save_indent(Text), '#pop'),
- ],
-
- # an indented line in the block context
- 'block-line': [
- # the line end
- (r'[ ]*(?=#|$)', something(Text), '#pop'),
- # whitespaces separating tokens
- (r'[ ]+', Text),
+ (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
+ # block collection indicators
+ (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
+ # the beginning of a block line
+ (r'[ ]*', save_indent(Text), '#pop'),
+ ],
+
+ # an indented line in the block context
+ 'block-line': [
+ # the line end
+ (r'[ ]*(?=#|$)', something(Text), '#pop'),
+ # whitespaces separating tokens
+ (r'[ ]+', Text),
# key with colon
(r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
bygroups(Name.Tag, set_indent(Punctuation, implicit=True))),
- # tags, anchors and aliases,
- include('descriptors'),
- # block collections and scalars
- include('block-nodes'),
- # flow collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
- something(Name.Variable),
- 'plain-scalar-in-block-context'),
- ],
-
- # tags, anchors, aliases
- 'descriptors': [
- # a full-form tag
+ # tags, anchors and aliases,
+ include('descriptors'),
+ # block collections and scalars
+ include('block-nodes'),
+ # flow collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
+ something(Name.Variable),
+ 'plain-scalar-in-block-context'),
+ ],
+
+ # tags, anchors, aliases
+ 'descriptors': [
+ # a full-form tag
(r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
- # a tag in the form '!', '!suffix' or '!handle!suffix'
+ # a tag in the form '!', '!suffix' or '!handle!suffix'
(r'!(?:[\w-]+!)?'
r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]*', Keyword.Type),
- # an anchor
- (r'&[\w-]+', Name.Label),
- # an alias
- (r'\*[\w-]+', Name.Variable),
- ],
-
- # block collections and scalars
- 'block-nodes': [
- # implicit key
- (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
- # literal and folded scalars
- (r'[|>]', Punctuation.Indicator,
- ('block-scalar-content', 'block-scalar-header')),
- ],
-
- # flow collections and quoted scalars
- 'flow-nodes': [
- # a flow sequence
- (r'\[', Punctuation.Indicator, 'flow-sequence'),
- # a flow mapping
- (r'\{', Punctuation.Indicator, 'flow-mapping'),
- # a single-quoted scalar
- (r'\'', String, 'single-quoted-scalar'),
- # a double-quoted scalar
- (r'\"', String, 'double-quoted-scalar'),
- ],
-
- # the content of a flow collection
- 'flow-collection': [
- # whitespaces
- (r'[ ]+', Text),
- # line breaks
- (r'\n+', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # simple indicators
- (r'[?:,]', Punctuation.Indicator),
- # tags, anchors and aliases
- include('descriptors'),
- # nested collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
- something(Name.Variable),
- 'plain-scalar-in-flow-context'),
- ],
-
- # a flow sequence indicated by '[' and ']'
- 'flow-sequence': [
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\]', Punctuation.Indicator, '#pop'),
- ],
-
- # a flow mapping indicated by '{' and '}'
- 'flow-mapping': [
+ # an anchor
+ (r'&[\w-]+', Name.Label),
+ # an alias
+ (r'\*[\w-]+', Name.Variable),
+ ],
+
+ # block collections and scalars
+ 'block-nodes': [
+ # implicit key
+ (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
+ # literal and folded scalars
+ (r'[|>]', Punctuation.Indicator,
+ ('block-scalar-content', 'block-scalar-header')),
+ ],
+
+ # flow collections and quoted scalars
+ 'flow-nodes': [
+ # a flow sequence
+ (r'\[', Punctuation.Indicator, 'flow-sequence'),
+ # a flow mapping
+ (r'\{', Punctuation.Indicator, 'flow-mapping'),
+ # a single-quoted scalar
+ (r'\'', String, 'single-quoted-scalar'),
+ # a double-quoted scalar
+ (r'\"', String, 'double-quoted-scalar'),
+ ],
+
+ # the content of a flow collection
+ 'flow-collection': [
+ # whitespaces
+ (r'[ ]+', Text),
+ # line breaks
+ (r'\n+', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # simple indicators
+ (r'[?:,]', Punctuation.Indicator),
+ # tags, anchors and aliases
+ include('descriptors'),
+ # nested collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
+ something(Name.Variable),
+ 'plain-scalar-in-flow-context'),
+ ],
+
+ # a flow sequence indicated by '[' and ']'
+ 'flow-sequence': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\]', Punctuation.Indicator, '#pop'),
+ ],
+
+ # a flow mapping indicated by '{' and '}'
+ 'flow-mapping': [
# key with colon
(r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
bygroups(Name.Tag, Punctuation)),
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\}', Punctuation.Indicator, '#pop'),
- ],
-
- # block scalar lines
- 'block-scalar-content': [
- # line break
- (r'\n', Text),
- # empty line
- (r'^[ ]+$',
- parse_block_scalar_empty_line(Text, Name.Constant)),
- # indentation spaces (we may leave the state here)
- (r'^[ ]*', parse_block_scalar_indent(Text)),
- # line content
- (r'[\S\t ]+', Name.Constant),
- ],
-
- # the content of a literal or folded scalar
- 'block-scalar-header': [
- # indentation indicator followed by chomping flag
- (r'([1-9])?[+-]?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- # chomping flag followed by indentation indicator
- (r'[+-]?([1-9])?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- ],
-
- # ignored and regular whitespaces in quoted scalars
- 'quoted-scalar-whitespaces': [
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Text),
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- ],
-
- # single-quoted scalars
- 'single-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of the quote character
- (r'\'\'', String.Escape),
- # regular non-whitespace characters
- (r'[^\s\']+', String),
- # the closing quote
- (r'\'', String, '#pop'),
- ],
-
- # double-quoted scalars
- 'double-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of special characters
- (r'\\[0abt\tn\nvfre "\\N_LP]', String),
- # escape codes
- (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
- String.Escape),
- # regular non-whitespace characters
- (r'[^\s"\\]+', String),
- # the closing quote
- (r'"', String, '#pop'),
- ],
-
- # the beginning of a new line while scanning a plain scalar
- 'plain-scalar-in-block-context-new-line': [
- # empty lines
- (r'^[ ]+$', Text),
- # line breaks
- (r'\n+', Text),
- # document start and document end indicators
- (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
- # indentation spaces (we may leave the block line state here)
- (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
- ],
-
- # a plain scalar in the block context
- 'plain-scalar-in-block-context': [
- # the scalar ends with the ':' indicator
- (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
- # the scalar ends with whitespaces followed by a comment
- (r'[ ]+(?=#)', Text, '#pop'),
- # trailing whitespaces are ignored
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
- # other whitespaces are a part of the value
- (r'[ ]+', Literal.Scalar.Plain),
- # regular non-whitespace characters
- (r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
- ],
-
-    # a plain scalar in the flow context
- 'plain-scalar-in-flow-context': [
- # the scalar ends with an indicator character
- (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
- # the scalar ends with a comment
- (r'[ ]+(?=#)', Text, '#pop'),
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Text),
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- # regular non-whitespace characters
- (r'[^\s,:?\[\]{}]+', Name.Variable),
- ],
-
- }
-
- def get_tokens_unprocessed(self, text=None, context=None):
- if context is None:
- context = YamlLexerContext(text, 0)
- return super(YamlLexer, self).get_tokens_unprocessed(text, context)
-
-
-class JsonLexer(RegexLexer):
- """
- For JSON data structures.
-
- .. versionadded:: 1.5
- """
-
- name = 'JSON'
- aliases = ['json']
- filenames = ['*.json']
- mimetypes = ['application/json']
-
- flags = re.DOTALL
-
- # integer part of a number
- int_part = r'-?(0|[1-9]\d*)'
-
- # fractional part of a number
- frac_part = r'\.\d+'
-
- # exponential part of a number
- exp_part = r'[eE](\+|-)?\d+'
-
- tokens = {
- 'whitespace': [
- (r'\s+', Text),
- ],
-
- # represents a simple terminal value
- 'simplevalue': [
- (r'(true|false|null)\b', Keyword.Constant),
- (('%(int_part)s(%(frac_part)s%(exp_part)s|'
- '%(exp_part)s|%(frac_part)s)') % vars(),
- Number.Float),
- (int_part, Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- ],
-
-
- # the right hand side of an object, after the attribute name
- 'objectattribute': [
- include('value'),
- (r':', Punctuation),
- # comma terminates the attribute but expects more
- (r',', Punctuation, '#pop'),
- # a closing bracket terminates the entire object, so pop twice
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\}', Punctuation.Indicator, '#pop'),
+ ],
+
+ # block scalar lines
+ 'block-scalar-content': [
+ # line break
+ (r'\n', Text),
+ # empty line
+ (r'^[ ]+$',
+ parse_block_scalar_empty_line(Text, Name.Constant)),
+ # indentation spaces (we may leave the state here)
+ (r'^[ ]*', parse_block_scalar_indent(Text)),
+ # line content
+ (r'[\S\t ]+', Name.Constant),
+ ],
+
+    # the header of a literal or folded scalar
+ 'block-scalar-header': [
+ # indentation indicator followed by chomping flag
+ (r'([1-9])?[+-]?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ # chomping flag followed by indentation indicator
+ (r'[+-]?([1-9])?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ ],
+
+ # ignored and regular whitespaces in quoted scalars
+ 'quoted-scalar-whitespaces': [
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+', Text),
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ ],
+
+ # single-quoted scalars
+ 'single-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of the quote character
+ (r'\'\'', String.Escape),
+ # regular non-whitespace characters
+ (r'[^\s\']+', String),
+ # the closing quote
+ (r'\'', String, '#pop'),
+ ],
+
+ # double-quoted scalars
+ 'double-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of special characters
+ (r'\\[0abt\tn\nvfre "\\N_LP]', String),
+ # escape codes
+ (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
+ String.Escape),
+ # regular non-whitespace characters
+ (r'[^\s"\\]+', String),
+ # the closing quote
+ (r'"', String, '#pop'),
+ ],
+
+ # the beginning of a new line while scanning a plain scalar
+ 'plain-scalar-in-block-context-new-line': [
+ # empty lines
+ (r'^[ ]+$', Text),
+ # line breaks
+ (r'\n+', Text),
+ # document start and document end indicators
+ (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
+ # indentation spaces (we may leave the block line state here)
+ (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
+ ],
+
+ # a plain scalar in the block context
+ 'plain-scalar-in-block-context': [
+ # the scalar ends with the ':' indicator
+ (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
+ # the scalar ends with whitespaces followed by a comment
+ (r'[ ]+(?=#)', Text, '#pop'),
+ # trailing whitespaces are ignored
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
+ ],
+
+    # a plain scalar in the flow context
+ 'plain-scalar-in-flow-context': [
+ # the scalar ends with an indicator character
+ (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
+ # the scalar ends with a comment
+ (r'[ ]+(?=#)', Text, '#pop'),
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+', Text),
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ # regular non-whitespace characters
+ (r'[^\s,:?\[\]{}]+', Name.Variable),
+ ],
+
+ }
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ if context is None:
+ context = YamlLexerContext(text, 0)
+ return super(YamlLexer, self).get_tokens_unprocessed(text, context)
+
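For reference, a minimal sketch of how the YAML rules above are driven through the public API, assuming a standard Pygments installation:

    from pygments.lexers import YamlLexer

    yaml_text = "key: value\nitems:\n  - one\n  - two\n"
    # get_tokens() delegates to get_tokens_unprocessed(), which creates the
    # YamlLexerContext used to carry indentation state between rules.
    for token_type, value in YamlLexer().get_tokens(yaml_text):
        print(token_type, repr(value))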
+
+class JsonLexer(RegexLexer):
+ """
+ For JSON data structures.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'JSON'
+ aliases = ['json']
+ filenames = ['*.json']
+ mimetypes = ['application/json']
+
+ flags = re.DOTALL
+
+ # integer part of a number
+ int_part = r'-?(0|[1-9]\d*)'
+
+ # fractional part of a number
+ frac_part = r'\.\d+'
+
+ # exponential part of a number
+ exp_part = r'[eE](\+|-)?\d+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+
+ # represents a simple terminal value
+ 'simplevalue': [
+ (r'(true|false|null)\b', Keyword.Constant),
+ (('%(int_part)s(%(frac_part)s%(exp_part)s|'
+ '%(exp_part)s|%(frac_part)s)') % vars(),
+ Number.Float),
+ (int_part, Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ ],
+
+
+ # the right hand side of an object, after the attribute name
+ 'objectattribute': [
+ include('value'),
+ (r':', Punctuation),
+ # comma terminates the attribute but expects more
+ (r',', Punctuation, '#pop'),
+ # a closing bracket terminates the entire object, so pop twice
(r'\}', Punctuation, '#pop:2'),
- ],
-
- # a json object - { attr, attr, ... }
- 'objectvalue': [
- include('whitespace'),
- (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
- (r'\}', Punctuation, '#pop'),
- ],
-
-    # a json array - [ value, value, ... ]
- 'arrayvalue': [
- include('whitespace'),
- include('value'),
- (r',', Punctuation),
- (r'\]', Punctuation, '#pop'),
- ],
-
- # a json value - either a simple value or a complex value (object or array)
- 'value': [
- include('whitespace'),
- include('simplevalue'),
- (r'\{', Punctuation, 'objectvalue'),
- (r'\[', Punctuation, 'arrayvalue'),
- ],
-
-    # the root of a json document would be a value
- 'root': [
- include('value'),
- ],
- }
-
+ ],
+
+ # a json object - { attr, attr, ... }
+ 'objectvalue': [
+ include('whitespace'),
+ (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+    # a json array - [ value, value, ... ]
+ 'arrayvalue': [
+ include('whitespace'),
+ include('value'),
+ (r',', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ ],
+
+ # a json value - either a simple value or a complex value (object or array)
+ 'value': [
+ include('whitespace'),
+ include('simplevalue'),
+ (r'\{', Punctuation, 'objectvalue'),
+ (r'\[', Punctuation, 'arrayvalue'),
+ ],
+
+    # the root of a json document would be a value
+ 'root': [
+ include('value'),
+ ],
+ }
+
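A small usage sketch for the JSON rules above: the Number.Float branch is assembled from int_part, frac_part and exp_part, so a value such as 3.14e0 should be reported as Number.Float (assuming a standard Pygments installation):

    from pygments.lexers import JsonLexer

    sample = '{"pi": 3.14e0, "ok": true, "n": null}'
    for token_type, value in JsonLexer().get_tokens(sample):
        print(token_type, repr(value))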
class JsonBareObjectLexer(JsonLexer):
"""
@@ -539,23 +539,23 @@ class JsonBareObjectLexer(JsonLexer):
}
-class JsonLdLexer(JsonLexer):
- """
- For `JSON-LD <http://json-ld.org/>`_ linked data.
-
- .. versionadded:: 2.0
- """
-
- name = 'JSON-LD'
- aliases = ['jsonld', 'json-ld']
- filenames = ['*.jsonld']
- mimetypes = ['application/ld+json']
-
- tokens = {
- 'objectvalue': [
- (r'"@(context|id|value|language|type|container|list|set|'
- r'reverse|index|base|vocab|graph)"', Name.Decorator,
- 'objectattribute'),
- inherit,
- ],
- }
+class JsonLdLexer(JsonLexer):
+ """
+ For `JSON-LD <http://json-ld.org/>`_ linked data.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'JSON-LD'
+ aliases = ['jsonld', 'json-ld']
+ filenames = ['*.jsonld']
+ mimetypes = ['application/ld+json']
+
+ tokens = {
+ 'objectvalue': [
+ (r'"@(context|id|value|language|type|container|list|set|'
+ r'reverse|index|base|vocab|graph)"', Name.Decorator,
+ 'objectattribute'),
+ inherit,
+ ],
+ }
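JsonLdLexer relies on the inherit placeholder: its extra rule for the JSON-LD keywords is tried first, and the parent's 'objectvalue' rules are spliced in where inherit appears. A minimal sketch of that pattern, with hypothetical lexer names:

    from pygments.lexer import RegexLexer, inherit
    from pygments.token import Keyword, Name, Text

    class BaseLexer(RegexLexer):
        tokens = {
            'root': [
                (r'\s+', Text),
                (r'[a-z]+', Name),
            ],
        }

    class ExtendedLexer(BaseLexer):
        tokens = {
            'root': [
                (r'foo', Keyword),  # checked before the inherited rules
                inherit,            # splice in BaseLexer's 'root' rules here
            ],
        }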
diff --git a/contrib/python/Pygments/py2/pygments/lexers/diff.py b/contrib/python/Pygments/py2/pygments/lexers/diff.py
index 5ff8375c67..2a1087e313 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/diff.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/diff.py
@@ -1,111 +1,111 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.diff
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for diff/patch formats.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.diff
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for diff/patch formats.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import re
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
- Literal
-
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
+ Literal
+
__all__ = ['DiffLexer', 'DarcsPatchLexer', 'WDiffLexer']
-
-
-class DiffLexer(RegexLexer):
- """
- Lexer for unified or context-style diffs or patches.
- """
-
- name = 'Diff'
- aliases = ['diff', 'udiff']
- filenames = ['*.diff', '*.patch']
- mimetypes = ['text/x-diff', 'text/x-patch']
-
- tokens = {
- 'root': [
- (r' .*\n', Text),
- (r'\+.*\n', Generic.Inserted),
- (r'-.*\n', Generic.Deleted),
- (r'!.*\n', Generic.Strong),
- (r'@.*\n', Generic.Subheading),
- (r'([Ii]ndex|diff).*\n', Generic.Heading),
- (r'=.*\n', Generic.Heading),
- (r'.*\n', Text),
- ]
- }
-
- def analyse_text(text):
- if text[:7] == 'Index: ':
- return True
- if text[:5] == 'diff ':
- return True
- if text[:4] == '--- ':
- return 0.9
-
-
-class DarcsPatchLexer(RegexLexer):
- """
- DarcsPatchLexer is a lexer for the various versions of the darcs patch
- format. Examples of this format are derived by commands such as
- ``darcs annotate --patch`` and ``darcs send``.
-
- .. versionadded:: 0.10
- """
-
- name = 'Darcs Patch'
- aliases = ['dpatch']
- filenames = ['*.dpatch', '*.darcspatch']
-
- DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
- 'replace')
-
- tokens = {
- 'root': [
- (r'<', Operator),
- (r'>', Operator),
- (r'\{', Operator),
- (r'\}', Operator),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
- bygroups(Operator, Keyword, Name, Text, Name, Operator,
- Literal.Date, Text, Operator)),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
- bygroups(Operator, Keyword, Name, Text, Name, Operator,
- Literal.Date, Text), 'comment'),
- (r'New patches:', Generic.Heading),
- (r'Context:', Generic.Heading),
- (r'Patch bundle hash:', Generic.Heading),
- (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
- bygroups(Text, Keyword, Text)),
- (r'\+', Generic.Inserted, "insert"),
- (r'-', Generic.Deleted, "delete"),
- (r'.*\n', Text),
- ],
- 'comment': [
- (r'[^\]].*\n', Comment),
- (r'\]', Operator, "#pop"),
- ],
-    'specialText': [  # darcs adds [_CODE_] special operators for clarity
- (r'\n', Text, "#pop"), # line-based
- (r'\[_[^_]*_]', Operator),
- ],
- 'insert': [
- include('specialText'),
- (r'\[', Generic.Inserted),
- (r'[^\n\[]+', Generic.Inserted),
- ],
- 'delete': [
- include('specialText'),
- (r'\[', Generic.Deleted),
- (r'[^\n\[]+', Generic.Deleted),
- ],
- }
+
+
+class DiffLexer(RegexLexer):
+ """
+ Lexer for unified or context-style diffs or patches.
+ """
+
+ name = 'Diff'
+ aliases = ['diff', 'udiff']
+ filenames = ['*.diff', '*.patch']
+ mimetypes = ['text/x-diff', 'text/x-patch']
+
+ tokens = {
+ 'root': [
+ (r' .*\n', Text),
+ (r'\+.*\n', Generic.Inserted),
+ (r'-.*\n', Generic.Deleted),
+ (r'!.*\n', Generic.Strong),
+ (r'@.*\n', Generic.Subheading),
+ (r'([Ii]ndex|diff).*\n', Generic.Heading),
+ (r'=.*\n', Generic.Heading),
+ (r'.*\n', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if text[:7] == 'Index: ':
+ return True
+ if text[:5] == 'diff ':
+ return True
+ if text[:4] == '--- ':
+ return 0.9
+
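The analyse_text() hook above feeds Pygments' lexer guessing: returning True (or a float near 1.0) makes DiffLexer a strong candidate for text starting with 'Index: ', 'diff ' or '--- '. A quick sketch:

    from pygments.lexers import guess_lexer

    sample = "diff --git a/file.py b/file.py\n--- a/file.py\n+++ b/file.py\n"
    lexer = guess_lexer(sample)
    print(lexer.name)  # expected to be 'Diff' given the scores above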
+
+class DarcsPatchLexer(RegexLexer):
+ """
+ DarcsPatchLexer is a lexer for the various versions of the darcs patch
+ format. Examples of this format are derived by commands such as
+ ``darcs annotate --patch`` and ``darcs send``.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Darcs Patch'
+ aliases = ['dpatch']
+ filenames = ['*.dpatch', '*.darcspatch']
+
+ DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
+ 'replace')
+
+ tokens = {
+ 'root': [
+ (r'<', Operator),
+ (r'>', Operator),
+ (r'\{', Operator),
+ (r'\}', Operator),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
+ bygroups(Operator, Keyword, Name, Text, Name, Operator,
+ Literal.Date, Text, Operator)),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
+ bygroups(Operator, Keyword, Name, Text, Name, Operator,
+ Literal.Date, Text), 'comment'),
+ (r'New patches:', Generic.Heading),
+ (r'Context:', Generic.Heading),
+ (r'Patch bundle hash:', Generic.Heading),
+ (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
+ bygroups(Text, Keyword, Text)),
+ (r'\+', Generic.Inserted, "insert"),
+ (r'-', Generic.Deleted, "delete"),
+ (r'.*\n', Text),
+ ],
+ 'comment': [
+ (r'[^\]].*\n', Comment),
+ (r'\]', Operator, "#pop"),
+ ],
+    'specialText': [  # darcs adds [_CODE_] special operators for clarity
+ (r'\n', Text, "#pop"), # line-based
+ (r'\[_[^_]*_]', Operator),
+ ],
+ 'insert': [
+ include('specialText'),
+ (r'\[', Generic.Inserted),
+ (r'[^\n\[]+', Generic.Inserted),
+ ],
+ 'delete': [
+ include('specialText'),
+ (r'\[', Generic.Deleted),
+ (r'[^\n\[]+', Generic.Deleted),
+ ],
+ }
class WDiffLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py2/pygments/lexers/dotnet.py b/contrib/python/Pygments/py2/pygments/lexers/dotnet.py
index 458a9eb483..daa545ea8f 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/dotnet.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/dotnet.py
@@ -1,399 +1,399 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.dotnet
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for .net languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dotnet
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for .net languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
+ :license: BSD, see LICENSE for details.
+"""
+import re
+
+from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
using, this, default, words
-from pygments.token import Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
-from pygments.util import get_choice_opt, iteritems
-from pygments import unistring as uni
-
-from pygments.lexers.html import XmlLexer
-
-__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
- 'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
-
-
-class CSharpLexer(RegexLexer):
- """
- For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
- source code.
-
- Additional options accepted:
-
- `unicodelevel`
- Determines which Unicode characters this lexer allows for identifiers.
- The possible values are:
-
- * ``none`` -- only the ASCII letters and numbers are allowed. This
- is the fastest selection.
- * ``basic`` -- all Unicode characters from the specification except
- category ``Lo`` are allowed.
- * ``full`` -- all Unicode characters as specified in the C# specs
- are allowed. Note that this means a considerable slowdown since the
- ``Lo`` category has more than 40,000 characters in it!
-
- The default value is ``basic``.
-
- .. versionadded:: 0.8
- """
-
- name = 'C#'
- aliases = ['csharp', 'c#']
- filenames = ['*.cs']
- mimetypes = ['text/x-csharp'] # inferred
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- # for the range of allowed unicode characters in identifiers, see
- # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
+from pygments.token import Punctuation, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
+from pygments.util import get_choice_opt, iteritems
+from pygments import unistring as uni
+
+from pygments.lexers.html import XmlLexer
+
+__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
+ 'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
+
+
+class CSharpLexer(RegexLexer):
+ """
+ For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
+ source code.
+
+ Additional options accepted:
+
+ `unicodelevel`
+ Determines which Unicode characters this lexer allows for identifiers.
+ The possible values are:
+
+ * ``none`` -- only the ASCII letters and numbers are allowed. This
+ is the fastest selection.
+ * ``basic`` -- all Unicode characters from the specification except
+ category ``Lo`` are allowed.
+ * ``full`` -- all Unicode characters as specified in the C# specs
+ are allowed. Note that this means a considerable slowdown since the
+ ``Lo`` category has more than 40,000 characters in it!
+
+ The default value is ``basic``.
+
+ .. versionadded:: 0.8
+ """
+
+ name = 'C#'
+ aliases = ['csharp', 'c#']
+ filenames = ['*.cs']
+ mimetypes = ['text/x-csharp'] # inferred
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ # for the range of allowed unicode characters in identifiers, see
+ # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
+
+ levels = {
'none': r'@?[_a-zA-Z]\w*',
- 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'),
- 'full': ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
- + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- }
-
- tokens = {}
- token_variants = True
-
- for levelname, cs_ident in iteritems(levels):
- tokens[levelname] = {
- 'root': [
- # method names
- (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Punctuation)),
- (r'^\s*\[.*?\]', Name.Attribute),
- (r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'\n', Text),
- (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
- (r'"(\\\\|\\"|[^"\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
- r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r'#[ \t]*(if|endif|else|elif|define|undef|'
- r'line|error|warning|region|endregion|pragma)\b.*?\n',
- Comment.Preproc),
- (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
- Keyword)),
+ 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
+ 'Cf', 'Mn', 'Mc') + ']*'),
+ 'full': ('@?(?:_|[^' +
+ uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
+ }
+
+ tokens = {}
+ token_variants = True
+
+ for levelname, cs_ident in iteritems(levels):
+ tokens[levelname] = {
+ 'root': [
+ # method names
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Punctuation)),
+ (r'^\s*\[.*?\]', Name.Attribute),
+ (r'[^\S\n]+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//.*?\n', Comment.Single),
+ (r'/[*].*?[*]/', Comment.Multiline),
+ (r'\n', Text),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
+ (r'[{}]', Punctuation),
+ (r'@"(""|[^"])*"', String),
+ (r'"(\\\\|\\"|[^"\n])*["\n]', String),
+ (r"'\\.'|'[^\\]'", String.Char),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
+ r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r'#[ \t]*(if|endif|else|elif|define|undef|'
+ r'line|error|warning|region|endregion|pragma)\b.*?\n',
+ Comment.Preproc),
+ (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
+ Keyword)),
(r'(abstract|as|async|await|base|break|by|case|catch|'
- r'checked|const|continue|default|delegate|'
- r'do|else|enum|event|explicit|extern|false|finally|'
- r'fixed|for|foreach|goto|if|implicit|in|interface|'
+ r'checked|const|continue|default|delegate|'
+ r'do|else|enum|event|explicit|extern|false|finally|'
+ r'fixed|for|foreach|goto|if|implicit|in|interface|'
r'internal|is|let|lock|new|null|on|operator|'
- r'out|override|params|private|protected|public|readonly|'
- r'ref|return|sealed|sizeof|stackalloc|static|'
- r'switch|this|throw|true|try|typeof|'
- r'unchecked|unsafe|virtual|void|while|'
- r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
+ r'out|override|params|private|protected|public|readonly|'
+ r'ref|return|sealed|sizeof|stackalloc|static|'
+ r'switch|this|throw|true|try|typeof|'
+ r'unchecked|unsafe|virtual|void|while|'
+ r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
r'descending|from|group|into|orderby|select|thenby|where|'
- r'join|equals)\b', Keyword),
- (r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
- r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
- (r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
- (cs_ident, Name),
- ],
- 'class': [
- (cs_ident, Name.Class, '#pop'),
- default('#pop'),
- ],
- 'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop'),
- ]
- }
-
- def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', list(self.tokens), 'basic')
- if level not in self._all_tokens:
- # compile the regexes now
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
-
- RegexLexer.__init__(self, **options)
-
-
-class NemerleLexer(RegexLexer):
- """
- For `Nemerle <http://nemerle.org>`_ source code.
-
- Additional options accepted:
-
- `unicodelevel`
- Determines which Unicode characters this lexer allows for identifiers.
- The possible values are:
-
- * ``none`` -- only the ASCII letters and numbers are allowed. This
- is the fastest selection.
- * ``basic`` -- all Unicode characters from the specification except
- category ``Lo`` are allowed.
- * ``full`` -- all Unicode characters as specified in the C# specs
- are allowed. Note that this means a considerable slowdown since the
- ``Lo`` category has more than 40,000 characters in it!
-
- The default value is ``basic``.
-
- .. versionadded:: 1.5
- """
-
- name = 'Nemerle'
- aliases = ['nemerle']
- filenames = ['*.n']
- mimetypes = ['text/x-nemerle'] # inferred
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- # for the range of allowed unicode characters in identifiers, see
- # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
+ r'join|equals)\b', Keyword),
+ (r'(global)(::)', bygroups(Keyword, Punctuation)),
+ (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
+ r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
+ (cs_ident, Name),
+ ],
+ 'class': [
+ (cs_ident, Name.Class, '#pop'),
+ default('#pop'),
+ ],
+ 'namespace': [
+ (r'(?=\()', Text, '#pop'), # using (resource)
+ ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop'),
+ ]
+ }
+
+ def __init__(self, **options):
+ level = get_choice_opt(options, 'unicodelevel', list(self.tokens), 'basic')
+ if level not in self._all_tokens:
+ # compile the regexes now
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+
+ RegexLexer.__init__(self, **options)
+
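The docstring above describes the unicodelevel option; __init__ compiles the matching token-table variant on demand. A minimal usage sketch:

    from pygments.lexers import CSharpLexer

    # 'none' restricts identifiers to ASCII and is the fastest variant;
    # 'basic' (the default) and 'full' allow progressively more Unicode.
    lexer = CSharpLexer(unicodelevel='none')
    tokens = list(lexer.get_tokens('class Foo { int Bar() { return 42; } }'))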
+
+class NemerleLexer(RegexLexer):
+ """
+ For `Nemerle <http://nemerle.org>`_ source code.
+
+ Additional options accepted:
+
+ `unicodelevel`
+ Determines which Unicode characters this lexer allows for identifiers.
+ The possible values are:
+
+ * ``none`` -- only the ASCII letters and numbers are allowed. This
+ is the fastest selection.
+ * ``basic`` -- all Unicode characters from the specification except
+ category ``Lo`` are allowed.
+ * ``full`` -- all Unicode characters as specified in the C# specs
+ are allowed. Note that this means a considerable slowdown since the
+ ``Lo`` category has more than 40,000 characters in it!
+
+ The default value is ``basic``.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Nemerle'
+ aliases = ['nemerle']
+ filenames = ['*.n']
+ mimetypes = ['text/x-nemerle'] # inferred
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ # for the range of allowed unicode characters in identifiers, see
+ # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
+
+ levels = {
'none': r'@?[_a-zA-Z]\w*',
- 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'),
- 'full': ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
- + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- }
-
- tokens = {}
- token_variants = True
-
- for levelname, cs_ident in iteritems(levels):
- tokens[levelname] = {
- 'root': [
- # method names
- (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Punctuation)),
- (r'^\s*\[.*?\]', Name.Attribute),
- (r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*].*?[*]/', Comment.Multiline),
- (r'\n', Text),
- (r'\$\s*"', String, 'splice-string'),
- (r'\$\s*<#', String, 'splice-string2'),
- (r'<#', String, 'recursive-string'),
-
- (r'(<\[)\s*(' + cs_ident + ':)?', Keyword),
- (r'\]\>', Keyword),
-
- # quasiquotation only
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
-
- (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
- (r'"(\\\\|\\"|[^"\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
- (r'#[ \t]*(if|endif|else|elif|define|undef|'
- r'line|error|warning|region|endregion|pragma)\b.*?\n',
- Comment.Preproc),
- (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
- Keyword)),
- (r'(abstract|and|as|base|catch|def|delegate|'
- r'enum|event|extern|false|finally|'
- r'fun|implements|interface|internal|'
- r'is|macro|match|matches|module|mutable|new|'
- r'null|out|override|params|partial|private|'
- r'protected|public|ref|sealed|static|'
- r'syntax|this|throw|true|try|type|typeof|'
- r'virtual|volatile|when|where|with|'
- r'assert|assert2|async|break|checked|continue|do|else|'
- r'ensures|for|foreach|if|late|lock|new|nolate|'
- r'otherwise|regexp|repeat|requires|return|surroundwith|'
- r'unchecked|unless|using|while|yield)\b', Keyword),
- (r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
- r'short|string|uint|ulong|ushort|void|array|list)\b\??',
- Keyword.Type),
- (r'(:>?)\s*(' + cs_ident + r'\??)',
- bygroups(Punctuation, Keyword.Type)),
- (r'(class|struct|variant|module)(\s+)',
- bygroups(Keyword, Text), 'class'),
- (r'(namespace|using)(\s+)', bygroups(Keyword, Text),
- 'namespace'),
- (cs_ident, Name),
- ],
- 'class': [
- (cs_ident, Name.Class, '#pop')
- ],
- 'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
- ],
- 'splice-string': [
- (r'[^"$]', String),
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
- (r'\\"', String),
- (r'"', String, '#pop')
- ],
- 'splice-string2': [
- (r'[^#<>$]', String),
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
- (r'<#', String, '#push'),
- (r'#>', String, '#pop')
- ],
- 'recursive-string': [
- (r'[^#<>]', String),
- (r'<#', String, '#push'),
- (r'#>', String, '#pop')
- ],
- 'splice-string-content': [
- (r'if|match', Keyword),
- (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
- (cs_ident, Name),
- (r'\d+', Number),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop')
- ]
- }
-
- def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', list(self.tokens),
- 'basic')
- if level not in self._all_tokens:
- # compile the regexes now
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
-
- RegexLexer.__init__(self, **options)
-
-
-class BooLexer(RegexLexer):
- """
- For `Boo <http://boo.codehaus.org/>`_ source code.
- """
-
- name = 'Boo'
- aliases = ['boo']
- filenames = ['*.boo']
- mimetypes = ['text/x-boo']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(#|//).*$', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment'),
- (r'[]{}:(),.;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'/(\\\\|\\/|[^/\s])/', String.Regex),
- (r'@/(\\\\|\\/|[^/])*/', String.Regex),
- (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
- (r'(as|abstract|callable|constructor|destructor|do|import|'
- r'enum|event|final|get|interface|internal|of|override|'
- r'partial|private|protected|public|return|set|static|'
- r'struct|transient|virtual|yield|super|and|break|cast|'
- r'continue|elif|else|ensure|except|for|given|goto|if|in|'
- r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
- r'while|from|as)\b', Keyword),
- (r'def(?=\s+\(.*?\))', Keyword),
- (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(namespace)(\s+)', bygroups(Keyword, Text), 'namespace'),
- (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
- r'assert|checked|enumerate|filter|getter|len|lock|map|'
- r'matrix|max|min|normalArrayIndexing|print|property|range|'
- r'rawArrayIndexing|required|typeof|unchecked|using|'
- r'yieldAll|zip)\b', Name.Builtin),
- (r'"""(\\\\|\\"|.*?)"""', String.Double),
- (r'"(\\\\|\\"|[^"]*?)"', String.Double),
- (r"'(\\\\|\\'|[^']*?)'", String.Single),
- (r'[a-zA-Z_]\w*', Name),
- (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
- (r'[0-9][0-9.]*(ms?|d|h|s)', Number),
- (r'0\d+', Number.Oct),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer),
- ],
- 'comment': [
- ('/[*]', Comment.Multiline, '#push'),
- ('[*]/', Comment.Multiline, '#pop'),
- ('[^/*]', Comment.Multiline),
- ('[*/]', Comment.Multiline)
- ],
- 'funcname': [
+ 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
+ 'Cf', 'Mn', 'Mc') + ']*'),
+ 'full': ('@?(?:_|[^' +
+ uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
+ }
+
+ tokens = {}
+ token_variants = True
+
+ for levelname, cs_ident in iteritems(levels):
+ tokens[levelname] = {
+ 'root': [
+ # method names
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Punctuation)),
+ (r'^\s*\[.*?\]', Name.Attribute),
+ (r'[^\S\n]+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//.*?\n', Comment.Single),
+ (r'/[*].*?[*]/', Comment.Multiline),
+ (r'\n', Text),
+ (r'\$\s*"', String, 'splice-string'),
+ (r'\$\s*<#', String, 'splice-string2'),
+ (r'<#', String, 'recursive-string'),
+
+ (r'(<\[)\s*(' + cs_ident + ':)?', Keyword),
+ (r'\]\>', Keyword),
+
+ # quasiquotation only
+ (r'\$' + cs_ident, Name),
+ (r'(\$)(\()', bygroups(Name, Punctuation),
+ 'splice-string-content'),
+
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
+ (r'[{}]', Punctuation),
+ (r'@"(""|[^"])*"', String),
+ (r'"(\\\\|\\"|[^"\n])*["\n]', String),
+ (r"'\\.'|'[^\\]'", String.Char),
+ (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
+ (r'#[ \t]*(if|endif|else|elif|define|undef|'
+ r'line|error|warning|region|endregion|pragma)\b.*?\n',
+ Comment.Preproc),
+ (r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
+ Keyword)),
+ (r'(abstract|and|as|base|catch|def|delegate|'
+ r'enum|event|extern|false|finally|'
+ r'fun|implements|interface|internal|'
+ r'is|macro|match|matches|module|mutable|new|'
+ r'null|out|override|params|partial|private|'
+ r'protected|public|ref|sealed|static|'
+ r'syntax|this|throw|true|try|type|typeof|'
+ r'virtual|volatile|when|where|with|'
+ r'assert|assert2|async|break|checked|continue|do|else|'
+ r'ensures|for|foreach|if|late|lock|new|nolate|'
+ r'otherwise|regexp|repeat|requires|return|surroundwith|'
+ r'unchecked|unless|using|while|yield)\b', Keyword),
+ (r'(global)(::)', bygroups(Keyword, Punctuation)),
+ (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
+ r'short|string|uint|ulong|ushort|void|array|list)\b\??',
+ Keyword.Type),
+ (r'(:>?)\s*(' + cs_ident + r'\??)',
+ bygroups(Punctuation, Keyword.Type)),
+ (r'(class|struct|variant|module)(\s+)',
+ bygroups(Keyword, Text), 'class'),
+ (r'(namespace|using)(\s+)', bygroups(Keyword, Text),
+ 'namespace'),
+ (cs_ident, Name),
+ ],
+ 'class': [
+ (cs_ident, Name.Class, '#pop')
+ ],
+ 'namespace': [
+ (r'(?=\()', Text, '#pop'), # using (resource)
+ ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
+ ],
+ 'splice-string': [
+ (r'[^"$]', String),
+ (r'\$' + cs_ident, Name),
+ (r'(\$)(\()', bygroups(Name, Punctuation),
+ 'splice-string-content'),
+ (r'\\"', String),
+ (r'"', String, '#pop')
+ ],
+ 'splice-string2': [
+ (r'[^#<>$]', String),
+ (r'\$' + cs_ident, Name),
+ (r'(\$)(\()', bygroups(Name, Punctuation),
+ 'splice-string-content'),
+ (r'<#', String, '#push'),
+ (r'#>', String, '#pop')
+ ],
+ 'recursive-string': [
+ (r'[^#<>]', String),
+ (r'<#', String, '#push'),
+ (r'#>', String, '#pop')
+ ],
+ 'splice-string-content': [
+ (r'if|match', Keyword),
+ (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
+ (cs_ident, Name),
+ (r'\d+', Number),
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop')
+ ]
+ }
+
+ def __init__(self, **options):
+ level = get_choice_opt(options, 'unicodelevel', list(self.tokens),
+ 'basic')
+ if level not in self._all_tokens:
+ # compile the regexes now
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+
+ RegexLexer.__init__(self, **options)
+
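A short sketch of the splice-string handling above: $"..." enters the 'splice-string' state and $(...) switches to 'splice-string-content' (the Nemerle snippet is illustrative only):

    from pygments.lexers import NemerleLexer

    src = 'def greet(name) { WriteLine($"hello $(name)") }'
    for token_type, value in NemerleLexer().get_tokens(src):
        print(token_type, repr(value))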
+
+class BooLexer(RegexLexer):
+ """
+ For `Boo <http://boo.codehaus.org/>`_ source code.
+ """
+
+ name = 'Boo'
+ aliases = ['boo']
+ filenames = ['*.boo']
+ mimetypes = ['text/x-boo']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(#|//).*$', Comment.Single),
+ (r'/[*]', Comment.Multiline, 'comment'),
+ (r'[]{}:(),.;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'/(\\\\|\\/|[^/\s])/', String.Regex),
+ (r'@/(\\\\|\\/|[^/])*/', String.Regex),
+ (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
+ (r'(as|abstract|callable|constructor|destructor|do|import|'
+ r'enum|event|final|get|interface|internal|of|override|'
+ r'partial|private|protected|public|return|set|static|'
+ r'struct|transient|virtual|yield|super|and|break|cast|'
+ r'continue|elif|else|ensure|except|for|given|goto|if|in|'
+ r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
+ r'while|from|as)\b', Keyword),
+ (r'def(?=\s+\(.*?\))', Keyword),
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(namespace)(\s+)', bygroups(Keyword, Text), 'namespace'),
+ (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
+ r'assert|checked|enumerate|filter|getter|len|lock|map|'
+ r'matrix|max|min|normalArrayIndexing|print|property|range|'
+ r'rawArrayIndexing|required|typeof|unchecked|using|'
+ r'yieldAll|zip)\b', Name.Builtin),
+ (r'"""(\\\\|\\"|.*?)"""', String.Double),
+ (r'"(\\\\|\\"|[^"]*?)"', String.Double),
+ (r"'(\\\\|\\'|[^']*?)'", String.Single),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
+ (r'[0-9][0-9.]*(ms?|d|h|s)', Number),
+ (r'0\d+', Number.Oct),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer),
+ ],
+ 'comment': [
+ ('/[*]', Comment.Multiline, '#push'),
+ ('[*]/', Comment.Multiline, '#pop'),
+ ('[^/*]', Comment.Multiline),
+ ('[*/]', Comment.Multiline)
+ ],
+ 'funcname': [
(r'[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'classname': [
+ ],
+ 'classname': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'namespace': [
+ ],
+ 'namespace': [
(r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
- ]
- }
-
-
-class VbNetLexer(RegexLexer):
- """
- For
- `Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
- source code.
- """
-
- name = 'VB.net'
- aliases = ['vb.net', 'vbnet']
- filenames = ['*.vb', '*.bas']
- mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
-
+ ]
+ }
+
+
+class VbNetLexer(RegexLexer):
+ """
+ For
+ `Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
+ source code.
+ """
+
+ name = 'VB.net'
+ aliases = ['vb.net', 'vbnet']
+ filenames = ['*.vb', '*.bas']
+ mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
+
uni_name = '[_' + uni.combine('Ll', 'Lt', 'Lm', 'Nl') + ']' + \
'[' + uni.combine('Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- (r'^\s*<.*?>', Name.Attribute),
- (r'\s+', Text),
- (r'\n', Text),
- (r'rem\b.*?\n', Comment),
- (r"'.*?\n", Comment),
- (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|'
- r'#ExternalSource.*?\n|#End\s+ExternalSource|'
- r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
- Comment.Preproc),
- (r'[(){}!#,.:]', Punctuation),
- (r'Option\s+(Strict|Explicit|Compare)\s+'
- r'(On|Off|Binary|Text)', Keyword.Declaration),
+ 'Cf', 'Mn', 'Mc') + ']*'
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'^\s*<.*?>', Name.Attribute),
+ (r'\s+', Text),
+ (r'\n', Text),
+ (r'rem\b.*?\n', Comment),
+ (r"'.*?\n", Comment),
+ (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|'
+ r'#ExternalSource.*?\n|#End\s+ExternalSource|'
+ r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
+ Comment.Preproc),
+ (r'[(){}!#,.:]', Punctuation),
+ (r'Option\s+(Strict|Explicit|Compare)\s+'
+ r'(On|Off|Binary|Text)', Keyword.Declaration),
(words((
'AddHandler', 'Alias', 'ByRef', 'ByVal', 'Call', 'Case',
'Catch', 'CBool', 'CByte', 'CChar', 'CDate', 'CDec', 'CDbl',
@@ -414,275 +414,275 @@ class VbNetLexer(RegexLexer):
'True', 'Try', 'TryCast', 'Wend', 'Using', 'When', 'While',
'Widening', 'With', 'WithEvents', 'WriteOnly'),
prefix=r'(?<!\.)', suffix=r'\b'), Keyword),
- (r'(?<!\.)End\b', Keyword, 'end'),
- (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
- (r'(?<!\.)(Function|Sub|Property)(\s+)',
- bygroups(Keyword, Text), 'funcname'),
- (r'(?<!\.)(Class|Structure|Enum)(\s+)',
- bygroups(Keyword, Text), 'classname'),
- (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
- bygroups(Keyword, Text), 'namespace'),
- (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
- r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
- r'UShort)\b', Keyword.Type),
- (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
- r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
- (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
- r'<=|>=|<>|[-&*/\\^+=<>\[\]]',
- Operator),
- ('"', String, 'string'),
- (r'_\n', Text), # Line continuation (must be before Name)
- (uni_name + '[%&@!#$]?', Name),
- ('#.*?#', Literal.Date),
- (r'(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?', Number.Float),
- (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
- ],
- 'string': [
- (r'""', String),
- (r'"C?', String, '#pop'),
- (r'[^"]+', String),
- ],
- 'dim': [
- (uni_name, Name.Variable, '#pop'),
- default('#pop'), # any other syntax
- ],
- 'funcname': [
- (uni_name, Name.Function, '#pop'),
- ],
- 'classname': [
- (uni_name, Name.Class, '#pop'),
- ],
- 'namespace': [
- (uni_name, Name.Namespace),
- (r'\.', Name.Namespace),
- default('#pop'),
- ],
- 'end': [
- (r'\s+', Text),
- (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
- Keyword, '#pop'),
- default('#pop'),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*(#If|Module|Namespace)', text, re.MULTILINE):
- return 0.5
-
-
-class GenericAspxLexer(RegexLexer):
- """
- Lexer for ASP.NET pages.
- """
-
- name = 'aspx-gen'
- filenames = []
- mimetypes = []
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
- Other,
- using(XmlLexer))),
- (r'(.+?)(?=<)', using(XmlLexer)),
- (r'.+', using(XmlLexer)),
- ],
- }
-
-
-# TODO support multiple languages within the same source file
-class CSharpAspxLexer(DelegatingLexer):
- """
- Lexer for highlighting C# within ASP.NET pages.
- """
-
- name = 'aspx-cs'
- aliases = ['aspx-cs']
- filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
- mimetypes = []
-
- def __init__(self, **options):
- super(CSharpAspxLexer, self).__init__(CSharpLexer, GenericAspxLexer,
- **options)
-
- def analyse_text(text):
- if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
- return 0.2
- elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
- return 0.15
-
-
-class VbNetAspxLexer(DelegatingLexer):
- """
- Lexer for highlighting Visual Basic.net within ASP.NET pages.
- """
-
- name = 'aspx-vb'
- aliases = ['aspx-vb']
- filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
- mimetypes = []
-
- def __init__(self, **options):
- super(VbNetAspxLexer, self).__init__(VbNetLexer, GenericAspxLexer,
- **options)
-
- def analyse_text(text):
- if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
- return 0.2
- elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
- return 0.15
-
-
-# Very close to functional.OcamlLexer
-class FSharpLexer(RegexLexer):
- """
+ (r'(?<!\.)End\b', Keyword, 'end'),
+ (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
+ (r'(?<!\.)(Function|Sub|Property)(\s+)',
+ bygroups(Keyword, Text), 'funcname'),
+ (r'(?<!\.)(Class|Structure|Enum)(\s+)',
+ bygroups(Keyword, Text), 'classname'),
+ (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
+ bygroups(Keyword, Text), 'namespace'),
+ (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
+ r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
+ r'UShort)\b', Keyword.Type),
+ (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
+ r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
+ (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
+ r'<=|>=|<>|[-&*/\\^+=<>\[\]]',
+ Operator),
+ ('"', String, 'string'),
+ (r'_\n', Text), # Line continuation (must be before Name)
+ (uni_name + '[%&@!#$]?', Name),
+ ('#.*?#', Literal.Date),
+ (r'(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?', Number.Float),
+ (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
+ (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
+ (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
+ ],
+ 'string': [
+ (r'""', String),
+ (r'"C?', String, '#pop'),
+ (r'[^"]+', String),
+ ],
+ 'dim': [
+ (uni_name, Name.Variable, '#pop'),
+ default('#pop'), # any other syntax
+ ],
+ 'funcname': [
+ (uni_name, Name.Function, '#pop'),
+ ],
+ 'classname': [
+ (uni_name, Name.Class, '#pop'),
+ ],
+ 'namespace': [
+ (uni_name, Name.Namespace),
+ (r'\.', Name.Namespace),
+ default('#pop'),
+ ],
+ 'end': [
+ (r'\s+', Text),
+ (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
+ Keyword, '#pop'),
+ default('#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*(#If|Module|Namespace)', text, re.MULTILINE):
+ return 0.5
+
+
+class GenericAspxLexer(RegexLexer):
+ """
+ Lexer for ASP.NET pages.
+ """
+
+ name = 'aspx-gen'
+ filenames = []
+ mimetypes = []
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
+ (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
+ Other,
+ using(XmlLexer))),
+ (r'(.+?)(?=<)', using(XmlLexer)),
+ (r'.+', using(XmlLexer)),
+ ],
+ }
+
+
+# TODO support multiple languages within the same source file
+class CSharpAspxLexer(DelegatingLexer):
+ """
+ Lexer for highlighting C# within ASP.NET pages.
+ """
+
+ name = 'aspx-cs'
+ aliases = ['aspx-cs']
+ filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(CSharpAspxLexer, self).__init__(CSharpLexer, GenericAspxLexer,
+ **options)
+
+ def analyse_text(text):
+ if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
+ return 0.2
+ elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
+ return 0.15
+
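CSharpAspxLexer composes two lexers via DelegatingLexer: GenericAspxLexer tokenizes the page and marks embedded code as Other, and those Other spans are then re-lexed with CSharpLexer. A usage sketch with illustrative markup:

    from pygments.lexers import CSharpAspxLexer

    page = '<%@ Page Language="C#" %>\n<script runat="server">int x = 1;</script>\n'
    tokens = list(CSharpAspxLexer().get_tokens(page))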
+
+class VbNetAspxLexer(DelegatingLexer):
+ """
+ Lexer for highlighting Visual Basic.net within ASP.NET pages.
+ """
+
+ name = 'aspx-vb'
+ aliases = ['aspx-vb']
+ filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(VbNetAspxLexer, self).__init__(VbNetLexer, GenericAspxLexer,
+ **options)
+
+ def analyse_text(text):
+ if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
+ return 0.2
+ elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
+ return 0.15
+
+
+# Very close to functional.OcamlLexer
+class FSharpLexer(RegexLexer):
+ """
For the `F# language <https://fsharp.org/>`_ (version 3.0).
-
- .. versionadded:: 1.5
- """
-
+
+ .. versionadded:: 1.5
+ """
+
name = 'F#'
aliases = ['fsharp', 'f#']
- filenames = ['*.fs', '*.fsi']
- mimetypes = ['text/x-fsharp']
-
- keywords = [
- 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
- 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
- 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
- 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
- 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
- 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
- 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
- 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
- 'while', 'with', 'yield!', 'yield',
- ]
- # Reserved words; cannot hurt to color them as keywords too.
- keywords += [
- 'atomic', 'break', 'checked', 'component', 'const', 'constraint',
- 'constructor', 'continue', 'eager', 'event', 'external', 'fixed',
- 'functor', 'include', 'method', 'mixin', 'object', 'parallel',
- 'process', 'protected', 'pure', 'sealed', 'tailcall', 'trait',
- 'virtual', 'volatile',
- ]
- keyopts = [
+ filenames = ['*.fs', '*.fsi']
+ mimetypes = ['text/x-fsharp']
+
+ keywords = [
+ 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
+ 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
+ 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
+ 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
+ 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
+ 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
+ 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
+ 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
+ 'while', 'with', 'yield!', 'yield',
+ ]
+ # Reserved words; cannot hurt to color them as keywords too.
+ keywords += [
+ 'atomic', 'break', 'checked', 'component', 'const', 'constraint',
+ 'constructor', 'continue', 'eager', 'event', 'external', 'fixed',
+ 'functor', 'include', 'method', 'mixin', 'object', 'parallel',
+ 'process', 'protected', 'pure', 'sealed', 'tailcall', 'trait',
+ 'virtual', 'volatile',
+ ]
+ keyopts = [
'!=', '#', '&&', '&', r'\(', r'\)', r'\*', r'\+', ',', r'-\.',
'->', '-', r'\.\.', r'\.', '::', ':=', ':>', ':', ';;', ';', '<-',
r'<\]', '<', r'>\]', '>', r'\?\?', r'\?', r'\[<', r'\[\|', r'\[', r'\]',
'_', '`', r'\{', r'\|\]', r'\|', r'\}', '~', '<@@', '<@', '=', '@>', '@@>',
- ]
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ['and', 'or', 'not']
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = [
- 'sbyte', 'byte', 'char', 'nativeint', 'unativeint', 'float32', 'single',
- 'float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32',
- 'uint32', 'int64', 'uint64', 'decimal', 'unit', 'bool', 'string',
- 'list', 'exn', 'obj', 'enum',
- ]
-
- # See http://msdn.microsoft.com/en-us/library/dd233181.aspx and/or
- # http://fsharp.org/about/files/spec.pdf for reference. Good luck.
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbrafv]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\U[0-9a-fA-F]{8}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b(?<!\.)([A-Z][\w\']*)(?=\s*\.)',
- Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name),
- (r'///.*?\n', String.Doc),
- (r'//.*?\n', Comment.Single),
- (r'\(\*(?!\))', Comment, 'comment'),
-
- (r'@"', String, 'lstring'),
- (r'"""', String, 'tqs'),
- (r'"', String, 'string'),
-
- (r'\b(open|module)(\s+)([\w.]+)',
- bygroups(Keyword, Text, Name.Namespace)),
- (r'\b(let!?)(\s+)(\w+)',
- bygroups(Keyword, Text, Name.Variable)),
- (r'\b(type)(\s+)(\w+)',
- bygroups(Keyword, Text, Name.Class)),
- (r'\b(member|override)(\s+)(\w+)(\.)(\w+)',
- bygroups(Keyword, Text, Name, Punctuation, Name.Function)),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'``([^`\n\r\t]|`[^`\n\r\t])+``', Name),
- (r'(%s)' % '|'.join(keyopts), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
- (r'#[ \t]*(if|endif|else|line|nowarn|light|\d+)\b.*?\n',
- Comment.Preproc),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
- (r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
- (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Bin),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
- Number.Float),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'@?"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- # e.g. dictionary index access
- default('#pop'),
- ],
- 'comment': [
- (r'[^(*)@"]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- # comments cannot be closed within strings in comments
- (r'@"', String, 'lstring'),
- (r'"""', String, 'tqs'),
- (r'"', String, 'string'),
- (r'[(*)@]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String),
- include('escape-sequence'),
- (r'\\\n', String),
- (r'\n', String), # newlines are allowed in any string
- (r'"B?', String, '#pop'),
- ],
- 'lstring': [
- (r'[^"]+', String),
- (r'\n', String),
- (r'""', String),
- (r'"B?', String, '#pop'),
- ],
- 'tqs': [
- (r'[^"]+', String),
- (r'\n', String),
- (r'"""B?', String, '#pop'),
- (r'"', String),
- ],
- }
+ ]
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ['and', 'or', 'not']
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = [
+ 'sbyte', 'byte', 'char', 'nativeint', 'unativeint', 'float32', 'single',
+ 'float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32',
+ 'uint32', 'int64', 'uint64', 'decimal', 'unit', 'bool', 'string',
+ 'list', 'exn', 'obj', 'enum',
+ ]
+
+ # See http://msdn.microsoft.com/en-us/library/dd233181.aspx and/or
+ # http://fsharp.org/about/files/spec.pdf for reference. Good luck.
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\"\'ntbrafv]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\U[0-9a-fA-F]{8}', String.Escape),
+ ],
+ 'root': [
+ (r'\s+', Text),
+ (r'\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b(?<!\.)([A-Z][\w\']*)(?=\s*\.)',
+ Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name),
+ (r'///.*?\n', String.Doc),
+ (r'//.*?\n', Comment.Single),
+ (r'\(\*(?!\))', Comment, 'comment'),
+
+ (r'@"', String, 'lstring'),
+ (r'"""', String, 'tqs'),
+ (r'"', String, 'string'),
+
+ (r'\b(open|module)(\s+)([\w.]+)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'\b(let!?)(\s+)(\w+)',
+ bygroups(Keyword, Text, Name.Variable)),
+ (r'\b(type)(\s+)(\w+)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'\b(member|override)(\s+)(\w+)(\.)(\w+)',
+ bygroups(Keyword, Text, Name, Punctuation, Name.Function)),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'``([^`\n\r\t]|`[^`\n\r\t])+``', Name),
+ (r'(%s)' % '|'.join(keyopts), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+ (r'#[ \t]*(if|endif|else|line|nowarn|light|\d+)\b.*?\n',
+ Comment.Preproc),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
+ (r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
+ (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Bin),
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
+ Number.Float),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'@?"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ # e.g. dictionary index access
+ default('#pop'),
+ ],
+ 'comment': [
+ (r'[^(*)@"]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ # comments cannot be closed within strings in comments
+ (r'@"', String, 'lstring'),
+ (r'"""', String, 'tqs'),
+ (r'"', String, 'string'),
+ (r'[(*)@]', Comment),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ include('escape-sequence'),
+ (r'\\\n', String),
+ (r'\n', String), # newlines are allowed in any string
+ (r'"B?', String, '#pop'),
+ ],
+ 'lstring': [
+ (r'[^"]+', String),
+ (r'\n', String),
+ (r'""', String),
+ (r'"B?', String, '#pop'),
+ ],
+ 'tqs': [
+ (r'[^"]+', String),
+ (r'\n', String),
+ (r'"""B?', String, '#pop'),
+ (r'"', String),
+ ],
+ }
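A minimal end-to-end sketch that runs the F# rules above through a formatter, assuming a standard Pygments installation:

    from pygments import highlight
    from pygments.lexers import FSharpLexer
    from pygments.formatters import TerminalFormatter

    code = 'let square x = x * x\nprintfn "%d" (square 7)\n'
    print(highlight(code, FSharpLexer(), TerminalFormatter()))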
diff --git a/contrib/python/Pygments/py2/pygments/lexers/dsls.py b/contrib/python/Pygments/py2/pygments/lexers/dsls.py
index 0af3c6c273..9428ba1ecf 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/dsls.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/dsls.py
@@ -1,209 +1,209 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.dsls
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various domain-specific languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dsls
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various domain-specific languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
include, default, this, using, combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Whitespace
-
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal, Whitespace
+
__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
- 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
+ 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
-
-
-class ProtoBufLexer(RegexLexer):
- """
- Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
- definition files.
-
- .. versionadded:: 1.4
- """
-
- name = 'Protocol Buffer'
- aliases = ['protobuf', 'proto']
- filenames = ['*.proto']
-
- tokens = {
- 'root': [
- (r'[ \t]+', Text),
+
+
+class ProtoBufLexer(RegexLexer):
+ """
+ Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
+ definition files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Protocol Buffer'
+ aliases = ['protobuf', 'proto']
+ filenames = ['*.proto']
+
+ tokens = {
+ 'root': [
+ (r'[ \t]+', Text),
(r'[,;{}\[\]()<>]', Punctuation),
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (words((
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (words((
'import', 'option', 'optional', 'required', 'repeated',
'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
'max', 'rpc', 'returns', 'oneof'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
- 'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
- 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
- Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
- (r'(message|extend)(\s+)',
- bygroups(Keyword.Declaration, Text), 'message'),
- (r'(enum|group|service)(\s+)',
- bygroups(Keyword.Declaration, Text), 'type'),
- (r'\".*?\"', String),
- (r'\'.*?\'', String),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'(\-?(inf|nan))\b', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[+-=]', Operator),
- (r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
+ Keyword),
+ (words((
+ 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
+ 'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
+ 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
+ (r'(message|extend)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'message'),
+ (r'(enum|group|service)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'type'),
+ (r'\".*?\"', String),
+ (r'\'.*?\'', String),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'(\-?(inf|nan))\b', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'[+-=]', Operator),
+ (r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
(r'[a-zA-Z_][\w.]*', Name),
- ],
- 'package': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'message': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'type': [
- (r'[a-zA-Z_]\w*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class ThriftLexer(RegexLexer):
- """
- For `Thrift <https://thrift.apache.org/>`__ interface definitions.
-
- .. versionadded:: 2.1
- """
- name = 'Thrift'
- aliases = ['thrift']
- filenames = ['*.thrift']
- mimetypes = ['application/x-thrift']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- (r'"', String.Double, combined('stringescape', 'dqs')),
- (r'\'', String.Single, combined('stringescape', 'sqs')),
- (r'(namespace)(\s+)',
- bygroups(Keyword.Namespace, Text.Whitespace), 'namespace'),
- (r'(enum|union|struct|service|exception)(\s+)',
- bygroups(Keyword.Declaration, Text.Whitespace), 'class'),
- (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
- r'((?:[^\W\d]|\$)[\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- include('keywords'),
- include('numbers'),
- (r'[&=]', Operator),
+ ],
+ 'package': [
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'message': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ default('#pop'),
+ ],
+ 'type': [
+ (r'[a-zA-Z_]\w*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class ThriftLexer(RegexLexer):
+ """
+ For `Thrift <https://thrift.apache.org/>`__ interface definitions.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Thrift'
+ aliases = ['thrift']
+ filenames = ['*.thrift']
+ mimetypes = ['application/x-thrift']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+ (r'"', String.Double, combined('stringescape', 'dqs')),
+ (r'\'', String.Single, combined('stringescape', 'sqs')),
+ (r'(namespace)(\s+)',
+ bygroups(Keyword.Namespace, Text.Whitespace), 'namespace'),
+ (r'(enum|union|struct|service|exception)(\s+)',
+ bygroups(Keyword.Declaration, Text.Whitespace), 'class'),
+ (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
+ r'((?:[^\W\d]|\$)[\w$]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ include('keywords'),
+ include('numbers'),
+ (r'[&=]', Operator),
(r'[:;,{}()<>\[\]]', Punctuation),
(r'[a-zA-Z_](\.\w|\w)*', Name),
- ],
- 'whitespace': [
- (r'\n', Text.Whitespace),
- (r'\s+', Text.Whitespace),
- ],
- 'comments': [
- (r'#.*$', Comment),
- (r'//.*?\n', Comment),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- ],
- 'stringescape': [
- (r'\\([\\nrt"\'])', String.Escape),
- ],
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'[^\\"\n]+', String.Double),
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r'[^\\\'\n]+', String.Single),
- ],
- 'namespace': [
+ ],
+ 'whitespace': [
+ (r'\n', Text.Whitespace),
+ (r'\s+', Text.Whitespace),
+ ],
+ 'comments': [
+ (r'#.*$', Comment),
+ (r'//.*?\n', Comment),
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ ],
+ 'stringescape': [
+ (r'\\([\\nrt"\'])', String.Escape),
+ ],
+ 'dqs': [
+ (r'"', String.Double, '#pop'),
+ (r'[^\\"\n]+', String.Double),
+ ],
+ 'sqs': [
+ (r"'", String.Single, '#pop'),
+ (r'[^\\\'\n]+', String.Single),
+ ],
+ 'namespace': [
(r'[a-z*](\.\w|\w)*', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'keywords': [
- (r'(async|oneway|extends|throws|required|optional)\b', Keyword),
- (r'(true|false)\b', Keyword.Constant),
- (r'(const|typedef)\b', Keyword.Declaration),
- (words((
- 'cpp_namespace', 'cpp_include', 'cpp_type', 'java_package',
- 'cocoa_prefix', 'csharp_namespace', 'delphi_namespace',
- 'php_namespace', 'py_module', 'perl_package',
- 'ruby_namespace', 'smalltalk_category', 'smalltalk_prefix',
- 'xsd_all', 'xsd_optional', 'xsd_nillable', 'xsd_namespace',
- 'xsd_attrs', 'include'), suffix=r'\b'),
- Keyword.Namespace),
- (words((
- 'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
+ default('#pop'),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ default('#pop'),
+ ],
+ 'keywords': [
+ (r'(async|oneway|extends|throws|required|optional)\b', Keyword),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(const|typedef)\b', Keyword.Declaration),
+ (words((
+ 'cpp_namespace', 'cpp_include', 'cpp_type', 'java_package',
+ 'cocoa_prefix', 'csharp_namespace', 'delphi_namespace',
+ 'php_namespace', 'py_module', 'perl_package',
+ 'ruby_namespace', 'smalltalk_category', 'smalltalk_prefix',
+ 'xsd_all', 'xsd_optional', 'xsd_nillable', 'xsd_namespace',
+ 'xsd_attrs', 'include'), suffix=r'\b'),
+ Keyword.Namespace),
+ (words((
+ 'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
'string', 'binary', 'map', 'list', 'set', 'slist',
- 'senum'), suffix=r'\b'),
- Keyword.Type),
- (words((
- 'BEGIN', 'END', '__CLASS__', '__DIR__', '__FILE__',
- '__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__',
- 'abstract', 'alias', 'and', 'args', 'as', 'assert', 'begin',
- 'break', 'case', 'catch', 'class', 'clone', 'continue',
- 'declare', 'def', 'default', 'del', 'delete', 'do', 'dynamic',
- 'elif', 'else', 'elseif', 'elsif', 'end', 'enddeclare',
- 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile',
- 'ensure', 'except', 'exec', 'finally', 'float', 'for',
- 'foreach', 'function', 'global', 'goto', 'if', 'implements',
- 'import', 'in', 'inline', 'instanceof', 'interface', 'is',
- 'lambda', 'module', 'native', 'new', 'next', 'nil', 'not',
- 'or', 'pass', 'public', 'print', 'private', 'protected',
- 'raise', 'redo', 'rescue', 'retry', 'register', 'return',
- 'self', 'sizeof', 'static', 'super', 'switch', 'synchronized',
- 'then', 'this', 'throw', 'transient', 'try', 'undef',
- 'unless', 'unsigned', 'until', 'use', 'var', 'virtual',
- 'volatile', 'when', 'while', 'with', 'xor', 'yield'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- ],
- 'numbers': [
- (r'[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)', Number.Float),
- (r'[+-]?0x[0-9A-Fa-f]+', Number.Hex),
- (r'[+-]?[0-9]+', Number.Integer),
- ],
- }
-
-
+ 'senum'), suffix=r'\b'),
+ Keyword.Type),
+ (words((
+ 'BEGIN', 'END', '__CLASS__', '__DIR__', '__FILE__',
+ '__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__',
+ 'abstract', 'alias', 'and', 'args', 'as', 'assert', 'begin',
+ 'break', 'case', 'catch', 'class', 'clone', 'continue',
+ 'declare', 'def', 'default', 'del', 'delete', 'do', 'dynamic',
+ 'elif', 'else', 'elseif', 'elsif', 'end', 'enddeclare',
+ 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile',
+ 'ensure', 'except', 'exec', 'finally', 'float', 'for',
+ 'foreach', 'function', 'global', 'goto', 'if', 'implements',
+ 'import', 'in', 'inline', 'instanceof', 'interface', 'is',
+ 'lambda', 'module', 'native', 'new', 'next', 'nil', 'not',
+ 'or', 'pass', 'public', 'print', 'private', 'protected',
+ 'raise', 'redo', 'rescue', 'retry', 'register', 'return',
+ 'self', 'sizeof', 'static', 'super', 'switch', 'synchronized',
+ 'then', 'this', 'throw', 'transient', 'try', 'undef',
+ 'unless', 'unsigned', 'until', 'use', 'var', 'virtual',
+ 'volatile', 'when', 'while', 'with', 'xor', 'yield'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ ],
+ 'numbers': [
+ (r'[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)', Number.Float),
+ (r'[+-]?0x[0-9A-Fa-f]+', Number.Hex),
+ (r'[+-]?[0-9]+', Number.Integer),
+ ],
+ }
+
+
class ZeekLexer(RegexLexer):
- """
+ """
For `Zeek <https://www.zeek.org/>`_ scripts.
-
+
.. versionadded:: 2.5
- """
+ """
name = 'Zeek'
aliases = ['zeek', 'bro']
filenames = ['*.zeek', '*.bro']
-
+
_hex = r'[0-9a-fA-F]'
- _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
- _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
-
- tokens = {
- 'root': [
+ _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
+ _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
+
+ tokens = {
+ 'root': [
include('whitespace'),
include('comments'),
include('directives'),
@@ -219,9 +219,9 @@ class ZeekLexer(RegexLexer):
],
'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
],
'comments': [
@@ -288,7 +288,7 @@ class ZeekLexer(RegexLexer):
# operator.
(r'/(?=.*/)', String.Regex, 'regex'),
- (r'(T|F)\b', Keyword.Constant),
+ (r'(T|F)\b', Keyword.Constant),
# Port
(r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
@@ -305,13 +305,13 @@ class ZeekLexer(RegexLexer):
(_float + r'\b', Number.Float),
(r'(\d+)\b', Number.Integer),
- # Hostnames
- (_h + r'(\.' + _h + r')+', String),
+ # Hostnames
+ (_h + r'(\.' + _h + r')+', String),
],
'operators': [
(r'[!%*/+<=>~|&^-]', Operator),
- (r'([-+=&|]{2}|[+=!><-]=)', Operator),
+ (r'([-+=&|]{2}|[+=!><-]=)', Operator),
(r'(in|as|is|of)\b', Operator.Word),
(r'\??\$', Operator),
],
@@ -327,453 +327,453 @@ class ZeekLexer(RegexLexer):
'identifiers': [
(r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
- (r'[a-zA-Z_]\w*', Name)
- ],
+ (r'[a-zA-Z_]\w*', Name)
+ ],
- 'string': [
+ 'string': [
(r'\\.', String.Escape),
(r'%-?[0-9]*(\.[0-9]+)?[DTdxsefg]', String.Escape),
- (r'"', String, '#pop'),
+ (r'"', String, '#pop'),
(r'.', String),
- ],
+ ],
- 'regex': [
+ 'regex': [
(r'\\.', String.Escape),
- (r'/', String.Regex, '#pop'),
+ (r'/', String.Regex, '#pop'),
(r'.', String.Regex),
],
- }
-
-
+ }
+
+
BroLexer = ZeekLexer
-class PuppetLexer(RegexLexer):
- """
- For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
-
- .. versionadded:: 1.6
- """
- name = 'Puppet'
- aliases = ['puppet']
- filenames = ['*.pp']
-
- tokens = {
- 'root': [
- include('comments'),
- include('keywords'),
- include('names'),
- include('numbers'),
- include('operators'),
- include('strings'),
-
- (r'[]{}:(),;[]', Punctuation),
- (r'[^\S\n]+', Text),
- ],
-
- 'comments': [
- (r'\s*#.*$', Comment),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
-
- 'operators': [
- (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
- (r'(in|and|or|not)\b', Operator.Word),
- ],
-
- 'names': [
+class PuppetLexer(RegexLexer):
+ """
+ For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Puppet'
+ aliases = ['puppet']
+ filenames = ['*.pp']
+
+ tokens = {
+ 'root': [
+ include('comments'),
+ include('keywords'),
+ include('names'),
+ include('numbers'),
+ include('operators'),
+ include('strings'),
+
+ (r'[]{}:(),;[]', Punctuation),
+ (r'[^\S\n]+', Text),
+ ],
+
+ 'comments': [
+ (r'\s*#.*$', Comment),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+
+ 'operators': [
+ (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
+ (r'(in|and|or|not)\b', Operator.Word),
+ ],
+
+ 'names': [
(r'[a-zA-Z_]\w*', Name.Attribute),
- (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
- String, Punctuation)),
- (r'\$\S+', Name.Variable),
- ],
-
- 'numbers': [
- # Copypasta from the Python lexer
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
-
- 'keywords': [
- # Left out 'group' and 'require'
- # Since they're often used as attributes
- (words((
- 'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
- 'check', 'class', 'computer', 'configured', 'contained',
- 'create_resources', 'crit', 'cron', 'debug', 'default',
- 'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
- 'err', 'exec', 'extlookup', 'fail', 'false', 'file',
- 'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
- 'include', 'info', 'inherits', 'inline_template', 'installed',
- 'interface', 'k5login', 'latest', 'link', 'loglevel',
- 'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
- 'mount', 'mounted', 'nagios_command', 'nagios_contact',
- 'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
- 'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
- 'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
- 'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
- 'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
- 'realize', 'regsubst', 'resources', 'role', 'router', 'running',
- 'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
- 'service', 'sha1', 'shellquote', 'split', 'sprintf',
- 'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
- 'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
- 'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
- 'zpool'), prefix='(?i)', suffix=r'\b'),
- Keyword),
- ],
-
- 'strings': [
- (r'"([^"])*"', String),
- (r"'(\\'|[^'])*'", String),
- ],
-
- }
-
-
-class RslLexer(RegexLexer):
- """
- `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
- language used in RAISE (Rigorous Approach to Industrial Software Engineering)
- method.
-
- .. versionadded:: 2.0
- """
- name = 'RSL'
- aliases = ['rsl']
- filenames = ['*.rsl']
- mimetypes = ['text/rsl']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (words((
- 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
- 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
- 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
- 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
- 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
- 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
- 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
- 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
- 'type', 'union', 'until', 'use', 'value', 'variable', 'while',
- 'with', 'write', '~isin', '-inflist', '-infset', '-list',
- '-set'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'(variable|value)\b', Keyword.Declaration),
- (r'--.*?\n', Comment),
- (r'<:.*?:>', Comment),
- (r'\{!.*?!\}', Comment),
- (r'/\*.*?\*/', Comment),
- (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function),
- (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)',
- bygroups(Text, Name.Function, Text, Keyword)),
- (r'\b[A-Z]\w*\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'".*"', String),
- (r'\'.\'', String.Char),
- (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
- r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
- Operator),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'.', Text),
- ],
- }
-
- def analyse_text(text):
- """
- Check for the most common text in the beginning of a RSL file.
- """
- if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
- return 1.0
-
-
-class MscgenLexer(RegexLexer):
- """
- For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
-
- .. versionadded:: 1.6
- """
- name = 'Mscgen'
- aliases = ['mscgen', 'msc']
- filenames = ['*.msc']
-
- _var = r'(\w+|"(?:\\"|[^"])*")'
-
- tokens = {
- 'root': [
- (r'msc\b', Keyword.Type),
- # Options
- (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
- r'|arcgradient|ARCGRADIENT)\b', Name.Property),
- # Operators
- (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
- (r'(\.|-|\|){3}', Keyword),
- (r'(?:-|=|\.|:){2}'
- r'|<<=>>|<->|<=>|<<>>|<:>'
- r'|->|=>>|>>|=>|:>|-x|-X'
- r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
- # Names
- (r'\*', Name.Builtin),
- (_var, Name.Variable),
- # Other
- (r'\[', Punctuation, 'attrs'),
- (r'\{|\}|,|;', Punctuation),
- include('comments')
- ],
- 'attrs': [
- (r'\]', Punctuation, '#pop'),
- (_var + r'(\s*)(=)(\s*)' + _var,
- bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace,
- String)),
- (r',', Punctuation),
- include('comments')
- ],
- 'comments': [
- (r'(?://|#).*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
- (r'[ \t\r\n]+', Text.Whitespace)
- ]
- }
-
-
-class VGLLexer(RegexLexer):
- """
- For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
- source code.
-
- .. versionadded:: 1.6
- """
- name = 'VGL'
- aliases = ['vgl']
- filenames = ['*.rpf']
-
- flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\{[^}]*\}', Comment.Multiline),
- (r'declare', Keyword.Constant),
- (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
- r'|create|on|line|with|global|routine|value|endroutine|constant'
- r'|global|set|join|library|compile_option|file|exists|create|copy'
- r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
- Keyword),
- (r'(true|false|null|empty|error|locked)', Keyword.Constant),
- (r'[~^*#!%&\[\]()<>|+=:;,./?-]', Operator),
- (r'"[^"]*"', String),
- (r'(\.)([a-z_$][\w$]*)', bygroups(Operator, Name.Attribute)),
- (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
- (r'[a-z_$][\w$]*', Name),
- (r'[\r\n]+', Text),
- (r'\s+', Text)
- ]
- }
-
-
-class AlloyLexer(RegexLexer):
- """
- For `Alloy <http://alloy.mit.edu>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Alloy'
- aliases = ['alloy']
- filenames = ['*.als']
- mimetypes = ['text/x-alloy']
-
- flags = re.MULTILINE | re.DOTALL
-
- iden_rex = r'[a-zA-Z_][\w\']*'
- text_tuple = (r'[^\S\n]+', Text)
-
- tokens = {
- 'sig': [
- (r'(extends)\b', Keyword, '#pop'),
- (iden_rex, Name),
- text_tuple,
- (r',', Punctuation),
- (r'\{', Operator, '#pop'),
- ],
- 'module': [
- text_tuple,
- (iden_rex, Name, '#pop'),
- ],
- 'fun': [
- text_tuple,
- (r'\{', Operator, '#pop'),
- (iden_rex, Name, '#pop'),
- ],
- 'root': [
- (r'--.*?$', Comment.Single),
- (r'//.*?$', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- text_tuple,
- (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Text),
- 'module'),
- (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Text), 'sig'),
- (r'(iden|univ|none)\b', Keyword.Constant),
- (r'(int|Int)\b', Keyword.Type),
- (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
- (r'(all|some|no|sum|disj|when|else)\b', Keyword),
- (r'(run|check|for|but|exactly|expect|as)\b', Keyword),
- (r'(and|or|implies|iff|in)\b', Operator.Word),
- (r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Text), 'fun'),
- (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator),
- (r'[-+/*%=<>&!^|~{}\[\]().]', Operator),
- (iden_rex, Name),
- (r'[:,]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'\n', Text),
- ]
- }
-
-
-class PanLexer(RegexLexer):
- """
- Lexer for `pan <http://github.com/quattor/pan/>`_ source files.
-
- Based on tcsh lexer.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pan'
- aliases = ['pan']
- filenames = ['*.pan']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\(', Keyword, 'paren'),
- (r'\{', Keyword, 'curly'),
- include('data'),
- ],
- 'basic': [
- (words((
- 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
- 'prefix', 'unique', 'object', 'foreach', 'include', 'template',
- 'function', 'variable', 'structure', 'extensible', 'declaration'),
+ (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
+ String, Punctuation)),
+ (r'\$\S+', Name.Variable),
+ ],
+
+ 'numbers': [
+ # Copypasta from the Python lexer
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+
+ 'keywords': [
+ # Left out 'group' and 'require'
+ # Since they're often used as attributes
+ (words((
+ 'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
+ 'check', 'class', 'computer', 'configured', 'contained',
+ 'create_resources', 'crit', 'cron', 'debug', 'default',
+ 'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
+ 'err', 'exec', 'extlookup', 'fail', 'false', 'file',
+ 'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
+ 'include', 'info', 'inherits', 'inline_template', 'installed',
+ 'interface', 'k5login', 'latest', 'link', 'loglevel',
+ 'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
+ 'mount', 'mounted', 'nagios_command', 'nagios_contact',
+ 'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
+ 'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
+ 'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
+ 'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
+ 'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
+ 'realize', 'regsubst', 'resources', 'role', 'router', 'running',
+ 'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
+ 'service', 'sha1', 'shellquote', 'split', 'sprintf',
+ 'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
+ 'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
+ 'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
+ 'zpool'), prefix='(?i)', suffix=r'\b'),
+ Keyword),
+ ],
+
+ 'strings': [
+ (r'"([^"])*"', String),
+ (r"'(\\'|[^'])*'", String),
+ ],
+
+ }
+
+
+class RslLexer(RegexLexer):
+ """
+ `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
+ language used in RAISE (Rigorous Approach to Industrial Software Engineering)
+ method.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RSL'
+ aliases = ['rsl']
+ filenames = ['*.rsl']
+ mimetypes = ['text/rsl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ (words((
+ 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
+ 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
+ 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
+ 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
+ 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
+ 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
+ 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
+ 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
+ 'type', 'union', 'until', 'use', 'value', 'variable', 'while',
+ 'with', 'write', '~isin', '-inflist', '-infset', '-list',
+ '-set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'(variable|value)\b', Keyword.Declaration),
+ (r'--.*?\n', Comment),
+ (r'<:.*?:>', Comment),
+ (r'\{!.*?!\}', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function),
+ (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)',
+ bygroups(Text, Name.Function, Text, Keyword)),
+ (r'\b[A-Z]\w*\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'".*"', String),
+ (r'\'.\'', String.Char),
+ (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
+ r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
+ Operator),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'.', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check for the most common text in the beginning of a RSL file.
+ """
+ if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
+ return 1.0
+
+
+class MscgenLexer(RegexLexer):
+ """
+ For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Mscgen'
+ aliases = ['mscgen', 'msc']
+ filenames = ['*.msc']
+
+ _var = r'(\w+|"(?:\\"|[^"])*")'
+
+ tokens = {
+ 'root': [
+ (r'msc\b', Keyword.Type),
+ # Options
+ (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
+ r'|arcgradient|ARCGRADIENT)\b', Name.Property),
+ # Operators
+ (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
+ (r'(\.|-|\|){3}', Keyword),
+ (r'(?:-|=|\.|:){2}'
+ r'|<<=>>|<->|<=>|<<>>|<:>'
+ r'|->|=>>|>>|=>|:>|-x|-X'
+ r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
+ # Names
+ (r'\*', Name.Builtin),
+ (_var, Name.Variable),
+ # Other
+ (r'\[', Punctuation, 'attrs'),
+ (r'\{|\}|,|;', Punctuation),
+ include('comments')
+ ],
+ 'attrs': [
+ (r'\]', Punctuation, '#pop'),
+ (_var + r'(\s*)(=)(\s*)' + _var,
+ bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace,
+ String)),
+ (r',', Punctuation),
+ include('comments')
+ ],
+ 'comments': [
+ (r'(?://|#).*?\n', Comment.Single),
+ (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
+ (r'[ \t\r\n]+', Text.Whitespace)
+ ]
+ }
+
+
+class VGLLexer(RegexLexer):
+ """
+ For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'VGL'
+ aliases = ['vgl']
+ filenames = ['*.rpf']
+
+ flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\{[^}]*\}', Comment.Multiline),
+ (r'declare', Keyword.Constant),
+ (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
+ r'|create|on|line|with|global|routine|value|endroutine|constant'
+ r'|global|set|join|library|compile_option|file|exists|create|copy'
+ r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
+ Keyword),
+ (r'(true|false|null|empty|error|locked)', Keyword.Constant),
+ (r'[~^*#!%&\[\]()<>|+=:;,./?-]', Operator),
+ (r'"[^"]*"', String),
+ (r'(\.)([a-z_$][\w$]*)', bygroups(Operator, Name.Attribute)),
+ (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
+ (r'[a-z_$][\w$]*', Name),
+ (r'[\r\n]+', Text),
+ (r'\s+', Text)
+ ]
+ }
+
+
+class AlloyLexer(RegexLexer):
+ """
+ For `Alloy <http://alloy.mit.edu>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Alloy'
+ aliases = ['alloy']
+ filenames = ['*.als']
+ mimetypes = ['text/x-alloy']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ iden_rex = r'[a-zA-Z_][\w\']*'
+ text_tuple = (r'[^\S\n]+', Text)
+
+ tokens = {
+ 'sig': [
+ (r'(extends)\b', Keyword, '#pop'),
+ (iden_rex, Name),
+ text_tuple,
+ (r',', Punctuation),
+ (r'\{', Operator, '#pop'),
+ ],
+ 'module': [
+ text_tuple,
+ (iden_rex, Name, '#pop'),
+ ],
+ 'fun': [
+ text_tuple,
+ (r'\{', Operator, '#pop'),
+ (iden_rex, Name, '#pop'),
+ ],
+ 'root': [
+ (r'--.*?$', Comment.Single),
+ (r'//.*?$', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ text_tuple,
+ (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'module'),
+ (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Text), 'sig'),
+ (r'(iden|univ|none)\b', Keyword.Constant),
+ (r'(int|Int)\b', Keyword.Type),
+ (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
+ (r'(all|some|no|sum|disj|when|else)\b', Keyword),
+ (r'(run|check|for|but|exactly|expect|as)\b', Keyword),
+ (r'(and|or|implies|iff|in)\b', Operator.Word),
+ (r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Text), 'fun'),
+ (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator),
+ (r'[-+/*%=<>&!^|~{}\[\]().]', Operator),
+ (iden_rex, Name),
+ (r'[:,]', Punctuation),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\n', Text),
+ ]
+ }
+
+
+class PanLexer(RegexLexer):
+ """
+ Lexer for `pan <http://github.com/quattor/pan/>`_ source files.
+
+ Based on tcsh lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pan'
+ aliases = ['pan']
+ filenames = ['*.pan']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\(', Keyword, 'paren'),
+ (r'\{', Keyword, 'curly'),
+ include('data'),
+ ],
+ 'basic': [
+ (words((
+ 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
+ 'prefix', 'unique', 'object', 'foreach', 'include', 'template',
+ 'function', 'variable', 'structure', 'extensible', 'declaration'),
prefix=r'\b', suffix=r'\s*\b'),
- Keyword),
- (words((
- 'file_contents', 'format', 'index', 'length', 'match', 'matches',
- 'replace', 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase',
- 'debug', 'error', 'traceback', 'deprecated', 'base64_decode',
- 'base64_encode', 'digest', 'escape', 'unescape', 'append', 'create',
- 'first', 'nlist', 'key', 'list', 'merge', 'next', 'prepend', 'is_boolean',
- 'is_defined', 'is_double', 'is_list', 'is_long', 'is_nlist', 'is_null',
- 'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
- 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
- 'path_exists', 'if_exists', 'return', 'value'),
+ Keyword),
+ (words((
+ 'file_contents', 'format', 'index', 'length', 'match', 'matches',
+ 'replace', 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase',
+ 'debug', 'error', 'traceback', 'deprecated', 'base64_decode',
+ 'base64_encode', 'digest', 'escape', 'unescape', 'append', 'create',
+ 'first', 'nlist', 'key', 'list', 'merge', 'next', 'prepend', 'is_boolean',
+ 'is_defined', 'is_double', 'is_list', 'is_long', 'is_nlist', 'is_null',
+ 'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
+ 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
+ 'path_exists', 'if_exists', 'return', 'value'),
prefix=r'\b', suffix=r'\s*\b'),
- Name.Builtin),
- (r'#.*', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r';', Punctuation),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
- (r'\d+(?= |\Z)', Number),
- ],
- 'curly': [
- (r'\}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- }
-
-
-class CrmshLexer(RegexLexer):
- """
- Lexer for `crmsh <http://crmsh.github.io/>`_ configuration files
- for Pacemaker clusters.
-
- .. versionadded:: 2.1
- """
- name = 'Crmsh'
- aliases = ['crmsh', 'pcmk']
- filenames = ['*.crmsh', '*.pcmk']
- mimetypes = []
-
- elem = words((
- 'node', 'primitive', 'group', 'clone', 'ms', 'location',
- 'colocation', 'order', 'fencing_topology', 'rsc_ticket',
- 'rsc_template', 'property', 'rsc_defaults',
- 'op_defaults', 'acl_target', 'acl_group', 'user', 'role',
- 'tag'), suffix=r'(?![\w#$-])')
- sub = words((
- 'params', 'meta', 'operations', 'op', 'rule',
- 'attributes', 'utilization'), suffix=r'(?![\w#$-])')
- acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])')
- bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])')
- un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])')
- date_exp = words(('in_range', 'date', 'spec', 'in'), suffix=r'(?![\w#$-])')
- acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)')
- bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)')
- val_qual = (r'(?:string|version|number)')
- rsc_role_action = (r'(?:Master|Started|Slave|Stopped|'
- r'start|promote|demote|stop)')
-
- tokens = {
- 'root': [
- (r'^#.*\n?', Comment),
- # attr=value (nvpair)
- (r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)',
- bygroups(Name.Attribute, Punctuation, String)),
- # need this construct, otherwise numeric node ids
- # are matched as scores
- # elem id:
- (r'(node)(\s+)([\w#$-]+)(:)',
- bygroups(Keyword, Whitespace, Name, Punctuation)),
- # scores
- (r'([+-]?([0-9]+|inf)):', Number),
- # keywords (elements and other)
- (elem, Keyword),
- (sub, Keyword),
- (acl, Keyword),
- # binary operators
- (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word),
- # other operators
- (bin_rel, Operator.Word),
- (un_ops, Operator.Word),
- (date_exp, Operator.Word),
- # builtin attributes (e.g. #uname)
- (r'#[a-z]+(?![\w#$-])', Name.Builtin),
- # acl_mod:blah
- (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod,
- bygroups(Keyword, Punctuation, Name)),
- # rsc_id[:(role|action)]
- # NB: this matches all other identifiers
- (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
- bygroups(Name, Punctuation, Operator.Word)),
- # punctuation
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ ],
+ 'curly': [
+ (r'\}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ }
+
+
+class CrmshLexer(RegexLexer):
+ """
+ Lexer for `crmsh <http://crmsh.github.io/>`_ configuration files
+ for Pacemaker clusters.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Crmsh'
+ aliases = ['crmsh', 'pcmk']
+ filenames = ['*.crmsh', '*.pcmk']
+ mimetypes = []
+
+ elem = words((
+ 'node', 'primitive', 'group', 'clone', 'ms', 'location',
+ 'colocation', 'order', 'fencing_topology', 'rsc_ticket',
+ 'rsc_template', 'property', 'rsc_defaults',
+ 'op_defaults', 'acl_target', 'acl_group', 'user', 'role',
+ 'tag'), suffix=r'(?![\w#$-])')
+ sub = words((
+ 'params', 'meta', 'operations', 'op', 'rule',
+ 'attributes', 'utilization'), suffix=r'(?![\w#$-])')
+ acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])')
+ bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])')
+ un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])')
+ date_exp = words(('in_range', 'date', 'spec', 'in'), suffix=r'(?![\w#$-])')
+ acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)')
+ bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)')
+ val_qual = (r'(?:string|version|number)')
+ rsc_role_action = (r'(?:Master|Started|Slave|Stopped|'
+ r'start|promote|demote|stop)')
+
+ tokens = {
+ 'root': [
+ (r'^#.*\n?', Comment),
+ # attr=value (nvpair)
+ (r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)',
+ bygroups(Name.Attribute, Punctuation, String)),
+ # need this construct, otherwise numeric node ids
+ # are matched as scores
+ # elem id:
+ (r'(node)(\s+)([\w#$-]+)(:)',
+ bygroups(Keyword, Whitespace, Name, Punctuation)),
+ # scores
+ (r'([+-]?([0-9]+|inf)):', Number),
+ # keywords (elements and other)
+ (elem, Keyword),
+ (sub, Keyword),
+ (acl, Keyword),
+ # binary operators
+ (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word),
+ # other operators
+ (bin_rel, Operator.Word),
+ (un_ops, Operator.Word),
+ (date_exp, Operator.Word),
+ # builtin attributes (e.g. #uname)
+ (r'#[a-z]+(?![\w#$-])', Name.Builtin),
+ # acl_mod:blah
+ (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod,
+ bygroups(Keyword, Punctuation, Name)),
+ # rsc_id[:(role|action)]
+ # NB: this matches all other identifiers
+ (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
+ bygroups(Name, Punctuation, Operator.Word)),
+ # punctuation
(r'(\\(?=\n)|[\[\](){}/:@])', Punctuation),
- (r'\s+|\n', Whitespace),
- ],
- }
+ (r'\s+|\n', Whitespace),
+ ],
+ }
class FlatlineLexer(RegexLexer):
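
Editor's note: the dsls.py lexers above are registered under the aliases listed in each class, so they are reachable through the normal Pygments entry points. A hedged usage sketch, assuming a standard Pygments install where the 'thrift' alias resolves to the ThriftLexer shown in this diff:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # Small Thrift snippet used purely for illustration.
    source = '''\
    namespace py tutorial

    struct Work {
      1: i32 num = 0,
      2: optional string comment,
    }
    '''

    # Look the lexer up by alias and print ANSI-highlighted output.
    print(highlight(source, get_lexer_by_name('thrift'), TerminalFormatter()))
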
diff --git a/contrib/python/Pygments/py2/pygments/lexers/dylan.py b/contrib/python/Pygments/py2/pygments/lexers/dylan.py
index dd972bf4d6..73b2b2bc00 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/dylan.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/dylan.py
@@ -1,289 +1,289 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.dylan
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Dylan language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dylan
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Dylan language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Literal
-
-__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
-
-
-class DylanLexer(RegexLexer):
- """
- For the `Dylan <http://www.opendylan.org/>`_ language.
-
- .. versionadded:: 0.7
- """
-
- name = 'Dylan'
- aliases = ['dylan']
- filenames = ['*.dylan', '*.dyl', '*.intr']
- mimetypes = ['text/x-dylan']
-
- flags = re.IGNORECASE
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Literal
+
+__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
+
+
+class DylanLexer(RegexLexer):
+ """
+ For the `Dylan <http://www.opendylan.org/>`_ language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Dylan'
+ aliases = ['dylan']
+ filenames = ['*.dylan', '*.dyl', '*.intr']
+ mimetypes = ['text/x-dylan']
+
+ flags = re.IGNORECASE
+
builtins = {
- 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
- 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
- 'each-subclass', 'exception', 'exclude', 'function', 'generic',
- 'handler', 'inherited', 'inline', 'inline-only', 'instance',
- 'interface', 'import', 'keyword', 'library', 'macro', 'method',
- 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
+ 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
+ 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
+ 'each-subclass', 'exception', 'exclude', 'function', 'generic',
+ 'handler', 'inherited', 'inline', 'inline-only', 'instance',
+ 'interface', 'import', 'keyword', 'library', 'macro', 'method',
+ 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
'singleton', 'slot', 'thread', 'variable', 'virtual'}
-
+
keywords = {
- 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
- 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
- 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
- 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
+ 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
+ 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
+ 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
+ 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
'while'}
-
+
operators = {
- '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
+ '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
'>', '>=', '&', '|'}
-
+
functions = {
- 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
- 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
- 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
- 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
- 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
- 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
- 'condition-format-arguments', 'condition-format-string', 'conjoin',
- 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
- 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
- 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
- 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
- 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
- 'function-arguments', 'function-return-values',
- 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
- 'generic-function-methods', 'head', 'head-setter', 'identity',
- 'initialize', 'instance?', 'integral?', 'intersection',
- 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
- 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
- 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
- 'min', 'modulo', 'negative', 'negative?', 'next-method',
- 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
- 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
- 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
- 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
- 'remove-method', 'replace-elements!', 'replace-subsequence!',
- 'restart-query', 'return-allowed?', 'return-description',
- 'return-query', 'reverse', 'reverse!', 'round', 'round/',
- 'row-major-index', 'second', 'second-setter', 'shallow-copy',
- 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
- 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
- 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
- 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
- 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
+ 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
+ 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
+ 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
+ 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
+ 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
+ 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
+ 'condition-format-arguments', 'condition-format-string', 'conjoin',
+ 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
+ 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
+ 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
+ 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
+ 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
+ 'function-arguments', 'function-return-values',
+ 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
+ 'generic-function-methods', 'head', 'head-setter', 'identity',
+ 'initialize', 'instance?', 'integral?', 'intersection',
+ 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
+ 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
+ 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
+ 'min', 'modulo', 'negative', 'negative?', 'next-method',
+ 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
+ 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
+ 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
+ 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
+ 'remove-method', 'replace-elements!', 'replace-subsequence!',
+ 'restart-query', 'return-allowed?', 'return-description',
+ 'return-query', 'reverse', 'reverse!', 'round', 'round/',
+ 'row-major-index', 'second', 'second-setter', 'shallow-copy',
+ 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
+ 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
+ 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
+ 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
+ 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
'vector', 'zero?'}
-
- valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- lowercase_value = value.lower()
- if lowercase_value in self.builtins:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.keywords:
- yield index, Keyword, value
- continue
- if lowercase_value in self.functions:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.operators:
- yield index, Operator, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Text, String)),
-
- default('code') # no header match, switch to code
- ],
- 'code': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # multi-line comment
- (r'/\*', Comment.Multiline, 'comment'),
-
- # strings and characters
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
-
- # binary integer
- (r'#b[01]+', Number.Bin),
-
- # octal integer
- (r'#o[0-7]+', Number.Oct),
-
- # floating point
- (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
-
- # decimal integer
- (r'[-+]?\d+', Number.Integer),
-
- # hex integer
- (r'#x[0-9a-f]+', Number.Hex),
-
- # Macro parameters
- (r'(\?' + valid_name + ')(:)'
- r'(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'\?' + valid_name, Name.Tag),
-
- # Punctuation
- (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
-
- # Most operators are picked up as names and then re-flagged.
- # This one isn't valid in a name though, so we pick it up now.
- (r':=', Operator),
-
- # Pick up #t / #f before we match other stuff with #.
- (r'#[tf]', Literal),
-
- # #"foo" style keywords
- (r'#"', String.Symbol, 'keyword'),
-
- # #rest, #key, #all-keys, etc.
- (r'#[a-z0-9-]+', Keyword),
-
- # required-init-keyword: style keywords.
- (valid_name + ':', Keyword),
-
- # class names
+
+ valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ lowercase_value = value.lower()
+ if lowercase_value in self.builtins:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.keywords:
+ yield index, Keyword, value
+ continue
+ if lowercase_value in self.functions:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.operators:
+ yield index, Operator, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # lid header
+ (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Operator, Text, String)),
+
+ default('code') # no header match, switch to code
+ ],
+ 'code': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # multi-line comment
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # strings and characters
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
+
+ # binary integer
+ (r'#b[01]+', Number.Bin),
+
+ # octal integer
+ (r'#o[0-7]+', Number.Oct),
+
+ # floating point
+ (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
+
+ # decimal integer
+ (r'[-+]?\d+', Number.Integer),
+
+ # hex integer
+ (r'#x[0-9a-f]+', Number.Hex),
+
+ # Macro parameters
+ (r'(\?' + valid_name + ')(:)'
+ r'(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'\?' + valid_name, Name.Tag),
+
+ # Punctuation
+ (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
+
+ # Most operators are picked up as names and then re-flagged.
+ # This one isn't valid in a name though, so we pick it up now.
+ (r':=', Operator),
+
+ # Pick up #t / #f before we match other stuff with #.
+ (r'#[tf]', Literal),
+
+ # #"foo" style keywords
+ (r'#"', String.Symbol, 'keyword'),
+
+ # #rest, #key, #all-keys, etc.
+ (r'#[a-z0-9-]+', Keyword),
+
+ # required-init-keyword: style keywords.
+ (valid_name + ':', Keyword),
+
+ # class names
('<' + valid_name + '>', Name.Class),
-
- # define variable forms.
+
+ # define variable forms.
(r'\*' + valid_name + r'\*', Name.Variable.Global),
-
- # define constant forms.
- (r'\$' + valid_name, Name.Constant),
-
- # everything else. We re-flag some of these in the method above.
- (valid_name, Name),
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'keyword': [
- (r'"', String.Symbol, '#pop'),
- (r'[^\\"]+', String.Symbol), # all other characters
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ]
- }
-
-
-class DylanLidLexer(RegexLexer):
- """
- For Dylan LID (Library Interchange Definition) files.
-
- .. versionadded:: 1.6
- """
-
- name = 'DylanLID'
- aliases = ['dylan-lid', 'lid']
- filenames = ['*.lid', '*.hdp']
- mimetypes = ['text/x-dylan-lid']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Text, String)),
- ]
- }
-
-
-class DylanConsoleLexer(Lexer):
- """
- For Dylan interactive console output like:
-
- .. sourcecode:: dylan-console
-
- ? let a = 1;
- => 1
- ? a
- => 1
-
- This is based on a copy of the RubyConsoleLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Dylan session'
- aliases = ['dylan-console', 'dylan-repl']
- filenames = ['*.dylan-console']
- mimetypes = ['text/x-dylan-console']
-
- _line_re = re.compile('.*?\n')
+
+ # define constant forms.
+ (r'\$' + valid_name, Name.Constant),
+
+ # everything else. We re-flag some of these in the method above.
+ (valid_name, Name),
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'keyword': [
+ (r'"', String.Symbol, '#pop'),
+ (r'[^\\"]+', String.Symbol), # all other characters
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ]
+ }
+
+
+class DylanLidLexer(RegexLexer):
+ """
+ For Dylan LID (Library Interchange Definition) files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'DylanLID'
+ aliases = ['dylan-lid', 'lid']
+ filenames = ['*.lid', '*.hdp']
+ mimetypes = ['text/x-dylan-lid']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # lid header
+ (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Operator, Text, String)),
+ ]
+ }
+
+
+class DylanConsoleLexer(Lexer):
+ """
+ For Dylan interactive console output like:
+
+ .. sourcecode:: dylan-console
+
+ ? let a = 1;
+ => 1
+ ? a
+ => 1
+
+ This is based on a copy of the RubyConsoleLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Dylan session'
+ aliases = ['dylan-console', 'dylan-repl']
+ filenames = ['*.dylan-console']
+ mimetypes = ['text/x-dylan-console']
+
+ _line_re = re.compile('.*?\n')
_prompt_re = re.compile(r'\?| ')
-
- def get_tokens_unprocessed(self, text):
- dylexer = DylanLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in self._line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
+
+ def get_tokens_unprocessed(self, text):
+ dylexer = DylanLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in self._line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode)):
+ yield item
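
Editor's note: DylanLexer above post-processes its own token stream: its `get_tokens_unprocessed` override re-flags generic Name tokens by membership in the builtins/keywords/functions/operators sets. The sketch below illustrates that idiom with a hypothetical lexer and word set; it is not the Dylan rules themselves.

    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Name, Text

    class ReflaggingLexer(RegexLexer):
        """Hypothetical lexer: emit Name first, promote known words later."""
        name = 'Reflagging'
        keywords = {'if', 'else', 'end'}

        tokens = {
            'root': [
                (r'\s+', Text),
                (r'\w+', Name),   # every word is a plain Name at first...
            ],
        }

        def get_tokens_unprocessed(self, text):
            for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
                # ...then words found in the keyword set are re-flagged.
                if token is Name and value.lower() in self.keywords:
                    yield index, Keyword, value
                else:
                    yield index, token, value
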
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ecl.py b/contrib/python/Pygments/py2/pygments/lexers/ecl.py
index c695c18c89..d57a58e9b1 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ecl.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ecl.py
@@ -1,125 +1,125 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.ecl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the ECL language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ecl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the ECL language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['ECLLexer']
-
-
-class ECLLexer(RegexLexer):
- """
- Lexer for the declarative big-data `ECL
- <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
- language.
-
- .. versionadded:: 1.5
- """
-
- name = 'ECL'
- aliases = ['ecl']
- filenames = ['*.ecl']
- mimetypes = ['application/x-ecl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('statements'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- (r'\/\/.*', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- include('types'),
- include('keywords'),
- include('functions'),
- include('hash'),
- (r'"', String, 'string'),
- (r'\'', String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float),
- (r'0x[0-9a-f]+[lu]*', Number.Hex),
- (r'0[0-7]+[lu]*', Number.Oct),
- (r'\d+[lu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]+', Operator),
- (r'[{}()\[\],.;]', Punctuation),
- (r'[a-z_]\w*', Name),
- ],
- 'hash': [
- (r'^#.*$', Comment.Preproc),
- ],
- 'types': [
- (r'(RECORD|END)\D', Keyword.Declaration),
- (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
- r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
- r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
- bygroups(Keyword.Type, Text)),
- ],
- 'keywords': [
- (words((
- 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
- 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
- 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
- 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
- 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
- 'WAIT', 'WHEN'), suffix=r'\b'),
- Keyword.Reserved),
- # These are classed differently, check later
- (words((
- 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE',
- 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT',
- 'EXCLUSIVE', 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL',
- 'FUNCTION', 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED',
- 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO',
- 'MANY', 'MAXCOUNT', 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE',
- 'NOROOT', 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE',
- 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP',
- 'REPEAT', 'RETURN', 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW',
- 'SKIP', 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM',
- 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD',
- 'WITHIN', 'XML', 'XPATH', '__COMPRESSED__'), suffix=r'\b'),
- Keyword.Reserved),
- ],
- 'functions': [
- (words((
- 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE',
- 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS',
- 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
- 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE',
- 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE',
- 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
- 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID',
- 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP',
- 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE',
- 'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE',
- 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED',
- 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED',
- 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF',
- 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH',
- 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
- 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE',
- 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
- Name.Function),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\'', String, '#pop'),
- (r'[^"\']+', String),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['ECLLexer']
+
+
+class ECLLexer(RegexLexer):
+ """
+ Lexer for the declarative big-data `ECL
+ <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
+ language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'ECL'
+ aliases = ['ecl']
+ filenames = ['*.ecl']
+ mimetypes = ['application/x-ecl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('statements'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'\/\/.*', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ include('types'),
+ include('keywords'),
+ include('functions'),
+ include('hash'),
+ (r'"', String, 'string'),
+ (r'\'', String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float),
+ (r'0x[0-9a-f]+[lu]*', Number.Hex),
+ (r'0[0-7]+[lu]*', Number.Oct),
+ (r'\d+[lu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]+', Operator),
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[a-z_]\w*', Name),
+ ],
+ 'hash': [
+ (r'^#.*$', Comment.Preproc),
+ ],
+ 'types': [
+ (r'(RECORD|END)\D', Keyword.Declaration),
+ (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
+ r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
+ r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
+ bygroups(Keyword.Type, Text)),
+ ],
+ 'keywords': [
+ (words((
+ 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
+ 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
+ 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
+ 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
+ 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
+ 'WAIT', 'WHEN'), suffix=r'\b'),
+ Keyword.Reserved),
+ # These are classed differently, check later
+ (words((
+ 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE',
+ 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT',
+ 'EXCLUSIVE', 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL',
+ 'FUNCTION', 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED',
+ 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO',
+ 'MANY', 'MAXCOUNT', 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE',
+ 'NOROOT', 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE',
+ 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP',
+ 'REPEAT', 'RETURN', 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW',
+ 'SKIP', 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM',
+ 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD',
+ 'WITHIN', 'XML', 'XPATH', '__COMPRESSED__'), suffix=r'\b'),
+ Keyword.Reserved),
+ ],
+ 'functions': [
+ (words((
+ 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE',
+ 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS',
+ 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
+ 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE',
+ 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE',
+ 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
+ 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID',
+ 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP',
+ 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE',
+ 'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE',
+ 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED',
+ 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED',
+ 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF',
+ 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH',
+ 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
+ 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE',
+ 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
+ Name.Function),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\'', String, '#pop'),
+ (r'[^"\']+', String),
+ ],
+ }
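A small sketch of exercising the ECL lexer above (assuming upstream Pygments; the two-line ECL snippet is illustrative only):

# Iterate over the (token type, value) pairs ECLLexer produces for a toy snippet;
# EXPORT/OUTPUT hit the keyword/function word lists, 42 the integer rule.
from pygments.lexers.ecl import ECLLexer

code = 'EXPORT answer := 42;\nOUTPUT(answer);\n'
for token_type, value in ECLLexer().get_tokens(code):
    print(token_type, repr(value))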
diff --git a/contrib/python/Pygments/py2/pygments/lexers/eiffel.py b/contrib/python/Pygments/py2/pygments/lexers/eiffel.py
index 3f7ce55d42..a17670236a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/eiffel.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/eiffel.py
@@ -1,65 +1,65 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.eiffel
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Eiffel language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.eiffel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Eiffel language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['EiffelLexer']
-
-
-class EiffelLexer(RegexLexer):
- """
- For `Eiffel <http://www.eiffel.com>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Eiffel'
- aliases = ['eiffel']
- filenames = ['*.e']
- mimetypes = ['text/x-eiffel']
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'--.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- # Please note that keyword and operator are case insensitive.
- (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
- (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word),
- (words((
- 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached',
- 'attribute', 'check', 'class', 'convert', 'create', 'debug',
- 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure',
- 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if',
- 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none',
- 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename',
- 'require', 'rescue', 'retry', 'select', 'separate', 'then',
- 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
- Keyword.Reserved),
- (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String),
- (r'"([^"%\n]|%.)*?"', String),
- include('numbers'),
- (r"'([^'%]|%'|%%)'", String.Char),
- (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\?!#%&@|+/\-=>*$<^\[\]])", Operator),
- (r"([{}():;,.])", Punctuation),
- (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
- (r'([A-Z][A-Z0-9_]*)', Name.Class),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'0[bB][01]+', Number.Bin),
- (r'0[cC][0-7]+', Number.Oct),
- (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
- (r'[0-9]+', Number.Integer),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['EiffelLexer']
+
+
+class EiffelLexer(RegexLexer):
+ """
+ For `Eiffel <http://www.eiffel.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Eiffel'
+ aliases = ['eiffel']
+ filenames = ['*.e']
+ mimetypes = ['text/x-eiffel']
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ # Please note that keyword and operator are case insensitive.
+ (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
+ (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word),
+ (words((
+ 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached',
+ 'attribute', 'check', 'class', 'convert', 'create', 'debug',
+ 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure',
+ 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if',
+ 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none',
+ 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename',
+ 'require', 'rescue', 'retry', 'select', 'separate', 'then',
+ 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String),
+ (r'"([^"%\n]|%.)*?"', String),
+ include('numbers'),
+ (r"'([^'%]|%'|%%)'", String.Char),
+ (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\?!#%&@|+/\-=>*$<^\[\]])", Operator),
+ (r"([{}():;,.])", Punctuation),
+ (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
+ (r'([A-Z][A-Z0-9_]*)', Name.Class),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[cC][0-7]+', Number.Oct),
+ (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ ],
+ }
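A comparable sketch for the Eiffel lexer above (assuming upstream Pygments; the one-line Eiffel fragment is illustrative only):

# Print the non-whitespace tokens EiffelLexer assigns; note that "%N" inside the
# string is consumed by the (%.) alternative of the string rule.
from pygments.lexers.eiffel import EiffelLexer

source = 'class HELLO feature make do print ("Hi%N") end end\n'
for token_type, value in EiffelLexer().get_tokens(source):
    if value.strip():
        print(token_type, repr(value))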
diff --git a/contrib/python/Pygments/py2/pygments/lexers/elm.py b/contrib/python/Pygments/py2/pygments/lexers/elm.py
index ee941d7dbb..a6df964d27 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/elm.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/elm.py
@@ -1,121 +1,121 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.elm
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Elm programming language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.elm
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Elm programming language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Keyword, Name, Number, Punctuation, String, Text
-
-__all__ = ['ElmLexer']
-
-
-class ElmLexer(RegexLexer):
- """
- For `Elm <http://elm-lang.org/>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'Elm'
- aliases = ['elm']
- filenames = ['*.elm']
- mimetypes = ['text/x-elm']
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Keyword, Name, Number, Punctuation, String, Text
+
+__all__ = ['ElmLexer']
+
+
+class ElmLexer(RegexLexer):
+ """
+ For `Elm <http://elm-lang.org/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Elm'
+ aliases = ['elm']
+ filenames = ['*.elm']
+ mimetypes = ['text/x-elm']
+
validName = r'[a-z_][a-zA-Z0-9_\']*'
-
- specialName = r'^main '
-
- builtinOps = (
- '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
- '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
- '..', '.', '->', '-', '++', '+', '*', '&&', '%',
- )
-
- reservedWords = words((
- 'alias', 'as', 'case', 'else', 'if', 'import', 'in',
- 'let', 'module', 'of', 'port', 'then', 'type', 'where',
- ), suffix=r'\b')
-
- tokens = {
- 'root': [
-
- # Comments
+
+ specialName = r'^main '
+
+ builtinOps = (
+ '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
+ '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
+ '..', '.', '->', '-', '++', '+', '*', '&&', '%',
+ )
+
+ reservedWords = words((
+ 'alias', 'as', 'case', 'else', 'if', 'import', 'in',
+ 'let', 'module', 'of', 'port', 'then', 'type', 'where',
+ ), suffix=r'\b')
+
+ tokens = {
+ 'root': [
+
+ # Comments
(r'\{-', Comment.Multiline, 'comment'),
- (r'--.*', Comment.Single),
-
- # Whitespace
- (r'\s+', Text),
-
- # Strings
- (r'"', String, 'doublequote'),
-
- # Modules
- (r'^\s*module\s*', Keyword.Namespace, 'imports'),
-
- # Imports
- (r'^\s*import\s*', Keyword.Namespace, 'imports'),
-
- # Shaders
- (r'\[glsl\|.*', Name.Entity, 'shader'),
-
- # Keywords
- (reservedWords, Keyword.Reserved),
-
- # Types
- (r'[A-Z]\w*', Keyword.Type),
-
- # Main
- (specialName, Keyword.Reserved),
-
- # Prefix Operators
- (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
-
- # Infix Operators
+ (r'--.*', Comment.Single),
+
+ # Whitespace
+ (r'\s+', Text),
+
+ # Strings
+ (r'"', String, 'doublequote'),
+
+ # Modules
+ (r'^\s*module\s*', Keyword.Namespace, 'imports'),
+
+ # Imports
+ (r'^\s*import\s*', Keyword.Namespace, 'imports'),
+
+ # Shaders
+ (r'\[glsl\|.*', Name.Entity, 'shader'),
+
+ # Keywords
+ (reservedWords, Keyword.Reserved),
+
+ # Types
+ (r'[A-Z]\w*', Keyword.Type),
+
+ # Main
+ (specialName, Keyword.Reserved),
+
+ # Prefix Operators
+ (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
+
+ # Infix Operators
(words(builtinOps), Name.Function),
-
- # Numbers
- include('numbers'),
-
- # Variable Names
- (validName, Name.Variable),
-
- # Parens
+
+ # Numbers
+ include('numbers'),
+
+ # Variable Names
+ (validName, Name.Variable),
+
+ # Parens
(r'[,()\[\]{}]', Punctuation),
-
- ],
-
- 'comment': [
+
+ ],
+
+ 'comment': [
(r'-(?!\})', Comment.Multiline),
(r'\{-', Comment.Multiline, 'comment'),
- (r'[^-}]', Comment.Multiline),
+ (r'[^-}]', Comment.Multiline),
(r'-\}', Comment.Multiline, '#pop'),
- ],
-
- 'doublequote': [
+ ],
+
+ 'doublequote': [
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\[nrfvb\\"]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- 'imports': [
- (r'\w+(\.\w+)*', Name.Class, '#pop'),
- ],
-
- 'numbers': [
- (r'_?\d+\.(?=\d+)', Number.Float),
- (r'_?\d+', Number.Integer),
- ],
-
- 'shader': [
- (r'\|(?!\])', Name.Entity),
- (r'\|\]', Name.Entity, '#pop'),
- (r'.*\n', Name.Entity),
- ],
- }
+ (r'[^"]', String),
+ (r'"', String, '#pop'),
+ ],
+
+ 'imports': [
+ (r'\w+(\.\w+)*', Name.Class, '#pop'),
+ ],
+
+ 'numbers': [
+ (r'_?\d+\.(?=\d+)', Number.Float),
+ (r'_?\d+', Number.Integer),
+ ],
+
+ 'shader': [
+ (r'\|(?!\])', Name.Entity),
+ (r'\|\]', Name.Entity, '#pop'),
+ (r'.*\n', Name.Entity),
+ ],
+ }
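A similar sketch for the Elm lexer above (assuming upstream Pygments; the Elm fragment is illustrative only):

# Show how ':', '->' and '+' fall under Name.Function via the words(builtinOps)
# rule, while 'Int' matches [A-Z]\w* and is tagged Keyword.Type.
from pygments.lexers.elm import ElmLexer

source = 'add : Int -> Int -> Int\nadd x y = x + y\n'
for token_type, value in ElmLexer().get_tokens(source):
    if value.strip():
        print(token_type, repr(value))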
diff --git a/contrib/python/Pygments/py2/pygments/lexers/erlang.py b/contrib/python/Pygments/py2/pygments/lexers/erlang.py
index 07a46c80ff..c022c35167 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/erlang.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/erlang.py
@@ -1,121 +1,121 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.erlang
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Erlang.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.erlang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Erlang.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
- include, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
- 'ElixirLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class ErlangLexer(RegexLexer):
- """
- For the Erlang functional programming language.
-
- Blame Jeremy Thurgood (http://jerith.za.net/).
-
- .. versionadded:: 0.9
- """
-
- name = 'Erlang'
- aliases = ['erlang']
- filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
- mimetypes = ['text/x-erlang']
-
- keywords = (
- 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
- 'let', 'of', 'query', 'receive', 'try', 'when',
- )
-
- builtins = ( # See erlang(3) man page
- 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
- 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
- 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
- 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
- 'float', 'float_to_list', 'fun_info', 'fun_to_list',
- 'function_exported', 'garbage_collect', 'get', 'get_keys',
- 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
- 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
- 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
- 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
- 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
- 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
- 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
- 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
- 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
- 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
- 'pid_to_list', 'port_close', 'port_command', 'port_connect',
- 'port_control', 'port_call', 'port_info', 'port_to_list',
- 'process_display', 'process_flag', 'process_info', 'purge_module',
- 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
- 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
- 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
- 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
- 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
- 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
- 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
- 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
- )
-
- operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
- word_operators = (
- 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
- 'div', 'not', 'or', 'orelse', 'rem', 'xor'
- )
-
- atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
-
- variable_re = r'(?:[A-Z_]\w*)'
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
+ include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
+ 'ElixirLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class ErlangLexer(RegexLexer):
+ """
+ For the Erlang functional programming language.
+
+ Blame Jeremy Thurgood (http://jerith.za.net/).
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'Erlang'
+ aliases = ['erlang']
+ filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
+ mimetypes = ['text/x-erlang']
+
+ keywords = (
+ 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
+ 'let', 'of', 'query', 'receive', 'try', 'when',
+ )
+
+ builtins = ( # See erlang(3) man page
+ 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
+ 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
+ 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
+ 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
+ 'float', 'float_to_list', 'fun_info', 'fun_to_list',
+ 'function_exported', 'garbage_collect', 'get', 'get_keys',
+ 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
+ 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
+ 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
+ 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
+ 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
+ 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
+ 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
+ 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
+ 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
+ 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
+ 'pid_to_list', 'port_close', 'port_command', 'port_connect',
+ 'port_control', 'port_call', 'port_info', 'port_to_list',
+ 'process_display', 'process_flag', 'process_info', 'purge_module',
+ 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
+ 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
+ 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
+ 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
+ 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
+ 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
+ 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
+ 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
+ )
+
+ operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
+ word_operators = (
+ 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
+ 'div', 'not', 'or', 'orelse', 'rem', 'xor'
+ )
+
+ atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
+
+ variable_re = r'(?:[A-Z_]\w*)'
+
esc_char_re = r'[bdefnrstv\'"\\]'
esc_octal_re = r'[0-7][0-7]?[0-7]?'
esc_hex_re = r'(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})'
esc_ctrl_re = r'\^[a-zA-Z]'
escape_re = r'(?:\\(?:'+esc_char_re+r'|'+esc_octal_re+r'|'+esc_hex_re+r'|'+esc_ctrl_re+r'))'
-
- macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
-
- base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'%.*\n', Comment),
- (words(keywords, suffix=r'\b'), Keyword),
- (words(builtins, suffix=r'\b'), Name.Builtin),
- (words(word_operators, suffix=r'\b'), Operator.Word),
- (r'^-', Punctuation, 'directive'),
- (operators, Operator),
- (r'"', String, 'string'),
- (r'<<', Name.Label),
- (r'>>', Name.Label),
- ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
- ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
- (r'[+-]?\d+', Number.Integer),
- (r'[+-]?\d+.\d+', Number.Float),
- (r'[]\[:_@\".{}()|;,]', Punctuation),
- (variable_re, Name.Variable),
- (atom_re, Name),
- (r'\?'+macro_re, Name.Constant),
- (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
- (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
+
+ macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
+
+ base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'%.*\n', Comment),
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(builtins, suffix=r'\b'), Name.Builtin),
+ (words(word_operators, suffix=r'\b'), Operator.Word),
+ (r'^-', Punctuation, 'directive'),
+ (operators, Operator),
+ (r'"', String, 'string'),
+ (r'<<', Name.Label),
+ (r'>>', Name.Label),
+ ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
+ ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
+ (r'[+-]?\d+', Number.Integer),
+ (r'[+-]?\d+.\d+', Number.Float),
+ (r'[]\[:_@\".{}()|;,]', Punctuation),
+ (variable_re, Name.Variable),
+ (atom_re, Name),
+ (r'\?'+macro_re, Name.Constant),
+ (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
+ (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
# Erlang script shebang
(r'\A#!.+\n', Comment.Hashbang),
@@ -123,21 +123,21 @@ class ErlangLexer(RegexLexer):
# EEP 43: Maps
# http://www.erlang.org/eeps/eep-0043.html
(r'#\{', Punctuation, 'map_key'),
- ],
- 'string': [
- (escape_re, String.Escape),
- (r'"', String, '#pop'),
+ ],
+ 'string': [
+ (escape_re, String.Escape),
+ (r'"', String, '#pop'),
(r'~[0-9.*]*[~#+BPWXb-ginpswx]', String.Interpol),
- (r'[^"\\~]+', String),
- (r'~', String),
- ],
- 'directive': [
- (r'(define)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
- (r'(record)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
- (atom_re, Name.Entity, '#pop'),
- ],
+ (r'[^"\\~]+', String),
+ (r'~', String),
+ ],
+ 'directive': [
+ (r'(define)(\s*)(\()('+macro_re+r')',
+ bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
+ (r'(record)(\s*)(\()('+macro_re+r')',
+ bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
+ (atom_re, Name.Entity, '#pop'),
+ ],
'map_key': [
include('root'),
(r'=>', Punctuation, 'map_val'),
@@ -149,385 +149,385 @@ class ErlangLexer(RegexLexer):
(r',', Punctuation, '#pop'),
(r'(?=\})', Punctuation, '#pop'),
],
- }
-
-
-class ErlangShellLexer(Lexer):
- """
- Shell sessions in erl (for Erlang code).
-
- .. versionadded:: 1.1
- """
- name = 'Erlang erl session'
- aliases = ['erl']
- filenames = ['*.erl-sh']
- mimetypes = ['text/x-erl-shellsession']
-
+ }
+
+
+class ErlangShellLexer(Lexer):
+ """
+ Shell sessions in erl (for Erlang code).
+
+ .. versionadded:: 1.1
+ """
+ name = 'Erlang erl session'
+ aliases = ['erl']
+ filenames = ['*.erl-sh']
+ mimetypes = ['text/x-erl-shellsession']
+
_prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
-
- def get_tokens_unprocessed(self, text):
- erlexer = ErlangLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if line.startswith('*'):
- yield match.start(), Generic.Traceback, line
- else:
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-def gen_elixir_string_rules(name, symbol, token):
- states = {}
- states['string_' + name] = [
- (r'[^#%s\\]+' % (symbol,), token),
- include('escapes'),
- (r'\\.', token),
- (r'(%s)' % (symbol,), bygroups(token), "#pop"),
- include('interpol')
- ]
- return states
-
-
-def gen_elixir_sigstr_rules(term, token, interpol=True):
- if interpol:
- return [
- (r'[^#%s\\]+' % (term,), token),
- include('escapes'),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- include('interpol')
- ]
- else:
- return [
- (r'[^%s\\]+' % (term,), token),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- ]
-
-
-class ElixirLexer(RegexLexer):
- """
- For the `Elixir language <http://elixir-lang.org>`_.
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir'
- aliases = ['elixir', 'ex', 'exs']
- filenames = ['*.ex', '*.exs']
- mimetypes = ['text/x-elixir']
-
- KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
- KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
- BUILTIN = (
- 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
+
+ def get_tokens_unprocessed(self, text):
+ erlexer = ErlangLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if line.startswith('*'):
+ yield match.start(), Generic.Traceback, line
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode)):
+ yield item
+
+
+def gen_elixir_string_rules(name, symbol, token):
+ states = {}
+ states['string_' + name] = [
+ (r'[^#%s\\]+' % (symbol,), token),
+ include('escapes'),
+ (r'\\.', token),
+ (r'(%s)' % (symbol,), bygroups(token), "#pop"),
+ include('interpol')
+ ]
+ return states
+
+
+def gen_elixir_sigstr_rules(term, token, interpol=True):
+ if interpol:
+ return [
+ (r'[^#%s\\]+' % (term,), token),
+ include('escapes'),
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ include('interpol')
+ ]
+ else:
+ return [
+ (r'[^%s\\]+' % (term,), token),
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ ]
+
+
+class ElixirLexer(RegexLexer):
+ """
+ For the `Elixir language <http://elixir-lang.org>`_.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir'
+ aliases = ['elixir', 'ex', 'exs']
+ filenames = ['*.ex', '*.exs']
+ mimetypes = ['text/x-elixir']
+
+ KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
+ KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
+ BUILTIN = (
+ 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
'quote', 'unquote', 'unquote_splicing', 'throw', 'super',
- )
- BUILTIN_DECLARATION = (
- 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
+ )
+ BUILTIN_DECLARATION = (
+ 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback',
- )
-
- BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
- CONSTANT = ('nil', 'true', 'false')
-
- PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
-
- OPERATORS3 = (
- '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
- '~>>', '<~>', '|~>', '<|>',
- )
- OPERATORS2 = (
- '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
- '->', '<-', '|', '.', '=', '~>', '<~',
- )
- OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
-
- PUNCTUATION = (
+ )
+
+ BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
+ CONSTANT = ('nil', 'true', 'false')
+
+ PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
+
+ OPERATORS3 = (
+ '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
+ '~>>', '<~>', '|~>', '<|>',
+ )
+ OPERATORS2 = (
+ '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
+ '->', '<-', '|', '.', '=', '~>', '<~',
+ )
+ OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
+
+ PUNCTUATION = (
'\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']',
- )
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.KEYWORD:
- yield index, Keyword, value
- elif value in self.KEYWORD_OPERATOR:
- yield index, Operator.Word, value
- elif value in self.BUILTIN:
- yield index, Keyword, value
- elif value in self.BUILTIN_DECLARATION:
- yield index, Keyword.Declaration, value
- elif value in self.BUILTIN_NAMESPACE:
- yield index, Keyword.Namespace, value
- elif value in self.CONSTANT:
- yield index, Name.Constant, value
- elif value in self.PSEUDO_VAR:
- yield index, Name.Builtin.Pseudo, value
- else:
- yield index, token, value
- else:
- yield index, token, value
-
- def gen_elixir_sigil_rules():
- # all valid sigil terminators (excluding heredocs)
- terminators = [
- (r'\{', r'\}', 'cb'),
- (r'\[', r'\]', 'sb'),
- (r'\(', r'\)', 'pa'),
- (r'<', r'>', 'ab'),
- (r'/', r'/', 'slas'),
- (r'\|', r'\|', 'pipe'),
- ('"', '"', 'quot'),
- ("'", "'", 'apos'),
- ]
-
- # heredocs have slightly different rules
- triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
-
- token = String.Other
- states = {'sigils': []}
-
- for term, name in triquotes:
- states['sigils'] += [
- (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-intp')),
- (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-no-intp')),
- ]
-
- states[name + '-end'] = [
- (r'[a-zA-Z]+', token, '#pop'),
- default('#pop'),
- ]
- states[name + '-intp'] = [
- (r'^\s*' + term, String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ]
- states[name + '-no-intp'] = [
- (r'^\s*' + term, String.Heredoc, '#pop'),
- include('heredoc_no_interpol'),
- ]
-
- for lterm, rterm, name in terminators:
- states['sigils'] += [
- (r'~[a-z]' + lterm, token, name + '-intp'),
- (r'~[A-Z]' + lterm, token, name + '-no-intp'),
- ]
- states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token)
- states[name + '-no-intp'] = \
- gen_elixir_sigstr_rules(rterm, token, interpol=False)
-
- return states
-
- op3_re = "|".join(re.escape(s) for s in OPERATORS3)
- op2_re = "|".join(re.escape(s) for s in OPERATORS2)
- op1_re = "|".join(re.escape(s) for s in OPERATORS1)
- ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
- punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
+ )
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.KEYWORD:
+ yield index, Keyword, value
+ elif value in self.KEYWORD_OPERATOR:
+ yield index, Operator.Word, value
+ elif value in self.BUILTIN:
+ yield index, Keyword, value
+ elif value in self.BUILTIN_DECLARATION:
+ yield index, Keyword.Declaration, value
+ elif value in self.BUILTIN_NAMESPACE:
+ yield index, Keyword.Namespace, value
+ elif value in self.CONSTANT:
+ yield index, Name.Constant, value
+ elif value in self.PSEUDO_VAR:
+ yield index, Name.Builtin.Pseudo, value
+ else:
+ yield index, token, value
+ else:
+ yield index, token, value
+
+ def gen_elixir_sigil_rules():
+ # all valid sigil terminators (excluding heredocs)
+ terminators = [
+ (r'\{', r'\}', 'cb'),
+ (r'\[', r'\]', 'sb'),
+ (r'\(', r'\)', 'pa'),
+ (r'<', r'>', 'ab'),
+ (r'/', r'/', 'slas'),
+ (r'\|', r'\|', 'pipe'),
+ ('"', '"', 'quot'),
+ ("'", "'", 'apos'),
+ ]
+
+ # heredocs have slightly different rules
+ triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
+
+ token = String.Other
+ states = {'sigils': []}
+
+ for term, name in triquotes:
+ states['sigils'] += [
+ (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-intp')),
+ (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-no-intp')),
+ ]
+
+ states[name + '-end'] = [
+ (r'[a-zA-Z]+', token, '#pop'),
+ default('#pop'),
+ ]
+ states[name + '-intp'] = [
+ (r'^\s*' + term, String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ]
+ states[name + '-no-intp'] = [
+ (r'^\s*' + term, String.Heredoc, '#pop'),
+ include('heredoc_no_interpol'),
+ ]
+
+ for lterm, rterm, name in terminators:
+ states['sigils'] += [
+ (r'~[a-z]' + lterm, token, name + '-intp'),
+ (r'~[A-Z]' + lterm, token, name + '-no-intp'),
+ ]
+ states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token)
+ states[name + '-no-intp'] = \
+ gen_elixir_sigstr_rules(rterm, token, interpol=False)
+
+ return states
+
+ op3_re = "|".join(re.escape(s) for s in OPERATORS3)
+ op2_re = "|".join(re.escape(s) for s in OPERATORS2)
+ op1_re = "|".join(re.escape(s) for s in OPERATORS1)
+ ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
+ punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
alnum = r'\w'
- name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
- modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
- complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
- special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
-
- long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
- hex_char_re = r'(\\x[\da-fA-F]{1,2})'
- escape_char_re = r'(\\[abdefnrstv])'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single),
-
- # Various kinds of characters
- (r'(\?)' + long_hex_char_re,
- bygroups(String.Char,
- String.Escape, Number.Hex, String.Escape)),
- (r'(\?)' + hex_char_re,
- bygroups(String.Char, String.Escape)),
- (r'(\?)' + escape_char_re,
- bygroups(String.Char, String.Escape)),
- (r'\?\\?.', String.Char),
-
- # '::' has to go before atoms
- (r':::', String.Symbol),
- (r'::', Operator),
-
- # atoms
- (r':' + special_atom_re, String.Symbol),
- (r':' + complex_name_re, String.Symbol),
- (r':"', String.Symbol, 'string_double_atom'),
- (r":'", String.Symbol, 'string_single_atom'),
-
- # [keywords: ...]
- (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
- bygroups(String.Symbol, Punctuation)),
-
- # @attributes
- (r'@' + name_re, Name.Attribute),
-
- # identifiers
- (name_re, Name),
- (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
-
- # operators and punctuation
- (op3_re, Operator),
- (op2_re, Operator),
- (punctuation_re, Punctuation),
- (r'&\d', Name.Entity), # anon func arguments
- (op1_re, Operator),
-
- # numbers
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[\da-fA-F]+', Number.Hex),
- (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
- (r'\d(_?\d)*', Number.Integer),
-
- # strings and heredocs
- (r'"""\s*', String.Heredoc, 'heredoc_double'),
- (r"'''\s*$", String.Heredoc, 'heredoc_single'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single'),
-
- include('sigils'),
-
- (r'%\{', Punctuation, 'map_key'),
- (r'\{', Punctuation, 'tuple'),
- ],
- 'heredoc_double': [
- (r'^\s*"""', String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_single': [
- (r"^\s*'''", String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_interpol': [
- (r'[^#\\\n]+', String.Heredoc),
- include('escapes'),
- (r'\\.', String.Heredoc),
- (r'\n+', String.Heredoc),
- include('interpol'),
- ],
- 'heredoc_no_interpol': [
- (r'[^\\\n]+', String.Heredoc),
- (r'\\.', String.Heredoc),
- (r'\n+', String.Heredoc),
- ],
- 'escapes': [
- (long_hex_char_re,
- bygroups(String.Escape, Number.Hex, String.Escape)),
- (hex_char_re, String.Escape),
- (escape_char_re, String.Escape),
- ],
- 'interpol': [
- (r'#\{', String.Interpol, 'interpol_string'),
- ],
- 'interpol_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'map_key': [
- include('root'),
- (r':', Punctuation, 'map_val'),
- (r'=>', Punctuation, 'map_val'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'map_val': [
- include('root'),
- (r',', Punctuation, '#pop'),
- (r'(?=\})', Punctuation, '#pop'),
- ],
- 'tuple': [
- include('root'),
- (r'\}', Punctuation, '#pop'),
- ],
- }
- tokens.update(gen_elixir_string_rules('double', '"', String.Double))
- tokens.update(gen_elixir_string_rules('single', "'", String.Single))
- tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
- tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
- tokens.update(gen_elixir_sigil_rules())
-
-
-class ElixirConsoleLexer(Lexer):
- """
- For Elixir interactive console (iex) output like:
-
- .. sourcecode:: iex
-
- iex> [head | tail] = [1,2,3]
- [1,2,3]
- iex> head
- 1
- iex> tail
- [2,3]
- iex> [head | tail]
- [1,2,3]
- iex> length [head | tail]
- 3
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir iex session'
- aliases = ['iex']
- mimetypes = ['text/x-elixir-shellsession']
-
+ name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
+ modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
+ complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
+ special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
+
+ long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
+ hex_char_re = r'(\\x[\da-fA-F]{1,2})'
+ escape_char_re = r'(\\[abdefnrstv])'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*$', Comment.Single),
+
+ # Various kinds of characters
+ (r'(\?)' + long_hex_char_re,
+ bygroups(String.Char,
+ String.Escape, Number.Hex, String.Escape)),
+ (r'(\?)' + hex_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'(\?)' + escape_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'\?\\?.', String.Char),
+
+ # '::' has to go before atoms
+ (r':::', String.Symbol),
+ (r'::', Operator),
+
+ # atoms
+ (r':' + special_atom_re, String.Symbol),
+ (r':' + complex_name_re, String.Symbol),
+ (r':"', String.Symbol, 'string_double_atom'),
+ (r":'", String.Symbol, 'string_single_atom'),
+
+ # [keywords: ...]
+ (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
+ bygroups(String.Symbol, Punctuation)),
+
+ # @attributes
+ (r'@' + name_re, Name.Attribute),
+
+ # identifiers
+ (name_re, Name),
+ (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
+
+ # operators and punctuation
+ (op3_re, Operator),
+ (op2_re, Operator),
+ (punctuation_re, Punctuation),
+ (r'&\d', Name.Entity), # anon func arguments
+ (op1_re, Operator),
+
+ # numbers
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[\da-fA-F]+', Number.Hex),
+ (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
+ (r'\d(_?\d)*', Number.Integer),
+
+ # strings and heredocs
+ (r'"""\s*', String.Heredoc, 'heredoc_double'),
+ (r"'''\s*$", String.Heredoc, 'heredoc_single'),
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single'),
+
+ include('sigils'),
+
+ (r'%\{', Punctuation, 'map_key'),
+ (r'\{', Punctuation, 'tuple'),
+ ],
+ 'heredoc_double': [
+ (r'^\s*"""', String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_single': [
+ (r"^\s*'''", String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_interpol': [
+ (r'[^#\\\n]+', String.Heredoc),
+ include('escapes'),
+ (r'\\.', String.Heredoc),
+ (r'\n+', String.Heredoc),
+ include('interpol'),
+ ],
+ 'heredoc_no_interpol': [
+ (r'[^\\\n]+', String.Heredoc),
+ (r'\\.', String.Heredoc),
+ (r'\n+', String.Heredoc),
+ ],
+ 'escapes': [
+ (long_hex_char_re,
+ bygroups(String.Escape, Number.Hex, String.Escape)),
+ (hex_char_re, String.Escape),
+ (escape_char_re, String.Escape),
+ ],
+ 'interpol': [
+ (r'#\{', String.Interpol, 'interpol_string'),
+ ],
+ 'interpol_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'map_key': [
+ include('root'),
+ (r':', Punctuation, 'map_val'),
+ (r'=>', Punctuation, 'map_val'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'map_val': [
+ include('root'),
+ (r',', Punctuation, '#pop'),
+ (r'(?=\})', Punctuation, '#pop'),
+ ],
+ 'tuple': [
+ include('root'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ }
+ tokens.update(gen_elixir_string_rules('double', '"', String.Double))
+ tokens.update(gen_elixir_string_rules('single', "'", String.Single))
+ tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
+ tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
+ tokens.update(gen_elixir_sigil_rules())
+
+
+class ElixirConsoleLexer(Lexer):
+ """
+ For Elixir interactive console (iex) output like:
+
+ .. sourcecode:: iex
+
+ iex> [head | tail] = [1,2,3]
+ [1,2,3]
+ iex> head
+ 1
+ iex> tail
+ [2,3]
+ iex> [head | tail]
+ [1,2,3]
+ iex> length [head | tail]
+ 3
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir iex session'
+ aliases = ['iex']
+ mimetypes = ['text/x-elixir-shellsession']
+
_prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
-
- def get_tokens_unprocessed(self, text):
- exlexer = ElixirLexer(**self.options)
-
- curcode = ''
- in_error = False
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith(u'** '):
- in_error = True
- insertions.append((len(curcode),
- [(0, Generic.Error, line[:-1])]))
- curcode += line[-1:]
- else:
- m = self._prompt_re.match(line)
- if m is not None:
- in_error = False
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- token = Generic.Error if in_error else Generic.Output
- yield match.start(), token, line
- if curcode:
- for item in do_insertions(
- insertions, exlexer.get_tokens_unprocessed(curcode)):
- yield item
+
+ def get_tokens_unprocessed(self, text):
+ exlexer = ElixirLexer(**self.options)
+
+ curcode = ''
+ in_error = False
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith(u'** '):
+ in_error = True
+ insertions.append((len(curcode),
+ [(0, Generic.Error, line[:-1])]))
+ curcode += line[-1:]
+ else:
+ m = self._prompt_re.match(line)
+ if m is not None:
+ in_error = False
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ token = Generic.Error if in_error else Generic.Output
+ yield match.start(), token, line
+ if curcode:
+ for item in do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode)):
+ yield item
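A sketch of feeding the iex transcript from the docstring above through the console lexer (assuming upstream Pygments; the HTML formatter is only one possible choice):

# Prompt lines are matched by _prompt_re and emitted as Generic.Prompt, the code
# after the prompt is re-lexed with ElixirLexer via do_insertions, and result
# lines become Generic.Output (or Generic.Error after a '** ' line).
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.erlang import ElixirConsoleLexer

session = 'iex> [head | tail] = [1,2,3]\n[1,2,3]\niex> head\n1\n'
print(highlight(session, ElixirConsoleLexer(), HtmlFormatter()))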
diff --git a/contrib/python/Pygments/py2/pygments/lexers/esoteric.py b/contrib/python/Pygments/py2/pygments/lexers/esoteric.py
index 6946c90243..727a686d61 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/esoteric.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/esoteric.py
@@ -1,149 +1,149 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.esoteric
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for esoteric languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.esoteric
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for esoteric languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
-
+
__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
'CapDLLexer', 'AheuiLexer']
-
-
-class BrainfuckLexer(RegexLexer):
- """
- Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
- language.
- """
-
- name = 'Brainfuck'
- aliases = ['brainfuck', 'bf']
- filenames = ['*.bf', '*.b']
- mimetypes = ['application/x-brainfuck']
-
- tokens = {
- 'common': [
- # use different colors for different instruction types
- (r'[.,]+', Name.Tag),
- (r'[+-]+', Name.Builtin),
- (r'[<>]+', Name.Variable),
- (r'[^.,+\-<>\[\]]+', Comment),
- ],
+
+
+class BrainfuckLexer(RegexLexer):
+ """
+ Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
+ language.
+ """
+
+ name = 'Brainfuck'
+ aliases = ['brainfuck', 'bf']
+ filenames = ['*.bf', '*.b']
+ mimetypes = ['application/x-brainfuck']
+
+ tokens = {
+ 'common': [
+ # use different colors for different instruction types
+ (r'[.,]+', Name.Tag),
+ (r'[+-]+', Name.Builtin),
+ (r'[<>]+', Name.Variable),
+ (r'[^.,+\-<>\[\]]+', Comment),
+ ],
+ 'root': [
+ (r'\[', Keyword, 'loop'),
+ (r'\]', Error),
+ include('common'),
+ ],
+ 'loop': [
+ (r'\[', Keyword, '#push'),
+ (r'\]', Keyword, '#pop'),
+ include('common'),
+ ]
+ }
+
+
+class BefungeLexer(RegexLexer):
+ """
+ Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
+ language.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Befunge'
+ aliases = ['befunge']
+ filenames = ['*.befunge']
+ mimetypes = ['application/x-befunge']
+
+ tokens = {
+ 'root': [
+ (r'[0-9a-f]', Number),
+ (r'[+*/%!`-]', Operator), # Traditional math
+ (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
+ (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
+ (r'[|_mw]', Keyword),
+ (r'[{}]', Name.Tag), # Befunge-98 stack ops
+ (r'".*?"', String.Double), # Strings don't appear to allow escapes
+ (r'\'.', String.Single), # Single character
+ (r'[#;]', Comment), # Trampoline... depends on direction hit
+ (r'[pg&~=@iotsy]', Keyword), # Misc
+ (r'[()A-Z]', Comment), # Fingerprints
+ (r'\s+', Text), # Whitespace doesn't matter
+ ],
+ }
+
+
+class CAmkESLexer(RegexLexer):
+ """
+ Basic lexer for the input language for the
+ `CAmkES <https://sel4.systems/CAmkES/>`_ component platform.
+
+ .. versionadded:: 2.1
+ """
+ name = 'CAmkES'
+ aliases = ['camkes', 'idl4']
+ filenames = ['*.camkes', '*.idl4']
+
+ tokens = {
'root': [
- (r'\[', Keyword, 'loop'),
- (r'\]', Error),
- include('common'),
- ],
- 'loop': [
- (r'\[', Keyword, '#push'),
- (r'\]', Keyword, '#pop'),
- include('common'),
- ]
- }
-
-
-class BefungeLexer(RegexLexer):
- """
- Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
- language.
-
- .. versionadded:: 0.7
- """
- name = 'Befunge'
- aliases = ['befunge']
- filenames = ['*.befunge']
- mimetypes = ['application/x-befunge']
-
- tokens = {
- 'root': [
- (r'[0-9a-f]', Number),
- (r'[+*/%!`-]', Operator), # Traditional math
- (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
- (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
- (r'[|_mw]', Keyword),
- (r'[{}]', Name.Tag), # Befunge-98 stack ops
- (r'".*?"', String.Double), # Strings don't appear to allow escapes
- (r'\'.', String.Single), # Single character
- (r'[#;]', Comment), # Trampoline... depends on direction hit
- (r'[pg&~=@iotsy]', Keyword), # Misc
- (r'[()A-Z]', Comment), # Fingerprints
- (r'\s+', Text), # Whitespace doesn't matter
- ],
- }
-
-
-class CAmkESLexer(RegexLexer):
- """
- Basic lexer for the input language for the
- `CAmkES <https://sel4.systems/CAmkES/>`_ component platform.
-
- .. versionadded:: 2.1
- """
- name = 'CAmkES'
- aliases = ['camkes', 'idl4']
- filenames = ['*.camkes', '*.idl4']
-
- tokens = {
- 'root': [
- # C pre-processor directive
- (r'^\s*#.*\n', Comment.Preproc),
-
- # Whitespace, comments
- (r'\s+', Text),
- (r'/\*(.|\n)*?\*/', Comment),
- (r'//.*\n', Comment),
-
+ # C pre-processor directive
+ (r'^\s*#.*\n', Comment.Preproc),
+
+ # Whitespace, comments
+ (r'\s+', Text),
+ (r'/\*(.|\n)*?\*/', Comment),
+ (r'//.*\n', Comment),
+
(r'[\[(){},.;\]]', Punctuation),
(r'[~!%^&*+=|?:<>/-]', Operator),
-
- (words(('assembly', 'attribute', 'component', 'composition',
- 'configuration', 'connection', 'connector', 'consumes',
+
+ (words(('assembly', 'attribute', 'component', 'composition',
+ 'configuration', 'connection', 'connector', 'consumes',
'control', 'dataport', 'Dataport', 'Dataports', 'emits',
'event', 'Event', 'Events', 'export', 'from', 'group',
'hardware', 'has', 'interface', 'Interface', 'maybe',
'procedure', 'Procedure', 'Procedures', 'provides',
'template', 'thread', 'threads', 'to', 'uses', 'with'),
suffix=r'\b'), Keyword),
-
- (words(('bool', 'boolean', 'Buf', 'char', 'character', 'double',
- 'float', 'in', 'inout', 'int', 'int16_6', 'int32_t',
- 'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real',
+
+ (words(('bool', 'boolean', 'Buf', 'char', 'character', 'double',
+ 'float', 'in', 'inout', 'int', 'int16_6', 'int32_t',
+ 'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real',
'refin', 'semaphore', 'signed', 'string', 'struct',
'uint16_t', 'uint32_t', 'uint64_t', 'uint8_t', 'uintptr_t',
'unsigned', 'void'),
suffix=r'\b'), Keyword.Type),
-
- # Recognised attributes
- (r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved),
- (words(('dma_pool', 'from_access', 'to_access'), suffix=r'\b'),
- Keyword.Reserved),
-
- # CAmkES-level include
- (r'import\s+(<[^>]*>|"[^"]*");', Comment.Preproc),
-
- # C-level include
- (r'include\s+(<[^>]*>|"[^"]*");', Comment.Preproc),
-
- # Literals
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'-?[\d]+', Number),
- (r'-?[\d]+\.[\d]+', Number.Float),
- (r'"[^"]*"', String),
+
+ # Recognised attributes
+ (r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved),
+ (words(('dma_pool', 'from_access', 'to_access'), suffix=r'\b'),
+ Keyword.Reserved),
+
+ # CAmkES-level include
+ (r'import\s+(<[^>]*>|"[^"]*");', Comment.Preproc),
+
+ # C-level include
+ (r'include\s+(<[^>]*>|"[^"]*");', Comment.Preproc),
+
+ # Literals
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'-?[\d]+', Number),
+ (r'-?[\d]+\.[\d]+', Number.Float),
+ (r'"[^"]*"', String),
(r'[Tt]rue|[Ff]alse', Name.Builtin),
-
- # Identifiers
- (r'[a-zA-Z_]\w*', Name),
- ],
- }
-
-
+
+ # Identifiers
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ }
+
+
class CapDLLexer(RegexLexer):
"""
Basic lexer for
@@ -203,58 +203,58 @@ class CapDLLexer(RegexLexer):
}
-class RedcodeLexer(RegexLexer):
- """
- A simple Redcode lexer based on ICWS'94.
- Contributed by Adam Blinkinsop <blinks@acm.org>.
-
- .. versionadded:: 0.8
- """
- name = 'Redcode'
- aliases = ['redcode']
- filenames = ['*.cw']
-
- opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
- 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
- 'ORG', 'EQU', 'END')
- modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- (r';.*$', Comment.Single),
- # Lexemes:
- # Identifiers
- (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
- (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
- (r'[A-Za-z_]\w+', Name),
- # Operators
- (r'[-+*/%]', Operator),
- (r'[#$@<>]', Operator), # mode
- (r'[.,]', Punctuation), # mode
- # Numbers
- (r'[-+]?\d+', Number.Integer),
- ],
- }
-
-
+class RedcodeLexer(RegexLexer):
+ """
+ A simple Redcode lexer based on ICWS'94.
+ Contributed by Adam Blinkinsop <blinks@acm.org>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Redcode'
+ aliases = ['redcode']
+ filenames = ['*.cw']
+
+ opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
+ 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
+ 'ORG', 'EQU', 'END')
+ modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ (r';.*$', Comment.Single),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
+ (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
+ (r'[A-Za-z_]\w+', Name),
+ # Operators
+ (r'[-+*/%]', Operator),
+ (r'[#$@<>]', Operator), # mode
+ (r'[.,]', Punctuation), # mode
+ # Numbers
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ }
+
+
class AheuiLexer(RegexLexer):
- """
+ """
Aheui_ Lexer.
-
+
Aheui_ is esoteric language based on Korean alphabets.
.. _Aheui: http://aheui.github.io/
- """
-
+ """
+
name = 'Aheui'
aliases = ['aheui']
filenames = ['*.aheui']
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
(u'['
u'나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
u'다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
@@ -273,5 +273,5 @@ class AheuiLexer(RegexLexer):
u'하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
u']', Operator),
('.', Comment),
- ],
- }
+ ],
+ }
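One more sketch, for the Brainfuck lexer above (assuming upstream Pygments; the program is a meaningless illustrative snippet):

# '[' pushes the 'loop' state and ']' pops it, so a stray ']' at the top level
# is reported as Error; all other characters fall into the shared 'common' rules.
from pygments.lexers.esoteric import BrainfuckLexer

program = '++[>+<-].'
for token_type, value in BrainfuckLexer().get_tokens(program):
    print(token_type, repr(value))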
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ezhil.py b/contrib/python/Pygments/py2/pygments/lexers/ezhil.py
index ee465885c9..a8e51a5103 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ezhil.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ezhil.py
@@ -1,42 +1,42 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.ezhil
- ~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for Ezhil language.
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ezhil
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexers for Ezhil language.
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Keyword, Text, Comment, Name
-from pygments.token import String, Number, Punctuation, Operator
-
-__all__ = ['EzhilLexer']
-
-
-class EzhilLexer(RegexLexer):
- """
- Lexer for `Ezhil, a Tamil script-based programming language <http://ezhillang.org>`_
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Keyword, Text, Comment, Name
+from pygments.token import String, Number, Punctuation, Operator
+
+__all__ = ['EzhilLexer']
+
- .. versionadded:: 2.1
- """
- name = 'Ezhil'
- aliases = ['ezhil']
- filenames = ['*.n']
- mimetypes = ['text/x-ezhil']
- flags = re.MULTILINE | re.UNICODE
- # Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
- # This much simpler version is close enough, and includes combining marks.
- _TALETTERS = u'[a-zA-Z_]|[\u0b80-\u0bff]'
- tokens = {
- 'root': [
- include('keywords'),
- (r'#.*\n', Comment.Single),
- (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
- (u'இல்', Operator.Word),
+class EzhilLexer(RegexLexer):
+ """
+ Lexer for `Ezhil, a Tamil script-based programming language <http://ezhillang.org>`_
+
+ .. versionadded:: 2.1
+ """
+ name = 'Ezhil'
+ aliases = ['ezhil']
+ filenames = ['*.n']
+ mimetypes = ['text/x-ezhil']
+ flags = re.MULTILINE | re.UNICODE
+ # Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
+ # This much simpler version is close enough, and includes combining marks.
+ _TALETTERS = u'[a-zA-Z_]|[\u0b80-\u0bff]'
+ tokens = {
+ 'root': [
+ include('keywords'),
+ (r'#.*\n', Comment.Single),
+ (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
+ (u'இல்', Operator.Word),
(words((u'assert', u'max', u'min',
u'நீளம்', u'சரம்_இடமாற்று', u'சரம்_கண்டுபிடி',
u'பட்டியல்', u'பின்இணை', u'வரிசைப்படுத்து',
@@ -44,26 +44,26 @@ class EzhilLexer(RegexLexer):
u'கோப்பை_திற', u'கோப்பை_எழுது', u'கோப்பை_மூடு',
u'pi', u'sin', u'cos', u'tan', u'sqrt', u'hypot', u'pow',
u'exp', u'log', u'log10', u'exit',
- ), suffix=r'\b'), Name.Builtin),
- (r'(True|False)\b', Keyword.Constant),
- (r'[^\S\n]+', Text),
- include('identifier'),
- include('literal'),
- (r'[(){}\[\]:;.]', Punctuation),
- ],
- 'keywords': [
- (u'பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword),
- ],
- 'identifier': [
- (u'(?:'+_TALETTERS+u')(?:[0-9]|'+_TALETTERS+u')*', Name),
- ],
- 'literal': [
- (r'".*?"', String),
- (r'(?u)\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float),
- (r'(?u)\d+', Number.Integer),
- ]
- }
+ ), suffix=r'\b'), Name.Builtin),
+ (r'(True|False)\b', Keyword.Constant),
+ (r'[^\S\n]+', Text),
+ include('identifier'),
+ include('literal'),
+ (r'[(){}\[\]:;.]', Punctuation),
+ ],
+ 'keywords': [
+ (u'பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword),
+ ],
+ 'identifier': [
+ (u'(?:'+_TALETTERS+u')(?:[0-9]|'+_TALETTERS+u')*', Name),
+ ],
+ 'literal': [
+ (r'".*?"', String),
+ (r'(?u)\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float),
+ (r'(?u)\d+', Number.Integer),
+ ]
+ }
- def __init__(self, **options):
- super(EzhilLexer, self).__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
+ def __init__(self, **options):
+ super(EzhilLexer, self).__init__(**options)
+ self.encoding = options.get('encoding', 'utf-8')
diff --git a/contrib/python/Pygments/py2/pygments/lexers/factor.py b/contrib/python/Pygments/py2/pygments/lexers/factor.py
index 4aed8f6e3e..1d9282dcd0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/factor.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/factor.py
@@ -1,344 +1,344 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.factor
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Factor language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.factor
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Factor language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default, words
-from pygments.token import Text, Comment, Keyword, Name, String, Number
-
-__all__ = ['FactorLexer']
-
-
-class FactorLexer(RegexLexer):
- """
- Lexer for the `Factor <http://factorcode.org>`_ language.
-
- .. versionadded:: 1.4
- """
- name = 'Factor'
- aliases = ['factor']
- filenames = ['*.factor']
- mimetypes = ['text/x-factor']
-
- flags = re.MULTILINE | re.UNICODE
-
- builtin_kernel = words((
- '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
- '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
- '3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
- '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
- 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
- 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
- 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
- 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
- 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
- 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
- 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
- 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
- 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
- 'wrapper', 'wrapper?', 'xor'), suffix=r'\s')
-
- builtin_assocs = words((
- '2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
- 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
- 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
- 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
- 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
- 'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
- 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
- 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
- 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
- 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
- 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
- 'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'\s')
-
- builtin_combinators = words((
- '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
- '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
- 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
- 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
- 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
- 'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'\s')
-
- builtin_math = words((
- '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
- '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
- '(each-integer)', '(find-integer)', '*', '+', '?1+',
- 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
- 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
- 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
- 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
- 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
- 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
- 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
- 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
- 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
- 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
- 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
- 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
- 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
- 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
- 'zero?'), suffix=r'\s')
-
- builtin_sequences = words((
- '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
- '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
- '3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
- '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
- 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
- 'assert-sequence', 'assert-sequence=', 'assert-sequence?',
- 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
- 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
- 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
- 'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
- 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
- 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
- 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
- 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
- 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
- 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
- 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
- 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
- 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
- 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
- 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
- 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
- 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
- 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
- 'non-negative-integer-expected', 'non-negative-integer-expected?',
- 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
- 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
- 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
- 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
- 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
- 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
- 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
- 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
- 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
- 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
- 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
- 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
- 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
- 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
- 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
- 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
- 'when-empty'), suffix=r'\s')
-
- builtin_namespaces = words((
- '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
- 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
- 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
- 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
- suffix=r'\s')
-
- builtin_arrays = words((
- '1array', '2array', '3array', '4array', '<array>', '>array', 'array',
- 'array?', 'pair', 'pair?', 'resize-array'), suffix=r'\s')
-
- builtin_io = words((
- '(each-stream-block-slice)', '(each-stream-block)',
- '(stream-contents-by-block)', '(stream-contents-by-element)',
- '(stream-contents-by-length-or-block)',
- '(stream-contents-by-length)', '+byte+', '+character+',
- 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
- 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
- 'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
- 'error-stream', 'flush', 'input-stream', 'input-stream?',
- 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
- 'output-stream', 'output-stream?', 'print', 'read', 'read-into',
- 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
- 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
- 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
- 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
- 'stream-copy*', 'stream-element-type', 'stream-flush',
- 'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
- 'stream-read', 'stream-read-into', 'stream-read-partial',
- 'stream-read-partial-into', 'stream-read-partial-unsafe',
- 'stream-read-unsafe', 'stream-read-until', 'stream-read1',
- 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
- 'stream-write', 'stream-write1', 'tell-input', 'tell-output',
- 'with-error-stream', 'with-error-stream*', 'with-error>output',
- 'with-input-output+error-streams',
- 'with-input-output+error-streams*', 'with-input-stream',
- 'with-input-stream*', 'with-output-stream', 'with-output-stream*',
- 'with-output>error', 'with-output+error-stream',
- 'with-output+error-stream*', 'with-streams', 'with-streams*',
- 'write', 'write1'), suffix=r'\s')
-
- builtin_strings = words((
- '1string', '<string>', '>string', 'resize-string', 'string',
- 'string?'), suffix=r'\s')
-
- builtin_vectors = words((
- '1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
- suffix=r'\s')
-
- builtin_continuations = words((
- '<condition>', '<continuation>', '<restart>', 'attempt-all',
- 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
- 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
- 'condition?', 'continuation', 'continuation?', 'continue',
- 'continue-restart', 'continue-with', 'current-continuation',
- 'error', 'error-continuation', 'error-in-thread', 'error-thread',
- 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
- 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
- 'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
- 'throw-restarts', 'with-datastack', 'with-return'), suffix=r'\s')
-
- tokens = {
- 'root': [
- # factor allows a file to start with a shebang
- (r'#!.*$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- (r'\s+', Text),
-
- # defining words
- (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Function)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
- (r'(GENERIC:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function, Text, Name.Function)),
- (r'\(\s', Name.Function, 'stackeffect'),
- (r';\s', Keyword),
-
- # imports and namespaces
- (r'(USING:)(\s+)',
- bygroups(Keyword.Namespace, Text), 'vocabs'),
- (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Name.Namespace)),
- (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text), 'words'),
- (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Namespace, Text, Name.Function)),
- (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function)),
- (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Function)),
-
- # tuples and classes
- (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
- (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class), 'slots'),
- (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class)),
- (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
- (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
- (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
- (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
- (r'SINGLETONS:', Keyword, 'classes'),
-
- # other syntax
- (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'SYMBOLS:\s', Keyword, 'words'),
- (r'SYNTAX:\s', Keyword),
- (r'ALIEN:\s', Keyword),
- (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
- (r'(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text)),
- (r'(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function, Text)),
-
- # vocab.private
- (r'(?:<PRIVATE|PRIVATE>)\s', Keyword.Namespace),
-
- # strings
- (r'"""\s+(?:.|\n)*?\s+"""', String),
- (r'"(?:\\\\|\\"|[^"])*"', String),
- (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String),
- (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char),
-
- # comments
- (r'!\s+.*$', Comment),
- (r'#!\s+.*$', Comment),
- (r'/\*\s+(?:.|\n)*?\s\*/\s', Comment),
-
- # boolean constants
- (r'[tf]\s', Name.Constant),
-
- # symbols and literals
- (r'[\\$]\s+\S+', Name.Constant),
- (r'M\\\s+\S+\s+\S+', Name.Constant),
-
- # numbers
- (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
- (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
- (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'0b[01]+\s', Number.Bin),
- (r'0o[0-7]+\s', Number.Oct),
- (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
- (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
-
- # keywords
- (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
- Keyword),
-
- # builtins
- (builtin_kernel, Name.Builtin),
- (builtin_assocs, Name.Builtin),
- (builtin_combinators, Name.Builtin),
- (builtin_math, Name.Builtin),
- (builtin_sequences, Name.Builtin),
- (builtin_namespaces, Name.Builtin),
- (builtin_arrays, Name.Builtin),
- (builtin_io, Name.Builtin),
- (builtin_strings, Name.Builtin),
- (builtin_vectors, Name.Builtin),
- (builtin_continuations, Name.Builtin),
-
- # everything else is text
- (r'\S+', Text),
- ],
- 'stackeffect': [
- (r'\s+', Text),
- (r'\(\s+', Name.Function, 'stackeffect'),
- (r'\)\s', Name.Function, '#pop'),
- (r'--\s', Name.Function),
- (r'\S+', Name.Variable),
- ],
- 'slots': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'(\{\s+)(\S+)(\s+[^}]+\s+\}\s)',
- bygroups(Text, Name.Variable, Text)),
- (r'\S+', Name.Variable),
- ],
- 'vocabs': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'\S+', Name.Namespace),
- ],
- 'classes': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'\S+', Name.Class),
- ],
- 'words': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'\S+', Name.Function),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import Text, Comment, Keyword, Name, String, Number
+
+__all__ = ['FactorLexer']
+
+
+class FactorLexer(RegexLexer):
+ """
+ Lexer for the `Factor <http://factorcode.org>`_ language.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Factor'
+ aliases = ['factor']
+ filenames = ['*.factor']
+ mimetypes = ['text/x-factor']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ builtin_kernel = words((
+ '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
+ '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
+ '3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
+ '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
+ 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
+ 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
+ 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
+ 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
+ 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
+ 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
+ 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
+ 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
+ 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
+ 'wrapper', 'wrapper?', 'xor'), suffix=r'\s')
+
+ builtin_assocs = words((
+ '2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
+ 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
+ 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
+ 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
+ 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
+ 'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
+ 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
+ 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
+ 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
+ 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
+ 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
+ 'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'\s')
+
+ builtin_combinators = words((
+ '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
+ '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
+ 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
+ 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
+ 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
+ 'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'\s')
+
+ builtin_math = words((
+ '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
+ '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
+ '(each-integer)', '(find-integer)', '*', '+', '?1+',
+ 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
+ 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
+ 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
+ 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
+ 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
+ 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
+ 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
+ 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
+ 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
+ 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
+ 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
+ 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
+ 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
+ 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
+ 'zero?'), suffix=r'\s')
+
+ builtin_sequences = words((
+ '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
+ '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
+ '3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
+ '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
+ 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
+ 'assert-sequence', 'assert-sequence=', 'assert-sequence?',
+ 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
+ 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
+ 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
+ 'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
+ 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
+ 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
+ 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
+ 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
+ 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
+ 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
+ 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
+ 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
+ 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
+ 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
+ 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
+ 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
+ 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
+ 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
+ 'non-negative-integer-expected', 'non-negative-integer-expected?',
+ 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
+ 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
+ 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
+ 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
+ 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
+ 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
+ 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
+ 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
+ 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
+ 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
+ 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
+ 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
+ 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
+ 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
+ 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
+ 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
+ 'when-empty'), suffix=r'\s')
+
+ builtin_namespaces = words((
+ '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
+ 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
+ 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
+ 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
+ suffix=r'\s')
+
+ builtin_arrays = words((
+ '1array', '2array', '3array', '4array', '<array>', '>array', 'array',
+ 'array?', 'pair', 'pair?', 'resize-array'), suffix=r'\s')
+
+ builtin_io = words((
+ '(each-stream-block-slice)', '(each-stream-block)',
+ '(stream-contents-by-block)', '(stream-contents-by-element)',
+ '(stream-contents-by-length-or-block)',
+ '(stream-contents-by-length)', '+byte+', '+character+',
+ 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
+ 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
+ 'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
+ 'error-stream', 'flush', 'input-stream', 'input-stream?',
+ 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
+ 'output-stream', 'output-stream?', 'print', 'read', 'read-into',
+ 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
+ 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
+ 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
+ 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
+ 'stream-copy*', 'stream-element-type', 'stream-flush',
+ 'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
+ 'stream-read', 'stream-read-into', 'stream-read-partial',
+ 'stream-read-partial-into', 'stream-read-partial-unsafe',
+ 'stream-read-unsafe', 'stream-read-until', 'stream-read1',
+ 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
+ 'stream-write', 'stream-write1', 'tell-input', 'tell-output',
+ 'with-error-stream', 'with-error-stream*', 'with-error>output',
+ 'with-input-output+error-streams',
+ 'with-input-output+error-streams*', 'with-input-stream',
+ 'with-input-stream*', 'with-output-stream', 'with-output-stream*',
+ 'with-output>error', 'with-output+error-stream',
+ 'with-output+error-stream*', 'with-streams', 'with-streams*',
+ 'write', 'write1'), suffix=r'\s')
+
+ builtin_strings = words((
+ '1string', '<string>', '>string', 'resize-string', 'string',
+ 'string?'), suffix=r'\s')
+
+ builtin_vectors = words((
+ '1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
+ suffix=r'\s')
+
+ builtin_continuations = words((
+ '<condition>', '<continuation>', '<restart>', 'attempt-all',
+ 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
+ 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
+ 'condition?', 'continuation', 'continuation?', 'continue',
+ 'continue-restart', 'continue-with', 'current-continuation',
+ 'error', 'error-continuation', 'error-in-thread', 'error-thread',
+ 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
+ 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
+ 'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
+ 'throw-restarts', 'with-datastack', 'with-return'), suffix=r'\s')
+
+ tokens = {
+ 'root': [
+ # factor allows a file to start with a shebang
+ (r'#!.*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ (r'\s+', Text),
+
+ # defining words
+ (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Function)),
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+ (r'(GENERIC:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Function)),
+ (r'\(\s', Name.Function, 'stackeffect'),
+ (r';\s', Keyword),
+
+ # imports and namespaces
+ (r'(USING:)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'vocabs'),
+ (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Name.Namespace)),
+ (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text), 'words'),
+ (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Namespace, Text, Name.Function)),
+ (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function)),
+ (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function)),
+
+ # tuples and classes
+ (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
+ (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class), 'slots'),
+ (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+ (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+ (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
+ (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+ (r'SINGLETONS:', Keyword, 'classes'),
+
+ # other syntax
+ (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'SYMBOLS:\s', Keyword, 'words'),
+ (r'SYNTAX:\s', Keyword),
+ (r'ALIEN:\s', Keyword),
+ (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+ (r'(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text)),
+ (r'(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function, Text)),
+
+ # vocab.private
+ (r'(?:<PRIVATE|PRIVATE>)\s', Keyword.Namespace),
+
+ # strings
+ (r'"""\s+(?:.|\n)*?\s+"""', String),
+ (r'"(?:\\\\|\\"|[^"])*"', String),
+ (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String),
+ (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char),
+
+ # comments
+ (r'!\s+.*$', Comment),
+ (r'#!\s+.*$', Comment),
+ (r'/\*\s+(?:.|\n)*?\s\*/\s', Comment),
+
+ # boolean constants
+ (r'[tf]\s', Name.Constant),
+
+ # symbols and literals
+ (r'[\\$]\s+\S+', Name.Constant),
+ (r'M\\\s+\S+\s+\S+', Name.Constant),
+
+ # numbers
+ (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'0b[01]+\s', Number.Bin),
+ (r'0o[0-7]+\s', Number.Oct),
+ (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+ (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+
+ # keywords
+ (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
+ Keyword),
+
+ # builtins
+ (builtin_kernel, Name.Builtin),
+ (builtin_assocs, Name.Builtin),
+ (builtin_combinators, Name.Builtin),
+ (builtin_math, Name.Builtin),
+ (builtin_sequences, Name.Builtin),
+ (builtin_namespaces, Name.Builtin),
+ (builtin_arrays, Name.Builtin),
+ (builtin_io, Name.Builtin),
+ (builtin_strings, Name.Builtin),
+ (builtin_vectors, Name.Builtin),
+ (builtin_continuations, Name.Builtin),
+
+ # everything else is text
+ (r'\S+', Text),
+ ],
+ 'stackeffect': [
+ (r'\s+', Text),
+ (r'\(\s+', Name.Function, 'stackeffect'),
+ (r'\)\s', Name.Function, '#pop'),
+ (r'--\s', Name.Function),
+ (r'\S+', Name.Variable),
+ ],
+ 'slots': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'(\{\s+)(\S+)(\s+[^}]+\s+\}\s)',
+ bygroups(Text, Name.Variable, Text)),
+ (r'\S+', Name.Variable),
+ ],
+ 'vocabs': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Namespace),
+ ],
+ 'classes': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Class),
+ ],
+ 'words': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Function),
+ ],
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/fantom.py b/contrib/python/Pygments/py2/pygments/lexers/fantom.py
index b1b0dd9490..a695c9c151 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/fantom.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/fantom.py
@@ -1,250 +1,250 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.fantom
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Fantom language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.fantom
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Fantom language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from string import Template
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-
-__all__ = ['FantomLexer']
-
-
-class FantomLexer(RegexLexer):
- """
- For Fantom source code.
-
- .. versionadded:: 1.5
- """
- name = 'Fantom'
- aliases = ['fan']
- filenames = ['*.fan']
- mimetypes = ['application/x-fantom']
-
- # often used regexes
- def s(str):
- return Template(str).substitute(
- dict(
- pod=r'[\"\w\.]+',
- eos=r'\n|;',
- id=r'[a-zA-Z_]\w*',
- # all chars which can be part of type definition. Starts with
- # either letter, or [ (maps), or | (funcs)
- type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?',
- )
- )
-
- tokens = {
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline
- (r'//.*?\n', Comment.Single), # Single line
- # TODO: highlight references in fandocs
- (r'\*\*.*?\n', Comment.Special), # Fandoc
- (r'#.*\n', Comment.Single) # Shell-style
- ],
- 'literals': [
- (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration
- (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot
- (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal
- (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex
- (r'\b-?[\d_]+', Number.Integer), # Int
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char
- (r'"', Punctuation, 'insideStr'), # Opening quote
- (r'`', Punctuation, 'insideUri'), # Opening accent
- (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null
- (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, String, Punctuation)),
- (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, Name.Function)),
- (r'\[,\]', Literal), # Empty list
- (s(r'($type)(\[,\])'), # Typed empty list
- bygroups(using(this, state='inType'), Literal)),
- (r'\[:\]', Literal), # Empty Map
- (s(r'($type)(\[:\])'),
- bygroups(using(this, state='inType'), Literal)),
- ],
- 'insideStr': [
- (r'\\\\', String.Escape), # Escaped backslash
- (r'\\"', String.Escape), # Escaped "
- (r'\\`', String.Escape), # Escaped `
- (r'\$\w+', String.Interpol), # Subst var
- (r'\$\{.*?\}', String.Interpol), # Subst expr
- (r'"', Punctuation, '#pop'), # Closing quot
- (r'.', String) # String content
- ],
- 'insideUri': [ # TODO: remove copy/paste str/uri
- (r'\\\\', String.Escape), # Escaped backslash
- (r'\\"', String.Escape), # Escaped "
- (r'\\`', String.Escape), # Escaped `
- (r'\$\w+', String.Interpol), # Subst var
- (r'\$\{.*?\}', String.Interpol), # Subst expr
- (r'`', Punctuation, '#pop'), # Closing tick
- (r'.', String.Backtick) # URI content
- ],
- 'protectionKeywords': [
- (r'\b(public|protected|private|internal)\b', Keyword),
- ],
- 'typeKeywords': [
- (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
- ],
- 'methodKeywords': [
- (r'\b(abstract|native|once|override|static|virtual|final)\b',
- Keyword),
- ],
- 'fieldKeywords': [
- (r'\b(abstract|const|final|native|override|static|virtual|'
- r'readonly)\b', Keyword)
- ],
- 'otherKeywords': [
- (words((
- 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while',
- 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue',
- 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
- ],
- 'operators': [
- (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
- ],
- 'inType': [
- (r'[\[\]|\->:?]', Punctuation),
- (s(r'$id'), Name.Class),
- default('#pop'),
-
- ],
- 'root': [
- include('comments'),
- include('protectionKeywords'),
- include('typeKeywords'),
- include('methodKeywords'),
- include('fieldKeywords'),
- include('literals'),
- include('otherKeywords'),
- include('operators'),
- (r'using\b', Keyword.Namespace, 'using'), # Using stmt
- (r'@\w+', Name.Decorator, 'facet'), # Symbol
- (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
- 'inheritance'), # Inheritance list
-
- # Type var := val
- (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
- bygroups(using(this, state='inType'), Text,
- Name.Variable, Text, Operator)),
-
- # var := val
- (s(r'($id)(\s*)(:=)'),
- bygroups(Name.Variable, Text, Operator)),
-
- # .someId( or ->someId( ###
- (s(r'(\.|(?:\->))($id)(\s*)(\()'),
- bygroups(Operator, Name.Function, Text, Punctuation),
- 'insideParen'),
-
- # .someId or ->someId
- (s(r'(\.|(?:\->))($id)'),
- bygroups(Operator, Name.Function)),
-
- # new makeXXX (
- (r'(new)(\s+)(make\w*)(\s*)(\()',
- bygroups(Keyword, Text, Name.Function, Text, Punctuation),
- 'insideMethodDeclArgs'),
-
- # Type name (
- (s(r'($type)([ \t]+)' # Return type and whitespace
- r'($id)(\s*)(\()'), # method name + open brace
- bygroups(using(this, state='inType'), Text,
- Name.Function, Text, Punctuation),
- 'insideMethodDeclArgs'),
-
- # ArgType argName,
- (s(r'($type)(\s+)($id)(\s*)(,)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation)),
-
- # ArgType argName)
- # Covered in 'insideParen' state
-
- # ArgType argName -> ArgType|
- (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation, Text, using(this, state='inType'),
- Punctuation)),
-
- # ArgType argName|
- (s(r'($type)(\s+)($id)(\s*)(\|)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation)),
-
- # Type var
- (s(r'($type)([ \t]+)($id)'),
- bygroups(using(this, state='inType'), Text,
- Name.Variable)),
-
- (r'\(', Punctuation, 'insideParen'),
- (r'\{', Punctuation, 'insideBrace'),
- (r'.', Text)
- ],
- 'insideParen': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'insideMethodDeclArgs': [
- (r'\)', Punctuation, '#pop'),
- (s(r'($type)(\s+)($id)(\s*)(\))'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation), '#pop'),
- include('root'),
- ],
- 'insideBrace': [
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'inheritance': [
- (r'\s+', Text), # Whitespace
- (r':|,', Punctuation),
- (r'(?:(\w+)(::))?(\w+)',
- bygroups(Name.Namespace, Punctuation, Name.Class)),
- (r'\{', Punctuation, '#pop')
- ],
- 'using': [
- (r'[ \t]+', Text), # consume whitespaces
- (r'(\[)(\w+)(\])',
- bygroups(Punctuation, Comment.Special, Punctuation)), # ffi
- (r'(\")?([\w.]+)(\")?',
- bygroups(Punctuation, Name.Namespace, Punctuation)), # podname
- (r'::', Punctuation, 'usingClass'),
- default('#pop')
- ],
- 'usingClass': [
- (r'[ \t]+', Text), # consume whitespaces
- (r'(as)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
- (r'[\w$]+', Name.Class),
- default('#pop:2') # jump out to root state
- ],
- 'facet': [
- (r'\s+', Text),
- (r'\{', Punctuation, 'facetFields'),
- default('#pop')
- ],
- 'facetFields': [
- include('comments'),
- include('literals'),
- include('operators'),
- (r'\s+', Text),
- (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
- (r'\}', Punctuation, '#pop'),
- (r'.', Text)
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from string import Template
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['FantomLexer']
+
+
+class FantomLexer(RegexLexer):
+ """
+ For Fantom source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fantom'
+ aliases = ['fan']
+ filenames = ['*.fan']
+ mimetypes = ['application/x-fantom']
+
+ # often used regexes
+ def s(str):
+ return Template(str).substitute(
+ dict(
+ pod=r'[\"\w\.]+',
+ eos=r'\n|;',
+ id=r'[a-zA-Z_]\w*',
+ # all chars which can be part of type definition. Starts with
+ # either letter, or [ (maps), or | (funcs)
+ type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?',
+ )
+ )
+
+ tokens = {
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline
+ (r'//.*?\n', Comment.Single), # Single line
+ # TODO: highlight references in fandocs
+ (r'\*\*.*?\n', Comment.Special), # Fandoc
+ (r'#.*\n', Comment.Single) # Shell-style
+ ],
+ 'literals': [
+ (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration
+ (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot
+ (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal
+ (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex
+ (r'\b-?[\d_]+', Number.Integer), # Int
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char
+ (r'"', Punctuation, 'insideStr'), # Opening quote
+ (r'`', Punctuation, 'insideUri'), # Opening accent
+ (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null
+ (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, String, Punctuation)),
+ (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, Name.Function)),
+ (r'\[,\]', Literal), # Empty list
+ (s(r'($type)(\[,\])'), # Typed empty list
+ bygroups(using(this, state='inType'), Literal)),
+ (r'\[:\]', Literal), # Empty Map
+ (s(r'($type)(\[:\])'),
+ bygroups(using(this, state='inType'), Literal)),
+ ],
+ 'insideStr': [
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\$\{.*?\}', String.Interpol), # Subst expr
+ (r'"', Punctuation, '#pop'), # Closing quot
+ (r'.', String) # String content
+ ],
+ 'insideUri': [ # TODO: remove copy/paste str/uri
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\$\{.*?\}', String.Interpol), # Subst expr
+ (r'`', Punctuation, '#pop'), # Closing tick
+ (r'.', String.Backtick) # URI content
+ ],
+ 'protectionKeywords': [
+ (r'\b(public|protected|private|internal)\b', Keyword),
+ ],
+ 'typeKeywords': [
+ (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
+ ],
+ 'methodKeywords': [
+ (r'\b(abstract|native|once|override|static|virtual|final)\b',
+ Keyword),
+ ],
+ 'fieldKeywords': [
+ (r'\b(abstract|const|final|native|override|static|virtual|'
+ r'readonly)\b', Keyword)
+ ],
+ 'otherKeywords': [
+ (words((
+ 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while',
+ 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue',
+ 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
+ ],
+ 'operators': [
+ (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
+ ],
+ 'inType': [
+ (r'[\[\]|\->:?]', Punctuation),
+ (s(r'$id'), Name.Class),
+ default('#pop'),
+
+ ],
+ 'root': [
+ include('comments'),
+ include('protectionKeywords'),
+ include('typeKeywords'),
+ include('methodKeywords'),
+ include('fieldKeywords'),
+ include('literals'),
+ include('otherKeywords'),
+ include('operators'),
+ (r'using\b', Keyword.Namespace, 'using'), # Using stmt
+ (r'@\w+', Name.Decorator, 'facet'), # Symbol
+ (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
+ 'inheritance'), # Inheritance list
+
+ # Type var := val
+ (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
+ bygroups(using(this, state='inType'), Text,
+ Name.Variable, Text, Operator)),
+
+ # var := val
+ (s(r'($id)(\s*)(:=)'),
+ bygroups(Name.Variable, Text, Operator)),
+
+ # .someId( or ->someId( ###
+ (s(r'(\.|(?:\->))($id)(\s*)(\()'),
+ bygroups(Operator, Name.Function, Text, Punctuation),
+ 'insideParen'),
+
+ # .someId or ->someId
+ (s(r'(\.|(?:\->))($id)'),
+ bygroups(Operator, Name.Function)),
+
+ # new makeXXX (
+ (r'(new)(\s+)(make\w*)(\s*)(\()',
+ bygroups(Keyword, Text, Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ # Type name (
+ (s(r'($type)([ \t]+)' # Return type and whitespace
+ r'($id)(\s*)(\()'), # method name + open brace
+ bygroups(using(this, state='inType'), Text,
+ Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ # ArgType argName,
+ (s(r'($type)(\s+)($id)(\s*)(,)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation)),
+
+ # ArgType argName)
+ # Covered in 'insideParen' state
+
+ # ArgType argName -> ArgType|
+ (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation, Text, using(this, state='inType'),
+ Punctuation)),
+
+ # ArgType argName|
+ (s(r'($type)(\s+)($id)(\s*)(\|)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation)),
+
+ # Type var
+ (s(r'($type)([ \t]+)($id)'),
+ bygroups(using(this, state='inType'), Text,
+ Name.Variable)),
+
+ (r'\(', Punctuation, 'insideParen'),
+ (r'\{', Punctuation, 'insideBrace'),
+ (r'.', Text)
+ ],
+ 'insideParen': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'insideMethodDeclArgs': [
+ (r'\)', Punctuation, '#pop'),
+ (s(r'($type)(\s+)($id)(\s*)(\))'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'insideBrace': [
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'inheritance': [
+ (r'\s+', Text), # Whitespace
+ (r':|,', Punctuation),
+ (r'(?:(\w+)(::))?(\w+)',
+ bygroups(Name.Namespace, Punctuation, Name.Class)),
+ (r'\{', Punctuation, '#pop')
+ ],
+ 'using': [
+ (r'[ \t]+', Text), # consume whitespaces
+ (r'(\[)(\w+)(\])',
+ bygroups(Punctuation, Comment.Special, Punctuation)), # ffi
+ (r'(\")?([\w.]+)(\")?',
+ bygroups(Punctuation, Name.Namespace, Punctuation)), # podname
+ (r'::', Punctuation, 'usingClass'),
+ default('#pop')
+ ],
+ 'usingClass': [
+ (r'[ \t]+', Text), # consume whitespaces
+ (r'(as)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
+ (r'[\w$]+', Name.Class),
+ default('#pop:2') # jump out to root state
+ ],
+ 'facet': [
+ (r'\s+', Text),
+ (r'\{', Punctuation, 'facetFields'),
+ default('#pop')
+ ],
+ 'facetFields': [
+ include('comments'),
+ include('literals'),
+ include('operators'),
+ (r'\s+', Text),
+ (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
+ (r'\}', Punctuation, '#pop'),
+ (r'.', Text)
+ ],
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/felix.py b/contrib/python/Pygments/py2/pygments/lexers/felix.py
index cf768a4511..8da8d60ed3 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/felix.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/felix.py
@@ -1,273 +1,273 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.felix
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Felix language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.felix
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Felix language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words, \
- combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['FelixLexer']
-
-
-class FelixLexer(RegexLexer):
- """
- For `Felix <http://www.felix-lang.org>`_ source code.
-
- .. versionadded:: 1.2
- """
-
- name = 'Felix'
- aliases = ['felix', 'flx']
- filenames = ['*.flx', '*.flxh']
- mimetypes = ['text/x-felix']
-
- preproc = (
- 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
- )
-
- keywords = (
- '_', '_deref', 'all', 'as',
- 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
- 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
- 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
- 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
- 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
- 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
- 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
- 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
- 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
- 'when', 'whilst', 'with', 'yield',
- )
-
- keyword_directives = (
- '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
- 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
- 'package', 'private', 'pod', 'property', 'public', 'publish',
- 'requires', 'todo', 'virtual', 'use',
- )
-
- keyword_declarations = (
- 'def', 'let', 'ref', 'val', 'var',
- )
-
- keyword_types = (
- 'unit', 'void', 'any', 'bool',
- 'byte', 'offset',
- 'address', 'caddress', 'cvaddress', 'vaddress',
- 'tiny', 'short', 'int', 'long', 'vlong',
- 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
- 'int8', 'int16', 'int32', 'int64',
- 'uint8', 'uint16', 'uint32', 'uint64',
- 'float', 'double', 'ldouble',
- 'complex', 'dcomplex', 'lcomplex',
- 'imaginary', 'dimaginary', 'limaginary',
- 'char', 'wchar', 'uchar',
- 'charp', 'charcp', 'ucharp', 'ucharcp',
- 'string', 'wstring', 'ustring',
- 'cont',
- 'array', 'varray', 'list',
- 'lvalue', 'opt', 'slice',
- )
-
- keyword_constants = (
- 'false', 'true',
- )
-
- operator_words = (
- 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
- )
-
- name_builtins = (
- '_svc', 'while',
- )
-
- name_pseudo = (
- 'root', 'self', 'this',
- )
-
- decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Keywords
- (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
- 'union'), suffix=r'\b'),
- Keyword, 'funcname'),
- (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
- Keyword, 'classname'),
- (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
-
- (words(keywords, suffix=r'\b'), Keyword),
- (words(keyword_directives, suffix=r'\b'), Name.Decorator),
- (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
- (words(keyword_types, suffix=r'\b'), Keyword.Type),
- (words(keyword_constants, suffix=r'\b'), Keyword.Constant),
-
- # Operators
- include('operators'),
-
- # Float Literal
- # -- Hex Float
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
- Number.Float),
-
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
- # -- Octal
- (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
-
- # Strings
- ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
- ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
- ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
- ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
- ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
- ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
- ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
- ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
-
- # Punctuation
- (r'[\[\]{}:(),;?]', Punctuation),
-
- # Labels
- (r'[a-zA-Z_]\w*:>', Name.Label),
-
- # Identifiers
- (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
- (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
-
- include('comment'),
-
- # Preprocessor
- (r'#\s*if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
- ],
- 'operators': [
- (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
- (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
- ],
- 'comment': [
- (r'//(.*?)\n', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment2'),
- ],
- 'comment2': [
- (r'[^/*]', Comment.Multiline),
- (r'/[*]', Comment.Multiline, '#push'),
- (r'[*]/', Comment.Multiline, '#pop'),
- (r'[/*]', Comment.Multiline),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'macro': [
- include('comment'),
- (r'(import|include)(\s+)(<[^>]*?>)',
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r'(import|include)(\s+)("[^"]*?")',
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r"(import|include)(\s+)('[^']*?')",
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r'[^/\n]+', Comment.Preproc),
- # (r'/[*](.|\n)*?[*]/', Comment),
- # (r'//.*?\n', Comment, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'funcname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
- # anonymous functions
- (r'(?=\()', Text, '#pop'),
- ],
- 'classname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # anonymous classes
- (r'(?=\{)', Text, '#pop'),
- ],
- 'modulename': [
- include('whitespace'),
- (r'\[', Punctuation, ('modulename2', 'tvarlist')),
- default('modulename2'),
- ],
- 'modulename2': [
- include('whitespace'),
- (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
- ],
- 'tvarlist': [
- include('whitespace'),
- include('operators'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(with|where)\b', Keyword),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words, \
+ combined
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['FelixLexer']
+
+
+class FelixLexer(RegexLexer):
+ """
+ For `Felix <http://www.felix-lang.org>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+
+ name = 'Felix'
+ aliases = ['felix', 'flx']
+ filenames = ['*.flx', '*.flxh']
+ mimetypes = ['text/x-felix']
+
+ preproc = (
+ 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
+ )
+
+ keywords = (
+ '_', '_deref', 'all', 'as',
+ 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
+ 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
+ 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
+ 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
+ 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
+ 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
+ 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
+ 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
+ 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
+ 'when', 'whilst', 'with', 'yield',
+ )
+
+ keyword_directives = (
+ '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
+ 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
+ 'package', 'private', 'pod', 'property', 'public', 'publish',
+ 'requires', 'todo', 'virtual', 'use',
+ )
+
+ keyword_declarations = (
+ 'def', 'let', 'ref', 'val', 'var',
+ )
+
+ keyword_types = (
+ 'unit', 'void', 'any', 'bool',
+ 'byte', 'offset',
+ 'address', 'caddress', 'cvaddress', 'vaddress',
+ 'tiny', 'short', 'int', 'long', 'vlong',
+ 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
+ 'int8', 'int16', 'int32', 'int64',
+ 'uint8', 'uint16', 'uint32', 'uint64',
+ 'float', 'double', 'ldouble',
+ 'complex', 'dcomplex', 'lcomplex',
+ 'imaginary', 'dimaginary', 'limaginary',
+ 'char', 'wchar', 'uchar',
+ 'charp', 'charcp', 'ucharp', 'ucharcp',
+ 'string', 'wstring', 'ustring',
+ 'cont',
+ 'array', 'varray', 'list',
+ 'lvalue', 'opt', 'slice',
+ )
+
+ keyword_constants = (
+ 'false', 'true',
+ )
+
+ operator_words = (
+ 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
+ )
+
+ name_builtins = (
+ '_svc', 'while',
+ )
+
+ name_pseudo = (
+ 'root', 'self', 'this',
+ )
+
+ decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # Keywords
+ (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
+ 'union'), suffix=r'\b'),
+ Keyword, 'funcname'),
+ (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
+ Keyword, 'classname'),
+ (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
+
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(keyword_directives, suffix=r'\b'), Name.Decorator),
+ (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
+ (words(keyword_types, suffix=r'\b'), Keyword.Type),
+ (words(keyword_constants, suffix=r'\b'), Keyword.Constant),
+
+ # Operators
+ include('operators'),
+
+ # Float Literal
+ # -- Hex Float
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
+ Number.Float),
+
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
+
+ # Strings
+ ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
+ ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
+ ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
+ ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
+ ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
+ ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
+
+ # Punctuation
+ (r'[\[\]{}:(),;?]', Punctuation),
+
+ # Labels
+ (r'[a-zA-Z_]\w*:>', Name.Label),
+
+ # Identifiers
+ (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
+ (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+
+ include('comment'),
+
+ # Preprocessor
+ (r'#\s*if\s+0', Comment.Preproc, 'if0'),
+ (r'#', Comment.Preproc, 'macro'),
+ ],
+ 'operators': [
+ (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
+ (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
+ ],
+ 'comment': [
+ (r'//(.*?)\n', Comment.Single),
+ (r'/[*]', Comment.Multiline, 'comment2'),
+ ],
+ 'comment2': [
+ (r'[^/*]', Comment.Multiline),
+ (r'/[*]', Comment.Multiline, '#push'),
+ (r'[*]/', Comment.Multiline, '#pop'),
+ (r'[/*]', Comment.Multiline),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
+ (r'.*?\n', Comment),
+ ],
+ 'macro': [
+ include('comment'),
+ (r'(import|include)(\s+)(<[^>]*?>)',
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r'(import|include)(\s+)("[^"]*?")',
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r"(import|include)(\s+)('[^']*?')",
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r'[^/\n]+', Comment.Preproc),
+ # (r'/[*](.|\n)*?[*]/', Comment),
+ # (r'//.*?\n', Comment, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'funcname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
+ # anonymous functions
+ (r'(?=\()', Text, '#pop'),
+ ],
+ 'classname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # anonymous classes
+ (r'(?=\{)', Text, '#pop'),
+ ],
+ 'modulename': [
+ include('whitespace'),
+ (r'\[', Punctuation, ('modulename2', 'tvarlist')),
+ default('modulename2'),
+ ],
+ 'modulename2': [
+ include('whitespace'),
+ (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
+ ],
+ 'tvarlist': [
+ include('whitespace'),
+ include('operators'),
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(with|where)\b', Keyword),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- # included here again for raw strings
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- # included here again for raw strings
- (r"\\\\|\\'|\\\n", String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ # included here again for raw strings
+ (r'\\\\|\\"|\\\n', String.Escape),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ # included here again for raw strings
+ (r"\\\\|\\'|\\\n", String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
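The FelixLexer hunk above is a complete Pygments RegexLexer definition: the keyword tuples feed the words() helpers, and the string states ('dqs', 'sqs', 'tdqs', 'tsqs') are combined with 'stringescape' for non-raw literals. A minimal, illustrative sketch of exercising that table follows; the Felix fragment is invented, and only the public Pygments API named in the imports is assumed.

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import FelixLexer

# A made-up Felix fragment that touches several rule groups defined above:
# a 'fun' declaration (pushes the 'funcname' state), an integer literal,
# operators, and a single-line comment.
source = "fun twice(x: int) => 2 * x;  // doubles its argument\n"

print(highlight(source, FelixLexer(), TerminalFormatter()))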
diff --git a/contrib/python/Pygments/py2/pygments/lexers/fortran.py b/contrib/python/Pygments/py2/pygments/lexers/fortran.py
index 4f5c3b2546..54e3167bdd 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/fortran.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/fortran.py
@@ -1,206 +1,206 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.fortran
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Fortran languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.fortran
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Fortran languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, bygroups, include, words, using, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['FortranLexer', 'FortranFixedLexer']
-
-
-class FortranLexer(RegexLexer):
- """
- Lexer for FORTRAN 90 code.
-
- .. versionadded:: 0.10
- """
- name = 'Fortran'
- aliases = ['fortran']
- filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
- mimetypes = ['text/x-fortran']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
- # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
- # Logical (?): NOT, AND, OR, EQV, NEQV
-
- # Builtins:
- # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
-
- tokens = {
- 'root': [
- (r'^#.*\n', Comment.Preproc),
- (r'!.*\n', Comment),
- include('strings'),
- include('core'),
- (r'[a-z][\w$]*', Name),
- include('nums'),
- (r'[\s]+', Text),
- ],
- 'core': [
- # Statements
- (words((
- 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE',
- 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND',
- 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE',
- 'CODIMENSION', 'COMMON', 'CONCURRRENT', 'CONTIGUOUS', 'CONTAINS',
- 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
- 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END',
- 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS',
- 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
- 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT',
- 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
- 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
- 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL',
- 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE',
- 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
- 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
- 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
- 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
- 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'),
- Keyword),
-
- # Data Types
- (words((
- 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER',
- 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG',
- 'C_SIGNED_CHAR', 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T',
- 'C_INT64_T', 'C_INT_LEAST8_T', 'C_INT_LEAST16_T', 'C_INT_LEAST32_T',
- 'C_INT_LEAST64_T', 'C_INT_FAST8_T', 'C_INT_FAST16_T', 'C_INT_FAST32_T',
- 'C_INT_FAST64_T', 'C_INTMAX_T', 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE',
- 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', 'C_DOUBLE_COMPLEX',
- 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', 'C_FUNPTR'),
- prefix=r'\b', suffix=r'\s*\b'),
- Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
-
- (r'(::)', Keyword.Declaration),
-
- (r'[()\[\],:&%;.]', Punctuation),
- # Intrinsics
- (words((
- 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL',
- 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog',
- 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH',
- 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref',
- 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0',
- 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE',
- 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp',
- 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count',
- 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift',
- 'CSin', 'CSqRt', 'CTime', 'C_Loc', 'C_Associated',
- 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer',
- 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc',
- 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return',
- 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin',
- 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJN', 'DbesY',
- 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF',
- 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DMax',
- 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH',
- 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime',
- 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime',
- 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of',
- 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush',
- 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell',
- 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument',
- 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog',
- 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs',
- 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits',
- 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr',
- 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index',
- 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC',
- 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End',
- 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound',
- 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk',
- 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat',
- 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent',
- 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc',
- 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits',
- 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images',
- 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product',
- 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real',
- 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift',
- 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind',
- 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape',
- 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH',
- 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand',
- 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock',
- 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer',
- 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask',
- 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp',
- 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'),
- Name.Builtin),
-
- # Booleans
- (r'\.(true|false)\.', Name.Builtin),
- # Comparing Operators
- (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
- ],
-
- 'strings': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- ],
-
- 'nums': [
- (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['FortranLexer', 'FortranFixedLexer']
+
+
+class FortranLexer(RegexLexer):
+ """
+ Lexer for FORTRAN 90 code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Fortran'
+ aliases = ['fortran']
+ filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
+ mimetypes = ['text/x-fortran']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
+ # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
+ # Logical (?): NOT, AND, OR, EQV, NEQV
+
+ # Builtins:
+ # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
+
+ tokens = {
+ 'root': [
+ (r'^#.*\n', Comment.Preproc),
+ (r'!.*\n', Comment),
+ include('strings'),
+ include('core'),
+ (r'[a-z][\w$]*', Name),
+ include('nums'),
+ (r'[\s]+', Text),
+ ],
+ 'core': [
+ # Statements
+ (words((
+ 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE',
+ 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND',
+ 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE',
+ 'CODIMENSION', 'COMMON', 'CONCURRRENT', 'CONTIGUOUS', 'CONTAINS',
+ 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
+ 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END',
+ 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS',
+ 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
+ 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT',
+ 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
+ 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
+ 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL',
+ 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE',
+ 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
+ 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
+ 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
+ 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
+ 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword),
+
+ # Data Types
+ (words((
+ 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER',
+ 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG',
+ 'C_SIGNED_CHAR', 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T',
+ 'C_INT64_T', 'C_INT_LEAST8_T', 'C_INT_LEAST16_T', 'C_INT_LEAST32_T',
+ 'C_INT_LEAST64_T', 'C_INT_FAST8_T', 'C_INT_FAST16_T', 'C_INT_FAST32_T',
+ 'C_INT_FAST64_T', 'C_INTMAX_T', 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE',
+ 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', 'C_DOUBLE_COMPLEX',
+ 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', 'C_FUNPTR'),
+ prefix=r'\b', suffix=r'\s*\b'),
+ Keyword.Type),
+
+ # Operators
+ (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
+
+ (r'(::)', Keyword.Declaration),
+
+ (r'[()\[\],:&%;.]', Punctuation),
+ # Intrinsics
+ (words((
+ 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL',
+ 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog',
+ 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH',
+ 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref',
+ 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0',
+ 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE',
+ 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp',
+ 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count',
+ 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift',
+ 'CSin', 'CSqRt', 'CTime', 'C_Loc', 'C_Associated',
+ 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer',
+ 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc',
+ 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return',
+ 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin',
+ 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJN', 'DbesY',
+ 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF',
+ 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DMax',
+ 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH',
+ 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime',
+ 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime',
+ 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of',
+ 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush',
+ 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell',
+ 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument',
+ 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog',
+ 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs',
+ 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits',
+ 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr',
+ 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index',
+ 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC',
+ 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End',
+ 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound',
+ 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk',
+ 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat',
+ 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent',
+ 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc',
+ 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits',
+ 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images',
+ 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product',
+ 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real',
+ 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift',
+ 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind',
+ 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape',
+ 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH',
+ 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand',
+ 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock',
+ 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer',
+ 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask',
+ 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp',
+ 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'),
+ Name.Builtin),
+
+ # Booleans
+ (r'\.(true|false)\.', Name.Builtin),
+ # Comparing Operators
+ (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
+ ],
+
+ 'strings': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ ],
+
+ 'nums': [
+ (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
(r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
(r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?', Number.Float),
(r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_[a-z]\w+)?', Number.Float),
- ],
- }
-
-
-class FortranFixedLexer(RegexLexer):
- """
- Lexer for fixed format Fortran.
-
- .. versionadded:: 2.1
- """
- name = 'FortranFixed'
- aliases = ['fortranfixed']
- filenames = ['*.f', '*.F']
-
- flags = re.IGNORECASE
-
- def _lex_fortran(self, match, ctx=None):
- """Lex a line just as free form fortran without line break."""
- lexer = FortranLexer()
- text = match.group(0) + "\n"
- for index, token, value in lexer.get_tokens_unprocessed(text):
- value = value.replace('\n', '')
- if value != '':
- yield index, token, value
-
- tokens = {
- 'root': [
- (r'[C*].*\n', Comment),
- (r'#.*\n', Comment.Preproc),
- (r' {0,4}!.*\n', Comment),
- (r'(.{5})', Name.Label, 'cont-char'),
- (r'.*\n', using(FortranLexer)),
- ],
- 'cont-char': [
- (' ', Text, 'code'),
- ('0', Comment, 'code'),
+ ],
+ }
+
+
+class FortranFixedLexer(RegexLexer):
+ """
+ Lexer for fixed format Fortran.
+
+ .. versionadded:: 2.1
+ """
+ name = 'FortranFixed'
+ aliases = ['fortranfixed']
+ filenames = ['*.f', '*.F']
+
+ flags = re.IGNORECASE
+
+ def _lex_fortran(self, match, ctx=None):
+ """Lex a line just as free form fortran without line break."""
+ lexer = FortranLexer()
+ text = match.group(0) + "\n"
+ for index, token, value in lexer.get_tokens_unprocessed(text):
+ value = value.replace('\n', '')
+ if value != '':
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ (r'[C*].*\n', Comment),
+ (r'#.*\n', Comment.Preproc),
+ (r' {0,4}!.*\n', Comment),
+ (r'(.{5})', Name.Label, 'cont-char'),
+ (r'.*\n', using(FortranLexer)),
+ ],
+ 'cont-char': [
+ (' ', Text, 'code'),
+ ('0', Comment, 'code'),
('.', Generic.Strong, 'code'),
- ],
- 'code': [
- (r'(.{66})(.*)(\n)',
- bygroups(_lex_fortran, Comment, Text), 'root'),
- (r'(.*)(\n)', bygroups(_lex_fortran, Text), 'root'),
+ ],
+ 'code': [
+ (r'(.{66})(.*)(\n)',
+ bygroups(_lex_fortran, Comment, Text), 'root'),
+ (r'(.*)(\n)', bygroups(_lex_fortran, Text), 'root'),
default('root'),
]
- }
+ }
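Both Fortran lexers restored above share one mechanism: FortranLexer tokenizes free-form source directly, while FortranFixedLexer peels off the fixed-format columns (comment flag in column 1, continuation character in column 6) and hands the statement text back to a FortranLexer instance via _lex_fortran. A small sketch of driving them, with invented Fortran fragments and only the standard get_tokens_unprocessed() interface assumed:

from pygments.lexers.fortran import FortranFixedLexer, FortranLexer

free_form = "PROGRAM demo\n  x = 1.5e0\nEND PROGRAM demo\n"
# get_tokens_unprocessed() yields (offset, token type, text) triples, which
# makes it easy to see which rule in the tables above matched each piece.
for _, token, text in FortranLexer().get_tokens_unprocessed(free_form):
    if text.strip():
        print(token, repr(text))

fixed_form = "C     a comment line\n      X = 1\n"
# Column 1 ('C' or '*') marks a comment line; otherwise the first five
# columns are the label field and column 6 the continuation character.
for _, token, text in FortranFixedLexer().get_tokens_unprocessed(fixed_form):
    if text.strip():
        print(token, repr(text))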
diff --git a/contrib/python/Pygments/py2/pygments/lexers/foxpro.py b/contrib/python/Pygments/py2/pygments/lexers/foxpro.py
index 868a44d820..7b916dfddd 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/foxpro.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/foxpro.py
@@ -1,428 +1,428 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.foxpro
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Simple lexer for Microsoft Visual FoxPro source code.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.foxpro
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Simple lexer for Microsoft Visual FoxPro source code.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer
-from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
- Name, String
-
-__all__ = ['FoxProLexer']
-
-
-class FoxProLexer(RegexLexer):
- """Lexer for Microsoft Visual FoxPro language.
-
- FoxPro syntax allows all keywords and function names to be shortened
- to 4 characters. Shortened forms are not recognized by this lexer.
-
- .. versionadded:: 1.6
- """
-
- name = 'FoxPro'
- aliases = ['foxpro', 'vfp', 'clipper', 'xbase']
- filenames = ['*.PRG', '*.prg']
- mimetype = []
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r';\s*\n', Punctuation), # consume newline
- (r'(^|\n)\s*', Text, 'newline'),
-
- # Square brackets may be used for array indices
- # and for string literals. Look for arrays
- # before matching string literals.
- (r'(?<=\w)\[[0-9, ]+\]', Text),
- (r'\'[^\'\n]*\'|"[^"\n]*"|\[[^]*]\]', String),
- (r'(^\s*\*|&&|&amp;&amp;).*?\n', Comment.Single),
-
- (r'(ABS|ACLASS|ACOPY|ACOS|ADATABASES|ADBOBJECTS|ADDBS|'
- r'ADDPROPERTY|ADEL|ADIR|ADLLS|ADOCKSTATE|AELEMENT|AERROR|'
- r'AEVENTS|AFIELDS|AFONT|AGETCLASS|AGETFILEVERSION|AINS|'
- r'AINSTANCE|ALANGUAGE|ALEN|ALIAS|ALINES|ALLTRIM|'
- r'AMEMBERS|AMOUSEOBJ|ANETRESOURCES|APRINTERS|APROCINFO|'
- r'ASC|ASCAN|ASELOBJ|ASESSIONS|ASIN|ASORT|ASQLHANDLES|'
- r'ASTACKINFO|ASUBSCRIPT|AT|AT_C|ATAGINFO|ATAN|ATC|ATCC|'
- r'ATCLINE|ATLINE|ATN2|AUSED|AVCXCLASSES|BAR|BARCOUNT|'
- r'BARPROMPT|BETWEEN|BINDEVENT|BINTOC|BITAND|BITCLEAR|'
- r'BITLSHIFT|BITNOT|BITOR|BITRSHIFT|BITSET|BITTEST|BITXOR|'
- r'BOF|CANDIDATE|CAPSLOCK|CAST|CDOW|CDX|CEILING|CHR|CHRSAW|'
- r'CHRTRAN|CHRTRANC|CLEARRESULTSET|CMONTH|CNTBAR|CNTPAD|COL|'
- r'COM|Functions|COMARRAY|COMCLASSINFO|COMPOBJ|COMPROP|'
- r'COMRETURNERROR|COS|CPCONVERT|CPCURRENT|CPDBF|CREATEBINARY|'
- r'CREATEOBJECT|CREATEOBJECTEX|CREATEOFFLINE|CTOBIN|CTOD|'
- r'CTOT|CURDIR|CURSORGETPROP|CURSORSETPROP|CURSORTOXML|'
- r'CURVAL|DATE|DATETIME|DAY|DBC|DBF|DBGETPROP|DBSETPROP|'
- r'DBUSED|DDEAbortTrans|DDEAdvise|DDEEnabled|DDEExecute|'
- r'DDEInitiate|DDELastError|DDEPoke|DDERequest|DDESetOption|'
- r'DDESetService|DDESetTopic|DDETerminate|DEFAULTEXT|'
- r'DELETED|DESCENDING|DIFFERENCE|DIRECTORY|DISKSPACE|'
- r'DisplayPath|DMY|DODEFAULT|DOW|DRIVETYPE|DROPOFFLINE|'
- r'DTOC|DTOR|DTOS|DTOT|EDITSOURCE|EMPTY|EOF|ERROR|EVAL(UATE)?|'
- r'EVENTHANDLER|EVL|EXECSCRIPT|EXP|FCHSIZE|FCLOSE|FCOUNT|'
- r'FCREATE|FDATE|FEOF|FERROR|FFLUSH|FGETS|FIELD|FILE|'
- r'FILETOSTR|FILTER|FKLABEL|FKMAX|FLDLIST|FLOCK|FLOOR|'
- r'FONTMETRIC|FOPEN|FOR|FORCEEXT|FORCEPATH|FOUND|FPUTS|'
- r'FREAD|FSEEK|FSIZE|FTIME|FULLPATH|FV|FWRITE|'
- r'GETAUTOINCVALUE|GETBAR|GETCOLOR|GETCP|GETDIR|GETENV|'
- r'GETFILE|GETFLDSTATE|GETFONT|GETINTERFACE|'
- r'GETNEXTMODIFIED|GETOBJECT|GETPAD|GETPEM|GETPICT|'
- r'GETPRINTER|GETRESULTSET|GETWORDCOUNT|GETWORDNUM|'
- r'GETCURSORADAPTER|GOMONTH|HEADER|HOME|HOUR|ICASE|'
- r'IDXCOLLATE|IIF|IMESTATUS|INDBC|INDEXSEEK|INKEY|INLIST|'
- r'INPUTBOX|INSMODE|INT|ISALPHA|ISBLANK|ISCOLOR|ISDIGIT|'
- r'ISEXCLUSIVE|ISFLOCKED|ISLEADBYTE|ISLOWER|ISMEMOFETCHED|'
- r'ISMOUSE|ISNULL|ISPEN|ISREADONLY|ISRLOCKED|'
- r'ISTRANSACTABLE|ISUPPER|JUSTDRIVE|JUSTEXT|JUSTFNAME|'
- r'JUSTPATH|JUSTSTEM|KEY|KEYMATCH|LASTKEY|LEFT|LEFTC|LEN|'
- r'LENC|LIKE|LIKEC|LINENO|LOADPICTURE|LOCFILE|LOCK|LOG|'
- r'LOG10|LOOKUP|LOWER|LTRIM|LUPDATE|MAKETRANSACTABLE|MAX|'
- r'MCOL|MDOWN|MDX|MDY|MEMLINES|MEMORY|MENU|MESSAGE|'
- r'MESSAGEBOX|MIN|MINUTE|MLINE|MOD|MONTH|MRKBAR|MRKPAD|'
- r'MROW|MTON|MWINDOW|NDX|NEWOBJECT|NORMALIZE|NTOM|NUMLOCK|'
- r'NVL|OBJNUM|OBJTOCLIENT|OBJVAR|OCCURS|OEMTOANSI|OLDVAL|'
- r'ON|ORDER|OS|PAD|PADL|PARAMETERS|PAYMENT|PCOL|PCOUNT|'
- r'PEMSTATUS|PI|POPUP|PRIMARY|PRINTSTATUS|PRMBAR|PRMPAD|'
- r'PROGRAM|PROMPT|PROPER|PROW|PRTINFO|PUTFILE|PV|QUARTER|'
- r'RAISEEVENT|RAND|RAT|RATC|RATLINE|RDLEVEL|READKEY|RECCOUNT|'
- r'RECNO|RECSIZE|REFRESH|RELATION|REPLICATE|REQUERY|RGB|'
- r'RGBSCHEME|RIGHT|RIGHTC|RLOCK|ROUND|ROW|RTOD|RTRIM|'
- r'SAVEPICTURE|SCHEME|SCOLS|SEC|SECONDS|SEEK|SELECT|SET|'
- r'SETFLDSTATE|SETRESULTSET|SIGN|SIN|SKPBAR|SKPPAD|SOUNDEX|'
- r'SPACE|SQLCANCEL|SQLCOLUMNS|SQLCOMMIT|SQLCONNECT|'
- r'SQLDISCONNECT|SQLEXEC|SQLGETPROP|SQLIDLEDISCONNECT|'
- r'SQLMORERESULTS|SQLPREPARE|SQLROLLBACK|SQLSETPROP|'
- r'SQLSTRINGCONNECT|SQLTABLES|SQRT|SROWS|STR|STRCONV|'
- r'STREXTRACT|STRTOFILE|STRTRAN|STUFF|STUFFC|SUBSTR|'
- r'SUBSTRC|SYS|SYSMETRIC|TABLEREVERT|TABLEUPDATE|TAG|'
- r'TAGCOUNT|TAGNO|TAN|TARGET|TEXTMERGE|TIME|TRANSFORM|'
- r'TRIM|TTOC|TTOD|TXNLEVEL|TXTWIDTH|TYPE|UNBINDEVENTS|'
- r'UNIQUE|UPDATED|UPPER|USED|VAL|VARREAD|VARTYPE|VERSION|'
- r'WBORDER|WCHILD|WCOLS|WDOCKABLE|WEEK|WEXIST|WFONT|WLAST|'
- r'WLCOL|WLROW|WMAXIMUM|WMINIMUM|WONTOP|WOUTPUT|WPARENT|'
- r'WREAD|WROWS|WTITLE|WVISIBLE|XMLTOCURSOR|XMLUPDATEGRAM|'
- r'YEAR)(?=\s*\()', Name.Function),
-
- (r'_ALIGNMENT|_ASCIICOLS|_ASCIIROWS|_ASSIST|_BEAUTIFY|_BOX|'
- r'_BROWSER|_BUILDER|_CALCMEM|_CALCVALUE|_CLIPTEXT|_CONVERTER|'
- r'_COVERAGE|_CUROBJ|_DBLCLICK|_DIARYDATE|_DOS|_FOXDOC|_FOXREF|'
- r'_GALLERY|_GENGRAPH|_GENHTML|_GENMENU|_GENPD|_GENSCRN|'
- r'_GENXTAB|_GETEXPR|_INCLUDE|_INCSEEK|_INDENT|_LMARGIN|_MAC|'
- r'_MENUDESIGNER|_MLINE|_PADVANCE|_PAGENO|_PAGETOTAL|_PBPAGE|'
- r'_PCOLNO|_PCOPIES|_PDRIVER|_PDSETUP|_PECODE|_PEJECT|_PEPAGE|'
- r'_PLENGTH|_PLINENO|_PLOFFSET|_PPITCH|_PQUALITY|_PRETEXT|'
- r'_PSCODE|_PSPACING|_PWAIT|_RMARGIN|_REPORTBUILDER|'
- r'_REPORTOUTPUT|_REPORTPREVIEW|_SAMPLES|_SCCTEXT|_SCREEN|'
- r'_SHELL|_SPELLCHK|_STARTUP|_TABS|_TALLY|_TASKPANE|_TEXT|'
- r'_THROTTLE|_TOOLBOX|_TOOLTIPTIMEOUT|_TRANSPORT|_TRIGGERLEVEL|'
- r'_UNIX|_VFP|_WINDOWS|_WIZARD|_WRAP', Keyword.Pseudo),
-
- (r'THISFORMSET|THISFORM|THIS', Name.Builtin),
-
- (r'Application|CheckBox|Collection|Column|ComboBox|'
- r'CommandButton|CommandGroup|Container|Control|CursorAdapter|'
- r'Cursor|Custom|DataEnvironment|DataObject|EditBox|'
- r'Empty|Exception|Fields|Files|File|FormSet|Form|FoxCode|'
- r'Grid|Header|Hyperlink|Image|Label|Line|ListBox|Objects|'
- r'OptionButton|OptionGroup|PageFrame|Page|ProjectHook|Projects|'
- r'Project|Relation|ReportListener|Separator|Servers|Server|'
- r'Session|Shape|Spinner|Tables|TextBox|Timer|ToolBar|'
- r'XMLAdapter|XMLField|XMLTable', Name.Class),
-
- (r'm\.[a-z_]\w*', Name.Variable),
- (r'\.(F|T|AND|OR|NOT|NULL)\.|\b(AND|OR|NOT|NULL)\b', Operator.Word),
-
- (r'\.(ActiveColumn|ActiveControl|ActiveForm|ActivePage|'
- r'ActiveProject|ActiveRow|AddLineFeeds|ADOCodePage|Alias|'
- r'Alignment|Align|AllowAddNew|AllowAutoColumnFit|'
- r'AllowCellSelection|AllowDelete|AllowHeaderSizing|'
- r'AllowInsert|AllowModalMessages|AllowOutput|AllowRowSizing|'
- r'AllowSimultaneousFetch|AllowTabs|AllowUpdate|'
- r'AlwaysOnBottom|AlwaysOnTop|Anchor|Application|'
- r'AutoActivate|AutoCenter|AutoCloseTables|AutoComplete|'
- r'AutoCompSource|AutoCompTable|AutoHideScrollBar|'
- r'AutoIncrement|AutoOpenTables|AutoRelease|AutoSize|'
- r'AutoVerbMenu|AutoYield|BackColor|ForeColor|BackStyle|'
- r'BaseClass|BatchUpdateCount|BindControls|BorderColor|'
- r'BorderStyle|BorderWidth|BoundColumn|BoundTo|Bound|'
- r'BreakOnError|BufferModeOverride|BufferMode|'
- r'BuildDateTime|ButtonCount|Buttons|Cancel|Caption|'
- r'Centered|Century|ChildAlias|ChildOrder|ChildTable|'
- r'ClassLibrary|Class|ClipControls|Closable|CLSID|CodePage|'
- r'ColorScheme|ColorSource|ColumnCount|ColumnLines|'
- r'ColumnOrder|Columns|ColumnWidths|CommandClauses|'
- r'Comment|CompareMemo|ConflictCheckCmd|ConflictCheckType|'
- r'ContinuousScroll|ControlBox|ControlCount|Controls|'
- r'ControlSource|ConversionFunc|Count|CurrentControl|'
- r'CurrentDataSession|CurrentPass|CurrentX|CurrentY|'
- r'CursorSchema|CursorSource|CursorStatus|Curvature|'
- r'Database|DataSessionID|DataSession|DataSourceType|'
- r'DataSource|DataType|DateFormat|DateMark|Debug|'
- r'DeclareXMLPrefix|DEClassLibrary|DEClass|DefaultFilePath|'
- r'Default|DefOLELCID|DeleteCmdDataSourceType|DeleteCmdDataSource|'
- r'DeleteCmd|DeleteMark|Description|Desktop|'
- r'Details|DisabledBackColor|DisabledForeColor|'
- r'DisabledItemBackColor|DisabledItemForeColor|'
- r'DisabledPicture|DisableEncode|DisplayCount|'
- r'DisplayValue|Dockable|Docked|DockPosition|'
- r'DocumentFile|DownPicture|DragIcon|DragMode|DrawMode|'
- r'DrawStyle|DrawWidth|DynamicAlignment|DynamicBackColor|'
- r'DynamicForeColor|DynamicCurrentControl|DynamicFontBold|'
- r'DynamicFontItalic|DynamicFontStrikethru|'
- r'DynamicFontUnderline|DynamicFontName|DynamicFontOutline|'
- r'DynamicFontShadow|DynamicFontSize|DynamicInputMask|'
- r'DynamicLineHeight|EditorOptions|Enabled|'
- r'EnableHyperlinks|Encrypted|ErrorNo|Exclude|Exclusive|'
- r'FetchAsNeeded|FetchMemoCmdList|FetchMemoDataSourceType|'
- r'FetchMemoDataSource|FetchMemo|FetchSize|'
- r'FileClassLibrary|FileClass|FillColor|FillStyle|Filter|'
- r'FirstElement|FirstNestedTable|Flags|FontBold|FontItalic|'
- r'FontStrikethru|FontUnderline|FontCharSet|FontCondense|'
- r'FontExtend|FontName|FontOutline|FontShadow|FontSize|'
- r'ForceCloseTag|Format|FormCount|FormattedOutput|Forms|'
- r'FractionDigits|FRXDataSession|FullName|GDIPlusGraphics|'
- r'GridLineColor|GridLines|GridLineWidth|HalfHeightCaption|'
- r'HeaderClassLibrary|HeaderClass|HeaderHeight|Height|'
- r'HelpContextID|HideSelection|HighlightBackColor|'
- r'HighlightForeColor|HighlightStyle|HighlightRowLineWidth|'
- r'HighlightRow|Highlight|HomeDir|Hours|HostName|'
- r'HScrollSmallChange|hWnd|Icon|IncrementalSearch|Increment|'
- r'InitialSelectedAlias|InputMask|InsertCmdDataSourceType|'
- r'InsertCmdDataSource|InsertCmdRefreshCmd|'
- r'InsertCmdRefreshFieldList|InsertCmdRefreshKeyFieldList|'
- r'InsertCmd|Instancing|IntegralHeight|'
- r'Interval|IMEMode|IsAttribute|IsBase64|IsBinary|IsNull|'
- r'IsDiffGram|IsLoaded|ItemBackColor,|ItemData|ItemIDData|'
- r'ItemTips|IXMLDOMElement|KeyboardHighValue|KeyboardLowValue|'
- r'Keyfield|KeyFieldList|KeyPreview|KeySort|LanguageOptions|'
- r'LeftColumn|Left|LineContents|LineNo|LineSlant|LinkMaster|'
- r'ListCount|ListenerType|ListIndex|ListItemID|ListItem|'
- r'List|LockColumnsLeft|LockColumns|LockScreen|MacDesktop|'
- r'MainFile|MapN19_4ToCurrency|MapBinary|MapVarchar|Margin|'
- r'MaxButton|MaxHeight|MaxLeft|MaxLength|MaxRecords|MaxTop|'
- r'MaxWidth|MDIForm|MemberClassLibrary|MemberClass|'
- r'MemoWindow|Message|MinButton|MinHeight|MinWidth|'
- r'MouseIcon|MousePointer|Movable|MoverBars|MultiSelect|'
- r'Name|NestedInto|NewIndex|NewItemID|NextSiblingTable|'
- r'NoCpTrans|NoDataOnLoad|NoData|NullDisplay|'
- r'NumberOfElements|Object|OLEClass|OLEDragMode|'
- r'OLEDragPicture|OLEDropEffects|OLEDropHasData|'
- r'OLEDropMode|OLEDropTextInsertion|OLELCID|'
- r'OLERequestPendingTimeout|OLEServerBusyRaiseError|'
- r'OLEServerBusyTimeout|OLETypeAllowed|OneToMany|'
- r'OpenViews|OpenWindow|Optimize|OrderDirection|Order|'
- r'OutputPageCount|OutputType|PageCount|PageHeight|'
- r'PageNo|PageOrder|Pages|PageTotal|PageWidth|'
- r'PanelLink|Panel|ParentAlias|ParentClass|ParentTable|'
- r'Parent|Partition|PasswordChar|PictureMargin|'
- r'PicturePosition|PictureSpacing|PictureSelectionDisplay|'
- r'PictureVal|Picture|Prepared|'
- r'PolyPoints|PreserveWhiteSpace|PreviewContainer|'
- r'PrintJobName|Procedure|PROCESSID|ProgID|ProjectHookClass|'
- r'ProjectHookLibrary|ProjectHook|QuietMode|'
- r'ReadCycle|ReadLock|ReadMouse|ReadObject|ReadOnly|'
- r'ReadSave|ReadTimeout|RecordMark|RecordSourceType|'
- r'RecordSource|RefreshAlias|'
- r'RefreshCmdDataSourceType|RefreshCmdDataSource|RefreshCmd|'
- r'RefreshIgnoreFieldList|RefreshTimeStamp|RelationalExpr|'
- r'RelativeColumn|RelativeRow|ReleaseType|Resizable|'
- r'RespectCursorCP|RespectNesting|RightToLeft|RotateFlip|'
- r'Rotation|RowColChange|RowHeight|RowSourceType|'
- r'RowSource|ScaleMode|SCCProvider|SCCStatus|ScrollBars|'
- r'Seconds|SelectCmd|SelectedID|'
- r'SelectedItemBackColor|SelectedItemForeColor|Selected|'
- r'SelectionNamespaces|SelectOnEntry|SelLength|SelStart|'
- r'SelText|SendGDIPlusImage|SendUpdates|ServerClassLibrary|'
- r'ServerClass|ServerHelpFile|ServerName|'
- r'ServerProject|ShowTips|ShowInTaskbar|ShowWindow|'
- r'Sizable|SizeBox|SOM|Sorted|Sparse|SpecialEffect|'
- r'SpinnerHighValue|SpinnerLowValue|SplitBar|StackLevel|'
- r'StartMode|StatusBarText|StatusBar|Stretch|StrictDateEntry|'
- r'Style|TabIndex|Tables|TabOrientation|Tabs|TabStop|'
- r'TabStretch|TabStyle|Tag|TerminateRead|Text|Themes|'
- r'ThreadID|TimestampFieldList|TitleBar|ToolTipText|'
- r'TopIndex|TopItemID|Top|TwoPassProcess|TypeLibCLSID|'
- r'TypeLibDesc|TypeLibName|Type|Unicode|UpdatableFieldList|'
- r'UpdateCmdDataSourceType|UpdateCmdDataSource|'
- r'UpdateCmdRefreshCmd|UpdateCmdRefreshFieldList|'
- r'UpdateCmdRefreshKeyFieldList|UpdateCmd|'
- r'UpdateGramSchemaLocation|UpdateGram|UpdateNameList|UpdateType|'
- r'UseCodePage|UseCursorSchema|UseDeDataSource|UseMemoSize|'
- r'UserValue|UseTransactions|UTF8Encoded|Value|VersionComments|'
- r'VersionCompany|VersionCopyright|VersionDescription|'
- r'VersionNumber|VersionProduct|VersionTrademarks|Version|'
- r'VFPXMLProgID|ViewPortHeight|ViewPortLeft|'
- r'ViewPortTop|ViewPortWidth|VScrollSmallChange|View|Visible|'
- r'VisualEffect|WhatsThisButton|WhatsThisHelpID|WhatsThisHelp|'
- r'WhereType|Width|WindowList|WindowState|WindowType|WordWrap|'
- r'WrapCharInCDATA|WrapInCDATA|WrapMemoInCDATA|XMLAdapter|'
- r'XMLConstraints|XMLNameIsXPath|XMLNamespace|XMLName|'
- r'XMLPrefix|XMLSchemaLocation|XMLTable|XMLType|'
- r'XSDfractionDigits|XSDmaxLength|XSDtotalDigits|'
- r'XSDtype|ZoomBox)', Name.Attribute),
-
- (r'\.(ActivateCell|AddColumn|AddItem|AddListItem|AddObject|'
- r'AddProperty|AddTableSchema|AddToSCC|Add|'
- r'ApplyDiffgram|Attach|AutoFit|AutoOpen|Box|Build|'
- r'CancelReport|ChangesToCursor|CheckIn|CheckOut|Circle|'
- r'CleanUp|ClearData|ClearStatus|Clear|CloneObject|CloseTables|'
- r'Close|Cls|CursorAttach|CursorDetach|CursorFill|'
- r'CursorRefresh|DataToClip|DelayedMemoFetch|DeleteColumn|'
- r'Dock|DoMessage|DoScroll|DoStatus|DoVerb|Drag|Draw|Eval|'
- r'GetData|GetDockState|GetFormat|GetKey|GetLatestVersion|'
- r'GetPageHeight|GetPageWidth|Help|Hide|IncludePageInOutput|'
- r'IndexToItemID|ItemIDToIndex|Item|LoadXML|Line|Modify|'
- r'MoveItem|Move|Nest|OLEDrag|OnPreviewClose|OutputPage|'
- r'Point|Print|PSet|Quit|ReadExpression|ReadMethod|'
- r'RecordRefresh|Refresh|ReleaseXML|Release|RemoveFromSCC|'
- r'RemoveItem|RemoveListItem|RemoveObject|Remove|'
- r'Render|Requery|RequestData|ResetToDefault|Reset|Run|'
- r'SaveAsClass|SaveAs|SetAll|SetData|SetFocus|SetFormat|'
- r'SetMain|SetVar|SetViewPort|ShowWhatsThis|Show|'
- r'SupportsListenerType|TextHeight|TextWidth|ToCursor|'
- r'ToXML|UndoCheckOut|Unnest|UpdateStatus|WhatsThisMode|'
- r'WriteExpression|WriteMethod|ZOrder)', Name.Function),
-
- (r'\.(Activate|AdjustObjectSize|AfterBand|AfterBuild|'
- r'AfterCloseTables|AfterCursorAttach|AfterCursorClose|'
- r'AfterCursorDetach|AfterCursorFill|AfterCursorRefresh|'
- r'AfterCursorUpdate|AfterDelete|AfterInsert|'
- r'AfterRecordRefresh|AfterUpdate|AfterDock|AfterReport|'
- r'AfterRowColChange|BeforeBand|BeforeCursorAttach|'
- r'BeforeCursorClose|BeforeCursorDetach|BeforeCursorFill|'
- r'BeforeCursorRefresh|BeforeCursorUpdate|BeforeDelete|'
- r'BeforeInsert|BeforeDock|BeforeOpenTables|'
- r'BeforeRecordRefresh|BeforeReport|BeforeRowColChange|'
- r'BeforeUpdate|Click|dbc_Activate|dbc_AfterAddTable|'
- r'dbc_AfterAppendProc|dbc_AfterCloseTable|dbc_AfterCopyProc|'
- r'dbc_AfterCreateConnection|dbc_AfterCreateOffline|'
- r'dbc_AfterCreateTable|dbc_AfterCreateView|dbc_AfterDBGetProp|'
- r'dbc_AfterDBSetProp|dbc_AfterDeleteConnection|'
- r'dbc_AfterDropOffline|dbc_AfterDropTable|'
- r'dbc_AfterModifyConnection|dbc_AfterModifyProc|'
- r'dbc_AfterModifyTable|dbc_AfterModifyView|dbc_AfterOpenTable|'
- r'dbc_AfterRemoveTable|dbc_AfterRenameConnection|'
- r'dbc_AfterRenameTable|dbc_AfterRenameView|'
- r'dbc_AfterValidateData|dbc_BeforeAddTable|'
- r'dbc_BeforeAppendProc|dbc_BeforeCloseTable|'
- r'dbc_BeforeCopyProc|dbc_BeforeCreateConnection|'
- r'dbc_BeforeCreateOffline|dbc_BeforeCreateTable|'
- r'dbc_BeforeCreateView|dbc_BeforeDBGetProp|'
- r'dbc_BeforeDBSetProp|dbc_BeforeDeleteConnection|'
- r'dbc_BeforeDropOffline|dbc_BeforeDropTable|'
- r'dbc_BeforeModifyConnection|dbc_BeforeModifyProc|'
- r'dbc_BeforeModifyTable|dbc_BeforeModifyView|'
- r'dbc_BeforeOpenTable|dbc_BeforeRemoveTable|'
- r'dbc_BeforeRenameConnection|dbc_BeforeRenameTable|'
- r'dbc_BeforeRenameView|dbc_BeforeValidateData|'
- r'dbc_CloseData|dbc_Deactivate|dbc_ModifyData|dbc_OpenData|'
- r'dbc_PackData|DblClick|Deactivate|Deleted|Destroy|DoCmd|'
- r'DownClick|DragDrop|DragOver|DropDown|ErrorMessage|Error|'
- r'EvaluateContents|GotFocus|Init|InteractiveChange|KeyPress|'
- r'LoadReport|Load|LostFocus|Message|MiddleClick|MouseDown|'
- r'MouseEnter|MouseLeave|MouseMove|MouseUp|MouseWheel|Moved|'
- r'OLECompleteDrag|OLEDragOver|OLEGiveFeedback|OLESetData|'
- r'OLEStartDrag|OnMoveItem|Paint|ProgrammaticChange|'
- r'QueryAddFile|QueryModifyFile|QueryNewFile|QueryRemoveFile|'
- r'QueryRunFile|QueryUnload|RangeHigh|RangeLow|ReadActivate|'
- r'ReadDeactivate|ReadShow|ReadValid|ReadWhen|Resize|'
- r'RightClick|SCCInit|SCCDestroy|Scrolled|Timer|UIEnable|'
- r'UnDock|UnloadReport|Unload|UpClick|Valid|When)', Name.Function),
-
- (r'\s+', Text),
- # everything else is not colored
- (r'.', Text),
- ],
- 'newline': [
- (r'\*.*?$', Comment.Single, '#pop'),
- (r'(ACCEPT|ACTIVATE\s*MENU|ACTIVATE\s*POPUP|ACTIVATE\s*SCREEN|'
- r'ACTIVATE\s*WINDOW|APPEND|APPEND\s*FROM|APPEND\s*FROM\s*ARRAY|'
- r'APPEND\s*GENERAL|APPEND\s*MEMO|ASSIST|AVERAGE|BLANK|BROWSE|'
- r'BUILD\s*APP|BUILD\s*EXE|BUILD\s*PROJECT|CALCULATE|CALL|'
- r'CANCEL|CHANGE|CLEAR|CLOSE|CLOSE\s*MEMO|COMPILE|CONTINUE|'
- r'COPY\s*FILE|COPY\s*INDEXES|COPY\s*MEMO|COPY\s*STRUCTURE|'
- r'COPY\s*STRUCTURE\s*EXTENDED|COPY\s*TAG|COPY\s*TO|'
- r'COPY\s*TO\s*ARRAY|COUNT|CREATE|CREATE\s*COLOR\s*SET|'
- r'CREATE\s*CURSOR|CREATE\s*FROM|CREATE\s*LABEL|CREATE\s*MENU|'
- r'CREATE\s*PROJECT|CREATE\s*QUERY|CREATE\s*REPORT|'
- r'CREATE\s*SCREEN|CREATE\s*TABLE|CREATE\s*VIEW|DDE|'
- r'DEACTIVATE\s*MENU|DEACTIVATE\s*POPUP|DEACTIVATE\s*WINDOW|'
- r'DECLARE|DEFINE\s*BAR|DEFINE\s*BOX|DEFINE\s*MENU|'
- r'DEFINE\s*PAD|DEFINE\s*POPUP|DEFINE\s*WINDOW|DELETE|'
- r'DELETE\s*FILE|DELETE\s*TAG|DIMENSION|DIRECTORY|DISPLAY|'
- r'DISPLAY\s*FILES|DISPLAY\s*MEMORY|DISPLAY\s*STATUS|'
- r'DISPLAY\s*STRUCTURE|DO|EDIT|EJECT|EJECT\s*PAGE|ERASE|'
- r'EXIT|EXPORT|EXTERNAL|FILER|FIND|FLUSH|FUNCTION|GATHER|'
- r'GETEXPR|GO|GOTO|HELP|HIDE\s*MENU|HIDE\s*POPUP|'
- r'HIDE\s*WINDOW|IMPORT|INDEX|INPUT|INSERT|JOIN|KEYBOARD|'
- r'LABEL|LIST|LOAD|LOCATE|LOOP|MENU|MENU\s*TO|MODIFY\s*COMMAND|'
- r'MODIFY\s*FILE|MODIFY\s*GENERAL|MODIFY\s*LABEL|MODIFY\s*MEMO|'
- r'MODIFY\s*MENU|MODIFY\s*PROJECT|MODIFY\s*QUERY|'
- r'MODIFY\s*REPORT|MODIFY\s*SCREEN|MODIFY\s*STRUCTURE|'
- r'MODIFY\s*WINDOW|MOVE\s*POPUP|MOVE\s*WINDOW|NOTE|'
- r'ON\s*APLABOUT|ON\s*BAR|ON\s*ERROR|ON\s*ESCAPE|'
- r'ON\s*EXIT\s*BAR|ON\s*EXIT\s*MENU|ON\s*EXIT\s*PAD|'
- r'ON\s*EXIT\s*POPUP|ON\s*KEY|ON\s*KEY\s*=|ON\s*KEY\s*LABEL|'
- r'ON\s*MACHELP|ON\s*PAD|ON\s*PAGE|ON\s*READERROR|'
- r'ON\s*SELECTION\s*BAR|ON\s*SELECTION\s*MENU|'
- r'ON\s*SELECTION\s*PAD|ON\s*SELECTION\s*POPUP|ON\s*SHUTDOWN|'
- r'PACK|PARAMETERS|PLAY\s*MACRO|POP\s*KEY|POP\s*MENU|'
- r'POP\s*POPUP|PRIVATE|PROCEDURE|PUBLIC|PUSH\s*KEY|'
- r'PUSH\s*MENU|PUSH\s*POPUP|QUIT|READ|READ\s*MENU|RECALL|'
- r'REINDEX|RELEASE|RELEASE\s*MODULE|RENAME|REPLACE|'
- r'REPLACE\s*FROM\s*ARRAY|REPORT|RESTORE\s*FROM|'
- r'RESTORE\s*MACROS|RESTORE\s*SCREEN|RESTORE\s*WINDOW|'
- r'RESUME|RETRY|RETURN|RUN|RUN\s*\/N"|RUNSCRIPT|'
- r'SAVE\s*MACROS|SAVE\s*SCREEN|SAVE\s*TO|SAVE\s*WINDOWS|'
- r'SCATTER|SCROLL|SEEK|SELECT|SET|SET\s*ALTERNATE|'
- r'SET\s*ANSI|SET\s*APLABOUT|SET\s*AUTOSAVE|SET\s*BELL|'
- r'SET\s*BLINK|SET\s*BLOCKSIZE|SET\s*BORDER|SET\s*BRSTATUS|'
- r'SET\s*CARRY|SET\s*CENTURY|SET\s*CLEAR|SET\s*CLOCK|'
- r'SET\s*COLLATE|SET\s*COLOR\s*OF|SET\s*COLOR\s*OF\s*SCHEME|'
- r'SET\s*COLOR\s*SET|SET\s*COLOR\s*TO|SET\s*COMPATIBLE|'
- r'SET\s*CONFIRM|SET\s*CONSOLE|SET\s*CURRENCY|SET\s*CURSOR|'
- r'SET\s*DATE|SET\s*DEBUG|SET\s*DECIMALS|SET\s*DEFAULT|'
- r'SET\s*DELETED|SET\s*DELIMITERS|SET\s*DEVELOPMENT|'
- r'SET\s*DEVICE|SET\s*DISPLAY|SET\s*DOHISTORY|SET\s*ECHO|'
- r'SET\s*ESCAPE|SET\s*EXACT|SET\s*EXCLUSIVE|SET\s*FIELDS|'
- r'SET\s*FILTER|SET\s*FIXED|SET\s*FORMAT|SET\s*FULLPATH|'
- r'SET\s*FUNCTION|SET\s*HEADINGS|SET\s*HELP|SET\s*HELPFILTER|'
- r'SET\s*HOURS|SET\s*INDEX|SET\s*INTENSITY|SET\s*KEY|'
- r'SET\s*KEYCOMP|SET\s*LIBRARY|SET\s*LOCK|SET\s*LOGERRORS|'
- r'SET\s*MACDESKTOP|SET\s*MACHELP|SET\s*MACKEY|SET\s*MARGIN|'
- r'SET\s*MARK\s*OF|SET\s*MARK\s*TO|SET\s*MEMOWIDTH|'
- r'SET\s*MESSAGE|SET\s*MOUSE|SET\s*MULTILOCKS|SET\s*NEAR|'
- r'SET\s*NOCPTRANS|SET\s*NOTIFY|SET\s*ODOMETER|SET\s*OPTIMIZE|'
- r'SET\s*ORDER|SET\s*PALETTE|SET\s*PATH|SET\s*PDSETUP|'
- r'SET\s*POINT|SET\s*PRINTER|SET\s*PROCEDURE|SET\s*READBORDER|'
- r'SET\s*REFRESH|SET\s*RELATION|SET\s*RELATION\s*OFF|'
- r'SET\s*REPROCESS|SET\s*RESOURCE|SET\s*SAFETY|SET\s*SCOREBOARD|'
- r'SET\s*SEPARATOR|SET\s*SHADOWS|SET\s*SKIP|SET\s*SKIP\s*OF|'
- r'SET\s*SPACE|SET\s*STATUS|SET\s*STATUS\s*BAR|SET\s*STEP|'
- r'SET\s*STICKY|SET\s*SYSMENU|SET\s*TALK|SET\s*TEXTMERGE|'
- r'SET\s*TEXTMERGE\s*DELIMITERS|SET\s*TOPIC|SET\s*TRBETWEEN|'
- r'SET\s*TYPEAHEAD|SET\s*UDFPARMS|SET\s*UNIQUE|SET\s*VIEW|'
- r'SET\s*VOLUME|SET\s*WINDOW\s*OF\s*MEMO|SET\s*XCMDFILE|'
- r'SHOW\s*GET|SHOW\s*GETS|SHOW\s*MENU|SHOW\s*OBJECT|'
- r'SHOW\s*POPUP|SHOW\s*WINDOW|SIZE\s*POPUP|SKIP|SORT|'
- r'STORE|SUM|SUSPEND|TOTAL|TYPE|UNLOCK|UPDATE|USE|WAIT|'
- r'ZAP|ZOOM\s*WINDOW|DO\s*CASE|CASE|OTHERWISE|ENDCASE|'
- r'DO\s*WHILE|ENDDO|FOR|ENDFOR|NEXT|IF|ELSE|ENDIF|PRINTJOB|'
- r'ENDPRINTJOB|SCAN|ENDSCAN|TEXT|ENDTEXT|=)',
- Keyword.Reserved, '#pop'),
- (r'#\s*(IF|ELIF|ELSE|ENDIF|DEFINE|IFDEF|IFNDEF|INCLUDE)',
- Comment.Preproc, '#pop'),
- (r'(m\.)?[a-z_]\w*', Name.Variable, '#pop'),
- (r'.', Text, '#pop'),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer
+from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
+ Name, String
+
+__all__ = ['FoxProLexer']
+
+
+class FoxProLexer(RegexLexer):
+ """Lexer for Microsoft Visual FoxPro language.
+
+ FoxPro syntax allows all keywords and function names to be shortened
+ to 4 characters. Shortened forms are not recognized by this lexer.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'FoxPro'
+ aliases = ['foxpro', 'vfp', 'clipper', 'xbase']
+ filenames = ['*.PRG', '*.prg']
+ mimetype = []
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r';\s*\n', Punctuation), # consume newline
+ (r'(^|\n)\s*', Text, 'newline'),
+
+ # Square brackets may be used for array indices
+ # and for string literals. Look for arrays
+ # before matching string literals.
+ (r'(?<=\w)\[[0-9, ]+\]', Text),
+ (r'\'[^\'\n]*\'|"[^"\n]*"|\[[^]*]\]', String),
+ (r'(^\s*\*|&&|&amp;&amp;).*?\n', Comment.Single),
+
+ (r'(ABS|ACLASS|ACOPY|ACOS|ADATABASES|ADBOBJECTS|ADDBS|'
+ r'ADDPROPERTY|ADEL|ADIR|ADLLS|ADOCKSTATE|AELEMENT|AERROR|'
+ r'AEVENTS|AFIELDS|AFONT|AGETCLASS|AGETFILEVERSION|AINS|'
+ r'AINSTANCE|ALANGUAGE|ALEN|ALIAS|ALINES|ALLTRIM|'
+ r'AMEMBERS|AMOUSEOBJ|ANETRESOURCES|APRINTERS|APROCINFO|'
+ r'ASC|ASCAN|ASELOBJ|ASESSIONS|ASIN|ASORT|ASQLHANDLES|'
+ r'ASTACKINFO|ASUBSCRIPT|AT|AT_C|ATAGINFO|ATAN|ATC|ATCC|'
+ r'ATCLINE|ATLINE|ATN2|AUSED|AVCXCLASSES|BAR|BARCOUNT|'
+ r'BARPROMPT|BETWEEN|BINDEVENT|BINTOC|BITAND|BITCLEAR|'
+ r'BITLSHIFT|BITNOT|BITOR|BITRSHIFT|BITSET|BITTEST|BITXOR|'
+ r'BOF|CANDIDATE|CAPSLOCK|CAST|CDOW|CDX|CEILING|CHR|CHRSAW|'
+ r'CHRTRAN|CHRTRANC|CLEARRESULTSET|CMONTH|CNTBAR|CNTPAD|COL|'
+ r'COM|Functions|COMARRAY|COMCLASSINFO|COMPOBJ|COMPROP|'
+ r'COMRETURNERROR|COS|CPCONVERT|CPCURRENT|CPDBF|CREATEBINARY|'
+ r'CREATEOBJECT|CREATEOBJECTEX|CREATEOFFLINE|CTOBIN|CTOD|'
+ r'CTOT|CURDIR|CURSORGETPROP|CURSORSETPROP|CURSORTOXML|'
+ r'CURVAL|DATE|DATETIME|DAY|DBC|DBF|DBGETPROP|DBSETPROP|'
+ r'DBUSED|DDEAbortTrans|DDEAdvise|DDEEnabled|DDEExecute|'
+ r'DDEInitiate|DDELastError|DDEPoke|DDERequest|DDESetOption|'
+ r'DDESetService|DDESetTopic|DDETerminate|DEFAULTEXT|'
+ r'DELETED|DESCENDING|DIFFERENCE|DIRECTORY|DISKSPACE|'
+ r'DisplayPath|DMY|DODEFAULT|DOW|DRIVETYPE|DROPOFFLINE|'
+ r'DTOC|DTOR|DTOS|DTOT|EDITSOURCE|EMPTY|EOF|ERROR|EVAL(UATE)?|'
+ r'EVENTHANDLER|EVL|EXECSCRIPT|EXP|FCHSIZE|FCLOSE|FCOUNT|'
+ r'FCREATE|FDATE|FEOF|FERROR|FFLUSH|FGETS|FIELD|FILE|'
+ r'FILETOSTR|FILTER|FKLABEL|FKMAX|FLDLIST|FLOCK|FLOOR|'
+ r'FONTMETRIC|FOPEN|FOR|FORCEEXT|FORCEPATH|FOUND|FPUTS|'
+ r'FREAD|FSEEK|FSIZE|FTIME|FULLPATH|FV|FWRITE|'
+ r'GETAUTOINCVALUE|GETBAR|GETCOLOR|GETCP|GETDIR|GETENV|'
+ r'GETFILE|GETFLDSTATE|GETFONT|GETINTERFACE|'
+ r'GETNEXTMODIFIED|GETOBJECT|GETPAD|GETPEM|GETPICT|'
+ r'GETPRINTER|GETRESULTSET|GETWORDCOUNT|GETWORDNUM|'
+ r'GETCURSORADAPTER|GOMONTH|HEADER|HOME|HOUR|ICASE|'
+ r'IDXCOLLATE|IIF|IMESTATUS|INDBC|INDEXSEEK|INKEY|INLIST|'
+ r'INPUTBOX|INSMODE|INT|ISALPHA|ISBLANK|ISCOLOR|ISDIGIT|'
+ r'ISEXCLUSIVE|ISFLOCKED|ISLEADBYTE|ISLOWER|ISMEMOFETCHED|'
+ r'ISMOUSE|ISNULL|ISPEN|ISREADONLY|ISRLOCKED|'
+ r'ISTRANSACTABLE|ISUPPER|JUSTDRIVE|JUSTEXT|JUSTFNAME|'
+ r'JUSTPATH|JUSTSTEM|KEY|KEYMATCH|LASTKEY|LEFT|LEFTC|LEN|'
+ r'LENC|LIKE|LIKEC|LINENO|LOADPICTURE|LOCFILE|LOCK|LOG|'
+ r'LOG10|LOOKUP|LOWER|LTRIM|LUPDATE|MAKETRANSACTABLE|MAX|'
+ r'MCOL|MDOWN|MDX|MDY|MEMLINES|MEMORY|MENU|MESSAGE|'
+ r'MESSAGEBOX|MIN|MINUTE|MLINE|MOD|MONTH|MRKBAR|MRKPAD|'
+ r'MROW|MTON|MWINDOW|NDX|NEWOBJECT|NORMALIZE|NTOM|NUMLOCK|'
+ r'NVL|OBJNUM|OBJTOCLIENT|OBJVAR|OCCURS|OEMTOANSI|OLDVAL|'
+ r'ON|ORDER|OS|PAD|PADL|PARAMETERS|PAYMENT|PCOL|PCOUNT|'
+ r'PEMSTATUS|PI|POPUP|PRIMARY|PRINTSTATUS|PRMBAR|PRMPAD|'
+ r'PROGRAM|PROMPT|PROPER|PROW|PRTINFO|PUTFILE|PV|QUARTER|'
+ r'RAISEEVENT|RAND|RAT|RATC|RATLINE|RDLEVEL|READKEY|RECCOUNT|'
+ r'RECNO|RECSIZE|REFRESH|RELATION|REPLICATE|REQUERY|RGB|'
+ r'RGBSCHEME|RIGHT|RIGHTC|RLOCK|ROUND|ROW|RTOD|RTRIM|'
+ r'SAVEPICTURE|SCHEME|SCOLS|SEC|SECONDS|SEEK|SELECT|SET|'
+ r'SETFLDSTATE|SETRESULTSET|SIGN|SIN|SKPBAR|SKPPAD|SOUNDEX|'
+ r'SPACE|SQLCANCEL|SQLCOLUMNS|SQLCOMMIT|SQLCONNECT|'
+ r'SQLDISCONNECT|SQLEXEC|SQLGETPROP|SQLIDLEDISCONNECT|'
+ r'SQLMORERESULTS|SQLPREPARE|SQLROLLBACK|SQLSETPROP|'
+ r'SQLSTRINGCONNECT|SQLTABLES|SQRT|SROWS|STR|STRCONV|'
+ r'STREXTRACT|STRTOFILE|STRTRAN|STUFF|STUFFC|SUBSTR|'
+ r'SUBSTRC|SYS|SYSMETRIC|TABLEREVERT|TABLEUPDATE|TAG|'
+ r'TAGCOUNT|TAGNO|TAN|TARGET|TEXTMERGE|TIME|TRANSFORM|'
+ r'TRIM|TTOC|TTOD|TXNLEVEL|TXTWIDTH|TYPE|UNBINDEVENTS|'
+ r'UNIQUE|UPDATED|UPPER|USED|VAL|VARREAD|VARTYPE|VERSION|'
+ r'WBORDER|WCHILD|WCOLS|WDOCKABLE|WEEK|WEXIST|WFONT|WLAST|'
+ r'WLCOL|WLROW|WMAXIMUM|WMINIMUM|WONTOP|WOUTPUT|WPARENT|'
+ r'WREAD|WROWS|WTITLE|WVISIBLE|XMLTOCURSOR|XMLUPDATEGRAM|'
+ r'YEAR)(?=\s*\()', Name.Function),
+
+ (r'_ALIGNMENT|_ASCIICOLS|_ASCIIROWS|_ASSIST|_BEAUTIFY|_BOX|'
+ r'_BROWSER|_BUILDER|_CALCMEM|_CALCVALUE|_CLIPTEXT|_CONVERTER|'
+ r'_COVERAGE|_CUROBJ|_DBLCLICK|_DIARYDATE|_DOS|_FOXDOC|_FOXREF|'
+ r'_GALLERY|_GENGRAPH|_GENHTML|_GENMENU|_GENPD|_GENSCRN|'
+ r'_GENXTAB|_GETEXPR|_INCLUDE|_INCSEEK|_INDENT|_LMARGIN|_MAC|'
+ r'_MENUDESIGNER|_MLINE|_PADVANCE|_PAGENO|_PAGETOTAL|_PBPAGE|'
+ r'_PCOLNO|_PCOPIES|_PDRIVER|_PDSETUP|_PECODE|_PEJECT|_PEPAGE|'
+ r'_PLENGTH|_PLINENO|_PLOFFSET|_PPITCH|_PQUALITY|_PRETEXT|'
+ r'_PSCODE|_PSPACING|_PWAIT|_RMARGIN|_REPORTBUILDER|'
+ r'_REPORTOUTPUT|_REPORTPREVIEW|_SAMPLES|_SCCTEXT|_SCREEN|'
+ r'_SHELL|_SPELLCHK|_STARTUP|_TABS|_TALLY|_TASKPANE|_TEXT|'
+ r'_THROTTLE|_TOOLBOX|_TOOLTIPTIMEOUT|_TRANSPORT|_TRIGGERLEVEL|'
+ r'_UNIX|_VFP|_WINDOWS|_WIZARD|_WRAP', Keyword.Pseudo),
+
+ (r'THISFORMSET|THISFORM|THIS', Name.Builtin),
+
+ (r'Application|CheckBox|Collection|Column|ComboBox|'
+ r'CommandButton|CommandGroup|Container|Control|CursorAdapter|'
+ r'Cursor|Custom|DataEnvironment|DataObject|EditBox|'
+ r'Empty|Exception|Fields|Files|File|FormSet|Form|FoxCode|'
+ r'Grid|Header|Hyperlink|Image|Label|Line|ListBox|Objects|'
+ r'OptionButton|OptionGroup|PageFrame|Page|ProjectHook|Projects|'
+ r'Project|Relation|ReportListener|Separator|Servers|Server|'
+ r'Session|Shape|Spinner|Tables|TextBox|Timer|ToolBar|'
+ r'XMLAdapter|XMLField|XMLTable', Name.Class),
+
+ (r'm\.[a-z_]\w*', Name.Variable),
+ (r'\.(F|T|AND|OR|NOT|NULL)\.|\b(AND|OR|NOT|NULL)\b', Operator.Word),
+
+ (r'\.(ActiveColumn|ActiveControl|ActiveForm|ActivePage|'
+ r'ActiveProject|ActiveRow|AddLineFeeds|ADOCodePage|Alias|'
+ r'Alignment|Align|AllowAddNew|AllowAutoColumnFit|'
+ r'AllowCellSelection|AllowDelete|AllowHeaderSizing|'
+ r'AllowInsert|AllowModalMessages|AllowOutput|AllowRowSizing|'
+ r'AllowSimultaneousFetch|AllowTabs|AllowUpdate|'
+ r'AlwaysOnBottom|AlwaysOnTop|Anchor|Application|'
+ r'AutoActivate|AutoCenter|AutoCloseTables|AutoComplete|'
+ r'AutoCompSource|AutoCompTable|AutoHideScrollBar|'
+ r'AutoIncrement|AutoOpenTables|AutoRelease|AutoSize|'
+ r'AutoVerbMenu|AutoYield|BackColor|ForeColor|BackStyle|'
+ r'BaseClass|BatchUpdateCount|BindControls|BorderColor|'
+ r'BorderStyle|BorderWidth|BoundColumn|BoundTo|Bound|'
+ r'BreakOnError|BufferModeOverride|BufferMode|'
+ r'BuildDateTime|ButtonCount|Buttons|Cancel|Caption|'
+ r'Centered|Century|ChildAlias|ChildOrder|ChildTable|'
+ r'ClassLibrary|Class|ClipControls|Closable|CLSID|CodePage|'
+ r'ColorScheme|ColorSource|ColumnCount|ColumnLines|'
+ r'ColumnOrder|Columns|ColumnWidths|CommandClauses|'
+ r'Comment|CompareMemo|ConflictCheckCmd|ConflictCheckType|'
+ r'ContinuousScroll|ControlBox|ControlCount|Controls|'
+ r'ControlSource|ConversionFunc|Count|CurrentControl|'
+ r'CurrentDataSession|CurrentPass|CurrentX|CurrentY|'
+ r'CursorSchema|CursorSource|CursorStatus|Curvature|'
+ r'Database|DataSessionID|DataSession|DataSourceType|'
+ r'DataSource|DataType|DateFormat|DateMark|Debug|'
+ r'DeclareXMLPrefix|DEClassLibrary|DEClass|DefaultFilePath|'
+ r'Default|DefOLELCID|DeleteCmdDataSourceType|DeleteCmdDataSource|'
+ r'DeleteCmd|DeleteMark|Description|Desktop|'
+ r'Details|DisabledBackColor|DisabledForeColor|'
+ r'DisabledItemBackColor|DisabledItemForeColor|'
+ r'DisabledPicture|DisableEncode|DisplayCount|'
+ r'DisplayValue|Dockable|Docked|DockPosition|'
+ r'DocumentFile|DownPicture|DragIcon|DragMode|DrawMode|'
+ r'DrawStyle|DrawWidth|DynamicAlignment|DynamicBackColor|'
+ r'DynamicForeColor|DynamicCurrentControl|DynamicFontBold|'
+ r'DynamicFontItalic|DynamicFontStrikethru|'
+ r'DynamicFontUnderline|DynamicFontName|DynamicFontOutline|'
+ r'DynamicFontShadow|DynamicFontSize|DynamicInputMask|'
+ r'DynamicLineHeight|EditorOptions|Enabled|'
+ r'EnableHyperlinks|Encrypted|ErrorNo|Exclude|Exclusive|'
+ r'FetchAsNeeded|FetchMemoCmdList|FetchMemoDataSourceType|'
+ r'FetchMemoDataSource|FetchMemo|FetchSize|'
+ r'FileClassLibrary|FileClass|FillColor|FillStyle|Filter|'
+ r'FirstElement|FirstNestedTable|Flags|FontBold|FontItalic|'
+ r'FontStrikethru|FontUnderline|FontCharSet|FontCondense|'
+ r'FontExtend|FontName|FontOutline|FontShadow|FontSize|'
+ r'ForceCloseTag|Format|FormCount|FormattedOutput|Forms|'
+ r'FractionDigits|FRXDataSession|FullName|GDIPlusGraphics|'
+ r'GridLineColor|GridLines|GridLineWidth|HalfHeightCaption|'
+ r'HeaderClassLibrary|HeaderClass|HeaderHeight|Height|'
+ r'HelpContextID|HideSelection|HighlightBackColor|'
+ r'HighlightForeColor|HighlightStyle|HighlightRowLineWidth|'
+ r'HighlightRow|Highlight|HomeDir|Hours|HostName|'
+ r'HScrollSmallChange|hWnd|Icon|IncrementalSearch|Increment|'
+ r'InitialSelectedAlias|InputMask|InsertCmdDataSourceType|'
+ r'InsertCmdDataSource|InsertCmdRefreshCmd|'
+ r'InsertCmdRefreshFieldList|InsertCmdRefreshKeyFieldList|'
+ r'InsertCmd|Instancing|IntegralHeight|'
+ r'Interval|IMEMode|IsAttribute|IsBase64|IsBinary|IsNull|'
+ r'IsDiffGram|IsLoaded|ItemBackColor|ItemData|ItemIDData|'
+ r'ItemTips|IXMLDOMElement|KeyboardHighValue|KeyboardLowValue|'
+ r'Keyfield|KeyFieldList|KeyPreview|KeySort|LanguageOptions|'
+ r'LeftColumn|Left|LineContents|LineNo|LineSlant|LinkMaster|'
+ r'ListCount|ListenerType|ListIndex|ListItemID|ListItem|'
+ r'List|LockColumnsLeft|LockColumns|LockScreen|MacDesktop|'
+ r'MainFile|MapN19_4ToCurrency|MapBinary|MapVarchar|Margin|'
+ r'MaxButton|MaxHeight|MaxLeft|MaxLength|MaxRecords|MaxTop|'
+ r'MaxWidth|MDIForm|MemberClassLibrary|MemberClass|'
+ r'MemoWindow|Message|MinButton|MinHeight|MinWidth|'
+ r'MouseIcon|MousePointer|Movable|MoverBars|MultiSelect|'
+ r'Name|NestedInto|NewIndex|NewItemID|NextSiblingTable|'
+ r'NoCpTrans|NoDataOnLoad|NoData|NullDisplay|'
+ r'NumberOfElements|Object|OLEClass|OLEDragMode|'
+ r'OLEDragPicture|OLEDropEffects|OLEDropHasData|'
+ r'OLEDropMode|OLEDropTextInsertion|OLELCID|'
+ r'OLERequestPendingTimeout|OLEServerBusyRaiseError|'
+ r'OLEServerBusyTimeout|OLETypeAllowed|OneToMany|'
+ r'OpenViews|OpenWindow|Optimize|OrderDirection|Order|'
+ r'OutputPageCount|OutputType|PageCount|PageHeight|'
+ r'PageNo|PageOrder|Pages|PageTotal|PageWidth|'
+ r'PanelLink|Panel|ParentAlias|ParentClass|ParentTable|'
+ r'Parent|Partition|PasswordChar|PictureMargin|'
+ r'PicturePosition|PictureSpacing|PictureSelectionDisplay|'
+ r'PictureVal|Picture|Prepared|'
+ r'PolyPoints|PreserveWhiteSpace|PreviewContainer|'
+ r'PrintJobName|Procedure|PROCESSID|ProgID|ProjectHookClass|'
+ r'ProjectHookLibrary|ProjectHook|QuietMode|'
+ r'ReadCycle|ReadLock|ReadMouse|ReadObject|ReadOnly|'
+ r'ReadSave|ReadTimeout|RecordMark|RecordSourceType|'
+ r'RecordSource|RefreshAlias|'
+ r'RefreshCmdDataSourceType|RefreshCmdDataSource|RefreshCmd|'
+ r'RefreshIgnoreFieldList|RefreshTimeStamp|RelationalExpr|'
+ r'RelativeColumn|RelativeRow|ReleaseType|Resizable|'
+ r'RespectCursorCP|RespectNesting|RightToLeft|RotateFlip|'
+ r'Rotation|RowColChange|RowHeight|RowSourceType|'
+ r'RowSource|ScaleMode|SCCProvider|SCCStatus|ScrollBars|'
+ r'Seconds|SelectCmd|SelectedID|'
+ r'SelectedItemBackColor|SelectedItemForeColor|Selected|'
+ r'SelectionNamespaces|SelectOnEntry|SelLength|SelStart|'
+ r'SelText|SendGDIPlusImage|SendUpdates|ServerClassLibrary|'
+ r'ServerClass|ServerHelpFile|ServerName|'
+ r'ServerProject|ShowTips|ShowInTaskbar|ShowWindow|'
+ r'Sizable|SizeBox|SOM|Sorted|Sparse|SpecialEffect|'
+ r'SpinnerHighValue|SpinnerLowValue|SplitBar|StackLevel|'
+ r'StartMode|StatusBarText|StatusBar|Stretch|StrictDateEntry|'
+ r'Style|TabIndex|Tables|TabOrientation|Tabs|TabStop|'
+ r'TabStretch|TabStyle|Tag|TerminateRead|Text|Themes|'
+ r'ThreadID|TimestampFieldList|TitleBar|ToolTipText|'
+ r'TopIndex|TopItemID|Top|TwoPassProcess|TypeLibCLSID|'
+ r'TypeLibDesc|TypeLibName|Type|Unicode|UpdatableFieldList|'
+ r'UpdateCmdDataSourceType|UpdateCmdDataSource|'
+ r'UpdateCmdRefreshCmd|UpdateCmdRefreshFieldList|'
+ r'UpdateCmdRefreshKeyFieldList|UpdateCmd|'
+ r'UpdateGramSchemaLocation|UpdateGram|UpdateNameList|UpdateType|'
+ r'UseCodePage|UseCursorSchema|UseDeDataSource|UseMemoSize|'
+ r'UserValue|UseTransactions|UTF8Encoded|Value|VersionComments|'
+ r'VersionCompany|VersionCopyright|VersionDescription|'
+ r'VersionNumber|VersionProduct|VersionTrademarks|Version|'
+ r'VFPXMLProgID|ViewPortHeight|ViewPortLeft|'
+ r'ViewPortTop|ViewPortWidth|VScrollSmallChange|View|Visible|'
+ r'VisualEffect|WhatsThisButton|WhatsThisHelpID|WhatsThisHelp|'
+ r'WhereType|Width|WindowList|WindowState|WindowType|WordWrap|'
+ r'WrapCharInCDATA|WrapInCDATA|WrapMemoInCDATA|XMLAdapter|'
+ r'XMLConstraints|XMLNameIsXPath|XMLNamespace|XMLName|'
+ r'XMLPrefix|XMLSchemaLocation|XMLTable|XMLType|'
+ r'XSDfractionDigits|XSDmaxLength|XSDtotalDigits|'
+ r'XSDtype|ZoomBox)', Name.Attribute),
+
+ (r'\.(ActivateCell|AddColumn|AddItem|AddListItem|AddObject|'
+ r'AddProperty|AddTableSchema|AddToSCC|Add|'
+ r'ApplyDiffgram|Attach|AutoFit|AutoOpen|Box|Build|'
+ r'CancelReport|ChangesToCursor|CheckIn|CheckOut|Circle|'
+ r'CleanUp|ClearData|ClearStatus|Clear|CloneObject|CloseTables|'
+ r'Close|Cls|CursorAttach|CursorDetach|CursorFill|'
+ r'CursorRefresh|DataToClip|DelayedMemoFetch|DeleteColumn|'
+ r'Dock|DoMessage|DoScroll|DoStatus|DoVerb|Drag|Draw|Eval|'
+ r'GetData|GetDockState|GetFormat|GetKey|GetLatestVersion|'
+ r'GetPageHeight|GetPageWidth|Help|Hide|IncludePageInOutput|'
+ r'IndexToItemID|ItemIDToIndex|Item|LoadXML|Line|Modify|'
+ r'MoveItem|Move|Nest|OLEDrag|OnPreviewClose|OutputPage|'
+ r'Point|Print|PSet|Quit|ReadExpression|ReadMethod|'
+ r'RecordRefresh|Refresh|ReleaseXML|Release|RemoveFromSCC|'
+ r'RemoveItem|RemoveListItem|RemoveObject|Remove|'
+ r'Render|Requery|RequestData|ResetToDefault|Reset|Run|'
+ r'SaveAsClass|SaveAs|SetAll|SetData|SetFocus|SetFormat|'
+ r'SetMain|SetVar|SetViewPort|ShowWhatsThis|Show|'
+ r'SupportsListenerType|TextHeight|TextWidth|ToCursor|'
+ r'ToXML|UndoCheckOut|Unnest|UpdateStatus|WhatsThisMode|'
+ r'WriteExpression|WriteMethod|ZOrder)', Name.Function),
+
+ (r'\.(Activate|AdjustObjectSize|AfterBand|AfterBuild|'
+ r'AfterCloseTables|AfterCursorAttach|AfterCursorClose|'
+ r'AfterCursorDetach|AfterCursorFill|AfterCursorRefresh|'
+ r'AfterCursorUpdate|AfterDelete|AfterInsert|'
+ r'AfterRecordRefresh|AfterUpdate|AfterDock|AfterReport|'
+ r'AfterRowColChange|BeforeBand|BeforeCursorAttach|'
+ r'BeforeCursorClose|BeforeCursorDetach|BeforeCursorFill|'
+ r'BeforeCursorRefresh|BeforeCursorUpdate|BeforeDelete|'
+ r'BeforeInsert|BeforeDock|BeforeOpenTables|'
+ r'BeforeRecordRefresh|BeforeReport|BeforeRowColChange|'
+ r'BeforeUpdate|Click|dbc_Activate|dbc_AfterAddTable|'
+ r'dbc_AfterAppendProc|dbc_AfterCloseTable|dbc_AfterCopyProc|'
+ r'dbc_AfterCreateConnection|dbc_AfterCreateOffline|'
+ r'dbc_AfterCreateTable|dbc_AfterCreateView|dbc_AfterDBGetProp|'
+ r'dbc_AfterDBSetProp|dbc_AfterDeleteConnection|'
+ r'dbc_AfterDropOffline|dbc_AfterDropTable|'
+ r'dbc_AfterModifyConnection|dbc_AfterModifyProc|'
+ r'dbc_AfterModifyTable|dbc_AfterModifyView|dbc_AfterOpenTable|'
+ r'dbc_AfterRemoveTable|dbc_AfterRenameConnection|'
+ r'dbc_AfterRenameTable|dbc_AfterRenameView|'
+ r'dbc_AfterValidateData|dbc_BeforeAddTable|'
+ r'dbc_BeforeAppendProc|dbc_BeforeCloseTable|'
+ r'dbc_BeforeCopyProc|dbc_BeforeCreateConnection|'
+ r'dbc_BeforeCreateOffline|dbc_BeforeCreateTable|'
+ r'dbc_BeforeCreateView|dbc_BeforeDBGetProp|'
+ r'dbc_BeforeDBSetProp|dbc_BeforeDeleteConnection|'
+ r'dbc_BeforeDropOffline|dbc_BeforeDropTable|'
+ r'dbc_BeforeModifyConnection|dbc_BeforeModifyProc|'
+ r'dbc_BeforeModifyTable|dbc_BeforeModifyView|'
+ r'dbc_BeforeOpenTable|dbc_BeforeRemoveTable|'
+ r'dbc_BeforeRenameConnection|dbc_BeforeRenameTable|'
+ r'dbc_BeforeRenameView|dbc_BeforeValidateData|'
+ r'dbc_CloseData|dbc_Deactivate|dbc_ModifyData|dbc_OpenData|'
+ r'dbc_PackData|DblClick|Deactivate|Deleted|Destroy|DoCmd|'
+ r'DownClick|DragDrop|DragOver|DropDown|ErrorMessage|Error|'
+ r'EvaluateContents|GotFocus|Init|InteractiveChange|KeyPress|'
+ r'LoadReport|Load|LostFocus|Message|MiddleClick|MouseDown|'
+ r'MouseEnter|MouseLeave|MouseMove|MouseUp|MouseWheel|Moved|'
+ r'OLECompleteDrag|OLEDragOver|OLEGiveFeedback|OLESetData|'
+ r'OLEStartDrag|OnMoveItem|Paint|ProgrammaticChange|'
+ r'QueryAddFile|QueryModifyFile|QueryNewFile|QueryRemoveFile|'
+ r'QueryRunFile|QueryUnload|RangeHigh|RangeLow|ReadActivate|'
+ r'ReadDeactivate|ReadShow|ReadValid|ReadWhen|Resize|'
+ r'RightClick|SCCInit|SCCDestroy|Scrolled|Timer|UIEnable|'
+ r'UnDock|UnloadReport|Unload|UpClick|Valid|When)', Name.Function),
+
+ (r'\s+', Text),
+ # everything else is not colored
+ (r'.', Text),
+ ],
+ 'newline': [
+ (r'\*.*?$', Comment.Single, '#pop'),
+ (r'(ACCEPT|ACTIVATE\s*MENU|ACTIVATE\s*POPUP|ACTIVATE\s*SCREEN|'
+ r'ACTIVATE\s*WINDOW|APPEND|APPEND\s*FROM|APPEND\s*FROM\s*ARRAY|'
+ r'APPEND\s*GENERAL|APPEND\s*MEMO|ASSIST|AVERAGE|BLANK|BROWSE|'
+ r'BUILD\s*APP|BUILD\s*EXE|BUILD\s*PROJECT|CALCULATE|CALL|'
+ r'CANCEL|CHANGE|CLEAR|CLOSE|CLOSE\s*MEMO|COMPILE|CONTINUE|'
+ r'COPY\s*FILE|COPY\s*INDEXES|COPY\s*MEMO|COPY\s*STRUCTURE|'
+ r'COPY\s*STRUCTURE\s*EXTENDED|COPY\s*TAG|COPY\s*TO|'
+ r'COPY\s*TO\s*ARRAY|COUNT|CREATE|CREATE\s*COLOR\s*SET|'
+ r'CREATE\s*CURSOR|CREATE\s*FROM|CREATE\s*LABEL|CREATE\s*MENU|'
+ r'CREATE\s*PROJECT|CREATE\s*QUERY|CREATE\s*REPORT|'
+ r'CREATE\s*SCREEN|CREATE\s*TABLE|CREATE\s*VIEW|DDE|'
+ r'DEACTIVATE\s*MENU|DEACTIVATE\s*POPUP|DEACTIVATE\s*WINDOW|'
+ r'DECLARE|DEFINE\s*BAR|DEFINE\s*BOX|DEFINE\s*MENU|'
+ r'DEFINE\s*PAD|DEFINE\s*POPUP|DEFINE\s*WINDOW|DELETE|'
+ r'DELETE\s*FILE|DELETE\s*TAG|DIMENSION|DIRECTORY|DISPLAY|'
+ r'DISPLAY\s*FILES|DISPLAY\s*MEMORY|DISPLAY\s*STATUS|'
+ r'DISPLAY\s*STRUCTURE|DO|EDIT|EJECT|EJECT\s*PAGE|ERASE|'
+ r'EXIT|EXPORT|EXTERNAL|FILER|FIND|FLUSH|FUNCTION|GATHER|'
+ r'GETEXPR|GO|GOTO|HELP|HIDE\s*MENU|HIDE\s*POPUP|'
+ r'HIDE\s*WINDOW|IMPORT|INDEX|INPUT|INSERT|JOIN|KEYBOARD|'
+ r'LABEL|LIST|LOAD|LOCATE|LOOP|MENU|MENU\s*TO|MODIFY\s*COMMAND|'
+ r'MODIFY\s*FILE|MODIFY\s*GENERAL|MODIFY\s*LABEL|MODIFY\s*MEMO|'
+ r'MODIFY\s*MENU|MODIFY\s*PROJECT|MODIFY\s*QUERY|'
+ r'MODIFY\s*REPORT|MODIFY\s*SCREEN|MODIFY\s*STRUCTURE|'
+ r'MODIFY\s*WINDOW|MOVE\s*POPUP|MOVE\s*WINDOW|NOTE|'
+ r'ON\s*APLABOUT|ON\s*BAR|ON\s*ERROR|ON\s*ESCAPE|'
+ r'ON\s*EXIT\s*BAR|ON\s*EXIT\s*MENU|ON\s*EXIT\s*PAD|'
+ r'ON\s*EXIT\s*POPUP|ON\s*KEY|ON\s*KEY\s*=|ON\s*KEY\s*LABEL|'
+ r'ON\s*MACHELP|ON\s*PAD|ON\s*PAGE|ON\s*READERROR|'
+ r'ON\s*SELECTION\s*BAR|ON\s*SELECTION\s*MENU|'
+ r'ON\s*SELECTION\s*PAD|ON\s*SELECTION\s*POPUP|ON\s*SHUTDOWN|'
+ r'PACK|PARAMETERS|PLAY\s*MACRO|POP\s*KEY|POP\s*MENU|'
+ r'POP\s*POPUP|PRIVATE|PROCEDURE|PUBLIC|PUSH\s*KEY|'
+ r'PUSH\s*MENU|PUSH\s*POPUP|QUIT|READ|READ\s*MENU|RECALL|'
+ r'REINDEX|RELEASE|RELEASE\s*MODULE|RENAME|REPLACE|'
+ r'REPLACE\s*FROM\s*ARRAY|REPORT|RESTORE\s*FROM|'
+ r'RESTORE\s*MACROS|RESTORE\s*SCREEN|RESTORE\s*WINDOW|'
+ r'RESUME|RETRY|RETURN|RUN|RUN\s*\/N"|RUNSCRIPT|'
+ r'SAVE\s*MACROS|SAVE\s*SCREEN|SAVE\s*TO|SAVE\s*WINDOWS|'
+ r'SCATTER|SCROLL|SEEK|SELECT|SET|SET\s*ALTERNATE|'
+ r'SET\s*ANSI|SET\s*APLABOUT|SET\s*AUTOSAVE|SET\s*BELL|'
+ r'SET\s*BLINK|SET\s*BLOCKSIZE|SET\s*BORDER|SET\s*BRSTATUS|'
+ r'SET\s*CARRY|SET\s*CENTURY|SET\s*CLEAR|SET\s*CLOCK|'
+ r'SET\s*COLLATE|SET\s*COLOR\s*OF|SET\s*COLOR\s*OF\s*SCHEME|'
+ r'SET\s*COLOR\s*SET|SET\s*COLOR\s*TO|SET\s*COMPATIBLE|'
+ r'SET\s*CONFIRM|SET\s*CONSOLE|SET\s*CURRENCY|SET\s*CURSOR|'
+ r'SET\s*DATE|SET\s*DEBUG|SET\s*DECIMALS|SET\s*DEFAULT|'
+ r'SET\s*DELETED|SET\s*DELIMITERS|SET\s*DEVELOPMENT|'
+ r'SET\s*DEVICE|SET\s*DISPLAY|SET\s*DOHISTORY|SET\s*ECHO|'
+ r'SET\s*ESCAPE|SET\s*EXACT|SET\s*EXCLUSIVE|SET\s*FIELDS|'
+ r'SET\s*FILTER|SET\s*FIXED|SET\s*FORMAT|SET\s*FULLPATH|'
+ r'SET\s*FUNCTION|SET\s*HEADINGS|SET\s*HELP|SET\s*HELPFILTER|'
+ r'SET\s*HOURS|SET\s*INDEX|SET\s*INTENSITY|SET\s*KEY|'
+ r'SET\s*KEYCOMP|SET\s*LIBRARY|SET\s*LOCK|SET\s*LOGERRORS|'
+ r'SET\s*MACDESKTOP|SET\s*MACHELP|SET\s*MACKEY|SET\s*MARGIN|'
+ r'SET\s*MARK\s*OF|SET\s*MARK\s*TO|SET\s*MEMOWIDTH|'
+ r'SET\s*MESSAGE|SET\s*MOUSE|SET\s*MULTILOCKS|SET\s*NEAR|'
+ r'SET\s*NOCPTRANS|SET\s*NOTIFY|SET\s*ODOMETER|SET\s*OPTIMIZE|'
+ r'SET\s*ORDER|SET\s*PALETTE|SET\s*PATH|SET\s*PDSETUP|'
+ r'SET\s*POINT|SET\s*PRINTER|SET\s*PROCEDURE|SET\s*READBORDER|'
+ r'SET\s*REFRESH|SET\s*RELATION|SET\s*RELATION\s*OFF|'
+ r'SET\s*REPROCESS|SET\s*RESOURCE|SET\s*SAFETY|SET\s*SCOREBOARD|'
+ r'SET\s*SEPARATOR|SET\s*SHADOWS|SET\s*SKIP|SET\s*SKIP\s*OF|'
+ r'SET\s*SPACE|SET\s*STATUS|SET\s*STATUS\s*BAR|SET\s*STEP|'
+ r'SET\s*STICKY|SET\s*SYSMENU|SET\s*TALK|SET\s*TEXTMERGE|'
+ r'SET\s*TEXTMERGE\s*DELIMITERS|SET\s*TOPIC|SET\s*TRBETWEEN|'
+ r'SET\s*TYPEAHEAD|SET\s*UDFPARMS|SET\s*UNIQUE|SET\s*VIEW|'
+ r'SET\s*VOLUME|SET\s*WINDOW\s*OF\s*MEMO|SET\s*XCMDFILE|'
+ r'SHOW\s*GET|SHOW\s*GETS|SHOW\s*MENU|SHOW\s*OBJECT|'
+ r'SHOW\s*POPUP|SHOW\s*WINDOW|SIZE\s*POPUP|SKIP|SORT|'
+ r'STORE|SUM|SUSPEND|TOTAL|TYPE|UNLOCK|UPDATE|USE|WAIT|'
+ r'ZAP|ZOOM\s*WINDOW|DO\s*CASE|CASE|OTHERWISE|ENDCASE|'
+ r'DO\s*WHILE|ENDDO|FOR|ENDFOR|NEXT|IF|ELSE|ENDIF|PRINTJOB|'
+ r'ENDPRINTJOB|SCAN|ENDSCAN|TEXT|ENDTEXT|=)',
+ Keyword.Reserved, '#pop'),
+ (r'#\s*(IF|ELIF|ELSE|ENDIF|DEFINE|IFDEF|IFNDEF|INCLUDE)',
+ Comment.Preproc, '#pop'),
+ (r'(m\.)?[a-z_]\w*', Name.Variable, '#pop'),
+ (r'.', Text, '#pop'),
+ ],
+ }
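
The 'newline' state that closes the FoxPro lexer above is reached from rules earlier in the file (outside this hunk), and every rule in it pops straight back with '#pop', so line-initial constructs such as '*' comments and command keywords are only recognised in that one position. A minimal sketch of the same push/pop pattern, assuming a toy rule set rather than the actual FoxPro rules:

    import re
    from pygments.lexer import RegexLexer
    from pygments.token import Comment, Keyword, Name, Text

    class LineStartSketchLexer(RegexLexer):
        # Toy lexer: '*' starts a comment only when it is the first thing on a line.
        name = 'LineStartSketch'
        flags = re.IGNORECASE | re.MULTILINE
        tokens = {
            'root': [
                (r'\n\s*', Text, 'newline'),   # after a line break, decide how the next line starts
                (r'[a-z_]\w*', Name.Variable),
                (r'\s+', Text),
                (r'.', Text),
            ],
            'newline': [
                (r'\*.*?$', Comment.Single, '#pop'),             # line-initial '*' comment
                (r'(USE|SELECT|REPLACE)\b', Keyword.Reserved, '#pop'),
                (r'[a-z_]\w*', Name.Variable, '#pop'),
                (r'.', Text, '#pop'),                            # anything else: back to 'root'
            ],
        }

In this sketch the very first input line is handled by 'root' directly; the opening rules of the real lexer sit above the excerpt shown here.
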
diff --git a/contrib/python/Pygments/py2/pygments/lexers/functional.py b/contrib/python/Pygments/py2/pygments/lexers/functional.py
index 2d94aca3b3..359047727b 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/functional.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/functional.py
@@ -1,21 +1,21 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.functional
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.functional
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
- NewLispLexer, ShenLexer
-from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
- KokaLexer
-from pygments.lexers.theorem import CoqLexer
-from pygments.lexers.erlang import ErlangLexer, ErlangShellLexer, \
- ElixirConsoleLexer, ElixirLexer
-from pygments.lexers.ml import SMLLexer, OcamlLexer, OpaLexer
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
+ NewLispLexer, ShenLexer
+from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
+ KokaLexer
+from pygments.lexers.theorem import CoqLexer
+from pygments.lexers.erlang import ErlangLexer, ErlangShellLexer, \
+ ElixirConsoleLexer, ElixirLexer
+from pygments.lexers.ml import SMLLexer, OcamlLexer, OpaLexer
+
+__all__ = []
diff --git a/contrib/python/Pygments/py2/pygments/lexers/go.py b/contrib/python/Pygments/py2/pygments/lexers/go.py
index f6bb7fc886..00eda96026 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/go.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/go.py
@@ -1,101 +1,101 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.go
- ~~~~~~~~~~~~~~~~~~
-
- Lexers for the Google Go language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.go
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Google Go language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['GoLexer']
-
-
-class GoLexer(RegexLexer):
- """
- For `Go <http://golang.org>`_ source.
-
- .. versionadded:: 1.2
- """
- name = 'Go'
- filenames = ['*.go']
- aliases = ['go']
- mimetypes = ['text/x-gosrc']
-
- flags = re.MULTILINE | re.UNICODE
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuations
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(import|package)\b', Keyword.Namespace),
- (r'(var|func|struct|map|chan|type|interface|const)\b',
- Keyword.Declaration),
- (words((
- 'break', 'default', 'select', 'case', 'defer', 'go',
- 'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
- 'continue', 'for', 'return'), suffix=r'\b'),
- Keyword),
- (r'(true|false|iota|nil)\b', Keyword.Constant),
- # It seems the builtin types aren't actually keywords, but
- # can be used as functions. So we need two declarations.
- (words((
- 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'int', 'int8', 'int16', 'int32', 'int64',
- 'float', 'float32', 'float64',
- 'complex64', 'complex128', 'byte', 'rune',
- 'string', 'bool', 'error', 'uintptr',
- 'print', 'println', 'panic', 'recover', 'close', 'complex',
- 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
- 'new', 'make'), suffix=r'\b(\()'),
- bygroups(Name.Builtin, Punctuation)),
- (words((
- 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'int', 'int8', 'int16', 'int32', 'int64',
- 'float', 'float32', 'float64',
- 'complex64', 'complex128', 'byte', 'rune',
- 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'),
- Keyword.Type),
- # imaginary_lit
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
- # float_lit
- (r'\d+(\.\d+[eE][+\-]?\d+|'
- r'\.\d*|[eE][+\-]?\d+)', Number.Float),
- (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
- # int_lit
- # -- octal_lit
- (r'0[0-7]+', Number.Oct),
- # -- hex_lit
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- decimal_lit
- (r'(0|[1-9][0-9]*)', Number.Integer),
- # char_lit
- (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
- String.Char),
- # StringLiteral
- # -- raw_string_lit
- (r'`[^`]*`', String),
- # -- interpreted_string_lit
- (r'"(\\\\|\\"|[^"])*"', String),
- # Tokens
- (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
- r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
- (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
- # identifier
- (r'[^\W\d]\w*', Name.Other),
- ]
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['GoLexer']
+
+
+class GoLexer(RegexLexer):
+ """
+ For `Go <http://golang.org>`_ source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Go'
+ filenames = ['*.go']
+ aliases = ['go']
+ mimetypes = ['text/x-gosrc']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuations
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'(import|package)\b', Keyword.Namespace),
+ (r'(var|func|struct|map|chan|type|interface|const)\b',
+ Keyword.Declaration),
+ (words((
+ 'break', 'default', 'select', 'case', 'defer', 'go',
+ 'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
+ 'continue', 'for', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|iota|nil)\b', Keyword.Constant),
+ # It seems the builtin types aren't actually keywords, but
+ # can be used as functions. So we need two declarations.
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr',
+ 'print', 'println', 'panic', 'recover', 'close', 'complex',
+ 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
+ 'new', 'make'), suffix=r'\b(\()'),
+ bygroups(Name.Builtin, Punctuation)),
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'),
+ Keyword.Type),
+ # imaginary_lit
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+ # float_lit
+ (r'\d+(\.\d+[eE][+\-]?\d+|'
+ r'\.\d*|[eE][+\-]?\d+)', Number.Float),
+ (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
+ # int_lit
+ # -- octal_lit
+ (r'0[0-7]+', Number.Oct),
+ # -- hex_lit
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- decimal_lit
+ (r'(0|[1-9][0-9]*)', Number.Integer),
+ # char_lit
+ (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
+ String.Char),
+ # StringLiteral
+ # -- raw_string_lit
+ (r'`[^`]*`', String),
+ # -- interpreted_string_lit
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Tokens
+ (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
+ r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
+ (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
+ # identifier
+ (r'[^\W\d]\w*', Name.Other),
+ ]
+ }
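
The pair of words() rules in the Go lexer above resolves the ambiguity noted in its comment: a builtin name directly followed by '(' is split by bygroups() into Name.Builtin plus Punctuation, while the bare name falls through to the Keyword.Type rule. A stripped-down sketch of the same two-rule technique, assuming toy word lists rather than the full Go sets:

    from pygments.lexer import RegexLexer, bygroups, words
    from pygments.token import Keyword, Name, Punctuation, Text

    class BuiltinCallSketchLexer(RegexLexer):
        name = 'BuiltinCallSketch'
        tokens = {
            'root': [
                # "len(" -> Name.Builtin for "len", Punctuation for "("
                (words(('int', 'string', 'len'), suffix=r'\b(\()'),
                 bygroups(Name.Builtin, Punctuation)),
                # bare "int" / "string" -> a type
                (words(('int', 'string'), suffix=r'\b'), Keyword.Type),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }

Order matters here: the call form has to be tried first, otherwise the type rule would consume the name and leave the '(' to the fallback.
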
diff --git a/contrib/python/Pygments/py2/pygments/lexers/grammar_notation.py b/contrib/python/Pygments/py2/pygments/lexers/grammar_notation.py
index 66d3eb3639..72fb8e15ff 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/grammar_notation.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/grammar_notation.py
@@ -1,136 +1,136 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.grammar_notation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for grammar notations like BNF.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.grammar_notation
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for grammar notations like BNF.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import re
-
+
from pygments.lexer import RegexLexer, bygroups, include, this, using, words
from pygments.token import Comment, Keyword, Literal, Name, Number, \
Operator, Punctuation, String, Text
-
+
__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer']
-
-
-class BnfLexer(RegexLexer):
- """
- This lexer is for grammar notations which are similar to
- the original BNF.
-
- In order to maximize the number of targets of this lexer,
- the following design decisions were made:
-
- * We don't distinguish `Terminal Symbol`.
-
- * We do assume that a `NonTerminal Symbol` is always enclosed
- in angle brackets.
-
- * We do assume that a `NonTerminal Symbol` may include
- any printable characters except angle brackets and ASCII 0x20.
- This assumption is for `RBNF <http://www.rfc-base.org/txt/rfc-5511.txt>`_.
-
- * We do assume that the target notation doesn't support comments.
-
- * We don't distinguish any operators and punctuation except
- `::=`.
-
- Although these decisions may result in rather minimal highlighting
- and you might be disappointed, we consider this a reasonable trade-off.
-
- .. versionadded:: 2.1
- """
-
- name = 'BNF'
- aliases = ['bnf']
- filenames = ['*.bnf']
- mimetypes = ['text/x-bnf']
-
- tokens = {
- 'root': [
- (r'(<)([ -;=?-~]+)(>)',
- bygroups(Punctuation, Name.Class, Punctuation)),
-
- # the only operator
- (r'::=', Operator),
-
- # fallback
- (r'[^<>:]+', Text), # for performance
- (r'.', Text),
- ],
- }
-
-
-class AbnfLexer(RegexLexer):
- """
- Lexer for `IETF 7405 ABNF
- <http://www.ietf.org/rfc/rfc7405.txt>`_
- (Updates `5234 <http://www.ietf.org/rfc/rfc5234.txt>`_)
- grammars.
-
- .. versionadded:: 2.1
- """
-
- name = 'ABNF'
- aliases = ['abnf']
- filenames = ['*.abnf']
- mimetypes = ['text/x-abnf']
-
- _core_rules = (
- 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT',
- 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET',
- 'SP', 'VCHAR', 'WSP')
-
- tokens = {
- 'root': [
- # comment
- (r';.*$', Comment.Single),
-
- # quoted
- # a double quote itself cannot appear in this state; it is written as '%x22'.
- (r'(%[si])?"[^"]*"', Literal),
-
- # binary (but I have never seen it used...)
- (r'%b[01]+\-[01]+\b', Literal), # range
- (r'%b[01]+(\.[01]+)*\b', Literal), # concat
-
- # decimal
- (r'%d[0-9]+\-[0-9]+\b', Literal), # range
- (r'%d[0-9]+(\.[0-9]+)*\b', Literal), # concat
-
- # hexadecimal
- (r'%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b', Literal), # range
- (r'%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b', Literal), # concat
-
- # repetition (<a>*<b>element) including nRule
- (r'\b[0-9]+\*[0-9]+', Operator),
- (r'\b[0-9]+\*', Operator),
- (r'\b[0-9]+', Operator),
- (r'\*', Operator),
-
- # Strictly speaking, these are not keywords but
- # are called `Core Rules'.
- (words(_core_rules, suffix=r'\b'), Keyword),
-
- # nonterminals (ALPHA *(ALPHA / DIGIT / "-"))
- (r'[a-zA-Z][a-zA-Z0-9-]+\b', Name.Class),
-
- # operators
- (r'(=/|=|/)', Operator),
-
- # punctuation
- (r'[\[\]()]', Punctuation),
-
- # fallback
- (r'\s+', Text),
- (r'.', Text),
- ],
- }
+
+
+class BnfLexer(RegexLexer):
+ """
+ This lexer is for grammar notations which are similar to
+ the original BNF.
+
+ In order to maximize the number of targets of this lexer,
+ the following design decisions were made:
+
+ * We don't distinguish `Terminal Symbol`.
+
+ * We do assume that a `NonTerminal Symbol` is always enclosed
+ in angle brackets.
+
+ * We do assume that a `NonTerminal Symbol` may include
+ any printable characters except angle brackets and ASCII 0x20.
+ This assumption is for `RBNF <http://www.rfc-base.org/txt/rfc-5511.txt>`_.
+
+ * We do assume that the target notation doesn't support comments.
+
+ * We don't distinguish any operators and punctuation except
+ `::=`.
+
+ Although these decisions may result in rather minimal highlighting
+ and you might be disappointed, we consider this a reasonable trade-off.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'BNF'
+ aliases = ['bnf']
+ filenames = ['*.bnf']
+ mimetypes = ['text/x-bnf']
+
+ tokens = {
+ 'root': [
+ (r'(<)([ -;=?-~]+)(>)',
+ bygroups(Punctuation, Name.Class, Punctuation)),
+
+ # the only operator
+ (r'::=', Operator),
+
+ # fallback
+ (r'[^<>:]+', Text), # for performance
+ (r'.', Text),
+ ],
+ }
+
+
+class AbnfLexer(RegexLexer):
+ """
+ Lexer for `IETF 7405 ABNF
+ <http://www.ietf.org/rfc/rfc7405.txt>`_
+ (Updates `5234 <http://www.ietf.org/rfc/rfc5234.txt>`_)
+ grammars.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'ABNF'
+ aliases = ['abnf']
+ filenames = ['*.abnf']
+ mimetypes = ['text/x-abnf']
+
+ _core_rules = (
+ 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT',
+ 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET',
+ 'SP', 'VCHAR', 'WSP')
+
+ tokens = {
+ 'root': [
+ # comment
+ (r';.*$', Comment.Single),
+
+ # quoted
+ # a double quote itself cannot appear in this state; it is written as '%x22'.
+ (r'(%[si])?"[^"]*"', Literal),
+
+ # binary (but I have never seen it used...)
+ (r'%b[01]+\-[01]+\b', Literal), # range
+ (r'%b[01]+(\.[01]+)*\b', Literal), # concat
+
+ # decimal
+ (r'%d[0-9]+\-[0-9]+\b', Literal), # range
+ (r'%d[0-9]+(\.[0-9]+)*\b', Literal), # concat
+
+ # hexadecimal
+ (r'%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b', Literal), # range
+ (r'%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b', Literal), # concat
+
+ # repetition (<a>*<b>element) including nRule
+ (r'\b[0-9]+\*[0-9]+', Operator),
+ (r'\b[0-9]+\*', Operator),
+ (r'\b[0-9]+', Operator),
+ (r'\*', Operator),
+
+ # Strictly speaking, these are not keywords but
+ # are called `Core Rules'.
+ (words(_core_rules, suffix=r'\b'), Keyword),
+
+ # nonterminals (ALPHA *(ALPHA / DIGIT / "-"))
+ (r'[a-zA-Z][a-zA-Z0-9-]+\b', Name.Class),
+
+ # operators
+ (r'(=/|=|/)', Operator),
+
+ # punctuation
+ (r'[\[\]()]', Punctuation),
+
+ # fallback
+ (r'\s+', Text),
+ (r'.', Text),
+ ],
+ }
class JsgfLexer(RegexLexer):
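
A quick way to inspect what the deliberately minimal BNF lexer above produces is to run pygments.lex() over a small grammar fragment. This is a usage sketch assuming a standard Pygments installation where the class is importable from pygments.lexers.grammar_notation:

    from pygments import lex
    from pygments.lexers.grammar_notation import BnfLexer

    for token_type, text in lex('<expr> ::= <term> "+" <expr>', BnfLexer()):
        print(token_type, repr(text))

    # Nonterminals such as <expr> come out as Punctuation / Name.Class /
    # Punctuation triples, '::=' as Operator, and everything else as Text,
    # in line with the design notes in the docstring.
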
diff --git a/contrib/python/Pygments/py2/pygments/lexers/graph.py b/contrib/python/Pygments/py2/pygments/lexers/graph.py
index f7b45e29ee..86bdf5bff1 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/graph.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/graph.py
@@ -1,57 +1,57 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.graph
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for graph query languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.graph
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for graph query languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this
-from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
- String, Number, Whitespace
-
-
-__all__ = ['CypherLexer']
-
-
-class CypherLexer(RegexLexer):
- """
- For `Cypher Query Language
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
+ String, Number, Whitespace
+
+
+__all__ = ['CypherLexer']
+
+
+class CypherLexer(RegexLexer):
+ """
+ For `Cypher Query Language
<https://neo4j.com/docs/developer-manual/3.3/cypher/>`_
-
+
For the Cypher version in Neo4j 3.3
-
- .. versionadded:: 2.0
- """
- name = 'Cypher'
- aliases = ['cypher']
- filenames = ['*.cyp', '*.cypher']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- include('comment'),
- include('keywords'),
- include('clauses'),
- include('relations'),
- include('strings'),
- include('whitespace'),
- include('barewords'),
- ],
- 'comment': [
- (r'^.*//.*\n', Comment.Single),
- ],
- 'keywords': [
- (r'(create|order|match|limit|set|skip|start|return|with|where|'
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cypher'
+ aliases = ['cypher']
+ filenames = ['*.cyp', '*.cypher']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('keywords'),
+ include('clauses'),
+ include('relations'),
+ include('strings'),
+ include('whitespace'),
+ include('barewords'),
+ ],
+ 'comment': [
+ (r'^.*//.*\n', Comment.Single),
+ ],
+ 'keywords': [
+ (r'(create|order|match|limit|set|skip|start|return|with|where|'
r'delete|foreach|not|by|true|false)\b', Keyword),
- ],
- 'clauses': [
+ ],
+ 'clauses': [
# based on https://neo4j.com/docs/cypher-refcard/3.3/
(r'(all|any|as|asc|ascending|assert|call|case|create|'
r'create\s+index|create\s+unique|delete|desc|descending|'
@@ -62,24 +62,24 @@ class CypherLexer(RegexLexer):
r'remove|return|set|skip|single|start|starts\s+with|then|union|'
r'union\s+all|unwind|using\s+periodic\s+commit|yield|where|when|'
r'with)\b', Keyword),
- ],
- 'relations': [
- (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
- (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
- (r'(-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
- (r'-->|<--|\[|\]', Operator),
- (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
- (r'[.*{}]', Punctuation),
- ],
- 'strings': [
- (r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
- (r'`(?:``|[^`])+`', Name.Variable),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'barewords': [
- (r'[a-z]\w*', Name),
- (r'\d+', Number),
- ],
- }
+ ],
+ 'relations': [
+ (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
+ (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
+ (r'(-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
+ (r'-->|<--|\[|\]', Operator),
+ (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
+ (r'[.*{}]', Punctuation),
+ ],
+ 'strings': [
+ (r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
+ (r'`(?:``|[^`])+`', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'barewords': [
+ (r'[a-z]\w*', Name),
+ (r'\d+', Number),
+ ],
+ }
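
The Cypher lexer above assembles its 'root' state entirely from include() directives, which splice the rules of the named states in place at lexer-construction time (no state is pushed or popped), keeping each rule group readable on its own. A minimal sketch of that composition technique, assuming toy rules rather than the actual Cypher grammar:

    from pygments.lexer import RegexLexer, include
    from pygments.token import Comment, Keyword, Name, Whitespace

    class IncludeSketchLexer(RegexLexer):
        name = 'IncludeSketch'
        tokens = {
            'root': [
                include('comment'),      # spliced inline, in this order
                include('keywords'),
                include('whitespace'),
                (r'\w+', Name),
            ],
            'comment': [
                (r'//.*?\n', Comment.Single),
            ],
            'keywords': [
                (r'(match|return|where)\b', Keyword),
            ],
            'whitespace': [
                (r'\s+', Whitespace),
            ],
        }
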
diff --git a/contrib/python/Pygments/py2/pygments/lexers/graphics.py b/contrib/python/Pygments/py2/pygments/lexers/graphics.py
index b0b9145e0a..2a87a72253 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/graphics.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/graphics.py
@@ -1,51 +1,51 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.graphics
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for computer graphics and plotting related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.graphics
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer graphics and plotting related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups, using, \
- this, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
- Number, Punctuation, String
-
-__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include, bygroups, using, \
+ this, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
+ Number, Punctuation, String
+
+__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
'PovrayLexer', 'HLSLShaderLexer']
-
-
-class GLShaderLexer(RegexLexer):
- """
- GLSL (OpenGL Shader) lexer.
-
- .. versionadded:: 1.1
- """
- name = 'GLSL'
- aliases = ['glsl']
- filenames = ['*.vert', '*.frag', '*.geo']
- mimetypes = ['text/x-glslsrc']
-
- tokens = {
- 'root': [
- (r'^#.*', Comment.Preproc),
- (r'//.*', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
- Operator),
- (r'[?:]', Operator), # quick hack for ternary
- (r'\bdefined\b', Operator),
- (r'[;{}(),\[\]]', Punctuation),
- # FIXME when e is present, no decimal point needed
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]*', Number.Hex),
- (r'0[0-7]*', Number.Oct),
- (r'[1-9][0-9]*', Number.Integer),
- (words((
+
+
+class GLShaderLexer(RegexLexer):
+ """
+ GLSL (OpenGL Shader) lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'GLSL'
+ aliases = ['glsl']
+ filenames = ['*.vert', '*.frag', '*.geo']
+ mimetypes = ['text/x-glslsrc']
+
+ tokens = {
+ 'root': [
+ (r'^#.*', Comment.Preproc),
+ (r'//.*', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
+ Operator),
+ (r'[?:]', Operator), # quick hack for ternary
+ (r'\bdefined\b', Operator),
+ (r'[;{}(),\[\]]', Punctuation),
+ # FIXME when e is present, no decimal point needed
+ (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]*', Number.Hex),
+ (r'0[0-7]*', Number.Oct),
+ (r'[1-9][0-9]*', Number.Integer),
+ (words((
# Storage qualifiers
'attribute', 'const', 'uniform', 'varying',
'buffer', 'shared', 'in', 'out',
@@ -69,12 +69,12 @@ class GLShaderLexer(RegexLexer):
'break', 'continue', 'do', 'for', 'while', 'switch',
'case', 'default', 'if', 'else', 'subroutine',
'discard', 'return', 'struct'),
- prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
+ prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
# Boolean values
'true', 'false'),
- prefix=r'\b', suffix=r'\b'),
+ prefix=r'\b', suffix=r'\b'),
Keyword.Constant),
(words((
# Miscellaneous types
@@ -142,13 +142,13 @@ class GLShaderLexer(RegexLexer):
Keyword.Reserved),
# All names beginning with "gl_" are reserved.
(r'gl_\w*', Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
- (r'\.', Punctuation),
- (r'\s+', Text),
- ],
- }
-
-
+ (r'[a-zA-Z_]\w*', Name),
+ (r'\.', Punctuation),
+ (r'\s+', Text),
+ ],
+ }
+
+
class HLSLShaderLexer(RegexLexer):
"""
HLSL (Microsoft Direct3D Shader) lexer.
@@ -303,479 +303,479 @@ class HLSLShaderLexer(RegexLexer):
}
-class PostScriptLexer(RegexLexer):
- """
- Lexer for PostScript files.
-
- The PostScript Language Reference published by Adobe at
- <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
- is the authority for this.
-
- .. versionadded:: 1.4
- """
- name = 'PostScript'
- aliases = ['postscript', 'postscr']
- filenames = ['*.ps', '*.eps']
- mimetypes = ['application/postscript']
-
- delimiter = r'()<>\[\]{}/%\s'
- delimiter_end = r'(?=[%s])' % delimiter
-
- valid_name_chars = r'[^%s]' % delimiter
- valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
-
- tokens = {
- 'root': [
- # All comment types
- (r'^%!.+\n', Comment.Preproc),
- (r'%%.*\n', Comment.Special),
- (r'(^%.*\n){2,}', Comment.Multiline),
- (r'%.*\n', Comment.Single),
-
- # String literals are awkward; enter separate state.
- (r'\(', String, 'stringliteral'),
-
- (r'[{}<>\[\]]', Punctuation),
-
- # Numbers
- (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
- # Slight abuse: use Oct to signify any explicit base system
- (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
- r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
- (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
- + delimiter_end, Number.Float),
- (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
-
- # References
- (r'\/%s' % valid_name, Name.Variable),
-
- # Names
- (valid_name, Name.Function), # Anything else is executed
-
- # These keywords taken from
- # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
- # Is there an authoritative list anywhere that doesn't involve
- # trawling documentation?
-
- (r'(false|true)' + delimiter_end, Keyword.Constant),
-
- # Conditionals / flow control
- (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)'
- + delimiter_end, Keyword.Reserved),
-
- (words((
- 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin',
- 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat',
- 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix',
- 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix',
- 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end',
- 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get',
- 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv',
- 'idtransform', 'index', 'invertmatrix', 'itransform', 'length',
- 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto',
- 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print',
- 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat',
- 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run',
- 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray',
- 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix',
- 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt',
- 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror',
- 'transform', 'translate', 'truncate', 'typecheck', 'undefined',
- 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end),
- Name.Builtin),
-
- (r'\s+', Text),
- ],
-
- 'stringliteral': [
- (r'[^()\\]+', String),
- (r'\\', String.Escape, 'escape'),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
-
- 'escape': [
- (r'[0-8]{3}|n|r|t|b|f|\\|\(|\)', String.Escape, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class AsymptoteLexer(RegexLexer):
- """
- For `Asymptote <http://asymptote.sf.net/>`_ source code.
-
- .. versionadded:: 1.2
- """
- name = 'Asymptote'
- aliases = ['asy', 'asymptote']
- filenames = ['*.asy']
- mimetypes = ['text/x-asymptote']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
- tokens = {
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
- ],
- 'statements': [
- # simple string (TeX friendly)
- (r'"(\\\\|\\"|[^"])*"', String),
- # C style string (with character escapes)
- (r"'", String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
- (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
- r'return|break|continue|struct|typedef|new|access|import|'
- r'unravel|from|include|quote|static|public|private|restricted|'
- r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
- # Since an asy-type-name can also be an asy-function-name,
- # the following tests whether the string " [a-zA-Z]" follows
- # the Keyword.Type.
- # Of course this is not perfect!
- (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
- r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
- r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
- r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
- r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
- r'path3|pen|picture|point|position|projection|real|revolution|'
- r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
- r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
- r'transformation|tree|triangle|trilinear|triple|vector|'
- r'vertex|void)(?=\s+[a-zA-Z])', Keyword.Type),
- # Now the asy-type-names which are not asy-function-names
- # (except user-defined ones!).
- # Perhaps useless.
- (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
- r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
- r'picture|position|real|revolution|slice|splitface|ticksgridT|'
- r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
+class PostScriptLexer(RegexLexer):
+ """
+ Lexer for PostScript files.
+
+ The PostScript Language Reference published by Adobe at
+ <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
+ is the authority for this.
+
+ .. versionadded:: 1.4
+ """
+ name = 'PostScript'
+ aliases = ['postscript', 'postscr']
+ filenames = ['*.ps', '*.eps']
+ mimetypes = ['application/postscript']
+
+ delimiter = r'()<>\[\]{}/%\s'
+ delimiter_end = r'(?=[%s])' % delimiter
+
+ valid_name_chars = r'[^%s]' % delimiter
+ valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+
+ tokens = {
+ 'root': [
+ # All comment types
+ (r'^%!.+\n', Comment.Preproc),
+ (r'%%.*\n', Comment.Special),
+ (r'(^%.*\n){2,}', Comment.Multiline),
+ (r'%.*\n', Comment.Single),
+
+ # String literals are awkward; enter separate state.
+ (r'\(', String, 'stringliteral'),
+
+ (r'[{}<>\[\]]', Punctuation),
+
+ # Numbers
+ (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
+ # Slight abuse: use Oct to signify any explicit base system
+ (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
+ r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
+ (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
+ + delimiter_end, Number.Float),
+ (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
+
+ # References
+ (r'\/%s' % valid_name, Name.Variable),
+
+ # Names
+ (valid_name, Name.Function), # Anything else is executed
+
+ # These keywords taken from
+ # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
+ # Is there an authoritative list anywhere that doesn't involve
+ # trawling documentation?
+
+ (r'(false|true)' + delimiter_end, Keyword.Constant),
+
+ # Conditionals / flow control
+ (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)'
+ + delimiter_end, Keyword.Reserved),
+
+ (words((
+ 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin',
+ 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat',
+ 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix',
+ 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix',
+ 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end',
+ 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get',
+ 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv',
+ 'idtransform', 'index', 'invertmatrix', 'itransform', 'length',
+ 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto',
+ 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print',
+ 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat',
+ 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run',
+ 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray',
+ 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix',
+ 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt',
+ 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror',
+ 'transform', 'translate', 'truncate', 'typecheck', 'undefined',
+ 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end),
+ Name.Builtin),
+
+ (r'\s+', Text),
+ ],
+
+ 'stringliteral': [
+ (r'[^()\\]+', String),
+ (r'\\', String.Escape, 'escape'),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+
+ 'escape': [
+ (r'[0-8]{3}|n|r|t|b|f|\\|\(|\)', String.Escape, '#pop'),
+ default('#pop'),
+ ],
+ }
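
The class attributes at the top of PostScriptLexer above are worth unpacking: delimiter_end is a zero-width lookahead built from the delimiter set, so names and numbers must be followed by a delimiter without consuming it, leaving the delimiter for its own rule. The values below are copied from the lexer; the re.match() call is only an illustrative check:

    import re

    delimiter = r'()<>\[\]{}/%\s'
    delimiter_end = r'(?=[%s])' % delimiter           # zero-width lookahead
    valid_name_chars = r'[^%s]' % delimiter
    valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)

    print(re.match(valid_name, 'moveto(3 4)').group(0))   # 'moveto', stops before '('
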
+
+
+class AsymptoteLexer(RegexLexer):
+ """
+ For `Asymptote <http://asymptote.sf.net/>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Asymptote'
+ aliases = ['asy', 'asymptote']
+ filenames = ['*.asy']
+ mimetypes = ['text/x-asymptote']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
+ ],
+ 'statements': [
+ # simple string (TeX friendly)
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # C style string (with character escapes)
+ (r"'", String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
+ (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
+ (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
+ r'return|break|continue|struct|typedef|new|access|import|'
+ r'unravel|from|include|quote|static|public|private|restricted|'
+ r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
+ # Since an asy-type-name can also be an asy-function-name,
+ # the following tests whether the string " [a-zA-Z]" follows
+ # the Keyword.Type.
+ # Of course this is not perfect!
+ (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
+ r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
+ r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
+ r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
+ r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
+ r'path3|pen|picture|point|position|projection|real|revolution|'
+ r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
+ r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
+ r'transformation|tree|triangle|trilinear|triple|vector|'
+ r'vertex|void)(?=\s+[a-zA-Z])', Keyword.Type),
+ # Now the asy-type-names which are not asy-function-names
+ # (except user-defined ones!).
+ # Perhaps useless.
+ (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
+ r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
+ r'picture|position|real|revolution|slice|splitface|ticksgridT|'
+ r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(\{)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'string': [
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'\n', String),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\n', String),
- (r'\\', String), # stray backslash
- ],
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in ASYFUNCNAME:
- token = Name.Function
- elif token is Name and value in ASYVARNAME:
- token = Name.Variable
- yield index, token, value
-
-
-def _shortened(word):
- dpos = word.find('$')
- return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
- for i in range(len(word), dpos, -1))
-
-
-def _shortened_many(*words):
- return '|'.join(map(_shortened, words))
-
-
-class GnuplotLexer(RegexLexer):
- """
- For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
-
- .. versionadded:: 0.11
- """
-
- name = 'Gnuplot'
- aliases = ['gnuplot']
- filenames = ['*.plot', '*.plt']
- mimetypes = ['text/x-gnuplot']
-
- tokens = {
- 'root': [
- include('whitespace'),
- (_shortened('bi$nd'), Keyword, 'bind'),
- (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
- (_shortened('f$it'), Keyword, 'fit'),
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
- (r'else\b', Keyword),
- (_shortened('pa$use'), Keyword, 'pause'),
- (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
- (_shortened('sa$ve'), Keyword, 'save'),
- (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
- (_shortened_many('sh$ow', 'uns$et'),
- Keyword, ('noargs', 'optionarg')),
- (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
- 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
- 'pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'sy$stem', 'up$date'),
- Keyword, 'genericargs'),
- (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'test$'),
- Keyword, 'noargs'),
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(\{)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'\n', String),
+ (r"[^\\'\n]+", String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\n', String),
+ (r'\\', String), # stray backslash
+ ],
+ }
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in ASYFUNCNAME:
+ token = Name.Function
+ elif token is Name and value in ASYVARNAME:
+ token = Name.Variable
+ yield index, token, value
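
The get_tokens_unprocessed() override above is a common Pygments pattern: let the regex rules emit generic Name tokens, then promote them to Name.Function or Name.Variable by membership in large generated word sets, which is much cheaper than folding thousands of alternatives into the regexes. The same filtering pattern in isolation, assuming a small hypothetical word set instead of the Asymptote builtins:

    from pygments.lexer import RegexLexer
    from pygments.token import Name, Text

    MY_FUNCS = {'draw', 'label'}   # hypothetical builtin set

    class PromoteNamesSketchLexer(RegexLexer):
        name = 'PromoteNamesSketch'
        tokens = {
            'root': [
                (r'[a-zA-Z_]\w*', Name),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }

        def get_tokens_unprocessed(self, text):
            for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in MY_FUNCS:
                    token = Name.Function      # promote known identifiers
                yield index, token, value
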
+
+
+def _shortened(word):
+ dpos = word.find('$')
+ return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
+ for i in range(len(word), dpos, -1))
+
+
+def _shortened_many(*words):
+ return '|'.join(map(_shortened, words))
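
The _shortened() and _shortened_many() helpers above encode Gnuplot's abbreviated command names: a marker such as 'bi$nd' expands to a regex that accepts every truncation of the word down to the part before the '$'. A quick check of that expansion (the function body is copied verbatim from the lexer above):

    def _shortened(word):
        dpos = word.find('$')
        return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
                        for i in range(len(word), dpos, -1))

    print(_shortened('bi$nd'))      # bind\b|bin\b|bi\b
    print(_shortened('rep$lot'))    # replot\b|replo\b|repl\b|rep\b
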
+
+
+class GnuplotLexer(RegexLexer):
+ """
+ For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'Gnuplot'
+ aliases = ['gnuplot']
+ filenames = ['*.plot', '*.plt']
+ mimetypes = ['text/x-gnuplot']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (_shortened('bi$nd'), Keyword, 'bind'),
+ (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
+ (_shortened('f$it'), Keyword, 'fit'),
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
+ (r'else\b', Keyword),
+ (_shortened('pa$use'), Keyword, 'pause'),
+ (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
+ (_shortened('sa$ve'), Keyword, 'save'),
+ (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
+ (_shortened_many('sh$ow', 'uns$et'),
+ Keyword, ('noargs', 'optionarg')),
+ (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
+ 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
+ 'pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'sy$stem', 'up$date'),
+ Keyword, 'genericargs'),
+ (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'test$'),
+ Keyword, 'noargs'),
(r'([a-zA-Z_]\w*)(\s*)(=)',
- bygroups(Name.Variable, Text, Operator), 'genericargs'),
+ bygroups(Name.Variable, Text, Operator), 'genericargs'),
(r'([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)',
- bygroups(Name.Function, Text, Operator), 'genericargs'),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
- (r';', Keyword),
- ],
- 'comment': [
- (r'[^\\\n]', Comment),
- (r'\\\n', Comment),
- (r'\\', Comment),
- # don't add the newline to the Comment token
- default('#pop'),
- ],
- 'whitespace': [
- ('#', Comment, 'comment'),
- (r'[ \t\v\f]+', Text),
- ],
- 'noargs': [
- include('whitespace'),
- # semicolon and newline end the argument list
- (r';', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- ],
- 'dqstring': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- (r'\n', String, '#pop'), # newline ends the string too
- ],
- 'sqstring': [
- (r"''", String), # escaped single quote
- (r"'", String, '#pop'),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # normal backslash
- (r'\n', String, '#pop'), # newline ends the string too
- ],
- 'genericargs': [
- include('noargs'),
- (r'"', String, 'dqstring'),
- (r"'", String, 'sqstring'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'-?\d+', Number.Integer),
- ('[,.~!%^&*+=|?:<>/-]', Operator),
+ bygroups(Name.Function, Text, Operator), 'genericargs'),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r';', Keyword),
+ ],
+ 'comment': [
+ (r'[^\\\n]', Comment),
+ (r'\\\n', Comment),
+ (r'\\', Comment),
+ # don't add the newline to the Comment token
+ default('#pop'),
+ ],
+ 'whitespace': [
+ ('#', Comment, 'comment'),
+ (r'[ \t\v\f]+', Text),
+ ],
+ 'noargs': [
+ include('whitespace'),
+ # semicolon and newline end the argument list
+ (r';', Punctuation, '#pop'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'dqstring': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'sqstring': [
+ (r"''", String), # escaped single quote
+ (r"'", String, '#pop'),
+ (r"[^\\'\n]+", String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # normal backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'genericargs': [
+ include('noargs'),
+ (r'"', String, 'dqstring'),
+ (r"'", String, 'sqstring'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'-?\d+', Number.Integer),
+ ('[,.~!%^&*+=|?:<>/-]', Operator),
(r'[{}()\[\]]', Punctuation),
- (r'(eq|ne)\b', Operator.Word),
- (r'([a-zA-Z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-zA-Z_]\w*', Name),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
- (r'\\\n', Text),
- ],
- 'optionarg': [
- include('whitespace'),
- (_shortened_many(
- "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der",
- "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta",
- "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign",
- "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid",
- "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle",
- "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale",
- "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin",
- "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot",
- "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics",
- "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics",
- "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput",
- "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot",
- "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze",
- "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs",
- "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le",
- "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta",
- "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel",
- "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs",
- "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs",
- "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs",
- "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs",
- "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs",
- "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs",
- "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs",
- "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange",
- "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange",
- "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis",
- "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'),
- ],
- 'bind': [
- ('!', Keyword, '#pop'),
- (_shortened('all$windows'), Name.Builtin),
- include('genericargs'),
- ],
- 'quit': [
- (r'gnuplot\b', Keyword),
- include('noargs'),
- ],
- 'fit': [
- (r'via\b', Name.Builtin),
- include('plot'),
- ],
- 'if': [
- (r'\)', Punctuation, '#pop'),
- include('genericargs'),
- ],
- 'pause': [
- (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
- (_shortened('key$press'), Name.Builtin),
- include('genericargs'),
- ],
- 'plot': [
- (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
- 'mat$rix', 's$mooth', 'thru$', 't$itle',
- 'not$itle', 'u$sing', 'w$ith'),
- Name.Builtin),
- include('genericargs'),
- ],
- 'save': [
- (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
- Name.Builtin),
- include('genericargs'),
- ],
- }
-
-
-class PovrayLexer(RegexLexer):
- """
- For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
-
- .. versionadded:: 0.11
- """
- name = 'POVRay'
- aliases = ['pov']
- filenames = ['*.pov', '*.inc']
- mimetypes = ['text/x-povray']
-
- tokens = {
- 'root': [
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'//.*\n', Comment.Single),
- (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
- (words((
- 'break', 'case', 'debug', 'declare', 'default', 'define', 'else',
- 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef',
- 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render',
- 'statistics', 'switch', 'undef', 'version', 'warning', 'while',
- 'write'), prefix=r'#', suffix=r'\b'),
- Comment.Preproc),
- (words((
- 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout',
- 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle',
- 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma',
- 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation',
- 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples',
- 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size',
- 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map',
- 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock',
- 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat',
- 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh',
- 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping',
- 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction',
- 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity',
- 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent',
- 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false',
- 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor',
- 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif',
- 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold',
- 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex',
- 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate',
- 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard',
- 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like',
- 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map',
- 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level',
- 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar',
- 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves',
- 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open',
- 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3',
- 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment',
- 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm',
- 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color',
- 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow',
- 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red',
- 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt',
- 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave',
- 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere',
- 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral',
- 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str',
- 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys',
- 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3',
- 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold',
- 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit',
- 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type',
- 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps',
- 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength',
- 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light',
- 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width',
- 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference',
- 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe',
- 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism',
- 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid',
- 'text', 'torus', 'triangle', 'union'), suffix=r'\b'),
- Name.Builtin),
- # TODO: <=, etc
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=]', Operator),
- (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- (r'[0-9]+\.[0-9]*', Number.Float),
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'\s+', Text),
- ]
- }
+ (r'(eq|ne)\b', Operator.Word),
+ (r'([a-zA-Z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r'\\\n', Text),
+ ],
+ 'optionarg': [
+ include('whitespace'),
+ (_shortened_many(
+ "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der",
+ "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta",
+ "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign",
+ "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid",
+ "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle",
+ "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale",
+ "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin",
+ "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot",
+ "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics",
+ "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics",
+ "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput",
+ "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot",
+ "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze",
+ "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs",
+ "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le",
+ "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta",
+ "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel",
+ "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs",
+ "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs",
+ "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs",
+ "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs",
+ "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs",
+ "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs",
+ "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs",
+ "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange",
+ "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange",
+ "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis",
+ "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'),
+ ],
+ 'bind': [
+ ('!', Keyword, '#pop'),
+ (_shortened('all$windows'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'quit': [
+ (r'gnuplot\b', Keyword),
+ include('noargs'),
+ ],
+ 'fit': [
+ (r'via\b', Name.Builtin),
+ include('plot'),
+ ],
+ 'if': [
+ (r'\)', Punctuation, '#pop'),
+ include('genericargs'),
+ ],
+ 'pause': [
+ (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
+ (_shortened('key$press'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'plot': [
+ (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
+ 'mat$rix', 's$mooth', 'thru$', 't$itle',
+ 'not$itle', 'u$sing', 'w$ith'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ 'save': [
+ (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ }
+
+
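The option and keyword tables above are built with the module's _shortened and _shortened_many helpers, which turn an entry such as "t$erminal" into a pattern accepting every gnuplot-style abbreviation from the part before the "$" up to the full word, longest alternative first. The helper below is only an illustrative re-implementation of that convention, not the one defined earlier in this file; its name and the exact regex it produces are assumptions, shown just to demonstrate how such abbreviation patterns can be generated.

import re

def shortened(word):
    # Illustrative only: expand "t$erminal" into a pattern matching
    # "t", "te", ..., "terminal", each followed by a word boundary.
    head, tail = word.split('$', 1)
    alternatives = [head + tail[:i] for i in range(len(tail), -1, -1)]
    return '|'.join(re.escape(alt) + r'\b' for alt in alternatives)

pattern = re.compile(shortened('t$erminal'))
for candidate in ('t', 'term', 'terminal', 'tx'):
    print(candidate, bool(pattern.match(candidate)))
# 't', 'term' and 'terminal' match; 'tx' does not.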
+class PovrayLexer(RegexLexer):
+ """
+ For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'POVRay'
+ aliases = ['pov']
+ filenames = ['*.pov', '*.inc']
+ mimetypes = ['text/x-povray']
+
+ tokens = {
+ 'root': [
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'//.*\n', Comment.Single),
+ (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
+ (words((
+ 'break', 'case', 'debug', 'declare', 'default', 'define', 'else',
+ 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef',
+ 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render',
+ 'statistics', 'switch', 'undef', 'version', 'warning', 'while',
+ 'write'), prefix=r'#', suffix=r'\b'),
+ Comment.Preproc),
+ (words((
+ 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout',
+ 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle',
+ 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma',
+ 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation',
+ 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples',
+ 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size',
+ 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map',
+ 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock',
+ 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat',
+ 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh',
+ 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping',
+ 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction',
+ 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity',
+ 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent',
+ 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false',
+ 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor',
+ 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif',
+ 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold',
+ 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex',
+ 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate',
+ 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard',
+ 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like',
+ 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map',
+ 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level',
+ 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar',
+ 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves',
+ 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open',
+ 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3',
+ 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment',
+ 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm',
+ 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color',
+ 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow',
+ 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red',
+ 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt',
+ 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave',
+ 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere',
+ 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral',
+ 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str',
+ 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys',
+ 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3',
+ 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold',
+ 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit',
+ 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type',
+ 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps',
+ 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength',
+ 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light',
+ 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width',
+ 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference',
+ 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe',
+ 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism',
+ 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid',
+ 'text', 'torus', 'triangle', 'union'), suffix=r'\b'),
+ Name.Builtin),
+ # TODO: <=, etc
+ (r'[\[\](){}<>;,]', Punctuation),
+ (r'[-+*/=]', Operator),
+ (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'[0-9]+\.[0-9]*', Number.Float),
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\s+', Text),
+ ]
+ }
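Both lexers in this file are plain RegexLexer subclasses, so they are driven through the ordinary Pygments entry points rather than anything specific to this patch. A minimal usage sketch for the POV-Ray lexer above; the sample scene text is made up:

from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter

scene = (
    '#declare Radius = 1.5;\n'
    'sphere { <0, 0, 0>, Radius pigment { color rgb <1, 0, 0> } }\n'
)
lexer = get_lexer_by_name('pov')   # resolves to PovrayLexer via its aliases
print(highlight(scene, lexer, HtmlFormatter()))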
diff --git a/contrib/python/Pygments/py2/pygments/lexers/haskell.py b/contrib/python/Pygments/py2/pygments/lexers/haskell.py
index 0c0917e721..bcd2e370b9 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/haskell.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/haskell.py
@@ -1,77 +1,77 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.haskell
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Haskell and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.haskell
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haskell and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
default, include, inherit
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-from pygments import unistring as uni
-
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+from pygments import unistring as uni
+
__all__ = ['HaskellLexer', 'HspecLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
- 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
- 'LiterateCryptolLexer', 'KokaLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class HaskellLexer(RegexLexer):
- """
- A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 0.8
- """
- name = 'Haskell'
- aliases = ['haskell', 'hs']
- filenames = ['*.hs']
- mimetypes = ['text/x-haskell']
-
- flags = re.MULTILINE | re.UNICODE
-
- reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
+ 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
+ 'LiterateCryptolLexer', 'KokaLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class HaskellLexer(RegexLexer):
+ """
+ A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Haskell'
+ aliases = ['haskell', 'hs']
+ filenames = ['*.hs']
+ mimetypes = ['text/x-haskell']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
'family', 'if', 'in', 'infix[lr]?', 'instance',
- 'let', 'newtype', 'of', 'then', 'type', 'where', '_')
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- # (r'--\s*|.*$', Comment.Doc),
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r"'[^\\]'", String.Char), # this has to come before the TH quote
- (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
- (r"'?[_" + uni.Ll + r"][\w']*", Name),
- (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ 'let', 'newtype', 'of', 'then', 'type', 'where', '_')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r"'[^\\]'", String.Char), # this has to come before the TH quote
+ (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
+ (r"'?[_" + uni.Ll + r"][\w']*", Name),
+ (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
(r"(')\([^)]*\)", Keyword.Type), # ..
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*', Number.Float),
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
r'(_*[pP][+-]?\d(_*\d)*)?', Number.Float),
@@ -81,83 +81,83 @@ class HaskellLexer(RegexLexer):
(r'0[oO]_*[0-7](_*[0-7])*', Number.Oct),
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*', Number.Hex),
(r'\d(_*\d)*', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
- (r'\s+', Text),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
- bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
- # import X hiding (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
- bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
- # import X (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- # import X
- (r'[\w.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
- (r'\s+', Text),
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Text),
- (r'[' + uni.Lu + r']\w*', Keyword.Type),
- (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'\{-', Comment.Multiline, '#push'),
- (r'-\}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'\s+\\', String.Escape, '#pop'),
- ],
- }
-
-
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
+ bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
+ # import X hiding (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
+ # import X (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ # import X
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[' + uni.Lu + r']\w*', Keyword.Type),
+ (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'\{-', Comment.Multiline, '#push'),
+ (r'-\}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop'),
+ ],
+ }
+
+
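The root state above dispatches into the 'import', 'module', 'comment', 'character' and 'string' sub-states as it scans a module. A quick way to see which token types the HaskellLexer assigns is to iterate get_tokens() directly; a minimal sketch with a made-up module:

from pygments.lexers.haskell import HaskellLexer

source = 'module Main where\nmain :: IO ()\nmain = putStrLn "hello"\n'
for token_type, value in HaskellLexer().get_tokens(source):
    if value.strip():                  # skip pure whitespace tokens
        print(token_type, repr(value))
# Expect Keyword.Reserved for 'module' and 'where', Name.Namespace for
# 'Main', Keyword.Type for 'IO', and String tokens for the literal.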
class HspecLexer(HaskellLexer):
"""
A Haskell lexer with support for Hspec constructs.
@@ -180,690 +180,690 @@ class HspecLexer(HaskellLexer):
}
-class IdrisLexer(RegexLexer):
- """
- A lexer for the dependently typed programming language Idris.
-
- Based on the Haskell and Agda Lexer.
-
- .. versionadded:: 2.0
- """
- name = 'Idris'
- aliases = ['idris', 'idr']
- filenames = ['*.idr']
- mimetypes = ['text/x-idris']
-
- reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
- 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
- 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
- 'total', 'partial',
- 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
- 'pattern', 'term', 'syntax', 'prefix',
- 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
- 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
-
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
- 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
-
- tokens = {
- 'root': [
- # Comments
- (r'^(\s*)(%%%s)' % '|'.join(directives),
- bygroups(Text, Keyword.Reserved)),
- (r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Text, Comment.Single)),
- (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)),
- (r'(\s*)(\{-)', bygroups(Text, Comment.Multiline), 'comment'),
- # Declaration
- (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
- bygroups(Text, Name.Function, Text, Operator.Word, Text)),
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- (r'[a-z][\w\']*', Text),
- # Special Symbols
- (r'(<-|::|->|=>|=)', Operator.Word), # specials
- (r'([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s(){}]+', Text),
- (r'\s+?', Text), # Whitespace
- ],
- 'module': [
- (r'\s+', Text),
- (r'([A-Z][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Text),
- (r'[A-Z]\w*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- (r'--.*$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'\{-', Comment.Multiline, '#push'),
- (r'-\}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']", String.Char),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'\s+\\', String.Escape, '#pop')
- ],
- }
-
-
-class AgdaLexer(RegexLexer):
- """
- For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
- dependently typed functional programming language and proof assistant.
-
- .. versionadded:: 2.0
- """
-
- name = 'Agda'
- aliases = ['agda']
- filenames = ['*.agda']
- mimetypes = ['text/x-agda']
-
- reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
- 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
- 'infixl', 'infixr', 'instance', 'let', 'mutual', 'open',
- 'pattern', 'postulate', 'primitive', 'private',
- 'quote', 'quoteGoal', 'quoteTerm',
- 'record', 'renaming', 'rewrite', 'syntax', 'tactic',
- 'unquote', 'unquoteDecl', 'using', 'where', 'with']
-
- tokens = {
- 'root': [
- # Declaration
- (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
- bygroups(Text, Name.Function, Text, Operator.Word, Text)),
- # Comments
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- # Holes
- (r'\{!', Comment.Directive, 'hole'),
- # Lexemes:
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
+class IdrisLexer(RegexLexer):
+ """
+ A lexer for the dependently typed programming language Idris.
+
+ Based on the Haskell and Agda Lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Idris'
+ aliases = ['idris', 'idr']
+ filenames = ['*.idr']
+ mimetypes = ['text/x-idris']
+
+ reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
+ 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
+ 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
+ 'total', 'partial',
+ 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
+ 'pattern', 'term', 'syntax', 'prefix',
+ 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
+ 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
+
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
+ 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
+
+ tokens = {
+ 'root': [
+ # Comments
+ (r'^(\s*)(%%%s)' % '|'.join(directives),
+ bygroups(Text, Keyword.Reserved)),
+ (r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Text, Comment.Single)),
+ (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)),
+ (r'(\s*)(\{-)', bygroups(Text, Comment.Multiline), 'comment'),
+ # Declaration
+ (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
+ bygroups(Text, Name.Function, Text, Operator.Word, Text)),
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ (r'[a-z][\w\']*', Text),
+ # Special Symbols
+ (r'(<-|::|->|=>|=)', Operator.Word), # specials
+ (r'([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s(){}]+', Text),
+ (r'\s+?', Text), # Whitespace
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([A-Z][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[A-Z]\w*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ (r'--.*$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'\{-', Comment.Multiline, '#push'),
+ (r'-\}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']", String.Char),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop')
+ ],
+ }
+
+
+class AgdaLexer(RegexLexer):
+ """
+ For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
+ dependently typed functional programming language and proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Agda'
+ aliases = ['agda']
+ filenames = ['*.agda']
+ mimetypes = ['text/x-agda']
+
+ reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
+ 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
+ 'infixl', 'infixr', 'instance', 'let', 'mutual', 'open',
+ 'pattern', 'postulate', 'primitive', 'private',
+ 'quote', 'quoteGoal', 'quoteTerm',
+ 'record', 'renaming', 'rewrite', 'syntax', 'tactic',
+ 'unquote', 'unquoteDecl', 'using', 'where', 'with']
+
+ tokens = {
+ 'root': [
+ # Declaration
+ (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
+ bygroups(Text, Name.Function, Text, Operator.Word, Text)),
+ # Comments
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ # Holes
+ (r'\{!', Comment.Directive, 'hole'),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
(u'\\b(Set|Prop)[\u2080-\u2089]*\\b', Keyword.Type),
- # Special Symbols
- (r'(\(|\)|\{|\})', Operator),
+ # Special Symbols
+ (r'(\(|\)|\{|\})', Operator),
(u'(\\.{1,3}|\\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s(){}]+', Text),
- (r'\s+?', Text), # Whitespace
- ],
- 'hole': [
- # Holes
- (r'[^!{}]+', Comment.Directive),
- (r'\{!', Comment.Directive, '#push'),
- (r'!\}', Comment.Directive, '#pop'),
- (r'[!{}]', Comment.Directive),
- ],
- 'module': [
- (r'\{-', Comment.Multiline, 'comment'),
- (r'[a-zA-Z][\w.]*', Name, '#pop'),
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s(){}]+', Text),
+ (r'\s+?', Text), # Whitespace
+ ],
+ 'hole': [
+ # Holes
+ (r'[^!{}]+', Comment.Directive),
+ (r'\{!', Comment.Directive, '#push'),
+ (r'!\}', Comment.Directive, '#pop'),
+ (r'[!{}]', Comment.Directive),
+ ],
+ 'module': [
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r'[a-zA-Z][\w.]*', Name, '#pop'),
(r'[\W0-9_]+', Text)
- ],
- 'comment': HaskellLexer.tokens['comment'],
- 'character': HaskellLexer.tokens['character'],
- 'string': HaskellLexer.tokens['string'],
- 'escape': HaskellLexer.tokens['escape']
- }
-
-
-class CryptolLexer(RegexLexer):
- """
- FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 2.0
- """
- name = 'Cryptol'
- aliases = ['cryptol', 'cry']
- filenames = ['*.cry']
- mimetypes = ['text/x-cryptol']
-
- reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
- 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
- 'max', 'min', 'module', 'newtype', 'pragma', 'property',
- 'then', 'type', 'where', 'width')
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- # (r'--\s*|.*$', Comment.Doc),
- (r'//.*$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'^[_a-z][\w\']*', Name.Function),
- (r"'?[_a-z][\w']*", Name),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
- (r'\s+', Text),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([A-Z][\w.]*)(\s+)(as)(\s+)([A-Z][\w.]*)',
- bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
- # import X hiding (functions)
- (r'([A-Z][\w.]*)(\s+)(hiding)(\s+)(\()',
- bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
- # import X (functions)
- (r'([A-Z][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- # import X
- (r'[\w.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
- (r'\s+', Text),
- (r'([A-Z][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Text),
- (r'[A-Z]\w*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- # TODO: these don't match the comments in docs, remove.
+ ],
+ 'comment': HaskellLexer.tokens['comment'],
+ 'character': HaskellLexer.tokens['character'],
+ 'string': HaskellLexer.tokens['string'],
+ 'escape': HaskellLexer.tokens['escape']
+ }
+
+
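AgdaLexer shares the HaskellLexer's 'comment', 'character', 'string' and 'escape' states by referencing them directly in its tokens dict, and adds a 'hole' state so that Agda goals written as {! ... !} come out as Comment.Directive. A small check of that hole behavior, with a made-up input:

from pygments.lexers.haskell import AgdaLexer
from pygments.token import Comment

source = 'f : Nat -> Nat\nf x = {! x !}\n'
hole_tokens = [value for token_type, value in AgdaLexer().get_tokens(source)
               if token_type is Comment.Directive]
print(hole_tokens)   # the pieces of the {! x !} goal, lexed in the 'hole' state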
+class CryptolLexer(RegexLexer):
+ """
+ FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cryptol'
+ aliases = ['cryptol', 'cry']
+ filenames = ['*.cry']
+ mimetypes = ['text/x-cryptol']
+
+ reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
+ 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
+ 'max', 'min', 'module', 'newtype', 'pragma', 'property',
+ 'then', 'type', 'where', 'width')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'//.*$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'^[_a-z][\w\']*', Name.Function),
+ (r"'?[_a-z][\w']*", Name),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([A-Z][\w.]*)(\s+)(as)(\s+)([A-Z][\w.]*)',
+ bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
+ # import X hiding (functions)
+ (r'([A-Z][\w.]*)(\s+)(hiding)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
+ # import X (functions)
+ (r'([A-Z][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ # import X
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([A-Z][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[A-Z]\w*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ # TODO: these don't match the comments in docs, remove.
# (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
# (r'{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- 'comment': [
- # Multiline Comments
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'\s+\\', String.Escape, '#pop'),
- ],
- }
-
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop'),
+ ],
+ }
+
EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
'trace'}
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Name.Builtin, value
- else:
- yield index, token, value
-
-
-class LiterateLexer(Lexer):
- """
- Base class for lexers of literate file formats based on LaTeX or Bird-style
- (prefixing each code line with ">").
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
- """
-
- bird_re = re.compile(r'(>[ \t]*)(.*\n)')
-
- def __init__(self, baselexer, **options):
- self.baselexer = baselexer
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- style = self.options.get('litstyle')
- if style is None:
- style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
-
- code = ''
- insertions = []
- if style == 'bird':
- # bird-style
- for match in line_re.finditer(text):
- line = match.group()
- m = self.bird_re.match(line)
- if m:
- insertions.append((len(code),
- [(0, Comment.Special, m.group(1))]))
- code += m.group(2)
- else:
- insertions.append((len(code), [(0, Text, line)]))
- else:
- # latex-style
- from pygments.lexers.markup import TexLexer
- lxlexer = TexLexer(**self.options)
- codelines = 0
- latex = ''
- for match in line_re.finditer(text):
- line = match.group()
- if codelines:
- if line.lstrip().startswith('\\end{code}'):
- codelines = 0
- latex += line
- else:
- code += line
- elif line.lstrip().startswith('\\begin{code}'):
- codelines = 1
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- latex = ''
- else:
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
- yield item
-
-
-class LiterateHaskellLexer(LiterateLexer):
- """
- For Literate Haskell (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 0.9
- """
- name = 'Literate Haskell'
- aliases = ['lhs', 'literate-haskell', 'lhaskell']
- filenames = ['*.lhs']
- mimetypes = ['text/x-literate-haskell']
-
- def __init__(self, **options):
- hslexer = HaskellLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateIdrisLexer(LiterateLexer):
- """
- For Literate Idris (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Idris'
- aliases = ['lidr', 'literate-idris', 'lidris']
- filenames = ['*.lidr']
- mimetypes = ['text/x-literate-idris']
-
- def __init__(self, **options):
- hslexer = IdrisLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateAgdaLexer(LiterateLexer):
- """
- For Literate Agda source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Agda'
- aliases = ['lagda', 'literate-agda']
- filenames = ['*.lagda']
- mimetypes = ['text/x-literate-agda']
-
- def __init__(self, **options):
- agdalexer = AgdaLexer(**options)
- LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
-
-
-class LiterateCryptolLexer(LiterateLexer):
- """
- For Literate Cryptol (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Cryptol'
- aliases = ['lcry', 'literate-cryptol', 'lcryptol']
- filenames = ['*.lcry']
- mimetypes = ['text/x-literate-cryptol']
-
- def __init__(self, **options):
- crylexer = CryptolLexer(**options)
- LiterateLexer.__init__(self, crylexer, **options)
-
-
-class KokaLexer(RegexLexer):
- """
- Lexer for the `Koka <http://koka.codeplex.com>`_
- language.
-
- .. versionadded:: 1.6
- """
-
- name = 'Koka'
- aliases = ['koka']
- filenames = ['*.kk', '*.kki']
- mimetypes = ['text/x-koka']
-
- keywords = [
- 'infix', 'infixr', 'infixl',
- 'type', 'cotype', 'rectype', 'alias',
- 'struct', 'con',
- 'fun', 'function', 'val', 'var',
- 'external',
- 'if', 'then', 'else', 'elif', 'return', 'match',
- 'private', 'public', 'private',
- 'module', 'import', 'as',
- 'include', 'inline',
- 'rec',
- 'try', 'yield', 'enum',
- 'interface', 'instance',
- ]
-
- # keywords that are followed by a type
- typeStartKeywords = [
- 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
- ]
-
- # keywords valid in a type
- typekeywords = [
- 'forall', 'exists', 'some', 'with',
- ]
-
- # builtin names and special names
- builtin = [
- 'for', 'while', 'repeat',
- 'foreach', 'foreach-indexed',
- 'error', 'catch', 'finally',
- 'cs', 'js', 'file', 'ref', 'assigned',
- ]
-
- # symbols that can be in an operator
- symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
-
- # symbol boundary: an operator keyword should not be followed by any of these
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Name.Builtin, value
+ else:
+ yield index, token, value
+
+
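The get_tokens_unprocessed override above re-labels any plain Name token whose text appears in EXTRA_KEYWORDS as Name.Builtin, which is how Cryptol primitives such as reverse or split get highlighted without separate regex rules. A minimal check of that promotion, with a made-up expression:

from pygments.lexers.haskell import CryptolLexer
from pygments.token import Name

source = 'ys = reverse xs\n'
builtins = [value for _, token_type, value
            in CryptolLexer().get_tokens_unprocessed(source)
            if token_type is Name.Builtin]
print(builtins)   # ['reverse'] -- promoted from Name by the override above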
+class LiterateLexer(Lexer):
+ """
+ Base class for lexers of literate file formats based on LaTeX or Bird-style
+ (prefixing each code line with ">").
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+ """
+
+ bird_re = re.compile(r'(>[ \t]*)(.*\n)')
+
+ def __init__(self, baselexer, **options):
+ self.baselexer = baselexer
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ style = self.options.get('litstyle')
+ if style is None:
+ style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
+
+ code = ''
+ insertions = []
+ if style == 'bird':
+ # bird-style
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self.bird_re.match(line)
+ if m:
+ insertions.append((len(code),
+ [(0, Comment.Special, m.group(1))]))
+ code += m.group(2)
+ else:
+ insertions.append((len(code), [(0, Text, line)]))
+ else:
+ # latex-style
+ from pygments.lexers.markup import TexLexer
+ lxlexer = TexLexer(**self.options)
+ codelines = 0
+ latex = ''
+ for match in line_re.finditer(text):
+ line = match.group()
+ if codelines:
+ if line.lstrip().startswith('\\end{code}'):
+ codelines = 0
+ latex += line
+ else:
+ code += line
+ elif line.lstrip().startswith('\\begin{code}'):
+ codelines = 1
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
+ latex = ''
+ else:
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
+ for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
+ yield item
+
+
+class LiterateHaskellLexer(LiterateLexer):
+ """
+ For Literate Haskell (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Literate Haskell'
+ aliases = ['lhs', 'literate-haskell', 'lhaskell']
+ filenames = ['*.lhs']
+ mimetypes = ['text/x-literate-haskell']
+
+ def __init__(self, **options):
+ hslexer = HaskellLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
+
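The literate wrappers delegate the splitting to the LiterateLexer base class above: in Bird style the '>' prefixes are emitted as Comment.Special and the rest of each such line is handed to the wrapped Haskell lexer, while everything else stays markup. A minimal Bird-style sketch with made-up text:

from pygments.lexers.haskell import LiterateHaskellLexer

document = (
    'This paragraph is ordinary prose.\n'
    '\n'
    '> main :: IO ()\n'
    '> main = putStrLn "hi"\n'
)
for token_type, value in LiterateHaskellLexer().get_tokens(document):
    if value.strip():
        print(token_type, repr(value))
# The "> " prefixes come out as Comment.Special; the code after them is
# tokenized by the wrapped HaskellLexer. Passing litstyle='latex' forces
# LaTeX-style \begin{code} blocks instead of the autodetection described above.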
+class LiterateIdrisLexer(LiterateLexer):
+ """
+ For Literate Idris (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Idris'
+ aliases = ['lidr', 'literate-idris', 'lidris']
+ filenames = ['*.lidr']
+ mimetypes = ['text/x-literate-idris']
+
+ def __init__(self, **options):
+ hslexer = IdrisLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
+
+class LiterateAgdaLexer(LiterateLexer):
+ """
+ For Literate Agda source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Agda'
+ aliases = ['lagda', 'literate-agda']
+ filenames = ['*.lagda']
+ mimetypes = ['text/x-literate-agda']
+
+ def __init__(self, **options):
+ agdalexer = AgdaLexer(**options)
+ LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
+
+
+class LiterateCryptolLexer(LiterateLexer):
+ """
+ For Literate Cryptol (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Cryptol'
+ aliases = ['lcry', 'literate-cryptol', 'lcryptol']
+ filenames = ['*.lcry']
+ mimetypes = ['text/x-literate-cryptol']
+
+ def __init__(self, **options):
+ crylexer = CryptolLexer(**options)
+ LiterateLexer.__init__(self, crylexer, **options)
+
+
+class KokaLexer(RegexLexer):
+ """
+ Lexer for the `Koka <http://koka.codeplex.com>`_
+ language.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Koka'
+ aliases = ['koka']
+ filenames = ['*.kk', '*.kki']
+ mimetypes = ['text/x-koka']
+
+ keywords = [
+ 'infix', 'infixr', 'infixl',
+ 'type', 'cotype', 'rectype', 'alias',
+ 'struct', 'con',
+ 'fun', 'function', 'val', 'var',
+ 'external',
+ 'if', 'then', 'else', 'elif', 'return', 'match',
+ 'private', 'public', 'private',
+ 'module', 'import', 'as',
+ 'include', 'inline',
+ 'rec',
+ 'try', 'yield', 'enum',
+ 'interface', 'instance',
+ ]
+
+ # keywords that are followed by a type
+ typeStartKeywords = [
+ 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
+ ]
+
+ # keywords valid in a type
+ typekeywords = [
+ 'forall', 'exists', 'some', 'with',
+ ]
+
+ # builtin names and special names
+ builtin = [
+ 'for', 'while', 'repeat',
+ 'foreach', 'foreach-indexed',
+ 'error', 'catch', 'finally',
+ 'cs', 'js', 'file', 'ref', 'assigned',
+ ]
+
+ # symbols that can be in an operator
+ symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
+
+ # symbol boundary: an operator keyword should not be followed by any of these
sboundary = '(?!' + symbols + ')'
-
- # name boundary: a keyword should not be followed by any of these
+
+ # name boundary: a keyword should not be followed by any of these
boundary = r'(?![\w/])'
-
- # koka token abstractions
- tokenType = Name.Attribute
- tokenTypeDef = Name.Class
- tokenConstructor = Generic.Emph
-
- # main lexer
- tokens = {
- 'root': [
- include('whitespace'),
-
- # go into type mode
- (r'::?' + sboundary, tokenType, 'type'),
- (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'alias-type'),
- (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'struct-type'),
- ((r'(%s)' % '|'.join(typeStartKeywords)) +
- r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'type'),
-
- # special sequences of tokens (we use ?: for non-capturing group as
- # required by 'bygroups')
- (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
- bygroups(Keyword, Text, Keyword, Name.Namespace)),
- (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
- r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
- r'((?:[a-z]\w*/)*[a-z]\w*))?',
- bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
- Keyword, Name.Namespace)),
-
- (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
- r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Text, Name.Function)),
- (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
- r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Text, Keyword, Name.Function)),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
- (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
- (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
- (r'::?|:=|\->|[=.]' + sboundary, Keyword),
-
- # names
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenConstructor)),
- (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
- (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
- bygroups(Name.Namespace, Name)),
- (r'_\w*', Name.Variable),
-
- # literal string
- (r'@"', String.Double, 'litstring'),
-
- # operators
- (symbols + "|/(?![*/])", Operator),
- (r'`', Operator),
- (r'[{}()\[\];,]', Punctuation),
-
- # literals. No check for literal characters with len > 1
- (r'[0-9]+\.[0-9]+([eE][\-+]?[0-9]+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- (r"'", String.Char, 'char'),
- (r'"', String.Double, 'string'),
- ],
-
- # type started by alias
- 'alias-type': [
- (r'=', Keyword),
- include('type')
- ],
-
- # type started by struct
- 'struct-type': [
- (r'(?=\((?!,*\)))', Punctuation, '#pop'),
- include('type')
- ],
-
- # type started by colon
- 'type': [
- (r'[(\[<]', tokenType, 'type-nested'),
- include('type-content')
- ],
-
- # type nested in brackets: can contain parameters, comma etc.
- 'type-nested': [
- (r'[)\]>]', tokenType, '#pop'),
- (r'[(\[<]', tokenType, 'type-nested'),
- (r',', tokenType),
- (r'([a-z]\w*)(\s*)(:)(?!:)',
- bygroups(Name, Text, tokenType)), # parameter name
- include('type-content')
- ],
-
- # shared contents of a type
- 'type-content': [
- include('whitespace'),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
- (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
- Keyword, '#pop'), # need to match because names overlap...
-
- # kinds
- (r'[EPHVX]' + boundary, tokenType),
-
- # type names
- (r'[a-z][0-9]*(?![\w/])', tokenType),
- (r'_\w*', tokenType.Variable), # Generic.Emph
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenType)),
- (r'((?:[a-z]\w*/)*)([a-z]\w+)',
- bygroups(Name.Namespace, tokenType)),
-
- # type keyword operators
- (r'::|->|[.:|]', tokenType),
-
- # catchall
- default('#pop')
- ],
-
- # comments and literals
- 'whitespace': [
- (r'\n\s*#.*$', Comment.Preproc),
- (r'\s+', Text),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'//.*$', Comment.Single)
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'litstring': [
- (r'[^"]+', String.Double),
- (r'""', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'string': [
- (r'[^\\"\n]+', String.Double),
- include('escape-sequence'),
- (r'["\n]', String.Double, '#pop'),
- ],
- 'char': [
- (r'[^\\\'\n]+', String.Char),
- include('escape-sequence'),
- (r'[\'\n]', String.Char, '#pop'),
- ],
- 'escape-sequence': [
- (r'\\[nrt\\"\']', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- # Yes, \U literals are 6 hex digits.
- (r'\\U[0-9a-fA-F]{6}', String.Escape)
- ]
- }
+
+ # koka token abstractions
+ tokenType = Name.Attribute
+ tokenTypeDef = Name.Class
+ tokenConstructor = Generic.Emph
+
+ # main lexer
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # go into type mode
+ (r'::?' + sboundary, tokenType, 'type'),
+ (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'alias-type'),
+ (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'struct-type'),
+ ((r'(%s)' % '|'.join(typeStartKeywords)) +
+ r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'type'),
+
+ # special sequences of tokens (we use ?: for non-capturing groups as
+ # required by 'bygroups')
+ (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
+ bygroups(Keyword, Text, Keyword, Name.Namespace)),
+ (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
+ r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
+ r'((?:[a-z]\w*/)*[a-z]\w*))?',
+ bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
+ Keyword, Name.Namespace)),
+
+ (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
+ r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
+ r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Keyword, Name.Function)),
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
+ (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
+ (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
+ (r'::?|:=|\->|[=.]' + sboundary, Keyword),
+
+ # names
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenConstructor)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
+ (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
+ bygroups(Name.Namespace, Name)),
+ (r'_\w*', Name.Variable),
+
+ # literal string
+ (r'@"', String.Double, 'litstring'),
+
+ # operators
+ (symbols + "|/(?![*/])", Operator),
+ (r'`', Operator),
+ (r'[{}()\[\];,]', Punctuation),
+
+ # literals. No check for literal characters with len > 1
+ (r'[0-9]+\.[0-9]+([eE][\-+]?[0-9]+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ (r"'", String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ ],
+
+ # type started by alias
+ 'alias-type': [
+ (r'=', Keyword),
+ include('type')
+ ],
+
+ # type started by struct
+ 'struct-type': [
+ (r'(?=\((?!,*\)))', Punctuation, '#pop'),
+ include('type')
+ ],
+
+ # type started by colon
+ 'type': [
+ (r'[(\[<]', tokenType, 'type-nested'),
+ include('type-content')
+ ],
+
+ # type nested in brackets: can contain parameters, comma etc.
+ 'type-nested': [
+ (r'[)\]>]', tokenType, '#pop'),
+ (r'[(\[<]', tokenType, 'type-nested'),
+ (r',', tokenType),
+ (r'([a-z]\w*)(\s*)(:)(?!:)',
+ bygroups(Name, Text, tokenType)), # parameter name
+ include('type-content')
+ ],
+
+ # shared contents of a type
+ 'type-content': [
+ include('whitespace'),
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
+ (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
+ Keyword, '#pop'), # need to match because names overlap...
+
+ # kinds
+ (r'[EPHVX]' + boundary, tokenType),
+
+ # type names
+ (r'[a-z][0-9]*(?![\w/])', tokenType),
+ (r'_\w*', tokenType.Variable), # Generic.Emph
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenType)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w+)',
+ bygroups(Name.Namespace, tokenType)),
+
+ # type keyword operators
+ (r'::|->|[.:|]', tokenType),
+
+ # catchall
+ default('#pop')
+ ],
+
+ # comments and literals
+ 'whitespace': [
+ (r'\n\s*#.*$', Comment.Preproc),
+ (r'\s+', Text),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'//.*$', Comment.Single)
+ ],
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'litstring': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"\n]+', String.Double),
+ include('escape-sequence'),
+ (r'["\n]', String.Double, '#pop'),
+ ],
+ 'char': [
+ (r'[^\\\'\n]+', String.Char),
+ include('escape-sequence'),
+ (r'[\'\n]', String.Char, '#pop'),
+ ],
+ 'escape-sequence': [
+ (r'\\[nrt\\"\']', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ # Yes, \U literals are 6 hex digits.
+ (r'\\U[0-9a-fA-F]{6}', String.Escape)
+ ]
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/haxe.py b/contrib/python/Pygments/py2/pygments/lexers/haxe.py
index b3575080d3..b8587e723a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/haxe.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/haxe.py
@@ -1,936 +1,936 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.haxe
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Haxe and related stuff.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.haxe
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haxe and related stuff.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
- default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-__all__ = ['HaxeLexer', 'HxmlLexer']
-
-
-class HaxeLexer(ExtendedRegexLexer):
- """
- For Haxe source code (http://haxe.org/).
-
- .. versionadded:: 1.3
- """
-
- name = 'Haxe'
- aliases = ['hx', 'haxe', 'hxsl']
- filenames = ['*.hx', '*.hxsl']
- mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
-
- # keywords extracted from lexer.mll in the haxe compiler source
- keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
- r'break|return|continue|extends|implements|import|'
- r'switch|case|default|public|private|try|untyped|'
- r'catch|new|this|throw|extern|enum|in|interface|'
- r'cast|override|dynamic|typedef|package|'
- r'inline|using|null|true|false|abstract)\b')
-
- # idtype in lexer.mll
- typeid = r'_*[A-Z]\w*'
-
- # combined ident and dollar and idtype
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+__all__ = ['HaxeLexer', 'HxmlLexer']
+
+
+class HaxeLexer(ExtendedRegexLexer):
+ """
+ For Haxe source code (http://haxe.org/).
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haxe'
+ aliases = ['hx', 'haxe', 'hxsl']
+ filenames = ['*.hx', '*.hxsl']
+ mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
+
+ # keywords extracted from lexer.mll in the haxe compiler source
+ keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
+ r'break|return|continue|extends|implements|import|'
+ r'switch|case|default|public|private|try|untyped|'
+ r'catch|new|this|throw|extern|enum|in|interface|'
+ r'cast|override|dynamic|typedef|package|'
+ r'inline|using|null|true|false|abstract)\b')
+
+ # idtype in lexer.mll
+ typeid = r'_*[A-Z]\w*'
+
+ # combined ident and dollar and idtype
ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + r'|_+|\$\w+)'
-
- binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
- r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
- r'/|\-|=>|=)')
-
- # ident except keywords
- ident_no_keyword = r'(?!' + keyword + ')' + ident
-
- flags = re.DOTALL | re.MULTILINE
-
- preproc_stack = []
-
- def preproc_callback(self, match, ctx):
- proc = match.group(2)
-
- if proc == 'if':
- # store the current stack
- self.preproc_stack.append(ctx.stack[:])
- elif proc in ['else', 'elseif']:
- # restore the stack back to right before #if
- if self.preproc_stack:
- ctx.stack = self.preproc_stack[-1][:]
- elif proc == 'end':
- # remove the saved stack of previous #if
- if self.preproc_stack:
- self.preproc_stack.pop()
-
- # #if and #elseif should follow by an expr
- if proc in ['if', 'elseif']:
- ctx.stack.append('preproc-expr')
-
- # #error can be optionally follow by the error msg
- if proc in ['error']:
- ctx.stack.append('preproc-error')
-
+
+ binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
+ r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
+ r'/|\-|=>|=)')
+
+ # ident except keywords
+ ident_no_keyword = r'(?!' + keyword + ')' + ident
+
+ flags = re.DOTALL | re.MULTILINE
+
+ preproc_stack = []
+
+ def preproc_callback(self, match, ctx):
+ proc = match.group(2)
+
+ if proc == 'if':
+ # store the current stack
+ self.preproc_stack.append(ctx.stack[:])
+ elif proc in ['else', 'elseif']:
+ # restore the stack back to right before #if
+ if self.preproc_stack:
+ ctx.stack = self.preproc_stack[-1][:]
+ elif proc == 'end':
+ # remove the saved stack of previous #if
+ if self.preproc_stack:
+ self.preproc_stack.pop()
+
+ # #if and #elseif should be followed by an expr
+ if proc in ['if', 'elseif']:
+ ctx.stack.append('preproc-expr')
+
+ # #error can optionally be followed by the error msg
+ if proc in ['error']:
+ ctx.stack.append('preproc-error')
+
yield match.start(), Comment.Preproc, u'#' + proc
- ctx.pos = match.end()
-
- tokens = {
- 'root': [
- include('spaces'),
- include('meta'),
- (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
- (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
- (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
- (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
- (r'(?:enum)\b', Keyword.Declaration, 'enum'),
- (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
-
- # top-level expression
- # although it is not supported in haxe, but it is common to write
- # expression in web pages the positive lookahead here is to prevent
- # an infinite loop at the EOF
- (r'(?=.)', Text, 'expr-statement'),
- ],
-
- # space/tab/comment/preproc
- 'spaces': [
- (r'\s+', Text),
- (r'//[^\n\r]*', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
- ],
-
- 'string-single-interpol': [
- (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
- (r'\$\$', String.Escape),
- (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
- include('string-single'),
- ],
-
- 'string-single': [
- (r"'", String.Single, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Single),
- ],
-
- 'string-double': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Double),
- ],
-
- 'string-interpol-close': [
- (r'\$'+ident, String.Interpol),
- (r'\}', String.Interpol, '#pop'),
- ],
-
- 'package': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'import': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\*', Keyword), # wildcard import
- (r'\.', Punctuation, 'import-ident'),
- (r'in', Keyword.Namespace, 'ident'),
- default('#pop'),
- ],
-
- 'import-ident': [
- include('spaces'),
- (r'\*', Keyword, '#pop'), # wildcard import
- (ident, Name.Namespace, '#pop'),
- ],
-
- 'using': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'preproc-error': [
- (r'\s+', Comment.Preproc),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- default('#pop'),
- ],
-
- 'preproc-expr': [
- (r'\s+', Comment.Preproc),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, '#pop'),
-
- # Float
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]+', Number.Float),
+ ctx.pos = match.end()
+
+ tokens = {
+ 'root': [
+ include('spaces'),
+ include('meta'),
+ (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
+ (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
+ (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
+ (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
+ (r'(?:enum)\b', Keyword.Declaration, 'enum'),
+ (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
+
+ # top-level expression
+ # although it is not supported in Haxe, it is common to write
+ # expressions in web pages; the positive lookahead here is to
+ # prevent an infinite loop at the EOF
+ (r'(?=.)', Text, 'expr-statement'),
+ ],
+
+ # space/tab/comment/preproc
+ 'spaces': [
+ (r'\s+', Text),
+ (r'//[^\n\r]*', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
+ ],
+
+ 'string-single-interpol': [
+ (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
+ (r'\$\$', String.Escape),
+ (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
+ include('string-single'),
+ ],
+
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Single),
+ ],
+
+ 'string-double': [
+ (r'"', String.Double, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Double),
+ ],
+
+ 'string-interpol-close': [
+ (r'\$'+ident, String.Interpol),
+ (r'\}', String.Interpol, '#pop'),
+ ],
+
+ 'package': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'import': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\*', Keyword), # wildcard import
+ (r'\.', Punctuation, 'import-ident'),
+ (r'in', Keyword.Namespace, 'ident'),
+ default('#pop'),
+ ],
+
+ 'import-ident': [
+ include('spaces'),
+ (r'\*', Keyword, '#pop'), # wildcard import
+ (ident, Name.Namespace, '#pop'),
+ ],
+
+ 'using': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'preproc-error': [
+ (r'\s+', Comment.Preproc),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ default('#pop'),
+ ],
+
+ 'preproc-expr': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, '#pop'),
+
+ # Float
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]+', Number.Float),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- 'preproc-parenthesis': [
- (r'\s+', Comment.Preproc),
- (r'\)', Comment.Preproc, '#pop'),
- default('preproc-expr-in-parenthesis'),
- ],
-
- 'preproc-expr-chain': [
- (r'\s+', Comment.Preproc),
- (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
- default('#pop'),
- ],
-
- # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
- 'preproc-expr-in-parenthesis': [
- (r'\s+', Comment.Preproc),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc,
- ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ 'preproc-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\)', Comment.Preproc, '#pop'),
+ default('preproc-expr-in-parenthesis'),
+ ],
+
+ 'preproc-expr-chain': [
+ (r'\s+', Comment.Preproc),
+ (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
+ default('#pop'),
+ ],
+
+ # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
+ 'preproc-expr-in-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc,
+ ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
-
- # String
- (r"'", String.Single,
- ('#pop', 'preproc-expr-chain', 'string-single')),
- (r'"', String.Double,
- ('#pop', 'preproc-expr-chain', 'string-double')),
- ],
-
- 'abstract': [
- include('spaces'),
- default(('#pop', 'abstract-body', 'abstract-relation',
- 'abstract-opaque', 'type-param-constraint', 'type-name')),
- ],
-
- 'abstract-body': [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'class-body')),
- ],
-
- 'abstract-opaque': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
- default('#pop'),
- ],
-
- 'abstract-relation': [
- include('spaces'),
- (r'(?:to|from)', Keyword.Declaration, 'type'),
- (r',', Punctuation),
- default('#pop'),
- ],
-
- 'meta': [
- include('spaces'),
- (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
- ],
-
- # optional colon
- 'meta-colon': [
- include('spaces'),
- (r':', Name.Decorator, '#pop'),
- default('#pop'),
- ],
-
- # same as 'ident' but set token as Name.Decorator instead of Name
- 'meta-ident': [
- include('spaces'),
- (ident, Name.Decorator, '#pop'),
- ],
-
- 'meta-body': [
- include('spaces'),
- (r'\(', Name.Decorator, ('#pop', 'meta-call')),
- default('#pop'),
- ],
-
- 'meta-call': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- default(('#pop', 'meta-call-sep', 'expr')),
- ],
-
- 'meta-call-sep': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- (r',', Punctuation, ('#pop', 'meta-call')),
- ],
-
- 'typedef': [
- include('spaces'),
- default(('#pop', 'typedef-body', 'type-param-constraint',
- 'type-name')),
- ],
-
- 'typedef-body': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
- ],
-
- 'enum': [
- include('spaces'),
- default(('#pop', 'enum-body', 'bracket-open',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'enum-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
- ],
-
- 'enum-member': [
- include('spaces'),
- (r'\(', Punctuation,
- ('#pop', 'semicolon', 'flag', 'function-param')),
- default(('#pop', 'semicolon', 'flag')),
- ],
-
- 'class': [
- include('spaces'),
- default(('#pop', 'class-body', 'bracket-open', 'extends',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'extends': [
- include('spaces'),
- (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
- (r',', Punctuation), # the comma is made optional here, since haxe2
- # requires the comma but haxe3 does not allow it
- default('#pop'),
- ],
-
- 'bracket-open': [
- include('spaces'),
- (r'\{', Punctuation, '#pop'),
- ],
-
- 'bracket-close': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'class-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (r'(?:static|public|private|override|dynamic|inline|macro)\b',
- Keyword.Declaration),
- default('class-member'),
- ],
-
- 'class-member': [
- include('spaces'),
- (r'(var)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'var')),
- (r'(function)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'class-method')),
- ],
-
- # local function, anonymous or not
- 'function-local': [
- include('spaces'),
- (ident_no_keyword, Name.Function,
- ('#pop', 'optional-expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- default(('#pop', 'optional-expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- ],
-
- 'optional-expr': [
- include('spaces'),
- include('expr'),
- default('#pop'),
- ],
-
- 'class-method': [
- include('spaces'),
- (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
- 'function-param', 'parenthesis-open',
- 'type-param-constraint')),
- ],
-
- # function arguments
- 'function-param': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (ident_no_keyword, Name,
- ('#pop', 'function-param-sep', 'assign', 'flag')),
- ],
-
- 'function-param-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'function-param')),
- ],
-
- 'prop-get-set': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
- default('#pop'),
- ],
-
- 'prop-get-set-opt': [
- include('spaces'),
- (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
- (ident_no_keyword, Text, '#pop'), # custom getter/setter
- ],
-
- 'expr-statement': [
- include('spaces'),
- # makes semicolon optional here, just to avoid checking the last
- # one is bracket or not.
- default(('#pop', 'optional-semicolon', 'expr')),
- ],
-
- 'expr': [
- include('spaces'),
- (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
- 'meta-ident', 'meta-colon')),
- (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
- (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
- (r'(?:static|public|private|override|dynamic|inline)\b',
- Keyword.Declaration),
- (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
- 'function-local')),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
- (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
- (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
- (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
- (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
- (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
- (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
- (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
- (r'(?:if)\b', Keyword, ('#pop', 'if')),
- (r'(?:do)\b', Keyword, ('#pop', 'do')),
- (r'(?:while)\b', Keyword, ('#pop', 'while')),
- (r'(?:for)\b', Keyword, ('#pop', 'for')),
- (r'(?:untyped|throw)\b', Keyword),
- (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
- (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
- (r'(?:continue|break)\b', Keyword, '#pop'),
- (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
- (ident_no_keyword, Name, ('#pop', 'expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
+
+ # String
+ (r"'", String.Single,
+ ('#pop', 'preproc-expr-chain', 'string-single')),
+ (r'"', String.Double,
+ ('#pop', 'preproc-expr-chain', 'string-double')),
+ ],
+
+ 'abstract': [
+ include('spaces'),
+ default(('#pop', 'abstract-body', 'abstract-relation',
+ 'abstract-opaque', 'type-param-constraint', 'type-name')),
+ ],
+
+ 'abstract-body': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ ],
+
+ 'abstract-opaque': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
+ default('#pop'),
+ ],
+
+ 'abstract-relation': [
+ include('spaces'),
+ (r'(?:to|from)', Keyword.Declaration, 'type'),
+ (r',', Punctuation),
+ default('#pop'),
+ ],
+
+ 'meta': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
+ ],
+
+ # optional colon
+ 'meta-colon': [
+ include('spaces'),
+ (r':', Name.Decorator, '#pop'),
+ default('#pop'),
+ ],
+
+ # same as 'ident' but set token as Name.Decorator instead of Name
+ 'meta-ident': [
+ include('spaces'),
+ (ident, Name.Decorator, '#pop'),
+ ],
+
+ 'meta-body': [
+ include('spaces'),
+ (r'\(', Name.Decorator, ('#pop', 'meta-call')),
+ default('#pop'),
+ ],
+
+ 'meta-call': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ default(('#pop', 'meta-call-sep', 'expr')),
+ ],
+
+ 'meta-call-sep': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ (r',', Punctuation, ('#pop', 'meta-call')),
+ ],
+
+ 'typedef': [
+ include('spaces'),
+ default(('#pop', 'typedef-body', 'type-param-constraint',
+ 'type-name')),
+ ],
+
+ 'typedef-body': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
+ ],
+
+ 'enum': [
+ include('spaces'),
+ default(('#pop', 'enum-body', 'bracket-open',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'enum-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
+ ],
+
+ 'enum-member': [
+ include('spaces'),
+ (r'\(', Punctuation,
+ ('#pop', 'semicolon', 'flag', 'function-param')),
+ default(('#pop', 'semicolon', 'flag')),
+ ],
+
+ 'class': [
+ include('spaces'),
+ default(('#pop', 'class-body', 'bracket-open', 'extends',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'extends': [
+ include('spaces'),
+ (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
+ (r',', Punctuation), # the comma is made optional here, since haxe2
+ # requires the comma but haxe3 does not allow it
+ default('#pop'),
+ ],
+
+ 'bracket-open': [
+ include('spaces'),
+ (r'\{', Punctuation, '#pop'),
+ ],
+
+ 'bracket-close': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'class-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (r'(?:static|public|private|override|dynamic|inline|macro)\b',
+ Keyword.Declaration),
+ default('class-member'),
+ ],
+
+ 'class-member': [
+ include('spaces'),
+ (r'(var)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'var')),
+ (r'(function)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'class-method')),
+ ],
+
+ # local function, anonymous or not
+ 'function-local': [
+ include('spaces'),
+ (ident_no_keyword, Name.Function,
+ ('#pop', 'optional-expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ default(('#pop', 'optional-expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ ],
+
+ 'optional-expr': [
+ include('spaces'),
+ include('expr'),
+ default('#pop'),
+ ],
+
+ 'class-method': [
+ include('spaces'),
+ (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
+ 'function-param', 'parenthesis-open',
+ 'type-param-constraint')),
+ ],
+
+ # function arguments
+ 'function-param': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (ident_no_keyword, Name,
+ ('#pop', 'function-param-sep', 'assign', 'flag')),
+ ],
+
+ 'function-param-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'function-param')),
+ ],
+
+ 'prop-get-set': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
+ default('#pop'),
+ ],
+
+ 'prop-get-set-opt': [
+ include('spaces'),
+ (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
+ (ident_no_keyword, Text, '#pop'), # custom getter/setter
+ ],
+
+ 'expr-statement': [
+ include('spaces'),
+ # makes the semicolon optional here, just to avoid checking whether
+ # the last token is a bracket or not.
+ default(('#pop', 'optional-semicolon', 'expr')),
+ ],
+
+ 'expr': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
+ 'meta-ident', 'meta-colon')),
+ (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
+ (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
+ (r'(?:static|public|private|override|dynamic|inline)\b',
+ Keyword.Declaration),
+ (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
+ 'function-local')),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
+ (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
+ (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
+ (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
+ (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
+ (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
+ (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
+ (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
+ (r'(?:if)\b', Keyword, ('#pop', 'if')),
+ (r'(?:do)\b', Keyword, ('#pop', 'do')),
+ (r'(?:while)\b', Keyword, ('#pop', 'while')),
+ (r'(?:for)\b', Keyword, ('#pop', 'for')),
+ (r'(?:untyped|throw)\b', Keyword),
+ (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
+ (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
+ (r'(?:continue|break)\b', Keyword, '#pop'),
+ (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
+ (ident_no_keyword, Name, ('#pop', 'expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
-
- # String
- (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
- (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
-
- # Array
- (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
- ],
-
- 'expr-chain': [
- include('spaces'),
- (r'(?:\+\+|\-\-)', Operator),
- (binop, Operator, ('#pop', 'expr')),
- (r'(?:in)\b', Keyword, ('#pop', 'expr')),
- (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
- (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
- (r'\[', Punctuation, 'array-access'),
- (r'\(', Punctuation, 'call'),
- default('#pop'),
- ],
-
- # macro reification
- 'macro': [
- include('spaces'),
- include('meta'),
- (r':', Punctuation, ('#pop', 'type')),
-
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
- (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')),
- (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
- (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
-
- default(('#pop', 'expr')),
- ],
-
- 'macro-class': [
- (r'\{', Punctuation, ('#pop', 'class-body')),
- include('class')
- ],
-
- # cast can be written as "cast expr" or "cast(expr, type)"
- 'cast': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'cast-type', 'expr')),
- default(('#pop', 'expr')),
- ],
-
- # optionally give a type as the 2nd argument of cast()
- 'cast-type': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- 'catch': [
- include('spaces'),
- (r'(?:catch)\b', Keyword, ('expr', 'function-param',
- 'parenthesis-open')),
- default('#pop'),
- ],
-
- # do-while loop
- 'do': [
- include('spaces'),
- default(('#pop', 'do-while', 'expr')),
- ],
-
- # the while after do
- 'do-while': [
- include('spaces'),
- (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
- 'parenthesis-open')),
- ],
-
- 'while': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'for': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'if': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
- 'parenthesis')),
- ],
-
- 'else': [
- include('spaces'),
- (r'(?:else)\b', Keyword, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- 'switch': [
- include('spaces'),
- default(('#pop', 'switch-body', 'bracket-open', 'expr')),
- ],
-
- 'switch-body': [
- include('spaces'),
- (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'case': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- default(('#pop', 'case-sep', 'case-guard', 'expr')),
- ],
-
- 'case-sep': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'case')),
- ],
-
- 'case-guard': [
- include('spaces'),
- (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
- default('#pop'),
- ],
-
- # optional multiple expr under a case
- 'case-block': [
- include('spaces'),
- (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
- default('#pop'),
- ],
-
- 'new': [
- include('spaces'),
- default(('#pop', 'call', 'parenthesis-open', 'type')),
- ],
-
- 'array-decl': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- default(('#pop', 'array-decl-sep', 'expr')),
- ],
-
- 'array-decl-sep': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'array-decl')),
- ],
-
- 'array-access': [
- include('spaces'),
- default(('#pop', 'array-access-close', 'expr')),
- ],
-
- 'array-access-close': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- ],
-
- 'comma': [
- include('spaces'),
- (r',', Punctuation, '#pop'),
- ],
-
- 'colon': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- ],
-
- 'semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- ],
-
- 'optional-semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- default('#pop'),
- ],
-
- # identity that CAN be a Haxe keyword
- 'ident': [
- include('spaces'),
- (ident, Name, '#pop'),
- ],
-
- 'dollar': [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
- default(('#pop', 'expr-chain')),
- ],
-
- 'type-name': [
- include('spaces'),
- (typeid, Name, '#pop'),
- ],
-
- 'type-full-name': [
- include('spaces'),
- (r'\.', Punctuation, 'ident'),
- default('#pop'),
- ],
-
- 'type': [
- include('spaces'),
- (r'\?', Punctuation),
- (ident, Name, ('#pop', 'type-check', 'type-full-name')),
- (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
- (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
- ],
-
- 'type-parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'type')),
- ],
-
- 'type-check': [
- include('spaces'),
- (r'->', Punctuation, ('#pop', 'type')),
- (r'<(?!=)', Punctuation, 'type-param'),
- default('#pop'),
- ],
-
- 'type-struct': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (r'>', Punctuation, ('comma', 'type')),
- (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
- include('class-body'),
- ],
-
- 'type-struct-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-struct')),
- ],
-
- # type-param can be a normal type or a constant literal...
- 'type-param-type': [
- # Float
- (r'\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
+
+ # String
+ (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
+ (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
+
+ # Array
+ (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
+ ],
+
+ 'expr-chain': [
+ include('spaces'),
+ (r'(?:\+\+|\-\-)', Operator),
+ (binop, Operator, ('#pop', 'expr')),
+ (r'(?:in)\b', Keyword, ('#pop', 'expr')),
+ (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
+ (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
+ (r'\[', Punctuation, 'array-access'),
+ (r'\(', Punctuation, 'call'),
+ default('#pop'),
+ ],
+
+ # macro reification
+ 'macro': [
+ include('spaces'),
+ include('meta'),
+ (r':', Punctuation, ('#pop', 'type')),
+
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
+ (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')),
+ (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
+ (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
+
+ default(('#pop', 'expr')),
+ ],
+
+ 'macro-class': [
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ include('class')
+ ],
+
+ # cast can be written as "cast expr" or "cast(expr, type)"
+ 'cast': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'cast-type', 'expr')),
+ default(('#pop', 'expr')),
+ ],
+
+ # optionally give a type as the 2nd argument of cast()
+ 'cast-type': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ 'catch': [
+ include('spaces'),
+ (r'(?:catch)\b', Keyword, ('expr', 'function-param',
+ 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # do-while loop
+ 'do': [
+ include('spaces'),
+ default(('#pop', 'do-while', 'expr')),
+ ],
+
+ # the while after do
+ 'do-while': [
+ include('spaces'),
+ (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
+ 'parenthesis-open')),
+ ],
+
+ 'while': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'for': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'if': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
+ 'parenthesis')),
+ ],
+
+ 'else': [
+ include('spaces'),
+ (r'(?:else)\b', Keyword, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ 'switch': [
+ include('spaces'),
+ default(('#pop', 'switch-body', 'bracket-open', 'expr')),
+ ],
+
+ 'switch-body': [
+ include('spaces'),
+ (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'case': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ default(('#pop', 'case-sep', 'case-guard', 'expr')),
+ ],
+
+ 'case-sep': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'case')),
+ ],
+
+ 'case-guard': [
+ include('spaces'),
+ (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # optional multiple expr under a case
+ 'case-block': [
+ include('spaces'),
+ (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
+ default('#pop'),
+ ],
+
+ 'new': [
+ include('spaces'),
+ default(('#pop', 'call', 'parenthesis-open', 'type')),
+ ],
+
+ 'array-decl': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ default(('#pop', 'array-decl-sep', 'expr')),
+ ],
+
+ 'array-decl-sep': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'array-decl')),
+ ],
+
+ 'array-access': [
+ include('spaces'),
+ default(('#pop', 'array-access-close', 'expr')),
+ ],
+
+ 'array-access-close': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+
+ 'comma': [
+ include('spaces'),
+ (r',', Punctuation, '#pop'),
+ ],
+
+ 'colon': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ ],
+
+ 'semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ ],
+
+ 'optional-semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ default('#pop'),
+ ],
+
+ # identifier that CAN be a Haxe keyword
+ 'ident': [
+ include('spaces'),
+ (ident, Name, '#pop'),
+ ],
+
+ 'dollar': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
+ default(('#pop', 'expr-chain')),
+ ],
+
+ 'type-name': [
+ include('spaces'),
+ (typeid, Name, '#pop'),
+ ],
+
+ 'type-full-name': [
+ include('spaces'),
+ (r'\.', Punctuation, 'ident'),
+ default('#pop'),
+ ],
+
+ 'type': [
+ include('spaces'),
+ (r'\?', Punctuation),
+ (ident, Name, ('#pop', 'type-check', 'type-full-name')),
+ (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
+ (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
+ ],
+
+ 'type-parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'type')),
+ ],
+
+ 'type-check': [
+ include('spaces'),
+ (r'->', Punctuation, ('#pop', 'type')),
+ (r'<(?!=)', Punctuation, 'type-param'),
+ default('#pop'),
+ ],
+
+ 'type-struct': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (r'>', Punctuation, ('comma', 'type')),
+ (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
+ include('class-body'),
+ ],
+
+ 'type-struct-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-struct')),
+ ],
+
+ # type-param can be a normal type or a constant literal...
+ 'type-param-type': [
+ # Float
+ (r'\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, '#pop'),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
-
- # Array
- (r'\[', Operator, ('#pop', 'array-decl')),
-
- include('type'),
- ],
-
- # type-param part of a type
- # ie. the <A,B> path in Map<A,B>
- 'type-param': [
- include('spaces'),
- default(('#pop', 'type-param-sep', 'type-param-type')),
- ],
-
- 'type-param-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param')),
- ],
-
- # optional type-param that may include constraint
- # ie. <T:Constraint, T2:(ConstraintA,ConstraintB)>
- 'type-param-constraint': [
- include('spaces'),
- (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- default('#pop'),
- ],
-
- 'type-param-constraint-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- ],
-
- # the optional constraint inside type-param
- 'type-param-constraint-flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
- default('#pop'),
- ],
-
- 'type-param-constraint-flag-type': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
- 'type')),
- default(('#pop', 'type')),
- ],
-
- 'type-param-constraint-flag-type-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, 'type'),
- ],
-
- # a parenthesis expr that contain exactly one expr
- 'parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'flag', 'expr')),
- ],
-
- 'parenthesis-open': [
- include('spaces'),
- (r'\(', Punctuation, '#pop'),
- ],
-
- 'parenthesis-close': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- ],
-
- 'var': [
- include('spaces'),
- (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
- ],
-
- # optional more var decl.
- 'var-sep': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'var')),
- default('#pop'),
- ],
-
- # optional assignment
- 'assign': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- # optional type flag
- 'flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- # colon as part of a ternary operator (?:)
- 'ternary': [
- include('spaces'),
- (r':', Operator, '#pop'),
- ],
-
- # function call
- 'call': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- default(('#pop', 'call-sep', 'expr')),
- ],
-
- # after a call param
- 'call-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'call')),
- ],
-
- # bracket can be block or object
- 'bracket': [
- include('spaces'),
- (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
- ('#pop', 'bracket-check')),
- (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
- (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
- default(('#pop', 'block')),
- ],
-
- 'bracket-check': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
- default(('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
- ],
-
- # code block
- 'block': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default('expr-statement'),
- ],
-
- # object in key-value pairs
- 'object': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
- ],
-
- # a key of an object
- 'ident-or-string': [
- include('spaces'),
- (ident_no_keyword, Name, '#pop'),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- # after a key-value pair in object
- 'object-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'object')),
- ],
-
-
-
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
-
-
-class HxmlLexer(RegexLexer):
- """
- Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
-
- .. versionadded:: 1.6
- """
- name = 'Hxml'
- aliases = ['haxeml', 'hxml']
- filenames = ['*.hxml']
-
- tokens = {
- 'root': [
- # Seperator
- (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
- # Compiler switches with one dash
- (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
- # Compilerswitches with two dashes
- (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
- r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
- # Targets and other options that take an argument
- (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
- r'cp|cmd)( +)(.+)',
- bygroups(Punctuation, Keyword, Whitespace, String)),
- # Options that take only numerical arguments
- (r'(-)(swf-version)( +)(\d+)',
- bygroups(Punctuation, Keyword, Number.Integer)),
- # An Option that defines the size, the fps and the background
- # color of an flash movie
- (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
- bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
- Punctuation, Number.Integer, Punctuation, Number.Integer,
- Punctuation, Number.Hex)),
- # options with two dashes that takes arguments
- (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
- r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
- # Single line comment, multiline ones are not allowed.
- (r'#.*', Comment.Single)
- ]
- }
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
+
+ # Array
+ (r'\[', Operator, ('#pop', 'array-decl')),
+
+ include('type'),
+ ],
+
+ # type-param part of a type
+ # i.e. the <A,B> part in Map<A,B>
+ 'type-param': [
+ include('spaces'),
+ default(('#pop', 'type-param-sep', 'type-param-type')),
+ ],
+
+ 'type-param-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param')),
+ ],
+
+ # optional type-param that may include constraint
+ # ie. <T:Constraint, T2:(ConstraintA,ConstraintB)>
+ 'type-param-constraint': [
+ include('spaces'),
+ (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ ],
+
+ # the optional constraint inside type-param
+ 'type-param-constraint-flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-flag-type': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
+ 'type')),
+ default(('#pop', 'type')),
+ ],
+
+ 'type-param-constraint-flag-type-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, 'type'),
+ ],
+
+ # a parenthesis expr that contain exactly one expr
+ 'parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'flag', 'expr')),
+ ],
+
+ 'parenthesis-open': [
+ include('spaces'),
+ (r'\(', Punctuation, '#pop'),
+ ],
+
+ 'parenthesis-close': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+
+ 'var': [
+ include('spaces'),
+ (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
+ ],
+
+ # optional more var decl.
+ 'var-sep': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'var')),
+ default('#pop'),
+ ],
+
+ # optional assignment
+ 'assign': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ # optional type flag
+ 'flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ # colon as part of a ternary operator (?:)
+ 'ternary': [
+ include('spaces'),
+ (r':', Operator, '#pop'),
+ ],
+
+ # function call
+ 'call': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ default(('#pop', 'call-sep', 'expr')),
+ ],
+
+ # after a call param
+ 'call-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'call')),
+ ],
+
+ # bracket can be block or object
+ 'bracket': [
+ include('spaces'),
+ (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
+ ('#pop', 'bracket-check')),
+ (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
+ (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
+ default(('#pop', 'block')),
+ ],
+
+ 'bracket-check': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
+ default(('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
+ ],
+
+ # code block
+ 'block': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default('expr-statement'),
+ ],
+
+ # object in key-value pairs
+ 'object': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
+ ],
+
+ # a key of an object
+ 'ident-or-string': [
+ include('spaces'),
+ (ident_no_keyword, Name, '#pop'),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ # after a key-value pair in object
+ 'object-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'object')),
+ ],
+
+
+
+ }
+
+ def analyse_text(text):
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+
+
+class HxmlLexer(RegexLexer):
+ """
+ Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Hxml'
+ aliases = ['haxeml', 'hxml']
+ filenames = ['*.hxml']
+
+ tokens = {
+ 'root': [
+ # Separator
+ (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
+ # Compiler switches with one dash
+ (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
+ # Compiler switches with two dashes
+ (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
+ r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
+ # Targets and other options that take an argument
+ (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
+ r'cp|cmd)( +)(.+)',
+ bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Options that take only numerical arguments
+ (r'(-)(swf-version)( +)(\d+)',
+ bygroups(Punctuation, Keyword, Whitespace, Number.Integer)),
+ # An option that defines the size, the fps and the background
+ # color of a flash movie
+ (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
+ bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
+ Punctuation, Number.Integer, Punctuation, Number.Integer,
+ Punctuation, Number.Hex)),
+ # options with two dashes that takes arguments
+ (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
+ r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Single line comment, multiline ones are not allowed.
+ (r'#.*', Comment.Single)
+ ]
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/hdl.py b/contrib/python/Pygments/py2/pygments/lexers/hdl.py
index b45654ebc8..067354ae7c 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/hdl.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/hdl.py
@@ -1,180 +1,180 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.hdl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for hardware descriptor languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.hdl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for hardware description languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, include, using, this, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
-
-
-class VerilogLexer(RegexLexer):
- """
- For verilog source code with preprocessor directives.
-
- .. versionadded:: 1.4
- """
- name = 'verilog'
- aliases = ['verilog', 'v']
- filenames = ['*.v']
- mimetypes = ['text/x-verilog']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- (r'^\s*`define', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'[{}#@]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
- (r'([0-9]+)|(\'b)[01]+', Number.Bin),
- (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
- (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
- (r'\'[01xz]', Number),
- (r'\d+[Ll]?', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;\']', Punctuation),
- (r'`[a-zA-Z_]\w*', Name.Constant),
-
- (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
- (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
- 'import'),
-
- (words((
- 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
- 'assign', 'automatic', 'begin', 'break', 'buf', 'bufif0', 'bufif1',
- 'case', 'casex', 'casez', 'cmos', 'const', 'continue', 'deassign',
- 'default', 'defparam', 'disable', 'do', 'edge', 'else', 'end', 'endcase',
- 'endfunction', 'endgenerate', 'endmodule', 'endpackage', 'endprimitive',
- 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'final', 'for',
- 'force', 'forever', 'fork', 'function', 'generate', 'genvar', 'highz0',
- 'highz1', 'if', 'initial', 'inout', 'input', 'integer', 'join', 'large',
- 'localparam', 'macromodule', 'medium', 'module', 'nand', 'negedge',
- 'nmos', 'nor', 'not', 'notif0', 'notif1', 'or', 'output', 'packed',
- 'parameter', 'pmos', 'posedge', 'primitive', 'pull0', 'pull1',
- 'pulldown', 'pullup', 'rcmos', 'ref', 'release', 'repeat', 'return',
- 'rnmos', 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 'scalared', 'signed',
- 'small', 'specify', 'specparam', 'strength', 'string', 'strong0',
- 'strong1', 'struct', 'table', 'task', 'tran', 'tranif0', 'tranif1',
- 'type', 'typedef', 'unsigned', 'var', 'vectored', 'void', 'wait',
- 'weak0', 'weak1', 'while', 'xnor', 'xor'), suffix=r'\b'),
- Keyword),
-
- (words((
- 'accelerate', 'autoexpand_vectornets', 'celldefine', 'default_nettype',
- 'else', 'elsif', 'endcelldefine', 'endif', 'endprotect', 'endprotected',
- 'expand_vectornets', 'ifdef', 'ifndef', 'include', 'noaccelerate',
- 'noexpand_vectornets', 'noremove_gatenames', 'noremove_netnames',
- 'nounconnected_drive', 'protect', 'protected', 'remove_gatenames',
- 'remove_netnames', 'resetall', 'timescale', 'unconnected_drive',
- 'undef'), prefix=r'`', suffix=r'\b'),
- Comment.Preproc),
-
- (words((
- 'bits', 'bitstoreal', 'bitstoshortreal', 'countdrivers', 'display', 'fclose',
- 'fdisplay', 'finish', 'floor', 'fmonitor', 'fopen', 'fstrobe', 'fwrite',
- 'getpattern', 'history', 'incsave', 'input', 'itor', 'key', 'list', 'log',
- 'monitor', 'monitoroff', 'monitoron', 'nokey', 'nolog', 'printtimescale',
- 'random', 'readmemb', 'readmemh', 'realtime', 'realtobits', 'reset',
- 'reset_count', 'reset_value', 'restart', 'rtoi', 'save', 'scale', 'scope',
- 'shortrealtobits', 'showscopes', 'showvariables', 'showvars', 'sreadmemb',
- 'sreadmemh', 'stime', 'stop', 'strobe', 'time', 'timeformat', 'write'),
- prefix=r'\$', suffix=r'\b'),
- Name.Builtin),
-
- (words((
- 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
- 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
- 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
- 'shortreal', 'real', 'realtime'), suffix=r'\b'),
- Keyword.Type),
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, include, using, this, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
+
+
+class VerilogLexer(RegexLexer):
+ """
+ For verilog source code with preprocessor directives.
+
+ .. versionadded:: 1.4
+ """
+ name = 'verilog'
+ aliases = ['verilog', 'v']
+ filenames = ['*.v']
+ mimetypes = ['text/x-verilog']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ (r'^\s*`define', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'[{}#@]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
+ (r'([0-9]+)|(\'b)[01]+', Number.Bin),
+ (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
+ (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
+ (r'\'[01xz]', Number),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;\']', Punctuation),
+ (r'`[a-zA-Z_]\w*', Name.Constant),
+
+ (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
+ (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text),
+ 'import'),
+
+ (words((
+ 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
+ 'assign', 'automatic', 'begin', 'break', 'buf', 'bufif0', 'bufif1',
+ 'case', 'casex', 'casez', 'cmos', 'const', 'continue', 'deassign',
+ 'default', 'defparam', 'disable', 'do', 'edge', 'else', 'end', 'endcase',
+ 'endfunction', 'endgenerate', 'endmodule', 'endpackage', 'endprimitive',
+ 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'final', 'for',
+ 'force', 'forever', 'fork', 'function', 'generate', 'genvar', 'highz0',
+ 'highz1', 'if', 'initial', 'inout', 'input', 'integer', 'join', 'large',
+ 'localparam', 'macromodule', 'medium', 'module', 'nand', 'negedge',
+ 'nmos', 'nor', 'not', 'notif0', 'notif1', 'or', 'output', 'packed',
+ 'parameter', 'pmos', 'posedge', 'primitive', 'pull0', 'pull1',
+ 'pulldown', 'pullup', 'rcmos', 'ref', 'release', 'repeat', 'return',
+ 'rnmos', 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 'scalared', 'signed',
+ 'small', 'specify', 'specparam', 'strength', 'string', 'strong0',
+ 'strong1', 'struct', 'table', 'task', 'tran', 'tranif0', 'tranif1',
+ 'type', 'typedef', 'unsigned', 'var', 'vectored', 'void', 'wait',
+ 'weak0', 'weak1', 'while', 'xnor', 'xor'), suffix=r'\b'),
+ Keyword),
+
+ (words((
+ 'accelerate', 'autoexpand_vectornets', 'celldefine', 'default_nettype',
+ 'else', 'elsif', 'endcelldefine', 'endif', 'endprotect', 'endprotected',
+ 'expand_vectornets', 'ifdef', 'ifndef', 'include', 'noaccelerate',
+ 'noexpand_vectornets', 'noremove_gatenames', 'noremove_netnames',
+ 'nounconnected_drive', 'protect', 'protected', 'remove_gatenames',
+ 'remove_netnames', 'resetall', 'timescale', 'unconnected_drive',
+ 'undef'), prefix=r'`', suffix=r'\b'),
+ Comment.Preproc),
+
+ (words((
+ 'bits', 'bitstoreal', 'bitstoshortreal', 'countdrivers', 'display', 'fclose',
+ 'fdisplay', 'finish', 'floor', 'fmonitor', 'fopen', 'fstrobe', 'fwrite',
+ 'getpattern', 'history', 'incsave', 'input', 'itor', 'key', 'list', 'log',
+ 'monitor', 'monitoroff', 'monitoron', 'nokey', 'nolog', 'printtimescale',
+ 'random', 'readmemb', 'readmemh', 'realtime', 'realtobits', 'reset',
+ 'reset_count', 'reset_value', 'restart', 'rtoi', 'save', 'scale', 'scope',
+ 'shortrealtobits', 'showscopes', 'showvariables', 'showvars', 'sreadmemb',
+ 'sreadmemh', 'stime', 'stop', 'strobe', 'time', 'timeformat', 'write'),
+ prefix=r'\$', suffix=r'\b'),
+ Name.Builtin),
+
+ (words((
+ 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
+ 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
+ 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
+ 'shortreal', 'real', 'realtime'), suffix=r'\b'),
+ Keyword.Type),
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'\$?[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'import': [
- (r'[\w:]+\*?', Name.Namespace, '#pop')
- ]
- }
-
-
-class SystemVerilogLexer(RegexLexer):
- """
- Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
- 1800-2009 standard.
-
- .. versionadded:: 1.5
- """
- name = 'systemverilog'
- aliases = ['systemverilog', 'sv']
- filenames = ['*.sv', '*.svh']
- mimetypes = ['text/x-systemverilog']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- (r'^\s*`define', Comment.Preproc, 'macro'),
- (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
- (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text), 'import'),
-
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'[{}#@]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
- (r'([0-9]+)|(\'b)[01]+', Number.Bin),
- (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
- (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
- (r'\'[01xz]', Number),
- (r'\d+[Ll]?', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;\']', Punctuation),
- (r'`[a-zA-Z_]\w*', Name.Constant),
-
- (words((
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'import': [
+ (r'[\w:]+\*?', Name.Namespace, '#pop')
+ ]
+ }
+
+
+class SystemVerilogLexer(RegexLexer):
+ """
+ Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
+ 1800-2009 standard.
+
+ .. versionadded:: 1.5
+ """
+ name = 'systemverilog'
+ aliases = ['systemverilog', 'sv']
+ filenames = ['*.sv', '*.svh']
+ mimetypes = ['text/x-systemverilog']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ (r'^\s*`define', Comment.Preproc, 'macro'),
+ (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)),
+ (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text), 'import'),
+
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'[{}#@]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
+ (r'([0-9]+)|(\'b)[01]+', Number.Bin),
+ (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
+ (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
+ (r'\'[01xz]', Number),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;\']', Punctuation),
+ (r'`[a-zA-Z_]\w*', Name.Constant),
+
+ (words((
'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
'before', 'begin', 'bind', 'bins', 'binsof', 'bit', 'break', 'buf',
@@ -216,18 +216,18 @@ class SystemVerilogLexer(RegexLexer):
'virtual', 'void', 'wait', 'wait_order', 'wand', 'weak', 'weak0',
'weak1', 'while', 'wildcard', 'wire', 'with', 'within', 'wor',
'xnor', 'xor'), suffix=r'\b'),
- Keyword),
-
- (words((
+ Keyword),
+
+ (words((
'`__FILE__', '`__LINE__', '`begin_keywords', '`celldefine',
'`default_nettype', '`define', '`else', '`elsif', '`end_keywords',
'`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
'`line', '`nounconnected_drive', '`pragma', '`resetall',
'`timescale', '`unconnected_drive', '`undef', '`undefineall'),
- suffix=r'\b'),
- Comment.Preproc),
-
- (words((
+ suffix=r'\b'),
+ Comment.Preproc),
+
+ (words((
'$display', '$displayb', '$displayh', '$displayo', '$dumpall',
'$dumpfile', '$dumpflush', '$dumplimit', '$dumpoff', '$dumpon',
'$dumpports', '$dumpportsall', '$dumpportsflush', '$dumpportslimit',
@@ -236,141 +236,141 @@ class SystemVerilogLexer(RegexLexer):
'$ferror', '$fflush', '$fgetc', '$fgets', '$finish', '$fmonitor',
'$fmonitorb', '$fmonitorh', '$fmonitoro', '$fopen', '$fread',
'$fscanf', '$fseek', '$fstrobe', '$fstrobeb', '$fstrobeh',
- '$fstrobeo', '$ftell', '$fwrite', '$fwriteb', '$fwriteh', '$fwriteo',
- '$monitor', '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
+ '$fstrobeo', '$ftell', '$fwrite', '$fwriteb', '$fwriteh', '$fwriteo',
+ '$monitor', '$monitorb', '$monitorh', '$monitoro', '$monitoroff',
'$monitoron', '$plusargs', '$random', '$readmemb', '$readmemh',
'$rewind', '$sformat', '$sformatf', '$sscanf', '$strobe',
'$strobeb', '$strobeh', '$strobeo', '$swrite', '$swriteb',
'$swriteh', '$swriteo', '$test', '$ungetc', '$value$plusargs',
'$write', '$writeb', '$writeh', '$writememb', '$writememh',
'$writeo'), suffix=r'\b'),
- Name.Builtin),
-
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (words((
- 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
- 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
- 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
- 'shortreal', 'real', 'realtime'), suffix=r'\b'),
- Keyword.Type),
+ Name.Builtin),
+
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (words((
+ 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
+ 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
+ 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
+ 'shortreal', 'real', 'realtime'), suffix=r'\b'),
+ Keyword.Type),
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'\$?[a-zA-Z_]\w*', Name),
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'import': [
- (r'[\w:]+\*?', Name.Namespace, '#pop')
- ]
- }
-
-
-class VhdlLexer(RegexLexer):
- """
- For VHDL source code.
-
- .. versionadded:: 1.5
- """
- name = 'vhdl'
- aliases = ['vhdl']
- filenames = ['*.vhdl', '*.vhd']
- mimetypes = ['text/x-vhdl']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'--.*?$', Comment.Single),
- (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r"'[a-z_]\w*", Name.Attribute),
- (r'[()\[\],.;\']', Punctuation),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'import': [
+ (r'[\w:]+\*?', Name.Namespace, '#pop')
+ ]
+ }
+
+
+class VhdlLexer(RegexLexer):
+ """
+ For VHDL source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'vhdl'
+ aliases = ['vhdl']
+ filenames = ['*.vhdl', '*.vhd']
+ mimetypes = ['text/x-vhdl']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'--.*?$', Comment.Single),
+ (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r"'[a-z_]\w*", Name.Attribute),
+ (r'[()\[\],.;\']', Punctuation),
(r'"[^\n\\"]*"', String),
-
- (r'(library)(\s+)([a-z_]\w*)',
- bygroups(Keyword, Text, Name.Namespace)),
- (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
+
+ (r'(library)(\s+)([a-z_]\w*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)),
(r'(use)(\s+)([a-z_][\w.]*\.)(all)',
bygroups(Keyword, Text, Name.Namespace, Keyword)),
- (r'(use)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword, Text, Name.Namespace)),
+ (r'(use)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword, Text, Name.Namespace)),
(r'(std|ieee)(\.[a-z_]\w*)',
bygroups(Name.Namespace, Name.Namespace)),
(words(('std', 'ieee', 'work'), suffix=r'\b'),
Name.Namespace),
- (r'(entity|component)(\s+)([a-z_]\w*)',
- bygroups(Keyword, Text, Name.Class)),
- (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)'
- r'(of)(\s+)([a-z_]\w*)(\s+)(is)',
- bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
- Name.Class, Text, Keyword)),
+ (r'(entity|component)(\s+)([a-z_]\w*)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)'
+ r'(of)(\s+)([a-z_]\w*)(\s+)(is)',
+ bygroups(Keyword, Text, Name.Class, Text, Keyword, Text,
+ Name.Class, Text, Keyword)),
(r'([a-z_]\w*)(:)(\s+)(process|for)',
bygroups(Name.Class, Operator, Text, Keyword)),
- (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'),
-
- include('types'),
- include('keywords'),
- include('numbers'),
-
- (r'[a-z_]\w*', Name),
- ],
- 'endblock': [
- include('keywords'),
- (r'[a-z_]\w*', Name.Class),
- (r'(\s+)', Text),
- (r';', Punctuation, '#pop'),
- ],
- 'types': [
- (words((
- 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time',
- 'delay_length', 'natural', 'positive', 'string', 'bit_vector',
- 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector',
+ (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'),
+
+ include('types'),
+ include('keywords'),
+ include('numbers'),
+
+ (r'[a-z_]\w*', Name),
+ ],
+ 'endblock': [
+ include('keywords'),
+ (r'[a-z_]\w*', Name.Class),
+ (r'(\s+)', Text),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'types': [
+ (words((
+ 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time',
+ 'delay_length', 'natural', 'positive', 'string', 'bit_vector',
+ 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector',
'std_logic', 'std_logic_vector', 'signed', 'unsigned'), suffix=r'\b'),
- Keyword.Type),
- ],
- 'keywords': [
- (words((
- 'abs', 'access', 'after', 'alias', 'all', 'and',
- 'architecture', 'array', 'assert', 'attribute', 'begin', 'block',
- 'body', 'buffer', 'bus', 'case', 'component', 'configuration',
- 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end',
- 'entity', 'exit', 'file', 'for', 'function', 'generate',
- 'generic', 'group', 'guarded', 'if', 'impure', 'in',
- 'inertial', 'inout', 'is', 'label', 'library', 'linkage',
- 'literal', 'loop', 'map', 'mod', 'nand', 'new',
- 'next', 'nor', 'not', 'null', 'of', 'on',
- 'open', 'or', 'others', 'out', 'package', 'port',
- 'postponed', 'procedure', 'process', 'pure', 'range', 'record',
+ Keyword.Type),
+ ],
+ 'keywords': [
+ (words((
+ 'abs', 'access', 'after', 'alias', 'all', 'and',
+ 'architecture', 'array', 'assert', 'attribute', 'begin', 'block',
+ 'body', 'buffer', 'bus', 'case', 'component', 'configuration',
+ 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end',
+ 'entity', 'exit', 'file', 'for', 'function', 'generate',
+ 'generic', 'group', 'guarded', 'if', 'impure', 'in',
+ 'inertial', 'inout', 'is', 'label', 'library', 'linkage',
+ 'literal', 'loop', 'map', 'mod', 'nand', 'new',
+ 'next', 'nor', 'not', 'null', 'of', 'on',
+ 'open', 'or', 'others', 'out', 'package', 'port',
+ 'postponed', 'procedure', 'process', 'pure', 'range', 'record',
'register', 'reject', 'rem', 'return', 'rol', 'ror', 'select',
'severity', 'signal', 'shared', 'sla', 'sll', 'sra',
- 'srl', 'subtype', 'then', 'to', 'transport', 'type',
- 'units', 'until', 'use', 'variable', 'wait', 'when',
- 'while', 'with', 'xnor', 'xor'), suffix=r'\b'),
- Keyword),
- ],
- 'numbers': [
- (r'\d{1,2}#[0-9a-f_]+#?', Number.Integer),
- (r'\d+', Number.Integer),
- (r'(\d+\.\d*|\.\d+|\d+)E[+-]?\d+', Number.Float),
- (r'X"[0-9a-f_]+"', Number.Hex),
- (r'O"[0-7_]+"', Number.Oct),
- (r'B"[01_]+"', Number.Bin),
- ],
- }
+ 'srl', 'subtype', 'then', 'to', 'transport', 'type',
+ 'units', 'until', 'use', 'variable', 'wait', 'when',
+ 'while', 'with', 'xnor', 'xor'), suffix=r'\b'),
+ Keyword),
+ ],
+ 'numbers': [
+ (r'\d{1,2}#[0-9a-f_]+#?', Number.Integer),
+ (r'\d+', Number.Integer),
+ (r'(\d+\.\d*|\.\d+|\d+)E[+-]?\d+', Number.Float),
+ (r'X"[0-9a-f_]+"', Number.Hex),
+ (r'O"[0-7_]+"', Number.Oct),
+ (r'B"[01_]+"', Number.Bin),
+ ],
+ }
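
The Verilog, SystemVerilog and VHDL rules above lean heavily on bygroups() to split a single regex match into several token types, for example the (use)(\s+)([a-z_][\w.]*\.)(all) rule in the VHDL 'root' state. A small sketch of how that shows up in the emitted token stream, assuming a throwaway two-line VHDL snippet:

    from pygments.lexers.hdl import VhdlLexer

    code = "library ieee;\nuse ieee.std_logic_1164.all;\n"

    # get_tokens() yields (token_type, text) pairs; the bygroups() rule above
    # emits Keyword, Text, Name.Namespace and Keyword for one 'use ... all' match.
    for token_type, text in VhdlLexer().get_tokens(code):
        if text.strip():
            print(token_type, repr(text))
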
diff --git a/contrib/python/Pygments/py2/pygments/lexers/hexdump.py b/contrib/python/Pygments/py2/pygments/lexers/hexdump.py
index da28543270..69377f4cb9 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/hexdump.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/hexdump.py
@@ -1,54 +1,54 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.hexdump
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for hexadecimal dumps.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.hexdump
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for hexadecimal dumps.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
-from pygments.token import Text, Name, Number, String, Punctuation
-
-__all__ = ['HexdumpLexer']
-
-
-class HexdumpLexer(RegexLexer):
- """
- For typical hex dump output formats by the UNIX and GNU/Linux tools ``hexdump``,
- ``hd``, ``hexcat``, ``od`` and ``xxd``, and the DOS tool ``DEBUG``. For example:
-
- .. sourcecode:: hexdump
-
- 00000000 7f 45 4c 46 02 01 01 00 00 00 00 00 00 00 00 00 |.ELF............|
- 00000010 02 00 3e 00 01 00 00 00 c5 48 40 00 00 00 00 00 |..>......H@.....|
-
- The specific supported formats are the outputs of:
-
- * ``hexdump FILE``
- * ``hexdump -C FILE`` -- the `canonical` format used in the example.
- * ``hd FILE`` -- same as ``hexdump -C FILE``.
- * ``hexcat FILE``
- * ``od -t x1z FILE``
- * ``xxd FILE``
- * ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
-
- .. versionadded:: 2.1
- """
- name = 'Hexdump'
- aliases = ['hexdump']
-
- hd = r'[0-9A-Ha-h]'
-
- tokens = {
- 'root': [
- (r'\n', Text),
- include('offset'),
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Text, Name, Number, String, Punctuation
+
+__all__ = ['HexdumpLexer']
+
+
+class HexdumpLexer(RegexLexer):
+ """
+ For typical hex dump output formats by the UNIX and GNU/Linux tools ``hexdump``,
+ ``hd``, ``hexcat``, ``od`` and ``xxd``, and the DOS tool ``DEBUG``. For example:
+
+ .. sourcecode:: hexdump
+
+ 00000000 7f 45 4c 46 02 01 01 00 00 00 00 00 00 00 00 00 |.ELF............|
+ 00000010 02 00 3e 00 01 00 00 00 c5 48 40 00 00 00 00 00 |..>......H@.....|
+
+ The specific supported formats are the outputs of:
+
+ * ``hexdump FILE``
+ * ``hexdump -C FILE`` -- the `canonical` format used in the example.
+ * ``hd FILE`` -- same as ``hexdump -C FILE``.
+ * ``hexcat FILE``
+ * ``od -t x1z FILE``
+ * ``xxd FILE``
+ * ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Hexdump'
+ aliases = ['hexdump']
+
+ hd = r'[0-9A-Ha-h]'
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ include('offset'),
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
bygroups(Number.Hex, Punctuation, Number.Hex)),
- (hd+r'{2}', Number.Hex),
+ (hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\>)(.{16})(\<)$',
bygroups(Text, Punctuation, String, Punctuation), 'bracket-strings'),
(r'(\s{2,3})(\|)(.{16})(\|)$',
@@ -57,47 +57,47 @@ class HexdumpLexer(RegexLexer):
bygroups(Text, Punctuation, String, Punctuation)),
(r'(\s{2,3})(\|)(.{1,15})(\|)$',
bygroups(Text, Punctuation, String, Punctuation)),
- (r'(\s{2,3})(.{1,15})$', bygroups(Text, String)),
- (r'(\s{2,3})(.{16}|.{20})$', bygroups(Text, String), 'nonpiped-strings'),
- (r'\s', Text),
- (r'^\*', Punctuation),
- ],
- 'offset': [
- (r'^('+hd+'+)(:)', bygroups(Name.Label, Punctuation), 'offset-mode'),
- (r'^'+hd+'+', Name.Label),
- ],
- 'offset-mode': [
- (r'\s', Text, '#pop'),
- (hd+'+', Name.Label),
- (r':', Punctuation)
- ],
- 'piped-strings': [
- (r'\n', Text),
- include('offset'),
- (hd+r'{2}', Number.Hex),
+ (r'(\s{2,3})(.{1,15})$', bygroups(Text, String)),
+ (r'(\s{2,3})(.{16}|.{20})$', bygroups(Text, String), 'nonpiped-strings'),
+ (r'\s', Text),
+ (r'^\*', Punctuation),
+ ],
+ 'offset': [
+ (r'^('+hd+'+)(:)', bygroups(Name.Label, Punctuation), 'offset-mode'),
+ (r'^'+hd+'+', Name.Label),
+ ],
+ 'offset-mode': [
+ (r'\s', Text, '#pop'),
+ (hd+'+', Name.Label),
+ (r':', Punctuation)
+ ],
+ 'piped-strings': [
+ (r'\n', Text),
+ include('offset'),
+ (hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\|)(.{1,16})(\|)$',
bygroups(Text, Punctuation, String, Punctuation)),
- (r'\s', Text),
- (r'^\*', Punctuation),
- ],
- 'bracket-strings': [
- (r'\n', Text),
- include('offset'),
- (hd+r'{2}', Number.Hex),
+ (r'\s', Text),
+ (r'^\*', Punctuation),
+ ],
+ 'bracket-strings': [
+ (r'\n', Text),
+ include('offset'),
+ (hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\>)(.{1,16})(\<)$',
bygroups(Text, Punctuation, String, Punctuation)),
- (r'\s', Text),
- (r'^\*', Punctuation),
- ],
- 'nonpiped-strings': [
- (r'\n', Text),
- include('offset'),
+ (r'\s', Text),
+ (r'^\*', Punctuation),
+ ],
+ 'nonpiped-strings': [
+ (r'\n', Text),
+ include('offset'),
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
bygroups(Number.Hex, Punctuation, Number.Hex)),
- (hd+r'{2}', Number.Hex),
- (r'(\s{19,})(.{1,20}?)$', bygroups(Text, String)),
- (r'(\s{2,3})(.{1,20})$', bygroups(Text, String)),
- (r'\s', Text),
- (r'^\*', Punctuation),
- ],
- }
+ (hd+r'{2}', Number.Hex),
+ (r'(\s{19,})(.{1,20}?)$', bygroups(Text, String)),
+ (r'(\s{2,3})(.{1,20})$', bygroups(Text, String)),
+ (r'\s', Text),
+ (r'^\*', Punctuation),
+ ],
+ }
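
The HexdumpLexer 'root' state above labels offsets as Name.Label, byte pairs as Number.Hex and the trailing character column as String. A quick check against the canonical hexdump -C layout shown in the docstring (the exact spacing of the sample line is an assumption):

    from pygments.lexers.hexdump import HexdumpLexer
    from pygments.token import Number, String

    line = ("00000000  7f 45 4c 46 02 01 01 00  "
            "00 00 00 00 00 00 00 00  |.ELF............|\n")

    tokens = list(HexdumpLexer().get_tokens(line))

    # Sixteen byte pairs should come back as Number.Hex, and the sixteen
    # characters between the pipes as a single String token.
    print(sum(1 for ttype, _ in tokens if ttype is Number.Hex), "hex byte tokens")
    print([text for ttype, text in tokens if ttype is String])
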
diff --git a/contrib/python/Pygments/py2/pygments/lexers/html.py b/contrib/python/Pygments/py2/pygments/lexers/html.py
index cbef4f7e98..cb13457798 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/html.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/html.py
@@ -1,602 +1,602 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.html
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for HTML, XML and related markup.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.html
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for HTML, XML and related markup.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- default, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation
-from pygments.util import looks_like_xml, html_doctype_matches
-
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.jvm import ScalaLexer
-from pygments.lexers.css import CssLexer, _indentation, _starts_block
-from pygments.lexers.ruby import RubyLexer
-
-__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+from pygments.util import looks_like_xml, html_doctype_matches
+
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.jvm import ScalaLexer
+from pygments.lexers.css import CssLexer, _indentation, _starts_block
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
'ScamlLexer', 'PugLexer']
-
-
-class HtmlLexer(RegexLexer):
- """
- For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is highlighted
- by the appropriate lexer.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
- mimetypes = ['text/html', 'application/xhtml+xml']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'(<)(\s*)(script)(\s*)',
- bygroups(Punctuation, Text, Name.Tag, Text),
- ('script-content', 'tag')),
- (r'(<)(\s*)(style)(\s*)',
- bygroups(Punctuation, Text, Name.Tag, Text),
- ('style-content', 'tag')),
- # note: this allows tag names not used in HTML like <x:with-dash>,
- # this is to support yet-unknown template engines and the like
- (r'(<)(\s*)([\w:.-]+)',
- bygroups(Punctuation, Text, Name.Tag), 'tag'),
- (r'(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation)),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
+
+
+class HtmlLexer(RegexLexer):
+ """
+ For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is highlighted
+ by the appropriate lexer.
+ """
+
+ name = 'HTML'
+ aliases = ['html']
+ filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
+ mimetypes = ['text/html', 'application/xhtml+xml']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'(<)(\s*)(script)(\s*)',
+ bygroups(Punctuation, Text, Name.Tag, Text),
+ ('script-content', 'tag')),
+ (r'(<)(\s*)(style)(\s*)',
+ bygroups(Punctuation, Text, Name.Tag, Text),
+ ('style-content', 'tag')),
+ # note: this allows tag names not used in HTML like <x:with-dash>,
+ # this is to support yet-unknown template engines and the like
+ (r'(<)(\s*)([\w:.-]+)',
+ bygroups(Punctuation, Text, Name.Tag), 'tag'),
+ (r'(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation)),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'([\w:-]+\s*)(=)(\s*)', bygroups(Name.Attribute, Operator, Text),
+ 'attr'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'(/?)(\s*)(>)', bygroups(Punctuation, Text, Punctuation), '#pop'),
+ ],
+ 'script-content': [
+ (r'(<)(\s*)(/)(\s*)(script)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation), '#pop'),
+ (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
+ ],
+ 'style-content': [
+ (r'(<)(\s*)(/)(\s*)(style)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation),'#pop'),
+ (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if html_doctype_matches(text):
+ return 0.5
+
+
+class DtdLexer(RegexLexer):
+ """
+ A lexer for DTDs (Document Type Definitions).
+
+ .. versionadded:: 1.5
+ """
+
+ flags = re.MULTILINE | re.DOTALL
+
+ name = 'DTD'
+ aliases = ['dtd']
+ filenames = ['*.dtd']
+ mimetypes = ['application/xml-dtd']
+
+ tokens = {
+ 'root': [
+ include('common'),
+
+ (r'(<!ELEMENT)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'element'),
+ (r'(<!ATTLIST)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'attlist'),
+ (r'(<!ENTITY)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Entity), 'entity'),
+ (r'(<!NOTATION)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'notation'),
+ (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
+ bygroups(Keyword, Name.Entity, Text, Keyword)),
+
+ (r'(<!DOCTYPE)(\s+)([^>\s]+)',
+ bygroups(Keyword, Text, Name.Tag)),
+ (r'PUBLIC|SYSTEM', Keyword.Constant),
+ (r'[\[\]>]', Keyword),
+ ],
+
+ 'common': [
+ (r'\s+', Text),
+ (r'(%|&)[^;]*;', Name.Entity),
+ ('<!--', Comment, 'comment'),
+ (r'[(|)*,?+]', Operator),
+ (r'"[^"]*"', String.Double),
+ (r'\'[^\']*\'', String.Single),
+ ],
+
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+
+ 'element': [
+ include('common'),
+ (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Tag),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'attlist': [
+ include('common'),
+ (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
+ Keyword.Constant),
+ (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
+ (r'xml:space|xml:lang', Keyword.Reserved),
+ (r'[^>\s|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'entity': [
+ include('common'),
+ (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Entity),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'notation': [
+ include('common'),
+ (r'SYSTEM|PUBLIC', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if not looks_like_xml(text) and \
+ ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
+ return 0.8
+
+
+class XmlLexer(RegexLexer):
+ """
+ Generic lexer for XML (eXtensible Markup Language).
+ """
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ name = 'XML'
+ aliases = ['xml']
+ filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
+ '*.wsdl', '*.wsf']
+ mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
+ 'application/rss+xml', 'application/atom+xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
(r'\s+', Text),
- (r'([\w:-]+\s*)(=)(\s*)', bygroups(Name.Attribute, Operator, Text),
- 'attr'),
- (r'[\w:-]+', Name.Attribute),
- (r'(/?)(\s*)(>)', bygroups(Punctuation, Text, Punctuation), '#pop'),
- ],
- 'script-content': [
- (r'(<)(\s*)(/)(\s*)(script)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation), '#pop'),
- (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
- ],
- 'style-content': [
- (r'(<)(\s*)(/)(\s*)(style)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation),'#pop'),
- (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if html_doctype_matches(text):
- return 0.5
-
-
-class DtdLexer(RegexLexer):
- """
- A lexer for DTDs (Document Type Definitions).
-
- .. versionadded:: 1.5
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- name = 'DTD'
- aliases = ['dtd']
- filenames = ['*.dtd']
- mimetypes = ['application/xml-dtd']
-
- tokens = {
- 'root': [
- include('common'),
-
- (r'(<!ELEMENT)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'element'),
- (r'(<!ATTLIST)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'attlist'),
- (r'(<!ENTITY)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Entity), 'entity'),
- (r'(<!NOTATION)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'notation'),
- (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
- bygroups(Keyword, Name.Entity, Text, Keyword)),
-
- (r'(<!DOCTYPE)(\s+)([^>\s]+)',
- bygroups(Keyword, Text, Name.Tag)),
- (r'PUBLIC|SYSTEM', Keyword.Constant),
- (r'[\[\]>]', Keyword),
- ],
-
- 'common': [
- (r'\s+', Text),
- (r'(%|&)[^;]*;', Name.Entity),
- ('<!--', Comment, 'comment'),
- (r'[(|)*,?+]', Operator),
- (r'"[^"]*"', String.Double),
- (r'\'[^\']*\'', String.Single),
- ],
-
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
-
- 'element': [
- include('common'),
- (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Tag),
- (r'>', Keyword, '#pop'),
- ],
-
- 'attlist': [
- include('common'),
- (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
- Keyword.Constant),
- (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
- (r'xml:space|xml:lang', Keyword.Reserved),
- (r'[^>\s|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
-
- 'entity': [
- include('common'),
- (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Entity),
- (r'>', Keyword, '#pop'),
- ],
-
- 'notation': [
- include('common'),
- (r'SYSTEM|PUBLIC', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if not looks_like_xml(text) and \
- ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
- return 0.8
-
-
-class XmlLexer(RegexLexer):
- """
- Generic lexer for XML (eXtensible Markup Language).
- """
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- name = 'XML'
- aliases = ['xml']
- filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
- '*.wsdl', '*.wsf']
- mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
- 'application/rss+xml', 'application/atom+xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- (r'\s+', Text),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if looks_like_xml(text):
- return 0.45 # less than HTML
-
-
-class XsltLexer(XmlLexer):
- """
- A lexer for XSLT.
-
- .. versionadded:: 0.10
- """
-
- name = 'XSLT'
- aliases = ['xslt']
- filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
- mimetypes = ['application/xsl+xml', 'application/xslt+xml']
-
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if looks_like_xml(text):
+ return 0.45 # less than HTML
+
+
+class XsltLexer(XmlLexer):
+ """
+ A lexer for XSLT.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'XSLT'
+ aliases = ['xslt']
+ filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
+ mimetypes = ['application/xsl+xml', 'application/xslt+xml']
+
EXTRA_KEYWORDS = {
- 'apply-imports', 'apply-templates', 'attribute',
- 'attribute-set', 'call-template', 'choose', 'comment',
- 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
- 'for-each', 'if', 'import', 'include', 'key', 'message',
- 'namespace-alias', 'number', 'otherwise', 'output', 'param',
- 'preserve-space', 'processing-instruction', 'sort',
- 'strip-space', 'stylesheet', 'template', 'text', 'transform',
- 'value-of', 'variable', 'when', 'with-param'
+ 'apply-imports', 'apply-templates', 'attribute',
+ 'attribute-set', 'call-template', 'choose', 'comment',
+ 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
+ 'for-each', 'if', 'import', 'include', 'key', 'message',
+ 'namespace-alias', 'number', 'otherwise', 'output', 'param',
+ 'preserve-space', 'processing-instruction', 'sort',
+ 'strip-space', 'stylesheet', 'template', 'text', 'transform',
+ 'value-of', 'variable', 'when', 'with-param'
}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
- m = re.match('</?xsl:([^>]*)/?>?', value)
-
- if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
- yield index, Keyword, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- if looks_like_xml(text) and '<xsl' in text:
- return 0.8
-
-
-class HamlLexer(ExtendedRegexLexer):
- """
- For Haml markup.
-
- .. versionadded:: 1.3
- """
-
- name = 'Haml'
- aliases = ['haml']
- filenames = ['*.haml']
- mimetypes = ['text/x-haml']
-
- flags = re.IGNORECASE
- # Haml can include " |\n" anywhere,
- # which is ignored and used to wrap long lines.
- # To accommodate this, use this custom faux dot instead.
- _dot = r'(?: \|\n(?=.* \|)|.)'
-
- # In certain places, a comma at the end of the line
- # allows line wrapping as well.
- _comma_dot = r'(?:,\s*\n|' + _dot + ')'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
+ m = re.match('</?xsl:([^>]*)/?>?', value)
+
+ if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
+ yield index, Keyword, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
+ if looks_like_xml(text) and '<xsl' in text:
+ return 0.8
+
+
+class HamlLexer(ExtendedRegexLexer):
+ """
+ For Haml markup.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haml'
+ aliases = ['haml']
+ filenames = ['*.haml']
+ mimetypes = ['text/x-haml']
+
+ flags = re.IGNORECASE
+ # Haml can include " |\n" anywhere,
+ # which is ignored and used to wrap long lines.
+ # To accommodate this, use this custom faux dot instead.
+ _dot = r'(?: \|\n(?=.* \|)|.)'
+
+ # In certain places, a comma at the end of the line
+ # allows line wrapping as well.
+ _comma_dot = r'(?:,\s*\n|' + _dot + ')'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'haml-comment-block'), '#pop'),
- (r'(-)(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'haml-comment-block'), '#pop'),
+ (r'(-)(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
(r'\{(,\n|' + _dot + r')*?\}', using(RubyLexer)),
(r'\[' + _dot + r'*?\]', using(RubyLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'haml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'haml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-class ScamlLexer(ExtendedRegexLexer):
- """
- For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
-
- .. versionadded:: 1.4
- """
-
- name = 'Scaml'
- aliases = ['scaml']
- filenames = ['*.scaml']
- mimetypes = ['text/x-scaml']
-
- flags = re.IGNORECASE
- # Scaml does not yet support the " |\n" notation to
- # wrap long lines. Once it does, use the custom faux
- # dot instead.
- # _dot = r'(?: \|\n(?=.* \|)|.)'
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
+class ScamlLexer(ExtendedRegexLexer):
+ """
+ For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Scaml'
+ aliases = ['scaml']
+ filenames = ['*.scaml']
+ mimetypes = ['text/x-scaml']
+
+ flags = re.IGNORECASE
+ # Scaml does not yet support the " |\n" notation to
+ # wrap long lines. Once it does, use the custom faux
+ # dot instead.
+ # _dot = r'(?: \|\n(?=.* \|)|.)'
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
(r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
(r'\[' + _dot + r'*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
class PugLexer(ExtendedRegexLexer):
- """
+ """
For Pug markup.
Pug is a variant of Scaml, see:
- http://scalate.fusesource.org/documentation/scaml-reference.html
-
- .. versionadded:: 1.4
- """
-
+ http://scalate.fusesource.org/documentation/scaml-reference.html
+
+ .. versionadded:: 1.4
+ """
+
name = 'Pug'
aliases = ['pug', 'jade']
filenames = ['*.pug', '*.jade']
mimetypes = ['text/x-pug', 'text/x-jade']
-
- flags = re.IGNORECASE
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)), 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+
+ flags = re.IGNORECASE
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)), 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- (r'\|', Text, 'eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ (r'\|', Text, 'eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
(r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
(r'\[' + _dot + r'*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
JadeLexer = PugLexer # compat
diff --git a/contrib/python/Pygments/py2/pygments/lexers/idl.py b/contrib/python/Pygments/py2/pygments/lexers/idl.py
index 292f2de724..4e927cc103 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/idl.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/idl.py
@@ -1,263 +1,263 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.idl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for IDL.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.idl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for IDL.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, Number, String
-
-__all__ = ['IDLLexer']
-
-
-class IDLLexer(RegexLexer):
- """
- Pygments Lexer for IDL (Interactive Data Language).
-
- .. versionadded:: 1.6
- """
- name = 'IDL'
- aliases = ['idl']
- filenames = ['*.pro']
- mimetypes = ['text/idl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- _RESERVED = (
- 'and', 'begin', 'break', 'case', 'common', 'compile_opt',
- 'continue', 'do', 'else', 'end', 'endcase', 'elseelse',
- 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
- 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
- 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
- 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
- 'repeat', 'switch', 'then', 'until', 'while', 'xor')
- """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
-
- _BUILTIN_LIB = (
- 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
- 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
- 'arg_present', 'array_equal', 'array_indices', 'arrow',
- 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
- 'a_correlate', 'bandpass_filter', 'bandreject_filter',
- 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
- 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
- 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
- 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
- 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
- 'bytscl', 'caldat', 'calendar', 'call_external',
- 'call_function', 'call_method', 'call_procedure', 'canny',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, Number, String
+
+__all__ = ['IDLLexer']
+
+
+class IDLLexer(RegexLexer):
+ """
+ Pygments Lexer for IDL (Interactive Data Language).
+
+ .. versionadded:: 1.6
+ """
+ name = 'IDL'
+ aliases = ['idl']
+ filenames = ['*.pro']
+ mimetypes = ['text/idl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ _RESERVED = (
+ 'and', 'begin', 'break', 'case', 'common', 'compile_opt',
+ 'continue', 'do', 'else', 'end', 'endcase', 'elseelse',
+ 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
+ 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
+ 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
+ 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
+ 'repeat', 'switch', 'then', 'until', 'while', 'xor')
+ """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
+
+ _BUILTIN_LIB = (
+ 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
+ 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
+ 'arg_present', 'array_equal', 'array_indices', 'arrow',
+ 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
+ 'a_correlate', 'bandpass_filter', 'bandreject_filter',
+ 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
+ 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
+ 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
+ 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
+ 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
+ 'bytscl', 'caldat', 'calendar', 'call_external',
+ 'call_function', 'call_method', 'call_procedure', 'canny',
'catch', 'cd', r'cdf_\w*', 'ceil', 'chebyshev',
- 'check_math',
- 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
- 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
- 'cmyk_convert', 'colorbar', 'colorize_sample',
- 'colormap_applicable', 'colormap_gradient',
- 'colormap_rotation', 'colortable', 'color_convert',
- 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
- 'command_line_args', 'complex', 'complexarr', 'complexround',
- 'compute_mesh_normals', 'cond', 'congrid', 'conj',
- 'constrained_min', 'contour', 'convert_coord', 'convol',
- 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
- 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
- 'create_view', 'crossp', 'crvlength', 'cti_test',
- 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
- 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
- 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
- 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
- 'cw_form', 'cw_fslider', 'cw_light_editor',
- 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
- 'cw_palette_editor', 'cw_palette_editor_get',
- 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
- 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
- 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
- 'define_msgblk', 'define_msgblk_from_file', 'defroi',
- 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
- 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
- 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
- 'dialog_printersetup', 'dialog_printjob',
- 'dialog_read_image', 'dialog_write_image', 'digital_filter',
- 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
- 'dlm_load', 'dlm_register', 'doc_library', 'double',
- 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
- 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
+ 'check_math',
+ 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
+ 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
+ 'cmyk_convert', 'colorbar', 'colorize_sample',
+ 'colormap_applicable', 'colormap_gradient',
+ 'colormap_rotation', 'colortable', 'color_convert',
+ 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
+ 'command_line_args', 'complex', 'complexarr', 'complexround',
+ 'compute_mesh_normals', 'cond', 'congrid', 'conj',
+ 'constrained_min', 'contour', 'convert_coord', 'convol',
+ 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
+ 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
+ 'create_view', 'crossp', 'crvlength', 'cti_test',
+ 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
+ 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
+ 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
+ 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
+ 'cw_form', 'cw_fslider', 'cw_light_editor',
+ 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
+ 'cw_palette_editor', 'cw_palette_editor_get',
+ 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
+ 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
+ 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
+ 'define_msgblk', 'define_msgblk_from_file', 'defroi',
+ 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
+ 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
+ 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
+ 'dialog_printersetup', 'dialog_printjob',
+ 'dialog_read_image', 'dialog_write_image', 'digital_filter',
+ 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
+ 'dlm_load', 'dlm_register', 'doc_library', 'double',
+ 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
+ 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
'eof', r'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
- 'erode', 'errorplot', 'errplot', 'estimator_filter',
- 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
- 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
- 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
- 'file_dirname', 'file_expand_path', 'file_info',
- 'file_lines', 'file_link', 'file_mkdir', 'file_move',
- 'file_poll_input', 'file_readlink', 'file_same',
- 'file_search', 'file_test', 'file_which', 'findgen',
- 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
- 'fltarr', 'flush', 'format_axis_values', 'free_lun',
- 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
- 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
- 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
- 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
- 'getwindows', 'get_drive_list', 'get_dxf_objects',
- 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
+ 'erode', 'errorplot', 'errplot', 'estimator_filter',
+ 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
+ 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
+ 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
+ 'file_dirname', 'file_expand_path', 'file_info',
+ 'file_lines', 'file_link', 'file_mkdir', 'file_move',
+ 'file_poll_input', 'file_readlink', 'file_same',
+ 'file_search', 'file_test', 'file_which', 'findgen',
+ 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
+ 'fltarr', 'flush', 'format_axis_values', 'free_lun',
+ 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
+ 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
+ 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
+ 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
+ 'getwindows', 'get_drive_list', 'get_dxf_objects',
+ 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
'greg2jul', r'grib_\w*', 'grid3', 'griddata',
- 'grid_input', 'grid_tps', 'gs_iter',
+ 'grid_input', 'grid_tps', 'gs_iter',
r'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
- 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
+ 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
'hanning', 'hash', r'hdf_\w*', 'heap_free',
- 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
- 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
- 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
- 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
- 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
- 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
- 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
- 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
- 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
- 'image_cont', 'image_statistics', 'imaginary', 'imap',
- 'indgen', 'intarr', 'interpol', 'interpolate',
- 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
- 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
- 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
- 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
- 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
- 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
- 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
- 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
- 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
- 'label_region', 'ladfit', 'laguerre', 'laplacian',
- 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
- 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
- 'la_gm_linear_model', 'la_hqr', 'la_invert',
- 'la_least_squares', 'la_least_square_equality',
- 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
- 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
- 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
- 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
- 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
- 'lngamma', 'lnp_test', 'loadct', 'locale_get',
- 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
- 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
- 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
- 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
- 'map_continents', 'map_grid', 'map_image', 'map_patch',
- 'map_proj_forward', 'map_proj_image', 'map_proj_info',
- 'map_proj_init', 'map_proj_inverse', 'map_set',
- 'matrix_multiply', 'matrix_power', 'max', 'md_test',
- 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
- 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
- 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
- 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
- 'message', 'min', 'min_curve_surf', 'mk_html_help',
- 'modifyct', 'moment', 'morph_close', 'morph_distance',
- 'morph_gradient', 'morph_hitormiss', 'morph_open',
- 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
+ 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
+ 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
+ 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
+ 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
+ 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
+ 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
+ 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
+ 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
+ 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
+ 'image_cont', 'image_statistics', 'imaginary', 'imap',
+ 'indgen', 'intarr', 'interpol', 'interpolate',
+ 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
+ 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
+ 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
+ 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
+ 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
+ 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
+ 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
+ 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
+ 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
+ 'label_region', 'ladfit', 'laguerre', 'laplacian',
+ 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
+ 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
+ 'la_gm_linear_model', 'la_hqr', 'la_invert',
+ 'la_least_squares', 'la_least_square_equality',
+ 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
+ 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
+ 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
+ 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
+ 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
+ 'lngamma', 'lnp_test', 'loadct', 'locale_get',
+ 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
+ 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
+ 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
+ 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
+ 'map_continents', 'map_grid', 'map_image', 'map_patch',
+ 'map_proj_forward', 'map_proj_image', 'map_proj_info',
+ 'map_proj_init', 'map_proj_inverse', 'map_set',
+ 'matrix_multiply', 'matrix_power', 'max', 'md_test',
+ 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
+ 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
+ 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
+ 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
+ 'message', 'min', 'min_curve_surf', 'mk_html_help',
+ 'modifyct', 'moment', 'morph_close', 'morph_distance',
+ 'morph_gradient', 'morph_hitormiss', 'morph_open',
+ 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
r'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
- 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
- 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
- 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
- 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
- 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
- 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
- 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
- 'polar_contour', 'polar_surface', 'poly', 'polyfill',
- 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
- 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
- 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
- 'print', 'printd', 'product', 'profile', 'profiler',
- 'profiles', 'project_vol', 'psafm', 'pseudo',
- 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
- 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
- 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
- 'query_csv', 'query_dicom', 'query_gif', 'query_image',
- 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
- 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
- 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
- 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
- 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
- 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
- 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
- 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
- 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
- 'read_xwd', 'real_part', 'rebin', 'recall_commands',
- 'recon3', 'reduce_colors', 'reform', 'region_grow',
- 'register_cursor', 'regress', 'replicate',
- 'replicate_inplace', 'resolve_all', 'resolve_routine',
- 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
- 'rot', 'rotate', 'round', 'routine_filepath',
- 'routine_info', 'rs_test', 'r_correlate', 'r_test',
- 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
- 'scope_traceback', 'scope_varfetch', 'scope_varname',
- 'search2d', 'search3d', 'sem_create', 'sem_delete',
- 'sem_lock', 'sem_release', 'setenv', 'set_plot',
- 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
- 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
- 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
- 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
- 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
- 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
- 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
- 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
- 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
- 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
- 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
- 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
- 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
- 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
- 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
- 'tanh', 'tek_color', 'temporary', 'tetra_clip',
- 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
- 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
- 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
- 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
- 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
- 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
- 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
- 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
- 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
- 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
- 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
- 'where', 'widget_base', 'widget_button', 'widget_combobox',
- 'widget_control', 'widget_displaycontextmen', 'widget_draw',
- 'widget_droplist', 'widget_event', 'widget_info',
- 'widget_label', 'widget_list', 'widget_propertysheet',
- 'widget_slider', 'widget_tab', 'widget_table',
- 'widget_text', 'widget_tree', 'widget_tree_move',
- 'widget_window', 'wiener_filter', 'window', 'writeu',
- 'write_bmp', 'write_csv', 'write_gif', 'write_image',
- 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
- 'write_png', 'write_ppm', 'write_spr', 'write_srf',
- 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
- 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
- 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
- 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
- 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
- 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
- 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
- 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
- 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
- 'xmtool', 'xobjview', 'xobjview_rotate',
- 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
- 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
- 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
- 'xyouts', 'zoom', 'zoom_24')
- """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
-
- tokens = {
- 'root': [
+ 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
+ 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
+ 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
+ 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
+ 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
+ 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
+ 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
+ 'polar_contour', 'polar_surface', 'poly', 'polyfill',
+ 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
+ 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
+ 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
+ 'print', 'printd', 'product', 'profile', 'profiler',
+ 'profiles', 'project_vol', 'psafm', 'pseudo',
+ 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
+ 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
+ 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
+ 'query_csv', 'query_dicom', 'query_gif', 'query_image',
+ 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
+ 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
+ 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
+ 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
+ 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
+ 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
+ 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
+ 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
+ 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
+ 'read_xwd', 'real_part', 'rebin', 'recall_commands',
+ 'recon3', 'reduce_colors', 'reform', 'region_grow',
+ 'register_cursor', 'regress', 'replicate',
+ 'replicate_inplace', 'resolve_all', 'resolve_routine',
+ 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
+ 'rot', 'rotate', 'round', 'routine_filepath',
+ 'routine_info', 'rs_test', 'r_correlate', 'r_test',
+ 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
+ 'scope_traceback', 'scope_varfetch', 'scope_varname',
+ 'search2d', 'search3d', 'sem_create', 'sem_delete',
+ 'sem_lock', 'sem_release', 'setenv', 'set_plot',
+ 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
+ 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
+ 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
+ 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
+ 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
+ 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
+ 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
+ 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
+ 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
+ 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
+ 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
+ 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
+ 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
+ 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
+ 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
+ 'tanh', 'tek_color', 'temporary', 'tetra_clip',
+ 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
+ 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
+ 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
+ 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
+ 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
+ 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
+ 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
+ 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
+ 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
+ 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
+ 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
+ 'where', 'widget_base', 'widget_button', 'widget_combobox',
+ 'widget_control', 'widget_displaycontextmen', 'widget_draw',
+ 'widget_droplist', 'widget_event', 'widget_info',
+ 'widget_label', 'widget_list', 'widget_propertysheet',
+ 'widget_slider', 'widget_tab', 'widget_table',
+ 'widget_text', 'widget_tree', 'widget_tree_move',
+ 'widget_window', 'wiener_filter', 'window', 'writeu',
+ 'write_bmp', 'write_csv', 'write_gif', 'write_image',
+ 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
+ 'write_png', 'write_ppm', 'write_spr', 'write_srf',
+ 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
+ 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
+ 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
+ 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
+ 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
+ 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
+ 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
+ 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
+ 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
+ 'xmtool', 'xobjview', 'xobjview_rotate',
+ 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
+ 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
+ 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
+ 'xyouts', 'zoom', 'zoom_24')
+ """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
+
+ tokens = {
+ 'root': [
(r'^\s*;.*?\n', Comment.Single),
- (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
- (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator),
- (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
- (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
- (r'"[^\"]*"', String.Double),
- (r"'[^\']*'", String.Single),
+ (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
+ (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
+ (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator),
+ (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
+ (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
+ (r'"[^\"]*"', String.Double),
+ (r"'[^\']*'", String.Single),
(r'\b[+\-]?([0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(D|E)?([+\-]?[0-9]+)?\b',
Number.Float),
(r'\b\'[+\-]?[0-9A-F]+\'X(U?(S?|L{1,2})|B)\b', Number.Hex),
@@ -265,6 +265,6 @@ class IDLLexer(RegexLexer):
(r'\b[+\-]?[0-9]+U?L{1,2}\b', Number.Integer.Long),
(r'\b[+\-]?[0-9]+U?S?\b', Number.Integer),
(r'\b[+\-]?[0-9]+B\b', Number),
- (r'.', Text),
- ]
- }
+ (r'.', Text),
+ ]
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/igor.py b/contrib/python/Pygments/py2/pygments/lexers/igor.py
index f4a22e1e70..eeaa95f819 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/igor.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/igor.py
@@ -1,53 +1,53 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.igor
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Igor Pro.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.igor
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Igor Pro.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Keyword, Name, String
-
-__all__ = ['IgorLexer']
-
-
-class IgorLexer(RegexLexer):
- """
- Pygments Lexer for Igor Pro procedure files (.ipf).
- See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
-
- .. versionadded:: 2.0
- """
-
- name = 'Igor'
- aliases = ['igor', 'igorpro']
- filenames = ['*.ipf']
- mimetypes = ['text/ipf']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- flowControl = (
- 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
- 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
- 'break', 'continue', 'return', 'AbortOnRTE', 'AbortOnValue'
- )
- types = (
- 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
- 'STRUCT', 'dfref', 'funcref', 'char', 'uchar', 'int16', 'uint16', 'int32',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Keyword, Name, String
+
+__all__ = ['IgorLexer']
+
+
+class IgorLexer(RegexLexer):
+ """
+ Pygments Lexer for Igor Pro procedure files (.ipf).
+ See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Igor'
+ aliases = ['igor', 'igorpro']
+ filenames = ['*.ipf']
+ mimetypes = ['text/ipf']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ flowControl = (
+ 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
+ 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
+ 'break', 'continue', 'return', 'AbortOnRTE', 'AbortOnValue'
+ )
+ types = (
+ 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
+ 'STRUCT', 'dfref', 'funcref', 'char', 'uchar', 'int16', 'uint16', 'int32',
'uint32', 'int64', 'uint64', 'float', 'double'
- )
- keywords = (
- 'override', 'ThreadSafe', 'MultiThread', 'static', 'Proc',
- 'Picture', 'Prompt', 'DoPrompt', 'macro', 'window', 'function', 'end',
- 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu'
- )
- operations = (
+ )
+ keywords = (
+ 'override', 'ThreadSafe', 'MultiThread', 'static', 'Proc',
+ 'Picture', 'Prompt', 'DoPrompt', 'macro', 'window', 'function', 'end',
+ 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu'
+ )
+ operations = (
'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio', 'AddMovieFrame',
'AddWavesToBoxPlot', 'AddWavesToViolinPlot', 'AdoptFiles', 'APMath', 'Append',
'AppendBoxPlot', 'AppendImage', 'AppendLayoutObject', 'AppendMatrixContour',
@@ -135,7 +135,7 @@ class IgorLexer(RegexLexer):
'ModifyContour', 'ModifyControl', 'ModifyControlList', 'ModifyFreeAxis',
'ModifyGizmo', 'ModifyGraph', 'ModifyImage', 'ModifyLayout', 'ModifyPanel',
'ModifyTable', 'ModifyViolinPlot', 'ModifyWaterfall', 'MoveDataFolder',
- 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
+ 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
'MoveWave', 'MoveWindow', 'MultiTaperPSD', 'MultiThreadingControl',
'NC_CloseFile', 'NC_DumpErrors', 'NC_Inquire', 'NC_ListAttributes',
'NC_ListObjects', 'NC_LoadData', 'NC_OpenFile', 'NeuralNetworkRun',
@@ -170,7 +170,7 @@ class IgorLexer(RegexLexer):
'StatsCircularTwoSampleTest', 'StatsCochranTest', 'StatsContingencyTable',
'StatsDIPTest', 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKDE', 'StatsKendallTauTest',
- 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
+ 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
'StatsLinearRegression', 'StatsMultiCorrelationTest', 'StatsNPMCTest',
'StatsNPNominalSRTest', 'StatsQuantiles', 'StatsRankCorrelationTest',
'StatsResample', 'StatsSample', 'StatsScheffeTest', 'StatsShapiroWilkTest',
@@ -189,8 +189,8 @@ class IgorLexer(RegexLexer):
'VISAReadBinaryWave', 'VISAReadWave', 'VISAWrite', 'VISAWriteBinary',
'VISAWriteBinaryWave', 'VISAWriteWave', 'WaveMeanStdv', 'WaveStats',
'WaveTransform', 'wfprintf', 'WignerTransform', 'WindowFunction', 'XLLoadWave'
- )
- functions = (
+ )
+ functions = (
'abs', 'acos', 'acosh', 'AddListItem', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD',
'alog', 'AnnotationInfo', 'AnnotationList', 'area', 'areaXY', 'asin', 'asinh',
'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisList', 'AxisValFromPixel',
@@ -322,9 +322,9 @@ class IgorLexer(RegexLexer):
'StatsInvMaxwellCDF', 'StatsInvMooreCDF', 'StatsInvNBinomialCDF',
'StatsInvNCChiCDF', 'StatsInvNCFCDF', 'StatsInvNormalCDF', 'StatsInvParetoCDF',
'StatsInvPoissonCDF', 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
- 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
- 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
- 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
+ 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
+ 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
+ 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF', 'StatsLogNormalCDF',
'StatsLogNormalPDF', 'StatsMaxwellCDF', 'StatsMaxwellPDF', 'StatsMedian',
'StatsMooreCDF', 'StatsNBinomialCDF', 'StatsNBinomialPDF', 'StatsNCChiCDF',
@@ -334,7 +334,7 @@ class IgorLexer(RegexLexer):
'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF', 'StatsRayleighCDF',
'StatsRayleighPDF', 'StatsRectangularCDF', 'StatsRectangularPDF', 'StatsRunsCDF',
'StatsSpearmanRhoCDF', 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
- 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
+ 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise', 'StatsVonMisesPDF',
'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF', 'StatsWeibullPDF',
'StopMSTimer', 'StringByKey', 'stringCRC', 'StringFromList', 'StringList',
@@ -400,26 +400,26 @@ class IgorLexer(RegexLexer):
'zeromq_stop', 'zeromq_stop', 'zeromq_test_callfunction',
'zeromq_test_callfunction', 'zeromq_test_serializeWave',
'zeromq_test_serializeWave', 'zeta'
- )
-
- tokens = {
- 'root': [
- (r'//.*$', Comment.Single),
- (r'"([^"\\]|\\.)*"', String),
- # Flow Control.
- (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword),
- # Types.
- (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- # Keywords.
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
- # Built-in operations.
- (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class),
- # Built-in functions.
- (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
- # Compiler directives.
+ )
+
+ tokens = {
+ 'root': [
+ (r'//.*$', Comment.Single),
+ (r'"([^"\\]|\\.)*"', String),
+ # Flow Control.
+ (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword),
+ # Types.
+ (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ # Keywords.
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ # Built-in operations.
+ (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class),
+ # Built-in functions.
+ (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
+ # Compiler directives.
(r'^#(include|pragma|define|undef|ifdef|ifndef|if|elif|else|endif)',
- Name.Decorator),
- (r'[^a-z"/]+$', Text),
- (r'.', Text),
- ],
- }
+ Name.Decorator),
+ (r'[^a-z"/]+$', Text),
+ (r'.', Text),
+ ],
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/inferno.py b/contrib/python/Pygments/py2/pygments/lexers/inferno.py
index f29808cf9d..1f17fd9d5b 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/inferno.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/inferno.py
@@ -1,96 +1,96 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.inferno
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Inferno os and all the related stuff.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.inferno
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Inferno os and all the related stuff.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default
-from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
- Name, String, Number
-
-__all__ = ['LimboLexer']
-
-
-class LimboLexer(RegexLexer):
- """
- Lexer for `Limbo programming language <http://www.vitanuova.com/inferno/limbo.html>`_
-
- TODO:
- - maybe implement better var declaration highlighting
- - some simple syntax error highlighting
-
- .. versionadded:: 2.0
- """
- name = 'Limbo'
- aliases = ['limbo']
- filenames = ['*.b']
- mimetypes = ['text/limbo']
-
- tokens = {
- 'whitespace': [
- (r'^(\s*)([a-zA-Z_]\w*:(\s*)\n)',
- bygroups(Text, Name.Label)),
- (r'\n', Text),
- (r'\s+', Text),
- (r'#(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\', String), # stray backslash
- ],
- 'statements': [
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])', Number.Float),
- (r'16r[0-9a-fA-F]+', Number.Hex),
- (r'8r[0-7]+', Number.Oct),
- (r'((([1-3]\d)|([2-9]))r)?(\d+)', Number.Integer),
- (r'[()\[\],.]', Punctuation),
- (r'[~!%^&*+=|?:<>/-]|(->)|(<-)|(=>)|(::)', Operator),
- (r'(alt|break|case|continue|cyclic|do|else|exit'
- r'for|hd|if|implement|import|include|len|load|or'
- r'pick|return|spawn|tagof|tl|to|while)\b', Keyword),
- (r'(byte|int|big|real|string|array|chan|list|adt'
- r'|fn|ref|of|module|self|type)\b', Keyword.Type),
- (r'(con|iota|nil)\b', Keyword.Constant),
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
+ Name, String, Number
+
+__all__ = ['LimboLexer']
+
+
+class LimboLexer(RegexLexer):
+ """
+ Lexer for `Limbo programming language <http://www.vitanuova.com/inferno/limbo.html>`_
+
+ TODO:
+ - maybe implement better var declaration highlighting
+ - some simple syntax error highlighting
+
+ .. versionadded:: 2.0
+ """
+ name = 'Limbo'
+ aliases = ['limbo']
+ filenames = ['*.b']
+ mimetypes = ['text/limbo']
+
+ tokens = {
+ 'whitespace': [
+ (r'^(\s*)([a-zA-Z_]\w*:(\s*)\n)',
+ bygroups(Text, Name.Label)),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'#(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\', String), # stray backslash
+ ],
+ 'statements': [
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])', Number.Float),
+ (r'16r[0-9a-fA-F]+', Number.Hex),
+ (r'8r[0-7]+', Number.Oct),
+ (r'((([1-3]\d)|([2-9]))r)?(\d+)', Number.Integer),
+ (r'[()\[\],.]', Punctuation),
+ (r'[~!%^&*+=|?:<>/-]|(->)|(<-)|(=>)|(::)', Operator),
+ (r'(alt|break|case|continue|cyclic|do|else|exit'
+ r'for|hd|if|implement|import|include|len|load|or'
+ r'pick|return|spawn|tagof|tl|to|while)\b', Keyword),
+ (r'(byte|int|big|real|string|array|chan|list|adt'
+ r'|fn|ref|of|module|self|type)\b', Keyword.Type),
+ (r'(con|iota|nil)\b', Keyword.Constant),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- default('statement'),
- ],
- }
-
- def analyse_text(text):
- # Any limbo module implements something
- if re.search(r'^implement \w+;', text, re.MULTILINE):
- return 0.7
-
-# TODO:
-# - Make lexers for:
-# - asm sources
-# - man pages
-# - mkfiles
-# - module definitions
-# - namespace definitions
-# - shell scripts
-# - maybe keyfiles and fonts
-# they all seem to be quite similar to their equivalents
-# from unix world, so there should not be a lot of problems
+ ],
+ 'statement' : [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'root': [
+ include('whitespace'),
+ default('statement'),
+ ],
+ }
+
+ def analyse_text(text):
+ # Any limbo module implements something
+ if re.search(r'^implement \w+;', text, re.MULTILINE):
+ return 0.7
+
+# TODO:
+# - Make lexers for:
+# - asm sources
+# - man pages
+# - mkfiles
+# - module definitions
+# - namespace definitions
+# - shell scripts
+# - maybe keyfiles and fonts
+# they all seem to be quite similar to their equivalents
+# from unix world, so there should not be a lot of problems
diff --git a/contrib/python/Pygments/py2/pygments/lexers/installers.py b/contrib/python/Pygments/py2/pygments/lexers/installers.py
index 8c8c39c860..ad24530e1e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/installers.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/installers.py
@@ -1,322 +1,322 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.installers
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for installer/packager DSLs and formats.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.installers
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for installer/packager DSLs and formats.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation, Generic, Number, Whitespace
-
-__all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
- 'DebianControlLexer']
-
-
-class NSISLexer(RegexLexer):
- """
- For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
-
- .. versionadded:: 1.6
- """
- name = 'NSIS'
- aliases = ['nsis', 'nsi', 'nsh']
- filenames = ['*.nsi', '*.nsh']
- mimetypes = ['text/x-nsis']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'[;#].*\n', Comment),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'str_double'),
- (r'`', String.Backtick, 'str_backtick'),
- include('macro'),
- include('interpol'),
- include('basic'),
- (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
- (r'/[a-z_]\w*', Name.Attribute),
- ('.', Text),
- ],
- 'basic': [
- (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
- bygroups(Text, Keyword, Text, Name.Function)),
- (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
- bygroups(Keyword.Namespace, Punctuation, Name.Function)),
- (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'(\b[ULS]|\B)([!<>=]?=|\<\>?|\>)\B', Operator),
- (r'[|+-]', Operator),
- (r'\\', Punctuation),
- (r'\b(Abort|Add(?:BrandingImage|Size)|'
- r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
- r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
- r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
- r'ComponentText|CopyFiles|CRCCheck|'
- r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
- r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
- r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
- r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
- r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
- r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
- r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
- r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
- r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
- r'InstDirError|LabelAddress|TempFileName)|'
- r'Goto|HideWindow|Icon|'
- r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
- r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
- r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
- r'IsWindow|LangString(?:UP)?|'
- r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
- r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
- r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
- r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
- r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
- r'Return|RMDir|SearchPath|Section(?:Divider|End|'
- r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
- r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
- r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
- r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
- r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
- r'Silent|StaticBkColor)|'
- r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
- r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
- r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
- r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
- r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
- r'XPStyle)\b', Keyword),
- (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
- r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
- r'HK(CC|CR|CU|DD|LM|PD|U)|'
- r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
- r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
- r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
- r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
- r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
- r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
- r'YESNO(?:CANCEL)?)|SET|SHCTX|'
- r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
- r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
- r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
- r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
- r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
- r'true|try|user|zlib)\b', Name.Constant),
- ],
- 'macro': [
- (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
- r'delfilefile|echo(?:message)?|else|endif|error|execute|'
- r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
- r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
- r'warning)\b', Comment.Preproc),
- ],
- 'interpol': [
- (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
- (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
- r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
- r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
- r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
- r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
- r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
- Name.Builtin),
- (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
- (r'\$[a-z_]\w*', Name.Variable),
- ],
- 'str_double': [
- (r'"', String, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'str_backtick': [
- (r'`', String, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- }
-
-
-class RPMSpecLexer(RegexLexer):
- """
- For RPM ``.spec`` files.
-
- .. versionadded:: 1.6
- """
-
- name = 'RPMSpec'
- aliases = ['spec']
- filenames = ['*.spec']
- mimetypes = ['text/x-rpm-spec']
-
- _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
- 'post[a-z]*|trigger[a-z]*|files)')
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment),
- include('basic'),
- ],
- 'description': [
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'changelog': [
- (r'\*.*\n', Generic.Subheading),
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'basic': [
- include('macro'),
- (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
- r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
- r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
- r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
- bygroups(Generic.Heading, Punctuation, using(this))),
- (r'^%description', Name.Decorator, 'description'),
- (r'^%changelog', Name.Decorator, 'changelog'),
- (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
- (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
- r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
- Keyword),
- include('interpol'),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'string'),
- (r'.', Text),
- ],
- 'macro': [
- (r'%define.*\n', Comment.Preproc),
- (r'%\{\!\?.*%define.*\}', Comment.Preproc),
- (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
- bygroups(Comment.Preproc, Text)),
- ],
- 'interpol': [
- (r'%\{?__[a-z_]+\}?', Name.Function),
- (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
- (r'%\{\?\w+\}', Name.Variable),
- (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
- (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
- ]
- }
-
-
-class SourcesListLexer(RegexLexer):
- """
- Lexer that highlights debian sources.list files.
-
- .. versionadded:: 0.7
- """
-
- name = 'Debian Sourcelist'
- aliases = ['sourceslist', 'sources.list', 'debsources']
- filenames = ['sources.list']
- mimetype = ['application/x-debian-sourceslist']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment),
- (r'^(deb(?:-src)?)(\s+)',
- bygroups(Keyword, Text), 'distribution')
- ],
- 'distribution': [
- (r'#.*?$', Comment, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\s$[]+', String),
- (r'\[', String.Other, 'escaped-distribution'),
- (r'\$', String),
- (r'\s+', Text, 'components')
- ],
- 'escaped-distribution': [
- (r'\]', String.Other, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\]$]+', String.Other),
- (r'\$', String.Other)
- ],
- 'components': [
- (r'#.*?$', Comment, '#pop:2'),
- (r'$', Text, '#pop:2'),
- (r'\s+', Text),
- (r'\S+', Keyword.Pseudo),
- ]
- }
-
- def analyse_text(text):
- for line in text.splitlines():
- line = line.strip()
- if line.startswith('deb ') or line.startswith('deb-src '):
- return True
-
-
-class DebianControlLexer(RegexLexer):
- """
- Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
-
- .. versionadded:: 0.9
- """
- name = 'Debian Control file'
- aliases = ['control', 'debcontrol']
- filenames = ['control']
-
- tokens = {
- 'root': [
- (r'^(Description)', Keyword, 'description'),
- (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
- (r'^((Build-)?Depends)', Keyword, 'depends'),
- (r'^((?:Python-)?Version)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
- bygroups(Keyword, Whitespace, String)),
- ],
- 'maintainer': [
- (r'<[^>]+>', Generic.Strong),
- (r'<[^>]+>$', Generic.Strong, '#pop'),
- (r',\n?', Text),
- (r'.', Text),
- ],
- 'description': [
- (r'(.*)(Homepage)(: )(\S+)',
- bygroups(Text, String, Name, Name.Class)),
- (r':.*\n', Generic.Strong),
- (r' .*\n', Text),
- default('#pop'),
- ],
- 'depends': [
- (r':\s*', Text),
- (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
- (r'\(', Text, 'depend_vers'),
- (r',', Text),
- (r'\|', Operator),
- (r'[\s]+', Text),
- (r'[})]\s*$', Text, '#pop'),
- (r'\}', Text),
- (r'[^,]$', Name.Function, '#pop'),
- (r'([+.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
- (r'\[.*?\]', Name.Entity),
- ],
- 'depend_vers': [
- (r'\),', Text, '#pop'),
- (r'\)[^,]', Text, '#pop:2'),
- (r'([><=]+)(\s*)([^)]+)', bygroups(Operator, Text, Number))
- ]
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation, Generic, Number, Whitespace
+
+__all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
+ 'DebianControlLexer']
+
+
+class NSISLexer(RegexLexer):
+ """
+ For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
+
+ .. versionadded:: 1.6
+ """
+ name = 'NSIS'
+ aliases = ['nsis', 'nsi', 'nsh']
+ filenames = ['*.nsi', '*.nsh']
+ mimetypes = ['text/x-nsis']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'[;#].*\n', Comment),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'str_double'),
+ (r'`', String.Backtick, 'str_backtick'),
+ include('macro'),
+ include('interpol'),
+ include('basic'),
+ (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
+ (r'/[a-z_]\w*', Name.Attribute),
+ ('.', Text),
+ ],
+ 'basic': [
+ (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
+ bygroups(Text, Keyword, Text, Name.Function)),
+ (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
+ bygroups(Keyword.Namespace, Punctuation, Name.Function)),
+ (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
+ (r'(\b[ULS]|\B)([!<>=]?=|\<\>?|\>)\B', Operator),
+ (r'[|+-]', Operator),
+ (r'\\', Punctuation),
+ (r'\b(Abort|Add(?:BrandingImage|Size)|'
+ r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
+ r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
+ r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
+ r'ComponentText|CopyFiles|CRCCheck|'
+ r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
+ r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
+ r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
+ r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
+ r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
+ r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
+ r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
+ r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
+ r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
+ r'InstDirError|LabelAddress|TempFileName)|'
+ r'Goto|HideWindow|Icon|'
+ r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
+ r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
+ r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
+ r'IsWindow|LangString(?:UP)?|'
+ r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
+ r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
+ r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
+ r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
+ r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
+ r'Return|RMDir|SearchPath|Section(?:Divider|End|'
+ r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
+ r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
+ r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
+ r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
+ r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
+ r'Silent|StaticBkColor)|'
+ r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
+ r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
+ r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
+ r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
+ r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
+ r'XPStyle)\b', Keyword),
+ (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
+ r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
+ r'HK(CC|CR|CU|DD|LM|PD|U)|'
+ r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
+ r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
+ r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
+ r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
+ r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
+ r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
+ r'YESNO(?:CANCEL)?)|SET|SHCTX|'
+ r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
+ r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
+ r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
+ r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
+ r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
+ r'true|try|user|zlib)\b', Name.Constant),
+ ],
+ 'macro': [
+ (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
+ r'delfilefile|echo(?:message)?|else|endif|error|execute|'
+ r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
+ r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
+ r'warning)\b', Comment.Preproc),
+ ],
+ 'interpol': [
+ (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
+ (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
+ r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
+ r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
+ r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
+ r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
+ r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
+ Name.Builtin),
+ (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
+ (r'\$[a-z_]\w*', Name.Variable),
+ ],
+ 'str_double': [
+ (r'"', String, '#pop'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ 'str_backtick': [
+ (r'`', String, '#pop'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ }
+
+
+class RPMSpecLexer(RegexLexer):
+ """
+ For RPM ``.spec`` files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'RPMSpec'
+ aliases = ['spec']
+ filenames = ['*.spec']
+ mimetypes = ['text/x-rpm-spec']
+
+ _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
+ 'post[a-z]*|trigger[a-z]*|files)')
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment),
+ include('basic'),
+ ],
+ 'description': [
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'changelog': [
+ (r'\*.*\n', Generic.Subheading),
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ 'basic': [
+ include('macro'),
+ (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
+ r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
+ r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
+ r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
+ bygroups(Generic.Heading, Punctuation, using(this))),
+ (r'^%description', Name.Decorator, 'description'),
+ (r'^%changelog', Name.Decorator, 'changelog'),
+ (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
+ (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
+ r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
+ Keyword),
+ include('interpol'),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'string'),
+ (r'.', Text),
+ ],
+ 'macro': [
+ (r'%define.*\n', Comment.Preproc),
+ (r'%\{\!\?.*%define.*\}', Comment.Preproc),
+ (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
+ bygroups(Comment.Preproc, Text)),
+ ],
+ 'interpol': [
+ (r'%\{?__[a-z_]+\}?', Name.Function),
+ (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
+ (r'%\{\?\w+\}', Name.Variable),
+ (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
+ (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
+ ]
+ }
+
+
+class SourcesListLexer(RegexLexer):
+ """
+ Lexer that highlights Debian sources.list files.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Debian Sourcelist'
+ aliases = ['sourceslist', 'sources.list', 'debsources']
+ filenames = ['sources.list']
+ mimetypes = ['application/x-debian-sourceslist']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment),
+ (r'^(deb(?:-src)?)(\s+)',
+ bygroups(Keyword, Text), 'distribution')
+ ],
+ 'distribution': [
+ (r'#.*?$', Comment, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\s$[]+', String),
+ (r'\[', String.Other, 'escaped-distribution'),
+ (r'\$', String),
+ (r'\s+', Text, 'components')
+ ],
+ 'escaped-distribution': [
+ (r'\]', String.Other, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\]$]+', String.Other),
+ (r'\$', String.Other)
+ ],
+ 'components': [
+ (r'#.*?$', Comment, '#pop:2'),
+ (r'$', Text, '#pop:2'),
+ (r'\s+', Text),
+ (r'\S+', Keyword.Pseudo),
+ ]
+ }
+
+ def analyse_text(text):
+ for line in text.splitlines():
+ line = line.strip()
+ if line.startswith('deb ') or line.startswith('deb-src '):
+ return True
+
+
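The analyse_text hook above is what lets Pygments pick this lexer when guessing from raw content: any stripped line beginning with 'deb ' or 'deb-src ' is taken as a sources.list entry. A minimal sketch of exercising that guess (the sample repository line is illustrative only):

    from pygments.lexers import guess_lexer

    sample = "deb http://deb.debian.org/debian bullseye main contrib"
    lexer = guess_lexer(sample)
    # analyse_text() returning True gives this lexer a top score, so the
    # guess should normally resolve to the Debian Sourcelist lexer.
    print(lexer.name)
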
+class DebianControlLexer(RegexLexer):
+ """
+ Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Debian Control file'
+ aliases = ['control', 'debcontrol']
+ filenames = ['control']
+
+ tokens = {
+ 'root': [
+ (r'^(Description)', Keyword, 'description'),
+ (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
+ (r'^((Build-)?Depends)', Keyword, 'depends'),
+ (r'^((?:Python-)?Version)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
+ bygroups(Keyword, Whitespace, String)),
+ ],
+ 'maintainer': [
+ (r'<[^>]+>', Generic.Strong),
+ (r'<[^>]+>$', Generic.Strong, '#pop'),
+ (r',\n?', Text),
+ (r'.', Text),
+ ],
+ 'description': [
+ (r'(.*)(Homepage)(: )(\S+)',
+ bygroups(Text, String, Name, Name.Class)),
+ (r':.*\n', Generic.Strong),
+ (r' .*\n', Text),
+ default('#pop'),
+ ],
+ 'depends': [
+ (r':\s*', Text),
+ (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
+ (r'\(', Text, 'depend_vers'),
+ (r',', Text),
+ (r'\|', Operator),
+ (r'[\s]+', Text),
+ (r'[})]\s*$', Text, '#pop'),
+ (r'\}', Text),
+ (r'[^,]$', Name.Function, '#pop'),
+ (r'([+.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
+ (r'\[.*?\]', Name.Entity),
+ ],
+ 'depend_vers': [
+ (r'\),', Text, '#pop'),
+ (r'\)[^,]', Text, '#pop:2'),
+ (r'([><=]+)(\s*)([^)]+)', bygroups(Operator, Text, Number))
+ ]
+ }
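Each lexer in this module can also be requested explicitly through the aliases declared above ('spec', 'sourceslist'/'debsources', 'debcontrol', ...). A small usage sketch of the standard highlight() pipeline, with a made-up .spec fragment:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    # 'spec' is the alias registered by RPMSpecLexer above; the snippet is invented.
    spec_snippet = "Name: example\nVersion: 1.0\n\n%description\nAn example package.\n"
    print(highlight(spec_snippet, get_lexer_by_name('spec'), TerminalFormatter()))
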
diff --git a/contrib/python/Pygments/py2/pygments/lexers/int_fiction.py b/contrib/python/Pygments/py2/pygments/lexers/int_fiction.py
index 89d5bccc85..5eaf138b74 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/int_fiction.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/int_fiction.py
@@ -1,1343 +1,1343 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.int_fiction
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for interactive fiction languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.int_fiction
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for interactive fiction languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Generic
-
-__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
- 'Tads3Lexer']
-
-
-class Inform6Lexer(RegexLexer):
- """
- For `Inform 6 <http://inform-fiction.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6'
- aliases = ['inform6', 'i6']
- filenames = ['*.inf']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- _name = r'[a-zA-Z_]\w*'
-
- # Inform 7 maps these four character classes to their ASCII
- # equivalents. To support Inform 6 inclusions within Inform 7,
- # Inform6Lexer maps them too.
- _dash = u'\\-\u2010-\u2014'
- _dquote = u'"\u201c\u201d'
- _squote = u"'\u2018\u2019"
- _newline = u'\\n\u0085\u2028\u2029'
-
- tokens = {
- 'root': [
- (r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
- 'directive'),
- default('directive')
- ],
- '_whitespace': [
- (r'\s+', Text),
- (r'![^%s]*' % _newline, Comment.Single)
- ],
- 'default': [
- include('_whitespace'),
- (r'\[', Punctuation, 'many-values'), # Array initialization
- (r':|(?=;)', Punctuation, '#pop'),
- (r'<', Punctuation), # Second angle bracket in an action statement
- default(('expression', '_expression'))
- ],
-
- # Expressions
- '_expression': [
- include('_whitespace'),
- (r'(?=sp\b)', Text, '#pop'),
- (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
- ('#pop', 'value')),
- (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
- (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
- ],
- 'expression': [
- include('_whitespace'),
- (r'\(', Punctuation, ('expression', '_expression')),
- (r'\)', Punctuation, '#pop'),
- (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
- (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
- (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
- (r',', Punctuation, '_expression'),
- (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
- Operator, '_expression'),
- (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
- '_expression'),
- (r'sp\b', Name),
- (r'\?~?', Name.Label, 'label?'),
- (r'[@{]', Error),
- default('#pop')
- ],
- '_assembly-expression': [
- (r'\(', Punctuation, ('#push', '_expression')),
- (r'[\[\]]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, '_expression'),
- (r'sp\b', Keyword.Pseudo),
- (r';', Punctuation, '#pop:3'),
- include('expression')
- ],
- '_for-expression': [
- (r'\)', Punctuation, '#pop:2'),
- (r':', Punctuation, '#pop'),
- include('expression')
- ],
- '_keyword-expression': [
- (r'(from|near|to)\b', Keyword, '_expression'),
- include('expression')
- ],
- '_list-expression': [
- (r',', Punctuation, '#pop'),
- include('expression')
- ],
- '_object-expression': [
- (r'has\b', Keyword.Declaration, '#pop'),
- include('_list-expression')
- ],
-
- # Values
- 'value': [
- include('_whitespace'),
- # Strings
- (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
- (r'([%s])(@\{[0-9a-fA-F]{1,4}\})([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'([%s])(@.{2})([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
- # Numbers
- (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
- Number.Float, '#pop'),
- (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'\$\$[01]+', Number.Bin, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
- # Values prefixed by hashes
- (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
- (r'(#g\$)(%s)' % _name,
- bygroups(Operator, Name.Variable.Global), '#pop'),
- (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
- (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
- (r'#', Name.Builtin, ('#pop', 'system-constant')),
- # System functions
- (words((
- 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
- 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Metaclasses
- (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
- # Veneer routines
- (words((
- 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
- 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
- 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
- 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
- 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
- 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
- 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
- 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
- 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
- prefix='(?i)', suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Other built-in symbols
- (words((
- 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
- 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false',
- 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN',
- 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
- 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
- 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
- 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
- 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
- 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
- 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
- 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
- prefix='(?i)', suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Other values
- (_name, Name, '#pop')
- ],
- # Strings
- 'dictionary-word': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _squote, String.Single),
- (r'[({]', String.Single),
- (r'@\{[0-9a-fA-F]{,4}\}', String.Escape),
- (r'@.{2}', String.Escape),
- (r'[%s]' % _squote, String.Single, '#pop')
- ],
- 'string': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _dquote, String.Double),
- (r'[({]', String.Double),
- (r'\\', String.Escape),
- (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
- (_newline, _newline), String.Escape),
- (r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}'
- r'(\\\s*[%s]\s*)*\}' % (_newline, _newline, _newline),
- String.Escape),
- (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
- String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- 'plain-string': [
- (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
- (r'[~^({\[\]]', String.Double),
- (r'\\', String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- # Names
- '_constant': [
- include('_whitespace'),
- (_name, Name.Constant, '#pop'),
- include('value')
- ],
- '_global': [
- include('_whitespace'),
- (_name, Name.Variable.Global, '#pop'),
- include('value')
- ],
- 'label?': [
- include('_whitespace'),
- (_name, Name.Label, '#pop'),
- default('#pop')
- ],
- 'variable?': [
- include('_whitespace'),
- (_name, Name.Variable, '#pop'),
- default('#pop')
- ],
- # Values after hashes
- 'obsolete-dictionary-word': [
- (r'\S\w*', String.Other, '#pop')
- ],
- 'system-constant': [
- include('_whitespace'),
- (_name, Name.Builtin, '#pop')
- ],
-
- # Directives
- 'directive': [
- include('_whitespace'),
- (r'#', Punctuation),
- (r';', Punctuation, '#pop'),
- (r'\[', Punctuation,
- ('default', 'statements', 'locals', 'routine-name?')),
- (words((
- 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
- 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
- 'version'), prefix='(?i)', suffix=r'\b'),
- Keyword, 'default'),
- (r'(?i)(array|global)\b', Keyword,
- ('default', 'directive-keyword?', '_global')),
- (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
- (r'(?i)class\b', Keyword,
- ('object-body', 'duplicates', 'class-name')),
- (r'(?i)(constant|default)\b', Keyword,
- ('default', 'expression', '_constant')),
- (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
- (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
- (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
- (r'(?i)import\b', Keyword, 'manifest'),
- (r'(?i)(include|link)\b', Keyword,
- ('default', 'before-plain-string')),
- (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
- (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
- (r'(?i)(nearby|object)\b', Keyword,
- ('object-body', '_object-head')),
- (r'(?i)property\b', Keyword,
- ('default', 'alias?', '_constant', 'property-keyword*')),
- (r'(?i)replace\b', Keyword,
- ('default', 'routine-name?', 'routine-name?')),
- (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
- (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
- (r'(?i)trace\b', Keyword,
- ('default', 'trace-keyword?', 'trace-keyword?')),
- (r'(?i)zcharacter\b', Keyword,
- ('default', 'directive-keyword?', 'directive-keyword?')),
- (_name, Name.Class, ('object-body', '_object-head'))
- ],
- # [, Replace, Stub
- 'routine-name?': [
- include('_whitespace'),
- (_name, Name.Function, '#pop'),
- default('#pop')
- ],
- 'locals': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'\*', Punctuation),
- (r'"', String.Double, 'plain-string'),
- (_name, Name.Variable)
- ],
- # Array
- 'many-values': [
- include('_whitespace'),
- (r';', Punctuation),
- (r'\]', Punctuation, '#pop'),
- (r':', Error),
- default(('expression', '_expression'))
- ],
- # Attribute, Property
- 'alias?': [
- include('_whitespace'),
- (r'alias\b', Keyword, ('#pop', '_constant')),
- default('#pop')
- ],
- # Class, Object, Nearby
- 'class-name': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class, '#pop')
- ],
- 'duplicates': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
- default('#pop')
- ],
- '_object-head': [
- (r'[%s]>' % _dash, Punctuation),
- (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
- include('_global')
- ],
- 'object-body': [
- include('_whitespace'),
- (r';', Punctuation, '#pop:2'),
- (r',', Punctuation),
- (r'class\b', Keyword.Declaration, 'class-segment'),
- (r'(has|private|with)\b', Keyword.Declaration),
- (r':', Error),
- default(('_object-expression', '_expression'))
- ],
- 'class-segment': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class),
- default('value')
- ],
- # Extend, Verb
- 'grammar': [
- include('_whitespace'),
- (r'=', Punctuation, ('#pop', 'default')),
- (r'\*', Punctuation, ('#pop', 'grammar-line')),
- default('_directive-keyword')
- ],
- 'grammar-line': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'[/*]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, 'value'),
- (r'(noun|scope)\b', Keyword, '=routine'),
- default('_directive-keyword')
- ],
- '=routine': [
- include('_whitespace'),
- (r'=', Punctuation, 'routine-name?'),
- default('#pop')
- ],
- # Import
- 'manifest': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(?i)global\b', Keyword, '_global'),
- default('_global')
- ],
- # Include, Link, Message
- 'diagnostic': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
- default(('#pop', 'before-plain-string', 'directive-keyword?'))
- ],
- 'before-plain-string': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string'))
- ],
- 'message-string': [
- (r'[~^]+', String.Escape),
- include('plain-string')
- ],
-
- # Keywords used in directives
- '_directive-keyword!': [
- include('_whitespace'),
- (words((
- 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
- 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi',
- 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private',
- 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating',
- 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
- Keyword, '#pop'),
- (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
- ],
- '_directive-keyword': [
- include('_directive-keyword!'),
- include('value')
- ],
- 'directive-keyword?': [
- include('_directive-keyword!'),
- default('#pop')
- ],
- 'property-keyword*': [
- include('_whitespace'),
- (r'(additive|long)\b', Keyword),
- default('#pop')
- ],
- 'trace-keyword?': [
- include('_whitespace'),
- (words((
- 'assembly', 'dictionary', 'expressions', 'lines', 'linker',
- 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
- Keyword, '#pop'),
- default('#pop')
- ],
-
- # Statements
- 'statements': [
- include('_whitespace'),
- (r'\]', Punctuation, '#pop'),
- (r'[;{}]', Punctuation),
- (words((
- 'box', 'break', 'continue', 'default', 'give', 'inversion',
- 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
- 'spaces', 'string', 'until'), suffix=r'\b'),
- Keyword, 'default'),
- (r'(do|else)\b', Keyword),
- (r'(font|style)\b', Keyword,
- ('default', 'miscellaneous-keyword?')),
- (r'for\b', Keyword, ('for', '(?')),
- (r'(if|switch|while)', Keyword,
- ('expression', '_expression', '(?')),
- (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
- (r'objectloop\b', Keyword,
- ('_keyword-expression', 'variable?', '(?')),
- (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
- (r'\.', Name.Label, 'label?'),
- (r'@', Keyword, 'opcode'),
- (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
- (r'<', Punctuation, 'default'),
- (r'move\b', Keyword,
- ('default', '_keyword-expression', '_expression')),
- default(('default', '_keyword-expression', '_expression'))
- ],
- 'miscellaneous-keyword?': [
- include('_whitespace'),
- (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
- Keyword, '#pop'),
- (r'(a|A|an|address|char|name|number|object|property|string|the|'
- r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
- '#pop'),
- (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
- '#pop'),
- default('#pop')
- ],
- '(?': [
- include('_whitespace'),
- (r'\(', Punctuation, '#pop'),
- default('#pop')
- ],
- 'for': [
- include('_whitespace'),
- (r';', Punctuation, ('_for-expression', '_expression')),
- default(('_for-expression', '_expression'))
- ],
- 'print-list': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r':', Error),
- default(('_list-expression', '_expression', '_list-expression', 'form'))
- ],
- 'form': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
- default('#pop')
- ],
-
- # Assembly
- 'opcode': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
- (_name, Keyword, 'operands')
- ],
- 'operands': [
- (r':', Error),
- default(('_assembly-expression', '_expression'))
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # 'in' is either a keyword or an operator.
- # If the token two tokens after 'in' is ')', 'in' is a keyword:
- # objectloop(a in b)
- # Otherwise, it is an operator:
- # objectloop(a in b && true)
- objectloop_queue = []
- objectloop_token_count = -1
- previous_token = None
- for index, token, value in RegexLexer.get_tokens_unprocessed(self,
- text):
- if previous_token is Name.Variable and value == 'in':
- objectloop_queue = [[index, token, value]]
- objectloop_token_count = 2
- elif objectloop_token_count > 0:
- if token not in Comment and token not in Text:
- objectloop_token_count -= 1
- objectloop_queue.append((index, token, value))
- else:
- if objectloop_token_count == 0:
- if objectloop_queue[-1][2] == ')':
- objectloop_queue[0][1] = Keyword
- while objectloop_queue:
- yield objectloop_queue.pop(0)
- objectloop_token_count = -1
- yield index, token, value
- if token not in Comment and token not in Text:
- previous_token = token
- while objectloop_queue:
- yield objectloop_queue.pop(0)
-
-
-class Inform7Lexer(RegexLexer):
- """
- For `Inform 7 <http://inform7.com/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 7'
- aliases = ['inform7', 'i7']
- filenames = ['*.ni', '*.i7x']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- _dash = Inform6Lexer._dash
- _dquote = Inform6Lexer._dquote
- _newline = Inform6Lexer._newline
- _start = r'\A|(?<=[%s])' % _newline
-
- # There are three variants of Inform 7, differing in how to
- # interpret at signs and braces in I6T. In top-level inclusions, at
- # signs in the first column are inweb syntax. In phrase definitions
- # and use options, tokens in braces are treated as I7. Use options
- # also interpret "{N}".
- tokens = {}
- token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
-
- for level in token_variants:
- tokens[level] = {
- '+i6-root': list(Inform6Lexer.tokens['root']),
- '+i6t-root': [ # For Inform6TemplateLexer
- (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
- ('directive', '+p'))
- ],
- 'root': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]' % _dquote, Generic.Heading,
- ('+main', '+titling', '+titling-string')),
- default(('+main', '+heading?'))
- ],
- '+titling-string': [
- (r'[^%s]+' % _dquote, Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '#pop')
- ],
- '+titling': [
- (r'\[', Comment.Multiline, '+comment'),
- (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
- (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
- Text, ('#pop', '+heading?')),
- (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
- (r'[|%s]' % _newline, Generic.Heading)
- ],
- '+main': [
- (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
- (r'[%s]' % _dquote, String.Double, '+text'),
- (r':', Text, '+phrase-definition'),
- (r'(?i)\bas\b', Text, '+use-option'),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-not-inline'), Punctuation)),
- (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
- (_start, _dquote, _newline), Text, '+heading?'),
- (r'(?i)[a(|%s]' % _newline, Text)
- ],
- '+phrase-definition': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive',
- 'default', 'statements'),
- i6t='+i6t-inline'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+use-option': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-use-option'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+comment': [
- (r'[^\[\]]+', Comment.Multiline),
- (r'\[', Comment.Multiline, '#push'),
- (r'\]', Comment.Multiline, '#pop')
- ],
- '+text': [
- (r'[^\[%s]+' % _dquote, String.Double),
- (r'\[.*?\]', String.Interpol),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- '+heading?': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
- (r'[%s]{1,3}' % _dash, Text),
- (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
- Generic.Heading, '#pop'),
- default('#pop')
- ],
- '+documentation-heading': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(?i)documentation\s+', Text, '+documentation-heading2'),
- default('#pop')
- ],
- '+documentation-heading2': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s' % _dash, Text, '+documentation'),
- default('#pop:2')
- ],
- '+documentation': [
- (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
- (_start, _newline), Generic.Heading),
- (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
- Generic.Subheading),
- (r'((%s)\t.*?[%s])+' % (_start, _newline),
- using(this, state='+main')),
- (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
- (r'\[', Comment.Multiline, '+comment'),
- ],
- '+i6t-not-inline': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p')
- ],
- '+i6t-use-option': [
- include('+i6t-not-inline'),
- (r'(\{)(N)(\})', bygroups(Punctuation, Text, Punctuation))
- ],
- '+i6t-inline': [
- (r'(\{)(\S[^}]*)?(\})',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+i6t': [
- (r'(\{[%s])(![^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Comment.Single, Punctuation)),
- (r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation), '+lines'),
- (r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation)),
- (r'(\(\+)(.*?)(\+\)|\Z)',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+p': [
- (r'[^@]+', Comment.Preproc),
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc, '#pop'),
- (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading),
- (r'@', Comment.Preproc)
- ],
- '+lines': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p'),
- (r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
- (r'![^%s]*' % _newline, Comment.Single),
- (r'(\{)([%s]endlines)(\})' % _dash,
- bygroups(Punctuation, Keyword, Punctuation), '#pop'),
- (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
- ]
- }
- # Inform 7 can include snippets of Inform 6 template language,
- # so all of Inform6Lexer's states are copied here, with
- # modifications to account for template syntax. Inform7Lexer's
- # own states begin with '+' to avoid name conflicts. Some of
- # Inform6Lexer's states begin with '_': these are not modified.
- # They deal with template syntax either by including modified
- # states, or by matching r'' then pushing to modified states.
- for token in Inform6Lexer.tokens:
- if token == 'root':
- continue
- tokens[level][token] = list(Inform6Lexer.tokens[token])
- if not token.startswith('_'):
- tokens[level][token][:0] = [include('+i6t'), include(level)]
-
- def __init__(self, **options):
- level = options.get('i6t', '+i6t-not-inline')
- if level not in self._all_tokens:
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
- RegexLexer.__init__(self, **options)
-
-
-class Inform6TemplateLexer(Inform7Lexer):
- """
- For `Inform 6 template
- <http://inform7.com/sources/src/i6template/Woven/index.html>`_ code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6 template'
- aliases = ['i6t']
- filenames = ['*.i6t']
-
- def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
- return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
-
-
-class Tads3Lexer(RegexLexer):
- """
- For `TADS 3 <http://www.tads.org/>`_ source code.
- """
-
- name = 'TADS 3'
- aliases = ['tads3']
- filenames = ['*.t']
-
- flags = re.DOTALL | re.MULTILINE
-
- _comment_single = r'(?://(?:[^\\\n]|\\+[\w\W])*$)'
- _comment_multiline = r'(?:/\*(?:[^*]|\*(?!/))*\*/)'
- _escape = (r'(?:\\(?:[\n\\<>"\'^v bnrt]|u[\da-fA-F]{,4}|x[\da-fA-F]{,2}|'
- r'[0-3]?[0-7]{1,2}))')
- _name = r'(?:[_a-zA-Z]\w*)'
- _no_quote = r'(?=\s|\\?>)'
- _operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
- r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
- _ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
- _ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
-
- def _make_string_state(triple, double, verbatim=None, _escape=_escape):
- if verbatim:
- verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
- re.escape(c.upper()))
- for c in verbatim])
- char = r'"' if double else r"'"
- token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
- prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
- tag_state_name = '%sqt' % prefix
- state = []
- if triple:
- state += [
- (r'%s{3,}' % char, token, '#pop'),
- (r'\\%s+' % char, String.Escape),
- (char, token)
- ]
- else:
- state.append((char, token, '#pop'))
- state += [
- include('s/verbatim'),
- (r'[^\\<&{}%s]+' % char, token)
- ]
- if verbatim:
- # This regex can't use `(?i)` because escape sequences are
- # case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
- state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
- (_escape, verbatim),
- Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
- else:
- state += [
- (r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
- (char, char, escaped_quotes, _escape), Comment.Multiline),
- (r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/listing' % prefix, tag_state_name)),
- (r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/xmp' % prefix, tag_state_name)),
- (r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
- (char, char, escaped_quotes, _escape), Name.Tag,
- tag_state_name),
- include('s/entity')
- ]
- state += [
- include('s/escape'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
- (r'[\\&{}<]', token)
- ]
- return state
-
- def _make_tag_state(triple, double, _escape=_escape):
- char = r'"' if double else r"'"
- quantifier = r'{3,}' if triple else r''
- state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
- token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
- return [
- (r'%s%s' % (char, quantifier), token, '#pop:2'),
- (r'(\s|\\\n)+', Text),
- (r'(=)(\\?")', bygroups(Punctuation, String.Double),
- 'dqs/%s' % state_name),
- (r"(=)(\\?')", bygroups(Punctuation, String.Single),
- 'sqs/%s' % state_name),
- (r'=', Punctuation, 'uqs/%s' % state_name),
- (r'\\?>', Name.Tag, '#pop'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
- (r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
- (char, char, escaped_quotes, _escape), Name.Attribute),
- include('s/escape'),
- include('s/verbatim'),
- include('s/entity'),
- (r'[\\{}&]', Name.Attribute)
- ]
-
- def _make_attribute_value_state(terminator, host_triple, host_double,
- _escape=_escape):
- token = (String.Double if terminator == r'"' else
- String.Single if terminator == r"'" else String.Other)
- host_char = r'"' if host_double else r"'"
- host_quantifier = r'{3,}' if host_triple else r''
- host_token = String.Double if host_double else String.Single
- escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
- if host_triple else r'')
- return [
- (r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
- (r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
- token, '#pop'),
- include('s/verbatim'),
- include('s/entity'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (host_char, host_char, escaped_quotes, _escape), String.Interpol),
- (r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
- token),
- include('s/escape'),
- (r'["\'\s&{<}\\]', token)
- ]
-
- tokens = {
- 'root': [
- (u'\ufeff', Text),
- (r'\{', Punctuation, 'object-body'),
- (r';+', Punctuation),
- (r'(?=(argcount|break|case|catch|continue|default|definingobj|'
- r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
- r'invokee|local|nil|new|operator|replaced|return|self|switch|'
- r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
- (r'(%s)(%s*)(\()' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation),
- ('block?/root', 'more/parameters', 'main/parameters')),
- include('whitespace'),
- (r'\++', Punctuation),
- (r'[^\s!"%-(*->@-_a-z{-~]+', Error), # Averts an infinite loop
- (r'(?!\Z)', Text, 'main/root')
- ],
- 'main/root': [
- include('main/basic'),
- default(('#pop', 'object-body/no-braces', 'classes', 'class'))
- ],
- 'object-body/no-braces': [
- (r';', Punctuation, '#pop'),
- (r'\{', Punctuation, ('#pop', 'object-body')),
- include('object-body')
- ],
- 'object-body': [
- (r';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (r':', Punctuation, ('classes', 'class')),
- (r'(%s?)(%s*)(\()' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation),
- ('block?', 'more/parameters', 'main/parameters')),
- (r'(%s)(%s*)(\{)' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation), 'block'),
- (r'(%s)(%s*)(:)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace'),
- Punctuation),
- ('object-body/no-braces', 'classes', 'class')),
- include('whitespace'),
- (r'->|%s' % _operator, Punctuation, 'main'),
- default('main/object-body')
- ],
- 'main/object-body': [
- include('main/basic'),
- (r'(%s)(%s*)(=?)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace'),
- Punctuation), ('#pop', 'more', 'main')),
- default('#pop:2')
- ],
- 'block?/root': [
- (r'\{', Punctuation, ('#pop', 'block')),
- include('whitespace'),
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+
+__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
+ 'Tads3Lexer']
+
+
+class Inform6Lexer(RegexLexer):
+ """
+ For `Inform 6 <http://inform-fiction.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6'
+ aliases = ['inform6', 'i6']
+ filenames = ['*.inf']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _name = r'[a-zA-Z_]\w*'
+
+ # Inform 7 maps these four character classes to their ASCII
+ # equivalents. To support Inform 6 inclusions within Inform 7,
+ # Inform6Lexer maps them too.
+ _dash = u'\\-\u2010-\u2014'
+ _dquote = u'"\u201c\u201d'
+ _squote = u"'\u2018\u2019"
+ _newline = u'\\n\u0085\u2028\u2029'
+
+ tokens = {
+ 'root': [
+ (r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
+ 'directive'),
+ default('directive')
+ ],
+ '_whitespace': [
+ (r'\s+', Text),
+ (r'![^%s]*' % _newline, Comment.Single)
+ ],
+ 'default': [
+ include('_whitespace'),
+ (r'\[', Punctuation, 'many-values'), # Array initialization
+ (r':|(?=;)', Punctuation, '#pop'),
+ (r'<', Punctuation), # Second angle bracket in an action statement
+ default(('expression', '_expression'))
+ ],
+
+ # Expressions
+ '_expression': [
+ include('_whitespace'),
+ (r'(?=sp\b)', Text, '#pop'),
+ (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
+ ('#pop', 'value')),
+ (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
+ (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
+ ],
+ 'expression': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('expression', '_expression')),
+ (r'\)', Punctuation, '#pop'),
+ (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
+ (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
+ (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
+ (r',', Punctuation, '_expression'),
+ (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
+ Operator, '_expression'),
+ (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
+ '_expression'),
+ (r'sp\b', Name),
+ (r'\?~?', Name.Label, 'label?'),
+ (r'[@{]', Error),
+ default('#pop')
+ ],
+ '_assembly-expression': [
+ (r'\(', Punctuation, ('#push', '_expression')),
+ (r'[\[\]]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, '_expression'),
+ (r'sp\b', Keyword.Pseudo),
+ (r';', Punctuation, '#pop:3'),
+ include('expression')
+ ],
+ '_for-expression': [
+ (r'\)', Punctuation, '#pop:2'),
+ (r':', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_keyword-expression': [
+ (r'(from|near|to)\b', Keyword, '_expression'),
+ include('expression')
+ ],
+ '_list-expression': [
+ (r',', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_object-expression': [
+ (r'has\b', Keyword.Declaration, '#pop'),
+ include('_list-expression')
+ ],
+
+ # Values
+ 'value': [
+ include('_whitespace'),
+ # Strings
+ (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
+ (r'([%s])(@\{[0-9a-fA-F]{1,4}\})([%s])' % (_squote, _squote),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'([%s])(@.{2})([%s])' % (_squote, _squote),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
+ # Numbers
+ (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
+ Number.Float, '#pop'),
+ (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'\$\$[01]+', Number.Bin, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+ # Values prefixed by hashes
+ (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
+ (r'(#g\$)(%s)' % _name,
+ bygroups(Operator, Name.Variable.Global), '#pop'),
+ (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
+ (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
+ (r'#', Name.Builtin, ('#pop', 'system-constant')),
+ # System functions
+ (words((
+ 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
+ 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Metaclasses
+ (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
+ # Veneer routines
+ (words((
+ 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
+ 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
+ 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
+ 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
+ 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
+ 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
+ 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
+ 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
+ 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other built-in symbols
+ (words((
+ 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
+ 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false',
+ 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN',
+ 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
+ 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
+ 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
+ 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
+ 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
+ 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
+ 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
+ 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other values
+ (_name, Name, '#pop')
+ ],
+ # Strings
+ 'dictionary-word': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _squote, String.Single),
+ (r'[({]', String.Single),
+ (r'@\{[0-9a-fA-F]{,4}\}', String.Escape),
+ (r'@.{2}', String.Escape),
+ (r'[%s]' % _squote, String.Single, '#pop')
+ ],
+ 'string': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _dquote, String.Double),
+ (r'[({]', String.Double),
+ (r'\\', String.Escape),
+ (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
+ (_newline, _newline), String.Escape),
+ (r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}'
+ r'(\\\s*[%s]\s*)*\}' % (_newline, _newline, _newline),
+ String.Escape),
+ (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
+ String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ 'plain-string': [
+ (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
+ (r'[~^({\[\]]', String.Double),
+ (r'\\', String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ # Names
+ '_constant': [
+ include('_whitespace'),
+ (_name, Name.Constant, '#pop'),
+ include('value')
+ ],
+ '_global': [
+ include('_whitespace'),
+ (_name, Name.Variable.Global, '#pop'),
+ include('value')
+ ],
+ 'label?': [
+ include('_whitespace'),
+ (_name, Name.Label, '#pop'),
+ default('#pop')
+ ],
+ 'variable?': [
+ include('_whitespace'),
+ (_name, Name.Variable, '#pop'),
+ default('#pop')
+ ],
+ # Values after hashes
+ 'obsolete-dictionary-word': [
+ (r'\S\w*', String.Other, '#pop')
+ ],
+ 'system-constant': [
+ include('_whitespace'),
+ (_name, Name.Builtin, '#pop')
+ ],
+
+ # Directives
+ 'directive': [
+ include('_whitespace'),
+ (r'#', Punctuation),
+ (r';', Punctuation, '#pop'),
+ (r'\[', Punctuation,
+ ('default', 'statements', 'locals', 'routine-name?')),
+ (words((
+ 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
+ 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
+ 'version'), prefix='(?i)', suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(?i)(array|global)\b', Keyword,
+ ('default', 'directive-keyword?', '_global')),
+ (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
+ (r'(?i)class\b', Keyword,
+ ('object-body', 'duplicates', 'class-name')),
+ (r'(?i)(constant|default)\b', Keyword,
+ ('default', 'expression', '_constant')),
+ (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
+ (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
+ (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
+ (r'(?i)import\b', Keyword, 'manifest'),
+ (r'(?i)(include|link)\b', Keyword,
+ ('default', 'before-plain-string')),
+ (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
+ (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
+ (r'(?i)(nearby|object)\b', Keyword,
+ ('object-body', '_object-head')),
+ (r'(?i)property\b', Keyword,
+ ('default', 'alias?', '_constant', 'property-keyword*')),
+ (r'(?i)replace\b', Keyword,
+ ('default', 'routine-name?', 'routine-name?')),
+ (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
+ (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
+ (r'(?i)trace\b', Keyword,
+ ('default', 'trace-keyword?', 'trace-keyword?')),
+ (r'(?i)zcharacter\b', Keyword,
+ ('default', 'directive-keyword?', 'directive-keyword?')),
+ (_name, Name.Class, ('object-body', '_object-head'))
+ ],
+ # [, Replace, Stub
+ 'routine-name?': [
+ include('_whitespace'),
+ (_name, Name.Function, '#pop'),
+ default('#pop')
+ ],
+ 'locals': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'\*', Punctuation),
+ (r'"', String.Double, 'plain-string'),
+ (_name, Name.Variable)
+ ],
+ # Array
+ 'many-values': [
+ include('_whitespace'),
+ (r';', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ (r':', Error),
+ default(('expression', '_expression'))
+ ],
+ # Attribute, Property
+ 'alias?': [
+ include('_whitespace'),
+ (r'alias\b', Keyword, ('#pop', '_constant')),
+ default('#pop')
+ ],
+ # Class, Object, Nearby
+ 'class-name': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class, '#pop')
+ ],
+ 'duplicates': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
+ default('#pop')
+ ],
+ '_object-head': [
+ (r'[%s]>' % _dash, Punctuation),
+ (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
+ include('_global')
+ ],
+ 'object-body': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop:2'),
+ (r',', Punctuation),
+ (r'class\b', Keyword.Declaration, 'class-segment'),
+ (r'(has|private|with)\b', Keyword.Declaration),
+ (r':', Error),
+ default(('_object-expression', '_expression'))
+ ],
+ 'class-segment': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class),
+ default('value')
+ ],
+ # Extend, Verb
+ 'grammar': [
+ include('_whitespace'),
+ (r'=', Punctuation, ('#pop', 'default')),
+ (r'\*', Punctuation, ('#pop', 'grammar-line')),
+ default('_directive-keyword')
+ ],
+ 'grammar-line': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'[/*]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, 'value'),
+ (r'(noun|scope)\b', Keyword, '=routine'),
+ default('_directive-keyword')
+ ],
+ '=routine': [
+ include('_whitespace'),
+ (r'=', Punctuation, 'routine-name?'),
+ default('#pop')
+ ],
+ # Import
+ 'manifest': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(?i)global\b', Keyword, '_global'),
+ default('_global')
+ ],
+ # Include, Link, Message
+ 'diagnostic': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
+ default(('#pop', 'before-plain-string', 'directive-keyword?'))
+ ],
+ 'before-plain-string': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string'))
+ ],
+ 'message-string': [
+ (r'[~^]+', String.Escape),
+ include('plain-string')
+ ],
+
+ # Keywords used in directives
+ '_directive-keyword!': [
+ include('_whitespace'),
+ (words((
+ 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
+ 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi',
+ 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private',
+ 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating',
+ 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
+ Keyword, '#pop'),
+ (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
+ ],
+ '_directive-keyword': [
+ include('_directive-keyword!'),
+ include('value')
+ ],
+ 'directive-keyword?': [
+ include('_directive-keyword!'),
+ default('#pop')
+ ],
+ 'property-keyword*': [
+ include('_whitespace'),
+ (r'(additive|long)\b', Keyword),
+ default('#pop')
+ ],
+ 'trace-keyword?': [
+ include('_whitespace'),
+ (words((
+ 'assembly', 'dictionary', 'expressions', 'lines', 'linker',
+ 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
+ Keyword, '#pop'),
+ default('#pop')
+ ],
+
+ # Statements
+ 'statements': [
+ include('_whitespace'),
+ (r'\]', Punctuation, '#pop'),
+ (r'[;{}]', Punctuation),
+ (words((
+ 'box', 'break', 'continue', 'default', 'give', 'inversion',
+ 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
+ 'spaces', 'string', 'until'), suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(do|else)\b', Keyword),
+ (r'(font|style)\b', Keyword,
+ ('default', 'miscellaneous-keyword?')),
+ (r'for\b', Keyword, ('for', '(?')),
+ (r'(if|switch|while)', Keyword,
+ ('expression', '_expression', '(?')),
+ (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
+ (r'objectloop\b', Keyword,
+ ('_keyword-expression', 'variable?', '(?')),
+ (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
+ (r'\.', Name.Label, 'label?'),
+ (r'@', Keyword, 'opcode'),
+ (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
+ (r'<', Punctuation, 'default'),
+ (r'move\b', Keyword,
+ ('default', '_keyword-expression', '_expression')),
+ default(('default', '_keyword-expression', '_expression'))
+ ],
+ 'miscellaneous-keyword?': [
+ include('_whitespace'),
+ (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
+ Keyword, '#pop'),
+ (r'(a|A|an|address|char|name|number|object|property|string|the|'
+ r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
+ '#pop'),
+ (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
+ '#pop'),
+ default('#pop')
+ ],
+ '(?': [
+ include('_whitespace'),
+ (r'\(', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'for': [
+ include('_whitespace'),
+ (r';', Punctuation, ('_for-expression', '_expression')),
+ default(('_for-expression', '_expression'))
+ ],
+ 'print-list': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r':', Error),
+ default(('_list-expression', '_expression', '_list-expression', 'form'))
+ ],
+ 'form': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
+ default('#pop')
+ ],
+
+ # Assembly
+ 'opcode': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
+ (_name, Keyword, 'operands')
+ ],
+ 'operands': [
+ (r':', Error),
+ default(('_assembly-expression', '_expression'))
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # 'in' is either a keyword or an operator.
+ # If the token two tokens after 'in' is ')', 'in' is a keyword:
+ # objectloop(a in b)
+ # Otherwise, it is an operator:
+ # objectloop(a in b && true)
+ objectloop_queue = []
+ objectloop_token_count = -1
+ previous_token = None
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self,
+ text):
+ if previous_token is Name.Variable and value == 'in':
+ objectloop_queue = [[index, token, value]]
+ objectloop_token_count = 2
+ elif objectloop_token_count > 0:
+ if token not in Comment and token not in Text:
+ objectloop_token_count -= 1
+ objectloop_queue.append((index, token, value))
+ else:
+ if objectloop_token_count == 0:
+ if objectloop_queue[-1][2] == ')':
+ objectloop_queue[0][1] = Keyword
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+ objectloop_token_count = -1
+ yield index, token, value
+ if token not in Comment and token not in Text:
+ previous_token = token
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+
+
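The get_tokens_unprocessed override above buffers the two tokens that follow 'in' and re-tags it as a keyword only when the second of them is a closing parenthesis. A quick way to observe both outcomes (the Inform 6 snippets are illustrative only):

    from pygments.lexers import Inform6Lexer

    lexer = Inform6Lexer()
    keyword_case = "[ Test; objectloop (x in y) print x; ];"
    operator_case = "[ Test; objectloop (x in y && true) print x; ];"
    for src in (keyword_case, operator_case):
        tags = [tok for _, tok, val in lexer.get_tokens_unprocessed(src) if val == 'in']
        # expected: Token.Keyword for the first snippet, Token.Operator.Word for the second
        print(tags)
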
+class Inform7Lexer(RegexLexer):
+ """
+ For `Inform 7 <http://inform7.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 7'
+ aliases = ['inform7', 'i7']
+ filenames = ['*.ni', '*.i7x']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _dash = Inform6Lexer._dash
+ _dquote = Inform6Lexer._dquote
+ _newline = Inform6Lexer._newline
+ _start = r'\A|(?<=[%s])' % _newline
+
+ # There are three variants of Inform 7, differing in how to
+ # interpret at signs and braces in I6T. In top-level inclusions, at
+ # signs in the first column are inweb syntax. In phrase definitions
+ # and use options, tokens in braces are treated as I7. Use options
+ # also interpret "{N}".
+ tokens = {}
+ token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
+
+ for level in token_variants:
+ tokens[level] = {
+ '+i6-root': list(Inform6Lexer.tokens['root']),
+ '+i6t-root': [ # For Inform6TemplateLexer
+ (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
+ ('directive', '+p'))
+ ],
+ 'root': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]' % _dquote, Generic.Heading,
+ ('+main', '+titling', '+titling-string')),
+ default(('+main', '+heading?'))
+ ],
+ '+titling-string': [
+ (r'[^%s]+' % _dquote, Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '#pop')
+ ],
+ '+titling': [
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
+ (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
+ Text, ('#pop', '+heading?')),
+ (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
+ (r'[|%s]' % _newline, Generic.Heading)
+ ],
+ '+main': [
+ (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
+ (r'[%s]' % _dquote, String.Double, '+text'),
+ (r':', Text, '+phrase-definition'),
+ (r'(?i)\bas\b', Text, '+use-option'),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-not-inline'), Punctuation)),
+ (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
+ (_start, _dquote, _newline), Text, '+heading?'),
+ (r'(?i)[a(|%s]' % _newline, Text)
+ ],
+ '+phrase-definition': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive',
+ 'default', 'statements'),
+ i6t='+i6t-inline'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+use-option': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-use-option'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+comment': [
+ (r'[^\[\]]+', Comment.Multiline),
+ (r'\[', Comment.Multiline, '#push'),
+ (r'\]', Comment.Multiline, '#pop')
+ ],
+ '+text': [
+ (r'[^\[%s]+' % _dquote, String.Double),
+ (r'\[.*?\]', String.Interpol),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ '+heading?': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
+ (r'[%s]{1,3}' % _dash, Text),
+ (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
+ Generic.Heading, '#pop'),
+ default('#pop')
+ ],
+ '+documentation-heading': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(?i)documentation\s+', Text, '+documentation-heading2'),
+ default('#pop')
+ ],
+ '+documentation-heading2': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s' % _dash, Text, '+documentation'),
+ default('#pop:2')
+ ],
+ '+documentation': [
+ (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
+ (_start, _newline), Generic.Heading),
+ (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
+ Generic.Subheading),
+ (r'((%s)\t.*?[%s])+' % (_start, _newline),
+ using(this, state='+main')),
+ (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ ],
+ '+i6t-not-inline': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p')
+ ],
+ '+i6t-use-option': [
+ include('+i6t-not-inline'),
+ (r'(\{)(N)(\})', bygroups(Punctuation, Text, Punctuation))
+ ],
+ '+i6t-inline': [
+ (r'(\{)(\S[^}]*)?(\})',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+i6t': [
+ (r'(\{[%s])(![^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Comment.Single, Punctuation)),
+ (r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation), '+lines'),
+ (r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation)),
+ (r'(\(\+)(.*?)(\+\)|\Z)',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+p': [
+ (r'[^@]+', Comment.Preproc),
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc, '#pop'),
+ (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading),
+ (r'@', Comment.Preproc)
+ ],
+ '+lines': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p'),
+ (r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
+ (r'![^%s]*' % _newline, Comment.Single),
+ (r'(\{)([%s]endlines)(\})' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation), '#pop'),
+ (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
+ ]
+ }
+ # Inform 7 can include snippets of Inform 6 template language,
+ # so all of Inform6Lexer's states are copied here, with
+ # modifications to account for template syntax. Inform7Lexer's
+ # own states begin with '+' to avoid name conflicts. Some of
+ # Inform6Lexer's states begin with '_': these are not modified.
+ # They deal with template syntax either by including modified
+ # states, or by matching r'' then pushing to modified states.
+ for token in Inform6Lexer.tokens:
+ if token == 'root':
+ continue
+ tokens[level][token] = list(Inform6Lexer.tokens[token])
+ if not token.startswith('_'):
+ tokens[level][token][:0] = [include('+i6t'), include(level)]
+
+ def __init__(self, **options):
+ level = options.get('i6t', '+i6t-not-inline')
+ if level not in self._all_tokens:
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+ RegexLexer.__init__(self, **options)
+
+
+class Inform6TemplateLexer(Inform7Lexer):
+ """
+ For `Inform 6 template
+ <http://inform7.com/sources/src/i6template/Woven/index.html>`_ code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6 template'
+ aliases = ['i6t']
+ filenames = ['*.i6t']
+
+ def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
+ return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
+
+
+class Tads3Lexer(RegexLexer):
+ """
+ For `TADS 3 <http://www.tads.org/>`_ source code.
+ """
+
+ name = 'TADS 3'
+ aliases = ['tads3']
+ filenames = ['*.t']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ _comment_single = r'(?://(?:[^\\\n]|\\+[\w\W])*$)'
+ _comment_multiline = r'(?:/\*(?:[^*]|\*(?!/))*\*/)'
+ _escape = (r'(?:\\(?:[\n\\<>"\'^v bnrt]|u[\da-fA-F]{,4}|x[\da-fA-F]{,2}|'
+ r'[0-3]?[0-7]{1,2}))')
+ _name = r'(?:[_a-zA-Z]\w*)'
+ _no_quote = r'(?=\s|\\?>)'
+ _operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
+ r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
+ _ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
+ _ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
+
+ def _make_string_state(triple, double, verbatim=None, _escape=_escape):
+ if verbatim:
+ verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
+ re.escape(c.upper()))
+ for c in verbatim])
+ char = r'"' if double else r"'"
+ token = String.Double if double else String.Single
+ escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
+ prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
+ tag_state_name = '%sqt' % prefix
+ state = []
+ if triple:
+ state += [
+ (r'%s{3,}' % char, token, '#pop'),
+ (r'\\%s+' % char, String.Escape),
+ (char, token)
+ ]
+ else:
+ state.append((char, token, '#pop'))
+ state += [
+ include('s/verbatim'),
+ (r'[^\\<&{}%s]+' % char, token)
+ ]
+ if verbatim:
+ # This regex can't use `(?i)` because escape sequences are
+ # case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
+ state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
+ (_escape, verbatim),
+ Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
+ else:
+ state += [
+ (r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
+ (char, char, escaped_quotes, _escape), Comment.Multiline),
+ (r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
+ ('#pop', '%sqs/listing' % prefix, tag_state_name)),
+ (r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
+ ('#pop', '%sqs/xmp' % prefix, tag_state_name)),
+ (r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
+ (char, char, escaped_quotes, _escape), Name.Tag,
+ tag_state_name),
+ include('s/entity')
+ ]
+ state += [
+ include('s/escape'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (char, char, escaped_quotes, _escape), String.Interpol),
+ (r'[\\&{}<]', token)
+ ]
+ return state
+
+ def _make_tag_state(triple, double, _escape=_escape):
+ char = r'"' if double else r"'"
+ quantifier = r'{3,}' if triple else r''
+ state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
+ token = String.Double if double else String.Single
+ escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
+ return [
+ (r'%s%s' % (char, quantifier), token, '#pop:2'),
+ (r'(\s|\\\n)+', Text),
+ (r'(=)(\\?")', bygroups(Punctuation, String.Double),
+ 'dqs/%s' % state_name),
+ (r"(=)(\\?')", bygroups(Punctuation, String.Single),
+ 'sqs/%s' % state_name),
+ (r'=', Punctuation, 'uqs/%s' % state_name),
+ (r'\\?>', Name.Tag, '#pop'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (char, char, escaped_quotes, _escape), String.Interpol),
+ (r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
+ (char, char, escaped_quotes, _escape), Name.Attribute),
+ include('s/escape'),
+ include('s/verbatim'),
+ include('s/entity'),
+ (r'[\\{}&]', Name.Attribute)
+ ]
+
+ def _make_attribute_value_state(terminator, host_triple, host_double,
+ _escape=_escape):
+ token = (String.Double if terminator == r'"' else
+ String.Single if terminator == r"'" else String.Other)
+ host_char = r'"' if host_double else r"'"
+ host_quantifier = r'{3,}' if host_triple else r''
+ host_token = String.Double if host_double else String.Single
+ escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
+ if host_triple else r'')
+ return [
+ (r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
+ (r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
+ token, '#pop'),
+ include('s/verbatim'),
+ include('s/entity'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (host_char, host_char, escaped_quotes, _escape), String.Interpol),
+ (r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
+ token),
+ include('s/escape'),
+ (r'["\'\s&{<}\\]', token)
+ ]
+
+ tokens = {
+ 'root': [
+ (u'\ufeff', Text),
+ (r'\{', Punctuation, 'object-body'),
+ (r';+', Punctuation),
+ (r'(?=(argcount|break|case|catch|continue|default|definingobj|'
+ r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
+ r'invokee|local|nil|new|operator|replaced|return|self|switch|'
+ r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
+ (r'(%s)(%s*)(\()' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation),
+ ('block?/root', 'more/parameters', 'main/parameters')),
+ include('whitespace'),
+ (r'\++', Punctuation),
+ (r'[^\s!"%-(*->@-_a-z{-~]+', Error), # Averts an infinite loop
+ (r'(?!\Z)', Text, 'main/root')
+ ],
+ 'main/root': [
+ include('main/basic'),
+ default(('#pop', 'object-body/no-braces', 'classes', 'class'))
+ ],
+ 'object-body/no-braces': [
+ (r';', Punctuation, '#pop'),
+ (r'\{', Punctuation, ('#pop', 'object-body')),
+ include('object-body')
+ ],
+ 'object-body': [
+ (r';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (r':', Punctuation, ('classes', 'class')),
+ (r'(%s?)(%s*)(\()' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation),
+ ('block?', 'more/parameters', 'main/parameters')),
+ (r'(%s)(%s*)(\{)' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation), 'block'),
+ (r'(%s)(%s*)(:)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace'),
+ Punctuation),
+ ('object-body/no-braces', 'classes', 'class')),
+ include('whitespace'),
+ (r'->|%s' % _operator, Punctuation, 'main'),
+ default('main/object-body')
+ ],
+ 'main/object-body': [
+ include('main/basic'),
+ (r'(%s)(%s*)(=?)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace'),
+ Punctuation), ('#pop', 'more', 'main')),
+ default('#pop:2')
+ ],
+ 'block?/root': [
+ (r'\{', Punctuation, ('#pop', 'block')),
+ include('whitespace'),
(r'(?=[\[\'"<(:])', Text, # It might be a VerbRule macro.
- ('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
- # It might be a macro like DefineAction.
- default(('#pop', 'object-body/no-braces'))
- ],
- 'block?': [
- (r'\{', Punctuation, ('#pop', 'block')),
- include('whitespace'),
- default('#pop')
- ],
- 'block/basic': [
- (r'[;:]+', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (r'default\b', Keyword.Reserved),
- (r'(%s)(%s*)(:)' % (_name, _ws),
- bygroups(Name.Label, using(this, state='whitespace'),
- Punctuation)),
- include('whitespace')
- ],
- 'block': [
- include('block/basic'),
- (r'(?!\Z)', Text, ('more', 'main'))
- ],
- 'block/embed': [
- (r'>>', String.Interpol, '#pop'),
- include('block/basic'),
- (r'(?!\Z)', Text, ('more/embed', 'main'))
- ],
- 'main/basic': [
- include('whitespace'),
- (r'\(', Punctuation, ('#pop', 'more', 'main')),
- (r'\[', Punctuation, ('#pop', 'more/list', 'main')),
- (r'\{', Punctuation, ('#pop', 'more/inner', 'main/inner',
- 'more/parameters', 'main/parameters')),
- (r'\*|\.{3}', Punctuation, '#pop'),
- (r'(?i)0x[\da-f]+', Number.Hex, '#pop'),
- (r'(\d+\.(?!\.)\d*|\.\d+)([eE][-+]?\d+)?|\d+[eE][-+]?\d+',
- Number.Float, '#pop'),
- (r'0[0-7]+', Number.Oct, '#pop'),
- (r'\d+', Number.Integer, '#pop'),
- (r'"""', String.Double, ('#pop', 'tdqs')),
- (r"'''", String.Single, ('#pop', 'tsqs')),
- (r'"', String.Double, ('#pop', 'dqs')),
- (r"'", String.Single, ('#pop', 'sqs')),
- (r'R"""', String.Regex, ('#pop', 'tdqr')),
- (r"R'''", String.Regex, ('#pop', 'tsqr')),
- (r'R"', String.Regex, ('#pop', 'dqr')),
- (r"R'", String.Regex, ('#pop', 'sqr')),
- # Two-token keywords
- (r'(extern)(%s+)(object\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved)),
- (r'(function|method)(%s*)(\()' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Punctuation),
- ('#pop', 'block?', 'more/parameters', 'main/parameters')),
- (r'(modify)(%s+)(grammar\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved),
- ('#pop', 'object-body/no-braces', ':', 'grammar')),
- (r'(new)(%s+(?=(?:function|method)\b))' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'))),
- (r'(object)(%s+)(template\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved), ('#pop', 'template')),
- (r'(string)(%s+)(template\b)' % _ws,
- bygroups(Keyword, using(this, state='whitespace'),
- Keyword.Reserved), ('#pop', 'function-name')),
- # Keywords
- (r'(argcount|definingobj|invokee|replaced|targetobj|targetprop)\b',
- Name.Builtin, '#pop'),
- (r'(break|continue|goto)\b', Keyword.Reserved, ('#pop', 'label')),
- (r'(case|extern|if|intrinsic|return|static|while)\b',
- Keyword.Reserved),
- (r'catch\b', Keyword.Reserved, ('#pop', 'catch')),
- (r'class\b', Keyword.Reserved,
- ('#pop', 'object-body/no-braces', 'class')),
- (r'(default|do|else|finally|try)\b', Keyword.Reserved, '#pop'),
- (r'(dictionary|property)\b', Keyword.Reserved,
- ('#pop', 'constants')),
- (r'enum\b', Keyword.Reserved, ('#pop', 'enum')),
- (r'export\b', Keyword.Reserved, ('#pop', 'main')),
- (r'(for|foreach)\b', Keyword.Reserved,
- ('#pop', 'more/inner', 'main/inner')),
- (r'(function|method)\b', Keyword.Reserved,
- ('#pop', 'block?', 'function-name')),
- (r'grammar\b', Keyword.Reserved,
- ('#pop', 'object-body/no-braces', 'grammar')),
- (r'inherited\b', Keyword.Reserved, ('#pop', 'inherited')),
- (r'local\b', Keyword.Reserved,
- ('#pop', 'more/local', 'main/local')),
- (r'(modify|replace|switch|throw|transient)\b', Keyword.Reserved,
- '#pop'),
- (r'new\b', Keyword.Reserved, ('#pop', 'class')),
- (r'(nil|true)\b', Keyword.Constant, '#pop'),
- (r'object\b', Keyword.Reserved, ('#pop', 'object-body/no-braces')),
- (r'operator\b', Keyword.Reserved, ('#pop', 'operator')),
- (r'propertyset\b', Keyword.Reserved,
- ('#pop', 'propertyset', 'main')),
- (r'self\b', Name.Builtin.Pseudo, '#pop'),
- (r'template\b', Keyword.Reserved, ('#pop', 'template')),
- # Operators
- (r'(__objref|defined)(%s*)(\()' % _ws,
- bygroups(Operator.Word, using(this, state='whitespace'),
- Operator), ('#pop', 'more/__objref', 'main')),
- (r'delegated\b', Operator.Word),
- # Compiler-defined macros and built-in properties
- (r'(__DATE__|__DEBUG|__LINE__|__FILE__|'
- r'__TADS_MACRO_FORMAT_VERSION|__TADS_SYS_\w*|__TADS_SYSTEM_NAME|'
- r'__TADS_VERSION_MAJOR|__TADS_VERSION_MINOR|__TADS3|__TIME__|'
- r'construct|finalize|grammarInfo|grammarTag|lexicalParent|'
- r'miscVocab|sourceTextGroup|sourceTextGroupName|'
- r'sourceTextGroupOrder|sourceTextOrder)\b', Name.Builtin, '#pop')
- ],
- 'main': [
- include('main/basic'),
- (_name, Name, '#pop'),
- default('#pop')
- ],
- 'more/basic': [
- (r'\(', Punctuation, ('more/list', 'main')),
- (r'\[', Punctuation, ('more', 'main')),
- (r'\.{3}', Punctuation),
- (r'->|\.\.', Punctuation, 'main'),
- (r'(?=;)|[:)\]]', Punctuation, '#pop'),
- include('whitespace'),
- (_operator, Operator, 'main'),
- (r'\?', Operator, ('main', 'more/conditional', 'main')),
- (r'(is|not)(%s+)(in\b)' % _ws,
- bygroups(Operator.Word, using(this, state='whitespace'),
- Operator.Word)),
- (r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
- ],
- 'more': [
- include('more/basic'),
- default('#pop')
- ],
- # Then expression (conditional operator)
- 'more/conditional': [
- (r':(?!:)', Operator, '#pop'),
- include('more')
- ],
- # Embedded expressions
- 'more/embed': [
- (r'>>', String.Interpol, '#pop:2'),
- include('more')
- ],
- # For/foreach loop initializer or short-form anonymous function
- 'main/inner': [
- (r'\(', Punctuation, ('#pop', 'more/inner', 'main/inner')),
- (r'local\b', Keyword.Reserved, ('#pop', 'main/local')),
- include('main')
- ],
- 'more/inner': [
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, 'main/inner'),
- (r'(in|step)\b', Keyword, 'main/inner'),
- include('more')
- ],
- # Local
- 'main/local': [
- (_name, Name.Variable, '#pop'),
- include('whitespace')
- ],
- 'more/local': [
- (r',', Punctuation, 'main/local'),
- include('more')
- ],
- # List
- 'more/list': [
- (r'[,:]', Punctuation, 'main'),
- include('more')
- ],
- # Parameter list
- 'main/parameters': [
- (r'(%s)(%s*)(?=:)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
- (r'(%s)(%s+)(%s)' % (_name, _ws, _name),
- bygroups(Name.Class, using(this, state='whitespace'),
- Name.Variable), '#pop'),
- (r'\[+', Punctuation),
- include('main/basic'),
- (_name, Name.Variable, '#pop'),
- default('#pop')
- ],
- 'more/parameters': [
- (r'(:)(%s*(?=[?=,:)]))' % _ws,
- bygroups(Punctuation, using(this, state='whitespace'))),
- (r'[?\]]+', Punctuation),
- (r'[:)]', Punctuation, ('#pop', 'multimethod?')),
- (r',', Punctuation, 'main/parameters'),
- (r'=', Punctuation, ('more/parameter', 'main')),
- include('more')
- ],
- 'more/parameter': [
- (r'(?=[,)])', Text, '#pop'),
- include('more')
- ],
- 'multimethod?': [
- (r'multimethod\b', Keyword, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
-
- # Statements and expressions
- 'more/__objref': [
- (r',', Punctuation, 'mode'),
- (r'\)', Operator, '#pop'),
- include('more')
- ],
- 'mode': [
- (r'(error|warn)\b', Keyword, '#pop'),
- include('whitespace')
- ],
- 'catch': [
- (r'\(+', Punctuation),
- (_name, Name.Exception, ('#pop', 'variables')),
- include('whitespace')
- ],
- 'enum': [
- include('whitespace'),
- (r'token\b', Keyword, ('#pop', 'constants')),
- default(('#pop', 'constants'))
- ],
- 'grammar': [
- (r'\)+', Punctuation),
- (r'\(', Punctuation, 'grammar-tag'),
- (r':', Punctuation, 'grammar-rules'),
- (_name, Name.Class),
- include('whitespace')
- ],
- 'grammar-tag': [
- include('whitespace'),
- (r'"""([^\\"<]|""?(?!")|\\"+|\\.|<(?!<))+("{3,}|<<)|'
- r'R"""([^\\"]|""?(?!")|\\"+|\\.)+"{3,}|'
- r"'''([^\\'<]|''?(?!')|\\'+|\\.|<(?!<))+('{3,}|<<)|"
- r"R'''([^\\']|''?(?!')|\\'+|\\.)+'{3,}|"
- r'"([^\\"<]|\\.|<(?!<))+("|<<)|R"([^\\"]|\\.)+"|'
- r"'([^\\'<]|\\.|<(?!<))+('|<<)|R'([^\\']|\\.)+'|"
- r"([^)\s\\/]|/(?![/*]))+|\)", String.Other, '#pop')
- ],
- 'grammar-rules': [
- include('string'),
- include('whitespace'),
- (r'(\[)(%s*)(badness)' % _ws,
- bygroups(Punctuation, using(this, state='whitespace'), Keyword),
- 'main'),
- (r'->|%s|[()]' % _operator, Punctuation),
- (_name, Name.Constant),
- default('#pop:2')
- ],
- ':': [
- (r':', Punctuation, '#pop')
- ],
- 'function-name': [
- (r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
- (r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
- (_name, Name.Function, '#pop'),
- include('whitespace')
- ],
- 'inherited': [
- (r'<', Punctuation, ('#pop', 'classes', 'class')),
- include('whitespace'),
- (_name, Name.Class, '#pop'),
- default('#pop')
- ],
- 'operator': [
- (r'negate\b', Operator.Word, '#pop'),
- include('whitespace'),
- (_operator, Operator),
- default('#pop')
- ],
- 'propertyset': [
- (r'\(', Punctuation, ('more/parameters', 'main/parameters')),
- (r'\{', Punctuation, ('#pop', 'object-body')),
- include('whitespace')
- ],
- 'template': [
- (r'(?=;)', Text, '#pop'),
- include('string'),
- (r'inherited\b', Keyword.Reserved),
- include('whitespace'),
- (r'->|\?|%s' % _operator, Punctuation),
- (_name, Name.Variable)
- ],
-
- # Identifiers
- 'class': [
- (r'\*|\.{3}', Punctuation, '#pop'),
- (r'object\b', Keyword.Reserved, '#pop'),
- (r'transient\b', Keyword.Reserved),
- (_name, Name.Class, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
- 'classes': [
- (r'[:,]', Punctuation, 'class'),
- include('whitespace'),
- (r'>', Punctuation, '#pop'),
- default('#pop')
- ],
- 'constants': [
- (r',+', Punctuation),
- (r';', Punctuation, '#pop'),
- (r'property\b', Keyword.Reserved),
- (_name, Name.Constant),
- include('whitespace')
- ],
- 'label': [
- (_name, Name.Label, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
- 'variables': [
- (r',+', Punctuation),
- (r'\)', Punctuation, '#pop'),
- include('whitespace'),
- (_name, Name.Variable)
- ],
-
- # Whitespace and comments
- 'whitespace': [
- (r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
- Comment.Preproc),
- (_comment_single, Comment.Single),
- (_comment_multiline, Comment.Multiline),
- (r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
- ],
-
- # Strings
- 'string': [
- (r'"""', String.Double, 'tdqs'),
- (r"'''", String.Single, 'tsqs'),
- (r'"', String.Double, 'dqs'),
- (r"'", String.Single, 'sqs')
- ],
- 's/escape': [
- (r'\{\{|\}\}|%s' % _escape, String.Escape)
- ],
- 's/verbatim': [
- (r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
- r'first\s+time|one\s+of|only|or|otherwise|'
- r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
- r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
- (r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
- r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
- String.Interpol, ('block/embed', 'more/embed', 'main'))
- ],
- 's/entity': [
- (r'(?i)&(#(x[\da-f]+|\d+)|[a-z][\da-z]*);?', Name.Entity)
- ],
- 'tdqs': _make_string_state(True, True),
- 'tsqs': _make_string_state(True, False),
- 'dqs': _make_string_state(False, True),
- 'sqs': _make_string_state(False, False),
- 'tdqs/listing': _make_string_state(True, True, 'listing'),
- 'tsqs/listing': _make_string_state(True, False, 'listing'),
- 'dqs/listing': _make_string_state(False, True, 'listing'),
- 'sqs/listing': _make_string_state(False, False, 'listing'),
- 'tdqs/xmp': _make_string_state(True, True, 'xmp'),
- 'tsqs/xmp': _make_string_state(True, False, 'xmp'),
- 'dqs/xmp': _make_string_state(False, True, 'xmp'),
- 'sqs/xmp': _make_string_state(False, False, 'xmp'),
-
- # Tags
- 'tdqt': _make_tag_state(True, True),
- 'tsqt': _make_tag_state(True, False),
- 'dqt': _make_tag_state(False, True),
- 'sqt': _make_tag_state(False, False),
- 'dqs/tdqt': _make_attribute_value_state(r'"', True, True),
- 'dqs/tsqt': _make_attribute_value_state(r'"', True, False),
- 'dqs/dqt': _make_attribute_value_state(r'"', False, True),
- 'dqs/sqt': _make_attribute_value_state(r'"', False, False),
- 'sqs/tdqt': _make_attribute_value_state(r"'", True, True),
- 'sqs/tsqt': _make_attribute_value_state(r"'", True, False),
- 'sqs/dqt': _make_attribute_value_state(r"'", False, True),
- 'sqs/sqt': _make_attribute_value_state(r"'", False, False),
- 'uqs/tdqt': _make_attribute_value_state(_no_quote, True, True),
- 'uqs/tsqt': _make_attribute_value_state(_no_quote, True, False),
- 'uqs/dqt': _make_attribute_value_state(_no_quote, False, True),
- 'uqs/sqt': _make_attribute_value_state(_no_quote, False, False),
-
- # Regular expressions
- 'tdqr': [
- (r'[^\\"]+', String.Regex),
- (r'\\"*', String.Regex),
- (r'"{3,}', String.Regex, '#pop'),
- (r'"', String.Regex)
- ],
- 'tsqr': [
- (r"[^\\']+", String.Regex),
- (r"\\'*", String.Regex),
- (r"'{3,}", String.Regex, '#pop'),
- (r"'", String.Regex)
- ],
- 'dqr': [
- (r'[^\\"]+', String.Regex),
- (r'\\"?', String.Regex),
- (r'"', String.Regex, '#pop')
- ],
- 'sqr': [
- (r"[^\\']+", String.Regex),
- (r"\\'?", String.Regex),
- (r"'", String.Regex, '#pop')
- ]
- }
-
- def get_tokens_unprocessed(self, text, **kwargs):
- pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
- if_false_level = 0
- for index, token, value in (
- RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
- if if_false_level == 0: # Not in a false #if
- if (token is Comment.Preproc and
- re.match(r'%sif%s+(0|nil)%s*$\n?' %
- (pp, self._ws_pp, self._ws_pp), value)):
- if_false_level = 1
- else: # In a false #if
- if token is Comment.Preproc:
- if (if_false_level == 1 and
- re.match(r'%sel(if|se)\b' % pp, value)):
- if_false_level = 0
- elif re.match(r'%sif' % pp, value):
- if_false_level += 1
- elif re.match(r'%sendif\b' % pp, value):
- if_false_level -= 1
- else:
- token = Comment
- yield index, token, value
+ ('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
+ # It might be a macro like DefineAction.
+ default(('#pop', 'object-body/no-braces'))
+ ],
+ 'block?': [
+ (r'\{', Punctuation, ('#pop', 'block')),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'block/basic': [
+ (r'[;:]+', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (r'default\b', Keyword.Reserved),
+ (r'(%s)(%s*)(:)' % (_name, _ws),
+ bygroups(Name.Label, using(this, state='whitespace'),
+ Punctuation)),
+ include('whitespace')
+ ],
+ 'block': [
+ include('block/basic'),
+ (r'(?!\Z)', Text, ('more', 'main'))
+ ],
+ 'block/embed': [
+ (r'>>', String.Interpol, '#pop'),
+ include('block/basic'),
+ (r'(?!\Z)', Text, ('more/embed', 'main'))
+ ],
+ 'main/basic': [
+ include('whitespace'),
+ (r'\(', Punctuation, ('#pop', 'more', 'main')),
+ (r'\[', Punctuation, ('#pop', 'more/list', 'main')),
+ (r'\{', Punctuation, ('#pop', 'more/inner', 'main/inner',
+ 'more/parameters', 'main/parameters')),
+ (r'\*|\.{3}', Punctuation, '#pop'),
+ (r'(?i)0x[\da-f]+', Number.Hex, '#pop'),
+ (r'(\d+\.(?!\.)\d*|\.\d+)([eE][-+]?\d+)?|\d+[eE][-+]?\d+',
+ Number.Float, '#pop'),
+ (r'0[0-7]+', Number.Oct, '#pop'),
+ (r'\d+', Number.Integer, '#pop'),
+ (r'"""', String.Double, ('#pop', 'tdqs')),
+ (r"'''", String.Single, ('#pop', 'tsqs')),
+ (r'"', String.Double, ('#pop', 'dqs')),
+ (r"'", String.Single, ('#pop', 'sqs')),
+ (r'R"""', String.Regex, ('#pop', 'tdqr')),
+ (r"R'''", String.Regex, ('#pop', 'tsqr')),
+ (r'R"', String.Regex, ('#pop', 'dqr')),
+ (r"R'", String.Regex, ('#pop', 'sqr')),
+ # Two-token keywords
+ (r'(extern)(%s+)(object\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved)),
+ (r'(function|method)(%s*)(\()' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Punctuation),
+ ('#pop', 'block?', 'more/parameters', 'main/parameters')),
+ (r'(modify)(%s+)(grammar\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved),
+ ('#pop', 'object-body/no-braces', ':', 'grammar')),
+ (r'(new)(%s+(?=(?:function|method)\b))' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'))),
+ (r'(object)(%s+)(template\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved), ('#pop', 'template')),
+ (r'(string)(%s+)(template\b)' % _ws,
+ bygroups(Keyword, using(this, state='whitespace'),
+ Keyword.Reserved), ('#pop', 'function-name')),
+ # Keywords
+ (r'(argcount|definingobj|invokee|replaced|targetobj|targetprop)\b',
+ Name.Builtin, '#pop'),
+ (r'(break|continue|goto)\b', Keyword.Reserved, ('#pop', 'label')),
+ (r'(case|extern|if|intrinsic|return|static|while)\b',
+ Keyword.Reserved),
+ (r'catch\b', Keyword.Reserved, ('#pop', 'catch')),
+ (r'class\b', Keyword.Reserved,
+ ('#pop', 'object-body/no-braces', 'class')),
+ (r'(default|do|else|finally|try)\b', Keyword.Reserved, '#pop'),
+ (r'(dictionary|property)\b', Keyword.Reserved,
+ ('#pop', 'constants')),
+ (r'enum\b', Keyword.Reserved, ('#pop', 'enum')),
+ (r'export\b', Keyword.Reserved, ('#pop', 'main')),
+ (r'(for|foreach)\b', Keyword.Reserved,
+ ('#pop', 'more/inner', 'main/inner')),
+ (r'(function|method)\b', Keyword.Reserved,
+ ('#pop', 'block?', 'function-name')),
+ (r'grammar\b', Keyword.Reserved,
+ ('#pop', 'object-body/no-braces', 'grammar')),
+ (r'inherited\b', Keyword.Reserved, ('#pop', 'inherited')),
+ (r'local\b', Keyword.Reserved,
+ ('#pop', 'more/local', 'main/local')),
+ (r'(modify|replace|switch|throw|transient)\b', Keyword.Reserved,
+ '#pop'),
+ (r'new\b', Keyword.Reserved, ('#pop', 'class')),
+ (r'(nil|true)\b', Keyword.Constant, '#pop'),
+ (r'object\b', Keyword.Reserved, ('#pop', 'object-body/no-braces')),
+ (r'operator\b', Keyword.Reserved, ('#pop', 'operator')),
+ (r'propertyset\b', Keyword.Reserved,
+ ('#pop', 'propertyset', 'main')),
+ (r'self\b', Name.Builtin.Pseudo, '#pop'),
+ (r'template\b', Keyword.Reserved, ('#pop', 'template')),
+ # Operators
+ (r'(__objref|defined)(%s*)(\()' % _ws,
+ bygroups(Operator.Word, using(this, state='whitespace'),
+ Operator), ('#pop', 'more/__objref', 'main')),
+ (r'delegated\b', Operator.Word),
+ # Compiler-defined macros and built-in properties
+ (r'(__DATE__|__DEBUG|__LINE__|__FILE__|'
+ r'__TADS_MACRO_FORMAT_VERSION|__TADS_SYS_\w*|__TADS_SYSTEM_NAME|'
+ r'__TADS_VERSION_MAJOR|__TADS_VERSION_MINOR|__TADS3|__TIME__|'
+ r'construct|finalize|grammarInfo|grammarTag|lexicalParent|'
+ r'miscVocab|sourceTextGroup|sourceTextGroupName|'
+ r'sourceTextGroupOrder|sourceTextOrder)\b', Name.Builtin, '#pop')
+ ],
+ 'main': [
+ include('main/basic'),
+ (_name, Name, '#pop'),
+ default('#pop')
+ ],
+ 'more/basic': [
+ (r'\(', Punctuation, ('more/list', 'main')),
+ (r'\[', Punctuation, ('more', 'main')),
+ (r'\.{3}', Punctuation),
+ (r'->|\.\.', Punctuation, 'main'),
+ (r'(?=;)|[:)\]]', Punctuation, '#pop'),
+ include('whitespace'),
+ (_operator, Operator, 'main'),
+ (r'\?', Operator, ('main', 'more/conditional', 'main')),
+ (r'(is|not)(%s+)(in\b)' % _ws,
+ bygroups(Operator.Word, using(this, state='whitespace'),
+ Operator.Word)),
+ (r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
+ ],
+ 'more': [
+ include('more/basic'),
+ default('#pop')
+ ],
+ # Then expression (conditional operator)
+ 'more/conditional': [
+ (r':(?!:)', Operator, '#pop'),
+ include('more')
+ ],
+ # Embedded expressions
+ 'more/embed': [
+ (r'>>', String.Interpol, '#pop:2'),
+ include('more')
+ ],
+ # For/foreach loop initializer or short-form anonymous function
+ 'main/inner': [
+ (r'\(', Punctuation, ('#pop', 'more/inner', 'main/inner')),
+ (r'local\b', Keyword.Reserved, ('#pop', 'main/local')),
+ include('main')
+ ],
+ 'more/inner': [
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, 'main/inner'),
+ (r'(in|step)\b', Keyword, 'main/inner'),
+ include('more')
+ ],
+ # Local
+ 'main/local': [
+ (_name, Name.Variable, '#pop'),
+ include('whitespace')
+ ],
+ 'more/local': [
+ (r',', Punctuation, 'main/local'),
+ include('more')
+ ],
+ # List
+ 'more/list': [
+ (r'[,:]', Punctuation, 'main'),
+ include('more')
+ ],
+ # Parameter list
+ 'main/parameters': [
+ (r'(%s)(%s*)(?=:)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
+ (r'(%s)(%s+)(%s)' % (_name, _ws, _name),
+ bygroups(Name.Class, using(this, state='whitespace'),
+ Name.Variable), '#pop'),
+ (r'\[+', Punctuation),
+ include('main/basic'),
+ (_name, Name.Variable, '#pop'),
+ default('#pop')
+ ],
+ 'more/parameters': [
+ (r'(:)(%s*(?=[?=,:)]))' % _ws,
+ bygroups(Punctuation, using(this, state='whitespace'))),
+ (r'[?\]]+', Punctuation),
+ (r'[:)]', Punctuation, ('#pop', 'multimethod?')),
+ (r',', Punctuation, 'main/parameters'),
+ (r'=', Punctuation, ('more/parameter', 'main')),
+ include('more')
+ ],
+ 'more/parameter': [
+ (r'(?=[,)])', Text, '#pop'),
+ include('more')
+ ],
+ 'multimethod?': [
+ (r'multimethod\b', Keyword, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+
+ # Statements and expressions
+ 'more/__objref': [
+ (r',', Punctuation, 'mode'),
+ (r'\)', Operator, '#pop'),
+ include('more')
+ ],
+ 'mode': [
+ (r'(error|warn)\b', Keyword, '#pop'),
+ include('whitespace')
+ ],
+ 'catch': [
+ (r'\(+', Punctuation),
+ (_name, Name.Exception, ('#pop', 'variables')),
+ include('whitespace')
+ ],
+ 'enum': [
+ include('whitespace'),
+ (r'token\b', Keyword, ('#pop', 'constants')),
+ default(('#pop', 'constants'))
+ ],
+ 'grammar': [
+ (r'\)+', Punctuation),
+ (r'\(', Punctuation, 'grammar-tag'),
+ (r':', Punctuation, 'grammar-rules'),
+ (_name, Name.Class),
+ include('whitespace')
+ ],
+ 'grammar-tag': [
+ include('whitespace'),
+ (r'"""([^\\"<]|""?(?!")|\\"+|\\.|<(?!<))+("{3,}|<<)|'
+ r'R"""([^\\"]|""?(?!")|\\"+|\\.)+"{3,}|'
+ r"'''([^\\'<]|''?(?!')|\\'+|\\.|<(?!<))+('{3,}|<<)|"
+ r"R'''([^\\']|''?(?!')|\\'+|\\.)+'{3,}|"
+ r'"([^\\"<]|\\.|<(?!<))+("|<<)|R"([^\\"]|\\.)+"|'
+ r"'([^\\'<]|\\.|<(?!<))+('|<<)|R'([^\\']|\\.)+'|"
+ r"([^)\s\\/]|/(?![/*]))+|\)", String.Other, '#pop')
+ ],
+ 'grammar-rules': [
+ include('string'),
+ include('whitespace'),
+ (r'(\[)(%s*)(badness)' % _ws,
+ bygroups(Punctuation, using(this, state='whitespace'), Keyword),
+ 'main'),
+ (r'->|%s|[()]' % _operator, Punctuation),
+ (_name, Name.Constant),
+ default('#pop:2')
+ ],
+ ':': [
+ (r':', Punctuation, '#pop')
+ ],
+ 'function-name': [
+ (r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
+ (r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
+ (_name, Name.Function, '#pop'),
+ include('whitespace')
+ ],
+ 'inherited': [
+ (r'<', Punctuation, ('#pop', 'classes', 'class')),
+ include('whitespace'),
+ (_name, Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'operator': [
+ (r'negate\b', Operator.Word, '#pop'),
+ include('whitespace'),
+ (_operator, Operator),
+ default('#pop')
+ ],
+ 'propertyset': [
+ (r'\(', Punctuation, ('more/parameters', 'main/parameters')),
+ (r'\{', Punctuation, ('#pop', 'object-body')),
+ include('whitespace')
+ ],
+ 'template': [
+ (r'(?=;)', Text, '#pop'),
+ include('string'),
+ (r'inherited\b', Keyword.Reserved),
+ include('whitespace'),
+ (r'->|\?|%s' % _operator, Punctuation),
+ (_name, Name.Variable)
+ ],
+
+ # Identifiers
+ 'class': [
+ (r'\*|\.{3}', Punctuation, '#pop'),
+ (r'object\b', Keyword.Reserved, '#pop'),
+ (r'transient\b', Keyword.Reserved),
+ (_name, Name.Class, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'classes': [
+ (r'[:,]', Punctuation, 'class'),
+ include('whitespace'),
+ (r'>', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'constants': [
+ (r',+', Punctuation),
+ (r';', Punctuation, '#pop'),
+ (r'property\b', Keyword.Reserved),
+ (_name, Name.Constant),
+ include('whitespace')
+ ],
+ 'label': [
+ (_name, Name.Label, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'variables': [
+ (r',+', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ include('whitespace'),
+ (_name, Name.Variable)
+ ],
+
+ # Whitespace and comments
+ 'whitespace': [
+ (r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
+ Comment.Preproc),
+ (_comment_single, Comment.Single),
+ (_comment_multiline, Comment.Multiline),
+ (r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
+ ],
+
+ # Strings
+ 'string': [
+ (r'"""', String.Double, 'tdqs'),
+ (r"'''", String.Single, 'tsqs'),
+ (r'"', String.Double, 'dqs'),
+ (r"'", String.Single, 'sqs')
+ ],
+ 's/escape': [
+ (r'\{\{|\}\}|%s' % _escape, String.Escape)
+ ],
+ 's/verbatim': [
+ (r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
+ r'first\s+time|one\s+of|only|or|otherwise|'
+ r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
+ r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
+ (r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
+ r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
+ String.Interpol, ('block/embed', 'more/embed', 'main'))
+ ],
+ 's/entity': [
+ (r'(?i)&(#(x[\da-f]+|\d+)|[a-z][\da-z]*);?', Name.Entity)
+ ],
+ 'tdqs': _make_string_state(True, True),
+ 'tsqs': _make_string_state(True, False),
+ 'dqs': _make_string_state(False, True),
+ 'sqs': _make_string_state(False, False),
+ 'tdqs/listing': _make_string_state(True, True, 'listing'),
+ 'tsqs/listing': _make_string_state(True, False, 'listing'),
+ 'dqs/listing': _make_string_state(False, True, 'listing'),
+ 'sqs/listing': _make_string_state(False, False, 'listing'),
+ 'tdqs/xmp': _make_string_state(True, True, 'xmp'),
+ 'tsqs/xmp': _make_string_state(True, False, 'xmp'),
+ 'dqs/xmp': _make_string_state(False, True, 'xmp'),
+ 'sqs/xmp': _make_string_state(False, False, 'xmp'),
+
+ # Tags
+ 'tdqt': _make_tag_state(True, True),
+ 'tsqt': _make_tag_state(True, False),
+ 'dqt': _make_tag_state(False, True),
+ 'sqt': _make_tag_state(False, False),
+ 'dqs/tdqt': _make_attribute_value_state(r'"', True, True),
+ 'dqs/tsqt': _make_attribute_value_state(r'"', True, False),
+ 'dqs/dqt': _make_attribute_value_state(r'"', False, True),
+ 'dqs/sqt': _make_attribute_value_state(r'"', False, False),
+ 'sqs/tdqt': _make_attribute_value_state(r"'", True, True),
+ 'sqs/tsqt': _make_attribute_value_state(r"'", True, False),
+ 'sqs/dqt': _make_attribute_value_state(r"'", False, True),
+ 'sqs/sqt': _make_attribute_value_state(r"'", False, False),
+ 'uqs/tdqt': _make_attribute_value_state(_no_quote, True, True),
+ 'uqs/tsqt': _make_attribute_value_state(_no_quote, True, False),
+ 'uqs/dqt': _make_attribute_value_state(_no_quote, False, True),
+ 'uqs/sqt': _make_attribute_value_state(_no_quote, False, False),
+
+ # Regular expressions
+ 'tdqr': [
+ (r'[^\\"]+', String.Regex),
+ (r'\\"*', String.Regex),
+ (r'"{3,}', String.Regex, '#pop'),
+ (r'"', String.Regex)
+ ],
+ 'tsqr': [
+ (r"[^\\']+", String.Regex),
+ (r"\\'*", String.Regex),
+ (r"'{3,}", String.Regex, '#pop'),
+ (r"'", String.Regex)
+ ],
+ 'dqr': [
+ (r'[^\\"]+', String.Regex),
+ (r'\\"?', String.Regex),
+ (r'"', String.Regex, '#pop')
+ ],
+ 'sqr': [
+ (r"[^\\']+", String.Regex),
+ (r"\\'?", String.Regex),
+ (r"'", String.Regex, '#pop')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text, **kwargs):
+ pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
+ if_false_level = 0
+ for index, token, value in (
+ RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
+ if if_false_level == 0: # Not in a false #if
+ if (token is Comment.Preproc and
+ re.match(r'%sif%s+(0|nil)%s*$\n?' %
+ (pp, self._ws_pp, self._ws_pp), value)):
+ if_false_level = 1
+ else: # In a false #if
+ if token is Comment.Preproc:
+ if (if_false_level == 1 and
+ re.match(r'%sel(if|se)\b' % pp, value)):
+ if_false_level = 0
+ elif re.match(r'%sif' % pp, value):
+ if_false_level += 1
+ elif re.match(r'%sendif\b' % pp, value):
+ if_false_level -= 1
+ else:
+ token = Comment
+ yield index, token, value
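
As an illustration of the get_tokens_unprocessed override that closes the Tads3Lexer hunk above: it tracks '#if 0' / '#if nil' preprocessor regions and downgrades every token inside them to a plain Comment. The following is only a sketch against the public Pygments API (not part of the patch), assuming this vendored copy is importable as pygments:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

# Code inside the false '#if 0' block is re-tagged as Comment by the
# override; the code after '#endif' is highlighted normally.
source = (
    '#if 0\n'
    'local skipped = 1;\n'
    '#endif\n'
    'local kept = 2;\n'
)

print(highlight(source, get_lexer_by_name('tads3'), TerminalFormatter()))
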
diff --git a/contrib/python/Pygments/py2/pygments/lexers/iolang.py b/contrib/python/Pygments/py2/pygments/lexers/iolang.py
index f33c87139d..bc9237be5a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/iolang.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/iolang.py
@@ -1,63 +1,63 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.iolang
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Io language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.iolang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Io language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number
-
-__all__ = ['IoLexer']
-
-
-class IoLexer(RegexLexer):
- """
- For `Io <http://iolanguage.com/>`_ (a small, prototype-based
- programming language) source.
-
- .. versionadded:: 0.10
- """
- name = 'Io'
- filenames = ['*.io']
- aliases = ['io']
- mimetypes = ['text/x-iosrc']
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'#(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nestedcomment'),
- # DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # Operators
- (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
- Operator),
- # keywords
- (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
- Keyword),
- # constants
- (r'(nil|false|true)\b', Name.Constant),
- # names
- (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
- Name.Builtin),
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number
+
+__all__ = ['IoLexer']
+
+
+class IoLexer(RegexLexer):
+ """
+ For `Io <http://iolanguage.com/>`_ (a small, prototype-based
+ programming language) source.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Io'
+ filenames = ['*.io']
+ aliases = ['io']
+ mimetypes = ['text/x-iosrc']
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'#(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nestedcomment'),
+ # DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Operators
+ (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
+ Operator),
+ # keywords
+ (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
+ Keyword),
+ # constants
+ (r'(nil|false|true)\b', Name.Constant),
+ # names
+ (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
+ Name.Builtin),
(r'[a-zA-Z_]\w*', Name),
- # numbers
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'nestedcomment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ]
- }
+ # numbers
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'nestedcomment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ]
+ }
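
As an illustration of the 'nestedcomment' state in the IoLexer hunk above: its '#push' / '#pop' pair lets /+ ... +/ comments nest to arbitrary depth, with the whole block emitted as Comment.Multiline. A small sketch (not part of the patch), assuming this vendored copy is importable as pygments:

from pygments.lexers import get_lexer_by_name
from pygments.token import Comment

lexer = get_lexer_by_name('io')
code = '/+ outer /+ inner +/ still outer +/ x := 1\n'

# Prints only the comment tokens; the nested block stays one comment.
for ttype, value in lexer.get_tokens(code):
    if ttype in Comment:
        print(ttype, repr(value))
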
diff --git a/contrib/python/Pygments/py2/pygments/lexers/j.py b/contrib/python/Pygments/py2/pygments/lexers/j.py
index baec89ef56..98cc926cb6 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/j.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/j.py
@@ -1,146 +1,146 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.j
- ~~~~~~~~~~~~~~~~~
-
- Lexer for the J programming language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.j
+ ~~~~~~~~~~~~~~~~~
+
+ Lexer for the J programming language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \
- String, Text
-
-__all__ = ['JLexer']
-
-
-class JLexer(RegexLexer):
- """
- For `J <http://jsoftware.com/>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'J'
- aliases = ['j']
- filenames = ['*.ijs']
- mimetypes = ['text/x-j']
-
- validName = r'\b[a-zA-Z]\w*'
-
- tokens = {
- 'root': [
- # Shebang script
- (r'#!.*$', Comment.Preproc),
-
- # Comments
- (r'NB\..*', Comment.Single),
- (r'\n+\s*Note', Comment.Multiline, 'comment'),
- (r'\s*Note.*', Comment.Single),
-
- # Whitespace
- (r'\s+', Text),
-
- # Strings
- (r"'", String, 'singlequote'),
-
- # Definitions
- (r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'),
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \
+ String, Text
+
+__all__ = ['JLexer']
+
+
+class JLexer(RegexLexer):
+ """
+ For `J <http://jsoftware.com/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'J'
+ aliases = ['j']
+ filenames = ['*.ijs']
+ mimetypes = ['text/x-j']
+
+ validName = r'\b[a-zA-Z]\w*'
+
+ tokens = {
+ 'root': [
+ # Shebang script
+ (r'#!.*$', Comment.Preproc),
+
+ # Comments
+ (r'NB\..*', Comment.Single),
+ (r'\n+\s*Note', Comment.Multiline, 'comment'),
+ (r'\s*Note.*', Comment.Single),
+
+ # Whitespace
+ (r'\s+', Text),
+
+ # Strings
+ (r"'", String, 'singlequote'),
+
+ # Definitions
+ (r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'),
(r'(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b',
- Name.Function, 'explicitDefinition'),
-
- # Flow Control
+ Name.Function, 'explicitDefinition'),
+
+ # Flow Control
(words(('for_', 'goto_', 'label_'), suffix=validName+r'\.'), Name.Label),
- (words((
- 'assert', 'break', 'case', 'catch', 'catchd',
- 'catcht', 'continue', 'do', 'else', 'elseif',
- 'end', 'fcase', 'for', 'if', 'return',
- 'select', 'throw', 'try', 'while', 'whilst',
+ (words((
+ 'assert', 'break', 'case', 'catch', 'catchd',
+ 'catcht', 'continue', 'do', 'else', 'elseif',
+ 'end', 'fcase', 'for', 'if', 'return',
+ 'select', 'throw', 'try', 'while', 'whilst',
), suffix=r'\.'), Name.Label),
-
- # Variable Names
- (validName, Name.Variable),
-
- # Standard Library
- (words((
- 'ARGV', 'CR', 'CRLF', 'DEL', 'Debug',
- 'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF',
- 'LF2', 'Note', 'TAB', 'alpha17', 'alpha27',
- 'apply', 'bind', 'boxopen', 'boxxopen', 'bx',
- 'clear', 'cutLF', 'cutopen', 'datatype', 'def',
- 'dfh', 'drop', 'each', 'echo', 'empty',
- 'erase', 'every', 'evtloop', 'exit', 'expand',
- 'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
- 'getenv', 'hfd', 'inv', 'inverse', 'iospath',
- 'isatty', 'isutf8', 'items', 'leaf', 'list',
+
+ # Variable Names
+ (validName, Name.Variable),
+
+ # Standard Library
+ (words((
+ 'ARGV', 'CR', 'CRLF', 'DEL', 'Debug',
+ 'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF',
+ 'LF2', 'Note', 'TAB', 'alpha17', 'alpha27',
+ 'apply', 'bind', 'boxopen', 'boxxopen', 'bx',
+ 'clear', 'cutLF', 'cutopen', 'datatype', 'def',
+ 'dfh', 'drop', 'each', 'echo', 'empty',
+ 'erase', 'every', 'evtloop', 'exit', 'expand',
+ 'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
+ 'getenv', 'hfd', 'inv', 'inverse', 'iospath',
+ 'isatty', 'isutf8', 'items', 'leaf', 'list',
'nameclass', 'namelist', 'names', 'nc',
'nl', 'on', 'pick', 'rows',
- 'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
- 'sort', 'split', 'stderr', 'stdin', 'stdout',
- 'table', 'take', 'timespacex', 'timex', 'tmoutput',
- 'toCRLF', 'toHOST', 'toJ', 'tolower', 'toupper',
- 'type', 'ucp', 'ucpcount', 'usleep', 'utf8',
- 'uucp',
- )), Name.Function),
-
- # Copula
- (r'=[.:]', Operator),
-
- # Builtins
- (r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/]', Operator),
-
- # Short Keywords
- (r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved),
- (r'[aDiLpqsStux]\:', Keyword.Reserved),
- (r'(_[0-9])\:', Keyword.Constant),
-
- # Parens
- (r'\(', Punctuation, 'parentheses'),
-
- # Numbers
- include('numbers'),
- ],
-
- 'comment': [
- (r'[^)]', Comment.Multiline),
- (r'^\)', Comment.Multiline, '#pop'),
- (r'[)]', Comment.Multiline),
- ],
-
- 'explicitDefinition': [
- (r'\b[nmuvxy]\b', Name.Decorator),
- include('root'),
- (r'[^)]', Name),
- (r'^\)', Name.Label, '#pop'),
- (r'[)]', Name),
- ],
-
- 'numbers': [
- (r'\b_{1,2}\b', Number),
- (r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number),
- (r'_?\d+\.(?=\d+)', Number.Float),
- (r'_?\d+x', Number.Integer.Long),
- (r'_?\d+', Number.Integer),
- ],
-
- 'nounDefinition': [
- (r'[^)]', String),
- (r'^\)', Name.Label, '#pop'),
- (r'[)]', String),
- ],
-
- 'parentheses': [
- (r'\)', Punctuation, '#pop'),
- # include('nounDefinition'),
- include('explicitDefinition'),
- include('root'),
- ],
-
- 'singlequote': [
- (r"[^']", String),
- (r"''", String),
- (r"'", String, '#pop'),
- ],
- }
+ 'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
+ 'sort', 'split', 'stderr', 'stdin', 'stdout',
+ 'table', 'take', 'timespacex', 'timex', 'tmoutput',
+ 'toCRLF', 'toHOST', 'toJ', 'tolower', 'toupper',
+ 'type', 'ucp', 'ucpcount', 'usleep', 'utf8',
+ 'uucp',
+ )), Name.Function),
+
+ # Copula
+ (r'=[.:]', Operator),
+
+ # Builtins
+ (r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/]', Operator),
+
+ # Short Keywords
+ (r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved),
+ (r'[aDiLpqsStux]\:', Keyword.Reserved),
+ (r'(_[0-9])\:', Keyword.Constant),
+
+ # Parens
+ (r'\(', Punctuation, 'parentheses'),
+
+ # Numbers
+ include('numbers'),
+ ],
+
+ 'comment': [
+ (r'[^)]', Comment.Multiline),
+ (r'^\)', Comment.Multiline, '#pop'),
+ (r'[)]', Comment.Multiline),
+ ],
+
+ 'explicitDefinition': [
+ (r'\b[nmuvxy]\b', Name.Decorator),
+ include('root'),
+ (r'[^)]', Name),
+ (r'^\)', Name.Label, '#pop'),
+ (r'[)]', Name),
+ ],
+
+ 'numbers': [
+ (r'\b_{1,2}\b', Number),
+ (r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number),
+ (r'_?\d+\.(?=\d+)', Number.Float),
+ (r'_?\d+x', Number.Integer.Long),
+ (r'_?\d+', Number.Integer),
+ ],
+
+ 'nounDefinition': [
+ (r'[^)]', String),
+ (r'^\)', Name.Label, '#pop'),
+ (r'[)]', String),
+ ],
+
+ 'parentheses': [
+ (r'\)', Punctuation, '#pop'),
+ # include('nounDefinition'),
+ include('explicitDefinition'),
+ include('root'),
+ ],
+
+ 'singlequote': [
+ (r"[^']", String),
+ (r"''", String),
+ (r"'", String, '#pop'),
+ ],
+ }
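
As an illustration of the 'nounDefinition' state in the JLexer hunk above: '0 : 0' (or 'noun define') pushes that state, so everything up to a line beginning with ')' is lexed as literal String data rather than J code. A short sketch (not part of the patch), assuming this vendored copy is importable as pygments:

from pygments.lexers import get_lexer_by_name
from pygments.token import String

lexer = get_lexer_by_name('j')
code = 'text =: 0 : 0\nanything here is noun data, not J code\n)\n'

# Re-joins the characters that the 'nounDefinition' state emitted as String.
print(''.join(v for t, v in lexer.get_tokens(code) if t in String))
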
diff --git a/contrib/python/Pygments/py2/pygments/lexers/javascript.py b/contrib/python/Pygments/py2/pygments/lexers/javascript.py
index e9cf672278..b04f7eccd9 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/javascript.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/javascript.py
@@ -1,525 +1,525 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.javascript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for JavaScript and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.javascript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for JavaScript and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, using, \
- this, words, combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other
-from pygments.util import get_bool_opt, iteritems
-import pygments.unistring as uni
-
-__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
- 'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, using, \
+ this, words, combined
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Other
+from pygments.util import get_bool_opt, iteritems
+import pygments.unistring as uni
+
+__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
+ 'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer']
-
-JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
- ']|\\\\u[a-fA-F0-9]{4})')
-JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Mn', 'Mc', 'Nd', 'Pc') +
- u'\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
-JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
-
-
-class JavascriptLexer(RegexLexer):
- """
- For JavaScript source code.
- """
-
- name = 'JavaScript'
- aliases = ['js', 'javascript']
- filenames = ['*.js', '*.jsm']
- mimetypes = ['application/javascript', 'application/x-javascript',
- 'text/x-javascript', 'text/javascript']
-
- flags = re.DOTALL | re.UNICODE | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+
+JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
+ ']|\\\\u[a-fA-F0-9]{4})')
+JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Mn', 'Mc', 'Nd', 'Pc') +
+ u'\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
+JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
+
+
+class JavascriptLexer(RegexLexer):
+ """
+ For JavaScript source code.
+ """
+
+ name = 'JavaScript'
+ aliases = ['js', 'javascript']
+ filenames = ['*.js', '*.jsm']
+ mimetypes = ['application/javascript', 'application/x-javascript',
+ 'text/x-javascript', 'text/javascript']
+
+ flags = re.DOTALL | re.UNICODE | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gimuy]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
(r'(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?', Number.Float),
(r'0[bB][01]+', Number.Bin),
(r'0[oO][0-7]+', Number.Oct),
(r'0[xX][0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'\.\.\.|=>', Punctuation),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|'
- r'this|of)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|Promise|Proxy|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|'
- r'document|this|window)\b', Name.Builtin),
- (JS_IDENT, Name.Other),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'`', String.Backtick, 'interp'),
- ],
- 'interp': [
- (r'`', String.Backtick, '#pop'),
- (r'\\\\', String.Backtick),
- (r'\\`', String.Backtick),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|'
+ r'this|of)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|Promise|Proxy|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|'
+ r'document|this|window)\b', Name.Builtin),
+ (JS_IDENT, Name.Other),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'`', String.Backtick, 'interp'),
+ ],
+ 'interp': [
+ (r'`', String.Backtick, '#pop'),
+ (r'\\\\', String.Backtick),
+ (r'\\`', String.Backtick),
(r'\$\{', String.Interpol, 'interp-inside'),
- (r'\$', String.Backtick),
- (r'[^`\\$]+', String.Backtick),
- ],
- 'interp-inside': [
- # TODO: should this include single-line comments and allow nesting strings?
+ (r'\$', String.Backtick),
+ (r'[^`\\$]+', String.Backtick),
+ ],
+ 'interp-inside': [
+ # TODO: should this include single-line comments and allow nesting strings?
(r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- # (\\\\|\\`|[^`])*`', String.Backtick),
- }
-
-
-class KalLexer(RegexLexer):
- """
- For `Kal`_ source code.
-
- .. _Kal: http://rzimmerman.github.io/kal
-
-
- .. versionadded:: 2.0
- """
-
- name = 'Kal'
- aliases = ['kal']
- filenames = ['*.kal']
- mimetypes = ['text/kal', 'application/kal']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'###[^#].*?###', Comment.Multiline),
- (r'#(?!##[^#]).*?\n', Comment.Single),
- ],
- 'functiondef': [
- (r'[$a-zA-Z_][\w$]*\s*', Name.Function, '#pop'),
- include('commentsandwhitespace'),
- ],
- 'classdef': [
- (r'\binherits\s+from\b', Keyword),
- (r'[$a-zA-Z_][\w$]*\s*\n', Name.Class, '#pop'),
- (r'[$a-zA-Z_][\w$]*\s*', Name.Class),
- include('commentsandwhitespace'),
- ],
- 'listcomprehension': [
- (r'\]', Punctuation, '#pop'),
- (r'\b(property|value)\b', Keyword),
- include('root'),
- ],
- 'waitfor': [
- (r'\n', Punctuation, '#pop'),
- (r'\bfrom\b', Keyword),
- include('root'),
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex),
- (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
- Operator),
- (r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
- r'doesnt\s+exist)\b', Operator.Word),
- (r'(?:\([^()]+\))?\s*>', Name.Function),
- (r'[{(]', Punctuation),
- (r'\[', Punctuation, 'listcomprehension'),
- (r'[})\].,]', Punctuation),
- (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
- (r'\bclass\b', Keyword.Declaration, 'classdef'),
- (r'\b(safe\s+)?wait\s+for\b', Keyword, 'waitfor'),
- (r'\b(me|this)(\.[$a-zA-Z_][\w.$]*)?\b', Name.Variable.Instance),
- (r'(?<![.$])(for(\s+(parallel|series))?|in|of|while|until|'
- r'break|return|continue|'
- r'when|if|unless|else|otherwise|except\s+when|'
- r'throw|raise|fail\s+with|try|catch|finally|new|delete|'
- r'typeof|instanceof|super|run\s+in\s+parallel|'
- r'inherits\s+from)\b', Keyword),
- (r'(?<![.$])(true|false|yes|no|on|off|null|nothing|none|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|'
- r'window|'
- r'print)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][\w.$]*\s*(:|[+\-*/]?\=)?\b', Name.Variable),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all kal strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted string don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class LiveScriptLexer(RegexLexer):
- """
- For `LiveScript`_ source code.
-
- .. _LiveScript: http://gkz.github.com/LiveScript/
-
- .. versionadded:: 1.6
- """
-
- name = 'LiveScript'
- aliases = ['live-script', 'livescript']
- filenames = ['*.ls']
- mimetypes = ['text/livescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'#.*?\n', Comment.Single),
- ],
- 'multilineregex': [
- include('commentsandwhitespace'),
- (r'//([gim]+\b|\B)', String.Regex, '#pop'),
- (r'/', String.Regex),
- (r'[^/#]+', String.Regex)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'//', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- default('#pop'),
- ],
- 'root': [
-            # this next expr would lead to an infinite loop: root -> slashstartsregex
- # (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
- r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
- (r'\+\+|&&|(?<![.$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
- r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
- r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
- r'[+*`%&|^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![.$])(for|own|in|of|while|until|loop|break|'
- r'return|continue|switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by|const|var|to|til)\b', Keyword,
- 'slashstartsregex'),
- (r'(?<![.$])(true|false|yes|no|on|off|'
- r'null|NaN|Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][\w-]*', Name.Other, 'slashstartsregex'),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
- (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- (r'\\\S+', String),
- (r'<\[.*?\]>', String),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
-            # note that all LiveScript strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
-            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class DartLexer(RegexLexer):
- """
- For `Dart <http://dartlang.org/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Dart'
- aliases = ['dart']
- filenames = ['*.dart']
- mimetypes = ['text/x-dart']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('string_literal'),
- (r'#!(.*?)$', Comment.Preproc),
- (r'\b(import|export)\b', Keyword, 'import_decl'),
- (r'\b(library|source|part of|part)\b', Keyword),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'\b(class)\b(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
- Keyword),
+ include('root'),
+ ],
+ # (\\\\|\\`|[^`])*`', String.Backtick),
+ }
+
+
+class KalLexer(RegexLexer):
+ """
+ For `Kal`_ source code.
+
+ .. _Kal: http://rzimmerman.github.io/kal
+
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Kal'
+ aliases = ['kal']
+ filenames = ['*.kal']
+ mimetypes = ['text/kal', 'application/kal']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'functiondef': [
+ (r'[$a-zA-Z_][\w$]*\s*', Name.Function, '#pop'),
+ include('commentsandwhitespace'),
+ ],
+ 'classdef': [
+ (r'\binherits\s+from\b', Keyword),
+ (r'[$a-zA-Z_][\w$]*\s*\n', Name.Class, '#pop'),
+ (r'[$a-zA-Z_][\w$]*\s*', Name.Class),
+ include('commentsandwhitespace'),
+ ],
+ 'listcomprehension': [
+ (r'\]', Punctuation, '#pop'),
+ (r'\b(property|value)\b', Keyword),
+ include('root'),
+ ],
+ 'waitfor': [
+ (r'\n', Punctuation, '#pop'),
+ (r'\bfrom\b', Keyword),
+ include('root'),
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex),
+ (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
+ Operator),
+ (r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
+ r'doesnt\s+exist)\b', Operator.Word),
+ (r'(?:\([^()]+\))?\s*>', Name.Function),
+ (r'[{(]', Punctuation),
+ (r'\[', Punctuation, 'listcomprehension'),
+ (r'[})\].,]', Punctuation),
+ (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
+ (r'\bclass\b', Keyword.Declaration, 'classdef'),
+ (r'\b(safe\s+)?wait\s+for\b', Keyword, 'waitfor'),
+ (r'\b(me|this)(\.[$a-zA-Z_][\w.$]*)?\b', Name.Variable.Instance),
+ (r'(?<![.$])(for(\s+(parallel|series))?|in|of|while|until|'
+ r'break|return|continue|'
+ r'when|if|unless|else|otherwise|except\s+when|'
+ r'throw|raise|fail\s+with|try|catch|finally|new|delete|'
+ r'typeof|instanceof|super|run\s+in\s+parallel|'
+ r'inherits\s+from)\b', Keyword),
+ (r'(?<![.$])(true|false|yes|no|on|off|null|nothing|none|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|'
+ r'window|'
+ r'print)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w.$]*\s*(:|[+\-*/]?\=)?\b', Name.Variable),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+            # note that all Kal strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single quoted strings don't need " escapses
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
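# A minimal sketch of the state-machine idea behind the 'dqs'/'interpoling_string'
# states above (names and sample input are illustrative; only the standard
# pygments RegexLexer API is assumed): '#{' pushes an interpolation state that
# re-includes 'root', and '}' pops back into the string, so interpolated code is
# highlighted like ordinary code.
from pygments.lexer import RegexLexer, include
from pygments.token import Name, String, Text


class InterpolationDemoLexer(RegexLexer):
    tokens = {
        'root': [
            (r'"', String, 'dqs'),                 # opening quote enters the string
            (r'[a-zA-Z_]\w*', Name),               # bare identifiers
            (r'\s+', Text),
        ],
        'dqs': [
            (r'"', String, '#pop'),                # closing quote leaves the string
            (r'#\{', String.Interpol, 'interp'),   # '#{' starts an interpolation
            (r'[^#"\\]+', String),                 # plain string text
            (r'[#\\]', String),                    # stray '#' or backslash, one at a time
        ],
        'interp': [
            (r'\}', String.Interpol, '#pop'),      # '}' returns to the string
            include('root'),                       # interpolated code is plain code
        ],
    }

# e.g. list(InterpolationDemoLexer().get_tokens('"hi #{name}"'))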
+
+class LiveScriptLexer(RegexLexer):
+ """
+ For `LiveScript`_ source code.
+
+ .. _LiveScript: http://gkz.github.com/LiveScript/
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'LiveScript'
+ aliases = ['live-script', 'livescript']
+ filenames = ['*.ls']
+ mimetypes = ['text/livescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'#.*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ include('commentsandwhitespace'),
+ (r'//([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'/', String.Regex),
+ (r'[^/#]+', String.Regex)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'//', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop'),
+ ],
+ 'root': [
+            # this next expr would lead to an infinite loop: root -> slashstartsregex
+ # (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
+ r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
+ (r'\+\+|&&|(?<![.$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
+ r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+ r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
+ r'[+*`%&|^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![.$])(for|own|in|of|while|until|loop|break|'
+ r'return|continue|switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by|const|var|to|til)\b', Keyword,
+ 'slashstartsregex'),
+ (r'(?<![.$])(true|false|yes|no|on|off|'
+ r'null|NaN|Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][\w-]*', Name.Other, 'slashstartsregex'),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
+ (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ (r'\\\S+', String),
+ (r'<\[.*?\]>', String),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+            # note that all LiveScript strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single quoted strings don't need " escapses
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class DartLexer(RegexLexer):
+ """
+ For `Dart <http://dartlang.org/>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Dart'
+ aliases = ['dart']
+ filenames = ['*.dart']
+ mimetypes = ['text/x-dart']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ include('string_literal'),
+ (r'#!(.*?)$', Comment.Preproc),
+ (r'\b(import|export)\b', Keyword, 'import_decl'),
+ (r'\b(library|source|part of|part)\b', Keyword),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'\b(class)\b(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
+ Keyword),
(r'\b(abstract|async|await|const|extends|factory|final|get|'
r'implements|native|operator|set|static|sync|typedef|var|with|'
r'yield)\b', Keyword.Declaration),
(r'\b(bool|double|dynamic|int|num|Object|String|void)\b', Keyword.Type),
- (r'\b(false|null|true)\b', Keyword.Constant),
- (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
+ (r'\b(false|null|true)\b', Keyword.Constant),
+ (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
(r'@[a-zA-Z_$]\w*', Name.Decorator),
- (r'[a-zA-Z_$]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[(){}\[\],.;]', Punctuation),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
- (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
- (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
- (r'\n', Text)
- # pseudo-keyword negate intentionally left out
- ],
- 'class': [
- (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
- ],
- 'import_decl': [
- include('string_literal'),
- (r'\s+', Text),
- (r'\b(as|show|hide)\b', Keyword),
- (r'[a-zA-Z_$]\w*', Name),
- (r'\,', Punctuation),
- (r'\;', Punctuation, '#pop')
- ],
- 'string_literal': [
- # Raw strings.
- (r'r"""([\w\W]*?)"""', String.Double),
- (r"r'''([\w\W]*?)'''", String.Single),
- (r'r"(.*?)"', String.Double),
- (r"r'(.*?)'", String.Single),
- # Normal Strings.
- (r'"""', String.Double, 'string_double_multiline'),
- (r"'''", String.Single, 'string_single_multiline'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single')
- ],
- 'string_common': [
- (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])",
- String.Escape),
- (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
- (r'(\$\{)(.*?)(\})',
- bygroups(String.Interpol, using(this), String.Interpol))
- ],
- 'string_double': [
- (r'"', String.Double, '#pop'),
- (r'[^"$\\\n]+', String.Double),
- include('string_common'),
- (r'\$+', String.Double)
- ],
- 'string_double_multiline': [
- (r'"""', String.Double, '#pop'),
- (r'[^"$\\]+', String.Double),
- include('string_common'),
- (r'(\$|\")+', String.Double)
- ],
- 'string_single': [
- (r"'", String.Single, '#pop'),
- (r"[^'$\\\n]+", String.Single),
- include('string_common'),
- (r'\$+', String.Single)
- ],
- 'string_single_multiline': [
- (r"'''", String.Single, '#pop'),
- (r'[^\'$\\]+', String.Single),
- include('string_common'),
- (r'(\$|\')+', String.Single)
- ]
- }
-
-
-class TypeScriptLexer(RegexLexer):
- """
- For `TypeScript <http://typescriptlang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'TypeScript'
- aliases = ['ts', 'typescript']
+ (r'[a-zA-Z_$]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[(){}\[\],.;]', Punctuation),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
+ (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
+ (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
+ (r'\n', Text)
+ # pseudo-keyword negate intentionally left out
+ ],
+ 'class': [
+ (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
+ ],
+ 'import_decl': [
+ include('string_literal'),
+ (r'\s+', Text),
+ (r'\b(as|show|hide)\b', Keyword),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'\,', Punctuation),
+ (r'\;', Punctuation, '#pop')
+ ],
+ 'string_literal': [
+ # Raw strings.
+ (r'r"""([\w\W]*?)"""', String.Double),
+ (r"r'''([\w\W]*?)'''", String.Single),
+ (r'r"(.*?)"', String.Double),
+ (r"r'(.*?)'", String.Single),
+ # Normal Strings.
+ (r'"""', String.Double, 'string_double_multiline'),
+ (r"'''", String.Single, 'string_single_multiline'),
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single')
+ ],
+ 'string_common': [
+ (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])",
+ String.Escape),
+ (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(String.Interpol, using(this), String.Interpol))
+ ],
+ 'string_double': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"$\\\n]+', String.Double),
+ include('string_common'),
+ (r'\$+', String.Double)
+ ],
+ 'string_double_multiline': [
+ (r'"""', String.Double, '#pop'),
+ (r'[^"$\\]+', String.Double),
+ include('string_common'),
+ (r'(\$|\")+', String.Double)
+ ],
+ 'string_single': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'$\\\n]+", String.Single),
+ include('string_common'),
+ (r'\$+', String.Single)
+ ],
+ 'string_single_multiline': [
+ (r"'''", String.Single, '#pop'),
+ (r'[^\'$\\]+', String.Single),
+ include('string_common'),
+ (r'(\$|\')+', String.Single)
+ ]
+ }
+
+
+class TypeScriptLexer(RegexLexer):
+ """
+ For `TypeScript <http://typescriptlang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'TypeScript'
+ aliases = ['ts', 'typescript']
filenames = ['*.ts', '*.tsx']
- mimetypes = ['text/x-typescript']
-
- flags = re.DOTALL | re.MULTILINE
-
+ mimetypes = ['text/x-typescript']
+
+ flags = re.DOTALL | re.MULTILINE
+
# Higher priority than the TypoScriptLexer, as TypeScript is far more
# common these days
priority = 0.5
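    # A hedged sketch of what 'priority' does here (standard Pygments helpers
    # assumed; the sample snippet is illustrative): priority breaks ties when
    # more than one lexer claims the same input, which is the TypoScript
    # overlap the comment above refers to.
    #
    #   from pygments.lexers import guess_lexer_for_filename
    #   lexer = guess_lexer_for_filename('app.ts', 'let x: number = 1;')
    #   print(lexer.name)  # expected to resolve to 'TypeScript'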
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|void|of|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- # Match stuff like: module name {...}
- (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
- bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
- # Match variable type keywords
- (r'\b(string|bool|number)\b', Keyword.Type),
- # Match stuff like: constructor
- (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
- # Match stuff like: super(argument, list)
- (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
- bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
- # Match stuff like: function() {...}
- (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
- # Match stuff like: (function: return type)
- (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
- bygroups(Name.Other, Text, Keyword.Type)),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ # Match stuff like: module name {...}
+ (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
+ bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
+ # Match variable type keywords
+ (r'\b(string|bool|number)\b', Keyword.Type),
+ # Match stuff like: constructor
+ (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
+ # Match stuff like: super(argument, list)
+ (r'(super)(\s*)(\([\w,?.$\s]+\s*\))',
+ bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
+ # Match stuff like: function() {...}
+ (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
+ # Match stuff like: (function: return type)
+ (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
+ bygroups(Name.Other, Text, Keyword.Type)),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
(r'`', String.Backtick, 'interp'),
- # Match stuff like: Decorators
- (r'@\w+', Keyword.Declaration),
+ # Match stuff like: Decorators
+ (r'@\w+', Keyword.Declaration),
],
# The 'interp*' rules match those in JavascriptLexer. Changes made
@@ -537,145 +537,145 @@ class TypeScriptLexer(RegexLexer):
(r'\}', String.Interpol, '#pop'),
include('root'),
],
- }
-
-
-class LassoLexer(RegexLexer):
- """
- For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
- syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
- HTML, use the `LassoHtmlLexer`.
-
- Additional options accepted:
-
- `builtinshighlighting`
- If given and ``True``, highlight builtin types, traits, methods, and
- members (default: ``True``).
- `requiredelimiters`
- If given and ``True``, only highlight code between delimiters as Lasso
- (default: ``False``).
-
- .. versionadded:: 1.6
- """
-
- name = 'Lasso'
- aliases = ['lasso', 'lassoscript']
- filenames = ['*.lasso', '*.lasso[89]']
- alias_filenames = ['*.incl', '*.inc', '*.las']
- mimetypes = ['text/x-lasso']
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
+ }
+
+
+class LassoLexer(RegexLexer):
+ """
+ For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
+ syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
+ HTML, use the `LassoHtmlLexer`.
+
+ Additional options accepted:
+
+ `builtinshighlighting`
+ If given and ``True``, highlight builtin types, traits, methods, and
+ members (default: ``True``).
+ `requiredelimiters`
+ If given and ``True``, only highlight code between delimiters as Lasso
+ (default: ``False``).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Lasso'
+ aliases = ['lasso', 'lassoscript']
+ filenames = ['*.lasso', '*.lasso[89]']
+ alias_filenames = ['*.incl', '*.inc', '*.las']
+ mimetypes = ['text/x-lasso']
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
(r'(?=\[|<)', Other, 'delimiters'),
- (r'\s+', Other),
- default(('delimiters', 'lassofile')),
- ],
- 'delimiters': [
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Comment.Preproc, 'squarebrackets'),
+ (r'\s+', Other),
+ default(('delimiters', 'lassofile')),
+ ],
+ 'delimiters': [
+ (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+ (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
+ (r'\[', Comment.Preproc, 'squarebrackets'),
(r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
- (r'<(!--.*?-->)?', Other),
- (r'[^[<]+', Other),
- ],
- 'nosquarebrackets': [
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Other),
+ (r'<(!--.*?-->)?', Other),
+ (r'[^[<]+', Other),
+ ],
+ 'nosquarebrackets': [
+ (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
+ (r'\[', Other),
(r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
- (r'<(!--.*?-->)?', Other),
- (r'[^[<]+', Other),
- ],
- 'noprocess': [
- (r'\[/noprocess\]', Comment.Preproc, '#pop'),
- (r'\[', Other),
- (r'[^[]', Other),
- ],
- 'squarebrackets': [
- (r'\]', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'anglebrackets': [
- (r'\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'lassofile': [
- (r'\]|\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'whitespacecomments': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*\*!.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- ],
- 'lasso': [
- # whitespace/comments
- include('whitespacecomments'),
-
- # literals
- (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'(infinity|NaN)\b', Number),
- (r"'", String.Single, 'singlestring'),
- (r'"', String.Double, 'doublestring'),
- (r'`[^`]*`', String.Backtick),
-
- # names
- (r'\$[a-z_][\w.]*', Name.Variable),
+ (r'<(!--.*?-->)?', Other),
+ (r'[^[<]+', Other),
+ ],
+ 'noprocess': [
+ (r'\[/noprocess\]', Comment.Preproc, '#pop'),
+ (r'\[', Other),
+ (r'[^[]', Other),
+ ],
+ 'squarebrackets': [
+ (r'\]', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'anglebrackets': [
+ (r'\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'lassofile': [
+ (r'\]|\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'whitespacecomments': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*\*!.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ ],
+ 'lasso': [
+ # whitespace/comments
+ include('whitespacecomments'),
+
+ # literals
+ (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
+ (r'0x[\da-f]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'(infinity|NaN)\b', Number),
+ (r"'", String.Single, 'singlestring'),
+ (r'"', String.Double, 'doublestring'),
+ (r'`[^`]*`', String.Backtick),
+
+ # names
+ (r'\$[a-z_][\w.]*', Name.Variable),
(r'#([a-z_][\w.]*|\d+\b)', Name.Variable.Instance),
- (r"(\.\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
- (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
- (r'(\.\.?\s*)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
- (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Operator, Name.Other.Member)),
- (r'(?<!->)(self|inherited|currentcapture|givenblock)\b',
- Name.Builtin.Pseudo),
- (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
- r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
- r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
- r'Error_InvalidDatabase|Error_InvalidPassword|'
- r'Error_InvalidUsername|Error_ModuleNotFound|'
- r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
- r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
- r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
- r'Error_UpdateError)\b', Name.Exception),
-
- # definitions
- (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
- (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator,
- Name.Function), 'signature'),
- (r'(define)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
- (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*\())', bygroups(Keyword, Text, Name.Function),
- 'signature'),
- (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword, Text, Name.Function)),
-
- # keywords
- (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
- (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
- (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
- r'null|boolean|bytes|keyword|list|locale|queue|set|stack|'
- r'staticarray)\b', Keyword.Type),
- (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
- (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
- (r'require\b', Keyword, 'requiresection'),
- (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
- (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
- r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
- r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
- r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
+ (r"(\.\s*)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
+ (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
+ (r'(\.\.?\s*)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
+ (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Operator, Name.Other.Member)),
+ (r'(?<!->)(self|inherited|currentcapture|givenblock)\b',
+ Name.Builtin.Pseudo),
+ (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
+ r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
+ r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
+ r'Error_InvalidDatabase|Error_InvalidPassword|'
+ r'Error_InvalidUsername|Error_ModuleNotFound|'
+ r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
+ r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
+ r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
+ r'Error_UpdateError)\b', Name.Exception),
+
+ # definitions
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator,
+ Name.Function), 'signature'),
+ (r'(define)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
+ (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*\())', bygroups(Keyword, Text, Name.Function),
+ 'signature'),
+ (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword, Text, Name.Function)),
+
+ # keywords
+ (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
+ (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
+ (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
+ r'null|boolean|bytes|keyword|list|locale|queue|set|stack|'
+ r'staticarray)\b', Keyword.Type),
+ (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
+ (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
+ (r'require\b', Keyword, 'requiresection'),
+ (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
+ (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
+ r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
+ r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
+ r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|Link_FirstRecord|'
r'Link_LastGroup|Link_LastRecord|Link_NextGroup|Link_NextRecord|'
r'Link_PrevGroup|Link_PrevRecord|Log|Loop|Output_None|Portal|'
@@ -690,778 +690,778 @@ class LassoLexer(RegexLexer):
r'join|let|match|max|min|on|order|parent|protected|provide|public|'
r'require|returnhome|skip|split_thread|sum|take|thread|to|trait|'
r'type|where|with|yield|yieldhome)\b',
- bygroups(Punctuation, Keyword)),
-
- # other
- (r',', Punctuation, 'commamember'),
- (r'(and|or|not)\b', Operator.Word),
- (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))',
- bygroups(Name, Name.Label, Operator)),
- (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
- (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
- bygroups(Operator, Operator.Word)),
- (r':=|[-+*/%=<>&|!?\\]+', Operator),
- (r'[{}():;,@^]', Punctuation),
- ],
- 'singlestring': [
- (r"'", String.Single, '#pop'),
- (r"[^'\\]+", String.Single),
- include('escape'),
- (r"\\", String.Single),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- include('escape'),
- (r'\\', String.Double),
- ],
- 'escape': [
- (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:\n\r]+:|'
- r'[abefnrtv?"\'\\]|$)', String.Escape),
- ],
- 'signature': [
- (r'=>', Operator, '#pop'),
- (r'\)', Punctuation, '#pop'),
- (r'[(,]', Punctuation, 'parameter'),
- include('lasso'),
- ],
- 'parameter': [
- (r'\)', Punctuation, '#pop'),
- (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
- (r'\.\.\.', Name.Builtin.Pseudo),
- include('lasso'),
- ],
- 'requiresection': [
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
- (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r',', Punctuation),
- include('whitespacecomments'),
- ],
- 'requiresignature': [
- (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
- (r'\)', Punctuation, '#pop:2'),
- (r'-?[a-z_][\w.]*', Name.Attribute),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r'\.\.\.', Name.Builtin.Pseudo),
- (r'[(,]', Punctuation),
- include('whitespacecomments'),
- ],
- 'commamember': [
- (r'(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
- Name.Function, 'signature'),
- include('whitespacecomments'),
- default('#pop'),
- ],
- }
-
- def __init__(self, **options):
- self.builtinshighlighting = get_bool_opt(
- options, 'builtinshighlighting', True)
- self.requiredelimiters = get_bool_opt(
- options, 'requiredelimiters', False)
-
- self._builtins = set()
- self._members = set()
- if self.builtinshighlighting:
- from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
- for key, value in iteritems(BUILTINS):
- self._builtins.update(value)
- for key, value in iteritems(MEMBERS):
- self._members.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.requiredelimiters:
- stack.append('delimiters')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if (token is Name.Other and value.lower() in self._builtins or
- token is Name.Other.Member and
- value.lower().rstrip('=') in self._members):
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- rv = 0.0
- if 'bin/lasso9' in text:
- rv += 0.8
- if re.search(r'<\?lasso', text, re.I):
- rv += 0.4
- if re.search(r'local\(', text, re.I):
- rv += 0.4
- return rv
-
-
-class ObjectiveJLexer(RegexLexer):
- """
- For Objective-J source code with preprocessor directives.
-
- .. versionadded:: 1.3
- """
-
- name = 'Objective-J'
- aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
- filenames = ['*.j']
- mimetypes = ['text/x-objective-j']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # function definition
- (r'^(' + _ws + r'[+-]' + _ws + r')([(a-zA-Z_].*?[^(])(' + _ws + r'\{)',
- bygroups(using(this), using(this, state='function_signature'),
- using(this))),
-
- # class definition
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- 'classname'),
- (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
- 'forward_classname'),
- (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
-
- include('statements'),
- ('[{()}]', Punctuation),
- (';', Punctuation),
- ],
- 'whitespace': [
- (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
-
- (r'#if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
-
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'<!--', Comment),
- ],
- 'slashstartsregex': [
- include('whitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop'),
- ],
- 'badregex': [
- (r'\n', Text, '#pop'),
- ],
- 'statements': [
- (r'(L|@)?"', String, 'string'),
- (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
-
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
- r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
-
- (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-
- (r'(@selector|@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
- r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
-
- (r'(int|long|float|short|double|char|unsigned|signed|void|'
- r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
- Keyword.Type),
-
- (r'(self|super)\b', Name.Builtin),
-
- (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
- r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
- r'SQRT2)\b', Keyword.Constant),
-
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
-
- (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
- bygroups(Name.Function, using(this))),
-
- (r'[$a-zA-Z_]\w*', Name),
- ],
- 'classname': [
- # interface definition that inherits
- (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
- r')([a-zA-Z_]\w*)?',
- bygroups(Name.Class, using(this), Name.Class), '#pop'),
- # interface definition for a category
- (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
- bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
- # simple interface / implementation
- (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
- ],
- 'forward_classname': [
- (r'([a-zA-Z_]\w*)(\s*,\s*)',
- bygroups(Name.Class, Text), '#push'),
- (r'([a-zA-Z_]\w*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop'),
- ],
- 'function_signature': [
- include('whitespace'),
-
- # start of a selector w/ parameters
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), 'function_parameters'),
-
- # no-param function
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), "#pop"),
-
- # no return type given, start of a selector w/ parameters
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups(Name.Function), 'function_parameters'),
-
- # no return type given, no-param function
- (r'([$a-zA-Z_]\w+)', # function name
- bygroups(Name.Function), "#pop"),
-
- default('#pop'),
- ],
- 'function_parameters': [
- include('whitespace'),
-
- # parameters
- (r'(\(' + _ws + ')' # open paren
- r'([^)]+)' # type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # param name
- bygroups(using(this), Keyword.Type, using(this), Text)),
-
- # one piece of a selector name
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- Name.Function),
-
- # smallest possible selector piece
- (r'(:)', Name.Function),
-
- # var args
- (r'(,' + _ws + r'\.\.\.)', using(this)),
-
- # param name
- (r'([$a-zA-Z_]\w+)', Text),
- ],
- 'expression': [
- (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
- Punctuation)),
- (r'(\))', Punctuation, "#pop"),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- def analyse_text(text):
+ bygroups(Punctuation, Keyword)),
+
+ # other
+ (r',', Punctuation, 'commamember'),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))',
+ bygroups(Name, Name.Label, Operator)),
+ (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
+ (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
+ bygroups(Operator, Operator.Word)),
+ (r':=|[-+*/%=<>&|!?\\]+', Operator),
+ (r'[{}():;,@^]', Punctuation),
+ ],
+ 'singlestring': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'\\]+", String.Single),
+ include('escape'),
+ (r"\\", String.Single),
+ ],
+ 'doublestring': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"\\]+', String.Double),
+ include('escape'),
+ (r'\\', String.Double),
+ ],
+ 'escape': [
+ (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:\n\r]+:|'
+ r'[abefnrtv?"\'\\]|$)', String.Escape),
+ ],
+ 'signature': [
+ (r'=>', Operator, '#pop'),
+ (r'\)', Punctuation, '#pop'),
+ (r'[(,]', Punctuation, 'parameter'),
+ include('lasso'),
+ ],
+ 'parameter': [
+ (r'\)', Punctuation, '#pop'),
+ (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ include('lasso'),
+ ],
+ 'requiresection': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
+ (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r',', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'requiresignature': [
+ (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
+ (r'\)', Punctuation, '#pop:2'),
+ (r'-?[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ (r'[(,]', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'commamember': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
+ Name.Function, 'signature'),
+ include('whitespacecomments'),
+ default('#pop'),
+ ],
+ }
+
+ def __init__(self, **options):
+ self.builtinshighlighting = get_bool_opt(
+ options, 'builtinshighlighting', True)
+ self.requiredelimiters = get_bool_opt(
+ options, 'requiredelimiters', False)
+
+ self._builtins = set()
+ self._members = set()
+ if self.builtinshighlighting:
+ from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
+ for key, value in iteritems(BUILTINS):
+ self._builtins.update(value)
+ for key, value in iteritems(MEMBERS):
+ self._members.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.requiredelimiters:
+ stack.append('delimiters')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if (token is Name.Other and value.lower() in self._builtins or
+ token is Name.Other.Member and
+ value.lower().rstrip('=') in self._members):
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
+ def analyse_text(text):
+ rv = 0.0
+ if 'bin/lasso9' in text:
+ rv += 0.8
+ if re.search(r'<\?lasso', text, re.I):
+ rv += 0.4
+ if re.search(r'local\(', text, re.I):
+ rv += 0.4
+ return rv
+
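# A short usage sketch for analyse_text() above (the Lasso snippet is
# illustrative): guess_lexer() calls analyse_text() on every lexer, and the
# 'bin/lasso9' shebang alone already scores 0.8 here, so content-based
# guessing should normally land on this lexer.
#
#   from pygments.lexers import guess_lexer
#   print(guess_lexer("#!/usr/bin/lasso9\nlocal(x = 1)").name)  # expected: 'Lasso'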
+
+class ObjectiveJLexer(RegexLexer):
+ """
+ For Objective-J source code with preprocessor directives.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Objective-J'
+ aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
+ filenames = ['*.j']
+ mimetypes = ['text/x-objective-j']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # function definition
+ (r'^(' + _ws + r'[+-]' + _ws + r')([(a-zA-Z_].*?[^(])(' + _ws + r'\{)',
+ bygroups(using(this), using(this, state='function_signature'),
+ using(this))),
+
+ # class definition
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ 'classname'),
+ (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
+ 'forward_classname'),
+ (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
+
+ include('statements'),
+ ('[{()}]', Punctuation),
+ (';', Punctuation),
+ ],
+ 'whitespace': [
+ (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+
+ (r'#if\s+0', Comment.Preproc, 'if0'),
+ (r'#', Comment.Preproc, 'macro'),
+
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'<!--', Comment),
+ ],
+ 'slashstartsregex': [
+ include('whitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop'),
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop'),
+ ],
+ 'statements': [
+ (r'(L|@)?"', String, 'string'),
+ (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
+ r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
+
+ (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+
+ (r'(@selector|@private|@protected|@public|@encode|'
+ r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
+ r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
+
+ (r'(int|long|float|short|double|char|unsigned|signed|void|'
+ r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
+ Keyword.Type),
+
+ (r'(self|super)\b', Name.Builtin),
+
+ (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
+ r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
+ r'SQRT2)\b', Keyword.Constant),
+
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+
+ (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
+ bygroups(Name.Function, using(this))),
+
+ (r'[$a-zA-Z_]\w*', Name),
+ ],
+ 'classname': [
+ # interface definition that inherits
+ (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
+ r')([a-zA-Z_]\w*)?',
+ bygroups(Name.Class, using(this), Name.Class), '#pop'),
+ # interface definition for a category
+ (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
+ bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
+ # simple interface / implementation
+ (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
+ ],
+ 'forward_classname': [
+ (r'([a-zA-Z_]\w*)(\s*,\s*)',
+ bygroups(Name.Class, Text), '#push'),
+ (r'([a-zA-Z_]\w*)(\s*;?)',
+ bygroups(Name.Class, Text), '#pop'),
+ ],
+ 'function_signature': [
+ include('whitespace'),
+
+ # start of a selector w/ parameters
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), 'function_parameters'),
+
+ # no-param function
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), "#pop"),
+
+ # no return type given, start of a selector w/ parameters
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(Name.Function), 'function_parameters'),
+
+ # no return type given, no-param function
+ (r'([$a-zA-Z_]\w+)', # function name
+ bygroups(Name.Function), "#pop"),
+
+ default('#pop'),
+ ],
+ 'function_parameters': [
+ include('whitespace'),
+
+ # parameters
+ (r'(\(' + _ws + ')' # open paren
+ r'([^)]+)' # type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # param name
+ bygroups(using(this), Keyword.Type, using(this), Text)),
+
+ # one piece of a selector name
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ Name.Function),
+
+ # smallest possible selector piece
+ (r'(:)', Name.Function),
+
+ # var args
+ (r'(,' + _ws + r'\.\.\.)', using(this)),
+
+ # param name
+ (r'([$a-zA-Z_]\w+)', Text),
+ ],
+ 'expression': [
+ (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
+ Punctuation)),
+ (r'(\))', Punctuation, "#pop"),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
if re.search(r'^\s*@import\s+[<"]', text, re.MULTILINE):
- # special directive found in most Objective-J files
- return True
- return False
-
-
-class CoffeeScriptLexer(RegexLexer):
- """
- For `CoffeeScript`_ source code.
-
- .. _CoffeeScript: http://coffeescript.org
-
- .. versionadded:: 1.3
- """
-
- name = 'CoffeeScript'
- aliases = ['coffee-script', 'coffeescript', 'coffee']
- filenames = ['*.coffee']
- mimetypes = ['text/coffeescript']
-
+ # special directive found in most Objective-J files
+ return True
+ return False
+
+
+class CoffeeScriptLexer(RegexLexer):
+ """
+ For `CoffeeScript`_ source code.
+
+ .. _CoffeeScript: http://coffeescript.org
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'CoffeeScript'
+ aliases = ['coffee-script', 'coffeescript', 'coffee']
+ filenames = ['*.coffee']
+ mimetypes = ['text/coffeescript']
+
_operator_re = (
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
r'\|\||\\(?=\n)|'
r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?')
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'###[^#].*?###', Comment.Multiline),
- (r'#(?!##[^#]).*?\n', Comment.Single),
- ],
- 'multilineregex': [
- (r'[^/#]+', String.Regex),
- (r'///([gim]+\b|\B)', String.Regex, '#pop'),
- (r'#\{', String.Interpol, 'interpoling_string'),
- (r'[/#]', String.Regex),
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'///', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ (r'[^/#]+', String.Regex),
+ (r'///([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'#\{', String.Interpol, 'interpoling_string'),
+ (r'[/#]', String.Regex),
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'///', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
# This isn't really guarding against mishighlighting well-formed
# code, just the ability to infinite-loop between root and
# slashstartsregex.
(r'/', Operator),
- default('#pop'),
- ],
- 'root': [
- include('commentsandwhitespace'),
+ default('#pop'),
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
(r'^(?=\s|/)', Text, 'slashstartsregex'),
(_operator_re, Operator, 'slashstartsregex'),
(r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![.$])(for|own|in|of|while|until|'
- r'loop|break|return|continue|'
- r'switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
- (r'(?<![.$])(true|false|yes|no|on|off|null|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![.$])(for|own|in|of|while|until|'
+ r'loop|break|return|continue|'
+ r'switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
+ (r'(?<![.$])(true|false|yes|no|on|off|null|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
(r'@?[$a-zA-Z_][\w$]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all coffee script strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
-            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class MaskLexer(RegexLexer):
- """
- For `Mask <http://github.com/atmajs/MaskJS>`__ markup.
-
- .. versionadded:: 2.0
- """
- name = 'Mask'
- aliases = ['mask']
- filenames = ['*.mask']
- mimetypes = ['text/x-mask']
-
- flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'[{};>]', Punctuation),
- (r"'''", String, 'string-trpl-single'),
- (r'"""', String, 'string-trpl-double'),
- (r"'", String, 'string-single'),
- (r'"', String, 'string-double'),
- (r'([\w-]+)', Name.Tag, 'node'),
- (r'([^.#;{>\s]+)', Name.Class, 'node'),
- (r'(#[\w-]+)', Name.Function, 'node'),
- (r'(\.[\w-]+)', Name.Variable.Class, 'node')
- ],
- 'string-base': [
- (r'\\.', String.Escape),
- (r'~\[', String.Interpol, 'interpolation'),
- (r'.', String.Single),
- ],
- 'string-single': [
- (r"'", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-double': [
- (r'"', String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-single': [
- (r"'''", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-double': [
- (r'"""', String.Single, '#pop'),
- include('string-base')
- ],
- 'interpolation': [
- (r'\]', String.Interpol, '#pop'),
- (r'\s*:', String.Interpol, 'expression'),
- (r'\s*\w+:', Name.Other),
- (r'[^\]]+', String.Interpol)
- ],
- 'expression': [
- (r'[^\]]+', using(JavascriptLexer), '#pop')
- ],
- 'node': [
- (r'\s+', Text),
- (r'\.', Name.Variable.Class, 'node-class'),
- (r'\#', Name.Function, 'node-id'),
- (r'style[ \t]*=', Name.Attribute, 'node-attr-style-value'),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'node-attr-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'[>{;]', Punctuation, '#pop')
- ],
- 'node-class': [
- (r'[\w-]+', Name.Variable.Class),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-id': [
- (r'[\w-]+', Name.Function),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-attr-value': [
- (r'\s+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r"'", String, 'string-single-pop2'),
- (r'"', String, 'string-double-pop2'),
- default('#pop')
- ],
- 'node-attr-style-value': [
- (r'\s+', Text),
- (r"'", String.Single, 'css-single-end'),
- (r'"', String.Single, 'css-double-end'),
- include('node-attr-value')
- ],
- 'css-base': [
- (r'\s+', Text),
- (r";", Punctuation),
- (r"[\w\-]+\s*:", Name.Builtin)
- ],
- 'css-single-end': [
- include('css-base'),
- (r"'", String.Single, '#pop:2'),
- (r"[^;']+", Name.Entity)
- ],
- 'css-double-end': [
- include('css-base'),
- (r'"', String.Single, '#pop:2'),
- (r'[^;"]+', Name.Entity)
- ],
- 'string-single-pop2': [
- (r"'", String.Single, '#pop:2'),
- include('string-base')
- ],
- 'string-double-pop2': [
- (r'"', String.Single, '#pop:2'),
- include('string-base')
- ],
- }
-
-
-class EarlGreyLexer(RegexLexer):
- """
- For `Earl-Grey`_ source code.
-
- .. _Earl-Grey: https://breuleux.github.io/earl-grey/
-
- .. versionadded:: 2.1
- """
-
- name = 'Earl Grey'
- aliases = ['earl-grey', 'earlgrey', 'eg']
- filenames = ['*.eg']
- mimetypes = ['text/x-earl-grey']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- include('control'),
- (r'[^\S\n]+', Text),
- (r';;.*\n', Comment),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all CoffeeScript strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single-quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class MaskLexer(RegexLexer):
+ """
+ For `Mask <http://github.com/atmajs/MaskJS>`__ markup.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mask'
+ aliases = ['mask']
+ filenames = ['*.mask']
+ mimetypes = ['text/x-mask']
+
+ flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'[{};>]', Punctuation),
+ (r"'''", String, 'string-trpl-single'),
+ (r'"""', String, 'string-trpl-double'),
+ (r"'", String, 'string-single'),
+ (r'"', String, 'string-double'),
+ (r'([\w-]+)', Name.Tag, 'node'),
+ (r'([^.#;{>\s]+)', Name.Class, 'node'),
+ (r'(#[\w-]+)', Name.Function, 'node'),
+ (r'(\.[\w-]+)', Name.Variable.Class, 'node')
+ ],
+ 'string-base': [
+ (r'\\.', String.Escape),
+ (r'~\[', String.Interpol, 'interpolation'),
+ (r'.', String.Single),
+ ],
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-double': [
+ (r'"', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-single': [
+ (r"'''", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-double': [
+ (r'"""', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'interpolation': [
+ (r'\]', String.Interpol, '#pop'),
+ (r'\s*:', String.Interpol, 'expression'),
+ (r'\s*\w+:', Name.Other),
+ (r'[^\]]+', String.Interpol)
+ ],
+ 'expression': [
+ (r'[^\]]+', using(JavascriptLexer), '#pop')
+ ],
+ 'node': [
+ (r'\s+', Text),
+ (r'\.', Name.Variable.Class, 'node-class'),
+ (r'\#', Name.Function, 'node-id'),
+ (r'style[ \t]*=', Name.Attribute, 'node-attr-style-value'),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'node-attr-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'[>{;]', Punctuation, '#pop')
+ ],
+ 'node-class': [
+ (r'[\w-]+', Name.Variable.Class),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-id': [
+ (r'[\w-]+', Name.Function),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-attr-value': [
+ (r'\s+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r"'", String, 'string-single-pop2'),
+ (r'"', String, 'string-double-pop2'),
+ default('#pop')
+ ],
+ 'node-attr-style-value': [
+ (r'\s+', Text),
+ (r"'", String.Single, 'css-single-end'),
+ (r'"', String.Single, 'css-double-end'),
+ include('node-attr-value')
+ ],
+ 'css-base': [
+ (r'\s+', Text),
+ (r";", Punctuation),
+ (r"[\w\-]+\s*:", Name.Builtin)
+ ],
+ 'css-single-end': [
+ include('css-base'),
+ (r"'", String.Single, '#pop:2'),
+ (r"[^;']+", Name.Entity)
+ ],
+ 'css-double-end': [
+ include('css-base'),
+ (r'"', String.Single, '#pop:2'),
+ (r'[^;"]+', Name.Entity)
+ ],
+ 'string-single-pop2': [
+ (r"'", String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ 'string-double-pop2': [
+ (r'"', String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ }
+
+
+class EarlGreyLexer(RegexLexer):
+ """
+ For `Earl-Grey`_ source code.
+
+ .. _Earl-Grey: https://breuleux.github.io/earl-grey/
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Earl Grey'
+ aliases = ['earl-grey', 'earlgrey', 'eg']
+ filenames = ['*.eg']
+ mimetypes = ['text/x-earl-grey']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ include('control'),
+ (r'[^\S\n]+', Text),
+ (r';;.*\n', Comment),
(r'[\[\]{}:(),;]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- include('errors'),
- (words((
- 'with', 'where', 'when', 'and', 'not', 'or', 'in',
- 'as', 'of', 'is'),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ include('errors'),
+ (words((
+ 'with', 'where', 'when', 'and', 'not', 'or', 'in',
+ 'as', 'of', 'is'),
prefix=r'(?<=\s|\[)', suffix=r'(?![\w$\-])'),
- Operator.Word),
+ Operator.Word),
(r'[*@]?->', Name.Function),
- (r'[+\-*/~^<>%&|?!@#.]*=', Operator.Word),
- (r'\.{2,3}', Operator.Word), # Range Operator
- (r'([+*/~^<>&|?!]+)|([#\-](?=\s))|@@+(?=\s)|=+', Operator),
+ (r'[+\-*/~^<>%&|?!@#.]*=', Operator.Word),
+ (r'\.{2,3}', Operator.Word), # Range Operator
+ (r'([+*/~^<>&|?!]+)|([#\-](?=\s))|@@+(?=\s)|=+', Operator),
(r'(?<![\w$\-])(var|let)(?:[^\w$])', Keyword.Declaration),
- include('keywords'),
- include('builtins'),
- include('assignment'),
- (r'''(?x)
+ include('keywords'),
+ include('builtins'),
+ include('assignment'),
+ (r'''(?x)
(?:()([a-zA-Z$_](?:[\w$\-]*[\w$])?)|
(?<=[\s{\[(])(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?))
- (?=.*%)''',
- bygroups(Punctuation, Name.Tag, Punctuation, Name.Class.Start), 'dbs'),
- (r'[rR]?`', String.Backtick, 'bt'),
- (r'[rR]?```', String.Backtick, 'tbt'),
+ (?=.*%)''',
+ bygroups(Punctuation, Name.Tag, Punctuation, Name.Class.Start), 'dbs'),
+ (r'[rR]?`', String.Backtick, 'bt'),
+ (r'[rR]?```', String.Backtick, 'tbt'),
(r'(?<=[\s\[{(,;])\.([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
r'(?=[\s\]}),;])', String.Symbol),
- include('nested'),
- (r'(?:[rR]|[rR]\.[gmi]{1,3})?"', String, combined('stringescape', 'dqs')),
- (r'(?:[rR]|[rR]\.[gmi]{1,3})?\'', String, combined('stringescape', 'sqs')),
- (r'"""', String, combined('stringescape', 'tdqs')),
- include('tuple'),
- include('import_paths'),
- include('name'),
- include('numbers'),
- ],
- 'dbs': [
+ include('nested'),
+ (r'(?:[rR]|[rR]\.[gmi]{1,3})?"', String, combined('stringescape', 'dqs')),
+ (r'(?:[rR]|[rR]\.[gmi]{1,3})?\'', String, combined('stringescape', 'sqs')),
+ (r'"""', String, combined('stringescape', 'tdqs')),
+ include('tuple'),
+ include('import_paths'),
+ include('name'),
+ include('numbers'),
+ ],
+ 'dbs': [
(r'(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?)(?=[.\[\s])',
- bygroups(Punctuation, Name.Class.DBS)),
+ bygroups(Punctuation, Name.Class.DBS)),
(r'(\[)([\^#][a-zA-Z$_](?:[\w$\-]*[\w$])?)(\])',
- bygroups(Punctuation, Name.Entity.DBS, Punctuation)),
- (r'\s+', Text),
- (r'%', Operator.DBS, '#pop'),
- ],
- 'import_paths': [
- (r'(?<=[\s:;,])(\.{1,3}(?:[\w\-]*/)*)(\w(?:[\w\-]*\w)*)(?=[\s;,])',
- bygroups(Text.Whitespace, Text)),
- ],
- 'assignment': [
+ bygroups(Punctuation, Name.Entity.DBS, Punctuation)),
+ (r'\s+', Text),
+ (r'%', Operator.DBS, '#pop'),
+ ],
+ 'import_paths': [
+ (r'(?<=[\s:;,])(\.{1,3}(?:[\w\-]*/)*)(\w(?:[\w\-]*\w)*)(?=[\s;,])',
+ bygroups(Text.Whitespace, Text)),
+ ],
+ 'assignment': [
(r'(\.)?([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
- r'(?=\s+[+\-*/~^<>%&|?!@#.]*\=\s)',
- bygroups(Punctuation, Name.Variable))
- ],
- 'errors': [
- (words(('Error', 'TypeError', 'ReferenceError'),
+ r'(?=\s+[+\-*/~^<>%&|?!@#.]*\=\s)',
+ bygroups(Punctuation, Name.Variable))
+ ],
+ 'errors': [
+ (words(('Error', 'TypeError', 'ReferenceError'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Name.Exception),
- (r'''(?x)
+ Name.Exception),
+ (r'''(?x)
(?<![\w$])
E\.[\w$](?:[\w$\-]*[\w$])?
(?:\.[\w$](?:[\w$\-]*[\w$])?)*
(?=[({\[?!\s])''',
- Name.Exception),
- ],
- 'control': [
- (r'''(?x)
+ Name.Exception),
+ ],
+ 'control': [
+ (r'''(?x)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?!\n)\s+
- (?!and|as|each\*|each|in|is|mod|of|or|when|where|with)
+ (?!\n)\s+
+ (?!and|as|each\*|each|in|is|mod|of|or|when|where|with)
(?=(?:[+\-*/~^<>%&|?!@#.])?[a-zA-Z$_](?:[\w$-]*[\w$])?)''',
- Keyword.Control),
+ Keyword.Control),
(r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(?!\n)\s+(?=[\'"\d{\[(])',
- Keyword.Control),
- (r'''(?x)
- (?:
- (?<=[%=])|
- (?<=[=\-]>)|
- (?<=with|each|with)|
- (?<=each\*|where)
- )(\s+)
+ Keyword.Control),
+ (r'''(?x)
+ (?:
+ (?<=[%=])|
+ (?<=[=\-]>)|
+ (?<=with|each|with)|
+ (?<=each\*|where)
+ )(\s+)
([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
- bygroups(Text, Keyword.Control, Punctuation)),
- (r'''(?x)
- (?<![+\-*/~^<>%&|?!@#.])(\s+)
+ bygroups(Text, Keyword.Control, Punctuation)),
+ (r'''(?x)
+ (?<![+\-*/~^<>%&|?!@#.])(\s+)
([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
- bygroups(Text, Keyword.Control, Punctuation)),
- ],
- 'nested': [
- (r'''(?x)
+ bygroups(Text, Keyword.Control, Punctuation)),
+ ],
+ 'nested': [
+ (r'''(?x)
(?<=[\w$\]})])(\.)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?=\s+with(?:\s|\n))''',
- bygroups(Punctuation, Name.Function)),
- (r'''(?x)
- (?<!\s)(\.)
+ (?=\s+with(?:\s|\n))''',
+ bygroups(Punctuation, Name.Function)),
+ (r'''(?x)
+ (?<!\s)(\.)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
(?=[}\]).,;:\s])''',
- bygroups(Punctuation, Name.Field)),
- (r'''(?x)
+ bygroups(Punctuation, Name.Field)),
+ (r'''(?x)
(?<=[\w$\]})])(\.)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
(?=[\[{(:])''',
- bygroups(Punctuation, Name.Function)),
- ],
- 'keywords': [
- (words((
- 'each', 'each*', 'mod', 'await', 'break', 'chain',
- 'continue', 'elif', 'expr-value', 'if', 'match',
- 'return', 'yield', 'pass', 'else', 'require', 'var',
- 'let', 'async', 'method', 'gen'),
+ bygroups(Punctuation, Name.Function)),
+ ],
+ 'keywords': [
+ (words((
+ 'each', 'each*', 'mod', 'await', 'break', 'chain',
+ 'continue', 'elif', 'expr-value', 'if', 'match',
+ 'return', 'yield', 'pass', 'else', 'require', 'var',
+ 'let', 'async', 'method', 'gen'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Keyword.Pseudo),
- (words(('this', 'self', '@'),
+ Keyword.Pseudo),
+ (words(('this', 'self', '@'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
- Keyword.Constant),
- (words((
- 'Function', 'Object', 'Array', 'String', 'Number',
- 'Boolean', 'ErrorFactory', 'ENode', 'Promise'),
+ Keyword.Constant),
+ (words((
+ 'Function', 'Object', 'Array', 'String', 'Number',
+ 'Boolean', 'ErrorFactory', 'ENode', 'Promise'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
- Keyword.Type),
- ],
- 'builtins': [
- (words((
- 'send', 'object', 'keys', 'items', 'enumerate', 'zip',
- 'product', 'neighbours', 'predicate', 'equal',
- 'nequal', 'contains', 'repr', 'clone', 'range',
- 'getChecker', 'get-checker', 'getProperty', 'get-property',
- 'getProjector', 'get-projector', 'consume', 'take',
- 'promisify', 'spawn', 'constructor'),
+ Keyword.Type),
+ ],
+ 'builtins': [
+ (words((
+ 'send', 'object', 'keys', 'items', 'enumerate', 'zip',
+ 'product', 'neighbours', 'predicate', 'equal',
+ 'nequal', 'contains', 'repr', 'clone', 'range',
+ 'getChecker', 'get-checker', 'getProperty', 'get-property',
+ 'getProjector', 'get-projector', 'consume', 'take',
+ 'promisify', 'spawn', 'constructor'),
prefix=r'(?<![\w\-#.])', suffix=r'(?![\w\-.])'),
- Name.Builtin),
- (words((
- 'true', 'false', 'null', 'undefined'),
+ Name.Builtin),
+ (words((
+ 'true', 'false', 'null', 'undefined'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Name.Constant),
- ],
- 'name': [
+ Name.Constant),
+ ],
+ 'name': [
(r'@([a-zA-Z$_](?:[\w$-]*[\w$])?)', Name.Variable.Instance),
(r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(\+\+|\-\-)?',
- bygroups(Name.Symbol, Operator.Word))
- ],
- 'tuple': [
+ bygroups(Name.Symbol, Operator.Word))
+ ],
+ 'tuple': [
(r'#[a-zA-Z_][\w\-]*(?=[\s{(,;])', Name.Namespace)
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, '#pop'),
- include('root')
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'[^\\\'"]', String),
- (r'[\'"\\]', String),
- (r'\n', String) # All strings are multiline in EG
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape),
- (r'\{', String.Interpol, 'interpoling_string'),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- ],
- 'bt': [
- (r'`', String.Backtick, '#pop'),
- (r'(?<!`)\n', String.Backtick),
- (r'\^=?', String.Escape),
- (r'.+', String.Backtick),
- ],
- 'tbt': [
- (r'```', String.Backtick, '#pop'),
- (r'\n', String.Backtick),
- (r'\^=?', String.Escape),
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, '#pop'),
+ include('root')
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'[^\\\'"]', String),
+ (r'[\'"\\]', String),
+ (r'\n', String) # All strings are multiline in EG
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape),
+ (r'\{', String.Interpol, 'interpoling_string'),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ ],
+ 'bt': [
+ (r'`', String.Backtick, '#pop'),
+ (r'(?<!`)\n', String.Backtick),
+ (r'\^=?', String.Escape),
+ (r'.+', String.Backtick),
+ ],
+ 'tbt': [
+ (r'```', String.Backtick, '#pop'),
+ (r'\n', String.Backtick),
+ (r'\^=?', String.Escape),
(r'[^`]+', String.Backtick),
- ],
- 'numbers': [
- (r'\d+\.(?!\.)\d*([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'8r[0-7]+', Number.Oct),
- (r'2r[01]+', Number.Bin),
- (r'16r[a-fA-F0-9]+', Number.Hex),
+ ],
+ 'numbers': [
+ (r'\d+\.(?!\.)\d*([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'8r[0-7]+', Number.Oct),
+ (r'2r[01]+', Number.Bin),
+ (r'16r[a-fA-F0-9]+', Number.Hex),
(r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
Number.Radix),
- (r'\d+', Number.Integer)
- ],
- }
+ (r'\d+', Number.Integer)
+ ],
+ }
class JuttleLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py2/pygments/lexers/julia.py b/contrib/python/Pygments/py2/pygments/lexers/julia.py
index aa1609da50..9879cf360e 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/julia.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/julia.py
@@ -1,56 +1,56 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.julia
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Julia language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.julia
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Julia language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-from pygments.util import shebang_matches, unirange
-
-__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
-
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+from pygments.util import shebang_matches, unirange
+
+__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
+
allowed_variable = (
u'(?:[a-zA-Z_\u00A1-\uffff]|%s)(?:[a-zA-Z_0-9\u00A1-\uffff]|%s)*!*' %
((unirange(0x10000, 0x10ffff),) * 2))
-
-
-class JuliaLexer(RegexLexer):
- """
- For `Julia <http://julialang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Julia'
- aliases = ['julia', 'jl']
- filenames = ['*.jl']
- mimetypes = ['text/x-julia', 'application/x-julia']
-
- flags = re.MULTILINE | re.UNICODE
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'#=', Comment.Multiline, "blockcomment"),
- (r'#.*$', Comment),
+
+
+class JuliaLexer(RegexLexer):
+ """
+ For `Julia <http://julialang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Julia'
+ aliases = ['julia', 'jl']
+ filenames = ['*.jl']
+ mimetypes = ['text/x-julia', 'application/x-julia']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'#=', Comment.Multiline, "blockcomment"),
+ (r'#.*$', Comment),
(r'[\[\]{}(),;]', Punctuation),
-
- # keywords
+
+ # keywords
(r'in\b', Keyword.Pseudo),
(r'isa\b', Keyword.Pseudo),
(r'(true|false)\b', Keyword.Constant),
- (r'(local|global|const)\b', Keyword.Declaration),
+ (r'(local|global|const)\b', Keyword.Declaration),
(words([
'function', 'type', 'typealias', 'abstract', 'immutable',
'baremodule', 'begin', 'bitstype', 'break', 'catch', 'ccall',
@@ -59,18 +59,18 @@ class JuliaLexer(RegexLexer):
'mutable', 'primitive', 'quote', 'return', 'struct', 'try',
'using', 'while'],
suffix=r'\b'), Keyword),
-
+
# NOTE
# Patterns below work only for definition sites and thus hardly reliable.
#
- # functions
+ # functions
# (r'(function)(\s+)(' + allowed_variable + ')',
# bygroups(Keyword, Text, Name.Function)),
#
- # types
+ # types
# (r'(type|typealias|abstract|immutable)(\s+)(' + allowed_variable + ')',
# bygroups(Keyword, Text, Name.Class)),
-
+
# type names
(words([
'ANY', 'ASCIIString', 'AbstractArray', 'AbstractChannel',
@@ -128,8 +128,8 @@ class JuliaLexer(RegexLexer):
'Vararg', 'VecOrMat', 'Vector', 'VersionNumber', 'Void', 'WString',
'WeakKeyDict', 'WeakRef', 'WorkerConfig', 'Zip'], suffix=r'\b'),
Keyword.Type),
-
- # builtins
+
+ # builtins
(words([
u'ARGS', u'CPU_CORES', u'C_NULL', u'DevNull', u'ENDIAN_BOM',
u'ENV', u'I', u'Inf', u'Inf16', u'Inf32', u'Inf64',
@@ -142,7 +142,7 @@ class JuliaLexer(RegexLexer):
u'eulergamma', u'golden', u'im', u'nothing', u'pi', u'γ',
u'π', u'φ'],
suffix=r'\b'), Name.Builtin),
-
+
# operators
# see: https://github.com/JuliaLang/julia/blob/master/src/julia-parser.scm
(words([
@@ -185,18 +185,18 @@ class JuliaLexer(RegexLexer):
# unary op
u'+', u'-', u'!', u'√', u'∛', u'∜'
]), Operator),
-
- # chars
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
- r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
-
- # try to match trailing transpose
- (r'(?<=[.\w)\]])\'+', Operator),
-
- # strings
+
+ # chars
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
+ r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
+
+ # try to match trailing transpose
+ (r'(?<=[.\w)\]])\'+', Operator),
+
+ # strings
(r'"""', String, 'tqstring'),
(r'"', String, 'string'),
-
+
# regular expressions
(r'r"""', String.Regex, 'tqregex'),
(r'r"', String.Regex, 'regex'),
@@ -204,34 +204,34 @@ class JuliaLexer(RegexLexer):
# backticks
(r'`', String.Backtick, 'command'),
- # names
+ # names
(allowed_variable, Name),
(r'@' + allowed_variable, Name.Decorator),
-
- # numbers
- (r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+(_\d+)+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'0b[01]+(_[01]+)+', Number.Bin),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+(_[0-7]+)+', Number.Oct),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+', Number.Hex),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'\d+(_\d+)+', Number.Integer),
- (r'\d+', Number.Integer)
- ],
-
- "blockcomment": [
- (r'[^=#]', Comment.Multiline),
- (r'#=', Comment.Multiline, '#push'),
- (r'=#', Comment.Multiline, '#pop'),
- (r'[=#]', Comment.Multiline),
- ],
-
- 'string': [
- (r'"', String, '#pop'),
+
+ # numbers
+ (r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+(_\d+)+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'0b[01]+(_[01]+)+', Number.Bin),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+(_[0-7]+)+', Number.Oct),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+', Number.Hex),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'\d+(_\d+)+', Number.Integer),
+ (r'\d+', Number.Integer)
+ ],
+
+ "blockcomment": [
+ (r'[^=#]', Comment.Multiline),
+ (r'#=', Comment.Multiline, '#push'),
+ (r'=#', Comment.Multiline, '#pop'),
+ (r'[=#]', Comment.Multiline),
+ ],
+
+ 'string': [
+ (r'"', String, '#pop'),
# FIXME: This escape pattern is not perfect.
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
# Interpolation is defined as "$" followed by the shortest full
@@ -244,7 +244,7 @@ class JuliaLexer(RegexLexer):
(r'%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]',
String.Interpol),
(r'.|\s', String),
- ],
+ ],
'tqstring': [
(r'"""', String, '#pop'),
@@ -277,33 +277,33 @@ class JuliaLexer(RegexLexer):
(r'\)', Punctuation, '#pop'),
include('root'),
]
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'julia')
-
-
-class JuliaConsoleLexer(Lexer):
- """
- For Julia console sessions. Modeled after MatlabSessionLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Julia console'
- aliases = ['jlcon']
-
- def get_tokens_unprocessed(self, text):
- jllexer = JuliaLexer(**self.options)
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'julia')
+
+
+class JuliaConsoleLexer(Lexer):
+ """
+ For Julia console sessions. Modeled after MatlabSessionLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Julia console'
+ aliases = ['jlcon']
+
+ def get_tokens_unprocessed(self, text):
+ jllexer = JuliaLexer(**self.options)
start = 0
- curcode = ''
- insertions = []
+ curcode = ''
+ insertions = []
output = False
error = False
-
+
for line in text.splitlines(True):
- if line.startswith('julia>'):
+ if line.startswith('julia>'):
insertions.append((len(curcode), [(0, Generic.Prompt, line[:6])]))
- curcode += line[6:]
+ curcode += line[6:]
output = False
error = False
elif line.startswith('help?>') or line.startswith('shell>'):
@@ -314,13 +314,13 @@ class JuliaConsoleLexer(Lexer):
elif line.startswith(' ') and not output:
insertions.append((len(curcode), [(0, Text, line[:6])]))
curcode += line[6:]
- else:
- if curcode:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
if line.startswith('ERROR: ') or error:
yield start, Generic.Error, line
error = True
@@ -328,8 +328,8 @@ class JuliaConsoleLexer(Lexer):
yield start, Generic.Output, line
output = True
start += len(line)
-
+
if curcode:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
+ for item in do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode)):
+ yield item
diff --git a/contrib/python/Pygments/py2/pygments/lexers/jvm.py b/contrib/python/Pygments/py2/pygments/lexers/jvm.py
index 5588b79660..7c216ba1eb 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/jvm.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/jvm.py
@@ -1,76 +1,76 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.jvm
- ~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for JVM languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.jvm
+ ~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexers for JVM languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.util import shebang_matches
-from pygments import unistring as uni
-
-__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
- 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
- 'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+ this, combined, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import shebang_matches
+from pygments import unistring as uni
+
+__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
+ 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
+ 'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
'PigLexer', 'GoloLexer', 'JasminLexer', 'SarlLexer']
-
-
-class JavaLexer(RegexLexer):
- """
+
+
+class JavaLexer(RegexLexer):
+ """
For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
- """
-
- name = 'Java'
- aliases = ['java']
- filenames = ['*.java']
- mimetypes = ['text/x-java']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- # keywords: go before method names to avoid lexing "throw new XYZ"
- # as a method signature
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
- Keyword),
- # method names
- (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
- r'((?:[^\W\d]|\$)[\w$]*)' # method name
- r'(\s*)(\()', # signature start
+ """
+
+ name = 'Java'
+ aliases = ['java']
+ filenames = ['*.java']
+ mimetypes = ['text/x-java']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ # keywords: go before method names to avoid lexing "throw new XYZ"
+ # as a method signature
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
+ Keyword),
+ # method names
+ (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
+ r'((?:[^\W\d]|\$)[\w$]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
- (r'@[^\W\d][\w.]*', Name.Decorator),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'(true|false|null)\b', Keyword.Constant),
+ (r'@[^\W\d][\w.]*', Name.Decorator),
+ (r'(abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
'var'),
(r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
'import'),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
Name.Attribute)),
- (r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
- (r'([^\W\d]|\$)[\w$]*', Name),
+ (r'^\s*([^\W\d]|\$)[\w$]*:', Name.Label),
+ (r'([^\W\d]|\$)[\w$]*', Name),
(r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
r'\.[0-9][0-9_]*)'
r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
@@ -85,1001 +85,1001 @@ class JavaLexer(RegexLexer):
(r'0|[1-9][0-9_]*[lL]?', Number.Integer),
(r'[~^*!%&\[\]<>|+=/?-]', Operator),
(r'[{}();:.,]', Punctuation),
- (r'\n', Text)
- ],
- 'class': [
- (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
- ],
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
+ ],
'var': [
(r'([^\W\d]|\$)[\w$]*', Name, '#pop')
],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class AspectJLexer(JavaLexer):
- """
- For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'AspectJ'
- aliases = ['aspectj']
- filenames = ['*.aj']
- mimetypes = ['text/x-aspectj']
-
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class AspectJLexer(JavaLexer):
+ """
+ For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'AspectJ'
+ aliases = ['aspectj']
+ filenames = ['*.aj']
+ mimetypes = ['text/x-aspectj']
+
aj_keywords = {
- 'aspect', 'pointcut', 'privileged', 'call', 'execution',
- 'initialization', 'preinitialization', 'handler', 'get', 'set',
- 'staticinitialization', 'target', 'args', 'within', 'withincode',
- 'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
- 'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
- 'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
- 'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
- 'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
- 'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
+ 'aspect', 'pointcut', 'privileged', 'call', 'execution',
+ 'initialization', 'preinitialization', 'handler', 'get', 'set',
+ 'staticinitialization', 'target', 'args', 'within', 'withincode',
+ 'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
+ 'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
+ 'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
+ 'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
+ 'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
+ 'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
}
aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.aj_keywords:
- yield index, Keyword, value
- elif token is Name.Label and value in self.aj_inter_type:
- yield index, Keyword, value[:-1]
- yield index, Operator, value[-1]
- elif token is Name.Decorator and value in self.aj_inter_type_annotation:
- yield index, Keyword, value
- else:
- yield index, token, value
-
-
-class ScalaLexer(RegexLexer):
- """
- For `Scala <http://www.scala-lang.org>`_ source code.
- """
-
- name = 'Scala'
- aliases = ['scala']
- filenames = ['*.scala']
- mimetypes = ['text/x-scala']
-
- flags = re.MULTILINE | re.DOTALL
-
- # don't use raw unicode strings!
- op = (u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1'
- u'\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9'
- u'\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2'
- u'\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38'
- u'\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940'
- u'\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c'
- u'\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118'
- u'\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144'
- u'\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767'
- u'\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb'
- u'\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020'
- u'\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3'
- u'\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff'
- u'\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66'
- u'\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+')
-
- letter = (u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6'
- u'\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386'
- u'\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2'
- u'\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5'
- u'\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5'
- u'\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961'
- u'\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1'
- u'\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd'
- u'\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9'
- u'\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1'
- u'\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30'
- u'\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4'
- u'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f'
- u'\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070'
- u'\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f'
- u'\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711'
- u'\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc'
- u'\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7'
- u'\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf'
- u'\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77'
- u'\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb'
- u'\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113'
- u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139'
- u'\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c'
- u'\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029'
- u'\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e'
- u'\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c'
- u'\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f'
- u'\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822'
- u'\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28'
- u'\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28'
- u'\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a'
- u'\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]')
-
- upper = (u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108'
- u'\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c'
- u'\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130'
- u'\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145'
- u'\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a'
- u'\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e'
- u'\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182'
- u'\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194'
- u'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7'
- u'\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc'
- u'\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9'
- u'\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee'
- u'\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204'
- u'\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218'
- u'\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c'
- u'\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246'
- u'\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f'
- u'\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0'
- u'\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7'
- u'\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a'
- u'\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e'
- u'\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a'
- u'\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae'
- u'\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1'
- u'\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6'
- u'\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea'
- u'\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe'
- u'\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512'
- u'\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556'
- u'\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e'
- u'\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22'
- u'\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36'
- u'\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a'
- u'\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e'
- u'\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72'
- u'\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86'
- u'\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2'
- u'\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6'
- u'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca'
- u'\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede'
- u'\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2'
- u'\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d'
- u'\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f'
- u'\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb'
- u'\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112'
- u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133'
- u'\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67'
- u'\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86'
- u'\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a'
- u'\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae'
- u'\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2'
- u'\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6'
- u'\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646'
- u'\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a'
- u'\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682'
- u'\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696'
- u'\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736'
- u'\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a'
- u'\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e'
- u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
- u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
-
- idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in self.aj_keywords:
+ yield index, Keyword, value
+ elif token is Name.Label and value in self.aj_inter_type:
+ yield index, Keyword, value[:-1]
+ yield index, Operator, value[-1]
+ elif token is Name.Decorator and value in self.aj_inter_type_annotation:
+ yield index, Keyword, value
+ else:
+ yield index, token, value
+
+
+class ScalaLexer(RegexLexer):
+ """
+ For `Scala <http://www.scala-lang.org>`_ source code.
+ """
+
+ name = 'Scala'
+ aliases = ['scala']
+ filenames = ['*.scala']
+ mimetypes = ['text/x-scala']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ # don't use raw unicode strings!
+ op = (u'[-~\\^\\*!%&\\\\<>\\|+=:/?@\u00a6-\u00a7\u00a9\u00ac\u00ae\u00b0-\u00b1'
+ u'\u00b6\u00d7\u00f7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9'
+ u'\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2'
+ u'\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38'
+ u'\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940'
+ u'\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c'
+ u'\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118'
+ u'\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144'
+ u'\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767'
+ u'\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb'
+ u'\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020'
+ u'\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3'
+ u'\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff'
+ u'\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66'
+ u'\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+')
+
+ letter = (u'[a-zA-Z\\$_\u00aa\u00b5\u00ba\u00c0-\u00d6\u00d8-\u00f6'
+ u'\u00f8-\u02af\u0370-\u0373\u0376-\u0377\u037b-\u037d\u0386'
+ u'\u0388-\u03f5\u03f7-\u0481\u048a-\u0556\u0561-\u0587\u05d0-\u05f2'
+ u'\u0621-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5'
+ u'\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5'
+ u'\u07b1\u07ca-\u07ea\u0904-\u0939\u093d\u0950\u0958-\u0961'
+ u'\u0972-\u097f\u0985-\u09b9\u09bd\u09ce\u09dc-\u09e1\u09f0-\u09f1'
+ u'\u0a05-\u0a39\u0a59-\u0a5e\u0a72-\u0a74\u0a85-\u0ab9\u0abd'
+ u'\u0ad0-\u0ae1\u0b05-\u0b39\u0b3d\u0b5c-\u0b61\u0b71\u0b83-\u0bb9'
+ u'\u0bd0\u0c05-\u0c3d\u0c58-\u0c61\u0c85-\u0cb9\u0cbd\u0cde-\u0ce1'
+ u'\u0d05-\u0d3d\u0d60-\u0d61\u0d7a-\u0d7f\u0d85-\u0dc6\u0e01-\u0e30'
+ u'\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0eb0\u0eb2-\u0eb3\u0ebd-\u0ec4'
+ u'\u0edc-\u0f00\u0f40-\u0f6c\u0f88-\u0f8b\u1000-\u102a\u103f'
+ u'\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070'
+ u'\u1075-\u1081\u108e\u10a0-\u10fa\u1100-\u135a\u1380-\u138f'
+ u'\u13a0-\u166c\u166f-\u1676\u1681-\u169a\u16a0-\u16ea\u16ee-\u1711'
+ u'\u1720-\u1731\u1740-\u1751\u1760-\u1770\u1780-\u17b3\u17dc'
+ u'\u1820-\u1842\u1844-\u18a8\u18aa-\u191c\u1950-\u19a9\u19c1-\u19c7'
+ u'\u1a00-\u1a16\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf'
+ u'\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1d00-\u1d2b\u1d62-\u1d77'
+ u'\u1d79-\u1d9a\u1e00-\u1fbc\u1fbe\u1fc2-\u1fcc\u1fd0-\u1fdb'
+ u'\u1fe0-\u1fec\u1ff2-\u1ffc\u2071\u207f\u2102\u2107\u210a-\u2113'
+ u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139'
+ u'\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c7c'
+ u'\u2c80-\u2ce4\u2d00-\u2d65\u2d80-\u2dde\u3006-\u3007\u3021-\u3029'
+ u'\u3038-\u303a\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff-\u318e'
+ u'\u31a0-\u31b7\u31f0-\u31ff\u3400-\u4db5\u4e00-\ua014\ua016-\ua48c'
+ u'\ua500-\ua60b\ua610-\ua61f\ua62a-\ua66e\ua680-\ua697\ua722-\ua76f'
+ u'\ua771-\ua787\ua78b-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822'
+ u'\ua840-\ua873\ua882-\ua8b3\ua90a-\ua925\ua930-\ua946\uaa00-\uaa28'
+ u'\uaa40-\uaa42\uaa44-\uaa4b\uac00-\ud7a3\uf900-\ufb1d\ufb1f-\ufb28'
+ u'\ufb2a-\ufd3d\ufd50-\ufdfb\ufe70-\ufefc\uff21-\uff3a\uff41-\uff5a'
+ u'\uff66-\uff6f\uff71-\uff9d\uffa0-\uffdc]')
+
+ upper = (u'[A-Z\\$_\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108'
+ u'\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c'
+ u'\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130'
+ u'\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145'
+ u'\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a'
+ u'\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e'
+ u'\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182'
+ u'\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194'
+ u'\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7'
+ u'\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc'
+ u'\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9'
+ u'\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee'
+ u'\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204'
+ u'\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218'
+ u'\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c'
+ u'\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246'
+ u'\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f'
+ u'\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0'
+ u'\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7'
+ u'\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a'
+ u'\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e'
+ u'\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a'
+ u'\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae'
+ u'\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1'
+ u'\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6'
+ u'\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea'
+ u'\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe'
+ u'\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512'
+ u'\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556'
+ u'\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e'
+ u'\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22'
+ u'\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36'
+ u'\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a'
+ u'\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e'
+ u'\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72'
+ u'\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86'
+ u'\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2'
+ u'\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6'
+ u'\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca'
+ u'\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede'
+ u'\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2'
+ u'\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d'
+ u'\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f'
+ u'\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb'
+ u'\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112'
+ u'\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133'
+ u'\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67'
+ u'\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86'
+ u'\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a'
+ u'\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae'
+ u'\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2'
+ u'\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6'
+ u'\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646'
+ u'\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a'
+ u'\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682'
+ u'\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696'
+ u'\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736'
+ u'\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a'
+ u'\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e'
+ u'\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b'
+ u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
+
+ idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
letter_letter_digit = u'%s(?:%s|\\d)*' % (letter, letter)
-
- tokens = {
- 'root': [
- # method names
- (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
- (r'[^\S\n]+', Text),
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'(class|trait|object)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'[^\S\n]+', Text),
include('comments'),
- (u'@%s' % idrest, Name.Decorator),
- (u'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
- u'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
- u'lazy|match|new|override|pr(?:ivate|otected)'
- u'|re(?:quires|turn)|s(?:ealed|uper)|'
- u't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\\b|'
- u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
- (u':(?!%s)' % op, Keyword, 'type'),
- (u'%s%s\\b' % (upper, idrest), Name.Class),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
- (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
- (r'""".*?"""(?!")', String),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (u"'%s" % idrest, Text.Symbol),
- (r'[fs]"""', String, 'interptriplestring'), # interpolated strings
- (r'[fs]"', String, 'interpstring'), # interpolated strings
- (r'raw"(\\\\|\\"|[^"])*"', String), # raw strings
- # (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
- # Name.Attribute)),
- (idrest, Name),
- (r'`[^`]+`', Name),
- (r'\[', Operator, 'typeparam'),
- (r'[(){};,.#]', Operator),
- (op, Operator),
- (r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
- Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (u'(%s|%s|`[^`]+`)(\\s*)(\\[)' % (idrest, op),
+ (u'@%s' % idrest, Name.Decorator),
+ (u'(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|'
+ u'f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|'
+ u'lazy|match|new|override|pr(?:ivate|otected)'
+ u'|re(?:quires|turn)|s(?:ealed|uper)|'
+ u't(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\\b|'
+ u'(<[%:-]|=>|>:|[#=@_\u21D2\u2190])(\\b|(?=\\s)|$)', Keyword),
+ (u':(?!%s)' % op, Keyword, 'type'),
+ (u'%s%s\\b' % (upper, idrest), Name.Class),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(import|package)(\s+)', bygroups(Keyword, Text), 'import'),
+ (r'(type)(\s+)', bygroups(Keyword, Text), 'type'),
+ (r'""".*?"""(?!")', String),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ (u"'%s" % idrest, Text.Symbol),
+ (r'[fs]"""', String, 'interptriplestring'), # interpolated strings
+ (r'[fs]"', String, 'interpstring'), # interpolated strings
+ (r'raw"(\\\\|\\"|[^"])*"', String), # raw strings
+ # (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
+ # Name.Attribute)),
+ (idrest, Name),
+ (r'`[^`]+`', Name),
+ (r'\[', Operator, 'typeparam'),
+ (r'[(){};,.#]', Operator),
+ (op, Operator),
+ (r'([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?',
+ Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (u'(%s|%s|`[^`]+`)(\\s*)(\\[)' % (idrest, op),
bygroups(Name.Class, Text, Operator), ('#pop', 'typeparam')),
- (r'\s+', Text),
+ (r'\s+', Text),
include('comments'),
- (r'\{', Operator, '#pop'),
- (r'\(', Operator, '#pop'),
- (u'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
- ],
- 'type': [
- (r'\s+', Text),
+ (r'\{', Operator, '#pop'),
+ (r'\(', Operator, '#pop'),
+ (u'%s|%s|`[^`]+`' % (idrest, op), Name.Class, '#pop'),
+ ],
+ 'type': [
+ (r'\s+', Text),
include('comments'),
(r'<[%:]|>:|[#_]|\bforSome\b|\btype\b', Keyword),
- (u'([,);}]|=>|=|\u21d2)(\\s*)', bygroups(Operator, Text), '#pop'),
- (r'[({]', Operator, '#push'),
- (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)' %
- (idrest, op, idrest, op),
- bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
- (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$' %
- (idrest, op, idrest, op),
- bygroups(Keyword.Type, Text), '#pop'),
- (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
- ],
- 'typeparam': [
+ (u'([,);}]|=>|=|\u21d2)(\\s*)', bygroups(Operator, Text), '#pop'),
+ (r'[({]', Operator, '#push'),
+ (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)' %
+ (idrest, op, idrest, op),
+ bygroups(Keyword.Type, Text, Operator), ('#pop', 'typeparam')),
+ (u'((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$' %
+ (idrest, op, idrest, op),
+ bygroups(Keyword.Type, Text), '#pop'),
+ (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
+ ],
+ 'typeparam': [
(r'\s+', Text),
include('comments'),
(r',+', Punctuation),
(u'<[%:]|=>|>:|[#_\u21D2]|\bforSome\b|\btype\b', Keyword),
- (r'([\])}])', Operator, '#pop'),
- (r'[(\[{]', Operator, '#push'),
- (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
- ],
+ (r'([\])}])', Operator, '#pop'),
+ (r'[(\[{]', Operator, '#push'),
+ (u'\\.|%s|%s|`[^`]+`' % (idrest, op), Keyword.Type)
+ ],
'comments': [
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'import': [
- (u'(%s|\\.)+' % idrest, Name.Namespace, '#pop')
- ],
- 'interpstringcommon': [
- (r'[^"$\\]+', String),
- (r'\$\$', String),
- (r'\$' + letter_letter_digit, String.Interpol),
- (r'\$\{', String.Interpol, 'interpbrace'),
- (r'\\.', String),
- ],
- 'interptriplestring': [
- (r'"""(?!")', String, '#pop'),
- (r'"', String),
- include('interpstringcommon'),
- ],
- 'interpstring': [
- (r'"', String, '#pop'),
- include('interpstringcommon'),
- ],
- 'interpbrace': [
- (r'\}', String.Interpol, '#pop'),
- (r'\{', String.Interpol, '#push'),
- include('root'),
- ],
- }
-
-
-class GosuLexer(RegexLexer):
- """
- For Gosu source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Gosu'
- aliases = ['gosu']
- filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
- mimetypes = ['text/x-gosu']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
- r'index|while|do|continue|break|return|try|catch|finally|this|'
- r'throw|new|switch|case|default|eval|super|outer|classpath|'
- r'using)\b', Keyword),
- (r'(var|delegate|construct|function|private|internal|protected|'
- r'public|abstract|override|final|static|extends|transient|'
- r'implements|represents|readonly)\b', Keyword.Declaration),
- (r'(property\s+)(get|set)?', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
- (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(uses)(\s+)([\w.]+\*?)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'"', String, 'string'),
- (r'(\??[.#])([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'(:)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_$]\w*', Name),
- (r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\n', Text)
- ],
- 'templateText': [
- (r'(\\<)|(\\\$)', String),
- (r'(<%@\s+)(extends|params)',
- bygroups(Operator, Name.Decorator), 'stringTemplate'),
- (r'<%!--.*?--%>', Comment.Multiline),
- (r'(<%)|(<%=)', Operator, 'stringTemplate'),
- (r'\$\{', Operator, 'stringTemplateShorthand'),
- (r'.', String)
- ],
- 'string': [
- (r'"', String, '#pop'),
- include('templateText')
- ],
- 'stringTemplate': [
- (r'"', String, 'string'),
- (r'%>', Operator, '#pop'),
- include('root')
- ],
- 'stringTemplateShorthand': [
- (r'"', String, 'string'),
- (r'\{', Operator, 'stringTemplateShorthand'),
- (r'\}', Operator, '#pop'),
- include('root')
- ],
- }
-
-
-class GosuTemplateLexer(Lexer):
- """
- For Gosu templates.
-
- .. versionadded:: 1.5
- """
-
- name = 'Gosu Template'
- aliases = ['gst']
- filenames = ['*.gst']
- mimetypes = ['text/x-gosu-template']
-
- def get_tokens_unprocessed(self, text):
- lexer = GosuLexer()
- stack = ['templateText']
- for item in lexer.get_tokens_unprocessed(text, stack):
- yield item
-
-
-class GroovyLexer(RegexLexer):
- """
- For `Groovy <http://groovy.codehaus.org/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Groovy'
- aliases = ['groovy']
- filenames = ['*.groovy','*.gradle']
- mimetypes = ['text/x-groovy']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # Groovy allows a file to start with a shebang
- (r'#!(.*?)$', Comment.Preproc, 'base'),
- default('base'),
- ],
- 'base': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
- Keyword),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'""".*?"""', String.Double),
- (r"'''.*?'''", String.Single),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'\$/((?!/\$).)*/\$', String),
- (r'/(\\\\|\\"|[^/])*/', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'groovy')
-
-
-class IokeLexer(RegexLexer):
- """
- For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
- prototype based programming language) source.
-
- .. versionadded:: 1.4
- """
- name = 'Ioke'
- filenames = ['*.ik']
- aliases = ['ioke', 'ik']
- mimetypes = ['text/x-iokesrc']
- tokens = {
- 'interpolatableText': [
- (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
- r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
- (r'#\{', Punctuation, 'textInterpolationRoot')
- ],
-
- 'text': [
- (r'(?<!\\)"', String, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String)
- ],
-
- 'documentation': [
- (r'(?<!\\)"', String.Doc, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String.Doc)
- ],
-
- 'textInterpolationRoot': [
- (r'\}', Punctuation, '#pop'),
- include('root')
- ],
-
- 'slashRegexp': [
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'import': [
+ (u'(%s|\\.)+' % idrest, Name.Namespace, '#pop')
+ ],
+ 'interpstringcommon': [
+ (r'[^"$\\]+', String),
+ (r'\$\$', String),
+ (r'\$' + letter_letter_digit, String.Interpol),
+ (r'\$\{', String.Interpol, 'interpbrace'),
+ (r'\\.', String),
+ ],
+ 'interptriplestring': [
+ (r'"""(?!")', String, '#pop'),
+ (r'"', String),
+ include('interpstringcommon'),
+ ],
+ 'interpstring': [
+ (r'"', String, '#pop'),
+ include('interpstringcommon'),
+ ],
+ 'interpbrace': [
+ (r'\}', String.Interpol, '#pop'),
+ (r'\{', String.Interpol, '#push'),
+ include('root'),
+ ],
+ }
+
+
+class GosuLexer(RegexLexer):
+ """
+ For Gosu source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Gosu'
+ aliases = ['gosu']
+ filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
+ mimetypes = ['text/x-gosu']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
+ r'index|while|do|continue|break|return|try|catch|finally|this|'
+ r'throw|new|switch|case|default|eval|super|outer|classpath|'
+ r'using)\b', Keyword),
+ (r'(var|delegate|construct|function|private|internal|protected|'
+ r'public|abstract|override|final|static|extends|transient|'
+ r'implements|represents|readonly)\b', Keyword.Declaration),
+ (r'(property\s+)(get|set)?', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
+ (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Class)),
+ (r'(uses)(\s+)([\w.]+\*?)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'"', String, 'string'),
+ (r'(\??[.#])([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'(:)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'templateText': [
+ (r'(\\<)|(\\\$)', String),
+ (r'(<%@\s+)(extends|params)',
+ bygroups(Operator, Name.Decorator), 'stringTemplate'),
+ (r'<%!--.*?--%>', Comment.Multiline),
+ (r'(<%)|(<%=)', Operator, 'stringTemplate'),
+ (r'\$\{', Operator, 'stringTemplateShorthand'),
+ (r'.', String)
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ include('templateText')
+ ],
+ 'stringTemplate': [
+ (r'"', String, 'string'),
+ (r'%>', Operator, '#pop'),
+ include('root')
+ ],
+ 'stringTemplateShorthand': [
+ (r'"', String, 'string'),
+ (r'\{', Operator, 'stringTemplateShorthand'),
+ (r'\}', Operator, '#pop'),
+ include('root')
+ ],
+ }
+
+
+class GosuTemplateLexer(Lexer):
+ """
+ For Gosu templates.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Gosu Template'
+ aliases = ['gst']
+ filenames = ['*.gst']
+ mimetypes = ['text/x-gosu-template']
+
+ def get_tokens_unprocessed(self, text):
+ lexer = GosuLexer()
+ stack = ['templateText']
+ for item in lexer.get_tokens_unprocessed(text, stack):
+ yield item
+
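A minimal usage sketch for the GosuTemplateLexer defined above, assuming this vendored Pygments package is importable (the sample template text is arbitrary):

    from pygments import lex
    from pygments.lexers import GosuTemplateLexer

    # The template lexer re-enters GosuLexer with 'templateText' on the stack,
    # so literal text is emitted as String tokens and only ${ ... } or <% ... %>
    # switches into ordinary Gosu tokens.
    for token_type, value in lex('Total: ${order.Total}', GosuTemplateLexer()):
        print(token_type, repr(value))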
+
+class GroovyLexer(RegexLexer):
+ """
+ For `Groovy <http://groovy.codehaus.org/>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Groovy'
+ aliases = ['groovy']
+ filenames = ['*.groovy','*.gradle']
+ mimetypes = ['text/x-groovy']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # Groovy allows a file to start with a shebang
+ (r'#!(.*?)$', Comment.Preproc, 'base'),
+ default('base'),
+ ],
+ 'base': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
+ Keyword),
+ (r'(abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
+ (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'""".*?"""', String.Double),
+ (r"'''.*?'''", String.Single),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'\$/((?!/\$).)*/\$', String),
+ (r'/(\\\\|\\"|[^/])*/', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'groovy')
+
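A quick sketch of how the analyse_text hook above is meant to be used, assuming the vendored Pygments package is importable; guess_lexer picks whichever registered lexer reports the highest score:

    from pygments.lexers import GroovyLexer, guess_lexer

    # analyse_text defers to shebang_matches, so a '#!...groovy' shebang scores
    # highly and lets guess_lexer attribute such scripts to Groovy.
    source = '#!/usr/bin/env groovy\nprintln "hello"\n'
    print(GroovyLexer.analyse_text(source))   # expected: 1.0
    print(guess_lexer(source).name)           # expected: 'Groovy'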
+
+class IokeLexer(RegexLexer):
+ """
+ For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
+ prototype based programming language) source.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Ioke'
+ filenames = ['*.ik']
+ aliases = ['ioke', 'ik']
+ mimetypes = ['text/x-iokesrc']
+ tokens = {
+ 'interpolatableText': [
+ (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
+ r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
+ (r'#\{', Punctuation, 'textInterpolationRoot')
+ ],
+
+ 'text': [
+ (r'(?<!\\)"', String, '#pop'),
+ include('interpolatableText'),
+ (r'[^"]', String)
+ ],
+
+ 'documentation': [
+ (r'(?<!\\)"', String.Doc, '#pop'),
+ include('interpolatableText'),
+ (r'[^"]', String.Doc)
+ ],
+
+ 'textInterpolationRoot': [
+ (r'\}', Punctuation, '#pop'),
+ include('root')
+ ],
+
+ 'slashRegexp': [
(r'(?<!\\)/[im-psux]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\/', String.Regex),
- (r'[^/]', String.Regex)
- ],
-
- 'squareRegexp': [
+ include('interpolatableText'),
+ (r'\\/', String.Regex),
+ (r'[^/]', String.Regex)
+ ],
+
+ 'squareRegexp': [
(r'(?<!\\)][im-psux]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\]', String.Regex),
- (r'[^\]]', String.Regex)
- ],
-
- 'squareText': [
- (r'(?<!\\)]', String, '#pop'),
- include('interpolatableText'),
- (r'[^\]]', String)
- ],
-
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
-
- # Comments
- (r';(.*?)\n', Comment),
- (r'\A#!(.*?)\n', Comment),
-
- # Regexps
- (r'#/', String.Regex, 'slashRegexp'),
- (r'#r\[', String.Regex, 'squareRegexp'),
-
- # Symbols
- (r':[\w!:?]+', String.Symbol),
- (r'[\w!:?]+:(?![\w!?])', String.Other),
- (r':"(\\\\|\\"|[^"])*"', String.Symbol),
-
- # Documentation
- (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
- r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
- r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
-
- # Text
- (r'"', String, 'text'),
- (r'#\[', String, 'squareText'),
-
- # Mimic
- (r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
-
- # Assignment
- (r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
- Name.Variable),
-
- # keywords
- (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
- r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
- r'with)(?![\w!:?])', Keyword.Reserved),
-
- # Origin
- (r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
-
- # Base
- (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
- r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
- r'(?![\w!:?])', Keyword),
-
- # Ground
- (r'(stackTraceAsText)(?![\w!:?])', Keyword),
-
- # DefaultBehaviour Literals
- (r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Case
- (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
- r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Reflection
- (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
- r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
- r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
- r'(?![\w!:?])', Keyword),
-
- # DefaultBehaviour Aspects
- (r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour
- (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
- r'(?![\w!:?])', Keyword),
- (r'(use|destructuring)', Keyword.Reserved),
-
- # DefaultBehavior BaseBehavior
- (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
- r'documentation|identity|removeCell!|undefineCell)'
- r'(?![\w!:?])', Keyword),
-
- # DefaultBehavior Internal
- (r'(internal:compositeRegexp|internal:concatenateText|'
- r'internal:createDecimal|internal:createNumber|'
- r'internal:createRegexp|internal:createText)'
- r'(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Conditions
- (r'(availableRestarts|bind|error\!|findRestart|handle|'
- r'invokeRestart|rescue|restart|signal\!|warn\!)'
- r'(?![\w!:?])', Keyword.Reserved),
-
- # constants
- (r'(nil|false|true)(?![\w!:?])', Name.Constant),
-
- # names
- (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
- r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
- r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
- r'Conditions|Definitions|FlowControl|Internal|Literals|'
- r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
- r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
- r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
- r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
- r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
- r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
-
- # functions
- (u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
- u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
+ include('interpolatableText'),
+ (r'\\]', String.Regex),
+ (r'[^\]]', String.Regex)
+ ],
+
+ 'squareText': [
+ (r'(?<!\\)]', String, '#pop'),
+ include('interpolatableText'),
+ (r'[^\]]', String)
+ ],
+
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+
+ # Comments
+ (r';(.*?)\n', Comment),
+ (r'\A#!(.*?)\n', Comment),
+
+ # Regexps
+ (r'#/', String.Regex, 'slashRegexp'),
+ (r'#r\[', String.Regex, 'squareRegexp'),
+
+ # Symbols
+ (r':[\w!:?]+', String.Symbol),
+ (r'[\w!:?]+:(?![\w!?])', String.Other),
+ (r':"(\\\\|\\"|[^"])*"', String.Symbol),
+
+ # Documentation
+ (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
+ r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
+ r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
+
+ # Text
+ (r'"', String, 'text'),
+ (r'#\[', String, 'squareText'),
+
+ # Mimic
+ (r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
+
+ # Assignment
+ (r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
+ Name.Variable),
+
+ # keywords
+ (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
+ r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
+ r'with)(?![\w!:?])', Keyword.Reserved),
+
+ # Origin
+ (r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
+
+ # Base
+ (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
+ r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
+ r'(?![\w!:?])', Keyword),
+
+ # Ground
+ (r'(stackTraceAsText)(?![\w!:?])', Keyword),
+
+ # DefaultBehaviour Literals
+ (r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour Case
+ (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
+ r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour Reflection
+ (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
+ r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
+ r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
+ r'(?![\w!:?])', Keyword),
+
+ # DefaultBehaviour Aspects
+ (r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour
+ (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
+ r'(?![\w!:?])', Keyword),
+ (r'(use|destructuring)', Keyword.Reserved),
+
+ # DefaultBehavior BaseBehavior
+ (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
+ r'documentation|identity|removeCell!|undefineCell)'
+ r'(?![\w!:?])', Keyword),
+
+ # DefaultBehavior Internal
+ (r'(internal:compositeRegexp|internal:concatenateText|'
+ r'internal:createDecimal|internal:createNumber|'
+ r'internal:createRegexp|internal:createText)'
+ r'(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour Conditions
+ (r'(availableRestarts|bind|error\!|findRestart|handle|'
+ r'invokeRestart|rescue|restart|signal\!|warn\!)'
+ r'(?![\w!:?])', Keyword.Reserved),
+
+ # constants
+ (r'(nil|false|true)(?![\w!:?])', Name.Constant),
+
+ # names
+ (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
+ r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
+ r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
+ r'Conditions|Definitions|FlowControl|Internal|Literals|'
+ r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
+ r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
+ r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
+ r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
+ r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
+ r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
+
+ # functions
+ (u'(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
+ u'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
u'(?![\\w!:?])', Name.Function),
-
- # Numbers
- (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
- (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'-?\d+', Number.Integer),
-
- (r'#\(', Punctuation),
-
- # Operators
- (r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
- r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
- r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
- r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
- r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
- r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
- u'\\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
- (r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
- Operator),
-
- # Punctuation
- (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
-
- # kinds
- (r'[A-Z][\w!:?]*', Name.Class),
-
- # default cellnames
- (r'[a-z_][\w!:?]*', Name)
- ]
- }
-
-
-class ClojureLexer(RegexLexer):
- """
- Lexer for `Clojure <http://clojure.org/>`_ source code.
-
- .. versionadded:: 0.11
- """
- name = 'Clojure'
- aliases = ['clojure', 'clj']
- filenames = ['*.clj']
- mimetypes = ['text/x-clojure', 'application/x-clojure']
-
- special_forms = (
- '.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
- )
-
- # It's safe to consider 'ns' a declaration thing because it defines a new
- # namespace.
- declarations = (
- 'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
- 'defstruct', 'defonce', 'declare', 'definline', 'definterface',
- 'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
- )
-
- builtins = (
- '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
- 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
- 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
- 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
- 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
- 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
- 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
- 'butlast', 'byte', 'cast', 'char', 'children', 'class',
- 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
- 'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
- 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
- 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
- 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
- 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
- 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
- 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
- 'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
- 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
- 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
- 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
- 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
- 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
- 'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
- 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
- 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
- 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
- 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
- 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
- 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
- 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
- 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
- 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
- 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
- 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
- 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
- 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
- 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
- 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
- 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
- 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
- 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
- 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
- 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
- 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
- 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
- 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
- 'vector?', 'when', 'when-first', 'when-let', 'when-not',
- 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
- 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
-
- # valid names for identifiers
- # well, names can only not consist fully of numbers
- # but this should be good enough for now
-
- # TODO / should divide keywords/symbols into namespace/rest
- # but that's hard, so just pretend / is part of the name
+
+ # Numbers
+ (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'-?\d+', Number.Integer),
+
+ (r'#\(', Punctuation),
+
+ # Operators
+ (r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
+ r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
+ r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
+ r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
+ r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
+ r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
+ u'\\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
+ (r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
+ Operator),
+
+ # Punctuation
+ (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
+
+ # kinds
+ (r'[A-Z][\w!:?]*', Name.Class),
+
+ # default cellnames
+ (r'[a-z_][\w!:?]*', Name)
+ ]
+ }
+
+
+class ClojureLexer(RegexLexer):
+ """
+ Lexer for `Clojure <http://clojure.org/>`_ source code.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Clojure'
+ aliases = ['clojure', 'clj']
+ filenames = ['*.clj']
+ mimetypes = ['text/x-clojure', 'application/x-clojure']
+
+ special_forms = (
+ '.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
+ )
+
+ # It's safe to consider 'ns' a declaration thing because it defines a new
+ # namespace.
+ declarations = (
+ 'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
+ 'defstruct', 'defonce', 'declare', 'definline', 'definterface',
+ 'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
+ )
+
+ builtins = (
+ '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
+ 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
+ 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
+ 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
+ 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
+ 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
+ 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
+ 'butlast', 'byte', 'cast', 'char', 'children', 'class',
+ 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
+ 'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
+ 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
+ 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
+ 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
+ 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
+ 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
+ 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
+ 'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
+ 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
+ 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
+ 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
+ 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
+ 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
+ 'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
+ 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
+ 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
+ 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
+ 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
+ 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
+ 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
+ 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
+ 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
+ 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
+ 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
+ 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
+ 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
+ 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
+ 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
+ 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
+ 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
+ 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
+ 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
+ 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
+ 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
+ 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
+ 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
+ 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
+ 'vector?', 'when', 'when-first', 'when-let', 'when-not',
+ 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
+ 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
+
+ # valid names for identifiers
+ # well, names can only not consist fully of numbers
+ # but this should be good enough for now
+
+ # TODO / should divide keywords/symbols into namespace/rest
+ # but that's hard, so just pretend / is part of the name
valid_name = r'(?!#)[\w!$%*+<=>?/.#|-]+'
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'[,\s]+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0x-?[abcdef\d]+', Number.Hex),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
-
- # keywords
- (r'::?#?' + valid_name, String.Symbol),
-
- # special operators
- (r'~@|[`\'#^~&@]', Operator),
-
- # highlight the special forms
- (words(special_forms, suffix=' '), Keyword),
-
- # Technically, only the special forms are 'keywords'. The problem
- # is that only treating them as keywords means that things like
- # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
- # and weird for most styles. So, as a compromise we're going to
- # highlight them as Keyword.Declarations.
- (words(declarations, suffix=' '), Keyword.Declaration),
-
- # highlight the builtins
- (words(builtins, suffix=' '), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Clojure accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Clojure accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- ],
- }
-
-
-class ClojureScriptLexer(ClojureLexer):
- """
- Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
- source code.
-
- .. versionadded:: 2.0
- """
- name = 'ClojureScript'
- aliases = ['clojurescript', 'cljs']
- filenames = ['*.cljs']
- mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
-
-
-class TeaLangLexer(RegexLexer):
- """
- For `Tea <http://teatrove.org/>`_ source code. Only used within a
- TeaTemplateLexer.
-
- .. versionadded:: 1.5
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
- (r'(and|break|else|foreach|if|in|not|or|reverse)\b',
- Keyword),
- (r'(as|call|define)\b', Keyword.Declaration),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'\'(\\\\|\\\'|[^\'])*\'', String),
- (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_\$]\w*', Name),
- (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'template': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class CeylonLexer(RegexLexer):
- """
- For `Ceylon <http://ceylon-lang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Ceylon'
- aliases = ['ceylon']
- filenames = ['*.ceylon']
- mimetypes = ['text/x-ceylon']
-
- flags = re.MULTILINE | re.DOTALL
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
- r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
- r'annotation|sealed)\b', Name.Decorator),
- (r'(break|case|catch|continue|else|finally|for|in|'
- r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
- r'nonempty|then|outer|assert|let)\b', Keyword),
- (r'(abstracts|extends|satisfies|'
- r'super|given|of|out|assign)\b', Keyword.Declaration),
- (r'(function|value|void|new)\b',
- Keyword.Type),
- (r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface|object|alias)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
- (r'".*``.*``.*"', String.Interpol),
- (r'(\.)([a-z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
- (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
- Number.Float),
- (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
- (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
- Number.Float),
- (r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
- (r'#[0-9a-fA-F]+', Number.Hex),
- (r'\$([01]{4})(_[01]{4})+', Number.Bin),
- (r'\$[01]+', Number.Bin),
- (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
- (r'[0-9]+[kMGTP]?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[A-Za-z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[a-z][\w.]*',
- Name.Namespace, '#pop')
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
-
-
-class KotlinLexer(RegexLexer):
- """
- For `Kotlin <http://kotlinlang.org/>`_
- source code.
-
- .. versionadded:: 1.5
- """
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'[,\s]+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ (r'0x-?[abcdef\d]+', Number.Hex),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"\\(.|[a-z]+)", String.Char),
+
+ # keywords
+ (r'::?#?' + valid_name, String.Symbol),
+
+ # special operators
+ (r'~@|[`\'#^~&@]', Operator),
+
+ # highlight the special forms
+ (words(special_forms, suffix=' '), Keyword),
+
+ # Technically, only the special forms are 'keywords'. The problem
+ # is that only treating them as keywords means that things like
+ # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
+ # and weird for most styles. So, as a compromise we're going to
+ # highlight them as Keyword.Declarations.
+ (words(declarations, suffix=' '), Keyword.Declaration),
+
+ # highlight the builtins
+ (words(builtins, suffix=' '), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Clojure accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Clojure accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ ],
+ }
+
+
+class ClojureScriptLexer(ClojureLexer):
+ """
+ Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
+ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'ClojureScript'
+ aliases = ['clojurescript', 'cljs']
+ filenames = ['*.cljs']
+ mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
+
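A small sketch of the token classes produced by the ClojureLexer above (ClojureScriptLexer reuses the same rules), assuming the vendored Pygments package is importable:

    from pygments import lex
    from pygments.lexers import ClojureLexer

    # 'defn' comes from `declarations` (Keyword.Declaration), 'map' and 'inc'
    # from `builtins` (Name.Builtin); parens and brackets are Punctuation.
    for token_type, value in lex('(defn twice [x] (map inc [x x]))', ClojureLexer()):
        print(token_type, repr(value))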
+
+class TeaLangLexer(RegexLexer):
+ """
+ For `Tea <http://teatrove.org/>`_ source code. Only used within a
+ TeaTemplateLexer.
+
+ .. versionadded:: 1.5
+ """
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
+ (r'(and|break|else|foreach|if|in|not|or|reverse)\b',
+ Keyword),
+ (r'(as|call|define)\b', Keyword.Declaration),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\'(\\\\|\\\'|[^\'])*\'', String),
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_\$]\w*', Name),
+ (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'template': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class CeylonLexer(RegexLexer):
+ """
+ For `Ceylon <http://ceylon-lang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Ceylon'
+ aliases = ['ceylon']
+ filenames = ['*.ceylon']
+ mimetypes = ['text/x-ceylon']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
+ r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
+ r'annotation|sealed)\b', Name.Decorator),
+ (r'(break|case|catch|continue|else|finally|for|in|'
+ r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
+ r'nonempty|then|outer|assert|let)\b', Keyword),
+ (r'(abstracts|extends|satisfies|'
+ r'super|given|of|out|assign)\b', Keyword.Declaration),
+ (r'(function|value|void|new)\b',
+ Keyword.Type),
+ (r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface|object|alias)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
+ (r'".*``.*``.*"', String.Interpol),
+ (r'(\.)([a-z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
+ (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
+ (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
+ Number.Float),
+ (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
+ (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
+ Number.Float),
+ (r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
+ (r'#[0-9a-fA-F]+', Number.Hex),
+ (r'\$([01]{4})(_[01]{4})+', Number.Bin),
+ (r'\$[01]+', Number.Bin),
+ (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
+ (r'[0-9]+[kMGTP]?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[A-Za-z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[a-z][\w.]*',
+ Name.Namespace, '#pop')
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ }
+
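A short sketch of the Ceylon number rules above ('_' digit grouping and '$' binary literals), assuming the vendored Pygments package is importable (the sample declaration is arbitrary):

    from pygments import lex
    from pygments.lexers import CeylonLexer

    # 'value' is Keyword.Type, 10_000 hits the grouped-integer rule, and
    # $1010_1010 hits the binary rule.
    for token_type, value in lex('value n = 10_000 + $1010_1010;', CeylonLexer()):
        print(token_type, repr(value))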
+
+class KotlinLexer(RegexLexer):
+ """
+ For `Kotlin <http://kotlinlang.org/>`_
+ source code.
+
+ .. versionadded:: 1.5
+ """
- name = 'Kotlin'
- aliases = ['kotlin']
- filenames = ['*.kt']
- mimetypes = ['text/x-kotlin']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
- 'Mn', 'Mc') + ']*')
+ name = 'Kotlin'
+ aliases = ['kotlin']
+ filenames = ['*.kt']
+ mimetypes = ['text/x-kotlin']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
+ 'Mn', 'Mc') + ']*')
kt_space_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
'Mn', 'Mc', 'Zs') + ',-]*')
-
+
kt_id = '(' + kt_name + '|`' + kt_space_name + '`)'
- tokens = {
- 'root': [
- (r'^\s*\[.*?\]', Name.Attribute),
- (r'[^\S\n]+', Text),
+ tokens = {
+ 'root': [
+ (r'^\s*\[.*?\]', Name.Attribute),
+ (r'[^\S\n]+', Text),
(r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*].*?[*]/', Comment.Multiline),
+ (r'\\\n', Text), # line continuation
+ (r'//.*?\n', Comment.Single),
+ (r'/[*].*?[*]/', Comment.Multiline),
(r'""".*?"""', String),
- (r'\n', Text),
- (r'::|!!|\?[:.]', Operator),
- (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
- (r'"(\\\\|\\"|[^"\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r'\n', Text),
+ (r'::|!!|\?[:.]', Operator),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
+ (r'[{}]', Punctuation),
+ (r'@"(""|[^"])*"', String),
+ (r'"(\\\\|\\"|[^"\n])*["\n]', String),
+ (r"'\\.'|'[^\\]'", String.Char),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'(object)(\s+)(:)(\s+)', bygroups(Keyword, Text, Punctuation, Text), 'class'),
(r'(companion)(\s+)(object)', bygroups(Keyword, Text, Keyword)),
- (r'(class|interface|object)(\s+)', bygroups(Keyword, Text), 'class'),
- (r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'),
+ (r'(class|interface|object)(\s+)', bygroups(Keyword, Text), 'class'),
+ (r'(package|import)(\s+)', bygroups(Keyword, Text), 'package'),
(r'(val|var)(\s+)([(])', bygroups(Keyword, Text, Punctuation), 'property_dec'),
- (r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'),
- (r'(fun)(\s+)', bygroups(Keyword, Text), 'function'),
+ (r'(val|var)(\s+)', bygroups(Keyword, Text), 'property'),
+ (r'(fun)(\s+)', bygroups(Keyword, Text), 'function'),
(r'(inline fun)(\s+)', bygroups(Keyword, Text), 'function'),
- (r'(abstract|annotation|as|break|by|catch|class|companion|const|'
- r'constructor|continue|crossinline|data|do|dynamic|else|enum|'
- r'external|false|final|finally|for|fun|get|if|import|in|infix|'
- r'inline|inner|interface|internal|is|lateinit|noinline|null|'
- r'object|open|operator|out|override|package|private|protected|'
- r'public|reified|return|sealed|set|super|tailrec|this|throw|'
- r'true|try|val|var|vararg|when|where|while)\b', Keyword),
- (kt_id, Name),
- ],
- 'package': [
- (r'\S+', Name.Namespace, '#pop')
- ],
- 'class': [
- (kt_id, Name.Class, '#pop')
- ],
- 'property': [
- (kt_id, Name.Property, '#pop')
- ],
+ (r'(abstract|annotation|as|break|by|catch|class|companion|const|'
+ r'constructor|continue|crossinline|data|do|dynamic|else|enum|'
+ r'external|false|final|finally|for|fun|get|if|import|in|infix|'
+ r'inline|inner|interface|internal|is|lateinit|noinline|null|'
+ r'object|open|operator|out|override|package|private|protected|'
+ r'public|reified|return|sealed|set|super|tailrec|this|throw|'
+ r'true|try|val|var|vararg|when|where|while)\b', Keyword),
+ (kt_id, Name),
+ ],
+ 'package': [
+ (r'\S+', Name.Namespace, '#pop')
+ ],
+ 'class': [
+ (kt_id, Name.Class, '#pop')
+ ],
+ 'property': [
+ (kt_id, Name.Property, '#pop')
+ ],
'property_dec': [
(r'(,)(\s*)', bygroups(Punctuation, Text)),
(r'(:)(\s*)', bygroups(Punctuation, Text)),
@@ -1087,11 +1087,11 @@ class KotlinLexer(RegexLexer):
(r'([)])', Punctuation, '#pop'),
(kt_id, Name.Property)
],
- 'function': [
+ 'function': [
(r'<', Punctuation, 'generic'),
(r''+kt_id+'([.])'+kt_id, bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
- (kt_id, Name.Function, '#pop')
- ],
+ (kt_id, Name.Function, '#pop')
+ ],
'generic': [
(r'(>)(\s*)', bygroups(Punctuation, Text), '#pop'),
(r':',Punctuation),
@@ -1100,516 +1100,516 @@ class KotlinLexer(RegexLexer):
(r'\s+',Text),
(kt_id,Name)
]
- }
-
-
-class XtendLexer(RegexLexer):
- """
- For `Xtend <http://xtend-lang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Xtend'
- aliases = ['xtend']
- filenames = ['*.xtend']
- mimetypes = ['text/x-xtend']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_$][\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
- r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
- Keyword),
- (r'(def|abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r"(''')", String, 'template'),
- (u'(\u00BB)', String, 'template'),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'(\\\\|\\'|[^'])*'", String),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- 'template': [
- (r"'''", String, '#pop'),
- (u'\u00AB', String, '#pop'),
- (r'.', String)
- ],
- }
-
-
-class PigLexer(RegexLexer):
- """
- For `Pig Latin <https://pig.apache.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pig'
- aliases = ['pig']
- filenames = ['*.pig']
- mimetypes = ['text/x-pig']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'--.*', Comment),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
- include('keywords'),
- include('types'),
- include('builtins'),
- include('punct'),
- include('operators'),
- (r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text),
- (r'([a-z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[()#:]', Text),
- (r'[^(:#\'")\s]+', Text),
- (r'\S+\s+', Text) # TODO: make tests pass without \s+
- ],
- 'keywords': [
- (r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
- r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
- r'eval|exex|explain|filter|flatten|foreach|full|generate|group|'
- r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
- r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
- r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
- r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
- r'stream|through|union|using|void)\b', Keyword)
- ],
- 'builtins': [
- (r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
- r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
- r'TOKENIZE)\b', Name.Builtin)
- ],
- 'types': [
- (r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
- r'int|long|tuple)\b', Keyword.Type)
- ],
- 'punct': [
- (r'[;(){}\[\]]', Punctuation),
- ],
- 'operators': [
- (r'[#=,./%+\-?]', Operator),
- (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
- (r'(==|<=|<|>=|>|!=)', Operator),
- ],
- }
-
-
-class GoloLexer(RegexLexer):
- """
- For `Golo <http://golo-lang.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Golo'
- filenames = ['*.golo']
- aliases = ['golo']
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
-
- (r'#.*$', Comment),
-
- (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
- Operator),
- (r'(?<=[^-])(-)(?=[^-])', Operator),
-
- (r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
- (r'[]{}|(),[]', Punctuation),
-
- (r'(module|import)(\s+)',
- bygroups(Keyword.Namespace, Text),
- 'modname'),
- (r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
- (r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
-
- (r'(let|var)(\s+)',
- bygroups(Keyword.Declaration, Text),
- 'varname'),
- (r'(struct)(\s+)',
- bygroups(Keyword.Declaration, Text),
- 'structname'),
- (r'(function)(\s+)',
- bygroups(Keyword.Declaration, Text),
- 'funcname'),
-
- (r'(null|true|false)\b', Keyword.Constant),
- (r'(augment|pimp'
- r'|if|else|case|match|return'
- r'|case|when|then|otherwise'
- r'|while|for|foreach'
- r'|try|catch|finally|throw'
- r'|local'
- r'|continue|break)\b', Keyword),
-
- (r'(map|array|list|set|vector|tuple)(\[)',
- bygroups(Name.Builtin, Punctuation)),
- (r'(print|println|readln|raise|fun'
- r'|asInterfaceInstance)\b', Name.Builtin),
- (r'(`?[a-zA-Z_][\w$]*)(\()',
- bygroups(Name.Function, Punctuation)),
-
- (r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'-?\d[\d_]*L', Number.Integer.Long),
- (r'-?\d[\d_]*', Number.Integer),
-
+ }
+
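A minimal sketch of the Unicode-aware identifiers that kt_name builds from uni.combine above, assuming the vendored Pygments package is importable:

    from pygments import lex
    from pygments.lexers import KotlinLexer

    # kt_name is assembled from Unicode categories (Lu, Ll, Lt, Lm, Nl, ...),
    # so a non-ASCII identifier such as 'привет' after 'val' is tokenized as
    # Name.Property rather than falling through to Error.
    for token_type, value in lex('val привет = 42', KotlinLexer()):
        print(token_type, repr(value))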
+
+class XtendLexer(RegexLexer):
+ """
+ For `Xtend <http://xtend-lang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Xtend'
+ aliases = ['xtend']
+ filenames = ['*.xtend']
+ mimetypes = ['text/x-xtend']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_$][\w$]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
+ r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
+ Keyword),
+ (r'(def|abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r"(''')", String, 'template'),
+ (u'(\u00BB)', String, 'template'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'(\\\\|\\'|[^'])*'", String),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ 'template': [
+ (r"'''", String, '#pop'),
+ (u'\u00AB', String, '#pop'),
+ (r'.', String)
+ ],
+ }
+
+
+class PigLexer(RegexLexer):
+ """
+ For `Pig Latin <https://pig.apache.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pig'
+ aliases = ['pig']
+ filenames = ['*.pig']
+ mimetypes = ['text/x-pig']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'--.*', Comment),
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
+ include('keywords'),
+ include('types'),
+ include('builtins'),
+ include('punct'),
+ include('operators'),
+ (r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text),
+ (r'([a-z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[()#:]', Text),
+ (r'[^(:#\'")\s]+', Text),
+ (r'\S+\s+', Text) # TODO: make tests pass without \s+
+ ],
+ 'keywords': [
+ (r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
+ r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
+ r'eval|exex|explain|filter|flatten|foreach|full|generate|group|'
+ r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
+ r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
+ r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
+ r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
+ r'stream|through|union|using|void)\b', Keyword)
+ ],
+ 'builtins': [
+ (r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
+ r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
+ r'TOKENIZE)\b', Name.Builtin)
+ ],
+ 'types': [
+ (r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
+ r'int|long|tuple)\b', Keyword.Type)
+ ],
+ 'punct': [
+ (r'[;(){}\[\]]', Punctuation),
+ ],
+ 'operators': [
+ (r'[#=,./%+\-?]', Operator),
+ (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
+ (r'(==|<=|<|>=|>|!=)', Operator),
+ ],
+ }
+
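A brief sketch of the case-insensitive keyword handling in the PigLexer above (its flags include re.IGNORECASE), assuming the vendored Pygments package is importable:

    from pygments import lex
    from pygments.lexers import PigLexer

    # LOAD and AS hit the 'keywords' rules regardless of case; 'int' comes from
    # the 'types' rules (Keyword.Type).
    for token_type, value in lex("a = LOAD 'input' AS (f1:int);", PigLexer()):
        print(token_type, repr(value))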
+
+class GoloLexer(RegexLexer):
+ """
+ For `Golo <http://golo-lang.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Golo'
+ filenames = ['*.golo']
+ aliases = ['golo']
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+
+ (r'#.*$', Comment),
+
+ (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
+ Operator),
+ (r'(?<=[^-])(-)(?=[^-])', Operator),
+
+ (r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
+ (r'[]{}|(),[]', Punctuation),
+
+ (r'(module|import)(\s+)',
+ bygroups(Keyword.Namespace, Text),
+ 'modname'),
+ (r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
+ (r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
+
+ (r'(let|var)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'varname'),
+ (r'(struct)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'structname'),
+ (r'(function)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'funcname'),
+
+ (r'(null|true|false)\b', Keyword.Constant),
+ (r'(augment|pimp'
+ r'|if|else|case|match|return'
+ r'|case|when|then|otherwise'
+ r'|while|for|foreach'
+ r'|try|catch|finally|throw'
+ r'|local'
+ r'|continue|break)\b', Keyword),
+
+ (r'(map|array|list|set|vector|tuple)(\[)',
+ bygroups(Name.Builtin, Punctuation)),
+ (r'(print|println|readln|raise|fun'
+ r'|asInterfaceInstance)\b', Name.Builtin),
+ (r'(`?[a-zA-Z_][\w$]*)(\()',
+ bygroups(Name.Function, Punctuation)),
+
+ (r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'-?\d[\d_]*L', Number.Integer.Long),
+ (r'-?\d[\d_]*', Number.Integer),
+
(r'`?[a-zA-Z_][\w$]*', Name),
- (r'@[a-zA-Z_][\w$.]*', Name.Decorator),
-
- (r'"""', String, combined('stringescape', 'triplestring')),
- (r'"', String, combined('stringescape', 'doublestring')),
- (r"'", String, combined('stringescape', 'singlestring')),
- (r'----((.|\n)*?)----', String.Doc)
-
- ],
-
- 'funcname': [
- (r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
- ],
- 'modname': [
- (r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
- ],
- 'structname': [
- (r'`?[\w.]+\*?', Name.Class, '#pop')
- ],
- 'varname': [
- (r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
- ],
- 'string': [
- (r'[^\\\'"\n]+', String),
- (r'[\'"\\]', String)
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'triplestring': [
- (r'"""', String, '#pop'),
- include('string'),
- (r'\n', String),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- include('string'),
- ],
- 'singlestring': [
- (r"'", String, '#pop'),
- include('string'),
- ],
- 'operators': [
- (r'[#=,./%+\-?]', Operator),
- (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
- (r'(==|<=|<|>=|>|!=)', Operator),
- ],
- }
-
-
-class JasminLexer(RegexLexer):
- """
- For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Jasmin'
- aliases = ['jasmin', 'jasminxt']
- filenames = ['*.j']
-
- _whitespace = r' \n\t\r'
- _ws = r'(?:[%s]+)' % _whitespace
- _separator = r'%s:=' % _whitespace
- _break = r'(?=[%s]|$)' % _separator
- _name = r'[^%s]+' % _separator
- _unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
-
- tokens = {
- 'default': [
- (r'\n', Text, '#pop'),
- (r"'", String.Single, ('#pop', 'quote')),
- (r'"', String.Double, 'string'),
- (r'=', Punctuation),
- (r':', Punctuation, 'label'),
- (_ws, Text),
- (r';.*', Comment.Single),
- (r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
- (r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
- (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
- r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
- (r'\$%s' % _name, Name.Variable),
-
- # Directives
- (r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
- (r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
- r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
- r'annotation|bridge|class|default|enum|field|final|fpstrict|'
- r'interface|native|private|protected|public|signature|static|'
- r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
- Keyword.Reserved),
- (r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
- (r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
- r'invisibleparam|outer|visible|visibleparam)%s' % _break,
- Keyword.Reserved, 'class/convert-dots'),
- (r'\.field%s' % _break, Keyword.Reserved,
- ('descriptor/convert-dots', 'field')),
- (r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
- 'no-verification'),
- (r'\.method%s' % _break, Keyword.Reserved, 'method'),
- (r'\.set%s' % _break, Keyword.Reserved, 'var'),
- (r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
- (r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
- (r'is%s' % _break, Keyword.Reserved,
- ('descriptor/convert-dots', 'var')),
- (r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
- (r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
-
- # Instructions
- (words((
- 'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
- 'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
- 'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
- 'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
- 'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
- 'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
- 'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
- 'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
- 'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
- 'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
- 'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
- 'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
- 'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
- 'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
- 'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
- 'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
- 'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
- 'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
- 'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
- 'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
- 'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
- 'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
- (r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
- Keyword.Reserved, 'class/no-dots'),
- (r'invoke(dynamic|interface|nonvirtual|special|'
- r'static|virtual)%s' % _break, Keyword.Reserved,
- 'invocation'),
- (r'(getfield|putfield)%s' % _break, Keyword.Reserved,
- ('descriptor/no-dots', 'field')),
- (r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
- ('descriptor/no-dots', 'static')),
- (words((
- 'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
- 'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
- 'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
- 'ifnull', 'jsr', 'jsr_w'), suffix=_break),
- Keyword.Reserved, 'label'),
- (r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
- 'descriptor/convert-dots'),
- (r'tableswitch%s' % _break, Keyword.Reserved, 'table')
- ],
- 'quote': [
- (r"'", String.Single, '#pop'),
- (r'\\u[\da-fA-F]{4}', String.Escape),
- (r"[^'\\]+", String.Single)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
- String.Escape),
- (r'[^"\\]+', String.Double)
- ],
- 'root': [
- (r'\n+', Text),
- (r"'", String.Single, 'quote'),
- include('default'),
- (r'(%s)([ \t\r]*)(:)' % _name,
- bygroups(Name.Label, Text, Punctuation)),
- (_name, String.Other)
- ],
- 'annotation': [
- (r'\n', Text, ('#pop', 'annotation-body')),
- (r'default%s' % _break, Keyword.Reserved,
- ('#pop', 'annotation-default')),
- include('default')
- ],
- 'annotation-body': [
- (r'\n+', Text),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- (_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
- ],
- 'annotation-default': [
- (r'\n+', Text),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- default(('annotation-items', 'descriptor/no-dots'))
- ],
- 'annotation-items': [
- (r"'", String.Single, 'quote'),
- include('default'),
- (_name, String.Other)
- ],
- 'caught-exception': [
- (r'all%s' % _break, Keyword, '#pop'),
- include('exception')
- ],
- 'class/convert-dots': [
- include('default'),
- (r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class), '#pop')
- ],
- 'class/no-dots': [
- include('default'),
- (r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class), '#pop')
- ],
- 'descriptor/convert-dots': [
- include('default'),
- (r'\[+', Punctuation),
- (r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
- default('#pop')
- ],
- 'descriptor/no-dots': [
- include('default'),
- (r'\[+', Punctuation),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
- default('#pop')
- ],
- 'descriptors/convert-dots': [
- (r'\)', Punctuation, '#pop'),
- default('descriptor/convert-dots')
- ],
- 'enclosing-method': [
- (_ws, Text),
- (r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
- default(('#pop', 'class/convert-dots'))
- ],
- 'exception': [
- include('default'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Exception), '#pop')
- ],
- 'field': [
- (r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
- include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
- '#pop')
- ],
- 'invocation': [
- include('default'),
- (r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
- ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
- 'descriptor/convert-dots'))
- ],
- 'label': [
- include('default'),
- (_name, Name.Label, '#pop')
- ],
- 'method': [
- include('default'),
- (r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
- ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
- 'descriptor/convert-dots'))
- ],
- 'no-verification': [
- (r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
- include('default')
- ],
- 'static': [
- include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
- ],
- 'table': [
- (r'\n+', Text),
- (r'default%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- (_name, Name.Label)
- ],
- 'var': [
- include('default'),
- (_name, Name.Variable, '#pop')
- ],
- 'verification': [
- include('default'),
- (r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
- _break, Keyword, '#pop'),
- (r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
- (r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
- ]
- }
-
- def analyse_text(text):
- score = 0
- if re.search(r'^\s*\.class\s', text, re.MULTILINE):
- score += 0.5
- if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
- score += 0.3
- if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
- r'inner|interface|limit|set|signature|stack)\b', text,
- re.MULTILINE):
- score += 0.6
- return score
+ (r'@[a-zA-Z_][\w$.]*', Name.Decorator),
+
+ (r'"""', String, combined('stringescape', 'triplestring')),
+ (r'"', String, combined('stringescape', 'doublestring')),
+ (r"'", String, combined('stringescape', 'singlestring')),
+ (r'----((.|\n)*?)----', String.Doc)
+
+ ],
+
+ 'funcname': [
+ (r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
+ ],
+ 'modname': [
+ (r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
+ ],
+ 'structname': [
+ (r'`?[\w.]+\*?', Name.Class, '#pop')
+ ],
+ 'varname': [
+ (r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\\'"\n]+', String),
+ (r'[\'"\\]', String)
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'triplestring': [
+ (r'"""', String, '#pop'),
+ include('string'),
+ (r'\n', String),
+ ],
+ 'doublestring': [
+ (r'"', String.Double, '#pop'),
+ include('string'),
+ ],
+ 'singlestring': [
+ (r"'", String, '#pop'),
+ include('string'),
+ ],
+ 'operators': [
+ (r'[#=,./%+\-?]', Operator),
+ (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
+ (r'(==|<=|<|>=|>|!=)', Operator),
+ ],
+ }
+
+
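A quick way to see the GoloLexer rules above in action is to walk the token stream directly; a small sketch using an illustrative one-liner (the snippet itself is made up for demonstration):

from pygments.lexers.jvm import GoloLexer

# Exercises the declaration rule ('function' -> funcname state), the builtin
# rule ('println') and the string states defined above.
golo_src = 'function main = |args| { println("hello") }'
for ttype, value in GoloLexer().get_tokens(golo_src):
    if value.strip():                 # skip pure-whitespace tokens for brevity
        print(ttype, repr(value))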
+class JasminLexer(RegexLexer):
+ """
+ For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Jasmin'
+ aliases = ['jasmin', 'jasminxt']
+ filenames = ['*.j']
+
+ _whitespace = r' \n\t\r'
+ _ws = r'(?:[%s]+)' % _whitespace
+ _separator = r'%s:=' % _whitespace
+ _break = r'(?=[%s]|$)' % _separator
+ _name = r'[^%s]+' % _separator
+ _unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
+
+ tokens = {
+ 'default': [
+ (r'\n', Text, '#pop'),
+ (r"'", String.Single, ('#pop', 'quote')),
+ (r'"', String.Double, 'string'),
+ (r'=', Punctuation),
+ (r':', Punctuation, 'label'),
+ (_ws, Text),
+ (r';.*', Comment.Single),
+ (r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
+ (r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
+ (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
+ r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
+ (r'\$%s' % _name, Name.Variable),
+
+ # Directives
+ (r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
+ (r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
+ r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
+ r'annotation|bridge|class|default|enum|field|final|fpstrict|'
+ r'interface|native|private|protected|public|signature|static|'
+ r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
+ Keyword.Reserved),
+ (r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
+ (r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
+ r'invisibleparam|outer|visible|visibleparam)%s' % _break,
+ Keyword.Reserved, 'class/convert-dots'),
+ (r'\.field%s' % _break, Keyword.Reserved,
+ ('descriptor/convert-dots', 'field')),
+ (r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
+ 'no-verification'),
+ (r'\.method%s' % _break, Keyword.Reserved, 'method'),
+ (r'\.set%s' % _break, Keyword.Reserved, 'var'),
+ (r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
+ (r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
+ (r'is%s' % _break, Keyword.Reserved,
+ ('descriptor/convert-dots', 'var')),
+ (r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
+ (r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
+
+ # Instructions
+ (words((
+ 'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
+ 'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
+ 'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
+ 'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
+ 'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
+ 'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
+ 'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
+ 'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
+ 'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
+ 'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
+ 'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
+ 'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
+ 'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
+ 'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
+ 'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
+ 'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
+ 'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
+ 'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
+ 'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
+ 'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
+ 'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
+ 'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
+ (r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
+ Keyword.Reserved, 'class/no-dots'),
+ (r'invoke(dynamic|interface|nonvirtual|special|'
+ r'static|virtual)%s' % _break, Keyword.Reserved,
+ 'invocation'),
+ (r'(getfield|putfield)%s' % _break, Keyword.Reserved,
+ ('descriptor/no-dots', 'field')),
+ (r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
+ ('descriptor/no-dots', 'static')),
+ (words((
+ 'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
+ 'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
+ 'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
+ 'ifnull', 'jsr', 'jsr_w'), suffix=_break),
+ Keyword.Reserved, 'label'),
+ (r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
+ 'descriptor/convert-dots'),
+ (r'tableswitch%s' % _break, Keyword.Reserved, 'table')
+ ],
+ 'quote': [
+ (r"'", String.Single, '#pop'),
+ (r'\\u[\da-fA-F]{4}', String.Escape),
+ (r"[^'\\]+", String.Single)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
+ String.Escape),
+ (r'[^"\\]+', String.Double)
+ ],
+ 'root': [
+ (r'\n+', Text),
+ (r"'", String.Single, 'quote'),
+ include('default'),
+ (r'(%s)([ \t\r]*)(:)' % _name,
+ bygroups(Name.Label, Text, Punctuation)),
+ (_name, String.Other)
+ ],
+ 'annotation': [
+ (r'\n', Text, ('#pop', 'annotation-body')),
+ (r'default%s' % _break, Keyword.Reserved,
+ ('#pop', 'annotation-default')),
+ include('default')
+ ],
+ 'annotation-body': [
+ (r'\n+', Text),
+ (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ (_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
+ ],
+ 'annotation-default': [
+ (r'\n+', Text),
+ (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ default(('annotation-items', 'descriptor/no-dots'))
+ ],
+ 'annotation-items': [
+ (r"'", String.Single, 'quote'),
+ include('default'),
+ (_name, String.Other)
+ ],
+ 'caught-exception': [
+ (r'all%s' % _break, Keyword, '#pop'),
+ include('exception')
+ ],
+ 'class/convert-dots': [
+ include('default'),
+ (r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class), '#pop')
+ ],
+ 'class/no-dots': [
+ include('default'),
+ (r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
+ (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class), '#pop')
+ ],
+ 'descriptor/convert-dots': [
+ include('default'),
+ (r'\[+', Punctuation),
+ (r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ default('#pop')
+ ],
+ 'descriptor/no-dots': [
+ include('default'),
+ (r'\[+', Punctuation),
+ (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ default('#pop')
+ ],
+ 'descriptors/convert-dots': [
+ (r'\)', Punctuation, '#pop'),
+ default('descriptor/convert-dots')
+ ],
+ 'enclosing-method': [
+ (_ws, Text),
+ (r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
+ default(('#pop', 'class/convert-dots'))
+ ],
+ 'exception': [
+ include('default'),
+ (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Exception), '#pop')
+ ],
+ 'field': [
+ (r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
+ include('default'),
+ (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
+ '#pop')
+ ],
+ 'invocation': [
+ include('default'),
+ (r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
+ ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
+ 'descriptor/convert-dots'))
+ ],
+ 'label': [
+ include('default'),
+ (_name, Name.Label, '#pop')
+ ],
+ 'method': [
+ include('default'),
+ (r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
+ ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
+ 'descriptor/convert-dots'))
+ ],
+ 'no-verification': [
+ (r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
+ include('default')
+ ],
+ 'static': [
+ include('default'),
+ (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
+ ],
+ 'table': [
+ (r'\n+', Text),
+ (r'default%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ (_name, Name.Label)
+ ],
+ 'var': [
+ include('default'),
+ (_name, Name.Variable, '#pop')
+ ],
+ 'verification': [
+ include('default'),
+ (r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
+ _break, Keyword, '#pop'),
+ (r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
+ (r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
+ ]
+ }
+
+ def analyse_text(text):
+ score = 0
+ if re.search(r'^\s*\.class\s', text, re.MULTILINE):
+ score += 0.5
+ if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
+ score += 0.3
+ if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
+ r'inner|interface|limit|set|signature|stack)\b', text,
+ re.MULTILINE):
+ score += 0.6
+ return score
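The analyse_text() heuristic above is what pygments.lexers.guess_lexer() consults when no filename is available. A hedged sketch calling it directly on an illustrative fragment (the expected score is an assumption based on the patterns above, not a documented value):

from pygments.lexers.jvm import JasminLexer

# Only the '.class' pattern should fire for this fragment, so the score is
# expected to come back as 0.5; '.super' is not in the directive list above.
sample = ".class public HelloWorld\n.super java/lang/Object\n"
print(JasminLexer.analyse_text(sample))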
class SarlLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py2/pygments/lexers/lisp.py b/contrib/python/Pygments/py2/pygments/lexers/lisp.py
index 601d5a5f27..15ce98fb08 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/lisp.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/lisp.py
@@ -1,476 +1,476 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.lisp
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Lispy languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.lisp
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Lispy languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Error
-
-from pygments.lexers.python import PythonLexer
-
-__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal, Error
+
+from pygments.lexers.python import PythonLexer
+
+__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer',
'XtlangLexer', 'FennelLexer']
-
-
-class SchemeLexer(RegexLexer):
- """
- A Scheme lexer, parsing a stream and outputting the tokens
- needed to highlight scheme code.
-    This lexer could most probably be subclassed easily to parse
-    other LISP dialects such as Common Lisp, Emacs Lisp or AutoLisp.
-
- This parser is checked with pastes from the LISP pastebin
- at http://paste.lisp.org/ to cover as much syntax as possible.
-
- It supports the full Scheme syntax as defined in R5RS.
-
- .. versionadded:: 0.6
- """
- name = 'Scheme'
- aliases = ['scheme', 'scm']
- filenames = ['*.scm', '*.ss']
- mimetypes = ['text/x-scheme', 'application/x-scheme']
-
-    # list of known keywords and builtins taken from vim 6.4 scheme.vim
- # syntax file.
- keywords = (
- 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
- 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
- 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
- 'let-syntax', 'letrec-syntax', 'syntax-rules'
- )
- builtins = (
- '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
- 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
- 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
- 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
- 'cadr', 'call-with-current-continuation', 'call-with-input-file',
- 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
- 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
- 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
- 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
- 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
- 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
- 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
- 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
- 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
- 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
- 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
- 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
- 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
- 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
- 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
- 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
- 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
- 'null?', 'number->string', 'number?', 'numerator', 'odd?',
- 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
- 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
- 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
- 'remainder', 'reverse', 'round', 'scheme-report-environment',
- 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
- 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
- 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
- 'string-copy', 'string-fill!', 'string-length', 'string-ref',
- 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
- 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
- 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
- 'vector', 'vector->list', 'vector-fill!', 'vector-length',
- 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
- 'with-output-to-file', 'write', 'write-char', 'zero?'
- )
-
- # valid names for identifiers
-    # well, names just can't consist entirely of numbers
- # but this should be good enough for now
- valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
-
- tokens = {
- 'root': [
-            # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
-            # commented form (entire sexpr following)
- (r'#;\s*\(', Comment, 'commented-form'),
- # signifies that the program text that follows is written with the
- # lexical and datum syntax described in r6rs
- (r'#!r6rs', Comment),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- # support for uncommon kinds of numbers -
- # have to figure out what the characters mean
- # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
-
- # constants
- (r'(#t|#f)', Name.Constant),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords),
- Keyword),
-
- # first variable in a quoted string like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable),
- (r"(?<=#\()" + valid_name, Name.Variable),
-
- # highlight the builtins
+
+
+class SchemeLexer(RegexLexer):
+ """
+ A Scheme lexer, parsing a stream and outputting the tokens
+ needed to highlight scheme code.
+    This lexer could most probably be subclassed easily to parse
+    other LISP dialects such as Common Lisp, Emacs Lisp or AutoLisp.
+
+ This parser is checked with pastes from the LISP pastebin
+ at http://paste.lisp.org/ to cover as much syntax as possible.
+
+ It supports the full Scheme syntax as defined in R5RS.
+
+ .. versionadded:: 0.6
+ """
+ name = 'Scheme'
+ aliases = ['scheme', 'scm']
+ filenames = ['*.scm', '*.ss']
+ mimetypes = ['text/x-scheme', 'application/x-scheme']
+
+    # list of known keywords and builtins taken from vim 6.4 scheme.vim
+ # syntax file.
+ keywords = (
+ 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
+ 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
+ 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
+ 'let-syntax', 'letrec-syntax', 'syntax-rules'
+ )
+ builtins = (
+ '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
+ 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
+ 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
+ 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
+ 'cadr', 'call-with-current-continuation', 'call-with-input-file',
+ 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
+ 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
+ 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
+ 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
+ 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
+ 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
+ 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
+ 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
+ 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
+ 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
+ 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
+ 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
+ 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
+ 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
+ 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
+ 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
+ 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
+ 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
+ 'null?', 'number->string', 'number?', 'numerator', 'odd?',
+ 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
+ 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
+ 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
+ 'remainder', 'reverse', 'round', 'scheme-report-environment',
+ 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
+ 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
+ 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
+ 'string-copy', 'string-fill!', 'string-length', 'string-ref',
+ 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
+ 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
+ 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
+ 'vector', 'vector->list', 'vector-fill!', 'vector-length',
+ 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
+ 'with-output-to-file', 'write', 'write-char', 'zero?'
+ )
+
+ # valid names for identifiers
+    # well, names just can't consist entirely of numbers
+ # but this should be good enough for now
+ valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
+
+ tokens = {
+ 'root': [
+            # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+            # commented form (entire sexpr following)
+ (r'#;\s*\(', Comment, 'commented-form'),
+ # signifies that the program text that follows is written with the
+ # lexical and datum syntax described in r6rs
+ (r'#!r6rs', Comment),
+
+ # whitespaces - usually not relevant
+ (r'\s+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ # support for uncommon kinds of numbers -
+ # have to figure out what the characters mean
+ # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
+
+ # constants
+ (r'(#t|#f)', Name.Constant),
+
+ # special operators
+ (r"('|#|`|,@|,|\.)", Operator),
+
+ # highlight the keywords
+ ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords),
+ Keyword),
+
+ # first variable in a quoted string like
+ # '(this is syntactic sugar)
+ (r"(?<='\()" + valid_name, Name.Variable),
+ (r"(?<=#\()" + valid_name, Name.Variable),
+
+ # highlight the builtins
(r"(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins),
- Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- (r'(\[|\])', Punctuation),
- ],
- 'multiline-comment': [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form': [
- (r'\(', Comment, '#push'),
- (r'\)', Comment, '#pop'),
- (r'[^()]+', Comment),
- ],
- }
-
-
-class CommonLispLexer(RegexLexer):
- """
- A Common Lisp lexer.
-
- .. versionadded:: 0.9
- """
- name = 'Common Lisp'
- aliases = ['common-lisp', 'cl', 'lisp']
- filenames = ['*.cl', '*.lisp']
- mimetypes = ['text/x-common-lisp']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- # couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
-
- # symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
-
- def __init__(self, **options):
- from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
- SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
- BUILTIN_TYPES, BUILTIN_CLASSES
- self.builtin_function = BUILTIN_FUNCTIONS
- self.special_forms = SPECIAL_FORMS
- self.macros = MACROS
- self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
- self.declarations = DECLARATIONS
- self.builtin_types = BUILTIN_TYPES
- self.builtin_classes = BUILTIN_CLASSES
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in self.builtin_function:
- yield index, Name.Builtin, value
- continue
- if value in self.special_forms:
- yield index, Keyword, value
- continue
- if value in self.macros:
- yield index, Name.Builtin, value
- continue
- if value in self.lambda_list_keywords:
- yield index, Keyword, value
- continue
- if value in self.declarations:
- yield index, Keyword, value
- continue
- if value in self.builtin_types:
- yield index, Keyword.Type, value
- continue
- if value in self.builtin_classes:
- yield index, Name.Class, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- default('body'),
- ],
- 'multiline-comment': [
- (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form': [
- (r'\(', Comment.Preproc, '#push'),
- (r'\)', Comment.Preproc, '#pop'),
- (r'[^()]+', Comment.Preproc),
- ],
- 'body': [
- # whitespace
- (r'\s+', Text),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
-
- # encoding comment (?)
- (r'#\d*Y.*$', Comment.Special),
-
- # strings and characters
- (r'"(\\.|\\\n|[^"\\])*"', String),
- # quoting
- (r":" + symbol, String.Symbol),
- (r"::" + symbol, String.Symbol),
- (r":#" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
- terminated, Number.Float),
-
- # sharpsign strings and characters
- (r"#\\." + terminated, String.Char),
- (r"#\\" + symbol, String.Char),
-
- # vector
- (r'#\(', Operator, 'body'),
-
- # bitstring
- (r'#\d*\*[01]*', Literal.Other),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read-time and load-time evaluation
- (r'#[.,]', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#b[+-]?[01]+(/[01]+)?', Number.Bin),
-
- # octal rational
- (r'#o[+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#x[+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
-
- # complex
- (r'(#c)(\()', bygroups(Number, Punctuation), 'body'),
-
- # array
- (r'(#\d+a)(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # structure
- (r'(#s)(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # path
- (r'#p?"(\\.|[^"])*"', Literal.Other),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # read-time comment
+ Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ (r'(\[|\])', Punctuation),
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment, '#push'),
+ (r'\)', Comment, '#pop'),
+ (r'[^()]+', Comment),
+ ],
+ }
+
+
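To check how the SchemeLexer rules above classify a form, iterating over get_tokens() is usually enough; a small sketch with an illustrative expression:

from pygments.lexers.lisp import SchemeLexer

# 'define' is in the keywords tuple and '*' in the builtins tuple above, so
# they should come back as Keyword and Name.Builtin; the remaining names fall
# through to the generic function/variable rules.
scheme_src = "(define (square x) (* x x))"
for ttype, value in SchemeLexer().get_tokens(scheme_src):
    if value.strip():
        print(ttype, repr(value))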
+class CommonLispLexer(RegexLexer):
+ """
+ A Common Lisp lexer.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Common Lisp'
+ aliases = ['common-lisp', 'cl', 'lisp']
+ filenames = ['*.cl', '*.lisp']
+ mimetypes = ['text/x-common-lisp']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # couple of useful regexes
+
+ # characters that are not macro-characters and can be used to begin a symbol
+ nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
+ constituent = nonmacro + '|[#.:]'
+ terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
+
+ # symbol token, reverse-engineered from hyperspec
+ # Take a deep breath...
+ symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
+
+ def __init__(self, **options):
+ from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
+ SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
+ BUILTIN_TYPES, BUILTIN_CLASSES
+ self.builtin_function = BUILTIN_FUNCTIONS
+ self.special_forms = SPECIAL_FORMS
+ self.macros = MACROS
+ self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
+ self.declarations = DECLARATIONS
+ self.builtin_types = BUILTIN_TYPES
+ self.builtin_classes = BUILTIN_CLASSES
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Variable:
+ if value in self.builtin_function:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.special_forms:
+ yield index, Keyword, value
+ continue
+ if value in self.macros:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.lambda_list_keywords:
+ yield index, Keyword, value
+ continue
+ if value in self.declarations:
+ yield index, Keyword, value
+ continue
+ if value in self.builtin_types:
+ yield index, Keyword.Type, value
+ continue
+ if value in self.builtin_classes:
+ yield index, Name.Class, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ default('body'),
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment.Preproc, '#push'),
+ (r'\)', Comment.Preproc, '#pop'),
+ (r'[^()]+', Comment.Preproc),
+ ],
+ 'body': [
+ # whitespace
+ (r'\s+', Text),
+
+ # single-line comment
+ (r';.*$', Comment.Single),
+
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+
+ # encoding comment (?)
+ (r'#\d*Y.*$', Comment.Special),
+
+ # strings and characters
+ (r'"(\\.|\\\n|[^"\\])*"', String),
+ # quoting
+ (r":" + symbol, String.Symbol),
+ (r"::" + symbol, String.Symbol),
+ (r":#" + symbol, String.Symbol),
+ (r"'" + symbol, String.Symbol),
+ (r"'", Operator),
+ (r"`", Operator),
+
+ # decimal numbers
+ (r'[-+]?\d+\.?' + terminated, Number.Integer),
+ (r'[-+]?\d+/\d+' + terminated, Number),
+ (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
+ terminated, Number.Float),
+
+ # sharpsign strings and characters
+ (r"#\\." + terminated, String.Char),
+ (r"#\\" + symbol, String.Char),
+
+ # vector
+ (r'#\(', Operator, 'body'),
+
+ # bitstring
+ (r'#\d*\*[01]*', Literal.Other),
+
+ # uninterned symbol
+ (r'#:' + symbol, String.Symbol),
+
+ # read-time and load-time evaluation
+ (r'#[.,]', Operator),
+
+ # function shorthand
+ (r'#\'', Name.Function),
+
+ # binary rational
+ (r'#b[+-]?[01]+(/[01]+)?', Number.Bin),
+
+ # octal rational
+ (r'#o[+-]?[0-7]+(/[0-7]+)?', Number.Oct),
+
+ # hex rational
+ (r'#x[+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
+
+ # radix rational
+ (r'#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
+
+ # complex
+ (r'(#c)(\()', bygroups(Number, Punctuation), 'body'),
+
+ # array
+ (r'(#\d+a)(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # structure
+ (r'(#s)(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # path
+ (r'#p?"(\\.|[^"])*"', Literal.Other),
+
+ # reference
+ (r'#\d+=', Operator),
+ (r'#\d+#', Operator),
+
+ # read-time comment
(r'#+nil' + terminated + r'\s*\(', Comment.Preproc, 'commented-form'),
-
- # read-time conditional
- (r'#[+-]', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
+
+ # read-time conditional
+ (r'#[+-]', Operator),
+
+ # special operators that should have been parsed already
+ (r'(,@|,|\.)', Operator),
+
+ # special constants
+ (r'(t|nil)' + terminated, Name.Constant),
+
+ # functions and variables
(r'\*' + symbol + r'\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- }
-
-
-class HyLexer(RegexLexer):
- """
- Lexer for `Hy <http://hylang.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Hy'
- aliases = ['hylang']
- filenames = ['*.hy']
- mimetypes = ['text/x-hy', 'application/x-hy']
-
- special_forms = (
- 'cond', 'for', '->', '->>', 'car',
- 'cdr', 'first', 'rest', 'let', 'when', 'unless',
- 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
- ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
- 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
- 'foreach', 'while',
- 'eval-and-compile', 'eval-when-compile'
- )
-
- declarations = (
- 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
- )
-
- hy_builtins = ()
-
- hy_core = (
- 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
- 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
- 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
- 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
- )
-
- builtins = hy_builtins + hy_core
-
- # valid names for identifiers
-    # well, names just can't consist entirely of numbers
- # but this should be good enough for now
+ (symbol, Name.Variable),
+
+ # parentheses
+ (r'\(', Punctuation, 'body'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ }
+
+
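The get_tokens_unprocessed() override in CommonLispLexer above is what upgrades plain Name.Variable matches into builtins, special forms, keywords and types by looking them up in the tables imported from _cl_builtins. A minimal sketch (the sample form is illustrative) that prints only the reclassified tokens:

from pygments.lexers.lisp import CommonLispLexer
from pygments.token import Keyword, Name

# get_tokens_unprocessed() yields (index, tokentype, value) triples; after the
# override, names found in the builtin tables arrive as Name.Builtin, Keyword,
# Keyword.Type and so on rather than Name.Variable.
cl_src = "(defun add-one (x) (+ x 1))"
for _, ttype, value in CommonLispLexer().get_tokens_unprocessed(cl_src):
    if ttype in Keyword or ttype in Name.Builtin:
        print(ttype, value)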
+class HyLexer(RegexLexer):
+ """
+ Lexer for `Hy <http://hylang.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Hy'
+ aliases = ['hylang']
+ filenames = ['*.hy']
+ mimetypes = ['text/x-hy', 'application/x-hy']
+
+ special_forms = (
+ 'cond', 'for', '->', '->>', 'car',
+ 'cdr', 'first', 'rest', 'let', 'when', 'unless',
+ 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
+ ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
+ 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
+ 'foreach', 'while',
+ 'eval-and-compile', 'eval-when-compile'
+ )
+
+ declarations = (
+ 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
+ )
+
+ hy_builtins = ()
+
+ hy_core = (
+ 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
+ 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
+ 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
+ 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
+ )
+
+ builtins = hy_builtins + hy_core
+
+ # valid names for identifiers
+    # well, names just can't consist entirely of numbers
+ # but this should be good enough for now
valid_name = r'(?!#)[\w!$%*+<=>?/.#-:]+'
-
- def _multi_escape(entries):
- return words(entries, suffix=' ')
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'[,\s]+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
- (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
-
- # keywords
- (r'::?' + valid_name, String.Symbol),
-
- # special operators
- (r'~@|[`\'#^~&@]', Operator),
-
- include('py-keywords'),
- include('py-builtins'),
-
- # highlight the special forms
- (_multi_escape(special_forms), Keyword),
-
- # Technically, only the special forms are 'keywords'. The problem
- # is that only treating them as keywords means that things like
- # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
- # and weird for most styles. So, as a compromise we're going to
- # highlight them as Keyword.Declarations.
- (_multi_escape(declarations), Keyword.Declaration),
-
- # highlight the builtins
- (_multi_escape(builtins), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Hy accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Hy accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
-
- ],
- 'py-keywords': PythonLexer.tokens['keywords'],
- 'py-builtins': PythonLexer.tokens['builtins'],
- }
-
- def analyse_text(text):
- if '(import ' in text or '(defn ' in text:
- return 0.9
-
-
-class RacketLexer(RegexLexer):
- """
- Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
- known as PLT Scheme).
-
- .. versionadded:: 1.6
- """
-
- name = 'Racket'
- aliases = ['racket', 'rkt']
- filenames = ['*.rkt', '*.rktd', '*.rktl']
- mimetypes = ['text/x-racket', 'application/x-racket']
-
- # Generated by example.rkt
- _keywords = (
+
+ def _multi_escape(entries):
+ return words(entries, suffix=' ')
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'[,\s]+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"\\(.|[a-z]+)", String.Char),
+ (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+
+ # keywords
+ (r'::?' + valid_name, String.Symbol),
+
+ # special operators
+ (r'~@|[`\'#^~&@]', Operator),
+
+ include('py-keywords'),
+ include('py-builtins'),
+
+ # highlight the special forms
+ (_multi_escape(special_forms), Keyword),
+
+ # Technically, only the special forms are 'keywords'. The problem
+ # is that only treating them as keywords means that things like
+ # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
+ # and weird for most styles. So, as a compromise we're going to
+ # highlight them as Keyword.Declarations.
+ (_multi_escape(declarations), Keyword.Declaration),
+
+ # highlight the builtins
+ (_multi_escape(builtins), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Hy accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Hy accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+
+ ],
+ 'py-keywords': PythonLexer.tokens['keywords'],
+ 'py-builtins': PythonLexer.tokens['builtins'],
+ }
+
+ def analyse_text(text):
+ if '(import ' in text or '(defn ' in text:
+ return 0.9
+
+
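One detail worth noting in HyLexer above: entire state lists are borrowed from PythonLexer via 'py-keywords': PythonLexer.tokens['keywords']. The same trick works in any RegexLexer; a toy sketch follows (the class name and sample string are purely illustrative, not part of Pygments):

from pygments.lexer import RegexLexer, include
from pygments.lexers.python import PythonLexer
from pygments.token import Text

class TinyHyLikeLexer(RegexLexer):
    """Toy lexer reusing PythonLexer's keyword rules, as HyLexer does above."""
    name = 'TinyHyLike'
    tokens = {
        'root': [
            (r'\s+', Text),
            include('py-keywords'),   # defer to the borrowed state below
            (r'\S+', Text),           # everything else is plain text here
        ],
        # a whole state list taken verbatim from another lexer's definition
        'py-keywords': PythonLexer.tokens['keywords'],
    }

for ttype, value in TinyHyLikeLexer().get_tokens('if foo else bar'):
    print(ttype, repr(value))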
+class RacketLexer(RegexLexer):
+ """
+ Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
+ known as PLT Scheme).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Racket'
+ aliases = ['racket', 'rkt']
+ filenames = ['*.rkt', '*.rktd', '*.rktl']
+ mimetypes = ['text/x-racket', 'application/x-racket']
+
+ # Generated by example.rkt
+ _keywords = (
u'#%app', u'#%datum', u'#%declare', u'#%expression', u'#%module-begin',
u'#%plain-app', u'#%plain-lambda', u'#%plain-module-begin',
u'#%printing-module-begin', u'#%provide', u'#%require',
@@ -564,10 +564,10 @@ class RacketLexer(RegexLexer):
u'when', u'with-continuation-mark', u'with-contract',
u'with-contract-continuation-mark', u'with-handlers', u'with-handlers*',
u'with-method', u'with-syntax', u'λ'
- )
-
- # Generated by example.rkt
- _builtins = (
+ )
+
+ # Generated by example.rkt
+ _builtins = (
u'*', u'*list/c', u'+', u'-', u'/', u'<', u'</c', u'<=', u'<=/c', u'=', u'=/c',
u'>', u'>/c', u'>=', u'>=/c', u'abort-current-continuation', u'abs',
u'absolute-path?', u'acos', u'add-between', u'add1', u'alarm-evt',
@@ -1244,959 +1244,959 @@ class RacketLexer(RegexLexer):
u'write-special-avail*', u'write-special-evt', u'write-string',
u'write-to-file', u'writeln', u'xor', u'zero?', u'~.a', u'~.s', u'~.v', u'~a',
u'~e', u'~r', u'~s', u'~v'
- )
-
- _opening_parenthesis = r'[([{]'
- _closing_parenthesis = r'[)\]}]'
- _delimiters = r'()[\]{}",\'`;\s'
+ )
+
+ _opening_parenthesis = r'[([{]'
+ _closing_parenthesis = r'[)\]}]'
+ _delimiters = r'()[\]{}",\'`;\s'
_symbol = r'(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
- _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
- _exponent = r'(?:[defls][-+]?\d+)'
- _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
- _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
- r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
- _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
- _exponent)
- _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
- _inexact_special = r'(?:(?:inf|nan)\.[0f])'
- _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
- _inexact_special)
- _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
-
- tokens = {
- 'root': [
- (_closing_parenthesis, Error),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'datum': [
- (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
- (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single),
- (r'#\|', Comment.Multiline, 'block-comment'),
-
- # Whitespaces
- (r'(?u)\s+', Text),
-
- # Numbers: Keep in mind Racket reader hash prefixes, which
- # can denote the base or the type. These don't map neatly
- # onto Pygments token types; some judgment calls here.
-
- # #d or no prefix
- (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
- Number.Integer, '#pop'),
- (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
- (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
- (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
- (_exact_decimal_prefix, _inexact_normal_no_hashes,
- _inexact_normal_no_hashes, _inexact_normal_no_hashes,
- _delimiters), Number, '#pop'),
-
- # Inexact without explicit #i
- (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
- (_inexact_real, _inexact_unsigned, _inexact_unsigned,
- _inexact_real, _inexact_real, _delimiters), Number.Float,
- '#pop'),
-
- # The remaining extflonums
- (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
- (_inexact_simple, _delimiters), Number.Float, '#pop'),
-
- # #b
+ _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
+ _exponent = r'(?:[defls][-+]?\d+)'
+ _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
+ _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
+ r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
+ _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
+ _exponent)
+ _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
+ _inexact_special = r'(?:(?:inf|nan)\.[0f])'
+ _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
+ _inexact_special)
+ _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
+
+ tokens = {
+ 'root': [
+ (_closing_parenthesis, Error),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'datum': [
+ (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
+ (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single),
+ (r'#\|', Comment.Multiline, 'block-comment'),
+
+ # Whitespaces
+ (r'(?u)\s+', Text),
+
+ # Numbers: Keep in mind Racket reader hash prefixes, which
+ # can denote the base or the type. These don't map neatly
+ # onto Pygments token types; some judgment calls here.
+
+ # #d or no prefix
+ (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
+ Number.Integer, '#pop'),
+ (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
+ (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
+ (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
+ (_exact_decimal_prefix, _inexact_normal_no_hashes,
+ _inexact_normal_no_hashes, _inexact_normal_no_hashes,
+ _delimiters), Number, '#pop'),
+
+ # Inexact without explicit #i
+ (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
+ (_inexact_real, _inexact_unsigned, _inexact_unsigned,
+ _inexact_real, _inexact_real, _delimiters), Number.Float,
+ '#pop'),
+
+ # The remaining extflonums
+ (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
+ (_inexact_simple, _delimiters), Number.Float, '#pop'),
+
+ # #b
(r'(?iu)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
-
- # #o
+
+ # #o
(r'(?iu)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
-
- # #x
+
+ # #x
(r'(?iu)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
-
- # #i is always inexact, i.e. float
+
+ # #i is always inexact, i.e. float
(r'(?iu)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
-
- # Strings and characters
- (r'#?"', String.Double, ('#pop', 'string')),
- (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
- (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
- (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
- (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
-
- # Constants
- (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
-
- # Keyword argument names (e.g. #:keyword)
+
+ # Strings and characters
+ (r'#?"', String.Double, ('#pop', 'string')),
+ (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
+ (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
+ (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
+ (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
+
+ # Constants
+ (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
+
+ # Keyword argument names (e.g. #:keyword)
(r'(?u)#:%s' % _symbol, Keyword.Declaration, '#pop'),
-
- # Reader extensions
- (r'(#lang |#!)(\S+)',
- bygroups(Keyword.Namespace, Name.Namespace)),
- (r'#reader', Keyword.Namespace, 'quoted-datum'),
-
- # Other syntax
- (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
- (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
- Operator, ('#pop', 'quoted-datum'))
- ],
- 'datum*': [
- (r'`|,@?', Operator),
- (_symbol, String.Symbol, '#pop'),
- (r'[|\\]', Error),
- default('#pop')
- ],
- 'list': [
- (_closing_parenthesis, Punctuation, '#pop')
- ],
- 'unquoted-datum': [
- include('datum'),
- (r'quote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quoted-datum')),
- (r'`', Operator, ('#pop', 'quasiquoted-datum')),
- (r'quasiquote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quasiquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
- (words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
- Keyword, '#pop'),
- (words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
- Name.Builtin, '#pop'),
- (_symbol, Name, '#pop'),
- include('datum*')
- ],
- 'unquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'quasiquoted-datum': [
- include('datum'),
- (r',@?', Operator, ('#pop', 'unquoted-datum')),
- (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'unquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
- include('datum*')
- ],
- 'quasiquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quasiquoted-datum')
- ],
- 'quoted-datum': [
- include('datum'),
- (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
- include('datum*')
- ],
- 'quoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quoted-datum')
- ],
- 'block-comment': [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^#|]+|.', Comment.Multiline)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
- r'U[\da-fA-F]{1,8}|.)', String.Escape),
- (r'[^\\"]+', String.Double)
- ]
- }
-
-
-class NewLispLexer(RegexLexer):
- """
+
+ # Reader extensions
+ (r'(#lang |#!)(\S+)',
+ bygroups(Keyword.Namespace, Name.Namespace)),
+ (r'#reader', Keyword.Namespace, 'quoted-datum'),
+
+ # Other syntax
+ (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
+ (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
+ Operator, ('#pop', 'quoted-datum'))
+ ],
+ 'datum*': [
+ (r'`|,@?', Operator),
+ (_symbol, String.Symbol, '#pop'),
+ (r'[|\\]', Error),
+ default('#pop')
+ ],
+ 'list': [
+ (_closing_parenthesis, Punctuation, '#pop')
+ ],
+ 'unquoted-datum': [
+ include('datum'),
+ (r'quote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quoted-datum')),
+ (r'`', Operator, ('#pop', 'quasiquoted-datum')),
+ (r'quasiquote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quasiquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
+ (words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Keyword, '#pop'),
+ (words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Name.Builtin, '#pop'),
+ (_symbol, Name, '#pop'),
+ include('datum*')
+ ],
+ 'unquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'quasiquoted-datum': [
+ include('datum'),
+ (r',@?', Operator, ('#pop', 'unquoted-datum')),
+ (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'unquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
+ include('datum*')
+ ],
+ 'quasiquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quasiquoted-datum')
+ ],
+ 'quoted-datum': [
+ include('datum'),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
+ include('datum*')
+ ],
+ 'quoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quoted-datum')
+ ],
+ 'block-comment': [
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^#|]+|.', Comment.Multiline)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
+ r'U[\da-fA-F]{1,8}|.)', String.Escape),
+ (r'[^\\"]+', String.Double)
+ ]
+ }
+
+
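The number rules in the tokens table above encode the Racket reader's hash prefixes (#e/#i exactness, #b/#o/#x/#d base) as different Number.* token types, as the comment before them notes. A minimal sketch of how that plays out, assuming this class is the RacketLexer that pygments.lexers exports and using only the public get_tokens() API (exact token types for each literal are whatever the rules above decide, so the sketch just prints them):

    from pygments.lexers import RacketLexer

    lexer = RacketLexer()
    for literal in ('42', '#x1F', '#b1010', '#e1/3', '1.5e3', '+inf.0'):
        # get_tokens() yields (token_type, text) pairs for the whole input
        print(literal, list(lexer.get_tokens(literal)))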
+class NewLispLexer(RegexLexer):
+ """
    For `newLISP <http://www.newlisp.org/>`_ source code (version 10.3.0).
-
- .. versionadded:: 1.5
- """
-
- name = 'NewLisp'
- aliases = ['newlisp']
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'NewLisp'
+ aliases = ['newlisp']
filenames = ['*.lsp', '*.nl', '*.kif']
- mimetypes = ['text/x-newlisp', 'application/x-newlisp']
-
- flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
-
- # list of built-in functions for newLISP version 10.3
- builtins = (
- '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
- '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
- '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
- '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
- 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'append-file',
- 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
- 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
- 'base64-enc', 'bayes-query', 'bayes-train', 'begin',
- 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback',
- 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
- 'close', 'command-event', 'cond', 'cons', 'constant',
- 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
- 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
- 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
- 'def-new', 'default', 'define-macro', 'define',
- 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
- 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
- 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
- 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
- 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
- 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
- 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
- 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
- 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
- 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
- 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
- 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
- 'last', 'legal?', 'length', 'let', 'letex', 'letn',
- 'list?', 'list', 'load', 'local', 'log', 'lookup',
- 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
- 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
- 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
- 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
- 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
- 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
- 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
- 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
- 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
- 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
- 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
- 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
- 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
- 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
- 'read-key', 'read-line', 'read-utf8', 'reader-event',
- 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
- 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
- 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
- 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
- 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
- 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
- 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
- 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
- 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
- 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
- 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
- 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
- 'write', 'write-char', 'write-file', 'write-line',
- 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
- )
-
- # valid names
- valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
-
- tokens = {
- 'root': [
- # shebang
- (r'#!(.*?)$', Comment.Preproc),
- # comments starting with semicolon
- (r';.*$', Comment.Single),
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # whitespace
- (r'\s+', Text),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
-
- # braces
- (r'\{', String, "bracestring"),
-
- # [text] ... [/text] delimited strings
- (r'\[text\]*', String, "tagstring"),
-
- # 'special' operators...
- (r"('|:)", Operator),
-
- # highlight the builtins
- (words(builtins, suffix=r'\b'),
- Keyword),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Variable),
-
- # the remaining variables
- (valid_name, String.Symbol),
-
- # parentheses
- (r'(\(|\))', Punctuation),
- ],
-
- # braced strings...
- 'bracestring': [
- (r'\{', String, "#push"),
- (r'\}', String, "#pop"),
- ('[^{}]+', String),
- ],
-
- # tagged [text]...[/text] delimited strings...
- 'tagstring': [
- (r'(?s)(.*?)(\[/text\])', String, '#pop'),
- ],
- }
-
-
-class EmacsLispLexer(RegexLexer):
- """
- An ELisp lexer, parsing a stream and outputting the tokens
- needed to highlight elisp code.
-
- .. versionadded:: 2.1
- """
- name = 'EmacsLisp'
+ mimetypes = ['text/x-newlisp', 'application/x-newlisp']
+
+ flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
+
+ # list of built-in functions for newLISP version 10.3
+ builtins = (
+ '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
+ '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
+ '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
+ '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
+ 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'append-file',
+ 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
+ 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
+ 'base64-enc', 'bayes-query', 'bayes-train', 'begin',
+ 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback',
+ 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
+ 'close', 'command-event', 'cond', 'cons', 'constant',
+ 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
+ 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
+ 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
+ 'def-new', 'default', 'define-macro', 'define',
+ 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
+ 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
+ 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
+ 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
+ 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
+ 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
+ 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
+ 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
+ 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
+ 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
+ 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
+ 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
+ 'last', 'legal?', 'length', 'let', 'letex', 'letn',
+ 'list?', 'list', 'load', 'local', 'log', 'lookup',
+ 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
+ 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
+ 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
+ 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
+ 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
+ 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
+ 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
+ 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
+ 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
+ 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
+ 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
+ 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
+ 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
+ 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
+ 'read-key', 'read-line', 'read-utf8', 'reader-event',
+ 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
+ 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
+ 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
+ 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
+ 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
+ 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
+ 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
+ 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
+ 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
+ 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
+ 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
+ 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
+ 'write', 'write-char', 'write-file', 'write-line',
+ 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
+ )
+
+ # valid names
+ valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
+
+ tokens = {
+ 'root': [
+ # shebang
+ (r'#!(.*?)$', Comment.Preproc),
+ # comments starting with semicolon
+ (r';.*$', Comment.Single),
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+
+ # braces
+ (r'\{', String, "bracestring"),
+
+ # [text] ... [/text] delimited strings
+ (r'\[text\]*', String, "tagstring"),
+
+ # 'special' operators...
+ (r"('|:)", Operator),
+
+ # highlight the builtins
+ (words(builtins, suffix=r'\b'),
+ Keyword),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Variable),
+
+ # the remaining variables
+ (valid_name, String.Symbol),
+
+ # parentheses
+ (r'(\(|\))', Punctuation),
+ ],
+
+ # braced strings...
+ 'bracestring': [
+ (r'\{', String, "#push"),
+ (r'\}', String, "#pop"),
+ ('[^{}]+', String),
+ ],
+
+ # tagged [text]...[/text] delimited strings...
+ 'tagstring': [
+ (r'(?s)(.*?)(\[/text\])', String, '#pop'),
+ ],
+ }
+
+
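For reference, a small usage sketch for the lexer above; it assumes the class is reachable as pygments.lexers.NewLispLexer and goes through the standard highlight() entry point. The '{' rule enters the 'bracestring' state and '#push'/'#pop' balance nested braces, so the whole braced literal below stays inside one String token run:

    from pygments import highlight
    from pygments.lexers import NewLispLexer
    from pygments.formatters import TerminalFormatter

    # newLISP braced string with a nested {} pair
    code = '(println {a {nested} brace string})'
    print(highlight(code, NewLispLexer(), TerminalFormatter()))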
+class EmacsLispLexer(RegexLexer):
+ """
+ An ELisp lexer, parsing a stream and outputting the tokens
+ needed to highlight elisp code.
+
+ .. versionadded:: 2.1
+ """
+ name = 'EmacsLisp'
aliases = ['emacs', 'elisp', 'emacs-lisp']
- filenames = ['*.el']
- mimetypes = ['text/x-elisp', 'application/x-elisp']
-
- flags = re.MULTILINE
-
- # couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\]\'\n,;`])' # whitespace or terminating macro characters
-
- # symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
-
+ filenames = ['*.el']
+ mimetypes = ['text/x-elisp', 'application/x-elisp']
+
+ flags = re.MULTILINE
+
+ # couple of useful regexes
+
+ # characters that are not macro-characters and can be used to begin a symbol
+ nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
+ constituent = nonmacro + '|[#.:]'
+ terminated = r'(?=[ "()\]\'\n,;`])' # whitespace or terminating macro characters
+
+ # symbol token, reverse-engineered from hyperspec
+ # Take a deep breath...
+ symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
+
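To see what that 'symbol' pattern accepts, here is a standalone sketch that rebuilds the same regex (the names nonmacro, constituent and symbol mirror the definitions above verbatim) and matches a few Emacs Lisp identifiers; the last one exercises the '\\.' alternative that lets a backslash escape an otherwise terminating character:

    import re

    nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
    constituent = nonmacro + '|[#.:]'
    symbol = re.compile(r'((?:%s)(?:%s)*)' % (nonmacro, constituent))

    for name in ('save-excursion', '1+', 'string-to-number', 'foo\\ bar'):
        m = symbol.match(name)
        # prints the longest leading run the pattern treats as one symbol
        print(name, '->', m.group(1) if m else None)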
macros = {
- 'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
- 'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
- 'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
- 'cl-defsubst', 'cl-deftype', 'cl-defun', 'cl-destructuring-bind',
- 'cl-do', 'cl-do*', 'cl-do-all-symbols', 'cl-do-symbols', 'cl-dolist',
- 'cl-dotimes', 'cl-ecase', 'cl-etypecase', 'eval-when', 'cl-eval-when', 'cl-flet',
- 'cl-flet*', 'cl-function', 'cl-incf', 'cl-labels', 'cl-letf',
- 'cl-letf*', 'cl-load-time-value', 'cl-locally', 'cl-loop',
- 'cl-macrolet', 'cl-multiple-value-bind', 'cl-multiple-value-setq',
- 'cl-progv', 'cl-psetf', 'cl-psetq', 'cl-pushnew', 'cl-remf',
- 'cl-return', 'cl-return-from', 'cl-rotatef', 'cl-shiftf',
- 'cl-symbol-macrolet', 'cl-tagbody', 'cl-the', 'cl-typecase',
- 'combine-after-change-calls', 'condition-case-unless-debug', 'decf',
- 'declaim', 'declare', 'declare-function', 'def-edebug-spec',
- 'defadvice', 'defclass', 'defcustom', 'defface', 'defgeneric',
- 'defgroup', 'define-advice', 'define-alternatives',
- 'define-compiler-macro', 'define-derived-mode', 'define-generic-mode',
- 'define-global-minor-mode', 'define-globalized-minor-mode',
- 'define-minor-mode', 'define-modify-macro',
- 'define-obsolete-face-alias', 'define-obsolete-function-alias',
- 'define-obsolete-variable-alias', 'define-setf-expander',
- 'define-skeleton', 'defmacro', 'defmethod', 'defsetf', 'defstruct',
- 'defsubst', 'deftheme', 'deftype', 'defun', 'defvar-local',
- 'delay-mode-hooks', 'destructuring-bind', 'do', 'do*',
- 'do-all-symbols', 'do-symbols', 'dolist', 'dont-compile', 'dotimes',
- 'dotimes-with-progress-reporter', 'ecase', 'ert-deftest', 'etypecase',
- 'eval-and-compile', 'eval-when-compile', 'flet', 'ignore-errors',
- 'incf', 'labels', 'lambda', 'letrec', 'lexical-let', 'lexical-let*',
- 'loop', 'multiple-value-bind', 'multiple-value-setq', 'noreturn',
- 'oref', 'oref-default', 'oset', 'oset-default', 'pcase',
- 'pcase-defmacro', 'pcase-dolist', 'pcase-exhaustive', 'pcase-let',
- 'pcase-let*', 'pop', 'psetf', 'psetq', 'push', 'pushnew', 'remf',
- 'return', 'rotatef', 'rx', 'save-match-data', 'save-selected-window',
- 'save-window-excursion', 'setf', 'setq-local', 'shiftf',
- 'track-mouse', 'typecase', 'unless', 'use-package', 'when',
- 'while-no-input', 'with-case-table', 'with-category-table',
- 'with-coding-priority', 'with-current-buffer', 'with-demoted-errors',
- 'with-eval-after-load', 'with-file-modes', 'with-local-quit',
- 'with-output-to-string', 'with-output-to-temp-buffer',
- 'with-parsed-tramp-file-name', 'with-selected-frame',
- 'with-selected-window', 'with-silent-modifications', 'with-slots',
- 'with-syntax-table', 'with-temp-buffer', 'with-temp-file',
- 'with-temp-message', 'with-timeout', 'with-tramp-connection-property',
- 'with-tramp-file-property', 'with-tramp-progress-reporter',
- 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
- 'return-from',
+ 'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
+ 'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
+ 'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
+ 'cl-defsubst', 'cl-deftype', 'cl-defun', 'cl-destructuring-bind',
+ 'cl-do', 'cl-do*', 'cl-do-all-symbols', 'cl-do-symbols', 'cl-dolist',
+ 'cl-dotimes', 'cl-ecase', 'cl-etypecase', 'eval-when', 'cl-eval-when', 'cl-flet',
+ 'cl-flet*', 'cl-function', 'cl-incf', 'cl-labels', 'cl-letf',
+ 'cl-letf*', 'cl-load-time-value', 'cl-locally', 'cl-loop',
+ 'cl-macrolet', 'cl-multiple-value-bind', 'cl-multiple-value-setq',
+ 'cl-progv', 'cl-psetf', 'cl-psetq', 'cl-pushnew', 'cl-remf',
+ 'cl-return', 'cl-return-from', 'cl-rotatef', 'cl-shiftf',
+ 'cl-symbol-macrolet', 'cl-tagbody', 'cl-the', 'cl-typecase',
+ 'combine-after-change-calls', 'condition-case-unless-debug', 'decf',
+ 'declaim', 'declare', 'declare-function', 'def-edebug-spec',
+ 'defadvice', 'defclass', 'defcustom', 'defface', 'defgeneric',
+ 'defgroup', 'define-advice', 'define-alternatives',
+ 'define-compiler-macro', 'define-derived-mode', 'define-generic-mode',
+ 'define-global-minor-mode', 'define-globalized-minor-mode',
+ 'define-minor-mode', 'define-modify-macro',
+ 'define-obsolete-face-alias', 'define-obsolete-function-alias',
+ 'define-obsolete-variable-alias', 'define-setf-expander',
+ 'define-skeleton', 'defmacro', 'defmethod', 'defsetf', 'defstruct',
+ 'defsubst', 'deftheme', 'deftype', 'defun', 'defvar-local',
+ 'delay-mode-hooks', 'destructuring-bind', 'do', 'do*',
+ 'do-all-symbols', 'do-symbols', 'dolist', 'dont-compile', 'dotimes',
+ 'dotimes-with-progress-reporter', 'ecase', 'ert-deftest', 'etypecase',
+ 'eval-and-compile', 'eval-when-compile', 'flet', 'ignore-errors',
+ 'incf', 'labels', 'lambda', 'letrec', 'lexical-let', 'lexical-let*',
+ 'loop', 'multiple-value-bind', 'multiple-value-setq', 'noreturn',
+ 'oref', 'oref-default', 'oset', 'oset-default', 'pcase',
+ 'pcase-defmacro', 'pcase-dolist', 'pcase-exhaustive', 'pcase-let',
+ 'pcase-let*', 'pop', 'psetf', 'psetq', 'push', 'pushnew', 'remf',
+ 'return', 'rotatef', 'rx', 'save-match-data', 'save-selected-window',
+ 'save-window-excursion', 'setf', 'setq-local', 'shiftf',
+ 'track-mouse', 'typecase', 'unless', 'use-package', 'when',
+ 'while-no-input', 'with-case-table', 'with-category-table',
+ 'with-coding-priority', 'with-current-buffer', 'with-demoted-errors',
+ 'with-eval-after-load', 'with-file-modes', 'with-local-quit',
+ 'with-output-to-string', 'with-output-to-temp-buffer',
+ 'with-parsed-tramp-file-name', 'with-selected-frame',
+ 'with-selected-window', 'with-silent-modifications', 'with-slots',
+ 'with-syntax-table', 'with-temp-buffer', 'with-temp-file',
+ 'with-temp-message', 'with-timeout', 'with-tramp-connection-property',
+ 'with-tramp-file-property', 'with-tramp-progress-reporter',
+ 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
+ 'return-from',
}
-
+
special_forms = {
- 'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
- 'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
- 'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
- 'save-restriction', 'setq', 'setq-default', 'subr-arity',
- 'unwind-protect', 'while',
+ 'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
+ 'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
+ 'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
+ 'save-restriction', 'setq', 'setq-default', 'subr-arity',
+ 'unwind-protect', 'while',
}
-
+
builtin_function = {
- '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
- 'Snarf-documentation', 'abort-recursive-edit', 'abs',
- 'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
- 'active-minibuffer-window', 'add-face-text-property',
- 'add-name-to-file', 'add-text-properties', 'all-completions',
- 'append', 'apply', 'apropos-internal', 'aref', 'arrayp', 'aset',
- 'ash', 'asin', 'assoc', 'assoc-string', 'assq', 'atan', 'atom',
- 'autoload', 'autoload-do-load', 'backtrace', 'backtrace--locals',
- 'backtrace-debug', 'backtrace-eval', 'backtrace-frame',
- 'backward-char', 'backward-prefix-chars', 'barf-if-buffer-read-only',
- 'base64-decode-region', 'base64-decode-string',
- 'base64-encode-region', 'base64-encode-string', 'beginning-of-line',
- 'bidi-find-overridden-directionality', 'bidi-resolved-levels',
- 'bitmap-spec-p', 'bobp', 'bolp', 'bool-vector',
- 'bool-vector-count-consecutive', 'bool-vector-count-population',
- 'bool-vector-exclusive-or', 'bool-vector-intersection',
- 'bool-vector-not', 'bool-vector-p', 'bool-vector-set-difference',
- 'bool-vector-subsetp', 'bool-vector-union', 'boundp',
- 'buffer-base-buffer', 'buffer-chars-modified-tick',
- 'buffer-enable-undo', 'buffer-file-name', 'buffer-has-markers-at',
- 'buffer-list', 'buffer-live-p', 'buffer-local-value',
- 'buffer-local-variables', 'buffer-modified-p', 'buffer-modified-tick',
- 'buffer-name', 'buffer-size', 'buffer-string', 'buffer-substring',
- 'buffer-substring-no-properties', 'buffer-swap-text', 'bufferp',
- 'bury-buffer-internal', 'byte-code', 'byte-code-function-p',
- 'byte-to-position', 'byte-to-string', 'byteorder',
- 'call-interactively', 'call-last-kbd-macro', 'call-process',
- 'call-process-region', 'cancel-kbd-macro-events', 'capitalize',
- 'capitalize-region', 'capitalize-word', 'car', 'car-less-than-car',
- 'car-safe', 'case-table-p', 'category-docstring',
- 'category-set-mnemonics', 'category-table', 'category-table-p',
- 'ccl-execute', 'ccl-execute-on-string', 'ccl-program-p', 'cdr',
- 'cdr-safe', 'ceiling', 'char-after', 'char-before',
- 'char-category-set', 'char-charset', 'char-equal', 'char-or-string-p',
- 'char-resolve-modifiers', 'char-syntax', 'char-table-extra-slot',
- 'char-table-p', 'char-table-parent', 'char-table-range',
- 'char-table-subtype', 'char-to-string', 'char-width', 'characterp',
- 'charset-after', 'charset-id-internal', 'charset-plist',
- 'charset-priority-list', 'charsetp', 'check-coding-system',
- 'check-coding-systems-region', 'clear-buffer-auto-save-failure',
- 'clear-charset-maps', 'clear-face-cache', 'clear-font-cache',
- 'clear-image-cache', 'clear-string', 'clear-this-command-keys',
- 'close-font', 'clrhash', 'coding-system-aliases',
- 'coding-system-base', 'coding-system-eol-type', 'coding-system-p',
- 'coding-system-plist', 'coding-system-priority-list',
- 'coding-system-put', 'color-distance', 'color-gray-p',
- 'color-supported-p', 'combine-after-change-execute',
- 'command-error-default-function', 'command-remapping', 'commandp',
- 'compare-buffer-substrings', 'compare-strings',
- 'compare-window-configurations', 'completing-read',
- 'compose-region-internal', 'compose-string-internal',
- 'composition-get-gstring', 'compute-motion', 'concat', 'cons',
- 'consp', 'constrain-to-field', 'continue-process',
- 'controlling-tty-p', 'coordinates-in-window-p', 'copy-alist',
- 'copy-category-table', 'copy-file', 'copy-hash-table', 'copy-keymap',
- 'copy-marker', 'copy-sequence', 'copy-syntax-table', 'copysign',
- 'cos', 'current-active-maps', 'current-bidi-paragraph-direction',
- 'current-buffer', 'current-case-table', 'current-column',
- 'current-global-map', 'current-idle-time', 'current-indentation',
- 'current-input-mode', 'current-local-map', 'current-message',
- 'current-minor-mode-maps', 'current-time', 'current-time-string',
- 'current-time-zone', 'current-window-configuration',
- 'cygwin-convert-file-name-from-windows',
- 'cygwin-convert-file-name-to-windows', 'daemon-initialized',
- 'daemonp', 'dbus--init-bus', 'dbus-get-unique-name',
- 'dbus-message-internal', 'debug-timer-check', 'declare-equiv-charset',
- 'decode-big5-char', 'decode-char', 'decode-coding-region',
- 'decode-coding-string', 'decode-sjis-char', 'decode-time',
- 'default-boundp', 'default-file-modes', 'default-printer-name',
- 'default-toplevel-value', 'default-value', 'define-category',
- 'define-charset-alias', 'define-charset-internal',
- 'define-coding-system-alias', 'define-coding-system-internal',
- 'define-fringe-bitmap', 'define-hash-table-test', 'define-key',
- 'define-prefix-command', 'delete',
- 'delete-all-overlays', 'delete-and-extract-region', 'delete-char',
- 'delete-directory-internal', 'delete-field', 'delete-file',
- 'delete-frame', 'delete-other-windows-internal', 'delete-overlay',
- 'delete-process', 'delete-region', 'delete-terminal',
- 'delete-window-internal', 'delq', 'describe-buffer-bindings',
- 'describe-vector', 'destroy-fringe-bitmap', 'detect-coding-region',
- 'detect-coding-string', 'ding', 'directory-file-name',
- 'directory-files', 'directory-files-and-attributes', 'discard-input',
- 'display-supports-face-attributes-p', 'do-auto-save', 'documentation',
- 'documentation-property', 'downcase', 'downcase-region',
- 'downcase-word', 'draw-string', 'dump-colors', 'dump-emacs',
- 'dump-face', 'dump-frame-glyph-matrix', 'dump-glyph-matrix',
- 'dump-glyph-row', 'dump-redisplay-history', 'dump-tool-bar-row',
- 'elt', 'emacs-pid', 'encode-big5-char', 'encode-char',
- 'encode-coding-region', 'encode-coding-string', 'encode-sjis-char',
- 'encode-time', 'end-kbd-macro', 'end-of-line', 'eobp', 'eolp', 'eq',
- 'eql', 'equal', 'equal-including-properties', 'erase-buffer',
- 'error-message-string', 'eval', 'eval-buffer', 'eval-region',
- 'event-convert-list', 'execute-kbd-macro', 'exit-recursive-edit',
- 'exp', 'expand-file-name', 'expt', 'external-debugging-output',
- 'face-attribute-relative-p', 'face-attributes-as-vector', 'face-font',
- 'fboundp', 'fceiling', 'fetch-bytecode', 'ffloor',
- 'field-beginning', 'field-end', 'field-string',
- 'field-string-no-properties', 'file-accessible-directory-p',
- 'file-acl', 'file-attributes', 'file-attributes-lessp',
- 'file-directory-p', 'file-executable-p', 'file-exists-p',
- 'file-locked-p', 'file-modes', 'file-name-absolute-p',
- 'file-name-all-completions', 'file-name-as-directory',
- 'file-name-completion', 'file-name-directory',
- 'file-name-nondirectory', 'file-newer-than-file-p', 'file-readable-p',
- 'file-regular-p', 'file-selinux-context', 'file-symlink-p',
- 'file-system-info', 'file-system-info', 'file-writable-p',
- 'fillarray', 'find-charset-region', 'find-charset-string',
- 'find-coding-systems-region-internal', 'find-composition-internal',
- 'find-file-name-handler', 'find-font', 'find-operation-coding-system',
- 'float', 'float-time', 'floatp', 'floor', 'fmakunbound',
- 'following-char', 'font-at', 'font-drive-otf', 'font-face-attributes',
- 'font-family-list', 'font-get', 'font-get-glyphs',
- 'font-get-system-font', 'font-get-system-normal-font', 'font-info',
- 'font-match-p', 'font-otf-alternates', 'font-put',
- 'font-shape-gstring', 'font-spec', 'font-variation-glyphs',
- 'font-xlfd-name', 'fontp', 'fontset-font', 'fontset-info',
- 'fontset-list', 'fontset-list-all', 'force-mode-line-update',
- 'force-window-update', 'format', 'format-mode-line',
- 'format-network-address', 'format-time-string', 'forward-char',
- 'forward-comment', 'forward-line', 'forward-word',
- 'frame-border-width', 'frame-bottom-divider-width',
- 'frame-can-run-window-configuration-change-hook', 'frame-char-height',
- 'frame-char-width', 'frame-face-alist', 'frame-first-window',
- 'frame-focus', 'frame-font-cache', 'frame-fringe-width', 'frame-list',
- 'frame-live-p', 'frame-or-buffer-changed-p', 'frame-parameter',
- 'frame-parameters', 'frame-pixel-height', 'frame-pixel-width',
- 'frame-pointer-visible-p', 'frame-right-divider-width',
- 'frame-root-window', 'frame-scroll-bar-height',
- 'frame-scroll-bar-width', 'frame-selected-window', 'frame-terminal',
- 'frame-text-cols', 'frame-text-height', 'frame-text-lines',
- 'frame-text-width', 'frame-total-cols', 'frame-total-lines',
- 'frame-visible-p', 'framep', 'frexp', 'fringe-bitmaps-at-pos',
- 'fround', 'fset', 'ftruncate', 'funcall', 'funcall-interactively',
- 'function-equal', 'functionp', 'gap-position', 'gap-size',
- 'garbage-collect', 'gc-status', 'generate-new-buffer-name', 'get',
- 'get-buffer', 'get-buffer-create', 'get-buffer-process',
- 'get-buffer-window', 'get-byte', 'get-char-property',
- 'get-char-property-and-overlay', 'get-file-buffer', 'get-file-char',
- 'get-internal-run-time', 'get-load-suffixes', 'get-pos-property',
- 'get-process', 'get-screen-color', 'get-text-property',
- 'get-unicode-property-internal', 'get-unused-category',
- 'get-unused-iso-final-char', 'getenv-internal', 'gethash',
- 'gfile-add-watch', 'gfile-rm-watch', 'global-key-binding',
- 'gnutls-available-p', 'gnutls-boot', 'gnutls-bye', 'gnutls-deinit',
- 'gnutls-error-fatalp', 'gnutls-error-string', 'gnutls-errorp',
- 'gnutls-get-initstage', 'gnutls-peer-status',
- 'gnutls-peer-status-warning-describe', 'goto-char', 'gpm-mouse-start',
- 'gpm-mouse-stop', 'group-gid', 'group-real-gid',
- 'handle-save-session', 'handle-switch-frame', 'hash-table-count',
- 'hash-table-p', 'hash-table-rehash-size',
- 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
- 'hash-table-weakness', 'iconify-frame', 'identity', 'image-flush',
- 'image-mask-p', 'image-metadata', 'image-size', 'imagemagick-types',
- 'imagep', 'indent-to', 'indirect-function', 'indirect-variable',
- 'init-image-library', 'inotify-add-watch', 'inotify-rm-watch',
- 'input-pending-p', 'insert', 'insert-and-inherit',
- 'insert-before-markers', 'insert-before-markers-and-inherit',
- 'insert-buffer-substring', 'insert-byte', 'insert-char',
- 'insert-file-contents', 'insert-startup-screen', 'int86',
- 'integer-or-marker-p', 'integerp', 'interactive-form', 'intern',
- 'intern-soft', 'internal--track-mouse', 'internal-char-font',
- 'internal-complete-buffer', 'internal-copy-lisp-face',
- 'internal-default-process-filter',
- 'internal-default-process-sentinel', 'internal-describe-syntax-value',
- 'internal-event-symbol-parse-modifiers',
- 'internal-face-x-get-resource', 'internal-get-lisp-face-attribute',
- 'internal-lisp-face-attribute-values', 'internal-lisp-face-empty-p',
- 'internal-lisp-face-equal-p', 'internal-lisp-face-p',
- 'internal-make-lisp-face', 'internal-make-var-non-special',
- 'internal-merge-in-global-face',
- 'internal-set-alternative-font-family-alist',
- 'internal-set-alternative-font-registry-alist',
- 'internal-set-font-selection-order',
- 'internal-set-lisp-face-attribute',
- 'internal-set-lisp-face-attribute-from-resource',
- 'internal-show-cursor', 'internal-show-cursor-p', 'interrupt-process',
- 'invisible-p', 'invocation-directory', 'invocation-name', 'isnan',
- 'iso-charset', 'key-binding', 'key-description',
- 'keyboard-coding-system', 'keymap-parent', 'keymap-prompt', 'keymapp',
- 'keywordp', 'kill-all-local-variables', 'kill-buffer', 'kill-emacs',
- 'kill-local-variable', 'kill-process', 'last-nonminibuffer-frame',
- 'lax-plist-get', 'lax-plist-put', 'ldexp', 'length',
- 'libxml-parse-html-region', 'libxml-parse-xml-region',
- 'line-beginning-position', 'line-end-position', 'line-pixel-height',
- 'list', 'list-fonts', 'list-system-processes', 'listp', 'load',
- 'load-average', 'local-key-binding', 'local-variable-if-set-p',
- 'local-variable-p', 'locale-info', 'locate-file-internal',
- 'lock-buffer', 'log', 'logand', 'logb', 'logior', 'lognot', 'logxor',
- 'looking-at', 'lookup-image', 'lookup-image-map', 'lookup-key',
- 'lower-frame', 'lsh', 'macroexpand', 'make-bool-vector',
- 'make-byte-code', 'make-category-set', 'make-category-table',
- 'make-char', 'make-char-table', 'make-directory-internal',
- 'make-frame-invisible', 'make-frame-visible', 'make-hash-table',
- 'make-indirect-buffer', 'make-keymap', 'make-list',
- 'make-local-variable', 'make-marker', 'make-network-process',
- 'make-overlay', 'make-serial-process', 'make-sparse-keymap',
- 'make-string', 'make-symbol', 'make-symbolic-link', 'make-temp-name',
- 'make-terminal-frame', 'make-variable-buffer-local',
- 'make-variable-frame-local', 'make-vector', 'makunbound',
- 'map-char-table', 'map-charset-chars', 'map-keymap',
- 'map-keymap-internal', 'mapatoms', 'mapc', 'mapcar', 'mapconcat',
- 'maphash', 'mark-marker', 'marker-buffer', 'marker-insertion-type',
- 'marker-position', 'markerp', 'match-beginning', 'match-data',
- 'match-end', 'matching-paren', 'max', 'max-char', 'md5', 'member',
- 'memory-info', 'memory-limit', 'memory-use-counts', 'memq', 'memql',
- 'menu-bar-menu-at-x-y', 'menu-or-popup-active-p',
- 'menu-or-popup-active-p', 'merge-face-attribute', 'message',
- 'message-box', 'message-or-box', 'min',
- 'minibuffer-completion-contents', 'minibuffer-contents',
- 'minibuffer-contents-no-properties', 'minibuffer-depth',
- 'minibuffer-prompt', 'minibuffer-prompt-end',
- 'minibuffer-selected-window', 'minibuffer-window', 'minibufferp',
- 'minor-mode-key-binding', 'mod', 'modify-category-entry',
- 'modify-frame-parameters', 'modify-syntax-entry',
- 'mouse-pixel-position', 'mouse-position', 'move-overlay',
- 'move-point-visually', 'move-to-column', 'move-to-window-line',
- 'msdos-downcase-filename', 'msdos-long-file-names', 'msdos-memget',
- 'msdos-memput', 'msdos-mouse-disable', 'msdos-mouse-enable',
- 'msdos-mouse-init', 'msdos-mouse-p', 'msdos-remember-default-colors',
- 'msdos-set-keyboard', 'msdos-set-mouse-buttons',
- 'multibyte-char-to-unibyte', 'multibyte-string-p', 'narrow-to-region',
- 'natnump', 'nconc', 'network-interface-info',
- 'network-interface-list', 'new-fontset', 'newline-cache-check',
- 'next-char-property-change', 'next-frame', 'next-overlay-change',
- 'next-property-change', 'next-read-file-uses-dialog-p',
- 'next-single-char-property-change', 'next-single-property-change',
- 'next-window', 'nlistp', 'nreverse', 'nth', 'nthcdr', 'null',
- 'number-or-marker-p', 'number-to-string', 'numberp',
- 'open-dribble-file', 'open-font', 'open-termscript',
- 'optimize-char-table', 'other-buffer', 'other-window-for-scrolling',
- 'overlay-buffer', 'overlay-end', 'overlay-get', 'overlay-lists',
- 'overlay-properties', 'overlay-put', 'overlay-recenter',
- 'overlay-start', 'overlayp', 'overlays-at', 'overlays-in',
- 'parse-partial-sexp', 'play-sound-internal', 'plist-get',
- 'plist-member', 'plist-put', 'point', 'point-marker', 'point-max',
- 'point-max-marker', 'point-min', 'point-min-marker',
- 'pos-visible-in-window-p', 'position-bytes', 'posix-looking-at',
- 'posix-search-backward', 'posix-search-forward', 'posix-string-match',
- 'posn-at-point', 'posn-at-x-y', 'preceding-char',
- 'prefix-numeric-value', 'previous-char-property-change',
- 'previous-frame', 'previous-overlay-change',
- 'previous-property-change', 'previous-single-char-property-change',
- 'previous-single-property-change', 'previous-window', 'prin1',
- 'prin1-to-string', 'princ', 'print', 'process-attributes',
- 'process-buffer', 'process-coding-system', 'process-command',
- 'process-connection', 'process-contact', 'process-datagram-address',
- 'process-exit-status', 'process-filter', 'process-filter-multibyte-p',
- 'process-id', 'process-inherit-coding-system-flag', 'process-list',
- 'process-mark', 'process-name', 'process-plist',
- 'process-query-on-exit-flag', 'process-running-child-p',
- 'process-send-eof', 'process-send-region', 'process-send-string',
- 'process-sentinel', 'process-status', 'process-tty-name',
- 'process-type', 'processp', 'profiler-cpu-log',
- 'profiler-cpu-running-p', 'profiler-cpu-start', 'profiler-cpu-stop',
- 'profiler-memory-log', 'profiler-memory-running-p',
- 'profiler-memory-start', 'profiler-memory-stop', 'propertize',
- 'purecopy', 'put', 'put-text-property',
- 'put-unicode-property-internal', 'puthash', 'query-font',
- 'query-fontset', 'quit-process', 'raise-frame', 'random', 'rassoc',
- 'rassq', 're-search-backward', 're-search-forward', 'read',
- 'read-buffer', 'read-char', 'read-char-exclusive',
- 'read-coding-system', 'read-command', 'read-event',
- 'read-from-minibuffer', 'read-from-string', 'read-function',
- 'read-key-sequence', 'read-key-sequence-vector',
- 'read-no-blanks-input', 'read-non-nil-coding-system', 'read-string',
- 'read-variable', 'recent-auto-save-p', 'recent-doskeys',
- 'recent-keys', 'recenter', 'recursion-depth', 'recursive-edit',
- 'redirect-debugging-output', 'redirect-frame-focus', 'redisplay',
- 'redraw-display', 'redraw-frame', 'regexp-quote', 'region-beginning',
- 'region-end', 'register-ccl-program', 'register-code-conversion-map',
- 'remhash', 'remove-list-of-text-properties', 'remove-text-properties',
- 'rename-buffer', 'rename-file', 'replace-match',
- 'reset-this-command-lengths', 'resize-mini-window-internal',
- 'restore-buffer-modified-p', 'resume-tty', 'reverse', 'round',
- 'run-hook-with-args', 'run-hook-with-args-until-failure',
- 'run-hook-with-args-until-success', 'run-hook-wrapped', 'run-hooks',
- 'run-window-configuration-change-hook', 'run-window-scroll-functions',
- 'safe-length', 'scan-lists', 'scan-sexps', 'scroll-down',
- 'scroll-left', 'scroll-other-window', 'scroll-right', 'scroll-up',
- 'search-backward', 'search-forward', 'secure-hash', 'select-frame',
- 'select-window', 'selected-frame', 'selected-window',
- 'self-insert-command', 'send-string-to-terminal', 'sequencep',
- 'serial-process-configure', 'set', 'set-buffer',
- 'set-buffer-auto-saved', 'set-buffer-major-mode',
- 'set-buffer-modified-p', 'set-buffer-multibyte', 'set-case-table',
- 'set-category-table', 'set-char-table-extra-slot',
- 'set-char-table-parent', 'set-char-table-range', 'set-charset-plist',
- 'set-charset-priority', 'set-coding-system-priority',
- 'set-cursor-size', 'set-default', 'set-default-file-modes',
- 'set-default-toplevel-value', 'set-file-acl', 'set-file-modes',
- 'set-file-selinux-context', 'set-file-times', 'set-fontset-font',
- 'set-frame-height', 'set-frame-position', 'set-frame-selected-window',
- 'set-frame-size', 'set-frame-width', 'set-fringe-bitmap-face',
- 'set-input-interrupt-mode', 'set-input-meta-mode', 'set-input-mode',
- 'set-keyboard-coding-system-internal', 'set-keymap-parent',
- 'set-marker', 'set-marker-insertion-type', 'set-match-data',
- 'set-message-beep', 'set-minibuffer-window',
- 'set-mouse-pixel-position', 'set-mouse-position',
- 'set-network-process-option', 'set-output-flow-control',
- 'set-process-buffer', 'set-process-coding-system',
- 'set-process-datagram-address', 'set-process-filter',
- 'set-process-filter-multibyte',
- 'set-process-inherit-coding-system-flag', 'set-process-plist',
- 'set-process-query-on-exit-flag', 'set-process-sentinel',
- 'set-process-window-size', 'set-quit-char',
- 'set-safe-terminal-coding-system-internal', 'set-screen-color',
- 'set-standard-case-table', 'set-syntax-table',
- 'set-terminal-coding-system-internal', 'set-terminal-local-value',
- 'set-terminal-parameter', 'set-text-properties', 'set-time-zone-rule',
- 'set-visited-file-modtime', 'set-window-buffer',
- 'set-window-combination-limit', 'set-window-configuration',
- 'set-window-dedicated-p', 'set-window-display-table',
- 'set-window-fringes', 'set-window-hscroll', 'set-window-margins',
- 'set-window-new-normal', 'set-window-new-pixel',
- 'set-window-new-total', 'set-window-next-buffers',
- 'set-window-parameter', 'set-window-point', 'set-window-prev-buffers',
- 'set-window-redisplay-end-trigger', 'set-window-scroll-bars',
- 'set-window-start', 'set-window-vscroll', 'setcar', 'setcdr',
- 'setplist', 'show-face-resources', 'signal', 'signal-process', 'sin',
- 'single-key-description', 'skip-chars-backward', 'skip-chars-forward',
- 'skip-syntax-backward', 'skip-syntax-forward', 'sleep-for', 'sort',
- 'sort-charsets', 'special-variable-p', 'split-char',
- 'split-window-internal', 'sqrt', 'standard-case-table',
- 'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
- 'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
+ '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
+ 'Snarf-documentation', 'abort-recursive-edit', 'abs',
+ 'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
+ 'active-minibuffer-window', 'add-face-text-property',
+ 'add-name-to-file', 'add-text-properties', 'all-completions',
+ 'append', 'apply', 'apropos-internal', 'aref', 'arrayp', 'aset',
+ 'ash', 'asin', 'assoc', 'assoc-string', 'assq', 'atan', 'atom',
+ 'autoload', 'autoload-do-load', 'backtrace', 'backtrace--locals',
+ 'backtrace-debug', 'backtrace-eval', 'backtrace-frame',
+ 'backward-char', 'backward-prefix-chars', 'barf-if-buffer-read-only',
+ 'base64-decode-region', 'base64-decode-string',
+ 'base64-encode-region', 'base64-encode-string', 'beginning-of-line',
+ 'bidi-find-overridden-directionality', 'bidi-resolved-levels',
+ 'bitmap-spec-p', 'bobp', 'bolp', 'bool-vector',
+ 'bool-vector-count-consecutive', 'bool-vector-count-population',
+ 'bool-vector-exclusive-or', 'bool-vector-intersection',
+ 'bool-vector-not', 'bool-vector-p', 'bool-vector-set-difference',
+ 'bool-vector-subsetp', 'bool-vector-union', 'boundp',
+ 'buffer-base-buffer', 'buffer-chars-modified-tick',
+ 'buffer-enable-undo', 'buffer-file-name', 'buffer-has-markers-at',
+ 'buffer-list', 'buffer-live-p', 'buffer-local-value',
+ 'buffer-local-variables', 'buffer-modified-p', 'buffer-modified-tick',
+ 'buffer-name', 'buffer-size', 'buffer-string', 'buffer-substring',
+ 'buffer-substring-no-properties', 'buffer-swap-text', 'bufferp',
+ 'bury-buffer-internal', 'byte-code', 'byte-code-function-p',
+ 'byte-to-position', 'byte-to-string', 'byteorder',
+ 'call-interactively', 'call-last-kbd-macro', 'call-process',
+ 'call-process-region', 'cancel-kbd-macro-events', 'capitalize',
+ 'capitalize-region', 'capitalize-word', 'car', 'car-less-than-car',
+ 'car-safe', 'case-table-p', 'category-docstring',
+ 'category-set-mnemonics', 'category-table', 'category-table-p',
+ 'ccl-execute', 'ccl-execute-on-string', 'ccl-program-p', 'cdr',
+ 'cdr-safe', 'ceiling', 'char-after', 'char-before',
+ 'char-category-set', 'char-charset', 'char-equal', 'char-or-string-p',
+ 'char-resolve-modifiers', 'char-syntax', 'char-table-extra-slot',
+ 'char-table-p', 'char-table-parent', 'char-table-range',
+ 'char-table-subtype', 'char-to-string', 'char-width', 'characterp',
+ 'charset-after', 'charset-id-internal', 'charset-plist',
+ 'charset-priority-list', 'charsetp', 'check-coding-system',
+ 'check-coding-systems-region', 'clear-buffer-auto-save-failure',
+ 'clear-charset-maps', 'clear-face-cache', 'clear-font-cache',
+ 'clear-image-cache', 'clear-string', 'clear-this-command-keys',
+ 'close-font', 'clrhash', 'coding-system-aliases',
+ 'coding-system-base', 'coding-system-eol-type', 'coding-system-p',
+ 'coding-system-plist', 'coding-system-priority-list',
+ 'coding-system-put', 'color-distance', 'color-gray-p',
+ 'color-supported-p', 'combine-after-change-execute',
+ 'command-error-default-function', 'command-remapping', 'commandp',
+ 'compare-buffer-substrings', 'compare-strings',
+ 'compare-window-configurations', 'completing-read',
+ 'compose-region-internal', 'compose-string-internal',
+ 'composition-get-gstring', 'compute-motion', 'concat', 'cons',
+ 'consp', 'constrain-to-field', 'continue-process',
+ 'controlling-tty-p', 'coordinates-in-window-p', 'copy-alist',
+ 'copy-category-table', 'copy-file', 'copy-hash-table', 'copy-keymap',
+ 'copy-marker', 'copy-sequence', 'copy-syntax-table', 'copysign',
+ 'cos', 'current-active-maps', 'current-bidi-paragraph-direction',
+ 'current-buffer', 'current-case-table', 'current-column',
+ 'current-global-map', 'current-idle-time', 'current-indentation',
+ 'current-input-mode', 'current-local-map', 'current-message',
+ 'current-minor-mode-maps', 'current-time', 'current-time-string',
+ 'current-time-zone', 'current-window-configuration',
+ 'cygwin-convert-file-name-from-windows',
+ 'cygwin-convert-file-name-to-windows', 'daemon-initialized',
+ 'daemonp', 'dbus--init-bus', 'dbus-get-unique-name',
+ 'dbus-message-internal', 'debug-timer-check', 'declare-equiv-charset',
+ 'decode-big5-char', 'decode-char', 'decode-coding-region',
+ 'decode-coding-string', 'decode-sjis-char', 'decode-time',
+ 'default-boundp', 'default-file-modes', 'default-printer-name',
+ 'default-toplevel-value', 'default-value', 'define-category',
+ 'define-charset-alias', 'define-charset-internal',
+ 'define-coding-system-alias', 'define-coding-system-internal',
+ 'define-fringe-bitmap', 'define-hash-table-test', 'define-key',
+ 'define-prefix-command', 'delete',
+ 'delete-all-overlays', 'delete-and-extract-region', 'delete-char',
+ 'delete-directory-internal', 'delete-field', 'delete-file',
+ 'delete-frame', 'delete-other-windows-internal', 'delete-overlay',
+ 'delete-process', 'delete-region', 'delete-terminal',
+ 'delete-window-internal', 'delq', 'describe-buffer-bindings',
+ 'describe-vector', 'destroy-fringe-bitmap', 'detect-coding-region',
+ 'detect-coding-string', 'ding', 'directory-file-name',
+ 'directory-files', 'directory-files-and-attributes', 'discard-input',
+ 'display-supports-face-attributes-p', 'do-auto-save', 'documentation',
+ 'documentation-property', 'downcase', 'downcase-region',
+ 'downcase-word', 'draw-string', 'dump-colors', 'dump-emacs',
+ 'dump-face', 'dump-frame-glyph-matrix', 'dump-glyph-matrix',
+ 'dump-glyph-row', 'dump-redisplay-history', 'dump-tool-bar-row',
+ 'elt', 'emacs-pid', 'encode-big5-char', 'encode-char',
+ 'encode-coding-region', 'encode-coding-string', 'encode-sjis-char',
+ 'encode-time', 'end-kbd-macro', 'end-of-line', 'eobp', 'eolp', 'eq',
+ 'eql', 'equal', 'equal-including-properties', 'erase-buffer',
+ 'error-message-string', 'eval', 'eval-buffer', 'eval-region',
+ 'event-convert-list', 'execute-kbd-macro', 'exit-recursive-edit',
+ 'exp', 'expand-file-name', 'expt', 'external-debugging-output',
+ 'face-attribute-relative-p', 'face-attributes-as-vector', 'face-font',
+ 'fboundp', 'fceiling', 'fetch-bytecode', 'ffloor',
+ 'field-beginning', 'field-end', 'field-string',
+ 'field-string-no-properties', 'file-accessible-directory-p',
+ 'file-acl', 'file-attributes', 'file-attributes-lessp',
+ 'file-directory-p', 'file-executable-p', 'file-exists-p',
+ 'file-locked-p', 'file-modes', 'file-name-absolute-p',
+ 'file-name-all-completions', 'file-name-as-directory',
+ 'file-name-completion', 'file-name-directory',
+ 'file-name-nondirectory', 'file-newer-than-file-p', 'file-readable-p',
+ 'file-regular-p', 'file-selinux-context', 'file-symlink-p',
+ 'file-system-info', 'file-system-info', 'file-writable-p',
+ 'fillarray', 'find-charset-region', 'find-charset-string',
+ 'find-coding-systems-region-internal', 'find-composition-internal',
+ 'find-file-name-handler', 'find-font', 'find-operation-coding-system',
+ 'float', 'float-time', 'floatp', 'floor', 'fmakunbound',
+ 'following-char', 'font-at', 'font-drive-otf', 'font-face-attributes',
+ 'font-family-list', 'font-get', 'font-get-glyphs',
+ 'font-get-system-font', 'font-get-system-normal-font', 'font-info',
+ 'font-match-p', 'font-otf-alternates', 'font-put',
+ 'font-shape-gstring', 'font-spec', 'font-variation-glyphs',
+ 'font-xlfd-name', 'fontp', 'fontset-font', 'fontset-info',
+ 'fontset-list', 'fontset-list-all', 'force-mode-line-update',
+ 'force-window-update', 'format', 'format-mode-line',
+ 'format-network-address', 'format-time-string', 'forward-char',
+ 'forward-comment', 'forward-line', 'forward-word',
+ 'frame-border-width', 'frame-bottom-divider-width',
+ 'frame-can-run-window-configuration-change-hook', 'frame-char-height',
+ 'frame-char-width', 'frame-face-alist', 'frame-first-window',
+ 'frame-focus', 'frame-font-cache', 'frame-fringe-width', 'frame-list',
+ 'frame-live-p', 'frame-or-buffer-changed-p', 'frame-parameter',
+ 'frame-parameters', 'frame-pixel-height', 'frame-pixel-width',
+ 'frame-pointer-visible-p', 'frame-right-divider-width',
+ 'frame-root-window', 'frame-scroll-bar-height',
+ 'frame-scroll-bar-width', 'frame-selected-window', 'frame-terminal',
+ 'frame-text-cols', 'frame-text-height', 'frame-text-lines',
+ 'frame-text-width', 'frame-total-cols', 'frame-total-lines',
+ 'frame-visible-p', 'framep', 'frexp', 'fringe-bitmaps-at-pos',
+ 'fround', 'fset', 'ftruncate', 'funcall', 'funcall-interactively',
+ 'function-equal', 'functionp', 'gap-position', 'gap-size',
+ 'garbage-collect', 'gc-status', 'generate-new-buffer-name', 'get',
+ 'get-buffer', 'get-buffer-create', 'get-buffer-process',
+ 'get-buffer-window', 'get-byte', 'get-char-property',
+ 'get-char-property-and-overlay', 'get-file-buffer', 'get-file-char',
+ 'get-internal-run-time', 'get-load-suffixes', 'get-pos-property',
+ 'get-process', 'get-screen-color', 'get-text-property',
+ 'get-unicode-property-internal', 'get-unused-category',
+ 'get-unused-iso-final-char', 'getenv-internal', 'gethash',
+ 'gfile-add-watch', 'gfile-rm-watch', 'global-key-binding',
+ 'gnutls-available-p', 'gnutls-boot', 'gnutls-bye', 'gnutls-deinit',
+ 'gnutls-error-fatalp', 'gnutls-error-string', 'gnutls-errorp',
+ 'gnutls-get-initstage', 'gnutls-peer-status',
+ 'gnutls-peer-status-warning-describe', 'goto-char', 'gpm-mouse-start',
+ 'gpm-mouse-stop', 'group-gid', 'group-real-gid',
+ 'handle-save-session', 'handle-switch-frame', 'hash-table-count',
+ 'hash-table-p', 'hash-table-rehash-size',
+ 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
+ 'hash-table-weakness', 'iconify-frame', 'identity', 'image-flush',
+ 'image-mask-p', 'image-metadata', 'image-size', 'imagemagick-types',
+ 'imagep', 'indent-to', 'indirect-function', 'indirect-variable',
+ 'init-image-library', 'inotify-add-watch', 'inotify-rm-watch',
+ 'input-pending-p', 'insert', 'insert-and-inherit',
+ 'insert-before-markers', 'insert-before-markers-and-inherit',
+ 'insert-buffer-substring', 'insert-byte', 'insert-char',
+ 'insert-file-contents', 'insert-startup-screen', 'int86',
+ 'integer-or-marker-p', 'integerp', 'interactive-form', 'intern',
+ 'intern-soft', 'internal--track-mouse', 'internal-char-font',
+ 'internal-complete-buffer', 'internal-copy-lisp-face',
+ 'internal-default-process-filter',
+ 'internal-default-process-sentinel', 'internal-describe-syntax-value',
+ 'internal-event-symbol-parse-modifiers',
+ 'internal-face-x-get-resource', 'internal-get-lisp-face-attribute',
+ 'internal-lisp-face-attribute-values', 'internal-lisp-face-empty-p',
+ 'internal-lisp-face-equal-p', 'internal-lisp-face-p',
+ 'internal-make-lisp-face', 'internal-make-var-non-special',
+ 'internal-merge-in-global-face',
+ 'internal-set-alternative-font-family-alist',
+ 'internal-set-alternative-font-registry-alist',
+ 'internal-set-font-selection-order',
+ 'internal-set-lisp-face-attribute',
+ 'internal-set-lisp-face-attribute-from-resource',
+ 'internal-show-cursor', 'internal-show-cursor-p', 'interrupt-process',
+ 'invisible-p', 'invocation-directory', 'invocation-name', 'isnan',
+ 'iso-charset', 'key-binding', 'key-description',
+ 'keyboard-coding-system', 'keymap-parent', 'keymap-prompt', 'keymapp',
+ 'keywordp', 'kill-all-local-variables', 'kill-buffer', 'kill-emacs',
+ 'kill-local-variable', 'kill-process', 'last-nonminibuffer-frame',
+ 'lax-plist-get', 'lax-plist-put', 'ldexp', 'length',
+ 'libxml-parse-html-region', 'libxml-parse-xml-region',
+ 'line-beginning-position', 'line-end-position', 'line-pixel-height',
+ 'list', 'list-fonts', 'list-system-processes', 'listp', 'load',
+ 'load-average', 'local-key-binding', 'local-variable-if-set-p',
+ 'local-variable-p', 'locale-info', 'locate-file-internal',
+ 'lock-buffer', 'log', 'logand', 'logb', 'logior', 'lognot', 'logxor',
+ 'looking-at', 'lookup-image', 'lookup-image-map', 'lookup-key',
+ 'lower-frame', 'lsh', 'macroexpand', 'make-bool-vector',
+ 'make-byte-code', 'make-category-set', 'make-category-table',
+ 'make-char', 'make-char-table', 'make-directory-internal',
+ 'make-frame-invisible', 'make-frame-visible', 'make-hash-table',
+ 'make-indirect-buffer', 'make-keymap', 'make-list',
+ 'make-local-variable', 'make-marker', 'make-network-process',
+ 'make-overlay', 'make-serial-process', 'make-sparse-keymap',
+ 'make-string', 'make-symbol', 'make-symbolic-link', 'make-temp-name',
+ 'make-terminal-frame', 'make-variable-buffer-local',
+ 'make-variable-frame-local', 'make-vector', 'makunbound',
+ 'map-char-table', 'map-charset-chars', 'map-keymap',
+ 'map-keymap-internal', 'mapatoms', 'mapc', 'mapcar', 'mapconcat',
+ 'maphash', 'mark-marker', 'marker-buffer', 'marker-insertion-type',
+ 'marker-position', 'markerp', 'match-beginning', 'match-data',
+ 'match-end', 'matching-paren', 'max', 'max-char', 'md5', 'member',
+ 'memory-info', 'memory-limit', 'memory-use-counts', 'memq', 'memql',
+ 'menu-bar-menu-at-x-y', 'menu-or-popup-active-p',
+ 'menu-or-popup-active-p', 'merge-face-attribute', 'message',
+ 'message-box', 'message-or-box', 'min',
+ 'minibuffer-completion-contents', 'minibuffer-contents',
+ 'minibuffer-contents-no-properties', 'minibuffer-depth',
+ 'minibuffer-prompt', 'minibuffer-prompt-end',
+ 'minibuffer-selected-window', 'minibuffer-window', 'minibufferp',
+ 'minor-mode-key-binding', 'mod', 'modify-category-entry',
+ 'modify-frame-parameters', 'modify-syntax-entry',
+ 'mouse-pixel-position', 'mouse-position', 'move-overlay',
+ 'move-point-visually', 'move-to-column', 'move-to-window-line',
+ 'msdos-downcase-filename', 'msdos-long-file-names', 'msdos-memget',
+ 'msdos-memput', 'msdos-mouse-disable', 'msdos-mouse-enable',
+ 'msdos-mouse-init', 'msdos-mouse-p', 'msdos-remember-default-colors',
+ 'msdos-set-keyboard', 'msdos-set-mouse-buttons',
+ 'multibyte-char-to-unibyte', 'multibyte-string-p', 'narrow-to-region',
+ 'natnump', 'nconc', 'network-interface-info',
+ 'network-interface-list', 'new-fontset', 'newline-cache-check',
+ 'next-char-property-change', 'next-frame', 'next-overlay-change',
+ 'next-property-change', 'next-read-file-uses-dialog-p',
+ 'next-single-char-property-change', 'next-single-property-change',
+ 'next-window', 'nlistp', 'nreverse', 'nth', 'nthcdr', 'null',
+ 'number-or-marker-p', 'number-to-string', 'numberp',
+ 'open-dribble-file', 'open-font', 'open-termscript',
+ 'optimize-char-table', 'other-buffer', 'other-window-for-scrolling',
+ 'overlay-buffer', 'overlay-end', 'overlay-get', 'overlay-lists',
+ 'overlay-properties', 'overlay-put', 'overlay-recenter',
+ 'overlay-start', 'overlayp', 'overlays-at', 'overlays-in',
+ 'parse-partial-sexp', 'play-sound-internal', 'plist-get',
+ 'plist-member', 'plist-put', 'point', 'point-marker', 'point-max',
+ 'point-max-marker', 'point-min', 'point-min-marker',
+ 'pos-visible-in-window-p', 'position-bytes', 'posix-looking-at',
+ 'posix-search-backward', 'posix-search-forward', 'posix-string-match',
+ 'posn-at-point', 'posn-at-x-y', 'preceding-char',
+ 'prefix-numeric-value', 'previous-char-property-change',
+ 'previous-frame', 'previous-overlay-change',
+ 'previous-property-change', 'previous-single-char-property-change',
+ 'previous-single-property-change', 'previous-window', 'prin1',
+ 'prin1-to-string', 'princ', 'print', 'process-attributes',
+ 'process-buffer', 'process-coding-system', 'process-command',
+ 'process-connection', 'process-contact', 'process-datagram-address',
+ 'process-exit-status', 'process-filter', 'process-filter-multibyte-p',
+ 'process-id', 'process-inherit-coding-system-flag', 'process-list',
+ 'process-mark', 'process-name', 'process-plist',
+ 'process-query-on-exit-flag', 'process-running-child-p',
+ 'process-send-eof', 'process-send-region', 'process-send-string',
+ 'process-sentinel', 'process-status', 'process-tty-name',
+ 'process-type', 'processp', 'profiler-cpu-log',
+ 'profiler-cpu-running-p', 'profiler-cpu-start', 'profiler-cpu-stop',
+ 'profiler-memory-log', 'profiler-memory-running-p',
+ 'profiler-memory-start', 'profiler-memory-stop', 'propertize',
+ 'purecopy', 'put', 'put-text-property',
+ 'put-unicode-property-internal', 'puthash', 'query-font',
+ 'query-fontset', 'quit-process', 'raise-frame', 'random', 'rassoc',
+ 'rassq', 're-search-backward', 're-search-forward', 'read',
+ 'read-buffer', 'read-char', 'read-char-exclusive',
+ 'read-coding-system', 'read-command', 'read-event',
+ 'read-from-minibuffer', 'read-from-string', 'read-function',
+ 'read-key-sequence', 'read-key-sequence-vector',
+ 'read-no-blanks-input', 'read-non-nil-coding-system', 'read-string',
+ 'read-variable', 'recent-auto-save-p', 'recent-doskeys',
+ 'recent-keys', 'recenter', 'recursion-depth', 'recursive-edit',
+ 'redirect-debugging-output', 'redirect-frame-focus', 'redisplay',
+ 'redraw-display', 'redraw-frame', 'regexp-quote', 'region-beginning',
+ 'region-end', 'register-ccl-program', 'register-code-conversion-map',
+ 'remhash', 'remove-list-of-text-properties', 'remove-text-properties',
+ 'rename-buffer', 'rename-file', 'replace-match',
+ 'reset-this-command-lengths', 'resize-mini-window-internal',
+ 'restore-buffer-modified-p', 'resume-tty', 'reverse', 'round',
+ 'run-hook-with-args', 'run-hook-with-args-until-failure',
+ 'run-hook-with-args-until-success', 'run-hook-wrapped', 'run-hooks',
+ 'run-window-configuration-change-hook', 'run-window-scroll-functions',
+ 'safe-length', 'scan-lists', 'scan-sexps', 'scroll-down',
+ 'scroll-left', 'scroll-other-window', 'scroll-right', 'scroll-up',
+ 'search-backward', 'search-forward', 'secure-hash', 'select-frame',
+ 'select-window', 'selected-frame', 'selected-window',
+ 'self-insert-command', 'send-string-to-terminal', 'sequencep',
+ 'serial-process-configure', 'set', 'set-buffer',
+ 'set-buffer-auto-saved', 'set-buffer-major-mode',
+ 'set-buffer-modified-p', 'set-buffer-multibyte', 'set-case-table',
+ 'set-category-table', 'set-char-table-extra-slot',
+ 'set-char-table-parent', 'set-char-table-range', 'set-charset-plist',
+ 'set-charset-priority', 'set-coding-system-priority',
+ 'set-cursor-size', 'set-default', 'set-default-file-modes',
+ 'set-default-toplevel-value', 'set-file-acl', 'set-file-modes',
+ 'set-file-selinux-context', 'set-file-times', 'set-fontset-font',
+ 'set-frame-height', 'set-frame-position', 'set-frame-selected-window',
+ 'set-frame-size', 'set-frame-width', 'set-fringe-bitmap-face',
+ 'set-input-interrupt-mode', 'set-input-meta-mode', 'set-input-mode',
+ 'set-keyboard-coding-system-internal', 'set-keymap-parent',
+ 'set-marker', 'set-marker-insertion-type', 'set-match-data',
+ 'set-message-beep', 'set-minibuffer-window',
+ 'set-mouse-pixel-position', 'set-mouse-position',
+ 'set-network-process-option', 'set-output-flow-control',
+ 'set-process-buffer', 'set-process-coding-system',
+ 'set-process-datagram-address', 'set-process-filter',
+ 'set-process-filter-multibyte',
+ 'set-process-inherit-coding-system-flag', 'set-process-plist',
+ 'set-process-query-on-exit-flag', 'set-process-sentinel',
+ 'set-process-window-size', 'set-quit-char',
+ 'set-safe-terminal-coding-system-internal', 'set-screen-color',
+ 'set-standard-case-table', 'set-syntax-table',
+ 'set-terminal-coding-system-internal', 'set-terminal-local-value',
+ 'set-terminal-parameter', 'set-text-properties', 'set-time-zone-rule',
+ 'set-visited-file-modtime', 'set-window-buffer',
+ 'set-window-combination-limit', 'set-window-configuration',
+ 'set-window-dedicated-p', 'set-window-display-table',
+ 'set-window-fringes', 'set-window-hscroll', 'set-window-margins',
+ 'set-window-new-normal', 'set-window-new-pixel',
+ 'set-window-new-total', 'set-window-next-buffers',
+ 'set-window-parameter', 'set-window-point', 'set-window-prev-buffers',
+ 'set-window-redisplay-end-trigger', 'set-window-scroll-bars',
+ 'set-window-start', 'set-window-vscroll', 'setcar', 'setcdr',
+ 'setplist', 'show-face-resources', 'signal', 'signal-process', 'sin',
+ 'single-key-description', 'skip-chars-backward', 'skip-chars-forward',
+ 'skip-syntax-backward', 'skip-syntax-forward', 'sleep-for', 'sort',
+ 'sort-charsets', 'special-variable-p', 'split-char',
+ 'split-window-internal', 'sqrt', 'standard-case-table',
+ 'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
+ 'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
'string=', 'string<', 'string>', 'string-as-multibyte',
'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
'string-collate-lessp', 'string-equal', 'string-greaterp',
- 'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
- 'string-match', 'string-to-char', 'string-to-multibyte',
- 'string-to-number', 'string-to-syntax', 'string-to-unibyte',
- 'string-width', 'stringp', 'subr-name', 'subrp',
- 'subst-char-in-region', 'substitute-command-keys',
- 'substitute-in-file-name', 'substring', 'substring-no-properties',
- 'suspend-emacs', 'suspend-tty', 'suspicious-object', 'sxhash',
- 'symbol-function', 'symbol-name', 'symbol-plist', 'symbol-value',
- 'symbolp', 'syntax-table', 'syntax-table-p', 'system-groups',
- 'system-move-file-to-trash', 'system-name', 'system-users', 'tan',
- 'terminal-coding-system', 'terminal-list', 'terminal-live-p',
- 'terminal-local-value', 'terminal-name', 'terminal-parameter',
- 'terminal-parameters', 'terpri', 'test-completion',
- 'text-char-description', 'text-properties-at', 'text-property-any',
- 'text-property-not-all', 'this-command-keys',
- 'this-command-keys-vector', 'this-single-command-keys',
- 'this-single-command-raw-keys', 'time-add', 'time-less-p',
- 'time-subtract', 'tool-bar-get-system-style', 'tool-bar-height',
- 'tool-bar-pixel-width', 'top-level', 'trace-redisplay',
- 'trace-to-stderr', 'translate-region-internal', 'transpose-regions',
- 'truncate', 'try-completion', 'tty-display-color-cells',
- 'tty-display-color-p', 'tty-no-underline',
- 'tty-suppress-bold-inverse-default-colors', 'tty-top-frame',
- 'tty-type', 'type-of', 'undo-boundary', 'unencodable-char-position',
- 'unhandled-file-name-directory', 'unibyte-char-to-multibyte',
- 'unibyte-string', 'unicode-property-table-internal', 'unify-charset',
- 'unintern', 'unix-sync', 'unlock-buffer', 'upcase', 'upcase-initials',
- 'upcase-initials-region', 'upcase-region', 'upcase-word',
- 'use-global-map', 'use-local-map', 'user-full-name',
- 'user-login-name', 'user-real-login-name', 'user-real-uid',
- 'user-uid', 'variable-binding-locus', 'vconcat', 'vector',
- 'vector-or-char-table-p', 'vectorp', 'verify-visited-file-modtime',
- 'vertical-motion', 'visible-frame-list', 'visited-file-modtime',
- 'w16-get-clipboard-data', 'w16-selection-exists-p',
- 'w16-set-clipboard-data', 'w32-battery-status',
- 'w32-default-color-map', 'w32-define-rgb-color',
- 'w32-display-monitor-attributes-list', 'w32-frame-menu-bar-size',
- 'w32-frame-rect', 'w32-get-clipboard-data',
- 'w32-get-codepage-charset', 'w32-get-console-codepage',
- 'w32-get-console-output-codepage', 'w32-get-current-locale-id',
- 'w32-get-default-locale-id', 'w32-get-keyboard-layout',
- 'w32-get-locale-info', 'w32-get-valid-codepages',
- 'w32-get-valid-keyboard-layouts', 'w32-get-valid-locale-ids',
- 'w32-has-winsock', 'w32-long-file-name', 'w32-reconstruct-hot-key',
- 'w32-register-hot-key', 'w32-registered-hot-keys',
- 'w32-selection-exists-p', 'w32-send-sys-command',
- 'w32-set-clipboard-data', 'w32-set-console-codepage',
- 'w32-set-console-output-codepage', 'w32-set-current-locale',
- 'w32-set-keyboard-layout', 'w32-set-process-priority',
- 'w32-shell-execute', 'w32-short-file-name', 'w32-toggle-lock-key',
- 'w32-unload-winsock', 'w32-unregister-hot-key', 'w32-window-exists-p',
- 'w32notify-add-watch', 'w32notify-rm-watch',
- 'waiting-for-user-input-p', 'where-is-internal', 'widen',
- 'widget-apply', 'widget-get', 'widget-put',
- 'window-absolute-pixel-edges', 'window-at', 'window-body-height',
- 'window-body-width', 'window-bottom-divider-width', 'window-buffer',
- 'window-combination-limit', 'window-configuration-frame',
- 'window-configuration-p', 'window-dedicated-p',
- 'window-display-table', 'window-edges', 'window-end', 'window-frame',
- 'window-fringes', 'window-header-line-height', 'window-hscroll',
- 'window-inside-absolute-pixel-edges', 'window-inside-edges',
- 'window-inside-pixel-edges', 'window-left-child',
- 'window-left-column', 'window-line-height', 'window-list',
- 'window-list-1', 'window-live-p', 'window-margins',
- 'window-minibuffer-p', 'window-mode-line-height', 'window-new-normal',
- 'window-new-pixel', 'window-new-total', 'window-next-buffers',
- 'window-next-sibling', 'window-normal-size', 'window-old-point',
- 'window-parameter', 'window-parameters', 'window-parent',
- 'window-pixel-edges', 'window-pixel-height', 'window-pixel-left',
- 'window-pixel-top', 'window-pixel-width', 'window-point',
- 'window-prev-buffers', 'window-prev-sibling',
- 'window-redisplay-end-trigger', 'window-resize-apply',
- 'window-resize-apply-total', 'window-right-divider-width',
- 'window-scroll-bar-height', 'window-scroll-bar-width',
- 'window-scroll-bars', 'window-start', 'window-system',
- 'window-text-height', 'window-text-pixel-size', 'window-text-width',
- 'window-top-child', 'window-top-line', 'window-total-height',
- 'window-total-width', 'window-use-time', 'window-valid-p',
- 'window-vscroll', 'windowp', 'write-char', 'write-region',
- 'x-backspace-delete-keys-p', 'x-change-window-property',
- 'x-change-window-property', 'x-close-connection',
- 'x-close-connection', 'x-create-frame', 'x-create-frame',
- 'x-delete-window-property', 'x-delete-window-property',
- 'x-disown-selection-internal', 'x-display-backing-store',
- 'x-display-backing-store', 'x-display-color-cells',
- 'x-display-color-cells', 'x-display-grayscale-p',
- 'x-display-grayscale-p', 'x-display-list', 'x-display-list',
- 'x-display-mm-height', 'x-display-mm-height', 'x-display-mm-width',
- 'x-display-mm-width', 'x-display-monitor-attributes-list',
- 'x-display-pixel-height', 'x-display-pixel-height',
- 'x-display-pixel-width', 'x-display-pixel-width', 'x-display-planes',
- 'x-display-planes', 'x-display-save-under', 'x-display-save-under',
- 'x-display-screens', 'x-display-screens', 'x-display-visual-class',
- 'x-display-visual-class', 'x-family-fonts', 'x-file-dialog',
- 'x-file-dialog', 'x-file-dialog', 'x-focus-frame', 'x-frame-geometry',
- 'x-frame-geometry', 'x-get-atom-name', 'x-get-resource',
- 'x-get-selection-internal', 'x-hide-tip', 'x-hide-tip',
- 'x-list-fonts', 'x-load-color-file', 'x-menu-bar-open-internal',
- 'x-menu-bar-open-internal', 'x-open-connection', 'x-open-connection',
- 'x-own-selection-internal', 'x-parse-geometry', 'x-popup-dialog',
- 'x-popup-menu', 'x-register-dnd-atom', 'x-select-font',
- 'x-select-font', 'x-selection-exists-p', 'x-selection-owner-p',
- 'x-send-client-message', 'x-server-max-request-size',
- 'x-server-max-request-size', 'x-server-vendor', 'x-server-vendor',
- 'x-server-version', 'x-server-version', 'x-show-tip', 'x-show-tip',
- 'x-synchronize', 'x-synchronize', 'x-uses-old-gtk-dialog',
- 'x-window-property', 'x-window-property', 'x-wm-set-size-hint',
- 'xw-color-defined-p', 'xw-color-defined-p', 'xw-color-values',
- 'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
- 'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
- 'forward-point',
+ 'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
+ 'string-match', 'string-to-char', 'string-to-multibyte',
+ 'string-to-number', 'string-to-syntax', 'string-to-unibyte',
+ 'string-width', 'stringp', 'subr-name', 'subrp',
+ 'subst-char-in-region', 'substitute-command-keys',
+ 'substitute-in-file-name', 'substring', 'substring-no-properties',
+ 'suspend-emacs', 'suspend-tty', 'suspicious-object', 'sxhash',
+ 'symbol-function', 'symbol-name', 'symbol-plist', 'symbol-value',
+ 'symbolp', 'syntax-table', 'syntax-table-p', 'system-groups',
+ 'system-move-file-to-trash', 'system-name', 'system-users', 'tan',
+ 'terminal-coding-system', 'terminal-list', 'terminal-live-p',
+ 'terminal-local-value', 'terminal-name', 'terminal-parameter',
+ 'terminal-parameters', 'terpri', 'test-completion',
+ 'text-char-description', 'text-properties-at', 'text-property-any',
+ 'text-property-not-all', 'this-command-keys',
+ 'this-command-keys-vector', 'this-single-command-keys',
+ 'this-single-command-raw-keys', 'time-add', 'time-less-p',
+ 'time-subtract', 'tool-bar-get-system-style', 'tool-bar-height',
+ 'tool-bar-pixel-width', 'top-level', 'trace-redisplay',
+ 'trace-to-stderr', 'translate-region-internal', 'transpose-regions',
+ 'truncate', 'try-completion', 'tty-display-color-cells',
+ 'tty-display-color-p', 'tty-no-underline',
+ 'tty-suppress-bold-inverse-default-colors', 'tty-top-frame',
+ 'tty-type', 'type-of', 'undo-boundary', 'unencodable-char-position',
+ 'unhandled-file-name-directory', 'unibyte-char-to-multibyte',
+ 'unibyte-string', 'unicode-property-table-internal', 'unify-charset',
+ 'unintern', 'unix-sync', 'unlock-buffer', 'upcase', 'upcase-initials',
+ 'upcase-initials-region', 'upcase-region', 'upcase-word',
+ 'use-global-map', 'use-local-map', 'user-full-name',
+ 'user-login-name', 'user-real-login-name', 'user-real-uid',
+ 'user-uid', 'variable-binding-locus', 'vconcat', 'vector',
+ 'vector-or-char-table-p', 'vectorp', 'verify-visited-file-modtime',
+ 'vertical-motion', 'visible-frame-list', 'visited-file-modtime',
+ 'w16-get-clipboard-data', 'w16-selection-exists-p',
+ 'w16-set-clipboard-data', 'w32-battery-status',
+ 'w32-default-color-map', 'w32-define-rgb-color',
+ 'w32-display-monitor-attributes-list', 'w32-frame-menu-bar-size',
+ 'w32-frame-rect', 'w32-get-clipboard-data',
+ 'w32-get-codepage-charset', 'w32-get-console-codepage',
+ 'w32-get-console-output-codepage', 'w32-get-current-locale-id',
+ 'w32-get-default-locale-id', 'w32-get-keyboard-layout',
+ 'w32-get-locale-info', 'w32-get-valid-codepages',
+ 'w32-get-valid-keyboard-layouts', 'w32-get-valid-locale-ids',
+ 'w32-has-winsock', 'w32-long-file-name', 'w32-reconstruct-hot-key',
+ 'w32-register-hot-key', 'w32-registered-hot-keys',
+ 'w32-selection-exists-p', 'w32-send-sys-command',
+ 'w32-set-clipboard-data', 'w32-set-console-codepage',
+ 'w32-set-console-output-codepage', 'w32-set-current-locale',
+ 'w32-set-keyboard-layout', 'w32-set-process-priority',
+ 'w32-shell-execute', 'w32-short-file-name', 'w32-toggle-lock-key',
+ 'w32-unload-winsock', 'w32-unregister-hot-key', 'w32-window-exists-p',
+ 'w32notify-add-watch', 'w32notify-rm-watch',
+ 'waiting-for-user-input-p', 'where-is-internal', 'widen',
+ 'widget-apply', 'widget-get', 'widget-put',
+ 'window-absolute-pixel-edges', 'window-at', 'window-body-height',
+ 'window-body-width', 'window-bottom-divider-width', 'window-buffer',
+ 'window-combination-limit', 'window-configuration-frame',
+ 'window-configuration-p', 'window-dedicated-p',
+ 'window-display-table', 'window-edges', 'window-end', 'window-frame',
+ 'window-fringes', 'window-header-line-height', 'window-hscroll',
+ 'window-inside-absolute-pixel-edges', 'window-inside-edges',
+ 'window-inside-pixel-edges', 'window-left-child',
+ 'window-left-column', 'window-line-height', 'window-list',
+ 'window-list-1', 'window-live-p', 'window-margins',
+ 'window-minibuffer-p', 'window-mode-line-height', 'window-new-normal',
+ 'window-new-pixel', 'window-new-total', 'window-next-buffers',
+ 'window-next-sibling', 'window-normal-size', 'window-old-point',
+ 'window-parameter', 'window-parameters', 'window-parent',
+ 'window-pixel-edges', 'window-pixel-height', 'window-pixel-left',
+ 'window-pixel-top', 'window-pixel-width', 'window-point',
+ 'window-prev-buffers', 'window-prev-sibling',
+ 'window-redisplay-end-trigger', 'window-resize-apply',
+ 'window-resize-apply-total', 'window-right-divider-width',
+ 'window-scroll-bar-height', 'window-scroll-bar-width',
+ 'window-scroll-bars', 'window-start', 'window-system',
+ 'window-text-height', 'window-text-pixel-size', 'window-text-width',
+ 'window-top-child', 'window-top-line', 'window-total-height',
+ 'window-total-width', 'window-use-time', 'window-valid-p',
+ 'window-vscroll', 'windowp', 'write-char', 'write-region',
+ 'x-backspace-delete-keys-p', 'x-change-window-property',
+ 'x-change-window-property', 'x-close-connection',
+ 'x-close-connection', 'x-create-frame', 'x-create-frame',
+ 'x-delete-window-property', 'x-delete-window-property',
+ 'x-disown-selection-internal', 'x-display-backing-store',
+ 'x-display-backing-store', 'x-display-color-cells',
+ 'x-display-color-cells', 'x-display-grayscale-p',
+ 'x-display-grayscale-p', 'x-display-list', 'x-display-list',
+ 'x-display-mm-height', 'x-display-mm-height', 'x-display-mm-width',
+ 'x-display-mm-width', 'x-display-monitor-attributes-list',
+ 'x-display-pixel-height', 'x-display-pixel-height',
+ 'x-display-pixel-width', 'x-display-pixel-width', 'x-display-planes',
+ 'x-display-planes', 'x-display-save-under', 'x-display-save-under',
+ 'x-display-screens', 'x-display-screens', 'x-display-visual-class',
+ 'x-display-visual-class', 'x-family-fonts', 'x-file-dialog',
+ 'x-file-dialog', 'x-file-dialog', 'x-focus-frame', 'x-frame-geometry',
+ 'x-frame-geometry', 'x-get-atom-name', 'x-get-resource',
+ 'x-get-selection-internal', 'x-hide-tip', 'x-hide-tip',
+ 'x-list-fonts', 'x-load-color-file', 'x-menu-bar-open-internal',
+ 'x-menu-bar-open-internal', 'x-open-connection', 'x-open-connection',
+ 'x-own-selection-internal', 'x-parse-geometry', 'x-popup-dialog',
+ 'x-popup-menu', 'x-register-dnd-atom', 'x-select-font',
+ 'x-select-font', 'x-selection-exists-p', 'x-selection-owner-p',
+ 'x-send-client-message', 'x-server-max-request-size',
+ 'x-server-max-request-size', 'x-server-vendor', 'x-server-vendor',
+ 'x-server-version', 'x-server-version', 'x-show-tip', 'x-show-tip',
+ 'x-synchronize', 'x-synchronize', 'x-uses-old-gtk-dialog',
+ 'x-window-property', 'x-window-property', 'x-wm-set-size-hint',
+ 'xw-color-defined-p', 'xw-color-defined-p', 'xw-color-values',
+ 'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
+ 'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
+ 'forward-point',
}
-
+
builtin_function_highlighted = {
- 'defvaralias', 'provide', 'require',
- 'with-no-warnings', 'define-widget', 'with-electric-help',
- 'throw', 'defalias', 'featurep'
+ 'defvaralias', 'provide', 'require',
+ 'with-no-warnings', 'define-widget', 'with-electric-help',
+ 'throw', 'defalias', 'featurep'
}
-
+
lambda_list_keywords = {
- '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
- '&rest', '&whole',
+ '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
+ '&rest', '&whole',
}
-
+
error_keywords = {
- 'cl-assert', 'cl-check-type', 'error', 'signal',
- 'user-error', 'warn',
+ 'cl-assert', 'cl-check-type', 'error', 'signal',
+ 'user-error', 'warn',
}
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in EmacsLispLexer.builtin_function:
- yield index, Name.Function, value
- continue
- if value in EmacsLispLexer.special_forms:
- yield index, Keyword, value
- continue
- if value in EmacsLispLexer.error_keywords:
- yield index, Name.Exception, value
- continue
- if value in EmacsLispLexer.builtin_function_highlighted:
- yield index, Name.Builtin, value
- continue
- if value in EmacsLispLexer.macros:
- yield index, Name.Builtin, value
- continue
- if value in EmacsLispLexer.lambda_list_keywords:
- yield index, Keyword.Pseudo, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- default('body'),
- ],
- 'body': [
- # whitespace
- (r'\s+', Text),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # strings and characters
- (r'"', String, 'string'),
- (r'\?([^\\]|\\.)', String.Char),
- # quoting
- (r":" + symbol, Name.Builtin),
- (r"::" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
- terminated, Number.Float),
-
- # vectors
- (r'\[|\]', Punctuation),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read syntax for char tables
- (r'#\^\^?', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
-
- # octal rational
- (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.|:)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Variable:
+ if value in EmacsLispLexer.builtin_function:
+ yield index, Name.Function, value
+ continue
+ if value in EmacsLispLexer.special_forms:
+ yield index, Keyword, value
+ continue
+ if value in EmacsLispLexer.error_keywords:
+ yield index, Name.Exception, value
+ continue
+ if value in EmacsLispLexer.builtin_function_highlighted:
+ yield index, Name.Builtin, value
+ continue
+ if value in EmacsLispLexer.macros:
+ yield index, Name.Builtin, value
+ continue
+ if value in EmacsLispLexer.lambda_list_keywords:
+ yield index, Keyword.Pseudo, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ default('body'),
+ ],
+ 'body': [
+ # whitespace
+ (r'\s+', Text),
+
+ # single-line comment
+ (r';.*$', Comment.Single),
+
+ # strings and characters
+ (r'"', String, 'string'),
+ (r'\?([^\\]|\\.)', String.Char),
+ # quoting
+ (r":" + symbol, Name.Builtin),
+ (r"::" + symbol, String.Symbol),
+ (r"'" + symbol, String.Symbol),
+ (r"'", Operator),
+ (r"`", Operator),
+
+ # decimal numbers
+ (r'[-+]?\d+\.?' + terminated, Number.Integer),
+ (r'[-+]?\d+/\d+' + terminated, Number),
+ (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
+ terminated, Number.Float),
+
+ # vectors
+ (r'\[|\]', Punctuation),
+
+ # uninterned symbol
+ (r'#:' + symbol, String.Symbol),
+
+ # read syntax for char tables
+ (r'#\^\^?', Operator),
+
+ # function shorthand
+ (r'#\'', Name.Function),
+
+ # binary rational
+ (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
+
+ # octal rational
+ (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
+
+ # hex rational
+ (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
+
+ # radix rational
+ (r'#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
+
+ # reference
+ (r'#\d+=', Operator),
+ (r'#\d+#', Operator),
+
+ # special operators that should have been parsed already
+ (r'(,@|,|\.|:)', Operator),
+
+ # special constants
+ (r'(t|nil)' + terminated, Name.Constant),
+
+ # functions and variables
(r'\*' + symbol + r'\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'#\(', Operator, 'body'),
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- 'string': [
- (r'[^"\\`]+', String),
- (r'`%s\'' % symbol, String.Symbol),
- (r'`', String),
- (r'\\.', String),
- (r'\\\n', String),
- (r'"', String, '#pop'),
- ],
- }
-
-
-class ShenLexer(RegexLexer):
- """
- Lexer for `Shen <http://shenlanguage.org/>`_ source code.
-
- .. versionadded:: 2.1
- """
- name = 'Shen'
- aliases = ['shen']
- filenames = ['*.shen']
- mimetypes = ['text/x-shen', 'application/x-shen']
-
+ (symbol, Name.Variable),
+
+ # parentheses
+ (r'#\(', Operator, 'body'),
+ (r'\(', Punctuation, 'body'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'[^"\\`]+', String),
+ (r'`%s\'' % symbol, String.Symbol),
+ (r'`', String),
+ (r'\\.', String),
+ (r'\\\n', String),
+ (r'"', String, '#pop'),
+ ],
+ }
+
+
+class ShenLexer(RegexLexer):
+ """
+ Lexer for `Shen <http://shenlanguage.org/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Shen'
+ aliases = ['shen']
+ filenames = ['*.shen']
+ mimetypes = ['text/x-shen', 'application/x-shen']
+
DECLARATIONS = (
'datatype', 'define', 'defmacro', 'defprolog', 'defcc',
'synonyms', 'declare', 'package', 'type', 'function',
)
-
+
SPECIAL_FORMS = (
'lambda', 'get', 'let', 'if', 'cases', 'cond', 'put', 'time', 'freeze',
'value', 'load', '$', 'protect', 'or', 'and', 'not', 'do', 'output',
'prolog?', 'trap-error', 'error', 'make-string', '/.', 'set', '@p',
'@s', '@v',
)
-
+
BUILTINS = (
'==', '=', '*', '+', '-', '/', '<', '>', '>=', '<=', '<-address',
'<-vector', 'abort', 'absvector', 'absvector?', 'address->', 'adjoin',
@@ -2223,199 +2223,199 @@ class ShenLexer(RegexLexer):
'verified', 'version', 'warn', 'when', 'write-byte', 'write-to-file',
'y-or-n?',
)
-
+
BUILTINS_ANYWHERE = ('where', 'skip', '>>', '_', '!', '<e>', '<!>')
-
+
MAPPINGS = {s: Keyword for s in DECLARATIONS}
- MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
- MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
-
+ MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
+ MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
+
valid_symbol_chars = r'[\w!$%*+,<=>?/.\'@&#:-]'
- valid_name = '%s+' % valid_symbol_chars
- symbol_name = r'[a-z!$%%*+,<=>?/.\'@&#_-]%s*' % valid_symbol_chars
- variable = r'[A-Z]%s*' % valid_symbol_chars
-
- tokens = {
- 'string': [
- (r'"', String, '#pop'),
- (r'c#\d{1,3};', String.Escape),
- (r'~[ARS%]', String.Interpol),
- (r'(?s).', String),
- ],
-
- 'root': [
- (r'(?s)\\\*.*?\*\\', Comment.Multiline), # \* ... *\
- (r'\\\\.*', Comment.Single), # \\ ...
- (r'\s+', Text),
- (r'_{5,}', Punctuation),
- (r'={5,}', Punctuation),
- (r'(;|:=|\||--?>|<--?)', Punctuation),
- (r'(:-|:|\{|\})', Literal),
- (r'[+-]*\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'[+-]*\d+', Number.Integer),
- (r'"', String, 'string'),
- (variable, Name.Variable),
- (r'(true|false|<>|\[\])', Keyword.Pseudo),
- (symbol_name, Literal),
- (r'(\[|\]|\(|\))', Punctuation),
- ],
- }
-
- def get_tokens_unprocessed(self, text):
- tokens = RegexLexer.get_tokens_unprocessed(self, text)
- tokens = self._process_symbols(tokens)
- tokens = self._process_declarations(tokens)
- return tokens
-
- def _relevant(self, token):
- return token not in (Text, Comment.Single, Comment.Multiline)
-
- def _process_declarations(self, tokens):
- opening_paren = False
- for index, token, value in tokens:
- yield index, token, value
- if self._relevant(token):
- if opening_paren and token == Keyword and value in self.DECLARATIONS:
- declaration = value
- for index, token, value in \
- self._process_declaration(declaration, tokens):
- yield index, token, value
- opening_paren = value == '(' and token == Punctuation
-
- def _process_symbols(self, tokens):
- opening_paren = False
- for index, token, value in tokens:
- if opening_paren and token in (Literal, Name.Variable):
- token = self.MAPPINGS.get(value, Name.Function)
- elif token == Literal and value in self.BUILTINS_ANYWHERE:
- token = Name.Builtin
- opening_paren = value == '(' and token == Punctuation
- yield index, token, value
-
- def _process_declaration(self, declaration, tokens):
- for index, token, value in tokens:
- if self._relevant(token):
- break
- yield index, token, value
-
- if declaration == 'datatype':
- prev_was_colon = False
- token = Keyword.Type if token == Literal else token
- yield index, token, value
- for index, token, value in tokens:
- if prev_was_colon and token == Literal:
- token = Keyword.Type
- yield index, token, value
- if self._relevant(token):
- prev_was_colon = token == Literal and value == ':'
- elif declaration == 'package':
- token = Name.Namespace if token == Literal else token
- yield index, token, value
- elif declaration == 'define':
- token = Name.Function if token == Literal else token
- yield index, token, value
- for index, token, value in tokens:
- if self._relevant(token):
- break
- yield index, token, value
- if value == '{' and token == Literal:
- yield index, Punctuation, value
- for index, token, value in self._process_signature(tokens):
- yield index, token, value
- else:
- yield index, token, value
- else:
- token = Name.Function if token == Literal else token
- yield index, token, value
-
+ valid_name = '%s+' % valid_symbol_chars
+ symbol_name = r'[a-z!$%%*+,<=>?/.\'@&#_-]%s*' % valid_symbol_chars
+ variable = r'[A-Z]%s*' % valid_symbol_chars
+
+ tokens = {
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'c#\d{1,3};', String.Escape),
+ (r'~[ARS%]', String.Interpol),
+ (r'(?s).', String),
+ ],
+
+ 'root': [
+ (r'(?s)\\\*.*?\*\\', Comment.Multiline), # \* ... *\
+ (r'\\\\.*', Comment.Single), # \\ ...
+ (r'\s+', Text),
+ (r'_{5,}', Punctuation),
+ (r'={5,}', Punctuation),
+ (r'(;|:=|\||--?>|<--?)', Punctuation),
+ (r'(:-|:|\{|\})', Literal),
+ (r'[+-]*\d*\.\d+(e[+-]?\d+)?', Number.Float),
+ (r'[+-]*\d+', Number.Integer),
+ (r'"', String, 'string'),
+ (variable, Name.Variable),
+ (r'(true|false|<>|\[\])', Keyword.Pseudo),
+ (symbol_name, Literal),
+ (r'(\[|\]|\(|\))', Punctuation),
+ ],
+ }
+
+ def get_tokens_unprocessed(self, text):
+ tokens = RegexLexer.get_tokens_unprocessed(self, text)
+ tokens = self._process_symbols(tokens)
+ tokens = self._process_declarations(tokens)
+ return tokens
+
+ def _relevant(self, token):
+ return token not in (Text, Comment.Single, Comment.Multiline)
+
+ def _process_declarations(self, tokens):
+ opening_paren = False
+ for index, token, value in tokens:
+ yield index, token, value
+ if self._relevant(token):
+ if opening_paren and token == Keyword and value in self.DECLARATIONS:
+ declaration = value
+ for index, token, value in \
+ self._process_declaration(declaration, tokens):
+ yield index, token, value
+ opening_paren = value == '(' and token == Punctuation
+
+ def _process_symbols(self, tokens):
+ opening_paren = False
+ for index, token, value in tokens:
+ if opening_paren and token in (Literal, Name.Variable):
+ token = self.MAPPINGS.get(value, Name.Function)
+ elif token == Literal and value in self.BUILTINS_ANYWHERE:
+ token = Name.Builtin
+ opening_paren = value == '(' and token == Punctuation
+ yield index, token, value
+
+ def _process_declaration(self, declaration, tokens):
+ for index, token, value in tokens:
+ if self._relevant(token):
+ break
+ yield index, token, value
+
+ if declaration == 'datatype':
+ prev_was_colon = False
+ token = Keyword.Type if token == Literal else token
+ yield index, token, value
+ for index, token, value in tokens:
+ if prev_was_colon and token == Literal:
+ token = Keyword.Type
+ yield index, token, value
+ if self._relevant(token):
+ prev_was_colon = token == Literal and value == ':'
+ elif declaration == 'package':
+ token = Name.Namespace if token == Literal else token
+ yield index, token, value
+ elif declaration == 'define':
+ token = Name.Function if token == Literal else token
+ yield index, token, value
+ for index, token, value in tokens:
+ if self._relevant(token):
+ break
+ yield index, token, value
+ if value == '{' and token == Literal:
+ yield index, Punctuation, value
+ for index, token, value in self._process_signature(tokens):
+ yield index, token, value
+ else:
+ yield index, token, value
+ else:
+ token = Name.Function if token == Literal else token
+ yield index, token, value
+
return
-
- def _process_signature(self, tokens):
- for index, token, value in tokens:
- if token == Literal and value == '}':
- yield index, Punctuation, value
+
+ def _process_signature(self, tokens):
+ for index, token, value in tokens:
+ if token == Literal and value == '}':
+ yield index, Punctuation, value
return
- elif token in (Literal, Name.Function):
- token = Name.Variable if value.istitle() else Keyword.Type
- yield index, token, value
-
-
-class CPSALexer(SchemeLexer):
- """
- A CPSA lexer based on the CPSA language as of version 2.2.12
-
- .. versionadded:: 2.1
- """
- name = 'CPSA'
- aliases = ['cpsa']
- filenames = ['*.cpsa']
- mimetypes = []
-
-    # list of known keywords and builtins taken from vim 6.4 scheme.vim
- # syntax file.
- _keywords = (
- 'herald', 'vars', 'defmacro', 'include', 'defprotocol', 'defrole',
- 'defskeleton', 'defstrand', 'deflistener', 'non-orig', 'uniq-orig',
- 'pen-non-orig', 'precedes', 'trace', 'send', 'recv', 'name', 'text',
- 'skey', 'akey', 'data', 'mesg',
- )
- _builtins = (
- 'cat', 'enc', 'hash', 'privk', 'pubk', 'invk', 'ltk', 'gen', 'exp',
- )
-
- # valid names for identifiers
-    # well, a name just can't consist entirely of digits,
- # but this should be good enough for now
+ elif token in (Literal, Name.Function):
+ token = Name.Variable if value.istitle() else Keyword.Type
+ yield index, token, value
+
+
+class CPSALexer(SchemeLexer):
+ """
+ A CPSA lexer based on the CPSA language as of version 2.2.12
+
+ .. versionadded:: 2.1
+ """
+ name = 'CPSA'
+ aliases = ['cpsa']
+ filenames = ['*.cpsa']
+ mimetypes = []
+
+    # list of known keywords and builtins taken from vim 6.4 scheme.vim
+ # syntax file.
+ _keywords = (
+ 'herald', 'vars', 'defmacro', 'include', 'defprotocol', 'defrole',
+ 'defskeleton', 'defstrand', 'deflistener', 'non-orig', 'uniq-orig',
+ 'pen-non-orig', 'precedes', 'trace', 'send', 'recv', 'name', 'text',
+ 'skey', 'akey', 'data', 'mesg',
+ )
+ _builtins = (
+ 'cat', 'enc', 'hash', 'privk', 'pubk', 'invk', 'ltk', 'gen', 'exp',
+ )
+
+ # valid names for identifiers
+    # well, a name just can't consist entirely of digits,
+ # but this should be good enough for now
valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- # support for uncommon kinds of numbers -
- # have to figure out what the characters mean
- # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'\s+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ # support for uncommon kinds of numbers -
+ # have to figure out what the characters mean
+ # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
(r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
-
- # constants
- (r'(#t|#f)', Name.Constant),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- (words(_keywords, suffix=r'\b'), Keyword),
-
-            # first variable in a quoted list like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable),
- (r"(?<=#\()" + valid_name, Name.Variable),
-
- # highlight the builtins
- (words(_builtins, prefix=r'(?<=\()', suffix=r'\b'), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- (r'(\[|\])', Punctuation),
- ],
- }
+
+ # constants
+ (r'(#t|#f)', Name.Constant),
+
+ # special operators
+ (r"('|#|`|,@|,|\.)", Operator),
+
+ # highlight the keywords
+ (words(_keywords, suffix=r'\b'), Keyword),
+
+            # first variable in a quoted list like
+ # '(this is syntactic sugar)
+ (r"(?<='\()" + valid_name, Name.Variable),
+ (r"(?<=#\()" + valid_name, Name.Variable),
+
+ # highlight the builtins
+ (words(_builtins, prefix=r'(?<=\()', suffix=r'\b'), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ (r'(\[|\])', Punctuation),
+ ],
+ }
class XtlangLexer(RegexLexer):
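
A minimal usage sketch for the lexers restored above, assuming a stock Pygments installation; the sample source snippets are invented for illustration, and the 'shen' and 'cpsa' aliases come from the class definitions in this hunk.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # Tiny invented snippets, keyed by lexer alias.
    samples = {
        'shen': '(define double X -> (* 2 X))',
        'cpsa': '(defprotocol example basic (defrole init (vars (a name))))',
    }

    for alias, code in samples.items():
        lexer = get_lexer_by_name(alias)  # resolved through each lexer's `aliases` list
        print(highlight(code, lexer, TerminalFormatter()))
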
diff --git a/contrib/python/Pygments/py2/pygments/lexers/make.py b/contrib/python/Pygments/py2/pygments/lexers/make.py
index f67f1095fb..cc07064f2a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/make.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/make.py
@@ -1,202 +1,202 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.make
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Makefiles and similar.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.make
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Makefiles and similar.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
- do_insertions, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation
-from pygments.lexers.shell import BashLexer
-
-__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']
-
-
-class MakefileLexer(Lexer):
- """
- Lexer for BSD and GNU make extensions (lenient enough to handle both in
- the same file even).
-
- *Rewritten in Pygments 0.10.*
- """
-
- name = 'Makefile'
- aliases = ['make', 'makefile', 'mf', 'bsdmake']
- filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
- mimetypes = ['text/x-makefile']
-
- r_special = re.compile(
- r'^(?:'
- # BSD Make
- r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
- # GNU Make
- r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
- # GNU Automake
- r'\s*(if|else|endif))(?=\s)')
- r_comment = re.compile(r'^\s*@?#')
-
- def get_tokens_unprocessed(self, text):
- ins = []
- lines = text.splitlines(True)
- done = ''
- lex = BaseMakefileLexer(**self.options)
- backslashflag = False
- for line in lines:
- if self.r_special.match(line) or backslashflag:
- ins.append((len(done), [(0, Comment.Preproc, line)]))
- backslashflag = line.strip().endswith('\\')
- elif self.r_comment.match(line):
- ins.append((len(done), [(0, Comment, line)]))
- else:
- done += line
- for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
- yield item
-
- def analyse_text(text):
- # Many makefiles have $(BIG_CAPS) style variables
- if re.search(r'\$\([A-Z_]+\)', text):
- return 0.1
-
-
-class BaseMakefileLexer(RegexLexer):
- """
- Lexer for simple Makefiles (no preprocessing).
-
- .. versionadded:: 0.10
- """
-
- name = 'Base Makefile'
- aliases = ['basemake']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- # recipes (need to allow spaces because of expandtabs)
- (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
- # special variables
- (r'\$[<@$+%?|*]', Keyword),
- (r'\s+', Text),
- (r'#.*?\n', Comment),
- (r'(export)(\s+)(?=[\w${}\t -]+\n)',
- bygroups(Keyword, Text), 'export'),
- (r'export\s+', Keyword),
- # assignment
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
+ do_insertions, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+from pygments.lexers.shell import BashLexer
+
+__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']
+
+
+class MakefileLexer(Lexer):
+ """
+ Lexer for BSD and GNU make extensions (lenient enough to handle both in
+ the same file even).
+
+ *Rewritten in Pygments 0.10.*
+ """
+
+ name = 'Makefile'
+ aliases = ['make', 'makefile', 'mf', 'bsdmake']
+ filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
+ mimetypes = ['text/x-makefile']
+
+ r_special = re.compile(
+ r'^(?:'
+ # BSD Make
+ r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
+ # GNU Make
+ r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
+ # GNU Automake
+ r'\s*(if|else|endif))(?=\s)')
+ r_comment = re.compile(r'^\s*@?#')
+
+ def get_tokens_unprocessed(self, text):
+ ins = []
+ lines = text.splitlines(True)
+ done = ''
+ lex = BaseMakefileLexer(**self.options)
+ backslashflag = False
+ for line in lines:
+ if self.r_special.match(line) or backslashflag:
+ ins.append((len(done), [(0, Comment.Preproc, line)]))
+ backslashflag = line.strip().endswith('\\')
+ elif self.r_comment.match(line):
+ ins.append((len(done), [(0, Comment, line)]))
+ else:
+ done += line
+ for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
+ yield item
+
+ def analyse_text(text):
+ # Many makefiles have $(BIG_CAPS) style variables
+ if re.search(r'\$\([A-Z_]+\)', text):
+ return 0.1
+
+
+class BaseMakefileLexer(RegexLexer):
+ """
+ Lexer for simple Makefiles (no preprocessing).
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Base Makefile'
+ aliases = ['basemake']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ # recipes (need to allow spaces because of expandtabs)
+ (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
+ # special variables
+ (r'\$[<@$+%?|*]', Keyword),
+ (r'\s+', Text),
+ (r'#.*?\n', Comment),
+ (r'(export)(\s+)(?=[\w${}\t -]+\n)',
+ bygroups(Keyword, Text), 'export'),
+ (r'export\s+', Keyword),
+ # assignment
(r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
- bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
- # strings
- (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
- # targets
- (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
- 'block-header'),
- # expansions
- (r'\$\(', Keyword, 'expansion'),
- ],
- 'expansion': [
+ bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
+ # strings
+ (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
+ # targets
+ (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
+ 'block-header'),
+ # expansions
+ (r'\$\(', Keyword, 'expansion'),
+ ],
+ 'expansion': [
(r'[^\w$().-]+', Text),
(r'[\w.-]+', Name.Variable),
- (r'\$', Keyword),
- (r'\(', Keyword, '#push'),
- (r'\)', Keyword, '#pop'),
- ],
- 'export': [
- (r'[\w${}-]+', Name.Variable),
- (r'\n', Text, '#pop'),
- (r'\s+', Text),
- ],
- 'block-header': [
- (r'[,|]', Punctuation),
- (r'#.*?\n', Comment, '#pop'),
- (r'\\\n', Text), # line continuation
- (r'\$\(', Keyword, 'expansion'),
- (r'[a-zA-Z_]+', Name),
- (r'\n', Text, '#pop'),
- (r'.', Text),
- ],
- }
-
-
-class CMakeLexer(RegexLexer):
- """
- Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
-
- .. versionadded:: 1.2
- """
- name = 'CMake'
- aliases = ['cmake']
- filenames = ['*.cmake', 'CMakeLists.txt']
- mimetypes = ['text/x-cmake']
-
- tokens = {
- 'root': [
- # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
- # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
- # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
- # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
- # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
- # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
- # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
- # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
- # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
- # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
- # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
- # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
- # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
- # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
- # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
- # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
- # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
- # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
- # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
- # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
- # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
- # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
- # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
- # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
- # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
- # r'COUNTARGS)\b', Name.Builtin, 'args'),
- (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
- Punctuation), 'args'),
- include('keywords'),
- include('ws')
- ],
- 'args': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
- (r'(\$ENV\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
- (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
- (r'(?s)".*?"', String.Double),
- (r'\\\S+', String),
- (r'[^)$"# \t\n]+', String),
- (r'\n', Text), # explicitly legal
- include('keywords'),
- include('ws')
- ],
- 'string': [
-
- ],
- 'keywords': [
- (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
- r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
- ],
- 'ws': [
- (r'[ \t]+', Text),
- (r'#.*\n', Comment),
- ]
- }
-
- def analyse_text(text):
- exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$'
- if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
- return 0.8
- return 0.0
+ (r'\$', Keyword),
+ (r'\(', Keyword, '#push'),
+ (r'\)', Keyword, '#pop'),
+ ],
+ 'export': [
+ (r'[\w${}-]+', Name.Variable),
+ (r'\n', Text, '#pop'),
+ (r'\s+', Text),
+ ],
+ 'block-header': [
+ (r'[,|]', Punctuation),
+ (r'#.*?\n', Comment, '#pop'),
+ (r'\\\n', Text), # line continuation
+ (r'\$\(', Keyword, 'expansion'),
+ (r'[a-zA-Z_]+', Name),
+ (r'\n', Text, '#pop'),
+ (r'.', Text),
+ ],
+ }
+
+
+class CMakeLexer(RegexLexer):
+ """
+ Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
+
+ .. versionadded:: 1.2
+ """
+ name = 'CMake'
+ aliases = ['cmake']
+ filenames = ['*.cmake', 'CMakeLists.txt']
+ mimetypes = ['text/x-cmake']
+
+ tokens = {
+ 'root': [
+ # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
+ # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
+ # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
+ # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
+ # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
+ # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
+ # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
+ # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
+ # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
+ # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
+ # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
+ # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
+ # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
+ # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
+ # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
+ # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
+ # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
+ # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
+ # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
+ # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
+ # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
+ # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
+ # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
+ # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
+ # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
+ # r'COUNTARGS)\b', Name.Builtin, 'args'),
+ (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
+ Punctuation), 'args'),
+ include('keywords'),
+ include('ws')
+ ],
+ 'args': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
+ (r'(\$ENV\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
+ (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
+ (r'(?s)".*?"', String.Double),
+ (r'\\\S+', String),
+ (r'[^)$"# \t\n]+', String),
+ (r'\n', Text), # explicitly legal
+ include('keywords'),
+ include('ws')
+ ],
+ 'string': [
+
+ ],
+ 'keywords': [
+ (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
+ r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
+ ],
+ 'ws': [
+ (r'[ \t]+', Text),
+ (r'#.*\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
+ exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$'
+ if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
+ return 0.8
+ return 0.0
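
A minimal sketch of how the analyse_text() heuristics above feed lexer guessing, assuming a stock Pygments installation; the sample inputs are invented for illustration.

    from pygments.lexers import guess_lexer

    cmake_text = 'CMAKE_MINIMUM_REQUIRED(VERSION 3.5)\nPROJECT(demo)\n'
    make_text = 'CC = gcc\nall:\n\t$(CC) -o demo main.c\n'

    # CMakeLexer.analyse_text() returns 0.8 when it sees a CMAKE_MINIMUM_REQUIRED
    # line, so the CMake lexer should win for this input.
    print(guess_lexer(cmake_text).name)

    # MakefileLexer.analyse_text() returns 0.1 for $(BIG_CAPS) style variables;
    # that usually, though not necessarily, selects the Makefile lexer here.
    print(guess_lexer(make_text).name)
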
diff --git a/contrib/python/Pygments/py2/pygments/lexers/markup.py b/contrib/python/Pygments/py2/pygments/lexers/markup.py
index ad2491ad1f..f10560d5b2 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/markup.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/markup.py
@@ -1,504 +1,504 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.markup
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for non-HTML markup languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.markup
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for non-HTML markup languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexers.html import HtmlLexer, XmlLexer
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.css import CssLexer
-
-from pygments.lexer import RegexLexer, DelegatingLexer, include, bygroups, \
- using, this, do_insertions, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other
-from pygments.util import get_bool_opt, ClassNotFound
-
-__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
- 'MozPreprocHashLexer', 'MozPreprocPercentLexer',
- 'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexers.html import HtmlLexer, XmlLexer
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.css import CssLexer
+
+from pygments.lexer import RegexLexer, DelegatingLexer, include, bygroups, \
+ using, this, do_insertions, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other
+from pygments.util import get_bool_opt, ClassNotFound
+
+__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
+ 'MozPreprocHashLexer', 'MozPreprocPercentLexer',
+ 'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
'MozPreprocCssLexer', 'MarkdownLexer']
-
-
-class BBCodeLexer(RegexLexer):
- """
- A lexer that highlights BBCode(-like) syntax.
-
- .. versionadded:: 0.6
- """
-
- name = 'BBCode'
- aliases = ['bbcode']
- mimetypes = ['text/x-bbcode']
-
- tokens = {
- 'root': [
- (r'[^[]+', Text),
- # tag/end tag begin
- (r'\[/?\w+', Keyword, 'tag'),
- # stray bracket
- (r'\[', Text),
- ],
- 'tag': [
- (r'\s+', Text),
- # attribute with value
- (r'(\w+)(=)("?[^\s"\]]+"?)',
- bygroups(Name.Attribute, Operator, String)),
- # tag argument (a la [color=green])
- (r'(=)("?[^\s"\]]+"?)',
- bygroups(Operator, String)),
- # tag end
- (r'\]', Keyword, '#pop'),
- ],
- }
-
-
-class MoinWikiLexer(RegexLexer):
- """
- For MoinMoin (and Trac) Wiki markup.
-
- .. versionadded:: 0.7
- """
-
- name = 'MoinMoin/Trac Wiki markup'
- aliases = ['trac-wiki', 'moin']
- filenames = []
- mimetypes = ['text/x-trac-wiki']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
- (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
- # Titles
- (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
- bygroups(Generic.Heading, using(this), Generic.Heading, String)),
- # Literal code blocks, with optional shebang
- (r'(\{\{\{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
- (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
- # Lists
- (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
- (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
- # Other Formatting
- (r'\[\[\w+.*?\]\]', Keyword), # Macro
- (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
- bygroups(Keyword, String, Keyword)), # Link
- (r'^----+$', Keyword), # Horizontal rules
- (r'[^\n\'\[{!_~^,|]+', Text),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'codeblock': [
- (r'\}\}\}', Name.Builtin, '#pop'),
- # these blocks are allowed to be nested in Trac, but not MoinMoin
- (r'\{\{\{', Text, '#push'),
- (r'[^{}]+', Comment.Preproc), # slurp boring text
- (r'.', Comment.Preproc), # allow loose { or }
- ],
- }
-
-
-class RstLexer(RegexLexer):
- """
- For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
-
- .. versionadded:: 0.7
-
- Additional options accepted:
-
- `handlecodeblocks`
- Highlight the contents of ``.. sourcecode:: language``,
- ``.. code:: language`` and ``.. code-block:: language``
- directives with a lexer for the given language (default:
- ``True``).
-
- .. versionadded:: 0.8
- """
- name = 'reStructuredText'
- aliases = ['rst', 'rest', 'restructuredtext']
- filenames = ['*.rst', '*.rest']
- mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
- flags = re.MULTILINE
-
- def _handle_sourcecode(self, match):
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), Punctuation, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator.Word, match.group(3)
- yield match.start(4), Punctuation, match.group(4)
- yield match.start(5), Text, match.group(5)
- yield match.start(6), Keyword, match.group(6)
- yield match.start(7), Text, match.group(7)
-
-        # look up the lexer if requested and available
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group(6).strip())
- except ClassNotFound:
- pass
- indention = match.group(8)
- indention_size = len(indention)
- code = (indention + match.group(9) + match.group(10) + match.group(11))
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(8), String, code
- return
-
- # highlight the lines with the lexer.
- ins = []
- codelines = code.splitlines(True)
- code = ''
- for line in codelines:
- if len(line) > indention_size:
- ins.append((len(code), [(0, Text, line[:indention_size])]))
- code += line[indention_size:]
- else:
- code += line
- for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
- yield item
-
- # from docutils.parsers.rst.states
- closers = u'\'")]}>\u2019\u201d\xbb!?'
- unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
- end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
- % (re.escape(unicode_delimiters),
- re.escape(closers)))
-
- tokens = {
- 'root': [
- # Heading with overline
- (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
- r'(.+)(\n)(\1)(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading,
- Text, Generic.Heading, Text)),
- # Plain heading
- (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
- r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading, Text)),
- # Bulleted lists
- (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered lists
- (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered, but keep words at BOL from becoming lists
- (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Line blocks
- (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
- bygroups(Text, Operator, using(this, state='inline'))),
- # Sourcecode directives
- (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
- r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
- _handle_sourcecode),
- # A directive
- (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
- using(this, state='inline'))),
- # A reference target
- (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A footnote/citation target
- (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A substitution def
- (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
- Punctuation, Text, using(this, state='inline'))),
- # Comments
- (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
+
+
+class BBCodeLexer(RegexLexer):
+ """
+ A lexer that highlights BBCode(-like) syntax.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'BBCode'
+ aliases = ['bbcode']
+ mimetypes = ['text/x-bbcode']
+
+ tokens = {
+ 'root': [
+ (r'[^[]+', Text),
+ # tag/end tag begin
+ (r'\[/?\w+', Keyword, 'tag'),
+ # stray bracket
+ (r'\[', Text),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ # attribute with value
+ (r'(\w+)(=)("?[^\s"\]]+"?)',
+ bygroups(Name.Attribute, Operator, String)),
+ # tag argument (a la [color=green])
+ (r'(=)("?[^\s"\]]+"?)',
+ bygroups(Operator, String)),
+ # tag end
+ (r'\]', Keyword, '#pop'),
+ ],
+ }
+
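A minimal usage sketch of the BBCodeLexer above, assuming a standard Pygments 2.x install; the sample markup is illustrative:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.markup import BBCodeLexer

# '[' enters the 'tag' state; attributes come out as Name.Attribute /
# Operator / String, and ']' pops back to 'root'.
sample = '[quote author="alice"]hello [b]world[/b][/quote]'
print(highlight(sample, BBCodeLexer(), HtmlFormatter()))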
+
+class MoinWikiLexer(RegexLexer):
+ """
+ For MoinMoin (and Trac) Wiki markup.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'MoinMoin/Trac Wiki markup'
+ aliases = ['trac-wiki', 'moin']
+ filenames = []
+ mimetypes = ['text/x-trac-wiki']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'^#.*$', Comment),
+ (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
+ # Titles
+ (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
+ bygroups(Generic.Heading, using(this), Generic.Heading, String)),
+ # Literal code blocks, with optional shebang
+ (r'(\{\{\{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
+ (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
+ # Lists
+ (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
+ (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
+ # Other Formatting
+ (r'\[\[\w+.*?\]\]', Keyword), # Macro
+ (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
+ bygroups(Keyword, String, Keyword)), # Link
+ (r'^----+$', Keyword), # Horizontal rules
+ (r'[^\n\'\[{!_~^,|]+', Text),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'codeblock': [
+ (r'\}\}\}', Name.Builtin, '#pop'),
+ # these blocks are allowed to be nested in Trac, but not MoinMoin
+ (r'\{\{\{', Text, '#push'),
+ (r'[^{}]+', Comment.Preproc), # slurp boring text
+ (r'.', Comment.Preproc), # allow loose { or }
+ ],
+ }
+
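A short sketch of the '#push'/'#pop' stack behaviour in the 'codeblock' state above (assumed input, standard Pygments install):

from pygments.lexers.markup import MoinWikiLexer

# A nested {{{ ... }}} block pushes 'codeblock' again, so the inner '}}}'
# only pops one level -- legal in Trac, as the rule's comment notes.
text = "{{{\nouter {{{ inner }}} still literal\n}}}\n"
for tok, value in MoinWikiLexer().get_tokens(text):
    print(tok, repr(value))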
+
+class RstLexer(RegexLexer):
+ """
+ For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
+
+ .. versionadded:: 0.7
+
+ Additional options accepted:
+
+ `handlecodeblocks`
+ Highlight the contents of ``.. sourcecode:: language``,
+ ``.. code:: language`` and ``.. code-block:: language``
+ directives with a lexer for the given language (default:
+ ``True``).
+
+ .. versionadded:: 0.8
+ """
+ name = 'reStructuredText'
+ aliases = ['rst', 'rest', 'restructuredtext']
+ filenames = ['*.rst', '*.rest']
+ mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
+ flags = re.MULTILINE
+
+ def _handle_sourcecode(self, match):
+ from pygments.lexers import get_lexer_by_name
+
+ # section header
+ yield match.start(1), Punctuation, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator.Word, match.group(3)
+ yield match.start(4), Punctuation, match.group(4)
+ yield match.start(5), Text, match.group(5)
+ yield match.start(6), Keyword, match.group(6)
+ yield match.start(7), Text, match.group(7)
+
+ # lookup lexer if wanted and existing
+ lexer = None
+ if self.handlecodeblocks:
+ try:
+ lexer = get_lexer_by_name(match.group(6).strip())
+ except ClassNotFound:
+ pass
+ indention = match.group(8)
+ indention_size = len(indention)
+ code = (indention + match.group(9) + match.group(10) + match.group(11))
+
+ # no lexer for this language. handle it like it was a code block
+ if lexer is None:
+ yield match.start(8), String, code
+ return
+
+ # highlight the lines with the lexer.
+ ins = []
+ codelines = code.splitlines(True)
+ code = ''
+ for line in codelines:
+ if len(line) > indention_size:
+ ins.append((len(code), [(0, Text, line[:indention_size])]))
+ code += line[indention_size:]
+ else:
+ code += line
+ for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
+ yield item
+
+ # from docutils.parsers.rst.states
+ closers = u'\'")]}>\u2019\u201d\xbb!?'
+ unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
+ end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
+ % (re.escape(unicode_delimiters),
+ re.escape(closers)))
+
+ tokens = {
+ 'root': [
+ # Heading with overline
+ (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
+ r'(.+)(\n)(\1)(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading,
+ Text, Generic.Heading, Text)),
+ # Plain heading
+ (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
+ r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading, Text)),
+ # Bulleted lists
+ (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered lists
+ (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered, but keep words at BOL from becoming lists
+ (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Line blocks
+ (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
+ bygroups(Text, Operator, using(this, state='inline'))),
+ # Sourcecode directives
+ (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
+ r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
+ _handle_sourcecode),
+ # A directive
+ (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
+ using(this, state='inline'))),
+ # A reference target
+ (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A footnote/citation target
+ (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A substitution def
+ (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
+ Punctuation, Text, using(this, state='inline'))),
+ # Comments
+ (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
# Field list marker
(r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
bygroups(Text, Name.Class, Text)),
- # Definition list
- (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
- bygroups(using(this, state='inline'), using(this, state='inline'))),
- # Code blocks
- (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
- bygroups(String.Escape, Text, String, String, Text, String)),
- include('inline'),
- ],
- 'inline': [
- (r'\\.', Text), # escape
- (r'``', String, 'literal'), # code
- (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
- bygroups(String, String.Interpol, String)),
- (r'`.+?`__?', String), # reference
- (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
- bygroups(Name.Variable, Name.Attribute)), # role
- (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
- bygroups(Name.Attribute, Name.Variable)), # role (content first)
- (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
- (r'\*.+?\*', Generic.Emph), # Emphasis
- (r'\[.*?\]_', String), # Footnote or citation
- (r'<.+?>', Name.Tag), # Hyperlink
- (r'[^\\\n\[*`:]+', Text),
- (r'.', Text),
- ],
- 'literal': [
- (r'[^`]+', String),
- (r'``' + end_string_suffix, String, '#pop'),
- (r'`', String),
- ]
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
-
- def analyse_text(text):
- if text[:2] == '..' and text[2:3] != '.':
- return 0.3
- p1 = text.find("\n")
- p2 = text.find("\n", p1 + 1)
- if (p2 > -1 and # has two lines
- p1 * 2 + 1 == p2 and # they are the same length
- text[p1+1] in '-=' and # the next line both starts and ends with
- text[p1+1] == text[p2-1]): # ...a sufficiently high header
- return 0.5
-
-
-class TexLexer(RegexLexer):
- """
- Lexer for the TeX and LaTeX typesetting languages.
- """
-
- name = 'TeX'
- aliases = ['tex', 'latex']
- filenames = ['*.tex', '*.aux', '*.toc']
- mimetypes = ['text/x-tex', 'text/x-latex']
-
- tokens = {
- 'general': [
- (r'%.*?\n', Comment),
- (r'[{}]', Name.Builtin),
- (r'[&_^]', Name.Builtin),
- ],
- 'root': [
- (r'\\\[', String.Backtick, 'displaymath'),
- (r'\\\(', String, 'inlinemath'),
- (r'\$\$', String.Backtick, 'displaymath'),
- (r'\$', String, 'inlinemath'),
- (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
- (r'\\$', Keyword),
- include('general'),
- (r'[^\\$%&_^{}]+', Text),
- ],
- 'math': [
- (r'\\([a-zA-Z]+|.)', Name.Variable),
- include('general'),
- (r'[0-9]+', Number),
- (r'[-=!+*/()\[\]]', Operator),
- (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
- ],
- 'inlinemath': [
- (r'\\\)', String, '#pop'),
- (r'\$', String, '#pop'),
- include('math'),
- ],
- 'displaymath': [
- (r'\\\]', String, '#pop'),
- (r'\$\$', String, '#pop'),
- (r'\$', Name.Builtin),
- include('math'),
- ],
- 'command': [
- (r'\[.*?\]', Name.Attribute),
- (r'\*', Keyword),
- default('#pop'),
- ],
- }
-
- def analyse_text(text):
- for start in ("\\documentclass", "\\input", "\\documentstyle",
- "\\relax"):
- if text[:len(start)] == start:
- return True
-
-
-class GroffLexer(RegexLexer):
- """
- Lexer for the (g)roff typesetting language, supporting groff
- extensions. Mainly useful for highlighting manpage sources.
-
- .. versionadded:: 0.6
- """
-
- name = 'Groff'
- aliases = ['groff', 'nroff', 'man']
- filenames = ['*.[1234567]', '*.man']
- mimetypes = ['application/x-troff', 'text/troff']
-
- tokens = {
- 'root': [
- (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
- (r'\.', Punctuation, 'request'),
- # Regular characters, slurp till we find a backslash or newline
- (r'[^\\\n]+', Text, 'textline'),
- default('textline'),
- ],
- 'textline': [
- include('escapes'),
- (r'[^\\\n]+', Text),
- (r'\n', Text, '#pop'),
- ],
- 'escapes': [
- # groff has many ways to write escapes.
- (r'\\"[^\n]*', Comment),
- (r'\\[fn]\w', String.Escape),
- (r'\\\(.{2}', String.Escape),
- (r'\\.\[.*\]', String.Escape),
- (r'\\.', String.Escape),
- (r'\\\n', Text, 'request'),
- ],
- 'request': [
- (r'\n', Text, '#pop'),
- include('escapes'),
- (r'"[^\n"]+"', String.Double),
- (r'\d+', Number),
- (r'\S+', String),
- (r'\s+', Text),
- ],
- }
-
- def analyse_text(text):
- if text[:1] != '.':
- return False
- if text[:3] == '.\\"':
- return True
- if text[:4] == '.TH ':
- return True
- if text[1:3].isalnum() and text[3].isspace():
- return 0.9
-
-
-class MozPreprocHashLexer(RegexLexer):
- """
- Lexer for Mozilla Preprocessor files (with '#' as the marker).
-
- Other data is left untouched.
-
- .. versionadded:: 2.0
- """
- name = 'mozhashpreproc'
- aliases = [name]
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^#', Comment.Preproc, ('expr', 'exprstart')),
- (r'.+', Other),
- ],
- 'exprstart': [
- (r'(literal)(.*)', bygroups(Comment.Preproc, Text), '#pop:2'),
- (words((
- 'define', 'undef', 'if', 'ifdef', 'ifndef', 'else', 'elif',
- 'elifdef', 'elifndef', 'endif', 'expand', 'filter', 'unfilter',
- 'include', 'includesubst', 'error')),
- Comment.Preproc, '#pop'),
- ],
- 'expr': [
- (words(('!', '!=', '==', '&&', '||')), Operator),
- (r'(defined)(\()', bygroups(Keyword, Punctuation)),
- (r'\)', Punctuation),
- (r'[0-9]+', Number.Decimal),
- (r'__\w+?__', Name.Variable),
- (r'@\w+?@', Name.Class),
- (r'\w+', Name),
- (r'\n', Text, '#pop'),
- (r'\s+', Text),
- (r'\S', Punctuation),
- ],
- }
-
-
-class MozPreprocPercentLexer(MozPreprocHashLexer):
- """
- Lexer for Mozilla Preprocessor files (with '%' as the marker).
-
- Other data is left untouched.
-
- .. versionadded:: 2.0
- """
- name = 'mozpercentpreproc'
- aliases = [name]
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^%', Comment.Preproc, ('expr', 'exprstart')),
- (r'.+', Other),
- ],
- }
-
-
-class MozPreprocXulLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `XmlLexer`.
-
- .. versionadded:: 2.0
- """
- name = "XUL+mozpreproc"
- aliases = ['xul+mozpreproc']
- filenames = ['*.xul.in']
- mimetypes = []
-
- def __init__(self, **options):
- super(MozPreprocXulLexer, self).__init__(
- XmlLexer, MozPreprocHashLexer, **options)
-
-
-class MozPreprocJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `JavascriptLexer`.
-
- .. versionadded:: 2.0
- """
- name = "Javascript+mozpreproc"
- aliases = ['javascript+mozpreproc']
- filenames = ['*.js.in']
- mimetypes = []
-
- def __init__(self, **options):
- super(MozPreprocJavascriptLexer, self).__init__(
- JavascriptLexer, MozPreprocHashLexer, **options)
-
-
-class MozPreprocCssLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `CssLexer`.
-
- .. versionadded:: 2.0
- """
- name = "CSS+mozpreproc"
- aliases = ['css+mozpreproc']
- filenames = ['*.css.in']
- mimetypes = []
-
- def __init__(self, **options):
- super(MozPreprocCssLexer, self).__init__(
- CssLexer, MozPreprocPercentLexer, **options)
-
+ # Definition list
+ (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
+ bygroups(using(this, state='inline'), using(this, state='inline'))),
+ # Code blocks
+ (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
+ bygroups(String.Escape, Text, String, String, Text, String)),
+ include('inline'),
+ ],
+ 'inline': [
+ (r'\\.', Text), # escape
+ (r'``', String, 'literal'), # code
+ (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
+ bygroups(String, String.Interpol, String)),
+ (r'`.+?`__?', String), # reference
+ (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
+ bygroups(Name.Variable, Name.Attribute)), # role
+ (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
+ bygroups(Name.Attribute, Name.Variable)), # role (content first)
+ (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
+ (r'\*.+?\*', Generic.Emph), # Emphasis
+ (r'\[.*?\]_', String), # Footnote or citation
+ (r'<.+?>', Name.Tag), # Hyperlink
+ (r'[^\\\n\[*`:]+', Text),
+ (r'.', Text),
+ ],
+ 'literal': [
+ (r'[^`]+', String),
+ (r'``' + end_string_suffix, String, '#pop'),
+ (r'`', String),
+ ]
+ }
+
+ def __init__(self, **options):
+ self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
+ RegexLexer.__init__(self, **options)
+
+ def analyse_text(text):
+ if text[:2] == '..' and text[2:3] != '.':
+ return 0.3
+ p1 = text.find("\n")
+ p2 = text.find("\n", p1 + 1)
+ if (p2 > -1 and # has two lines
+ p1 * 2 + 1 == p2 and # they are the same length
+ text[p1+1] in '-=' and # the next line both starts and ends with
+ text[p1+1] == text[p2-1]): # ...a sufficiently high header
+ return 0.5
+
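A minimal sketch of the handlecodeblocks option documented above, assuming a standard Pygments 2.x install; the reST snippet is illustrative:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.markup import RstLexer

rst = """\
Example
=======

.. code-block:: python

   print("lexed by the nested Python lexer")
"""

# With the default handlecodeblocks=True, _handle_sourcecode() delegates the
# directive body to the Python lexer via do_insertions(); with False the
# body stays a plain String block.
print(highlight(rst, RstLexer(), TerminalFormatter()))
print(highlight(rst, RstLexer(handlecodeblocks=False), TerminalFormatter()))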
+
+class TexLexer(RegexLexer):
+ """
+ Lexer for the TeX and LaTeX typesetting languages.
+ """
+
+ name = 'TeX'
+ aliases = ['tex', 'latex']
+ filenames = ['*.tex', '*.aux', '*.toc']
+ mimetypes = ['text/x-tex', 'text/x-latex']
+
+ tokens = {
+ 'general': [
+ (r'%.*?\n', Comment),
+ (r'[{}]', Name.Builtin),
+ (r'[&_^]', Name.Builtin),
+ ],
+ 'root': [
+ (r'\\\[', String.Backtick, 'displaymath'),
+ (r'\\\(', String, 'inlinemath'),
+ (r'\$\$', String.Backtick, 'displaymath'),
+ (r'\$', String, 'inlinemath'),
+ (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
+ (r'\\$', Keyword),
+ include('general'),
+ (r'[^\\$%&_^{}]+', Text),
+ ],
+ 'math': [
+ (r'\\([a-zA-Z]+|.)', Name.Variable),
+ include('general'),
+ (r'[0-9]+', Number),
+ (r'[-=!+*/()\[\]]', Operator),
+ (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
+ ],
+ 'inlinemath': [
+ (r'\\\)', String, '#pop'),
+ (r'\$', String, '#pop'),
+ include('math'),
+ ],
+ 'displaymath': [
+ (r'\\\]', String, '#pop'),
+ (r'\$\$', String, '#pop'),
+ (r'\$', Name.Builtin),
+ include('math'),
+ ],
+ 'command': [
+ (r'\[.*?\]', Name.Attribute),
+ (r'\*', Keyword),
+ default('#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ for start in ("\\documentclass", "\\input", "\\documentstyle",
+ "\\relax"):
+ if text[:len(start)] == start:
+ return True
+
+
+class GroffLexer(RegexLexer):
+ """
+ Lexer for the (g)roff typesetting language, supporting groff
+ extensions. Mainly useful for highlighting manpage sources.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'Groff'
+ aliases = ['groff', 'nroff', 'man']
+ filenames = ['*.[1234567]', '*.man']
+ mimetypes = ['application/x-troff', 'text/troff']
+
+ tokens = {
+ 'root': [
+ (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
+ (r'\.', Punctuation, 'request'),
+ # Regular characters, slurp till we find a backslash or newline
+ (r'[^\\\n]+', Text, 'textline'),
+ default('textline'),
+ ],
+ 'textline': [
+ include('escapes'),
+ (r'[^\\\n]+', Text),
+ (r'\n', Text, '#pop'),
+ ],
+ 'escapes': [
+ # groff has many ways to write escapes.
+ (r'\\"[^\n]*', Comment),
+ (r'\\[fn]\w', String.Escape),
+ (r'\\\(.{2}', String.Escape),
+ (r'\\.\[.*\]', String.Escape),
+ (r'\\.', String.Escape),
+ (r'\\\n', Text, 'request'),
+ ],
+ 'request': [
+ (r'\n', Text, '#pop'),
+ include('escapes'),
+ (r'"[^\n"]+"', String.Double),
+ (r'\d+', Number),
+ (r'\S+', String),
+ (r'\s+', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ if text[:1] != '.':
+ return False
+ if text[:3] == '.\\"':
+ return True
+ if text[:4] == '.TH ':
+ return True
+ if text[1:3].isalnum() and text[3].isspace():
+ return 0.9
+
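A small sketch of how the analyse_text() score above feeds lexer guessing (standard Pygments install; the manpage fragment is made up):

from pygments.lexers import guess_lexer

# A leading '.TH ' line makes GroffLexer.analyse_text() return True, which
# is a strong signal for guess_lexer().
print(guess_lexer('.TH LS 1 "GNU coreutils"\n.SH NAME\nls \\- list directory contents\n'))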
+
+class MozPreprocHashLexer(RegexLexer):
+ """
+ Lexer for Mozilla Preprocessor files (with '#' as the marker).
+
+ Other data is left untouched.
+
+ .. versionadded:: 2.0
+ """
+ name = 'mozhashpreproc'
+ aliases = [name]
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^#', Comment.Preproc, ('expr', 'exprstart')),
+ (r'.+', Other),
+ ],
+ 'exprstart': [
+ (r'(literal)(.*)', bygroups(Comment.Preproc, Text), '#pop:2'),
+ (words((
+ 'define', 'undef', 'if', 'ifdef', 'ifndef', 'else', 'elif',
+ 'elifdef', 'elifndef', 'endif', 'expand', 'filter', 'unfilter',
+ 'include', 'includesubst', 'error')),
+ Comment.Preproc, '#pop'),
+ ],
+ 'expr': [
+ (words(('!', '!=', '==', '&&', '||')), Operator),
+ (r'(defined)(\()', bygroups(Keyword, Punctuation)),
+ (r'\)', Punctuation),
+ (r'[0-9]+', Number.Decimal),
+ (r'__\w+?__', Name.Variable),
+ (r'@\w+?@', Name.Class),
+ (r'\w+', Name),
+ (r'\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'\S', Punctuation),
+ ],
+ }
+
+
+class MozPreprocPercentLexer(MozPreprocHashLexer):
+ """
+ Lexer for Mozilla Preprocessor files (with '%' as the marker).
+
+ Other data is left untouched.
+
+ .. versionadded:: 2.0
+ """
+ name = 'mozpercentpreproc'
+ aliases = [name]
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^%', Comment.Preproc, ('expr', 'exprstart')),
+ (r'.+', Other),
+ ],
+ }
+
+
+class MozPreprocXulLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `XmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "XUL+mozpreproc"
+ aliases = ['xul+mozpreproc']
+ filenames = ['*.xul.in']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(MozPreprocXulLexer, self).__init__(
+ XmlLexer, MozPreprocHashLexer, **options)
+
+
+class MozPreprocJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `JavascriptLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "Javascript+mozpreproc"
+ aliases = ['javascript+mozpreproc']
+ filenames = ['*.js.in']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(MozPreprocJavascriptLexer, self).__init__(
+ JavascriptLexer, MozPreprocHashLexer, **options)
+
+
+class MozPreprocCssLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `CssLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "CSS+mozpreproc"
+ aliases = ['css+mozpreproc']
+ filenames = ['*.css.in']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(MozPreprocCssLexer, self).__init__(
+ CssLexer, MozPreprocPercentLexer, **options)
+
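A minimal sketch of how these DelegatingLexer subclasses are meant to be used (standard Pygments install; the preprocessor snippet is illustrative):

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.markup import MozPreprocXulLexer

# The input is scanned with MozPreprocHashLexer first; whatever it leaves as
# Other (the non-directive lines) is then re-lexed with XmlLexer.
src = '#ifdef MOZ_FOO\n<box id="foo"/>\n#endif\n'
print(highlight(src, MozPreprocXulLexer(), HtmlFormatter()))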
class MarkdownLexer(RegexLexer):
"""
diff --git a/contrib/python/Pygments/py2/pygments/lexers/math.py b/contrib/python/Pygments/py2/pygments/lexers/math.py
index 731150823e..07c1c863ce 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/math.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/math.py
@@ -1,21 +1,21 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.math
- ~~~~~~~~~~~~~~~~~~~~
-
- Just export lexers that were contained in this module.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.math
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexers that were contained in this module.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.python import NumPyLexer
-from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
- OctaveLexer, ScilabLexer
-from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
-from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
-from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
-from pygments.lexers.idl import IDLLexer
-from pygments.lexers.algebra import MuPADLexer
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.python import NumPyLexer
+from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
+ OctaveLexer, ScilabLexer
+from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
+from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
+from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
+from pygments.lexers.idl import IDLLexer
+from pygments.lexers.algebra import MuPADLexer
+
+__all__ = []
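A small check of what the re-export shim above provides (standard Pygments install):

# Named imports still resolve through the old module path, while the empty
# __all__ keeps 'from pygments.lexers.math import *' from importing anything.
from pygments.lexers.math import MatlabLexer
from pygments.lexers.matlab import MatlabLexer as CanonicalMatlabLexer

assert MatlabLexer is CanonicalMatlabLexer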
diff --git a/contrib/python/Pygments/py2/pygments/lexers/matlab.py b/contrib/python/Pygments/py2/pygments/lexers/matlab.py
index 64dd3a3c2d..82382ca180 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/matlab.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/matlab.py
@@ -1,89 +1,89 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.matlab
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Matlab and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.matlab
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Matlab and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-from pygments.lexers import _scilab_builtins
-
-__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
-
-
-class MatlabLexer(RegexLexer):
- """
- For Matlab source code.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab'
- aliases = ['matlab']
- filenames = ['*.m']
- mimetypes = ['text/matlab']
-
- #
- # These lists are generated automatically.
- # Run the following in bash shell:
- #
- # for f in elfun specfun elmat; do
- # echo -n "$f = "
- # matlab -nojvm -r "help $f;exit;" | perl -ne \
- # 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
- # done
- #
- # elfun: Elementary math functions
- # specfun: Special Math functions
- # elmat: Elementary matrices and matrix manipulation
- #
- # taken from Matlab version 7.4.0.336 (R2007a)
- #
- elfun = ("sin", "sind", "sinh", "asin", "asind", "asinh", "cos", "cosd", "cosh",
- "acos", "acosd", "acosh", "tan", "tand", "tanh", "atan", "atand", "atan2",
- "atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd",
- "csch", "acsc", "acscd", "acsch", "cot", "cotd", "coth", "acot", "acotd",
- "acoth", "hypot", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2",
- "realpow", "reallog", "realsqrt", "sqrt", "nthroot", "nextpow2", "abs",
- "angle", "complex", "conj", "imag", "real", "unwrap", "isreal", "cplxpair",
- "fix", "floor", "ceil", "round", "mod", "rem", "sign")
- specfun = ("airy", "besselj", "bessely", "besselh", "besseli", "besselk", "beta",
- "betainc", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx",
- "erfinv", "expint", "gamma", "gammainc", "gammaln", "psi", "legendre",
- "cross", "dot", "factor", "isprime", "primes", "gcd", "lcm", "rat",
- "rats", "perms", "nchoosek", "factorial", "cart2sph", "cart2pol",
- "pol2cart", "sph2cart", "hsv2rgb", "rgb2hsv")
- elmat = ("zeros", "ones", "eye", "repmat", "rand", "randn", "linspace", "logspace",
- "freqspace", "meshgrid", "accumarray", "size", "length", "ndims", "numel",
- "disp", "isempty", "isequal", "isequalwithequalnans", "cat", "reshape",
- "diag", "blkdiag", "tril", "triu", "fliplr", "flipud", "flipdim", "rot90",
- "find", "end", "sub2ind", "ind2sub", "bsxfun", "ndgrid", "permute",
- "ipermute", "shiftdim", "circshift", "squeeze", "isscalar", "isvector",
- "ans", "eps", "realmax", "realmin", "pi", "i", "inf", "nan", "isnan",
- "isinf", "isfinite", "j", "why", "compan", "gallery", "hadamard", "hankel",
- "hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
- "wilkinson")
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+from pygments.lexers import _scilab_builtins
+
+__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
+
+
+class MatlabLexer(RegexLexer):
+ """
+ For Matlab source code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab'
+ aliases = ['matlab']
+ filenames = ['*.m']
+ mimetypes = ['text/matlab']
+
+ #
+ # These lists are generated automatically.
+ # Run the following in bash shell:
+ #
+ # for f in elfun specfun elmat; do
+ # echo -n "$f = "
+ # matlab -nojvm -r "help $f;exit;" | perl -ne \
+ # 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
+ # done
+ #
+ # elfun: Elementary math functions
+ # specfun: Special Math functions
+ # elmat: Elementary matrices and matrix manipulation
+ #
+ # taken from Matlab version 7.4.0.336 (R2007a)
+ #
+ elfun = ("sin", "sind", "sinh", "asin", "asind", "asinh", "cos", "cosd", "cosh",
+ "acos", "acosd", "acosh", "tan", "tand", "tanh", "atan", "atand", "atan2",
+ "atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd",
+ "csch", "acsc", "acscd", "acsch", "cot", "cotd", "coth", "acot", "acotd",
+ "acoth", "hypot", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2",
+ "realpow", "reallog", "realsqrt", "sqrt", "nthroot", "nextpow2", "abs",
+ "angle", "complex", "conj", "imag", "real", "unwrap", "isreal", "cplxpair",
+ "fix", "floor", "ceil", "round", "mod", "rem", "sign")
+ specfun = ("airy", "besselj", "bessely", "besselh", "besseli", "besselk", "beta",
+ "betainc", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx",
+ "erfinv", "expint", "gamma", "gammainc", "gammaln", "psi", "legendre",
+ "cross", "dot", "factor", "isprime", "primes", "gcd", "lcm", "rat",
+ "rats", "perms", "nchoosek", "factorial", "cart2sph", "cart2pol",
+ "pol2cart", "sph2cart", "hsv2rgb", "rgb2hsv")
+ elmat = ("zeros", "ones", "eye", "repmat", "rand", "randn", "linspace", "logspace",
+ "freqspace", "meshgrid", "accumarray", "size", "length", "ndims", "numel",
+ "disp", "isempty", "isequal", "isequalwithequalnans", "cat", "reshape",
+ "diag", "blkdiag", "tril", "triu", "fliplr", "flipud", "flipdim", "rot90",
+ "find", "end", "sub2ind", "ind2sub", "bsxfun", "ndgrid", "permute",
+ "ipermute", "shiftdim", "circshift", "squeeze", "isscalar", "isvector",
+ "ans", "eps", "realmax", "realmin", "pi", "i", "inf", "nan", "isnan",
+ "isinf", "isfinite", "j", "why", "compan", "gallery", "hadamard", "hankel",
+ "hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
+ "wilkinson")
+
_operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\'
- tokens = {
- 'root': [
- # line starting with '!' is sent as a system command. not sure what
- # label to use...
- (r'^!.*', String.Other),
- (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
- (r'%.*$', Comment),
+ tokens = {
+ 'root': [
+ # line starting with '!' is sent as a system command. not sure what
+ # label to use...
+ (r'^!.*', String.Other),
+ (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
+ (r'%.*$', Comment),
(r'^\s*function\b', Keyword, 'deffunc'),
-
- # from 'iskeyword' on version 7.11 (R2010):
+
+ # from 'iskeyword' on version 7.11 (R2010):
# Check that there is no preceding dot, as keywords are valid field
# names.
(words(('break', 'case', 'catch', 'classdef', 'continue', 'else',
@@ -92,13 +92,13 @@ class MatlabLexer(RegexLexer):
'persistent', 'properties', 'return', 'spmd', 'switch',
'try', 'while'),
prefix=r'(?<!\.)', suffix=r'\b'),
- Keyword),
-
- ("(" + "|".join(elfun + specfun + elmat) + r')\b', Name.Builtin),
-
- # line continuation with following comment:
- (r'\.\.\..*$', Comment),
-
+ Keyword),
+
+ ("(" + "|".join(elfun + specfun + elmat) + r')\b', Name.Builtin),
+
+ # line continuation with following comment:
+ (r'\.\.\..*$', Comment),
+
# command form:
# "How MATLAB Recognizes Command Syntax" specifies that an operator
# is recognized if it is either surrounded by spaces or by no
@@ -107,42 +107,42 @@ class MatlabLexer(RegexLexer):
(r'(?:^|(?<=;))\s*\w+\s+(?!=|\(|(%s)\s+)' % _operators, Name,
'commandargs'),
- # operators:
+ # operators:
(_operators, Operator),
-
+
# numbers (must come before punctuation to handle `.5`; cannot use
# `\b` due to e.g. `5. + .5`).
(r'(?<!\w)((\d+\.\d*)|(\d*\.\d+))([eEf][+-]?\d+)?(?!\w)', Number.Float),
(r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
(r'\b\d+\b', Number.Integer),
- # punctuation:
- (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
- (r'=|:|;', Punctuation),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
-
+ # punctuation:
+ (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+
(r'"(""|[^"])*"', String),
-
- (r'(?<![\w)\].])\'', String, 'string'),
- (r'[a-zA-Z_]\w*', Name),
- (r'.', Text),
- ],
- 'blockcomment': [
- (r'^\s*%\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
- (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
- ],
+
+ (r'(?<![\w)\].])\'', String, 'string'),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'blockcomment': [
+ (r'^\s*%\}', Comment.Multiline, '#pop'),
+ (r'^.*\n', Comment.Multiline),
+ (r'.', Comment.Multiline),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
'string': [
(r"[^']*'", String, '#pop'),
],
@@ -151,9 +151,9 @@ class MatlabLexer(RegexLexer):
("[^';\n]+", String),
(";?\n?", Punctuation, '#pop'),
]
- }
-
- def analyse_text(text):
+ }
+
+ def analyse_text(text):
# function declaration.
first_non_comment = next((line for line in text.splitlines()
if not re.match(r'^\s*%', text)), '').strip()
@@ -162,530 +162,530 @@ class MatlabLexer(RegexLexer):
return 1.
# comment
elif re.match(r'^\s*%', text, re.M):
- return 0.2
+ return 0.2
# system cmd
elif re.match(r'^!\w+', text, re.M):
- return 0.2
-
-
-line_re = re.compile('.*?\n')
-
-
-class MatlabSessionLexer(Lexer):
- """
- For Matlab sessions. Modeled after PythonConsoleLexer.
- Contributed by Ken Schutte <kschutte@csail.mit.edu>.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab session'
- aliases = ['matlabsession']
-
- def get_tokens_unprocessed(self, text):
- mlexer = MatlabLexer(**self.options)
-
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
-
- if line.startswith('>> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:3])]))
- curcode += line[3:]
-
- elif line.startswith('>>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:2])]))
- curcode += line[2:]
-
- elif line.startswith('???'):
-
- idx = len(curcode)
-
- # without is showing error on same line as before...?
- # line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
-
- else:
- if curcode:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
-
- yield match.start(), Generic.Output, line
-
- if curcode: # or item:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class OctaveLexer(RegexLexer):
- """
- For GNU Octave source code.
-
- .. versionadded:: 1.5
- """
- name = 'Octave'
- aliases = ['octave']
- filenames = ['*.m']
- mimetypes = ['text/octave']
-
- # These lists are generated automatically.
- # Run the following in bash shell:
- #
- # First dump all of the Octave manual into a plain text file:
- #
- # $ info octave --subnodes -o octave-manual
- #
- # Now grep through it:
-
- # for i in \
- # "Built-in Function" "Command" "Function File" \
- # "Loadable Function" "Mapping Function";
- # do
- # perl -e '@name = qw('"$i"');
- # print lc($name[0]),"_kw = [\n"';
- #
- # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
- # octave-manual | sort | uniq ;
- # echo "]" ;
- # echo;
- # done
-
- # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
-
- builtin_kw = (
- "addlistener", "addpath", "addproperty", "all",
- "and", "any", "argnames", "argv", "assignin",
- "atexit", "autoload",
- "available_graphics_toolkits", "beep_on_error",
- "bitand", "bitmax", "bitor", "bitshift", "bitxor",
- "cat", "cell", "cellstr", "char", "class", "clc",
- "columns", "command_line_path",
- "completion_append_char", "completion_matches",
- "complex", "confirm_recursive_rmdir", "cputime",
- "crash_dumps_octave_core", "ctranspose", "cumprod",
- "cumsum", "debug_on_error", "debug_on_interrupt",
- "debug_on_warning", "default_save_options",
- "dellistener", "diag", "diff", "disp",
- "doc_cache_file", "do_string_escapes", "double",
- "drawnow", "e", "echo_executing_commands", "eps",
- "eq", "errno", "errno_list", "error", "eval",
- "evalin", "exec", "exist", "exit", "eye", "false",
- "fclear", "fclose", "fcntl", "fdisp", "feof",
- "ferror", "feval", "fflush", "fgetl", "fgets",
- "fieldnames", "file_in_loadpath", "file_in_path",
- "filemarker", "filesep", "find_dir_in_path",
- "fixed_point_format", "fnmatch", "fopen", "fork",
- "formula", "fprintf", "fputs", "fread", "freport",
- "frewind", "fscanf", "fseek", "fskipl", "ftell",
- "functions", "fwrite", "ge", "genpath", "get",
- "getegid", "getenv", "geteuid", "getgid",
- "getpgrp", "getpid", "getppid", "getuid", "glob",
- "gt", "gui_mode", "history_control",
- "history_file", "history_size",
- "history_timestamp_format_string", "home",
- "horzcat", "hypot", "ifelse",
- "ignore_function_time_stamp", "inferiorto",
- "info_file", "info_program", "inline", "input",
- "intmax", "intmin", "ipermute",
- "is_absolute_filename", "isargout", "isbool",
- "iscell", "iscellstr", "ischar", "iscomplex",
- "isempty", "isfield", "isfloat", "isglobal",
- "ishandle", "isieee", "isindex", "isinteger",
- "islogical", "ismatrix", "ismethod", "isnull",
- "isnumeric", "isobject", "isreal",
- "is_rooted_relative_filename", "issorted",
- "isstruct", "isvarname", "kbhit", "keyboard",
- "kill", "lasterr", "lasterror", "lastwarn",
- "ldivide", "le", "length", "link", "linspace",
- "logical", "lstat", "lt", "make_absolute_filename",
- "makeinfo_program", "max_recursion_depth", "merge",
- "methods", "mfilename", "minus", "mislocked",
- "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
- "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
- "munlock", "nargin", "nargout",
- "native_float_format", "ndims", "ne", "nfields",
- "nnz", "norm", "not", "numel", "nzmax",
- "octave_config_info", "octave_core_file_limit",
- "octave_core_file_name",
- "octave_core_file_options", "ones", "or",
- "output_max_field_width", "output_precision",
- "page_output_immediately", "page_screen_output",
- "path", "pathsep", "pause", "pclose", "permute",
- "pi", "pipe", "plus", "popen", "power",
- "print_empty_dimensions", "printf",
- "print_struct_array_contents", "prod",
- "program_invocation_name", "program_name",
- "putenv", "puts", "pwd", "quit", "rats", "rdivide",
- "readdir", "readlink", "read_readline_init_file",
- "realmax", "realmin", "rehash", "rename",
- "repelems", "re_read_readline_init_file", "reset",
- "reshape", "resize", "restoredefaultpath",
- "rethrow", "rmdir", "rmfield", "rmpath", "rows",
- "save_header_format_string", "save_precision",
- "saving_history", "scanf", "set", "setenv",
- "shell_cmd", "sighup_dumps_octave_core",
- "sigterm_dumps_octave_core", "silent_functions",
- "single", "size", "size_equal", "sizemax",
- "sizeof", "sleep", "source", "sparse_auto_mutate",
- "split_long_rows", "sprintf", "squeeze", "sscanf",
- "stat", "stderr", "stdin", "stdout", "strcmp",
- "strcmpi", "string_fill_char", "strncmp",
- "strncmpi", "struct", "struct_levels_to_print",
- "strvcat", "subsasgn", "subsref", "sum", "sumsq",
- "superiorto", "suppress_verbose_help_message",
- "symlink", "system", "tic", "tilde_expand",
- "times", "tmpfile", "tmpnam", "toc", "toupper",
- "transpose", "true", "typeinfo", "umask", "uminus",
- "uname", "undo_string_escapes", "unlink", "uplus",
- "upper", "usage", "usleep", "vec", "vectorize",
- "vertcat", "waitpid", "warning", "warranty",
- "whos_line_format", "yes_or_no", "zeros",
- "inf", "Inf", "nan", "NaN")
-
- command_kw = ("close", "load", "who", "whos")
-
- function_kw = (
- "accumarray", "accumdim", "acosd", "acotd",
- "acscd", "addtodate", "allchild", "ancestor",
- "anova", "arch_fit", "arch_rnd", "arch_test",
- "area", "arma_rnd", "arrayfun", "ascii", "asctime",
- "asecd", "asind", "assert", "atand",
- "autoreg_matrix", "autumn", "axes", "axis", "bar",
- "barh", "bartlett", "bartlett_test", "beep",
- "betacdf", "betainv", "betapdf", "betarnd",
- "bicgstab", "bicubic", "binary", "binocdf",
- "binoinv", "binopdf", "binornd", "bitcmp",
- "bitget", "bitset", "blackman", "blanks",
- "blkdiag", "bone", "box", "brighten", "calendar",
- "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
- "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
- "chisquare_test_homogeneity",
- "chisquare_test_independence", "circshift", "cla",
- "clabel", "clf", "clock", "cloglog", "closereq",
- "colon", "colorbar", "colormap", "colperm",
- "comet", "common_size", "commutation_matrix",
- "compan", "compare_versions", "compass",
- "computer", "cond", "condest", "contour",
- "contourc", "contourf", "contrast", "conv",
- "convhull", "cool", "copper", "copyfile", "cor",
- "corrcoef", "cor_test", "cosd", "cotd", "cov",
- "cplxpair", "cross", "cscd", "cstrcat", "csvread",
- "csvwrite", "ctime", "cumtrapz", "curl", "cut",
- "cylinder", "date", "datenum", "datestr",
- "datetick", "datevec", "dblquad", "deal",
- "deblank", "deconv", "delaunay", "delaunayn",
- "delete", "demo", "detrend", "diffpara", "diffuse",
- "dir", "discrete_cdf", "discrete_inv",
- "discrete_pdf", "discrete_rnd", "display",
- "divergence", "dlmwrite", "dos", "dsearch",
- "dsearchn", "duplication_matrix", "durbinlevinson",
- "ellipsoid", "empirical_cdf", "empirical_inv",
- "empirical_pdf", "empirical_rnd", "eomday",
- "errorbar", "etime", "etreeplot", "example",
- "expcdf", "expinv", "expm", "exppdf", "exprnd",
- "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
- "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
- "factorial", "fail", "fcdf", "feather", "fftconv",
- "fftfilt", "fftshift", "figure", "fileattrib",
- "fileparts", "fill", "findall", "findobj",
- "findstr", "finv", "flag", "flipdim", "fliplr",
- "flipud", "fpdf", "fplot", "fractdiff", "freqz",
- "freqz_plot", "frnd", "fsolve",
- "f_test_regression", "ftp", "fullfile", "fzero",
- "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
- "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
- "geoinv", "geopdf", "geornd", "getfield", "ginput",
- "glpk", "gls", "gplot", "gradient",
- "graphics_toolkit", "gray", "grid", "griddata",
- "griddatan", "gtext", "gunzip", "gzip", "hadamard",
- "hamming", "hankel", "hanning", "hggroup",
- "hidden", "hilb", "hist", "histc", "hold", "hot",
- "hotelling_test", "housh", "hsv", "hurst",
- "hygecdf", "hygeinv", "hygepdf", "hygernd",
- "idivide", "ifftshift", "image", "imagesc",
- "imfinfo", "imread", "imshow", "imwrite", "index",
- "info", "inpolygon", "inputname", "interpft",
- "interpn", "intersect", "invhilb", "iqr", "isa",
- "isdefinite", "isdir", "is_duplicate_entry",
- "isequal", "isequalwithequalnans", "isfigure",
- "ishermitian", "ishghandle", "is_leap_year",
- "isletter", "ismac", "ismember", "ispc", "isprime",
- "isprop", "isscalar", "issquare", "isstrprop",
- "issymmetric", "isunix", "is_valid_file_id",
- "isvector", "jet", "kendall",
- "kolmogorov_smirnov_cdf",
- "kolmogorov_smirnov_test", "kruskal_wallis_test",
- "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
- "laplace_pdf", "laplace_rnd", "legend", "legendre",
- "license", "line", "linkprop", "list_primes",
- "loadaudio", "loadobj", "logistic_cdf",
- "logistic_inv", "logistic_pdf", "logistic_rnd",
- "logit", "loglog", "loglogerr", "logm", "logncdf",
- "logninv", "lognpdf", "lognrnd", "logspace",
- "lookfor", "ls_command", "lsqnonneg", "magic",
- "mahalanobis", "manova", "matlabroot",
- "mcnemar_test", "mean", "meansq", "median", "menu",
- "mesh", "meshc", "meshgrid", "meshz", "mexext",
- "mget", "mkpp", "mode", "moment", "movefile",
- "mpoles", "mput", "namelengthmax", "nargchk",
- "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
- "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
- "nonzeros", "normcdf", "normest", "norminv",
- "normpdf", "normrnd", "now", "nthroot", "null",
- "ocean", "ols", "onenormest", "optimget",
- "optimset", "orderfields", "orient", "orth",
- "pack", "pareto", "parseparams", "pascal", "patch",
- "pathdef", "pcg", "pchip", "pcolor", "pcr",
- "peaks", "periodogram", "perl", "perms", "pie",
- "pink", "planerot", "playaudio", "plot",
- "plotmatrix", "plotyy", "poisscdf", "poissinv",
- "poisspdf", "poissrnd", "polar", "poly",
- "polyaffine", "polyarea", "polyderiv", "polyfit",
- "polygcd", "polyint", "polyout", "polyreduce",
- "polyval", "polyvalm", "postpad", "powerset",
- "ppder", "ppint", "ppjumps", "ppplot", "ppval",
- "pqpnonneg", "prepad", "primes", "print",
- "print_usage", "prism", "probit", "qp", "qqplot",
- "quadcc", "quadgk", "quadl", "quadv", "quiver",
- "qzhess", "rainbow", "randi", "range", "rank",
- "ranks", "rat", "reallog", "realpow", "realsqrt",
- "record", "rectangle_lw", "rectangle_sw",
- "rectint", "refresh", "refreshdata",
- "regexptranslate", "repmat", "residue", "ribbon",
- "rindex", "roots", "rose", "rosser", "rotdim",
- "rref", "run", "run_count", "rundemos", "run_test",
- "runtests", "saveas", "saveaudio", "saveobj",
- "savepath", "scatter", "secd", "semilogx",
- "semilogxerr", "semilogy", "semilogyerr",
- "setaudio", "setdiff", "setfield", "setxor",
- "shading", "shift", "shiftdim", "sign_test",
- "sinc", "sind", "sinetone", "sinewave", "skewness",
- "slice", "sombrero", "sortrows", "spaugment",
- "spconvert", "spdiags", "spearman", "spectral_adf",
- "spectral_xdf", "specular", "speed", "spencer",
- "speye", "spfun", "sphere", "spinmap", "spline",
- "spones", "sprand", "sprandn", "sprandsym",
- "spring", "spstats", "spy", "sqp", "stairs",
- "statistics", "std", "stdnormal_cdf",
- "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
- "stem", "stft", "strcat", "strchr", "strjust",
- "strmatch", "strread", "strsplit", "strtok",
- "strtrim", "strtrunc", "structfun", "studentize",
- "subplot", "subsindex", "subspace", "substr",
- "substruct", "summer", "surf", "surface", "surfc",
- "surfl", "surfnorm", "svds", "swapbytes",
- "sylvester_matrix", "symvar", "synthesis", "table",
- "tand", "tar", "tcdf", "tempdir", "tempname",
- "test", "text", "textread", "textscan", "tinv",
- "title", "toeplitz", "tpdf", "trace", "trapz",
- "treelayout", "treeplot", "triangle_lw",
- "triangle_sw", "tril", "trimesh", "triplequad",
- "triplot", "trisurf", "triu", "trnd", "tsearchn",
- "t_test", "t_test_regression", "type", "unidcdf",
- "unidinv", "unidpdf", "unidrnd", "unifcdf",
- "unifinv", "unifpdf", "unifrnd", "union", "unique",
- "unix", "unmkpp", "unpack", "untabify", "untar",
- "unwrap", "unzip", "u_test", "validatestring",
- "vander", "var", "var_test", "vech", "ver",
- "version", "view", "voronoi", "voronoin",
- "waitforbuttonpress", "wavread", "wavwrite",
- "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
- "welch_test", "what", "white", "whitebg",
- "wienrnd", "wilcoxon_test", "wilkinson", "winter",
- "xlabel", "xlim", "ylabel", "yulewalker", "zip",
- "zlabel", "z_test")
-
- loadable_kw = (
- "airy", "amd", "balance", "besselh", "besseli",
- "besselj", "besselk", "bessely", "bitpack",
- "bsxfun", "builtin", "ccolamd", "cellfun",
- "cellslices", "chol", "choldelete", "cholinsert",
- "cholinv", "cholshift", "cholupdate", "colamd",
- "colloc", "convhulln", "convn", "csymamd",
- "cummax", "cummin", "daspk", "daspk_options",
- "dasrt", "dasrt_options", "dassl", "dassl_options",
- "dbclear", "dbdown", "dbstack", "dbstatus",
- "dbstop", "dbtype", "dbup", "dbwhere", "det",
- "dlmread", "dmperm", "dot", "eig", "eigs",
- "endgrent", "endpwent", "etree", "fft", "fftn",
- "fftw", "filter", "find", "full", "gcd",
- "getgrent", "getgrgid", "getgrnam", "getpwent",
- "getpwnam", "getpwuid", "getrusage", "givens",
- "gmtime", "gnuplot_binary", "hess", "ifft",
- "ifftn", "inv", "isdebugmode", "issparse", "kron",
- "localtime", "lookup", "lsode", "lsode_options",
- "lu", "luinc", "luupdate", "matrix_type", "max",
- "min", "mktime", "pinv", "qr", "qrdelete",
- "qrinsert", "qrshift", "qrupdate", "quad",
- "quad_options", "qz", "rand", "rande", "randg",
- "randn", "randp", "randperm", "rcond", "regexp",
- "regexpi", "regexprep", "schur", "setgrent",
- "setpwent", "sort", "spalloc", "sparse", "spparms",
- "sprank", "sqrtm", "strfind", "strftime",
- "strptime", "strrep", "svd", "svd_driver", "syl",
- "symamd", "symbfact", "symrcm", "time", "tsearch",
- "typecast", "urlread", "urlwrite")
-
- mapping_kw = (
- "abs", "acos", "acosh", "acot", "acoth", "acsc",
- "acsch", "angle", "arg", "asec", "asech", "asin",
- "asinh", "atan", "atanh", "beta", "betainc",
- "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
- "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
- "erfcx", "erfinv", "exp", "finite", "fix", "floor",
- "fmod", "gamma", "gammainc", "gammaln", "imag",
- "isalnum", "isalpha", "isascii", "iscntrl",
- "isdigit", "isfinite", "isgraph", "isinf",
- "islower", "isna", "isnan", "isprint", "ispunct",
- "isspace", "isupper", "isxdigit", "lcm", "lgamma",
- "log", "lower", "mod", "real", "rem", "round",
- "roundb", "sec", "sech", "sign", "sin", "sinh",
- "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
-
- builtin_consts = (
- "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
- "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
- "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
- "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
- "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
- "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
- "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
- "WSTOPSIG", "WTERMSIG", "WUNTRACED")
-
- tokens = {
- 'root': [
- # We should look into multiline comments
- (r'[%#].*$', Comment),
+ return 0.2
+
+
+line_re = re.compile('.*?\n')
+
+
+class MatlabSessionLexer(Lexer):
+ """
+ For Matlab sessions. Modeled after PythonConsoleLexer.
+ Contributed by Ken Schutte <kschutte@csail.mit.edu>.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab session'
+ aliases = ['matlabsession']
+
+ def get_tokens_unprocessed(self, text):
+ mlexer = MatlabLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+
+ if line.startswith('>> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:3])]))
+ curcode += line[3:]
+
+ elif line.startswith('>>'):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:2])]))
+ curcode += line[2:]
+
+ elif line.startswith('???'):
+
+ idx = len(curcode)
+
+ # without is showing error on same line as before...?
+ # line = "\n" + line
+ token = (0, Generic.Traceback, line)
+ insertions.append((idx, [token]))
+
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+
+ yield match.start(), Generic.Output, line
+
+ if curcode: # or item:
+ for item in do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode)):
+ yield item
+
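A minimal sketch of the session handling above, assuming a standard Pygments install; the transcript is illustrative:

from pygments.lexers.matlab import MatlabSessionLexer

# '>> ' prompts become Generic.Prompt insertions, the code after them is fed
# to MatlabLexer via do_insertions(), and unprompted lines are Generic.Output.
session = '>> x = magic(3);\n>> disp(x)\n     8     1     6\n'
for tok, value in MatlabSessionLexer().get_tokens(session):
    print(tok, repr(value))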
+
+class OctaveLexer(RegexLexer):
+ """
+ For GNU Octave source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Octave'
+ aliases = ['octave']
+ filenames = ['*.m']
+ mimetypes = ['text/octave']
+
+ # These lists are generated automatically.
+ # Run the following in bash shell:
+ #
+ # First dump all of the Octave manual into a plain text file:
+ #
+ # $ info octave --subnodes -o octave-manual
+ #
+ # Now grep through it:
+
+ # for i in \
+ # "Built-in Function" "Command" "Function File" \
+ # "Loadable Function" "Mapping Function";
+ # do
+ # perl -e '@name = qw('"$i"');
+ # print lc($name[0]),"_kw = [\n"';
+ #
+ # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
+ # octave-manual | sort | uniq ;
+ # echo "]" ;
+ # echo;
+ # done
+
+ # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
+
+ builtin_kw = (
+ "addlistener", "addpath", "addproperty", "all",
+ "and", "any", "argnames", "argv", "assignin",
+ "atexit", "autoload",
+ "available_graphics_toolkits", "beep_on_error",
+ "bitand", "bitmax", "bitor", "bitshift", "bitxor",
+ "cat", "cell", "cellstr", "char", "class", "clc",
+ "columns", "command_line_path",
+ "completion_append_char", "completion_matches",
+ "complex", "confirm_recursive_rmdir", "cputime",
+ "crash_dumps_octave_core", "ctranspose", "cumprod",
+ "cumsum", "debug_on_error", "debug_on_interrupt",
+ "debug_on_warning", "default_save_options",
+ "dellistener", "diag", "diff", "disp",
+ "doc_cache_file", "do_string_escapes", "double",
+ "drawnow", "e", "echo_executing_commands", "eps",
+ "eq", "errno", "errno_list", "error", "eval",
+ "evalin", "exec", "exist", "exit", "eye", "false",
+ "fclear", "fclose", "fcntl", "fdisp", "feof",
+ "ferror", "feval", "fflush", "fgetl", "fgets",
+ "fieldnames", "file_in_loadpath", "file_in_path",
+ "filemarker", "filesep", "find_dir_in_path",
+ "fixed_point_format", "fnmatch", "fopen", "fork",
+ "formula", "fprintf", "fputs", "fread", "freport",
+ "frewind", "fscanf", "fseek", "fskipl", "ftell",
+ "functions", "fwrite", "ge", "genpath", "get",
+ "getegid", "getenv", "geteuid", "getgid",
+ "getpgrp", "getpid", "getppid", "getuid", "glob",
+ "gt", "gui_mode", "history_control",
+ "history_file", "history_size",
+ "history_timestamp_format_string", "home",
+ "horzcat", "hypot", "ifelse",
+ "ignore_function_time_stamp", "inferiorto",
+ "info_file", "info_program", "inline", "input",
+ "intmax", "intmin", "ipermute",
+ "is_absolute_filename", "isargout", "isbool",
+ "iscell", "iscellstr", "ischar", "iscomplex",
+ "isempty", "isfield", "isfloat", "isglobal",
+ "ishandle", "isieee", "isindex", "isinteger",
+ "islogical", "ismatrix", "ismethod", "isnull",
+ "isnumeric", "isobject", "isreal",
+ "is_rooted_relative_filename", "issorted",
+ "isstruct", "isvarname", "kbhit", "keyboard",
+ "kill", "lasterr", "lasterror", "lastwarn",
+ "ldivide", "le", "length", "link", "linspace",
+ "logical", "lstat", "lt", "make_absolute_filename",
+ "makeinfo_program", "max_recursion_depth", "merge",
+ "methods", "mfilename", "minus", "mislocked",
+ "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
+ "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
+ "munlock", "nargin", "nargout",
+ "native_float_format", "ndims", "ne", "nfields",
+ "nnz", "norm", "not", "numel", "nzmax",
+ "octave_config_info", "octave_core_file_limit",
+ "octave_core_file_name",
+ "octave_core_file_options", "ones", "or",
+ "output_max_field_width", "output_precision",
+ "page_output_immediately", "page_screen_output",
+ "path", "pathsep", "pause", "pclose", "permute",
+ "pi", "pipe", "plus", "popen", "power",
+ "print_empty_dimensions", "printf",
+ "print_struct_array_contents", "prod",
+ "program_invocation_name", "program_name",
+ "putenv", "puts", "pwd", "quit", "rats", "rdivide",
+ "readdir", "readlink", "read_readline_init_file",
+ "realmax", "realmin", "rehash", "rename",
+ "repelems", "re_read_readline_init_file", "reset",
+ "reshape", "resize", "restoredefaultpath",
+ "rethrow", "rmdir", "rmfield", "rmpath", "rows",
+ "save_header_format_string", "save_precision",
+ "saving_history", "scanf", "set", "setenv",
+ "shell_cmd", "sighup_dumps_octave_core",
+ "sigterm_dumps_octave_core", "silent_functions",
+ "single", "size", "size_equal", "sizemax",
+ "sizeof", "sleep", "source", "sparse_auto_mutate",
+ "split_long_rows", "sprintf", "squeeze", "sscanf",
+ "stat", "stderr", "stdin", "stdout", "strcmp",
+ "strcmpi", "string_fill_char", "strncmp",
+ "strncmpi", "struct", "struct_levels_to_print",
+ "strvcat", "subsasgn", "subsref", "sum", "sumsq",
+ "superiorto", "suppress_verbose_help_message",
+ "symlink", "system", "tic", "tilde_expand",
+ "times", "tmpfile", "tmpnam", "toc", "toupper",
+ "transpose", "true", "typeinfo", "umask", "uminus",
+ "uname", "undo_string_escapes", "unlink", "uplus",
+ "upper", "usage", "usleep", "vec", "vectorize",
+ "vertcat", "waitpid", "warning", "warranty",
+ "whos_line_format", "yes_or_no", "zeros",
+ "inf", "Inf", "nan", "NaN")
+
+ command_kw = ("close", "load", "who", "whos")
+
+ function_kw = (
+ "accumarray", "accumdim", "acosd", "acotd",
+ "acscd", "addtodate", "allchild", "ancestor",
+ "anova", "arch_fit", "arch_rnd", "arch_test",
+ "area", "arma_rnd", "arrayfun", "ascii", "asctime",
+ "asecd", "asind", "assert", "atand",
+ "autoreg_matrix", "autumn", "axes", "axis", "bar",
+ "barh", "bartlett", "bartlett_test", "beep",
+ "betacdf", "betainv", "betapdf", "betarnd",
+ "bicgstab", "bicubic", "binary", "binocdf",
+ "binoinv", "binopdf", "binornd", "bitcmp",
+ "bitget", "bitset", "blackman", "blanks",
+ "blkdiag", "bone", "box", "brighten", "calendar",
+ "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
+ "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
+ "chisquare_test_homogeneity",
+ "chisquare_test_independence", "circshift", "cla",
+ "clabel", "clf", "clock", "cloglog", "closereq",
+ "colon", "colorbar", "colormap", "colperm",
+ "comet", "common_size", "commutation_matrix",
+ "compan", "compare_versions", "compass",
+ "computer", "cond", "condest", "contour",
+ "contourc", "contourf", "contrast", "conv",
+ "convhull", "cool", "copper", "copyfile", "cor",
+ "corrcoef", "cor_test", "cosd", "cotd", "cov",
+ "cplxpair", "cross", "cscd", "cstrcat", "csvread",
+ "csvwrite", "ctime", "cumtrapz", "curl", "cut",
+ "cylinder", "date", "datenum", "datestr",
+ "datetick", "datevec", "dblquad", "deal",
+ "deblank", "deconv", "delaunay", "delaunayn",
+ "delete", "demo", "detrend", "diffpara", "diffuse",
+ "dir", "discrete_cdf", "discrete_inv",
+ "discrete_pdf", "discrete_rnd", "display",
+ "divergence", "dlmwrite", "dos", "dsearch",
+ "dsearchn", "duplication_matrix", "durbinlevinson",
+ "ellipsoid", "empirical_cdf", "empirical_inv",
+ "empirical_pdf", "empirical_rnd", "eomday",
+ "errorbar", "etime", "etreeplot", "example",
+ "expcdf", "expinv", "expm", "exppdf", "exprnd",
+ "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
+ "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
+ "factorial", "fail", "fcdf", "feather", "fftconv",
+ "fftfilt", "fftshift", "figure", "fileattrib",
+ "fileparts", "fill", "findall", "findobj",
+ "findstr", "finv", "flag", "flipdim", "fliplr",
+ "flipud", "fpdf", "fplot", "fractdiff", "freqz",
+ "freqz_plot", "frnd", "fsolve",
+ "f_test_regression", "ftp", "fullfile", "fzero",
+ "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
+ "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
+ "geoinv", "geopdf", "geornd", "getfield", "ginput",
+ "glpk", "gls", "gplot", "gradient",
+ "graphics_toolkit", "gray", "grid", "griddata",
+ "griddatan", "gtext", "gunzip", "gzip", "hadamard",
+ "hamming", "hankel", "hanning", "hggroup",
+ "hidden", "hilb", "hist", "histc", "hold", "hot",
+ "hotelling_test", "housh", "hsv", "hurst",
+ "hygecdf", "hygeinv", "hygepdf", "hygernd",
+ "idivide", "ifftshift", "image", "imagesc",
+ "imfinfo", "imread", "imshow", "imwrite", "index",
+ "info", "inpolygon", "inputname", "interpft",
+ "interpn", "intersect", "invhilb", "iqr", "isa",
+ "isdefinite", "isdir", "is_duplicate_entry",
+ "isequal", "isequalwithequalnans", "isfigure",
+ "ishermitian", "ishghandle", "is_leap_year",
+ "isletter", "ismac", "ismember", "ispc", "isprime",
+ "isprop", "isscalar", "issquare", "isstrprop",
+ "issymmetric", "isunix", "is_valid_file_id",
+ "isvector", "jet", "kendall",
+ "kolmogorov_smirnov_cdf",
+ "kolmogorov_smirnov_test", "kruskal_wallis_test",
+ "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
+ "laplace_pdf", "laplace_rnd", "legend", "legendre",
+ "license", "line", "linkprop", "list_primes",
+ "loadaudio", "loadobj", "logistic_cdf",
+ "logistic_inv", "logistic_pdf", "logistic_rnd",
+ "logit", "loglog", "loglogerr", "logm", "logncdf",
+ "logninv", "lognpdf", "lognrnd", "logspace",
+ "lookfor", "ls_command", "lsqnonneg", "magic",
+ "mahalanobis", "manova", "matlabroot",
+ "mcnemar_test", "mean", "meansq", "median", "menu",
+ "mesh", "meshc", "meshgrid", "meshz", "mexext",
+ "mget", "mkpp", "mode", "moment", "movefile",
+ "mpoles", "mput", "namelengthmax", "nargchk",
+ "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
+ "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
+ "nonzeros", "normcdf", "normest", "norminv",
+ "normpdf", "normrnd", "now", "nthroot", "null",
+ "ocean", "ols", "onenormest", "optimget",
+ "optimset", "orderfields", "orient", "orth",
+ "pack", "pareto", "parseparams", "pascal", "patch",
+ "pathdef", "pcg", "pchip", "pcolor", "pcr",
+ "peaks", "periodogram", "perl", "perms", "pie",
+ "pink", "planerot", "playaudio", "plot",
+ "plotmatrix", "plotyy", "poisscdf", "poissinv",
+ "poisspdf", "poissrnd", "polar", "poly",
+ "polyaffine", "polyarea", "polyderiv", "polyfit",
+ "polygcd", "polyint", "polyout", "polyreduce",
+ "polyval", "polyvalm", "postpad", "powerset",
+ "ppder", "ppint", "ppjumps", "ppplot", "ppval",
+ "pqpnonneg", "prepad", "primes", "print",
+ "print_usage", "prism", "probit", "qp", "qqplot",
+ "quadcc", "quadgk", "quadl", "quadv", "quiver",
+ "qzhess", "rainbow", "randi", "range", "rank",
+ "ranks", "rat", "reallog", "realpow", "realsqrt",
+ "record", "rectangle_lw", "rectangle_sw",
+ "rectint", "refresh", "refreshdata",
+ "regexptranslate", "repmat", "residue", "ribbon",
+ "rindex", "roots", "rose", "rosser", "rotdim",
+ "rref", "run", "run_count", "rundemos", "run_test",
+ "runtests", "saveas", "saveaudio", "saveobj",
+ "savepath", "scatter", "secd", "semilogx",
+ "semilogxerr", "semilogy", "semilogyerr",
+ "setaudio", "setdiff", "setfield", "setxor",
+ "shading", "shift", "shiftdim", "sign_test",
+ "sinc", "sind", "sinetone", "sinewave", "skewness",
+ "slice", "sombrero", "sortrows", "spaugment",
+ "spconvert", "spdiags", "spearman", "spectral_adf",
+ "spectral_xdf", "specular", "speed", "spencer",
+ "speye", "spfun", "sphere", "spinmap", "spline",
+ "spones", "sprand", "sprandn", "sprandsym",
+ "spring", "spstats", "spy", "sqp", "stairs",
+ "statistics", "std", "stdnormal_cdf",
+ "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
+ "stem", "stft", "strcat", "strchr", "strjust",
+ "strmatch", "strread", "strsplit", "strtok",
+ "strtrim", "strtrunc", "structfun", "studentize",
+ "subplot", "subsindex", "subspace", "substr",
+ "substruct", "summer", "surf", "surface", "surfc",
+ "surfl", "surfnorm", "svds", "swapbytes",
+ "sylvester_matrix", "symvar", "synthesis", "table",
+ "tand", "tar", "tcdf", "tempdir", "tempname",
+ "test", "text", "textread", "textscan", "tinv",
+ "title", "toeplitz", "tpdf", "trace", "trapz",
+ "treelayout", "treeplot", "triangle_lw",
+ "triangle_sw", "tril", "trimesh", "triplequad",
+ "triplot", "trisurf", "triu", "trnd", "tsearchn",
+ "t_test", "t_test_regression", "type", "unidcdf",
+ "unidinv", "unidpdf", "unidrnd", "unifcdf",
+ "unifinv", "unifpdf", "unifrnd", "union", "unique",
+ "unix", "unmkpp", "unpack", "untabify", "untar",
+ "unwrap", "unzip", "u_test", "validatestring",
+ "vander", "var", "var_test", "vech", "ver",
+ "version", "view", "voronoi", "voronoin",
+ "waitforbuttonpress", "wavread", "wavwrite",
+ "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
+ "welch_test", "what", "white", "whitebg",
+ "wienrnd", "wilcoxon_test", "wilkinson", "winter",
+ "xlabel", "xlim", "ylabel", "yulewalker", "zip",
+ "zlabel", "z_test")
+
+ loadable_kw = (
+ "airy", "amd", "balance", "besselh", "besseli",
+ "besselj", "besselk", "bessely", "bitpack",
+ "bsxfun", "builtin", "ccolamd", "cellfun",
+ "cellslices", "chol", "choldelete", "cholinsert",
+ "cholinv", "cholshift", "cholupdate", "colamd",
+ "colloc", "convhulln", "convn", "csymamd",
+ "cummax", "cummin", "daspk", "daspk_options",
+ "dasrt", "dasrt_options", "dassl", "dassl_options",
+ "dbclear", "dbdown", "dbstack", "dbstatus",
+ "dbstop", "dbtype", "dbup", "dbwhere", "det",
+ "dlmread", "dmperm", "dot", "eig", "eigs",
+ "endgrent", "endpwent", "etree", "fft", "fftn",
+ "fftw", "filter", "find", "full", "gcd",
+ "getgrent", "getgrgid", "getgrnam", "getpwent",
+ "getpwnam", "getpwuid", "getrusage", "givens",
+ "gmtime", "gnuplot_binary", "hess", "ifft",
+ "ifftn", "inv", "isdebugmode", "issparse", "kron",
+ "localtime", "lookup", "lsode", "lsode_options",
+ "lu", "luinc", "luupdate", "matrix_type", "max",
+ "min", "mktime", "pinv", "qr", "qrdelete",
+ "qrinsert", "qrshift", "qrupdate", "quad",
+ "quad_options", "qz", "rand", "rande", "randg",
+ "randn", "randp", "randperm", "rcond", "regexp",
+ "regexpi", "regexprep", "schur", "setgrent",
+ "setpwent", "sort", "spalloc", "sparse", "spparms",
+ "sprank", "sqrtm", "strfind", "strftime",
+ "strptime", "strrep", "svd", "svd_driver", "syl",
+ "symamd", "symbfact", "symrcm", "time", "tsearch",
+ "typecast", "urlread", "urlwrite")
+
+ mapping_kw = (
+ "abs", "acos", "acosh", "acot", "acoth", "acsc",
+ "acsch", "angle", "arg", "asec", "asech", "asin",
+ "asinh", "atan", "atanh", "beta", "betainc",
+ "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
+ "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
+ "erfcx", "erfinv", "exp", "finite", "fix", "floor",
+ "fmod", "gamma", "gammainc", "gammaln", "imag",
+ "isalnum", "isalpha", "isascii", "iscntrl",
+ "isdigit", "isfinite", "isgraph", "isinf",
+ "islower", "isna", "isnan", "isprint", "ispunct",
+ "isspace", "isupper", "isxdigit", "lcm", "lgamma",
+ "log", "lower", "mod", "real", "rem", "round",
+ "roundb", "sec", "sech", "sign", "sin", "sinh",
+ "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
+
+ builtin_consts = (
+ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
+ "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
+ "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
+ "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
+ "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
+ "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
+ "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
+ "WSTOPSIG", "WTERMSIG", "WUNTRACED")
+
+ tokens = {
+ 'root': [
+ # We should look into multiline comments
+ (r'[%#].*$', Comment),
(r'^\s*function\b', Keyword, 'deffunc'),
-
- # from 'iskeyword' on hg changeset 8cc154f45e37
- (words((
- '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
- 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
- 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
- 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
- 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
- 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
- Keyword),
-
- (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
- suffix=r'\b'), Name.Builtin),
-
- (words(builtin_consts, suffix=r'\b'), Name.Constant),
-
- # operators in Octave but not Matlab:
- (r'-=|!=|!|/=|--', Operator),
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators in Octave but not Matlab requiring escape for re:
- (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-
- # punctuation:
- (r'[\[\](){}:@.,]', Punctuation),
- (r'=|:|;', Punctuation),
-
- (r'"[^"]*"', String),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
- (r'(?<![\w)\].])\'', String, 'string'),
-
- (r'[a-zA-Z_]\w*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
- (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
- ],
- }
-
-
-class ScilabLexer(RegexLexer):
- """
- For Scilab source code.
-
- .. versionadded:: 1.5
- """
- name = 'Scilab'
- aliases = ['scilab']
- filenames = ['*.sci', '*.sce', '*.tst']
- mimetypes = ['text/scilab']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
+
+ # from 'iskeyword' on hg changeset 8cc154f45e37
+ (words((
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
+ 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
+ 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
+ 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
+ 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
+ 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ Keyword),
+
+ (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
+ suffix=r'\b'), Name.Builtin),
+
+ (words(builtin_consts, suffix=r'\b'), Name.Constant),
+
+ # operators in Octave but not Matlab:
+ (r'-=|!=|!|/=|--', Operator),
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators in Octave but not Matlab requiring escape for re:
+ (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+
+ # punctuation:
+ (r'[\[\](){}:@.,]', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ (r'"[^"]*"', String),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+ (r'(?<![\w)\].])\'', String, 'string'),
+
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
+
+
+class ScilabLexer(RegexLexer):
+ """
+ For Scilab source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Scilab'
+ aliases = ['scilab']
+ filenames = ['*.sci', '*.sce', '*.tst']
+ mimetypes = ['text/scilab']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
(r'^\s*function\b', Keyword, 'deffunc'),
-
- (words((
- '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
- 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
- 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
- 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
- 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
- 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
- Keyword),
-
- (words(_scilab_builtins.functions_kw +
- _scilab_builtins.commands_kw +
- _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
-
- (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
-
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
- # punctuation:
- (r'[\[\](){}@.,=:;]', Punctuation),
-
- (r'"[^"]*"', String),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
- (r'(?<![\w)\].])\'', String, 'string'),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- (r'[a-zA-Z_]\w*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- (r'.', String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
- (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
- ],
- }
+
+ (words((
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
+ 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
+ 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
+ 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
+ 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
+ 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ Keyword),
+
+ (words(_scilab_builtins.functions_kw +
+ _scilab_builtins.commands_kw +
+ _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
+
+ (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
+
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+ # punctuation:
+ (r'[\[\](){}@.,=:;]', Punctuation),
+
+ (r'"[^"]*"', String),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+ (r'(?<![\w)\].])\'', String, 'string'),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ (r'.', String, '#pop'),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
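A minimal usage sketch for the Octave lexer whose keyword tables and token rules appear in the hunk above (OctaveLexer in Pygments), driven through the standard public API; the sample Octave source and variable names below are illustrative only:

from pygments import highlight
from pygments.lexers import OctaveLexer
from pygments.formatters import TerminalFormatter

# A quote that follows ')' matches the transpose rule above, not the string rule.
octave_source = "y = zeros(3)'   % transpose of a builtin call\n"

# highlight() runs the RegexLexer state machine and hands the resulting
# (token, value) stream to the formatter.
print(highlight(octave_source, OctaveLexer(), TerminalFormatter()))

# The token stream can also be inspected directly:
for token_type, value in OctaveLexer().get_tokens(octave_source):
    print(token_type, repr(value))

Given the rules above, 'zeros' should come out tagged Name.Builtin and the trailing quote as Operator.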
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ml.py b/contrib/python/Pygments/py2/pygments/lexers/ml.py
index 461af88134..d551841cc4 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ml.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ml.py
@@ -1,769 +1,769 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.ml
- ~~~~~~~~~~~~~~~~~~
-
- Lexers for ML family languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ml
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for ML family languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer']
-
-
-class SMLLexer(RegexLexer):
- """
- For the Standard ML language.
-
- .. versionadded:: 1.5
- """
-
- name = 'Standard ML'
- aliases = ['sml']
- filenames = ['*.sml', '*.sig', '*.fun']
- mimetypes = ['text/x-standardml', 'application/x-standardml']
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer']
+
+
+class SMLLexer(RegexLexer):
+ """
+ For the Standard ML language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Standard ML'
+ aliases = ['sml']
+ filenames = ['*.sml', '*.sig', '*.fun']
+ mimetypes = ['text/x-standardml', 'application/x-standardml']
+
alphanumid_reserved = {
- # Core
- 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
- 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
- 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
- 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
- # Modules
- 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
- 'struct', 'structure', 'where',
+ # Core
+ 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
+ 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
+ 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
+ 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
+ # Modules
+ 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
+ 'struct', 'structure', 'where',
}
-
+
symbolicid_reserved = {
- # Core
+ # Core
':', r'\|', '=', '=>', '->', '#',
- # Modules
- ':>',
+ # Modules
+ ':>',
}
-
+
nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
-
- alphanumid_re = r"[a-zA-Z][\w']*"
- symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
-
- # A character constant is a sequence of the form #s, where s is a string
- # constant denoting a string of size one character. This setup just parses
- # the entire string as either a String.Double or a String.Char (depending
- # on the argument), even if the String.Char is an erroneous
- # multiple-character string.
- def stringy(whatkind):
- return [
- (r'[^"\\]', whatkind),
- (r'\\[\\"abtnvfr]', String.Escape),
- # Control-character notation is used for codes < 32,
- # where \^@ == \000
- (r'\\\^[\x40-\x5e]', String.Escape),
- # Docs say 'decimal digits'
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\\s+\\', String.Interpol),
- (r'"', whatkind, '#pop'),
- ]
-
- # Callbacks for distinguishing tokens and reserved words
- def long_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved:
- token = Error
- else:
- token = Name.Namespace
- yield match.start(1), token, match.group(1)
- yield match.start(2), Punctuation, match.group(2)
-
- def end_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved:
- token = Error
- elif match.group(1) in self.symbolicid_reserved:
- token = Error
- else:
- token = Name
- yield match.start(1), token, match.group(1)
-
- def id_callback(self, match):
- str = match.group(1)
- if str in self.alphanumid_reserved:
- token = Keyword.Reserved
- elif str in self.symbolicid_reserved:
- token = Punctuation
- else:
- token = Name
- yield match.start(1), token, str
-
- tokens = {
- # Whitespace and comments are (almost) everywhere
- 'whitespace': [
- (r'\s+', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
-
- 'delimiters': [
- # This lexer treats these delimiters specially:
- # Delimiters define scopes, and the scope is how the meaning of
- # the `|' is resolved - is it a case/handle expression, or function
- # definition by cases? (This is not how the Definition works, but
- # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
- (r'\(|\[|\{', Punctuation, 'main'),
- (r'\)|\]|\}', Punctuation, '#pop'),
- (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
- (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
- (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
- ],
-
- 'core': [
- # Punctuation that doesn't overlap symbolic identifiers
- (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
- Punctuation),
-
- # Special constants: strings, floats, numbers in decimal and hex
- (r'#"', String.Char, 'char'),
- (r'"', String.Double, 'string'),
- (r'~?0x[0-9a-fA-F]+', Number.Hex),
- (r'0wx[0-9a-fA-F]+', Number.Hex),
- (r'0w\d+', Number.Integer),
- (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
- (r'~?\d+\.\d+', Number.Float),
- (r'~?\d+[eE]~?\d+', Number.Float),
- (r'~?\d+', Number.Integer),
-
- # Labels
- (r'#\s*[1-9][0-9]*', Name.Label),
- (r'#\s*(%s)' % alphanumid_re, Name.Label),
- (r'#\s+(%s)' % symbolicid_re, Name.Label),
- # Some reserved words trigger a special, local lexer state change
- (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
- (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
- (r'\b(functor|include|open|signature|structure)\b(?!\')',
- Keyword.Reserved, 'sname'),
- (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
-
- # Regular identifiers, long and otherwise
- (r'\'[\w\']*', Name.Decorator),
- (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
- (r'(%s)' % alphanumid_re, id_callback),
- (r'(%s)' % symbolicid_re, id_callback),
- ],
- 'dotted': [
- (r'(%s)(\.)' % alphanumid_re, long_id_callback),
- (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
- (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
- (r'\s+', Error),
- (r'\S+', Error),
- ],
-
-
- # Main parser (prevents errors in files that have scoping errors)
- 'root': [
- default('main')
- ],
-
- # In this scope, I expect '|' to not be followed by a function name,
- # and I expect 'and' to be followed by a binding site
- 'main': [
- include('whitespace'),
-
- # Special behavior of val/and/fun
- (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
- (r'\b(fun)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main-fun', 'fname')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # In this scope, I expect '|' and 'and' to be followed by a function
- 'main-fun': [
- include('whitespace'),
-
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- # Special behavior of val/and/fun
- (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
- (r'\b(val)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main', 'vname')),
-
- # Special behavior of '|' and '|'-manipulating keywords
- (r'\|', Punctuation, 'fname'),
- (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Character and string parsers
- 'char': stringy(String.Char),
- 'string': stringy(String.Double),
-
- 'breakout': [
- (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
- ],
-
- # Dealing with what comes after module system keywords
- 'sname': [
- include('whitespace'),
- include('breakout'),
-
- (r'(%s)' % alphanumid_re, Name.Namespace),
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
- 'fname': [
- include('whitespace'),
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
-
- # Ignore interesting function declarations like "fun (x + y) = ..."
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'val' (or 'and') keyword
- 'vname': [
- include('whitespace'),
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
-
- # Ignore interesting patterns like 'val (x, y)'
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'type' (or 'and') keyword
- 'tname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # A type binding includes most identifiers
- 'typbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Dealing with what comes after the 'datatype' (or 'and') keyword
- 'dname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'(=)(\s*)(datatype)',
- bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
- (r'=(?!%s)' % symbolicid_re, Punctuation,
- ('#pop', 'datbind', 'datcon')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # common case - A | B | C of int
- 'datbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
- (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- (r'(\|)(\s*)(%s)' % alphanumid_re,
- bygroups(Punctuation, Text, Name.Class)),
- (r'(\|)(\s+)(%s)' % symbolicid_re,
- bygroups(Punctuation, Text, Name.Class)),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Dealing with what comes after an exception
- 'ename': [
- include('whitespace'),
-
- (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- 'datcon': [
- include('whitespace'),
- (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Series of type variables
- 'tyvarseq': [
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- (r'\'[\w\']*', Name.Decorator),
- (alphanumid_re, Name),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (symbolicid_re, Name),
- ],
-
- 'comment': [
- (r'[^(*)]', Comment.Multiline),
- (r'\(\*', Comment.Multiline, '#push'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[(*)]', Comment.Multiline),
- ],
- }
-
-
-class OcamlLexer(RegexLexer):
- """
- For the OCaml language.
-
- .. versionadded:: 0.7
- """
-
- name = 'OCaml'
- aliases = ['ocaml']
- filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
- mimetypes = ['text/x-ocaml']
-
- keywords = (
- 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- 'downto', 'else', 'end', 'exception', 'external', 'false',
- 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
- )
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
- r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
- '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
- )
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'\(\*(?![)])', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class OpaLexer(RegexLexer):
- """
- Lexer for the Opa language (http://opalang.org).
-
- .. versionadded:: 1.5
- """
-
- name = 'Opa'
- aliases = ['opa']
- filenames = ['*.opa']
- mimetypes = ['text/x-opa']
-
- # most of these aren't strictly keywords
- # but if you color only real keywords, you might just
- # as well not color anything
- keywords = (
- 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
- 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
- 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
- 'type', 'val', 'with', 'xml_parser',
- )
-
- # matches both stuff and `stuff`
- ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
-
- op_re = r'[.=\-<>,@~%/+?*&^!]'
- punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
- # because they are also used for inserts
-
- tokens = {
- # copied from the caml lexer, should be adapted
- 'escape-sequence': [
- (r'\\[\\"\'ntr}]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
-
- # factorizing these rules, because they are inserted many times
- 'comments': [
- (r'/\*', Comment, 'nested-comment'),
- (r'//.*?$', Comment),
- ],
- 'comments-and-spaces': [
- include('comments'),
- (r'\s+', Text),
- ],
-
- 'root': [
- include('comments-and-spaces'),
- # keywords
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- # directives
- # we could parse the actual set of directives instead of anything
- # starting with @, but this is troublesome
- # because it needs to be adjusted all the time
- # and assuming we parse only sources that compile, it is useless
- (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
-
- # number literals
- (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+[eE][+\-]?\d+', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'\d+', Number.Integer),
- # color literals
- (r'#[\da-fA-F]{3,6}', Number.Integer),
-
- # string literals
- (r'"', String.Double, 'string'),
- # char literal, should be checked because this is the regexp from
- # the caml lexer
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
- String.Char),
-
- # this is meant to deal with embedded exprs in strings
- # every time we find a '}' we pop a state so that if we were
- # inside a string, we are back in the string state
- # as a consequence, we must also push a state every time we find a
- # '{' or else we will have errors when parsing {} for instance
- (r'\{', Operator, '#push'),
- (r'\}', Operator, '#pop'),
-
- # html literals
- # this is much more strict than the actual parser,
- # since a<b would not be parsed as html
- # but then again, the parser is way too lax, and we can't hope
- # to have something as tolerant
- (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
-
- # db path
- # matching the '[_]' in '/a[_]' because it is a part
- # of the syntax of the db path definition
- # unfortunately, i don't know how to match the ']' in
- # /a[1], so this is somewhat inconsistent
- (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
- # putting the same color on <- as on db path, since
- # it can be used only to mean Db.write
- (r'<-(?!'+op_re+r')', Name.Variable),
-
- # 'modules'
- # although modules are not distinguished by their names as in caml
- # the standard library seems to follow the convention that modules
- # only are capitalized
- (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
-
- # operators
- # = has a special role because this is the only
- # way to syntactically distinguish binding constructions
- # unfortunately, this colors the equal in {x=2} too
- (r'=(?!'+op_re+r')', Keyword),
- (r'(%s)+' % op_re, Operator),
- (r'(%s)+' % punc_re, Operator),
-
- # coercions
- (r':', Operator, 'type'),
- # type variables
- # we need this rule because we don't parse specially type
- # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
- ("'"+ident_re, Keyword.Type),
-
- # id literal, #something, or #{expr}
- (r'#'+ident_re, String.Single),
- (r'#(?=\{)', String.Single),
-
- # identifiers
- # this avoids coloring '2' in 'a2' as an integer
- (ident_re, Text),
-
- # default, not sure if that is needed or not
- # (r'.', Text),
- ],
-
- # it is quite painful to have to parse types to know where they end
- # this is the general rule for a type
- # a type is either:
- # * -> ty
- # * type-with-slash
- # * type-with-slash -> ty
- # * type-with-slash (, type-with-slash)+ -> ty
- #
- # the code is pretty funky in here, but this code would roughly
- # translate in caml to:
- # let rec type stream =
- # match stream with
- # | [< "->"; stream >] -> type stream
- # | [< ""; stream >] ->
- # type_with_slash stream
- # type_lhs_1 stream;
- # and type_1 stream = ...
- 'type': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type),
- default(('#pop', 'type-lhs-1', 'type-with-slash')),
- ],
-
- # parses all the atomic or closed constructions in the syntax of type
- # expressions: record types, tuple types, type constructors, basic type
- # and type variables
- 'type-1': [
- include('comments-and-spaces'),
- (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (r'~?\{', Keyword.Type, ('#pop', 'type-record')),
- (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (ident_re, Keyword.Type, '#pop'),
- ("'"+ident_re, Keyword.Type),
- # this case is not in the syntax but sometimes
- # we think we are parsing types when in fact we are parsing
- # some css, so we just pop the states until we get back into
- # the root state
- default('#pop'),
- ],
-
- # type-with-slash is either:
- # * type-1
- # * type-1 (/ type-1)+
- 'type-with-slash': [
- include('comments-and-spaces'),
- default(('#pop', 'slash-type-1', 'type-1')),
- ],
- 'slash-type-1': [
- include('comments-and-spaces'),
- ('/', Keyword.Type, ('#pop', 'type-1')),
- # same remark as above
- default('#pop'),
- ],
-
- # we go in this state after having parsed a type-with-slash
- # while trying to parse a type
- # and at this point we must determine if we are parsing an arrow
- # type (in which case we must continue parsing) or not (in which
- # case we stop)
- 'type-lhs-1': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
- default('#pop'),
- ],
- 'type-arrow': [
- include('comments-and-spaces'),
- # the look ahead here allows parsing f(x : int, y : float -> truc)
- # correctly
- (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- # same remark as above
- default('#pop'),
- ],
-
- # no need to do precise parsing for tuples and records
- # because they are closed constructions, so we can simply
- # find the closing delimiter
- # note that this function would not work if the source
- # contained identifiers like `{)` (although it could be patched
- # to support it)
- 'type-tuple': [
- include('comments-and-spaces'),
- (r'[^()/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\(', Keyword.Type, '#push'),
- (r'\)', Keyword.Type, '#pop'),
- ],
- 'type-record': [
- include('comments-and-spaces'),
- (r'[^{}/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\{', Keyword.Type, '#push'),
- (r'\}', Keyword.Type, '#pop'),
- ],
-
- # 'type-tuple': [
- # include('comments-and-spaces'),
- # (r'\)', Keyword.Type, '#pop'),
- # default(('#pop', 'type-tuple-1', 'type-1')),
- # ],
- # 'type-tuple-1': [
- # include('comments-and-spaces'),
- # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
- # (r',', Keyword.Type, 'type-1'),
- # ],
- # 'type-record':[
- # include('comments-and-spaces'),
- # (r'\}', Keyword.Type, '#pop'),
- # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
- # ],
- # 'type-record-field-expr': [
- #
- # ],
-
- 'nested-comment': [
- (r'[^/*]+', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[/*]', Comment),
- ],
-
- # the copy pasting between string and single-string
- # is kinda sad. Is there a way to avoid that??
- 'string': [
- (r'[^\\"{]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\{', Operator, 'root'),
- include('escape-sequence'),
- ],
- 'single-string': [
- (r'[^\\\'{]+', String.Double),
- (r'\'', String.Double, '#pop'),
- (r'\{', Operator, 'root'),
- include('escape-sequence'),
- ],
-
- # all the html stuff
- # can't really reuse some existing html parser
- # because we must be able to parse embedded expressions
-
- # we are in this state after someone parsed the '<' that
- # started the html literal
- 'html-open-tag': [
- (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- # we are in this state after someone parsed the '</' that
- # started the end of the closing tag
- 'html-end-tag': [
- # this is a star, because </> is allowed
- (r'[\w\-:]*>', String.Single, '#pop'),
- ],
-
- # we are in this state after having parsed '<ident(:ident)?'
- # we thus parse a possibly empty list of attributes
- 'html-attr': [
- (r'\s+', Text),
- (r'[\w\-:]+=', String.Single, 'html-attr-value'),
- (r'/>', String.Single, '#pop'),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- 'html-attr-value': [
- (r"'", String.Single, ('#pop', 'single-string')),
- (r'"', String.Single, ('#pop', 'string')),
- (r'#'+ident_re, String.Single, '#pop'),
- (r'#(?=\{)', String.Single, ('#pop', 'root')),
- (r'[^"\'{`=<>]+', String.Single, '#pop'),
- (r'\{', Operator, ('#pop', 'root')), # this is a tail call!
- ],
-
- # we should probably deal with '\' escapes here
- 'html-content': [
- (r'<!--', Comment, 'html-comment'),
- (r'</', String.Single, ('#pop', 'html-end-tag')),
- (r'<', String.Single, 'html-open-tag'),
- (r'\{', Operator, 'root'),
- (r'[^<{]+', String.Single),
- ],
-
- 'html-comment': [
- (r'-->', Comment, '#pop'),
- (r'[^\-]+|-', Comment),
- ],
- }
+
+ alphanumid_re = r"[a-zA-Z][\w']*"
+ symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
+
+ # A character constant is a sequence of the form #s, where s is a string
+ # constant denoting a string of size one character. This setup just parses
+ # the entire string as either a String.Double or a String.Char (depending
+ # on the argument), even if the String.Char is an erroneous
+ # multiple-character string.
+ def stringy(whatkind):
+ return [
+ (r'[^"\\]', whatkind),
+ (r'\\[\\"abtnvfr]', String.Escape),
+ # Control-character notation is used for codes < 32,
+ # where \^@ == \000
+ (r'\\\^[\x40-\x5e]', String.Escape),
+ # Docs say 'decimal digits'
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\\s+\\', String.Interpol),
+ (r'"', whatkind, '#pop'),
+ ]
+
+ # Callbacks for distinguishing tokens and reserved words
+ def long_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ else:
+ token = Name.Namespace
+ yield match.start(1), token, match.group(1)
+ yield match.start(2), Punctuation, match.group(2)
+
+ def end_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ elif match.group(1) in self.symbolicid_reserved:
+ token = Error
+ else:
+ token = Name
+ yield match.start(1), token, match.group(1)
+
+ def id_callback(self, match):
+ str = match.group(1)
+ if str in self.alphanumid_reserved:
+ token = Keyword.Reserved
+ elif str in self.symbolicid_reserved:
+ token = Punctuation
+ else:
+ token = Name
+ yield match.start(1), token, str
+
+ tokens = {
+ # Whitespace and comments are (almost) everywhere
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+
+ 'delimiters': [
+ # This lexer treats these delimiters specially:
+ # Delimiters define scopes, and the scope is how the meaning of
+ # the `|' is resolved - is it a case/handle expression, or function
+ # definition by cases? (This is not how the Definition works, but
+ # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
+ (r'\(|\[|\{', Punctuation, 'main'),
+ (r'\)|\]|\}', Punctuation, '#pop'),
+ (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
+ (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
+ (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
+ ],
+
+ 'core': [
+ # Punctuation that doesn't overlap symbolic identifiers
+ (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
+ Punctuation),
+
+ # Special constants: strings, floats, numbers in decimal and hex
+ (r'#"', String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ (r'~?0x[0-9a-fA-F]+', Number.Hex),
+ (r'0wx[0-9a-fA-F]+', Number.Hex),
+ (r'0w\d+', Number.Integer),
+ (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+\.\d+', Number.Float),
+ (r'~?\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+', Number.Integer),
+
+ # Labels
+ (r'#\s*[1-9][0-9]*', Name.Label),
+ (r'#\s*(%s)' % alphanumid_re, Name.Label),
+ (r'#\s+(%s)' % symbolicid_re, Name.Label),
+ # Some reserved words trigger a special, local lexer state change
+ (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
+ (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
+ (r'\b(functor|include|open|signature|structure)\b(?!\')',
+ Keyword.Reserved, 'sname'),
+ (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
+
+ # Regular identifiers, long and otherwise
+ (r'\'[\w\']*', Name.Decorator),
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
+ (r'(%s)' % alphanumid_re, id_callback),
+ (r'(%s)' % symbolicid_re, id_callback),
+ ],
+ 'dotted': [
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback),
+ (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
+ (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
+ (r'\s+', Error),
+ (r'\S+', Error),
+ ],
+
+
+ # Main parser (prevents errors in files that have scoping errors)
+ 'root': [
+ default('main')
+ ],
+
+ # In this scope, I expect '|' to not be followed by a function name,
+ # and I expect 'and' to be followed by a binding site
+ 'main': [
+ include('whitespace'),
+
+ # Special behavior of val/and/fun
+ (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
+ (r'\b(fun)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main-fun', 'fname')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # In this scope, I expect '|' and 'and' to be followed by a function
+ 'main-fun': [
+ include('whitespace'),
+
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ # Special behavior of val/and/fun
+ (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
+ (r'\b(val)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main', 'vname')),
+
+ # Special behavior of '|' and '|'-manipulating keywords
+ (r'\|', Punctuation, 'fname'),
+ (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Character and string parsers
+ 'char': stringy(String.Char),
+ 'string': stringy(String.Double),
+
+ 'breakout': [
+ (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
+ ],
+
+ # Dealing with what comes after module system keywords
+ 'sname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'(%s)' % alphanumid_re, Name.Namespace),
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
+ 'fname': [
+ include('whitespace'),
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
+
+ # Ignore interesting function declarations like "fun (x + y) = ..."
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'val' (or 'and') keyword
+ 'vname': [
+ include('whitespace'),
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
+
+ # Ignore interesting patterns like 'val (x, y)'
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'type' (or 'and') keyword
+ 'tname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # A type binding includes most identifiers
+ 'typbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Dealing with what comes after the 'datatype' (or 'and') keyword
+ 'dname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'(=)(\s*)(datatype)',
+ bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation,
+ ('#pop', 'datbind', 'datcon')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # common case - A | B | C of int
+ 'datbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
+ (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
+
+ (r'(\|)(\s*)(%s)' % alphanumid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+ (r'(\|)(\s+)(%s)' % symbolicid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Dealing with what comes after an exception
+ 'ename': [
+ include('whitespace'),
+
+ (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ 'datcon': [
+ include('whitespace'),
+ (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Series of type variables
+ 'tyvarseq': [
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (alphanumid_re, Name),
+ (r',', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ (symbolicid_re, Name),
+ ],
+
+ 'comment': [
+ (r'[^(*)]', Comment.Multiline),
+ (r'\(\*', Comment.Multiline, '#push'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[(*)]', Comment.Multiline),
+ ],
+ }
+
+
+class OcamlLexer(RegexLexer):
+ """
+ For the OCaml language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'OCaml'
+ aliases = ['ocaml']
+ filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
+ mimetypes = ['text/x-ocaml']
+
+ keywords = (
+ 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ 'downto', 'else', 'end', 'exception', 'external', 'false',
+ 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
+ )
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
+ r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+ '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
+ )
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\"\'ntbr]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
+ (r'\(\*(?![)])', Comment, 'comment'),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'\d[\d_]*', Number.Integer),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ include('escape-sequence'),
+ (r'\\\n', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class OpaLexer(RegexLexer):
+ """
+ Lexer for the Opa language (http://opalang.org).
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Opa'
+ aliases = ['opa']
+ filenames = ['*.opa']
+ mimetypes = ['text/x-opa']
+
+ # most of these aren't strictly keywords
+ # but if you color only real keywords, you might just
+ # as well not color anything
+ keywords = (
+ 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
+ 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
+ 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
+ 'type', 'val', 'with', 'xml_parser',
+ )
+
+ # matches both stuff and `stuff`
+ ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
+
+ op_re = r'[.=\-<>,@~%/+?*&^!]'
+ punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
+ # because they are also used for inserts
+
+ tokens = {
+ # copied from the caml lexer, should be adapted
+ 'escape-sequence': [
+ (r'\\[\\"\'ntr}]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+
+ # factorizing these rules, because they are inserted many times
+ 'comments': [
+ (r'/\*', Comment, 'nested-comment'),
+ (r'//.*?$', Comment),
+ ],
+ 'comments-and-spaces': [
+ include('comments'),
+ (r'\s+', Text),
+ ],
+
+ 'root': [
+ include('comments-and-spaces'),
+ # keywords
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ # directives
+ # we could parse the actual set of directives instead of anything
+ # starting with @, but this is troublesome
+ # because it needs to be adjusted all the time
+ # and assuming we parse only sources that compile, it is useless
+ (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
+
+ # number literals
+ (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+[eE][+\-]?\d+', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'\d+', Number.Integer),
+ # color literals
+ (r'#[\da-fA-F]{3,6}', Number.Integer),
+
+ # string literals
+ (r'"', String.Double, 'string'),
+ # char literal, should be checked because this is the regexp from
+ # the caml lexer
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
+ String.Char),
+
+ # this is meant to deal with embedded exprs in strings
+ # every time we find a '}' we pop a state so that if we were
+ # inside a string, we are back in the string state
+ # as a consequence, we must also push a state every time we find a
+ # '{' or else we will have errors when parsing {} for instance
+ (r'\{', Operator, '#push'),
+ (r'\}', Operator, '#pop'),
+
+ # html literals
+ # this is much more strict than the actual parser,
+ # since a<b would not be parsed as html
+ # but then again, the parser is way too lax, and we can't hope
+ # to have something as tolerant
+ (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
+
+ # db path
+ # matching the '[_]' in '/a[_]' because it is a part
+ # of the syntax of the db path definition
+ # unfortunately, i don't know how to match the ']' in
+ # /a[1], so this is somewhat inconsistent
+ (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
+ # putting the same color on <- as on db path, since
+ # it can be used only to mean Db.write
+ (r'<-(?!'+op_re+r')', Name.Variable),
+
+ # 'modules'
+ # although modules are not distinguished by their names as in caml
+ # the standard library seems to follow the convention that modules
+ # only are capitalized
+ (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
+
+ # operators
+ # = has a special role because this is the only
+ # way to syntactically distinguish binding constructions
+ # unfortunately, this colors the equal in {x=2} too
+ (r'=(?!'+op_re+r')', Keyword),
+ (r'(%s)+' % op_re, Operator),
+ (r'(%s)+' % punc_re, Operator),
+
+ # coercions
+ (r':', Operator, 'type'),
+ # type variables
+ # we need this rule because we don't parse specially type
+ # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
+ ("'"+ident_re, Keyword.Type),
+
+ # id literal, #something, or #{expr}
+ (r'#'+ident_re, String.Single),
+ (r'#(?=\{)', String.Single),
+
+ # identifiers
+ # this avoids coloring '2' in 'a2' as an integer
+ (ident_re, Text),
+
+ # default, not sure if that is needed or not
+ # (r'.', Text),
+ ],
+
+ # it is quite painful to have to parse types to know where they end
+ # this is the general rule for a type
+ # a type is either:
+ # * -> ty
+ # * type-with-slash
+ # * type-with-slash -> ty
+ # * type-with-slash (, type-with-slash)+ -> ty
+ #
+ # the code is pretty funky in here, but this code would roughly
+ # translate in caml to:
+ # let rec type stream =
+ # match stream with
+ # | [< "->"; stream >] -> type stream
+ # | [< ""; stream >] ->
+ # type_with_slash stream
+ # type_lhs_1 stream;
+ # and type_1 stream = ...
+ 'type': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type),
+ default(('#pop', 'type-lhs-1', 'type-with-slash')),
+ ],
+
+ # parses all the atomic or closed constructions in the syntax of type
+ # expressions: record types, tuple types, type constructors, basic type
+ # and type variables
+ 'type-1': [
+ include('comments-and-spaces'),
+ (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (r'~?\{', Keyword.Type, ('#pop', 'type-record')),
+ (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (ident_re, Keyword.Type, '#pop'),
+ ("'"+ident_re, Keyword.Type),
+ # this case is not in the syntax but sometimes
+ # we think we are parsing types when in fact we are parsing
+ # some css, so we just pop the states until we get back into
+ # the root state
+ default('#pop'),
+ ],
+
+ # type-with-slash is either:
+ # * type-1
+ # * type-1 (/ type-1)+
+ 'type-with-slash': [
+ include('comments-and-spaces'),
+ default(('#pop', 'slash-type-1', 'type-1')),
+ ],
+ 'slash-type-1': [
+ include('comments-and-spaces'),
+ ('/', Keyword.Type, ('#pop', 'type-1')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # we go into this state after having parsed a type-with-slash
+ # while trying to parse a type
+ # and at this point we must determine if we are parsing an arrow
+ # type (in which case we must continue parsing) or not (in which
+ # case we stop)
+ 'type-lhs-1': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
+ default('#pop'),
+ ],
+ 'type-arrow': [
+ include('comments-and-spaces'),
+ # the lookahead here allows us to parse f(x : int, y : float -> truc)
+ # correctly
+ (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # no need to do precise parsing for tuples and records
+ # because they are closed constructions, so we can simply
+ # find the closing delimiter
+ # note that this approach would not work if the source
+ # contained identifiers like `{)` (although it could be patched
+ # to support it)
+ 'type-tuple': [
+ include('comments-and-spaces'),
+ (r'[^()/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\(', Keyword.Type, '#push'),
+ (r'\)', Keyword.Type, '#pop'),
+ ],
+ 'type-record': [
+ include('comments-and-spaces'),
+ (r'[^{}/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\{', Keyword.Type, '#push'),
+ (r'\}', Keyword.Type, '#pop'),
+ ],
+
+ # 'type-tuple': [
+ # include('comments-and-spaces'),
+ # (r'\)', Keyword.Type, '#pop'),
+ # default(('#pop', 'type-tuple-1', 'type-1')),
+ # ],
+ # 'type-tuple-1': [
+ # include('comments-and-spaces'),
+ # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
+ # (r',', Keyword.Type, 'type-1'),
+ # ],
+ # 'type-record':[
+ # include('comments-and-spaces'),
+ # (r'\}', Keyword.Type, '#pop'),
+ # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
+ # ],
+ # 'type-record-field-expr': [
+ #
+ # ],
+
+ 'nested-comment': [
+ (r'[^/*]+', Comment),
+ (r'/\*', Comment, '#push'),
+ (r'\*/', Comment, '#pop'),
+ (r'[/*]', Comment),
+ ],
+
+ # the copy-pasting between 'string' and 'single-string'
+ # is kinda sad. Is there a way to avoid that?
+ 'string': [
+ (r'[^\\"{]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'\{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+ 'single-string': [
+ (r'[^\\\'{]+', String.Double),
+ (r'\'', String.Double, '#pop'),
+ (r'\{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+
+ # all the html stuff
+ # we can't really reuse an existing html parser
+ # because we must be able to parse embedded expressions
+
+ # we are in this state after someone parsed the '<' that
+ # started the html literal
+ 'html-open-tag': [
+ (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ # we are in this state after someone parsed the '</' that
+ # started the end of the closing tag
+ 'html-end-tag': [
+ # this is a star, because </> is allowed
+ (r'[\w\-:]*>', String.Single, '#pop'),
+ ],
+
+ # we are in this state after having parsed '<ident(:ident)?'
+ # we thus parse a possibly empty list of attributes
+ 'html-attr': [
+ (r'\s+', Text),
+ (r'[\w\-:]+=', String.Single, 'html-attr-value'),
+ (r'/>', String.Single, '#pop'),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ 'html-attr-value': [
+ (r"'", String.Single, ('#pop', 'single-string')),
+ (r'"', String.Single, ('#pop', 'string')),
+ (r'#'+ident_re, String.Single, '#pop'),
+ (r'#(?=\{)', String.Single, ('#pop', 'root')),
+ (r'[^"\'{`=<>]+', String.Single, '#pop'),
+ (r'\{', Operator, ('#pop', 'root')), # this is a tail call!
+ ],
+
+ # we should probably deal with '\' escapes here
+ 'html-content': [
+ (r'<!--', Comment, 'html-comment'),
+ (r'</', String.Single, ('#pop', 'html-end-tag')),
+ (r'<', String.Single, 'html-open-tag'),
+ (r'\{', Operator, 'root'),
+ (r'[^<{]+', String.Single),
+ ],
+
+ 'html-comment': [
+ (r'-->', Comment, '#pop'),
+ (r'[^\-]+|-', Comment),
+ ],
+ }
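A minimal sketch (not part of the patch) of the '#push'/'#pop' brace-balancing technique the comments above describe: the string state enters 'root' on '{', 'root' pushes a copy of itself on '{' and pops on '}', so once the braces of an embedded expression are balanced the lexer drops back into the string state. The lexer name BraceDemoLexer is made up for illustration.

from pygments.lexer import RegexLexer
from pygments.token import Text, String, Operator

class BraceDemoLexer(RegexLexer):
    name = 'BraceDemo'
    tokens = {
        'root': [
            (r'"', String.Double, 'string'),  # enter the string state
            (r'\{', Operator, '#push'),       # nested '{' pushes another 'root'
            (r'\}', Operator, '#pop'),        # '}' pops, possibly back into 'string'
            (r'[^"{}]+', Text),
        ],
        'string': [
            (r'\{', Operator, 'root'),        # embedded expression inside the string
            (r'"', String.Double, '#pop'),
            (r'[^"{]+', String.Double),
        ],
    }

# e.g. list(BraceDemoLexer().get_tokens('"a {1 + {2}} b"'))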
diff --git a/contrib/python/Pygments/py2/pygments/lexers/modeling.py b/contrib/python/Pygments/py2/pygments/lexers/modeling.py
index f4dca4a905..0ea8c450b7 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/modeling.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/modeling.py
@@ -1,366 +1,366 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.modeling
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for modeling languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.modeling
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for modeling languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers import _stan_builtins
-
-__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
-
-
-class ModelicaLexer(RegexLexer):
- """
- For `Modelica <http://www.modelica.org/>`_ source code.
-
- .. versionadded:: 1.1
- """
- name = 'Modelica'
- aliases = ['modelica']
- filenames = ['*.mo']
- mimetypes = ['text/x-modelica']
-
- flags = re.DOTALL | re.MULTILINE
-
- _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
-
- tokens = {
- 'whitespace': [
- (u'[\\s\ufeff]+', Text),
- (r'//[^\n]*\n?', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'root': [
- include('whitespace'),
- (r'"', String.Double, 'string'),
- (r'[()\[\]{},;]+', Punctuation),
- (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
- (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
- (r'\d+', Number.Integer),
- (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
- r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
- r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
- r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
- r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
- r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
- r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
- r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
- r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
- r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
- r'transpose|vector|zeros)\b', Name.Builtin),
- (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
- r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
+
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers import _stan_builtins
+
+__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
+
+
+class ModelicaLexer(RegexLexer):
+ """
+ For `Modelica <http://www.modelica.org/>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Modelica'
+ aliases = ['modelica']
+ filenames = ['*.mo']
+ mimetypes = ['text/x-modelica']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
+
+ tokens = {
+ 'whitespace': [
+ (u'[\\s\ufeff]+', Text),
+ (r'//[^\n]*\n?', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'"', String.Double, 'string'),
+ (r'[()\[\]{},;]+', Punctuation),
+ (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
+ (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
+ r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
+ r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
+ r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
+ r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
+ r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
+ r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
+ r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
+ r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
+ r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
+ r'transpose|vector|zeros)\b', Name.Builtin),
+ (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
+ r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
- r'output|parameter|partial|protected|public|pure|redeclare|'
- r'replaceable|return|stream|then|when|while)\b',
- Keyword.Reserved),
- (r'(and|not|or)\b', Operator.Word),
- (r'(block|class|connector|end|function|model|operator|package|'
- r'record|type)\b', Keyword.Reserved, 'class'),
- (r'(false|true)\b', Keyword.Constant),
- (r'within\b', Keyword.Reserved, 'package-prefix'),
- (_name, Name)
- ],
- 'class': [
- include('whitespace'),
- (r'(function|record)\b', Keyword.Reserved),
- (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
- (_name, Name.Class, '#pop'),
- default('#pop')
- ],
- 'package-prefix': [
- include('whitespace'),
- (_name, Name.Namespace, '#pop'),
- default('#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\[\'"?\\abfnrtv]', String.Escape),
- (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
- using(HtmlLexer)),
- (r'<|\\?[^"\\<]+', String.Double)
- ]
- }
-
-
-class BugsLexer(RegexLexer):
- """
- Pygments Lexer for `OpenBugs <http://www.openbugs.net/>`_ and WinBugs
- models.
-
- .. versionadded:: 1.6
- """
-
- name = 'BUGS'
- aliases = ['bugs', 'winbugs', 'openbugs']
- filenames = ['*.bug']
-
- _FUNCTIONS = (
- # Scalar functions
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
- 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
- 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
- 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
- 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
- 'trunc',
- # Vector functions
- 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
- 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
- 'sd', 'sort', 'sum',
- # Special
- 'D', 'I', 'F', 'T', 'C')
- """ OpenBUGS built-in functions
-
- From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
-
- This also includes
-
- - T, C, I : Truncation and censoring.
- ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
- - D : ODE
- - F : Functional http://www.openbugs.info/Examples/Functionals.html
-
- """
-
- _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
- 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
- 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
- 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
- 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
- 'dmt', 'dwish')
- """ OpenBUGS built-in distributions
-
- Functions from
- http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
- """
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'comments': [
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model)(\s+)(\{)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # Reserved Words
- (r'(for|in)(?![\w.])', Keyword.Reserved),
- # Built-in Functions
- (r'(%s)(?=\s*\()'
- % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
- Name.Builtin),
- # Regular variable names
- (r'[A-Za-z][\w.]*', Name),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- # Punctuation
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
- # Infix and prefix operators
- (r'\+|-|\*|/', Operator),
- # Block
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r"^\s*model\s*{", text, re.M):
- return 0.7
- else:
- return 0.0
-
-
-class JagsLexer(RegexLexer):
- """
- Pygments Lexer for JAGS.
-
- .. versionadded:: 1.6
- """
-
- name = 'JAGS'
- aliases = ['jags']
- filenames = ['*.jag', '*.bug']
-
- # JAGS
- _FUNCTIONS = (
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cos', 'cosh', 'cloglog',
- 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
- 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
- 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
- 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
- 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
- # Truncation/Censoring (should these be included?)
- 'T', 'I')
- # Distributions with density, probability and quartile functions
- _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
- ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
- 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
- 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
- # Other distributions without density and probability
- _OTHER_DISTRIBUTIONS = (
- 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
- 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
- 'dnbinom', 'dweibull', 'ddirich')
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'names': [
- # Regular variable names
- (r'[a-zA-Z][\w.]*\b', Name),
- ],
- 'comments': [
- # do not use stateful comments
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model|data)(\s+)(\{)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- (r'var(?![\w.])', Keyword.Declaration),
- # Reserved Words
- (r'(for|in)(?![\w.])', Keyword.Reserved),
- # Builtins
- # Need to use lookahead because . is a valid char
- (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
- + _DISTRIBUTIONS
- + _OTHER_DISTRIBUTIONS),
- Name.Builtin),
- # Names
- include('names'),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- (r'<-|~', Operator),
- # JAGS includes many more operators than OpenBUGS
- (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*model\s*\{', text, re.M):
- if re.search(r'^\s*data\s*\{', text, re.M):
- return 0.9
- elif re.search(r'^\s*var', text, re.M):
- return 0.9
- else:
- return 0.3
- else:
- return 0
-
-
-class StanLexer(RegexLexer):
- """Pygments Lexer for Stan models.
-
- The Stan modeling language is specified in the *Stan Modeling Language
+ r'output|parameter|partial|protected|public|pure|redeclare|'
+ r'replaceable|return|stream|then|when|while)\b',
+ Keyword.Reserved),
+ (r'(and|not|or)\b', Operator.Word),
+ (r'(block|class|connector|end|function|model|operator|package|'
+ r'record|type)\b', Keyword.Reserved, 'class'),
+ (r'(false|true)\b', Keyword.Constant),
+ (r'within\b', Keyword.Reserved, 'package-prefix'),
+ (_name, Name)
+ ],
+ 'class': [
+ include('whitespace'),
+ (r'(function|record)\b', Keyword.Reserved),
+ (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
+ (_name, Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'package-prefix': [
+ include('whitespace'),
+ (_name, Name.Namespace, '#pop'),
+ default('#pop')
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\[\'"?\\abfnrtv]', String.Escape),
+ (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
+ using(HtmlLexer)),
+ (r'<|\\?[^"\\<]+', String.Double)
+ ]
+ }
+
+
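A brief sketch (not part of the patch) of the using() delegation that the Modelica 'string' state above relies on: text matched by a rule can be re-lexed with another lexer, so <html>...</html> islands inside Modelica strings get full HTML highlighting. The demo lexer name is hypothetical.

from pygments.lexer import RegexLexer, using
from pygments.token import String
from pygments.lexers.html import HtmlLexer

class HtmlIslandDemoLexer(RegexLexer):  # hypothetical demo, not a real Pygments lexer
    name = 'HtmlIslandDemo'
    tokens = {
        'root': [
            # delegate anything between <html> and </html> to HtmlLexer
            (r'(?is)<\s*html\s*>.*?<\s*/\s*html\s*>', using(HtmlLexer)),
            (r'[^<]+', String.Double),
            (r'<', String.Double),
        ],
    }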
+class BugsLexer(RegexLexer):
+ """
+ Pygments Lexer for `OpenBugs <http://www.openbugs.net/>`_ and WinBugs
+ models.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'BUGS'
+ aliases = ['bugs', 'winbugs', 'openbugs']
+ filenames = ['*.bug']
+
+ _FUNCTIONS = (
+ # Scalar functions
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
+ 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
+ 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
+ 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
+ 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
+ 'trunc',
+ # Vector functions
+ 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
+ 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
+ 'sd', 'sort', 'sum',
+ # Special
+ 'D', 'I', 'F', 'T', 'C')
+ """ OpenBUGS built-in functions
+
+ From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
+
+ This also includes
+
+ - T, C, I : Truncation and censoring.
+ ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
+ - D : ODE
+ - F : Functional http://www.openbugs.info/Examples/Functionals.html
+
+ """
+
+ _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
+ 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
+ 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
+ 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
+ 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
+ 'dmt', 'dwish')
+ """ OpenBUGS built-in distributions
+
+ Functions from
+ http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
+ """
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model)(\s+)(\{)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ # Reserved Words
+ (r'(for|in)(?![\w.])', Keyword.Reserved),
+ # Built-in Functions
+ (r'(%s)(?=\s*\()'
+ % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
+ Name.Builtin),
+ # Regular variable names
+ (r'[A-Za-z][\w.]*', Name),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ # Punctuation
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ # SLexer makes these tokens Operators.
+ (r'<-|~', Operator),
+ # Infix and prefix operators
+ (r'\+|-|\*|/', Operator),
+ # Block
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r"^\s*model\s*{", text, re.M):
+ return 0.7
+ else:
+ return 0.0
+
+
+class JagsLexer(RegexLexer):
+ """
+ Pygments Lexer for JAGS.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'JAGS'
+ aliases = ['jags']
+ filenames = ['*.jag', '*.bug']
+
+ # JAGS
+ _FUNCTIONS = (
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cos', 'cosh', 'cloglog',
+ 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
+ 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
+ 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
+ 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
+ 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
+ # Truncation/Censoring (should these be included?)
+ 'T', 'I')
+ # Distributions with density, probability and quartile functions
+ _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
+ ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
+ 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
+ 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
+ # Other distributions without density and probability
+ _OTHER_DISTRIBUTIONS = (
+ 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
+ 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
+ 'dnbinom', 'dweibull', 'ddirich')
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'names': [
+ # Regular variable names
+ (r'[a-zA-Z][\w.]*\b', Name),
+ ],
+ 'comments': [
+ # do not use stateful comments
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model|data)(\s+)(\{)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ (r'var(?![\w.])', Keyword.Declaration),
+ # Reserved Words
+ (r'(for|in)(?![\w.])', Keyword.Reserved),
+ # Builtins
+ # Need to use lookahead because . is a valid char
+ (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
+ + _DISTRIBUTIONS
+ + _OTHER_DISTRIBUTIONS),
+ Name.Builtin),
+ # Names
+ include('names'),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ (r'<-|~', Operator),
+ # JAGS includes many more operators than OpenBUGS
+ (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*model\s*\{', text, re.M):
+ if re.search(r'^\s*data\s*\{', text, re.M):
+ return 0.9
+ elif re.search(r'^\s*var', text, re.M):
+ return 0.9
+ else:
+ return 0.3
+ else:
+ return 0
+
+
+class StanLexer(RegexLexer):
+ """Pygments Lexer for Stan models.
+
+ The Stan modeling language is specified in the *Stan Modeling Language
User's Guide and Reference Manual, v2.17.0*,
`pdf <https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf>`__.
-
- .. versionadded:: 1.6
- """
-
- name = 'Stan'
- aliases = ['stan']
- filenames = ['*.stan']
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'(//|#).*$', Comment.Single),
- ],
- 'root': [
- # Stan is more restrictive on strings than this regex
- (r'"[^"]*"', String),
- # Comments
- include('comments'),
- # block start
- include('whitespace'),
- # Block start
- (r'(%s)(\s*)(\{)' %
- r'|'.join(('functions', 'data', r'transformed\s+?data',
- 'parameters', r'transformed\s+parameters',
- 'model', r'generated\s+quantities')),
- bygroups(Keyword.Namespace, Text, Punctuation)),
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Stan'
+ aliases = ['stan']
+ filenames = ['*.stan']
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'(//|#).*$', Comment.Single),
+ ],
+ 'root': [
+ # Stan is more restrictive on strings than this regex
+ (r'"[^"]*"', String),
+ # Comments
+ include('comments'),
+ # block start
+ include('whitespace'),
+ # Block start
+ (r'(%s)(\s*)(\{)' %
+ r'|'.join(('functions', 'data', r'transformed\s+?data',
+ 'parameters', r'transformed\s+parameters',
+ 'model', r'generated\s+quantities')),
+ bygroups(Keyword.Namespace, Text, Punctuation)),
# target keyword
(r'target\s*\+=', Keyword),
- # Reserved Words
- (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
- # Truncation
- (r'T(?=\s*\[)', Keyword),
- # Data types
- (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
+ # Reserved Words
+ (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
+ # Truncation
+ (r'T(?=\s*\[)', Keyword),
+ # Data types
+ (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
# < should be punctuation, but elsewhere I can't tell if it is in
# a range constraint
(r'(<)(\s*)(upper|lower)(\s*)(=)',
bygroups(Operator, Whitespace, Keyword, Whitespace, Punctuation)),
(r'(,)(\s*)(upper)(\s*)(=)',
bygroups(Punctuation, Whitespace, Keyword, Whitespace, Punctuation)),
- # Punctuation
+ # Punctuation
(r"[;,\[\]()]", Punctuation),
- # Builtin
+ # Builtin
(r'(%s)(?=\s*\()' % '|'.join(_stan_builtins.FUNCTIONS), Name.Builtin),
(r'(~)(\s*)(%s)(?=\s*\()' % '|'.join(_stan_builtins.DISTRIBUTIONS),
bygroups(Operator, Whitespace, Name.Builtin)),
- # Special names ending in __, like lp__
- (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
- (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
- # user-defined functions
- (r'[A-Za-z]\w*(?=\s*\()', Name.Function),
- # Regular variable names
- (r'[A-Za-z]\w*\b', Name),
- # Real Literals
+ # Special names ending in __, like lp__
+ (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
+ (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
+ # user-defined functions
+ (r'[A-Za-z]\w*(?=\s*\()', Name.Function),
+ # Regular variable names
+ (r'[A-Za-z]\w*\b', Name),
+ # Real Literals
(r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?', Number.Float),
(r'\.[0-9]+([eE][+-]?[0-9]+)?', Number.Float),
- # Integer Literals
+ # Integer Literals
(r'[0-9]+', Number.Integer),
- # Assignment operators
+ # Assignment operators
(r'<-|(?:\+|-|\.?/|\.?\*|=)?=|~', Operator),
- # Infix, prefix and postfix operators (and = )
+ # Infix, prefix and postfix operators (and = )
(r"\+|-|\.?\*|\.?/|\\|'|\^|!=?|<=?|>=?|\|\||&&|%|\?|:", Operator),
- # Block delimiters
- (r'[{}]', Punctuation),
+ # Block delimiters
+ (r'[{}]', Punctuation),
# Distribution |
(r'\|', Punctuation)
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*parameters\s*\{', text, re.M):
- return 1.0
- else:
- return 0.0
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*parameters\s*\{', text, re.M):
+ return 1.0
+ else:
+ return 0.0
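A small usage sketch (not part of the patch): the analyse_text() scores defined above are what pygments.lexers.guess_lexer() compares when it has to pick between the BUGS, JAGS and Stan lexers without a filename. The model text is made up for illustration.

from pygments import highlight
from pygments.lexers import guess_lexer
from pygments.formatters import TerminalFormatter

stan_model = """
parameters {
  real mu;
}
model {
  mu ~ normal(0, 1);
}
"""

lexer = guess_lexer(stan_model)  # 'parameters {' should make StanLexer score 1.0
print(highlight(stan_model, lexer, TerminalFormatter()))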
diff --git a/contrib/python/Pygments/py2/pygments/lexers/modula2.py b/contrib/python/Pygments/py2/pygments/lexers/modula2.py
index 4fd84dab8f..4c0fde8aab 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/modula2.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/modula2.py
@@ -1,1561 +1,1561 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.modula2
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Multi-Dialect Lexer for Modula-2.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.modula2
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Multi-Dialect Lexer for Modula-2.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
- String, Number, Punctuation, Error
-
-__all__ = ['Modula2Lexer']
-
-
-# Multi-Dialect Modula-2 Lexer
-class Modula2Lexer(RegexLexer):
- """
- For `Modula-2 <http://www.modula2.org/>`_ source code.
-
- The Modula-2 lexer supports several dialects. By default, it operates in
- fallback mode, recognising the *combined* literals, punctuation symbols
- and operators of all supported dialects, and the *combined* reserved words
- and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
- differentiating between library defined identifiers.
-
- To select a specific dialect, a dialect option may be passed
- or a dialect tag may be embedded into a source file.
-
- Dialect Options:
-
- `m2pim`
- Select PIM Modula-2 dialect.
- `m2iso`
- Select ISO Modula-2 dialect.
- `m2r10`
- Select Modula-2 R10 dialect.
- `objm2`
- Select Objective Modula-2 dialect.
-
- The PIM and ISO dialect options may be qualified with a language extension.
-
- Language Extensions:
-
- `+aglet`
- Select Aglet Modula-2 extensions, available with m2iso.
- `+gm2`
- Select GNU Modula-2 extensions, available with m2pim.
- `+p1`
- Select p1 Modula-2 extensions, available with m2iso.
- `+xds`
- Select XDS Modula-2 extensions, available with m2iso.
-
-
- Passing a Dialect Option via Unix Commandline Interface
-
- Dialect options may be passed to the lexer using the `dialect` key.
- Only one such option should be passed. If multiple dialect options are
- passed, the first valid option is used and any subsequent options are ignored.
-
- Examples:
-
- `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
- Use ISO dialect to render input to HTML output
- `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
- Use ISO dialect with p1 extensions to render input to RTF output
-
-
- Embedding a Dialect Option within a source file
-
- A dialect option may be embedded in a source file in the form of a dialect
- tag, a specially formatted comment that specifies a dialect option.
-
- Dialect Tag EBNF::
-
- dialectTag :
- OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
-
- dialectOption :
- 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
- 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
-
- Prefix : '!' ;
-
- OpeningCommentDelim : '(*' ;
-
- ClosingCommentDelim : '*)' ;
-
- No whitespace is permitted between the tokens of a dialect tag.
-
- In the event that a source file contains multiple dialect tags, the first
- tag that contains a valid dialect option will be used and any subsequent
- dialect tags will be ignored. Ideally, a dialect tag should be placed
- at the beginning of a source file.
-
- An embedded dialect tag overrides a dialect option set via command line.
-
- Examples:
-
- ``(*!m2r10*) DEFINITION MODULE Foobar; ...``
- Use Modula2 R10 dialect to render this source file.
- ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...``
- Use PIM dialect with GNU extensions to render this source file.
-
-
- Algol Publication Mode:
-
- In Algol publication mode, source text is rendered for publication of
- algorithms in scientific papers and academic texts, following the format
- of the Revised Algol-60 Language Report. It is activated by passing
- one of two corresponding styles as an option:
-
- `algol`
- render reserved words lowercase underline boldface
- and builtins lowercase boldface italic
- `algol_nu`
- render reserved words lowercase boldface (no underlining)
- and builtins lowercase boldface italic
-
- The lexer automatically performs the required lowercase conversion when
- this mode is activated.
-
- Example:
-
- ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input``
- Render input file in Algol publication mode to LaTeX output.
-
-
- Rendering Mode of First Class ADT Identifiers:
-
- The rendering of standard library first class ADT identifiers is controlled
- by option flag "treat_stdlib_adts_as_builtins".
-
- When this option is turned on, standard library ADT identifiers are rendered
- as builtins. When it is turned off, they are rendered as ordinary library
- identifiers.
-
- `treat_stdlib_adts_as_builtins` (default: On)
-
- The option is useful for dialects that support ADTs as first class objects
- and provide ADTs in the standard library that would otherwise be built-in.
-
- At present, only Modula-2 R10 supports library ADTs as first class objects
- and therefore, no ADT identifiers are defined for any other dialects.
-
- Example:
-
- ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
- Render standard library ADTs as ordinary library types.
-
- .. versionadded:: 1.3
-
- .. versionchanged:: 2.1
- Added multi-dialect support.
- """
- name = 'Modula-2'
- aliases = ['modula2', 'm2']
- filenames = ['*.def', '*.mod']
- mimetypes = ['text/x-modula2']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'dialecttags': [
- # PIM Dialect Tag
- (r'\(\*!m2pim\*\)', Comment.Special),
- # ISO Dialect Tag
- (r'\(\*!m2iso\*\)', Comment.Special),
- # M2R10 Dialect Tag
- (r'\(\*!m2r10\*\)', Comment.Special),
- # ObjM2 Dialect Tag
- (r'\(\*!objm2\*\)', Comment.Special),
- # Aglet Extensions Dialect Tag
- (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
- # GNU Extensions Dialect Tag
- (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
- # p1 Extensions Dialect Tag
- (r'\(\*!m2iso\+p1\*\)', Comment.Special),
- # XDS Extensions Dialect Tag
- (r'\(\*!m2iso\+xds\*\)', Comment.Special),
- ],
- 'identifiers': [
- (r'([a-zA-Z_$][\w$]*)', Name),
- ],
- 'prefixed_number_literals': [
- #
- # Base-2, whole number
- (r'0b[01]+(\'[01]+)*', Number.Bin),
- #
- # Base-16, whole number
- (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
- ],
- 'plain_number_literals': [
- #
- # Base-10, real number with exponent
- (r'[0-9]+(\'[0-9]+)*' # integral part
- r'\.[0-9]+(\'[0-9]+)*' # fractional part
- r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent
- Number.Float),
- #
- # Base-10, real number without exponent
- (r'[0-9]+(\'[0-9]+)*' # integral part
- r'\.[0-9]+(\'[0-9]+)*', # fractional part
- Number.Float),
- #
- # Base-10, whole number
- (r'[0-9]+(\'[0-9]+)*', Number.Integer),
- ],
- 'suffixed_number_literals': [
- #
- # Base-8, whole number
- (r'[0-7]+B', Number.Oct),
- #
- # Base-8, character code
- (r'[0-7]+C', Number.Oct),
- #
- # Base-16, number
- (r'[0-9A-F]+H', Number.Hex),
- ],
- 'string_literals': [
- (r"'(\\\\|\\'|[^'])*'", String), # single quoted string
- (r'"(\\\\|\\"|[^"])*"', String), # double quoted string
- ],
- 'digraph_operators': [
- # Dot Product Operator
- (r'\*\.', Operator),
- # Array Concatenation Operator
- (r'\+>', Operator), # M2R10 + ObjM2
- # Inequality Operator
- (r'<>', Operator), # ISO + PIM
- # Less-Or-Equal, Subset
- (r'<=', Operator),
- # Greater-Or-Equal, Superset
- (r'>=', Operator),
- # Identity Operator
- (r'==', Operator), # M2R10 + ObjM2
- # Type Conversion Operator
- (r'::', Operator), # M2R10 + ObjM2
- # Assignment Symbol
- (r':=', Operator),
- # Postfix Increment Mutator
- (r'\+\+', Operator), # M2R10 + ObjM2
- # Postfix Decrement Mutator
- (r'--', Operator), # M2R10 + ObjM2
- ],
- 'unigraph_operators': [
- # Arithmetic Operators
- (r'[+-]', Operator),
- (r'[*/]', Operator),
- # ISO 80000-2 compliant Set Difference Operator
- (r'\\', Operator), # M2R10 + ObjM2
- # Relational Operators
- (r'[=#<>]', Operator),
- # Dereferencing Operator
- (r'\^', Operator),
- # Dereferencing Operator Synonym
- (r'@', Operator), # ISO
- # Logical AND Operator Synonym
- (r'&', Operator), # PIM + ISO
- # Logical NOT Operator Synonym
- (r'~', Operator), # PIM + ISO
- # Smalltalk Message Prefix
- (r'`', Operator), # ObjM2
- ],
- 'digraph_punctuation': [
- # Range Constructor
- (r'\.\.', Punctuation),
- # Opening Chevron Bracket
- (r'<<', Punctuation), # M2R10 + ISO
- # Closing Chevron Bracket
- (r'>>', Punctuation), # M2R10 + ISO
- # Blueprint Punctuation
- (r'->', Punctuation), # M2R10 + ISO
- # Distinguish |# and # in M2 R10
- (r'\|#', Punctuation),
- # Distinguish ## and # in M2 R10
- (r'##', Punctuation),
- # Distinguish |* and * in M2 R10
- (r'\|\*', Punctuation),
- ],
- 'unigraph_punctuation': [
- # Common Punctuation
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
+ String, Number, Punctuation, Error
+
+__all__ = ['Modula2Lexer']
+
+
+# Multi-Dialect Modula-2 Lexer
+class Modula2Lexer(RegexLexer):
+ """
+ For `Modula-2 <http://www.modula2.org/>`_ source code.
+
+ The Modula-2 lexer supports several dialects. By default, it operates in
+ fallback mode, recognising the *combined* literals, punctuation symbols
+ and operators of all supported dialects, and the *combined* reserved words
+ and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
+ differentiating between library defined identifiers.
+
+ To select a specific dialect, a dialect option may be passed
+ or a dialect tag may be embedded into a source file.
+
+ Dialect Options:
+
+ `m2pim`
+ Select PIM Modula-2 dialect.
+ `m2iso`
+ Select ISO Modula-2 dialect.
+ `m2r10`
+ Select Modula-2 R10 dialect.
+ `objm2`
+ Select Objective Modula-2 dialect.
+
+ The PIM and ISO dialect options may be qualified with a language extension.
+
+ Language Extensions:
+
+ `+aglet`
+ Select Aglet Modula-2 extensions, available with m2iso.
+ `+gm2`
+ Select GNU Modula-2 extensions, available with m2pim.
+ `+p1`
+ Select p1 Modula-2 extensions, available with m2iso.
+ `+xds`
+ Select XDS Modula-2 extensions, available with m2iso.
+
+
+ Passing a Dialect Option via Unix Commandline Interface
+
+ Dialect options may be passed to the lexer using the `dialect` key.
+ Only one such option should be passed. If multiple dialect options are
+ passed, the first valid option is used and any subsequent options are ignored.
+
+ Examples:
+
+ `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
+ Use ISO dialect to render input to HTML output
+ `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
+ Use ISO dialect with p1 extensions to render input to RTF output
+
+
+ Embedding a Dialect Option within a source file
+
+ A dialect option may be embedded in a source file in the form of a dialect
+ tag, a specially formatted comment that specifies a dialect option.
+
+ Dialect Tag EBNF::
+
+ dialectTag :
+ OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
+
+ dialectOption :
+ 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
+ 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
+
+ Prefix : '!' ;
+
+ OpeningCommentDelim : '(*' ;
+
+ ClosingCommentDelim : '*)' ;
+
+ No whitespace is permitted between the tokens of a dialect tag.
+
+ In the event that a source file contains multiple dialect tags, the first
+ tag that contains a valid dialect option will be used and any subsequent
+ dialect tags will be ignored. Ideally, a dialect tag should be placed
+ at the beginning of a source file.
+
+ An embedded dialect tag overrides a dialect option set via command line.
+
+ Examples:
+
+ ``(*!m2r10*) DEFINITION MODULE Foobar; ...``
+ Use Modula2 R10 dialect to render this source file.
+ ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...``
+ Use PIM dialect with GNU extensions to render this source file.
+
+
+ Algol Publication Mode:
+
+ In Algol publication mode, source text is rendered for publication of
+ algorithms in scientific papers and academic texts, following the format
+ of the Revised Algol-60 Language Report. It is activated by passing
+ one of two corresponding styles as an option:
+
+ `algol`
+ render reserved words lowercase underline boldface
+ and builtins lowercase boldface italic
+ `algol_nu`
+ render reserved words lowercase boldface (no underlining)
+ and builtins lowercase boldface italic
+
+ The lexer automatically performs the required lowercase conversion when
+ this mode is activated.
+
+ Example:
+
+ ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input``
+ Render input file in Algol publication mode to LaTeX output.
+
+
+ Rendering Mode of First Class ADT Identifiers:
+
+ The rendering of standard library first class ADT identifiers is controlled
+ by option flag "treat_stdlib_adts_as_builtins".
+
+ When this option is turned on, standard library ADT identifiers are rendered
+ as builtins. When it is turned off, they are rendered as ordinary library
+ identifiers.
+
+ `treat_stdlib_adts_as_builtins` (default: On)
+
+ The option is useful for dialects that support ADTs as first class objects
+ and provide ADTs in the standard library that would otherwise be built-in.
+
+ At present, only Modula-2 R10 supports library ADTs as first class objects
+ and therefore, no ADT identifiers are defined for any other dialects.
+
+ Example:
+
+ ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
+ Render standard library ADTs as ordinary library types.
+
+ .. versionadded:: 1.3
+
+ .. versionchanged:: 2.1
+ Added multi-dialect support.
+ """
+ name = 'Modula-2'
+ aliases = ['modula2', 'm2']
+ filenames = ['*.def', '*.mod']
+ mimetypes = ['text/x-modula2']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'whitespace': [
+ (r'\n+', Text), # blank lines
+ (r'\s+', Text), # whitespace
+ ],
+ 'dialecttags': [
+ # PIM Dialect Tag
+ (r'\(\*!m2pim\*\)', Comment.Special),
+ # ISO Dialect Tag
+ (r'\(\*!m2iso\*\)', Comment.Special),
+ # M2R10 Dialect Tag
+ (r'\(\*!m2r10\*\)', Comment.Special),
+ # ObjM2 Dialect Tag
+ (r'\(\*!objm2\*\)', Comment.Special),
+ # Aglet Extensions Dialect Tag
+ (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
+ # GNU Extensions Dialect Tag
+ (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
+ # p1 Extensions Dialect Tag
+ (r'\(\*!m2iso\+p1\*\)', Comment.Special),
+ # XDS Extensions Dialect Tag
+ (r'\(\*!m2iso\+xds\*\)', Comment.Special),
+ ],
+ 'identifiers': [
+ (r'([a-zA-Z_$][\w$]*)', Name),
+ ],
+ 'prefixed_number_literals': [
+ #
+ # Base-2, whole number
+ (r'0b[01]+(\'[01]+)*', Number.Bin),
+ #
+ # Base-16, whole number
+ (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
+ ],
+ 'plain_number_literals': [
+ #
+ # Base-10, real number with exponent
+ (r'[0-9]+(\'[0-9]+)*' # integral part
+ r'\.[0-9]+(\'[0-9]+)*' # fractional part
+ r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent
+ Number.Float),
+ #
+ # Base-10, real number without exponent
+ (r'[0-9]+(\'[0-9]+)*' # integral part
+ r'\.[0-9]+(\'[0-9]+)*', # fractional part
+ Number.Float),
+ #
+ # Base-10, whole number
+ (r'[0-9]+(\'[0-9]+)*', Number.Integer),
+ ],
+ 'suffixed_number_literals': [
+ #
+ # Base-8, whole number
+ (r'[0-7]+B', Number.Oct),
+ #
+ # Base-8, character code
+ (r'[0-7]+C', Number.Oct),
+ #
+ # Base-16, number
+ (r'[0-9A-F]+H', Number.Hex),
+ ],
+ 'string_literals': [
+ (r"'(\\\\|\\'|[^'])*'", String), # single quoted string
+ (r'"(\\\\|\\"|[^"])*"', String), # double quoted string
+ ],
+ 'digraph_operators': [
+ # Dot Product Operator
+ (r'\*\.', Operator),
+ # Array Concatenation Operator
+ (r'\+>', Operator), # M2R10 + ObjM2
+ # Inequality Operator
+ (r'<>', Operator), # ISO + PIM
+ # Less-Or-Equal, Subset
+ (r'<=', Operator),
+ # Greater-Or-Equal, Superset
+ (r'>=', Operator),
+ # Identity Operator
+ (r'==', Operator), # M2R10 + ObjM2
+ # Type Conversion Operator
+ (r'::', Operator), # M2R10 + ObjM2
+ # Assignment Symbol
+ (r':=', Operator),
+ # Postfix Increment Mutator
+ (r'\+\+', Operator), # M2R10 + ObjM2
+ # Postfix Decrement Mutator
+ (r'--', Operator), # M2R10 + ObjM2
+ ],
+ 'unigraph_operators': [
+ # Arithmetic Operators
+ (r'[+-]', Operator),
+ (r'[*/]', Operator),
+ # ISO 80000-2 compliant Set Difference Operator
+ (r'\\', Operator), # M2R10 + ObjM2
+ # Relational Operators
+ (r'[=#<>]', Operator),
+ # Dereferencing Operator
+ (r'\^', Operator),
+ # Dereferencing Operator Synonym
+ (r'@', Operator), # ISO
+ # Logical AND Operator Synonym
+ (r'&', Operator), # PIM + ISO
+ # Logical NOT Operator Synonym
+ (r'~', Operator), # PIM + ISO
+ # Smalltalk Message Prefix
+ (r'`', Operator), # ObjM2
+ ],
+ 'digraph_punctuation': [
+ # Range Constructor
+ (r'\.\.', Punctuation),
+ # Opening Chevron Bracket
+ (r'<<', Punctuation), # M2R10 + ISO
+ # Closing Chevron Bracket
+ (r'>>', Punctuation), # M2R10 + ISO
+ # Blueprint Punctuation
+ (r'->', Punctuation), # M2R10 + ISO
+ # Distinguish |# and # in M2 R10
+ (r'\|#', Punctuation),
+ # Distinguish ## and # in M2 R10
+ (r'##', Punctuation),
+ # Distinguish |* and * in M2 R10
+ (r'\|\*', Punctuation),
+ ],
+ 'unigraph_punctuation': [
+ # Common Punctuation
(r'[()\[\]{},.:;|]', Punctuation),
- # Case Label Separator Synonym
- (r'!', Punctuation), # ISO
- # Blueprint Punctuation
- (r'\?', Punctuation), # M2R10 + ObjM2
- ],
- 'comments': [
- # Single Line Comment
- (r'^//.*?\n', Comment.Single), # M2R10 + ObjM2
- # Block Comment
- (r'\(\*([^$].*?)\*\)', Comment.Multiline),
- # Template Block Comment
- (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2
- ],
- 'pragmas': [
- # ISO Style Pragmas
- (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2
- # Pascal Style Pragmas
- (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM
- ],
- 'root': [
- include('whitespace'),
- include('dialecttags'),
- include('pragmas'),
- include('comments'),
- include('identifiers'),
- include('suffixed_number_literals'), # PIM + ISO
- include('prefixed_number_literals'), # M2R10 + ObjM2
- include('plain_number_literals'),
- include('string_literals'),
- include('digraph_punctuation'),
- include('digraph_operators'),
- include('unigraph_punctuation'),
- include('unigraph_operators'),
- ]
- }
-
-# C o m m o n D a t a s e t s
-
- # Common Reserved Words Dataset
- common_reserved_words = (
- # 37 common reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
- 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
- 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
- 'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
- 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
- )
-
- # Common Builtins Dataset
- common_builtins = (
- # 16 common builtins
- 'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
- 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
- 'TRUE',
- )
-
- # Common Pseudo-Module Builtins Dataset
- common_pseudo_builtins = (
- # 4 common pseudo builtins
- 'ADDRESS', 'BYTE', 'WORD', 'ADR'
- )
-
-# P I M M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for PIM Modula-2
- pim_lexemes_to_reject = (
- '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
- '+>', '->', '<<', '>>', '|#', '##',
- )
-
- # PIM Modula-2 Additional Reserved Words Dataset
- pim_additional_reserved_words = (
- # 3 additional reserved words
- 'EXPORT', 'QUALIFIED', 'WITH',
- )
-
- # PIM Modula-2 Additional Builtins Dataset
- pim_additional_builtins = (
- # 16 additional builtins
- 'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
- 'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
- )
-
- # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
- pim_additional_pseudo_builtins = (
- # 5 additional pseudo builtins
- 'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
- )
-
-# I S O M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for ISO Modula-2
- iso_lexemes_to_reject = (
- '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
- '<<', '>>', '|#', '##',
- )
-
- # ISO Modula-2 Additional Reserved Words Dataset
- iso_additional_reserved_words = (
- # 9 additional reserved words (ISO 10514-1)
- 'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
- 'REM', 'RETRY', 'WITH',
- # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
- 'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
- 'REVEAL', 'TRACED', 'UNSAFEGUARDED',
- )
-
- # ISO Modula-2 Additional Builtins Dataset
- iso_additional_builtins = (
- # 26 additional builtins (ISO 10514-1)
- 'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
- 'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE', 'LENGTH',
- 'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
- 'TRUNC', 'UNINTERRUBTIBLE', 'VAL',
- # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
- 'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
- )
-
- # ISO Modula-2 Additional Pseudo-Module Builtins Dataset
- iso_additional_pseudo_builtins = (
- # 14 additional builtins (SYSTEM)
- 'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
- 'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
- 'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
- # 13 additional builtins (COROUTINES)
- 'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
- 'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
- 'NEWCOROUTINE', 'PROT', 'TRANSFER',
- # 9 additional builtins (EXCEPTIONS)
- 'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
- 'ExceptionSource', 'GetMessage', 'IsCurrentSource',
- 'IsExceptionalExecution', 'RAISE',
- # 3 additional builtins (TERMINATION)
- 'TERMINATION', 'IsTerminating', 'HasHalted',
- # 4 additional builtins (M2EXCEPTION)
- 'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
- 'indexException', 'rangeException', 'caseSelectException',
- 'invalidLocation', 'functionException', 'wholeValueException',
- 'wholeDivException', 'realValueException', 'realDivException',
- 'complexValueException', 'complexDivException', 'protException',
- 'sysException', 'coException', 'exException',
- )
-
-# M o d u l a - 2 R 1 0 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for Modula-2 R10
- m2r10_lexemes_to_reject = (
- '!', '`', '@', '$', '%', '&', '<>',
- )
-
- # Modula-2 R10 reserved words in addition to the common set
- m2r10_additional_reserved_words = (
- # 12 additional reserved words
- 'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
- 'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
- # 2 additional reserved words with symbolic assembly option
- 'ASM', 'REG',
- )
-
- # Modula-2 R10 builtins in addition to the common set
- m2r10_additional_builtins = (
- # 26 additional builtins
- 'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
- 'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
- 'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
- 'UNICHAR', 'WRITE', 'WRITEF',
- )
-
- # Modula-2 R10 Additional Pseudo-Module Builtins Dataset
- m2r10_additional_pseudo_builtins = (
- # 13 additional builtins (TPROPERTIES)
- 'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
- 'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
- 'TMAXEXP', 'TMINEXP',
- # 4 additional builtins (CONVERSION)
- 'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
- # 35 additional builtins (UNSAFE)
- 'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
- 'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
- 'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
- 'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
- 'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
- # 11 additional builtins (ATOMIC)
- 'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
- 'BWNAND', 'BWOR', 'BWXOR',
- # 7 additional builtins (COMPILER)
- 'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
- 'HASH',
- # 5 additional builtins (ASSEMBLER)
- 'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
- )
-
-# O b j e c t i v e M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for Objective Modula-2
- objm2_lexemes_to_reject = (
- '!', '$', '%', '&', '<>',
- )
-
- # Objective Modula-2 Extensions
- # reserved words in addition to Modula-2 R10
- objm2_additional_reserved_words = (
- # 16 additional reserved words
- 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
- 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
- 'SUPER', 'TRY',
- )
-
- # Objective Modula-2 Extensions
- # builtins in addition to Modula-2 R10
- objm2_additional_builtins = (
- # 3 additional builtins
- 'OBJECT', 'NO', 'YES',
- )
-
- # Objective Modula-2 Extensions
- # pseudo-module builtins in addition to Modula-2 R10
- objm2_additional_pseudo_builtins = (
- # None
- )
-
-# A g l e t M o d u l a - 2 D a t a s e t s
-
- # Aglet Extensions
- # reserved words in addition to ISO Modula-2
- aglet_additional_reserved_words = (
- # None
- )
-
- # Aglet Extensions
- # builtins in addition to ISO Modula-2
- aglet_additional_builtins = (
- # 9 additional builtins
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
- )
-
- # Aglet Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- aglet_additional_pseudo_builtins = (
- # None
- )
-
-# G N U M o d u l a - 2 D a t a s e t s
-
- # GNU Extensions
- # reserved words in addition to PIM Modula-2
- gm2_additional_reserved_words = (
- # 10 additional reserved words
- 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
- '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
- )
-
- # GNU Extensions
- # builtins in addition to PIM Modula-2
- gm2_additional_builtins = (
- # 21 additional builtins
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
- 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
- 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
- )
-
- # GNU Extensions
- # pseudo-module builtins in addition to PIM Modula-2
- gm2_additional_pseudo_builtins = (
- # None
- )
-
-# p 1 M o d u l a - 2 D a t a s e t s
-
- # p1 Extensions
- # reserved words in addition to ISO Modula-2
- p1_additional_reserved_words = (
- # None
- )
-
- # p1 Extensions
- # builtins in addition to ISO Modula-2
- p1_additional_builtins = (
- # None
- )
-
- # p1 Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- p1_additional_pseudo_builtins = (
- # 1 additional builtin
- 'BCD',
- )
-
-# X D S M o d u l a - 2 D a t a s e t s
-
- # XDS Extensions
- # reserved words in addition to ISO Modula-2
- xds_additional_reserved_words = (
- # 1 additional reserved word
- 'SEQ',
- )
-
- # XDS Extensions
- # builtins in addition to ISO Modula-2
- xds_additional_builtins = (
- # 9 additional builtins
- 'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
- 'LONGCARD', 'SHORTCARD', 'SHORTINT',
- )
-
- # XDS Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- xds_additional_pseudo_builtins = (
- # 22 additional builtins (SYSTEM)
- 'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
- 'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
- 'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void'
- # 3 additional builtins (COMPILER)
- 'COMPILER', 'OPTION', 'EQUATION'
- )
-
-# P I M S t a n d a r d L i b r a r y D a t a s e t s
-
- # PIM Modula-2 Standard Library Modules Dataset
- pim_stdlib_module_identifiers = (
- 'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
- )
-
- # PIM Modula-2 Standard Library Types Dataset
- pim_stdlib_type_identifiers = (
- 'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
- 'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
- 'DirectoryCommand',
- )
-
- # PIM Modula-2 Standard Library Procedures Dataset
- pim_stdlib_proc_identifiers = (
- 'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
- 'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
- 'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
- 'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
- 'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
- 'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
- 'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
- 'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
- 'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
- 'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE',
- )
-
- # PIM Modula-2 Standard Library Variables Dataset
- pim_stdlib_var_identifiers = (
- 'Done', 'termCH', 'in', 'out'
- )
-
- # PIM Modula-2 Standard Library Constants Dataset
- pim_stdlib_const_identifiers = (
- 'EOL',
- )
-
-# I S O S t a n d a r d L i b r a r y D a t a s e t s
-
- # ISO Modula-2 Standard Library Modules Dataset
- iso_stdlib_module_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Types Dataset
- iso_stdlib_type_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Procedures Dataset
- iso_stdlib_proc_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Variables Dataset
- iso_stdlib_var_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Constants Dataset
- iso_stdlib_const_identifiers = (
- # TO DO
- )
-
-# M 2 R 1 0 S t a n d a r d L i b r a r y D a t a s e t s
-
- # Modula-2 R10 Standard Library ADTs Dataset
- m2r10_stdlib_adt_identifiers = (
- 'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
- 'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
- 'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
- 'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
- 'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
- 'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
- 'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
- 'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
- 'INT64', 'INT128', 'STRING', 'UNISTRING',
- )
-
- # Modula-2 R10 Standard Library Blueprints Dataset
- m2r10_stdlib_blueprint_identifiers = (
- 'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
- 'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
- 'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
- 'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
- 'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
- 'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
- 'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
- )
-
- # Modula-2 R10 Standard Library Modules Dataset
- m2r10_stdlib_module_identifiers = (
- 'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
- 'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
- 'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
- 'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
- 'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
- )
-
- # Modula-2 R10 Standard Library Types Dataset
- m2r10_stdlib_type_identifiers = (
- 'File', 'Status',
- # TO BE COMPLETED
- )
-
- # Modula-2 R10 Standard Library Procedures Dataset
- m2r10_stdlib_proc_identifiers = (
- 'ALLOCATE', 'DEALLOCATE', 'SIZE',
- # TO BE COMPLETED
- )
-
- # Modula-2 R10 Standard Library Variables Dataset
- m2r10_stdlib_var_identifiers = (
- 'stdIn', 'stdOut', 'stdErr',
- )
-
- # Modula-2 R10 Standard Library Constants Dataset
- m2r10_stdlib_const_identifiers = (
- 'pi', 'tau',
- )
-
-# D i a l e c t s
-
- # Dialect modes
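-    # composite ids such as 'm2pim+gm2' combine a base dialect with the
-    # extension datasets of a specific implementation (here GNU Modula-2)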
- dialects = (
- 'unknown',
- 'm2pim', 'm2iso', 'm2r10', 'objm2',
- 'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
- )
-
-# D a t a b a s e s
-
- # Lexemes to Mark as Errors Database
- lexemes_to_reject_db = {
- # Lexemes to reject for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Lexemes to reject for PIM Modula-2
- 'm2pim': (
- pim_lexemes_to_reject,
- ),
- # Lexemes to reject for ISO Modula-2
- 'm2iso': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for Modula-2 R10
- 'm2r10': (
- m2r10_lexemes_to_reject,
- ),
- # Lexemes to reject for Objective Modula-2
- 'objm2': (
- objm2_lexemes_to_reject,
- ),
- # Lexemes to reject for Aglet Modula-2
- 'm2iso+aglet': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for GNU Modula-2
- 'm2pim+gm2': (
- pim_lexemes_to_reject,
- ),
- # Lexemes to reject for p1 Modula-2
- 'm2iso+p1': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for XDS Modula-2
- 'm2iso+xds': (
- iso_lexemes_to_reject,
- ),
- }
-
- # Reserved Words Database
- reserved_words_db = {
- # Reserved words for unknown dialect
- 'unknown': (
- common_reserved_words,
- pim_additional_reserved_words,
- iso_additional_reserved_words,
- m2r10_additional_reserved_words,
- ),
-
- # Reserved words for PIM Modula-2
- 'm2pim': (
- common_reserved_words,
- pim_additional_reserved_words,
- ),
-
-        # Reserved words for ISO Modula-2
- 'm2iso': (
- common_reserved_words,
- iso_additional_reserved_words,
- ),
-
-        # Reserved words for Modula-2 R10
- 'm2r10': (
- common_reserved_words,
- m2r10_additional_reserved_words,
- ),
-
- # Reserved words for Objective Modula-2
- 'objm2': (
- common_reserved_words,
- m2r10_additional_reserved_words,
- objm2_additional_reserved_words,
- ),
-
- # Reserved words for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_reserved_words,
- iso_additional_reserved_words,
- aglet_additional_reserved_words,
- ),
-
- # Reserved words for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_reserved_words,
- pim_additional_reserved_words,
- gm2_additional_reserved_words,
- ),
-
- # Reserved words for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_reserved_words,
- iso_additional_reserved_words,
- p1_additional_reserved_words,
- ),
-
- # Reserved words for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_reserved_words,
- iso_additional_reserved_words,
- xds_additional_reserved_words,
- ),
- }
-
- # Builtins Database
- builtins_db = {
- # Builtins for unknown dialect
- 'unknown': (
- common_builtins,
- pim_additional_builtins,
- iso_additional_builtins,
- m2r10_additional_builtins,
- ),
-
- # Builtins for PIM Modula-2
- 'm2pim': (
- common_builtins,
- pim_additional_builtins,
- ),
-
- # Builtins for ISO Modula-2
- 'm2iso': (
- common_builtins,
- iso_additional_builtins,
- ),
-
-        # Builtins for Modula-2 R10
- 'm2r10': (
- common_builtins,
- m2r10_additional_builtins,
- ),
-
- # Builtins for Objective Modula-2
- 'objm2': (
- common_builtins,
- m2r10_additional_builtins,
- objm2_additional_builtins,
- ),
-
- # Builtins for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_builtins,
- iso_additional_builtins,
- aglet_additional_builtins,
- ),
-
- # Builtins for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_builtins,
- pim_additional_builtins,
- gm2_additional_builtins,
- ),
-
- # Builtins for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_builtins,
- iso_additional_builtins,
- p1_additional_builtins,
- ),
-
- # Builtins for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_builtins,
- iso_additional_builtins,
- xds_additional_builtins,
- ),
- }
-
- # Pseudo-Module Builtins Database
- pseudo_builtins_db = {
- # Builtins for unknown dialect
- 'unknown': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- iso_additional_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- ),
-
- # Builtins for PIM Modula-2
- 'm2pim': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- ),
-
- # Builtins for ISO Modula-2
- 'm2iso': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- ),
-
-        # Builtins for Modula-2 R10
- 'm2r10': (
- common_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- ),
-
- # Builtins for Objective Modula-2
- 'objm2': (
- common_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- objm2_additional_pseudo_builtins,
- ),
-
- # Builtins for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- aglet_additional_pseudo_builtins,
- ),
-
- # Builtins for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- gm2_additional_pseudo_builtins,
- ),
-
- # Builtins for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- p1_additional_pseudo_builtins,
- ),
-
- # Builtins for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- xds_additional_pseudo_builtins,
- ),
- }
-
- # Standard Library ADTs Database
- stdlib_adts_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library ADTs for PIM Modula-2
- 'm2pim': (
- # No first class library types
- ),
-
- # Standard Library ADTs for ISO Modula-2
- 'm2iso': (
- # No first class library types
- ),
-
- # Standard Library ADTs for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library ADTs for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library ADTs for Aglet Modula-2
- 'm2iso+aglet': (
- # No first class library types
- ),
-
- # Standard Library ADTs for GNU Modula-2
- 'm2pim+gm2': (
- # No first class library types
- ),
-
- # Standard Library ADTs for p1 Modula-2
- 'm2iso+p1': (
- # No first class library types
- ),
-
- # Standard Library ADTs for XDS Modula-2
- 'm2iso+xds': (
- # No first class library types
- ),
- }
-
- # Standard Library Modules Database
- stdlib_modules_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Modules for PIM Modula-2
- 'm2pim': (
- pim_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for ISO Modula-2
- 'm2iso': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_blueprint_identifiers,
- m2r10_stdlib_module_identifiers,
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library Modules for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_blueprint_identifiers,
- m2r10_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_module_identifiers,
- ),
- }
-
- # Standard Library Types Database
- stdlib_types_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Types for PIM Modula-2
- 'm2pim': (
- pim_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for ISO Modula-2
- 'm2iso': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_type_identifiers,
- ),
- }
-
- # Standard Library Procedures Database
- stdlib_procedures_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Procedures for PIM Modula-2
- 'm2pim': (
- pim_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for ISO Modula-2
- 'm2iso': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_proc_identifiers,
- ),
- }
-
- # Standard Library Variables Database
- stdlib_variables_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Variables for PIM Modula-2
- 'm2pim': (
- pim_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for ISO Modula-2
- 'm2iso': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_var_identifiers,
- ),
- }
-
- # Standard Library Constants Database
- stdlib_constants_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Constants for PIM Modula-2
- 'm2pim': (
- pim_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for ISO Modula-2
- 'm2iso': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_const_identifiers,
- ),
- }
-
-# M e t h o d s
-
- # initialise a lexer instance
- def __init__(self, **options):
- #
- # check dialect options
- #
- dialects = get_list_opt(options, 'dialect', [])
- #
- for dialect_option in dialects:
- if dialect_option in self.dialects[1:-1]:
- # valid dialect option found
- self.set_dialect(dialect_option)
- break
- #
- # Fallback Mode (DEFAULT)
- else:
- # no valid dialect option
- self.set_dialect('unknown')
- #
- self.dialect_set_by_tag = False
- #
- # check style options
- #
- styles = get_list_opt(options, 'style', [])
- #
- # use lowercase mode for Algol style
- if 'algol' in styles or 'algol_nu' in styles:
- self.algol_publication_mode = True
- else:
- self.algol_publication_mode = False
- #
- # Check option flags
- #
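-        # when enabled, stdlib ADT identifiers are tokenised like
-        # pseudo-module builtins, otherwise like library module names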
- self.treat_stdlib_adts_as_builtins = get_bool_opt(
- options, 'treat_stdlib_adts_as_builtins', True)
- #
- # call superclass initialiser
- RegexLexer.__init__(self, **options)
-
- # Set lexer to a specified dialect
- def set_dialect(self, dialect_id):
- #
- # if __debug__:
- # print 'entered set_dialect with arg: ', dialect_id
- #
- # check dialect name against known dialects
- if dialect_id not in self.dialects:
- dialect = 'unknown' # default
- else:
- dialect = dialect_id
- #
- # compose lexemes to reject set
- lexemes_to_reject_set = set()
- # add each list of reject lexemes for this dialect
- for list in self.lexemes_to_reject_db[dialect]:
- lexemes_to_reject_set.update(set(list))
- #
- # compose reserved words set
- reswords_set = set()
- # add each list of reserved words for this dialect
- for list in self.reserved_words_db[dialect]:
- reswords_set.update(set(list))
- #
- # compose builtins set
- builtins_set = set()
- # add each list of builtins for this dialect excluding reserved words
- for list in self.builtins_db[dialect]:
- builtins_set.update(set(list).difference(reswords_set))
- #
- # compose pseudo-builtins set
- pseudo_builtins_set = set()
-        # add each list of pseudo-builtins for this dialect excluding reserved words
- for list in self.pseudo_builtins_db[dialect]:
- pseudo_builtins_set.update(set(list).difference(reswords_set))
- #
- # compose ADTs set
- adts_set = set()
- # add each list of ADTs for this dialect excluding reserved words
- for list in self.stdlib_adts_db[dialect]:
- adts_set.update(set(list).difference(reswords_set))
- #
- # compose modules set
- modules_set = set()
-        # add each list of modules for this dialect excluding builtins
- for list in self.stdlib_modules_db[dialect]:
- modules_set.update(set(list).difference(builtins_set))
- #
- # compose types set
- types_set = set()
- # add each list of types for this dialect excluding builtins
- for list in self.stdlib_types_db[dialect]:
- types_set.update(set(list).difference(builtins_set))
- #
- # compose procedures set
- procedures_set = set()
- # add each list of procedures for this dialect excluding builtins
- for list in self.stdlib_procedures_db[dialect]:
- procedures_set.update(set(list).difference(builtins_set))
- #
- # compose variables set
- variables_set = set()
- # add each list of variables for this dialect excluding builtins
- for list in self.stdlib_variables_db[dialect]:
- variables_set.update(set(list).difference(builtins_set))
- #
- # compose constants set
- constants_set = set()
- # add each list of constants for this dialect excluding builtins
- for list in self.stdlib_constants_db[dialect]:
- constants_set.update(set(list).difference(builtins_set))
- #
- # update lexer state
- self.dialect = dialect
- self.lexemes_to_reject = lexemes_to_reject_set
- self.reserved_words = reswords_set
- self.builtins = builtins_set
- self.pseudo_builtins = pseudo_builtins_set
- self.adts = adts_set
- self.modules = modules_set
- self.types = types_set
- self.procedures = procedures_set
- self.variables = variables_set
- self.constants = constants_set
- #
- # if __debug__:
- # print 'exiting set_dialect'
- # print ' self.dialect: ', self.dialect
- # print ' self.lexemes_to_reject: ', self.lexemes_to_reject
- # print ' self.reserved_words: ', self.reserved_words
- # print ' self.builtins: ', self.builtins
- # print ' self.pseudo_builtins: ', self.pseudo_builtins
- # print ' self.adts: ', self.adts
- # print ' self.modules: ', self.modules
- # print ' self.types: ', self.types
- # print ' self.procedures: ', self.procedures
- # print ' self.variables: ', self.variables
- # print ' self.constants: ', self.constants
-
- # Extracts a dialect name from a dialect tag comment string and checks
- # the extracted name against known dialects. If a match is found, the
- # matching name is returned, otherwise dialect id 'unknown' is returned
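-    # A dialect tag is a special comment of the form '(*!m2pim*)' whose
-    # body is one of the ids listed in the dialects tuple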
- def get_dialect_from_dialect_tag(self, dialect_tag):
- #
- # if __debug__:
- # print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
- #
- # constants
- left_tag_delim = '(*!'
- right_tag_delim = '*)'
- left_tag_delim_len = len(left_tag_delim)
- right_tag_delim_len = len(right_tag_delim)
- indicator_start = left_tag_delim_len
- indicator_end = -(right_tag_delim_len)
- #
- # check comment string for dialect indicator
- if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
- and dialect_tag.startswith(left_tag_delim) \
- and dialect_tag.endswith(right_tag_delim):
- #
- # if __debug__:
- # print 'dialect tag found'
- #
- # extract dialect indicator
- indicator = dialect_tag[indicator_start:indicator_end]
- #
- # if __debug__:
- # print 'extracted: ', indicator
- #
- # check against known dialects
- for index in range(1, len(self.dialects)):
- #
- # if __debug__:
- # print 'dialects[', index, ']: ', self.dialects[index]
- #
- if indicator == self.dialects[index]:
- #
- # if __debug__:
- # print 'matching dialect found'
- #
- # indicator matches known dialect
- return indicator
- else:
- # indicator does not match any dialect
- return 'unknown' # default
- else:
- # invalid indicator string
- return 'unknown' # default
-
- # intercept the token stream, modify token attributes and return them
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- #
- # check for dialect tag if dialect has not been set by tag
- if not self.dialect_set_by_tag and token == Comment.Special:
- indicated_dialect = self.get_dialect_from_dialect_tag(value)
- if indicated_dialect != 'unknown':
- # token is a dialect indicator
- # reset reserved words and builtins
- self.set_dialect(indicated_dialect)
- self.dialect_set_by_tag = True
- #
- # check for reserved words, predefined and stdlib identifiers
- if token is Name:
- if value in self.reserved_words:
- token = Keyword.Reserved
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.builtins:
- token = Name.Builtin
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.pseudo_builtins:
- token = Name.Builtin.Pseudo
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.adts:
- if not self.treat_stdlib_adts_as_builtins:
- token = Name.Namespace
- else:
- token = Name.Builtin.Pseudo
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.modules:
- token = Name.Namespace
- #
- elif value in self.types:
- token = Name.Class
- #
- elif value in self.procedures:
- token = Name.Function
- #
- elif value in self.variables:
- token = Name.Variable
- #
- elif value in self.constants:
- token = Name.Constant
- #
- elif token in Number:
- #
- # mark prefix number literals as error for PIM and ISO dialects
- if self.dialect not in ('unknown', 'm2r10', 'objm2'):
- if "'" in value or value[0:2] in ('0b', '0x', '0u'):
- token = Error
- #
- elif self.dialect in ('m2r10', 'objm2'):
- # mark base-8 number literals as errors for M2 R10 and ObjM2
- if token is Number.Oct:
- token = Error
- # mark suffix base-16 literals as errors for M2 R10 and ObjM2
- elif token is Number.Hex and 'H' in value:
- token = Error
- # mark real numbers with E as errors for M2 R10 and ObjM2
- elif token is Number.Float and 'E' in value:
- token = Error
- #
- elif token in Comment:
- #
- # mark single line comment as error for PIM and ISO dialects
- if token is Comment.Single:
- if self.dialect not in ('unknown', 'm2r10', 'objm2'):
- token = Error
- #
- if token is Comment.Preproc:
- # mark ISO pragma as error for PIM dialects
- if value.startswith('<*') and \
- self.dialect.startswith('m2pim'):
- token = Error
- # mark PIM pragma as comment for other dialects
- elif value.startswith('(*$') and \
- self.dialect != 'unknown' and \
- not self.dialect.startswith('m2pim'):
- token = Comment.Multiline
- #
- else: # token is neither Name nor Comment
- #
- # mark lexemes matching the dialect's error token set as errors
- if value in self.lexemes_to_reject:
- token = Error
- #
- # substitute lexemes when in Algol mode
- if self.algol_publication_mode:
- if value == '#':
- value = u'≠'
- elif value == '<=':
- value = u'≤'
- elif value == '>=':
- value = u'≥'
- elif value == '==':
- value = u'≡'
- elif value == '*.':
- value = u'•'
-
- # return result
- yield index, token, value
+ # Case Label Separator Synonym
+ (r'!', Punctuation), # ISO
+ # Blueprint Punctuation
+ (r'\?', Punctuation), # M2R10 + ObjM2
+ ],
+ 'comments': [
+ # Single Line Comment
+ (r'^//.*?\n', Comment.Single), # M2R10 + ObjM2
+ # Block Comment
+ (r'\(\*([^$].*?)\*\)', Comment.Multiline),
+ # Template Block Comment
+ (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2
+ ],
+ 'pragmas': [
+ # ISO Style Pragmas
+ (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2
+ # Pascal Style Pragmas
+ (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM
+ ],
+ 'root': [
+ include('whitespace'),
+ include('dialecttags'),
+ include('pragmas'),
+ include('comments'),
+ include('identifiers'),
+ include('suffixed_number_literals'), # PIM + ISO
+ include('prefixed_number_literals'), # M2R10 + ObjM2
+ include('plain_number_literals'),
+ include('string_literals'),
+ include('digraph_punctuation'),
+ include('digraph_operators'),
+ include('unigraph_punctuation'),
+ include('unigraph_operators'),
+ ]
+ }
+
+# C o m m o n D a t a s e t s
+
+ # Common Reserved Words Dataset
+ common_reserved_words = (
+ # 37 common reserved words
+ 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
+ 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
+ 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
+ 'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
+ 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
+ )
+
+ # Common Builtins Dataset
+ common_builtins = (
+ # 16 common builtins
+ 'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
+ 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
+ 'TRUE',
+ )
+
+ # Common Pseudo-Module Builtins Dataset
+ common_pseudo_builtins = (
+ # 4 common pseudo builtins
+ 'ADDRESS', 'BYTE', 'WORD', 'ADR'
+ )
+
+# P I M M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for PIM Modula-2
+ pim_lexemes_to_reject = (
+ '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
+ '+>', '->', '<<', '>>', '|#', '##',
+ )
+
+ # PIM Modula-2 Additional Reserved Words Dataset
+ pim_additional_reserved_words = (
+ # 3 additional reserved words
+ 'EXPORT', 'QUALIFIED', 'WITH',
+ )
+
+ # PIM Modula-2 Additional Builtins Dataset
+ pim_additional_builtins = (
+ # 16 additional builtins
+ 'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
+ 'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
+ )
+
+ # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
+ pim_additional_pseudo_builtins = (
+ # 5 additional pseudo builtins
+ 'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
+ )
+
+# I S O M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for ISO Modula-2
+ iso_lexemes_to_reject = (
+ '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
+ '<<', '>>', '|#', '##',
+ )
+
+ # ISO Modula-2 Additional Reserved Words Dataset
+ iso_additional_reserved_words = (
+ # 9 additional reserved words (ISO 10514-1)
+ 'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
+ 'REM', 'RETRY', 'WITH',
+ # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
+ 'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
+ 'REVEAL', 'TRACED', 'UNSAFEGUARDED',
+ )
+
+ # ISO Modula-2 Additional Builtins Dataset
+ iso_additional_builtins = (
+ # 26 additional builtins (ISO 10514-1)
+ 'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
+ 'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE', 'LENGTH',
+ 'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
+        'TRUNC', 'UNINTERRUPTIBLE', 'VAL',
+ # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
+ 'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
+ )
+
+ # ISO Modula-2 Additional Pseudo-Module Builtins Dataset
+ iso_additional_pseudo_builtins = (
+ # 14 additional builtins (SYSTEM)
+ 'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
+ 'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
+ 'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
+ # 13 additional builtins (COROUTINES)
+ 'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
+ 'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
+ 'NEWCOROUTINE', 'PROT', 'TRANSFER',
+ # 9 additional builtins (EXCEPTIONS)
+ 'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
+ 'ExceptionSource', 'GetMessage', 'IsCurrentSource',
+ 'IsExceptionalExecution', 'RAISE',
+ # 3 additional builtins (TERMINATION)
+ 'TERMINATION', 'IsTerminating', 'HasHalted',
+ # 4 additional builtins (M2EXCEPTION)
+ 'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
+ 'indexException', 'rangeException', 'caseSelectException',
+ 'invalidLocation', 'functionException', 'wholeValueException',
+ 'wholeDivException', 'realValueException', 'realDivException',
+ 'complexValueException', 'complexDivException', 'protException',
+ 'sysException', 'coException', 'exException',
+ )
+
+# M o d u l a - 2 R 1 0 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for Modula-2 R10
+ m2r10_lexemes_to_reject = (
+ '!', '`', '@', '$', '%', '&', '<>',
+ )
+
+ # Modula-2 R10 reserved words in addition to the common set
+ m2r10_additional_reserved_words = (
+ # 12 additional reserved words
+ 'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
+ 'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
+ # 2 additional reserved words with symbolic assembly option
+ 'ASM', 'REG',
+ )
+
+ # Modula-2 R10 builtins in addition to the common set
+ m2r10_additional_builtins = (
+ # 26 additional builtins
+ 'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
+ 'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
+ 'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
+ 'UNICHAR', 'WRITE', 'WRITEF',
+ )
+
+ # Modula-2 R10 Additional Pseudo-Module Builtins Dataset
+ m2r10_additional_pseudo_builtins = (
+ # 13 additional builtins (TPROPERTIES)
+ 'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
+ 'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
+ 'TMAXEXP', 'TMINEXP',
+ # 4 additional builtins (CONVERSION)
+ 'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
+ # 35 additional builtins (UNSAFE)
+ 'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
+ 'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
+ 'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
+ 'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
+ 'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
+ # 11 additional builtins (ATOMIC)
+ 'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
+ 'BWNAND', 'BWOR', 'BWXOR',
+ # 7 additional builtins (COMPILER)
+ 'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
+ 'HASH',
+ # 5 additional builtins (ASSEMBLER)
+ 'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
+ )
+
+# O b j e c t i v e M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for Objective Modula-2
+ objm2_lexemes_to_reject = (
+ '!', '$', '%', '&', '<>',
+ )
+
+ # Objective Modula-2 Extensions
+ # reserved words in addition to Modula-2 R10
+ objm2_additional_reserved_words = (
+ # 16 additional reserved words
+ 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
+ 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
+ 'SUPER', 'TRY',
+ )
+
+ # Objective Modula-2 Extensions
+ # builtins in addition to Modula-2 R10
+ objm2_additional_builtins = (
+ # 3 additional builtins
+ 'OBJECT', 'NO', 'YES',
+ )
+
+ # Objective Modula-2 Extensions
+ # pseudo-module builtins in addition to Modula-2 R10
+ objm2_additional_pseudo_builtins = (
+ # None
+ )
+
+# A g l e t M o d u l a - 2 D a t a s e t s
+
+ # Aglet Extensions
+ # reserved words in addition to ISO Modula-2
+ aglet_additional_reserved_words = (
+ # None
+ )
+
+ # Aglet Extensions
+ # builtins in addition to ISO Modula-2
+ aglet_additional_builtins = (
+ # 9 additional builtins
+ 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+ 'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
+ )
+
+ # Aglet Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ aglet_additional_pseudo_builtins = (
+ # None
+ )
+
+# G N U M o d u l a - 2 D a t a s e t s
+
+ # GNU Extensions
+ # reserved words in addition to PIM Modula-2
+ gm2_additional_reserved_words = (
+ # 10 additional reserved words
+ 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
+ '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
+ )
+
+ # GNU Extensions
+ # builtins in addition to PIM Modula-2
+ gm2_additional_builtins = (
+ # 21 additional builtins
+ 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+ 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
+ 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
+ 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
+ )
+
+ # GNU Extensions
+ # pseudo-module builtins in addition to PIM Modula-2
+ gm2_additional_pseudo_builtins = (
+ # None
+ )
+
+# p 1 M o d u l a - 2 D a t a s e t s
+
+ # p1 Extensions
+ # reserved words in addition to ISO Modula-2
+ p1_additional_reserved_words = (
+ # None
+ )
+
+ # p1 Extensions
+ # builtins in addition to ISO Modula-2
+ p1_additional_builtins = (
+ # None
+ )
+
+ # p1 Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ p1_additional_pseudo_builtins = (
+ # 1 additional builtin
+ 'BCD',
+ )
+
+# X D S M o d u l a - 2 D a t a s e t s
+
+ # XDS Extensions
+ # reserved words in addition to ISO Modula-2
+ xds_additional_reserved_words = (
+ # 1 additional reserved word
+ 'SEQ',
+ )
+
+ # XDS Extensions
+ # builtins in addition to ISO Modula-2
+ xds_additional_builtins = (
+ # 9 additional builtins
+ 'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
+ 'LONGCARD', 'SHORTCARD', 'SHORTINT',
+ )
+
+ # XDS Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ xds_additional_pseudo_builtins = (
+ # 22 additional builtins (SYSTEM)
+ 'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
+ 'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
+        'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void',
+ # 3 additional builtins (COMPILER)
+ 'COMPILER', 'OPTION', 'EQUATION'
+ )
+
+# P I M S t a n d a r d L i b r a r y D a t a s e t s
+
+ # PIM Modula-2 Standard Library Modules Dataset
+ pim_stdlib_module_identifiers = (
+ 'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
+ )
+
+ # PIM Modula-2 Standard Library Types Dataset
+ pim_stdlib_type_identifiers = (
+ 'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
+ 'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
+ 'DirectoryCommand',
+ )
+
+ # PIM Modula-2 Standard Library Procedures Dataset
+ pim_stdlib_proc_identifiers = (
+ 'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
+ 'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
+ 'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
+ 'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
+ 'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
+ 'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
+ 'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
+ 'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
+ 'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
+ 'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE',
+ )
+
+ # PIM Modula-2 Standard Library Variables Dataset
+ pim_stdlib_var_identifiers = (
+ 'Done', 'termCH', 'in', 'out'
+ )
+
+ # PIM Modula-2 Standard Library Constants Dataset
+ pim_stdlib_const_identifiers = (
+ 'EOL',
+ )
+
+# I S O S t a n d a r d L i b r a r y D a t a s e t s
+
+ # ISO Modula-2 Standard Library Modules Dataset
+ iso_stdlib_module_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Types Dataset
+ iso_stdlib_type_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Procedures Dataset
+ iso_stdlib_proc_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Variables Dataset
+ iso_stdlib_var_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Constants Dataset
+ iso_stdlib_const_identifiers = (
+ # TO DO
+ )
+
+# M 2 R 1 0 S t a n d a r d L i b r a r y D a t a s e t s
+
+ # Modula-2 R10 Standard Library ADTs Dataset
+ m2r10_stdlib_adt_identifiers = (
+ 'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
+ 'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
+ 'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
+ 'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
+ 'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
+ 'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
+ 'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
+ 'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
+ 'INT64', 'INT128', 'STRING', 'UNISTRING',
+ )
+
+ # Modula-2 R10 Standard Library Blueprints Dataset
+ m2r10_stdlib_blueprint_identifiers = (
+ 'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
+ 'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
+ 'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
+ 'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
+ 'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
+ 'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
+ 'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
+ )
+
+ # Modula-2 R10 Standard Library Modules Dataset
+ m2r10_stdlib_module_identifiers = (
+ 'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
+ 'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
+ 'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
+ 'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
+ 'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
+ )
+
+ # Modula-2 R10 Standard Library Types Dataset
+ m2r10_stdlib_type_identifiers = (
+ 'File', 'Status',
+ # TO BE COMPLETED
+ )
+
+ # Modula-2 R10 Standard Library Procedures Dataset
+ m2r10_stdlib_proc_identifiers = (
+ 'ALLOCATE', 'DEALLOCATE', 'SIZE',
+ # TO BE COMPLETED
+ )
+
+ # Modula-2 R10 Standard Library Variables Dataset
+ m2r10_stdlib_var_identifiers = (
+ 'stdIn', 'stdOut', 'stdErr',
+ )
+
+ # Modula-2 R10 Standard Library Constants Dataset
+ m2r10_stdlib_const_identifiers = (
+ 'pi', 'tau',
+ )
+
+# D i a l e c t s
+
+ # Dialect modes
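+    # composite ids such as 'm2pim+gm2' combine a base dialect with the
+    # extension datasets of a specific implementation (here GNU Modula-2)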
+ dialects = (
+ 'unknown',
+ 'm2pim', 'm2iso', 'm2r10', 'objm2',
+ 'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
+ )
+
+# D a t a b a s e s
+
+ # Lexemes to Mark as Errors Database
+ lexemes_to_reject_db = {
+ # Lexemes to reject for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Lexemes to reject for PIM Modula-2
+ 'm2pim': (
+ pim_lexemes_to_reject,
+ ),
+ # Lexemes to reject for ISO Modula-2
+ 'm2iso': (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Modula-2 R10
+ 'm2r10': (
+ m2r10_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Objective Modula-2
+ 'objm2': (
+ objm2_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_lexemes_to_reject,
+ ),
+ # Lexemes to reject for p1 Modula-2
+ 'm2iso+p1': (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for XDS Modula-2
+ 'm2iso+xds': (
+ iso_lexemes_to_reject,
+ ),
+ }
+
+ # Reserved Words Database
+ reserved_words_db = {
+ # Reserved words for unknown dialect
+ 'unknown': (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ iso_additional_reserved_words,
+ m2r10_additional_reserved_words,
+ ),
+
+ # Reserved words for PIM Modula-2
+ 'm2pim': (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ ),
+
+        # Reserved words for ISO Modula-2
+ 'm2iso': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ ),
+
+        # Reserved words for Modula-2 R10
+ 'm2r10': (
+ common_reserved_words,
+ m2r10_additional_reserved_words,
+ ),
+
+ # Reserved words for Objective Modula-2
+ 'objm2': (
+ common_reserved_words,
+ m2r10_additional_reserved_words,
+ objm2_additional_reserved_words,
+ ),
+
+ # Reserved words for Aglet Modula-2 Extensions
+ 'm2iso+aglet': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ aglet_additional_reserved_words,
+ ),
+
+ # Reserved words for GNU Modula-2 Extensions
+ 'm2pim+gm2': (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ gm2_additional_reserved_words,
+ ),
+
+ # Reserved words for p1 Modula-2 Extensions
+ 'm2iso+p1': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ p1_additional_reserved_words,
+ ),
+
+ # Reserved words for XDS Modula-2 Extensions
+ 'm2iso+xds': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ xds_additional_reserved_words,
+ ),
+ }
+
+ # Builtins Database
+ builtins_db = {
+ # Builtins for unknown dialect
+ 'unknown': (
+ common_builtins,
+ pim_additional_builtins,
+ iso_additional_builtins,
+ m2r10_additional_builtins,
+ ),
+
+ # Builtins for PIM Modula-2
+ 'm2pim': (
+ common_builtins,
+ pim_additional_builtins,
+ ),
+
+ # Builtins for ISO Modula-2
+ 'm2iso': (
+ common_builtins,
+ iso_additional_builtins,
+ ),
+
+        # Builtins for Modula-2 R10
+ 'm2r10': (
+ common_builtins,
+ m2r10_additional_builtins,
+ ),
+
+ # Builtins for Objective Modula-2
+ 'objm2': (
+ common_builtins,
+ m2r10_additional_builtins,
+ objm2_additional_builtins,
+ ),
+
+ # Builtins for Aglet Modula-2 Extensions
+ 'm2iso+aglet': (
+ common_builtins,
+ iso_additional_builtins,
+ aglet_additional_builtins,
+ ),
+
+ # Builtins for GNU Modula-2 Extensions
+ 'm2pim+gm2': (
+ common_builtins,
+ pim_additional_builtins,
+ gm2_additional_builtins,
+ ),
+
+ # Builtins for p1 Modula-2 Extensions
+ 'm2iso+p1': (
+ common_builtins,
+ iso_additional_builtins,
+ p1_additional_builtins,
+ ),
+
+ # Builtins for XDS Modula-2 Extensions
+ 'm2iso+xds': (
+ common_builtins,
+ iso_additional_builtins,
+ xds_additional_builtins,
+ ),
+ }
+
+ # Pseudo-Module Builtins Database
+ pseudo_builtins_db = {
+ # Builtins for unknown dialect
+ 'unknown': (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ ),
+
+ # Builtins for PIM Modula-2
+ 'm2pim': (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ ),
+
+ # Builtins for ISO Modula-2
+ 'm2iso': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ ),
+
+        # Builtins for Modula-2 R10
+ 'm2r10': (
+ common_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ ),
+
+ # Builtins for Objective Modula-2
+ 'objm2': (
+ common_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ objm2_additional_pseudo_builtins,
+ ),
+
+ # Builtins for Aglet Modula-2 Extensions
+ 'm2iso+aglet': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ aglet_additional_pseudo_builtins,
+ ),
+
+ # Builtins for GNU Modula-2 Extensions
+ 'm2pim+gm2': (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ gm2_additional_pseudo_builtins,
+ ),
+
+ # Builtins for p1 Modula-2 Extensions
+ 'm2iso+p1': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ p1_additional_pseudo_builtins,
+ ),
+
+ # Builtins for XDS Modula-2 Extensions
+ 'm2iso+xds': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ xds_additional_pseudo_builtins,
+ ),
+ }
+
+ # Standard Library ADTs Database
+ stdlib_adts_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library ADTs for PIM Modula-2
+ 'm2pim': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for ISO Modula-2
+ 'm2iso': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library ADTs for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library ADTs for Aglet Modula-2
+ 'm2iso+aglet': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for GNU Modula-2
+ 'm2pim+gm2': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for p1 Modula-2
+ 'm2iso+p1': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for XDS Modula-2
+ 'm2iso+xds': (
+ # No first class library types
+ ),
+ }
+
+ # Standard Library Modules Database
+ stdlib_modules_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Modules for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_blueprint_identifiers,
+ m2r10_stdlib_module_identifiers,
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library Modules for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_blueprint_identifiers,
+ m2r10_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_module_identifiers,
+ ),
+ }
+
+ # Standard Library Types Database
+ stdlib_types_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Types for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_type_identifiers,
+ ),
+ }
+
+ # Standard Library Procedures Database
+ stdlib_procedures_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Procedures for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_proc_identifiers,
+ ),
+ }
+
+ # Standard Library Variables Database
+ stdlib_variables_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Variables for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_var_identifiers,
+ ),
+ }
+
+ # Standard Library Constants Database
+ stdlib_constants_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Constants for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_const_identifiers,
+ ),
+ }
+
+# M e t h o d s
+
+ # initialise a lexer instance
+ def __init__(self, **options):
+ #
+ # check dialect options
+ #
+ dialects = get_list_opt(options, 'dialect', [])
+ #
+ for dialect_option in dialects:
+ if dialect_option in self.dialects[1:-1]:
+ # valid dialect option found
+ self.set_dialect(dialect_option)
+ break
+ #
+ # Fallback Mode (DEFAULT)
+ else:
+ # no valid dialect option
+ self.set_dialect('unknown')
+ #
+ self.dialect_set_by_tag = False
+ #
+ # check style options
+ #
+ styles = get_list_opt(options, 'style', [])
+ #
+ # use lowercase mode for Algol style
+ if 'algol' in styles or 'algol_nu' in styles:
+ self.algol_publication_mode = True
+ else:
+ self.algol_publication_mode = False
+ #
+ # Check option flags
+ #
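+        # when enabled, stdlib ADT identifiers are tokenised like
+        # pseudo-module builtins, otherwise like library module names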
+ self.treat_stdlib_adts_as_builtins = get_bool_opt(
+ options, 'treat_stdlib_adts_as_builtins', True)
+ #
+ # call superclass initialiser
+ RegexLexer.__init__(self, **options)
+
+ # Set lexer to a specified dialect
+ def set_dialect(self, dialect_id):
+ #
+ # if __debug__:
+ # print 'entered set_dialect with arg: ', dialect_id
+ #
+ # check dialect name against known dialects
+ if dialect_id not in self.dialects:
+ dialect = 'unknown' # default
+ else:
+ dialect = dialect_id
+ #
+ # compose lexemes to reject set
+ lexemes_to_reject_set = set()
+ # add each list of reject lexemes for this dialect
+ for list in self.lexemes_to_reject_db[dialect]:
+ lexemes_to_reject_set.update(set(list))
+ #
+ # compose reserved words set
+ reswords_set = set()
+ # add each list of reserved words for this dialect
+ for list in self.reserved_words_db[dialect]:
+ reswords_set.update(set(list))
+ #
+ # compose builtins set
+ builtins_set = set()
+ # add each list of builtins for this dialect excluding reserved words
+ for list in self.builtins_db[dialect]:
+ builtins_set.update(set(list).difference(reswords_set))
+ #
+ # compose pseudo-builtins set
+ pseudo_builtins_set = set()
+        # add each list of pseudo-builtins for this dialect excluding reserved words
+ for list in self.pseudo_builtins_db[dialect]:
+ pseudo_builtins_set.update(set(list).difference(reswords_set))
+ #
+ # compose ADTs set
+ adts_set = set()
+ # add each list of ADTs for this dialect excluding reserved words
+ for list in self.stdlib_adts_db[dialect]:
+ adts_set.update(set(list).difference(reswords_set))
+ #
+ # compose modules set
+ modules_set = set()
+        # add each list of modules for this dialect excluding builtins
+ for list in self.stdlib_modules_db[dialect]:
+ modules_set.update(set(list).difference(builtins_set))
+ #
+ # compose types set
+ types_set = set()
+ # add each list of types for this dialect excluding builtins
+ for list in self.stdlib_types_db[dialect]:
+ types_set.update(set(list).difference(builtins_set))
+ #
+ # compose procedures set
+ procedures_set = set()
+ # add each list of procedures for this dialect excluding builtins
+ for list in self.stdlib_procedures_db[dialect]:
+ procedures_set.update(set(list).difference(builtins_set))
+ #
+ # compose variables set
+ variables_set = set()
+ # add each list of variables for this dialect excluding builtins
+ for list in self.stdlib_variables_db[dialect]:
+ variables_set.update(set(list).difference(builtins_set))
+ #
+ # compose constants set
+ constants_set = set()
+ # add each list of constants for this dialect excluding builtins
+ for list in self.stdlib_constants_db[dialect]:
+ constants_set.update(set(list).difference(builtins_set))
+ #
+ # update lexer state
+ self.dialect = dialect
+ self.lexemes_to_reject = lexemes_to_reject_set
+ self.reserved_words = reswords_set
+ self.builtins = builtins_set
+ self.pseudo_builtins = pseudo_builtins_set
+ self.adts = adts_set
+ self.modules = modules_set
+ self.types = types_set
+ self.procedures = procedures_set
+ self.variables = variables_set
+ self.constants = constants_set
+ #
+ # if __debug__:
+ # print 'exiting set_dialect'
+ # print ' self.dialect: ', self.dialect
+ # print ' self.lexemes_to_reject: ', self.lexemes_to_reject
+ # print ' self.reserved_words: ', self.reserved_words
+ # print ' self.builtins: ', self.builtins
+ # print ' self.pseudo_builtins: ', self.pseudo_builtins
+ # print ' self.adts: ', self.adts
+ # print ' self.modules: ', self.modules
+ # print ' self.types: ', self.types
+ # print ' self.procedures: ', self.procedures
+ # print ' self.variables: ', self.variables
+ # print ' self.constants: ', self.constants
+
+ # Extracts a dialect name from a dialect tag comment string and checks
+ # the extracted name against known dialects. If a match is found, the
+ # matching name is returned, otherwise dialect id 'unknown' is returned
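+    # A dialect tag is a special comment of the form '(*!m2pim*)' whose
+    # body is one of the ids listed in the dialects tuple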
+ def get_dialect_from_dialect_tag(self, dialect_tag):
+ #
+ # if __debug__:
+ # print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
+ #
+ # constants
+ left_tag_delim = '(*!'
+ right_tag_delim = '*)'
+ left_tag_delim_len = len(left_tag_delim)
+ right_tag_delim_len = len(right_tag_delim)
+ indicator_start = left_tag_delim_len
+ indicator_end = -(right_tag_delim_len)
+ #
+ # check comment string for dialect indicator
+ if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
+ and dialect_tag.startswith(left_tag_delim) \
+ and dialect_tag.endswith(right_tag_delim):
+ #
+ # if __debug__:
+ # print 'dialect tag found'
+ #
+ # extract dialect indicator
+ indicator = dialect_tag[indicator_start:indicator_end]
+ #
+ # if __debug__:
+ # print 'extracted: ', indicator
+ #
+ # check against known dialects
+ for index in range(1, len(self.dialects)):
+ #
+ # if __debug__:
+ # print 'dialects[', index, ']: ', self.dialects[index]
+ #
+ if indicator == self.dialects[index]:
+ #
+ # if __debug__:
+ # print 'matching dialect found'
+ #
+ # indicator matches known dialect
+ return indicator
+ else:
+ # indicator does not match any dialect
+ return 'unknown' # default
+ else:
+ # invalid indicator string
+ return 'unknown' # default
+
+ # intercept the token stream, modify token attributes and return them
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ #
+ # check for dialect tag if dialect has not been set by tag
+ if not self.dialect_set_by_tag and token == Comment.Special:
+ indicated_dialect = self.get_dialect_from_dialect_tag(value)
+ if indicated_dialect != 'unknown':
+ # token is a dialect indicator
+ # reset reserved words and builtins
+ self.set_dialect(indicated_dialect)
+ self.dialect_set_by_tag = True
+ #
+ # check for reserved words, predefined and stdlib identifiers
+ if token is Name:
+ if value in self.reserved_words:
+ token = Keyword.Reserved
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.builtins:
+ token = Name.Builtin
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.pseudo_builtins:
+ token = Name.Builtin.Pseudo
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.adts:
+ if not self.treat_stdlib_adts_as_builtins:
+ token = Name.Namespace
+ else:
+ token = Name.Builtin.Pseudo
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.modules:
+ token = Name.Namespace
+ #
+ elif value in self.types:
+ token = Name.Class
+ #
+ elif value in self.procedures:
+ token = Name.Function
+ #
+ elif value in self.variables:
+ token = Name.Variable
+ #
+ elif value in self.constants:
+ token = Name.Constant
+ #
+ elif token in Number:
+ #
+ # mark prefix number literals as error for PIM and ISO dialects
+ if self.dialect not in ('unknown', 'm2r10', 'objm2'):
+ if "'" in value or value[0:2] in ('0b', '0x', '0u'):
+ token = Error
+ #
+ elif self.dialect in ('m2r10', 'objm2'):
+ # mark base-8 number literals as errors for M2 R10 and ObjM2
+ if token is Number.Oct:
+ token = Error
+ # mark suffix base-16 literals as errors for M2 R10 and ObjM2
+ elif token is Number.Hex and 'H' in value:
+ token = Error
+ # mark real numbers with E as errors for M2 R10 and ObjM2
+ elif token is Number.Float and 'E' in value:
+ token = Error
+ #
+ elif token in Comment:
+ #
+ # mark single line comment as error for PIM and ISO dialects
+ if token is Comment.Single:
+ if self.dialect not in ('unknown', 'm2r10', 'objm2'):
+ token = Error
+ #
+ if token is Comment.Preproc:
+ # mark ISO pragma as error for PIM dialects
+ if value.startswith('<*') and \
+ self.dialect.startswith('m2pim'):
+ token = Error
+ # mark PIM pragma as comment for other dialects
+ elif value.startswith('(*$') and \
+ self.dialect != 'unknown' and \
+ not self.dialect.startswith('m2pim'):
+ token = Comment.Multiline
+ #
+ else: # token is neither Name nor Comment
+ #
+ # mark lexemes matching the dialect's error token set as errors
+ if value in self.lexemes_to_reject:
+ token = Error
+ #
+ # substitute lexemes when in Algol mode
+ if self.algol_publication_mode:
+ if value == '#':
+ value = u'≠'
+ elif value == '<=':
+ value = u'≤'
+ elif value == '>=':
+ value = u'≥'
+ elif value == '==':
+ value = u'≡'
+ elif value == '*.':
+ value = u'•'
+
+ # return result
+ yield index, token, value
diff --git a/contrib/python/Pygments/py2/pygments/lexers/nimrod.py b/contrib/python/Pygments/py2/pygments/lexers/nimrod.py
index 4c1bccf079..6af7533a61 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/nimrod.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/nimrod.py
@@ -1,48 +1,48 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.nimrod
- ~~~~~~~~~~~~~~~~~~~~~~
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nimrod
+ ~~~~~~~~~~~~~~~~~~~~~~
+
Lexer for the Nim language (formerly known as Nimrod).
-
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['NimrodLexer']
-
-
-class NimrodLexer(RegexLexer):
- """
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['NimrodLexer']
+
+
+class NimrodLexer(RegexLexer):
+ """
For `Nim <http://nim-lang.org/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Nimrod'
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Nimrod'
aliases = ['nim', 'nimrod']
- filenames = ['*.nim', '*.nimrod']
+ filenames = ['*.nim', '*.nimrod']
mimetypes = ['text/x-nim']
-
- flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
-
- def underscorize(words):
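-        # build a single regex alternation in which every character of
-        # every word may be followed by an optional underscore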
- newWords = []
- new = ""
- for word in words:
- for ch in word:
- new += (ch + "_?")
- newWords.append(new)
- new = ""
- return "|".join(newWords)
-
- keywords = [
+
+ flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
+
+ def underscorize(words):
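+        # build a single regex alternation in which every character of
+        # every word may be followed by an optional underscore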
+ newWords = []
+ new = ""
+ for word in words:
+ for ch in word:
+ new += (ch + "_?")
+ newWords.append(new)
+ new = ""
+ return "|".join(newWords)
+
+ keywords = [
'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break', 'case',
'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
@@ -50,110 +50,110 @@ class NimrodLexer(RegexLexer):
'is', 'isnot', 'iterator', 'let', 'macro', 'method', 'mixin', 'mod',
'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise',
'ref', 'return', 'shared', 'shl', 'shr', 'static', 'template', 'try',
- 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor'
- ]
-
- keywordsPseudo = [
- 'nil', 'true', 'false'
- ]
-
- opWords = [
- 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
- 'notin', 'is', 'isnot'
- ]
-
- types = [
- 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
- 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
- ]
-
- tokens = {
- 'root': [
- (r'##.*$', String.Doc),
- (r'#.*$', Comment),
- (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
- (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
- Punctuation),
-
- # Strings
- (r'(?:[\w]+)"', String, 'rdqs'),
- (r'"""', String, 'tdqs'),
- ('"', String, 'dqs'),
-
- # Char
- ("'", String.Char, 'chars'),
-
- # Keywords
- (r'(%s)\b' % underscorize(opWords), Operator.Word),
- (r'(p_?r_?o_?c_?\s)(?![(\[\]])', Keyword, 'funcname'),
- (r'(%s)\b' % underscorize(keywords), Keyword),
- (r'(%s)\b' % underscorize(['from', 'import', 'include']),
- Keyword.Namespace),
- (r'(v_?a_?r)\b', Keyword.Declaration),
- (r'(%s)\b' % underscorize(types), Keyword.Type),
- (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
- # Identifiers
- (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
- # Numbers
- (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
- Number.Float, ('float-suffix', 'float-number')),
- (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
- (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
- (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
- (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
- # Whitespace
- (r'\s+', Text),
- (r'.+$', Error),
- ],
- 'chars': [
- (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
- (r"'", String.Char, '#pop'),
- (r".", String.Char)
- ],
- 'strings': [
- (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
- (r'[^\\\'"$\n]+', String),
- # quotes, dollars and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'\$', String)
- # newlines are an error (use "nl" state)
- ],
- 'dqs': [
- (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
- String.Escape),
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'rdqs': [
- (r'"(?!")', String, '#pop'),
- (r'""', String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""(?!")', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'funcname': [
- (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
- (r'`.+`', Name.Function, '#pop')
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'float-number': [
- (r'\.(?!\.)[0-9_]*', Number.Float),
- (r'e[+-]?[0-9][0-9_]*', Number.Float),
- default('#pop')
- ],
- 'float-suffix': [
- (r'\'f(32|64)', Number.Float),
- default('#pop')
- ],
- 'int-suffix': [
- (r'\'i(32|64)', Number.Integer.Long),
- (r'\'i(8|16)', Number.Integer),
- default('#pop')
- ],
- }
+ 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor'
+ ]
+
+ keywordsPseudo = [
+ 'nil', 'true', 'false'
+ ]
+
+ opWords = [
+ 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
+ 'notin', 'is', 'isnot'
+ ]
+
+ types = [
+ 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
+ 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'##.*$', String.Doc),
+ (r'#.*$', Comment),
+ (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
+ (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
+ Punctuation),
+
+ # Strings
+ (r'(?:[\w]+)"', String, 'rdqs'),
+ (r'"""', String, 'tdqs'),
+ ('"', String, 'dqs'),
+
+ # Char
+ ("'", String.Char, 'chars'),
+
+ # Keywords
+ (r'(%s)\b' % underscorize(opWords), Operator.Word),
+ (r'(p_?r_?o_?c_?\s)(?![(\[\]])', Keyword, 'funcname'),
+ (r'(%s)\b' % underscorize(keywords), Keyword),
+ (r'(%s)\b' % underscorize(['from', 'import', 'include']),
+ Keyword.Namespace),
+ (r'(v_?a_?r)\b', Keyword.Declaration),
+ (r'(%s)\b' % underscorize(types), Keyword.Type),
+ (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
+ # Identifiers
+ (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
+ # Numbers
+ (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
+ Number.Float, ('float-suffix', 'float-number')),
+ (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
+ (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
+ (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
+ (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
+ # Whitespace
+ (r'\s+', Text),
+ (r'.+$', Error),
+ ],
+ 'chars': [
+ (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
+ (r"'", String.Char, '#pop'),
+ (r".", String.Char)
+ ],
+ 'strings': [
+ (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
+ (r'[^\\\'"$\n]+', String),
+ # quotes, dollars and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'\$', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'dqs': [
+ (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
+ String.Escape),
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'rdqs': [
+ (r'"(?!")', String, '#pop'),
+ (r'""', String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""(?!")', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'funcname': [
+ (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
+ (r'`.+`', Name.Function, '#pop')
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'float-number': [
+ (r'\.(?!\.)[0-9_]*', Number.Float),
+ (r'e[+-]?[0-9][0-9_]*', Number.Float),
+ default('#pop')
+ ],
+ 'float-suffix': [
+ (r'\'f(32|64)', Number.Float),
+ default('#pop')
+ ],
+ 'int-suffix': [
+ (r'\'i(32|64)', Number.Integer.Long),
+ (r'\'i(8|16)', Number.Integer),
+ default('#pop')
+ ],
+ }
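Nim-style identifiers are matched loosely here: the lexer runs with re.IGNORECASE, and underscorize interleaves an optional underscore after every letter of each keyword before building the alternation. A small sketch of the resulting pattern, using a condensed copy of the helper and made-up sample inputs:

    import re

    def underscorize(words):
        # Same construction as in NimrodLexer: every character may be
        # followed by an underscore, so 'proc' also matches 'p_r_o_c'.
        return "|".join("".join(ch + "_?" for ch in word) for word in words)

    pattern = re.compile(r'(%s)\b' % underscorize(['proc', 'iterator']),
                         re.IGNORECASE)
    assert pattern.match('proc')
    assert pattern.match('p_roc')
    assert pattern.match('ITERATOR')
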
diff --git a/contrib/python/Pygments/py2/pygments/lexers/nit.py b/contrib/python/Pygments/py2/pygments/lexers/nit.py
index 42167ece13..b481ced320 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/nit.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/nit.py
@@ -1,64 +1,64 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.nit
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Nit language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nit
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Nit language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['NitLexer']
-
-
-class NitLexer(RegexLexer):
- """
- For `nit <http://nitlanguage.org>`_ source.
-
- .. versionadded:: 2.0
- """
-
- name = 'Nit'
- aliases = ['nit']
- filenames = ['*.nit']
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- (words((
- 'package', 'module', 'import', 'class', 'abstract', 'interface',
- 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
- 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
- 'public', 'protected', 'private', 'intrude', 'if', 'then',
- 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
- 'implies', 'return', 'continue', 'break', 'abort', 'assert',
- 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
- 'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t( ])'),
- Keyword),
- (r'[A-Z]\w*', Name.Class),
- (r'"""(([^\'\\]|\\.)|\\r|\\n)*((\{\{?)?(""?\{\{?)*""""*)', String), # Simple long string
- (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
- r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
- (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?(\{\{?""?)*\{\{\{\{*)', String), # Start long string
- (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(""?)?(\{\{?""?)*\{\{\{\{*', String), # Mid long string
- (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(\{\{?)?(""?\{\{?)*""""*', String), # End long string
- (r'"(\\.|([^"}{\\]))*"', String), # Simple String
- (r'"(\\.|([^"}{\\]))*\{', String), # Start string
- (r'\}(\\.|([^"}{\\]))*\{', String), # Mid String
- (r'\}(\\.|([^"}{\\]))*"', String), # End String
- (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
- (r'[0-9]+', Number.Integer),
- (r'[0-9]*.[0-9]+', Number.Float),
- (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
- (r'[a-z]\w*', Name),
- (r'_\w+', Name.Variable.Instance),
- (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
- (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
- (r'`\{[^`]*`\}', Text), # Extern blocks won't be Lexed by Nit
- (r'[\r\n\t ]+', Text),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['NitLexer']
+
+
+class NitLexer(RegexLexer):
+ """
+ For `nit <http://nitlanguage.org>`_ source.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nit'
+ aliases = ['nit']
+ filenames = ['*.nit']
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment.Single),
+ (words((
+ 'package', 'module', 'import', 'class', 'abstract', 'interface',
+ 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
+ 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
+ 'public', 'protected', 'private', 'intrude', 'if', 'then',
+ 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
+ 'implies', 'return', 'continue', 'break', 'abort', 'assert',
+ 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
+ 'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t( ])'),
+ Keyword),
+ (r'[A-Z]\w*', Name.Class),
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*((\{\{?)?(""?\{\{?)*""""*)', String), # Simple long string
+ (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
+ r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?(\{\{?""?)*\{\{\{\{*)', String), # Start long string
+ (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(""?)?(\{\{?""?)*\{\{\{\{*', String), # Mid long string
+ (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(\{\{?)?(""?\{\{?)*""""*', String), # End long string
+ (r'"(\\.|([^"}{\\]))*"', String), # Simple String
+ (r'"(\\.|([^"}{\\]))*\{', String), # Start string
+ (r'\}(\\.|([^"}{\\]))*\{', String), # Mid String
+ (r'\}(\\.|([^"}{\\]))*"', String), # End String
+ (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
+ (r'[0-9]+', Number.Integer),
+ (r'[0-9]*.[0-9]+', Number.Float),
+ (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
+ (r'[a-z]\w*', Name),
+ (r'_\w+', Name.Variable.Instance),
+ (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
+ (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
+ (r'`\{[^`]*`\}', Text), # Extern blocks won't be Lexed by Nit
+ (r'[\r\n\t ]+', Text),
+ ],
+ }
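As a quick sanity check, the lexer can be exercised through the ordinary Pygments tokenizer API. A minimal sketch, assuming only the rules shown above (the Nit snippet is made up):

    from pygments.lexers.nit import NitLexer
    from pygments.token import Keyword, Name

    # 'class' and 'fun' should come back as keywords, 'Greeter' as a class name.
    code = 'class Greeter\n\tfun hello do print "hi"\nend\n'
    tokens = list(NitLexer().get_tokens(code))

    assert (Keyword, u'class') in tokens
    assert any(t is Name.Class and v == u'Greeter' for t, v in tokens)
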
diff --git a/contrib/python/Pygments/py2/pygments/lexers/nix.py b/contrib/python/Pygments/py2/pygments/lexers/nix.py
index 50210c4891..711f3ad198 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/nix.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/nix.py
@@ -1,136 +1,136 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.nix
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the NixOS Nix language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nix
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the NixOS Nix language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-
-__all__ = ['NixLexer']
-
-
-class NixLexer(RegexLexer):
- """
- For the `Nix language <http://nixos.org/nix/>`_.
-
- .. versionadded:: 2.0
- """
-
- name = 'Nix'
- aliases = ['nixos', 'nix']
- filenames = ['*.nix']
- mimetypes = ['text/x-nix']
-
- flags = re.MULTILINE | re.UNICODE
-
- keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
- 'else', 'then', '...']
- builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
- 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
- operators = ['++', '+', '?', '.', '!', '//', '==',
- '!=', '&&', '||', '->', '=']
-
- punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
-
- tokens = {
- 'root': [
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # multiline comments
- (r'/\*', Comment.Multiline, 'comment'),
-
- # whitespace
- (r'\s+', Text),
-
- # keywords
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
-
- # highlight the builtins
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
- Name.Builtin),
-
- (r'\b(true|false|null)\b', Name.Constant),
-
- # operators
- ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
- Operator),
-
- # word operators
- (r'\b(or|and)\b', Operator.Word),
-
- # punctuations
- ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
-
- # integers
- (r'[0-9]+', Number.Integer),
-
- # strings
- (r'"', String.Double, 'doublequote'),
- (r"''", String.Single, 'singlequote'),
-
- # paths
- (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
- (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
-
- # urls
- (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
-
- # names of variables
- (r'[\w-]+\s*=', String.Symbol),
- (r'[a-zA-Z_][\w\'-]*', Text),
-
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'singlequote': [
- (r"'''", String.Escape),
- (r"''\$\{", String.Escape),
- (r"''\n", String.Escape),
- (r"''\r", String.Escape),
- (r"''\t", String.Escape),
- (r"''", String.Single, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r"[^']", String.Single),
- ],
- 'doublequote': [
- (r'\\', String.Escape),
- (r'\\"', String.Escape),
- (r'\\$\{', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r'[^"]', String.Double),
- ],
- 'antiquote': [
- (r"\}", String.Interpol, '#pop'),
- # TODO: we should probably escape also here ''${ \${
- (r"\$\{", String.Interpol, '#push'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- rv = 0.0
- # TODO: let/in
- if re.search(r'import.+?<[^>]+>', text):
- rv += 0.4
- if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
- rv += 0.4
- if re.search(r'=\s+mkIf\s+', text):
- rv += 0.4
- if re.search(r'\{[a-zA-Z,\s]+\}:', text):
- rv += 0.1
- return rv
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['NixLexer']
+
+
+class NixLexer(RegexLexer):
+ """
+ For the `Nix language <http://nixos.org/nix/>`_.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nix'
+ aliases = ['nixos', 'nix']
+ filenames = ['*.nix']
+ mimetypes = ['text/x-nix']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
+ 'else', 'then', '...']
+ builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
+ 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
+ operators = ['++', '+', '?', '.', '!', '//', '==',
+ '!=', '&&', '||', '->', '=']
+
+ punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
+
+ tokens = {
+ 'root': [
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # multiline comments
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # keywords
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
+
+ # highlight the builtins
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
+ Name.Builtin),
+
+ (r'\b(true|false|null)\b', Name.Constant),
+
+ # operators
+ ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
+ Operator),
+
+ # word operators
+ (r'\b(or|and)\b', Operator.Word),
+
+ # punctuations
+ ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
+
+ # integers
+ (r'[0-9]+', Number.Integer),
+
+ # strings
+ (r'"', String.Double, 'doublequote'),
+ (r"''", String.Single, 'singlequote'),
+
+ # paths
+ (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
+ (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
+
+ # urls
+ (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
+
+ # names of variables
+ (r'[\w-]+\s*=', String.Symbol),
+ (r'[a-zA-Z_][\w\'-]*', Text),
+
+ ],
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'singlequote': [
+ (r"'''", String.Escape),
+ (r"''\$\{", String.Escape),
+ (r"''\n", String.Escape),
+ (r"''\r", String.Escape),
+ (r"''\t", String.Escape),
+ (r"''", String.Single, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r"[^']", String.Single),
+ ],
+ 'doublequote': [
+ (r'\\', String.Escape),
+ (r'\\"', String.Escape),
+ (r'\\$\{', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r'[^"]', String.Double),
+ ],
+ 'antiquote': [
+ (r"\}", String.Interpol, '#pop'),
+ # TODO: we should probably escape also here ''${ \${
+ (r"\$\{", String.Interpol, '#push'),
+ include('root'),
+ ],
+ }
+
+ def analyse_text(text):
+ rv = 0.0
+ # TODO: let/in
+ if re.search(r'import.+?<[^>]+>', text):
+ rv += 0.4
+ if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
+ rv += 0.4
+ if re.search(r'=\s+mkIf\s+', text):
+ rv += 0.4
+ if re.search(r'\{[a-zA-Z,\s]+\}:', text):
+ rv += 0.1
+ return rv
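The analyse_text hook above is what lets pygments.lexers.guess_lexer prefer NixLexer when the input looks like a Nix expression. A minimal sketch of how it scores, with a made-up derivation skeleton that trips the import and mkDerivation heuristics:

    from pygments.lexers.nix import NixLexer

    sample = '''
    with import <nixpkgs> {};
    stdenv.mkDerivation rec {
      name = "demo";
    }
    '''
    # 'import ... <nixpkgs>' contributes 0.4 and 'mkDerivation rec' another 0.4.
    assert NixLexer.analyse_text(sample) >= 0.8
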
diff --git a/contrib/python/Pygments/py2/pygments/lexers/oberon.py b/contrib/python/Pygments/py2/pygments/lexers/oberon.py
index 1c18488abd..4ce21a1a41 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/oberon.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/oberon.py
@@ -1,105 +1,105 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.oberon
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Oberon family languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.oberon
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Oberon family languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['ComponentPascalLexer']
-
-
-class ComponentPascalLexer(RegexLexer):
- """
- For `Component Pascal <http://www.oberon.ch/pdf/CP-Lang.pdf>`_ source code.
-
- .. versionadded:: 2.1
- """
- name = 'Component Pascal'
- aliases = ['componentpascal', 'cp']
- filenames = ['*.cp', '*.cps']
- mimetypes = ['text/x-component-pascal']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- include('punctuation'),
- include('numliterals'),
- include('strings'),
- include('operators'),
- include('builtins'),
- include('identifiers'),
- ],
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'comments': [
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ComponentPascalLexer']
+
+
+class ComponentPascalLexer(RegexLexer):
+ """
+ For `Component Pascal <http://www.oberon.ch/pdf/CP-Lang.pdf>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Component Pascal'
+ aliases = ['componentpascal', 'cp']
+ filenames = ['*.cp', '*.cps']
+ mimetypes = ['text/x-component-pascal']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+ include('punctuation'),
+ include('numliterals'),
+ include('strings'),
+ include('operators'),
+ include('builtins'),
+ include('identifiers'),
+ ],
+ 'whitespace': [
+ (r'\n+', Text), # blank lines
+ (r'\s+', Text), # whitespace
+ ],
+ 'comments': [
(r'\(\*([^$].*?)\*\)', Comment.Multiline),
- # TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
- ],
- 'punctuation': [
+ # TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
+ ],
+ 'punctuation': [
(r'[()\[\]{},.:;|]', Punctuation),
- ],
- 'numliterals': [
- (r'[0-9A-F]+X\b', Number.Hex), # char code
- (r'[0-9A-F]+[HL]\b', Number.Hex), # hexadecimal number
- (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
- (r'[0-9]+\.[0-9]+', Number.Float), # real number
- (r'[0-9]+', Number.Integer), # decimal whole number
- ],
- 'strings': [
- (r"'[^\n']*'", String), # single quoted string
- (r'"[^\n"]*"', String), # double quoted string
- ],
- 'operators': [
- # Arithmetic Operators
- (r'[+-]', Operator),
- (r'[*/]', Operator),
- # Relational Operators
- (r'[=#<>]', Operator),
- # Dereferencing Operator
- (r'\^', Operator),
- # Logical AND Operator
- (r'&', Operator),
- # Logical NOT Operator
- (r'~', Operator),
- # Assignment Symbol
- (r':=', Operator),
- # Range Constructor
- (r'\.\.', Operator),
- (r'\$', Operator),
- ],
- 'identifiers': [
+ ],
+ 'numliterals': [
+ (r'[0-9A-F]+X\b', Number.Hex), # char code
+ (r'[0-9A-F]+[HL]\b', Number.Hex), # hexadecimal number
+ (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
+ (r'[0-9]+\.[0-9]+', Number.Float), # real number
+ (r'[0-9]+', Number.Integer), # decimal whole number
+ ],
+ 'strings': [
+ (r"'[^\n']*'", String), # single quoted string
+ (r'"[^\n"]*"', String), # double quoted string
+ ],
+ 'operators': [
+ # Arithmetic Operators
+ (r'[+-]', Operator),
+ (r'[*/]', Operator),
+ # Relational Operators
+ (r'[=#<>]', Operator),
+ # Dereferencing Operator
+ (r'\^', Operator),
+ # Logical AND Operator
+ (r'&', Operator),
+ # Logical NOT Operator
+ (r'~', Operator),
+ # Assignment Symbol
+ (r':=', Operator),
+ # Range Constructor
+ (r'\.\.', Operator),
+ (r'\$', Operator),
+ ],
+ 'identifiers': [
(r'([a-zA-Z_$][\w$]*)', Name),
- ],
- 'builtins': [
- (words((
- 'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
- 'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
- ), suffix=r'\b'), Keyword.Type),
- (words((
- 'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
- 'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
- 'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
- 'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
- 'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
- 'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
- 'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
- 'VAR', 'WHILE', 'WITH'
- ), suffix=r'\b'), Keyword.Reserved),
- (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
- ]
- }
+ ],
+ 'builtins': [
+ (words((
+ 'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
+ 'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
+ ), suffix=r'\b'), Keyword.Type),
+ (words((
+ 'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
+ 'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
+ 'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
+ 'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
+ 'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
+ 'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
+ 'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
+ 'VAR', 'WHILE', 'WITH'
+ ), suffix=r'\b'), Keyword.Reserved),
+ (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
+ ]
+ }
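The numliterals rules above are ordered so that Component Pascal's suffix-style character codes and hex numbers win over the plain integer rule. A minimal sketch exercising them, assuming only the rules shown (the snippet is made up):

    from pygments.lexers.oberon import ComponentPascalLexer
    from pygments.token import Number

    # 41X is a char code, 0FFH a hex number, 3.14 a real, 42 an integer.
    tokens = list(ComponentPascalLexer().get_tokens('41X 0FFH 3.14 42'))
    kinds = [t for t, v in tokens if t in Number]
    assert kinds == [Number.Hex, Number.Hex, Number.Float, Number.Integer]
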
diff --git a/contrib/python/Pygments/py2/pygments/lexers/objective.py b/contrib/python/Pygments/py2/pygments/lexers/objective.py
index 777d8d4d6a..d025ec6dc3 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/objective.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/objective.py
@@ -1,426 +1,426 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.objective
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Objective-C family languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.objective
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Objective-C family languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
- inherit, default
-from pygments.token import Text, Keyword, Name, String, Operator, \
- Number, Punctuation, Literal, Comment
-
-from pygments.lexers.c_cpp import CLexer, CppLexer
-
-__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
-
-
-def objective(baselexer):
- """
- Generate a subclass of baselexer that accepts the Objective-C syntax
- extensions.
- """
-
- # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
- # since that's quite common in ordinary C/C++ files. It's OK to match
- # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
- #
- # The upshot of this is that we CANNOT match @class or @interface
- _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
-
- # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
- # (note the identifier is *optional* when there is a ':'!)
- _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
- r'(?:[a-zA-Z_]\w*\s*\]|'
- r'(?:[a-zA-Z_]\w*)?:)')
-
- class GeneratedObjectiveCVariant(baselexer):
- """
- Implements Objective-C syntax on top of an existing C family lexer.
- """
-
- tokens = {
- 'statements': [
- (r'@"', String, 'string'),
- (r'@(YES|NO)', Number),
- (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'@0[0-7]+[Ll]?', Number.Oct),
- (r'@\d+[Ll]?', Number.Integer),
- (r'@\(', Literal, 'literal_number'),
- (r'@\[', Literal, 'literal_array'),
- (r'@\{', Literal, 'literal_dictionary'),
- (words((
- '@selector', '@private', '@protected', '@public', '@encode',
- '@synchronized', '@try', '@throw', '@catch', '@finally',
- '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
- '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
- 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
- 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
- 'out', 'inout', 'release', 'class', '@dynamic', '@optional',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
+ inherit, default
+from pygments.token import Text, Keyword, Name, String, Operator, \
+ Number, Punctuation, Literal, Comment
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+
+__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
+
+
+def objective(baselexer):
+ """
+ Generate a subclass of baselexer that accepts the Objective-C syntax
+ extensions.
+ """
+
+ # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
+ # since that's quite common in ordinary C/C++ files. It's OK to match
+ # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
+ #
+ # The upshot of this is that we CANNOT match @class or @interface
+ _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
+
+ # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
+ # (note the identifier is *optional* when there is a ':'!)
+ _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
+ r'(?:[a-zA-Z_]\w*\s*\]|'
+ r'(?:[a-zA-Z_]\w*)?:)')
+
+ class GeneratedObjectiveCVariant(baselexer):
+ """
+ Implements Objective-C syntax on top of an existing C family lexer.
+ """
+
+ tokens = {
+ 'statements': [
+ (r'@"', String, 'string'),
+ (r'@(YES|NO)', Number),
+ (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'@0[0-7]+[Ll]?', Number.Oct),
+ (r'@\d+[Ll]?', Number.Integer),
+ (r'@\(', Literal, 'literal_number'),
+ (r'@\[', Literal, 'literal_array'),
+ (r'@\{', Literal, 'literal_dictionary'),
+ (words((
+ '@selector', '@private', '@protected', '@public', '@encode',
+ '@synchronized', '@try', '@throw', '@catch', '@finally',
+ '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
+ '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
+ 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
+ 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
+ 'out', 'inout', 'release', 'class', '@dynamic', '@optional',
'@required', '@autoreleasepool', '@import'), suffix=r'\b'),
- Keyword),
- (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
- 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
- Keyword.Type),
- (r'@(true|false|YES|NO)\n', Name.Builtin),
- (r'(YES|NO|nil|self|super)\b', Name.Builtin),
- # Carbon types
- (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
- # Carbon built-ins
- (r'(TRUE|FALSE)\b', Name.Builtin),
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_classname')),
- (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_forward_classname')),
- # @ can also prefix other expressions like @{...} or @(...)
- (r'@', Punctuation),
- inherit,
- ],
- 'oc_classname': [
- # interface definition that inherits
+ Keyword),
+ (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
+ 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'@(true|false|YES|NO)\n', Name.Builtin),
+ (r'(YES|NO|nil|self|super)\b', Name.Builtin),
+ # Carbon types
+ (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
+ # Carbon built-ins
+ (r'(TRUE|FALSE)\b', Name.Builtin),
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_classname')),
+ (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_forward_classname')),
+ # @ can also prefix other expressions like @{...} or @(...)
+ (r'@', Punctuation),
+ inherit,
+ ],
+ 'oc_classname': [
+ # interface definition that inherits
(r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
- bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
- ('#pop', 'oc_ivars')),
+ bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
(r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- # interface definition for a category
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
+ # interface definition for a category
(r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
- bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
- ('#pop', 'oc_ivars')),
+ bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
(r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
- bygroups(Name.Class, Text, Name.Label), '#pop'),
- # simple interface / implementation
+ bygroups(Name.Class, Text, Name.Label), '#pop'),
+ # simple interface / implementation
(r'([a-zA-Z$_][\w$]*)(\s*)(\{)',
- bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
+ bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
(r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'oc_forward_classname': [
+ ],
+ 'oc_forward_classname': [
(r'([a-zA-Z$_][\w$]*)(\s*,\s*)',
- bygroups(Name.Class, Text), 'oc_forward_classname'),
+ bygroups(Name.Class, Text), 'oc_forward_classname'),
(r'([a-zA-Z$_][\w$]*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop')
- ],
- 'oc_ivars': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'root': [
- # methods
- (r'^([-+])(\s*)' # method marker
- r'(\(.*?\))?(\s*)' # return type
- r'([a-zA-Z$_][\w$]*:?)', # begin of method name
- bygroups(Punctuation, Text, using(this),
- Text, Name.Function),
- 'method'),
- inherit,
- ],
- 'method': [
- include('whitespace'),
- # TODO unsure if ellipses are allowed elsewhere, see
- # discussion in Issue 789
- (r',', Punctuation),
- (r'\.\.\.', Punctuation),
- (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
- bygroups(using(this), Text, Name.Variable)),
- (r'[a-zA-Z$_][\w$]*:', Name.Function),
- (';', Punctuation, '#pop'),
- (r'\{', Punctuation, 'function'),
- default('#pop'),
- ],
- 'literal_number': [
- (r'\(', Punctuation, 'literal_number_inner'),
- (r'\)', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_number_inner': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_array': [
- (r'\[', Punctuation, 'literal_array_inner'),
- (r'\]', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_array_inner': [
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_dictionary': [
- (r'\}', Literal, '#pop'),
- include('statement'),
- ],
- }
-
- def analyse_text(text):
- if _oc_keywords.search(text):
- return 1.0
- elif '@"' in text: # strings
- return 0.8
- elif re.search('@[0-9]+', text):
- return 0.7
- elif _oc_message.search(text):
- return 0.8
- return 0
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
- COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
- for index, token, value in \
- baselexer.get_tokens_unprocessed(self, text):
- if token is Name or token is Name.Class:
- if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
- or value in COCOA_PRIMITIVES:
- token = Name.Builtin.Pseudo
-
- yield index, token, value
-
- return GeneratedObjectiveCVariant
-
-
-class ObjectiveCLexer(objective(CLexer)):
- """
- For Objective-C source code with preprocessor directives.
- """
-
- name = 'Objective-C'
- aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
- filenames = ['*.m', '*.h']
- mimetypes = ['text/x-objective-c']
- priority = 0.05 # Lower than C
-
-
-class ObjectiveCppLexer(objective(CppLexer)):
- """
- For Objective-C++ source code with preprocessor directives.
- """
-
- name = 'Objective-C++'
- aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
- filenames = ['*.mm', '*.hh']
- mimetypes = ['text/x-objective-c++']
- priority = 0.05 # Lower than C++
-
-
-class LogosLexer(ObjectiveCppLexer):
- """
- For Logos + Objective-C source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
-
- name = 'Logos'
- aliases = ['logos']
- filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
- mimetypes = ['text/x-logos']
- priority = 0.25
-
- tokens = {
- 'statements': [
- (r'(%orig|%log)\b', Keyword),
- (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
- bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
- (r'(%init)\b(\()',
- bygroups(Keyword, Punctuation), 'logos_init_directive'),
- (r'(%init)(?=\s*;)', bygroups(Keyword)),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class), '#pop'),
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'logos_classname')),
- inherit,
- ],
- 'logos_init_directive': [
+ bygroups(Name.Class, Text), '#pop')
+ ],
+ 'oc_ivars': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'root': [
+ # methods
+ (r'^([-+])(\s*)' # method marker
+ r'(\(.*?\))?(\s*)' # return type
+ r'([a-zA-Z$_][\w$]*:?)', # begin of method name
+ bygroups(Punctuation, Text, using(this),
+ Text, Name.Function),
+ 'method'),
+ inherit,
+ ],
+ 'method': [
+ include('whitespace'),
+ # TODO unsure if ellipses are allowed elsewhere, see
+ # discussion in Issue 789
+ (r',', Punctuation),
+ (r'\.\.\.', Punctuation),
+ (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
+ bygroups(using(this), Text, Name.Variable)),
+ (r'[a-zA-Z$_][\w$]*:', Name.Function),
+ (';', Punctuation, '#pop'),
+ (r'\{', Punctuation, 'function'),
+ default('#pop'),
+ ],
+ 'literal_number': [
+ (r'\(', Punctuation, 'literal_number_inner'),
+ (r'\)', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_number_inner': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array': [
+ (r'\[', Punctuation, 'literal_array_inner'),
+ (r'\]', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array_inner': [
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_dictionary': [
+ (r'\}', Literal, '#pop'),
+ include('statement'),
+ ],
+ }
+
+ def analyse_text(text):
+ if _oc_keywords.search(text):
+ return 1.0
+ elif '@"' in text: # strings
+ return 0.8
+ elif re.search('@[0-9]+', text):
+ return 0.7
+ elif _oc_message.search(text):
+ return 0.8
+ return 0
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+ COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+ for index, token, value in \
+ baselexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Class:
+ if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+ or value in COCOA_PRIMITIVES:
+ token = Name.Builtin.Pseudo
+
+ yield index, token, value
+
+ return GeneratedObjectiveCVariant
+
+
+class ObjectiveCLexer(objective(CLexer)):
+ """
+ For Objective-C source code with preprocessor directives.
+ """
+
+ name = 'Objective-C'
+ aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
+ filenames = ['*.m', '*.h']
+ mimetypes = ['text/x-objective-c']
+ priority = 0.05 # Lower than C
+
+
+class ObjectiveCppLexer(objective(CppLexer)):
+ """
+ For Objective-C++ source code with preprocessor directives.
+ """
+
+ name = 'Objective-C++'
+ aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
+ filenames = ['*.mm', '*.hh']
+ mimetypes = ['text/x-objective-c++']
+ priority = 0.05 # Lower than C++
+
+
+class LogosLexer(ObjectiveCppLexer):
+ """
+ For Logos + Objective-C source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Logos'
+ aliases = ['logos']
+ filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
+ mimetypes = ['text/x-logos']
+ priority = 0.25
+
+ tokens = {
+ 'statements': [
+ (r'(%orig|%log)\b', Keyword),
+ (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
+ bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
+ (r'(%init)\b(\()',
+ bygroups(Keyword, Punctuation), 'logos_init_directive'),
+ (r'(%init)(?=\s*;)', bygroups(Keyword)),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class), '#pop'),
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'logos_classname')),
+ inherit,
+ ],
+ 'logos_init_directive': [
(r'\s+', Text),
- (',', Punctuation, ('logos_init_directive', '#pop')),
+ (',', Punctuation, ('logos_init_directive', '#pop')),
(r'([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
- bygroups(Name.Class, Text, Punctuation, Text, Text)),
+ bygroups(Name.Class, Text, Punctuation, Text, Text)),
(r'([a-zA-Z$_][\w$]*)', Name.Class),
(r'\)', Punctuation, '#pop'),
- ],
- 'logos_classname': [
+ ],
+ 'logos_classname': [
(r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
(r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'root': [
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- 'logos_classname'),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class)),
- (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
- bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
- (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
- 'function'),
- (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
- bygroups(Keyword, Text, Keyword, String, Keyword)),
- (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
- inherit,
- ],
- }
-
- _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
-
- def analyse_text(text):
- if LogosLexer._logos_keywords.search(text):
- return 1.0
- return 0
-
-
-class SwiftLexer(RegexLexer):
- """
- For `Swift <https://developer.apple.com/swift/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Swift'
- filenames = ['*.swift']
- aliases = ['swift']
- mimetypes = ['text/x-swift']
-
- tokens = {
- 'root': [
- # Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'//', Comment.Single, 'comment-single'),
- (r'/\*', Comment.Multiline, 'comment-multi'),
+ ],
+ 'root': [
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ 'logos_classname'),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
+ bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
+ (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
+ 'function'),
+ (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
+ bygroups(Keyword, Text, Keyword, String, Keyword)),
+ (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
+ inherit,
+ ],
+ }
+
+ _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
+
+ def analyse_text(text):
+ if LogosLexer._logos_keywords.search(text):
+ return 1.0
+ return 0
+
+
+class SwiftLexer(RegexLexer):
+ """
+ For `Swift <https://developer.apple.com/swift/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Swift'
+ filenames = ['*.swift']
+ aliases = ['swift']
+ mimetypes = ['text/x-swift']
+
+ tokens = {
+ 'root': [
+ # Whitespace and Comments
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'//', Comment.Single, 'comment-single'),
+ (r'/\*', Comment.Multiline, 'comment-multi'),
(r'#(if|elseif|else|endif|available)\b', Comment.Preproc, 'preproc'),
-
- # Keywords
- include('keywords'),
-
- # Global Types
- (words((
- 'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
- 'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
- 'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
- 'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
- 'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
- 'EnumerateSequence', 'FilterCollectionView',
- 'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
- 'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
- 'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
- 'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
- 'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
- 'LazyForwardCollection', 'LazyRandomAccessCollection',
- 'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
- 'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
- 'Optional', 'PermutationGenerator', 'QuickLookObject',
- 'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
- 'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
- 'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
- 'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
- 'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
- 'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
- 'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
- 'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
- # Protocols
- 'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
- 'BidirectionalIndexType', 'BitwiseOperationsType',
- 'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
- 'CollectionType', 'Comparable', 'DebugPrintable',
- 'DictionaryLiteralConvertible', 'Equatable',
- 'ExtendedGraphemeClusterLiteralConvertible',
- 'ExtensibleCollectionType', 'FloatLiteralConvertible',
- 'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
- 'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
- 'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
- 'NilLiteralConvertible', 'OutputStreamType', 'Printable',
- 'RandomAccessIndexType', 'RangeReplaceableCollectionType',
- 'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
- 'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
- 'Streamable', 'Strideable', 'StringInterpolationConvertible',
- 'StringLiteralConvertible', 'UnicodeCodecType',
- 'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
- '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
- '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
- '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
- '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
- '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
- '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
- '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
- '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
- '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
- '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
- '_SwiftNSStringType', '_UnsignedIntegerType',
- # Variables
- 'C_ARGC', 'C_ARGV', 'Process',
- # Typealiases
- 'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
- 'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
- 'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
- 'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
- 'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
- 'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
- # Foundation/Cocoa
- 'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
- Name.Builtin),
- # Functions
- (words((
- 'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
- 'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
- 'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
- 'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
- 'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
- 'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
- 'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
- 'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
- 'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
- 'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
- 'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
- 'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
- 'withExtendedLifetime', 'withUnsafeMutablePointer',
- 'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
- 'withVaList'), suffix=r'\b'),
- Name.Builtin.Pseudo),
-
- # Implicit Block Variables
- (r'\$\d+', Name.Variable),
-
- # Binary Literal
- (r'0b[01_]+', Number.Bin),
- # Octal Literal
- (r'0o[0-7_]+', Number.Oct),
- # Hexadecimal Literal
- (r'0x[0-9a-fA-F_]+', Number.Hex),
- # Decimal Literal
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
- (r'[0-9][0-9_]*', Number.Integer),
- # String Literal
- (r'"', String, 'string'),
-
- # Operators and Punctuation
- (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
- (r'[/=\-+!*%<>&|^?~]+', Operator),
-
- # Identifier
- (r'[a-zA-Z_]\w*', Name)
- ],
- 'keywords': [
- (words((
+
+ # Keywords
+ include('keywords'),
+
+ # Global Types
+ (words((
+ 'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
+ 'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
+ 'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
+ 'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
+ 'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
+ 'EnumerateSequence', 'FilterCollectionView',
+ 'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
+ 'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
+ 'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
+ 'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
+ 'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
+ 'LazyForwardCollection', 'LazyRandomAccessCollection',
+ 'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
+ 'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
+ 'Optional', 'PermutationGenerator', 'QuickLookObject',
+ 'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
+ 'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
+ 'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
+ 'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
+ 'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
+ 'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
+ 'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
+ 'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
+ # Protocols
+ 'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
+ 'BidirectionalIndexType', 'BitwiseOperationsType',
+ 'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
+ 'CollectionType', 'Comparable', 'DebugPrintable',
+ 'DictionaryLiteralConvertible', 'Equatable',
+ 'ExtendedGraphemeClusterLiteralConvertible',
+ 'ExtensibleCollectionType', 'FloatLiteralConvertible',
+ 'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
+ 'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
+ 'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
+ 'NilLiteralConvertible', 'OutputStreamType', 'Printable',
+ 'RandomAccessIndexType', 'RangeReplaceableCollectionType',
+ 'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
+ 'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
+ 'Streamable', 'Strideable', 'StringInterpolationConvertible',
+ 'StringLiteralConvertible', 'UnicodeCodecType',
+ 'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
+ '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
+ '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
+ '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
+ '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
+ '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
+ '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
+ '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
+ '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
+ '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
+ '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
+ '_SwiftNSStringType', '_UnsignedIntegerType',
+ # Variables
+ 'C_ARGC', 'C_ARGV', 'Process',
+ # Typealiases
+ 'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
+ 'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
+ 'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
+ 'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
+ 'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
+ 'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
+ # Foundation/Cocoa
+ 'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
+ Name.Builtin),
+ # Functions
+ (words((
+ 'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
+ 'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
+ 'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
+ 'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
+ 'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
+ 'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
+ 'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
+ 'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
+ 'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
+ 'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
+ 'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
+ 'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
+ 'withExtendedLifetime', 'withUnsafeMutablePointer',
+ 'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
+ 'withVaList'), suffix=r'\b'),
+ Name.Builtin.Pseudo),
+
+ # Implicit Block Variables
+ (r'\$\d+', Name.Variable),
+
+ # Binary Literal
+ (r'0b[01_]+', Number.Bin),
+ # Octal Literal
+ (r'0o[0-7_]+', Number.Oct),
+ # Hexadecimal Literal
+ (r'0x[0-9a-fA-F_]+', Number.Hex),
+ # Decimal Literal
+ (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
+ (r'[0-9][0-9_]*', Number.Integer),
+ # String Literal
+ (r'"', String, 'string'),
+
+ # Operators and Punctuation
+ (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
+ (r'[/=\-+!*%<>&|^?~]+', Operator),
+
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name)
+ ],
+ 'keywords': [
+ (words((
'as', 'break', 'case', 'catch', 'continue', 'default', 'defer',
'do', 'else', 'fallthrough', 'for', 'guard', 'if', 'in', 'is',
'repeat', 'return', '#selector', 'switch', 'throw', 'try',
'where', 'while'), suffix=r'\b'),
- Keyword),
- (r'@availability\([^)]+\)', Keyword.Reserved),
- (words((
- 'associativity', 'convenience', 'dynamic', 'didSet', 'final',
+ Keyword),
+ (r'@availability\([^)]+\)', Keyword.Reserved),
+ (words((
+ 'associativity', 'convenience', 'dynamic', 'didSet', 'final',
'get', 'indirect', 'infix', 'inout', 'lazy', 'left', 'mutating',
'none', 'nonmutating', 'optional', 'override', 'postfix',
'precedence', 'prefix', 'Protocol', 'required', 'rethrows',
@@ -428,77 +428,77 @@ class SwiftLexer(RegexLexer):
'@availability', '@autoclosure', '@noreturn',
'@NSApplicationMain', '@NSCopying', '@NSManaged', '@objc',
'@UIApplicationMain', '@IBAction', '@IBDesignable',
- '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
- Keyword.Reserved),
- (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
+ '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
+ Keyword.Reserved),
+ (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
r'|__FILE__|__FUNCTION__|__LINE__|_'
r'|#(?:file|line|column|function))\b', Keyword.Constant),
- (r'import\b', Keyword.Declaration, 'module'),
- (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(func)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Function)),
- (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
- Text, Name.Variable)),
- (words((
- 'class', 'deinit', 'enum', 'extension', 'func', 'import', 'init',
- 'internal', 'let', 'operator', 'private', 'protocol', 'public',
- 'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
- Keyword.Declaration)
- ],
- 'comment': [
- (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
- Comment.Special)
- ],
-
- # Nested
- 'comment-single': [
- (r'\n', Text, '#pop'),
- include('comment'),
- (r'[^\n]', Comment.Single)
- ],
- 'comment-multi': [
- include('comment'),
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'module': [
- (r'\n', Text, '#pop'),
- (r'[a-zA-Z_]\w*', Name.Class),
- include('root')
- ],
- 'preproc': [
- (r'\n', Text, '#pop'),
- include('keywords'),
- (r'[A-Za-z]\w*', Comment.Preproc),
- include('root')
- ],
- 'string': [
- (r'\\\(', String.Interpol, 'string-intp'),
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String)
- ],
- 'string-intp': [
- (r'\(', String.Interpol, '#push'),
- (r'\)', String.Interpol, '#pop'),
- include('root')
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
- COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name or token is Name.Class:
- if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
- or value in COCOA_PRIMITIVES:
- token = Name.Builtin.Pseudo
-
- yield index, token, value
+ (r'import\b', Keyword.Declaration, 'module'),
+ (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Class)),
+ (r'(func)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Function)),
+ (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
+ Text, Name.Variable)),
+ (words((
+ 'class', 'deinit', 'enum', 'extension', 'func', 'import', 'init',
+ 'internal', 'let', 'operator', 'private', 'protocol', 'public',
+ 'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
+ Keyword.Declaration)
+ ],
+ 'comment': [
+ (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
+ Comment.Special)
+ ],
+
+ # Nested
+ 'comment-single': [
+ (r'\n', Text, '#pop'),
+ include('comment'),
+ (r'[^\n]', Comment.Single)
+ ],
+ 'comment-multi': [
+ include('comment'),
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'module': [
+ (r'\n', Text, '#pop'),
+ (r'[a-zA-Z_]\w*', Name.Class),
+ include('root')
+ ],
+ 'preproc': [
+ (r'\n', Text, '#pop'),
+ include('keywords'),
+ (r'[A-Za-z]\w*', Comment.Preproc),
+ include('root')
+ ],
+ 'string': [
+ (r'\\\(', String.Interpol, 'string-intp'),
+ (r'"', String, '#pop'),
+ (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
+ (r'[^\\"]+', String),
+ (r'\\', String)
+ ],
+ 'string-intp': [
+ (r'\(', String.Interpol, '#push'),
+ (r'\)', String.Interpol, '#pop'),
+ include('root')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+ COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Class:
+ if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+ or value in COCOA_PRIMITIVES:
+ token = Name.Builtin.Pseudo
+
+ yield index, token, value
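
The get_tokens_unprocessed override restored above post-processes the regular token stream: any Name or Name.Class value that appears in the Cocoa builtin tables is promoted to Name.Builtin.Pseudo. The following is a hedged illustration rather than part of this file; the Swift snippet is invented, but the API calls are the stock Pygments ones.

from pygments.lexers import SwiftLexer

# Hypothetical Swift fragment: 'NSObject' is listed in COCOA_INTERFACES,
# so the override above reports it as Name.Builtin.Pseudo instead of Name.
code = 'class Greeter: NSObject { let name = "hi" }'
for index, token, value in SwiftLexer().get_tokens_unprocessed(code):
    if value.strip():
        print(index, token, value)
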
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ooc.py b/contrib/python/Pygments/py2/pygments/lexers/ooc.py
index 438719cd41..f91fa2991d 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ooc.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ooc.py
@@ -1,85 +1,85 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.ooc
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Ooc language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ooc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Ooc language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['OocLexer']
-
-
-class OocLexer(RegexLexer):
- """
- For `Ooc <http://ooc-lang.org/>`_ source code
-
- .. versionadded:: 1.2
- """
- name = 'Ooc'
- aliases = ['ooc']
- filenames = ['*.ooc']
- mimetypes = ['text/x-ooc']
-
- tokens = {
- 'root': [
- (words((
- 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
- 'this', 'super', 'new', 'const', 'final', 'static', 'import',
- 'use', 'extern', 'inline', 'proto', 'break', 'continue',
- 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
- 'switch', 'case', 'as', 'in', 'version', 'return', 'true',
- 'false', 'null'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'include\b', Keyword, 'include'),
- (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
- bygroups(Keyword, Text, Keyword, Text, Name.Class)),
- (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
- bygroups(Keyword, Text, Name.Function)),
- (r'\bfunc\b', Keyword),
- # Note: %= and ^= not listed on http://ooc-lang.org/syntax
- (r'//.*', Comment),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
- r'&&?|\|\|?|\^=?)', Operator),
- (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
- Name.Function)),
- (r'[A-Z][A-Z0-9_]+', Name.Constant),
- (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
-
- (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
- bygroups(Name.Function, Text)),
- (r'[a-z]\w*', Name.Variable),
-
- # : introduces types
- (r'[:(){}\[\];,]', Punctuation),
-
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'0c[0-9]+', Number.Oct),
- (r'0b[01]+', Number.Bin),
- (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
- (r'[0-9_]+', Number.Decimal),
-
- (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
- String.Double),
- (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'@', Punctuation), # pointer dereference
- (r'\.', Punctuation), # imports or chain operator
-
- (r'\\[ \t\n]', Text),
- (r'[ \t]+', Text),
- ],
- 'include': [
- (r'[\w/]+', Name),
- (r',', Punctuation),
- (r'[ \t]', Text),
- (r'[;\n]', Text, '#pop'),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['OocLexer']
+
+
+class OocLexer(RegexLexer):
+ """
+ For `Ooc <http://ooc-lang.org/>`_ source code
+
+ .. versionadded:: 1.2
+ """
+ name = 'Ooc'
+ aliases = ['ooc']
+ filenames = ['*.ooc']
+ mimetypes = ['text/x-ooc']
+
+ tokens = {
+ 'root': [
+ (words((
+ 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'new', 'const', 'final', 'static', 'import',
+ 'use', 'extern', 'inline', 'proto', 'break', 'continue',
+ 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
+ 'switch', 'case', 'as', 'in', 'version', 'return', 'true',
+ 'false', 'null'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'include\b', Keyword, 'include'),
+ (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Class)),
+ (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'\bfunc\b', Keyword),
+ # Note: %= and ^= not listed on http://ooc-lang.org/syntax
+ (r'//.*', Comment),
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
+ r'&&?|\|\|?|\^=?)', Operator),
+ (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
+ Name.Function)),
+ (r'[A-Z][A-Z0-9_]+', Name.Constant),
+ (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
+
+ (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
+ bygroups(Name.Function, Text)),
+ (r'[a-z]\w*', Name.Variable),
+
+ # : introduces types
+ (r'[:(){}\[\];,]', Punctuation),
+
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'0c[0-9]+', Number.Oct),
+ (r'0b[01]+', Number.Bin),
+ (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
+ (r'[0-9_]+', Number.Decimal),
+
+ (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
+ String.Double),
+ (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'@', Punctuation), # pointer dereference
+ (r'\.', Punctuation), # imports or chain operator
+
+ (r'\\[ \t\n]', Text),
+ (r'[ \t]+', Text),
+ ],
+ 'include': [
+ (r'[\w/]+', Name),
+ (r',', Punctuation),
+ (r'[ \t]', Text),
+ (r'[;\n]', Text, '#pop'),
+ ],
+ }
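
In the OocLexer rules above, (r'include\b', Keyword, 'include') pushes the dedicated 'include' state, which tags the path as Name and pops again on ';' or a newline. A minimal usage sketch follows; the ooc fragment is made up and only meant to exercise those rules.

from pygments import highlight
from pygments.lexers import OocLexer
from pygments.formatters import TerminalFormatter

# Hypothetical ooc source: the first line goes through the 'include' state,
# everything else falls through to the 'root' rules.
source = 'include stdio\n\nmain: func {\n    "hello" println()\n}\n'
print(highlight(source, OocLexer(), TerminalFormatter()))
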
diff --git a/contrib/python/Pygments/py2/pygments/lexers/other.py b/contrib/python/Pygments/py2/pygments/lexers/other.py
index c3a60cefc9..ed3d3fe73a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/other.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/other.py
@@ -1,41 +1,41 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.other
- ~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.other
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
-from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
- TcshLexer
-from pygments.lexers.robotframework import RobotFrameworkLexer
-from pygments.lexers.testing import GherkinLexer
-from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
-from pygments.lexers.prolog import LogtalkLexer
-from pygments.lexers.snobol import SnobolLexer
-from pygments.lexers.rebol import RebolLexer
-from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
-from pygments.lexers.modeling import ModelicaLexer
-from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
- HybrisLexer
-from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
- AsymptoteLexer, PovrayLexer
-from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
- GoodDataCLLexer, MaqlLexer
-from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
-from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
- MscgenLexer, VGLLexer
-from pygments.lexers.basic import CbmBasicV2Lexer
-from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
-from pygments.lexers.ecl import ECLLexer
-from pygments.lexers.urbi import UrbiscriptLexer
-from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
-from pygments.lexers.installers import NSISLexer, RPMSpecLexer
-from pygments.lexers.textedit import AwkLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
+from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
+ TcshLexer
+from pygments.lexers.robotframework import RobotFrameworkLexer
+from pygments.lexers.testing import GherkinLexer
+from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
+from pygments.lexers.prolog import LogtalkLexer
+from pygments.lexers.snobol import SnobolLexer
+from pygments.lexers.rebol import RebolLexer
+from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
+from pygments.lexers.modeling import ModelicaLexer
+from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
+ HybrisLexer
+from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
+ AsymptoteLexer, PovrayLexer
+from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
+ GoodDataCLLexer, MaqlLexer
+from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
+from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
+ MscgenLexer, VGLLexer
+from pygments.lexers.basic import CbmBasicV2Lexer
+from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
+from pygments.lexers.ecl import ECLLexer
+from pygments.lexers.urbi import UrbiscriptLexer
+from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
+from pygments.lexers.installers import NSISLexer, RPMSpecLexer
+from pygments.lexers.textedit import AwkLexer
from pygments.lexers.smv import NuSMVLexer
-
-__all__ = []
+
+__all__ = []
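
other.py is kept purely as a compatibility shim: it re-imports the classes from their new modules and leaves __all__ empty. In practice the legacy import path still resolves to the same class objects, while alias lookup goes through the lexer registry rather than this module. A hedged sketch:

from pygments.lexers import get_lexer_by_name
from pygments.lexers.other import BashLexer           # legacy import path
from pygments.lexers.shell import BashLexer as Canon  # canonical location

# Both names refer to the same class, and the 'bash' alias is resolved
# via the registry, independent of this shim module.
assert BashLexer is Canon
assert isinstance(get_lexer_by_name('bash'), BashLexer)
print(BashLexer.__module__)  # pygments.lexers.shell
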
diff --git a/contrib/python/Pygments/py2/pygments/lexers/parasail.py b/contrib/python/Pygments/py2/pygments/lexers/parasail.py
index 7f8cf07342..1c6ca6ae8a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/parasail.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/parasail.py
@@ -1,79 +1,79 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.parasail
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for ParaSail.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.parasail
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for ParaSail.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-
-__all__ = ['ParaSailLexer']
-
-
-class ParaSailLexer(RegexLexer):
- """
- For `ParaSail <http://www.parasail-lang.org>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'ParaSail'
- aliases = ['parasail']
- filenames = ['*.psi', '*.psl']
- mimetypes = ['text/x-parasail']
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'\b(and|or|xor)=', Operator.Word),
- (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|'
- r'(is|not)\s+null)\b',
- Operator.Word),
- # Keywords
- (r'\b(abs|abstract|all|block|class|concurrent|const|continue|'
- r'each|end|exit|extends|exports|forward|func|global|implements|'
- r'import|in|interface|is|lambda|locked|new|not|null|of|op|'
- r'optional|private|queued|ref|return|reverse|separate|some|'
- r'type|until|var|with|'
- # Control flow
- r'if|then|else|elsif|case|for|while|loop)\b',
- Keyword.Reserved),
- (r'(abstract\s+)?(interface|class|op|func|type)',
- Keyword.Declaration),
- # Literals
- (r'"[^"]*"', String),
- (r'\\[\'ntrf"0]', String.Escape),
- (r'#[a-zA-Z]\w*', Literal), # Enumeration
- include('numbers'),
- (r"'[^']'", String.Char),
- (r'[a-zA-Z]\w*', Name),
- # Operators and Punctuation
- (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['ParaSailLexer']
+
+
+class ParaSailLexer(RegexLexer):
+ """
+ For `ParaSail <http://www.parasail-lang.org>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'ParaSail'
+ aliases = ['parasail']
+ filenames = ['*.psi', '*.psl']
+ mimetypes = ['text/x-parasail']
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'\b(and|or|xor)=', Operator.Word),
+ (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|'
+ r'(is|not)\s+null)\b',
+ Operator.Word),
+ # Keywords
+ (r'\b(abs|abstract|all|block|class|concurrent|const|continue|'
+ r'each|end|exit|extends|exports|forward|func|global|implements|'
+ r'import|in|interface|is|lambda|locked|new|not|null|of|op|'
+ r'optional|private|queued|ref|return|reverse|separate|some|'
+ r'type|until|var|with|'
+ # Control flow
+ r'if|then|else|elsif|case|for|while|loop)\b',
+ Keyword.Reserved),
+ (r'(abstract\s+)?(interface|class|op|func|type)',
+ Keyword.Declaration),
+ # Literals
+ (r'"[^"]*"', String),
+ (r'\\[\'ntrf"0]', String.Escape),
+ (r'#[a-zA-Z]\w*', Literal), # Enumeration
+ include('numbers'),
+ (r"'[^']'", String.Char),
+ (r'[a-zA-Z]\w*', Name),
+ # Operators and Punctuation
+ (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|'
r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\|=|\||/=|\+|-|\*|/|'
- r'\.\.|<\.\.|\.\.<|<\.\.<)',
- Operator),
- (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)',
- Punctuation),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex), # any base
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), # C-like hex
- (r'0[bB][01][01_]*', Number.Bin), # C-like bin
- (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*', # float exp
- Number.Float),
- (r'\d[0-9_]*\.\d[0-9_]*', Number.Float), # float
- (r'\d[0-9_]*', Number.Integer), # integer
- ],
- }
+ r'\.\.|<\.\.|\.\.<|<\.\.<)',
+ Operator),
+ (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)',
+ Punctuation),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex), # any base
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), # C-like hex
+ (r'0[bB][01][01_]*', Number.Bin), # C-like bin
+ (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*', # float exp
+ Number.Float),
+ (r'\d[0-9_]*\.\d[0-9_]*', Number.Float), # float
+ (r'\d[0-9_]*', Number.Integer), # integer
+ ],
+ }
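
In ParaSailLexer the 'numbers' state is included into 'root', and its rule order matters: based literals and the C-style hex/binary forms are tried before the bare-integer rule, so a literal such as 16#FF# stays one Number.Hex token instead of being split at the '#'. A hedged check follows; the ParaSail line is invented.

from pygments.lexers import ParaSailLexer
from pygments.token import Number

# Hypothetical literals; rule order in the 'numbers' state keeps
# '16#FF#' and '0xFF' whole.
line = 'const X := 16#FF# + 0xFF + 3.25 + 1_000'
numbers = [(tok, val)
           for _, tok, val in ParaSailLexer().get_tokens_unprocessed(line)
           if tok in Number]
print(numbers)
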
diff --git a/contrib/python/Pygments/py2/pygments/lexers/parsers.py b/contrib/python/Pygments/py2/pygments/lexers/parsers.py
index 8bcbfc5046..cdad7d9d02 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/parsers.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/parsers.py
@@ -1,835 +1,835 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.parsers
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for parser generators.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.parsers
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for parser generators.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, \
- include, bygroups, using
-from pygments.token import Punctuation, Other, Text, Comment, Operator, \
- Keyword, Name, String, Number, Whitespace
-from pygments.lexers.jvm import JavaLexer
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers.objective import ObjectiveCLexer
-from pygments.lexers.d import DLexer
-from pygments.lexers.dotnet import CSharpLexer
-from pygments.lexers.ruby import RubyLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.perl import PerlLexer
-
-__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
- 'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
- 'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
- 'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
- # 'AntlrCLexer',
- 'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
- 'AntlrJavaLexer', 'AntlrActionScriptLexer',
- 'TreetopLexer', 'EbnfLexer']
-
-
-class RagelLexer(RegexLexer):
- """
- A pure `Ragel <http://www.complang.org/ragel/>`_ lexer. Use this for
- fragments of Ragel. For ``.rl`` files, use RagelEmbeddedLexer instead
- (or one of the language-specific subclasses).
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel'
- aliases = ['ragel']
- filenames = []
-
- tokens = {
- 'whitespace': [
- (r'\s+', Whitespace)
- ],
- 'comments': [
- (r'\#.*$', Comment),
- ],
- 'keywords': [
- (r'(access|action|alphtype)\b', Keyword),
- (r'(getkey|write|machine|include)\b', Keyword),
- (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
- (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
- ],
- 'numbers': [
- (r'0x[0-9A-Fa-f]+', Number.Hex),
- (r'[+-]?[0-9]+', Number.Integer),
- ],
- 'literals': [
- (r'"(\\\\|\\"|[^"])*"', String), # double quote string
- (r"'(\\\\|\\'|[^'])*'", String), # single quote string
- (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals
- (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions
- ],
- 'identifiers': [
- (r'[a-zA-Z_]\w*', Name.Variable),
- ],
- 'operators': [
- (r',', Operator), # Join
- (r'\||&|--?', Operator), # Union, Intersection and Subtraction
-            (r'\.|<:|:>>?', Operator),                  # Concatenation
- (r':', Operator), # Label
- (r'->', Operator), # Epsilon Transition
- (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
- (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
- (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
- (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
- (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
- (r'>|@|\$|%', Operator), # Transition Actions and Priorities
- (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator), # Repetition
- (r'!|\^', Operator), # Negation
- (r'\(|\)', Operator), # Grouping
- ],
- 'root': [
- include('literals'),
- include('whitespace'),
- include('comments'),
- include('keywords'),
- include('numbers'),
- include('identifiers'),
- include('operators'),
- (r'\{', Punctuation, 'host'),
- (r'=', Operator),
- (r';', Punctuation),
- ],
- 'host': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^{}\'"/#]+', # exclude unsafe characters
- r'[^\\]\\[{}]', # allow escaped { or }
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'\#.*$\n?', # ruby comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\/|[^/])*/',
-
- # / is safe now that we've handled regex and javadoc comments
- r'/',
- )) + r')+', Other),
-
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- }
-
-
-class RagelEmbeddedLexer(RegexLexer):
- """
- A lexer for `Ragel`_ embedded in a host language file.
-
- This will only highlight Ragel statements. If you want host language
- highlighting then call the language-specific Ragel lexer.
-
- .. versionadded:: 1.1
- """
-
- name = 'Embedded Ragel'
- aliases = ['ragel-em']
- filenames = ['*.rl']
-
- tokens = {
- 'root': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^%\'"/#]+', # exclude unsafe characters
- r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
- r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression
-
- # / is safe now that we've handled regex and javadoc comments
- r'/',
- )) + r')+', Other),
-
- # Single Line FSM.
- # Please don't put a quoted newline in a single line FSM.
- # That's just mean. It will break this.
- (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
- using(RagelLexer),
- Punctuation, Text)),
-
- # Multi Line FSM.
- (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
- ],
- 'multi-line-fsm': [
- (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
- r'(' + r'|'.join((
- r'[^}\'"\[/#]', # exclude unsafe characters
- r'\}(?=[^%]|$)', # } is okay as long as it's not followed by %
- r'\}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
- r'[^\\]\\[{}]', # ...and } is okay if it's escaped
-
- # allow / if it's preceded with one of these symbols
- # (ragel EOF actions)
- r'(>|\$|%|<|@|<>)/',
-
- # specifically allow regex followed immediately by *
- # so it doesn't get mistaken for a comment
- r'/(?!\*)(\\\\|\\/|[^/])*/\*',
-
- # allow / as long as it's not followed by another / or by a *
- r'/(?=[^/*]|$)',
-
- # We want to match as many of these as we can in one block.
- # Not sure if we need the + sign here,
- # does it help performance?
- )) + r')+',
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
- )) + r')+', using(RagelLexer)),
-
- (r'\}%%', Punctuation, '#pop'),
- ]
- }
-
- def analyse_text(text):
- return '@LANG: indep' in text
-
-
-class RagelRubyLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a Ruby host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Ruby Host'
- aliases = ['ragel-ruby', 'ragel-rb']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer,
- **options)
-
- def analyse_text(text):
- return '@LANG: ruby' in text
-
-
-class RagelCLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a C host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in C Host'
- aliases = ['ragel-c']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super(RagelCLexer, self).__init__(CLexer, RagelEmbeddedLexer,
- **options)
-
- def analyse_text(text):
- return '@LANG: c' in text
-
-
-class RagelDLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a D host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in D Host'
- aliases = ['ragel-d']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super(RagelDLexer, self).__init__(DLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: d' in text
-
-
-class RagelCppLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a CPP host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in CPP Host'
- aliases = ['ragel-cpp']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super(RagelCppLexer, self).__init__(CppLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: c++' in text
-
-
-class RagelObjectiveCLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in an Objective C host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Objective C Host'
- aliases = ['ragel-objc']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super(RagelObjectiveCLexer, self).__init__(ObjectiveCLexer,
- RagelEmbeddedLexer,
- **options)
-
- def analyse_text(text):
- return '@LANG: objc' in text
-
-
-class RagelJavaLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a Java host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Java Host'
- aliases = ['ragel-java']
- filenames = ['*.rl']
-
- def __init__(self, **options):
- super(RagelJavaLexer, self).__init__(JavaLexer, RagelEmbeddedLexer,
- **options)
-
- def analyse_text(text):
- return '@LANG: java' in text
-
-
-class AntlrLexer(RegexLexer):
- """
- Generic `ANTLR`_ Lexer.
- Should not be called directly, instead
- use DelegatingLexer for your target language.
-
- .. versionadded:: 1.1
-
- .. _ANTLR: http://www.antlr.org/
- """
-
- name = 'ANTLR'
- aliases = ['antlr']
- filenames = []
-
- _id = r'[A-Za-z]\w*'
- _TOKEN_REF = r'[A-Z]\w*'
- _RULE_REF = r'[a-z]\w*'
- _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
- _INT = r'[0-9]+'
-
- tokens = {
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'//.*$', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- ],
- 'root': [
- include('whitespace'),
- include('comments'),
-
- (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
- Punctuation)),
- # optionsSpec
- (r'options\b', Keyword, 'options'),
- # tokensSpec
- (r'tokens\b', Keyword, 'tokens'),
- # attrScope
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, DelegatingLexer, \
+ include, bygroups, using
+from pygments.token import Punctuation, Other, Text, Comment, Operator, \
+ Keyword, Name, String, Number, Whitespace
+from pygments.lexers.jvm import JavaLexer
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.objective import ObjectiveCLexer
+from pygments.lexers.d import DLexer
+from pygments.lexers.dotnet import CSharpLexer
+from pygments.lexers.ruby import RubyLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
+
+__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
+ 'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
+ 'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
+ 'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
+ # 'AntlrCLexer',
+ 'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
+ 'AntlrJavaLexer', 'AntlrActionScriptLexer',
+ 'TreetopLexer', 'EbnfLexer']
+
+
+class RagelLexer(RegexLexer):
+ """
+ A pure `Ragel <http://www.complang.org/ragel/>`_ lexer. Use this for
+ fragments of Ragel. For ``.rl`` files, use RagelEmbeddedLexer instead
+ (or one of the language-specific subclasses).
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel'
+ aliases = ['ragel']
+ filenames = []
+
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Whitespace)
+ ],
+ 'comments': [
+ (r'\#.*$', Comment),
+ ],
+ 'keywords': [
+ (r'(access|action|alphtype)\b', Keyword),
+ (r'(getkey|write|machine|include)\b', Keyword),
+ (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
+ (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
+ ],
+ 'numbers': [
+ (r'0x[0-9A-Fa-f]+', Number.Hex),
+ (r'[+-]?[0-9]+', Number.Integer),
+ ],
+ 'literals': [
+ (r'"(\\\\|\\"|[^"])*"', String), # double quote string
+ (r"'(\\\\|\\'|[^'])*'", String), # single quote string
+ (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals
+ (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions
+ ],
+ 'identifiers': [
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ ],
+ 'operators': [
+ (r',', Operator), # Join
+ (r'\||&|--?', Operator), # Union, Intersection and Subtraction
+            (r'\.|<:|:>>?', Operator),                  # Concatenation
+ (r':', Operator), # Label
+ (r'->', Operator), # Epsilon Transition
+ (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
+ (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
+ (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
+ (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
+ (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
+ (r'>|@|\$|%', Operator), # Transition Actions and Priorities
+ (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator), # Repetition
+ (r'!|\^', Operator), # Negation
+ (r'\(|\)', Operator), # Grouping
+ ],
+ 'root': [
+ include('literals'),
+ include('whitespace'),
+ include('comments'),
+ include('keywords'),
+ include('numbers'),
+ include('identifiers'),
+ include('operators'),
+ (r'\{', Punctuation, 'host'),
+ (r'=', Operator),
+ (r';', Punctuation),
+ ],
+ 'host': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^{}\'"/#]+', # exclude unsafe characters
+ r'[^\\]\\[{}]', # allow escaped { or }
+
+ # strings and comments may safely contain unsafe characters
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'\#.*$\n?', # ruby comment
+
+ # regular expression: There's no reason for it to start
+ # with a * and this stops confusion with comments.
+ r'/(?!\*)(\\\\|\\/|[^/])*/',
+
+ # / is safe now that we've handled regex and javadoc comments
+ r'/',
+ )) + r')+', Other),
+
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ }
+
+
+class RagelEmbeddedLexer(RegexLexer):
+ """
+ A lexer for `Ragel`_ embedded in a host language file.
+
+ This will only highlight Ragel statements. If you want host language
+ highlighting then call the language-specific Ragel lexer.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Embedded Ragel'
+ aliases = ['ragel-em']
+ filenames = ['*.rl']
+
+ tokens = {
+ 'root': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^%\'"/#]+', # exclude unsafe characters
+ r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
+
+ # strings and comments may safely contain unsafe characters
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
+ r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression
+
+ # / is safe now that we've handled regex and javadoc comments
+ r'/',
+ )) + r')+', Other),
+
+ # Single Line FSM.
+ # Please don't put a quoted newline in a single line FSM.
+ # That's just mean. It will break this.
+ (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
+ using(RagelLexer),
+ Punctuation, Text)),
+
+ # Multi Line FSM.
+ (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
+ ],
+ 'multi-line-fsm': [
+ (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
+ r'(' + r'|'.join((
+ r'[^}\'"\[/#]', # exclude unsafe characters
+ r'\}(?=[^%]|$)', # } is okay as long as it's not followed by %
+ r'\}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
+ r'[^\\]\\[{}]', # ...and } is okay if it's escaped
+
+ # allow / if it's preceded with one of these symbols
+ # (ragel EOF actions)
+ r'(>|\$|%|<|@|<>)/',
+
+ # specifically allow regex followed immediately by *
+ # so it doesn't get mistaken for a comment
+ r'/(?!\*)(\\\\|\\/|[^/])*/\*',
+
+ # allow / as long as it's not followed by another / or by a *
+ r'/(?=[^/*]|$)',
+
+ # We want to match as many of these as we can in one block.
+ # Not sure if we need the + sign here,
+ # does it help performance?
+ )) + r')+',
+
+ # strings and comments may safely contain unsafe characters
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
+ )) + r')+', using(RagelLexer)),
+
+ (r'\}%%', Punctuation, '#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ return '@LANG: indep' in text
+
+
+class RagelRubyLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a Ruby host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in Ruby Host'
+ aliases = ['ragel-ruby', 'ragel-rb']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
+ super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer,
+ **options)
+
+ def analyse_text(text):
+ return '@LANG: ruby' in text
+
+
+class RagelCLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a C host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in C Host'
+ aliases = ['ragel-c']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
+ super(RagelCLexer, self).__init__(CLexer, RagelEmbeddedLexer,
+ **options)
+
+ def analyse_text(text):
+ return '@LANG: c' in text
+
+
+class RagelDLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a D host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in D Host'
+ aliases = ['ragel-d']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
+ super(RagelDLexer, self).__init__(DLexer, RagelEmbeddedLexer, **options)
+
+ def analyse_text(text):
+ return '@LANG: d' in text
+
+
+class RagelCppLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a CPP host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in CPP Host'
+ aliases = ['ragel-cpp']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
+ super(RagelCppLexer, self).__init__(CppLexer, RagelEmbeddedLexer, **options)
+
+ def analyse_text(text):
+ return '@LANG: c++' in text
+
+
+class RagelObjectiveCLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in an Objective C host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in Objective C Host'
+ aliases = ['ragel-objc']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
+ super(RagelObjectiveCLexer, self).__init__(ObjectiveCLexer,
+ RagelEmbeddedLexer,
+ **options)
+
+ def analyse_text(text):
+ return '@LANG: objc' in text
+
+
+class RagelJavaLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a Java host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in Java Host'
+ aliases = ['ragel-java']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
+ super(RagelJavaLexer, self).__init__(JavaLexer, RagelEmbeddedLexer,
+ **options)
+
+ def analyse_text(text):
+ return '@LANG: java' in text
+
+
+class AntlrLexer(RegexLexer):
+ """
+ Generic `ANTLR`_ Lexer.
+ Should not be called directly, instead
+ use DelegatingLexer for your target language.
+
+ .. versionadded:: 1.1
+
+ .. _ANTLR: http://www.antlr.org/
+ """
+
+ name = 'ANTLR'
+ aliases = ['antlr']
+ filenames = []
+
+ _id = r'[A-Za-z]\w*'
+ _TOKEN_REF = r'[A-Z]\w*'
+ _RULE_REF = r'[a-z]\w*'
+ _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
+ _INT = r'[0-9]+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'comments': [
+ (r'//.*$', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ ],
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+
+ (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
+ bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
+ Punctuation)),
+ # optionsSpec
+ (r'options\b', Keyword, 'options'),
+ # tokensSpec
+ (r'tokens\b', Keyword, 'tokens'),
+ # attrScope
(r'(scope)(\s*)(' + _id + r')(\s*)(\{)',
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
- Punctuation), 'action'),
- # exception
- (r'(catch|finally)\b', Keyword, 'exception'),
- # action
+ bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
+ Punctuation), 'action'),
+ # exception
+ (r'(catch|finally)\b', Keyword, 'exception'),
+ # action
(r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)',
- bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
- Name.Label, Whitespace, Punctuation), 'action'),
- # rule
- (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
- bygroups(Keyword, Whitespace, Name.Label, Punctuation),
- ('rule-alts', 'rule-prelims')),
- ],
- 'exception': [
- (r'\n', Whitespace, '#pop'),
- (r'\s', Whitespace),
- include('comments'),
-
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- ],
- 'rule-prelims': [
- include('whitespace'),
- include('comments'),
-
- (r'returns\b', Keyword),
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- # throwsSpec
- (r'(throws)(\s+)(' + _id + ')',
- bygroups(Keyword, Whitespace, Name.Label)),
- (r'(,)(\s*)(' + _id + ')',
- bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
- # optionsSpec
- (r'options\b', Keyword, 'options'),
- # ruleScopeSpec - scope followed by target language code or name of action
- # TODO finish implementing other possibilities for scope
- # L173 ANTLRv3.g from ANTLR book
- (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
- 'action'),
+ bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
+ Name.Label, Whitespace, Punctuation), 'action'),
+ # rule
+ (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
+ bygroups(Keyword, Whitespace, Name.Label, Punctuation),
+ ('rule-alts', 'rule-prelims')),
+ ],
+ 'exception': [
+ (r'\n', Whitespace, '#pop'),
+ (r'\s', Whitespace),
+ include('comments'),
+
+ (r'\[', Punctuation, 'nested-arg-action'),
+ (r'\{', Punctuation, 'action'),
+ ],
+ 'rule-prelims': [
+ include('whitespace'),
+ include('comments'),
+
+ (r'returns\b', Keyword),
+ (r'\[', Punctuation, 'nested-arg-action'),
+ (r'\{', Punctuation, 'action'),
+ # throwsSpec
+ (r'(throws)(\s+)(' + _id + ')',
+ bygroups(Keyword, Whitespace, Name.Label)),
+ (r'(,)(\s*)(' + _id + ')',
+ bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
+ # optionsSpec
+ (r'options\b', Keyword, 'options'),
+ # ruleScopeSpec - scope followed by target language code or name of action
+ # TODO finish implementing other possibilities for scope
+ # L173 ANTLRv3.g from ANTLR book
+ (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
+ 'action'),
(r'(scope)(\s+)(' + _id + r')(\s*)(;)',
- bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
- # ruleAction
+ bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
+ # ruleAction
(r'(@' + _id + r')(\s*)(\{)',
- bygroups(Name.Label, Whitespace, Punctuation), 'action'),
- # finished prelims, go to rule alts!
- (r':', Punctuation, '#pop')
- ],
- 'rule-alts': [
- include('whitespace'),
- include('comments'),
-
- # These might need to go in a separate 'block' state triggered by (
- (r'options\b', Keyword, 'options'),
- (r':', Punctuation),
-
- # literals
- (r"'(\\\\|\\'|[^'])*'", String),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'<<([^>]|>[^>])>>', String),
- # identifiers
- # Tokens start with capital letter.
- (r'\$?[A-Z_]\w*', Name.Constant),
- # Rules start with small letter.
- (r'\$?[a-z_]\w*', Name.Variable),
- # operators
- (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
- (r',', Punctuation),
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- (r';', Punctuation, '#pop')
- ],
- 'tokens': [
- include('whitespace'),
- include('comments'),
- (r'\{', Punctuation),
- (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
+ bygroups(Name.Label, Whitespace, Punctuation), 'action'),
+ # finished prelims, go to rule alts!
+ (r':', Punctuation, '#pop')
+ ],
+ 'rule-alts': [
+ include('whitespace'),
+ include('comments'),
+
+ # These might need to go in a separate 'block' state triggered by (
+ (r'options\b', Keyword, 'options'),
+ (r':', Punctuation),
+
+ # literals
+ (r"'(\\\\|\\'|[^'])*'", String),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'<<([^>]|>[^>])>>', String),
+ # identifiers
+ # Tokens start with capital letter.
+ (r'\$?[A-Z_]\w*', Name.Constant),
+ # Rules start with small letter.
+ (r'\$?[a-z_]\w*', Name.Variable),
+ # operators
+ (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
+ (r',', Punctuation),
+ (r'\[', Punctuation, 'nested-arg-action'),
+ (r'\{', Punctuation, 'action'),
+ (r';', Punctuation, '#pop')
+ ],
+ 'tokens': [
+ include('whitespace'),
+ include('comments'),
+ (r'\{', Punctuation),
+ (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
+ r')?(\s*)(;)',
- bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
- String, Whitespace, Punctuation)),
- (r'\}', Punctuation, '#pop'),
- ],
- 'options': [
- include('whitespace'),
- include('comments'),
- (r'\{', Punctuation),
- (r'(' + _id + r')(\s*)(=)(\s*)(' +
+ bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
+ String, Whitespace, Punctuation)),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'options': [
+ include('whitespace'),
+ include('comments'),
+ (r'\{', Punctuation),
+ (r'(' + _id + r')(\s*)(=)(\s*)(' +
'|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
- bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
- Text, Whitespace, Punctuation)),
- (r'\}', Punctuation, '#pop'),
- ],
- 'action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^${}\'"/\\]+', # exclude unsafe characters
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\/|[^/])*/',
-
- # backslashes are okay, as long as we are not backslashing a %
- r'\\(?!%)',
-
- # Now that we've handled regex and javadoc comments
- # it's safe to let / through.
- r'/',
- )) + r')+', Other),
- (r'(\\)(%)', bygroups(Punctuation, Other)),
- (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
- bygroups(Name.Variable, Punctuation, Name.Property)),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'nested-arg-action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks.
- r'[^$\[\]\'"/]+', # exclude unsafe characters
-
- # strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
- r'/(?!\*)(\\\\|\\/|[^/])*/',
-
- # Now that we've handled regex and javadoc comments
- # it's safe to let / through.
- r'/',
- )) + r')+', Other),
-
-
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
- bygroups(Name.Variable, Punctuation, Name.Property)),
- (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
- ]
- }
-
- def analyse_text(text):
- return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
-
-# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
-
-# TH: I'm not aware of any language features of C++ that will cause
-# incorrect lexing of C files. Antlr doesn't appear to make a distinction,
-# so just assume they're C++. No idea how to make Objective C work in the
-# future.
-
-# class AntlrCLexer(DelegatingLexer):
-# """
-# ANTLR with C Target
-#
-# .. versionadded:: 1.1
-# """
-#
-# name = 'ANTLR With C Target'
-# aliases = ['antlr-c']
-# filenames = ['*.G', '*.g']
-#
-# def __init__(self, **options):
-# super(AntlrCLexer, self).__init__(CLexer, AntlrLexer, **options)
-#
-# def analyse_text(text):
-# return re.match(r'^\s*language\s*=\s*C\s*;', text)
-
-
-class AntlrCppLexer(DelegatingLexer):
- """
- `ANTLR`_ with CPP Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With CPP Target'
- aliases = ['antlr-cpp']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super(AntlrCppLexer, self).__init__(CppLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
-
-
-class AntlrObjectiveCLexer(DelegatingLexer):
- """
- `ANTLR`_ with Objective-C Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With ObjectiveC Target'
- aliases = ['antlr-objc']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super(AntlrObjectiveCLexer, self).__init__(ObjectiveCLexer,
- AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
-
-
-class AntlrCSharpLexer(DelegatingLexer):
- """
- `ANTLR`_ with C# Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With C# Target'
- aliases = ['antlr-csharp', 'antlr-c#']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super(AntlrCSharpLexer, self).__init__(CSharpLexer, AntlrLexer,
- **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
-
-
-class AntlrPythonLexer(DelegatingLexer):
- """
- `ANTLR`_ with Python Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Python Target'
- aliases = ['antlr-python']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super(AntlrPythonLexer, self).__init__(PythonLexer, AntlrLexer,
- **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
-
-
-class AntlrJavaLexer(DelegatingLexer):
- """
- `ANTLR`_ with Java Target
-
-    .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Java Target'
- aliases = ['antlr-java']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super(AntlrJavaLexer, self).__init__(JavaLexer, AntlrLexer,
- **options)
-
- def analyse_text(text):
- # Antlr language is Java by default
- return AntlrLexer.analyse_text(text) and 0.9
-
-
-class AntlrRubyLexer(DelegatingLexer):
- """
- `ANTLR`_ with Ruby Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Ruby Target'
- aliases = ['antlr-ruby', 'antlr-rb']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super(AntlrRubyLexer, self).__init__(RubyLexer, AntlrLexer,
- **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
-
-
-class AntlrPerlLexer(DelegatingLexer):
- """
- `ANTLR`_ with Perl Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Perl Target'
- aliases = ['antlr-perl']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- super(AntlrPerlLexer, self).__init__(PerlLexer, AntlrLexer,
- **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
-
-
-class AntlrActionScriptLexer(DelegatingLexer):
- """
- `ANTLR`_ with ActionScript Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With ActionScript Target'
- aliases = ['antlr-as', 'antlr-actionscript']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- from pygments.lexers.actionscript import ActionScriptLexer
- super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer,
- AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
-
-
-class TreetopBaseLexer(RegexLexer):
- """
- A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
- Not for direct use; use TreetopLexer instead.
-
- .. versionadded:: 1.6
- """
-
- tokens = {
- 'root': [
- include('space'),
- (r'require[ \t]+[^\n\r]+[\n\r]', Other),
- (r'module\b', Keyword.Namespace, 'module'),
- (r'grammar\b', Keyword, 'grammar'),
- ],
- 'module': [
- include('space'),
- include('end'),
- (r'module\b', Keyword, '#push'),
- (r'grammar\b', Keyword, 'grammar'),
- (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
- ],
- 'grammar': [
- include('space'),
- include('end'),
- (r'rule\b', Keyword, 'rule'),
- (r'include\b', Keyword, 'include'),
- (r'[A-Z]\w*', Name),
- ],
- 'include': [
- include('space'),
- (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
- ],
- 'rule': [
- include('space'),
- include('end'),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'[A-Za-z_]\w*', Name),
- (r'[()]', Punctuation),
- (r'[?+*/&!~]', Operator),
- (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
- (r'([0-9]*)(\.\.)([0-9]*)',
- bygroups(Number.Integer, Operator, Number.Integer)),
- (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
- (r'\{', Punctuation, 'inline_module'),
- (r'\.', String.Regex),
- ],
- 'inline_module': [
- (r'\{', Other, 'ruby'),
- (r'\}', Punctuation, '#pop'),
- (r'[^{}]+', Other),
- ],
- 'ruby': [
- (r'\{', Other, '#push'),
- (r'\}', Other, '#pop'),
- (r'[^{}]+', Other),
- ],
- 'space': [
- (r'[ \t\n\r]+', Whitespace),
- (r'#[^\n]*', Comment.Single),
- ],
- 'end': [
- (r'end\b', Keyword, '#pop'),
- ],
- }
-
-
-class TreetopLexer(DelegatingLexer):
- """
- A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
-
- .. versionadded:: 1.6
- """
-
- name = 'Treetop'
- aliases = ['treetop']
- filenames = ['*.treetop', '*.tt']
-
- def __init__(self, **options):
- super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options)
-
-
-class EbnfLexer(RegexLexer):
- """
- Lexer for `ISO/IEC 14977 EBNF
- <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
- grammars.
-
- .. versionadded:: 2.0
- """
-
- name = 'EBNF'
- aliases = ['ebnf']
- filenames = ['*.ebnf']
- mimetypes = ['text/x-ebnf']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'=', Operator, 'production'),
- ],
- 'production': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'"[^"]*"', String.Double),
- (r"'[^']*'", String.Single),
- (r'(\?[^?]*\?)', Name.Entity),
- (r'[\[\]{}(),|]', Punctuation),
- (r'-', Operator),
- (r';', Punctuation, '#pop'),
- (r'\.', Punctuation, '#pop'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'comment_start': [
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^*)]', Comment.Multiline),
- include('comment_start'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[*)]', Comment.Multiline),
- ],
- 'identifier': [
- (r'([a-zA-Z][\w \-]*)', Keyword),
- ],
- }
+ bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
+ Text, Whitespace, Punctuation)),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'action': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^${}\'"/\\]+', # exclude unsafe characters
+
+ # strings and comments may safely contain unsafe characters
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+
+ # regular expression: There's no reason for it to start
+ # with a * and this stops confusion with comments.
+ r'/(?!\*)(\\\\|\\/|[^/])*/',
+
+ # backslashes are okay, as long as we are not backslashing a %
+ r'\\(?!%)',
+
+ # Now that we've handled regex and javadoc comments
+ # it's safe to let / through.
+ r'/',
+ )) + r')+', Other),
+ (r'(\\)(%)', bygroups(Punctuation, Other)),
+ (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
+ bygroups(Name.Variable, Punctuation, Name.Property)),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'nested-arg-action': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks.
+ r'[^$\[\]\'"/]+', # exclude unsafe characters
+
+ # strings and comments may safely contain unsafe characters
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+
+ # regular expression: There's no reason for it to start
+ # with a * and this stops confusion with comments.
+ r'/(?!\*)(\\\\|\\/|[^/])*/',
+
+ # Now that we've handled regex and javadoc comments
+ # it's safe to let / through.
+ r'/',
+ )) + r')+', Other),
+
+
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
+ bygroups(Name.Variable, Punctuation, Name.Property)),
+ (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
+ ]
+ }
+
+ def analyse_text(text):
+ return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
+
+# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
+
+# TH: I'm not aware of any language features of C++ that will cause
+# incorrect lexing of C files. Antlr doesn't appear to make a distinction,
+# so just assume they're C++. No idea how to make Objective C work in the
+# future.
+
+# class AntlrCLexer(DelegatingLexer):
+# """
+# ANTLR with C Target
+#
+# .. versionadded:: 1.1
+# """
+#
+# name = 'ANTLR With C Target'
+# aliases = ['antlr-c']
+# filenames = ['*.G', '*.g']
+#
+# def __init__(self, **options):
+# super(AntlrCLexer, self).__init__(CLexer, AntlrLexer, **options)
+#
+# def analyse_text(text):
+# return re.match(r'^\s*language\s*=\s*C\s*;', text)
+
+
+class AntlrCppLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with CPP Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With CPP Target'
+ aliases = ['antlr-cpp']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ super(AntlrCppLexer, self).__init__(CppLexer, AntlrLexer, **options)
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
+
+
+class AntlrObjectiveCLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Objective-C Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With ObjectiveC Target'
+ aliases = ['antlr-objc']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ super(AntlrObjectiveCLexer, self).__init__(ObjectiveCLexer,
+ AntlrLexer, **options)
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
+
+
+class AntlrCSharpLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with C# Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With C# Target'
+ aliases = ['antlr-csharp', 'antlr-c#']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ super(AntlrCSharpLexer, self).__init__(CSharpLexer, AntlrLexer,
+ **options)
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
+
+
+class AntlrPythonLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Python Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Python Target'
+ aliases = ['antlr-python']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ super(AntlrPythonLexer, self).__init__(PythonLexer, AntlrLexer,
+ **options)
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
+
+
+class AntlrJavaLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Java Target
+
+    .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Java Target'
+ aliases = ['antlr-java']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ super(AntlrJavaLexer, self).__init__(JavaLexer, AntlrLexer,
+ **options)
+
+ def analyse_text(text):
+ # Antlr language is Java by default
+ return AntlrLexer.analyse_text(text) and 0.9
+
+
+class AntlrRubyLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Ruby Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Ruby Target'
+ aliases = ['antlr-ruby', 'antlr-rb']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ super(AntlrRubyLexer, self).__init__(RubyLexer, AntlrLexer,
+ **options)
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
+
+
+class AntlrPerlLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Perl Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Perl Target'
+ aliases = ['antlr-perl']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ super(AntlrPerlLexer, self).__init__(PerlLexer, AntlrLexer,
+ **options)
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
+
+
+class AntlrActionScriptLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with ActionScript Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With ActionScript Target'
+ aliases = ['antlr-as', 'antlr-actionscript']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ from pygments.lexers.actionscript import ActionScriptLexer
+ super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer,
+ AntlrLexer, **options)
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+
+
+class TreetopBaseLexer(RegexLexer):
+ """
+ A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
+ Not for direct use; use TreetopLexer instead.
+
+ .. versionadded:: 1.6
+ """
+
+ tokens = {
+ 'root': [
+ include('space'),
+ (r'require[ \t]+[^\n\r]+[\n\r]', Other),
+ (r'module\b', Keyword.Namespace, 'module'),
+ (r'grammar\b', Keyword, 'grammar'),
+ ],
+ 'module': [
+ include('space'),
+ include('end'),
+ (r'module\b', Keyword, '#push'),
+ (r'grammar\b', Keyword, 'grammar'),
+ (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
+ ],
+ 'grammar': [
+ include('space'),
+ include('end'),
+ (r'rule\b', Keyword, 'rule'),
+ (r'include\b', Keyword, 'include'),
+ (r'[A-Z]\w*', Name),
+ ],
+ 'include': [
+ include('space'),
+ (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
+ ],
+ 'rule': [
+ include('space'),
+ include('end'),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
+ (r'[A-Za-z_]\w*', Name),
+ (r'[()]', Punctuation),
+ (r'[?+*/&!~]', Operator),
+ (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
+ (r'([0-9]*)(\.\.)([0-9]*)',
+ bygroups(Number.Integer, Operator, Number.Integer)),
+ (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
+ (r'\{', Punctuation, 'inline_module'),
+ (r'\.', String.Regex),
+ ],
+ 'inline_module': [
+ (r'\{', Other, 'ruby'),
+ (r'\}', Punctuation, '#pop'),
+ (r'[^{}]+', Other),
+ ],
+ 'ruby': [
+ (r'\{', Other, '#push'),
+ (r'\}', Other, '#pop'),
+ (r'[^{}]+', Other),
+ ],
+ 'space': [
+ (r'[ \t\n\r]+', Whitespace),
+ (r'#[^\n]*', Comment.Single),
+ ],
+ 'end': [
+ (r'end\b', Keyword, '#pop'),
+ ],
+ }
+
+
+class TreetopLexer(DelegatingLexer):
+ """
+ A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Treetop'
+ aliases = ['treetop']
+ filenames = ['*.treetop', '*.tt']
+
+ def __init__(self, **options):
+ super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options)
+
+
+class EbnfLexer(RegexLexer):
+ """
+ Lexer for `ISO/IEC 14977 EBNF
+ <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
+ grammars.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'EBNF'
+ aliases = ['ebnf']
+ filenames = ['*.ebnf']
+ mimetypes = ['text/x-ebnf']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'=', Operator, 'production'),
+ ],
+ 'production': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'"[^"]*"', String.Double),
+ (r"'[^']*'", String.Single),
+ (r'(\?[^?]*\?)', Name.Entity),
+ (r'[\[\]{}(),|]', Punctuation),
+ (r'-', Operator),
+ (r';', Punctuation, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'comment_start': [
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+ 'comment': [
+ (r'[^*)]', Comment.Multiline),
+ include('comment_start'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[*)]', Comment.Multiline),
+ ],
+ 'identifier': [
+ (r'([a-zA-Z][\w \-]*)', Keyword),
+ ],
+ }
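A usage aside for the grammar-notation lexers restored above (not part of the diff itself): the EbnfLexer at the end of this hunk, like the ANTLR target lexers, is consumed through the regular Pygments API. A minimal sketch, where the short EBNF fragment is an illustrative assumption rather than anything taken from this change:

    from pygments import highlight
    from pygments.lexers import EbnfLexer
    from pygments.formatters import TerminalFormatter

    # Illustrative ISO/IEC 14977 EBNF input; the grammar text is an assumption,
    # not something taken from this diff.
    source = '''
    (* a tiny grammar *)
    digit  = "0" | "1" | "2" | "3" ;
    number = digit, { digit } ;
    '''

    print(highlight(source, EbnfLexer(), TerminalFormatter()))

The ANTLR delegating lexers above are reached the same way through their aliases, e.g. get_lexer_by_name('antlr-python'), which lets AntlrLexer handle the grammar syntax while PythonLexer picks up the embedded action code.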
diff --git a/contrib/python/Pygments/py2/pygments/lexers/pascal.py b/contrib/python/Pygments/py2/pygments/lexers/pascal.py
index 0a8dd7df4d..213a5a55e3 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/pascal.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/pascal.py
@@ -1,644 +1,644 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.pascal
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Pascal family languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.pascal
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Pascal family languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
- using, this, default
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-from pygments.scanner import Scanner
-
-# compatibility import
-from pygments.lexers.modula2 import Modula2Lexer
-
-__all__ = ['DelphiLexer', 'AdaLexer']
-
-
-class DelphiLexer(Lexer):
- """
- For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
- Turbo Pascal and Free Pascal source code.
-
- Additional options accepted:
-
- `turbopascal`
- Highlight Turbo Pascal specific keywords (default: ``True``).
- `delphi`
- Highlight Borland Delphi specific keywords (default: ``True``).
- `freepascal`
- Highlight Free Pascal specific keywords (default: ``True``).
- `units`
- A list of units that should be considered builtin, supported are
- ``System``, ``SysUtils``, ``Classes`` and ``Math``.
- Default is to consider all of them builtin.
- """
- name = 'Delphi'
- aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
+ using, this, default
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.scanner import Scanner
+
+# compatibility import
+from pygments.lexers.modula2 import Modula2Lexer
+
+__all__ = ['DelphiLexer', 'AdaLexer']
+
+
+class DelphiLexer(Lexer):
+ """
+ For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
+ Turbo Pascal and Free Pascal source code.
+
+ Additional options accepted:
+
+ `turbopascal`
+ Highlight Turbo Pascal specific keywords (default: ``True``).
+ `delphi`
+ Highlight Borland Delphi specific keywords (default: ``True``).
+ `freepascal`
+ Highlight Free Pascal specific keywords (default: ``True``).
+ `units`
+ A list of units that should be considered builtin, supported are
+ ``System``, ``SysUtils``, ``Classes`` and ``Math``.
+ Default is to consider all of them builtin.
+ """
+ name = 'Delphi'
+ aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
filenames = ['*.pas', '*.dpr']
- mimetypes = ['text/x-pascal']
-
- TURBO_PASCAL_KEYWORDS = (
- 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
- 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
- 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
- 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
- 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
- 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
- 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
- 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
- )
-
- DELPHI_KEYWORDS = (
- 'as', 'class', 'except', 'exports', 'finalization', 'finally',
- 'initialization', 'is', 'library', 'on', 'property', 'raise',
- 'threadvar', 'try'
- )
-
- FREE_PASCAL_KEYWORDS = (
- 'dispose', 'exit', 'false', 'new', 'true'
- )
-
+ mimetypes = ['text/x-pascal']
+
+ TURBO_PASCAL_KEYWORDS = (
+ 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
+ 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
+ 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
+ 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
+ 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
+ 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
+ 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
+ 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
+ )
+
+ DELPHI_KEYWORDS = (
+ 'as', 'class', 'except', 'exports', 'finalization', 'finally',
+ 'initialization', 'is', 'library', 'on', 'property', 'raise',
+ 'threadvar', 'try'
+ )
+
+ FREE_PASCAL_KEYWORDS = (
+ 'dispose', 'exit', 'false', 'new', 'true'
+ )
+
BLOCK_KEYWORDS = {
- 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
- 'finalization', 'function', 'implementation', 'initialization',
- 'label', 'library', 'operator', 'procedure', 'program', 'property',
- 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
+ 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
+ 'finalization', 'function', 'implementation', 'initialization',
+ 'label', 'library', 'operator', 'procedure', 'program', 'property',
+ 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
}
-
+
FUNCTION_MODIFIERS = {
- 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
- 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
- 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
- 'override', 'assembler'
+ 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
+ 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
+ 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
+ 'override', 'assembler'
}
-
- # XXX: those aren't global. but currently we know no way for defining
- # them just for the type context.
+
+ # XXX: those aren't global. but currently we know no way for defining
+ # them just for the type context.
DIRECTIVES = {
- 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
- 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
- 'published', 'public'
+ 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
+ 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
+ 'published', 'public'
}
-
+
BUILTIN_TYPES = {
- 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
- 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
- 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
- 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
- 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
- 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
- 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
- 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
- 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
- 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
- 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
- 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
- 'widechar', 'widestring', 'word', 'wordbool'
+ 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
+ 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
+ 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
+ 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
+ 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
+ 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
+ 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
+ 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
+ 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
+ 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
+ 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
+ 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
+ 'widechar', 'widestring', 'word', 'wordbool'
}
-
- BUILTIN_UNITS = {
- 'System': (
- 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
- 'append', 'arctan', 'assert', 'assigned', 'assignfile',
- 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
- 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
- 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
- 'dispose', 'doubletocomp', 'endthread', 'enummodules',
- 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
- 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
- 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
- 'findresourcehinstance', 'flush', 'frac', 'freemem',
- 'get8087cw', 'getdir', 'getlasterror', 'getmem',
- 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
- 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
- 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
- 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
- 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
- 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
- 'randomize', 'read', 'readln', 'reallocmem',
- 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
- 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
- 'set8087cw', 'setlength', 'setlinebreakstyle',
- 'setmemorymanager', 'setstring', 'settextbuf',
- 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
- 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
- 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
- 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
- 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
- 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
- 'widecharlentostring', 'widecharlentostrvar',
- 'widechartostring', 'widechartostrvar',
- 'widestringtoucs4string', 'write', 'writeln'
- ),
- 'SysUtils': (
- 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
- 'allocmem', 'ansicomparefilename', 'ansicomparestr',
- 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
- 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
- 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
- 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
- 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
- 'ansistrscan', 'ansistrupper', 'ansiuppercase',
- 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
- 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
- 'callterminateprocs', 'changefileext', 'charlength',
- 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
- 'comparetext', 'createdir', 'createguid', 'currentyear',
- 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
- 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
- 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
- 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
- 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
- 'exceptionerrormessage', 'excludetrailingbackslash',
- 'excludetrailingpathdelimiter', 'expandfilename',
- 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
- 'extractfiledrive', 'extractfileext', 'extractfilename',
- 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
- 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
- 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
- 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
- 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
- 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
- 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
- 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
- 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
- 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
- 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
- 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
- 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
- 'includetrailingbackslash', 'includetrailingpathdelimiter',
- 'incmonth', 'initializepackage', 'interlockeddecrement',
- 'interlockedexchange', 'interlockedexchangeadd',
- 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
- 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
- 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
- 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
- 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
- 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
- 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
- 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
- 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
- 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
- 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
- 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
- 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
- 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
- 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
- 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
- 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
- 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
- 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
- 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
- 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
- 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
- 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
- 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
- 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
- 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
- 'wraptext'
- ),
- 'Classes': (
- 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
- 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
- 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
- 'groupdescendantswith', 'hextobin', 'identtoint',
- 'initinheritedcomponent', 'inttoident', 'invalidpoint',
- 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
- 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
- 'pointsequal', 'readcomponentres', 'readcomponentresex',
- 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
- 'registerclasses', 'registercomponents', 'registerintegerconsts',
- 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
- 'teststreamformat', 'unregisterclass', 'unregisterclasses',
- 'unregisterintegerconsts', 'unregistermoduleclasses',
- 'writecomponentresfile'
- ),
- 'Math': (
- 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
- 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
- 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
- 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
- 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
- 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
- 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
- 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
- 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
- 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
- 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
- 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
- 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
- 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
- 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
- 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
- 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
- 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
- 'tan', 'tanh', 'totalvariance', 'variance'
- )
- }
-
+
+ BUILTIN_UNITS = {
+ 'System': (
+ 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
+ 'append', 'arctan', 'assert', 'assigned', 'assignfile',
+ 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
+ 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
+ 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
+ 'dispose', 'doubletocomp', 'endthread', 'enummodules',
+ 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
+ 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
+ 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
+ 'findresourcehinstance', 'flush', 'frac', 'freemem',
+ 'get8087cw', 'getdir', 'getlasterror', 'getmem',
+ 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
+ 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
+ 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
+ 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
+ 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
+ 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
+ 'randomize', 'read', 'readln', 'reallocmem',
+ 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
+ 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
+ 'set8087cw', 'setlength', 'setlinebreakstyle',
+ 'setmemorymanager', 'setstring', 'settextbuf',
+ 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
+ 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
+ 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
+ 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
+ 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
+ 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
+ 'widecharlentostring', 'widecharlentostrvar',
+ 'widechartostring', 'widechartostrvar',
+ 'widestringtoucs4string', 'write', 'writeln'
+ ),
+ 'SysUtils': (
+ 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
+ 'allocmem', 'ansicomparefilename', 'ansicomparestr',
+ 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
+ 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
+ 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
+ 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
+ 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
+ 'ansistrscan', 'ansistrupper', 'ansiuppercase',
+ 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
+ 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
+ 'callterminateprocs', 'changefileext', 'charlength',
+ 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
+ 'comparetext', 'createdir', 'createguid', 'currentyear',
+ 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
+ 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
+ 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
+ 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
+ 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
+ 'exceptionerrormessage', 'excludetrailingbackslash',
+ 'excludetrailingpathdelimiter', 'expandfilename',
+ 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
+ 'extractfiledrive', 'extractfileext', 'extractfilename',
+ 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
+ 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
+ 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
+ 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
+ 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
+ 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
+ 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
+ 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
+ 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
+ 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
+ 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
+ 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
+ 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
+ 'includetrailingbackslash', 'includetrailingpathdelimiter',
+ 'incmonth', 'initializepackage', 'interlockeddecrement',
+ 'interlockedexchange', 'interlockedexchangeadd',
+ 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
+ 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
+ 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
+ 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
+ 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
+ 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
+ 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
+ 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
+ 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
+ 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
+ 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
+ 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
+ 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
+ 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
+ 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
+ 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
+ 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
+ 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
+ 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
+ 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
+ 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
+ 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
+ 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
+ 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
+ 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
+ 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
+ 'wraptext'
+ ),
+ 'Classes': (
+ 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
+ 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
+ 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
+ 'groupdescendantswith', 'hextobin', 'identtoint',
+ 'initinheritedcomponent', 'inttoident', 'invalidpoint',
+ 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
+ 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
+ 'pointsequal', 'readcomponentres', 'readcomponentresex',
+ 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
+ 'registerclasses', 'registercomponents', 'registerintegerconsts',
+ 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
+ 'teststreamformat', 'unregisterclass', 'unregisterclasses',
+ 'unregisterintegerconsts', 'unregistermoduleclasses',
+ 'writecomponentresfile'
+ ),
+ 'Math': (
+ 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
+ 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
+ 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
+ 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
+ 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
+ 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
+ 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
+ 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
+ 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
+ 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
+ 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
+ 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
+ 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
+ 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
+ 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
+ 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
+ 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
+ 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
+ 'tan', 'tanh', 'totalvariance', 'variance'
+ )
+ }
+
ASM_REGISTERS = {
- 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
- 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
- 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
- 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
- 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
- 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
- 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
- 'xmm6', 'xmm7'
+ 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
+ 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
+ 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
+ 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
+ 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
+ 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
+ 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
+ 'xmm6', 'xmm7'
}
-
+
ASM_INSTRUCTIONS = {
- 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
- 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
- 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
- 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
- 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
- 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
- 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
- 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
- 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
- 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
- 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
- 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
- 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
- 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
- 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
- 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
- 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
- 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
- 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
- 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
- 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
- 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
- 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
- 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
- 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
- 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
- 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
- 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
- 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
- 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
- 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
- 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
- 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
- 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
- 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
- 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
- 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
- 'xlatb', 'xor'
+ 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
+ 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
+ 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
+ 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
+ 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
+ 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
+ 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
+ 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
+ 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
+ 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
+ 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
+ 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
+ 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
+ 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
+ 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
+ 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
+ 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
+ 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
+ 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
+ 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
+ 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
+ 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
+ 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
+ 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
+ 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
+ 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
+ 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
+ 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
+ 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
+ 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
+ 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
+ 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
+ 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
+ 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
+ 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
+ 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
+ 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
+ 'xlatb', 'xor'
}
-
- def __init__(self, **options):
- Lexer.__init__(self, **options)
- self.keywords = set()
- if get_bool_opt(options, 'turbopascal', True):
- self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
- if get_bool_opt(options, 'delphi', True):
- self.keywords.update(self.DELPHI_KEYWORDS)
- if get_bool_opt(options, 'freepascal', True):
- self.keywords.update(self.FREE_PASCAL_KEYWORDS)
- self.builtins = set()
- for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
- self.builtins.update(self.BUILTIN_UNITS[unit])
-
- def get_tokens_unprocessed(self, text):
- scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
- stack = ['initial']
- in_function_block = False
- in_property_block = False
- was_dot = False
- next_token_is_function = False
- next_token_is_property = False
- collect_labels = False
- block_labels = set()
- brace_balance = [0, 0]
-
- while not scanner.eos:
- token = Error
-
- if stack[-1] == 'initial':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
- token = Operator
- # stop label highlighting on next ";"
- if collect_labels and scanner.match == ';':
- collect_labels = False
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- # abort function naming ``foo = Function(...)``
- next_token_is_function = False
- # if we are in a function block we count the open
- # braces because otherwise it's impossible to
- # determine the end of the modifier context
- if in_function_block or in_property_block:
- if scanner.match == '(':
- brace_balance[0] += 1
- elif scanner.match == ')':
- brace_balance[0] -= 1
- elif scanner.match == '[':
- brace_balance[1] += 1
- elif scanner.match == ']':
- brace_balance[1] -= 1
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name == 'result':
- token = Name.Builtin.Pseudo
- elif lowercase_name in self.keywords:
- token = Keyword
- # if we are in a special block and a
- # block ending keyword occurs (and the parenthesis
- # is balanced) we end the current block context
- if (in_function_block or in_property_block) and \
- lowercase_name in self.BLOCK_KEYWORDS and \
- brace_balance[0] <= 0 and \
- brace_balance[1] <= 0:
- in_function_block = False
- in_property_block = False
- brace_balance = [0, 0]
- block_labels = set()
- if lowercase_name in ('label', 'goto'):
- collect_labels = True
- elif lowercase_name == 'asm':
- stack.append('asm')
- elif lowercase_name == 'property':
- in_property_block = True
- next_token_is_property = True
- elif lowercase_name in ('procedure', 'operator',
- 'function', 'constructor',
- 'destructor'):
- in_function_block = True
- next_token_is_function = True
- # we are in a function block and the current name
- # is in the set of registered modifiers. highlight
- # it as pseudo keyword
- elif in_function_block and \
- lowercase_name in self.FUNCTION_MODIFIERS:
- token = Keyword.Pseudo
- # if we are in a property highlight some more
- # modifiers
- elif in_property_block and \
- lowercase_name in ('read', 'write'):
- token = Keyword.Pseudo
- next_token_is_function = True
- # if the last iteration set next_token_is_function
- # to true we now want this name highlighted as
- # function. so do that and reset the state
- elif next_token_is_function:
- # Look if the next token is a dot. If yes it's
- # not a function, but a class name and the
- # part after the dot a function name
- if scanner.test(r'\s*\.\s*'):
- token = Name.Class
- # it's not a dot, our job is done
- else:
- token = Name.Function
- next_token_is_function = False
- # same for properties
- elif next_token_is_property:
- token = Name.Property
- next_token_is_property = False
- # Highlight this token as label and add it
- # to the list of known labels
- elif collect_labels:
- token = Name.Label
- block_labels.add(scanner.match.lower())
- # name is in list of known labels
- elif lowercase_name in block_labels:
- token = Name.Label
- elif lowercase_name in self.BUILTIN_TYPES:
- token = Keyword.Type
- elif lowercase_name in self.DIRECTIVES:
- token = Keyword.Pseudo
- # builtins are just builtins if the token
- # before isn't a dot
- elif not was_dot and lowercase_name in self.builtins:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
- token = String.Char
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- # if the stack depth is deeper than once, pop
- if len(stack) > 1:
- stack.pop()
- scanner.get_char()
-
- elif stack[-1] == 'string':
- if scanner.scan(r"''"):
- token = String.Escape
- elif scanner.scan(r"'"):
- token = String
- stack.pop()
- elif scanner.scan(r"[^']*"):
- token = String
- else:
- scanner.get_char()
- stack.pop()
-
- elif stack[-1] == 'asm':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'end'):
- token = Keyword
- stack.pop()
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
- token = Name.Label
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name in self.ASM_INSTRUCTIONS:
- token = Keyword
- elif lowercase_name in self.ASM_REGISTERS:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
- token = Operator
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- scanner.get_char()
- stack.pop()
-
- # save the dot!!!11
- if scanner.match.strip():
- was_dot = scanner.match == '.'
- yield scanner.start_pos, token, scanner.match or ''
-
-
-class AdaLexer(RegexLexer):
- """
- For Ada source code.
-
- .. versionadded:: 1.3
- """
-
- name = 'Ada'
- aliases = ['ada', 'ada95', 'ada2005']
- filenames = ['*.adb', '*.ads', '*.ada']
- mimetypes = ['text/x-ada']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'--.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
- (r'(subtype|type)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
- (r'task|protected', Keyword.Declaration),
- (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
- (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
- (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
- Comment.Preproc)),
- (r'(true|false|null)\b', Keyword.Constant),
- (words((
- 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count',
- 'Cursor', 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator',
- 'Integer', 'Long_Float', 'Long_Integer', 'Long_Long_Float',
- 'Long_Long_Integer', 'Natural', 'Positive', 'Reference_Type',
- 'Short_Float', 'Short_Integer', 'Short_Short_Float',
- 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'),
- suffix=r'\b'),
- Keyword.Type),
- (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word),
- (r'generic|private', Keyword.Declaration),
- (r'package', Keyword.Declaration, 'package'),
- (r'array\b', Keyword.Reserved, 'array_def'),
- (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'(\w+)(\s*)(:)(\s*)(constant)',
- bygroups(Name.Constant, Text, Punctuation, Text,
- Keyword.Reserved)),
- (r'<<\w+>>', Name.Label),
- (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
- bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
- (words((
- 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
- 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
- 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
- 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
- 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
- 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
- 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized',
- 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when',
- 'while', 'xor'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- (r'"[^"]*"', String),
- include('attribute'),
- include('numbers'),
- (r"'[^']'", String.Character),
- (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
- (r'[*<>+=/&-]', Operator),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
- (r'[0-9_]+\.[0-9_]*', Number.Float),
- (r'[0-9_]+', Number.Integer),
- ],
- 'attribute': [
- (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
- ],
- 'subprogram': [
- (r'\(', Punctuation, ('#pop', 'formal_part')),
- (r';', Punctuation, '#pop'),
- (r'is\b', Keyword.Reserved, '#pop'),
- (r'"[^"]+"|\w+', Name.Function),
- include('root'),
- ],
- 'end': [
- ('(if|case|record|loop|select)', Keyword.Reserved),
+
+ def __init__(self, **options):
+ Lexer.__init__(self, **options)
+ self.keywords = set()
+ if get_bool_opt(options, 'turbopascal', True):
+ self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
+ if get_bool_opt(options, 'delphi', True):
+ self.keywords.update(self.DELPHI_KEYWORDS)
+ if get_bool_opt(options, 'freepascal', True):
+ self.keywords.update(self.FREE_PASCAL_KEYWORDS)
+ self.builtins = set()
+ for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
+ self.builtins.update(self.BUILTIN_UNITS[unit])
+
+ def get_tokens_unprocessed(self, text):
+ scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
+ stack = ['initial']
+ in_function_block = False
+ in_property_block = False
+ was_dot = False
+ next_token_is_function = False
+ next_token_is_property = False
+ collect_labels = False
+ block_labels = set()
+ brace_balance = [0, 0]
+
+ while not scanner.eos:
+ token = Error
+
+ if stack[-1] == 'initial':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
+ token = Operator
+ # stop label highlighting on next ";"
+ if collect_labels and scanner.match == ';':
+ collect_labels = False
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ # abort function naming ``foo = Function(...)``
+ next_token_is_function = False
+ # if we are in a function block we count the open
+ # braces because otherwise it's impossible to
+ # determine the end of the modifier context
+ if in_function_block or in_property_block:
+ if scanner.match == '(':
+ brace_balance[0] += 1
+ elif scanner.match == ')':
+ brace_balance[0] -= 1
+ elif scanner.match == '[':
+ brace_balance[1] += 1
+ elif scanner.match == ']':
+ brace_balance[1] -= 1
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name == 'result':
+ token = Name.Builtin.Pseudo
+ elif lowercase_name in self.keywords:
+ token = Keyword
+ # if we are in a special block and a
+ # block ending keyword occurs (and the parenthesis
+ # is balanced) we end the current block context
+ if (in_function_block or in_property_block) and \
+ lowercase_name in self.BLOCK_KEYWORDS and \
+ brace_balance[0] <= 0 and \
+ brace_balance[1] <= 0:
+ in_function_block = False
+ in_property_block = False
+ brace_balance = [0, 0]
+ block_labels = set()
+ if lowercase_name in ('label', 'goto'):
+ collect_labels = True
+ elif lowercase_name == 'asm':
+ stack.append('asm')
+ elif lowercase_name == 'property':
+ in_property_block = True
+ next_token_is_property = True
+ elif lowercase_name in ('procedure', 'operator',
+ 'function', 'constructor',
+ 'destructor'):
+ in_function_block = True
+ next_token_is_function = True
+ # we are in a function block and the current name
+ # is in the set of registered modifiers. highlight
+ # it as pseudo keyword
+ elif in_function_block and \
+ lowercase_name in self.FUNCTION_MODIFIERS:
+ token = Keyword.Pseudo
+ # if we are in a property highlight some more
+ # modifiers
+ elif in_property_block and \
+ lowercase_name in ('read', 'write'):
+ token = Keyword.Pseudo
+ next_token_is_function = True
+ # if the last iteration set next_token_is_function
+ # to true we now want this name highlighted as
+ # function. so do that and reset the state
+ elif next_token_is_function:
+ # Look if the next token is a dot. If yes it's
+ # not a function, but a class name and the
+ # part after the dot a function name
+ if scanner.test(r'\s*\.\s*'):
+ token = Name.Class
+ # it's not a dot, our job is done
+ else:
+ token = Name.Function
+ next_token_is_function = False
+ # same for properties
+ elif next_token_is_property:
+ token = Name.Property
+ next_token_is_property = False
+ # Highlight this token as label and add it
+ # to the list of known labels
+ elif collect_labels:
+ token = Name.Label
+ block_labels.add(scanner.match.lower())
+ # name is in list of known labels
+ elif lowercase_name in block_labels:
+ token = Name.Label
+ elif lowercase_name in self.BUILTIN_TYPES:
+ token = Keyword.Type
+ elif lowercase_name in self.DIRECTIVES:
+ token = Keyword.Pseudo
+ # builtins are just builtins if the token
+ # before isn't a dot
+ elif not was_dot and lowercase_name in self.builtins:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
+ token = String.Char
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+ # if the stack depth is deeper than once, pop
+ if len(stack) > 1:
+ stack.pop()
+ scanner.get_char()
+
+ elif stack[-1] == 'string':
+ if scanner.scan(r"''"):
+ token = String.Escape
+ elif scanner.scan(r"'"):
+ token = String
+ stack.pop()
+ elif scanner.scan(r"[^']*"):
+ token = String
+ else:
+ scanner.get_char()
+ stack.pop()
+
+ elif stack[-1] == 'asm':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'end'):
+ token = Keyword
+ stack.pop()
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
+ token = Name.Label
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name in self.ASM_INSTRUCTIONS:
+ token = Keyword
+ elif lowercase_name in self.ASM_REGISTERS:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
+ token = Operator
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+ scanner.get_char()
+ stack.pop()
+
+ # save the dot!!!11
+ if scanner.match.strip():
+ was_dot = scanner.match == '.'
+ yield scanner.start_pos, token, scanner.match or ''
+
+
+class AdaLexer(RegexLexer):
+ """
+ For Ada source code.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Ada'
+ aliases = ['ada', 'ada95', 'ada2005']
+ filenames = ['*.adb', '*.ads', '*.ada']
+ mimetypes = ['text/x-ada']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
+ (r'(subtype|type)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
+ (r'task|protected', Keyword.Declaration),
+ (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
+ (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
+ (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
+ Comment.Preproc)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (words((
+ 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count',
+ 'Cursor', 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator',
+ 'Integer', 'Long_Float', 'Long_Integer', 'Long_Long_Float',
+ 'Long_Long_Integer', 'Natural', 'Positive', 'Reference_Type',
+ 'Short_Float', 'Short_Integer', 'Short_Short_Float',
+ 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'),
+ suffix=r'\b'),
+ Keyword.Type),
+ (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word),
+ (r'generic|private', Keyword.Declaration),
+ (r'package', Keyword.Declaration, 'package'),
+ (r'array\b', Keyword.Reserved, 'array_def'),
+ (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(\w+)(\s*)(:)(\s*)(constant)',
+ bygroups(Name.Constant, Text, Punctuation, Text,
+ Keyword.Reserved)),
+ (r'<<\w+>>', Name.Label),
+ (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
+ bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
+ (words((
+ 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
+ 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
+ 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
+ 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
+ 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
+ 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
+ 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized',
+ 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when',
+ 'while', 'xor'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (r'"[^"]*"', String),
+ include('attribute'),
+ include('numbers'),
+ (r"'[^']'", String.Character),
+ (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
+ (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
+ (r'[*<>+=/&-]', Operator),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
+ (r'[0-9_]+\.[0-9_]*', Number.Float),
+ (r'[0-9_]+', Number.Integer),
+ ],
+ 'attribute': [
+ (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
+ ],
+ 'subprogram': [
+ (r'\(', Punctuation, ('#pop', 'formal_part')),
+ (r';', Punctuation, '#pop'),
+ (r'is\b', Keyword.Reserved, '#pop'),
+ (r'"[^"]+"|\w+', Name.Function),
+ include('root'),
+ ],
+ 'end': [
+ ('(if|case|record|loop|select)', Keyword.Reserved),
(r'"[^"]+"|[\w.]+', Name.Function),
(r'\s+', Text),
- (';', Punctuation, '#pop'),
- ],
- 'type_def': [
- (r';', Punctuation, '#pop'),
- (r'\(', Punctuation, 'formal_part'),
- (r'with|and|use', Keyword.Reserved),
- (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
- (r'record\b', Keyword.Reserved, ('record_def')),
- (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
- include('root'),
- ],
- 'array_def': [
- (r';', Punctuation, '#pop'),
- (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)),
- include('root'),
- ],
- 'record_def': [
- (r'end record', Keyword.Reserved, '#pop'),
- include('root'),
- ],
- 'import': [
- (r'[\w.]+', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'formal_part': [
- (r'\)', Punctuation, '#pop'),
- (r'\w+', Name.Variable),
- (r',|:[^=]', Punctuation),
- (r'(in|not|null|out|access)\b', Keyword.Reserved),
- include('root'),
- ],
- 'package': [
- ('body', Keyword.Declaration),
+ (';', Punctuation, '#pop'),
+ ],
+ 'type_def': [
+ (r';', Punctuation, '#pop'),
+ (r'\(', Punctuation, 'formal_part'),
+ (r'with|and|use', Keyword.Reserved),
+ (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
+ (r'record\b', Keyword.Reserved, ('record_def')),
+ (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'array_def': [
+ (r';', Punctuation, '#pop'),
+ (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)),
+ include('root'),
+ ],
+ 'record_def': [
+ (r'end record', Keyword.Reserved, '#pop'),
+ include('root'),
+ ],
+ 'import': [
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'formal_part': [
+ (r'\)', Punctuation, '#pop'),
+ (r'\w+', Name.Variable),
+ (r',|:[^=]', Punctuation),
+ (r'(in|not|null|out|access)\b', Keyword.Reserved),
+ include('root'),
+ ],
+ 'package': [
+ ('body', Keyword.Declaration),
(r'is\s+new|renames', Keyword.Reserved),
- ('is', Keyword.Reserved, '#pop'),
- (';', Punctuation, '#pop'),
+ ('is', Keyword.Reserved, '#pop'),
+ (';', Punctuation, '#pop'),
(r'\(', Punctuation, 'package_instantiation'),
(r'([\w.]+)', Name.Class),
- include('root'),
- ],
- 'package_instantiation': [
- (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)),
- (r'[\w.\'"]', Text),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- }
+ include('root'),
+ ],
+ 'package_instantiation': [
+ (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)),
+ (r'[\w.\'"]', Text),
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ }
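A similar usage aside for the DelphiLexer restored above (not part of the diff itself): its docstring lists the turbopascal, delphi, freepascal and units options, which are passed as plain keyword arguments to the lexer. A minimal sketch, where the option values and the Pascal snippet are illustrative assumptions:

    from pygments.lexers import DelphiLexer

    # Option names follow the DelphiLexer docstring; the chosen values and the
    # source snippet are assumptions for illustration only.
    lexer = DelphiLexer(freepascal=False, units=['System', 'SysUtils'])

    code = "begin WriteLn('hello'); end."
    for index, token, value in lexer.get_tokens_unprocessed(code):
        print(index, token, repr(value))

Restricting units narrows which identifiers DelphiLexer.__init__ collects into self.builtins, and therefore which names get highlighted as Name.Builtin by get_tokens_unprocessed.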
diff --git a/contrib/python/Pygments/py2/pygments/lexers/pawn.py b/contrib/python/Pygments/py2/pygments/lexers/pawn.py
index 3cdfbd03e8..768a0a8c8d 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/pawn.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/pawn.py
@@ -1,91 +1,91 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.pawn
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Pawn languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.pawn
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Pawn languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-from pygments.util import get_bool_opt
-
-__all__ = ['SourcePawnLexer', 'PawnLexer']
-
-
-class SourcePawnLexer(RegexLexer):
- """
- For SourcePawn source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
- name = 'SourcePawn'
- aliases = ['sp']
- filenames = ['*.sp']
- mimetypes = ['text/x-sourcepawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.util import get_bool_opt
+
+__all__ = ['SourcePawnLexer', 'PawnLexer']
+
+
+class SourcePawnLexer(RegexLexer):
+ """
+ For SourcePawn source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+ name = 'SourcePawn'
+ aliases = ['sp']
+ filenames = ['*.sp']
+ mimetypes = ['text/x-sourcepawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
(r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(case|const|continue|native|'
- r'default|else|enum|for|if|new|operator|'
- r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(case|const|continue|native|'
+ r'default|else|enum|for|if|new|operator|'
+ r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
@@ -105,95 +105,95 @@ class SourcePawnLexer(RegexLexer):
'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
-
- def __init__(self, **options):
- self.smhighlighting = get_bool_opt(options,
- 'sourcemod', True)
-
- self._functions = set()
- if self.smhighlighting:
- from pygments.lexers._sourcemod_builtins import FUNCTIONS
- self._functions.update(FUNCTIONS)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.smhighlighting:
- if value in self.SM_TYPES:
- token = Keyword.Type
- elif value in self._functions:
- token = Name.Builtin
- yield index, token, value
-
-
-class PawnLexer(RegexLexer):
- """
- For Pawn source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pawn'
- aliases = ['pawn']
- filenames = ['*.p', '*.pwn', '*.inc']
- mimetypes = ['text/x-pawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
+
+ def __init__(self, **options):
+ self.smhighlighting = get_bool_opt(options,
+ 'sourcemod', True)
+
+ self._functions = set()
+ if self.smhighlighting:
+ from pygments.lexers._sourcemod_builtins import FUNCTIONS
+ self._functions.update(FUNCTIONS)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.smhighlighting:
+ if value in self.SM_TYPES:
+ token = Keyword.Type
+ elif value in self._functions:
+ token = Name.Builtin
+ yield index, token, value
+
+
+class PawnLexer(RegexLexer):
+ """
+ For Pawn source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pawn'
+ aliases = ['pawn']
+ filenames = ['*.p', '*.pwn', '*.inc']
+ mimetypes = ['text/x-pawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
(r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(switch|case|default|const|new|static|char|continue|break|'
- r'if|else|for|while|do|operator|enum|'
- r'public|return|sizeof|tagof|state|goto)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(switch|case|default|const|new|static|char|continue|break|'
+ r'if|else|for|while|do|operator|enum|'
+ r'public|return|sizeof|tagof|state|goto)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
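Note: both lexers in pawn.py are plain RegexLexers; SourcePawnLexer additionally overrides get_tokens_unprocessed to promote Name tokens that appear in SM_TYPES or in the _sourcemod_builtins FUNCTIONS list, controlled by the `sourcemod` option (default True). A hedged usage sketch relying only on the public Pygments API visible in this diff; the sample Pawn snippet is illustrative, and which names end up as builtins depends on the bundled FUNCTIONS list:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.pawn import SourcePawnLexer, PawnLexer

    code = '''
    public OnPluginStart()
    {
        PrintToServer("Hello, world!");
    }
    '''

    # With sourcemod=True (the default), names found in the SourceMod builtin
    # list (e.g. PrintToServer, assuming it is in FUNCTIONS) are emitted as
    # Name.Builtin instead of plain Name.
    print(highlight(code, SourcePawnLexer(sourcemod=True), TerminalFormatter()))

    # Plain Pawn highlighting, without the SourceMod-specific builtin list.
    print(highlight(code, PawnLexer(), TerminalFormatter()))
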
diff --git a/contrib/python/Pygments/py2/pygments/lexers/perl.py b/contrib/python/Pygments/py2/pygments/lexers/perl.py
index 8f30c186e2..0390b10e1a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/perl.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/perl.py
@@ -1,136 +1,136 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.perl
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Perl and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.perl
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Perl and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- using, this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.util import shebang_matches
-
-__all__ = ['PerlLexer', 'Perl6Lexer']
-
-
-class PerlLexer(RegexLexer):
- """
- For `Perl <http://www.perl.org>`_ source code.
- """
-
- name = 'Perl'
- aliases = ['perl', 'pl']
- filenames = ['*.pl', '*.pm', '*.t']
- mimetypes = ['text/x-perl', 'application/x-perl']
-
- flags = re.DOTALL | re.MULTILINE
- # TODO: give this to a perl guy who knows how to parse perl...
- tokens = {
- 'balanced-regex': [
- (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\A\#!.+?$', Comment.Hashbang),
- (r'\#.*?$', Comment.Single),
- (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
- (words((
- 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
- 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ using, this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import shebang_matches
+
+__all__ = ['PerlLexer', 'Perl6Lexer']
+
+
+class PerlLexer(RegexLexer):
+ """
+ For `Perl <http://www.perl.org>`_ source code.
+ """
+
+ name = 'Perl'
+ aliases = ['perl', 'pl']
+ filenames = ['*.pl', '*.pm', '*.t']
+ mimetypes = ['text/x-perl', 'application/x-perl']
+
+ flags = re.DOTALL | re.MULTILINE
+ # TODO: give this to a perl guy who knows how to parse perl...
+ tokens = {
+ 'balanced-regex': [
+ (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
+ ],
+ 'root': [
+ (r'\A\#!.+?$', Comment.Hashbang),
+ (r'\#.*?$', Comment.Single),
+ (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
+ (words((
+ 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
+ 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
'unless', 'until', 'while', 'print', 'new', 'BEGIN',
- 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
- Keyword),
- (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
- bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
- (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
- # common delimiters
- (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
- String.Regex),
- (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
- (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
- (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
- String.Regex),
- (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
- String.Regex),
- # balanced delimiters
- (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
- 'balanced-regex'),
- (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
- 'balanced-regex'),
-
- (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
- (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
- String.Regex),
- (r'\s+', Text),
- (words((
- 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
- 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
- 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
- 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
- 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
- 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
- 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
- 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
- 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
- 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
- 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
- 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
- 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
+ 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
+ bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
+ (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+ # common delimiters
+ (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
+ String.Regex),
+ (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
+ (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
+ (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
+ String.Regex),
+ (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
+ String.Regex),
+ # balanced delimiters
+ (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
+ 'balanced-regex'),
+ (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
+ 'balanced-regex'),
+
+ (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
+ (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+ (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
+ String.Regex),
+ (r'\s+', Text),
+ (words((
+ 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
+ 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
+ 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
+ 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
+ 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
+ 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
+ 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
+ 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
+ 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
+ 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
+ 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
+ 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
+ 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'oct', 'open',
'opendir', 'ord', 'our', 'pack', 'pipe', 'pop', 'pos', 'printf',
- 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
+ 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename',
- 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
- 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
- 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
- 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
- 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
- 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
- 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
- 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
- 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
- Name.Builtin),
- (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
+ 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
+ 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
+ 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
+ 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
+ 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
+ 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
+ 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
+ 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
+ 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
+ Name.Builtin),
+ (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
(r'(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)',
bygroups(String, String, String.Delimiter, String, String.Delimiter, Text)),
- (r'__END__', Comment.Preproc, 'end-part'),
- (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
- (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
- (r'[$@%#]+', Name.Variable, 'varname'),
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
- (r'<([^\s>]+)>', String.Regex),
- (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
- (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
- (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
- (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
- (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
+ (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
+ (r'[$@%#]+', Name.Variable, 'varname'),
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
+ (r'<([^\s>]+)>', String.Regex),
+ (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
+ (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
+ (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
+ (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
+ (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
(r'(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
bygroups(Keyword, Text, Name.Namespace)),
(r'(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
@@ -139,482 +139,482 @@ class PerlLexer(RegexLexer):
(words((
'no', 'package', 'require', 'use'), suffix=r'\b'),
Keyword),
- (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&^|!\\~]=?', Operator),
- (r'[()\[\]:;,<>/?{}]', Punctuation), # yes, there's no shortage
- # of punctuation in Perl!
- (r'(?=\w)', Name, 'name'),
- ],
- 'format': [
- (r'\.\n', String.Interpol, '#pop'),
- (r'[^\n]*\n', String.Interpol),
- ],
- 'varname': [
- (r'\s+', Text),
- (r'\{', Punctuation, '#pop'), # hash syntax?
- (r'\)|,', Punctuation, '#pop'), # argument specifier
- (r'\w+::', Name.Namespace),
- (r'[\w:]+', Name.Variable, '#pop'),
- ],
- 'name': [
+ (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&^|!\\~]=?', Operator),
+ (r'[()\[\]:;,<>/?{}]', Punctuation), # yes, there's no shortage
+ # of punctuation in Perl!
+ (r'(?=\w)', Name, 'name'),
+ ],
+ 'format': [
+ (r'\.\n', String.Interpol, '#pop'),
+ (r'[^\n]*\n', String.Interpol),
+ ],
+ 'varname': [
+ (r'\s+', Text),
+ (r'\{', Punctuation, '#pop'), # hash syntax?
+ (r'\)|,', Punctuation, '#pop'), # argument specifier
+ (r'\w+::', Name.Namespace),
+ (r'[\w:]+', Name.Variable, '#pop'),
+ ],
+ 'name': [
(r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)', Name.Namespace, '#pop'),
(r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::', Name.Namespace, '#pop'),
- (r'[\w:]+', Name, '#pop'),
- (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
- (r'(?=\W)', Text, '#pop'),
- ],
- 'funcname': [
- (r'[a-zA-Z_]\w*[!?]?', Name.Function),
- (r'\s+', Text),
- # argument declaration
- (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
- (r';', Punctuation, '#pop'),
- (r'.*?\{', Punctuation, '#pop'),
- ],
- 'cb-string': [
- (r'\\[{}\\]', String.Other),
- (r'\\', String.Other),
- (r'\{', String.Other, 'cb-string'),
- (r'\}', String.Other, '#pop'),
- (r'[^{}\\]+', String.Other)
- ],
- 'rb-string': [
- (r'\\[()\\]', String.Other),
- (r'\\', String.Other),
- (r'\(', String.Other, 'rb-string'),
- (r'\)', String.Other, '#pop'),
- (r'[^()]+', String.Other)
- ],
- 'sb-string': [
- (r'\\[\[\]\\]', String.Other),
- (r'\\', String.Other),
- (r'\[', String.Other, 'sb-string'),
- (r'\]', String.Other, '#pop'),
- (r'[^\[\]]+', String.Other)
- ],
- 'lt-string': [
- (r'\\[<>\\]', String.Other),
- (r'\\', String.Other),
- (r'\<', String.Other, 'lt-string'),
- (r'\>', String.Other, '#pop'),
- (r'[^<>]+', String.Other)
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'perl'):
- return True
+ (r'[\w:]+', Name, '#pop'),
+ (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
+ (r'(?=\W)', Text, '#pop'),
+ ],
+ 'funcname': [
+ (r'[a-zA-Z_]\w*[!?]?', Name.Function),
+ (r'\s+', Text),
+ # argument declaration
+ (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
+ (r';', Punctuation, '#pop'),
+ (r'.*?\{', Punctuation, '#pop'),
+ ],
+ 'cb-string': [
+ (r'\\[{}\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\{', String.Other, 'cb-string'),
+ (r'\}', String.Other, '#pop'),
+ (r'[^{}\\]+', String.Other)
+ ],
+ 'rb-string': [
+ (r'\\[()\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\(', String.Other, 'rb-string'),
+ (r'\)', String.Other, '#pop'),
+ (r'[^()]+', String.Other)
+ ],
+ 'sb-string': [
+ (r'\\[\[\]\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\[', String.Other, 'sb-string'),
+ (r'\]', String.Other, '#pop'),
+ (r'[^\[\]]+', String.Other)
+ ],
+ 'lt-string': [
+ (r'\\[<>\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\<', String.Other, 'lt-string'),
+ (r'\>', String.Other, '#pop'),
+ (r'[^<>]+', String.Other)
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if shebang_matches(text, r'perl'):
+ return True
if re.search(r'(?:my|our)\s+[$@%(]', text):
- return 0.9
-
-
-class Perl6Lexer(ExtendedRegexLexer):
- """
- For `Perl 6 <http://www.perl6.org>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Perl6'
- aliases = ['perl6', 'pl6']
- filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
- '*.6pm', '*.p6m', '*.pm6', '*.t']
- mimetypes = ['text/x-perl6', 'application/x-perl6']
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
+ return 0.9
+
+
+class Perl6Lexer(ExtendedRegexLexer):
+ """
+ For `Perl 6 <http://www.perl6.org>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Perl6'
+ aliases = ['perl6', 'pl6']
+ filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
+ '*.6pm', '*.p6m', '*.pm6', '*.t']
+ mimetypes = ['text/x-perl6', 'application/x-perl6']
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
PERL6_IDENTIFIER_RANGE = r"['\w:-]"
-
- PERL6_KEYWORDS = (
- 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT',
- 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP',
- 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but',
- 'cached', 'category', 'class', 'constant', 'contend', 'continue',
- 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else',
- 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for',
- 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline',
- 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro',
- 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of',
- 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec',
- 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat',
- 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw',
- 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede',
- 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary',
- 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will',
- )
-
- PERL6_BUILTINS = (
- 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH',
- 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh',
- 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh',
- 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by',
- 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat',
- 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot',
- 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes',
- 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech',
- 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag',
- 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval',
- 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists',
- 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo',
- 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw',
- 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix',
- 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator',
- 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst',
- 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map',
- 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc',
- 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not',
- 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord',
- 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi',
- 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix',
- 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi',
- 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce',
- 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round',
- 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say',
- 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature',
- 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice',
- 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ',
- 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to',
- 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc',
- 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack',
- 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec',
- 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip',
- )
-
- PERL6_BUILTIN_CLASSES = (
- 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit',
- 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class',
- 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception',
- 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing',
- 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet',
- 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method',
- 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair',
- 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex',
- 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen',
- 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void',
- 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32',
- 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2',
- 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2',
- 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2',
- 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8',
- )
-
- PERL6_OPERATORS = (
- 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
- 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
- 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
- '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
- '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
- 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
- '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
- '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
- '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
- 'not', '<==', '==>', '<<==', '==>>',
- )
-
- # Perl 6 has a *lot* of possible bracketing characters
- # this list was lifted from STD.pm6 (https://github.com/perl6/std)
- PERL6_BRACKETS = {
- u'\u0028': u'\u0029', u'\u003c': u'\u003e', u'\u005b': u'\u005d',
- u'\u007b': u'\u007d', u'\u00ab': u'\u00bb', u'\u0f3a': u'\u0f3b',
- u'\u0f3c': u'\u0f3d', u'\u169b': u'\u169c', u'\u2018': u'\u2019',
- u'\u201a': u'\u2019', u'\u201b': u'\u2019', u'\u201c': u'\u201d',
- u'\u201e': u'\u201d', u'\u201f': u'\u201d', u'\u2039': u'\u203a',
- u'\u2045': u'\u2046', u'\u207d': u'\u207e', u'\u208d': u'\u208e',
- u'\u2208': u'\u220b', u'\u2209': u'\u220c', u'\u220a': u'\u220d',
- u'\u2215': u'\u29f5', u'\u223c': u'\u223d', u'\u2243': u'\u22cd',
- u'\u2252': u'\u2253', u'\u2254': u'\u2255', u'\u2264': u'\u2265',
- u'\u2266': u'\u2267', u'\u2268': u'\u2269', u'\u226a': u'\u226b',
- u'\u226e': u'\u226f', u'\u2270': u'\u2271', u'\u2272': u'\u2273',
- u'\u2274': u'\u2275', u'\u2276': u'\u2277', u'\u2278': u'\u2279',
- u'\u227a': u'\u227b', u'\u227c': u'\u227d', u'\u227e': u'\u227f',
- u'\u2280': u'\u2281', u'\u2282': u'\u2283', u'\u2284': u'\u2285',
- u'\u2286': u'\u2287', u'\u2288': u'\u2289', u'\u228a': u'\u228b',
- u'\u228f': u'\u2290', u'\u2291': u'\u2292', u'\u2298': u'\u29b8',
- u'\u22a2': u'\u22a3', u'\u22a6': u'\u2ade', u'\u22a8': u'\u2ae4',
- u'\u22a9': u'\u2ae3', u'\u22ab': u'\u2ae5', u'\u22b0': u'\u22b1',
- u'\u22b2': u'\u22b3', u'\u22b4': u'\u22b5', u'\u22b6': u'\u22b7',
- u'\u22c9': u'\u22ca', u'\u22cb': u'\u22cc', u'\u22d0': u'\u22d1',
- u'\u22d6': u'\u22d7', u'\u22d8': u'\u22d9', u'\u22da': u'\u22db',
- u'\u22dc': u'\u22dd', u'\u22de': u'\u22df', u'\u22e0': u'\u22e1',
- u'\u22e2': u'\u22e3', u'\u22e4': u'\u22e5', u'\u22e6': u'\u22e7',
- u'\u22e8': u'\u22e9', u'\u22ea': u'\u22eb', u'\u22ec': u'\u22ed',
- u'\u22f0': u'\u22f1', u'\u22f2': u'\u22fa', u'\u22f3': u'\u22fb',
- u'\u22f4': u'\u22fc', u'\u22f6': u'\u22fd', u'\u22f7': u'\u22fe',
- u'\u2308': u'\u2309', u'\u230a': u'\u230b', u'\u2329': u'\u232a',
- u'\u23b4': u'\u23b5', u'\u2768': u'\u2769', u'\u276a': u'\u276b',
- u'\u276c': u'\u276d', u'\u276e': u'\u276f', u'\u2770': u'\u2771',
- u'\u2772': u'\u2773', u'\u2774': u'\u2775', u'\u27c3': u'\u27c4',
- u'\u27c5': u'\u27c6', u'\u27d5': u'\u27d6', u'\u27dd': u'\u27de',
- u'\u27e2': u'\u27e3', u'\u27e4': u'\u27e5', u'\u27e6': u'\u27e7',
- u'\u27e8': u'\u27e9', u'\u27ea': u'\u27eb', u'\u2983': u'\u2984',
- u'\u2985': u'\u2986', u'\u2987': u'\u2988', u'\u2989': u'\u298a',
- u'\u298b': u'\u298c', u'\u298d': u'\u298e', u'\u298f': u'\u2990',
- u'\u2991': u'\u2992', u'\u2993': u'\u2994', u'\u2995': u'\u2996',
- u'\u2997': u'\u2998', u'\u29c0': u'\u29c1', u'\u29c4': u'\u29c5',
- u'\u29cf': u'\u29d0', u'\u29d1': u'\u29d2', u'\u29d4': u'\u29d5',
- u'\u29d8': u'\u29d9', u'\u29da': u'\u29db', u'\u29f8': u'\u29f9',
- u'\u29fc': u'\u29fd', u'\u2a2b': u'\u2a2c', u'\u2a2d': u'\u2a2e',
- u'\u2a34': u'\u2a35', u'\u2a3c': u'\u2a3d', u'\u2a64': u'\u2a65',
- u'\u2a79': u'\u2a7a', u'\u2a7d': u'\u2a7e', u'\u2a7f': u'\u2a80',
- u'\u2a81': u'\u2a82', u'\u2a83': u'\u2a84', u'\u2a8b': u'\u2a8c',
- u'\u2a91': u'\u2a92', u'\u2a93': u'\u2a94', u'\u2a95': u'\u2a96',
- u'\u2a97': u'\u2a98', u'\u2a99': u'\u2a9a', u'\u2a9b': u'\u2a9c',
- u'\u2aa1': u'\u2aa2', u'\u2aa6': u'\u2aa7', u'\u2aa8': u'\u2aa9',
- u'\u2aaa': u'\u2aab', u'\u2aac': u'\u2aad', u'\u2aaf': u'\u2ab0',
- u'\u2ab3': u'\u2ab4', u'\u2abb': u'\u2abc', u'\u2abd': u'\u2abe',
- u'\u2abf': u'\u2ac0', u'\u2ac1': u'\u2ac2', u'\u2ac3': u'\u2ac4',
- u'\u2ac5': u'\u2ac6', u'\u2acd': u'\u2ace', u'\u2acf': u'\u2ad0',
- u'\u2ad1': u'\u2ad2', u'\u2ad3': u'\u2ad4', u'\u2ad5': u'\u2ad6',
- u'\u2aec': u'\u2aed', u'\u2af7': u'\u2af8', u'\u2af9': u'\u2afa',
- u'\u2e02': u'\u2e03', u'\u2e04': u'\u2e05', u'\u2e09': u'\u2e0a',
- u'\u2e0c': u'\u2e0d', u'\u2e1c': u'\u2e1d', u'\u2e20': u'\u2e21',
- u'\u3008': u'\u3009', u'\u300a': u'\u300b', u'\u300c': u'\u300d',
- u'\u300e': u'\u300f', u'\u3010': u'\u3011', u'\u3014': u'\u3015',
- u'\u3016': u'\u3017', u'\u3018': u'\u3019', u'\u301a': u'\u301b',
- u'\u301d': u'\u301e', u'\ufd3e': u'\ufd3f', u'\ufe17': u'\ufe18',
- u'\ufe35': u'\ufe36', u'\ufe37': u'\ufe38', u'\ufe39': u'\ufe3a',
- u'\ufe3b': u'\ufe3c', u'\ufe3d': u'\ufe3e', u'\ufe3f': u'\ufe40',
- u'\ufe41': u'\ufe42', u'\ufe43': u'\ufe44', u'\ufe47': u'\ufe48',
- u'\ufe59': u'\ufe5a', u'\ufe5b': u'\ufe5c', u'\ufe5d': u'\ufe5e',
- u'\uff08': u'\uff09', u'\uff1c': u'\uff1e', u'\uff3b': u'\uff3d',
- u'\uff5b': u'\uff5d', u'\uff5f': u'\uff60', u'\uff62': u'\uff63',
- }
-
- def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
- if boundary_regex_fragment is None:
- return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
- suffix + r')\b'
- else:
- return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
- r'|'.join(re.escape(x) for x in words) + r')' + suffix + r'(?!' + \
- boundary_regex_fragment + r')'
-
- def brackets_callback(token_class):
- def callback(lexer, match, context):
- groups = match.groupdict()
- opening_chars = groups['delimiter']
- n_chars = len(opening_chars)
- adverbs = groups.get('adverbs')
-
- closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
- text = context.text
-
- if closer is None: # it's not a mirrored character, which means we
- # just need to look for the next occurrence
-
- end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
- else: # we need to look for the corresponding closing character,
- # keep nesting in mind
- closing_chars = closer * n_chars
- nesting_level = 1
-
- search_pos = match.start('delimiter')
-
- while nesting_level > 0:
- next_open_pos = text.find(opening_chars, search_pos + n_chars)
- next_close_pos = text.find(closing_chars, search_pos + n_chars)
-
- if next_close_pos == -1:
- next_close_pos = len(text)
- nesting_level = 0
- elif next_open_pos != -1 and next_open_pos < next_close_pos:
- nesting_level += 1
- search_pos = next_open_pos
- else: # next_close_pos < next_open_pos
- nesting_level -= 1
- search_pos = next_close_pos
-
- end_pos = next_close_pos
-
- if end_pos < 0: # if we didn't find a closer, just highlight the
- # rest of the text in this class
- end_pos = len(text)
-
- if adverbs is not None and re.search(r':to\b', adverbs):
- heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
- end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
- r'\s*$', text[end_pos:], re.MULTILINE)
-
- if end_heredoc:
- end_pos += end_heredoc.end()
- else:
- end_pos = len(text)
-
- yield match.start(), token_class, text[match.start():end_pos + n_chars]
- context.pos = end_pos + n_chars
-
- return callback
-
- def opening_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
-
- # if we encounter an opening brace and we're one level
- # below a token state, it means we need to increment
- # the nesting level for braces so we know later when
- # we should return to the token rules.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level += 1
-
- def closing_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
-
- # if we encounter a free closing brace and we're one level
- # below a token state, it means we need to check the nesting
- # level to see if we need to return to the token state.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level -= 1
- if context.perl6_token_nesting_level == 0:
- stack.pop()
-
- def embedded_perl6_callback(lexer, match, context):
- context.perl6_token_nesting_level = 1
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
- context.stack.append('root')
-
- # If you're modifying these rules, be careful if you need to process '{' or '}'
- # characters. We have special logic for processing these characters (due to the fact
- # that you can nest Perl 6 code in regex blocks), so if you need to process one of
- # them, make sure you also process the corresponding one!
- tokens = {
- 'common': [
- (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
- brackets_callback(Comment.Multiline)),
+
+ PERL6_KEYWORDS = (
+ 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT',
+ 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP',
+ 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but',
+ 'cached', 'category', 'class', 'constant', 'contend', 'continue',
+ 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else',
+ 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for',
+ 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline',
+ 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro',
+ 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of',
+ 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec',
+ 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat',
+ 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw',
+ 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede',
+ 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary',
+ 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will',
+ )
+
+ PERL6_BUILTINS = (
+ 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH',
+ 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh',
+ 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh',
+ 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by',
+ 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat',
+ 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot',
+ 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes',
+ 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech',
+ 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag',
+ 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval',
+ 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists',
+ 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo',
+ 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw',
+ 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix',
+ 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator',
+ 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst',
+ 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map',
+ 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc',
+ 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not',
+ 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord',
+ 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi',
+ 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix',
+ 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi',
+ 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce',
+ 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round',
+ 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say',
+ 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature',
+ 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice',
+ 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ',
+ 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to',
+ 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc',
+ 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack',
+ 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec',
+ 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip',
+ )
+
+ PERL6_BUILTIN_CLASSES = (
+ 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit',
+ 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class',
+ 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception',
+ 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing',
+ 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet',
+ 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method',
+ 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair',
+ 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex',
+ 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen',
+ 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void',
+ 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32',
+ 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2',
+ 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2',
+ 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2',
+ 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8',
+ )
+
+ PERL6_OPERATORS = (
+ 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
+ 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
+ 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
+ '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
+ '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
+ 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
+ '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
+ '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
+ '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
+ 'not', '<==', '==>', '<<==', '==>>',
+ )
+
+ # Perl 6 has a *lot* of possible bracketing characters
+ # this list was lifted from STD.pm6 (https://github.com/perl6/std)
+ PERL6_BRACKETS = {
+ u'\u0028': u'\u0029', u'\u003c': u'\u003e', u'\u005b': u'\u005d',
+ u'\u007b': u'\u007d', u'\u00ab': u'\u00bb', u'\u0f3a': u'\u0f3b',
+ u'\u0f3c': u'\u0f3d', u'\u169b': u'\u169c', u'\u2018': u'\u2019',
+ u'\u201a': u'\u2019', u'\u201b': u'\u2019', u'\u201c': u'\u201d',
+ u'\u201e': u'\u201d', u'\u201f': u'\u201d', u'\u2039': u'\u203a',
+ u'\u2045': u'\u2046', u'\u207d': u'\u207e', u'\u208d': u'\u208e',
+ u'\u2208': u'\u220b', u'\u2209': u'\u220c', u'\u220a': u'\u220d',
+ u'\u2215': u'\u29f5', u'\u223c': u'\u223d', u'\u2243': u'\u22cd',
+ u'\u2252': u'\u2253', u'\u2254': u'\u2255', u'\u2264': u'\u2265',
+ u'\u2266': u'\u2267', u'\u2268': u'\u2269', u'\u226a': u'\u226b',
+ u'\u226e': u'\u226f', u'\u2270': u'\u2271', u'\u2272': u'\u2273',
+ u'\u2274': u'\u2275', u'\u2276': u'\u2277', u'\u2278': u'\u2279',
+ u'\u227a': u'\u227b', u'\u227c': u'\u227d', u'\u227e': u'\u227f',
+ u'\u2280': u'\u2281', u'\u2282': u'\u2283', u'\u2284': u'\u2285',
+ u'\u2286': u'\u2287', u'\u2288': u'\u2289', u'\u228a': u'\u228b',
+ u'\u228f': u'\u2290', u'\u2291': u'\u2292', u'\u2298': u'\u29b8',
+ u'\u22a2': u'\u22a3', u'\u22a6': u'\u2ade', u'\u22a8': u'\u2ae4',
+ u'\u22a9': u'\u2ae3', u'\u22ab': u'\u2ae5', u'\u22b0': u'\u22b1',
+ u'\u22b2': u'\u22b3', u'\u22b4': u'\u22b5', u'\u22b6': u'\u22b7',
+ u'\u22c9': u'\u22ca', u'\u22cb': u'\u22cc', u'\u22d0': u'\u22d1',
+ u'\u22d6': u'\u22d7', u'\u22d8': u'\u22d9', u'\u22da': u'\u22db',
+ u'\u22dc': u'\u22dd', u'\u22de': u'\u22df', u'\u22e0': u'\u22e1',
+ u'\u22e2': u'\u22e3', u'\u22e4': u'\u22e5', u'\u22e6': u'\u22e7',
+ u'\u22e8': u'\u22e9', u'\u22ea': u'\u22eb', u'\u22ec': u'\u22ed',
+ u'\u22f0': u'\u22f1', u'\u22f2': u'\u22fa', u'\u22f3': u'\u22fb',
+ u'\u22f4': u'\u22fc', u'\u22f6': u'\u22fd', u'\u22f7': u'\u22fe',
+ u'\u2308': u'\u2309', u'\u230a': u'\u230b', u'\u2329': u'\u232a',
+ u'\u23b4': u'\u23b5', u'\u2768': u'\u2769', u'\u276a': u'\u276b',
+ u'\u276c': u'\u276d', u'\u276e': u'\u276f', u'\u2770': u'\u2771',
+ u'\u2772': u'\u2773', u'\u2774': u'\u2775', u'\u27c3': u'\u27c4',
+ u'\u27c5': u'\u27c6', u'\u27d5': u'\u27d6', u'\u27dd': u'\u27de',
+ u'\u27e2': u'\u27e3', u'\u27e4': u'\u27e5', u'\u27e6': u'\u27e7',
+ u'\u27e8': u'\u27e9', u'\u27ea': u'\u27eb', u'\u2983': u'\u2984',
+ u'\u2985': u'\u2986', u'\u2987': u'\u2988', u'\u2989': u'\u298a',
+ u'\u298b': u'\u298c', u'\u298d': u'\u298e', u'\u298f': u'\u2990',
+ u'\u2991': u'\u2992', u'\u2993': u'\u2994', u'\u2995': u'\u2996',
+ u'\u2997': u'\u2998', u'\u29c0': u'\u29c1', u'\u29c4': u'\u29c5',
+ u'\u29cf': u'\u29d0', u'\u29d1': u'\u29d2', u'\u29d4': u'\u29d5',
+ u'\u29d8': u'\u29d9', u'\u29da': u'\u29db', u'\u29f8': u'\u29f9',
+ u'\u29fc': u'\u29fd', u'\u2a2b': u'\u2a2c', u'\u2a2d': u'\u2a2e',
+ u'\u2a34': u'\u2a35', u'\u2a3c': u'\u2a3d', u'\u2a64': u'\u2a65',
+ u'\u2a79': u'\u2a7a', u'\u2a7d': u'\u2a7e', u'\u2a7f': u'\u2a80',
+ u'\u2a81': u'\u2a82', u'\u2a83': u'\u2a84', u'\u2a8b': u'\u2a8c',
+ u'\u2a91': u'\u2a92', u'\u2a93': u'\u2a94', u'\u2a95': u'\u2a96',
+ u'\u2a97': u'\u2a98', u'\u2a99': u'\u2a9a', u'\u2a9b': u'\u2a9c',
+ u'\u2aa1': u'\u2aa2', u'\u2aa6': u'\u2aa7', u'\u2aa8': u'\u2aa9',
+ u'\u2aaa': u'\u2aab', u'\u2aac': u'\u2aad', u'\u2aaf': u'\u2ab0',
+ u'\u2ab3': u'\u2ab4', u'\u2abb': u'\u2abc', u'\u2abd': u'\u2abe',
+ u'\u2abf': u'\u2ac0', u'\u2ac1': u'\u2ac2', u'\u2ac3': u'\u2ac4',
+ u'\u2ac5': u'\u2ac6', u'\u2acd': u'\u2ace', u'\u2acf': u'\u2ad0',
+ u'\u2ad1': u'\u2ad2', u'\u2ad3': u'\u2ad4', u'\u2ad5': u'\u2ad6',
+ u'\u2aec': u'\u2aed', u'\u2af7': u'\u2af8', u'\u2af9': u'\u2afa',
+ u'\u2e02': u'\u2e03', u'\u2e04': u'\u2e05', u'\u2e09': u'\u2e0a',
+ u'\u2e0c': u'\u2e0d', u'\u2e1c': u'\u2e1d', u'\u2e20': u'\u2e21',
+ u'\u3008': u'\u3009', u'\u300a': u'\u300b', u'\u300c': u'\u300d',
+ u'\u300e': u'\u300f', u'\u3010': u'\u3011', u'\u3014': u'\u3015',
+ u'\u3016': u'\u3017', u'\u3018': u'\u3019', u'\u301a': u'\u301b',
+ u'\u301d': u'\u301e', u'\ufd3e': u'\ufd3f', u'\ufe17': u'\ufe18',
+ u'\ufe35': u'\ufe36', u'\ufe37': u'\ufe38', u'\ufe39': u'\ufe3a',
+ u'\ufe3b': u'\ufe3c', u'\ufe3d': u'\ufe3e', u'\ufe3f': u'\ufe40',
+ u'\ufe41': u'\ufe42', u'\ufe43': u'\ufe44', u'\ufe47': u'\ufe48',
+ u'\ufe59': u'\ufe5a', u'\ufe5b': u'\ufe5c', u'\ufe5d': u'\ufe5e',
+ u'\uff08': u'\uff09', u'\uff1c': u'\uff1e', u'\uff3b': u'\uff3d',
+ u'\uff5b': u'\uff5d', u'\uff5f': u'\uff60', u'\uff62': u'\uff63',
+ }
+
+ def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
+ if boundary_regex_fragment is None:
+ return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
+ suffix + r')\b'
+ else:
+ return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
+ r'|'.join(re.escape(x) for x in words) + r')' + suffix + r'(?!' + \
+ boundary_regex_fragment + r')'
+
+ def brackets_callback(token_class):
+ def callback(lexer, match, context):
+ groups = match.groupdict()
+ opening_chars = groups['delimiter']
+ n_chars = len(opening_chars)
+ adverbs = groups.get('adverbs')
+
+ closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
+ text = context.text
+
+ if closer is None: # it's not a mirrored character, which means we
+ # just need to look for the next occurrence
+
+ end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
+ else: # we need to look for the corresponding closing character,
+ # keep nesting in mind
+ closing_chars = closer * n_chars
+ nesting_level = 1
+
+ search_pos = match.start('delimiter')
+
+ while nesting_level > 0:
+ next_open_pos = text.find(opening_chars, search_pos + n_chars)
+ next_close_pos = text.find(closing_chars, search_pos + n_chars)
+
+ if next_close_pos == -1:
+ next_close_pos = len(text)
+ nesting_level = 0
+ elif next_open_pos != -1 and next_open_pos < next_close_pos:
+ nesting_level += 1
+ search_pos = next_open_pos
+ else: # next_close_pos < next_open_pos
+ nesting_level -= 1
+ search_pos = next_close_pos
+
+ end_pos = next_close_pos
+
+ if end_pos < 0: # if we didn't find a closer, just highlight the
+ # rest of the text in this class
+ end_pos = len(text)
+
+ if adverbs is not None and re.search(r':to\b', adverbs):
+ heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
+ end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
+ r'\s*$', text[end_pos:], re.MULTILINE)
+
+ if end_heredoc:
+ end_pos += end_heredoc.end()
+ else:
+ end_pos = len(text)
+
+ yield match.start(), token_class, text[match.start():end_pos + n_chars]
+ context.pos = end_pos + n_chars
+
+ return callback
+
+ def opening_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter an opening brace and we're one level
+ # below a token state, it means we need to increment
+ # the nesting level for braces so we know later when
+ # we should return to the token rules.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level += 1
+
+ def closing_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter a free closing brace and we're one level
+ # below a token state, it means we need to check the nesting
+ # level to see if we need to return to the token state.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level -= 1
+ if context.perl6_token_nesting_level == 0:
+ stack.pop()
+
+ def embedded_perl6_callback(lexer, match, context):
+ context.perl6_token_nesting_level = 1
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+ context.stack.append('root')
+
+ # If you're modifying these rules, be careful if you need to process '{' or '}'
+ # characters. We have special logic for processing these characters (due to the fact
+ # that you can nest Perl 6 code in regex blocks), so if you need to process one of
+ # them, make sure you also process the corresponding one!
+ tokens = {
+ 'common': [
+ (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
+ brackets_callback(Comment.Multiline)),
(r'#[^\n]*$', Comment.Single),
- (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
- (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
- (r'^=.*?\n\s*?\n', Comment.Multiline),
- (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
- bygroups(Keyword, Name), 'token-sym-brackets'),
+ (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
+ (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
+ (r'^=.*?\n\s*?\n', Comment.Multiline),
+ (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
+ bygroups(Keyword, Name), 'token-sym-brackets'),
(r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + r')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
- bygroups(Keyword, Name), 'pre-token'),
- # deal with a special case in the Perl 6 grammar (role q { ... })
- (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
- (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
- (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
- Name.Builtin),
- (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
- # copied from PerlLexer
- (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable),
- (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
- (r'::\?\w+', Name.Variable.Global),
- (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable.Global),
- (r'\$(?:<.*?>)+', Name.Variable),
- (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
- r'(?P=first_char)*)', brackets_callback(String)),
- # copied from PerlLexer
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'm\w+(?=\()', Name),
- (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^\w:\s])'
- r'(?P=first_char)*)', brackets_callback(String.Regex)),
- (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
- String.Regex),
- (r'<[^\s=].*?\S>', String),
- (_build_word_match(PERL6_OPERATORS), Operator),
- (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- ],
- 'root': [
- include('common'),
- (r'\{', opening_brace_callback),
- (r'\}', closing_brace_callback),
- (r'.+?', Text),
- ],
- 'pre-token': [
- include('common'),
- (r'\{', Text, ('#pop', 'token')),
- (r'.+?', Text),
- ],
- 'token-sym-brackets': [
- (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
- brackets_callback(Name), ('#pop', 'pre-token')),
- default(('#pop', 'pre-token')),
- ],
- 'token': [
- (r'\}', Text, '#pop'),
- (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
- # make sure that quotes in character classes aren't treated as strings
- (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
- # make sure that '#' characters in quotes aren't treated as comments
- (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
- (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
+ bygroups(Keyword, Name), 'pre-token'),
+ # deal with a special case in the Perl 6 grammar (role q { ... })
+ (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
+ (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
+ (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
+ Name.Builtin),
+ (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
+ # copied from PerlLexer
+ (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
+ Name.Variable),
+ (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
+ (r'::\?\w+', Name.Variable.Global),
+ (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
+ Name.Variable.Global),
+ (r'\$(?:<.*?>)+', Name.Variable),
+ (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
+ r'(?P=first_char)*)', brackets_callback(String)),
+ # copied from PerlLexer
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'm\w+(?=\()', Name),
+ (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^\w:\s])'
+ r'(?P=first_char)*)', brackets_callback(String.Regex)),
+ (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
+ String.Regex),
+ (r'<[^\s=].*?\S>', String),
+ (_build_word_match(PERL6_OPERATORS), Operator),
+ (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ ],
+ 'root': [
+ include('common'),
+ (r'\{', opening_brace_callback),
+ (r'\}', closing_brace_callback),
+ (r'.+?', Text),
+ ],
+ 'pre-token': [
+ include('common'),
+ (r'\{', Text, ('#pop', 'token')),
+ (r'.+?', Text),
+ ],
+ 'token-sym-brackets': [
+ (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
+ brackets_callback(Name), ('#pop', 'pre-token')),
+ default(('#pop', 'pre-token')),
+ ],
+ 'token': [
+ (r'\}', Text, '#pop'),
+ (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
+ # make sure that quotes in character classes aren't treated as strings
+ (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
+ # make sure that '#' characters in quotes aren't treated as comments
+ (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
+ (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
(r'#.*?$', Comment.Single),
- (r'\{', embedded_perl6_callback),
- ('.+?', String.Regex),
- ],
- }
-
- def analyse_text(text):
- def strip_pod(lines):
- in_pod = False
- stripped_lines = []
-
- for line in lines:
- if re.match(r'^=(?:end|cut)', line):
- in_pod = False
- elif re.match(r'^=\w+', line):
- in_pod = True
- elif not in_pod:
- stripped_lines.append(line)
-
- return stripped_lines
-
- # XXX handle block comments
- lines = text.splitlines()
- lines = strip_pod(lines)
- text = '\n'.join(lines)
-
- if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
- return True
-
- saw_perl_decl = False
- rating = False
-
- # check for my/our/has declarations
+ (r'\{', embedded_perl6_callback),
+ ('.+?', String.Regex),
+ ],
+ }
+
+ def analyse_text(text):
+ def strip_pod(lines):
+ in_pod = False
+ stripped_lines = []
+
+ for line in lines:
+ if re.match(r'^=(?:end|cut)', line):
+ in_pod = False
+ elif re.match(r'^=\w+', line):
+ in_pod = True
+ elif not in_pod:
+ stripped_lines.append(line)
+
+ return stripped_lines
+
+ # XXX handle block comments
+ lines = text.splitlines()
+ lines = strip_pod(lines)
+ text = '\n'.join(lines)
+
+ if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
+ return True
+
+ saw_perl_decl = False
+ rating = False
+
+ # check for my/our/has declarations
if re.search(r"(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE +
r"+\s+)?[$@%&(]", text):
- rating = 0.8
- saw_perl_decl = True
-
- for line in lines:
- line = re.sub('#.*', '', line)
+ rating = 0.8
+ saw_perl_decl = True
+
+ for line in lines:
+ line = re.sub('#.*', '', line)
if re.match(r'^\s*$', line):
- continue
-
- # match v6; use v6; use v6.0; use v6.0.0;
+ continue
+
+ # match v6; use v6; use v6.0; use v6.0.0;
if re.match(r'^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
- return True
- # match class, module, role, enum, grammar declarations
+ return True
+ # match class, module, role, enum, grammar declarations
class_decl = re.match(r'^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
- if class_decl:
- if saw_perl_decl or class_decl.group('scope') is not None:
- return True
- rating = 0.05
- continue
- break
-
- return rating
-
- def __init__(self, **options):
- super(Perl6Lexer, self).__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
+ if class_decl:
+ if saw_perl_decl or class_decl.group('scope') is not None:
+ return True
+ rating = 0.05
+ continue
+ break
+
+ return rating
+
+ def __init__(self, **options):
+ super(Perl6Lexer, self).__init__(**options)
+ self.encoding = options.get('encoding', 'utf-8')
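
The hunk above restores the Perl6Lexer's analyse_text heuristics (shebang check, "use v6;", and my/our/has declarations). A minimal sketch of how those heuristics surface through the public Pygments API follows; it is not part of the diff, the sample Perl 6 snippet is hypothetical, and the printed score assumes Pygments' usual normalisation of analyse_text return values:

    from pygments.lexers import get_lexer_by_name
    from pygments.lexers.perl import Perl6Lexer

    # Hypothetical sample; 'use v6;' is one of the markers analyse_text looks for.
    sample = "use v6;\nmy $greeting = 'hello';\nsay $greeting;\n"

    # The heuristic above should score this as Perl 6 (the 'use v6;' branch fires).
    print(Perl6Lexer.analyse_text(sample))

    # The same lexer is reachable through its registered alias.
    lexer = get_lexer_by_name('perl6')
    for token_type, value in lexer.get_tokens(sample):
        print(token_type, repr(value))
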
diff --git a/contrib/python/Pygments/py2/pygments/lexers/php.py b/contrib/python/Pygments/py2/pygments/lexers/php.py
index bd4a237666..1b3dc71b5d 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/php.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/php.py
@@ -1,192 +1,192 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.php
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for PHP and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.php
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for PHP and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, include, bygroups, default, using, \
this, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Other
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Other
from pygments.util import get_bool_opt, get_list_opt, iteritems, \
shebang_matches
-
-__all__ = ['ZephirLexer', 'PhpLexer']
-
-
-class ZephirLexer(RegexLexer):
- """
- For `Zephir language <http://zephir-lang.com/>`_ source code.
-
- Zephir is a compiled high level language aimed
- to the creation of C-extensions for PHP.
-
- .. versionadded:: 2.0
- """
-
- name = 'Zephir'
- aliases = ['zephir']
- filenames = ['*.zep']
-
- zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
- zephir_type = ['bit', 'bits', 'string']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
- r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
- r'empty)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
- r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
- r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
- r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_][\w\\]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class PhpLexer(RegexLexer):
- """
- For `PHP <http://www.php.net/>`_ source code.
- For PHP embedded in HTML, use the `HtmlPhpLexer`.
-
- Additional options accepted:
-
- `startinline`
- If given and ``True`` the lexer starts highlighting with
- php code (i.e.: no starting ``<?php`` required). The default
- is ``False``.
- `funcnamehighlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabledmodules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted
- except the special ``'unknown'`` module that includes functions
- that are known to php but are undocumented.
-
- To get a list of allowed modules have a look into the
- `_php_builtins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._php_builtins import MODULES
- >>> MODULES.keys()
- ['PHP Options/Info', 'Zip', 'dba', ...]
-
- In fact the names of those modules match the module names from
- the php documentation.
- """
-
- name = 'PHP'
- aliases = ['php', 'php3', 'php4', 'php5']
- filenames = ['*.php', '*.php[345]', '*.inc']
- mimetypes = ['text/x-php']
-
- # Note that a backslash is included in the following two patterns
- # PHP uses a backslash as a namespace separator
- _ident_char = r'[\\\w]|[^\x00-\x7f]'
- _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
- _ident_end = r'(?:' + _ident_char + ')*'
- _ident_inner = _ident_begin + _ident_end
-
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'<\?(php)?', Comment.Preproc, 'php'),
- (r'[^<]+', Other),
- (r'<', Other)
- ],
- 'php': [
- (r'\?>', Comment.Preproc, '#pop'),
+
+__all__ = ['ZephirLexer', 'PhpLexer']
+
+
+class ZephirLexer(RegexLexer):
+ """
+ For `Zephir language <http://zephir-lang.com/>`_ source code.
+
+ Zephir is a compiled high level language aimed
+ to the creation of C-extensions for PHP.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Zephir'
+ aliases = ['zephir']
+ filenames = ['*.zep']
+
+ zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
+ zephir_type = ['bit', 'bits', 'string']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
+ r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
+ r'empty)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
+ r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
+ r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
+ r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_][\w\\]*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class PhpLexer(RegexLexer):
+ """
+ For `PHP <http://www.php.net/>`_ source code.
+ For PHP embedded in HTML, use the `HtmlPhpLexer`.
+
+ Additional options accepted:
+
+ `startinline`
+ If given and ``True`` the lexer starts highlighting with
+ php code (i.e.: no starting ``<?php`` required). The default
+ is ``False``.
+ `funcnamehighlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabledmodules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted
+ except the special ``'unknown'`` module that includes functions
+ that are known to php but are undocumented.
+
+ To get a list of allowed modules have a look into the
+ `_php_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._php_builtins import MODULES
+ >>> MODULES.keys()
+ ['PHP Options/Info', 'Zip', 'dba', ...]
+
+ In fact the names of those modules match the module names from
+ the php documentation.
+ """
+
+ name = 'PHP'
+ aliases = ['php', 'php3', 'php4', 'php5']
+ filenames = ['*.php', '*.php[345]', '*.inc']
+ mimetypes = ['text/x-php']
+
+ # Note that a backslash is included in the following two patterns
+ # PHP uses a backslash as a namespace separator
+ _ident_char = r'[\\\w]|[^\x00-\x7f]'
+ _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
+ _ident_end = r'(?:' + _ident_char + ')*'
+ _ident_inner = _ident_begin + _ident_end
+
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'<\?(php)?', Comment.Preproc, 'php'),
+ (r'[^<]+', Other),
+ (r'<', Other)
+ ],
+ 'php': [
+ (r'\?>', Comment.Preproc, '#pop'),
(r'(<<<)([\'"]?)(' + _ident_inner + r')(\2\n.*?\n\s*)(\3)(;?)(\n)',
bygroups(String, String, String.Delimiter, String, String.Delimiter,
Punctuation, Text)),
- (r'\s+', Text),
- (r'#.*?\n', Comment.Single),
- (r'//.*?\n', Comment.Single),
- # put the empty comment here, it is otherwise seen as
- # the start of a docstring
- (r'/\*\*/', Comment.Multiline),
- (r'/\*\*.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(->|::)(\s*)(' + _ident_inner + ')',
- bygroups(Operator, Text, Name.Attribute)),
- (r'[~!%^&*+=|:.<>/@-]+', Operator),
- (r'\?', Operator), # don't add to the charclass above!
- (r'[\[\]{}();,]+', Punctuation),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
- (r'(function)(\s+)(&?)(\s*)',
- bygroups(Keyword, Text, Operator, Text), 'functionname'),
- (r'(const)(\s+)(' + _ident_inner + ')',
- bygroups(Keyword, Text, Name.Constant)),
- (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
- r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
- r'FALSE|print|for|require|continue|foreach|require_once|'
- r'declare|return|default|static|do|switch|die|stdClass|'
- r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
+ (r'\s+', Text),
+ (r'#.*?\n', Comment.Single),
+ (r'//.*?\n', Comment.Single),
+ # put the empty comment here, it is otherwise seen as
+ # the start of a docstring
+ (r'/\*\*/', Comment.Multiline),
+ (r'/\*\*.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(->|::)(\s*)(' + _ident_inner + ')',
+ bygroups(Operator, Text, Name.Attribute)),
+ (r'[~!%^&*+=|:.<>/@-]+', Operator),
+ (r'\?', Operator), # don't add to the charclass above!
+ (r'[\[\]{}();,]+', Punctuation),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
+ (r'(function)(\s+)(&?)(\s*)',
+ bygroups(Keyword, Text, Operator, Text), 'functionname'),
+ (r'(const)(\s+)(' + _ident_inner + ')',
+ bygroups(Keyword, Text, Name.Constant)),
+ (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
+ r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
+ r'FALSE|print|for|require|continue|foreach|require_once|'
+ r'declare|return|default|static|do|switch|die|stdClass|'
+ r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
r'virtual|endfor|include_once|while|endforeach|global|'
r'endif|list|endswitch|new|endwhile|not|'
r'array|E_ALL|NULL|final|php_user_filter|interface|'
- r'implements|public|private|protected|abstract|clone|try|'
- r'catch|throw|this|use|namespace|trait|yield|'
- r'finally)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
+ r'implements|public|private|protected|abstract|clone|try|'
+ r'catch|throw|this|use|namespace|trait|yield|'
+ r'finally)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
include('magicconstants'),
(r'\$\{\$+' + _ident_inner + r'\}', Name.Variable),
- (r'\$+' + _ident_inner, Name.Variable),
- (_ident_inner, Name.Other),
- (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
- (r'\d+e[+-]?[0-9]+', Number.Float),
- (r'0[0-7]+', Number.Oct),
- (r'0x[a-f0-9]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'0b[01]+', Number.Bin),
- (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
- (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
- (r'"', String.Double, 'string'),
- ],
+ (r'\$+' + _ident_inner, Name.Variable),
+ (_ident_inner, Name.Other),
+ (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
+ (r'\d+e[+-]?[0-9]+', Number.Float),
+ (r'0[0-7]+', Number.Oct),
+ (r'0x[a-f0-9]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'0b[01]+', Number.Bin),
+ (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
+ (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
+ (r'"', String.Double, 'string'),
+ ],
'magicfuncs': [
# source: http://php.net/manual/en/language.oop5.magic.php
(words((
@@ -203,68 +203,68 @@ class PhpLexer(RegexLexer):
suffix=r'\b'),
Name.Constant),
],
- 'classname': [
- (_ident_inner, Name.Class, '#pop')
- ],
- 'functionname': [
+ 'classname': [
+ (_ident_inner, Name.Class, '#pop')
+ ],
+ 'functionname': [
include('magicfuncs'),
(_ident_inner, Name.Function, '#pop'),
default('#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'[^{$"\\]+', String.Double),
- (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'[^{$"\\]+', String.Double),
+ (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
(r'\$' + _ident_inner + r'(\[\S+?\]|->' + _ident_inner + ')?',
- String.Interpol),
- (r'(\{\$\{)(.*?)(\}\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\{)(\$.*?)(\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\$\{)(\S+)(\})',
- bygroups(String.Interpol, Name.Variable, String.Interpol)),
+ String.Interpol),
+ (r'(\{\$\{)(.*?)(\}\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\{)(\$.*?)(\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\$\{)(\S+)(\})',
+ bygroups(String.Interpol, Name.Variable, String.Interpol)),
(r'[${\\]', String.Double)
- ],
- }
-
- def __init__(self, **options):
- self.funcnamehighlighting = get_bool_opt(
- options, 'funcnamehighlighting', True)
- self.disabledmodules = get_list_opt(
- options, 'disabledmodules', ['unknown'])
- self.startinline = get_bool_opt(options, 'startinline', False)
-
- # private option argument for the lexer itself
- if '_startinline' in options:
- self.startinline = options.pop('_startinline')
-
- # collect activated functions in a set
- self._functions = set()
- if self.funcnamehighlighting:
- from pygments.lexers._php_builtins import MODULES
- for key, value in iteritems(MODULES):
- if key not in self.disabledmodules:
- self._functions.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.startinline:
- stack.append('php')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Other:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
+ ],
+ }
+
+ def __init__(self, **options):
+ self.funcnamehighlighting = get_bool_opt(
+ options, 'funcnamehighlighting', True)
+ self.disabledmodules = get_list_opt(
+ options, 'disabledmodules', ['unknown'])
+ self.startinline = get_bool_opt(options, 'startinline', False)
+
+ # private option argument for the lexer itself
+ if '_startinline' in options:
+ self.startinline = options.pop('_startinline')
+
+ # collect activated functions in a set
+ self._functions = set()
+ if self.funcnamehighlighting:
+ from pygments.lexers._php_builtins import MODULES
+ for key, value in iteritems(MODULES):
+ if key not in self.disabledmodules:
+ self._functions.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.startinline:
+ stack.append('php')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Other:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
+ def analyse_text(text):
if shebang_matches(text, r'php'):
return True
- rv = 0.0
- if re.search(r'<\?(?!xml)', text):
- rv += 0.3
- return rv
+ rv = 0.0
+ if re.search(r'<\?(?!xml)', text):
+ rv += 0.3
+ return rv
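
The PhpLexer docstring restored above documents the startinline, funcnamehighlighting and disabledmodules options. Here is a minimal usage sketch under those documented options; it is not part of the diff, and the PHP snippet and formatter choice are illustrative only:

    from pygments import highlight
    from pygments.lexers import PhpLexer
    from pygments.formatters import TerminalFormatter

    # Hypothetical snippet with no leading '<?php'; startinline=True makes the
    # lexer begin in the 'php' state, as described in the docstring above.
    php_snippet = "$greeting = strtoupper('hello');\necho $greeting;\n"

    lexer = PhpLexer(startinline=True,
                     funcnamehighlighting=True,    # highlight builtins such as strtoupper
                     disabledmodules=['unknown'])  # the documented default
    print(highlight(php_snippet, lexer, TerminalFormatter()))
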
diff --git a/contrib/python/Pygments/py2/pygments/lexers/praat.py b/contrib/python/Pygments/py2/pygments/lexers/praat.py
index 4a6a14f0ea..46a0f908d7 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/praat.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/praat.py
@@ -1,218 +1,218 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.praat
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Praat
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.praat
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Praat
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups, include
-from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, Number, \
- Operator
-
-__all__ = ['PraatLexer']
-
-
-class PraatLexer(RegexLexer):
- """
- For `Praat <http://www.praat.org>`_ scripts.
-
- .. versionadded:: 2.1
- """
-
- name = 'Praat'
- aliases = ['praat']
- filenames = ['*.praat', '*.proc', '*.psc']
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, bygroups, include
+from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, Number, \
+ Operator
+
+__all__ = ['PraatLexer']
+
+
+class PraatLexer(RegexLexer):
+ """
+ For `Praat <http://www.praat.org>`_ scripts.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Praat'
+ aliases = ['praat']
+ filenames = ['*.praat', '*.proc', '*.psc']
+
keywords = (
- 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
- 'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
- 'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
- 'editor', 'endeditor', 'clearinfo',
+ 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
+ 'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
+ 'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
+ 'editor', 'endeditor', 'clearinfo',
)
-
+
functions_string = (
- 'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
- 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
- 'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
- 'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
+ 'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
+ 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
+ 'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
+ 'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
)
-
+
functions_numeric = (
- 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
- 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
- 'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
- 'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
- 'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
- 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
- 'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
- 'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
- 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
- 'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
- 'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
- 'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
- 'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
+ 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
+ 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
+ 'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
+ 'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
+ 'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
+ 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
+ 'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
+ 'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
+ 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
+ 'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
+ 'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
+ 'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
+ 'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
- 'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
- 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
- 'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
- 'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
- 'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
- 'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
- 'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
+ 'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
+ 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
+ 'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
+ 'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
+ 'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
+ 'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
+ 'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
- 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
+ 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
- 'writeInfo', 'writeInfoLine',
+ 'writeInfo', 'writeInfoLine',
)
-
+
functions_array = (
- 'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
+ 'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
)
-
+
objects = (
- 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
- 'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
- 'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
- 'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
- 'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
- 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
- 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
- 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
- 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
- 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
- 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
- 'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
- 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
- 'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
- 'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
- 'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
+ 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
+ 'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
+ 'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
+ 'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
+ 'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
+ 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
+ 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
+ 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
+ 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
+ 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
+ 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
+ 'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
+ 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
+ 'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
+ 'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
+ 'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
- 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
- 'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
- 'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
- 'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
- 'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
- 'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
- 'Weight', 'WordList',
+ 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
+ 'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
+ 'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
+ 'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
+ 'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
+ 'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
+ 'Weight', 'WordList',
)
-
+
variables_numeric = (
- 'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
+ 'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
)
-
+
variables_string = (
- 'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
- 'preferencesDirectory', 'newline', 'temporaryDirectory',
- 'defaultDirectory',
+ 'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
+ 'preferencesDirectory', 'newline', 'temporaryDirectory',
+ 'defaultDirectory',
)
-
+
object_attributes = (
'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
)
- tokens = {
- 'root': [
- (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
- (r'^#.*?$', Comment.Single),
- (r';[^\n]*', Comment.Single),
- (r'\s+', Text),
-
- (r'\bprocedure\b', Keyword, 'procedure_definition'),
- (r'\bcall\b', Keyword, 'procedure_call'),
- (r'@', Name.Function, 'procedure_call'),
-
- include('function_call'),
-
- (words(keywords, suffix=r'\b'), Keyword),
-
- (r'(\bform\b)(\s+)([^\n]+)',
- bygroups(Keyword, Text, String), 'old_form'),
-
- (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
- r'include|execute|system(?:_nocheck)?)(\s+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
-
- include('variable_name'),
- include('number'),
-
- (r'"', String, 'string'),
-
- (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
-
- (r'\b[A-Z]', Keyword, 'command'),
- (r'(\.{3}|[)(,])', Punctuation),
- ],
- 'command': [
- (r'( ?[\w()-]+ ?)', Keyword),
+ tokens = {
+ 'root': [
+ (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
+ (r'^#.*?$', Comment.Single),
+ (r';[^\n]*', Comment.Single),
+ (r'\s+', Text),
+
+ (r'\bprocedure\b', Keyword, 'procedure_definition'),
+ (r'\bcall\b', Keyword, 'procedure_call'),
+ (r'@', Name.Function, 'procedure_call'),
+
+ include('function_call'),
+
+ (words(keywords, suffix=r'\b'), Keyword),
+
+ (r'(\bform\b)(\s+)([^\n]+)',
+ bygroups(Keyword, Text, String), 'old_form'),
+
+ (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
+ r'include|execute|system(?:_nocheck)?)(\s+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
+
+ include('variable_name'),
+ include('number'),
+
+ (r'"', String, 'string'),
+
+ (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
+
+ (r'\b[A-Z]', Keyword, 'command'),
+ (r'(\.{3}|[)(,])', Punctuation),
+ ],
+ 'command': [
+ (r'( ?[\w()-]+ ?)', Keyword),
include('string_interpolated'),
- (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
- (r':', Keyword, ('#pop', 'comma_list')),
+ (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
+ (r':', Keyword, ('#pop', 'comma_list')),
(r'\s', Text, '#pop'),
- ],
- 'procedure_call': [
- (r'\s+', Text),
- (r'([\w.]+)(:|\s*\()',
- bygroups(Name.Function, Text), '#pop'),
- (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
- ],
- 'procedure_definition': [
- (r'\s', Text),
- (r'([\w.]+)(\s*?[(:])',
- bygroups(Name.Function, Text), '#pop'),
- (r'([\w.]+)([^\n]*)',
- bygroups(Name.Function, Text), '#pop'),
- ],
- 'function_call': [
- (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
+ ],
+ 'procedure_call': [
+ (r'\s+', Text),
+ (r'([\w.]+)(:|\s*\()',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
+ ],
+ 'procedure_definition': [
+ (r'\s', Text),
+ (r'([\w.]+)(\s*?[(:])',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([\w.]+)([^\n]*)',
+ bygroups(Name.Function, Text), '#pop'),
+ ],
+ 'function_call': [
+ (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
- ],
- 'function': [
- (r'\s+', Text),
- (r':', Punctuation, ('#pop', 'comma_list')),
- (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
- ],
- 'comma_list': [
- (r'(\s*\n\s*)(\.{3})', bygroups(Text, Punctuation)),
-
- (r'(\s*[])\n])', Text, '#pop'),
-
- (r'\s+', Text),
- (r'"', String, 'string'),
- (r'\b(if|then|else|fi|endif)\b', Keyword),
-
- include('function_call'),
- include('variable_name'),
- include('operator'),
- include('number'),
-
+ ],
+ 'function': [
+ (r'\s+', Text),
+ (r':', Punctuation, ('#pop', 'comma_list')),
+ (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
+ ],
+ 'comma_list': [
+ (r'(\s*\n\s*)(\.{3})', bygroups(Text, Punctuation)),
+
+ (r'(\s*[])\n])', Text, '#pop'),
+
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ (r'\b(if|then|else|fi|endif)\b', Keyword),
+
+ include('function_call'),
+ include('variable_name'),
+ include('operator'),
+ include('number'),
+
(r'[()]', Text),
- (r',', Punctuation),
- ],
- 'old_arguments': [
- (r'\n', Text, '#pop'),
-
- include('variable_name'),
- include('operator'),
- include('number'),
-
- (r'"', String, 'string'),
- (r'[^\n]', Text),
- ],
- 'number': [
+ (r',', Punctuation),
+ ],
+ 'old_arguments': [
+ (r'\n', Text, '#pop'),
+
+ include('variable_name'),
+ include('operator'),
+ include('number'),
+
+ (r'"', String, 'string'),
+ (r'[^\n]', Text),
+ ],
+ 'number': [
(r'\n', Text, '#pop'),
- (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
- ],
+ (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
+ ],
'object_reference': [
include('string_interpolated'),
(r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
@@ -221,82 +221,82 @@ class PraatLexer(RegexLexer):
(r'\$', Name.Builtin),
(r'\[', Text, '#pop'),
- ],
- 'variable_name': [
- include('operator'),
- include('number'),
-
- (words(variables_string, suffix=r'\$'), Name.Variable.Global),
+ ],
+ 'variable_name': [
+ include('operator'),
+ include('number'),
+
+ (words(variables_string, suffix=r'\$'), Name.Variable.Global),
(words(variables_numeric,
suffix=r'(?=[^a-zA-Z0-9\._"\'\$#\[:\(]|\s|^|$)'),
Name.Variable.Global),
-
+
(words(objects, prefix=r'\b', suffix=r"(_)"),
bygroups(Name.Builtin, Name.Builtin),
'object_reference'),
-
+
(r'\.?_?[a-z][\w.]*(\$|#)?', Text),
- (r'[\[\]]', Punctuation, 'comma_list'),
+ (r'[\[\]]', Punctuation, 'comma_list'),
include('string_interpolated'),
- ],
- 'operator': [
+ ],
+ 'operator': [
(r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
(r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
- ],
- 'string_interpolated': [
+ ],
+ 'string_interpolated': [
(r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w\d,]+")\])?(:[0-9]+)?\'',
- String.Interpol),
- ],
- 'string_unquoted': [
- (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
-
- (r'\n', Text, '#pop'),
- (r'\s', Text),
+ String.Interpol),
+ ],
+ 'string_unquoted': [
+ (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
+
+ (r'\n', Text, '#pop'),
+ (r'\s', Text),
include('string_interpolated'),
- (r"'", String),
- (r"[^'\n]+", String),
- ],
- 'string': [
- (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
-
- (r'"', String, '#pop'),
+ (r"'", String),
+ (r"[^'\n]+", String),
+ ],
+ 'string': [
+ (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
+
+ (r'"', String, '#pop'),
include('string_interpolated'),
- (r"'", String),
- (r'[^\'"\n]+', String),
- ],
- 'old_form': [
+ (r"'", String),
+ (r'[^\'"\n]+', String),
+ ],
+ 'old_form': [
(r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
- (r'\s+', Text),
-
- (r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
- bygroups(Keyword, Text), 'number'),
-
- (r'(option|button)([ \t]+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'(sentence|text)([ \t]+\S+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'(word)([ \t]+\S+[ \t]*)(\S+)?([ \t]+.*)?',
- bygroups(Keyword, Text, String, Text)),
-
- (r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)',
- bygroups(Keyword, Text, Name.Variable)),
-
- # Ideally processing of the number would happend in the 'number'
- # but that doesn't seem to work
- (r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?'
- r'(?:[eE][-+]?\d+)?%?)',
- bygroups(Keyword, Text, Operator, Number)),
-
- (r'(comment)(\s+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'\bendform\b', Keyword, '#pop'),
- ]
- }
+ (r'\s+', Text),
+
+ (r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
+ bygroups(Keyword, Text), 'number'),
+
+ (r'(option|button)([ \t]+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'(sentence|text)([ \t]+\S+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'(word)([ \t]+\S+[ \t]*)(\S+)?([ \t]+.*)?',
+ bygroups(Keyword, Text, String, Text)),
+
+ (r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)',
+ bygroups(Keyword, Text, Name.Variable)),
+
+ # Ideally processing of the number would happend in the 'number'
+ # but that doesn't seem to work
+ (r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?'
+ r'(?:[eE][-+]?\d+)?%?)',
+ bygroups(Keyword, Text, Operator, Number)),
+
+ (r'(comment)(\s+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'\bendform\b', Keyword, '#pop'),
+ ]
+ }
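
For the PraatLexer restored above, a short sketch of tokenizing a one-line Praat script; the script text is hypothetical and the exact token types may differ between Pygments versions:

    from pygments.lexers import PraatLexer

    # Hypothetical one-line script; writeInfoLine appears in the functions_numeric
    # tuple above, so it should come out as a function-name token.
    script = 'writeInfoLine: "Hello from Praat"\n'

    for token_type, value in PraatLexer().get_tokens(script):
        print(token_type, repr(value))
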
diff --git a/contrib/python/Pygments/py2/pygments/lexers/prolog.py b/contrib/python/Pygments/py2/pygments/lexers/prolog.py
index 70783625e0..5d8a58b59d 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/prolog.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/prolog.py
@@ -1,306 +1,306 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.prolog
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Prolog and Prolog-like languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.prolog
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Prolog and Prolog-like languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['PrologLexer', 'LogtalkLexer']
-
-
-class PrologLexer(RegexLexer):
- """
- Lexer for Prolog files.
- """
- name = 'Prolog'
- aliases = ['prolog']
- filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
- mimetypes = ['text/x-prolog']
-
- flags = re.UNICODE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'nested-comment'),
- (r'%.*', Comment.Single),
- # character literal
- (r'0\'.', String.Char),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # literal with prepended base
- (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer),
- (r'[\[\](){}|.,;!]', Punctuation),
- (r':-|-->', Punctuation),
- (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
- r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double),
- (r"'(?:''|[^'])*'", String.Atom), # quoted atom
- # Needs to not be followed by an atom.
- # (r'=(?=\s|[a-zA-Z\[])', Operator),
- (r'is\b', Operator),
- (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
- Operator),
- (r'(mod|div|not)\b', Operator),
- (r'_', Keyword), # The don't-care variable
- (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
- (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['PrologLexer', 'LogtalkLexer']
+
+
+class PrologLexer(RegexLexer):
+ """
+ Lexer for Prolog files.
+ """
+ name = 'Prolog'
+ aliases = ['prolog']
+ filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
+ mimetypes = ['text/x-prolog']
+
+ flags = re.UNICODE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'nested-comment'),
+ (r'%.*', Comment.Single),
+ # character literal
+ (r'0\'.', String.Char),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # literal with prepended base
+ (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'[\[\](){}|.,;!]', Punctuation),
+ (r':-|-->', Punctuation),
+ (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+ r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double),
+ (r"'(?:''|[^'])*'", String.Atom), # quoted atom
+ # Needs to not be followed by an atom.
+ # (r'=(?=\s|[a-zA-Z\[])', Operator),
+ (r'is\b', Operator),
+ (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
+ Operator),
+ (r'(mod|div|not)\b', Operator),
+ (r'_', Keyword), # The don't-care variable
+ (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
+ (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- u'(\\s*)(:-|-->)',
- bygroups(Name.Function, Text, Operator)), # function defn
- (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'(\\s*)(:-|-->)',
+ bygroups(Name.Function, Text, Operator)), # function defn
+ (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- u'(\\s*)(\\()',
- bygroups(Name.Function, Text, Punctuation)),
- (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'(\\s*)(\\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
u'[\\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
- String.Atom), # atom, characters
- # This one includes !
- (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
- String.Atom), # atom, graphics
- (r'[A-Z_]\w*', Name.Variable),
- (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
- ],
- 'nested-comment': [
- (r'\*/', Comment.Multiline, '#pop'),
- (r'/\*', Comment.Multiline, '#push'),
- (r'[^*/]+', Comment.Multiline),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
- def analyse_text(text):
- return ':-' in text
-
-
-class LogtalkLexer(RegexLexer):
- """
- For `Logtalk <http://logtalk.org/>`_ source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'Logtalk'
- aliases = ['logtalk']
- filenames = ['*.lgt', '*.logtalk']
- mimetypes = ['text/x-logtalk']
-
- tokens = {
- 'root': [
- # Directives
- (r'^\s*:-\s', Punctuation, 'directive'),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- # Numbers
+ String.Atom), # atom, characters
+ # This one includes !
+ (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
+ String.Atom), # atom, graphics
+ (r'[A-Z_]\w*', Name.Variable),
+ (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
+ ],
+ 'nested-comment': [
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'[^*/]+', Comment.Multiline),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+ def analyse_text(text):
+ return ':-' in text
+
+
+class LogtalkLexer(RegexLexer):
+ """
+ For `Logtalk <http://logtalk.org/>`_ source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Logtalk'
+ aliases = ['logtalk']
+ filenames = ['*.lgt', '*.logtalk']
+ mimetypes = ['text/x-logtalk']
+
+ tokens = {
+ 'root': [
+ # Directives
+ (r'^\s*:-\s', Punctuation, 'directive'),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Numbers
(r"0'[\\]?.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
(r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Event handlers
- (r'(after|before)(?=[(])', Keyword),
- # Message forwarding handler
- (r'forward(?=[(])', Keyword),
- # Execution-context methods
+ # Event handlers
+ (r'(after|before)(?=[(])', Keyword),
+ # Message forwarding handler
+ (r'forward(?=[(])', Keyword),
+ # Execution-context methods
(r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
- # Reflection
- (r'(current_predicate|predicate_property)(?=[(])', Keyword),
- # DCGs and term expansion
- (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
- # Entity
- (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
- (r'(object|protocol|category)_property(?=[(])', Keyword),
- # Entity relations
- (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
- (r'extends_(object|protocol|category)(?=[(])', Keyword),
- (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
- (r'(instantiat|specializ)es_class(?=[(])', Keyword),
- # Events
- (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
- # Flags
+ # Reflection
+ (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+ # DCGs and term expansion
+ (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+ # Entity
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(object|protocol|category)_property(?=[(])', Keyword),
+ # Entity relations
+ (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
+ (r'extends_(object|protocol|category)(?=[(])', Keyword),
+ (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+ (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+ # Events
+ (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+ # Flags
(r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
- # Compiling, loading, and library paths
+ # Compiling, loading, and library paths
(r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
- (r'\blogtalk_make\b', Keyword),
- # Database
- (r'(clause|retract(all)?)(?=[(])', Keyword),
- (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
- # Control constructs
- (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+ (r'\blogtalk_make\b', Keyword),
+ # Database
+ (r'(clause|retract(all)?)(?=[(])', Keyword),
+ (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+ # Control constructs
+ (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
(r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
(r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
- # All solutions
- (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+ # All solutions
+ (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
# Multi-threading predicates
(r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
# Engine predicates
(r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
- # Term unification
- (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
- # Term creation and decomposition
- (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
- # Evaluable functors
- (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
- (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
- (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
- # Other arithmetic functors
- (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
- # Term testing
+ # Term unification
+ (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
+ # Term creation and decomposition
+ (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
+ # Evaluable functors
+ (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
+ (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+ (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
+ # Other arithmetic functors
+ (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
+ # Term testing
(r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
- # Term comparison
- (r'compare(?=[(])', Keyword),
- # Stream selection and control
- (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
- (r'(open|close)(?=[(])', Keyword),
- (r'flush_output(?=[(])', Keyword),
- (r'(at_end_of_stream|flush_output)\b', Keyword),
- (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
- # Character and byte input/output
- (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
- (r'\bnl\b', Keyword),
- # Term input/output
- (r'read(_term)?(?=[(])', Keyword),
- (r'write(q|_(canonical|term))?(?=[(])', Keyword),
- (r'(current_)?op(?=[(])', Keyword),
- (r'(current_)?char_conversion(?=[(])', Keyword),
- # Atomic term processing
- (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
- (r'(char_code|sub_atom)(?=[(])', Keyword),
- (r'number_c(har|ode)s(?=[(])', Keyword),
- # Implementation defined hooks functions
- (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
- (r'\bhalt\b', Keyword),
- (r'halt(?=[(])', Keyword),
- # Message sending operators
- (r'(::|:|\^\^)', Operator),
- # External call
- (r'[{}]', Keyword),
- # Logic and control
- (r'(ignore|once)(?=[(])', Keyword),
- (r'\brepeat\b', Keyword),
- # Sorting
- (r'(key)?sort(?=[(])', Keyword),
- # Bitwise functors
- (r'(>>|<<|/\\|\\\\|\\)', Operator),
- # Predicate aliases
- (r'\bas\b', Operator),
- # Arithemtic evaluation
- (r'\bis\b', Keyword),
- # Arithemtic comparison
- (r'(=:=|=\\=|<|=<|>=|>)', Operator),
- # Term creation and decomposition
- (r'=\.\.', Operator),
- # Term unification
- (r'(=|\\=)', Operator),
- # Term comparison
- (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
- # Evaluable functors
- (r'(//|[-+*/])', Operator),
- (r'\b(e|pi|div|mod|rem)\b', Operator),
- # Other arithemtic functors
- (r'\b\*\*\b', Operator),
- # DCG rules
- (r'-->', Operator),
- # Control constructs
- (r'([!;]|->)', Operator),
- # Logic and control
- (r'\\+', Operator),
- # Mode operators
- (r'[?@]', Operator),
- # Existential quantifier
- (r'\^', Operator),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
+ # Term comparison
+ (r'compare(?=[(])', Keyword),
+ # Stream selection and control
+ (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+ (r'(open|close)(?=[(])', Keyword),
+ (r'flush_output(?=[(])', Keyword),
+ (r'(at_end_of_stream|flush_output)\b', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ # Character and byte input/output
+ (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+ (r'\bnl\b', Keyword),
+ # Term input/output
+ (r'read(_term)?(?=[(])', Keyword),
+ (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+ (r'(current_)?op(?=[(])', Keyword),
+ (r'(current_)?char_conversion(?=[(])', Keyword),
+ # Atomic term processing
+ (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+ (r'(char_code|sub_atom)(?=[(])', Keyword),
+ (r'number_c(har|ode)s(?=[(])', Keyword),
+ # Implementation defined hooks functions
+ (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+ (r'\bhalt\b', Keyword),
+ (r'halt(?=[(])', Keyword),
+ # Message sending operators
+ (r'(::|:|\^\^)', Operator),
+ # External call
+ (r'[{}]', Keyword),
+ # Logic and control
+ (r'(ignore|once)(?=[(])', Keyword),
+ (r'\brepeat\b', Keyword),
+ # Sorting
+ (r'(key)?sort(?=[(])', Keyword),
+ # Bitwise functors
+ (r'(>>|<<|/\\|\\\\|\\)', Operator),
+ # Predicate aliases
+ (r'\bas\b', Operator),
+ # Arithemtic evaluation
+ (r'\bis\b', Keyword),
+ # Arithemtic comparison
+ (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+ # Term creation and decomposition
+ (r'=\.\.', Operator),
+ # Term unification
+ (r'(=|\\=)', Operator),
+ # Term comparison
+ (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+ # Evaluable functors
+ (r'(//|[-+*/])', Operator),
+ (r'\b(e|pi|div|mod|rem)\b', Operator),
+ # Other arithemtic functors
+ (r'\b\*\*\b', Operator),
+ # DCG rules
+ (r'-->', Operator),
+ # Control constructs
+ (r'([!;]|->)', Operator),
+ # Logic and control
+ (r'\\+', Operator),
+ # Mode operators
+ (r'[?@]', Operator),
+ # Existential quantifier
+ (r'\^', Operator),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
# Punctuation
- (r'[()\[\],.|]', Text),
- # Atoms
+ (r'[()\[\],.|]', Text),
+ # Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- ],
-
- 'quoted_atom': [
- (r"''", String),
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
- (r"[^\\'\n]+", String),
- (r'\\', String),
- ],
-
- 'directive': [
- # Conditional compilation directives
- (r'(el)?if(?=[(])', Keyword, 'root'),
+ (r"'", String, 'quoted_atom'),
+ ],
+
+ 'quoted_atom': [
+ (r"''", String),
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r"[^\\'\n]+", String),
+ (r'\\', String),
+ ],
+
+ 'directive': [
+ # Conditional compilation directives
+ (r'(el)?if(?=[(])', Keyword, 'root'),
(r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
- # Entity directives
- (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ # Entity directives
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
(r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
- # Predicate scope directives
- (r'(public|protected|private)(?=[(])', Keyword, 'root'),
- # Other directives
- (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
- (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
+ # Predicate scope directives
+ (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+ # Other directives
+ (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
+ (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
(r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
(r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
- (r'op(?=[(])', Keyword, 'root'),
- (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
+ (r'op(?=[(])', Keyword, 'root'),
+ (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
(r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
(r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
- ],
-
- 'entityrelations': [
- (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
- # Numbers
+ ],
+
+ 'entityrelations': [
+ (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ # Numbers
(r"0'[\\]?.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
(r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Atoms
+ # Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
- # End of entity-opening directive
- (r'([)]\.)', Text, 'root'),
- # Scope operator
- (r'(::)', Operator),
+ (r"'", String, 'quoted_atom'),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # End of entity-opening directive
+ (r'([)]\.)', Text, 'root'),
+ # Scope operator
+ (r'(::)', Operator),
# Punctuation
- (r'[()\[\],.|]', Text),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- ]
- }
-
- def analyse_text(text):
- if ':- object(' in text:
- return 1.0
- elif ':- protocol(' in text:
- return 1.0
- elif ':- category(' in text:
- return 1.0
+ (r'[()\[\],.|]', Text),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if ':- object(' in text:
+ return 1.0
+ elif ':- protocol(' in text:
+ return 1.0
+ elif ':- category(' in text:
+ return 1.0
elif re.search(r'^:-\s[a-z]', text, re.M):
- return 0.9
- else:
- return 0.0
+ return 0.9
+ else:
+ return 0.0
diff --git a/contrib/python/Pygments/py2/pygments/lexers/python.py b/contrib/python/Pygments/py2/pygments/lexers/python.py
index 5f700e7f5d..dd6bdb67a2 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/python.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/python.py
@@ -1,32 +1,32 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.python
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Python and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.python
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Python and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- default, words, combined, do_insertions
-from pygments.util import get_bool_opt, shebang_matches
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other, Error
-from pygments import unistring as uni
-
-__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+ default, words, combined, do_insertions
+from pygments.util import get_bool_opt, shebang_matches
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other, Error
+from pygments import unistring as uni
+
+__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
'Python2Lexer', 'Python2TracebackLexer',
'CythonLexer', 'DgLexer', 'NumPyLexer']
-
-line_re = re.compile('.*?\n')
-
-
-class PythonLexer(RegexLexer):
- """
+
+line_re = re.compile('.*?\n')
+
+
+class PythonLexer(RegexLexer):
+ """
For `Python <http://www.python.org>`_ source code (version 3.x).
.. versionadded:: 0.10
@@ -34,9 +34,9 @@ class PythonLexer(RegexLexer):
.. versionchanged:: 2.5
This is now the default ``PythonLexer``. It is still available as the
alias ``Python3Lexer``.
- """
-
- name = 'Python'
+ """
+
+ name = 'Python'
aliases = ['python', 'py', 'sage', 'python3', 'py3']
filenames = [
'*.py',
@@ -60,13 +60,13 @@ class PythonLexer(RegexLexer):
]
mimetypes = ['text/x-python', 'application/x-python',
'text/x-python3', 'application/x-python3']
-
+
flags = re.MULTILINE | re.UNICODE
uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
- def innerstring_rules(ttype):
- return [
+ def innerstring_rules(ttype):
+ return [
# the old style '%s' % (...) string formatting (still valid in Py3)
(r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsaux%]', String.Interpol),
@@ -304,43 +304,43 @@ class Python2Lexer(RegexLexer):
def innerstring_rules(ttype):
return [
- # the old style '%s' % (...) string formatting
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ # the old style '%s' % (...) string formatting
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r'%', ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- 'root': [
- (r'\n', Text),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r'%', ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
(r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
bygroups(Text, String.Affix, String.Doc)),
(r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
bygroups(Text, String.Affix, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'\A#!.+$', Comment.Hashbang),
- (r'#.*$', Comment.Single),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
- include('keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('builtins'),
+ (r'[^\S\n]+', Text),
+ (r'\A#!.+$', Comment.Hashbang),
+ (r'#.*$', Comment.Single),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
+ include('keywords'),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
+ include('builtins'),
include('magicfuncs'),
include('magicvars'),
- include('backtick'),
+ include('backtick'),
('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
bygroups(String.Affix, String.Double), 'tdqs'),
("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
@@ -357,40 +357,40 @@ class Python2Lexer(RegexLexer):
combined('stringescape', 'dqs')),
("([uUbB]?)(')", bygroups(String.Affix, String.Single),
combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (words((
- 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
- 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
- 'print', 'raise', 'return', 'try', 'while', 'yield',
- 'yield from', 'as', 'with'), suffix=r'\b'),
- Keyword),
- ],
- 'builtins': [
- (words((
- '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
- 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
- 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
- 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
- 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
- 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
- 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
- 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
- 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
- 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
- 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
- prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
+ 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+ 'print', 'raise', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
+ 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
+ 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
+ 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+ 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
+ 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
+ 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
+ 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
+ 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
+ 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
(r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls'
- r')\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
- 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
- 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
- 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
+ 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
+ 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
'MemoryError', 'ModuleNotFoundError', 'NameError',
'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
'PendingDeprecationWarning', 'RecursionError', 'ReferenceError',
@@ -400,8 +400,8 @@ class Python2Lexer(RegexLexer):
'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
+ Name.Exception),
+ ],
'magicfuncs': [
(words((
'__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__',
@@ -433,485 +433,485 @@ class Python2Lexer(RegexLexer):
suffix=r'\b'),
Name.Variable.Magic),
],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@[\w.]+', Name.Decorator),
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@[\w.]+', Name.Decorator),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
+ ],
+ 'funcname': [
include('magicfuncs'),
(r'[a-zA-Z_]\w*', Name.Function, '#pop'),
default('#pop'),
- ],
- 'classname': [
+ ],
+ 'classname': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'as\b', Keyword.Namespace),
- (r',', Operator),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'import\b', Keyword.Namespace, '#pop'),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Name.Builtin.Pseudo, '#pop'),
- # sadly, in "raise x from y" y will be highlighted as namespace too
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # anything else here also means "raise x from y" and is therefore
- # not an error
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings-single': innerstring_rules(String.Single),
- 'strings-double': innerstring_rules(String.Double),
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings-double')
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings-single')
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings-double'),
- (r'\n', String.Double)
- ],
- 'tsqs': [
- (r"'''", String.Single, '#pop'),
- include('strings-single'),
- (r'\n', String.Single)
- ],
- }
-
- def analyse_text(text):
+ ],
+ 'import': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'as\b', Keyword.Namespace),
+ (r',', Operator),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'import\b', Keyword.Namespace, '#pop'),
+ # if None occurs here, it's "raise x from None", since None can
+ # never be a module name
+ (r'None\b', Name.Builtin.Pseudo, '#pop'),
+ # sadly, in "raise x from y" y will be highlighted as namespace too
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # anything else here also means "raise x from y" and is therefore
+ # not an error
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings-single': innerstring_rules(String.Single),
+ 'strings-double': innerstring_rules(String.Double),
+ 'dqs': [
+ (r'"', String.Double, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('strings-double')
+ ],
+ 'sqs': [
+ (r"'", String.Single, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('strings-single')
+ ],
+ 'tdqs': [
+ (r'"""', String.Double, '#pop'),
+ include('strings-double'),
+ (r'\n', String.Double)
+ ],
+ 'tsqs': [
+ (r"'''", String.Single, '#pop'),
+ include('strings-single'),
+ (r'\n', String.Single)
+ ],
+ }
+
+ def analyse_text(text):
return shebang_matches(text, r'pythonw?2(\.\d)?') or \
- 'import ' in text[:1000]
-
-
-class PythonConsoleLexer(Lexer):
- """
- For Python console output or doctests, such as:
-
- .. sourcecode:: pycon
-
- >>> a = 'foo'
- >>> print a
- foo
- >>> 1 / 0
- Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- ZeroDivisionError: integer division or modulo by zero
-
- Additional options:
-
- `python3`
+ 'import ' in text[:1000]
+
+
+class PythonConsoleLexer(Lexer):
+ """
+ For Python console output or doctests, such as:
+
+ .. sourcecode:: pycon
+
+ >>> a = 'foo'
+ >>> print a
+ foo
+ >>> 1 / 0
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ZeroDivisionError: integer division or modulo by zero
+
+ Additional options:
+
+ `python3`
Use Python 3 lexer for code. Default is ``True``.
-
- .. versionadded:: 1.0
+
+ .. versionadded:: 1.0
.. versionchanged:: 2.5
Now defaults to ``True``.
- """
- name = 'Python console session'
- aliases = ['pycon']
- mimetypes = ['text/x-python-doctest']
-
- def __init__(self, **options):
+ """
+ name = 'Python console session'
+ aliases = ['pycon']
+ mimetypes = ['text/x-python-doctest']
+
+ def __init__(self, **options):
self.python3 = get_bool_opt(options, 'python3', True)
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- if self.python3:
- pylexer = PythonLexer(**self.options)
- tblexer = PythonTracebackLexer(**self.options)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ if self.python3:
+ pylexer = PythonLexer(**self.options)
+ tblexer = PythonTracebackLexer(**self.options)
else:
pylexer = Python2Lexer(**self.options)
tblexer = Python2TracebackLexer(**self.options)
-
- curcode = ''
- insertions = []
- curtb = ''
- tbindex = 0
- tb = 0
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith(u'>>> ') or line.startswith(u'... '):
- tb = 0
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:4])]))
- curcode += line[4:]
- elif line.rstrip() == u'...' and not tb:
- # only a new >>> prompt can end an exception block
- # otherwise an ellipsis in place of the traceback frames
- # will be mishandled
- insertions.append((len(curcode),
- [(0, Generic.Prompt, u'...')]))
- curcode += line[3:]
- else:
- if curcode:
- for item in do_insertions(
- insertions, pylexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if (line.startswith(u'Traceback (most recent call last):') or
- re.match(u' File "[^"]+", line \\d+\\n$', line)):
- tb = 1
- curtb = line
- tbindex = match.start()
- elif line == 'KeyboardInterrupt\n':
- yield match.start(), Name.Class, line
- elif tb:
- curtb += line
- if not (line.startswith(' ') or line.strip() == u'...'):
- tb = 0
- for i, t, v in tblexer.get_tokens_unprocessed(curtb):
- yield tbindex+i, t, v
- curtb = ''
- else:
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- pylexer.get_tokens_unprocessed(curcode)):
- yield item
- if curtb:
- for i, t, v in tblexer.get_tokens_unprocessed(curtb):
- yield tbindex+i, t, v
-
-
-class PythonTracebackLexer(RegexLexer):
- """
+
+ curcode = ''
+ insertions = []
+ curtb = ''
+ tbindex = 0
+ tb = 0
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith(u'>>> ') or line.startswith(u'... '):
+ tb = 0
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:4])]))
+ curcode += line[4:]
+ elif line.rstrip() == u'...' and not tb:
+ # only a new >>> prompt can end an exception block
+ # otherwise an ellipsis in place of the traceback frames
+ # will be mishandled
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, u'...')]))
+ curcode += line[3:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if (line.startswith(u'Traceback (most recent call last):') or
+ re.match(u' File "[^"]+", line \\d+\\n$', line)):
+ tb = 1
+ curtb = line
+ tbindex = match.start()
+ elif line == 'KeyboardInterrupt\n':
+ yield match.start(), Name.Class, line
+ elif tb:
+ curtb += line
+ if not (line.startswith(' ') or line.strip() == u'...'):
+ tb = 0
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+ curtb = ''
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ if curtb:
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+
+
+class PythonTracebackLexer(RegexLexer):
+ """
For Python 3.x tracebacks, with support for chained exceptions.
-
+
.. versionadded:: 1.0
.. versionchanged:: 2.5
This is now the default ``PythonTracebackLexer``. It is still available
as the alias ``Python3TracebackLexer``.
- """
-
- name = 'Python Traceback'
+ """
+
+ name = 'Python Traceback'
aliases = ['pytb', 'py3tb']
filenames = ['*.pytb', '*.py3tb']
mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
-
- tokens = {
- 'root': [
+
+ tokens = {
+ 'root': [
(r'\n', Text),
(r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
(r'^During handling of the above exception, another '
r'exception occurred:\n\n', Generic.Traceback),
(r'^The above exception was the direct cause of the '
r'following exception:\n\n', Generic.Traceback),
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
- bygroups(Text, using(PythonLexer), Text)),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_]\w*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
- }
-
-
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^.*\n', Other),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
+ bygroups(Text, using(PythonLexer), Text)),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Text), '#pop')
+ ],
+ }
+
+
Python3TracebackLexer = PythonTracebackLexer
class Python2TracebackLexer(RegexLexer):
- """
+ """
For Python tracebacks.
-
+
.. versionadded:: 0.7
.. versionchanged:: 2.5
This class has been renamed from ``PythonTracebackLexer``.
``PythonTracebackLexer`` now refers to the Python 3 variant.
- """
-
+ """
+
name = 'Python 2.x Traceback'
aliases = ['py2tb']
filenames = ['*.py2tb']
mimetypes = ['text/x-python2-traceback']
-
- tokens = {
- 'root': [
+
+ tokens = {
+ 'root': [
# Cover both (most recent call last) and (innermost last)
# The optional ^C allows us to catch keyboard interrupt signals.
(r'^(\^C)?(Traceback.*\n)',
bygroups(Text, Generic.Traceback), 'intb'),
# SyntaxError starts with this.
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
(r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
bygroups(Text, using(Python2Lexer), Text)),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_]\w*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
- }
-
-
-class CythonLexer(RegexLexer):
- """
- For Pyrex and `Cython <http://cython.org>`_ source code.
-
- .. versionadded:: 1.1
- """
-
- name = 'Cython'
- aliases = ['cython', 'pyx', 'pyrex']
- filenames = ['*.pyx', '*.pxd', '*.pxi']
- mimetypes = ['text/x-cython', 'application/x-cython']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'(<)([a-zA-Z0-9.?]+)(>)',
- bygroups(Punctuation, Keyword.Type, Punctuation)),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
- (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
- bygroups(Keyword, Number.Integer, Operator, Name, Operator,
- Name, Punctuation)),
- include('keywords'),
- (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
- # (should actually start a block with only cdefs)
- (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
- (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
- include('builtins'),
- include('backtick'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
- ('[uU]?"""', String, combined('stringescape', 'tdqs')),
- ("[uU]?'''", String, combined('stringescape', 'tsqs')),
- ('[uU]?"', String, combined('stringescape', 'dqs')),
- ("[uU]?'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (words((
- 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
- 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
- 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
- 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
- Keyword),
- (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
- ],
- 'builtins': [
- (words((
- '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
- 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
- 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
- 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
- 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
- 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
- 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
- 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property',
- 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
- 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
- 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
- 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
- r')\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
- 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
- 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
- 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
- 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
- 'OSError', 'OverflowError', 'OverflowWarning',
- 'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError',
- 'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError',
- 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
- 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
- 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@\w+', Name.Decorator),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Text), '#pop')
+ ],
+ }
+
+
+class CythonLexer(RegexLexer):
+ """
+ For Pyrex and `Cython <http://cython.org>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Cython'
+ aliases = ['cython', 'pyx', 'pyrex']
+ filenames = ['*.pyx', '*.pxd', '*.pxi']
+ mimetypes = ['text/x-cython', 'application/x-cython']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'(<)([a-zA-Z0-9.?]+)(>)',
+ bygroups(Punctuation, Keyword.Type, Punctuation)),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
+ (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
+ bygroups(Keyword, Number.Integer, Operator, Name, Operator,
+ Name, Punctuation)),
+ include('keywords'),
+ (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
+ # (should actually start a block with only cdefs)
+ (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
+ (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
+ include('builtins'),
+ include('backtick'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+ ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[uU]?"', String, combined('stringescape', 'dqs')),
+ ("[uU]?'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
+ 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
+ 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
+ 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
+ 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
+ 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
+ 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
+ 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
+ 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property',
+ 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
+ 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
+ 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
+ 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
+ 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
+ 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
+ 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
+ 'OSError', 'OverflowError', 'OverflowWarning',
+ 'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError',
+ 'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError',
+ 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
+ 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
+ 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@\w+', Name.Decorator),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
+ ],
+ 'funcname': [
(r'[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'cdef': [
- (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
- (r'(struct|enum|union|class)\b', Keyword),
- (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
- bygroups(Name.Function, Text), '#pop'),
- (r'([a-zA-Z_]\w*)(\s*)(,)',
- bygroups(Name.Function, Text, Punctuation)),
- (r'from\b', Keyword, '#pop'),
- (r'as\b', Keyword),
- (r':', Punctuation, '#pop'),
- (r'(?=["\'])', Text, '#pop'),
- (r'[a-zA-Z_]\w*', Keyword.Type),
- (r'.', Text),
- ],
- 'classname': [
+ ],
+ 'cdef': [
+ (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
+ (r'(struct|enum|union|class)\b', Keyword),
+ (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([a-zA-Z_]\w*)(\s*)(,)',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'from\b', Keyword, '#pop'),
+ (r'as\b', Keyword),
+ (r':', Punctuation, '#pop'),
+ (r'(?=["\'])', Text, '#pop'),
+ (r'[a-zA-Z_]\w*', Keyword.Type),
+ (r'.', Text),
+ ],
+ 'classname': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ ],
+ 'import': [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
-
-class DgLexer(RegexLexer):
- """
- Lexer for `dg <http://pyos.github.com/dg>`_,
- a functional and object-oriented programming language
- running on the CPython 3 VM.
-
- .. versionadded:: 1.6
- """
- name = 'dg'
- aliases = ['dg']
- filenames = ['*.dg']
- mimetypes = ['text/x-dg']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment.Single),
-
- (r'(?i)0b[01]+', Number.Bin),
- (r'(?i)0o[0-7]+', Number.Oct),
- (r'(?i)0x[0-9a-f]+', Number.Hex),
- (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
- (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
- (r'(?i)[+-]?[0-9]+j?', Number.Integer),
-
- (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
- (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
- (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
- (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
-
- (r"`\w+'*`", Operator),
- (r'\b(and|in|is|or|where)\b', Operator.Word),
- (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
-
- (words((
- 'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
- 'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
+
+
+class DgLexer(RegexLexer):
+ """
+ Lexer for `dg <http://pyos.github.com/dg>`_,
+ a functional and object-oriented programming language
+ running on the CPython 3 VM.
+
+ .. versionadded:: 1.6
+ """
+ name = 'dg'
+ aliases = ['dg']
+ filenames = ['*.dg']
+ mimetypes = ['text/x-dg']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment.Single),
+
+ (r'(?i)0b[01]+', Number.Bin),
+ (r'(?i)0o[0-7]+', Number.Oct),
+ (r'(?i)0x[0-9a-f]+', Number.Hex),
+ (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+j?', Number.Integer),
+
+ (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
+ (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
+ (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
+ (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
+
+ (r"`\w+'*`", Operator),
+ (r'\b(and|in|is|or|where)\b', Operator.Word),
+ (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
+
+ (words((
+ 'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
+ 'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
'property', 'range', 'set', 'set\'', 'slice', 'staticmethod', 'str',
'super', 'tuple', 'tuple\'', 'type'),
prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
- Name.Builtin),
- (words((
- '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
- 'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
+ Name.Builtin),
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
+ 'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
'eval', 'exhaust', 'filter', 'flip', 'foldl1?', 'format', 'fst',
'getattr', 'globals', 'hasattr', 'hash', 'head', 'hex', 'id', 'init',
'input', 'isinstance', 'issubclass', 'iter', 'iterate', 'last', 'len',
@@ -919,144 +919,144 @@ class DgLexer(RegexLexer):
'print', 'repr', 'reversed', 'round', 'setattr', 'scanl1?', 'snd',
'sorted', 'sum', 'tail', 'take', 'takewhile', 'vars', 'zip'),
prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
- Name.Builtin),
- (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
- Name.Builtin.Pseudo),
-
- (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
- Name.Exception),
- (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
- r"SystemExit)(?!['\w])", Name.Exception),
-
- (r"(?<![\w.])(except|finally|for|if|import|not|otherwise|raise|"
- r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
-
- (r"[A-Z_]+'*(?!['\w])", Name),
- (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
- (r"\w+'*", Name),
-
- (r'[()]', Punctuation),
- (r'.', Error),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'string': [
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ Name.Builtin),
+ (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
+ Name.Builtin.Pseudo),
+
+ (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
+ Name.Exception),
+ (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
+ r"SystemExit)(?!['\w])", Name.Exception),
+
+ (r"(?<![\w.])(except|finally|for|if|import|not|otherwise|raise|"
+ r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
+
+ (r"[A-Z_]+'*(?!['\w])", Name),
+ (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
+ (r"\w+'*", Name),
+
+ (r'[()]', Punctuation),
+ (r'.', Error),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'string': [
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String),
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop')
- ],
- 'sqs': [
- (r"'", String, '#pop')
- ],
- 'tdqs': [
- (r'"""', String, '#pop')
- ],
- 'tsqs': [
- (r"'''", String, '#pop')
- ],
- }
-
-
-class NumPyLexer(PythonLexer):
- """
- A Python lexer recognizing Numerical Python builtins.
-
- .. versionadded:: 0.10
- """
-
- name = 'NumPy'
- aliases = ['numpy']
-
- # override the mimetypes to not inherit them from python
- mimetypes = []
- filenames = []
-
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String),
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop')
+ ],
+ }
+
+
+class NumPyLexer(PythonLexer):
+ """
+ A Python lexer recognizing Numerical Python builtins.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'NumPy'
+ aliases = ['numpy']
+
+ # override the mimetypes to not inherit them from python
+ mimetypes = []
+ filenames = []
+
EXTRA_KEYWORDS = {
- 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
- 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
- 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
- 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
- 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
- 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
- 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
- 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
- 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
- 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
- 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
- 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
- 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
- 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
- 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
- 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
- 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
- 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
- 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
- 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
- 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
- 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
- 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
- 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
- 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
- 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
- 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
- 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
- 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
- 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
- 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
- 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
- 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
- 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
- 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
- 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
- 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
- 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
- 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
- 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
- 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
- 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
- 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
- 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
- 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
- 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
- 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
- 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
- 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
- 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
- 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
- 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
- 'set_numeric_ops', 'set_printoptions', 'set_string_function',
- 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
- 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
- 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
- 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
- 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
- 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
- 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
- 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
- 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
- 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
- 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
+ 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
+ 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
+ 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
+ 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
+ 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
+ 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
+ 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
+ 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
+ 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
+ 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
+ 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
+ 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
+ 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
+ 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
+ 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
+ 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
+ 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
+ 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
+ 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
+ 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
+ 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
+ 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
+ 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
+ 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
+ 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
+ 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
+ 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
+ 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
+ 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
+ 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
+ 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
+ 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
+ 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
+ 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
+ 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
+ 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
+ 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
+ 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
+ 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
+ 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
+ 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
+ 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
+ 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
+ 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
+ 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
+ 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
+ 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
+ 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
+ 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
+ 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
+ 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
+ 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
+ 'set_numeric_ops', 'set_printoptions', 'set_string_function',
+ 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
+ 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
+ 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
+ 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
+ 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
+ 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
+ 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
+ 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
+ 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
+ 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
+ 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- PythonLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Keyword.Pseudo, value
- else:
- yield index, token, value
-
- def analyse_text(text):
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ PythonLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Keyword.Pseudo, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
- 'import ' in text[:1000]) \
- and ('import numpy' in text or 'from numpy import' in text)
+ 'import ' in text[:1000]) \
+ and ('import numpy' in text or 'from numpy import' in text)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/qvt.py b/contrib/python/Pygments/py2/pygments/lexers/qvt.py
index b1c1495c0f..50ecf08f53 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/qvt.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/qvt.py
@@ -1,106 +1,106 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.qvt
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for QVT Operational language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.qvt
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for QVT Operational language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from pygments.lexer import RegexLexer, bygroups, include, combined, default, \
words
-from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
- Name, String, Number
-
-__all__ = ['QVToLexer']
-
-
-class QVToLexer(RegexLexer):
+from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
+ Name, String, Number
+
+__all__ = ['QVToLexer']
+
+
+class QVToLexer(RegexLexer):
u"""
- For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
-
- Reference for implementing this: «Meta Object Facility (MOF) 2.0
- Query/View/Transformation Specification», Version 1.1 - January 2011
- (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
- particular.
-
-    Notable token assignments:
-
- - Name.Class is assigned to the identifier following any of the following
- keywords: metamodel, class, exception, primitive, enum, transformation
- or library
-
- - Name.Function is assigned to the names of mappings and queries
-
- - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
- 'self' and 'result'.
- """
- # With obvious borrowings & inspiration from the Java, Python and C lexers
-
- name = 'QVTO'
- aliases = ['qvto', 'qvt']
- filenames = ['*.qvto']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'(--|//)(\s*)(directive:)?(.*)$',
- bygroups(Comment, Comment, Comment.Preproc, Comment)),
- # Uncomment the following if you want to distinguish between
- # '/*' and '/**', à la javadoc
+ For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
+
+ Reference for implementing this: «Meta Object Facility (MOF) 2.0
+ Query/View/Transformation Specification», Version 1.1 - January 2011
+ (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
+ particular.
+
+    Notable token assignments:
+
+ - Name.Class is assigned to the identifier following any of the following
+ keywords: metamodel, class, exception, primitive, enum, transformation
+ or library
+
+ - Name.Function is assigned to the names of mappings and queries
+
+ - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
+ 'self' and 'result'.
+ """
+ # With obvious borrowings & inspiration from the Java, Python and C lexers
+
+ name = 'QVTO'
+ aliases = ['qvto', 'qvt']
+ filenames = ['*.qvto']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'(--|//)(\s*)(directive:)?(.*)$',
+ bygroups(Comment, Comment, Comment.Preproc, Comment)),
+ # Uncomment the following if you want to distinguish between
+ # '/*' and '/**', à la javadoc
# (r'/[*]{2}(.|\n)*?[*]/', Comment.Multiline),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'\\\n', Text),
- (r'(and|not|or|xor|##?)\b', Operator.Word),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'\\\n', Text),
+ (r'(and|not|or|xor|##?)\b', Operator.Word),
(r'(:{1,2}=|[-+]=)\b', Operator.Word),
(r'(@|<<|>>)\b', Keyword), # stereotypes
(r'!=|<>|==|=|!->|->|>=|<=|[.]{3}|[+/*%=<>&|.~]', Operator),
- (r'[]{}:(),;[]', Punctuation),
- (r'(true|false|unlimited|null)\b', Keyword.Constant),
- (r'(this|self|result)\b', Name.Builtin.Pseudo),
- (r'(var)\b', Keyword.Declaration),
- (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'(true|false|unlimited|null)\b', Keyword.Constant),
+ (r'(this|self|result)\b', Name.Builtin.Pseudo),
+ (r'(var)\b', Keyword.Declaration),
+ (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
(r'(metamodel|class|exception|primitive|enum|transformation|'
r'library)(\s+)(\w+)',
- bygroups(Keyword.Word, Text, Name.Class)),
+ bygroups(Keyword.Word, Text, Name.Class)),
(r'(exception)(\s+)(\w+)',
bygroups(Keyword.Word, Text, Name.Exception)),
- (r'(main)\b', Name.Function),
+ (r'(main)\b', Name.Function),
(r'(mapping|helper|query)(\s+)',
bygroups(Keyword.Declaration, Text), 'operation'),
- (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
- (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
- Keyword.Type),
- include('keywords'),
- ('"', String, combined('stringescape', 'dqs')),
- ("'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
+ (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
+ (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
+ Keyword.Type),
+ include('keywords'),
+ ('"', String, combined('stringescape', 'dqs')),
+ ("'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
# (r'([a-zA-Z_]\w*)(::)([a-zA-Z_]\w*)',
- # bygroups(Text, Text, Text)),
+ # bygroups(Text, Text, Text)),
],
-
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
+
+ 'fromimport': [
+ (r'(?:[ \t]|\\\n)+', Text),
(r'[a-zA-Z_][\w.]*', Name.Namespace),
default('#pop'),
],
-
- 'operation': [
- (r'::', Text),
+
+ 'operation': [
+ (r'::', Text),
(r'(.*::)([a-zA-Z_]\w*)([ \t]*)(\()',
bygroups(Text, Name.Function, Text, Punctuation), '#pop')
],
-
- 'assert': [
- (r'(warning|error|fatal)\b', Keyword, '#pop'),
+
+ 'assert': [
+ (r'(warning|error|fatal)\b', Keyword, '#pop'),
default('#pop'), # all else: go back
],
-
- 'keywords': [
+
+ 'keywords': [
(words((
'abstract', 'access', 'any', 'assert', 'blackbox', 'break',
'case', 'collect', 'collectNested', 'collectOne', 'collectselect',
@@ -118,35 +118,35 @@ class QVToLexer(RegexLexer):
'sortedBy', 'static', 'switch', 'tag', 'then', 'try', 'typedef',
'unlimited', 'uses', 'when', 'where', 'while', 'with', 'xcollect',
'xmap', 'xselect'), suffix=r'\b'), Keyword),
+ ],
+
+ # There is no need to distinguish between String.Single and
+ # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
+ 'strings': [
+ (r'[^\\\'"\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
],
-
- # There is no need to distinguish between String.Single and
- # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
- 'strings': [
- (r'[^\\\'"\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- ],
- 'stringescape': [
- (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
- ],
+ 'stringescape': [
+ (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
+ ],
'dqs': [ # double-quoted string
- (r'"', String, '#pop'),
- (r'\\\\|\\"', String.Escape),
- include('strings')
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"', String.Escape),
+ include('strings')
],
'sqs': [ # single-quoted string
- (r"'", String, '#pop'),
- (r"\\\\|\\'", String.Escape),
- include('strings')
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'", String.Escape),
+ include('strings')
],
- 'name': [
+ 'name': [
(r'[a-zA-Z_]\w*', Name),
],
- # numbers: excerpt taken from the python lexer
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer)
- ],
+ # numbers: excerpt taken from the python lexer
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
}
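
The QVToLexer above is a standard Pygments RegexLexer: 'tokens' maps state names to lists of (regex, token[, next-state]) rules, with combined('stringescape', 'dqs') building an anonymous state for string parsing. A minimal usage sketch, assuming Pygments is installed; the QVTO snippet itself is invented purely for illustration:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # 'qvto' is one of the aliases declared above. In the output, 'self' and
    # 'result' should be tagged Name.Builtin.Pseudo, and 'mapping' should
    # switch the lexer into the 'operation' state to tag the mapping name.
    code = "mapping Author::toPerson() : Person {\n    result.name := self.name;\n}\n"
    print(highlight(code, get_lexer_by_name('qvto'), TerminalFormatter()))
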
diff --git a/contrib/python/Pygments/py2/pygments/lexers/r.py b/contrib/python/Pygments/py2/pygments/lexers/r.py
index 8e7445995e..7ff7ada378 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/r.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/r.py
@@ -1,193 +1,193 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.r
- ~~~~~~~~~~~~~~~~~
-
- Lexers for the R/S languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.r
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for the R/S languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import Lexer, RegexLexer, include, do_insertions, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class RConsoleLexer(Lexer):
- """
- For R console transcripts or R CMD BATCH output files.
- """
-
- name = 'RConsole'
- aliases = ['rconsole', 'rout']
- filenames = ['*.Rout']
-
- def get_tokens_unprocessed(self, text):
- slexer = SLexer(**self.options)
-
- current_code_block = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('>') or line.startswith('+'):
- # Colorize the prompt as such,
- # then put rest of line into current_code_block
- insertions.append((len(current_code_block),
- [(0, Generic.Prompt, line[:2])]))
- current_code_block += line[2:]
- else:
- # We have reached a non-prompt line!
- # If we have stored prompt lines, need to process them first.
- if current_code_block:
- # Weave together the prompts and highlight code.
- for item in do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block)):
- yield item
- # Reset vars for next code block.
- current_code_block = ''
- insertions = []
- # Now process the actual line itself, this is output from R.
- yield match.start(), Generic.Output, line
-
- # If we happen to end on a code block with nothing after it, need to
- # process the last code block. This is neither elegant nor DRY so
- # should be changed.
- if current_code_block:
- for item in do_insertions(
- insertions, slexer.get_tokens_unprocessed(current_code_block)):
- yield item
-
-
-class SLexer(RegexLexer):
- """
- For S, S-plus, and R source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'S'
- aliases = ['splus', 's', 'r']
- filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
- mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
- 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
-
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class RConsoleLexer(Lexer):
+ """
+ For R console transcripts or R CMD BATCH output files.
+ """
+
+ name = 'RConsole'
+ aliases = ['rconsole', 'rout']
+ filenames = ['*.Rout']
+
+ def get_tokens_unprocessed(self, text):
+ slexer = SLexer(**self.options)
+
+ current_code_block = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith('>') or line.startswith('+'):
+ # Colorize the prompt as such,
+ # then put rest of line into current_code_block
+ insertions.append((len(current_code_block),
+ [(0, Generic.Prompt, line[:2])]))
+ current_code_block += line[2:]
+ else:
+ # We have reached a non-prompt line!
+ # If we have stored prompt lines, need to process them first.
+ if current_code_block:
+ # Weave together the prompts and highlight code.
+ for item in do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block)):
+ yield item
+ # Reset vars for next code block.
+ current_code_block = ''
+ insertions = []
+ # Now process the actual line itself, this is output from R.
+ yield match.start(), Generic.Output, line
+
+ # If we happen to end on a code block with nothing after it, need to
+ # process the last code block. This is neither elegant nor DRY so
+ # should be changed.
+ if current_code_block:
+ for item in do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block)):
+ yield item
+
+
+class SLexer(RegexLexer):
+ """
+ For S, S-plus, and R source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'S'
+ aliases = ['splus', 's', 'r']
+ filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
+ mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
+ 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
+
valid_name = r'(?:`[^`\\]*(?:\\.[^`\\]*)*`)|(?:(?:[a-zA-z]|[_.][^0-9])[\w_.]*)'
- tokens = {
- 'comments': [
- (r'#.*$', Comment.Single),
- ],
- 'valid_name': [
+ tokens = {
+ 'comments': [
+ (r'#.*$', Comment.Single),
+ ],
+ 'valid_name': [
(valid_name, Name),
- ],
- 'punctuation': [
- (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
- ],
- 'keywords': [
- (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
- r'(?![\w.])',
- Keyword.Reserved),
- ],
- 'operators': [
- (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
+ ],
+ 'punctuation': [
+ (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
+ ],
+ 'keywords': [
+ (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
+ r'(?![\w.])',
+ Keyword.Reserved),
+ ],
+ 'operators': [
+ (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
(r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
- ],
- 'builtin_symbols': [
- (r'(NULL|NA(_(integer|real|complex|character)_)?|'
- r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
- r'(?![\w.])',
- Keyword.Constant),
- (r'(T|F)\b', Name.Builtin.Pseudo),
- ],
- 'numbers': [
- # hex number
- (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
- # decimal number
- (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
- Number),
- ],
- 'statements': [
- include('comments'),
- # whitespaces
- (r'\s+', Text),
- (r'\'', String, 'string_squote'),
- (r'\"', String, 'string_dquote'),
- include('builtin_symbols'),
+ ],
+ 'builtin_symbols': [
+ (r'(NULL|NA(_(integer|real|complex|character)_)?|'
+ r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
+ r'(?![\w.])',
+ Keyword.Constant),
+ (r'(T|F)\b', Name.Builtin.Pseudo),
+ ],
+ 'numbers': [
+ # hex number
+ (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
+ # decimal number
+ (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
+ Number),
+ ],
+ 'statements': [
+ include('comments'),
+ # whitespaces
+ (r'\s+', Text),
+ (r'\'', String, 'string_squote'),
+ (r'\"', String, 'string_dquote'),
+ include('builtin_symbols'),
include('valid_name'),
- include('numbers'),
- include('keywords'),
- include('punctuation'),
- include('operators'),
- ],
- 'root': [
+ include('numbers'),
+ include('keywords'),
+ include('punctuation'),
+ include('operators'),
+ ],
+ 'root': [
# calls:
(r'(%s)\s*(?=\()' % valid_name, Name.Function),
- include('statements'),
- # blocks:
- (r'\{|\}', Punctuation),
- # (r'\{', Punctuation, 'block'),
- (r'.', Text),
- ],
- # 'block': [
- # include('statements'),
- # ('\{', Punctuation, '#push'),
- # ('\}', Punctuation, '#pop')
- # ],
- 'string_squote': [
- (r'([^\'\\]|\\.)*\'', String, '#pop'),
- ],
- 'string_dquote': [
- (r'([^"\\]|\\.)*"', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
- return 0.11
-
-
-class RdLexer(RegexLexer):
- """
- Pygments Lexer for R documentation (Rd) files
-
- This is a very minimal implementation, highlighting little more
- than the macros. A description of Rd syntax is found in `Writing R
- Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
+ include('statements'),
+ # blocks:
+ (r'\{|\}', Punctuation),
+ # (r'\{', Punctuation, 'block'),
+ (r'.', Text),
+ ],
+ # 'block': [
+ # include('statements'),
+ # ('\{', Punctuation, '#push'),
+ # ('\}', Punctuation, '#pop')
+ # ],
+ 'string_squote': [
+ (r'([^\'\\]|\\.)*\'', String, '#pop'),
+ ],
+ 'string_dquote': [
+ (r'([^"\\]|\\.)*"', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
+ return 0.11
+
+
+class RdLexer(RegexLexer):
+ """
+ Pygments Lexer for R documentation (Rd) files
+
+ This is a very minimal implementation, highlighting little more
+ than the macros. A description of Rd syntax is found in `Writing R
+ Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
-
- .. versionadded:: 1.6
- """
- name = 'Rd'
- aliases = ['rd']
- filenames = ['*.Rd']
- mimetypes = ['text/x-r-doc']
-
- # To account for verbatim / LaTeX-like / and R-like areas
- # would require parsing.
- tokens = {
- 'root': [
- # catch escaped brackets and percent sign
- (r'\\[\\{}%]', String.Escape),
- # comments
- (r'%.*$', Comment),
- # special macros with no arguments
- (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
- # macros
- (r'\\[a-zA-Z]+\b', Keyword),
- # special preprocessor macros
- (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
- # non-escaped brackets
- (r'[{}]', Name.Builtin),
- # everything else
- (r'[^\\%\n{}]+', Text),
- (r'.', Text),
- ]
- }
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rd'
+ aliases = ['rd']
+ filenames = ['*.Rd']
+ mimetypes = ['text/x-r-doc']
+
+ # To account for verbatim / LaTeX-like / and R-like areas
+ # would require parsing.
+ tokens = {
+ 'root': [
+ # catch escaped brackets and percent sign
+ (r'\\[\\{}%]', String.Escape),
+ # comments
+ (r'%.*$', Comment),
+ # special macros with no arguments
+ (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
+ # macros
+ (r'\\[a-zA-Z]+\b', Keyword),
+ # special preprocessor macros
+ (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
+ # non-escaped brackets
+ (r'[{}]', Name.Builtin),
+ # everything else
+ (r'[^\\%\n{}]+', Text),
+ (r'.', Text),
+ ]
+ }
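
RConsoleLexer above works by buffering consecutive prompt lines, lexing the buffered code with SLexer, and letting do_insertions splice the Generic.Prompt tokens back in at the recorded offsets; non-prompt lines fall through as Generic.Output. A small sketch of how that looks from the caller's side (the transcript is invented for illustration):

    from pygments.lexers import RConsoleLexer

    transcript = (
        "> x <- c(1, 2, 3)\n"
        "> mean(x)\n"
        "[1] 2\n"
    )

    # Prompt markers come out as Generic.Prompt, the buffered R code is
    # tokenized by SLexer, and the "[1] 2" line is emitted as Generic.Output.
    for pos, token, value in RConsoleLexer().get_tokens_unprocessed(transcript):
        print(pos, token, repr(value))
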
diff --git a/contrib/python/Pygments/py2/pygments/lexers/rdf.py b/contrib/python/Pygments/py2/pygments/lexers/rdf.py
index 5927a686d4..37a5253268 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/rdf.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/rdf.py
@@ -1,34 +1,34 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.rdf
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for semantic web and RDF query languages and markup.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rdf
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for semantic web and RDF query languages and markup.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default
-from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
- Whitespace, Name, Literal, Comment, Text
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default
+from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
+ Whitespace, Name, Literal, Comment, Text
+
__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
-
-
-class SparqlLexer(RegexLexer):
- """
- Lexer for `SPARQL <http://www.w3.org/TR/rdf-sparql-query/>`_ query language.
-
- .. versionadded:: 2.0
- """
- name = 'SPARQL'
- aliases = ['sparql']
- filenames = ['*.rq', '*.sparql']
- mimetypes = ['application/sparql-query']
-
+
+
+class SparqlLexer(RegexLexer):
+ """
+ Lexer for `SPARQL <http://www.w3.org/TR/rdf-sparql-query/>`_ query language.
+
+ .. versionadded:: 2.0
+ """
+ name = 'SPARQL'
+ aliases = ['sparql']
+ filenames = ['*.rq', '*.sparql']
+ mimetypes = ['application/sparql-query']
+
# character group definitions ::
PN_CHARS_BASE_GRP = (u'a-zA-Z'
@@ -57,217 +57,217 @@ class SparqlLexer(RegexLexer):
PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
- # terminal productions ::
-
+ # terminal productions ::
+
PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
+
PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
+
PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
+
HEX = '[' + HEX_GRP + ']'
-
+
PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
-
+
+ IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
+
BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
'.]*' + PN_CHARS + ')?'
-
+
PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
+
VARNAME = u'[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
u'0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
+
+ PERCENT = '%' + HEX + HEX
+
+ PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+ PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
'(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
PN_CHARS_GRP + ':]|' + PLX + '))?')
-
- EXPONENT = r'[eE][+-]?\d+'
-
- # Lexer token definitions ::
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # keywords ::
+
+ EXPONENT = r'[eE][+-]?\d+'
+
+ # Lexer token definitions ::
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # keywords ::
(r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
- r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
- r'offset|bindings|load|clear|drop|create|add|move|copy|'
- r'insert\s+data|delete\s+data|delete\s+where|delete|insert|'
- r'using\s+named|using|graph|default|named|all|optional|service|'
- r'silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
- (r'(a)\b', Keyword),
- # IRIs ::
- ('(' + IRIREF + ')', Name.Label),
- # blank nodes ::
- ('(' + BLANK_NODE_LABEL + ')', Name.Label),
- # # variables ::
- ('[?$]' + VARNAME, Name.Variable),
- # prefixed names ::
+ r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
+ r'offset|bindings|load|clear|drop|create|add|move|copy|'
+ r'insert\s+data|delete\s+data|delete\s+where|delete|insert|'
+ r'using\s+named|using|graph|default|named|all|optional|service|'
+ r'silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
+ (r'(a)\b', Keyword),
+ # IRIs ::
+ ('(' + IRIREF + ')', Name.Label),
+ # blank nodes ::
+ ('(' + BLANK_NODE_LABEL + ')', Name.Label),
+ # # variables ::
+ ('[?$]' + VARNAME, Name.Variable),
+ # prefixed names ::
(r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
- # function names ::
+ bygroups(Name.Namespace, Punctuation, Name.Tag)),
+ # function names ::
(r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
- r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
- r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
- r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|'
- r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
- r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
- r'count|sum|min|max|avg|sample|group_concat|separator)\b',
- Name.Function),
- # boolean literals ::
- (r'(true|false)', Keyword.Constant),
- # double literals ::
+ r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
+ r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
+ r'hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|'
+ r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
+ r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
+ r'count|sum|min|max|avg|sample|group_concat|separator)\b',
+ Name.Function),
+ # boolean literals ::
+ (r'(true|false)', Keyword.Constant),
+ # double literals ::
(r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
- # decimal literals ::
- (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
- # integer literals ::
- (r'[+\-]?\d+', Number.Integer),
- # operators ::
- (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
- # punctuation characters ::
- (r'[(){}.;,:^\[\]]', Punctuation),
- # line comments ::
- (r'#[^\n]*', Comment),
- # strings ::
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String.Escape, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (r'u' + HEX + '{4}', String.Escape, '#pop'),
- (r'U' + HEX + '{8}', String.Escape, '#pop'),
- (r'.', String.Escape, '#pop'),
- ],
- 'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Name.Function), '#pop:2'),
- (r'\^\^', Operator, '#pop:2'),
- default('#pop:2'),
- ],
- }
-
-
-class TurtleLexer(RegexLexer):
- """
- Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
-
- .. versionadded:: 2.1
- """
- name = 'Turtle'
- aliases = ['turtle']
- filenames = ['*.ttl']
- mimetypes = ['text/turtle', 'application/x-turtle']
-
- flags = re.IGNORECASE
-
- patterns = {
+ # decimal literals ::
+ (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
+ # integer literals ::
+ (r'[+\-]?\d+', Number.Integer),
+ # operators ::
+ (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
+ # punctuation characters ::
+ (r'[(){}.;,:^\[\]]', Punctuation),
+ # line comments ::
+ (r'#[^\n]*', Comment),
+ # strings ::
+ (r'"""', String, 'triple-double-quoted-string'),
+ (r'"', String, 'single-double-quoted-string'),
+ (r"'''", String, 'triple-single-quoted-string'),
+ (r"'", String, 'single-single-quoted-string'),
+ ],
+ 'triple-double-quoted-string': [
+ (r'"""', String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-double-quoted-string': [
+ (r'"', String, 'end-of-string'),
+ (r'[^"\\\n]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'triple-single-quoted-string': [
+ (r"'''", String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String.Escape, 'string-escape'),
+ ],
+ 'single-single-quoted-string': [
+ (r"'", String, 'end-of-string'),
+ (r"[^'\\\n]+", String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'string-escape': [
+ (r'u' + HEX + '{4}', String.Escape, '#pop'),
+ (r'U' + HEX + '{8}', String.Escape, '#pop'),
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'end-of-string': [
+ (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+ bygroups(Operator, Name.Function), '#pop:2'),
+ (r'\^\^', Operator, '#pop:2'),
+ default('#pop:2'),
+ ],
+ }
+
+
+class TurtleLexer(RegexLexer):
+ """
+ Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Turtle'
+ aliases = ['turtle']
+ filenames = ['*.ttl']
+ mimetypes = ['text/turtle', 'application/x-turtle']
+
+ flags = re.IGNORECASE
+
+ patterns = {
'PNAME_NS': r'((?:[a-z][\w-]*)?\:)', # Simplified character range
- 'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
- }
-
- # PNAME_NS PN_LOCAL (with simplified character range)
- patterns['PrefixedName'] = r'%(PNAME_NS)s([a-z][\w-]*)' % patterns
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
-
- # Base / prefix
- (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
- Punctuation)),
- (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
- bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
- Name.Variable, Whitespace, Punctuation)),
-
- # The shorthand predicate 'a'
- (r'(?<=\s)a(?=\s)', Keyword.Type),
-
- # IRIREF
- (r'%(IRIREF)s' % patterns, Name.Variable),
-
- # PrefixedName
- (r'%(PrefixedName)s' % patterns,
- bygroups(Name.Namespace, Name.Tag)),
-
- # Comment
- (r'#[^\n]+', Comment),
-
- (r'\b(true|false)\b', Literal),
- (r'[+\-]?\d*\.\d+', Number.Float),
- (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
- (r'[+\-]?\d+', Number.Integer),
- (r'[\[\](){}.;,:^]', Punctuation),
-
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (r'.', String, '#pop'),
- ],
- 'end-of-string': [
+ 'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
+ }
+
+ # PNAME_NS PN_LOCAL (with simplified character range)
+ patterns['PrefixedName'] = r'%(PNAME_NS)s([a-z][\w-]*)' % patterns
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+
+ # Base / prefix
+ (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
+ bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
+ Punctuation)),
+ (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
+ bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
+ Name.Variable, Whitespace, Punctuation)),
+
+ # The shorthand predicate 'a'
+ (r'(?<=\s)a(?=\s)', Keyword.Type),
+
+ # IRIREF
+ (r'%(IRIREF)s' % patterns, Name.Variable),
+
+ # PrefixedName
+ (r'%(PrefixedName)s' % patterns,
+ bygroups(Name.Namespace, Name.Tag)),
+
+ # Comment
+ (r'#[^\n]+', Comment),
+
+ (r'\b(true|false)\b', Literal),
+ (r'[+\-]?\d*\.\d+', Number.Float),
+ (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
+ (r'[+\-]?\d+', Number.Integer),
+ (r'[\[\](){}.;,:^]', Punctuation),
+
+ (r'"""', String, 'triple-double-quoted-string'),
+ (r'"', String, 'single-double-quoted-string'),
+ (r"'''", String, 'triple-single-quoted-string'),
+ (r"'", String, 'single-single-quoted-string'),
+ ],
+ 'triple-double-quoted-string': [
+ (r'"""', String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-double-quoted-string': [
+ (r'"', String, 'end-of-string'),
+ (r'[^"\\\n]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'triple-single-quoted-string': [
+ (r"'''", String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-single-quoted-string': [
+ (r"'", String, 'end-of-string'),
+ (r"[^'\\\n]+", String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'string-escape': [
+ (r'.', String, '#pop'),
+ ],
+ 'end-of-string': [
(r'(@)([a-z]+(:?-[a-z0-9]+)*)',
- bygroups(Operator, Generic.Emph), '#pop:2'),
-
- (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
- (r'(\^\^)%(PrefixedName)s' % patterns,
- bygroups(Operator, Generic.Emph, Generic.Emph), '#pop:2'),
-
- default('#pop:2'),
-
- ],
- }
+ bygroups(Operator, Generic.Emph), '#pop:2'),
+
+ (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
+ (r'(\^\^)%(PrefixedName)s' % patterns,
+ bygroups(Operator, Generic.Emph, Generic.Emph), '#pop:2'),
+
+ default('#pop:2'),
+
+ ],
+ }
# Turtle and Tera Term macro files share the same file extension
# but each has a recognizable and distinct syntax.
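
SparqlLexer and TurtleLexer above assemble their terminal regexes by concatenating character-group strings (PN_CHARS_BASE_GRP, PN_CHARS_U_GRP, and so on), so each class attribute mirrors one terminal production of the grammar rather than hiding everything in a single opaque pattern. A rough standalone sketch of that composition style, using simplified groups that are not the exact Pygments definitions:

    import re

    # Simplified character groups, in the spirit of the *_GRP attributes above.
    PN_CHARS_BASE_GRP = 'a-zA-Z'
    PN_CHARS_U_GRP = PN_CHARS_BASE_GRP + '_'
    PN_CHARS_GRP = PN_CHARS_U_GRP + r'\-0-9'

    PN_CHARS = '[' + PN_CHARS_GRP + ']'
    # A prefix name: one base character, then an optional run that may contain
    # dots but must end on a non-dot character.
    PN_PREFIX = '[' + PN_CHARS_BASE_GRP + '](?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'

    assert re.fullmatch(PN_PREFIX, 'rdf')
    assert re.fullmatch(PN_PREFIX, 'my.prefix')
    assert not re.fullmatch(PN_PREFIX, 'trailing.dot.')
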
diff --git a/contrib/python/Pygments/py2/pygments/lexers/rebol.py b/contrib/python/Pygments/py2/pygments/lexers/rebol.py
index 1b3d90f574..4796f590b2 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/rebol.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/rebol.py
@@ -1,431 +1,431 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.rebol
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the REBOL and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rebol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the REBOL and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Generic, Whitespace
-
-__all__ = ['RebolLexer', 'RedLexer']
-
-
-class RebolLexer(RegexLexer):
- """
- A `REBOL <http://www.rebol.com/>`_ lexer.
-
- .. versionadded:: 1.1
- """
- name = 'REBOL'
- aliases = ['rebol']
- filenames = ['*.r', '*.r3', '*.reb']
- mimetypes = ['text/x-rebol']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(
- r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
- r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
- r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
- r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
- r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
- r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
- r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
- r'while|compress|decompress|secure|open|close|read|read-io|'
- r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
- r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
- r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
- r'browse|launch|stats|get-modes|set-modes|to-local-file|'
- r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
- r'hide|draw|show|size-text|textinfo|offset-to-caret|'
- r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
- r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
- r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
- r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
- r'rsa-encrypt)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(
- r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
- r'minimum|maximum|negate|complement|absolute|random|head|tail|'
- r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
- r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
- r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
- r'copy)$', word):
- yield match.start(), Name.Function, word
- elif re.match(
- r'(error|source|input|license|help|install|echo|Usage|with|func|'
- r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
- r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
- r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
- r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
- r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
- r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
- r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
- r'write-user|save-user|set-user-name|protect-system|parse-xml|'
- r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
- r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
- r'request-dir|center-face|do-events|net-error|decode-url|'
- r'parse-header|parse-header-date|parse-email-addrs|import-email|'
- r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
- r'find-key-face|do-face|viewtop|confine|find-window|'
- r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
- r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
- r'read-thru|load-thru|do-thru|launch-thru|load-image|'
- r'request-download|do-face-alt|set-font|set-para|get-style|'
- r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
- r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
- r'resize-face|load-stock|load-stock-block|notify|request|flash|'
- r'request-color|request-pass|request-text|request-list|'
- r'request-date|request-file|dbug|editor|link-relative-path|'
- r'emailer|parse-error)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(
- r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
- r'return|exit|break)$', word):
- yield match.start(), Name.Exception, word
- elif re.match('REBOL$', word):
- yield match.start(), Generic.Heading, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Whitespace
+
+__all__ = ['RebolLexer', 'RedLexer']
+
+
+class RebolLexer(RegexLexer):
+ """
+ A `REBOL <http://www.rebol.com/>`_ lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'REBOL'
+ aliases = ['rebol']
+ filenames = ['*.r', '*.r3', '*.reb']
+ mimetypes = ['text/x-rebol']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(
+ r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
+ r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
+ r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
+ r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
+ r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
+ r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
+ r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
+ r'while|compress|decompress|secure|open|close|read|read-io|'
+ r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
+ r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
+ r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
+ r'browse|launch|stats|get-modes|set-modes|to-local-file|'
+ r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
+ r'hide|draw|show|size-text|textinfo|offset-to-caret|'
+ r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
+ r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
+ r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
+ r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
+ r'rsa-encrypt)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(
+ r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
+ r'minimum|maximum|negate|complement|absolute|random|head|tail|'
+ r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
+ r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
+ r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
+ r'copy)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(
+ r'(error|source|input|license|help|install|echo|Usage|with|func|'
+ r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
+ r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
+ r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
+ r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
+ r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
+ r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
+ r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
+ r'write-user|save-user|set-user-name|protect-system|parse-xml|'
+ r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
+ r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
+ r'request-dir|center-face|do-events|net-error|decode-url|'
+ r'parse-header|parse-header-date|parse-email-addrs|import-email|'
+ r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
+ r'find-key-face|do-face|viewtop|confine|find-window|'
+ r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
+ r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
+ r'read-thru|load-thru|do-thru|launch-thru|load-image|'
+ r'request-download|do-face-alt|set-font|set-para|get-style|'
+ r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
+ r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
+ r'resize-face|load-stock|load-stock-block|notify|request|flash|'
+ r'request-color|request-pass|request-text|request-list|'
+ r'request-date|request-file|dbug|editor|link-relative-path|'
+ r'emailer|parse-error)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(
+ r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
+ r'return|exit|break)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match('REBOL$', word):
+ yield match.start(), Generic.Heading, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
- word):
- yield match.start(), Operator, word
+ word):
+ yield match.start(), Operator, word
elif re.match(r".*\?$", word):
- yield match.start(), Keyword, word
+ yield match.start(), Keyword, word
elif re.match(r".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'[^R]+', Comment),
- (r'REBOL\s+\[', Generic.Strong, 'script'),
- (r'R', Comment)
- ],
- 'script': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#\{[0-9a-f]*\}', Number.Hex),
- (r'2#\{', Number.Hex, 'bin2'),
- (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
- (r'"', String, 'string'),
- (r'\{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(^{")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
- r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+X\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]()]', Generic.Strong),
- (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
- (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s\{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{"\[]+', Comment),
- (r'/[^(^{")\s/[\]]*', Name.Attribute),
- (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(^{")\s]+)', Text),
- ],
- 'string': [
- (r'[^(^")]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(^{})]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[(|)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([01]\s*){8}', Number.Hex),
- (r'\}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(^")]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(^{})]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'\{', Comment, '#push'),
- (r'\}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'\{', Comment, "commentString2"),
- (r'[^(\[\]"{)]+', Comment),
- ],
- }
-
- def analyse_text(text):
- """
-        Check if the code contains a REBOL header and so is probably not R code
- """
- if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
- # The code starts with REBOL header
- return 1.0
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'REBOL\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#\{[0-9a-f]*\}', Number.Hex),
+ (r'2#\{', Number.Hex, 'bin2'),
+ (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+ (r'"', String, 'string'),
+ (r'\{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(^{")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
+ r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+X\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]()]', Generic.Strong),
+ (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
+ (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s\{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{"\[]+', Comment),
+ (r'/[^(^{")\s/[\]]*', Name.Attribute),
+ (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(^{")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(^")]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(^{})]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[(|)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([01]\s*){8}', Number.Hex),
+ (r'\}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(^")]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(^{})]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'\{', Comment, '#push'),
+ (r'\}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'\{', Comment, "commentString2"),
+ (r'[^(\[\]"{)]+', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+        Check if the code contains a REBOL header and so is probably not R code
+ """
+ if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
+ # The code starts with REBOL header
+ return 1.0
elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
- # The code contains REBOL header but also some text before it
- return 0.5
-
-
-class RedLexer(RegexLexer):
- """
- A `Red-language <http://www.red-lang.org/>`_ lexer.
-
- .. versionadded:: 2.0
- """
- name = 'Red'
- aliases = ['red', 'red/system']
- filenames = ['*.red', '*.reds']
- mimetypes = ['text/x-red', 'text/x-red-system']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
- r'foreach|forall|func|function|does|has|switch|'
- r'case|reduce|compose|get|set|print|prin|equal\?|'
- r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
- r'greater-or-equal\?|same\?|not|type\?|stats|'
- r'bind|union|replace|charset|routine)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
- r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
- r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
- r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
- r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
- r'update|write)$', word):
- yield match.start(), Name.Function, word
- elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
- r'none|crlf|dot|null-byte)$', word):
- yield match.start(), Name.Builtin.Pseudo, word
- elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
- r'#switch|#default|#get-definition)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
- r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
- r'quote|forever)$', word):
- yield match.start(), Name.Exception, word
- elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
- r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
- r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
- r'any-struct\?|none\?|word\?|any-series\?)$', word):
- yield match.start(), Keyword, word
- elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
+ # The code contains REBOL header but also some text before it
+ return 0.5
+
+
+class RedLexer(RegexLexer):
+ """
+ A `Red-language <http://www.red-lang.org/>`_ lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Red'
+ aliases = ['red', 'red/system']
+ filenames = ['*.red', '*.reds']
+ mimetypes = ['text/x-red', 'text/x-red-system']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
+ r'foreach|forall|func|function|does|has|switch|'
+ r'case|reduce|compose|get|set|print|prin|equal\?|'
+ r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+ r'greater-or-equal\?|same\?|not|type\?|stats|'
+ r'bind|union|replace|charset|routine)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+ r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+ r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+ r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+ r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
+ r'update|write)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
+ r'none|crlf|dot|null-byte)$', word):
+ yield match.start(), Name.Builtin.Pseudo, word
+ elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
+ r'#switch|#default|#get-definition)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
+ r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
+ r'quote|forever)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
+ r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
+ r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
+ r'any-struct\?|none\?|word\?|any-series\?)$', word):
+ yield match.start(), Keyword, word
+ elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
r'<<<|>>>|<<|>>|<|>%)$', word):
- yield match.start(), Operator, word
+ yield match.start(), Operator, word
elif re.match(r".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- elif re.match(":.*", word):
- yield match.start(), Generic.Subheading, word # get-word
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'[^R]+', Comment),
- (r'Red/System\s+\[', Generic.Strong, 'script'),
- (r'Red\s+\[', Generic.Strong, 'script'),
- (r'R', Comment)
- ],
- 'script': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#\{[0-9a-f\s]*\}', Number.Hex),
- (r'2#\{', Number.Hex, 'bin2'),
- (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
- (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
- bygroups(Number.Hex, Name.Variable, Whitespace)),
- (r'"', String, 'string'),
- (r'\{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(^{")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
- r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+X\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]()]', Generic.Strong),
- (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
- (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s\{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{"\[]+', Comment),
- (r'/[^(^{^")\s/[\]]*', Name.Attribute),
- (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(^{")\s]+)', Text),
- ],
- 'string': [
- (r'[^(^")]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(^{})]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[(|)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([01]\s*){8}', Number.Hex),
- (r'\}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(^")]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(^{})]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'\{', Comment, '#push'),
- (r'\}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'\{', Comment, "commentString2"),
- (r'[^(\[\]"{)]+', Comment),
- ],
- }
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ elif re.match(":.*", word):
+ yield match.start(), Generic.Subheading, word # get-word
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'Red/System\s+\[', Generic.Strong, 'script'),
+ (r'Red\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#\{[0-9a-f\s]*\}', Number.Hex),
+ (r'2#\{', Number.Hex, 'bin2'),
+ (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+ (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
+ bygroups(Number.Hex, Name.Variable, Whitespace)),
+ (r'"', String, 'string'),
+ (r'\{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(^{")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
+ r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+X\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]()]', Generic.Strong),
+ (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
+ (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s\{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{"\[]+', Comment),
+ (r'/[^(^{^")\s/[\]]*', Name.Attribute),
+ (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(^{")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(^")]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(^{})]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[(|)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([01]\s*){8}', Number.Hex),
+ (r'\}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(^")]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(^{})]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'\{', Comment, '#push'),
+ (r'\}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'\{', Comment, "commentString2"),
+ (r'[^(\[\]"{)]+', Comment),
+ ],
+ }
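
RebolLexer and RedLexer both rely on the same callback trick: a rule's action may be a function instead of a token type, Pygments calls it with (lexer, match), and it yields (position, token, text) tuples — which is how word_callback re-classifies each matched word with follow-up re.match checks. A minimal self-contained sketch of that pattern on a toy language (not REBOL syntax):

    import re

    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Name, Text

    class TinyLexer(RegexLexer):
        """Toy lexer showing the word_callback style used by RebolLexer."""
        name = 'Tiny'

        def word_callback(lexer, match):
            word = match.group()
            # Classify the matched word after the fact, as RebolLexer does.
            if re.match(r'(if|while|return)$', word):
                yield match.start(), Keyword, word
            elif word.endswith('?'):
                yield match.start(), Keyword.Type, word
            else:
                yield match.start(), Name.Variable, word

        tokens = {
            'root': [
                (r'\s+', Text),
                (r'\S+', word_callback),
            ],
        }

    for pos, tok, val in TinyLexer().get_tokens_unprocessed('if empty? items return'):
        print(pos, tok, val)
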
diff --git a/contrib/python/Pygments/py2/pygments/lexers/resource.py b/contrib/python/Pygments/py2/pygments/lexers/resource.py
index ccd4e5f6cc..ed494b9db6 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/resource.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/resource.py
@@ -1,85 +1,85 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.resource
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for resource definition files.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.resource
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for resource definition files.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Comment, String, Number, Operator, Text, \
- Keyword, Name
-
-__all__ = ['ResourceLexer']
-
-
-class ResourceLexer(RegexLexer):
- """Lexer for `ICU Resource bundles
- <http://userguide.icu-project.org/locale/resources>`_.
-
- .. versionadded:: 2.0
- """
- name = 'ResourceBundle'
- aliases = ['resource', 'resourcebundle']
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Comment, String, Number, Operator, Text, \
+ Keyword, Name
+
+__all__ = ['ResourceLexer']
+
+
+class ResourceLexer(RegexLexer):
+ """Lexer for `ICU Resource bundles
+ <http://userguide.icu-project.org/locale/resources>`_.
+
+ .. versionadded:: 2.0
+ """
+ name = 'ResourceBundle'
+ aliases = ['resource', 'resourcebundle']
filenames = []
-
- _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
- ':int', ':alias')
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- (r'//.*?$', Comment),
- (r'"', String, 'string'),
- (r'-?\d+', Number.Integer),
- (r'[,{}]', Operator),
- (r'([^\s{:]+)(\s*)(%s?)' % '|'.join(_types),
- bygroups(Name, Text, Keyword)),
- (r'\s+', Text),
- (words(_types), Keyword),
- ],
- 'string': [
- (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
- r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
- (r'\{', String.Escape, 'msgname'),
- (r'"', String, '#pop')
- ],
- 'msgname': [
- (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
- ],
- 'message': [
- (r'\{', String.Escape, 'msgname'),
- (r'\}', String.Escape, '#pop'),
- (r'(,)(\s*)([a-z]+)(\s*\})',
- bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
- (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
- bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
- String.Escape, Operator.Word, String.Escape, Operator,
- String.Escape, Number.Integer, String.Escape), 'choice'),
- (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
- bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
- String.Escape), 'choice'),
- (r'\s+', String.Escape)
- ],
- 'choice': [
- (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
- bygroups(Operator, Number.Integer, String.Escape), 'message'),
- (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
- (r'\}', String.Escape, ('#pop', '#pop')),
- (r'\s+', String.Escape)
- ],
- 'str': [
- (r'\}', String.Escape, '#pop'),
- (r'\{', String.Escape, 'msgname'),
- (r'[^{}]+', String)
- ]
- }
-
- def analyse_text(text):
+
+ _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
+ ':int', ':alias')
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment),
+ (r'"', String, 'string'),
+ (r'-?\d+', Number.Integer),
+ (r'[,{}]', Operator),
+ (r'([^\s{:]+)(\s*)(%s?)' % '|'.join(_types),
+ bygroups(Name, Text, Keyword)),
+ (r'\s+', Text),
+ (words(_types), Keyword),
+ ],
+ 'string': [
+ (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
+ r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
+ (r'\{', String.Escape, 'msgname'),
+ (r'"', String, '#pop')
+ ],
+ 'msgname': [
+ (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
+ ],
+ 'message': [
+ (r'\{', String.Escape, 'msgname'),
+ (r'\}', String.Escape, '#pop'),
+ (r'(,)(\s*)([a-z]+)(\s*\})',
+ bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
+ (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
+ bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+ String.Escape, Operator.Word, String.Escape, Operator,
+ String.Escape, Number.Integer, String.Escape), 'choice'),
+ (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
+ bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+ String.Escape), 'choice'),
+ (r'\s+', String.Escape)
+ ],
+ 'choice': [
+ (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
+ bygroups(Operator, Number.Integer, String.Escape), 'message'),
+ (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
+ (r'\}', String.Escape, ('#pop', '#pop')),
+ (r'\s+', String.Escape)
+ ],
+ 'str': [
+ (r'\}', String.Escape, '#pop'),
+ (r'\{', String.Escape, 'msgname'),
+ (r'[^{}]+', String)
+ ]
+ }
+
+ def analyse_text(text):
if text.startswith('root:table'):
return 1.0
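(Editor's note: this hunk re-adds ResourceLexer unchanged. As a hedged usage sketch while reviewing it — the sample text below is invented and not taken from the diff — the lexer can be driven through the standard highlight() API; analyse_text() above scores input starting with 'root:table' as a certain match.)

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.resource import ResourceLexer

# Hypothetical ICU resource-bundle snippet.
sample = 'root:table {\n    hello { "Hello, world!" }\n}\n'
print(highlight(sample, ResourceLexer(), TerminalFormatter()))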
diff --git a/contrib/python/Pygments/py2/pygments/lexers/roboconf.py b/contrib/python/Pygments/py2/pygments/lexers/roboconf.py
index f820fe1e20..dfade21f84 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/roboconf.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/roboconf.py
@@ -1,82 +1,82 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.roboconf
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Roboconf DSL.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.roboconf
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Roboconf DSL.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, re
-from pygments.token import Text, Operator, Keyword, Name, Comment
-
-__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
-
-
-class RoboconfGraphLexer(RegexLexer):
- """
- Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ graph files.
-
- .. versionadded:: 2.1
- """
- name = 'Roboconf Graph'
- aliases = ['roboconf-graph']
- filenames = ['*.graph']
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
- # Skip white spaces
- (r'\s+', Text),
-
- # There is one operator
- (r'=', Operator),
-
- # Keywords
- (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
- (words((
- 'installer', 'extends', 'exports', 'imports', 'facets',
- 'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
-
- # Comments
- (r'#.*\n', Comment),
-
- # Default
- (r'[^#]', Text),
- (r'.*\n', Text)
- ]
- }
-
-
-class RoboconfInstancesLexer(RegexLexer):
- """
- Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ instances files.
-
- .. versionadded:: 2.1
- """
- name = 'Roboconf Instances'
- aliases = ['roboconf-instances']
- filenames = ['*.instances']
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
-
- # Skip white spaces
- (r'\s+', Text),
-
- # Keywords
- (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
- (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
- (r'\s*[\w.-]+\s*:', Name),
-
- # Comments
- (r'#.*\n', Comment),
-
- # Default
- (r'[^#]', Text),
- (r'.*\n', Text)
- ]
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, re
+from pygments.token import Text, Operator, Keyword, Name, Comment
+
+__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
+
+
+class RoboconfGraphLexer(RegexLexer):
+ """
+ Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ graph files.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Roboconf Graph'
+ aliases = ['roboconf-graph']
+ filenames = ['*.graph']
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+ # Skip white spaces
+ (r'\s+', Text),
+
+ # There is one operator
+ (r'=', Operator),
+
+ # Keywords
+ (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+ (words((
+ 'installer', 'extends', 'exports', 'imports', 'facets',
+ 'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
+
+ # Comments
+ (r'#.*\n', Comment),
+
+ # Default
+ (r'[^#]', Text),
+ (r'.*\n', Text)
+ ]
+ }
+
+
+class RoboconfInstancesLexer(RegexLexer):
+ """
+ Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ instances files.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Roboconf Instances'
+ aliases = ['roboconf-instances']
+ filenames = ['*.instances']
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+
+ # Skip white spaces
+ (r'\s+', Text),
+
+ # Keywords
+ (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+ (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
+ (r'\s*[\w.-]+\s*:', Name),
+
+ # Comments
+ (r'#.*\n', Comment),
+
+ # Default
+ (r'[^#]', Text),
+ (r'.*\n', Text)
+ ]
+ }
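(Editor's note: the Roboconf lexers are likewise restored verbatim. A small sketch of exercising them through the alias registered above — the graph text is invented for illustration, and lookup by alias assumes the lexer is present in the bundled _mapping.py.)

from pygments import highlight
from pygments.formatters import NullFormatter
from pygments.lexers import get_lexer_by_name

graph_src = 'import common.graph;\nfacet Deployable {\n}\n'  # hypothetical snippet
lexer = get_lexer_by_name('roboconf-graph')   # alias declared by RoboconfGraphLexer
print(highlight(graph_src, lexer, NullFormatter()))          # NullFormatter emits plain text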
diff --git a/contrib/python/Pygments/py2/pygments/lexers/robotframework.py b/contrib/python/Pygments/py2/pygments/lexers/robotframework.py
index 642c90c5c1..0d04b24886 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/robotframework.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/robotframework.py
@@ -1,560 +1,560 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.robotframework
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Robot Framework.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.robotframework
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Robot Framework.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Copyright 2012 Nokia Siemens Networks Oyj
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Token
-from pygments.util import text_type
-
-__all__ = ['RobotFrameworkLexer']
-
-
-HEADING = Token.Generic.Heading
-SETTING = Token.Keyword.Namespace
-IMPORT = Token.Name.Namespace
-TC_KW_NAME = Token.Generic.Subheading
-KEYWORD = Token.Name.Function
-ARGUMENT = Token.String
-VARIABLE = Token.Name.Variable
-COMMENT = Token.Comment
-SEPARATOR = Token.Punctuation
-SYNTAX = Token.Punctuation
-GHERKIN = Token.Generic.Emph
-ERROR = Token.Error
-
-
-def normalize(string, remove=''):
- string = string.lower()
- for char in remove + ' ':
- if char in string:
- string = string.replace(char, '')
- return string
-
-
-class RobotFrameworkLexer(Lexer):
- """
- For `Robot Framework <http://robotframework.org>`_ test data.
-
- Supports both space and pipe separated plain text formats.
-
- .. versionadded:: 1.6
- """
- name = 'RobotFramework'
- aliases = ['robotframework']
+ :license: BSD, see LICENSE for details.
+"""
+
+# Copyright 2012 Nokia Siemens Networks Oyj
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from pygments.lexer import Lexer
+from pygments.token import Token
+from pygments.util import text_type
+
+__all__ = ['RobotFrameworkLexer']
+
+
+HEADING = Token.Generic.Heading
+SETTING = Token.Keyword.Namespace
+IMPORT = Token.Name.Namespace
+TC_KW_NAME = Token.Generic.Subheading
+KEYWORD = Token.Name.Function
+ARGUMENT = Token.String
+VARIABLE = Token.Name.Variable
+COMMENT = Token.Comment
+SEPARATOR = Token.Punctuation
+SYNTAX = Token.Punctuation
+GHERKIN = Token.Generic.Emph
+ERROR = Token.Error
+
+
+def normalize(string, remove=''):
+ string = string.lower()
+ for char in remove + ' ':
+ if char in string:
+ string = string.replace(char, '')
+ return string
+
+
+class RobotFrameworkLexer(Lexer):
+ """
+ For `Robot Framework <http://robotframework.org>`_ test data.
+
+ Supports both space and pipe separated plain text formats.
+
+ .. versionadded:: 1.6
+ """
+ name = 'RobotFramework'
+ aliases = ['robotframework']
filenames = ['*.robot']
- mimetypes = ['text/x-robotframework']
-
- def __init__(self, **options):
- options['tabsize'] = 2
- options['encoding'] = 'UTF-8'
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- row_tokenizer = RowTokenizer()
- var_tokenizer = VariableTokenizer()
- index = 0
- for row in text.splitlines():
- for value, token in row_tokenizer.tokenize(row):
- for value, token in var_tokenizer.tokenize(value, token):
- if value:
- yield index, token, text_type(value)
- index += len(value)
-
-
-class VariableTokenizer(object):
-
- def tokenize(self, string, token):
- var = VariableSplitter(string, identifiers='$@%&')
- if var.start < 0 or token in (COMMENT, ERROR):
- yield string, token
- return
- for value, token in self._tokenize(var, string, token):
- if value:
- yield value, token
-
- def _tokenize(self, var, string, orig_token):
- before = string[:var.start]
- yield before, orig_token
- yield var.identifier + '{', SYNTAX
- for value, token in self.tokenize(var.base, VARIABLE):
- yield value, token
- yield '}', SYNTAX
- if var.index:
- yield '[', SYNTAX
- for value, token in self.tokenize(var.index, VARIABLE):
- yield value, token
- yield ']', SYNTAX
- for value, token in self.tokenize(string[var.end:], orig_token):
- yield value, token
-
-
-class RowTokenizer(object):
-
- def __init__(self):
- self._table = UnknownTable()
- self._splitter = RowSplitter()
- testcases = TestCaseTable()
- settings = SettingTable(testcases.set_default_template)
- variables = VariableTable()
- keywords = KeywordTable()
- self._tables = {'settings': settings, 'setting': settings,
- 'metadata': settings,
- 'variables': variables, 'variable': variables,
- 'testcases': testcases, 'testcase': testcases,
- 'keywords': keywords, 'keyword': keywords,
- 'userkeywords': keywords, 'userkeyword': keywords}
-
- def tokenize(self, row):
- commented = False
- heading = False
- for index, value in enumerate(self._splitter.split(row)):
- # First value, and every second after that, is a separator.
- index, separator = divmod(index-1, 2)
- if value.startswith('#'):
- commented = True
- elif index == 0 and value.startswith('*'):
- self._table = self._start_table(value)
- heading = True
- for value, token in self._tokenize(value, index, commented,
- separator, heading):
- yield value, token
- self._table.end_row()
-
- def _start_table(self, header):
- name = normalize(header, remove='*')
- return self._tables.get(name, UnknownTable())
-
- def _tokenize(self, value, index, commented, separator, heading):
- if commented:
- yield value, COMMENT
- elif separator:
- yield value, SEPARATOR
- elif heading:
- yield value, HEADING
- else:
- for value, token in self._table.tokenize(value, index):
- yield value, token
-
-
-class RowSplitter(object):
- _space_splitter = re.compile('( {2,})')
+ mimetypes = ['text/x-robotframework']
+
+ def __init__(self, **options):
+ options['tabsize'] = 2
+ options['encoding'] = 'UTF-8'
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ row_tokenizer = RowTokenizer()
+ var_tokenizer = VariableTokenizer()
+ index = 0
+ for row in text.splitlines():
+ for value, token in row_tokenizer.tokenize(row):
+ for value, token in var_tokenizer.tokenize(value, token):
+ if value:
+ yield index, token, text_type(value)
+ index += len(value)
+
+
+class VariableTokenizer(object):
+
+ def tokenize(self, string, token):
+ var = VariableSplitter(string, identifiers='$@%&')
+ if var.start < 0 or token in (COMMENT, ERROR):
+ yield string, token
+ return
+ for value, token in self._tokenize(var, string, token):
+ if value:
+ yield value, token
+
+ def _tokenize(self, var, string, orig_token):
+ before = string[:var.start]
+ yield before, orig_token
+ yield var.identifier + '{', SYNTAX
+ for value, token in self.tokenize(var.base, VARIABLE):
+ yield value, token
+ yield '}', SYNTAX
+ if var.index:
+ yield '[', SYNTAX
+ for value, token in self.tokenize(var.index, VARIABLE):
+ yield value, token
+ yield ']', SYNTAX
+ for value, token in self.tokenize(string[var.end:], orig_token):
+ yield value, token
+
+
+class RowTokenizer(object):
+
+ def __init__(self):
+ self._table = UnknownTable()
+ self._splitter = RowSplitter()
+ testcases = TestCaseTable()
+ settings = SettingTable(testcases.set_default_template)
+ variables = VariableTable()
+ keywords = KeywordTable()
+ self._tables = {'settings': settings, 'setting': settings,
+ 'metadata': settings,
+ 'variables': variables, 'variable': variables,
+ 'testcases': testcases, 'testcase': testcases,
+ 'keywords': keywords, 'keyword': keywords,
+ 'userkeywords': keywords, 'userkeyword': keywords}
+
+ def tokenize(self, row):
+ commented = False
+ heading = False
+ for index, value in enumerate(self._splitter.split(row)):
+ # First value, and every second after that, is a separator.
+ index, separator = divmod(index-1, 2)
+ if value.startswith('#'):
+ commented = True
+ elif index == 0 and value.startswith('*'):
+ self._table = self._start_table(value)
+ heading = True
+ for value, token in self._tokenize(value, index, commented,
+ separator, heading):
+ yield value, token
+ self._table.end_row()
+
+ def _start_table(self, header):
+ name = normalize(header, remove='*')
+ return self._tables.get(name, UnknownTable())
+
+ def _tokenize(self, value, index, commented, separator, heading):
+ if commented:
+ yield value, COMMENT
+ elif separator:
+ yield value, SEPARATOR
+ elif heading:
+ yield value, HEADING
+ else:
+ for value, token in self._table.tokenize(value, index):
+ yield value, token
+
+
+class RowSplitter(object):
+ _space_splitter = re.compile('( {2,})')
_pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
-
- def split(self, row):
- splitter = (row.startswith('| ') and self._split_from_pipes
- or self._split_from_spaces)
- for value in splitter(row):
- yield value
- yield '\n'
-
- def _split_from_spaces(self, row):
- yield '' # Start with (pseudo)separator similarly as with pipes
- for value in self._space_splitter.split(row):
- yield value
-
- def _split_from_pipes(self, row):
- _, separator, rest = self._pipe_splitter.split(row, 1)
- yield separator
- while self._pipe_splitter.search(rest):
- cell, separator, rest = self._pipe_splitter.split(rest, 1)
- yield cell
- yield separator
- yield rest
-
-
-class Tokenizer(object):
- _tokens = None
-
- def __init__(self):
- self._index = 0
-
- def tokenize(self, value):
- values_and_tokens = self._tokenize(value, self._index)
- self._index += 1
- if isinstance(values_and_tokens, type(Token)):
- values_and_tokens = [(value, values_and_tokens)]
- return values_and_tokens
-
- def _tokenize(self, value, index):
- index = min(index, len(self._tokens) - 1)
- return self._tokens[index]
-
- def _is_assign(self, value):
- if value.endswith('='):
- value = value[:-1].strip()
- var = VariableSplitter(value, identifiers='$@&')
- return var.start == 0 and var.end == len(value)
-
-
-class Comment(Tokenizer):
- _tokens = (COMMENT,)
-
-
-class Setting(Tokenizer):
- _tokens = (SETTING, ARGUMENT)
- _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
- 'suitepostcondition', 'testsetup', 'testprecondition',
- 'testteardown', 'testpostcondition', 'testtemplate')
- _import_settings = ('library', 'resource', 'variables')
- _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
- 'testtimeout')
- _custom_tokenizer = None
-
- def __init__(self, template_setter=None):
- Tokenizer.__init__(self)
- self._template_setter = template_setter
-
- def _tokenize(self, value, index):
- if index == 1 and self._template_setter:
- self._template_setter(value)
- if index == 0:
- normalized = normalize(value)
- if normalized in self._keyword_settings:
- self._custom_tokenizer = KeywordCall(support_assign=False)
- elif normalized in self._import_settings:
- self._custom_tokenizer = ImportSetting()
- elif normalized not in self._other_settings:
- return ERROR
- elif self._custom_tokenizer:
- return self._custom_tokenizer.tokenize(value)
- return Tokenizer._tokenize(self, value, index)
-
-
-class ImportSetting(Tokenizer):
- _tokens = (IMPORT, ARGUMENT)
-
-
-class TestCaseSetting(Setting):
- _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
- 'template')
- _import_settings = ()
- _other_settings = ('documentation', 'tags', 'timeout')
-
- def _tokenize(self, value, index):
- if index == 0:
- type = Setting._tokenize(self, value[1:-1], index)
- return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
- return Setting._tokenize(self, value, index)
-
-
-class KeywordSetting(TestCaseSetting):
- _keyword_settings = ('teardown',)
- _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
-
-
-class Variable(Tokenizer):
- _tokens = (SYNTAX, ARGUMENT)
-
- def _tokenize(self, value, index):
- if index == 0 and not self._is_assign(value):
- return ERROR
- return Tokenizer._tokenize(self, value, index)
-
-
-class KeywordCall(Tokenizer):
- _tokens = (KEYWORD, ARGUMENT)
-
- def __init__(self, support_assign=True):
- Tokenizer.__init__(self)
- self._keyword_found = not support_assign
- self._assigns = 0
-
- def _tokenize(self, value, index):
- if not self._keyword_found and self._is_assign(value):
- self._assigns += 1
- return SYNTAX # VariableTokenizer tokenizes this later.
- if self._keyword_found:
- return Tokenizer._tokenize(self, value, index - self._assigns)
- self._keyword_found = True
- return GherkinTokenizer().tokenize(value, KEYWORD)
-
-
-class GherkinTokenizer(object):
- _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)
-
- def tokenize(self, value, token):
- match = self._gherkin_prefix.match(value)
- if not match:
- return [(value, token)]
- end = match.end()
- return [(value[:end], GHERKIN), (value[end:], token)]
-
-
-class TemplatedKeywordCall(Tokenizer):
- _tokens = (ARGUMENT,)
-
-
-class ForLoop(Tokenizer):
-
- def __init__(self):
- Tokenizer.__init__(self)
- self._in_arguments = False
-
- def _tokenize(self, value, index):
- token = self._in_arguments and ARGUMENT or SYNTAX
- if value.upper() in ('IN', 'IN RANGE'):
- self._in_arguments = True
- return token
-
-
-class _Table(object):
- _tokenizer_class = None
-
- def __init__(self, prev_tokenizer=None):
- self._tokenizer = self._tokenizer_class()
- self._prev_tokenizer = prev_tokenizer
- self._prev_values_on_row = []
-
- def tokenize(self, value, index):
- if self._continues(value, index):
- self._tokenizer = self._prev_tokenizer
- yield value, SYNTAX
- else:
- for value_and_token in self._tokenize(value, index):
- yield value_and_token
- self._prev_values_on_row.append(value)
-
- def _continues(self, value, index):
- return value == '...' and all(self._is_empty(t)
- for t in self._prev_values_on_row)
-
- def _is_empty(self, value):
- return value in ('', '\\')
-
- def _tokenize(self, value, index):
- return self._tokenizer.tokenize(value)
-
- def end_row(self):
- self.__init__(prev_tokenizer=self._tokenizer)
-
-
-class UnknownTable(_Table):
- _tokenizer_class = Comment
-
- def _continues(self, value, index):
- return False
-
-
-class VariableTable(_Table):
- _tokenizer_class = Variable
-
-
-class SettingTable(_Table):
- _tokenizer_class = Setting
-
- def __init__(self, template_setter, prev_tokenizer=None):
- _Table.__init__(self, prev_tokenizer)
- self._template_setter = template_setter
-
- def _tokenize(self, value, index):
- if index == 0 and normalize(value) == 'testtemplate':
- self._tokenizer = Setting(self._template_setter)
- return _Table._tokenize(self, value, index)
-
- def end_row(self):
- self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
-
-
-class TestCaseTable(_Table):
- _setting_class = TestCaseSetting
- _test_template = None
- _default_template = None
-
- @property
- def _tokenizer_class(self):
- if self._test_template or (self._default_template and
- self._test_template is not False):
- return TemplatedKeywordCall
- return KeywordCall
-
- def _continues(self, value, index):
- return index > 0 and _Table._continues(self, value, index)
-
- def _tokenize(self, value, index):
- if index == 0:
- if value:
- self._test_template = None
- return GherkinTokenizer().tokenize(value, TC_KW_NAME)
- if index == 1 and self._is_setting(value):
- if self._is_template(value):
- self._test_template = False
- self._tokenizer = self._setting_class(self.set_test_template)
- else:
- self._tokenizer = self._setting_class()
- if index == 1 and self._is_for_loop(value):
- self._tokenizer = ForLoop()
- if index == 1 and self._is_empty(value):
- return [(value, SYNTAX)]
- return _Table._tokenize(self, value, index)
-
- def _is_setting(self, value):
- return value.startswith('[') and value.endswith(']')
-
- def _is_template(self, value):
- return normalize(value) == '[template]'
-
- def _is_for_loop(self, value):
- return value.startswith(':') and normalize(value, remove=':') == 'for'
-
- def set_test_template(self, template):
- self._test_template = self._is_template_set(template)
-
- def set_default_template(self, template):
- self._default_template = self._is_template_set(template)
-
- def _is_template_set(self, template):
- return normalize(template) not in ('', '\\', 'none', '${empty}')
-
-
-class KeywordTable(TestCaseTable):
- _tokenizer_class = KeywordCall
- _setting_class = KeywordSetting
-
- def _is_template(self, value):
- return False
-
-
-# Following code copied directly from Robot Framework 2.7.5.
-
-class VariableSplitter:
-
- def __init__(self, string, identifiers):
- self.identifier = None
- self.base = None
- self.index = None
- self.start = -1
- self.end = -1
- self._identifiers = identifiers
- self._may_have_internal_variables = False
- try:
- self._split(string)
- except ValueError:
- pass
- else:
- self._finalize()
-
- def get_replaced_base(self, variables):
- if self._may_have_internal_variables:
- return variables.replace_string(self.base)
- return self.base
-
- def _finalize(self):
- self.identifier = self._variable_chars[0]
- self.base = ''.join(self._variable_chars[2:-1])
- self.end = self.start + len(self._variable_chars)
- if self._has_list_or_dict_variable_index():
- self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
- self.end += len(self._list_and_dict_variable_index_chars)
-
- def _has_list_or_dict_variable_index(self):
- return self._list_and_dict_variable_index_chars\
- and self._list_and_dict_variable_index_chars[-1] == ']'
-
- def _split(self, string):
- start_index, max_index = self._find_variable(string)
- self.start = start_index
- self._open_curly = 1
- self._state = self._variable_state
- self._variable_chars = [string[start_index], '{']
- self._list_and_dict_variable_index_chars = []
- self._string = string
- start_index += 2
- for index, char in enumerate(string[start_index:]):
- index += start_index # Giving start to enumerate only in Py 2.6+
- try:
- self._state(char, index)
- except StopIteration:
- return
- if index == max_index and not self._scanning_list_variable_index():
- return
-
- def _scanning_list_variable_index(self):
- return self._state in [self._waiting_list_variable_index_state,
- self._list_variable_index_state]
-
- def _find_variable(self, string):
- max_end_index = string.rfind('}')
- if max_end_index == -1:
- raise ValueError('No variable end found')
- if self._is_escaped(string, max_end_index):
- return self._find_variable(string[:max_end_index])
- start_index = self._find_start_index(string, 1, max_end_index)
- if start_index == -1:
- raise ValueError('No variable start found')
- return start_index, max_end_index
-
- def _find_start_index(self, string, start, end):
- index = string.find('{', start, end) - 1
- if index < 0:
- return -1
- if self._start_index_is_ok(string, index):
- return index
- return self._find_start_index(string, index+2, end)
-
- def _start_index_is_ok(self, string, index):
- return string[index] in self._identifiers\
- and not self._is_escaped(string, index)
-
- def _is_escaped(self, string, index):
- escaped = False
- while index > 0 and string[index-1] == '\\':
- index -= 1
- escaped = not escaped
- return escaped
-
- def _variable_state(self, char, index):
- self._variable_chars.append(char)
- if char == '}' and not self._is_escaped(self._string, index):
- self._open_curly -= 1
- if self._open_curly == 0:
- if not self._is_list_or_dict_variable():
- raise StopIteration
- self._state = self._waiting_list_variable_index_state
- elif char in self._identifiers:
- self._state = self._internal_variable_start_state
-
- def _is_list_or_dict_variable(self):
- return self._variable_chars[0] in ('@','&')
-
- def _internal_variable_start_state(self, char, index):
- self._state = self._variable_state
- if char == '{':
- self._variable_chars.append(char)
- self._open_curly += 1
- self._may_have_internal_variables = True
- else:
- self._variable_state(char, index)
-
- def _waiting_list_variable_index_state(self, char, index):
- if char != '[':
- raise StopIteration
- self._list_and_dict_variable_index_chars.append(char)
- self._state = self._list_variable_index_state
-
- def _list_variable_index_state(self, char, index):
- self._list_and_dict_variable_index_chars.append(char)
- if char == ']':
- raise StopIteration
+
+ def split(self, row):
+ splitter = (row.startswith('| ') and self._split_from_pipes
+ or self._split_from_spaces)
+ for value in splitter(row):
+ yield value
+ yield '\n'
+
+ def _split_from_spaces(self, row):
+ yield '' # Start with (pseudo)separator similarly as with pipes
+ for value in self._space_splitter.split(row):
+ yield value
+
+ def _split_from_pipes(self, row):
+ _, separator, rest = self._pipe_splitter.split(row, 1)
+ yield separator
+ while self._pipe_splitter.search(rest):
+ cell, separator, rest = self._pipe_splitter.split(rest, 1)
+ yield cell
+ yield separator
+ yield rest
+
+
+class Tokenizer(object):
+ _tokens = None
+
+ def __init__(self):
+ self._index = 0
+
+ def tokenize(self, value):
+ values_and_tokens = self._tokenize(value, self._index)
+ self._index += 1
+ if isinstance(values_and_tokens, type(Token)):
+ values_and_tokens = [(value, values_and_tokens)]
+ return values_and_tokens
+
+ def _tokenize(self, value, index):
+ index = min(index, len(self._tokens) - 1)
+ return self._tokens[index]
+
+ def _is_assign(self, value):
+ if value.endswith('='):
+ value = value[:-1].strip()
+ var = VariableSplitter(value, identifiers='$@&')
+ return var.start == 0 and var.end == len(value)
+
+
+class Comment(Tokenizer):
+ _tokens = (COMMENT,)
+
+
+class Setting(Tokenizer):
+ _tokens = (SETTING, ARGUMENT)
+ _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
+ 'suitepostcondition', 'testsetup', 'testprecondition',
+ 'testteardown', 'testpostcondition', 'testtemplate')
+ _import_settings = ('library', 'resource', 'variables')
+ _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
+ 'testtimeout')
+ _custom_tokenizer = None
+
+ def __init__(self, template_setter=None):
+ Tokenizer.__init__(self)
+ self._template_setter = template_setter
+
+ def _tokenize(self, value, index):
+ if index == 1 and self._template_setter:
+ self._template_setter(value)
+ if index == 0:
+ normalized = normalize(value)
+ if normalized in self._keyword_settings:
+ self._custom_tokenizer = KeywordCall(support_assign=False)
+ elif normalized in self._import_settings:
+ self._custom_tokenizer = ImportSetting()
+ elif normalized not in self._other_settings:
+ return ERROR
+ elif self._custom_tokenizer:
+ return self._custom_tokenizer.tokenize(value)
+ return Tokenizer._tokenize(self, value, index)
+
+
+class ImportSetting(Tokenizer):
+ _tokens = (IMPORT, ARGUMENT)
+
+
+class TestCaseSetting(Setting):
+ _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
+ 'template')
+ _import_settings = ()
+ _other_settings = ('documentation', 'tags', 'timeout')
+
+ def _tokenize(self, value, index):
+ if index == 0:
+ type = Setting._tokenize(self, value[1:-1], index)
+ return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
+ return Setting._tokenize(self, value, index)
+
+
+class KeywordSetting(TestCaseSetting):
+ _keyword_settings = ('teardown',)
+ _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
+
+
+class Variable(Tokenizer):
+ _tokens = (SYNTAX, ARGUMENT)
+
+ def _tokenize(self, value, index):
+ if index == 0 and not self._is_assign(value):
+ return ERROR
+ return Tokenizer._tokenize(self, value, index)
+
+
+class KeywordCall(Tokenizer):
+ _tokens = (KEYWORD, ARGUMENT)
+
+ def __init__(self, support_assign=True):
+ Tokenizer.__init__(self)
+ self._keyword_found = not support_assign
+ self._assigns = 0
+
+ def _tokenize(self, value, index):
+ if not self._keyword_found and self._is_assign(value):
+ self._assigns += 1
+ return SYNTAX # VariableTokenizer tokenizes this later.
+ if self._keyword_found:
+ return Tokenizer._tokenize(self, value, index - self._assigns)
+ self._keyword_found = True
+ return GherkinTokenizer().tokenize(value, KEYWORD)
+
+
+class GherkinTokenizer(object):
+ _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)
+
+ def tokenize(self, value, token):
+ match = self._gherkin_prefix.match(value)
+ if not match:
+ return [(value, token)]
+ end = match.end()
+ return [(value[:end], GHERKIN), (value[end:], token)]
+
+
+class TemplatedKeywordCall(Tokenizer):
+ _tokens = (ARGUMENT,)
+
+
+class ForLoop(Tokenizer):
+
+ def __init__(self):
+ Tokenizer.__init__(self)
+ self._in_arguments = False
+
+ def _tokenize(self, value, index):
+ token = self._in_arguments and ARGUMENT or SYNTAX
+ if value.upper() in ('IN', 'IN RANGE'):
+ self._in_arguments = True
+ return token
+
+
+class _Table(object):
+ _tokenizer_class = None
+
+ def __init__(self, prev_tokenizer=None):
+ self._tokenizer = self._tokenizer_class()
+ self._prev_tokenizer = prev_tokenizer
+ self._prev_values_on_row = []
+
+ def tokenize(self, value, index):
+ if self._continues(value, index):
+ self._tokenizer = self._prev_tokenizer
+ yield value, SYNTAX
+ else:
+ for value_and_token in self._tokenize(value, index):
+ yield value_and_token
+ self._prev_values_on_row.append(value)
+
+ def _continues(self, value, index):
+ return value == '...' and all(self._is_empty(t)
+ for t in self._prev_values_on_row)
+
+ def _is_empty(self, value):
+ return value in ('', '\\')
+
+ def _tokenize(self, value, index):
+ return self._tokenizer.tokenize(value)
+
+ def end_row(self):
+ self.__init__(prev_tokenizer=self._tokenizer)
+
+
+class UnknownTable(_Table):
+ _tokenizer_class = Comment
+
+ def _continues(self, value, index):
+ return False
+
+
+class VariableTable(_Table):
+ _tokenizer_class = Variable
+
+
+class SettingTable(_Table):
+ _tokenizer_class = Setting
+
+ def __init__(self, template_setter, prev_tokenizer=None):
+ _Table.__init__(self, prev_tokenizer)
+ self._template_setter = template_setter
+
+ def _tokenize(self, value, index):
+ if index == 0 and normalize(value) == 'testtemplate':
+ self._tokenizer = Setting(self._template_setter)
+ return _Table._tokenize(self, value, index)
+
+ def end_row(self):
+ self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
+
+
+class TestCaseTable(_Table):
+ _setting_class = TestCaseSetting
+ _test_template = None
+ _default_template = None
+
+ @property
+ def _tokenizer_class(self):
+ if self._test_template or (self._default_template and
+ self._test_template is not False):
+ return TemplatedKeywordCall
+ return KeywordCall
+
+ def _continues(self, value, index):
+ return index > 0 and _Table._continues(self, value, index)
+
+ def _tokenize(self, value, index):
+ if index == 0:
+ if value:
+ self._test_template = None
+ return GherkinTokenizer().tokenize(value, TC_KW_NAME)
+ if index == 1 and self._is_setting(value):
+ if self._is_template(value):
+ self._test_template = False
+ self._tokenizer = self._setting_class(self.set_test_template)
+ else:
+ self._tokenizer = self._setting_class()
+ if index == 1 and self._is_for_loop(value):
+ self._tokenizer = ForLoop()
+ if index == 1 and self._is_empty(value):
+ return [(value, SYNTAX)]
+ return _Table._tokenize(self, value, index)
+
+ def _is_setting(self, value):
+ return value.startswith('[') and value.endswith(']')
+
+ def _is_template(self, value):
+ return normalize(value) == '[template]'
+
+ def _is_for_loop(self, value):
+ return value.startswith(':') and normalize(value, remove=':') == 'for'
+
+ def set_test_template(self, template):
+ self._test_template = self._is_template_set(template)
+
+ def set_default_template(self, template):
+ self._default_template = self._is_template_set(template)
+
+ def _is_template_set(self, template):
+ return normalize(template) not in ('', '\\', 'none', '${empty}')
+
+
+class KeywordTable(TestCaseTable):
+ _tokenizer_class = KeywordCall
+ _setting_class = KeywordSetting
+
+ def _is_template(self, value):
+ return False
+
+
+# Following code copied directly from Robot Framework 2.7.5.
+
+class VariableSplitter:
+
+ def __init__(self, string, identifiers):
+ self.identifier = None
+ self.base = None
+ self.index = None
+ self.start = -1
+ self.end = -1
+ self._identifiers = identifiers
+ self._may_have_internal_variables = False
+ try:
+ self._split(string)
+ except ValueError:
+ pass
+ else:
+ self._finalize()
+
+ def get_replaced_base(self, variables):
+ if self._may_have_internal_variables:
+ return variables.replace_string(self.base)
+ return self.base
+
+ def _finalize(self):
+ self.identifier = self._variable_chars[0]
+ self.base = ''.join(self._variable_chars[2:-1])
+ self.end = self.start + len(self._variable_chars)
+ if self._has_list_or_dict_variable_index():
+ self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
+ self.end += len(self._list_and_dict_variable_index_chars)
+
+ def _has_list_or_dict_variable_index(self):
+ return self._list_and_dict_variable_index_chars\
+ and self._list_and_dict_variable_index_chars[-1] == ']'
+
+ def _split(self, string):
+ start_index, max_index = self._find_variable(string)
+ self.start = start_index
+ self._open_curly = 1
+ self._state = self._variable_state
+ self._variable_chars = [string[start_index], '{']
+ self._list_and_dict_variable_index_chars = []
+ self._string = string
+ start_index += 2
+ for index, char in enumerate(string[start_index:]):
+ index += start_index # Giving start to enumerate only in Py 2.6+
+ try:
+ self._state(char, index)
+ except StopIteration:
+ return
+ if index == max_index and not self._scanning_list_variable_index():
+ return
+
+ def _scanning_list_variable_index(self):
+ return self._state in [self._waiting_list_variable_index_state,
+ self._list_variable_index_state]
+
+ def _find_variable(self, string):
+ max_end_index = string.rfind('}')
+ if max_end_index == -1:
+ raise ValueError('No variable end found')
+ if self._is_escaped(string, max_end_index):
+ return self._find_variable(string[:max_end_index])
+ start_index = self._find_start_index(string, 1, max_end_index)
+ if start_index == -1:
+ raise ValueError('No variable start found')
+ return start_index, max_end_index
+
+ def _find_start_index(self, string, start, end):
+ index = string.find('{', start, end) - 1
+ if index < 0:
+ return -1
+ if self._start_index_is_ok(string, index):
+ return index
+ return self._find_start_index(string, index+2, end)
+
+ def _start_index_is_ok(self, string, index):
+ return string[index] in self._identifiers\
+ and not self._is_escaped(string, index)
+
+ def _is_escaped(self, string, index):
+ escaped = False
+ while index > 0 and string[index-1] == '\\':
+ index -= 1
+ escaped = not escaped
+ return escaped
+
+ def _variable_state(self, char, index):
+ self._variable_chars.append(char)
+ if char == '}' and not self._is_escaped(self._string, index):
+ self._open_curly -= 1
+ if self._open_curly == 0:
+ if not self._is_list_or_dict_variable():
+ raise StopIteration
+ self._state = self._waiting_list_variable_index_state
+ elif char in self._identifiers:
+ self._state = self._internal_variable_start_state
+
+ def _is_list_or_dict_variable(self):
+ return self._variable_chars[0] in ('@','&')
+
+ def _internal_variable_start_state(self, char, index):
+ self._state = self._variable_state
+ if char == '{':
+ self._variable_chars.append(char)
+ self._open_curly += 1
+ self._may_have_internal_variables = True
+ else:
+ self._variable_state(char, index)
+
+ def _waiting_list_variable_index_state(self, char, index):
+ if char != '[':
+ raise StopIteration
+ self._list_and_dict_variable_index_chars.append(char)
+ self._state = self._list_variable_index_state
+
+ def _list_variable_index_state(self, char, index):
+ self._list_and_dict_variable_index_chars.append(char)
+ if char == ']':
+ raise StopIteration
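(Editor's note: RobotFrameworkLexer above overrides get_tokens_unprocessed() to tokenize test data row by row, yielding (offset, token, value) triples. A short hedged example of driving it directly; the test data below is invented.)

from pygments.lexers.robotframework import RobotFrameworkLexer

robot_src = (
    '*** Test Cases ***\n'
    'Example Test\n'
    '    Log    ${MESSAGE}\n'   # hypothetical keyword call with a variable
)
for offset, token, value in RobotFrameworkLexer().get_tokens_unprocessed(robot_src):
    print(offset, token, repr(value))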
diff --git a/contrib/python/Pygments/py2/pygments/lexers/ruby.py b/contrib/python/Pygments/py2/pygments/lexers/ruby.py
index 8bcbde6714..da07bf93b0 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/ruby.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/ruby.py
@@ -1,519 +1,519 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.ruby
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Ruby and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ruby
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Ruby and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
- bygroups, default, LexerContext, do_insertions, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Generic
-from pygments.util import shebang_matches
-
-__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
-
-line_re = re.compile('.*?\n')
-
-
-RUBY_OPERATORS = (
- '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
- '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
-)
-
-
-class RubyLexer(ExtendedRegexLexer):
- """
- For `Ruby <http://www.ruby-lang.org>`_ source code.
- """
-
- name = 'Ruby'
- aliases = ['rb', 'ruby', 'duby']
- filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
- '*.rbx', '*.duby', 'Gemfile']
- mimetypes = ['text/x-ruby', 'application/x-ruby']
-
- flags = re.DOTALL | re.MULTILINE
-
- def heredoc_callback(self, match, ctx):
- # okay, this is the hardest part of parsing Ruby...
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
+ bygroups, default, LexerContext, do_insertions, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
+
+line_re = re.compile('.*?\n')
+
+
+RUBY_OPERATORS = (
+ '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
+ '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
+)
+
+
+class RubyLexer(ExtendedRegexLexer):
+ """
+ For `Ruby <http://www.ruby-lang.org>`_ source code.
+ """
+
+ name = 'Ruby'
+ aliases = ['rb', 'ruby', 'duby']
+ filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+ '*.rbx', '*.duby', 'Gemfile']
+ mimetypes = ['text/x-ruby', 'application/x-ruby']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ def heredoc_callback(self, match, ctx):
+ # okay, this is the hardest part of parsing Ruby...
# match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
-
- start = match.start(1)
+
+ start = match.start(1)
yield start, Operator, match.group(1) # <<[-~]?
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
yield match.start(3), String.Delimiter, match.group(3) # heredoc name
yield match.start(4), String.Heredoc, match.group(4) # quote again
-
- heredocstack = ctx.__dict__.setdefault('heredocstack', [])
- outermost = not bool(heredocstack)
+
+ heredocstack = ctx.__dict__.setdefault('heredocstack', [])
+ outermost = not bool(heredocstack)
heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
-
- ctx.pos = match.start(5)
- ctx.end = match.end(5)
- # this may find other heredocs
- for i, t, v in self.get_tokens_unprocessed(context=ctx):
- yield i, t, v
- ctx.pos = match.end()
-
- if outermost:
- # this is the outer heredoc again, now we can process them all
- for tolerant, hdname in heredocstack:
- lines = []
- for match in line_re.finditer(ctx.text, ctx.pos):
- if tolerant:
- check = match.group().strip()
- else:
- check = match.group().rstrip()
- if check == hdname:
- for amatch in lines:
- yield amatch.start(), String.Heredoc, amatch.group()
+
+ ctx.pos = match.start(5)
+ ctx.end = match.end(5)
+ # this may find other heredocs
+ for i, t, v in self.get_tokens_unprocessed(context=ctx):
+ yield i, t, v
+ ctx.pos = match.end()
+
+ if outermost:
+ # this is the outer heredoc again, now we can process them all
+ for tolerant, hdname in heredocstack:
+ lines = []
+ for match in line_re.finditer(ctx.text, ctx.pos):
+ if tolerant:
+ check = match.group().strip()
+ else:
+ check = match.group().rstrip()
+ if check == hdname:
+ for amatch in lines:
+ yield amatch.start(), String.Heredoc, amatch.group()
yield match.start(), String.Delimiter, match.group()
- ctx.pos = match.end()
- break
- else:
- lines.append(match)
- else:
- # end of heredoc not found -- error!
- for amatch in lines:
- yield amatch.start(), Error, amatch.group()
- ctx.end = len(ctx.text)
- del heredocstack[:]
-
- def gen_rubystrings_rules():
- def intp_regex_callback(self, match, ctx):
- yield match.start(1), String.Regex, match.group(1) # begin
- nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
- ctx.pos = match.end()
-
- def intp_string_callback(self, match, ctx):
- yield match.start(1), String.Other, match.group(1)
- nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Other, match.group(4) # end
- ctx.pos = match.end()
-
- states = {}
- states['strings'] = [
- # easy ones
- (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
- (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
- (r":'(\\\\|\\'|[^'])*'", String.Symbol),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r':"', String.Symbol, 'simple-sym'),
- (r'([a-zA-Z_]\w*)(:)(?!:)',
- bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
- (r'"', String.Double, 'simple-string'),
- (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
- ]
-
- # double-quoted string and symbol
- for name, ttype, end in ('string', String.Double, '"'), \
- ('sym', String.Symbol, '"'), \
- ('backtick', String.Backtick, '`'):
- states['simple-'+name] = [
- include('string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
- (r'[\\#]', ttype),
- (end, ttype, '#pop'),
- ]
-
- # braced quoted strings
- for lbrace, rbrace, bracecc, name in \
- ('\\{', '\\}', '{}', 'cb'), \
- ('\\[', '\\]', '\\[\\]', 'sb'), \
- ('\\(', '\\)', '()', 'pa'), \
- ('<', '>', '<>', 'ab'):
- states[name+'-intp-string'] = [
- (r'\\[\\' + bracecc + ']', String.Other),
- (lbrace, String.Other, '#push'),
- (rbrace, String.Other, '#pop'),
- include('string-intp-escaped'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
- name+'-intp-string'))
- states[name+'-string'] = [
- (r'\\[\\' + bracecc + ']', String.Other),
- (lbrace, String.Other, '#push'),
- (rbrace, String.Other, '#pop'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- states['strings'].append((r'%[qsw]' + lbrace, String.Other,
- name+'-string'))
- states[name+'-regex'] = [
- (r'\\[\\' + bracecc + ']', String.Regex),
- (lbrace, String.Regex, '#push'),
- (rbrace + '[mixounse]*', String.Regex, '#pop'),
- include('string-intp'),
- (r'[\\#' + bracecc + ']', String.Regex),
- (r'[^\\#' + bracecc + ']+', String.Regex),
- ]
- states['strings'].append((r'%r' + lbrace, String.Regex,
- name+'-regex'))
-
- # these must come after %<brace>!
- states['strings'] += [
- # %r regex
- (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
- intp_regex_callback),
- # regular fancy strings with qsw
- (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
- (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- # special forms of fancy strings after operators or
- # in method calls with braces
- (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # and because of fixed width lookbehinds the whole thing a
- # second time for line startings...
- (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # all regular fancy strings without qsw
- (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- ]
-
- return states
-
- tokens = {
- 'root': [
- (r'\A#!.+?$', Comment.Hashbang),
- (r'#.*?$', Comment.Single),
- (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
- # keywords
- (words((
- 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
- 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
- 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
- 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
- Keyword),
- # start of function, class and module names
- (r'(module)(\s+)([a-zA-Z_]\w*'
- r'(?:::[a-zA-Z_]\w*)*)',
- bygroups(Keyword, Text, Name.Namespace)),
- (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- # special methods
- (words((
- 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
- 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
- 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
- suffix=r'\b'),
- Keyword.Pseudo),
- (r'(not|and|or)\b', Operator.Word),
- (words((
- 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
- 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
- 'private_method_defined', 'protected_method_defined',
- 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
- Name.Builtin),
- (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
- (words((
- 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
- 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
- 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
- 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
- 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
- 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
- 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
- 'instance_method', 'instance_methods',
- 'instance_variable_get', 'instance_variable_set', 'instance_variables',
- 'lambda', 'load', 'local_variables', 'loop',
- 'method', 'method_missing', 'methods', 'module_eval', 'name',
- 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
- 'private_instance_methods',
- 'private_methods', 'proc', 'protected_instance_methods',
- 'protected_methods', 'public_class_method',
- 'public_instance_methods', 'public_methods',
- 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
- 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
- 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
- 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
- 'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
- # normal heredocs
+ ctx.pos = match.end()
+ break
+ else:
+ lines.append(match)
+ else:
+ # end of heredoc not found -- error!
+ for amatch in lines:
+ yield amatch.start(), Error, amatch.group()
+ ctx.end = len(ctx.text)
+ del heredocstack[:]
+
+ def gen_rubystrings_rules():
+ def intp_regex_callback(self, match, ctx):
+ yield match.start(1), String.Regex, match.group(1) # begin
+ nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
+ ctx.pos = match.end()
+
+ def intp_string_callback(self, match, ctx):
+ yield match.start(1), String.Other, match.group(1)
+ nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Other, match.group(4) # end
+ ctx.pos = match.end()
+
+ states = {}
+ states['strings'] = [
+ # easy ones
+ (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
+ (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
+ (r":'(\\\\|\\'|[^'])*'", String.Symbol),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r':"', String.Symbol, 'simple-sym'),
+ (r'([a-zA-Z_]\w*)(:)(?!:)',
+ bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
+ (r'"', String.Double, 'simple-string'),
+ (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
+ ]
+
+ # double-quoted string and symbol
+ for name, ttype, end in ('string', String.Double, '"'), \
+ ('sym', String.Symbol, '"'), \
+ ('backtick', String.Backtick, '`'):
+ states['simple-'+name] = [
+ include('string-intp-escaped'),
+ (r'[^\\%s#]+' % end, ttype),
+ (r'[\\#]', ttype),
+ (end, ttype, '#pop'),
+ ]
+
+ # braced quoted strings
+ for lbrace, rbrace, bracecc, name in \
+ ('\\{', '\\}', '{}', 'cb'), \
+ ('\\[', '\\]', '\\[\\]', 'sb'), \
+ ('\\(', '\\)', '()', 'pa'), \
+ ('<', '>', '<>', 'ab'):
+ states[name+'-intp-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ include('string-intp-escaped'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
+ name+'-intp-string'))
+ states[name+'-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[qsw]' + lbrace, String.Other,
+ name+'-string'))
+ states[name+'-regex'] = [
+ (r'\\[\\' + bracecc + ']', String.Regex),
+ (lbrace, String.Regex, '#push'),
+ (rbrace + '[mixounse]*', String.Regex, '#pop'),
+ include('string-intp'),
+ (r'[\\#' + bracecc + ']', String.Regex),
+ (r'[^\\#' + bracecc + ']+', String.Regex),
+ ]
+ states['strings'].append((r'%r' + lbrace, String.Regex,
+ name+'-regex'))
+
+ # these must come after %<brace>!
+ states['strings'] += [
+ # %r regex
+ (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
+ intp_regex_callback),
+ # regular fancy strings with qsw
+ (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
+ (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ # special forms of fancy strings after operators or
+ # in method calls with braces
+ (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # and because of fixed width lookbehinds the whole thing a
+ # second time for line startings...
+ (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # all regular fancy strings without qsw
+ (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ ]
+
+ return states
+
+ tokens = {
+ 'root': [
+ (r'\A#!.+?$', Comment.Hashbang),
+ (r'#.*?$', Comment.Single),
+ (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
+ # keywords
+ (words((
+ 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
+ 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
+ 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
+ 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ # start of function, class and module names
+ (r'(module)(\s+)([a-zA-Z_]\w*'
+ r'(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ # special methods
+ (words((
+ 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
+ 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
+ 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
+ suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'(not|and|or)\b', Operator.Word),
+ (words((
+ 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
+ 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
+ 'private_method_defined', 'protected_method_defined',
+ 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
+ Name.Builtin),
+ (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
+ (words((
+ 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
+ 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
+ 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
+ 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
+ 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
+ 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
+ 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
+ 'instance_method', 'instance_methods',
+ 'instance_variable_get', 'instance_variable_set', 'instance_variables',
+ 'lambda', 'load', 'local_variables', 'loop',
+ 'method', 'method_missing', 'methods', 'module_eval', 'name',
+ 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
+ 'private_instance_methods',
+ 'private_methods', 'proc', 'protected_instance_methods',
+ 'protected_methods', 'public_class_method',
+ 'public_instance_methods', 'public_methods',
+ 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
+ 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
+ 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
+ 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
+ 'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
+ # normal heredocs
(r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
- heredoc_callback),
- # empty string heredocs
+ heredoc_callback),
+ # empty string heredocs
(r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
- (r'__END__', Comment.Preproc, 'end-part'),
- # multiline regex (after keywords or assignments)
- (r'(?:^|(?<=[=<>~!:])|'
- r'(?<=(?:\s|;)when\s)|'
- r'(?<=(?:\s|;)or\s)|'
- r'(?<=(?:\s|;)and\s)|'
- r'(?<=\.index\s)|'
- r'(?<=\.scan\s)|'
- r'(?<=\.sub\s)|'
- r'(?<=\.sub!\s)|'
- r'(?<=\.gsub\s)|'
- r'(?<=\.gsub!\s)|'
- r'(?<=\.match\s)|'
- r'(?<=(?:\s|;)if\s)|'
- r'(?<=(?:\s|;)elsif\s)|'
- r'(?<=^when\s)|'
- r'(?<=^index\s)|'
- r'(?<=^scan\s)|'
- r'(?<=^sub\s)|'
- r'(?<=^gsub\s)|'
- r'(?<=^sub!\s)|'
- r'(?<=^gsub!\s)|'
- r'(?<=^match\s)|'
- r'(?<=^if\s)|'
- r'(?<=^elsif\s)'
- r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
- # multiline regex (in method calls or subscripts)
- (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
- # multiline regex (this time the funny no whitespace rule)
- (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
- 'multiline-regex'),
- # lex numbers and ignore following regular expressions which
- # are division operators in fact (grrrr. i hate that. any
- # better ideas?)
- # since pygments 0.7 we also eat a "?" operator after numbers
- # so that the char operator does not work. Chars are not allowed
- # there so that you can use the ternary operator.
- # stupid example:
- # x>=0?n[x]:""
- (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- # Names
- (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
- (r'\$\w+', Name.Variable.Global),
- (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
- (r'\$-[0adFiIlpvw]', Name.Variable.Global),
- (r'::', Operator),
- include('strings'),
- # chars
- (r'\?(\\[MC]-)*' # modifiers
- r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
- r'(?!\w)',
- String.Char),
- (r'[A-Z]\w+', Name.Constant),
- # this is needed because ruby attributes can look
- # like keywords (class) or like this: ` ?!?
- (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
- bygroups(Operator, Name.Operator)),
- (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
- bygroups(Operator, Name)),
- (r'[a-zA-Z_]\w*[!?]?', Name),
- (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&!^|~]=?', Operator),
- (r'[(){};,/?:\\]', Punctuation),
- (r'\s+', Text)
- ],
- 'funcname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'(?:([a-zA-Z_]\w*)(\.))?'
- r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
- r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
- bygroups(Name.Class, Operator, Name.Function), '#pop'),
- default('#pop')
- ],
- 'classname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'<<', Operator, '#pop'),
- (r'[A-Z_]\w*', Name.Class, '#pop'),
- default('#pop')
- ],
- 'defexpr': [
- (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
- (r'\(', Operator, '#push'),
- include('root')
- ],
- 'in-intp': [
- (r'\{', String.Interpol, '#push'),
- (r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- 'string-intp': [
- (r'#\{', String.Interpol, 'in-intp'),
- (r'#@@?[a-zA-Z_]\w*', String.Interpol),
- (r'#\$[a-zA-Z_]\w*', String.Interpol)
- ],
- 'string-intp-escaped': [
- include('string-intp'),
- (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
- String.Escape)
- ],
- 'interpolated-regex': [
- include('string-intp'),
- (r'[\\#]', String.Regex),
- (r'[^\\#]+', String.Regex),
- ],
- 'interpolated-string': [
- include('string-intp'),
- (r'[\\#]', String.Other),
- (r'[^\\#]+', String.Other),
- ],
- 'multiline-regex': [
- include('string-intp'),
- (r'\\\\', String.Regex),
- (r'\\/', String.Regex),
- (r'[\\#]', String.Regex),
- (r'[^\\/#]+', String.Regex),
- (r'/[mixounse]*', String.Regex, '#pop'),
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
- tokens.update(gen_rubystrings_rules())
-
- def analyse_text(text):
- return shebang_matches(text, r'ruby(1\.\d)?')
-
-
-class RubyConsoleLexer(Lexer):
- """
- For Ruby interactive console (**irb**) output like:
-
- .. sourcecode:: rbcon
-
- irb(main):001:0> a = 1
- => 1
- irb(main):002:0> puts a
- 1
- => nil
- """
- name = 'Ruby irb session'
- aliases = ['rbcon', 'irb']
- mimetypes = ['text/x-ruby-shellsession']
-
+ (r'__END__', Comment.Preproc, 'end-part'),
+ # multiline regex (after keywords or assignments)
+ (r'(?:^|(?<=[=<>~!:])|'
+ r'(?<=(?:\s|;)when\s)|'
+ r'(?<=(?:\s|;)or\s)|'
+ r'(?<=(?:\s|;)and\s)|'
+ r'(?<=\.index\s)|'
+ r'(?<=\.scan\s)|'
+ r'(?<=\.sub\s)|'
+ r'(?<=\.sub!\s)|'
+ r'(?<=\.gsub\s)|'
+ r'(?<=\.gsub!\s)|'
+ r'(?<=\.match\s)|'
+ r'(?<=(?:\s|;)if\s)|'
+ r'(?<=(?:\s|;)elsif\s)|'
+ r'(?<=^when\s)|'
+ r'(?<=^index\s)|'
+ r'(?<=^scan\s)|'
+ r'(?<=^sub\s)|'
+ r'(?<=^gsub\s)|'
+ r'(?<=^sub!\s)|'
+ r'(?<=^gsub!\s)|'
+ r'(?<=^match\s)|'
+ r'(?<=^if\s)|'
+ r'(?<=^elsif\s)'
+ r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
+ # multiline regex (in method calls or subscripts)
+ (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
+ # multiline regex (this time the funny no whitespace rule)
+ (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
+ 'multiline-regex'),
+ # lex numbers and ignore following regular expressions which
+ # are division operators in fact (grrrr. i hate that. any
+ # better ideas?)
+ # since pygments 0.7 we also eat a "?" operator after numbers
+ # so that the char operator does not work. Chars are not allowed
+ # there so that you can use the ternary operator.
+ # stupid example:
+ # x>=0?n[x]:""
+ (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ # Names
+ (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
+ (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+ (r'\$\w+', Name.Variable.Global),
+ (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
+ (r'\$-[0adFiIlpvw]', Name.Variable.Global),
+ (r'::', Operator),
+ include('strings'),
+ # chars
+ (r'\?(\\[MC]-)*' # modifiers
+ r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
+ r'(?!\w)',
+ String.Char),
+ (r'[A-Z]\w+', Name.Constant),
+ # this is needed because ruby attributes can look
+ # like keywords (class) or like this: ` ?!?
+ (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
+ bygroups(Operator, Name.Operator)),
+ (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
+ bygroups(Operator, Name)),
+ (r'[a-zA-Z_]\w*[!?]?', Name),
+ (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&!^|~]=?', Operator),
+ (r'[(){};,/?:\\]', Punctuation),
+ (r'\s+', Text)
+ ],
+ 'funcname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'(?:([a-zA-Z_]\w*)(\.))?'
+ r'([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|'
+ r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
+ bygroups(Name.Class, Operator, Name.Function), '#pop'),
+ default('#pop')
+ ],
+ 'classname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'<<', Operator, '#pop'),
+ (r'[A-Z_]\w*', Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'defexpr': [
+ (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
+ (r'\(', Operator, '#push'),
+ include('root')
+ ],
+ 'in-intp': [
+ (r'\{', String.Interpol, '#push'),
+ (r'\}', String.Interpol, '#pop'),
+ include('root'),
+ ],
+ 'string-intp': [
+ (r'#\{', String.Interpol, 'in-intp'),
+ (r'#@@?[a-zA-Z_]\w*', String.Interpol),
+ (r'#\$[a-zA-Z_]\w*', String.Interpol)
+ ],
+ 'string-intp-escaped': [
+ include('string-intp'),
+ (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
+ String.Escape)
+ ],
+ 'interpolated-regex': [
+ include('string-intp'),
+ (r'[\\#]', String.Regex),
+ (r'[^\\#]+', String.Regex),
+ ],
+ 'interpolated-string': [
+ include('string-intp'),
+ (r'[\\#]', String.Other),
+ (r'[^\\#]+', String.Other),
+ ],
+ 'multiline-regex': [
+ include('string-intp'),
+ (r'\\\\', String.Regex),
+ (r'\\/', String.Regex),
+ (r'[\\#]', String.Regex),
+ (r'[^\\/#]+', String.Regex),
+ (r'/[mixounse]*', String.Regex, '#pop'),
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+ tokens.update(gen_rubystrings_rules())
+
+ def analyse_text(text):
+ return shebang_matches(text, r'ruby(1\.\d)?')
+
+
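As an illustrative aside (not part of the patch): the comment in the number rules above explains that a trailing "?" is consumed together with a numeric literal so that a ternary written directly after a digit is not mistaken for a character literal. A minimal sketch of that behaviour, using the hypothetical input from the comment:

    # sketch only: observe how "x>=0?n[x]:\"\"" is tokenized
    from pygments.lexers import RubyLexer

    for token, value in RubyLexer().get_tokens('x>=0?n[x]:""\n'):
        print(token, repr(value))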
+class RubyConsoleLexer(Lexer):
+ """
+ For Ruby interactive console (**irb**) output like:
+
+ .. sourcecode:: rbcon
+
+ irb(main):001:0> a = 1
+ => 1
+ irb(main):002:0> puts a
+ 1
+ => nil
+ """
+ name = 'Ruby irb session'
+ aliases = ['rbcon', 'irb']
+ mimetypes = ['text/x-ruby-shellsession']
+
_prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
r'|>> |\?> ')
-
- def get_tokens_unprocessed(self, text):
- rblexer = RubyLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(
- insertions, rblexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class FancyLexer(RegexLexer):
- """
- Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
-
- Fancy is a self-hosted, pure object-oriented, dynamic,
- class-based, concurrent general-purpose programming language
- running on Rubinius, the Ruby VM.
-
- .. versionadded:: 1.5
- """
- name = 'Fancy'
- filenames = ['*.fy', '*.fancypack']
- aliases = ['fancy', 'fy']
- mimetypes = ['text/x-fancysrc']
-
- tokens = {
- # copied from PerlLexer:
- 'balanced-regex': [
- (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'\{(\\\\|\\\}|[^}])*\}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\\)|[^)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\\$|[^$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\s+', Text),
-
- # balanced delimiters (copied from PerlLexer):
- (r's\{(\\\\|\\\}|[^}])*\}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
- (r's\((\\\\|\\\)|[^)])*\)\s*', String.Regex, 'balanced-regex'),
- (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
-
- # Comments
- (r'#(.*?)\n', Comment.Single),
- # Symbols
- (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
- # Multi-line DoubleQuotedString
- (r'"""(\\\\|\\"|[^"])*"""', String),
- # DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # keywords
- (r'(def|class|try|catch|finally|retry|return|return_local|match|'
- r'case|->|=>)\b', Keyword),
- # constants
- (r'(self|super|nil|false|true)\b', Name.Constant),
- (r'[(){};,/?|:\\]', Punctuation),
- # names
- (words((
- 'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
- 'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
- 'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
- 'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
- Name.Builtin),
- # functions
- (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
- # operators, must be below functions
- (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
+
+ def get_tokens_unprocessed(self, text):
+ rblexer = RubyLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode)):
+ yield item
+
+
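As an illustrative aside (not part of the patch): the method above matches each prompt with _prompt_re, emits it as Generic.Prompt, and re-lexes the accumulated code with RubyLexer via do_insertions. A minimal usage sketch, with a made-up irb transcript as input:

    # sketch only: highlight an irb session with the console lexer
    from pygments import highlight
    from pygments.lexers import RubyConsoleLexer
    from pygments.formatters import TerminalFormatter

    session = 'irb(main):001:0> a = 1\n=> 1\n'
    print(highlight(session, RubyConsoleLexer(), TerminalFormatter()))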
+class FancyLexer(RegexLexer):
+ """
+ Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
+
+ Fancy is a self-hosted, pure object-oriented, dynamic,
+ class-based, concurrent general-purpose programming language
+ running on Rubinius, the Ruby VM.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fancy'
+ filenames = ['*.fy', '*.fancypack']
+ aliases = ['fancy', 'fy']
+ mimetypes = ['text/x-fancysrc']
+
+ tokens = {
+ # copied from PerlLexer:
+ 'balanced-regex': [
+ (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'\{(\\\\|\\\}|[^}])*\}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\\)|[^)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\@|[^@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\%|[^%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\\$|[^$])*\$[egimosx]*', String.Regex, '#pop'),
+ ],
+ 'root': [
+ (r'\s+', Text),
+
+ # balanced delimiters (copied from PerlLexer):
+ (r's\{(\\\\|\\\}|[^}])*\}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
+ (r's\((\\\\|\\\)|[^)])*\)\s*', String.Regex, 'balanced-regex'),
+ (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
+ (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+
+ # Comments
+ (r'#(.*?)\n', Comment.Single),
+ # Symbols
+ (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
+ # Multi-line DoubleQuotedString
+ (r'"""(\\\\|\\"|[^"])*"""', String),
+ # DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # keywords
+ (r'(def|class|try|catch|finally|retry|return|return_local|match|'
+ r'case|->|=>)\b', Keyword),
+ # constants
+ (r'(self|super|nil|false|true)\b', Name.Constant),
+ (r'[(){};,/?|:\\]', Punctuation),
+ # names
+ (words((
+ 'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
+ 'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
+ 'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
+ 'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
+ Name.Builtin),
+ # functions
+ (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
+ # operators, must be below functions
+ (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
(r'[A-Z]\w*', Name.Constant),
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- ('@@?', Operator),
+ ('@@?', Operator),
(r'[a-zA-Z_]\w*', Name),
- # numbers - / checks are necessary to avoid mismarking regexes,
- # see comment in RubyLexer
- (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ]
- }
+ # numbers - / checks are necessary to avoid mismarking regexes,
+ # see comment in RubyLexer
+ (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ]
+ }
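As an illustrative aside (not part of the patch): RubyLexer.analyse_text above only checks for a ruby shebang, which is what lets guess_lexer pick it up for shebang-led sources. A minimal sketch, hedged on the expected result:

    # sketch only: a ruby shebang should be enough for guess_lexer
    from pygments.lexers import guess_lexer

    print(guess_lexer('#!/usr/bin/env ruby\nputs "hi"\n').name)  # expected: Ruby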
diff --git a/contrib/python/Pygments/py2/pygments/lexers/rust.py b/contrib/python/Pygments/py2/pygments/lexers/rust.py
index f731785fe6..726d433738 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/rust.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/rust.py
@@ -1,32 +1,32 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.rust
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Rust language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rust
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Rust language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['RustLexer']
-
-
-class RustLexer(RegexLexer):
- """
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['RustLexer']
+
+
+class RustLexer(RegexLexer):
+ """
Lexer for the Rust programming language (version 1.10).
-
- .. versionadded:: 1.6
- """
- name = 'Rust'
- filenames = ['*.rs', '*.rs.in']
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rust'
+ filenames = ['*.rs', '*.rs.in']
aliases = ['rust', 'rs']
- mimetypes = ['text/rust']
-
+ mimetypes = ['text/rust']
+
keyword_types = (
words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64',
'i128', 'u128', 'usize', 'isize', 'f32', 'f64', 'str', 'bool'),
@@ -56,110 +56,110 @@ class RustLexer(RegexLexer):
'Vec'), suffix=r'\b'),
Name.Builtin)
- tokens = {
- 'root': [
- # rust allows a file to start with a shebang, but if the first line
+ tokens = {
+ 'root': [
+ # rust allows a file to start with a shebang, but if the first line
# starts with #![ then it's not a shebang but a crate attribute.
- (r'#![^[\r\n].*$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- # Whitespace and Comments
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'//!.*?\n', String.Doc),
- (r'///(\n|[^/].*?\n)', String.Doc),
- (r'//(.*?)\n', Comment.Single),
- (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
- (r'/\*!', String.Doc, 'doccomment'),
- (r'/\*', Comment.Multiline, 'comment'),
-
- # Macro parameters
- (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
- # Keywords
- (words((
+ (r'#![^[\r\n].*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ # Whitespace and Comments
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ (r'//!.*?\n', String.Doc),
+ (r'///(\n|[^/].*?\n)', String.Doc),
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
+ (r'/\*!', String.Doc, 'doccomment'),
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # Macro parameters
+ (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
+ # Keywords
+ (words((
'as', 'async', 'await', 'box', 'const', 'crate', 'else',
'extern', 'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
'mut', 'pub', 'ref', 'return', 'static', 'super', 'trait',
'try', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
- Keyword),
+ Keyword),
(words(('abstract', 'alignof', 'become', 'do', 'final', 'macro',
'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof',
'typeof', 'unsized', 'virtual', 'yield'), suffix=r'\b'),
- Keyword.Reserved),
- (r'(true|false)\b', Keyword.Constant),
+ Keyword.Reserved),
+ (r'(true|false)\b', Keyword.Constant),
(r'mod\b', Keyword, 'modname'),
- (r'let\b', Keyword.Declaration),
+ (r'let\b', Keyword.Declaration),
(r'fn\b', Keyword, 'funcname'),
(r'(struct|enum|type|union)\b', Keyword, 'typename'),
(r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
keyword_types,
- (r'self\b', Name.Builtin.Pseudo),
+ (r'self\b', Name.Builtin.Pseudo),
# Prelude (taken from Rust's src/libstd/prelude.rs)
builtin_types,
# Path seperators, so types don't catch them.
(r'::\b', Text),
# Types in positions.
(r'(?::|->)', Text, 'typename'),
- # Labels
+ # Labels
(r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?',
bygroups(Keyword, Text.Whitespace, Name.Label)),
- # Character Literal
- (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
- String.Char),
- (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
- String.Char),
- # Binary Literal
- (r'0b[01_]+', Number.Bin, 'number_lit'),
- # Octal Literal
- (r'0o[0-7_]+', Number.Oct, 'number_lit'),
- # Hexadecimal Literal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
- # Decimal Literal
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ # Character Literal
+ (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+ String.Char),
+ (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+ String.Char),
+ # Binary Literal
+ (r'0b[01_]+', Number.Bin, 'number_lit'),
+ # Octal Literal
+ (r'0o[0-7_]+', Number.Oct, 'number_lit'),
+ # Hexadecimal Literal
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
+ # Decimal Literal
+ (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
'number_lit'),
- (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
- # String Literal
- (r'b"', String, 'bytestring'),
- (r'"', String, 'string'),
- (r'b?r(#*)".*?"\1', String),
-
- # Lifetime
- (r"""'static""", Name.Builtin),
- (r"""'[a-zA-Z_]\w*""", Name.Attribute),
-
- # Operators and Punctuation
- (r'[{}()\[\],.;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?]', Operator),
-
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
-
- # Attributes
- (r'#!?\[', Comment.Preproc, 'attribute['),
- # Macros
- (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)',
- bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
- Whitespace, Punctuation), 'macro{'),
- (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()',
- bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
- Punctuation), 'macro('),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'doccomment': [
- (r'[^*/]+', String.Doc),
- (r'/\*', String.Doc, '#push'),
- (r'\*/', String.Doc, '#pop'),
- (r'[*/]', String.Doc),
- ],
+ (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
+ # String Literal
+ (r'b"', String, 'bytestring'),
+ (r'"', String, 'string'),
+ (r'b?r(#*)".*?"\1', String),
+
+ # Lifetime
+ (r"""'static""", Name.Builtin),
+ (r"""'[a-zA-Z_]\w*""", Name.Attribute),
+
+ # Operators and Punctuation
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[+\-*/%&|<>^!~@=:?]', Operator),
+
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+
+ # Attributes
+ (r'#!?\[', Comment.Preproc, 'attribute['),
+ # Macros
+ (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)',
+ bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
+ Whitespace, Punctuation), 'macro{'),
+ (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()',
+ bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
+ Punctuation), 'macro('),
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'doccomment': [
+ (r'[^*/]+', String.Doc),
+ (r'/\*', String.Doc, '#push'),
+ (r'\*/', String.Doc, '#pop'),
+ (r'[*/]', String.Doc),
+ ],
'modname': [
(r'\s+', Text),
(r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
@@ -178,43 +178,43 @@ class RustLexer(RegexLexer):
(r'[a-zA-Z_]\w*', Name.Class, '#pop'),
default('#pop'),
],
- 'number_lit': [
- (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
- (r'f(32|64)', Keyword, '#pop'),
- default('#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String),
- ],
- 'bytestring': [
- (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
- include('string'),
- ],
- 'macro{': [
- (r'\{', Operator, '#push'),
- (r'\}', Operator, '#pop'),
- ],
- 'macro(': [
- (r'\(', Operator, '#push'),
- (r'\)', Operator, '#pop'),
- ],
- 'attribute_common': [
- (r'"', String, 'string'),
- (r'\[', Comment.Preproc, 'attribute['),
- (r'\(', Comment.Preproc, 'attribute('),
- ],
- 'attribute[': [
- include('attribute_common'),
- (r'\];?', Comment.Preproc, '#pop'),
- (r'[^"\]]+', Comment.Preproc),
- ],
- 'attribute(': [
- include('attribute_common'),
- (r'\);?', Comment.Preproc, '#pop'),
- (r'[^")]+', Comment.Preproc),
- ],
- }
+ 'number_lit': [
+ (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
+ (r'f(32|64)', Keyword, '#pop'),
+ default('#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
+ (r'[^\\"]+', String),
+ (r'\\', String),
+ ],
+ 'bytestring': [
+ (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
+ include('string'),
+ ],
+ 'macro{': [
+ (r'\{', Operator, '#push'),
+ (r'\}', Operator, '#pop'),
+ ],
+ 'macro(': [
+ (r'\(', Operator, '#push'),
+ (r'\)', Operator, '#pop'),
+ ],
+ 'attribute_common': [
+ (r'"', String, 'string'),
+ (r'\[', Comment.Preproc, 'attribute['),
+ (r'\(', Comment.Preproc, 'attribute('),
+ ],
+ 'attribute[': [
+ include('attribute_common'),
+ (r'\];?', Comment.Preproc, '#pop'),
+ (r'[^"\]]+', Comment.Preproc),
+ ],
+ 'attribute(': [
+ include('attribute_common'),
+ (r'\);?', Comment.Preproc, '#pop'),
+ (r'[^")]+', Comment.Preproc),
+ ],
+ }
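As an illustrative aside (not part of the patch): the root-state comment above notes that a shebang-style first line is taken as Comment.Preproc, while a line starting with "#![" falls through to the attribute rules in 'base'. A minimal sketch that just prints the token stream for a crate attribute:

    # sketch only: "#![...]" is handled by the attribute rules, not the shebang rule
    from pygments.lexers import RustLexer

    for token, value in RustLexer().get_tokens('#![allow(dead_code)]\nfn main() {}\n'):
        print(token, repr(value))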
diff --git a/contrib/python/Pygments/py2/pygments/lexers/scripting.py b/contrib/python/Pygments/py2/pygments/lexers/scripting.py
index a340f8e0d4..3225877731 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/scripting.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/scripting.py
@@ -1,1222 +1,1222 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.scripting
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for scripting and embedded languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.scripting
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scripting and embedded languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
- words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Whitespace, Other
-from pygments.util import get_bool_opt, get_list_opt, iteritems
-
-__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
- 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
- 'EasytrieveLexer', 'JclLexer']
-
-
-class LuaLexer(RegexLexer):
- """
- For `Lua <http://www.lua.org>`_ source code.
-
- Additional options accepted:
-
- `func_name_highlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabled_modules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted.
-
- To get a list of allowed modules have a look into the
- `_lua_builtins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._lua_builtins import MODULES
- >>> MODULES.keys()
- ['string', 'coroutine', 'modules', 'io', 'basic', ...]
- """
-
- name = 'Lua'
- aliases = ['lua']
- filenames = ['*.lua', '*.wlua']
- mimetypes = ['text/x-lua', 'application/x-lua']
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
+ words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Whitespace, Other
+from pygments.util import get_bool_opt, get_list_opt, iteritems
+
+__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
+ 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
+ 'EasytrieveLexer', 'JclLexer']
+
+
+class LuaLexer(RegexLexer):
+ """
+ For `Lua <http://www.lua.org>`_ source code.
+
+ Additional options accepted:
+
+ `func_name_highlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabled_modules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted.
+
+ To get a list of allowed modules have a look into the
+ `_lua_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._lua_builtins import MODULES
+ >>> MODULES.keys()
+ ['string', 'coroutine', 'modules', 'io', 'basic', ...]
+ """
+
+ name = 'Lua'
+ aliases = ['lua']
+ filenames = ['*.lua', '*.wlua']
+ mimetypes = ['text/x-lua', 'application/x-lua']
+
_comment_multiline = r'(?:--\[(?P<level>=*)\[[\w\W]*?\](?P=level)\])'
_comment_single = r'(?:--.*$)'
_space = r'(?:\s+)'
_s = r'(?:%s|%s|%s)' % (_comment_multiline, _comment_single, _space)
_name = r'(?:[^\W\d]\w*)'
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
# Lua allows a file to start with a shebang.
(r'#!.*', Comment.Preproc),
- default('base'),
- ],
+ default('base'),
+ ],
'ws': [
(_comment_multiline, Comment.Multiline),
(_comment_single, Comment.Single),
(_space, Text),
],
- 'base': [
+ 'base': [
include('ws'),
-
+
(r'(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?', Number.Hex),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'\d+', Number.Integer),
-
- # multiline strings
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
-
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ # multiline strings
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+
(r'::', Punctuation, 'label'),
(r'\.{3}', Punctuation),
(r'[=<>|~&+\-*/%#^]+|\.\.', Operator),
- (r'[\[\]{}().,:;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
-
- ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
+ (r'[\[\]{}().,:;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+
+ ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
r'while)\b', Keyword.Reserved),
(r'goto\b', Keyword.Reserved, 'goto'),
- (r'(local)\b', Keyword.Declaration),
- (r'(true|false|nil)\b', Keyword.Constant),
-
+ (r'(local)\b', Keyword.Declaration),
+ (r'(true|false|nil)\b', Keyword.Constant),
+
(r'(function)\b', Keyword.Reserved, 'funcname'),
-
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
-
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
-
- 'funcname': [
+
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+
+ 'funcname': [
include('ws'),
(r'[.:]', Punctuation),
(r'%s(?=%s*[.:])' % (_name, _s), Name.Class),
(_name, Name.Function, '#pop'),
- # inline function
+ # inline function
(r'\(', Punctuation, '#pop'),
- ],
-
+ ],
+
'goto': [
include('ws'),
(_name, Name.Label, '#pop'),
- ],
-
+ ],
+
'label': [
include('ws'),
(r'::', Punctuation, '#pop'),
(_name, Name.Label),
],
- 'stringescape': [
+ 'stringescape': [
(r'\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|'
r'u\{[0-9a-fA-F]+\})', String.Escape),
- ],
-
- 'sqs': [
+ ],
+
+ 'sqs': [
(r"'", String.Single, '#pop'),
(r"[^\\']+", String.Single),
- ],
-
- 'dqs': [
+ ],
+
+ 'dqs': [
(r'"', String.Double, '#pop'),
(r'[^\\"]+', String.Double),
- ]
- }
-
- def __init__(self, **options):
- self.func_name_highlighting = get_bool_opt(
- options, 'func_name_highlighting', True)
- self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
-
- self._functions = set()
- if self.func_name_highlighting:
- from pygments.lexers._lua_builtins import MODULES
- for mod, func in iteritems(MODULES):
- if mod not in self.disabled_modules:
- self._functions.update(func)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- elif '.' in value:
- a, b = value.split('.')
- yield index, Name, a
- yield index + len(a), Punctuation, u'.'
- yield index + len(a) + 1, Name, b
- continue
- yield index, token, value
-
-
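As an illustrative aside (not part of the patch): the docstring and __init__ above describe the disabled_modules option; functions from a disabled module are no longer reported as Name.Builtin in get_tokens_unprocessed. A minimal sketch with 'io' disabled:

    # sketch only: io.* loses builtin highlighting, string.* keeps it
    from pygments.lexers import LuaLexer

    lexer = LuaLexer(disabled_modules=['io'])
    for index, token, value in lexer.get_tokens_unprocessed('io.write(string.format("%d", 1))\n'):
        print(token, repr(value))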
-class MoonScriptLexer(LuaLexer):
- """
- For `MoonScript <http://moonscript.org>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = "MoonScript"
- aliases = ["moon", "moonscript"]
- filenames = ["*.moon"]
- mimetypes = ['text/x-moonscript', 'application/x-moonscript']
-
- tokens = {
- 'root': [
- (r'#!(.*?)$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- ('--.*$', Comment.Single),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'(?i)0x[0-9a-f]*', Number.Hex),
- (r'\d+', Number.Integer),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
- (r'(->|=>)', Name.Function),
- (r':[a-zA-Z_]\w*', Name.Variable),
- (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
- (r'[;,]', Punctuation),
- (r'[\[\]{}()]', Keyword.Type),
- (r'[a-zA-Z_]\w*:', Name.Variable),
- (words((
- 'class', 'extends', 'if', 'then', 'super', 'do', 'with',
- 'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
- 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
- 'break'), suffix=r'\b'),
- Keyword),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(and|or|not)\b', Operator.Word),
- (r'(self)\b', Name.Builtin.Pseudo),
- (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
- (r'[A-Z]\w*', Name.Class), # proper name
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
- 'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
- ],
- 'sqs': [
- ("'", String.Single, '#pop'),
- (".", String)
- ],
- 'dqs': [
- ('"', String.Double, '#pop'),
- (".", String)
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # set . as Operator instead of Punctuation
- for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
- if token == Punctuation and value == ".":
- token = Operator
- yield index, token, value
-
-
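As an illustrative aside (not part of the patch): the override above re-tags "." as Operator instead of Punctuation for MoonScript. A minimal sketch showing the effect on a dotted name:

    # sketch only: the "." between tbl and field comes back as Operator
    from pygments.lexers import MoonScriptLexer

    for index, token, value in MoonScriptLexer().get_tokens_unprocessed('tbl.field = 1\n'):
        print(token, repr(value))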
-class ChaiscriptLexer(RegexLexer):
- """
- For `ChaiScript <http://chaiscript.com/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'ChaiScript'
- aliases = ['chai', 'chaiscript']
- filenames = ['*.chai']
- mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'^\#.*?\n', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'[=+\-*/]', Operator),
- (r'(for|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch'
- r')\b', Keyword, 'slashstartsregex'),
- (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(attr|def|fun)\b', Keyword.Reserved),
- (r'(true|false)\b', Keyword.Constant),
- (r'(eval|throw)\b', Name.Builtin),
- (r'`\S+`', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"', String.Double, 'dqstring'),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ],
- 'dqstring': [
- (r'\$\{[^"}]+?\}', String.Interpol),
- (r'\$', String.Double),
- (r'\\\\', String.Double),
- (r'\\"', String.Double),
- (r'[^\\"$]+', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- }
-
-
-class LSLLexer(RegexLexer):
- """
- For Second Life's Linden Scripting Language source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'LSL'
- aliases = ['lsl']
- filenames = ['*.lsl']
- mimetypes = ['text/x-lsl']
-
- flags = re.MULTILINE
-
- lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
- lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
- lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
- lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
- lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)L
ist|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
- lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
- lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBR
IGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
- lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
- lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
- lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
- lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
- lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
- lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
- lsl_invalid_illegal = r'\b(?:event)\b'
- lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
- lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
- lsl_reserved_log = r'\b(?:print)\b'
- lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
-
- tokens = {
- 'root':
- [
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String.Double, 'string'),
- (lsl_keywords, Keyword),
- (lsl_types, Keyword.Type),
- (lsl_states, Name.Class),
- (lsl_events, Name.Builtin),
- (lsl_functions_builtin, Name.Function),
- (lsl_constants_float, Keyword.Constant),
- (lsl_constants_integer, Keyword.Constant),
- (lsl_constants_integer_boolean, Keyword.Constant),
- (lsl_constants_rotation, Keyword.Constant),
- (lsl_constants_string, Keyword.Constant),
- (lsl_constants_vector, Keyword.Constant),
- (lsl_invalid_broken, Error),
- (lsl_invalid_deprecated, Error),
- (lsl_invalid_illegal, Error),
- (lsl_invalid_unimplemented, Error),
- (lsl_reserved_godmode, Keyword.Reserved),
- (lsl_reserved_log, Keyword.Reserved),
- (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (lsl_operators, Operator),
- (r':=?', Error),
- (r'[,;{}()\[\]]', Punctuation),
- (r'\n+', Whitespace),
- (r'\s+', Whitespace)
- ],
- 'comment':
- [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'string':
- [
- (r'\\([nt"\\])', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\\.', Error),
- (r'[^"\\]+', String.Double),
- ]
- }
-
-
-class AppleScriptLexer(RegexLexer):
- """
- For `AppleScript source code
- <http://developer.apple.com/documentation/AppleScript/
- Conceptual/AppleScriptLangGuide>`_,
- including `AppleScript Studio
- <http://developer.apple.com/documentation/AppleScript/
- Reference/StudioReference>`_.
- Contributed by Andreas Amann <aamann@mac.com>.
-
- .. versionadded:: 1.0
- """
-
- name = 'AppleScript'
- aliases = ['applescript']
- filenames = ['*.applescript']
-
- flags = re.MULTILINE | re.DOTALL
-
- Identifiers = r'[a-zA-Z]\w*'
-
- # XXX: use words() for all of these
- Literals = ('AppleScript', 'current application', 'false', 'linefeed',
- 'missing value', 'pi', 'quote', 'result', 'return', 'space',
- 'tab', 'text item delimiters', 'true', 'version')
- Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
- 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
- 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
- 'text ', 'unit types', '(?:Unicode )?text', 'string')
- BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
- 'paragraph', 'word', 'year')
- HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
- 'aside from', 'at', 'below', 'beneath', 'beside',
- 'between', 'for', 'given', 'instead of', 'on', 'onto',
- 'out of', 'over', 'since')
- Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
- 'choose application', 'choose color', 'choose file( name)?',
- 'choose folder', 'choose from list',
- 'choose remote application', 'clipboard info',
- 'close( access)?', 'copy', 'count', 'current date', 'delay',
- 'delete', 'display (alert|dialog)', 'do shell script',
- 'duplicate', 'exists', 'get eof', 'get volume settings',
- 'info for', 'launch', 'list (disks|folder)', 'load script',
- 'log', 'make', 'mount volume', 'new', 'offset',
- 'open( (for access|location))?', 'path to', 'print', 'quit',
- 'random number', 'read', 'round', 'run( script)?',
- 'say', 'scripting components',
- 'set (eof|the clipboard to|volume)', 'store script',
- 'summarize', 'system attribute', 'system info',
- 'the clipboard', 'time to GMT', 'write', 'quoted form')
- References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
- 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
- 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
- 'before', 'behind', 'every', 'front', 'index', 'last',
- 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
- Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
- "isn't", "isn't equal( to)?", "is not equal( to)?",
- "doesn't equal", "does not equal", "(is )?greater than",
- "comes after", "is not less than or equal( to)?",
- "isn't less than or equal( to)?", "(is )?less than",
- "comes before", "is not greater than or equal( to)?",
- "isn't greater than or equal( to)?",
- "(is )?greater than or equal( to)?", "is not less than",
- "isn't less than", "does not come before",
- "doesn't come before", "(is )?less than or equal( to)?",
- "is not greater than", "isn't greater than",
- "does not come after", "doesn't come after", "starts? with",
- "begins? with", "ends? with", "contains?", "does not contain",
- "doesn't contain", "is in", "is contained by", "is not in",
- "is not contained by", "isn't contained by", "div", "mod",
- "not", "(a )?(ref( to)?|reference to)", "is", "does")
- Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
- 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
- 'try', 'until', 'using terms from', 'while', 'whith',
- 'with timeout( of)?', 'with transaction', 'by', 'continue',
- 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
- Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
- Reserved = ('but', 'put', 'returning', 'the')
- StudioClasses = ('action cell', 'alert reply', 'application', 'box',
- 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
- 'clip view', 'color well', 'color-panel',
- 'combo box( item)?', 'control',
- 'data( (cell|column|item|row|source))?', 'default entry',
- 'dialog reply', 'document', 'drag info', 'drawer',
- 'event', 'font(-panel)?', 'formatter',
- 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
- 'movie( view)?', 'open-panel', 'outline view', 'panel',
- 'pasteboard', 'plugin', 'popup button',
- 'progress indicator', 'responder', 'save-panel',
- 'scroll view', 'secure text field( cell)?', 'slider',
- 'sound', 'split view', 'stepper', 'tab view( item)?',
- 'table( (column|header cell|header view|view))',
- 'text( (field( cell)?|view))?', 'toolbar( item)?',
- 'user-defaults', 'view', 'window')
- StudioEvents = ('accept outline drop', 'accept table drop', 'action',
- 'activated', 'alert ended', 'awake from nib', 'became key',
- 'became main', 'begin editing', 'bounds changed',
- 'cell value', 'cell value changed', 'change cell value',
- 'change item value', 'changed', 'child of item',
- 'choose menu item', 'clicked', 'clicked toolbar item',
- 'closed', 'column clicked', 'column moved',
- 'column resized', 'conclude drop', 'data representation',
- 'deminiaturized', 'dialog ended', 'document nib name',
- 'double clicked', 'drag( (entered|exited|updated))?',
- 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
- 'item value', 'item value changed', 'items changed',
- 'keyboard down', 'keyboard up', 'launched',
- 'load data representation', 'miniaturized', 'mouse down',
- 'mouse dragged', 'mouse entered', 'mouse exited',
- 'mouse moved', 'mouse up', 'moved',
- 'number of browser rows', 'number of items',
- 'number of rows', 'open untitled', 'opened', 'panel ended',
- 'parameters updated', 'plugin loaded', 'prepare drop',
- 'prepare outline drag', 'prepare outline drop',
- 'prepare table drag', 'prepare table drop',
- 'read from file', 'resigned active', 'resigned key',
- 'resigned main', 'resized( sub views)?',
- 'right mouse down', 'right mouse dragged',
- 'right mouse up', 'rows changed', 'scroll wheel',
- 'selected tab view item', 'selection changed',
- 'selection changing', 'should begin editing',
- 'should close', 'should collapse item',
- 'should end editing', 'should expand item',
- 'should open( untitled)?',
- 'should quit( after last window closed)?',
- 'should select column', 'should select item',
- 'should select row', 'should select tab view item',
- 'should selection change', 'should zoom', 'shown',
- 'update menu item', 'update parameters',
- 'update toolbar item', 'was hidden', 'was miniaturized',
- 'will become active', 'will close', 'will dismiss',
- 'will display browser cell', 'will display cell',
- 'will display item cell', 'will display outline cell',
- 'will finish launching', 'will hide', 'will miniaturize',
- 'will move', 'will open', 'will pop up', 'will quit',
- 'will resign active', 'will resize( sub views)?',
- 'will select tab view item', 'will show', 'will zoom',
- 'write to file', 'zoomed')
- StudioCommands = ('animate', 'append', 'call method', 'center',
- 'close drawer', 'close panel', 'display',
- 'display alert', 'display dialog', 'display panel', 'go',
- 'hide', 'highlight', 'increment', 'item for',
- 'load image', 'load movie', 'load nib', 'load panel',
- 'load sound', 'localized string', 'lock focus', 'log',
- 'open drawer', 'path for', 'pause', 'perform action',
- 'play', 'register', 'resume', 'scroll', 'select( all)?',
- 'show', 'size to fit', 'start', 'step back',
- 'step forward', 'stop', 'synchronize', 'unlock focus',
- 'update')
- StudioProperties = ('accepts arrow key', 'action method', 'active',
- 'alignment', 'allowed identifiers',
- 'allows branch selection', 'allows column reordering',
- 'allows column resizing', 'allows column selection',
- 'allows customization',
- 'allows editing text attributes',
- 'allows empty selection', 'allows mixed state',
- 'allows multiple selection', 'allows reordering',
- 'allows undo', 'alpha( value)?', 'alternate image',
- 'alternate increment value', 'alternate title',
- 'animation delay', 'associated file name',
- 'associated object', 'auto completes', 'auto display',
- 'auto enables items', 'auto repeat',
- 'auto resizes( outline column)?',
- 'auto save expanded items', 'auto save name',
- 'auto save table columns', 'auto saves configuration',
- 'auto scroll', 'auto sizes all columns to fit',
- 'auto sizes cells', 'background color', 'bezel state',
- 'bezel style', 'bezeled', 'border rect', 'border type',
- 'bordered', 'bounds( rotation)?', 'box type',
- 'button returned', 'button type',
- 'can choose directories', 'can choose files',
- 'can draw', 'can hide',
- 'cell( (background color|size|type))?', 'characters',
- 'class', 'click count', 'clicked( data)? column',
- 'clicked data item', 'clicked( data)? row',
- 'closeable', 'collating', 'color( (mode|panel))',
- 'command key down', 'configuration',
- 'content(s| (size|view( margins)?))?', 'context',
- 'continuous', 'control key down', 'control size',
- 'control tint', 'control view',
- 'controller visible', 'coordinate system',
- 'copies( on scroll)?', 'corner view', 'current cell',
- 'current column', 'current( field)? editor',
- 'current( menu)? item', 'current row',
- 'current tab view item', 'data source',
- 'default identifiers', 'delta (x|y|z)',
- 'destination window', 'directory', 'display mode',
- 'displayed cell', 'document( (edited|rect|view))?',
- 'double value', 'dragged column', 'dragged distance',
- 'dragged items', 'draws( cell)? background',
- 'draws grid', 'dynamically scrolls', 'echos bullets',
- 'edge', 'editable', 'edited( data)? column',
- 'edited data item', 'edited( data)? row', 'enabled',
- 'enclosing scroll view', 'ending page',
- 'error handling', 'event number', 'event type',
- 'excluded from windows menu', 'executable path',
- 'expanded', 'fax number', 'field editor', 'file kind',
- 'file name', 'file type', 'first responder',
- 'first visible column', 'flipped', 'floating',
- 'font( panel)?', 'formatter', 'frameworks path',
- 'frontmost', 'gave up', 'grid color', 'has data items',
- 'has horizontal ruler', 'has horizontal scroller',
- 'has parent data item', 'has resize indicator',
- 'has shadow', 'has sub menu', 'has vertical ruler',
- 'has vertical scroller', 'header cell', 'header view',
- 'hidden', 'hides when deactivated', 'highlights by',
- 'horizontal line scroll', 'horizontal page scroll',
- 'horizontal ruler view', 'horizontally resizable',
- 'icon image', 'id', 'identifier',
- 'ignores multiple clicks',
- 'image( (alignment|dims when disabled|frame style|scaling))?',
- 'imports graphics', 'increment value',
- 'indentation per level', 'indeterminate', 'index',
- 'integer value', 'intercell spacing', 'item height',
- 'key( (code|equivalent( modifier)?|window))?',
- 'knob thickness', 'label', 'last( visible)? column',
- 'leading offset', 'leaf', 'level', 'line scroll',
- 'loaded', 'localized sort', 'location', 'loop mode',
- 'main( (bunde|menu|window))?', 'marker follows cell',
- 'matrix mode', 'maximum( content)? size',
- 'maximum visible columns',
- 'menu( form representation)?', 'miniaturizable',
- 'miniaturized', 'minimized image', 'minimized title',
- 'minimum column width', 'minimum( content)? size',
- 'modal', 'modified', 'mouse down state',
- 'movie( (controller|file|rect))?', 'muted', 'name',
- 'needs display', 'next state', 'next text',
- 'number of tick marks', 'only tick mark values',
- 'opaque', 'open panel', 'option key down',
- 'outline table column', 'page scroll', 'pages across',
- 'pages down', 'palette label', 'pane splitter',
- 'parent data item', 'parent window', 'pasteboard',
- 'path( (names|separator))?', 'playing',
- 'plays every frame', 'plays selection only', 'position',
- 'preferred edge', 'preferred type', 'pressure',
- 'previous text', 'prompt', 'properties',
- 'prototype cell', 'pulls down', 'rate',
- 'released when closed', 'repeated',
- 'requested print time', 'required file type',
- 'resizable', 'resized column', 'resource path',
- 'returns records', 'reuses columns', 'rich text',
- 'roll over', 'row height', 'rulers visible',
- 'save panel', 'scripts path', 'scrollable',
- 'selectable( identifiers)?', 'selected cell',
- 'selected( data)? columns?', 'selected data items?',
- 'selected( data)? rows?', 'selected item identifier',
- 'selection by rect', 'send action on arrow key',
- 'sends action when done editing', 'separates columns',
- 'separator item', 'sequence number', 'services menu',
- 'shared frameworks path', 'shared support path',
- 'sheet', 'shift key down', 'shows alpha',
- 'shows state by', 'size( mode)?',
- 'smart insert delete enabled', 'sort case sensitivity',
- 'sort column', 'sort order', 'sort type',
- 'sorted( data rows)?', 'sound', 'source( mask)?',
- 'spell checking enabled', 'starting page', 'state',
- 'string value', 'sub menu', 'super menu', 'super view',
- 'tab key traverses cells', 'tab state', 'tab type',
- 'tab view', 'table view', 'tag', 'target( printer)?',
- 'text color', 'text container insert',
- 'text container origin', 'text returned',
- 'tick mark position', 'time stamp',
- 'title(d| (cell|font|height|position|rect))?',
- 'tool tip', 'toolbar', 'trailing offset', 'transparent',
- 'treat packages as directories', 'truncated labels',
- 'types', 'unmodified characters', 'update views',
- 'use sort indicator', 'user defaults',
- 'uses data source', 'uses ruler',
- 'uses threaded animation',
- 'uses title from previous column', 'value wraps',
- 'version',
- 'vertical( (line scroll|page scroll|ruler view))?',
- 'vertically resizable', 'view',
- 'visible( document rect)?', 'volume', 'width', 'window',
- 'windows menu', 'wraps', 'zoomable', 'zoomed')
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (u'¬\\n', String.Escape),
- (r"'s\s+", Text), # This is a possessive, consider moving
- (r'(--|#).*?$', Comment),
- (r'\(\*', Comment.Multiline, 'comment'),
- (r'[(){}!,.:]', Punctuation),
- (u'(«)([^»]+)(»)',
- bygroups(Text, Name.Builtin, Text)),
- (r'\b((?:considering|ignoring)\s*)'
- r'(application responses|case|diacriticals|hyphens|'
- r'numeric strings|punctuation|white space)',
- bygroups(Keyword, Name.Builtin)),
- (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator),
- (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
- (r'^(\s*(?:on|end)\s+)'
- r'(%s)' % '|'.join(StudioEvents[::-1]),
- bygroups(Keyword, Name.Function)),
- (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\b(as )(%s)\b' % '|'.join(Classes),
- bygroups(Keyword, Name.Class)),
- (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
- (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(Control), Keyword),
- (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
- (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
- (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
- (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r'\b(%s)\b' % Identifiers, Name.Variable),
- (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
- (r'[-+]?\d+', Number.Integer),
- ],
- 'comment': [
+ ]
+ }
+
+ def __init__(self, **options):
+ self.func_name_highlighting = get_bool_opt(
+ options, 'func_name_highlighting', True)
+ self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
+
+ self._functions = set()
+ if self.func_name_highlighting:
+ from pygments.lexers._lua_builtins import MODULES
+ for mod, func in iteritems(MODULES):
+ if mod not in self.disabled_modules:
+ self._functions.update(func)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ elif '.' in value:
+ a, b = value.split('.')
+ yield index, Name, a
+ yield index + len(a), Punctuation, u'.'
+ yield index + len(a) + 1, Name, b
+ continue
+ yield index, token, value
+
+
+class MoonScriptLexer(LuaLexer):
+ """
+ For `MoonScript <http://moonscript.org>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = "MoonScript"
+ aliases = ["moon", "moonscript"]
+ filenames = ["*.moon"]
+ mimetypes = ['text/x-moonscript', 'application/x-moonscript']
+
+ tokens = {
+ 'root': [
+ (r'#!(.*?)$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ ('--.*$', Comment.Single),
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ (r'(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+ (r'(->|=>)', Name.Function),
+ (r':[a-zA-Z_]\w*', Name.Variable),
+ (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
+ (r'[;,]', Punctuation),
+ (r'[\[\]{}()]', Keyword.Type),
+ (r'[a-zA-Z_]\w*:', Name.Variable),
+ (words((
+ 'class', 'extends', 'if', 'then', 'super', 'do', 'with',
+ 'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
+ 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
+ 'break'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'(self)\b', Name.Builtin.Pseudo),
+ (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
+ (r'[A-Z]\w*', Name.Class), # proper name
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+ 'sqs': [
+ ("'", String.Single, '#pop'),
+ (".", String)
+ ],
+ 'dqs': [
+ ('"', String.Double, '#pop'),
+ (".", String)
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # set . as Operator instead of Punctuation
+ for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
+ if token == Punctuation and value == ".":
+ token = Operator
+ yield index, token, value
+
+
+class ChaiscriptLexer(RegexLexer):
+ """
+ For `ChaiScript <http://chaiscript.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'ChaiScript'
+ aliases = ['chai', 'chaiscript']
+ filenames = ['*.chai']
+ mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'^\#.*?\n', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'[=+\-*/]', Operator),
+ (r'(for|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch'
+ r')\b', Keyword, 'slashstartsregex'),
+ (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(attr|def|fun)\b', Keyword.Reserved),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(eval|throw)\b', Name.Builtin),
+ (r'`\S+`', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"', String.Double, 'dqstring'),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ],
+ 'dqstring': [
+ (r'\$\{[^"}]+?\}', String.Interpol),
+ (r'\$', String.Double),
+ (r'\\\\', String.Double),
+ (r'\\"', String.Double),
+ (r'[^\\"$]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
+
+
+class LSLLexer(RegexLexer):
+ """
+ For Second Life's Linden Scripting Language source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'LSL'
+ aliases = ['lsl']
+ filenames = ['*.lsl']
+ mimetypes = ['text/x-lsl']
+
+ flags = re.MULTILINE
+
+ lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
+ lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
+ lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
+ lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
+ lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
+ lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
+ lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
+ lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
+ lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
+ lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
+ lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
+ lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
+ lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
+ lsl_invalid_illegal = r'\b(?:event)\b'
+ lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
+ lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
+ lsl_reserved_log = r'\b(?:print)\b'
+ lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
+
+ tokens = {
+ 'root':
+ [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String.Double, 'string'),
+ (lsl_keywords, Keyword),
+ (lsl_types, Keyword.Type),
+ (lsl_states, Name.Class),
+ (lsl_events, Name.Builtin),
+ (lsl_functions_builtin, Name.Function),
+ (lsl_constants_float, Keyword.Constant),
+ (lsl_constants_integer, Keyword.Constant),
+ (lsl_constants_integer_boolean, Keyword.Constant),
+ (lsl_constants_rotation, Keyword.Constant),
+ (lsl_constants_string, Keyword.Constant),
+ (lsl_constants_vector, Keyword.Constant),
+ (lsl_invalid_broken, Error),
+ (lsl_invalid_deprecated, Error),
+ (lsl_invalid_illegal, Error),
+ (lsl_invalid_unimplemented, Error),
+ (lsl_reserved_godmode, Keyword.Reserved),
+ (lsl_reserved_log, Keyword.Reserved),
+ (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (lsl_operators, Operator),
+ (r':=?', Error),
+ (r'[,;{}()\[\]]', Punctuation),
+ (r'\n+', Whitespace),
+ (r'\s+', Whitespace)
+ ],
+ 'comment':
+ [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'string':
+ [
+ (r'\\([nt"\\])', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\\.', Error),
+ (r'[^"\\]+', String.Double),
+ ]
+ }
+
+
+class AppleScriptLexer(RegexLexer):
+ """
+ For `AppleScript source code
+ <http://developer.apple.com/documentation/AppleScript/
+ Conceptual/AppleScriptLangGuide>`_,
+ including `AppleScript Studio
+ <http://developer.apple.com/documentation/AppleScript/
+ Reference/StudioReference>`_.
+ Contributed by Andreas Amann <aamann@mac.com>.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'AppleScript'
+ aliases = ['applescript']
+ filenames = ['*.applescript']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ Identifiers = r'[a-zA-Z]\w*'
+
+ # XXX: use words() for all of these
+ Literals = ('AppleScript', 'current application', 'false', 'linefeed',
+ 'missing value', 'pi', 'quote', 'result', 'return', 'space',
+ 'tab', 'text item delimiters', 'true', 'version')
+ Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
+ 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
+ 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
+ 'text ', 'unit types', '(?:Unicode )?text', 'string')
+ BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
+ 'paragraph', 'word', 'year')
+ HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
+ 'aside from', 'at', 'below', 'beneath', 'beside',
+ 'between', 'for', 'given', 'instead of', 'on', 'onto',
+ 'out of', 'over', 'since')
+ Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
+ 'choose application', 'choose color', 'choose file( name)?',
+ 'choose folder', 'choose from list',
+ 'choose remote application', 'clipboard info',
+ 'close( access)?', 'copy', 'count', 'current date', 'delay',
+ 'delete', 'display (alert|dialog)', 'do shell script',
+ 'duplicate', 'exists', 'get eof', 'get volume settings',
+ 'info for', 'launch', 'list (disks|folder)', 'load script',
+ 'log', 'make', 'mount volume', 'new', 'offset',
+ 'open( (for access|location))?', 'path to', 'print', 'quit',
+ 'random number', 'read', 'round', 'run( script)?',
+ 'say', 'scripting components',
+ 'set (eof|the clipboard to|volume)', 'store script',
+ 'summarize', 'system attribute', 'system info',
+ 'the clipboard', 'time to GMT', 'write', 'quoted form')
+ References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
+ 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
+ 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
+ 'before', 'behind', 'every', 'front', 'index', 'last',
+ 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
+ Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
+ "isn't", "isn't equal( to)?", "is not equal( to)?",
+ "doesn't equal", "does not equal", "(is )?greater than",
+ "comes after", "is not less than or equal( to)?",
+ "isn't less than or equal( to)?", "(is )?less than",
+ "comes before", "is not greater than or equal( to)?",
+ "isn't greater than or equal( to)?",
+ "(is )?greater than or equal( to)?", "is not less than",
+ "isn't less than", "does not come before",
+ "doesn't come before", "(is )?less than or equal( to)?",
+ "is not greater than", "isn't greater than",
+ "does not come after", "doesn't come after", "starts? with",
+ "begins? with", "ends? with", "contains?", "does not contain",
+ "doesn't contain", "is in", "is contained by", "is not in",
+ "is not contained by", "isn't contained by", "div", "mod",
+ "not", "(a )?(ref( to)?|reference to)", "is", "does")
+ Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
+ 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
+ 'try', 'until', 'using terms from', 'while', 'whith',
+ 'with timeout( of)?', 'with transaction', 'by', 'continue',
+ 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
+ Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
+ Reserved = ('but', 'put', 'returning', 'the')
+ StudioClasses = ('action cell', 'alert reply', 'application', 'box',
+ 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
+ 'clip view', 'color well', 'color-panel',
+ 'combo box( item)?', 'control',
+ 'data( (cell|column|item|row|source))?', 'default entry',
+ 'dialog reply', 'document', 'drag info', 'drawer',
+ 'event', 'font(-panel)?', 'formatter',
+ 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
+ 'movie( view)?', 'open-panel', 'outline view', 'panel',
+ 'pasteboard', 'plugin', 'popup button',
+ 'progress indicator', 'responder', 'save-panel',
+ 'scroll view', 'secure text field( cell)?', 'slider',
+ 'sound', 'split view', 'stepper', 'tab view( item)?',
+ 'table( (column|header cell|header view|view))',
+ 'text( (field( cell)?|view))?', 'toolbar( item)?',
+ 'user-defaults', 'view', 'window')
+ StudioEvents = ('accept outline drop', 'accept table drop', 'action',
+ 'activated', 'alert ended', 'awake from nib', 'became key',
+ 'became main', 'begin editing', 'bounds changed',
+ 'cell value', 'cell value changed', 'change cell value',
+ 'change item value', 'changed', 'child of item',
+ 'choose menu item', 'clicked', 'clicked toolbar item',
+ 'closed', 'column clicked', 'column moved',
+ 'column resized', 'conclude drop', 'data representation',
+ 'deminiaturized', 'dialog ended', 'document nib name',
+ 'double clicked', 'drag( (entered|exited|updated))?',
+ 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
+ 'item value', 'item value changed', 'items changed',
+ 'keyboard down', 'keyboard up', 'launched',
+ 'load data representation', 'miniaturized', 'mouse down',
+ 'mouse dragged', 'mouse entered', 'mouse exited',
+ 'mouse moved', 'mouse up', 'moved',
+ 'number of browser rows', 'number of items',
+ 'number of rows', 'open untitled', 'opened', 'panel ended',
+ 'parameters updated', 'plugin loaded', 'prepare drop',
+ 'prepare outline drag', 'prepare outline drop',
+ 'prepare table drag', 'prepare table drop',
+ 'read from file', 'resigned active', 'resigned key',
+ 'resigned main', 'resized( sub views)?',
+ 'right mouse down', 'right mouse dragged',
+ 'right mouse up', 'rows changed', 'scroll wheel',
+ 'selected tab view item', 'selection changed',
+ 'selection changing', 'should begin editing',
+ 'should close', 'should collapse item',
+ 'should end editing', 'should expand item',
+ 'should open( untitled)?',
+ 'should quit( after last window closed)?',
+ 'should select column', 'should select item',
+ 'should select row', 'should select tab view item',
+ 'should selection change', 'should zoom', 'shown',
+ 'update menu item', 'update parameters',
+ 'update toolbar item', 'was hidden', 'was miniaturized',
+ 'will become active', 'will close', 'will dismiss',
+ 'will display browser cell', 'will display cell',
+ 'will display item cell', 'will display outline cell',
+ 'will finish launching', 'will hide', 'will miniaturize',
+ 'will move', 'will open', 'will pop up', 'will quit',
+ 'will resign active', 'will resize( sub views)?',
+ 'will select tab view item', 'will show', 'will zoom',
+ 'write to file', 'zoomed')
+ StudioCommands = ('animate', 'append', 'call method', 'center',
+ 'close drawer', 'close panel', 'display',
+ 'display alert', 'display dialog', 'display panel', 'go',
+ 'hide', 'highlight', 'increment', 'item for',
+ 'load image', 'load movie', 'load nib', 'load panel',
+ 'load sound', 'localized string', 'lock focus', 'log',
+ 'open drawer', 'path for', 'pause', 'perform action',
+ 'play', 'register', 'resume', 'scroll', 'select( all)?',
+ 'show', 'size to fit', 'start', 'step back',
+ 'step forward', 'stop', 'synchronize', 'unlock focus',
+ 'update')
+ StudioProperties = ('accepts arrow key', 'action method', 'active',
+ 'alignment', 'allowed identifiers',
+ 'allows branch selection', 'allows column reordering',
+ 'allows column resizing', 'allows column selection',
+ 'allows customization',
+ 'allows editing text attributes',
+ 'allows empty selection', 'allows mixed state',
+ 'allows multiple selection', 'allows reordering',
+ 'allows undo', 'alpha( value)?', 'alternate image',
+ 'alternate increment value', 'alternate title',
+ 'animation delay', 'associated file name',
+ 'associated object', 'auto completes', 'auto display',
+ 'auto enables items', 'auto repeat',
+ 'auto resizes( outline column)?',
+ 'auto save expanded items', 'auto save name',
+ 'auto save table columns', 'auto saves configuration',
+ 'auto scroll', 'auto sizes all columns to fit',
+ 'auto sizes cells', 'background color', 'bezel state',
+ 'bezel style', 'bezeled', 'border rect', 'border type',
+ 'bordered', 'bounds( rotation)?', 'box type',
+ 'button returned', 'button type',
+ 'can choose directories', 'can choose files',
+ 'can draw', 'can hide',
+ 'cell( (background color|size|type))?', 'characters',
+ 'class', 'click count', 'clicked( data)? column',
+ 'clicked data item', 'clicked( data)? row',
+ 'closeable', 'collating', 'color( (mode|panel))',
+ 'command key down', 'configuration',
+ 'content(s| (size|view( margins)?))?', 'context',
+ 'continuous', 'control key down', 'control size',
+ 'control tint', 'control view',
+ 'controller visible', 'coordinate system',
+ 'copies( on scroll)?', 'corner view', 'current cell',
+ 'current column', 'current( field)? editor',
+ 'current( menu)? item', 'current row',
+ 'current tab view item', 'data source',
+ 'default identifiers', 'delta (x|y|z)',
+ 'destination window', 'directory', 'display mode',
+ 'displayed cell', 'document( (edited|rect|view))?',
+ 'double value', 'dragged column', 'dragged distance',
+ 'dragged items', 'draws( cell)? background',
+ 'draws grid', 'dynamically scrolls', 'echos bullets',
+ 'edge', 'editable', 'edited( data)? column',
+ 'edited data item', 'edited( data)? row', 'enabled',
+ 'enclosing scroll view', 'ending page',
+ 'error handling', 'event number', 'event type',
+ 'excluded from windows menu', 'executable path',
+ 'expanded', 'fax number', 'field editor', 'file kind',
+ 'file name', 'file type', 'first responder',
+ 'first visible column', 'flipped', 'floating',
+ 'font( panel)?', 'formatter', 'frameworks path',
+ 'frontmost', 'gave up', 'grid color', 'has data items',
+ 'has horizontal ruler', 'has horizontal scroller',
+ 'has parent data item', 'has resize indicator',
+ 'has shadow', 'has sub menu', 'has vertical ruler',
+ 'has vertical scroller', 'header cell', 'header view',
+ 'hidden', 'hides when deactivated', 'highlights by',
+ 'horizontal line scroll', 'horizontal page scroll',
+ 'horizontal ruler view', 'horizontally resizable',
+ 'icon image', 'id', 'identifier',
+ 'ignores multiple clicks',
+ 'image( (alignment|dims when disabled|frame style|scaling))?',
+ 'imports graphics', 'increment value',
+ 'indentation per level', 'indeterminate', 'index',
+ 'integer value', 'intercell spacing', 'item height',
+ 'key( (code|equivalent( modifier)?|window))?',
+ 'knob thickness', 'label', 'last( visible)? column',
+ 'leading offset', 'leaf', 'level', 'line scroll',
+ 'loaded', 'localized sort', 'location', 'loop mode',
+ 'main( (bunde|menu|window))?', 'marker follows cell',
+ 'matrix mode', 'maximum( content)? size',
+ 'maximum visible columns',
+ 'menu( form representation)?', 'miniaturizable',
+ 'miniaturized', 'minimized image', 'minimized title',
+ 'minimum column width', 'minimum( content)? size',
+ 'modal', 'modified', 'mouse down state',
+ 'movie( (controller|file|rect))?', 'muted', 'name',
+ 'needs display', 'next state', 'next text',
+ 'number of tick marks', 'only tick mark values',
+ 'opaque', 'open panel', 'option key down',
+ 'outline table column', 'page scroll', 'pages across',
+ 'pages down', 'palette label', 'pane splitter',
+ 'parent data item', 'parent window', 'pasteboard',
+ 'path( (names|separator))?', 'playing',
+ 'plays every frame', 'plays selection only', 'position',
+ 'preferred edge', 'preferred type', 'pressure',
+ 'previous text', 'prompt', 'properties',
+ 'prototype cell', 'pulls down', 'rate',
+ 'released when closed', 'repeated',
+ 'requested print time', 'required file type',
+ 'resizable', 'resized column', 'resource path',
+ 'returns records', 'reuses columns', 'rich text',
+ 'roll over', 'row height', 'rulers visible',
+ 'save panel', 'scripts path', 'scrollable',
+ 'selectable( identifiers)?', 'selected cell',
+ 'selected( data)? columns?', 'selected data items?',
+ 'selected( data)? rows?', 'selected item identifier',
+ 'selection by rect', 'send action on arrow key',
+ 'sends action when done editing', 'separates columns',
+ 'separator item', 'sequence number', 'services menu',
+ 'shared frameworks path', 'shared support path',
+ 'sheet', 'shift key down', 'shows alpha',
+ 'shows state by', 'size( mode)?',
+ 'smart insert delete enabled', 'sort case sensitivity',
+ 'sort column', 'sort order', 'sort type',
+ 'sorted( data rows)?', 'sound', 'source( mask)?',
+ 'spell checking enabled', 'starting page', 'state',
+ 'string value', 'sub menu', 'super menu', 'super view',
+ 'tab key traverses cells', 'tab state', 'tab type',
+ 'tab view', 'table view', 'tag', 'target( printer)?',
+ 'text color', 'text container insert',
+ 'text container origin', 'text returned',
+ 'tick mark position', 'time stamp',
+ 'title(d| (cell|font|height|position|rect))?',
+ 'tool tip', 'toolbar', 'trailing offset', 'transparent',
+ 'treat packages as directories', 'truncated labels',
+ 'types', 'unmodified characters', 'update views',
+ 'use sort indicator', 'user defaults',
+ 'uses data source', 'uses ruler',
+ 'uses threaded animation',
+ 'uses title from previous column', 'value wraps',
+ 'version',
+ 'vertical( (line scroll|page scroll|ruler view))?',
+ 'vertically resizable', 'view',
+ 'visible( document rect)?', 'volume', 'width', 'window',
+ 'windows menu', 'wraps', 'zoomable', 'zoomed')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (u'¬\\n', String.Escape),
+ (r"'s\s+", Text), # This is a possessive, consider moving
+ (r'(--|#).*?$', Comment),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ (r'[(){}!,.:]', Punctuation),
+ (u'(«)([^»]+)(»)',
+ bygroups(Text, Name.Builtin, Text)),
+ (r'\b((?:considering|ignoring)\s*)'
+ r'(application responses|case|diacriticals|hyphens|'
+ r'numeric strings|punctuation|white space)',
+ bygroups(Keyword, Name.Builtin)),
+ (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator),
+ (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
+ (r'^(\s*(?:on|end)\s+)'
+ r'(%s)' % '|'.join(StudioEvents[::-1]),
+ bygroups(Keyword, Name.Function)),
+ (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\b(as )(%s)\b' % '|'.join(Classes),
+ bygroups(Keyword, Name.Class)),
+ (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
+ (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(Control), Keyword),
+ (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
+ (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
+ (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
+ (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r'\b(%s)\b' % Identifiers, Name.Variable),
+ (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ 'comment': [
(r'\(\*', Comment.Multiline, '#push'),
(r'\*\)', Comment.Multiline, '#pop'),
- ('[^*(]+', Comment.Multiline),
- ('[*(]', Comment.Multiline),
- ],
- }
-
-
-class RexxLexer(RegexLexer):
- """
- `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
- a wide range of different platforms with its roots found on mainframe
- systems. It is popular for I/O- and data-based tasks and can act as glue
- language to bind different applications together.
-
- .. versionadded:: 2.0
- """
- name = 'Rexx'
- aliases = ['rexx', 'arexx']
- filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
- mimetypes = ['text/x-rexx']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s', Whitespace),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String, 'string_double'),
- (r"'", String, 'string_single'),
- (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
- (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration)),
- (r'([a-z_]\w*)(\s*)(:)',
- bygroups(Name.Label, Whitespace, Operator)),
- include('function'),
- include('keyword'),
- include('operator'),
- (r'[a-z_]\w*', Text),
- ],
- 'function': [
- (words((
- 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
- 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
- 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
- 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
- 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
- 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
- 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
- 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
- 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
- 'xrange'), suffix=r'(\s*)(\()'),
- bygroups(Name.Builtin, Whitespace, Operator)),
- ],
- 'keyword': [
- (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
- r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
- r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
- r'while)\b', Keyword.Reserved),
- ],
- 'operator': [
- (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
- r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
- r'¬>>|¬>|¬|\.|,)', Operator),
- ],
- 'string_double': [
- (r'[^"\n]+', String),
- (r'""', String),
- (r'"', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'string_single': [
- (r'[^\'\n]', String),
- (r'\'\'', String),
- (r'\'', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'comment': [
- (r'[^*]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ]
- }
-
- _c = lambda s: re.compile(s, re.MULTILINE)
- _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
- _ADDRESS_PATTERN = _c(r'^\s*address\s+')
- _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
- _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
- _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
- _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
- _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
- PATTERNS_AND_WEIGHTS = (
- (_ADDRESS_COMMAND_PATTERN, 0.2),
- (_ADDRESS_PATTERN, 0.05),
- (_DO_WHILE_PATTERN, 0.1),
- (_ELSE_DO_PATTERN, 0.1),
- (_IF_THEN_DO_PATTERN, 0.1),
- (_PROCEDURE_PATTERN, 0.5),
- (_PARSE_ARG_PATTERN, 0.2),
- )
-
- def analyse_text(text):
- """
- Check for initial comment and patterns that distinguish Rexx from other
- C-like languages.
- """
- if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
- # Header matches MVS Rexx requirements, this is certainly a Rexx
- # script.
- return 1.0
- elif text.startswith('/*'):
- # Header matches general Rexx requirements; the source code might
- # still be any language using C comments such as C++, C# or Java.
- lowerText = text.lower()
- result = sum(weight
- for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
- if pattern.search(lowerText)) + 0.01
- return min(result, 1.0)
-
-
-class MOOCodeLexer(RegexLexer):
- """
- For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
- language).
-
- .. versionadded:: 0.9
- """
- name = 'MOOCode'
- filenames = ['*.moo']
- aliases = ['moocode', 'moo']
- mimetypes = ['text/x-moocode']
-
- tokens = {
- 'root': [
- # Numbers
- (r'(0|[1-9][0-9_]*)', Number.Integer),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
- # exceptions
- (r'(E_PERM|E_DIV)', Name.Exception),
- # db-refs
- (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
- # Keywords
- (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
- r'|endwhile|break|continue|return|try'
- r'|except|endtry|finally|in)\b', Keyword),
- # builtins
- (r'(random|length)', Name.Builtin),
- # special variables
- (r'(player|caller|this|args)', Name.Variable.Instance),
- # skip whitespace
- (r'\s+', Text),
- (r'\n', Text),
- # other operators
- (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
- # function call
- (r'(\w+)(\()', bygroups(Name.Function, Operator)),
- # variables
- (r'(\w+)', Text),
- ]
- }
-
-
-class HybrisLexer(RegexLexer):
- """
- For `Hybris <http://www.hybris-lang.org>`_ source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'Hybris'
- aliases = ['hybris', 'hy']
- filenames = ['*.hy', '*.hyb']
- mimetypes = ['text/x-hybris', 'application/x-hybris']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:function|method|operator\s+)+?)'
- r'([a-zA-Z_]\w*)'
- r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
- r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
- (r'(extends|private|protected|public|static|throws|function|method|'
- r'operator)\b', Keyword.Declaration),
- (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
- r'__INC_PATH__)\b', Keyword.Constant),
- (r'(class|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import|include)(\s+)',
- bygroups(Keyword.Namespace, Text), 'import'),
- (words((
- 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
- 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
- 'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
- 'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
- 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
- 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
- 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
- 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
- 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
- 'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
- 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
- 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
- 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
- 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
- 'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
- 'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
- 'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
- 'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
- 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
- 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
- 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
- 'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
- 'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
- 'contains', 'join'), suffix=r'\b'),
- Name.Builtin),
- (words((
- 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
- 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
- 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
- Keyword.Type),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class EasytrieveLexer(RegexLexer):
- """
- Easytrieve Plus is a programming language for extracting, filtering and
- converting sequential data. Furthermore, it can lay out data for reports.
- It is mainly used on mainframe platforms and can access several of the
- mainframe's native file formats. It is somewhat comparable to awk.
-
- .. versionadded:: 2.1
- """
- name = 'Easytrieve'
- aliases = ['easytrieve']
- filenames = ['*.ezt', '*.mac']
- mimetypes = ['text/x-easytrieve']
- flags = 0
-
- # Note: We cannot use r'\b' at the start and end of keywords because
- # Easytrieve Plus delimiter characters are:
- #
- # * space ( )
- # * apostrophe (')
- # * period (.)
- # * comma (,)
- # * parenthesis ( and )
- # * colon (:)
- #
- # Additionally, words end once a '*' appears, indicating a comment.
- _DELIMITERS = r' \'.,():\n'
- _DELIMITERS_OR_COMENT = _DELIMITERS + '*'
- _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
- _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
- _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']'
- _OPERATORS_PATTERN = u'[.+\\-/=\\[\\](){}<>;,&%¬]'
- _KEYWORDS = [
- 'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
- 'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
- 'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
- 'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
- 'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
- 'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
- 'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
- 'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
- 'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
- 'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
- 'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
- 'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
- 'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
- 'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
- 'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
- 'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
- 'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
- 'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
- 'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
- 'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
- 'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
- 'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
- 'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
- 'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
- 'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
- 'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
- 'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
- 'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
- 'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
- 'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
- 'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
- 'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
- ]
-
- tokens = {
- 'root': [
- (r'\*.*\n', Comment.Single),
- (r'\n+', Whitespace),
- # Macro argument
- (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
- 'after_macro_argument'),
- # Macro call
- (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
- (r'(FILE|MACRO|REPORT)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
- (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
- bygroups(Keyword.Declaration, Operator)),
- (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
- bygroups(Keyword.Reserved, Operator)),
- (_OPERATORS_PATTERN, Operator),
- # Procedure declaration
- (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration, Whitespace)),
- (r'[0-9]+\.[0-9]*', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String),
- (r'\s+', Whitespace),
- # Everything else just belongs to a name
+ ('[^*(]+', Comment.Multiline),
+ ('[*(]', Comment.Multiline),
+ ],
+ }
+
+
+class RexxLexer(RegexLexer):
+ """
+ `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
+ a wide range of different platforms with its roots found on mainframe
+ systems. It is popular for I/O- and data-based tasks and can act as glue
+ language to bind different applications together.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Rexx'
+ aliases = ['rexx', 'arexx']
+ filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
+ mimetypes = ['text/x-rexx']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s', Whitespace),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String, 'string_double'),
+ (r"'", String, 'string_single'),
+ (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
+ (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
+ bygroups(Name.Function, Whitespace, Operator, Whitespace,
+ Keyword.Declaration)),
+ (r'([a-z_]\w*)(\s*)(:)',
+ bygroups(Name.Label, Whitespace, Operator)),
+ include('function'),
+ include('keyword'),
+ include('operator'),
+ (r'[a-z_]\w*', Text),
+ ],
+ 'function': [
+ (words((
+ 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
+ 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
+ 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
+ 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
+ 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
+ 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
+ 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
+ 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
+ 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
+ 'xrange'), suffix=r'(\s*)(\()'),
+ bygroups(Name.Builtin, Whitespace, Operator)),
+ ],
+ 'keyword': [
+ (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
+ r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
+ r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
+ r'while)\b', Keyword.Reserved),
+ ],
+ 'operator': [
+ (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
+ r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
+ r'¬>>|¬>|¬|\.|,)', Operator),
+ ],
+ 'string_double': [
+ (r'[^"\n]+', String),
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'string_single': [
+ (r'[^\'\n]', String),
+ (r'\'\'', String),
+ (r'\'', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'comment': [
+ (r'[^*]+', Comment.Multiline),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'\*', Comment.Multiline),
+ ]
+ }
+
+ _c = lambda s: re.compile(s, re.MULTILINE)
+ _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
+ _ADDRESS_PATTERN = _c(r'^\s*address\s+')
+ _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
+ _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
+ _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
+ _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
+ _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
+ PATTERNS_AND_WEIGHTS = (
+ (_ADDRESS_COMMAND_PATTERN, 0.2),
+ (_ADDRESS_PATTERN, 0.05),
+ (_DO_WHILE_PATTERN, 0.1),
+ (_ELSE_DO_PATTERN, 0.1),
+ (_IF_THEN_DO_PATTERN, 0.1),
+ (_PROCEDURE_PATTERN, 0.5),
+ (_PARSE_ARG_PATTERN, 0.2),
+ )
+
+ def analyse_text(text):
+ """
+ Check for initial comment and patterns that distinguish Rexx from other
+ C-like languages.
+ """
+ if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
+ # Header matches MVS Rexx requirements; this is certainly a Rexx
+ # script.
+ return 1.0
+ elif text.startswith('/*'):
+ # Header matches general Rexx requirements; the source code might
+ # still be any language using C comments such as C++, C# or Java.
+ lowerText = text.lower()
+ result = sum(weight
+ for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
+ if pattern.search(lowerText)) + 0.01
+ return min(result, 1.0)
+
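A minimal illustrative sketch of how the weighted patterns above feed lexer guessing, assuming Pygments is installed; the two Rexx snippets are invented for illustration and the scores in the comments are the expected values, not output taken from the patch:

from pygments.lexers import RexxLexer, guess_lexer

# An MVS-style '/* REXX */' header satisfies the re.search check.
mvs_style = "/* REXX */\nsay 'hello'\n"
print(RexxLexer.analyse_text(mvs_style))          # expected: 1.0

# A generic C-style header falls back to the PATTERNS_AND_WEIGHTS sum.
generic = "/* utility */\nparse arg name\nsay name\n"
print(round(RexxLexer.analyse_text(generic), 2))  # expected: 0.21 (parse-arg weight 0.2 + 0.01 base)
print(guess_lexer(mvs_style).name)                # expected to report 'Rexx'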
+
+class MOOCodeLexer(RegexLexer):
+ """
+ For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
+ language).
+
+ .. versionadded:: 0.9
+ """
+ name = 'MOOCode'
+ filenames = ['*.moo']
+ aliases = ['moocode', 'moo']
+ mimetypes = ['text/x-moocode']
+
+ tokens = {
+ 'root': [
+ # Numbers
+ (r'(0|[1-9][0-9_]*)', Number.Integer),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # exceptions
+ (r'(E_PERM|E_DIV)', Name.Exception),
+ # db-refs
+ (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
+ # Keywords
+ (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
+ r'|endwhile|break|continue|return|try'
+ r'|except|endtry|finally|in)\b', Keyword),
+ # builtins
+ (r'(random|length)', Name.Builtin),
+ # special variables
+ (r'(player|caller|this|args)', Name.Variable.Instance),
+ # skip whitespace
+ (r'\s+', Text),
+ (r'\n', Text),
+ # other operators
+ (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
+ # function call
+ (r'(\w+)(\()', bygroups(Name.Function, Operator)),
+ # variables
+ (r'(\w+)', Text),
+ ]
+ }
+
+
+class HybrisLexer(RegexLexer):
+ """
+ For `Hybris <http://www.hybris-lang.org>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Hybris'
+ aliases = ['hybris', 'hy']
+ filenames = ['*.hy', '*.hyb']
+ mimetypes = ['text/x-hybris', 'application/x-hybris']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:function|method|operator\s+)+?)'
+ r'([a-zA-Z_]\w*)'
+ r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
+ r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
+ (r'(extends|private|protected|public|static|throws|function|method|'
+ r'operator)\b', Keyword.Declaration),
+ (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
+ r'__INC_PATH__)\b', Keyword.Constant),
+ (r'(class|struct)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import|include)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'import'),
+ (words((
+ 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
+ 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
+ 'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
+ 'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
+ 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
+ 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
+ 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
+ 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
+ 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
+ 'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
+ 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
+ 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
+ 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
+ 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
+ 'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
+ 'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
+ 'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
+ 'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
+ 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
+ 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
+ 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
+ 'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
+ 'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
+ 'contains', 'join'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
+ 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
+ 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
+ Keyword.Type),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class EasytrieveLexer(RegexLexer):
+ """
+ Easytrieve Plus is a programming language for extracting, filtering and
+ converting sequential data. Furthermore, it can lay out data for reports.
+ It is mainly used on mainframe platforms and can access several of the
+ mainframe's native file formats. It is somewhat comparable to awk.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Easytrieve'
+ aliases = ['easytrieve']
+ filenames = ['*.ezt', '*.mac']
+ mimetypes = ['text/x-easytrieve']
+ flags = 0
+
+ # Note: We cannot use r'\b' at the start and end of keywords because
+ # Easytrieve Plus delimiter characters are:
+ #
+ # * space ( )
+ # * apostrophe (')
+ # * period (.)
+ # * comma (,)
+ * parentheses ( and )
+ # * colon (:)
+ #
+ # Additionally, words end once a '*' appears, indicating a comment.
+ _DELIMITERS = r' \'.,():\n'
+ _DELIMITERS_OR_COMENT = _DELIMITERS + '*'
+ _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
+ _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
+ _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']'
+ _OPERATORS_PATTERN = u'[.+\\-/=\\[\\](){}<>;,&%¬]'
+ _KEYWORDS = [
+ 'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
+ 'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
+ 'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
+ 'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
+ 'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
+ 'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
+ 'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
+ 'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
+ 'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
+ 'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
+ 'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
+ 'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
+ 'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
+ 'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
+ 'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
+ 'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
+ 'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
+ 'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
+ 'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
+ 'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
+ 'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
+ 'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
+ 'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
+ 'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
+ 'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
+ 'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
+ 'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
+ 'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
+ 'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
+ 'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
+ 'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
+ 'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\*.*\n', Comment.Single),
+ (r'\n+', Whitespace),
+ # Macro argument
+ (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
+ 'after_macro_argument'),
+ # Macro call
+ (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
+ (r'(FILE|MACRO|REPORT)(\s+)',
+ bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
+ (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
+ bygroups(Keyword.Declaration, Operator)),
+ (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
+ bygroups(Keyword.Reserved, Operator)),
+ (_OPERATORS_PATTERN, Operator),
+ # Procedure declaration
+ (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
+ bygroups(Name.Function, Whitespace, Operator, Whitespace,
+ Keyword.Declaration, Whitespace)),
+ (r'[0-9]+\.[0-9]*', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String),
+ (r'\s+', Whitespace),
+ # Everything else just belongs to a name
(_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
- ],
- 'after_declaration': [
- (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
+ ],
+ 'after_declaration': [
+ (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
default('#pop'),
- ],
- 'after_macro_argument': [
- (r'\*.*\n', Comment.Single, '#pop'),
- (r'\s+', Whitespace, '#pop'),
- (_OPERATORS_PATTERN, Operator, '#pop'),
- (r"'(''|[^'])*'", String, '#pop'),
- # Everything else just belongs to a name
+ ],
+ 'after_macro_argument': [
+ (r'\*.*\n', Comment.Single, '#pop'),
+ (r'\s+', Whitespace, '#pop'),
+ (_OPERATORS_PATTERN, Operator, '#pop'),
+ (r"'(''|[^'])*'", String, '#pop'),
+ # Everything else just belongs to a name
(_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
- ],
- }
- _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
- _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')
-
- def analyse_text(text):
- """
- Perform a structural analysis for basic Easytrieve constructs.
- """
- result = 0.0
- lines = text.split('\n')
- hasEndProc = False
- hasHeaderComment = False
- hasFile = False
- hasJob = False
- hasProc = False
- hasParm = False
- hasReport = False
-
- def isCommentLine(line):
- return EasytrieveLexer._COMMENT_LINE_REGEX.match(line) is not None
-
- def isEmptyLine(line):
- return not bool(line.strip())
-
- # Remove possible empty lines and header comments.
- while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
- if not isEmptyLine(lines[0]):
- hasHeaderComment = True
- del lines[0]
-
- if EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
- # Looks like an Easytrieve macro.
- result = 0.4
- if hasHeaderComment:
- result += 0.4
- else:
- # Scan the source for lines starting with indicators.
- for line in lines:
- words = line.split()
- if (len(words) >= 2):
- firstWord = words[0]
- if not hasReport:
- if not hasJob:
- if not hasFile:
- if not hasParm:
- if firstWord == 'PARM':
- hasParm = True
- if firstWord == 'FILE':
- hasFile = True
- if firstWord == 'JOB':
- hasJob = True
- elif firstWord == 'PROC':
- hasProc = True
- elif firstWord == 'END-PROC':
- hasEndProc = True
- elif firstWord == 'REPORT':
- hasReport = True
-
- # Weight the findings.
- if hasJob and (hasProc == hasEndProc):
- if hasHeaderComment:
- result += 0.1
- if hasParm:
- if hasProc:
- # Found PARM, JOB and PROC/END-PROC:
- # pretty sure this is Easytrieve.
- result += 0.8
- else:
- # Found PARM and JOB: probably this is Easytrieve
- result += 0.5
- else:
- # Found JOB and possibly other keywords: might be Easytrieve
- result += 0.11
- if hasParm:
- # Note: PARM is not a proper English word, so it is
- # regarded as a much better indicator for Easytrieve than
- # the other words.
- result += 0.2
- if hasFile:
- result += 0.01
- if hasReport:
- result += 0.01
- assert 0.0 <= result <= 1.0
- return result
-
-
-class JclLexer(RegexLexer):
- """
+ ],
+ }
+ _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
+ _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')
+
+ def analyse_text(text):
+ """
+ Perform a structural analysis for basic Easytrieve constructs.
+ """
+ result = 0.0
+ lines = text.split('\n')
+ hasEndProc = False
+ hasHeaderComment = False
+ hasFile = False
+ hasJob = False
+ hasProc = False
+ hasParm = False
+ hasReport = False
+
+ def isCommentLine(line):
+ return EasytrieveLexer._COMMENT_LINE_REGEX.match(line) is not None
+
+ def isEmptyLine(line):
+ return not bool(line.strip())
+
+ # Remove possible empty lines and header comments.
+ while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
+ if not isEmptyLine(lines[0]):
+ hasHeaderComment = True
+ del lines[0]
+
+ if EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
+ # Looks like an Easytrieve macro.
+ result = 0.4
+ if hasHeaderComment:
+ result += 0.4
+ else:
+ # Scan the source for lines starting with indicators.
+ for line in lines:
+ words = line.split()
+ if (len(words) >= 2):
+ firstWord = words[0]
+ if not hasReport:
+ if not hasJob:
+ if not hasFile:
+ if not hasParm:
+ if firstWord == 'PARM':
+ hasParm = True
+ if firstWord == 'FILE':
+ hasFile = True
+ if firstWord == 'JOB':
+ hasJob = True
+ elif firstWord == 'PROC':
+ hasProc = True
+ elif firstWord == 'END-PROC':
+ hasEndProc = True
+ elif firstWord == 'REPORT':
+ hasReport = True
+
+ # Weight the findings.
+ if hasJob and (hasProc == hasEndProc):
+ if hasHeaderComment:
+ result += 0.1
+ if hasParm:
+ if hasProc:
+ # Found PARM, JOB and PROC/END-PROC:
+ # pretty sure this is Easytrieve.
+ result += 0.8
+ else:
+ # Found PARM and JOB: probably this is Easytrieve
+ result += 0.5
+ else:
+ # Found JOB and possibly other keywords: might be Easytrieve
+ result += 0.11
+ if hasParm:
+ # Note: PARM is not a proper English word, so it is
+ # regarded as a much better indicator for Easytrieve than
+ # the other words.
+ result += 0.2
+ if hasFile:
+ result += 0.01
+ if hasReport:
+ result += 0.01
+ assert 0.0 <= result <= 1.0
+ return result
+
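A rough sketch of the delimiter handling and the PARM/FILE/JOB heuristics above, assuming Pygments is installed; the Easytrieve fragment is made up for illustration:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import EasytrieveLexer

code = (
    "* A made-up Easytrieve Plus fragment\n"
    "PARM DEBUG(FLOW)\n"
    "FILE PERSNL\n"
    "JOB INPUT PERSNL\n"
    "   PRINT PAY-RPT\n"
)
# Header comment, PARM, FILE and JOB each add weight, so this should score well above 0.
print(EasytrieveLexer.analyse_text(code))
print(highlight(code, EasytrieveLexer(), TerminalFormatter()))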
+
+class JclLexer(RegexLexer):
+ """
`Job Control Language (JCL)
<http://publibz.boulder.ibm.com/cgi-bin/bookmgr_OS390/BOOKS/IEA2B570/CCONTENTS>`_
- is a scripting language used on mainframe platforms to instruct the system
- on how to run a batch job or start a subsystem. It is somewhat
- comparable to MS DOS batch and Unix shell scripts.
-
- .. versionadded:: 2.1
- """
- name = 'JCL'
- aliases = ['jcl']
- filenames = ['*.jcl']
- mimetypes = ['text/x-jcl']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'//\*.*\n', Comment.Single),
- (r'//', Keyword.Pseudo, 'statement'),
- (r'/\*', Keyword.Pseudo, 'jes2_statement'),
- # TODO: JES3 statement
- (r'.*\n', Other) # Input text or inline code in any language.
- ],
- 'statement': [
- (r'\s*\n', Whitespace, '#pop'),
+ is a scripting language used on mainframe platforms to instruct the system
+ on how to run a batch job or start a subsystem. It is somewhat
+ comparable to MS DOS batch and Unix shell scripts.
+
+ .. versionadded:: 2.1
+ """
+ name = 'JCL'
+ aliases = ['jcl']
+ filenames = ['*.jcl']
+ mimetypes = ['text/x-jcl']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'//\*.*\n', Comment.Single),
+ (r'//', Keyword.Pseudo, 'statement'),
+ (r'/\*', Keyword.Pseudo, 'jes2_statement'),
+ # TODO: JES3 statement
+ (r'.*\n', Other) # Input text or inline code in any language.
+ ],
+ 'statement': [
+ (r'\s*\n', Whitespace, '#pop'),
(r'([a-z]\w*)(\s+)(exec|job)(\s*)',
- bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
- 'option'),
+ bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
+ 'option'),
(r'[a-z]\w*', Name.Variable, 'statement_command'),
- (r'\s+', Whitespace, 'statement_command'),
- ],
- 'statement_command': [
- (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
- r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
- include('option')
- ],
- 'jes2_statement': [
- (r'\s*\n', Whitespace, '#pop'),
- (r'\$', Keyword, 'option'),
- (r'\b(jobparam|message|netacct|notify|output|priority|route|'
- r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
- ],
- 'option': [
- # (r'\n', Text, 'root'),
- (r'\*', Name.Builtin),
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=&%]', Operator),
+ (r'\s+', Whitespace, 'statement_command'),
+ ],
+ 'statement_command': [
+ (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
+ r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
+ include('option')
+ ],
+ 'jes2_statement': [
+ (r'\s*\n', Whitespace, '#pop'),
+ (r'\$', Keyword, 'option'),
+ (r'\b(jobparam|message|netacct|notify|output|priority|route|'
+ r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
+ ],
+ 'option': [
+ # (r'\n', Text, 'root'),
+ (r'\*', Name.Builtin),
+ (r'[\[\](){}<>;,]', Punctuation),
+ (r'[-+*/=&%]', Operator),
(r'[a-z_]\w*', Name),
(r'\d+\.\d*', Number.Float),
(r'\.\d+', Number.Float),
(r'\d+', Number.Integer),
- (r"'", String, 'option_string'),
- (r'[ \t]+', Whitespace, 'option_comment'),
- (r'\.', Punctuation),
- ],
- 'option_string': [
- (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
- (r"''", String),
- (r"[^']", String),
- (r"'", String, '#pop'),
- ],
- 'option_comment': [
- # (r'\n', Text, 'root'),
- (r'.+', Comment.Single),
- ]
- }
-
- _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
- re.IGNORECASE)
-
- def analyse_text(text):
- """
- Recognize JCL job by header.
- """
- result = 0.0
- lines = text.split('\n')
- if len(lines) > 0:
- if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
- result = 1.0
- assert 0.0 <= result <= 1.0
- return result
+ (r"'", String, 'option_string'),
+ (r'[ \t]+', Whitespace, 'option_comment'),
+ (r'\.', Punctuation),
+ ],
+ 'option_string': [
+ (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
+ (r"''", String),
+ (r"[^']", String),
+ (r"'", String, '#pop'),
+ ],
+ 'option_comment': [
+ # (r'\n', Text, 'root'),
+ (r'.+', Comment.Single),
+ ]
+ }
+
+ _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
+ re.IGNORECASE)
+
+ def analyse_text(text):
+ """
+ Recognize JCL job by header.
+ """
+ result = 0.0
+ lines = text.split('\n')
+ if len(lines) > 0:
+ if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
+ result = 1.0
+ assert 0.0 <= result <= 1.0
+ return result
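The job-header check above looks only at the very first line. A small sketch, assuming Pygments is installed; the two-step job is invented and the comments state the expected scores:

from pygments.lexers import JclLexer

job = "//MYJOB   JOB (ACCT),'SAMPLE',CLASS=A\n//STEP1   EXEC PGM=IEFBR14\n"
print(JclLexer.analyse_text(job))           # expected: 1.0, the first line matches _JOB_HEADER_PATTERN
print(JclLexer.analyse_text('// JOB X\n'))  # expected: 0.0, a job name must follow the leading //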
diff --git a/contrib/python/Pygments/py2/pygments/lexers/shell.py b/contrib/python/Pygments/py2/pygments/lexers/shell.py
index c12cb3f137..789848fa92 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/shell.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/shell.py
@@ -1,131 +1,131 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.shell
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various shells.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.shell
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various shells.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
- include, default, this, using, words
-from pygments.token import Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import shebang_matches
-
-
-__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
+ include, default, this, using, words
+from pygments.token import Punctuation, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import shebang_matches
+
+
+__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
- 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer']
-
-line_re = re.compile('.*?\n')
-
-
-class BashLexer(RegexLexer):
- """
+ 'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer']
+
+line_re = re.compile('.*?\n')
+
+
+class BashLexer(RegexLexer):
+ """
Lexer for (ba|k|z|)sh shell scripts.
-
- .. versionadded:: 0.6
- """
-
- name = 'Bash'
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'Bash'
aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
- filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
+ filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'*.exheres-0', '*.exlib', '*.zsh',
'.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- include('interp'),
- ],
- 'interp': [
- (r'\$\(\(', Keyword, 'math'),
- (r'\$\(', Keyword, 'paren'),
- (r'\$\{#?', String.Interpol, 'curly'),
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'`', String.Backtick, 'backticks'),
+ include('data'),
+ include('interp'),
+ ],
+ 'interp': [
+ (r'\$\(\(', Keyword, 'math'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\$\{#?', String.Interpol, 'curly'),
(r'\$[a-zA-Z_]\w*', Name.Variable), # user variable
- (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
- (r'\$', Text),
- ],
- 'basic': [
- (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
- r'select|continue|until|esac|elif)(\s*)\b',
- bygroups(Keyword, Text)),
- (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
- r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
- r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
- r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
- r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
- r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
- Name.Builtin),
- (r'\A#!.+\n', Comment.Hashbang),
- (r'#.*\n', Comment.Single),
- (r'\\[\w\W]', String.Escape),
+ (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
+ (r'\$', Text),
+ ],
+ 'basic': [
+ (r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
+ r'select|continue|until|esac|elif)(\s*)\b',
+ bygroups(Keyword, Text)),
+ (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
+ r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
+ r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
+ r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
+ r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
+ r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
+ Name.Builtin),
+ (r'\A#!.+\n', Comment.Hashbang),
+ (r'#.*\n', Comment.Single),
+ (r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]', Operator),
- (r'<<<', Operator), # here-string
- (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r'&&|\|\|', Operator),
- ],
- 'data': [
+ (r'[\[\]{}()=]', Operator),
+ (r'<<<', Operator), # here-string
+ (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r'&&|\|\|', Operator),
+ ],
+ 'data': [
(r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r"(?s)'.*?'", String.Single),
- (r';', Punctuation),
- (r'&', Punctuation),
- (r'\|', Punctuation),
- (r'\s+', Text),
+ (r'"', String.Double, 'string'),
+ (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r"(?s)'.*?'", String.Single),
+ (r';', Punctuation),
+ (r'&', Punctuation),
+ (r'\|', Punctuation),
+ (r'\s+', Text),
(r'\d+\b', Number),
- (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
- (r'<', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$\\]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'math': [
- (r'\)\)', Keyword, '#pop'),
- (r'[-+*/%^|&]|\*\*|\|\|', Operator),
- (r'\d+#\d+', Number),
- (r'\d+#(?! )', Number),
- (r'\d+', Number),
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'(ba|z|)sh'):
- return 1
- if text.startswith('$ '):
- return 0.2
-
-
+ (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
+ (r'<', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+ include('interp'),
+ ],
+ 'curly': [
+ (r'\}', String.Interpol, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$\\]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'math': [
+ (r'\)\)', Keyword, '#pop'),
+ (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+ (r'\d+#\d+', Number),
+ (r'\d+#(?! )', Number),
+ (r'\d+', Number),
+ include('root'),
+ ],
+ 'backticks': [
+ (r'`', String.Backtick, '#pop'),
+ include('root'),
+ ],
+ }
+
+ def analyse_text(text):
+ if shebang_matches(text, r'(ba|z|)sh'):
+ return 1
+ if text.startswith('$ '):
+ return 0.2
+
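A quick sketch of the shebang- and prompt-based detection above, assuming Pygments is installed; the inputs are invented and the comments give the expected scores:

from pygments.lexers import BashLexer

print(BashLexer.analyse_text('#!/usr/bin/env bash\necho hi\n'))  # expected: 1.0, shebang matches (ba|z|)sh
print(BashLexer.analyse_text('$ make install\n'))                # expected: 0.2, looks like a session prompt
print(BashLexer.analyse_text('echo hi\n'))                       # expected: 0.0, no signal either way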
+
class SlurmBashLexer(BashLexer):
"""
Lexer for (ba|k|z|)sh Slurm scripts.
@@ -148,25 +148,25 @@ class SlurmBashLexer(BashLexer):
else:
yield index, token, value
-class ShellSessionBaseLexer(Lexer):
- """
- Base lexer for simplistic shell sessions.
-
- .. versionadded:: 2.1
- """
+class ShellSessionBaseLexer(Lexer):
+ """
+ Base lexer for simplistic shell sessions.
+
+ .. versionadded:: 2.1
+ """
_venv = re.compile(r'^(\([^)]*\))(\s*)')
- def get_tokens_unprocessed(self, text):
- innerlexer = self._innerLexerCls(**self.options)
-
- pos = 0
- curcode = ''
- insertions = []
+ def get_tokens_unprocessed(self, text):
+ innerlexer = self._innerLexerCls(**self.options)
+
+ pos = 0
+ curcode = ''
+ insertions = []
backslash_continuation = False
-
- for match in line_re.finditer(text):
- line = match.group()
+
+ for match in line_re.finditer(text):
+ line = match.group()
if backslash_continuation:
curcode += line
backslash_continuation = curcode.endswith('\\\n')
@@ -185,341 +185,341 @@ class ShellSessionBaseLexer(Lexer):
m = self._ps1rgx.match(line)
if m:
- # To support output lexers (say diff output), the output
- # needs to be broken by prompts whenever the output lexer
- # changes.
- if not insertions:
- pos = match.start()
-
- insertions.append((len(curcode),
- [(0, Generic.Prompt, m.group(1))]))
- curcode += m.group(2)
+ # To support output lexers (say diff output), the output
+ # needs to be broken by prompts whenever the output lexer
+ # changes.
+ if not insertions:
+ pos = match.start()
+
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, m.group(1))]))
+ curcode += m.group(2)
backslash_continuation = curcode.endswith('\\\n')
- elif line.startswith(self._ps2):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:len(self._ps2)])]))
- curcode += line[len(self._ps2):]
+ elif line.startswith(self._ps2):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:len(self._ps2)])]))
+ curcode += line[len(self._ps2):]
backslash_continuation = curcode.endswith('\\\n')
- else:
- if insertions:
- toks = innerlexer.get_tokens_unprocessed(curcode)
- for i, t, v in do_insertions(insertions, toks):
- yield pos+i, t, v
- yield match.start(), Generic.Output, line
- insertions = []
- curcode = ''
- if insertions:
- for i, t, v in do_insertions(insertions,
- innerlexer.get_tokens_unprocessed(curcode)):
- yield pos+i, t, v
-
-
-class BashSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for simplistic shell sessions.
-
- .. versionadded:: 1.1
- """
-
- name = 'Bash Session'
- aliases = ['console', 'shell-session']
- filenames = ['*.sh-session', '*.shell-session']
- mimetypes = ['application/x-shell-session', 'application/x-sh-session']
-
- _innerLexerCls = BashLexer
+ else:
+ if insertions:
+ toks = innerlexer.get_tokens_unprocessed(curcode)
+ for i, t, v in do_insertions(insertions, toks):
+ yield pos+i, t, v
+ yield match.start(), Generic.Output, line
+ insertions = []
+ curcode = ''
+ if insertions:
+ for i, t, v in do_insertions(insertions,
+ innerlexer.get_tokens_unprocessed(curcode)):
+ yield pos+i, t, v
+
+
+class BashSessionLexer(ShellSessionBaseLexer):
+ """
+ Lexer for simplistic shell sessions.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Bash Session'
+ aliases = ['console', 'shell-session']
+ filenames = ['*.sh-session', '*.shell-session']
+ mimetypes = ['application/x-shell-session', 'application/x-sh-session']
+
+ _innerLexerCls = BashLexer
_ps1rgx = re.compile(
- r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
+ r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
r'?|\[\S+[@:][^\n]+\].+))\s*[$#%])(.*\n?)')
- _ps2 = '>'
-
-
-class BatchLexer(RegexLexer):
- """
- Lexer for the DOS/Windows Batch file format.
-
- .. versionadded:: 0.7
- """
- name = 'Batchfile'
- aliases = ['bat', 'batch', 'dosbatch', 'winbatch']
- filenames = ['*.bat', '*.cmd']
- mimetypes = ['application/x-dos-batch']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- _nl = r'\n\x1a'
- _punct = r'&<>|'
- _ws = r'\t\v\f\r ,;=\xa0'
- _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
- _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
- (_nl, _ws, _nl, _punct))
- _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
- _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
- _label = r'(?:(?:[^%s%s%s+:^]|\^[%s]?[\w\W])*)' % (_nl, _punct, _ws, _nl)
- _label_compound = (r'(?:(?:[^%s%s%s+:^)]|\^[%s]?[^)])*)' %
- (_nl, _punct, _ws, _nl))
- _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
- _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
+ _ps2 = '>'
+
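A sketch of how the prompt splitting in the session base lexer above plays out, assuming Pygments is installed and using an invented two-line transcript: the prompt should come back as Generic.Prompt, the command text is re-lexed by BashLexer, and non-matching lines fall through as Generic.Output:

from pygments.lexers import BashSessionLexer

session = '$ echo hi\nhi\n'
for index, token, value in BashSessionLexer().get_tokens_unprocessed(session):
    print(index, token, repr(value))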
+
+class BatchLexer(RegexLexer):
+ """
+ Lexer for the DOS/Windows Batch file format.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Batchfile'
+ aliases = ['bat', 'batch', 'dosbatch', 'winbatch']
+ filenames = ['*.bat', '*.cmd']
+ mimetypes = ['application/x-dos-batch']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ _nl = r'\n\x1a'
+ _punct = r'&<>|'
+ _ws = r'\t\v\f\r ,;=\xa0'
+ _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
+ _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
+ (_nl, _ws, _nl, _punct))
+ _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
+ _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
+ _label = r'(?:(?:[^%s%s%s+:^]|\^[%s]?[\w\W])*)' % (_nl, _punct, _ws, _nl)
+ _label_compound = (r'(?:(?:[^%s%s%s+:^)]|\^[%s]?[^)])*)' %
+ (_nl, _punct, _ws, _nl))
+ _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
+ _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
_string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
- _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
- r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
- r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
- r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
- r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
- (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
+ _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
+ r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
+ r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
+ r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
+ r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
+ (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
_core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s])+)' % (_nl, _nl, _punct, _ws)
_core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s%s)])+)' % (_nl, _nl,
_punct, _ws)
- _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
- _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
- _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token))
-
- def _make_begin_state(compound, _core_token=_core_token,
- _core_token_compound=_core_token_compound,
- _keyword_terminator=_keyword_terminator,
- _nl=_nl, _punct=_punct, _string=_string,
- _space=_space, _start_label=_start_label,
- _stoken=_stoken, _token_terminator=_token_terminator,
- _variable=_variable, _ws=_ws):
- rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
- ')' if compound else '')
- rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
- rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
- set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
- suffix = ''
- if compound:
- _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
- _token_terminator = r'(?:(?=\))|%s)' % _token_terminator
- suffix = '/compound'
- return [
- ((r'\)', Punctuation, '#pop') if compound else
- (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
- Comment.Single)),
- (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
- (_space, using(this, state='text')),
- include('redirect%s' % suffix),
- (r'[%s]+' % _nl, Text),
- (r'\(', Punctuation, 'root/compound'),
- (r'@+', Punctuation),
- (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
- r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
- (_nl, _token_terminator, _space,
- _core_token_compound if compound else _core_token, _nl, _nl),
- bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
- (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
- (_keyword_terminator, rest, _nl, _nl, rest),
- bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
- (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
- 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
- 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
- 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
- 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
- 'title', 'type', 'ver', 'verify', 'vol'),
- suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
- (r'(call)(%s?)(:)' % _space,
- bygroups(Keyword, using(this, state='text'), Punctuation),
- 'call%s' % suffix),
- (r'call%s' % _keyword_terminator, Keyword),
- (r'(for%s(?!\^))(%s)(/f%s)' %
- (_token_terminator, _space, _token_terminator),
- bygroups(Keyword, using(this, state='text'), Keyword),
- ('for/f', 'for')),
- (r'(for%s(?!\^))(%s)(/l%s)' %
- (_token_terminator, _space, _token_terminator),
- bygroups(Keyword, using(this, state='text'), Keyword),
- ('for/l', 'for')),
- (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
- (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
- bygroups(Keyword, using(this, state='text'), Punctuation),
- 'label%s' % suffix),
- (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
- (_token_terminator, _space, _token_terminator, _space,
- _token_terminator, _space),
- bygroups(Keyword, using(this, state='text'), Keyword,
- using(this, state='text'), Keyword,
- using(this, state='text')), ('(?', 'if')),
- (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
- (_token_terminator, _space, _stoken, _keyword_terminator,
- rest_of_line_compound if compound else rest_of_line),
- Comment.Single, 'follow%s' % suffix),
- (r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
- bygroups(Keyword, using(this, state='text'), Keyword),
- 'arithmetic%s' % suffix),
- (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
- r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
- (_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
- ')' if compound else '', _nl, _nl),
- bygroups(Keyword, using(this, state='text'), Keyword,
- using(this, state='text'), using(this, state='variable'),
- Punctuation),
- 'follow%s' % suffix),
- default('follow%s' % suffix)
- ]
-
- def _make_follow_state(compound, _label=_label,
- _label_compound=_label_compound, _nl=_nl,
- _space=_space, _start_label=_start_label,
- _token=_token, _token_compound=_token_compound,
- _ws=_ws):
- suffix = '/compound' if compound else ''
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state += [
- (r'%s([%s]*)(%s)(.*)' %
- (_start_label, _ws, _label_compound if compound else _label),
- bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
- include('redirect%s' % suffix),
- (r'(?=[%s])' % _nl, Text, '#pop'),
- (r'\|\|?|&&?', Punctuation, '#pop'),
- include('text')
- ]
- return state
-
- def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
- _string=_string, _variable=_variable, _ws=_ws):
- op = r'=+\-*/!~'
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state += [
- (r'0[0-7]+', Number.Oct),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'[(),]+', Punctuation),
- (r'([%s]|%%|\^\^)+' % op, Operator),
- (r'(%s|%s|(\^[%s]?)?[^()%s%%^"%s%s%s]|\^[%s%s]?%s)+' %
- (_string, _variable, _nl, op, _nl, _punct, _ws, _nl, _ws,
- r'[^)]' if compound else r'[\w\W]'),
- using(this, state='variable')),
- (r'(?=[\x00|&])', Text, '#pop'),
- include('follow')
- ]
- return state
-
- def _make_call_state(compound, _label=_label,
- _label_compound=_label_compound):
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
- bygroups(Punctuation, Name.Label), '#pop'))
- return state
-
- def _make_label_state(compound, _label=_label,
- _label_compound=_label_compound, _nl=_nl,
- _punct=_punct, _string=_string, _variable=_variable):
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
- (_label_compound if compound else _label, _string,
- _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
- _punct, r')' if compound else ''),
- bygroups(Name.Label, Comment.Single), '#pop'))
- return state
-
- def _make_redirect_state(compound,
- _core_token_compound=_core_token_compound,
- _nl=_nl, _punct=_punct, _stoken=_stoken,
- _string=_string, _space=_space,
- _variable=_variable, _ws=_ws):
- stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token_compound))
- return [
- (r'((?:(?<=[%s%s])\d)?)(>>?&|<&)([%s%s]*)(\d)' %
- (_nl, _ws, _nl, _ws),
- bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
- (r'((?:(?<=[%s%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
- (_nl, _ws, _nl, _space, stoken_compound if compound else _stoken),
- bygroups(Number.Integer, Punctuation, using(this, state='text')))
- ]
-
- tokens = {
- 'root': _make_begin_state(False),
- 'follow': _make_follow_state(False),
- 'arithmetic': _make_arithmetic_state(False),
- 'call': _make_call_state(False),
- 'label': _make_label_state(False),
- 'redirect': _make_redirect_state(False),
- 'root/compound': _make_begin_state(True),
- 'follow/compound': _make_follow_state(True),
- 'arithmetic/compound': _make_arithmetic_state(True),
- 'call/compound': _make_call_state(True),
- 'label/compound': _make_label_state(True),
- 'redirect/compound': _make_redirect_state(True),
- 'variable-or-escape': [
- (_variable, Name.Variable),
- (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (_variable, Name.Variable),
- (r'\^!|%%', String.Escape),
- (r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
- default('#pop')
- ],
- 'sqstring': [
- include('variable-or-escape'),
- (r'[^%]+|%', String.Single)
- ],
- 'bqstring': [
- include('variable-or-escape'),
- (r'[^%]+|%', String.Backtick)
- ],
- 'text': [
- (r'"', String.Double, 'string'),
- include('variable-or-escape'),
- (r'[^"%%^%s%s%s\d)]+|.' % (_nl, _punct, _ws), Text)
- ],
- 'variable': [
- (r'"', String.Double, 'string'),
- include('variable-or-escape'),
- (r'[^"%%^%s]+|.' % _nl, Name.Variable)
- ],
- 'for': [
- (r'(%s)(in)(%s)(\()' % (_space, _space),
- bygroups(using(this, state='text'), Keyword,
- using(this, state='text'), Punctuation), '#pop'),
- include('follow')
- ],
- 'for2': [
- (r'\)', Punctuation),
- (r'(%s)(do%s)' % (_space, _token_terminator),
- bygroups(using(this, state='text'), Keyword), '#pop'),
- (r'[%s]+' % _nl, Text),
- include('follow')
- ],
- 'for/f': [
- (r'(")((?:%s|[^"])*?")([%s%s]*)(\))' % (_variable, _nl, _ws),
- bygroups(String.Double, using(this, state='string'), Text,
- Punctuation)),
- (r'"', String.Double, ('#pop', 'for2', 'string')),
+ _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
+ _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
+ _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
+ (_punct, _string, _variable, _core_token))
+
+ def _make_begin_state(compound, _core_token=_core_token,
+ _core_token_compound=_core_token_compound,
+ _keyword_terminator=_keyword_terminator,
+ _nl=_nl, _punct=_punct, _string=_string,
+ _space=_space, _start_label=_start_label,
+ _stoken=_stoken, _token_terminator=_token_terminator,
+ _variable=_variable, _ws=_ws):
+ rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
+ ')' if compound else '')
+ rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
+ rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
+ set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
+ suffix = ''
+ if compound:
+ _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
+ _token_terminator = r'(?:(?=\))|%s)' % _token_terminator
+ suffix = '/compound'
+ return [
+ ((r'\)', Punctuation, '#pop') if compound else
+ (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
+ Comment.Single)),
+ (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
+ (_space, using(this, state='text')),
+ include('redirect%s' % suffix),
+ (r'[%s]+' % _nl, Text),
+ (r'\(', Punctuation, 'root/compound'),
+ (r'@+', Punctuation),
+ (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
+ r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
+ (_nl, _token_terminator, _space,
+ _core_token_compound if compound else _core_token, _nl, _nl),
+ bygroups(Keyword, using(this, state='text')),
+ 'follow%s' % suffix),
+ (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
+ (_keyword_terminator, rest, _nl, _nl, rest),
+ bygroups(Keyword, using(this, state='text')),
+ 'follow%s' % suffix),
+ (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
+ 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
+ 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
+ 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
+ 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
+ 'title', 'type', 'ver', 'verify', 'vol'),
+ suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
+ (r'(call)(%s?)(:)' % _space,
+ bygroups(Keyword, using(this, state='text'), Punctuation),
+ 'call%s' % suffix),
+ (r'call%s' % _keyword_terminator, Keyword),
+ (r'(for%s(?!\^))(%s)(/f%s)' %
+ (_token_terminator, _space, _token_terminator),
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ ('for/f', 'for')),
+ (r'(for%s(?!\^))(%s)(/l%s)' %
+ (_token_terminator, _space, _token_terminator),
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ ('for/l', 'for')),
+ (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
+ (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
+ bygroups(Keyword, using(this, state='text'), Punctuation),
+ 'label%s' % suffix),
+ (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
+ (_token_terminator, _space, _token_terminator, _space,
+ _token_terminator, _space),
+ bygroups(Keyword, using(this, state='text'), Keyword,
+ using(this, state='text'), Keyword,
+ using(this, state='text')), ('(?', 'if')),
+ (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
+ (_token_terminator, _space, _stoken, _keyword_terminator,
+ rest_of_line_compound if compound else rest_of_line),
+ Comment.Single, 'follow%s' % suffix),
+ (r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ 'arithmetic%s' % suffix),
+ (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
+ r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
+ (_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
+ ')' if compound else '', _nl, _nl),
+ bygroups(Keyword, using(this, state='text'), Keyword,
+ using(this, state='text'), using(this, state='variable'),
+ Punctuation),
+ 'follow%s' % suffix),
+ default('follow%s' % suffix)
+ ]
+
+ def _make_follow_state(compound, _label=_label,
+ _label_compound=_label_compound, _nl=_nl,
+ _space=_space, _start_label=_start_label,
+ _token=_token, _token_compound=_token_compound,
+ _ws=_ws):
+ suffix = '/compound' if compound else ''
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state += [
+ (r'%s([%s]*)(%s)(.*)' %
+ (_start_label, _ws, _label_compound if compound else _label),
+ bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
+ include('redirect%s' % suffix),
+ (r'(?=[%s])' % _nl, Text, '#pop'),
+ (r'\|\|?|&&?', Punctuation, '#pop'),
+ include('text')
+ ]
+ return state
+
+ def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
+ _string=_string, _variable=_variable, _ws=_ws):
+ op = r'=+\-*/!~'
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state += [
+ (r'0[0-7]+', Number.Oct),
+ (r'0x[\da-f]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'[(),]+', Punctuation),
+ (r'([%s]|%%|\^\^)+' % op, Operator),
+ (r'(%s|%s|(\^[%s]?)?[^()%s%%^"%s%s%s]|\^[%s%s]?%s)+' %
+ (_string, _variable, _nl, op, _nl, _punct, _ws, _nl, _ws,
+ r'[^)]' if compound else r'[\w\W]'),
+ using(this, state='variable')),
+ (r'(?=[\x00|&])', Text, '#pop'),
+ include('follow')
+ ]
+ return state
+
+ def _make_call_state(compound, _label=_label,
+ _label_compound=_label_compound):
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
+ bygroups(Punctuation, Name.Label), '#pop'))
+ return state
+
+ def _make_label_state(compound, _label=_label,
+ _label_compound=_label_compound, _nl=_nl,
+ _punct=_punct, _string=_string, _variable=_variable):
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
+ (_label_compound if compound else _label, _string,
+ _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
+ _punct, r')' if compound else ''),
+ bygroups(Name.Label, Comment.Single), '#pop'))
+ return state
+
+ def _make_redirect_state(compound,
+ _core_token_compound=_core_token_compound,
+ _nl=_nl, _punct=_punct, _stoken=_stoken,
+ _string=_string, _space=_space,
+ _variable=_variable, _ws=_ws):
+ stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
+ (_punct, _string, _variable, _core_token_compound))
+ return [
+ (r'((?:(?<=[%s%s])\d)?)(>>?&|<&)([%s%s]*)(\d)' %
+ (_nl, _ws, _nl, _ws),
+ bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
+ (r'((?:(?<=[%s%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
+ (_nl, _ws, _nl, _space, stoken_compound if compound else _stoken),
+ bygroups(Number.Integer, Punctuation, using(this, state='text')))
+ ]
+
+ tokens = {
+ 'root': _make_begin_state(False),
+ 'follow': _make_follow_state(False),
+ 'arithmetic': _make_arithmetic_state(False),
+ 'call': _make_call_state(False),
+ 'label': _make_label_state(False),
+ 'redirect': _make_redirect_state(False),
+ 'root/compound': _make_begin_state(True),
+ 'follow/compound': _make_follow_state(True),
+ 'arithmetic/compound': _make_arithmetic_state(True),
+ 'call/compound': _make_call_state(True),
+ 'label/compound': _make_label_state(True),
+ 'redirect/compound': _make_redirect_state(True),
+ 'variable-or-escape': [
+ (_variable, Name.Variable),
+ (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (_variable, Name.Variable),
+ (r'\^!|%%', String.Escape),
+ (r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
+ default('#pop')
+ ],
+ 'sqstring': [
+ include('variable-or-escape'),
+ (r'[^%]+|%', String.Single)
+ ],
+ 'bqstring': [
+ include('variable-or-escape'),
+ (r'[^%]+|%', String.Backtick)
+ ],
+ 'text': [
+ (r'"', String.Double, 'string'),
+ include('variable-or-escape'),
+ (r'[^"%%^%s%s%s\d)]+|.' % (_nl, _punct, _ws), Text)
+ ],
+ 'variable': [
+ (r'"', String.Double, 'string'),
+ include('variable-or-escape'),
+ (r'[^"%%^%s]+|.' % _nl, Name.Variable)
+ ],
+ 'for': [
+ (r'(%s)(in)(%s)(\()' % (_space, _space),
+ bygroups(using(this, state='text'), Keyword,
+ using(this, state='text'), Punctuation), '#pop'),
+ include('follow')
+ ],
+ 'for2': [
+ (r'\)', Punctuation),
+ (r'(%s)(do%s)' % (_space, _token_terminator),
+ bygroups(using(this, state='text'), Keyword), '#pop'),
+ (r'[%s]+' % _nl, Text),
+ include('follow')
+ ],
+ 'for/f': [
+ (r'(")((?:%s|[^"])*?")([%s%s]*)(\))' % (_variable, _nl, _ws),
+ bygroups(String.Double, using(this, state='string'), Text,
+ Punctuation)),
+ (r'"', String.Double, ('#pop', 'for2', 'string')),
(r"('(?:%%%%|%s|[\w\W])*?')([%s%s]*)(\))" % (_variable, _nl, _ws),
- bygroups(using(this, state='sqstring'), Text, Punctuation)),
+ bygroups(using(this, state='sqstring'), Text, Punctuation)),
(r'(`(?:%%%%|%s|[\w\W])*?`)([%s%s]*)(\))' % (_variable, _nl, _ws),
- bygroups(using(this, state='bqstring'), Text, Punctuation)),
- include('for2')
- ],
- 'for/l': [
- (r'-?\d+', Number.Integer),
- include('for2')
- ],
- 'if': [
- (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
- (_token_terminator, _space),
- bygroups(Keyword, using(this, state='text'),
- Number.Integer), '#pop'),
- (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
- bygroups(Keyword, using(this, state='text'),
- using(this, state='variable')), '#pop'),
- (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
- bygroups(Keyword, using(this, state='text')), '#pop'),
- (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
- bygroups(using(this, state='arithmetic'), Operator.Word,
- using(this, state='arithmetic')), '#pop'),
+ bygroups(using(this, state='bqstring'), Text, Punctuation)),
+ include('for2')
+ ],
+ 'for/l': [
+ (r'-?\d+', Number.Integer),
+ include('for2')
+ ],
+ 'if': [
+ (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
+ (_token_terminator, _space),
+ bygroups(Keyword, using(this, state='text'),
+ Number.Integer), '#pop'),
+ (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
+ bygroups(Keyword, using(this, state='text'),
+ using(this, state='variable')), '#pop'),
+ (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
+ bygroups(Keyword, using(this, state='text')), '#pop'),
+ (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
+ bygroups(using(this, state='arithmetic'), Operator.Word,
+ using(this, state='arithmetic')), '#pop'),
(_stoken, using(this, state='text'), ('#pop', 'if2')),
],
'if2': [
@@ -527,155 +527,155 @@ class BatchLexer(RegexLexer):
bygroups(using(this, state='text'), Operator,
using(this, state='text')), '#pop'),
(r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
- bygroups(using(this, state='text'), Operator.Word,
- using(this, state='text')), '#pop')
- ],
- '(?': [
- (_space, using(this, state='text')),
- (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
- default('#pop')
- ],
- 'else?': [
- (_space, using(this, state='text')),
- (r'else%s' % _token_terminator, Keyword, '#pop'),
- default('#pop')
- ]
- }
-
-
-class MSDOSSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for simplistic MSDOS sessions.
-
- .. versionadded:: 2.1
- """
-
- name = 'MSDOS Session'
- aliases = ['doscon']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = BatchLexer
+ bygroups(using(this, state='text'), Operator.Word,
+ using(this, state='text')), '#pop')
+ ],
+ '(?': [
+ (_space, using(this, state='text')),
+ (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
+ default('#pop')
+ ],
+ 'else?': [
+ (_space, using(this, state='text')),
+ (r'else%s' % _token_terminator, Keyword, '#pop'),
+ default('#pop')
+ ]
+ }
+
+
+class MSDOSSessionLexer(ShellSessionBaseLexer):
+ """
+ Lexer for simplistic MSDOS sessions.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'MSDOS Session'
+ aliases = ['doscon']
+ filenames = []
+ mimetypes = []
+
+ _innerLexerCls = BatchLexer
_ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
- _ps2 = 'More? '
-
-
-class TcshLexer(RegexLexer):
- """
- Lexer for tcsh scripts.
-
- .. versionadded:: 0.10
- """
-
- name = 'Tcsh'
- aliases = ['tcsh', 'csh']
- filenames = ['*.tcsh', '*.csh']
- mimetypes = ['application/x-csh']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\$\(', Keyword, 'paren'),
- (r'\$\{#?', Keyword, 'curly'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- ],
- 'basic': [
- (r'\b(if|endif|else|while|then|foreach|case|default|'
- r'continue|goto|breaksw|end|switch|endsw)\s*\b',
- Keyword),
- (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
- r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
- r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
- r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
- r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
- r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
- r'source|stop|suspend|source|suspend|telltc|time|'
- r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
- r'ver|wait|warp|watchlog|where|which)\s*\b',
- Name.Builtin),
- (r'#.*', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r';', Punctuation),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'\$#?(\w+|.)', Name.Variable),
- ],
- 'curly': [
- (r'\}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
-
-class TcshSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for Tcsh sessions.
-
- .. versionadded:: 2.1
- """
-
- name = 'Tcsh Session'
- aliases = ['tcshcon']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = TcshLexer
+ _ps2 = 'More? '
+
+
+class TcshLexer(RegexLexer):
+ """
+ Lexer for tcsh scripts.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Tcsh'
+ aliases = ['tcsh', 'csh']
+ filenames = ['*.tcsh', '*.csh']
+ mimetypes = ['application/x-csh']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\$\{#?', Keyword, 'curly'),
+ (r'`', String.Backtick, 'backticks'),
+ include('data'),
+ ],
+ 'basic': [
+ (r'\b(if|endif|else|while|then|foreach|case|default|'
+ r'continue|goto|breaksw|end|switch|endsw)\s*\b',
+ Keyword),
+ (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
+ r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
+ r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
+ r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
+ r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
+ r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
+ r'source|stop|suspend|source|suspend|telltc|time|'
+ r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
+ r'ver|wait|warp|watchlog|where|which)\s*\b',
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ (r'\$#?(\w+|.)', Name.Variable),
+ ],
+ 'curly': [
+ (r'\}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'backticks': [
+ (r'`', String.Backtick, '#pop'),
+ include('root'),
+ ],
+ }
+
+
+class TcshSessionLexer(ShellSessionBaseLexer):
+ """
+ Lexer for Tcsh sessions.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Tcsh Session'
+ aliases = ['tcshcon']
+ filenames = []
+ mimetypes = []
+
+ _innerLexerCls = TcshLexer
_ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
- _ps2 = '? '
-
-
-class PowerShellLexer(RegexLexer):
- """
- For Windows PowerShell code.
-
- .. versionadded:: 1.5
- """
- name = 'PowerShell'
- aliases = ['powershell', 'posh', 'ps1', 'psm1']
- filenames = ['*.ps1', '*.psm1']
- mimetypes = ['text/x-powershell']
-
- flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
-
- keywords = (
- 'while validateset validaterange validatepattern validatelength '
- 'validatecount until trap switch return ref process param parameter in '
- 'if global: function foreach for finally filter end elseif else '
- 'dynamicparam do default continue cmdletbinding break begin alias \\? '
- '% #script #private #local #global mandatory parametersetname position '
- 'valuefrompipeline valuefrompipelinebypropertyname '
- 'valuefromremainingarguments helpmessage try catch throw').split()
-
- operators = (
- 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
- 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
- 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
- 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
- 'lt match ne not notcontains notlike notmatch or regex replace '
- 'wildcard').split()
-
- verbs = (
+ _ps2 = '? '
+
+
+class PowerShellLexer(RegexLexer):
+ """
+ For Windows PowerShell code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'PowerShell'
+ aliases = ['powershell', 'posh', 'ps1', 'psm1']
+ filenames = ['*.ps1', '*.psm1']
+ mimetypes = ['text/x-powershell']
+
+ flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
+
+ keywords = (
+ 'while validateset validaterange validatepattern validatelength '
+ 'validatecount until trap switch return ref process param parameter in '
+ 'if global: function foreach for finally filter end elseif else '
+ 'dynamicparam do default continue cmdletbinding break begin alias \\? '
+ '% #script #private #local #global mandatory parametersetname position '
+ 'valuefrompipeline valuefrompipelinebypropertyname '
+ 'valuefromremainingarguments helpmessage try catch throw').split()
+
+ operators = (
+ 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
+ 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
+ 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
+ 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
+ 'lt match ne not notcontains notlike notmatch or regex replace '
+ 'wildcard').split()
+
+ verbs = (
'write where watch wait use update unregister unpublish unprotect '
'unlock uninstall undo unblock trace test tee take sync switch '
'suspend submit stop step start split sort skip show set send select '
@@ -687,7 +687,7 @@ class PowerShellLexer(RegexLexer):
'dismount disconnect disable deny debug cxnew copy convertto '
'convertfrom convert connect confirm compress complete compare close '
'clear checkpoint block backup assert approve aggregate add').split()
-
+
aliases_ = (
'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
@@ -700,150 +700,150 @@ class PowerShellLexer(RegexLexer):
'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
'trcm type wget where wjb write').split()
- commenthelp = (
- 'component description example externalhelp forwardhelpcategory '
- 'forwardhelptargetname functionality inputs link '
- 'notes outputs parameter remotehelprunspace role synopsis').split()
-
- tokens = {
- 'root': [
- # we need to count pairs of parentheses for correct highlight
- # of '$(...)' blocks in strings
- (r'\(', Punctuation, 'child'),
- (r'\s+', Text),
- (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
- bygroups(Comment, String.Doc, Comment)),
- (r'#[^\n]*?$', Comment),
- (r'(&lt;|<)#', Comment.Multiline, 'multline'),
- (r'@"\n', String.Heredoc, 'heredoc-double'),
- (r"@'\n.*?\n'@", String.Heredoc),
- # escaped syntax
- (r'`[\'"$@-]', Punctuation),
- (r'"', String.Double, 'string'),
- (r"'([^']|'')*'", String.Single),
- (r'(\$|@@|@)((global|script|private|env):)?\w+',
- Name.Variable),
- (r'(%s)\b' % '|'.join(keywords), Keyword),
- (r'-(%s)\b' % '|'.join(operators), Operator),
- (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
+ commenthelp = (
+ 'component description example externalhelp forwardhelpcategory '
+ 'forwardhelptargetname functionality inputs link '
+ 'notes outputs parameter remotehelprunspace role synopsis').split()
+
+ tokens = {
+ 'root': [
+ # we need to count pairs of parentheses for correct highlight
+ # of '$(...)' blocks in strings
+ (r'\(', Punctuation, 'child'),
+ (r'\s+', Text),
+ (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
+ bygroups(Comment, String.Doc, Comment)),
+ (r'#[^\n]*?$', Comment),
+ (r'(&lt;|<)#', Comment.Multiline, 'multline'),
+ (r'@"\n', String.Heredoc, 'heredoc-double'),
+ (r"@'\n.*?\n'@", String.Heredoc),
+ # escaped syntax
+ (r'`[\'"$@-]', Punctuation),
+ (r'"', String.Double, 'string'),
+ (r"'([^']|'')*'", String.Single),
+ (r'(\$|@@|@)((global|script|private|env):)?\w+',
+ Name.Variable),
+ (r'(%s)\b' % '|'.join(keywords), Keyword),
+ (r'-(%s)\b' % '|'.join(operators), Operator),
+ (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
(r'(%s)\s' % '|'.join(aliases_), Name.Builtin),
- (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
- (r'-[a-z_]\w*', Name),
- (r'\w+', Name),
- (r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
- ],
- 'child': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'multline': [
- (r'[^#&.]+', Comment.Multiline),
- (r'#(>|&gt;)', Comment.Multiline, '#pop'),
- (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
- (r'[#&.]', Comment.Multiline),
- ],
- 'string': [
- (r"`[0abfnrtv'\"$`]", String.Escape),
- (r'[^$`"]+', String.Double),
- (r'\$\(', Punctuation, 'child'),
- (r'""', String.Double),
- (r'[`$]', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'heredoc-double': [
- (r'\n"@', String.Heredoc, '#pop'),
- (r'\$\(', Punctuation, 'child'),
- (r'[^@\n]+"]', String.Heredoc),
- (r".", String.Heredoc),
- ]
- }
-
-
-class PowerShellSessionLexer(ShellSessionBaseLexer):
- """
- Lexer for simplistic Windows PowerShell sessions.
-
- .. versionadded:: 2.1
- """
-
- name = 'PowerShell Session'
- aliases = ['ps1con']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = PowerShellLexer
+ (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
+ (r'-[a-z_]\w*', Name),
+ (r'\w+', Name),
+ (r'[.,;@{}\[\]$()=+*/\\&%!~?^`|<>-]|::', Punctuation),
+ ],
+ 'child': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'multline': [
+ (r'[^#&.]+', Comment.Multiline),
+ (r'#(>|&gt;)', Comment.Multiline, '#pop'),
+ (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
+ (r'[#&.]', Comment.Multiline),
+ ],
+ 'string': [
+ (r"`[0abfnrtv'\"$`]", String.Escape),
+ (r'[^$`"]+', String.Double),
+ (r'\$\(', Punctuation, 'child'),
+ (r'""', String.Double),
+ (r'[`$]', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'heredoc-double': [
+ (r'\n"@', String.Heredoc, '#pop'),
+ (r'\$\(', Punctuation, 'child'),
+ (r'[^@\n]+"]', String.Heredoc),
+ (r".", String.Heredoc),
+ ]
+ }
+
+
+class PowerShellSessionLexer(ShellSessionBaseLexer):
+ """
+ Lexer for simplistic Windows PowerShell sessions.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'PowerShell Session'
+ aliases = ['ps1con']
+ filenames = []
+ mimetypes = []
+
+ _innerLexerCls = PowerShellLexer
_ps1rgx = re.compile(r'^(PS [^>]+> )(.*\n?)')
- _ps2 = '>> '
-
-
-class FishShellLexer(RegexLexer):
- """
- Lexer for Fish shell scripts.
-
- .. versionadded:: 2.1
- """
-
- name = 'Fish'
- aliases = ['fish', 'fishshell']
- filenames = ['*.fish', '*.load']
- mimetypes = ['application/x-fish']
-
- tokens = {
- 'root': [
- include('basic'),
- include('data'),
- include('interp'),
- ],
- 'interp': [
- (r'\$\(\(', Keyword, 'math'),
- (r'\(', Keyword, 'paren'),
- (r'\$#?(\w+|.)', Name.Variable),
- ],
- 'basic': [
- (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
- r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
- r'cd|count|test)(\s*)\b',
- bygroups(Keyword, Text)),
- (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
- r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
- r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
- r'fish_update_completions|fishd|funced|funcsave|functions|help|'
- r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
- r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
- r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
- Name.Builtin),
- (r'#.*\n', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]()=]', Operator),
- (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- ],
- 'data': [
- (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r"(?s)'.*?'", String.Single),
- (r';', Punctuation),
- (r'&|\||\^|<|>', Operator),
- (r'\s+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'math': [
- (r'\)\)', Keyword, '#pop'),
- (r'[-+*/%^|&]|\*\*|\|\|', Operator),
- (r'\d+#\d+', Number),
- (r'\d+#(?! )', Number),
- (r'\d+', Number),
- include('root'),
- ],
- }
+ _ps2 = '>> '
+
+
+class FishShellLexer(RegexLexer):
+ """
+ Lexer for Fish shell scripts.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Fish'
+ aliases = ['fish', 'fishshell']
+ filenames = ['*.fish', '*.load']
+ mimetypes = ['application/x-fish']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ include('data'),
+ include('interp'),
+ ],
+ 'interp': [
+ (r'\$\(\(', Keyword, 'math'),
+ (r'\(', Keyword, 'paren'),
+ (r'\$#?(\w+|.)', Name.Variable),
+ ],
+ 'basic': [
+ (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
+ r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
+ r'cd|count|test)(\s*)\b',
+ bygroups(Keyword, Text)),
+ (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
+ r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
+ r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
+ r'fish_update_completions|fishd|funced|funcsave|functions|help|'
+ r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
+ r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
+ r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
+ Name.Builtin),
+ (r'#.*\n', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]()=]', Operator),
+ (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ ],
+ 'data': [
+ (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
+ (r'"', String.Double, 'string'),
+ (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r"(?s)'.*?'", String.Single),
+ (r';', Punctuation),
+ (r'&|\||\^|<|>', Operator),
+ (r'\s+', Text),
+ (r'\d+(?= |\Z)', Number),
+ (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+ include('interp'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'math': [
+ (r'\)\)', Keyword, '#pop'),
+ (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+ (r'\d+#\d+', Number),
+ (r'\d+#(?! )', Number),
+ (r'\d+', Number),
+ include('root'),
+ ],
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/smalltalk.py b/contrib/python/Pygments/py2/pygments/lexers/smalltalk.py
index 0e4584f3d3..2b2f3cf589 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/smalltalk.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/smalltalk.py
@@ -1,195 +1,195 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.smalltalk
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Smalltalk and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.smalltalk
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Smalltalk and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SmalltalkLexer', 'NewspeakLexer']
-
-
-class SmalltalkLexer(RegexLexer):
- """
- For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
- Contributed by Stefan Matthias Aust.
- Rewritten by Nils Winter.
-
- .. versionadded:: 0.10
- """
- name = 'Smalltalk'
- filenames = ['*.st']
- aliases = ['smalltalk', 'squeak', 'st']
- mimetypes = ['text/x-smalltalk']
-
- tokens = {
- 'root': [
- (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
- include('squeak fileout'),
- include('whitespaces'),
- include('method definition'),
- (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
- include('objects'),
- (r'\^|\:=|\_', Operator),
- # temporaries
- (r'[\]({}.;!]', Text),
- ],
- 'method definition': [
- # Not perfect can't allow whitespaces at the beginning and the
- # without breaking everything
- (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
- (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
- bygroups(Name.Function, Text, Name.Variable, Text)),
- ],
- 'blockvariables': [
- include('whitespaces'),
- (r'(:)(\s*)(\w+)',
- bygroups(Operator, Text, Name.Variable)),
- (r'\|', Operator, '#pop'),
- default('#pop'), # else pop
- ],
- 'literals': [
- (r"'(''|[^'])*'", String, 'afterobject'),
- (r'\$.', String.Char, 'afterobject'),
- (r'#\(', String.Symbol, 'parenth'),
- (r'\)', Text, 'afterobject'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
- ],
- '_parenth_helper': [
- include('whitespaces'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
- (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
- # literals
- (r"'(''|[^'])*'", String),
- (r'\$.', String.Char),
- (r'#*\(', String.Symbol, 'inner_parenth'),
- ],
- 'parenth': [
- # This state is a bit tricky since
- # we can't just pop this state
- (r'\)', String.Symbol, ('root', 'afterobject')),
- include('_parenth_helper'),
- ],
- 'inner_parenth': [
- (r'\)', String.Symbol, '#pop'),
- include('_parenth_helper'),
- ],
- 'whitespaces': [
- # skip whitespace and comments
- (r'\s+', Text),
- (r'"(""|[^"])*"', Comment),
- ],
- 'objects': [
- (r'\[', Text, 'blockvariables'),
- (r'\]', Text, 'afterobject'),
- (r'\b(self|super|true|false|nil|thisContext)\b',
- Name.Builtin.Pseudo, 'afterobject'),
- (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
- (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
- (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
- String.Symbol, 'afterobject'),
- include('literals'),
- ],
- 'afterobject': [
- (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
- include('whitespaces'),
- (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
- Name.Builtin, '#pop'),
- (r'\b(new\b(?!:))', Name.Builtin),
- (r'\:=|\_', Operator, '#pop'),
- (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
- (r'\b[a-zA-Z]+\w*', Name.Function),
- (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
- (r'\.', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'[\])}]', Text),
- (r'[\[({]', Text, '#pop'),
- ],
- 'squeak fileout': [
- # Squeak fileout format (optional)
- (r'^"(""|[^"])*"!', Keyword),
- (r"^'(''|[^'])*'!", Keyword),
- (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
- bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
- (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
- bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
- (r'^(\w+)( subclass: )(#\w+)'
- r'(\s+instanceVariableNames: )(.*?)'
- r'(\s+classVariableNames: )(.*?)'
- r'(\s+poolDictionaries: )(.*?)'
- r'(\s+category: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
- String, Keyword, String, Keyword, String, Keyword)),
- (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String, Keyword)),
- (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
- (r'! !$', Keyword),
- ],
- }
-
-
-class NewspeakLexer(RegexLexer):
- """
- For `Newspeak <http://newspeaklanguage.org/>` syntax.
-
- .. versionadded:: 1.1
- """
- name = 'Newspeak'
- filenames = ['*.ns2']
- aliases = ['newspeak', ]
- mimetypes = ['text/x-newspeak']
-
- tokens = {
- 'root': [
- (r'\b(Newsqueak2)\b', Keyword.Declaration),
- (r"'[^']*'", String),
- (r'\b(class)(\s+)(\w+)(\s*)',
- bygroups(Keyword.Declaration, Text, Name.Class, Text)),
- (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
- Keyword),
- (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'(\w+)(\s*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- (r'<\w+>', Comment.Special),
- include('expressionstat'),
- include('whitespace')
- ],
-
- 'expressionstat': [
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'\d+', Number.Integer),
- (r':\w+', Name.Variable),
- (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
- (r'\w+:', Name.Function),
- (r'\w+', Name.Variable),
- (r'\(|\)', Punctuation),
- (r'\[|\]', Punctuation),
- (r'\{|\}', Punctuation),
-
- (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
- (r'\.|;', Punctuation),
- include('whitespace'),
- include('literals'),
- ],
- 'literals': [
- (r'\$.', String),
- (r"'[^']*'", String),
- (r"#'[^']*'", String.Symbol),
- (r"#\w+:?", String.Symbol),
- (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
- ],
- 'whitespace': [
- (r'\s+', Text),
- (r'"[^"]*"', Comment)
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SmalltalkLexer', 'NewspeakLexer']
+
+
+class SmalltalkLexer(RegexLexer):
+ """
+ For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
+ Contributed by Stefan Matthias Aust.
+ Rewritten by Nils Winter.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Smalltalk'
+ filenames = ['*.st']
+ aliases = ['smalltalk', 'squeak', 'st']
+ mimetypes = ['text/x-smalltalk']
+
+ tokens = {
+ 'root': [
+ (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
+ include('squeak fileout'),
+ include('whitespaces'),
+ include('method definition'),
+ (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
+ include('objects'),
+ (r'\^|\:=|\_', Operator),
+ # temporaries
+ (r'[\]({}.;!]', Text),
+ ],
+ 'method definition': [
+ # Not perfect can't allow whitespaces at the beginning and the
+ # without breaking everything
+ (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
+ (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
+ bygroups(Name.Function, Text, Name.Variable, Text)),
+ ],
+ 'blockvariables': [
+ include('whitespaces'),
+ (r'(:)(\s*)(\w+)',
+ bygroups(Operator, Text, Name.Variable)),
+ (r'\|', Operator, '#pop'),
+ default('#pop'), # else pop
+ ],
+ 'literals': [
+ (r"'(''|[^'])*'", String, 'afterobject'),
+ (r'\$.', String.Char, 'afterobject'),
+ (r'#\(', String.Symbol, 'parenth'),
+ (r'\)', Text, 'afterobject'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
+ ],
+ '_parenth_helper': [
+ include('whitespaces'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
+ (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
+ # literals
+ (r"'(''|[^'])*'", String),
+ (r'\$.', String.Char),
+ (r'#*\(', String.Symbol, 'inner_parenth'),
+ ],
+ 'parenth': [
+ # This state is a bit tricky since
+ # we can't just pop this state
+ (r'\)', String.Symbol, ('root', 'afterobject')),
+ include('_parenth_helper'),
+ ],
+ 'inner_parenth': [
+ (r'\)', String.Symbol, '#pop'),
+ include('_parenth_helper'),
+ ],
+ 'whitespaces': [
+ # skip whitespace and comments
+ (r'\s+', Text),
+ (r'"(""|[^"])*"', Comment),
+ ],
+ 'objects': [
+ (r'\[', Text, 'blockvariables'),
+ (r'\]', Text, 'afterobject'),
+ (r'\b(self|super|true|false|nil|thisContext)\b',
+ Name.Builtin.Pseudo, 'afterobject'),
+ (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
+ (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
+ (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
+ String.Symbol, 'afterobject'),
+ include('literals'),
+ ],
+ 'afterobject': [
+ (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
+ include('whitespaces'),
+ (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
+ Name.Builtin, '#pop'),
+ (r'\b(new\b(?!:))', Name.Builtin),
+ (r'\:=|\_', Operator, '#pop'),
+ (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
+ (r'\b[a-zA-Z]+\w*', Name.Function),
+ (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ (r';', Punctuation),
+ (r'[\])}]', Text),
+ (r'[\[({]', Text, '#pop'),
+ ],
+ 'squeak fileout': [
+ # Squeak fileout format (optional)
+ (r'^"(""|[^"])*"!', Keyword),
+ (r"^'(''|[^'])*'!", Keyword),
+ (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
+ (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
+ (r'^(\w+)( subclass: )(#\w+)'
+ r'(\s+instanceVariableNames: )(.*?)'
+ r'(\s+classVariableNames: )(.*?)'
+ r'(\s+poolDictionaries: )(.*?)'
+ r'(\s+category: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
+ String, Keyword, String, Keyword, String, Keyword)),
+ (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String, Keyword)),
+ (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
+ (r'! !$', Keyword),
+ ],
+ }
+
+
+class NewspeakLexer(RegexLexer):
+ """
+ For `Newspeak <http://newspeaklanguage.org/>` syntax.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Newspeak'
+ filenames = ['*.ns2']
+ aliases = ['newspeak', ]
+ mimetypes = ['text/x-newspeak']
+
+ tokens = {
+ 'root': [
+ (r'\b(Newsqueak2)\b', Keyword.Declaration),
+ (r"'[^']*'", String),
+ (r'\b(class)(\s+)(\w+)(\s*)',
+ bygroups(Keyword.Declaration, Text, Name.Class, Text)),
+ (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
+ Keyword),
+ (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'(\w+)(\s*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ (r'<\w+>', Comment.Special),
+ include('expressionstat'),
+ include('whitespace')
+ ],
+
+ 'expressionstat': [
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r':\w+', Name.Variable),
+ (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
+ (r'\w+:', Name.Function),
+ (r'\w+', Name.Variable),
+ (r'\(|\)', Punctuation),
+ (r'\[|\]', Punctuation),
+ (r'\{|\}', Punctuation),
+
+ (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
+ (r'\.|;', Punctuation),
+ include('whitespace'),
+ include('literals'),
+ ],
+ 'literals': [
+ (r'\$.', String),
+ (r"'[^']*'", String),
+ (r"#'[^']*'", String.Symbol),
+ (r"#\w+:?", String.Symbol),
+ (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'"[^"]*"', Comment)
+ ],
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/snobol.py b/contrib/python/Pygments/py2/pygments/lexers/snobol.py
index ce52f7c7c7..26f5622d59 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/snobol.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/snobol.py
@@ -1,83 +1,83 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.snobol
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the SNOBOL language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.snobol
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the SNOBOL language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SnobolLexer']
-
-
-class SnobolLexer(RegexLexer):
- """
- Lexer for the SNOBOL4 programming language.
-
- Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
- Does not require spaces around binary operators.
-
- .. versionadded:: 1.5
- """
-
- name = "Snobol"
- aliases = ["snobol"]
- filenames = ['*.snobol']
- mimetypes = ['text/x-snobol']
-
- tokens = {
- # root state, start of line
- # comments, continuation lines, and directives start in column 1
- # as do labels
- 'root': [
- (r'\*.*\n', Comment),
- (r'[+.] ', Punctuation, 'statement'),
- (r'-.*\n', Comment),
- (r'END\s*\n', Name.Label, 'heredoc'),
- (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
- (r'\s+', Text, 'statement'),
- ],
- # statement state, line after continuation or label
- 'statement': [
- (r'\s*\n', Text, '#pop'),
- (r'\s+', Text),
- (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
- r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
- r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
- r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
- Name.Builtin),
- (r'[A-Za-z][\w.]*', Name),
- # ASCII equivalents of original operators
- # | for the EBCDIC equivalent, ! likewise
- # \ for EBCDIC negation
- (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
- (r'"[^"]*"', String),
- (r"'[^']*'", String),
- # Accept SPITBOL syntax for real numbers
- # as well as Macro SNOBOL4
- (r'[0-9]+(?=[^.EeDd])', Number.Integer),
- (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
- # Goto
- (r':', Punctuation, 'goto'),
- (r'[()<>,;]', Punctuation),
- ],
- # Goto block
- 'goto': [
- (r'\s*\n', Text, "#pop:2"),
- (r'\s+', Text),
- (r'F|S', Keyword),
- (r'(\()([A-Za-z][\w.]*)(\))',
- bygroups(Punctuation, Name.Label, Punctuation))
- ],
- # everything after the END statement is basically one
- # big heredoc.
- 'heredoc': [
- (r'.*\n', String.Heredoc)
- ]
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SnobolLexer']
+
+
+class SnobolLexer(RegexLexer):
+ """
+ Lexer for the SNOBOL4 programming language.
+
+ Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+ Does not require spaces around binary operators.
+
+ .. versionadded:: 1.5
+ """
+
+ name = "Snobol"
+ aliases = ["snobol"]
+ filenames = ['*.snobol']
+ mimetypes = ['text/x-snobol']
+
+ tokens = {
+ # root state, start of line
+ # comments, continuation lines, and directives start in column 1
+ # as do labels
+ 'root': [
+ (r'\*.*\n', Comment),
+ (r'[+.] ', Punctuation, 'statement'),
+ (r'-.*\n', Comment),
+ (r'END\s*\n', Name.Label, 'heredoc'),
+ (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
+ (r'\s+', Text, 'statement'),
+ ],
+ # statement state, line after continuation or label
+ 'statement': [
+ (r'\s*\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
+ r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
+ r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
+ r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
+ Name.Builtin),
+ (r'[A-Za-z][\w.]*', Name),
+ # ASCII equivalents of original operators
+ # | for the EBCDIC equivalent, ! likewise
+ # \ for EBCDIC negation
+ (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
+ (r'"[^"]*"', String),
+ (r"'[^']*'", String),
+ # Accept SPITBOL syntax for real numbers
+ # as well as Macro SNOBOL4
+ (r'[0-9]+(?=[^.EeDd])', Number.Integer),
+ (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
+ # Goto
+ (r':', Punctuation, 'goto'),
+ (r'[()<>,;]', Punctuation),
+ ],
+ # Goto block
+ 'goto': [
+ (r'\s*\n', Text, "#pop:2"),
+ (r'\s+', Text),
+ (r'F|S', Keyword),
+ (r'(\()([A-Za-z][\w.]*)(\))',
+ bygroups(Punctuation, Name.Label, Punctuation))
+ ],
+ # everything after the END statement is basically one
+ # big heredoc.
+ 'heredoc': [
+ (r'.*\n', String.Heredoc)
+ ]
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/special.py b/contrib/python/Pygments/py2/pygments/lexers/special.py
index 4016c5949b..93689564a7 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/special.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/special.py
@@ -1,104 +1,104 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.special
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Special lexers.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.special
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Special lexers.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Token, Error, Text
-from pygments.util import get_choice_opt, text_type, BytesIO
-
-
-__all__ = ['TextLexer', 'RawTokenLexer']
-
-
-class TextLexer(Lexer):
- """
- "Null" lexer, doesn't highlight anything.
- """
- name = 'Text only'
- aliases = ['text']
- filenames = ['*.txt']
- mimetypes = ['text/plain']
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer
+from pygments.token import Token, Error, Text
+from pygments.util import get_choice_opt, text_type, BytesIO
+
+
+__all__ = ['TextLexer', 'RawTokenLexer']
+
+
+class TextLexer(Lexer):
+ """
+ "Null" lexer, doesn't highlight anything.
+ """
+ name = 'Text only'
+ aliases = ['text']
+ filenames = ['*.txt']
+ mimetypes = ['text/plain']
priority = 0.01
-
- def get_tokens_unprocessed(self, text):
- yield 0, Text, text
-
+
+ def get_tokens_unprocessed(self, text):
+ yield 0, Text, text
+
def analyse_text(text):
return TextLexer.priority
-
-
-_ttype_cache = {}
-
-line_re = re.compile(b'.*?\n')
-
-
-class RawTokenLexer(Lexer):
- """
- Recreate a token stream formatted with the `RawTokenFormatter`. This
- lexer raises exceptions during parsing if the token stream in the
- file is malformed.
-
- Additional options accepted:
-
- `compress`
- If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
- the given compression algorithm before lexing (default: ``""``).
- """
- name = 'Raw token data'
- aliases = ['raw']
- filenames = []
- mimetypes = ['application/x-pygments-tokens']
-
- def __init__(self, **options):
- self.compress = get_choice_opt(options, 'compress',
- ['', 'none', 'gz', 'bz2'], '')
- Lexer.__init__(self, **options)
-
- def get_tokens(self, text):
- if isinstance(text, text_type):
- # raw token stream never has any non-ASCII characters
- text = text.encode('ascii')
- if self.compress == 'gz':
- import gzip
- gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text))
- text = gzipfile.read()
- elif self.compress == 'bz2':
- import bz2
- text = bz2.decompress(text)
-
- # do not call Lexer.get_tokens() because we do not want Unicode
- # decoding to occur, and stripping is not optional.
- text = text.strip(b'\n') + b'\n'
- for i, t, v in self.get_tokens_unprocessed(text):
- yield t, v
-
- def get_tokens_unprocessed(self, text):
- length = 0
- for match in line_re.finditer(text):
- try:
- ttypestr, val = match.group().split(b'\t', 1)
- except ValueError:
- val = match.group().decode('ascii', 'replace')
- ttype = Error
- else:
- ttype = _ttype_cache.get(ttypestr)
- if not ttype:
- ttype = Token
- ttypes = ttypestr.split('.')[1:]
- for ttype_ in ttypes:
- if not ttype_ or not ttype_[0].isupper():
- raise ValueError('malformed token name')
- ttype = getattr(ttype, ttype_)
- _ttype_cache[ttypestr] = ttype
- val = val[2:-2].decode('unicode-escape')
- yield length, ttype, val
- length += len(val)
+
+
+_ttype_cache = {}
+
+line_re = re.compile(b'.*?\n')
+
+
+class RawTokenLexer(Lexer):
+ """
+ Recreate a token stream formatted with the `RawTokenFormatter`. This
+ lexer raises exceptions during parsing if the token stream in the
+ file is malformed.
+
+ Additional options accepted:
+
+ `compress`
+ If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
+ the given compression algorithm before lexing (default: ``""``).
+ """
+ name = 'Raw token data'
+ aliases = ['raw']
+ filenames = []
+ mimetypes = ['application/x-pygments-tokens']
+
+ def __init__(self, **options):
+ self.compress = get_choice_opt(options, 'compress',
+ ['', 'none', 'gz', 'bz2'], '')
+ Lexer.__init__(self, **options)
+
+ def get_tokens(self, text):
+ if isinstance(text, text_type):
+ # raw token stream never has any non-ASCII characters
+ text = text.encode('ascii')
+ if self.compress == 'gz':
+ import gzip
+ gzipfile = gzip.GzipFile('', 'rb', 9, BytesIO(text))
+ text = gzipfile.read()
+ elif self.compress == 'bz2':
+ import bz2
+ text = bz2.decompress(text)
+
+ # do not call Lexer.get_tokens() because we do not want Unicode
+ # decoding to occur, and stripping is not optional.
+ text = text.strip(b'\n') + b'\n'
+ for i, t, v in self.get_tokens_unprocessed(text):
+ yield t, v
+
+ def get_tokens_unprocessed(self, text):
+ length = 0
+ for match in line_re.finditer(text):
+ try:
+ ttypestr, val = match.group().split(b'\t', 1)
+ except ValueError:
+ val = match.group().decode('ascii', 'replace')
+ ttype = Error
+ else:
+ ttype = _ttype_cache.get(ttypestr)
+ if not ttype:
+ ttype = Token
+ ttypes = ttypestr.split('.')[1:]
+ for ttype_ in ttypes:
+ if not ttype_ or not ttype_[0].isupper():
+ raise ValueError('malformed token name')
+ ttype = getattr(ttype, ttype_)
+ _ttype_cache[ttypestr] = ttype
+ val = val[2:-2].decode('unicode-escape')
+ yield length, ttype, val
+ length += len(val)
diff --git a/contrib/python/Pygments/py2/pygments/lexers/sql.py b/contrib/python/Pygments/py2/pygments/lexers/sql.py
index afcaa6d4f7..1ffc3a9760 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/sql.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/sql.py
@@ -1,66 +1,66 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.sql
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for various SQL dialects and related interactive sessions.
-
- Postgres specific lexers:
-
- `PostgresLexer`
- A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
- lexer are:
-
- - keywords and data types list parsed from the PG docs (run the
- `_postgres_builtins` module to update them);
- - Content of $-strings parsed using a specific lexer, e.g. the content
- of a PL/Python function is parsed using the Python lexer;
- - parse PG specific constructs: E-strings, $-strings, U&-strings,
- different operators and punctuation.
-
- `PlPgsqlLexer`
- A lexer for the PL/pgSQL language. Adds a few specific construct on
- top of the PG SQL lexer (such as <<label>>).
-
- `PostgresConsoleLexer`
- A lexer to highlight an interactive psql session:
-
- - identifies the prompt and does its best to detect the end of command
- in multiline statement where not all the lines are prefixed by a
- prompt, telling them apart from the output;
- - highlights errors in the output and notification levels;
- - handles psql backslash commands.
-
- The ``tests/examplefiles`` contains a few test files with data to be
- parsed by these lexers.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.sql
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various SQL dialects and related interactive sessions.
+
+ Postgres specific lexers:
+
+ `PostgresLexer`
+ A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
+ lexer are:
+
+ - keywords and data types list parsed from the PG docs (run the
+ `_postgres_builtins` module to update them);
+ - Content of $-strings parsed using a specific lexer, e.g. the content
+ of a PL/Python function is parsed using the Python lexer;
+ - parse PG specific constructs: E-strings, $-strings, U&-strings,
+ different operators and punctuation.
+
+ `PlPgsqlLexer`
+ A lexer for the PL/pgSQL language. Adds a few specific construct on
+ top of the PG SQL lexer (such as <<label>>).
+
+ `PostgresConsoleLexer`
+ A lexer to highlight an interactive psql session:
+
+ - identifies the prompt and does its best to detect the end of command
+ in multiline statement where not all the lines are prefixed by a
+ prompt, telling them apart from the output;
+ - highlights errors in the output and notification levels;
+ - handles psql backslash commands.
+
+ The ``tests/examplefiles`` contains a few test files with data to be
+ parsed by these lexers.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
Keyword, Name, String, Number, Generic
-from pygments.lexers import get_lexer_by_name, ClassNotFound
-from pygments.util import iteritems
-
-from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
- PSEUDO_TYPES, PLPGSQL_KEYWORDS
+from pygments.lexers import get_lexer_by_name, ClassNotFound
+from pygments.util import iteritems
+
+from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
+ PSEUDO_TYPES, PLPGSQL_KEYWORDS
from pygments.lexers import _tsql_builtins
-
-
-__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
+
+
+__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
'SqlLexer', 'TransactSqlLexer', 'MySqlLexer',
'SqliteConsoleLexer', 'RqlLexer']
-
-line_re = re.compile('.*?\n')
-
-language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
-
+
+line_re = re.compile('.*?\n')
+
+language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
+
do_re = re.compile(r'\bDO\b', re.IGNORECASE)
-
+
# Regular expressions for analyse_text()
name_between_bracket_re = re.compile(r'\[[a-zA-Z_]\w*\]')
name_between_backtick_re = re.compile(r'`[a-zA-Z_]\w*`')
@@ -69,27 +69,27 @@ tsql_declare_re = re.compile(r'\bdeclare\s+@', re.IGNORECASE)
tsql_variable_re = re.compile(r'@[a-zA-Z_]\w*\b')
-def language_callback(lexer, match):
- """Parse the content of a $-string using a lexer
-
+def language_callback(lexer, match):
+ """Parse the content of a $-string using a lexer
+
The lexer is chosen looking for a nearby LANGUAGE or assumed as
plpgsql if inside a DO statement and no LANGUAGE has been found.
- """
+ """
lx = None
- m = language_re.match(lexer.text[match.end():match.end()+100])
- if m is not None:
+ m = language_re.match(lexer.text[match.end():match.end()+100])
+ if m is not None:
lx = lexer._get_lexer(m.group(1))
- else:
- m = list(language_re.finditer(
- lexer.text[max(0, match.start()-100):match.start()]))
- if m:
+ else:
+ m = list(language_re.finditer(
+ lexer.text[max(0, match.start()-100):match.start()]))
+ if m:
lx = lexer._get_lexer(m[-1].group(1))
else:
m = list(do_re.finditer(
lexer.text[max(0, match.start()-25):match.start()]))
if m:
lx = lexer._get_lexer('plpgsql')
-
+
# 1 = $, 2 = delimiter, 3 = $
yield (match.start(1), String, match.group(1))
yield (match.start(2), String.Delimiter, match.group(2))
@@ -97,97 +97,97 @@ def language_callback(lexer, match):
# 4 = string contents
if lx:
for x in lx.get_tokens_unprocessed(match.group(4)):
- yield x
- else:
+ yield x
+ else:
yield (match.start(4), String, match.group(4))
# 5 = $, 6 = delimiter, 7 = $
yield (match.start(5), String, match.group(5))
yield (match.start(6), String.Delimiter, match.group(6))
yield (match.start(7), String, match.group(7))
-
-
-class PostgresBase(object):
- """Base class for Postgres-related lexers.
-
- This is implemented as a mixin to avoid the Lexer metaclass kicking in.
- this way the different lexer don't have a common Lexer ancestor. If they
- had, _tokens could be created on this ancestor and not updated for the
- other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
- seem to suggest that regexp lexers are not really subclassable.
- """
- def get_tokens_unprocessed(self, text, *args):
- # Have a copy of the entire text to be used by `language_callback`.
- self.text = text
- for x in super(PostgresBase, self).get_tokens_unprocessed(
- text, *args):
- yield x
-
- def _get_lexer(self, lang):
- if lang.lower() == 'sql':
- return get_lexer_by_name('postgresql', **self.options)
-
- tries = [lang]
- if lang.startswith('pl'):
- tries.append(lang[2:])
- if lang.endswith('u'):
- tries.append(lang[:-1])
- if lang.startswith('pl') and lang.endswith('u'):
- tries.append(lang[2:-1])
-
+
+
+class PostgresBase(object):
+ """Base class for Postgres-related lexers.
+
+ This is implemented as a mixin to avoid the Lexer metaclass kicking in.
+ this way the different lexer don't have a common Lexer ancestor. If they
+ had, _tokens could be created on this ancestor and not updated for the
+ other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
+ seem to suggest that regexp lexers are not really subclassable.
+ """
+ def get_tokens_unprocessed(self, text, *args):
+ # Have a copy of the entire text to be used by `language_callback`.
+ self.text = text
+ for x in super(PostgresBase, self).get_tokens_unprocessed(
+ text, *args):
+ yield x
+
+ def _get_lexer(self, lang):
+ if lang.lower() == 'sql':
+ return get_lexer_by_name('postgresql', **self.options)
+
+ tries = [lang]
+ if lang.startswith('pl'):
+ tries.append(lang[2:])
+ if lang.endswith('u'):
+ tries.append(lang[:-1])
+ if lang.startswith('pl') and lang.endswith('u'):
+ tries.append(lang[2:-1])
+
for lx in tries:
- try:
+ try:
return get_lexer_by_name(lx, **self.options)
- except ClassNotFound:
- pass
- else:
- # TODO: better logging
- # print >>sys.stderr, "language not found:", lang
- return None
-
-
-class PostgresLexer(PostgresBase, RegexLexer):
- """
- Lexer for the PostgreSQL dialect of SQL.
-
- .. versionadded:: 1.5
- """
-
- name = 'PostgreSQL SQL dialect'
- aliases = ['postgresql', 'postgres']
- mimetypes = ['text/x-postgresql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Text),
+ except ClassNotFound:
+ pass
+ else:
+ # TODO: better logging
+ # print >>sys.stderr, "language not found:", lang
+ return None
+
+
+class PostgresLexer(PostgresBase, RegexLexer):
+ """
+ Lexer for the PostgreSQL dialect of SQL.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'PostgreSQL SQL dialect'
+ aliases = ['postgresql', 'postgres']
+ mimetypes = ['text/x-postgresql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'--.*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
(r'(' + '|'.join(s.replace(" ", r"\s+")
for s in DATATYPES + PSEUDO_TYPES) + r')\b',
Name.Builtin),
- (words(KEYWORDS, suffix=r'\b'), Keyword),
- (r'[+*/<>=~!@#%^&|`?-]+', Operator),
- (r'::', Operator), # cast
- (r'\$\d+', Name.Variable),
- (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
- (r'[0-9]+', Number.Integer),
+ (words(KEYWORDS, suffix=r'\b'), Keyword),
+ (r'[+*/<>=~!@#%^&|`?-]+', Operator),
+ (r'::', Operator), # cast
+ (r'\$\d+', Name.Variable),
+ (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
+ (r'[0-9]+', Number.Integer),
(r"((?:E|U&)?)(')", bygroups(String.Affix, String.Single), 'string'),
# quoted identifier
(r'((?:U&)?)(")', bygroups(String.Affix, String.Name), 'quoted-ident'),
(r'(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)', language_callback),
- (r'[a-z_]\w*', Name),
-
- # psql variable in SQL
- (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
-
- (r'[;:()\[\]{},.]', Punctuation),
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ],
+ (r'[a-z_]\w*', Name),
+
+ # psql variable in SQL
+ (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
+
+ (r'[;:()\[\]{},.]', Punctuation),
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ],
'string': [
(r"[^']+", String.Single),
(r"''", String.Single),
@@ -198,193 +198,193 @@ class PostgresLexer(PostgresBase, RegexLexer):
(r'""', String.Name),
(r'"', String.Name, '#pop'),
],
- }
-
-
-class PlPgsqlLexer(PostgresBase, RegexLexer):
- """
- Handle the extra syntax in Pl/pgSQL language.
-
- .. versionadded:: 1.5
- """
- name = 'PL/pgSQL'
- aliases = ['plpgsql']
- mimetypes = ['text/x-plpgsql']
-
- flags = re.IGNORECASE
+ }
+
+
+class PlPgsqlLexer(PostgresBase, RegexLexer):
+ """
+ Handle the extra syntax in Pl/pgSQL language.
+
+ .. versionadded:: 1.5
+ """
+ name = 'PL/pgSQL'
+ aliases = ['plpgsql']
+ mimetypes = ['text/x-plpgsql']
+
+ flags = re.IGNORECASE
tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
-
- # extend the keywords list
- for i, pattern in enumerate(tokens['root']):
- if pattern[1] == Keyword:
- tokens['root'][i] = (
- words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
- Keyword)
- del i
- break
- else:
- assert 0, "SQL keywords not found"
-
- # Add specific PL/pgSQL rules (before the SQL ones)
- tokens['root'][:0] = [
- (r'\%[a-z]\w*\b', Name.Builtin), # actually, a datatype
- (r':=', Operator),
- (r'\<\<[a-z]\w*\>\>', Name.Label),
- (r'\#[a-z]\w*\b', Keyword.Pseudo), # #variable_conflict
- ]
-
-
-class PsqlRegexLexer(PostgresBase, RegexLexer):
- """
- Extend the PostgresLexer adding support specific for psql commands.
-
- This is not a complete psql lexer yet as it lacks prompt support
- and output rendering.
- """
-
- name = 'PostgreSQL console - regexp based lexer'
- aliases = [] # not public
-
- flags = re.IGNORECASE
+
+ # extend the keywords list
+ for i, pattern in enumerate(tokens['root']):
+ if pattern[1] == Keyword:
+ tokens['root'][i] = (
+ words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
+ Keyword)
+ del i
+ break
+ else:
+ assert 0, "SQL keywords not found"
+
+ # Add specific PL/pgSQL rules (before the SQL ones)
+ tokens['root'][:0] = [
+ (r'\%[a-z]\w*\b', Name.Builtin), # actually, a datatype
+ (r':=', Operator),
+ (r'\<\<[a-z]\w*\>\>', Name.Label),
+ (r'\#[a-z]\w*\b', Keyword.Pseudo), # #variable_conflict
+ ]
+
+
+class PsqlRegexLexer(PostgresBase, RegexLexer):
+ """
+ Extend the PostgresLexer adding support specific for psql commands.
+
+ This is not a complete psql lexer yet as it lacks prompt support
+ and output rendering.
+ """
+
+ name = 'PostgreSQL console - regexp based lexer'
+ aliases = [] # not public
+
+ flags = re.IGNORECASE
tokens = {k: l[:] for (k, l) in iteritems(PostgresLexer.tokens)}
-
- tokens['root'].append(
- (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
- tokens['psql-command'] = [
- (r'\n', Text, 'root'),
- (r'\s+', Text),
- (r'\\[^\s]+', Keyword.Pseudo),
- (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Backtick),
- (r"[^\s]+", String.Symbol),
- ]
-
-
-re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
-re_psql_command = re.compile(r'\s*\\')
-re_end_command = re.compile(r';\s*(--.*?)?$')
-re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
-re_error = re.compile(r'(ERROR|FATAL):')
-re_message = re.compile(
- r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
- r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
-
-
-class lookahead(object):
- """Wrap an iterator and allow pushing back an item."""
- def __init__(self, x):
- self.iter = iter(x)
- self._nextitem = None
-
- def __iter__(self):
- return self
-
- def send(self, i):
- self._nextitem = i
- return i
-
- def __next__(self):
- if self._nextitem is not None:
- ni = self._nextitem
- self._nextitem = None
- return ni
- return next(self.iter)
- next = __next__
-
-
-class PostgresConsoleLexer(Lexer):
- """
- Lexer for psql sessions.
-
- .. versionadded:: 1.5
- """
-
- name = 'PostgreSQL console (psql)'
- aliases = ['psql', 'postgresql-console', 'postgres-console']
- mimetypes = ['text/x-postgresql-psql']
-
- def get_tokens_unprocessed(self, data):
- sql = PsqlRegexLexer(**self.options)
-
- lines = lookahead(line_re.findall(data))
-
- # prompt-output cycle
- while 1:
-
- # consume the lines of the command: start with an optional prompt
- # and continue until the end of command is detected
- curcode = ''
- insertions = []
+
+ tokens['root'].append(
+ (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
+ tokens['psql-command'] = [
+ (r'\n', Text, 'root'),
+ (r'\s+', Text),
+ (r'\\[^\s]+', Keyword.Pseudo),
+ (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
+ (r"'(''|[^'])*'", String.Single),
+ (r"`([^`])*`", String.Backtick),
+ (r"[^\s]+", String.Symbol),
+ ]
+
+
+re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
+re_psql_command = re.compile(r'\s*\\')
+re_end_command = re.compile(r';\s*(--.*?)?$')
+re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
+re_error = re.compile(r'(ERROR|FATAL):')
+re_message = re.compile(
+ r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
+ r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
+
+
+class lookahead(object):
+ """Wrap an iterator and allow pushing back an item."""
+ def __init__(self, x):
+ self.iter = iter(x)
+ self._nextitem = None
+
+ def __iter__(self):
+ return self
+
+ def send(self, i):
+ self._nextitem = i
+ return i
+
+ def __next__(self):
+ if self._nextitem is not None:
+ ni = self._nextitem
+ self._nextitem = None
+ return ni
+ return next(self.iter)
+ next = __next__
+
+
+class PostgresConsoleLexer(Lexer):
+ """
+ Lexer for psql sessions.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'PostgreSQL console (psql)'
+ aliases = ['psql', 'postgresql-console', 'postgres-console']
+ mimetypes = ['text/x-postgresql-psql']
+
+ def get_tokens_unprocessed(self, data):
+ sql = PsqlRegexLexer(**self.options)
+
+ lines = lookahead(line_re.findall(data))
+
+ # prompt-output cycle
+ while 1:
+
+ # consume the lines of the command: start with an optional prompt
+ # and continue until the end of command is detected
+ curcode = ''
+ insertions = []
for line in lines:
- # Identify a shell prompt in case of psql commandline example
- if line.startswith('$') and not curcode:
- lexer = get_lexer_by_name('console', **self.options)
- for x in lexer.get_tokens_unprocessed(line):
- yield x
- break
-
- # Identify a psql prompt
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- insertions.append((len(curcode),
- [(0, Generic.Prompt, mprompt.group())]))
- curcode += line[len(mprompt.group()):]
- else:
- curcode += line
-
- # Check if this is the end of the command
- # TODO: better handle multiline comments at the end with
- # a lexer with an external state?
- if re_psql_command.match(curcode) \
- or re_end_command.search(curcode):
- break
-
- # Emit the combined stream of command and prompt(s)
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
-
- # Emit the output lines
- out_token = Generic.Output
+ # Identify a shell prompt in case of psql commandline example
+ if line.startswith('$') and not curcode:
+ lexer = get_lexer_by_name('console', **self.options)
+ for x in lexer.get_tokens_unprocessed(line):
+ yield x
+ break
+
+ # Identify a psql prompt
+ mprompt = re_prompt.match(line)
+ if mprompt is not None:
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, mprompt.group())]))
+ curcode += line[len(mprompt.group()):]
+ else:
+ curcode += line
+
+ # Check if this is the end of the command
+ # TODO: better handle multiline comments at the end with
+ # a lexer with an external state?
+ if re_psql_command.match(curcode) \
+ or re_end_command.search(curcode):
+ break
+
+ # Emit the combined stream of command and prompt(s)
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
+
+ # Emit the output lines
+ out_token = Generic.Output
for line in lines:
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- # push the line back to have it processed by the prompt
- lines.send(line)
- break
-
- mmsg = re_message.match(line)
- if mmsg is not None:
- if mmsg.group(1).startswith("ERROR") \
- or mmsg.group(1).startswith("FATAL"):
- out_token = Generic.Error
- yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
- yield (mmsg.start(2), out_token, mmsg.group(2))
- else:
- yield (0, out_token, line)
+ mprompt = re_prompt.match(line)
+ if mprompt is not None:
+ # push the line back to have it processed by the prompt
+ lines.send(line)
+ break
+
+ mmsg = re_message.match(line)
+ if mmsg is not None:
+ if mmsg.group(1).startswith("ERROR") \
+ or mmsg.group(1).startswith("FATAL"):
+ out_token = Generic.Error
+ yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
+ yield (mmsg.start(2), out_token, mmsg.group(2))
+ else:
+ yield (0, out_token, line)
else:
return
-
-
-class SqlLexer(RegexLexer):
- """
- Lexer for Structured Query Language. Currently, this lexer does
- not recognize any special syntax except ANSI SQL.
- """
-
- name = 'SQL'
- aliases = ['sql']
- filenames = ['*.sql']
- mimetypes = ['text/x-sql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Text),
+
+
+class SqlLexer(RegexLexer):
+ """
+ Lexer for Structured Query Language. Currently, this lexer does
+ not recognize any special syntax except ANSI SQL.
+ """
+
+ name = 'SQL'
+ aliases = ['sql']
+ filenames = ['*.sql']
+ mimetypes = ['text/x-sql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'--.*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (words((
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (words((
'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
@@ -392,8 +392,8 @@ class SqlLexer(RegexLexer):
'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
- 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
- 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
+ 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
+ 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
'CLUSTER', 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION',
'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
@@ -406,7 +406,7 @@ class SqlLexer(RegexLexer):
'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
- 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
+ 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
@@ -471,32 +471,32 @@ class SqlLexer(RegexLexer):
'VALID', 'VALIDATOR', 'VALUES', 'VARIABLE', 'VERBOSE', 'VERSION', 'VIEW',
'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
- Keyword),
- (words((
+ Keyword),
+ (words((
'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
- Name.Builtin),
- (r'[+*/<>=~!@#%^&|`?-]', Operator),
- (r'[0-9]+', Number.Integer),
- # TODO: Backslash escapes?
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
- (r'[a-z_][\w$]*', Name), # allow $s in strings for Oracle
- (r'[;:()\[\],.]', Punctuation)
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
- }
-
+ Name.Builtin),
+ (r'[+*/<>=~!@#%^&|`?-]', Operator),
+ (r'[0-9]+', Number.Integer),
+ # TODO: Backslash escapes?
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
+ (r'[a-z_][\w$]*', Name), # allow $s in strings for Oracle
+ (r'[;:()\[\],.]', Punctuation)
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ]
+ }
+
def analyse_text(text):
return 0.01
-
+
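
SqlLexer above recognises only ANSI keywords, the builtin type names, and the standard literal forms. A hedged usage sketch, rendering one statement to a colour terminal:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.sql import SqlLexer

query = "SELECT name, COUNT(*) AS total FROM users GROUP BY name;"
# highlight() runs the lexer over the text and feeds the token stream
# to the formatter.
print(highlight(query, SqlLexer(), TerminalFormatter()))
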
class TransactSqlLexer(RegexLexer):
"""
@@ -582,78 +582,78 @@ class TransactSqlLexer(RegexLexer):
return rating
-class MySqlLexer(RegexLexer):
- """
- Special lexer for MySQL.
- """
-
- name = 'MySQL'
- aliases = ['mysql']
- mimetypes = ['text/x-mysql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Text),
+class MySqlLexer(RegexLexer):
+ """
+ Special lexer for MySQL.
+ """
+
+ name = 'MySQL'
+ aliases = ['mysql']
+ mimetypes = ['text/x-mysql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'(#|--\s+).*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'[0-9]+', Number.Integer),
- (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
- (r"'(\\\\|\\'|''|[^'])*'", String.Single),
- (r'"(\\\\|\\"|""|[^"])*"', String.Double),
- (r"`(\\\\|\\`|``|[^`])*`", String.Symbol),
- (r'[+*/<>=~!@#%^&|`?-]', Operator),
- (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
- r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
- r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
- r'precision|real|numeric|dec|decimal|timestamp|year|char|'
- r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
- bygroups(Keyword.Type, Text, Punctuation)),
- (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
- r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
- r'character|check|collate|column|condition|constraint|continue|'
- r'convert|create|cross|current_date|current_time|'
- r'current_timestamp|current_user|cursor|database|databases|'
- r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
- r'declare|default|delayed|delete|desc|describe|deterministic|'
- r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
- r'enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|'
- r'float8|for|force|foreign|from|fulltext|grant|group|having|'
- r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
- r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
- r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
- r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
- r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
- r'minute_microsecond|minute_second|mod|modifies|natural|'
- r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
- r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
- r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
- r'replace|require|restrict|return|revoke|right|rlike|schema|'
- r'schemas|second_microsecond|select|sensitive|separator|set|'
- r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
- r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
- r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
- r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
- r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
- r'varying|when|where|while|with|write|x509|xor|year_month|'
- r'zerofill)\b', Keyword),
- # TODO: this list is not complete
- (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
- (r'(true|false|null)', Name.Constant),
- (r'([a-z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-z_]\w*', Name),
- (r'@[a-z0-9]*[._]*[a-z0-9]*', Name.Variable),
- (r'[;:()\[\],.]', Punctuation)
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
- }
-
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'[0-9]+', Number.Integer),
+ (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
+ (r"'(\\\\|\\'|''|[^'])*'", String.Single),
+ (r'"(\\\\|\\"|""|[^"])*"', String.Double),
+ (r"`(\\\\|\\`|``|[^`])*`", String.Symbol),
+ (r'[+*/<>=~!@#%^&|`?-]', Operator),
+ (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
+ r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
+ r'tinyblob|mediumblob|longblob|blob|float|double|double\s+'
+ r'precision|real|numeric|dec|decimal|timestamp|year|char|'
+ r'varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?',
+ bygroups(Keyword.Type, Text, Punctuation)),
+ (r'\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|'
+ r'bigint|binary|blob|both|by|call|cascade|case|change|char|'
+ r'character|check|collate|column|condition|constraint|continue|'
+ r'convert|create|cross|current_date|current_time|'
+ r'current_timestamp|current_user|cursor|database|databases|'
+ r'day_hour|day_microsecond|day_minute|day_second|dec|decimal|'
+ r'declare|default|delayed|delete|desc|describe|deterministic|'
+ r'distinct|distinctrow|div|double|drop|dual|each|else|elseif|'
+ r'enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|'
+ r'float8|for|force|foreign|from|fulltext|grant|group|having|'
+ r'high_priority|hour_microsecond|hour_minute|hour_second|if|'
+ r'ignore|in|index|infile|inner|inout|insensitive|insert|int|'
+ r'int1|int2|int3|int4|int8|integer|interval|into|is|iterate|'
+ r'join|key|keys|kill|leading|leave|left|like|limit|lines|load|'
+ r'localtime|localtimestamp|lock|long|loop|low_priority|match|'
+ r'minute_microsecond|minute_second|mod|modifies|natural|'
+ r'no_write_to_binlog|not|numeric|on|optimize|option|optionally|'
+ r'or|order|out|outer|outfile|precision|primary|procedure|purge|'
+ r'raid0|read|reads|real|references|regexp|release|rename|repeat|'
+ r'replace|require|restrict|return|revoke|right|rlike|schema|'
+ r'schemas|second_microsecond|select|sensitive|separator|set|'
+ r'show|smallint|soname|spatial|specific|sql|sql_big_result|'
+ r'sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|'
+ r'sqlwarning|ssl|starting|straight_join|table|terminated|then|'
+ r'to|trailing|trigger|undo|union|unique|unlock|unsigned|update|'
+ r'usage|use|using|utc_date|utc_time|utc_timestamp|values|'
+ r'varying|when|where|while|with|write|x509|xor|year_month|'
+ r'zerofill)\b', Keyword),
+ # TODO: this list is not complete
+ (r'\b(auto_increment|engine|charset|tables)\b', Keyword.Pseudo),
+ (r'(true|false|null)', Name.Constant),
+ (r'([a-z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-z_]\w*', Name),
+ (r'@[a-z0-9]*[._]*[a-z0-9]*', Name.Variable),
+ (r'[;:()\[\],.]', Punctuation)
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ]
+ }
+
def analyse_text(text):
rating = 0
name_between_backtick_count = len(
@@ -671,74 +671,74 @@ class MySqlLexer(RegexLexer):
elif name_between_backtick_count > 0:
rating += 0.1
return rating
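
MySqlLexer above adds the '#' comment form, backtick-quoted identifiers, and MySQL's own keyword and type lists; the analyse_text method then scores a sample mainly by how many names sit between backticks. A small sketch showing where backticked names land in the token stream:

from pygments.lexers.sql import MySqlLexer
from pygments.token import String

stmt = "SELECT `id`, `name` FROM `users` WHERE `id` = 1; -- lookup"
for token, value in MySqlLexer().get_tokens(stmt):
    if token is String.Symbol:  # backtick-quoted identifiers
        print("symbol:", repr(value))
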
-
-
-class SqliteConsoleLexer(Lexer):
- """
- Lexer for example sessions using sqlite3.
-
- .. versionadded:: 0.11
- """
-
- name = 'sqlite3con'
- aliases = ['sqlite3']
- filenames = ['*.sqlite3-console']
- mimetypes = ['text/x-sqlite3-console']
-
- def get_tokens_unprocessed(self, data):
- sql = SqlLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(data):
- line = match.group()
- if line.startswith('sqlite> ') or line.startswith(' ...> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:8])]))
- curcode += line[8:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if line.startswith('SQL error: '):
- yield (match.start(), Generic.Traceback, line)
- else:
- yield (match.start(), Generic.Output, line)
- if curcode:
- for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class RqlLexer(RegexLexer):
- """
- Lexer for Relation Query Language.
-
- `RQL <http://www.logilab.org/project/rql>`_
-
- .. versionadded:: 2.0
- """
- name = 'RQL'
- aliases = ['rql']
- filenames = ['*.rql']
- mimetypes = ['text/x-rql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(DELETE|SET|INSERT|UNION|DISTINCT|WITH|WHERE|BEING|OR'
- r'|AND|NOT|GROUPBY|HAVING|ORDERBY|ASC|DESC|LIMIT|OFFSET'
- r'|TODAY|NOW|TRUE|FALSE|NULL|EXISTS)\b', Keyword),
- (r'[+*/<>=%-]', Operator),
- (r'(Any|is|instance_of|CWEType|CWRelation)\b', Name.Builtin),
- (r'[0-9]+', Number.Integer),
- (r'[A-Z_]\w*\??', Name),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Single),
- (r'[;:()\[\],.]', Punctuation)
- ],
- }
+
+
+class SqliteConsoleLexer(Lexer):
+ """
+ Lexer for example sessions using sqlite3.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'sqlite3con'
+ aliases = ['sqlite3']
+ filenames = ['*.sqlite3-console']
+ mimetypes = ['text/x-sqlite3-console']
+
+ def get_tokens_unprocessed(self, data):
+ sql = SqlLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(data):
+ line = match.group()
+ if line.startswith('sqlite> ') or line.startswith(' ...> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:8])]))
+ curcode += line[8:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if line.startswith('SQL error: '):
+ yield (match.start(), Generic.Traceback, line)
+ else:
+ yield (match.start(), Generic.Output, line)
+ if curcode:
+ for item in do_insertions(insertions,
+ sql.get_tokens_unprocessed(curcode)):
+ yield item
+
+
+class RqlLexer(RegexLexer):
+ """
+ Lexer for Relation Query Language.
+
+ `RQL <http://www.logilab.org/project/rql>`_
+
+ .. versionadded:: 2.0
+ """
+ name = 'RQL'
+ aliases = ['rql']
+ filenames = ['*.rql']
+ mimetypes = ['text/x-rql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(DELETE|SET|INSERT|UNION|DISTINCT|WITH|WHERE|BEING|OR'
+ r'|AND|NOT|GROUPBY|HAVING|ORDERBY|ASC|DESC|LIMIT|OFFSET'
+ r'|TODAY|NOW|TRUE|FALSE|NULL|EXISTS)\b', Keyword),
+ (r'[+*/<>=%-]', Operator),
+ (r'(Any|is|instance_of|CWEType|CWRelation)\b', Name.Builtin),
+ (r'[0-9]+', Number.Integer),
+ (r'[A-Z_]\w*\??', Name),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Single),
+ (r'[;:()\[\],.]', Punctuation)
+ ],
+ }
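
SqliteConsoleLexer follows the same shape as the psql console lexer earlier in this file: prompt prefixes become Generic.Prompt, the buffered statement is re-lexed with SqlLexer via do_insertions(), and lines starting with 'SQL error: ' come out as Generic.Traceback. A sketch of feeding it a transcript:

from pygments.lexers.sql import SqliteConsoleLexer
from pygments.token import Generic

transcript = (
    "sqlite> SELECT 1 + 1;\n"
    "2\n"
    "sqlite> SELECT nope;\n"
    "SQL error: no such column: nope\n"
)

for index, token, value in SqliteConsoleLexer().get_tokens_unprocessed(transcript):
    # Keep only the console-level tokens; the SQL itself is lexed by SqlLexer.
    if token in (Generic.Prompt, Generic.Output, Generic.Traceback):
        print(index, token, repr(value))
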
diff --git a/contrib/python/Pygments/py2/pygments/lexers/supercollider.py b/contrib/python/Pygments/py2/pygments/lexers/supercollider.py
index d0d033a06c..52b924611a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/supercollider.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/supercollider.py
@@ -1,90 +1,90 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.supercollider
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for SuperCollider
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.supercollider
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for SuperCollider
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, include, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SuperColliderLexer']
-
-
-class SuperColliderLexer(RegexLexer):
- """
- For `SuperCollider <http://supercollider.github.io/>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'SuperCollider'
- aliases = ['sc', 'supercollider']
- filenames = ['*.sc', '*.scd']
- mimetypes = ['application/supercollider', 'text/supercollider', ]
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SuperColliderLexer']
+
+
+class SuperColliderLexer(RegexLexer):
+ """
+ For `SuperCollider <http://supercollider.github.io/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'SuperCollider'
+ aliases = ['sc', 'supercollider']
+ filenames = ['*.sc', '*.scd']
+ mimetypes = ['application/supercollider', 'text/supercollider', ]
+
+ flags = re.DOTALL | re.MULTILINE
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop'),
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (words((
- 'for', 'in', 'while', 'do', 'break', 'return', 'continue',
- 'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
- 'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
- 'void'), suffix=r'\b'),
- Keyword, 'slashstartsregex'),
- (words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
- Keyword.Declaration, 'slashstartsregex'),
- (words((
- '(abstract', 'boolean', 'byte', 'char', 'class', 'const',
- 'debugger', 'double', 'enum', 'export', 'extends', 'final',
- 'float', 'goto', 'implements', 'import', 'int', 'interface',
- 'long', 'native', 'package', 'private', 'protected', 'public',
- 'short', 'static', 'super', 'synchronized', 'throws',
- 'transient', 'volatile'), suffix=r'\b'),
- Keyword.Reserved),
- (words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
- (words((
- 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (words((
+ 'for', 'in', 'while', 'do', 'break', 'return', 'continue',
+ 'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
+ 'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
+ 'void'), suffix=r'\b'),
+ Keyword, 'slashstartsregex'),
+ (words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
+ Keyword.Declaration, 'slashstartsregex'),
+ (words((
+ '(abstract', 'boolean', 'byte', 'char', 'class', 'const',
+ 'debugger', 'double', 'enum', 'export', 'extends', 'final',
+ 'float', 'goto', 'implements', 'import', 'int', 'interface',
+ 'long', 'native', 'package', 'private', 'protected', 'public',
+ 'short', 'static', 'super', 'synchronized', 'throws',
+ 'transient', 'volatile'), suffix=r'\b'),
+ Keyword.Reserved),
+ (words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
+ (words((
+ 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
'Object', 'Packages', 'RegExp', 'String',
- 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
- 'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
- 'thisThread', 'this'), suffix=r'\b'),
- Name.Builtin),
+ 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
+ 'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
+ 'thisThread', 'this'), suffix=r'\b'),
+ Name.Builtin),
(r'[$a-zA-Z_]\w*', Name.Other),
(r'\\?[$a-zA-Z_]\w*', String.Symbol),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
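
The SuperCollider rules above appear to be adapted from the JavaScript lexer (the same slashstartsregex trick separates division from regex literals), with SuperCollider keywords, builtins, and \symbol literals layered on. A hedged rendering sketch:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.supercollider import SuperColliderLexer

snippet = "{ SinOsc.ar(440, 0, 0.2) }.play;"
# Render an HTML fragment; identifiers such as SinOsc come out as
# Name.Other, braces and dots as Punctuation.
print(highlight(snippet, SuperColliderLexer(), HtmlFormatter()))
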
diff --git a/contrib/python/Pygments/py2/pygments/lexers/tcl.py b/contrib/python/Pygments/py2/pygments/lexers/tcl.py
index ea37c82475..23b6b4e3fe 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/tcl.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/tcl.py
@@ -1,145 +1,145 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.tcl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Tcl and related languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.tcl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Tcl and related languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number
-from pygments.util import shebang_matches
-
-__all__ = ['TclLexer']
-
-
-class TclLexer(RegexLexer):
- """
- For Tcl source code.
-
- .. versionadded:: 0.10
- """
-
- keyword_cmds_re = words((
- 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
- 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
- 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
- 'vwait', 'while'), prefix=r'\b', suffix=r'\b')
-
- builtin_cmds_re = words((
- 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
- 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
- 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
- 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
- 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
- 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
- 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
- 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
- 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')
-
- name = 'Tcl'
- aliases = ['tcl']
- filenames = ['*.tcl', '*.rvt']
- mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
-
- def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
- return [
- (keyword_cmds_re, Keyword, 'params' + context),
- (builtin_cmds_re, Name.Builtin, 'params' + context),
- (r'([\w.-]+)', Name.Variable, 'params' + context),
- (r'#', Comment, 'comment'),
- ]
-
- tokens = {
- 'root': [
- include('command'),
- include('basic'),
- include('data'),
- (r'\}', Keyword), # HACK: somehow we miscounted our braces
- ],
- 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
- 'command-in-brace': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-brace"),
- 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-bracket"),
- 'command-in-paren': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-paren"),
- 'basic': [
- (r'\(', Keyword, 'paren'),
- (r'\[', Keyword, 'bracket'),
- (r'\{', Keyword, 'brace'),
- (r'"', String.Double, 'string'),
- (r'(eq|ne|in|ni)\b', Operator.Word),
- (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
- ],
- 'data': [
- (r'\s+', Text),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'0[0-7]+', Number.Oct),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\$([\w.:-]+)', Name.Variable),
- (r'([\w.:-]+)', Text),
- ],
- 'params': [
- (r';', Keyword, '#pop'),
- (r'\n', Text, '#pop'),
- (r'(else|elseif|then)\b', Keyword),
- include('basic'),
- include('data'),
- ],
- 'params-in-brace': [
- (r'\}', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-paren': [
- (r'\)', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-bracket': [
- (r'\]', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'string': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
- (r'"', String.Double, '#pop')
- ],
- 'string-square': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
- (r'\]', String.Double, '#pop')
- ],
- 'brace': [
- (r'\}', Keyword, '#pop'),
- include('command-in-brace'),
- include('basic'),
- include('data'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('command-in-paren'),
- include('basic'),
- include('data'),
- ],
- 'bracket': [
- (r'\]', Keyword, '#pop'),
- include('command-in-bracket'),
- include('basic'),
- include('data'),
- ],
- 'comment': [
- (r'.*[^\\]\n', Comment, '#pop'),
- (r'.*\\\n', Comment),
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'(tcl)')
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number
+from pygments.util import shebang_matches
+
+__all__ = ['TclLexer']
+
+
+class TclLexer(RegexLexer):
+ """
+ For Tcl source code.
+
+ .. versionadded:: 0.10
+ """
+
+ keyword_cmds_re = words((
+ 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
+ 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
+ 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
+ 'vwait', 'while'), prefix=r'\b', suffix=r'\b')
+
+ builtin_cmds_re = words((
+ 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
+ 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
+ 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
+ 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
+ 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
+ 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
+ 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
+ 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
+ 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')
+
+ name = 'Tcl'
+ aliases = ['tcl']
+ filenames = ['*.tcl', '*.rvt']
+ mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
+
+ def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
+ return [
+ (keyword_cmds_re, Keyword, 'params' + context),
+ (builtin_cmds_re, Name.Builtin, 'params' + context),
+ (r'([\w.-]+)', Name.Variable, 'params' + context),
+ (r'#', Comment, 'comment'),
+ ]
+
+ tokens = {
+ 'root': [
+ include('command'),
+ include('basic'),
+ include('data'),
+ (r'\}', Keyword), # HACK: somehow we miscounted our braces
+ ],
+ 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
+ 'command-in-brace': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-brace"),
+ 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-bracket"),
+ 'command-in-paren': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-paren"),
+ 'basic': [
+ (r'\(', Keyword, 'paren'),
+ (r'\[', Keyword, 'bracket'),
+ (r'\{', Keyword, 'brace'),
+ (r'"', String.Double, 'string'),
+ (r'(eq|ne|in|ni)\b', Operator.Word),
+ (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
+ ],
+ 'data': [
+ (r'\s+', Text),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'0[0-7]+', Number.Oct),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\$([\w.:-]+)', Name.Variable),
+ (r'([\w.:-]+)', Text),
+ ],
+ 'params': [
+ (r';', Keyword, '#pop'),
+ (r'\n', Text, '#pop'),
+ (r'(else|elseif|then)\b', Keyword),
+ include('basic'),
+ include('data'),
+ ],
+ 'params-in-brace': [
+ (r'\}', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-paren': [
+ (r'\)', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-bracket': [
+ (r'\]', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'string': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
+ (r'"', String.Double, '#pop')
+ ],
+ 'string-square': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
+ (r'\]', String.Double, '#pop')
+ ],
+ 'brace': [
+ (r'\}', Keyword, '#pop'),
+ include('command-in-brace'),
+ include('basic'),
+ include('data'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('command-in-paren'),
+ include('basic'),
+ include('data'),
+ ],
+ 'bracket': [
+ (r'\]', Keyword, '#pop'),
+ include('command-in-bracket'),
+ include('basic'),
+ include('data'),
+ ],
+ 'comment': [
+ (r'.*[^\\]\n', Comment, '#pop'),
+ (r'.*\\\n', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'(tcl)')
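
TclLexer above mirrors brace, bracket, and paren nesting with paired command/params states so command words are only recognised in command position; analyse_text simply checks for a tcl shebang. A short sketch:

from pygments.lexers.tcl import TclLexer
from pygments.token import Keyword, Name

script = 'set greeting "hello"\nputs [string toupper $greeting]\n'
for token, value in TclLexer().get_tokens(script):
    # 'set' is a keyword command, 'puts' and 'string' are builtins,
    # '$greeting' is a variable reference.
    if token in (Keyword, Name.Builtin, Name.Variable):
        print(token, repr(value))
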
diff --git a/contrib/python/Pygments/py2/pygments/lexers/templates.py b/contrib/python/Pygments/py2/pygments/lexers/templates.py
index f891242cb8..cebdd1d70a 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/templates.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/templates.py
@@ -1,1828 +1,1828 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.templates
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various template engines' markup.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.templates
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various template engines' markup.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexers.html import HtmlLexer, XmlLexer
-from pygments.lexers.javascript import JavascriptLexer, LassoLexer
-from pygments.lexers.css import CssLexer
-from pygments.lexers.php import PhpLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.perl import PerlLexer
-from pygments.lexers.jvm import JavaLexer, TeaLangLexer
-from pygments.lexers.data import YamlLexer
-from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
- include, using, this, default, combined
-from pygments.token import Error, Punctuation, Whitespace, \
- Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
-from pygments.util import html_doctype_matches, looks_like_xml
-
-__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
- 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
- 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
- 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
- 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
- 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
- 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
- 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
- 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
- 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
- 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
- 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
- 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
- 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
- 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
- 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
- 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
- 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
- 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexers.html import HtmlLexer, XmlLexer
+from pygments.lexers.javascript import JavascriptLexer, LassoLexer
+from pygments.lexers.css import CssLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
+from pygments.lexers.jvm import JavaLexer, TeaLangLexer
+from pygments.lexers.data import YamlLexer
+from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
+ include, using, this, default, combined
+from pygments.token import Error, Punctuation, Whitespace, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
+from pygments.util import html_doctype_matches, looks_like_xml
+
+__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
+ 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
+ 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
+ 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
+ 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
+ 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
+ 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
+ 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
+ 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
+ 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
+ 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
+ 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
+ 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
+ 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
+ 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
+ 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
+ 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
+ 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
+ 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']
-
-
-class ErbLexer(Lexer):
- """
- Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
- lexer.
-
- Just highlights ruby code between the preprocessor directives, other data
- is left untouched by the lexer.
-
- All options are also forwarded to the `RubyLexer`.
- """
-
- name = 'ERB'
- aliases = ['erb']
- mimetypes = ['application/x-ruby-templating']
-
- _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
-
- def __init__(self, **options):
- from pygments.lexers.ruby import RubyLexer
- self.ruby_lexer = RubyLexer(**options)
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- """
- Since ERB doesn't allow "<%" and other tags inside of ruby
- blocks we have to use a split approach here that fails for
- that too.
- """
- tokens = self._block_re.split(text)
- tokens.reverse()
- state = idx = 0
- try:
- while True:
- # text
- if state == 0:
- val = tokens.pop()
- yield idx, Other, val
- idx += len(val)
- state = 1
- # block starts
- elif state == 1:
- tag = tokens.pop()
- # literals
- if tag in ('<%%', '%%>'):
- yield idx, Other, tag
- idx += 3
- state = 0
- # comment
- elif tag == '<%#':
- yield idx, Comment.Preproc, tag
- val = tokens.pop()
- yield idx + 3, Comment, val
- idx += 3 + len(val)
- state = 2
- # blocks or output
- elif tag in ('<%', '<%=', '<%-'):
- yield idx, Comment.Preproc, tag
- idx += len(tag)
- data = tokens.pop()
- r_idx = 0
- for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(data):
- yield r_idx + idx, r_token, r_value
- idx += len(data)
- state = 2
- elif tag in ('%>', '-%>'):
- yield idx, Error, tag
- idx += len(tag)
- state = 0
- # % raw ruby statements
- else:
- yield idx, Comment.Preproc, tag[0]
- r_idx = 0
- for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
- yield idx + 1 + r_idx, r_token, r_value
- idx += len(tag)
- state = 0
- # block ends
- elif state == 2:
- tag = tokens.pop()
- if tag not in ('%>', '-%>'):
- yield idx, Other, tag
- else:
- yield idx, Comment.Preproc, tag
- idx += len(tag)
- state = 0
- except IndexError:
- return
-
- def analyse_text(text):
- if '<%' in text and '%>' in text:
- return 0.4
-
-
-class SmartyLexer(RegexLexer):
- """
- Generic `Smarty <http://smarty.php.net/>`_ template lexer.
-
- Just highlights smarty code between the preprocessor directives, other
- data is left untouched by the lexer.
- """
-
- name = 'Smarty'
- aliases = ['smarty']
- filenames = ['*.tpl']
- mimetypes = ['application/x-smarty']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'(\{)(\*.*?\*)(\})',
- bygroups(Comment.Preproc, Comment, Comment.Preproc)),
- (r'(\{php\})(.*?)(\{/php\})',
- bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
- Comment.Preproc)),
- (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
- bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
- (r'\{', Comment.Preproc, 'smarty')
- ],
- 'smarty': [
- (r'\s+', Text),
- (r'\{', Comment.Preproc, '#push'),
- (r'\}', Comment.Preproc, '#pop'),
- (r'#[a-zA-Z_]\w*#', Name.Variable),
- (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
- (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
- (r'(true|false|null)\b', Keyword.Constant),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z_]\w*', Name.Attribute)
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
+
+
+class ErbLexer(Lexer):
+ """
+ Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
+ lexer.
+
+ Just highlights ruby code between the preprocessor directives, other data
+ is left untouched by the lexer.
+
+ All options are also forwarded to the `RubyLexer`.
+ """
+
+ name = 'ERB'
+ aliases = ['erb']
+ mimetypes = ['application/x-ruby-templating']
+
+ _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
+
+ def __init__(self, **options):
+ from pygments.lexers.ruby import RubyLexer
+ self.ruby_lexer = RubyLexer(**options)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ """
+ Since ERB doesn't allow "<%" and other tags inside of ruby
+ blocks we have to use a split approach here that fails for
+ that too.
+ """
+ tokens = self._block_re.split(text)
+ tokens.reverse()
+ state = idx = 0
+ try:
+ while True:
+ # text
+ if state == 0:
+ val = tokens.pop()
+ yield idx, Other, val
+ idx += len(val)
+ state = 1
+ # block starts
+ elif state == 1:
+ tag = tokens.pop()
+ # literals
+ if tag in ('<%%', '%%>'):
+ yield idx, Other, tag
+ idx += 3
+ state = 0
+ # comment
+ elif tag == '<%#':
+ yield idx, Comment.Preproc, tag
+ val = tokens.pop()
+ yield idx + 3, Comment, val
+ idx += 3 + len(val)
+ state = 2
+ # blocks or output
+ elif tag in ('<%', '<%=', '<%-'):
+ yield idx, Comment.Preproc, tag
+ idx += len(tag)
+ data = tokens.pop()
+ r_idx = 0
+ for r_idx, r_token, r_value in \
+ self.ruby_lexer.get_tokens_unprocessed(data):
+ yield r_idx + idx, r_token, r_value
+ idx += len(data)
+ state = 2
+ elif tag in ('%>', '-%>'):
+ yield idx, Error, tag
+ idx += len(tag)
+ state = 0
+ # % raw ruby statements
+ else:
+ yield idx, Comment.Preproc, tag[0]
+ r_idx = 0
+ for r_idx, r_token, r_value in \
+ self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
+ yield idx + 1 + r_idx, r_token, r_value
+ idx += len(tag)
+ state = 0
+ # block ends
+ elif state == 2:
+ tag = tokens.pop()
+ if tag not in ('%>', '-%>'):
+ yield idx, Other, tag
+ else:
+ yield idx, Comment.Preproc, tag
+ idx += len(tag)
+ state = 0
+ except IndexError:
+ return
+
+ def analyse_text(text):
+ if '<%' in text and '%>' in text:
+ return 0.4
+
+
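
ErbLexer above is not a RegexLexer: it splits the input on the <% ... %> family of markers, hands the Ruby fragments to RubyLexer, and leaves everything else as Other so the delegating lexers (RhtmlLexer and friends) can re-lex it. A hedged sketch:

from pygments.lexers.templates import ErbLexer
from pygments.token import Comment, Other

template = "<h1><%= @user.name %></h1>\n<%# cache marker %>\n"
for token, value in ErbLexer().get_tokens(template):
    # Plain HTML stays Token.Other; the ERB markers come back as
    # Comment.Preproc and the comment body as Comment.
    if token is Other or token in Comment:
        print(token, repr(value))
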
+class SmartyLexer(RegexLexer):
+ """
+ Generic `Smarty <http://smarty.php.net/>`_ template lexer.
+
+ Just highlights smarty code between the preprocessor directives, other
+ data is left untouched by the lexer.
+ """
+
+ name = 'Smarty'
+ aliases = ['smarty']
+ filenames = ['*.tpl']
+ mimetypes = ['application/x-smarty']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+ (r'(\{)(\*.*?\*)(\})',
+ bygroups(Comment.Preproc, Comment, Comment.Preproc)),
+ (r'(\{php\})(.*?)(\{/php\})',
+ bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
+ Comment.Preproc)),
+ (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
+ bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
+ (r'\{', Comment.Preproc, 'smarty')
+ ],
+ 'smarty': [
+ (r'\s+', Text),
+ (r'\{', Comment.Preproc, '#push'),
+ (r'\}', Comment.Preproc, '#pop'),
+ (r'#[a-zA-Z_]\w*#', Name.Variable),
+ (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[a-zA-Z_]\w*', Name.Attribute)
+ ]
+ }
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
- rv += 0.15
+ rv += 0.15
if re.search(r'\{include\s+file=.*?\}', text):
- rv += 0.15
+ rv += 0.15
if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
- rv += 0.15
+ rv += 0.15
if re.search(r'\{\$.*?\}', text):
- rv += 0.01
- return rv
-
-
-class VelocityLexer(RegexLexer):
- """
- Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
-
- Just highlights velocity directives and variable references, other
- data is left untouched by the lexer.
- """
-
- name = 'Velocity'
- aliases = ['velocity']
- filenames = ['*.vm', '*.fhtml']
-
- flags = re.MULTILINE | re.DOTALL
-
- identifier = r'[a-zA-Z_]\w*'
-
- tokens = {
- 'root': [
- (r'[^{#$]+', Other),
- (r'(#)(\*.*?\*)(#)',
- bygroups(Comment.Preproc, Comment, Comment.Preproc)),
- (r'(##)(.*?$)',
- bygroups(Comment.Preproc, Comment)),
- (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
- bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
- 'directiveparams'),
- (r'(#\{?)(' + identifier + r')(\}|\b)',
- bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
+ rv += 0.01
+ return rv
+
+
+class VelocityLexer(RegexLexer):
+ """
+ Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
+
+ Just highlights velocity directives and variable references, other
+ data is left untouched by the lexer.
+ """
+
+ name = 'Velocity'
+ aliases = ['velocity']
+ filenames = ['*.vm', '*.fhtml']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ identifier = r'[a-zA-Z_]\w*'
+
+ tokens = {
+ 'root': [
+ (r'[^{#$]+', Other),
+ (r'(#)(\*.*?\*)(#)',
+ bygroups(Comment.Preproc, Comment, Comment.Preproc)),
+ (r'(##)(.*?$)',
+ bygroups(Comment.Preproc, Comment)),
+ (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
+ bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
+ 'directiveparams'),
+ (r'(#\{?)(' + identifier + r')(\}|\b)',
+ bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
(r'\$!?\{?', Punctuation, 'variable')
- ],
- 'variable': [
- (identifier, Name.Variable),
- (r'\(', Punctuation, 'funcparams'),
- (r'(\.)(' + identifier + r')',
- bygroups(Punctuation, Name.Variable), '#push'),
- (r'\}', Punctuation, '#pop'),
- default('#pop')
- ],
- 'directiveparams': [
- (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
- Operator),
- (r'\[', Operator, 'rangeoperator'),
- (r'\b' + identifier + r'\b', Name.Function),
- include('funcparams')
- ],
- 'rangeoperator': [
- (r'\.\.', Operator),
- include('funcparams'),
- (r'\]', Operator, '#pop')
- ],
- 'funcparams': [
+ ],
+ 'variable': [
+ (identifier, Name.Variable),
+ (r'\(', Punctuation, 'funcparams'),
+ (r'(\.)(' + identifier + r')',
+ bygroups(Punctuation, Name.Variable), '#push'),
+ (r'\}', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'directiveparams': [
+ (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
+ Operator),
+ (r'\[', Operator, 'rangeoperator'),
+ (r'\b' + identifier + r'\b', Name.Function),
+ include('funcparams')
+ ],
+ 'rangeoperator': [
+ (r'\.\.', Operator),
+ include('funcparams'),
+ (r'\]', Operator, '#pop')
+ ],
+ 'funcparams': [
(r'\$!?\{?', Punctuation, 'variable'),
- (r'\s+', Text),
+ (r'\s+', Text),
(r'[,:]', Punctuation),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r"\b[0-9]+\b", Number),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r"\b[0-9]+\b", Number),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
- rv += 0.25
- if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
- rv += 0.15
- if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
- rv += 0.15
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ rv = 0.0
+ if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
+ rv += 0.25
+ if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
+ rv += 0.15
+ if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
+ rv += 0.15
if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
- r'(\.\w+(\([^)]*\))?)*\}?', text):
- rv += 0.01
- return rv
-
-
-class VelocityHtmlLexer(DelegatingLexer):
- """
- Subclass of the `VelocityLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- """
-
- name = 'HTML+Velocity'
- aliases = ['html+velocity']
- alias_filenames = ['*.html', '*.fhtml']
- mimetypes = ['text/html+velocity']
-
- def __init__(self, **options):
- super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
- **options)
-
-
-class VelocityXmlLexer(DelegatingLexer):
- """
- Subclass of the `VelocityLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- """
-
- name = 'XML+Velocity'
- aliases = ['xml+velocity']
- alias_filenames = ['*.xml', '*.vm']
- mimetypes = ['application/xml+velocity']
-
- def __init__(self, **options):
- super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
- **options)
-
- def analyse_text(text):
- rv = VelocityLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class DjangoLexer(RegexLexer):
- """
- Generic `django <http://www.djangoproject.com/documentation/templates/>`_
- and `jinja <http://wsgiarea.pocoo.org/jinja/>`_ template lexer.
-
- It just highlights django/jinja code between the preprocessor directives,
- other data is left untouched by the lexer.
- """
-
- name = 'Django/Jinja'
- aliases = ['django', 'jinja']
- mimetypes = ['application/x-django-templating', 'application/x-jinja']
-
- flags = re.M | re.S
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'\{\{', Comment.Preproc, 'var'),
- # jinja/django comments
- (r'\{[*#].*?[*#]\}', Comment),
- # django comments
- (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Comment, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # raw jinja blocks
- (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Text, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # filter blocks
- (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
- 'block'),
- (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword), 'block'),
- (r'\{', Other)
- ],
- 'varnames': [
- (r'(\|)(\s*)([a-zA-Z_]\w*)',
- bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
- (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
- r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
- Keyword),
- (r'(loop|block|super|forloop)\b', Name.Builtin),
- (r'[a-zA-Z_][\w-]*', Name.Variable),
- (r'\.\w+', Name.Variable),
- (r':?"(\\\\|\\"|[^"])*"', String.Double),
- (r":?'(\\\\|\\'|[^'])*'", String.Single),
+ r'(\.\w+(\([^)]*\))?)*\}?', text):
+ rv += 0.01
+ return rv
+
+
+class VelocityHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `VelocityLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+
+ """
+
+ name = 'HTML+Velocity'
+ aliases = ['html+velocity']
+ alias_filenames = ['*.html', '*.fhtml']
+ mimetypes = ['text/html+velocity']
+
+ def __init__(self, **options):
+ super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
+ **options)
+
+
+class VelocityXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `VelocityLexer` that highlights unlexed data
+ with the `XmlLexer`.
+
+ """
+
+ name = 'XML+Velocity'
+ aliases = ['xml+velocity']
+ alias_filenames = ['*.xml', '*.vm']
+ mimetypes = ['application/xml+velocity']
+
+ def __init__(self, **options):
+ super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
+ **options)
+
+ def analyse_text(text):
+ rv = VelocityLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class DjangoLexer(RegexLexer):
+ """
+ Generic `django <http://www.djangoproject.com/documentation/templates/>`_
+ and `jinja <http://wsgiarea.pocoo.org/jinja/>`_ template lexer.
+
+ It just highlights django/jinja code between the preprocessor directives,
+ other data is left untouched by the lexer.
+ """
+
+ name = 'Django/Jinja'
+ aliases = ['django', 'jinja']
+ mimetypes = ['application/x-django-templating', 'application/x-jinja']
+
+ flags = re.M | re.S
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+ (r'\{\{', Comment.Preproc, 'var'),
+ # jinja/django comments
+ (r'\{[*#].*?[*#]\}', Comment),
+ # django comments
+ (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Comment, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ # raw jinja blocks
+ (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Text, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ # filter blocks
+ (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
+ 'block'),
+ (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
+ bygroups(Comment.Preproc, Text, Keyword), 'block'),
+ (r'\{', Other)
+ ],
+ 'varnames': [
+ (r'(\|)(\s*)([a-zA-Z_]\w*)',
+ bygroups(Operator, Text, Name.Function)),
+ (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
+ (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
+ r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
+ Keyword),
+ (r'(loop|block|super|forloop)\b', Name.Builtin),
+ (r'[a-zA-Z_][\w-]*', Name.Variable),
+ (r'\.\w+', Name.Variable),
+ (r':?"(\\\\|\\"|[^"])*"', String.Double),
+ (r":?'(\\\\|\\'|[^'])*'", String.Single),
(r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ],
- 'var': [
- (r'\s+', Text),
- (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames')
- ],
- 'block': [
- (r'\s+', Text),
- (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames'),
- (r'.', Punctuation)
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'\{%\s*(block|extends)', text) is not None:
- rv += 0.4
- if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
- rv += 0.1
- if re.search(r'\{\{.*?\}\}', text) is not None:
- rv += 0.1
- return rv
-
-
-class MyghtyLexer(RegexLexer):
- """
- Generic `myghty templates`_ lexer. Code that isn't Myghty
- markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.6
-
- .. _myghty templates: http://www.myghty.org/
- """
-
- name = 'Myghty'
- aliases = ['myghty']
- filenames = ['*.myt', 'autodelegate']
- mimetypes = ['application/x-myghty']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ],
+ 'var': [
+ (r'\s+', Text),
+ (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames')
+ ],
+ 'block': [
+ (r'\s+', Text),
+ (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames'),
+ (r'.', Punctuation)
+ ]
+ }
+
+ def analyse_text(text):
+ rv = 0.0
+ if re.search(r'\{%\s*(block|extends)', text) is not None:
+ rv += 0.4
+ if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
+ rv += 0.1
+ if re.search(r'\{\{.*?\}\}', text) is not None:
+ rv += 0.1
+ return rv
+
+
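
DjangoLexer's analyse_text above adds 0.4 when it sees a {% block %} or {% extends %} tag and 0.1 each for an {% if %} tag and a {{ ... }} expression, which is what lets guess_lexer prefer it for template snippets. A sketch, assuming only the class defined above:

from pygments.lexers.templates import DjangoLexer

sample = "{% extends 'base.html' %}\n{% block body %}{{ user.name|title }}{% endblock %}\n"
# extends/block contributes 0.4 and the {{ ... }} expression another 0.1,
# so this should print 0.5.
print(DjangoLexer.analyse_text(sample))
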
+class MyghtyLexer(RegexLexer):
+ """
+ Generic `myghty templates`_ lexer. Code that isn't Myghty
+ markup is yielded as `Token.Other`.
+
+ .. versionadded:: 0.6
+
+ .. _myghty templates: http://www.myghty.org/
+ """
+
+ name = 'Myghty'
+ aliases = ['myghty']
+ filenames = ['*.myt', 'autodelegate']
+ mimetypes = ['application/x-myghty']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Text, Name.Function, Name.Tag,
- using(this), Name.Tag)),
+ bygroups(Name.Tag, Text, Name.Function, Name.Tag,
+ using(this), Name.Tag)),
(r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Name.Function, Name.Tag,
- using(PythonLexer), Name.Tag)),
- (r'(<&[^|])(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
+ bygroups(Name.Tag, Name.Function, Name.Tag,
+ using(PythonLexer), Name.Tag)),
+ (r'(<&[^|])(.*?)(,.*?)?(&>)',
+ bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
- (r'</&>', Name.Tag),
+ bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
+ (r'</&>', Name.Tag),
(r'(?s)(<%!?)(.*?)(%>)',
- bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
- (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
- (r'(?<=^)(%)([^\n]*)(\n|\Z)',
- bygroups(Name.Tag, using(PythonLexer), Other)),
- (r"""(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=[%#]) | # an eval or comment line
- (?=</?[%&]) | # a substitution or block or
- # call start or end
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )""", bygroups(Other, Operator)),
- ]
- }
-
-
-class MyghtyHtmlLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'HTML+Myghty'
- aliases = ['html+myghty']
- mimetypes = ['text/html+myghty']
-
- def __init__(self, **options):
- super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer,
- **options)
-
-
-class MyghtyXmlLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'XML+Myghty'
- aliases = ['xml+myghty']
- mimetypes = ['application/xml+myghty']
-
- def __init__(self, **options):
- super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer,
- **options)
-
-
-class MyghtyJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `JavascriptLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'JavaScript+Myghty'
- aliases = ['js+myghty', 'javascript+myghty']
- mimetypes = ['application/x-javascript+myghty',
- 'text/x-javascript+myghty',
- 'text/javascript+mygthy']
-
- def __init__(self, **options):
- super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer,
- MyghtyLexer, **options)
-
-
-class MyghtyCssLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `CssLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'CSS+Myghty'
- aliases = ['css+myghty']
- mimetypes = ['text/css+myghty']
-
- def __init__(self, **options):
- super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer,
- **options)
-
-
-class MasonLexer(RegexLexer):
- """
- Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
- Mason markup is HTML.
-
- .. _mason templates: http://www.masonhq.com/
-
- .. versionadded:: 1.4
- """
- name = 'Mason'
- aliases = ['mason']
- filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
- mimetypes = ['application/x-mason']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
+ bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
+ (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
+ (r'(?<=^)(%)([^\n]*)(\n|\Z)',
+ bygroups(Name.Tag, using(PythonLexer), Other)),
+ (r"""(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?<=\n)(?=[%#]) | # an eval or comment line
+ (?=</?[%&]) | # a substitution or block or
+ # call start or end
+ # - don't consume
+ (\\\n) | # an escaped newline
+ \Z # end of string
+ )""", bygroups(Other, Operator)),
+ ]
+ }
+
+
+class MyghtyHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'HTML+Myghty'
+ aliases = ['html+myghty']
+ mimetypes = ['text/html+myghty']
+
+ def __init__(self, **options):
+ super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer,
+ **options)
+
+
+class MyghtyXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `XmlLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'XML+Myghty'
+ aliases = ['xml+myghty']
+ mimetypes = ['application/xml+myghty']
+
+ def __init__(self, **options):
+ super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer,
+ **options)
+
+
+class MyghtyJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `JavascriptLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'JavaScript+Myghty'
+ aliases = ['js+myghty', 'javascript+myghty']
+ mimetypes = ['application/x-javascript+myghty',
+ 'text/x-javascript+myghty',
+ 'text/javascript+mygthy']
+
+ def __init__(self, **options):
+ super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer,
+ MyghtyLexer, **options)
+
+
+class MyghtyCssLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `CssLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'CSS+Myghty'
+ aliases = ['css+myghty']
+ mimetypes = ['text/css+myghty']
+
+ def __init__(self, **options):
+ super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer,
+ **options)
+
+
+class MasonLexer(RegexLexer):
+ """
+ Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
+ Mason markup is HTML.
+
+ .. _mason templates: http://www.masonhq.com/
+
+ .. versionadded:: 1.4
+ """
+ name = 'Mason'
+ aliases = ['mason']
+ filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
+ mimetypes = ['application/x-mason']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'(?s)(<%doc>)(.*?)(</%doc>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+ bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
(r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Text, Name.Function, Name.Tag,
- using(this), Name.Tag)),
+ bygroups(Name.Tag, Text, Name.Function, Name.Tag,
+ using(this), Name.Tag)),
(r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Name.Function, Name.Tag,
- using(PerlLexer), Name.Tag)),
+ bygroups(Name.Tag, Name.Function, Name.Tag,
+ using(PerlLexer), Name.Tag)),
(r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+ bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
- (r'</&>', Name.Tag),
+ bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+ (r'</&>', Name.Tag),
(r'(?s)(<%!?)(.*?)(%>)',
- bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
- (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
- (r'(?<=^)(%)([^\n]*)(\n|\Z)',
- bygroups(Name.Tag, using(PerlLexer), Other)),
- (r"""(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=[%#]) | # an eval or comment line
- (?=</?[%&]) | # a substitution or block or
- # call start or end
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )""", bygroups(using(HtmlLexer), Operator)),
- ]
- }
-
- def analyse_text(text):
- result = 0.0
- if re.search(r'</%(class|doc|init)%>', text) is not None:
- result = 1.0
- elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
- result = 0.11
- return result
-
-
-class MakoLexer(RegexLexer):
- """
- Generic `mako templates`_ lexer. Code that isn't Mako
- markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.7
-
- .. _mako templates: http://www.makotemplates.org/
- """
-
- name = 'Mako'
- aliases = ['mako']
- filenames = ['*.mao']
- mimetypes = ['application/x-mako']
-
- tokens = {
- 'root': [
- (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
- bygroups(Text, Comment.Preproc, Keyword, Other)),
- (r'(\s*)(%)([^\n]*)(\n|\Z)',
- bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
- (r'(\s*)(##[^\n]*)(\n|\Z)',
- bygroups(Text, Comment.Preproc, Other)),
- (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
- (r'(<%)([\w.:]+)',
- bygroups(Comment.Preproc, Name.Builtin), 'tag'),
- (r'(</%)([\w.:]+)(>)',
- bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
- (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
+ bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
+ (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
+ (r'(?<=^)(%)([^\n]*)(\n|\Z)',
+ bygroups(Name.Tag, using(PerlLexer), Other)),
+ (r"""(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?<=\n)(?=[%#]) | # an eval or comment line
+ (?=</?[%&]) | # a substitution or block or
+ # call start or end
+ # - don't consume
+ (\\\n) | # an escaped newline
+ \Z # end of string
+ )""", bygroups(using(HtmlLexer), Operator)),
+ ]
+ }
+
+ def analyse_text(text):
+ result = 0.0
+ if re.search(r'</%(class|doc|init)%>', text) is not None:
+ result = 1.0
+ elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
+ result = 0.11
+ return result
+
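analyse_text is the hook that Pygments' guess_lexer() uses to rank candidate lexers: it returns a score between 0.0 and 1.0, and the lexer with the highest score wins. A small sketch of the heuristic defined above; the sample string is illustrative:

    from pygments.lexers import MasonLexer, guess_lexer

    # '<& header &>' is a component call, so the second branch above fires
    # and analyse_text returns 0.11; guess_lexer() compares that score with
    # the scores reported by every other registered lexer.
    sample = "<html><body><& header &></body></html>"
    print(MasonLexer.analyse_text(sample))
    print(guess_lexer(sample).name)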
+
+class MakoLexer(RegexLexer):
+ """
+ Generic `mako templates`_ lexer. Code that isn't Mako
+ markup is yielded as `Token.Other`.
+
+ .. versionadded:: 0.7
+
+ .. _mako templates: http://www.makotemplates.org/
+ """
+
+ name = 'Mako'
+ aliases = ['mako']
+ filenames = ['*.mao']
+ mimetypes = ['application/x-mako']
+
+ tokens = {
+ 'root': [
+ (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
+ bygroups(Text, Comment.Preproc, Keyword, Other)),
+ (r'(\s*)(%)([^\n]*)(\n|\Z)',
+ bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
+ (r'(\s*)(##[^\n]*)(\n|\Z)',
+ bygroups(Text, Comment.Preproc, Other)),
+ (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
+ (r'(<%)([\w.:]+)',
+ bygroups(Comment.Preproc, Name.Builtin), 'tag'),
+ (r'(</%)([\w.:]+)(>)',
+ bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
+ (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
(r'(?s)(<%(?:!?))(.*?)(%>)',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(\$\{)(.*?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'''(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=%|\#\#) | # an eval or comment line
- (?=\#\*) | # multiline comment
- (?=</?%) | # a python block
- # call start or end
- (?=\$\{) | # a substitution
- (?<=\n)(?=\s*%) |
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )
- ''', bygroups(Other, Operator)),
- (r'\s+', Text),
- ],
- 'ondeftags': [
- (r'<%', Comment.Preproc),
- (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
- include('tag'),
- ],
- 'tag': [
- (r'((?:\w+)\s*=)(\s*)(".*?")',
- bygroups(Name.Attribute, Text, String)),
- (r'/?\s*>', Comment.Preproc, '#pop'),
- (r'\s+', Text),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
-
-class MakoHtmlLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'HTML+Mako'
- aliases = ['html+mako']
- mimetypes = ['text/html+mako']
-
- def __init__(self, **options):
- super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
- **options)
-
-
-class MakoXmlLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'XML+Mako'
- aliases = ['xml+mako']
- mimetypes = ['application/xml+mako']
-
- def __init__(self, **options):
- super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
- **options)
-
-
-class MakoJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `JavascriptLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'JavaScript+Mako'
- aliases = ['js+mako', 'javascript+mako']
- mimetypes = ['application/x-javascript+mako',
- 'text/x-javascript+mako',
- 'text/javascript+mako']
-
- def __init__(self, **options):
- super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
- MakoLexer, **options)
-
-
-class MakoCssLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `CssLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'CSS+Mako'
- aliases = ['css+mako']
- mimetypes = ['text/css+mako']
-
- def __init__(self, **options):
- super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
- **options)
-
-
-# Genshi and Cheetah lexers courtesy of Matt Good.
-
-class CheetahPythonLexer(Lexer):
- """
- Lexer for handling Cheetah's special $ tokens in Python syntax.
- """
-
- def get_tokens_unprocessed(self, text):
- pylexer = PythonLexer(**self.options)
- for pos, type_, value in pylexer.get_tokens_unprocessed(text):
- if type_ == Token.Error and value == '$':
- type_ = Comment.Preproc
- yield pos, type_, value
-
-
-class CheetahLexer(RegexLexer):
- """
- Generic `cheetah templates`_ lexer. Code that isn't Cheetah
- markup is yielded as `Token.Other`. This also works for
- `spitfire templates`_ which use the same syntax.
-
- .. _cheetah templates: http://www.cheetahtemplate.org/
- .. _spitfire templates: http://code.google.com/p/spitfire/
- """
-
- name = 'Cheetah'
- aliases = ['cheetah', 'spitfire']
- filenames = ['*.tmpl', '*.spt']
- mimetypes = ['application/x-cheetah', 'application/x-spitfire']
-
- tokens = {
- 'root': [
- (r'(##[^\n]*)$',
- (bygroups(Comment))),
- (r'#[*](.|\n)*?[*]#', Comment),
- (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
- (r'#slurp$', Comment.Preproc),
- (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
- (bygroups(Comment.Preproc, using(CheetahPythonLexer),
- Comment.Preproc))),
- # TODO support other Python syntax like $foo['bar']
- (r'(\$)([a-zA-Z_][\w.]*\w)',
- bygroups(Comment.Preproc, using(CheetahPythonLexer))),
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'''(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?<=\n)(?=%|\#\#) | # an eval or comment line
+ (?=\#\*) | # multiline comment
+ (?=</?%) | # a python block
+ # call start or end
+ (?=\$\{) | # a substitution
+ (?<=\n)(?=\s*%) |
+ # - don't consume
+ (\\\n) | # an escaped newline
+ \Z # end of string
+ )
+ ''', bygroups(Other, Operator)),
+ (r'\s+', Text),
+ ],
+ 'ondeftags': [
+ (r'<%', Comment.Preproc),
+ (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
+ include('tag'),
+ ],
+ 'tag': [
+ (r'((?:\w+)\s*=)(\s*)(".*?")',
+ bygroups(Name.Attribute, Text, String)),
+ (r'/?\s*>', Comment.Preproc, '#pop'),
+ (r'\s+', Text),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
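MakoLexer itself only colours the template constructs ('%' control lines, '##' comments, <% ... %> blocks and ${...} expressions) and yields everything else as Token.Other; the HTML/XML/JavaScript/CSS variants below delegate those Other spans to the corresponding language lexer. A short sketch with an illustrative template:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import MakoLexer

    # '%' control lines and '${...}' expressions are highlighted through
    # PythonLexer; the literal text between them stays Token.Other.
    template = "% for item in items:\n  * ${item}\n% endfor\n"
    print(highlight(template, MakoLexer(), TerminalFormatter()))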
+
+class MakoHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'HTML+Mako'
+ aliases = ['html+mako']
+ mimetypes = ['text/html+mako']
+
+ def __init__(self, **options):
+ super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
+ **options)
+
+
+class MakoXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `XmlLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'XML+Mako'
+ aliases = ['xml+mako']
+ mimetypes = ['application/xml+mako']
+
+ def __init__(self, **options):
+ super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
+ **options)
+
+
+class MakoJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `JavascriptLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'JavaScript+Mako'
+ aliases = ['js+mako', 'javascript+mako']
+ mimetypes = ['application/x-javascript+mako',
+ 'text/x-javascript+mako',
+ 'text/javascript+mako']
+
+ def __init__(self, **options):
+ super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
+ MakoLexer, **options)
+
+
+class MakoCssLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `CssLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'CSS+Mako'
+ aliases = ['css+mako']
+ mimetypes = ['text/css+mako']
+
+ def __init__(self, **options):
+ super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
+ **options)
+
+
+# Genshi and Cheetah lexers courtesy of Matt Good.
+
+class CheetahPythonLexer(Lexer):
+ """
+ Lexer for handling Cheetah's special $ tokens in Python syntax.
+ """
+
+ def get_tokens_unprocessed(self, text):
+ pylexer = PythonLexer(**self.options)
+ for pos, type_, value in pylexer.get_tokens_unprocessed(text):
+ if type_ == Token.Error and value == '$':
+ type_ = Comment.Preproc
+ yield pos, type_, value
+
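CheetahPythonLexer is a helper for the Cheetah rules below: it runs the text through PythonLexer and rewrites the '$' characters, which plain Python reports as Token.Error, to Comment.Preproc, so Cheetah placeholders inside expressions are not flagged as errors. A quick way to see the effect (the helper has no aliases, so it is imported from the module directly; the input is illustrative):

    from pygments.lexers.templates import CheetahPythonLexer

    # '$' is not valid Python, so PythonLexer would emit it as Token.Error;
    # this subclass downgrades exactly those tokens to Comment.Preproc.
    for index, tokentype, value in CheetahPythonLexer().get_tokens_unprocessed("$name.upper()"):
        print(index, tokentype, repr(value))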
+
+class CheetahLexer(RegexLexer):
+ """
+ Generic `cheetah templates`_ lexer. Code that isn't Cheetah
+ markup is yielded as `Token.Other`. This also works for
+ `spitfire templates`_ which use the same syntax.
+
+ .. _cheetah templates: http://www.cheetahtemplate.org/
+ .. _spitfire templates: http://code.google.com/p/spitfire/
+ """
+
+ name = 'Cheetah'
+ aliases = ['cheetah', 'spitfire']
+ filenames = ['*.tmpl', '*.spt']
+ mimetypes = ['application/x-cheetah', 'application/x-spitfire']
+
+ tokens = {
+ 'root': [
+ (r'(##[^\n]*)$',
+ (bygroups(Comment))),
+ (r'#[*](.|\n)*?[*]#', Comment),
+ (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
+ (r'#slurp$', Comment.Preproc),
+ (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
+ (bygroups(Comment.Preproc, using(CheetahPythonLexer),
+ Comment.Preproc))),
+ # TODO support other Python syntax like $foo['bar']
+ (r'(\$)([a-zA-Z_][\w.]*\w)',
+ bygroups(Comment.Preproc, using(CheetahPythonLexer))),
(r'(?s)(\$\{!?)(.*?)(\})',
- bygroups(Comment.Preproc, using(CheetahPythonLexer),
- Comment.Preproc)),
- (r'''(?sx)
- (.+?) # anything, followed by:
- (?:
- (?=\#[#a-zA-Z]*) | # an eval comment
- (?=\$[a-zA-Z_{]) | # a substitution
- \Z # end of string
- )
- ''', Other),
- (r'\s+', Text),
- ],
- }
-
-
-class CheetahHtmlLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `HtmlLexer`.
- """
-
- name = 'HTML+Cheetah'
- aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
- mimetypes = ['text/html+cheetah', 'text/html+spitfire']
-
- def __init__(self, **options):
- super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
- **options)
-
-
-class CheetahXmlLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `XmlLexer`.
- """
-
- name = 'XML+Cheetah'
- aliases = ['xml+cheetah', 'xml+spitfire']
- mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
-
- def __init__(self, **options):
- super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
- **options)
-
-
-class CheetahJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `JavascriptLexer`.
- """
-
- name = 'JavaScript+Cheetah'
- aliases = ['js+cheetah', 'javascript+cheetah',
- 'js+spitfire', 'javascript+spitfire']
- mimetypes = ['application/x-javascript+cheetah',
- 'text/x-javascript+cheetah',
- 'text/javascript+cheetah',
- 'application/x-javascript+spitfire',
- 'text/x-javascript+spitfire',
- 'text/javascript+spitfire']
-
- def __init__(self, **options):
- super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
- CheetahLexer, **options)
-
-
-class GenshiTextLexer(RegexLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
- templates.
- """
-
- name = 'Genshi Text'
- aliases = ['genshitext']
- mimetypes = ['application/x-genshi-text', 'text/x-genshi']
-
- tokens = {
- 'root': [
- (r'[^#$\s]+', Other),
- (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
- (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
- include('variable'),
- (r'[#$\s]', Other),
- ],
- 'directive': [
- (r'\n', Text, '#pop'),
- (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
- (r'(choose|when|with)([^\S\n]+)(.*)',
- bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
- (r'(choose|otherwise)\b', Keyword, '#pop'),
- (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
- ],
- 'variable': [
- (r'(?<!\$)(\$\{)(.+?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
- Name.Variable),
- ]
- }
-
-
-class GenshiMarkupLexer(RegexLexer):
- """
- Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
- `GenshiLexer`.
- """
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^<$]+', Other),
- (r'(<\?python)(.*?)(\?>)',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- # yield style and script blocks as Other
- (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
- (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
+ bygroups(Comment.Preproc, using(CheetahPythonLexer),
+ Comment.Preproc)),
+ (r'''(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?=\#[#a-zA-Z]*) | # an eval comment
+ (?=\$[a-zA-Z_{]) | # a substitution
+ \Z # end of string
+ )
+ ''', Other),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class CheetahHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+ """
+
+ name = 'HTML+Cheetah'
+ aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
+ mimetypes = ['text/html+cheetah', 'text/html+spitfire']
+
+ def __init__(self, **options):
+ super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
+ **options)
+
+
+class CheetahXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `XmlLexer`.
+ """
+
+ name = 'XML+Cheetah'
+ aliases = ['xml+cheetah', 'xml+spitfire']
+ mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
+
+ def __init__(self, **options):
+ super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
+ **options)
+
+
+class CheetahJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Cheetah'
+ aliases = ['js+cheetah', 'javascript+cheetah',
+ 'js+spitfire', 'javascript+spitfire']
+ mimetypes = ['application/x-javascript+cheetah',
+ 'text/x-javascript+cheetah',
+ 'text/javascript+cheetah',
+ 'application/x-javascript+spitfire',
+ 'text/x-javascript+spitfire',
+ 'text/javascript+spitfire']
+
+ def __init__(self, **options):
+ super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
+ CheetahLexer, **options)
+
+
+class GenshiTextLexer(RegexLexer):
+ """
+ A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
+ templates.
+ """
+
+ name = 'Genshi Text'
+ aliases = ['genshitext']
+ mimetypes = ['application/x-genshi-text', 'text/x-genshi']
+
+ tokens = {
+ 'root': [
+ (r'[^#$\s]+', Other),
+ (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
+ (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
+ include('variable'),
+ (r'[#$\s]', Other),
+ ],
+ 'directive': [
+ (r'\n', Text, '#pop'),
+ (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
+ (r'(choose|when|with)([^\S\n]+)(.*)',
+ bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
+ (r'(choose|otherwise)\b', Keyword, '#pop'),
+ (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
+ ],
+ 'variable': [
+ (r'(?<!\$)(\$\{)(.+?)(\})',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
+ Name.Variable),
+ ]
+ }
+
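In GenshiTextLexer, '#' directives switch to the 'directive' state (where the expression part is lexed as Python) and '${...}'/'$name' substitutions come from the shared 'variable' state; plain text is emitted as Token.Other. A small sketch with an illustrative template:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import GenshiTextLexer

    # '#if'/'#end' directives and '${...}' substitutions receive Python
    # highlighting; the surrounding text stays Token.Other.
    template = "#if greeting\nHello ${user.name}!\n#end\n"
    print(highlight(template, GenshiTextLexer(), TerminalFormatter()))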
+
+class GenshiMarkupLexer(RegexLexer):
+ """
+ Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
+ `GenshiLexer`.
+ """
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'[^<$]+', Other),
+ (r'(<\?python)(.*?)(\?>)',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ # yield style and script blocks as Other
+ (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
+ (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
(r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
- include('variable'),
- (r'[<$]', Other),
- ],
- 'pytag': [
- (r'\s+', Text),
- (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'pyattr': [
- ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
- ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
- (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- ('"', String, 'attr-dstring'),
- ("'", String, 'attr-sstring'),
- (r'[^\s>]*', String, '#pop')
- ],
- 'attr-dstring': [
- ('"', String, '#pop'),
- include('strings'),
- ("'", String)
- ],
- 'attr-sstring': [
- ("'", String, '#pop'),
- include('strings'),
- ("'", String)
- ],
- 'strings': [
- ('[^"\'$]+', String),
- include('variable')
- ],
- 'variable': [
- (r'(?<!\$)(\$\{)(.+?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
- Name.Variable),
- ]
- }
-
-
-class HtmlGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
- `kid <http://kid-templating.org/>`_ kid HTML templates.
- """
-
- name = 'HTML+Genshi'
- aliases = ['html+genshi', 'html+kid']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+genshi']
-
- def __init__(self, **options):
- super(HtmlGenshiLexer, self).__init__(HtmlLexer, GenshiMarkupLexer,
- **options)
-
- def analyse_text(text):
- rv = 0.0
+ include('variable'),
+ (r'[<$]', Other),
+ ],
+ 'pytag': [
+ (r'\s+', Text),
+ (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'pyattr': [
+ ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
+ ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
+ (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
+ ('"', String, 'attr-dstring'),
+ ("'", String, 'attr-sstring'),
+ (r'[^\s>]*', String, '#pop')
+ ],
+ 'attr-dstring': [
+ ('"', String, '#pop'),
+ include('strings'),
+ ("'", String)
+ ],
+ 'attr-sstring': [
+ ("'", String, '#pop'),
+ include('strings'),
+ ("'", String)
+ ],
+ 'strings': [
+ ('[^"\'$]+', String),
+ include('variable')
+ ],
+ 'variable': [
+ (r'(?<!\$)(\$\{)(.+?)(\})',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
+ Name.Variable),
+ ]
+ }
+
+
+class HtmlGenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
+ `kid <http://kid-templating.org/>`_ HTML templates.
+ """
+
+ name = 'HTML+Genshi'
+ aliases = ['html+genshi', 'html+kid']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml']
+ mimetypes = ['text/html+genshi']
+
+ def __init__(self, **options):
+ super(HtmlGenshiLexer, self).__init__(HtmlLexer, GenshiMarkupLexer,
+ **options)
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'\$\{.*?\}', text) is not None:
- rv += 0.2
+ rv += 0.2
if re.search(r'py:(.*?)=["\']', text) is not None:
- rv += 0.2
- return rv + HtmlLexer.analyse_text(text) - 0.01
-
-
-class GenshiLexer(DelegatingLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
- `kid <http://kid-templating.org/>`_ kid XML templates.
- """
-
- name = 'Genshi'
- aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
- filenames = ['*.kid']
- alias_filenames = ['*.xml']
- mimetypes = ['application/x-genshi', 'application/x-kid']
-
- def __init__(self, **options):
- super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer,
- **options)
-
- def analyse_text(text):
- rv = 0.0
+ rv += 0.2
+ return rv + HtmlLexer.analyse_text(text) - 0.01
+
+
+class GenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
+ `kid <http://kid-templating.org/>`_ XML templates.
+ """
+
+ name = 'Genshi'
+ aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
+ filenames = ['*.kid']
+ alias_filenames = ['*.xml']
+ mimetypes = ['application/x-genshi', 'application/x-kid']
+
+ def __init__(self, **options):
+ super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer,
+ **options)
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'\$\{.*?\}', text) is not None:
- rv += 0.2
+ rv += 0.2
if re.search(r'py:(.*?)=["\']', text) is not None:
- rv += 0.2
- return rv + XmlLexer.analyse_text(text) - 0.01
-
-
-class JavascriptGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights javascript code in genshi text templates.
- """
-
- name = 'JavaScript+Genshi Text'
- aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
- 'javascript+genshi']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+genshi',
- 'text/x-javascript+genshi',
- 'text/javascript+genshi']
-
- def __init__(self, **options):
- super(JavascriptGenshiLexer, self).__init__(JavascriptLexer,
- GenshiTextLexer,
- **options)
-
- def analyse_text(text):
- return GenshiLexer.analyse_text(text) - 0.05
-
-
-class CssGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights CSS definitions in genshi text templates.
- """
-
- name = 'CSS+Genshi Text'
- aliases = ['css+genshitext', 'css+genshi']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+genshi']
-
- def __init__(self, **options):
- super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer,
- **options)
-
- def analyse_text(text):
- return GenshiLexer.analyse_text(text) - 0.05
-
-
-class RhtmlLexer(DelegatingLexer):
- """
- Subclass of the ERB lexer that highlights the unlexed data with the
- html lexer.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'RHTML'
- aliases = ['rhtml', 'html+erb', 'html+ruby']
- filenames = ['*.rhtml']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+ruby']
-
- def __init__(self, **options):
- super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)
-
- def analyse_text(text):
- rv = ErbLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- # one more than the XmlErbLexer returns
- rv += 0.5
- return rv
-
-
-class XmlErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights data outside preprocessor
- directives with the `XmlLexer`.
- """
-
- name = 'XML+Ruby'
- aliases = ['xml+erb', 'xml+ruby']
- alias_filenames = ['*.xml']
- mimetypes = ['application/xml+ruby']
-
- def __init__(self, **options):
- super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)
-
- def analyse_text(text):
- rv = ErbLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
- """
-
- name = 'CSS+Ruby'
- aliases = ['css+erb', 'css+ruby']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+ruby']
-
- def __init__(self, **options):
- super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)
-
- def analyse_text(text):
- return ErbLexer.analyse_text(text) - 0.05
-
-
-class JavascriptErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Ruby'
- aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+ruby',
- 'text/x-javascript+ruby',
- 'text/javascript+ruby']
-
- def __init__(self, **options):
- super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
- **options)
-
- def analyse_text(text):
- return ErbLexer.analyse_text(text) - 0.05
-
-
-class HtmlPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+PHP'
- aliases = ['html+php']
- filenames = ['*.phtml']
- alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
- '*.php[345]']
- mimetypes = ['application/x-php',
- 'application/x-httpd-php', 'application/x-httpd-php3',
- 'application/x-httpd-php4', 'application/x-httpd-php5']
-
- def __init__(self, **options):
- super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
-
- def analyse_text(text):
- rv = PhpLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
- """
-
- name = 'XML+PHP'
- aliases = ['xml+php']
- alias_filenames = ['*.xml', '*.php', '*.php[345]']
- mimetypes = ['application/xml+php']
-
- def __init__(self, **options):
- super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)
-
- def analyse_text(text):
- rv = PhpLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
- """
-
- name = 'CSS+PHP'
- aliases = ['css+php']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+php']
-
- def __init__(self, **options):
- super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)
-
- def analyse_text(text):
- return PhpLexer.analyse_text(text) - 0.05
-
-
-class JavascriptPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` which highlights unmatched data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+PHP'
- aliases = ['js+php', 'javascript+php']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+php',
- 'text/x-javascript+php',
- 'text/javascript+php']
-
- def __init__(self, **options):
- super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
- **options)
-
- def analyse_text(text):
- return PhpLexer.analyse_text(text)
-
-
-class HtmlSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+Smarty'
- aliases = ['html+smarty']
- alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
- mimetypes = ['text/html+smarty']
-
- def __init__(self, **options):
- super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- rv = SmartyLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `XmlLexer`.
- """
-
- name = 'XML+Smarty'
- aliases = ['xml+smarty']
- alias_filenames = ['*.xml', '*.tpl']
- mimetypes = ['application/xml+smarty']
-
- def __init__(self, **options):
- super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- rv = SmartyLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `CssLexer`.
- """
-
- name = 'CSS+Smarty'
- aliases = ['css+smarty']
- alias_filenames = ['*.css', '*.tpl']
- mimetypes = ['text/css+smarty']
-
- def __init__(self, **options):
- super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- return SmartyLexer.analyse_text(text) - 0.05
-
-
-class JavascriptSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Smarty'
- aliases = ['js+smarty', 'javascript+smarty']
- alias_filenames = ['*.js', '*.tpl']
- mimetypes = ['application/x-javascript+smarty',
- 'text/x-javascript+smarty',
- 'text/javascript+smarty']
-
- def __init__(self, **options):
- super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
- **options)
-
- def analyse_text(text):
- return SmartyLexer.analyse_text(text) - 0.05
-
-
-class HtmlDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+Django/Jinja'
- aliases = ['html+django', 'html+jinja', 'htmldjango']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+django', 'text/html+jinja']
-
- def __init__(self, **options):
- super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- rv = DjangoLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `XmlLexer`.
- """
-
- name = 'XML+Django/Jinja'
- aliases = ['xml+django', 'xml+jinja']
- alias_filenames = ['*.xml']
- mimetypes = ['application/xml+django', 'application/xml+jinja']
-
- def __init__(self, **options):
- super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- rv = DjangoLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `CssLexer`.
- """
-
- name = 'CSS+Django/Jinja'
- aliases = ['css+django', 'css+jinja']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+django', 'text/css+jinja']
-
- def __init__(self, **options):
- super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JavascriptDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Django/Jinja'
- aliases = ['js+django', 'javascript+django',
- 'js+jinja', 'javascript+jinja']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+django',
- 'application/x-javascript+jinja',
- 'text/x-javascript+django',
- 'text/x-javascript+jinja',
- 'text/javascript+django',
- 'text/javascript+jinja']
-
- def __init__(self, **options):
- super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
- **options)
-
- def analyse_text(text):
- return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JspRootLexer(RegexLexer):
- """
- Base for the `JspLexer`. Yields `Token.Other` for area outside of
- JSP tags.
-
- .. versionadded:: 0.7
- """
-
- tokens = {
- 'root': [
- (r'<%\S?', Keyword, 'sec'),
- # FIXME: I want to make these keywords but still parse attributes.
- (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
- Keyword),
- (r'[^<]+', Other),
- (r'<', Other),
- ],
- 'sec': [
- (r'%>', Keyword, '#pop'),
- # note: '\w\W' != '.' without DOTALL.
- (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
- ],
- }
-
-
-class JspLexer(DelegatingLexer):
- """
- Lexer for Java Server Pages.
-
- .. versionadded:: 0.7
- """
- name = 'Java Server Page'
- aliases = ['jsp']
- filenames = ['*.jsp']
- mimetypes = ['application/x-jsp']
-
- def __init__(self, **options):
- super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
-
- def analyse_text(text):
- rv = JavaLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class EvoqueLexer(RegexLexer):
- """
- For files using the Evoque templating system.
-
- .. versionadded:: 1.1
- """
- name = 'Evoque'
- aliases = ['evoque']
- filenames = ['*.evoque']
- mimetypes = ['application/x-evoque']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^#$]+', Other),
- (r'#\[', Comment.Multiline, 'comment'),
- (r'\$\$', Other),
- # svn keywords
- (r'\$\w+:[^$\n]*\$', Comment.Multiline),
- # directives: begin, end
- (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- String, Punctuation)),
- # directives: evoque, overlay
- # see doc for handling first name arg: /directives/evoque/
- # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
- # should be using(PythonLexer), not passed out as String
- (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
- r'(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- String, using(PythonLexer), Punctuation)),
- # directives: if, for, prefer, test
- (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- using(PythonLexer), Punctuation)),
- # directive clauses (no {} expression)
- (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
- # expressions
- (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
- bygroups(Punctuation, None, using(PythonLexer),
- Name.Builtin, None, None, Punctuation)),
- (r'#', Other),
- ],
- 'comment': [
- (r'[^\]#]', Comment.Multiline),
- (r'#\[', Comment.Multiline, '#push'),
- (r'\]#', Comment.Multiline, '#pop'),
- (r'[\]#]', Comment.Multiline)
- ],
- }
-
-
-class EvoqueHtmlLexer(DelegatingLexer):
- """
- Subclass of the `EvoqueLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 1.1
- """
- name = 'HTML+Evoque'
- aliases = ['html+evoque']
- filenames = ['*.html']
- mimetypes = ['text/html+evoque']
-
- def __init__(self, **options):
- super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
- **options)
-
-
-class EvoqueXmlLexer(DelegatingLexer):
- """
- Subclass of the `EvoqueLexer` that highlights unlexed data with the
- `XmlLexer`.
-
- .. versionadded:: 1.1
- """
- name = 'XML+Evoque'
- aliases = ['xml+evoque']
- filenames = ['*.xml']
- mimetypes = ['application/xml+evoque']
-
- def __init__(self, **options):
- super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
- **options)
-
-
-class ColdfusionLexer(RegexLexer):
- """
- Coldfusion statements
- """
- name = 'cfstatement'
- aliases = ['cfs']
- filenames = []
- mimetypes = []
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'//.*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
- (r'\+\+|--', Operator),
- (r'[-+*/^&=!]', Operator),
- (r'<=|>=|<|>|==', Operator),
- (r'mod\b', Operator),
- (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
- (r'\|\||&&', Operator),
- (r'\?', Operator),
- (r'"', String.Double, 'string'),
- # There is a special rule for allowing html in single quoted
- # strings, evidently.
- (r"'.*?'", String.Single),
- (r'\d+', Number),
- (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
- r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
- r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(application|session|client|cookie|super|this|variables|arguments)\b',
- Name.Constant),
- (r'([a-z_$][\w.]*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-z_$][\w.]*', Name.Variable),
- (r'[()\[\]{};:,.\\]', Punctuation),
- (r'\s+', Text),
- ],
- 'string': [
- (r'""', String.Double),
- (r'#.+?#', String.Interp),
- (r'[^"#]+', String.Double),
- (r'#', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- }
-
-
-class ColdfusionMarkupLexer(RegexLexer):
- """
- Coldfusion markup only
- """
- name = 'Coldfusion'
- aliases = ['cf']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'[^<]+', Other),
- include('tags'),
- (r'<[^<>]*', Other),
- ],
- 'tags': [
- (r'<!---', Comment.Multiline, 'cfcomment'),
- (r'(?s)<!--.*?-->', Comment),
- (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
- (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
- bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
- # negative lookbehind is for strings with embedded >
- (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
- r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
- r'mailpart|mail|header|content|zip|image|lock|argument|try|'
- r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
- bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
- ],
- 'cfoutput': [
- (r'[^#<]+', Other),
- (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
- Punctuation)),
- # (r'<cfoutput.*?>', Name.Builtin, '#push'),
- (r'</cfoutput.*?>', Name.Builtin, '#pop'),
- include('tags'),
- (r'(?s)<[^<>]*', Other),
- (r'#', Other),
- ],
- 'cfcomment': [
- (r'<!---', Comment.Multiline, '#push'),
- (r'--->', Comment.Multiline, '#pop'),
- (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
- ],
- }
-
-
-class ColdfusionHtmlLexer(DelegatingLexer):
- """
- Coldfusion markup in html
- """
- name = 'Coldfusion HTML'
- aliases = ['cfm']
- filenames = ['*.cfm', '*.cfml']
- mimetypes = ['application/x-coldfusion']
-
- def __init__(self, **options):
- super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
- **options)
-
-
-class ColdfusionCFCLexer(DelegatingLexer):
- """
- Coldfusion markup/script components
-
- .. versionadded:: 2.0
- """
- name = 'Coldfusion CFC'
- aliases = ['cfc']
- filenames = ['*.cfc']
- mimetypes = []
-
- def __init__(self, **options):
- super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer, ColdfusionLexer,
- **options)
-
-
-class SspLexer(DelegatingLexer):
- """
- Lexer for Scalate Server Pages.
-
- .. versionadded:: 1.4
- """
- name = 'Scalate Server Page'
- aliases = ['ssp']
- filenames = ['*.ssp']
- mimetypes = ['application/x-ssp']
-
- def __init__(self, **options):
- super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
+ rv += 0.2
+ return rv + XmlLexer.analyse_text(text) - 0.01
+
+
+class JavascriptGenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights javascript code in genshi text templates.
+ """
+
+ name = 'JavaScript+Genshi Text'
+ aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
+ 'javascript+genshi']
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+genshi',
+ 'text/x-javascript+genshi',
+ 'text/javascript+genshi']
+
+ def __init__(self, **options):
+ super(JavascriptGenshiLexer, self).__init__(JavascriptLexer,
+ GenshiTextLexer,
+ **options)
+
+ def analyse_text(text):
+ return GenshiLexer.analyse_text(text) - 0.05
+
+
+class CssGenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights CSS definitions in genshi text templates.
+ """
+
+ name = 'CSS+Genshi Text'
+ aliases = ['css+genshitext', 'css+genshi']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+genshi']
+
+ def __init__(self, **options):
+ super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer,
+ **options)
+
+ def analyse_text(text):
+ return GenshiLexer.analyse_text(text) - 0.05
+
+
+class RhtmlLexer(DelegatingLexer):
+ """
+ Subclass of the ERB lexer that highlights the unlexed data with the
+ html lexer.
+
+ Nested JavaScript and CSS are highlighted too.
+ """
+
+ name = 'RHTML'
+ aliases = ['rhtml', 'html+erb', 'html+ruby']
+ filenames = ['*.rhtml']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml']
+ mimetypes = ['text/html+ruby']
+
+ def __init__(self, **options):
+ super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)
+
+ def analyse_text(text):
+ rv = ErbLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ # one more than the XmlErbLexer returns
+ rv += 0.5
+ return rv
+
+
+class XmlErbLexer(DelegatingLexer):
+ """
+ Subclass of `ErbLexer` which highlights data outside preprocessor
+ directives with the `XmlLexer`.
+ """
+
+ name = 'XML+Ruby'
+ aliases = ['xml+erb', 'xml+ruby']
+ alias_filenames = ['*.xml']
+ mimetypes = ['application/xml+ruby']
+
+ def __init__(self, **options):
+ super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)
+
+ def analyse_text(text):
+ rv = ErbLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssErbLexer(DelegatingLexer):
+ """
+ Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
+ """
+
+ name = 'CSS+Ruby'
+ aliases = ['css+erb', 'css+ruby']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+ruby']
+
+ def __init__(self, **options):
+ super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)
+
+ def analyse_text(text):
+ return ErbLexer.analyse_text(text) - 0.05
+
+
+class JavascriptErbLexer(DelegatingLexer):
+ """
+ Subclass of `ErbLexer` which highlights unlexed data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Ruby'
+ aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+ruby',
+ 'text/x-javascript+ruby',
+ 'text/javascript+ruby']
+
+ def __init__(self, **options):
+ super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
+ **options)
+
+ def analyse_text(text):
+ return ErbLexer.analyse_text(text) - 0.05
+
+
+class HtmlPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
+
+ Nested JavaScript and CSS are highlighted too.
+ """
+
+ name = 'HTML+PHP'
+ aliases = ['html+php']
+ filenames = ['*.phtml']
+ alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
+ '*.php[345]']
+ mimetypes = ['application/x-php',
+ 'application/x-httpd-php', 'application/x-httpd-php3',
+ 'application/x-httpd-php4', 'application/x-httpd-php5']
+
+ def __init__(self, **options):
+ super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
+
+ def analyse_text(text):
+ rv = PhpLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ rv += 0.5
+ return rv
+
+
+class XmlPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
+ """
+
+ name = 'XML+PHP'
+ aliases = ['xml+php']
+ alias_filenames = ['*.xml', '*.php', '*.php[345]']
+ mimetypes = ['application/xml+php']
+
+ def __init__(self, **options):
+ super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)
+
+ def analyse_text(text):
+ rv = PhpLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
+ """
+
+ name = 'CSS+PHP'
+ aliases = ['css+php']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+php']
+
+ def __init__(self, **options):
+ super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)
+
+ def analyse_text(text):
+ return PhpLexer.analyse_text(text) - 0.05
+
+
+class JavascriptPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` which highlights unmatched data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+PHP'
+ aliases = ['js+php', 'javascript+php']
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+php',
+ 'text/x-javascript+php',
+ 'text/javascript+php']
+
+ def __init__(self, **options):
+ super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
+ **options)
+
+ def analyse_text(text):
+ return PhpLexer.analyse_text(text)
+
+
+class HtmlSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ Nested JavaScript and CSS are highlighted too.
+ """
+
+ name = 'HTML+Smarty'
+ aliases = ['html+smarty']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
+ mimetypes = ['text/html+smarty']
+
+ def __init__(self, **options):
+ super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)
+
+ def analyse_text(text):
+ rv = SmartyLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ rv += 0.5
+ return rv
+
+
+class XmlSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `XmlLexer`.
+ """
+
+ name = 'XML+Smarty'
+ aliases = ['xml+smarty']
+ alias_filenames = ['*.xml', '*.tpl']
+ mimetypes = ['application/xml+smarty']
+
+ def __init__(self, **options):
+ super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)
+
+ def analyse_text(text):
+ rv = SmartyLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `CssLexer`.
+ """
+
+ name = 'CSS+Smarty'
+ aliases = ['css+smarty']
+ alias_filenames = ['*.css', '*.tpl']
+ mimetypes = ['text/css+smarty']
+
+ def __init__(self, **options):
+ super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)
+
+ def analyse_text(text):
+ return SmartyLexer.analyse_text(text) - 0.05
+
+
+class JavascriptSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Smarty'
+ aliases = ['js+smarty', 'javascript+smarty']
+ alias_filenames = ['*.js', '*.tpl']
+ mimetypes = ['application/x-javascript+smarty',
+ 'text/x-javascript+smarty',
+ 'text/javascript+smarty']
+
+ def __init__(self, **options):
+ super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
+ **options)
+
+ def analyse_text(text):
+ return SmartyLexer.analyse_text(text) - 0.05
+
+
+class HtmlDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ Nested JavaScript and CSS are highlighted too.
+ """
+
+ name = 'HTML+Django/Jinja'
+ aliases = ['html+django', 'html+jinja', 'htmldjango']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml']
+ mimetypes = ['text/html+django', 'text/html+jinja']
+
+ def __init__(self, **options):
+ super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
+
+ def analyse_text(text):
+ rv = DjangoLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ rv += 0.5
+ return rv
+
+
+class XmlDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `XmlLexer`.
+ """
+
+ name = 'XML+Django/Jinja'
+ aliases = ['xml+django', 'xml+jinja']
+ alias_filenames = ['*.xml']
+ mimetypes = ['application/xml+django', 'application/xml+jinja']
+
+ def __init__(self, **options):
+ super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)
+
+ def analyse_text(text):
+ rv = DjangoLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `CssLexer`.
+ """
+
+ name = 'CSS+Django/Jinja'
+ aliases = ['css+django', 'css+jinja']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+django', 'text/css+jinja']
+
+ def __init__(self, **options):
+ super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)
+
+ def analyse_text(text):
+ return DjangoLexer.analyse_text(text) - 0.05
+
+
+class JavascriptDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Django/Jinja'
+ aliases = ['js+django', 'javascript+django',
+ 'js+jinja', 'javascript+jinja']
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+django',
+ 'application/x-javascript+jinja',
+ 'text/x-javascript+django',
+ 'text/x-javascript+jinja',
+ 'text/javascript+django',
+ 'text/javascript+jinja']
+
+ def __init__(self, **options):
+ super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
+ **options)
+
+ def analyse_text(text):
+ return DjangoLexer.analyse_text(text) - 0.05
+
+
+class JspRootLexer(RegexLexer):
+ """
+ Base for the `JspLexer`. Yields `Token.Other` for areas outside of
+ JSP tags.
+
+ .. versionadded:: 0.7
+ """
+
+ tokens = {
+ 'root': [
+ (r'<%\S?', Keyword, 'sec'),
+ # FIXME: I want to make these keywords but still parse attributes.
+ (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
+ Keyword),
+ (r'[^<]+', Other),
+ (r'<', Other),
+ ],
+ 'sec': [
+ (r'%>', Keyword, '#pop'),
+ # note: '\w\W' != '.' without DOTALL.
+ (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
+ ],
+ }
+
+
+class JspLexer(DelegatingLexer):
+ """
+ Lexer for Java Server Pages.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Java Server Page'
+ aliases = ['jsp']
+ filenames = ['*.jsp']
+ mimetypes = ['application/x-jsp']
+
+ def __init__(self, **options):
+ super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
+
+ def analyse_text(text):
+ rv = JavaLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ if '<%' in text and '%>' in text:
+ rv += 0.1
+ return rv
+
+
+class EvoqueLexer(RegexLexer):
+ """
+ For files using the Evoque templating system.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Evoque'
+ aliases = ['evoque']
+ filenames = ['*.evoque']
+ mimetypes = ['application/x-evoque']
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'[^#$]+', Other),
+ (r'#\[', Comment.Multiline, 'comment'),
+ (r'\$\$', Other),
+ # svn keywords
+ (r'\$\w+:[^$\n]*\$', Comment.Multiline),
+ # directives: begin, end
+ (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
+ bygroups(Punctuation, Name.Builtin, Punctuation, None,
+ String, Punctuation)),
+ # directives: evoque, overlay
+ # see doc for handling first name arg: /directives/evoque/
+ # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
+ # should be using(PythonLexer), not passed out as String
+ (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
+ r'(.*?)((?(4)%)\})',
+ bygroups(Punctuation, Name.Builtin, Punctuation, None,
+ String, using(PythonLexer), Punctuation)),
+ # directives: if, for, prefer, test
+ (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
+ bygroups(Punctuation, Name.Builtin, Punctuation, None,
+ using(PythonLexer), Punctuation)),
+ # directive clauses (no {} expression)
+ (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
+ # expressions
+ (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
+ bygroups(Punctuation, None, using(PythonLexer),
+ Name.Builtin, None, None, Punctuation)),
+ (r'#', Other),
+ ],
+ 'comment': [
+ (r'[^\]#]', Comment.Multiline),
+ (r'#\[', Comment.Multiline, '#push'),
+ (r'\]#', Comment.Multiline, '#pop'),
+ (r'[\]#]', Comment.Multiline)
+ ],
+ }
+
+
+class EvoqueHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `EvoqueLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 1.1
+ """
+ name = 'HTML+Evoque'
+ aliases = ['html+evoque']
+ filenames = ['*.html']
+ mimetypes = ['text/html+evoque']
+
+ def __init__(self, **options):
+ super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
+ **options)
+
+
+class EvoqueXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `EvoqueLexer` that highlights unlexed data with the
+ `XmlLexer`.
+
+ .. versionadded:: 1.1
+ """
+ name = 'XML+Evoque'
+ aliases = ['xml+evoque']
+ filenames = ['*.xml']
+ mimetypes = ['application/xml+evoque']
+
+ def __init__(self, **options):
+ super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
+ **options)
+
+
+class ColdfusionLexer(RegexLexer):
+ """
+ Coldfusion statements
+ """
+ name = 'cfstatement'
+ aliases = ['cfs']
+ filenames = []
+ mimetypes = []
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
+ (r'\+\+|--', Operator),
+ (r'[-+*/^&=!]', Operator),
+ (r'<=|>=|<|>|==', Operator),
+ (r'mod\b', Operator),
+ (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
+ (r'\|\||&&', Operator),
+ (r'\?', Operator),
+ (r'"', String.Double, 'string'),
+ # There is a special rule for allowing html in single quoted
+ # strings, evidently.
+ (r"'.*?'", String.Single),
+ (r'\d+', Number),
+ (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
+ r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
+ r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(application|session|client|cookie|super|this|variables|arguments)\b',
+ Name.Constant),
+ (r'([a-z_$][\w.]*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-z_$][\w.]*', Name.Variable),
+ (r'[()\[\]{};:,.\\]', Punctuation),
+ (r'\s+', Text),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'#.+?#', String.Interp),
+ (r'[^"#]+', String.Double),
+ (r'#', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
+
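The 'string' state above gives ColdFusion double-quoted strings their characteristic treatment: '""' is an escaped quote (kept as String.Double) and '#...#' switches the embedded expression to String.Interp. A token dump makes this visible (illustrative input):

    from pygments.lexers.templates import ColdfusionLexer

    # Inside the double-quoted string, '""' is an escaped quote and
    # '#user.name#' is emitted as String.Interp.
    code = 'writeOutput("Hello ""#user.name#"" !")'
    for tokentype, value in ColdfusionLexer().get_tokens(code):
        print(tokentype, repr(value))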
+
+class ColdfusionMarkupLexer(RegexLexer):
+ """
+ Coldfusion markup only
+ """
+ name = 'Coldfusion'
+ aliases = ['cf']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'[^<]+', Other),
+ include('tags'),
+ (r'<[^<>]*', Other),
+ ],
+ 'tags': [
+ (r'<!---', Comment.Multiline, 'cfcomment'),
+ (r'(?s)<!--.*?-->', Comment),
+ (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
+ (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
+ bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
+ # negative lookbehind is for strings with embedded >
+ (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
+ r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
+ r'mailpart|mail|header|content|zip|image|lock|argument|try|'
+ r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
+ bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
+ ],
+ 'cfoutput': [
+ (r'[^#<]+', Other),
+ (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
+ Punctuation)),
+ # (r'<cfoutput.*?>', Name.Builtin, '#push'),
+ (r'</cfoutput.*?>', Name.Builtin, '#pop'),
+ include('tags'),
+ (r'(?s)<[^<>]*', Other),
+ (r'#', Other),
+ ],
+ 'cfcomment': [
+ (r'<!---', Comment.Multiline, '#push'),
+ (r'--->', Comment.Multiline, '#pop'),
+ (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
+ ],
+ }
+
+
+class ColdfusionHtmlLexer(DelegatingLexer):
+ """
+ Coldfusion markup in html
+ """
+ name = 'Coldfusion HTML'
+ aliases = ['cfm']
+ filenames = ['*.cfm', '*.cfml']
+ mimetypes = ['application/x-coldfusion']
+
+ def __init__(self, **options):
+ super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
+ **options)
+
+
+class ColdfusionCFCLexer(DelegatingLexer):
+ """
+ Coldfusion markup/script components
+
+ .. versionadded:: 2.0
+ """
+ name = 'Coldfusion CFC'
+ aliases = ['cfc']
+ filenames = ['*.cfc']
+ mimetypes = []
+
+ def __init__(self, **options):
+ super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer, ColdfusionLexer,
+ **options)
+
+
+class SspLexer(DelegatingLexer):
+ """
+ Lexer for Scalate Server Pages.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Scalate Server Page'
+ aliases = ['ssp']
+ filenames = ['*.ssp']
+ mimetypes = ['application/x-ssp']
+
+ def __init__(self, **options):
+ super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'val \w+\s*:', text):
- rv += 0.6
- if looks_like_xml(text):
- rv += 0.2
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class TeaTemplateRootLexer(RegexLexer):
- """
- Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
- code blocks.
-
- .. versionadded:: 1.5
- """
-
- tokens = {
- 'root': [
- (r'<%\S?', Keyword, 'sec'),
- (r'[^<]+', Other),
- (r'<', Other),
- ],
- 'sec': [
- (r'%>', Keyword, '#pop'),
- # note: '\w\W' != '.' without DOTALL.
- (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
- ],
- }
-
-
-class TeaTemplateLexer(DelegatingLexer):
- """
- Lexer for `Tea Templates <http://teatrove.org/>`_.
-
- .. versionadded:: 1.5
- """
- name = 'Tea'
- aliases = ['tea']
- filenames = ['*.tea']
- mimetypes = ['text/x-tea']
-
- def __init__(self, **options):
- super(TeaTemplateLexer, self).__init__(XmlLexer,
- TeaTemplateRootLexer, **options)
-
- def analyse_text(text):
- rv = TeaLangLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class LassoHtmlLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `HtmlLexer`.
-
- Nested JavaScript and CSS is also highlighted.
-
- .. versionadded:: 1.6
- """
-
- name = 'HTML+Lasso'
- aliases = ['html+lasso']
- alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
- '*.incl', '*.inc', '*.las']
- mimetypes = ['text/html+lasso',
- 'application/x-httpd-lasso',
- 'application/x-httpd-lasso[89]']
-
- def __init__(self, **options):
- super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text): # same as HTML lexer
- rv += 0.5
- return rv
-
-
-class LassoXmlLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `XmlLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'XML+Lasso'
- aliases = ['xml+lasso']
- alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
- '*.incl', '*.inc', '*.las']
- mimetypes = ['application/xml+lasso']
-
- def __init__(self, **options):
- super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class LassoCssLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `CssLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'CSS+Lasso'
- aliases = ['css+lasso']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+lasso']
-
- def __init__(self, **options):
- options['requiredelimiters'] = True
- super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.05
- if re.search(r'\w+:.+?;', text):
- rv += 0.1
- if 'padding:' in text:
- rv += 0.1
- return rv
-
-
-class LassoJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `JavascriptLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'JavaScript+Lasso'
- aliases = ['js+lasso', 'javascript+lasso']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+lasso',
- 'text/x-javascript+lasso',
- 'text/javascript+lasso']
-
- def __init__(self, **options):
- options['requiredelimiters'] = True
- super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
- **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.05
- return rv
-
-
-class HandlebarsLexer(RegexLexer):
- """
- Generic `handlebars <http://handlebarsjs.com/>` template lexer.
-
- Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
- Everything else is left for a delegating lexer.
-
- .. versionadded:: 2.0
- """
-
- name = "Handlebars"
- aliases = ['handlebars']
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
-
+ rv += 0.6
+ if looks_like_xml(text):
+ rv += 0.2
+ if '<%' in text and '%>' in text:
+ rv += 0.1
+ return rv
+
+
+class TeaTemplateRootLexer(RegexLexer):
+ """
+    Base for the `TeaTemplateLexer`. Yields `Token.Other` for areas outside of
+ code blocks.
+
+ .. versionadded:: 1.5
+ """
+
+ tokens = {
+ 'root': [
+ (r'<%\S?', Keyword, 'sec'),
+ (r'[^<]+', Other),
+ (r'<', Other),
+ ],
+ 'sec': [
+ (r'%>', Keyword, '#pop'),
+ # note: '\w\W' != '.' without DOTALL.
+ (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
+ ],
+ }
+
+
+class TeaTemplateLexer(DelegatingLexer):
+ """
+ Lexer for `Tea Templates <http://teatrove.org/>`_.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Tea'
+ aliases = ['tea']
+ filenames = ['*.tea']
+ mimetypes = ['text/x-tea']
+
+ def __init__(self, **options):
+ super(TeaTemplateLexer, self).__init__(XmlLexer,
+ TeaTemplateRootLexer, **options)
+
+ def analyse_text(text):
+ rv = TeaLangLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ if '<%' in text and '%>' in text:
+ rv += 0.1
+ return rv
+
+
+class LassoHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `HtmlLexer`.
+
+ Nested JavaScript and CSS is also highlighted.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'HTML+Lasso'
+ aliases = ['html+lasso']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
+ '*.incl', '*.inc', '*.las']
+ mimetypes = ['text/html+lasso',
+ 'application/x-httpd-lasso',
+ 'application/x-httpd-lasso[89]']
+
+ def __init__(self, **options):
+ super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text): # same as HTML lexer
+ rv += 0.5
+ return rv
+
+
+class LassoXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `XmlLexer`.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'XML+Lasso'
+ aliases = ['xml+lasso']
+ alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
+ '*.incl', '*.inc', '*.las']
+ mimetypes = ['application/xml+lasso']
+
+ def __init__(self, **options):
+ super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class LassoCssLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `CssLexer`.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'CSS+Lasso'
+ aliases = ['css+lasso']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+lasso']
+
+ def __init__(self, **options):
+ options['requiredelimiters'] = True
+ super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.05
+ if re.search(r'\w+:.+?;', text):
+ rv += 0.1
+ if 'padding:' in text:
+ rv += 0.1
+ return rv
+
+
+class LassoJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `JavascriptLexer`.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'JavaScript+Lasso'
+ aliases = ['js+lasso', 'javascript+lasso']
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+lasso',
+ 'text/x-javascript+lasso',
+ 'text/javascript+lasso']
+
+ def __init__(self, **options):
+ options['requiredelimiters'] = True
+ super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
+ **options)
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.05
+ return rv
+
+
+class HandlebarsLexer(RegexLexer):
+ """
+ Generic `handlebars <http://handlebarsjs.com/>` template lexer.
+
+ Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
+ Everything else is left for a delegating lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "Handlebars"
+ aliases = ['handlebars']
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+
# Comment start {{! }} or {{!--
- (r'\{\{!.*\}\}', Comment),
-
+ (r'\{\{!.*\}\}', Comment),
+
# HTML Escaping open {{{expression
- (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
+ (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
# {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
(r'(\{\{)([#~/]+)([^\s}]*)', bygroups(Comment.Preproc, Number.Attribute,Number.Attribute), 'tag'),
- (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
- ],
-
- 'tag': [
- (r'\s+', Text),
+ (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
+ ],
+
+ 'tag': [
+ (r'\s+', Text),
# HTML Escaping close }}}
- (r'\}\}\}', Comment.Special, '#pop'),
+ (r'\}\}\}', Comment.Special, '#pop'),
# blockClose}}, includes optional tilde ~
(r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
-
- # {{opt=something}}
+
+ # {{opt=something}}
(r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
-
+
# Partials {{> ...}}
(r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
(r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
@@ -1851,355 +1851,355 @@ class HandlebarsLexer(RegexLexer):
'generic': [
include('variable'),
- # borrowed from DjangoLexer
- (r':?"(\\\\|\\"|[^"])*"', String.Double),
- (r":?'(\\\\|\\'|[^'])*'", String.Single),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ]
- }
-
-
-class HandlebarsHtmlLexer(DelegatingLexer):
- """
- Subclass of the `HandlebarsLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 2.0
- """
-
- name = "HTML+Handlebars"
- aliases = ["html+handlebars"]
- filenames = ['*.handlebars', '*.hbs']
- mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
-
- def __init__(self, **options):
- super(HandlebarsHtmlLexer, self).__init__(HtmlLexer, HandlebarsLexer, **options)
-
-
-class YamlJinjaLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `YamlLexer`.
-
- Commonly used in Saltstack salt states.
-
- .. versionadded:: 2.0
- """
-
- name = 'YAML+Jinja'
- aliases = ['yaml+jinja', 'salt', 'sls']
- filenames = ['*.sls']
- mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
-
- def __init__(self, **options):
- super(YamlJinjaLexer, self).__init__(YamlLexer, DjangoLexer, **options)
-
-
-class LiquidLexer(RegexLexer):
- """
- Lexer for `Liquid templates
- <http://www.rubydoc.info/github/Shopify/liquid>`_.
-
- .. versionadded:: 2.0
- """
- name = 'liquid'
- aliases = ['liquid']
- filenames = ['*.liquid']
-
- tokens = {
- 'root': [
- (r'[^{]+', Text),
- # tags and block tags
- (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
- # output tags
- (r'(\{\{)(\s*)([^\s}]+)',
- bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
- 'output'),
- (r'\{', Text)
- ],
-
- 'tag-or-block': [
- # builtin logic blocks
- (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
- (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
- combined('end-of-block', 'whitespace', 'generic')),
- (r'(else)(\s*)(%\})',
- bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
-
- # other builtin blocks
- (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
- Whitespace, Punctuation), '#pop'),
- (r'(comment)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
- (r'(raw)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
-
- # end of block
- (r'(end(case|unless|if))(\s*)(%\})',
- bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
- (r'(end([^\s%]+))(\s*)(%\})',
- bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
-
- # builtin tags (assign and include are handled together with usual tags)
- (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
- bygroups(Name.Tag, Whitespace,
- using(this, state='generic'), Punctuation, Whitespace),
- 'variable-tag-markup'),
-
- # other tags or blocks
- (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
- ],
-
- 'output': [
- include('whitespace'),
+ # borrowed from DjangoLexer
+ (r':?"(\\\\|\\"|[^"])*"', String.Double),
+ (r":?'(\\\\|\\'|[^'])*'", String.Single),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ]
+ }
+
+
+class HandlebarsHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `HandlebarsLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "HTML+Handlebars"
+ aliases = ["html+handlebars"]
+ filenames = ['*.handlebars', '*.hbs']
+ mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
+
+ def __init__(self, **options):
+ super(HandlebarsHtmlLexer, self).__init__(HtmlLexer, HandlebarsLexer, **options)
+
+
+class YamlJinjaLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `YamlLexer`.
+
+ Commonly used in Saltstack salt states.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'YAML+Jinja'
+ aliases = ['yaml+jinja', 'salt', 'sls']
+ filenames = ['*.sls']
+ mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
+
+ def __init__(self, **options):
+ super(YamlJinjaLexer, self).__init__(YamlLexer, DjangoLexer, **options)
+
+
+class LiquidLexer(RegexLexer):
+ """
+ Lexer for `Liquid templates
+ <http://www.rubydoc.info/github/Shopify/liquid>`_.
+
+ .. versionadded:: 2.0
+ """
+ name = 'liquid'
+ aliases = ['liquid']
+ filenames = ['*.liquid']
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Text),
+ # tags and block tags
+ (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
+ # output tags
+ (r'(\{\{)(\s*)([^\s}]+)',
+ bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
+ 'output'),
+ (r'\{', Text)
+ ],
+
+ 'tag-or-block': [
+ # builtin logic blocks
+ (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
+ (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
+ combined('end-of-block', 'whitespace', 'generic')),
+ (r'(else)(\s*)(%\})',
+ bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
+
+ # other builtin blocks
+ (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
+ Whitespace, Punctuation), '#pop'),
+ (r'(comment)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
+ (r'(raw)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
+
+ # end of block
+ (r'(end(case|unless|if))(\s*)(%\})',
+ bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
+ (r'(end([^\s%]+))(\s*)(%\})',
+ bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
+
+ # builtin tags (assign and include are handled together with usual tags)
+ (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
+ bygroups(Name.Tag, Whitespace,
+ using(this, state='generic'), Punctuation, Whitespace),
+ 'variable-tag-markup'),
+
+ # other tags or blocks
+ (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
+ ],
+
+ 'output': [
+ include('whitespace'),
(r'\}\}', Punctuation, '#pop'), # end of output
-
- (r'\|', Punctuation, 'filters')
- ],
-
- 'filters': [
- include('whitespace'),
- (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
-
- (r'([^\s|:]+)(:?)(\s*)',
- bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
- ],
-
- 'filter-markup': [
- (r'\|', Punctuation, '#pop'),
- include('end-of-tag'),
- include('default-param-markup')
- ],
-
- 'condition': [
- include('end-of-block'),
- include('whitespace'),
-
- (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
- bygroups(using(this, state = 'generic'), Whitespace, Operator,
- Whitespace, using(this, state = 'generic'), Whitespace,
- Punctuation)),
- (r'\b!', Operator),
- (r'\bnot\b', Operator.Word),
- (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
- bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
- Whitespace, using(this, state = 'generic'))),
-
- include('generic'),
- include('whitespace')
- ],
-
- 'generic-value': [
- include('generic'),
- include('end-at-whitespace')
- ],
-
- 'operator': [
- (r'(\s*)((=|!|>|<)=?)(\s*)',
- bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
- (r'(\s*)(\bcontains\b)(\s*)',
- bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
- ],
-
- 'end-of-tag': [
- (r'\}\}', Punctuation, '#pop')
- ],
-
- 'end-of-block': [
- (r'%\}', Punctuation, ('#pop', '#pop'))
- ],
-
- 'end-at-whitespace': [
- (r'\s+', Whitespace, '#pop')
- ],
-
- # states for unknown markup
- 'param-markup': [
- include('whitespace'),
- # params with colons or equals
- (r'([^\s=:]+)(\s*)(=|:)',
- bygroups(Name.Attribute, Whitespace, Operator)),
- # explicit variables
- (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
- bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
- Whitespace, Punctuation)),
-
- include('string'),
- include('number'),
- include('keyword'),
- (r',', Punctuation)
- ],
-
- 'default-param-markup': [
- include('param-markup'),
- (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
- ],
-
- 'variable-param-markup': [
- include('param-markup'),
- include('variable'),
- (r'.', Text) # fallback
- ],
-
- 'tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
- include('default-param-markup')
- ],
-
- 'variable-tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
- include('variable-param-markup')
- ],
-
-        # states for different value types
- 'keyword': [
- (r'\b(false|true)\b', Keyword.Constant)
- ],
-
- 'variable': [
- (r'[a-zA-Z_]\w*', Name.Variable),
- (r'(?<=\w)\.(?=\w)', Punctuation)
- ],
-
- 'string': [
- (r"'[^']*'", String.Single),
- (r'"[^"]*"', String.Double)
- ],
-
- 'number': [
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer)
- ],
-
- 'generic': [ # decides for variable, string, keyword or number
- include('keyword'),
- include('string'),
- include('number'),
- include('variable')
- ],
-
- 'whitespace': [
- (r'[ \t]+', Whitespace)
- ],
-
- # states for builtin blocks
- 'comment': [
- (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
- bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
- Punctuation), ('#pop', '#pop')),
- (r'.', Comment)
- ],
-
- 'raw': [
- (r'[^{]+', Text),
- (r'(\{%)(\s*)(endraw)(\s*)(%\})',
- bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
- Punctuation), '#pop'),
- (r'\{', Text)
- ],
- }
-
-
-class TwigLexer(RegexLexer):
- """
- `Twig <http://twig.sensiolabs.org/>`_ template lexer.
-
-    It just highlights Twig code between the preprocessor directives;
-    other data is left untouched by the lexer.
-
- .. versionadded:: 2.0
- """
-
- name = 'Twig'
- aliases = ['twig']
- mimetypes = ['application/x-twig']
-
- flags = re.M | re.S
-
- # Note that a backslash is included in the following two patterns
- # PHP uses a backslash as a namespace separator
- _ident_char = r'[\\\w-]|[^\x00-\x7f]'
- _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
- _ident_end = r'(?:' + _ident_char + ')*'
- _ident_inner = _ident_begin + _ident_end
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'\{\{', Comment.Preproc, 'var'),
- # twig comments
- (r'\{\#.*?\#\}', Comment),
- # raw twig blocks
- (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Other, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Other, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # filter blocks
- (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
- bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
- 'tag'),
- (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword), 'tag'),
- (r'\{', Other),
- ],
- 'varnames': [
- (r'(\|)(\s*)(%s)' % _ident_inner,
- bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
- (r'(in|not|and|b-and|or|b-or|b-xor|is'
- r'if|elseif|else|import'
- r'constant|defined|divisibleby|empty|even|iterable|odd|sameas'
- r'matches|starts\s+with|ends\s+with)\b',
- Keyword),
- (r'(loop|block|parent)\b', Name.Builtin),
- (_ident_inner, Name.Variable),
- (r'\.' + _ident_inner, Name.Variable),
- (r'\.[0-9]+', Number),
- (r':?"(\\\\|\\"|[^"])*"', String.Double),
- (r":?'(\\\\|\\'|[^'])*'", String.Single),
- (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ],
- 'var': [
- (r'\s+', Text),
- (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames')
- ],
- 'tag': [
- (r'\s+', Text),
- (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames'),
- (r'.', Punctuation),
- ],
- }
-
-
-class TwigHtmlLexer(DelegatingLexer):
- """
- Subclass of the `TwigLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 2.0
- """
-
- name = "HTML+Twig"
- aliases = ["html+twig"]
- filenames = ['*.twig']
- mimetypes = ['text/html+twig']
-
- def __init__(self, **options):
- super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)
+
+ (r'\|', Punctuation, 'filters')
+ ],
+
+ 'filters': [
+ include('whitespace'),
+ (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
+
+ (r'([^\s|:]+)(:?)(\s*)',
+ bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
+ ],
+
+ 'filter-markup': [
+ (r'\|', Punctuation, '#pop'),
+ include('end-of-tag'),
+ include('default-param-markup')
+ ],
+
+ 'condition': [
+ include('end-of-block'),
+ include('whitespace'),
+
+ (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
+ bygroups(using(this, state = 'generic'), Whitespace, Operator,
+ Whitespace, using(this, state = 'generic'), Whitespace,
+ Punctuation)),
+ (r'\b!', Operator),
+ (r'\bnot\b', Operator.Word),
+ (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
+ bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
+ Whitespace, using(this, state = 'generic'))),
+
+ include('generic'),
+ include('whitespace')
+ ],
+
+ 'generic-value': [
+ include('generic'),
+ include('end-at-whitespace')
+ ],
+
+ 'operator': [
+ (r'(\s*)((=|!|>|<)=?)(\s*)',
+ bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
+ (r'(\s*)(\bcontains\b)(\s*)',
+ bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
+ ],
+
+ 'end-of-tag': [
+ (r'\}\}', Punctuation, '#pop')
+ ],
+
+ 'end-of-block': [
+ (r'%\}', Punctuation, ('#pop', '#pop'))
+ ],
+
+ 'end-at-whitespace': [
+ (r'\s+', Whitespace, '#pop')
+ ],
+
+ # states for unknown markup
+ 'param-markup': [
+ include('whitespace'),
+ # params with colons or equals
+ (r'([^\s=:]+)(\s*)(=|:)',
+ bygroups(Name.Attribute, Whitespace, Operator)),
+ # explicit variables
+ (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
+ bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
+ Whitespace, Punctuation)),
+
+ include('string'),
+ include('number'),
+ include('keyword'),
+ (r',', Punctuation)
+ ],
+
+ 'default-param-markup': [
+ include('param-markup'),
+ (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
+ ],
+
+ 'variable-param-markup': [
+ include('param-markup'),
+ include('variable'),
+ (r'.', Text) # fallback
+ ],
+
+ 'tag-markup': [
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ include('default-param-markup')
+ ],
+
+ 'variable-tag-markup': [
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ include('variable-param-markup')
+ ],
+
+        # states for different value types
+ 'keyword': [
+ (r'\b(false|true)\b', Keyword.Constant)
+ ],
+
+ 'variable': [
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ (r'(?<=\w)\.(?=\w)', Punctuation)
+ ],
+
+ 'string': [
+ (r"'[^']*'", String.Single),
+ (r'"[^"]*"', String.Double)
+ ],
+
+ 'number': [
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+
+ 'generic': [ # decides for variable, string, keyword or number
+ include('keyword'),
+ include('string'),
+ include('number'),
+ include('variable')
+ ],
+
+ 'whitespace': [
+ (r'[ \t]+', Whitespace)
+ ],
+
+ # states for builtin blocks
+ 'comment': [
+ (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
+ bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
+ Punctuation), ('#pop', '#pop')),
+ (r'.', Comment)
+ ],
+
+ 'raw': [
+ (r'[^{]+', Text),
+ (r'(\{%)(\s*)(endraw)(\s*)(%\})',
+ bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
+ Punctuation), '#pop'),
+ (r'\{', Text)
+ ],
+ }
+
+
+class TwigLexer(RegexLexer):
+ """
+ `Twig <http://twig.sensiolabs.org/>`_ template lexer.
+
+    It just highlights Twig code between the preprocessor directives;
+    other data is left untouched by the lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Twig'
+ aliases = ['twig']
+ mimetypes = ['application/x-twig']
+
+ flags = re.M | re.S
+
+ # Note that a backslash is included in the following two patterns
+ # PHP uses a backslash as a namespace separator
+ _ident_char = r'[\\\w-]|[^\x00-\x7f]'
+ _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
+ _ident_end = r'(?:' + _ident_char + ')*'
+ _ident_inner = _ident_begin + _ident_end
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+ (r'\{\{', Comment.Preproc, 'var'),
+ # twig comments
+ (r'\{\#.*?\#\}', Comment),
+ # raw twig blocks
+ (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Other, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Other, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ # filter blocks
+ (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
+ bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
+ 'tag'),
+ (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
+ bygroups(Comment.Preproc, Text, Keyword), 'tag'),
+ (r'\{', Other),
+ ],
+ 'varnames': [
+ (r'(\|)(\s*)(%s)' % _ident_inner,
+ bygroups(Operator, Text, Name.Function)),
+ (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
+ (r'(in|not|and|b-and|or|b-or|b-xor|is'
+ r'if|elseif|else|import'
+ r'constant|defined|divisibleby|empty|even|iterable|odd|sameas'
+ r'matches|starts\s+with|ends\s+with)\b',
+ Keyword),
+ (r'(loop|block|parent)\b', Name.Builtin),
+ (_ident_inner, Name.Variable),
+ (r'\.' + _ident_inner, Name.Variable),
+ (r'\.[0-9]+', Number),
+ (r':?"(\\\\|\\"|[^"])*"', String.Double),
+ (r":?'(\\\\|\\'|[^'])*'", String.Single),
+ (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ],
+ 'var': [
+ (r'\s+', Text),
+ (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames')
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames'),
+ (r'.', Punctuation),
+ ],
+ }
+
+
+class TwigHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `TwigLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "HTML+Twig"
+ aliases = ["html+twig"]
+ filenames = ['*.twig']
+ mimetypes = ['text/html+twig']
+
+ def __init__(self, **options):
+ super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)
class Angular2Lexer(RegexLexer):
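The delegating template lexers restored above (Handlebars, Twig, Liquid and the rest) are normally reached through the ordinary Pygments entry points rather than instantiated by hand. A minimal usage sketch, assuming a stock Pygments installation where these classes are registered under the aliases shown; the sample template string is invented for illustration:

# Illustrative sketch: highlight a made-up Handlebars-in-HTML snippet with the
# delegating HTML+Handlebars lexer defined above.
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

sample = '<p>{{#if user}}Hello, {{user.name}}!{{/if}}</p>'  # hypothetical input
lexer = get_lexer_by_name('html+handlebars')  # resolves to HandlebarsHtmlLexer
print(highlight(sample, lexer, HtmlFormatter()))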
diff --git a/contrib/python/Pygments/py2/pygments/lexers/testing.py b/contrib/python/Pygments/py2/pygments/lexers/testing.py
index 9288b5b47f..0482a132bd 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/testing.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/testing.py
@@ -1,207 +1,207 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.testing
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for testing languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.testing
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for testing languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Comment, Keyword, Name, String, Number, Generic, Text
-
-__all__ = ['GherkinLexer', 'TAPLexer']
-
-
-class GherkinLexer(RegexLexer):
- """
- For `Gherkin <http://github.com/aslakhellesoy/gherkin/>` syntax.
-
- .. versionadded:: 1.2
- """
- name = 'Gherkin'
- aliases = ['cucumber', 'gherkin']
- filenames = ['*.feature']
- mimetypes = ['text/x-gherkin']
-
- feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
- feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
- examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Comment, Keyword, Name, String, Number, Generic, Text
+
+__all__ = ['GherkinLexer', 'TAPLexer']
+
+
+class GherkinLexer(RegexLexer):
+ """
+ For `Gherkin <http://github.com/aslakhellesoy/gherkin/>` syntax.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Gherkin'
+ aliases = ['cucumber', 'gherkin']
+ filenames = ['*.feature']
+ mimetypes = ['text/x-gherkin']
+
+ feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
+ feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
+ examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )'
-
- tokens = {
- 'comments': [
- (r'^\s*#.*$', Comment),
- ],
- 'feature_elements': [
- (step_keywords, Keyword, "step_content_stack"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'feature_elements_on_stack': [
- (step_keywords, Keyword, "#pop:2"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table': [
- (r"\s+\|", Keyword, 'examples_table_header'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table_header': [
- (r"\s+\|\s*$", Keyword, "#pop:2"),
- include('comments'),
- (r"\\\|", Name.Variable),
- (r"\s*\|", Keyword),
- (r"[^|]", Name.Variable),
- ],
- 'scenario_sections_on_stack': [
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- "feature_elements_on_stack"),
- ],
- 'narrative': [
- include('scenario_sections_on_stack'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'table_vars': [
- (r'(<[^>]+>)', Name.Variable),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
- ],
- 'string': [
- include('table_vars'),
- (r'(\s|.)', String),
- ],
- 'py_string': [
- (r'"""', Keyword, "#pop"),
- include('string'),
- ],
- 'step_content_root': [
- (r"$", Keyword, "#pop"),
- include('step_content'),
- ],
- 'step_content_stack': [
- (r"$", Keyword, "#pop:2"),
- include('step_content'),
- ],
- 'step_content': [
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- include('comments'),
- (r'(\s|.)', Name.Function),
- ],
- 'table_content': [
- (r"\s+\|\s*$", Keyword, "#pop"),
- include('comments'),
- (r"\\\|", String),
- (r"\s*\|", Keyword),
- include('string'),
- ],
- 'double_string': [
- (r'"', Name.Function, "#pop"),
- include('string'),
- ],
- 'root': [
- (r'\n', Name.Function),
- include('comments'),
- (r'"""', Keyword, "py_string"),
- (r'\s+\|', Keyword, 'table_content'),
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
- (step_keywords, bygroups(Name.Function, Keyword),
- 'step_content_root'),
- (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
- 'narrative'),
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'feature_elements'),
- (examples_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'examples_table'),
- (r'(\s|.)', Name.Function),
- ]
- }
-
-
-class TAPLexer(RegexLexer):
- """
- For Test Anything Protocol (TAP) output.
-
- .. versionadded:: 2.1
- """
- name = 'TAP'
- aliases = ['tap']
- filenames = ['*.tap']
-
- tokens = {
- 'root': [
- # A TAP version may be specified.
- (r'^TAP version \d+\n', Name.Namespace),
-
- # Specify a plan with a plan line.
+
+ tokens = {
+ 'comments': [
+ (r'^\s*#.*$', Comment),
+ ],
+ 'feature_elements': [
+ (step_keywords, Keyword, "step_content_stack"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'feature_elements_on_stack': [
+ (step_keywords, Keyword, "#pop:2"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table': [
+ (r"\s+\|", Keyword, 'examples_table_header'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table_header': [
+ (r"\s+\|\s*$", Keyword, "#pop:2"),
+ include('comments'),
+ (r"\\\|", Name.Variable),
+ (r"\s*\|", Keyword),
+ (r"[^|]", Name.Variable),
+ ],
+ 'scenario_sections_on_stack': [
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ "feature_elements_on_stack"),
+ ],
+ 'narrative': [
+ include('scenario_sections_on_stack'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'table_vars': [
+ (r'(<[^>]+>)', Name.Variable),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
+ ],
+ 'string': [
+ include('table_vars'),
+ (r'(\s|.)', String),
+ ],
+ 'py_string': [
+ (r'"""', Keyword, "#pop"),
+ include('string'),
+ ],
+ 'step_content_root': [
+ (r"$", Keyword, "#pop"),
+ include('step_content'),
+ ],
+ 'step_content_stack': [
+ (r"$", Keyword, "#pop:2"),
+ include('step_content'),
+ ],
+ 'step_content': [
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ include('comments'),
+ (r'(\s|.)', Name.Function),
+ ],
+ 'table_content': [
+ (r"\s+\|\s*$", Keyword, "#pop"),
+ include('comments'),
+ (r"\\\|", String),
+ (r"\s*\|", Keyword),
+ include('string'),
+ ],
+ 'double_string': [
+ (r'"', Name.Function, "#pop"),
+ include('string'),
+ ],
+ 'root': [
+ (r'\n', Name.Function),
+ include('comments'),
+ (r'"""', Keyword, "py_string"),
+ (r'\s+\|', Keyword, 'table_content'),
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
+ (step_keywords, bygroups(Name.Function, Keyword),
+ 'step_content_root'),
+ (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
+ 'narrative'),
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'feature_elements'),
+ (examples_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'examples_table'),
+ (r'(\s|.)', Name.Function),
+ ]
+ }
+
+
+class TAPLexer(RegexLexer):
+ """
+ For Test Anything Protocol (TAP) output.
+
+ .. versionadded:: 2.1
+ """
+ name = 'TAP'
+ aliases = ['tap']
+ filenames = ['*.tap']
+
+ tokens = {
+ 'root': [
+ # A TAP version may be specified.
+ (r'^TAP version \d+\n', Name.Namespace),
+
+ # Specify a plan with a plan line.
(r'^1\.\.\d+', Keyword.Declaration, 'plan'),
-
- # A test failure
- (r'^(not ok)([^\S\n]*)(\d*)',
- bygroups(Generic.Error, Text, Number.Integer), 'test'),
-
- # A test success
- (r'^(ok)([^\S\n]*)(\d*)',
- bygroups(Keyword.Reserved, Text, Number.Integer), 'test'),
-
- # Diagnostics start with a hash.
- (r'^#.*\n', Comment),
-
- # TAP's version of an abort statement.
- (r'^Bail out!.*\n', Generic.Error),
-
- # TAP ignores any unrecognized lines.
- (r'^.*\n', Text),
- ],
- 'plan': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Text),
-
- # A plan may have a directive with it.
- (r'#', Comment, 'directive'),
-
- # Or it could just end.
- (r'\n', Comment, '#pop'),
-
- # Anything else is wrong.
- (r'.*\n', Generic.Error, '#pop'),
- ],
- 'test': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Text),
-
- # A test may have a directive with it.
- (r'#', Comment, 'directive'),
-
- (r'\S+', Text),
-
- (r'\n', Text, '#pop'),
- ],
- 'directive': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Comment),
-
- # Extract todo items.
- (r'(?i)\bTODO\b', Comment.Preproc),
-
- # Extract skip items.
- (r'(?i)\bSKIP\S*', Comment.Preproc),
-
- (r'\S+', Comment),
-
- (r'\n', Comment, '#pop:2'),
- ],
- }
+
+ # A test failure
+ (r'^(not ok)([^\S\n]*)(\d*)',
+ bygroups(Generic.Error, Text, Number.Integer), 'test'),
+
+ # A test success
+ (r'^(ok)([^\S\n]*)(\d*)',
+ bygroups(Keyword.Reserved, Text, Number.Integer), 'test'),
+
+ # Diagnostics start with a hash.
+ (r'^#.*\n', Comment),
+
+ # TAP's version of an abort statement.
+ (r'^Bail out!.*\n', Generic.Error),
+
+ # TAP ignores any unrecognized lines.
+ (r'^.*\n', Text),
+ ],
+ 'plan': [
+ # Consume whitespace (but not newline).
+ (r'[^\S\n]+', Text),
+
+ # A plan may have a directive with it.
+ (r'#', Comment, 'directive'),
+
+ # Or it could just end.
+ (r'\n', Comment, '#pop'),
+
+ # Anything else is wrong.
+ (r'.*\n', Generic.Error, '#pop'),
+ ],
+ 'test': [
+ # Consume whitespace (but not newline).
+ (r'[^\S\n]+', Text),
+
+ # A test may have a directive with it.
+ (r'#', Comment, 'directive'),
+
+ (r'\S+', Text),
+
+ (r'\n', Text, '#pop'),
+ ],
+ 'directive': [
+ # Consume whitespace (but not newline).
+ (r'[^\S\n]+', Comment),
+
+ # Extract todo items.
+ (r'(?i)\bTODO\b', Comment.Preproc),
+
+ # Extract skip items.
+ (r'(?i)\bSKIP\S*', Comment.Preproc),
+
+ (r'\S+', Comment),
+
+ (r'\n', Comment, '#pop:2'),
+ ],
+ }
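The TAP grammar above (a plan line, ok/not ok results, and '#' directives) is easiest to sanity-check by tokenizing a small report. A minimal sketch, assuming the vendored package is importable; the TAP output below is invented:

# Illustrative sketch: run a tiny, made-up TAP report through the TAPLexer above.
from pygments.lexers.testing import TAPLexer

report = (
    "TAP version 13\n"
    "1..2\n"
    "ok 1 - parser starts\n"
    "not ok 2 - handles unicode # TODO encoding work pending\n"
)
for index, token, value in TAPLexer().get_tokens_unprocessed(report):
    print(index, token, repr(value))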
diff --git a/contrib/python/Pygments/py2/pygments/lexers/text.py b/contrib/python/Pygments/py2/pygments/lexers/text.py
index 6b431f6965..1180e38416 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/text.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/text.py
@@ -1,26 +1,26 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.text
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for non-source code file types.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.text
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for non-source code file types.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
- SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer
-from pygments.lexers.console import PyPyLogLexer
-from pygments.lexers.textedit import VimLexer
-from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
- TexLexer, GroffLexer
-from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
-from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
-from pygments.lexers.haxe import HxmlLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
+ SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer
+from pygments.lexers.console import PyPyLogLexer
+from pygments.lexers.textedit import VimLexer
+from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
+ TexLexer, GroffLexer
+from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
+from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
+from pygments.lexers.haxe import HxmlLexer
from pygments.lexers.sgf import SmartGameFormatLexer
-from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
-from pygments.lexers.data import YamlLexer
-from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
-
-__all__ = []
+from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
+from pygments.lexers.data import YamlLexer
+from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
+
+__all__ = []
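The module above defines no lexers of its own; it only re-imports classes so that historical import paths under pygments.lexers.text keep resolving while __all__ stays empty. A small sketch of that behaviour, assuming the vendored package layout shown in this patch:

# Illustrative sketch: the compatibility module and the canonical module expose
# the very same class object.
from pygments.lexers.text import ApacheConfLexer as LegacyImport
from pygments.lexers.configs import ApacheConfLexer as CanonicalImport

assert LegacyImport is CanonicalImport
print(LegacyImport.name)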
diff --git a/contrib/python/Pygments/py2/pygments/lexers/textedit.py b/contrib/python/Pygments/py2/pygments/lexers/textedit.py
index 3c6fb570df..fee8b91000 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/textedit.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/textedit.py
@@ -1,169 +1,169 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.textedit
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for languages related to text processing.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.textedit
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for languages related to text processing.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from bisect import bisect
-
-from pygments.lexer import RegexLexer, include, default, bygroups, using, this
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-from pygments.lexers.python import PythonLexer
-
-__all__ = ['AwkLexer', 'VimLexer']
-
-
-class AwkLexer(RegexLexer):
- """
- For Awk scripts.
-
- .. versionadded:: 1.5
- """
-
- name = 'Awk'
- aliases = ['awk', 'gawk', 'mawk', 'nawk']
- filenames = ['*.awk']
- mimetypes = ['application/x-awk']
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'\B', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
- r'(\*\*|[-<>+*%\^/!=|])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(break|continue|do|while|exit|for|if|else|'
- r'return)\b', Keyword, 'slashstartsregex'),
- (r'function\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
- r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
- r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
- r'delete|system)\b', Keyword.Reserved),
- (r'(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|'
- r'FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|'
- r'RSTART|RT|SUBSEP)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class VimLexer(RegexLexer):
- """
- Lexer for VimL script files.
-
- .. versionadded:: 0.8
- """
- name = 'VimL'
- aliases = ['vim']
- filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
- '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
- mimetypes = ['text/x-vim']
- flags = re.MULTILINE
-
- _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
-
- tokens = {
- 'root': [
- (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
- bygroups(using(this), Keyword, Text, Operator, Text, Text,
- using(PythonLexer), Text)),
- (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
- bygroups(using(this), Keyword, Text, using(PythonLexer))),
-
- (r'^\s*".*', Comment),
-
- (r'[ \t]+', Text),
- # TODO: regexes can have other delims
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from bisect import bisect
+
+from pygments.lexer import RegexLexer, include, default, bygroups, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.python import PythonLexer
+
+__all__ = ['AwkLexer', 'VimLexer']
+
+
+class AwkLexer(RegexLexer):
+ """
+ For Awk scripts.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Awk'
+ aliases = ['awk', 'gawk', 'mawk', 'nawk']
+ filenames = ['*.awk']
+ mimetypes = ['application/x-awk']
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'#.*$', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'\B', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
+ r'(\*\*|[-<>+*%\^/!=|])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(break|continue|do|while|exit|for|if|else|'
+ r'return)\b', Keyword, 'slashstartsregex'),
+ (r'function\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
+ r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
+ r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
+ r'delete|system)\b', Keyword.Reserved),
+ (r'(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|'
+ r'FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|'
+ r'RSTART|RT|SUBSEP)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class VimLexer(RegexLexer):
+ """
+ Lexer for VimL script files.
+
+ .. versionadded:: 0.8
+ """
+ name = 'VimL'
+ aliases = ['vim']
+ filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
+ '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
+ mimetypes = ['text/x-vim']
+ flags = re.MULTILINE
+
+ _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
+
+ tokens = {
+ 'root': [
+ (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
+ bygroups(using(this), Keyword, Text, Operator, Text, Text,
+ using(PythonLexer), Text)),
+ (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
+ bygroups(using(this), Keyword, Text, using(PythonLexer))),
+
+ (r'^\s*".*', Comment),
+
+ (r'[ \t]+', Text),
+ # TODO: regexes can have other delims
(r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex),
(r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double),
(r"'[^\n']*(?:''[^\n']*)*'", String.Single),
-
- # Who decided that doublequote was a good comment character??
- (r'(?<=\s)"[^\-:.%#=*].*', Comment),
- (r'-?\d+', Number),
- (r'#[0-9a-f]{6}', Number.Hex),
- (r'^:', Punctuation),
- (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
- (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
- Keyword),
- (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
- (r'\b\w+\b', Name.Other), # These are postprocessed below
- (r'.', Text),
- ],
- }
-
- def __init__(self, **options):
- from pygments.lexers._vim_builtins import command, option, auto
- self._cmd = command
- self._opt = option
- self._aut = auto
-
- RegexLexer.__init__(self, **options)
-
- def is_in(self, w, mapping):
- r"""
- It's kind of difficult to decide if something might be a keyword
- in VimL because it allows you to abbreviate them. In fact,
- 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
- valid ways to call it so rather than making really awful regexps
- like::
-
- \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
-
- we match `\b\w+\b` and then call is_in() on those tokens. See
- `scripts/get_vimkw.py` for how the lists are extracted.
- """
- p = bisect(mapping, (w,))
- if p > 0:
- if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
- mapping[p-1][1][:len(w)] == w:
- return True
- if p < len(mapping):
- return mapping[p][0] == w[:len(mapping[p][0])] and \
- mapping[p][1][:len(w)] == w
- return False
-
- def get_tokens_unprocessed(self, text):
- # TODO: builtins are only subsequent tokens on lines
- # and 'keywords' only happen at the beginning except
- # for :au ones
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name.Other:
- if self.is_in(value, self._cmd):
- yield index, Keyword, value
- elif self.is_in(value, self._opt) or \
- self.is_in(value, self._aut):
- yield index, Name.Builtin, value
- else:
- yield index, Text, value
- else:
- yield index, token, value
+
+ # Who decided that doublequote was a good comment character??
+ (r'(?<=\s)"[^\-:.%#=*].*', Comment),
+ (r'-?\d+', Number),
+ (r'#[0-9a-f]{6}', Number.Hex),
+ (r'^:', Punctuation),
+ (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
+ (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
+ Keyword),
+ (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
+ (r'\b\w+\b', Name.Other), # These are postprocessed below
+ (r'.', Text),
+ ],
+ }
+
+ def __init__(self, **options):
+ from pygments.lexers._vim_builtins import command, option, auto
+ self._cmd = command
+ self._opt = option
+ self._aut = auto
+
+ RegexLexer.__init__(self, **options)
+
+ def is_in(self, w, mapping):
+ r"""
+ It's kind of difficult to decide if something might be a keyword
+ in VimL because it allows you to abbreviate them. In fact,
+ 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
+ valid ways to call it so rather than making really awful regexps
+ like::
+
+ \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
+
+ we match `\b\w+\b` and then call is_in() on those tokens. See
+ `scripts/get_vimkw.py` for how the lists are extracted.
+ """
+ p = bisect(mapping, (w,))
+ if p > 0:
+ if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
+ mapping[p-1][1][:len(w)] == w:
+ return True
+ if p < len(mapping):
+ return mapping[p][0] == w[:len(mapping[p][0])] and \
+ mapping[p][1][:len(w)] == w
+ return False
+
+ def get_tokens_unprocessed(self, text):
+ # TODO: builtins are only subsequent tokens on lines
+ # and 'keywords' only happen at the beginning except
+ # for :au ones
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name.Other:
+ if self.is_in(value, self._cmd):
+ yield index, Keyword, value
+ elif self.is_in(value, self._opt) or \
+ self.is_in(value, self._aut):
+ yield index, Name.Builtin, value
+ else:
+ yield index, Text, value
+ else:
+ yield index, token, value
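The is_in helper above resolves possibly abbreviated VimL words against a sorted table of (minimal abbreviation, full name) pairs with bisect, as its docstring describes. A worked sketch of the same lookup on a toy table; the real tables come from pygments.lexers._vim_builtins and are not reproduced here:

# Illustrative sketch: mimic VimLexer.is_in() on a hand-written command table.
from bisect import bisect

# (minimal prefix, full command name), kept sorted; stands in for the generated tables
mapping = [('ab', 'abbreviate'), ('echo', 'echo'), ('fu', 'function')]

def is_in(word, mapping):
    p = bisect(mapping, (word,))
    if p > 0 and mapping[p - 1][0] == word[:len(mapping[p - 1][0])] \
            and mapping[p - 1][1][:len(word)] == word:
        return True
    if p < len(mapping):
        return mapping[p][0] == word[:len(mapping[p][0])] \
            and mapping[p][1][:len(word)] == word
    return False

print(is_in('abbre', mapping))  # True: valid abbreviation of 'abbreviate'
print(is_in('abz', mapping))    # False: not a prefix of any full name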
diff --git a/contrib/python/Pygments/py2/pygments/lexers/textfmts.py b/contrib/python/Pygments/py2/pygments/lexers/textfmts.py
index d3a191b08f..d709e6a902 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/textfmts.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/textfmts.py
@@ -1,302 +1,302 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.textfmts
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various text formats.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.textfmts
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various text formats.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexers import guess_lexer, get_lexer_by_name
from pygments.lexer import RegexLexer, bygroups, default, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Generic, Literal, Punctuation
-from pygments.util import ClassNotFound
-
+from pygments.util import ClassNotFound
+
__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
'NotmuchLexer']
-
-
-class IrcLogsLexer(RegexLexer):
- """
- Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
- """
-
- name = 'IRC logs'
- aliases = ['irc']
- filenames = ['*.weechatlog']
- mimetypes = ['text/x-irclog']
-
- flags = re.VERBOSE | re.MULTILINE
- timestamp = r"""
- (
- # irssi / xchat and others
- (?: \[|\()? # Opening bracket or paren for the timestamp
- (?: # Timestamp
- (?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
- (?:\d{1,4})
- [T ])? # Date/time separator: T or space
- (?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
- (?: \d?\d)
- )
- (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
- |
- # weechat
- \d{4}\s\w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- |
- # xchat
- \w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- )?
- """
- tokens = {
- 'root': [
- # log start/end
- (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
- # hack
- ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
- # normal msgs
- ("^" + timestamp + r"""
- (\s*<.*?>\s*) # Nick """,
- bygroups(Comment.Preproc, Name.Tag), 'msg'),
- # /me msgs
- ("^" + timestamp + r"""
- (\s*[*]\s+) # Star
- (\S+\s+.*?\n) # Nick + rest of message """,
- bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
- # join/part msgs
- ("^" + timestamp + r"""
- (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
- (\S+\s+) # Nick + Space
- (.*?\n) # Rest of message """,
- bygroups(Comment.Preproc, Keyword, String, Comment)),
- (r"^.*?\n", Text),
- ],
- 'msg': [
- (r"\S+:(?!//)", Name.Attribute), # Prefix
- (r".*\n", Text, '#pop'),
- ],
- }
-
-
-class GettextLexer(RegexLexer):
- """
- Lexer for Gettext catalog files.
-
- .. versionadded:: 0.9
- """
- name = 'Gettext Catalog'
- aliases = ['pot', 'po']
- filenames = ['*.pot', '*.po']
- mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
-
- tokens = {
- 'root': [
- (r'^#,\s.*?$', Keyword.Type),
- (r'^#:\s.*?$', Keyword.Declaration),
- # (r'^#$', Comment),
- (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
- (r'^(")([A-Za-z-]+:)(.*")$',
- bygroups(String, Name.Property, String)),
- (r'^".*"$', String),
- (r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
- bygroups(Name.Variable, Text, String)),
- (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
- bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
- ]
- }
-
-
-class HttpLexer(RegexLexer):
- """
- Lexer for HTTP sessions.
-
- .. versionadded:: 1.5
- """
-
- name = 'HTTP'
- aliases = ['http']
-
- flags = re.DOTALL
-
+
+
+class IrcLogsLexer(RegexLexer):
+ """
+ Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
+ """
+
+ name = 'IRC logs'
+ aliases = ['irc']
+ filenames = ['*.weechatlog']
+ mimetypes = ['text/x-irclog']
+
+ flags = re.VERBOSE | re.MULTILINE
+ timestamp = r"""
+ (
+ # irssi / xchat and others
+ (?: \[|\()? # Opening bracket or paren for the timestamp
+ (?: # Timestamp
+ (?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
+ (?:\d{1,4})
+ [T ])? # Date/time separator: T or space
+ (?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
+ (?: \d?\d)
+ )
+ (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
+ |
+ # weechat
+ \d{4}\s\w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ |
+ # xchat
+ \w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ )?
+ """
+ tokens = {
+ 'root': [
+ # log start/end
+ (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
+ # hack
+ ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
+ # normal msgs
+ ("^" + timestamp + r"""
+ (\s*<.*?>\s*) # Nick """,
+ bygroups(Comment.Preproc, Name.Tag), 'msg'),
+ # /me msgs
+ ("^" + timestamp + r"""
+ (\s*[*]\s+) # Star
+ (\S+\s+.*?\n) # Nick + rest of message """,
+ bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
+ # join/part msgs
+ ("^" + timestamp + r"""
+ (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
+ (\S+\s+) # Nick + Space
+ (.*?\n) # Rest of message """,
+ bygroups(Comment.Preproc, Keyword, String, Comment)),
+ (r"^.*?\n", Text),
+ ],
+ 'msg': [
+ (r"\S+:(?!//)", Name.Attribute), # Prefix
+ (r".*\n", Text, '#pop'),
+ ],
+ }
+
+
+class GettextLexer(RegexLexer):
+ """
+ Lexer for Gettext catalog files.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Gettext Catalog'
+ aliases = ['pot', 'po']
+ filenames = ['*.pot', '*.po']
+ mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
+
+ tokens = {
+ 'root': [
+ (r'^#,\s.*?$', Keyword.Type),
+ (r'^#:\s.*?$', Keyword.Declaration),
+ # (r'^#$', Comment),
+ (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
+ (r'^(")([A-Za-z-]+:)(.*")$',
+ bygroups(String, Name.Property, String)),
+ (r'^".*"$', String),
+ (r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
+ bygroups(Name.Variable, Text, String)),
+ (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
+ bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
+ ]
+ }
+
+
+class HttpLexer(RegexLexer):
+ """
+ Lexer for HTTP sessions.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'HTTP'
+ aliases = ['http']
+
+ flags = re.DOTALL
+
def get_tokens_unprocessed(self, text, stack=('root',)):
"""Reset the content-type state."""
self.content_type = None
return RegexLexer.get_tokens_unprocessed(self, text, stack)
- def header_callback(self, match):
- if match.group(1).lower() == 'content-type':
- content_type = match.group(5).strip()
- if ';' in content_type:
- content_type = content_type[:content_type.find(';')].strip()
- self.content_type = content_type
- yield match.start(1), Name.Attribute, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator, match.group(3)
- yield match.start(4), Text, match.group(4)
- yield match.start(5), Literal, match.group(5)
- yield match.start(6), Text, match.group(6)
-
- def continuous_header_callback(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Literal, match.group(2)
- yield match.start(3), Text, match.group(3)
-
- def content_callback(self, match):
- content_type = getattr(self, 'content_type', None)
- content = match.group()
- offset = match.start()
- if content_type:
- from pygments.lexers import get_lexer_for_mimetype
- possible_lexer_mimetypes = [content_type]
- if '+' in content_type:
- # application/calendar+xml can be treated as application/xml
- # if there's not a better match.
- general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
- content_type)
- possible_lexer_mimetypes.append(general_type)
-
- for i in possible_lexer_mimetypes:
- try:
- lexer = get_lexer_for_mimetype(i)
- except ClassNotFound:
- pass
- else:
- for idx, token, value in lexer.get_tokens_unprocessed(content):
- yield offset + idx, token, value
- return
- yield offset, Text, content
-
- tokens = {
- 'root': [
- (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
- r'(HTTP)(/)(1\.[01])(\r?\n|\Z)',
- bygroups(Name.Function, Text, Name.Namespace, Text,
- Keyword.Reserved, Operator, Number, Text),
- 'headers'),
- (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)',
- bygroups(Keyword.Reserved, Operator, Number, Text, Number,
- Text, Name.Exception, Text),
- 'headers'),
- ],
- 'headers': [
- (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)', header_callback),
- (r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
- (r'\r?\n', Text, 'content')
- ],
- 'content': [
- (r'.+', content_callback)
- ]
- }
-
- def analyse_text(text):
- return text.startswith(('GET /', 'POST /', 'PUT /', 'DELETE /', 'HEAD /',
- 'OPTIONS /', 'TRACE /', 'PATCH /'))
-
-
-class TodotxtLexer(RegexLexer):
- """
- Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
-
- .. versionadded:: 2.0
- """
-
- name = 'Todotxt'
- aliases = ['todotxt']
- # *.todotxt is not a standard extension for Todo.txt files; including it
- # makes testing easier, and also makes autodetecting file type easier.
- filenames = ['todo.txt', '*.todotxt']
- mimetypes = ['text/x-todo']
-
- # Aliases mapping standard token types of Todo.txt format concepts
- CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
- IncompleteTaskText = Text # Incomplete tasks should look like plain text
-
- # Priority should have most emphasis to indicate importance of tasks
- Priority = Generic.Heading
- # Dates should have next most emphasis because time is important
- Date = Generic.Subheading
-
- # Project and context should have equal weight, and be in different colors
- Project = Generic.Error
- Context = String
-
- # If tag functionality is added, it should have the same weight as Project
- # and Context, and a different color. Generic.Traceback would work well.
-
- # Regex patterns for building up rules; dates, priorities, projects, and
- # contexts are all atomic
- # TODO: Make date regex more ISO 8601 compliant
- date_regex = r'\d{4,}-\d{2}-\d{2}'
- priority_regex = r'\([A-Z]\)'
- project_regex = r'\+\S+'
- context_regex = r'@\S+'
-
- # Compound regex expressions
- complete_one_date_regex = r'(x )(' + date_regex + r')'
- complete_two_date_regex = (complete_one_date_regex + r'( )(' +
- date_regex + r')')
- priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
-
- tokens = {
- # Should parse starting at beginning of line; each line is a task
- 'root': [
- # Complete task entry points: two total:
- # 1. Complete task with two dates
- (complete_two_date_regex, bygroups(CompleteTaskText, Date,
- CompleteTaskText, Date),
- 'complete'),
- # 2. Complete task with one date
- (complete_one_date_regex, bygroups(CompleteTaskText, Date),
- 'complete'),
-
- # Incomplete task entry points: six total:
- # 1. Priority plus date
- (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
- 'incomplete'),
- # 2. Priority only
- (priority_regex, Priority, 'incomplete'),
- # 3. Leading date
- (date_regex, Date, 'incomplete'),
- # 4. Leading context
- (context_regex, Context, 'incomplete'),
- # 5. Leading project
- (project_regex, Project, 'incomplete'),
- # 6. Non-whitespace catch-all
+ def header_callback(self, match):
+ if match.group(1).lower() == 'content-type':
+ content_type = match.group(5).strip()
+ if ';' in content_type:
+ content_type = content_type[:content_type.find(';')].strip()
+ self.content_type = content_type
+ yield match.start(1), Name.Attribute, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator, match.group(3)
+ yield match.start(4), Text, match.group(4)
+ yield match.start(5), Literal, match.group(5)
+ yield match.start(6), Text, match.group(6)
+
+ def continuous_header_callback(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Literal, match.group(2)
+ yield match.start(3), Text, match.group(3)
+
+ def content_callback(self, match):
+ content_type = getattr(self, 'content_type', None)
+ content = match.group()
+ offset = match.start()
+ if content_type:
+ from pygments.lexers import get_lexer_for_mimetype
+ possible_lexer_mimetypes = [content_type]
+ if '+' in content_type:
+ # application/calendar+xml can be treated as application/xml
+ # if there's not a better match.
+ general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
+ content_type)
+ possible_lexer_mimetypes.append(general_type)
+
+ for i in possible_lexer_mimetypes:
+ try:
+ lexer = get_lexer_for_mimetype(i)
+ except ClassNotFound:
+ pass
+ else:
+ for idx, token, value in lexer.get_tokens_unprocessed(content):
+ yield offset + idx, token, value
+ return
+ yield offset, Text, content
+
+ tokens = {
+ 'root': [
+ (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
+ r'(HTTP)(/)(1\.[01])(\r?\n|\Z)',
+ bygroups(Name.Function, Text, Name.Namespace, Text,
+ Keyword.Reserved, Operator, Number, Text),
+ 'headers'),
+ (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)',
+ bygroups(Keyword.Reserved, Operator, Number, Text, Number,
+ Text, Name.Exception, Text),
+ 'headers'),
+ ],
+ 'headers': [
+ (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)', header_callback),
+ (r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
+ (r'\r?\n', Text, 'content')
+ ],
+ 'content': [
+ (r'.+', content_callback)
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith(('GET /', 'POST /', 'PUT /', 'DELETE /', 'HEAD /',
+ 'OPTIONS /', 'TRACE /', 'PATCH /'))
+
+
+class TodotxtLexer(RegexLexer):
+ """
+ Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Todotxt'
+ aliases = ['todotxt']
+ # *.todotxt is not a standard extension for Todo.txt files; including it
+ # makes testing easier, and also makes autodetecting file type easier.
+ filenames = ['todo.txt', '*.todotxt']
+ mimetypes = ['text/x-todo']
+
+ # Aliases mapping standard token types of Todo.txt format concepts
+ CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
+ IncompleteTaskText = Text # Incomplete tasks should look like plain text
+
+ # Priority should have most emphasis to indicate importance of tasks
+ Priority = Generic.Heading
+ # Dates should have next most emphasis because time is important
+ Date = Generic.Subheading
+
+ # Project and context should have equal weight, and be in different colors
+ Project = Generic.Error
+ Context = String
+
+ # If tag functionality is added, it should have the same weight as Project
+ # and Context, and a different color. Generic.Traceback would work well.
+
+ # Regex patterns for building up rules; dates, priorities, projects, and
+ # contexts are all atomic
+ # TODO: Make date regex more ISO 8601 compliant
+ date_regex = r'\d{4,}-\d{2}-\d{2}'
+ priority_regex = r'\([A-Z]\)'
+ project_regex = r'\+\S+'
+ context_regex = r'@\S+'
+
+ # Compound regex expressions
+ complete_one_date_regex = r'(x )(' + date_regex + r')'
+ complete_two_date_regex = (complete_one_date_regex + r'( )(' +
+ date_regex + r')')
+ priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
+
+ tokens = {
+ # Should parse starting at beginning of line; each line is a task
+ 'root': [
+ # Complete task entry points: two total:
+ # 1. Complete task with two dates
+ (complete_two_date_regex, bygroups(CompleteTaskText, Date,
+ CompleteTaskText, Date),
+ 'complete'),
+ # 2. Complete task with one date
+ (complete_one_date_regex, bygroups(CompleteTaskText, Date),
+ 'complete'),
+
+ # Incomplete task entry points: six total:
+ # 1. Priority plus date
+ (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
+ 'incomplete'),
+ # 2. Priority only
+ (priority_regex, Priority, 'incomplete'),
+ # 3. Leading date
+ (date_regex, Date, 'incomplete'),
+ # 4. Leading context
+ (context_regex, Context, 'incomplete'),
+ # 5. Leading project
+ (project_regex, Project, 'incomplete'),
+ # 6. Non-whitespace catch-all
(r'\S+', IncompleteTaskText, 'incomplete'),
- ],
-
- # Parse a complete task
- 'complete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', CompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
+ ],
+
+ # Parse a complete task
+ 'complete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', CompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
(r'\S+', CompleteTaskText),
- # Tokenize whitespace not containing a newline
+ # Tokenize whitespace not containing a newline
(r'\s+', CompleteTaskText),
- ],
-
- # Parse an incomplete task
- 'incomplete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', IncompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
+ ],
+
+ # Parse an incomplete task
+ 'incomplete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', IncompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
(r'\S+', IncompleteTaskText),
- # Tokenize whitespace not containing a newline
+ # Tokenize whitespace not containing a newline
(r'\s+', IncompleteTaskText),
- ],
- }
+ ],
+ }
class NotmuchLexer(RegexLexer):
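The content_callback delegation in HttpLexer above can be exercised with a short session. This is only a sketch and assumes a stock Pygments install where application/json is registered to the JSON lexer, so the body after the blank line is re-lexed with that lexer while the status line and headers use the rules defined here.

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.textfmts import HttpLexer

# The Content-Type header sets self.content_type via header_callback;
# content_callback then looks up a lexer for that MIME type and re-lexes
# the body with it.
session = (
    "HTTP/1.1 200 OK\r\n"
    "Content-Type: application/json\r\n"
    "\r\n"
    '{"status": "ok"}\n'
)
print(highlight(session, HttpLexer(), TerminalFormatter()))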
diff --git a/contrib/python/Pygments/py2/pygments/lexers/theorem.py b/contrib/python/Pygments/py2/pygments/lexers/theorem.py
index a26a173f95..b7a2edc4e4 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/theorem.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/theorem.py
@@ -1,393 +1,393 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.theorem
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for theorem-proving languages.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.theorem
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for theorem-proving languages.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
-
-
-class CoqLexer(RegexLexer):
- """
- For the `Coq <http://coq.inria.fr/>`_ theorem prover.
-
- .. versionadded:: 1.5
- """
-
- name = 'Coq'
- aliases = ['coq']
- filenames = ['*.v']
- mimetypes = ['text/x-coq']
-
- keywords1 = (
- # Vernacular commands
- 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
- 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
- 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
- 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
- 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
- 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
- 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
- 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
- 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
- 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
- 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
- 'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
- 'Universe', 'Polymorphic', 'Monomorphic', 'Context'
- )
- keywords2 = (
- # Gallina
- 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
- 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
- 'for', 'of', 'nosimpl', 'with', 'as',
- )
- keywords3 = (
- # Sorts
- 'Type', 'Prop',
- )
- keywords4 = (
- # Tactics
- 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
- 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
- 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
- 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
- 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
- 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
- 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
- 'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
- 'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
- 'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
- 'native_compute', 'subst',
- )
- keywords5 = (
- # Terminators
- 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
- 'assumption', 'solve', 'contradiction', 'discriminate',
- 'congruence',
- )
- keywords6 = (
- # Control
- 'do', 'last', 'first', 'try', 'idtac', 'repeat',
- )
- # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- # 'downto', 'else', 'end', 'exception', 'external', 'false',
- # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- # 'type', 'val', 'virtual', 'when', 'while', 'with'
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
- '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
- '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
- r'/\\', r'\\/', r'\{\|', r'\|\}',
- u'Π', u'λ',
- )
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\(\*', Comment, 'comment'),
- (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
- (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
- # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^"]+', String.Double),
- (r'""', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z][a-z0-9_\']*', Name, '#pop'),
- default('#pop')
- ],
- }
-
- def analyse_text(text):
- if text.startswith('(*'):
- return True
-
-
-class IsabelleLexer(RegexLexer):
- """
- For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
-
- .. versionadded:: 2.0
- """
-
- name = 'Isabelle'
- aliases = ['isabelle']
- filenames = ['*.thy']
- mimetypes = ['text/x-isabelle']
-
- keyword_minor = (
- 'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
- 'class_instance', 'class_relation', 'code_module', 'congs',
- 'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
- 'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
- 'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
- 'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
- 'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
- 'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
- 'type_constructor', 'unchecked', 'unsafe', 'where',
- )
-
- keyword_diag = (
- 'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
- 'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
- 'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
- 'print_abbrevs', 'print_antiquotations', 'print_attributes',
- 'print_binds', 'print_bnfs', 'print_bundles',
- 'print_case_translations', 'print_cases', 'print_claset',
- 'print_classes', 'print_codeproc', 'print_codesetup',
- 'print_coercions', 'print_commands', 'print_context',
- 'print_defn_rules', 'print_dependencies', 'print_facts',
- 'print_induct_rules', 'print_inductives', 'print_interps',
- 'print_locale', 'print_locales', 'print_methods', 'print_options',
- 'print_orders', 'print_quot_maps', 'print_quotconsts',
- 'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
- 'print_rules', 'print_simpset', 'print_state', 'print_statement',
- 'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
- 'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
- 'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
- 'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
- 'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
- )
-
- keyword_thy = ('theory', 'begin', 'end')
-
- keyword_section = ('header', 'chapter')
-
- keyword_subsection = (
- 'section', 'subsection', 'subsubsection', 'sect', 'subsect',
- 'subsubsect',
- )
-
- keyword_theory_decl = (
- 'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
- 'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
- 'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
- 'code_abort', 'code_class', 'code_const', 'code_datatype',
- 'code_identifier', 'code_include', 'code_instance', 'code_modulename',
- 'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
- 'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
- 'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
- 'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
- 'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
- 'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
- 'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
- 'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
- 'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
- 'lifting_forget', 'lifting_update', 'local_setup', 'locale',
- 'method_setup', 'nitpick_params', 'no_adhoc_overloading',
- 'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
- 'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
- 'overloading', 'parse_ast_translation', 'parse_translation',
- 'partial_function', 'primcorec', 'primrec', 'primrec_new',
- 'print_ast_translation', 'print_translation', 'quickcheck_generator',
- 'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
- 'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
- 'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
- 'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
- 'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
- 'text_raw', 'theorems', 'translations', 'type_notation',
- 'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
- 'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
- 'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
- 'bnf_axiomatization', 'cartouche', 'datatype_compat',
- 'free_constructors', 'functor', 'nominal_function',
- 'nominal_termination', 'permanent_interpretation',
- 'binds', 'defining', 'smt2_status', 'term_cartouche',
- 'boogie_file', 'text_cartouche',
- )
-
- keyword_theory_script = ('inductive_cases', 'inductive_simps')
-
- keyword_theory_goal = (
- 'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
- 'crunch', 'crunch_ignore',
- 'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
- 'lift_definition', 'nominal_inductive', 'nominal_inductive2',
- 'nominal_primrec', 'pcpodef', 'primcorecursive',
- 'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
- 'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
- 'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
- 'theorem', 'typedef', 'wrap_free_constructors',
- )
-
- keyword_qed = ('by', 'done', 'qed')
- keyword_abandon_proof = ('sorry', 'oops')
-
- keyword_proof_goal = ('have', 'hence', 'interpret')
-
- keyword_proof_block = ('next', 'proof')
-
- keyword_proof_chain = (
- 'finally', 'from', 'then', 'ultimately', 'with',
- )
-
- keyword_proof_decl = (
- 'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
- 'txt', 'txt_raw', 'unfolding', 'using', 'write',
- )
-
- keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
-
- keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
-
- keyword_proof_script = (
- 'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
- )
-
- operators = (
- '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
- '+', '-', '!', '?',
- )
-
- proof_operators = ('{', '}', '.', '..')
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'\(\*', Comment, 'comment'),
- (r'\{\*', Comment, 'text'),
-
- (words(operators), Operator),
- (words(proof_operators), Operator.Word),
-
- (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
-
- (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
- (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
-
- (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
-
- (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
-
- (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
-
- (r'\\<\w*>', Text.Symbol),
-
- (r"[^\W\d][.\w']*", Name),
- (r"\?[^\W\d][.\w']*", Name),
- (r"'[^\W\d][.\w']*", Name.Type),
-
- (r'\d[\d_]*', Name), # display numbers as name
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
-
- (r'"', String, 'string'),
- (r'`', String.Other, 'fact'),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'text': [
- (r'[^*}]+', Comment),
- (r'\*\}', Comment, '#pop'),
- (r'\*', Comment),
- (r'\}', Comment),
- ],
- 'string': [
- (r'[^"\\]+', String),
- (r'\\<\w*>', String.Symbol),
- (r'\\"', String),
- (r'\\', String),
- (r'"', String, '#pop'),
- ],
- 'fact': [
- (r'[^`\\]+', String.Other),
- (r'\\<\w*>', String.Symbol),
- (r'\\`', String.Other),
- (r'\\', String.Other),
- (r'`', String.Other, '#pop'),
- ],
- }
-
-
-class LeanLexer(RegexLexer):
- """
- For the `Lean <https://github.com/leanprover/lean>`_
- theorem prover.
-
- .. versionadded:: 2.0
- """
- name = 'Lean'
- aliases = ['lean']
- filenames = ['*.lean']
- mimetypes = ['text/x-lean']
-
- flags = re.MULTILINE | re.UNICODE
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
+
+
+class CoqLexer(RegexLexer):
+ """
+ For the `Coq <http://coq.inria.fr/>`_ theorem prover.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Coq'
+ aliases = ['coq']
+ filenames = ['*.v']
+ mimetypes = ['text/x-coq']
+
+ keywords1 = (
+ # Vernacular commands
+ 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
+ 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
+ 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
+ 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
+ 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
+ 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
+ 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
+ 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
+ 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
+ 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
+ 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
+ 'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
+ 'Universe', 'Polymorphic', 'Monomorphic', 'Context'
+ )
+ keywords2 = (
+ # Gallina
+ 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
+ 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
+ 'for', 'of', 'nosimpl', 'with', 'as',
+ )
+ keywords3 = (
+ # Sorts
+ 'Type', 'Prop',
+ )
+ keywords4 = (
+ # Tactics
+ 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
+ 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
+ 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
+ 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
+ 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
+ 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
+ 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
+ 'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
+ 'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
+ 'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
+ 'native_compute', 'subst',
+ )
+ keywords5 = (
+ # Terminators
+ 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
+ 'assumption', 'solve', 'contradiction', 'discriminate',
+ 'congruence',
+ )
+ keywords6 = (
+ # Control
+ 'do', 'last', 'first', 'try', 'idtac', 'repeat',
+ )
+ # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ # 'downto', 'else', 'end', 'exception', 'external', 'false',
+ # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ # 'type', 'val', 'virtual', 'when', 'while', 'with'
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
+ '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
+ '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
+ r'/\\', r'\\/', r'\{\|', r'\|\}',
+ u'Π', u'λ',
+ )
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\(\*', Comment, 'comment'),
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+ (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'\d[\d_]*', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z][a-z0-9_\']*', Name, '#pop'),
+ default('#pop')
+ ],
+ }
+
+ def analyse_text(text):
+ if text.startswith('(*'):
+ return True
+
+
+class IsabelleLexer(RegexLexer):
+ """
+ For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Isabelle'
+ aliases = ['isabelle']
+ filenames = ['*.thy']
+ mimetypes = ['text/x-isabelle']
+
+ keyword_minor = (
+ 'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
+ 'class_instance', 'class_relation', 'code_module', 'congs',
+ 'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
+ 'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
+ 'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
+ 'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
+ 'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
+ 'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
+ 'type_constructor', 'unchecked', 'unsafe', 'where',
+ )
+
+ keyword_diag = (
+ 'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
+ 'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
+ 'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
+ 'print_abbrevs', 'print_antiquotations', 'print_attributes',
+ 'print_binds', 'print_bnfs', 'print_bundles',
+ 'print_case_translations', 'print_cases', 'print_claset',
+ 'print_classes', 'print_codeproc', 'print_codesetup',
+ 'print_coercions', 'print_commands', 'print_context',
+ 'print_defn_rules', 'print_dependencies', 'print_facts',
+ 'print_induct_rules', 'print_inductives', 'print_interps',
+ 'print_locale', 'print_locales', 'print_methods', 'print_options',
+ 'print_orders', 'print_quot_maps', 'print_quotconsts',
+ 'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
+ 'print_rules', 'print_simpset', 'print_state', 'print_statement',
+ 'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
+ 'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
+ 'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
+ 'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
+ 'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
+ )
+
+ keyword_thy = ('theory', 'begin', 'end')
+
+ keyword_section = ('header', 'chapter')
+
+ keyword_subsection = (
+ 'section', 'subsection', 'subsubsection', 'sect', 'subsect',
+ 'subsubsect',
+ )
+
+ keyword_theory_decl = (
+ 'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
+ 'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
+ 'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
+ 'code_abort', 'code_class', 'code_const', 'code_datatype',
+ 'code_identifier', 'code_include', 'code_instance', 'code_modulename',
+ 'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
+ 'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
+ 'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
+ 'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
+ 'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
+ 'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
+ 'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
+ 'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
+ 'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
+ 'lifting_forget', 'lifting_update', 'local_setup', 'locale',
+ 'method_setup', 'nitpick_params', 'no_adhoc_overloading',
+ 'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
+ 'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
+ 'overloading', 'parse_ast_translation', 'parse_translation',
+ 'partial_function', 'primcorec', 'primrec', 'primrec_new',
+ 'print_ast_translation', 'print_translation', 'quickcheck_generator',
+ 'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
+ 'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
+ 'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
+ 'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
+ 'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
+ 'text_raw', 'theorems', 'translations', 'type_notation',
+ 'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
+ 'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
+ 'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
+ 'bnf_axiomatization', 'cartouche', 'datatype_compat',
+ 'free_constructors', 'functor', 'nominal_function',
+ 'nominal_termination', 'permanent_interpretation',
+ 'binds', 'defining', 'smt2_status', 'term_cartouche',
+ 'boogie_file', 'text_cartouche',
+ )
+
+ keyword_theory_script = ('inductive_cases', 'inductive_simps')
+
+ keyword_theory_goal = (
+ 'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
+ 'crunch', 'crunch_ignore',
+ 'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
+ 'lift_definition', 'nominal_inductive', 'nominal_inductive2',
+ 'nominal_primrec', 'pcpodef', 'primcorecursive',
+ 'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
+ 'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
+ 'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
+ 'theorem', 'typedef', 'wrap_free_constructors',
+ )
+
+ keyword_qed = ('by', 'done', 'qed')
+ keyword_abandon_proof = ('sorry', 'oops')
+
+ keyword_proof_goal = ('have', 'hence', 'interpret')
+
+ keyword_proof_block = ('next', 'proof')
+
+ keyword_proof_chain = (
+ 'finally', 'from', 'then', 'ultimately', 'with',
+ )
+
+ keyword_proof_decl = (
+ 'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
+ 'txt', 'txt_raw', 'unfolding', 'using', 'write',
+ )
+
+ keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
+
+ keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
+
+ keyword_proof_script = (
+ 'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
+ )
+
+ operators = (
+ '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
+ '+', '-', '!', '?',
+ )
+
+ proof_operators = ('{', '}', '.', '..')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'\(\*', Comment, 'comment'),
+ (r'\{\*', Comment, 'text'),
+
+ (words(operators), Operator),
+ (words(proof_operators), Operator.Word),
+
+ (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
+ (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
+
+ (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+
+ (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
+
+ (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (r'\\<\w*>', Text.Symbol),
+
+ (r"[^\W\d][.\w']*", Name),
+ (r"\?[^\W\d][.\w']*", Name),
+ (r"'[^\W\d][.\w']*", Name.Type),
+
+ (r'\d[\d_]*', Name), # display numbers as name
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+
+ (r'"', String, 'string'),
+ (r'`', String.Other, 'fact'),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'text': [
+ (r'[^*}]+', Comment),
+ (r'\*\}', Comment, '#pop'),
+ (r'\*', Comment),
+ (r'\}', Comment),
+ ],
+ 'string': [
+ (r'[^"\\]+', String),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\"', String),
+ (r'\\', String),
+ (r'"', String, '#pop'),
+ ],
+ 'fact': [
+ (r'[^`\\]+', String.Other),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\`', String.Other),
+ (r'\\', String.Other),
+ (r'`', String.Other, '#pop'),
+ ],
+ }
+
+
+class LeanLexer(RegexLexer):
+ """
+ For the `Lean <https://github.com/leanprover/lean>`_
+ theorem prover.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Lean'
+ aliases = ['lean']
+ filenames = ['*.lean']
+ mimetypes = ['text/x-lean']
+
+ flags = re.MULTILINE | re.UNICODE
+
keywords1 = (
'import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition',
'renaming', 'inline', 'hiding', 'exposing', 'parameter', 'parameters',
@@ -399,58 +399,58 @@ class LeanLexer(RegexLexer):
'including', 'instance', 'section', 'context', 'protected', 'expose',
'export', 'set_option', 'add_rewrite', 'extends', 'open', 'example',
'constant', 'constants', 'print', 'opaque', 'reducible', 'irreducible',
- )
-
- keywords2 = (
+ )
+
+ keywords2 = (
'forall', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume',
'take', 'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin',
'proof', 'qed', 'calc', 'match',
- )
-
- keywords3 = (
- # Sorts
- 'Type', 'Prop',
- )
-
- operators = (
+ )
+
+ keywords3 = (
+ # Sorts
+ 'Type', 'Prop',
+ )
+
+ operators = (
u'!=', u'#', u'&', u'&&', u'*', u'+', u'-', u'/', u'@', u'!', u'`',
u'-.', u'->', u'.', u'..', u'...', u'::', u':>', u';', u';;', u'<',
u'<-', u'=', u'==', u'>', u'_', u'|', u'||', u'~', u'=>', u'<=', u'>=',
u'/\\', u'\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞',
u'⌟', u'≡', u'⟨', u'⟩',
- )
-
+ )
+
punctuation = (u'(', u')', u':', u'{', u'}', u'[', u']', u'⦃', u'⦄',
u':=', u',')
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'/-', Comment, 'comment'),
- (r'--.*?$', Comment.Single),
- (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words(operators), Name.Builtin.Pseudo),
- (words(punctuation), Operator),
- (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
- u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
- u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name),
- (r'\d+', Number.Integer),
- (r'"', String.Double, 'string'),
- (r'[~?][a-z][\w\']*:', Name.Variable)
- ],
- 'comment': [
- # Multiline Comments
- (r'[^/-]', Comment.Multiline),
- (r'/-', Comment.Multiline, '#push'),
- (r'-/', Comment.Multiline, '#pop'),
- (r'[/-]', Comment.Multiline)
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- (r'\\[n"\\]', String.Escape),
- ('"', String.Double, '#pop'),
- ],
- }
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'/-', Comment, 'comment'),
+ (r'--.*?$', Comment.Single),
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(operators), Name.Builtin.Pseudo),
+ (words(punctuation), Operator),
+ (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
+ u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
+ u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name),
+ (r'\d+', Number.Integer),
+ (r'"', String.Double, 'string'),
+ (r'[~?][a-z][\w\']*:', Name.Variable)
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/-]', Comment.Multiline),
+ (r'/-', Comment.Multiline, '#push'),
+ (r'-/', Comment.Multiline, '#pop'),
+ (r'[/-]', Comment.Multiline)
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ (r'\\[n"\\]', String.Escape),
+ ('"', String.Double, '#pop'),
+ ],
+ }
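To see how the CoqLexer keyword groups above map to token types, one can tokenize a small proof and keep only Keyword subtypes; per the rules shown here, vernacular commands (keywords1) become Keyword.Namespace and terminators (keywords5) become Keyword.Pseudo. A minimal sketch against the lexer as defined above:

from pygments.lexers.theorem import CoqLexer
from pygments.token import Keyword

code = (
    "Theorem plus_O_n : forall n : nat, 0 + n = n.\n"
    "Proof. intros n. reflexivity. Qed.\n"
)
for token, value in CoqLexer().get_tokens(code):
    if token in Keyword:
        # 'Theorem', 'Proof', 'Qed' -> Keyword.Namespace (keywords1);
        # 'forall' -> Keyword (keywords2); 'intros' -> Keyword (keywords4);
        # 'reflexivity' -> Keyword.Pseudo (keywords5).
        print(token, value)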
diff --git a/contrib/python/Pygments/py2/pygments/lexers/trafficscript.py b/contrib/python/Pygments/py2/pygments/lexers/trafficscript.py
index 9b767253d9..9397bce1c5 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/trafficscript.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/trafficscript.py
@@ -1,54 +1,54 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.trafficscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for RiverBed's TrafficScript (RTS) language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.trafficscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for RiverBed's TrafficScript (RTS) language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer
-from pygments.token import String, Number, Name, Keyword, Operator, Text, Comment
-
-__all__ = ['RtsLexer']
-
-
-class RtsLexer(RegexLexer):
- """
- For `Riverbed Stingray Traffic Manager <http://www.riverbed.com/stingray>`_
-
- .. versionadded:: 2.1
- """
- name = 'TrafficScript'
- aliases = ['rts','trafficscript']
- filenames = ['*.rts']
-
- tokens = {
- 'root' : [
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"', String, 'escapable-string'),
- (r'(0x[0-9a-fA-F]+|\d+)', Number),
- (r'\d+\.\d+', Number.Float),
- (r'\$[a-zA-Z](\w|_)*', Name.Variable),
- (r'(if|else|for(each)?|in|while|do|break|sub|return|import)', Keyword),
- (r'[a-zA-Z][\w.]*', Name.Function),
- (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator),
- (r'(>=|<=|==|!=|'
- r'&&|\|\||'
- r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|'
- r'>>|<<|'
- r'\+\+|--|=>)', Operator),
- (r'[ \t\r]+', Text),
- (r'#[^\n]*', Comment),
- ],
- 'escapable-string' : [
- (r'\\[tsn]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer
+from pygments.token import String, Number, Name, Keyword, Operator, Text, Comment
+
+__all__ = ['RtsLexer']
+
+
+class RtsLexer(RegexLexer):
+ """
+ For `Riverbed Stingray Traffic Manager <http://www.riverbed.com/stingray>`_
+
+ .. versionadded:: 2.1
+ """
+ name = 'TrafficScript'
+ aliases = ['rts','trafficscript']
+ filenames = ['*.rts']
+
+ tokens = {
+ 'root' : [
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"', String, 'escapable-string'),
+ (r'(0x[0-9a-fA-F]+|\d+)', Number),
+ (r'\d+\.\d+', Number.Float),
+ (r'\$[a-zA-Z](\w|_)*', Name.Variable),
+ (r'(if|else|for(each)?|in|while|do|break|sub|return|import)', Keyword),
+ (r'[a-zA-Z][\w.]*', Name.Function),
+ (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator),
+ (r'(>=|<=|==|!=|'
+ r'&&|\|\||'
+ r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|'
+ r'>>|<<|'
+ r'\+\+|--|=>)', Operator),
+ (r'[ \t\r]+', Text),
+ (r'#[^\n]*', Comment),
+ ],
+ 'escapable-string' : [
+ (r'\\[tsn]', String.Escape),
+ (r'[^"]', String),
+ (r'"', String, '#pop'),
+ ],
+
+ }
diff --git a/contrib/python/Pygments/py2/pygments/lexers/urbi.py b/contrib/python/Pygments/py2/pygments/lexers/urbi.py
index 72349cbd9b..ae0b654734 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/urbi.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/urbi.py
@@ -1,133 +1,133 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.urbi
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for UrbiScript language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.urbi
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for UrbiScript language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['UrbiscriptLexer']
-
-
-class UrbiscriptLexer(ExtendedRegexLexer):
- """
- For UrbiScript source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'UrbiScript'
- aliases = ['urbiscript']
- filenames = ['*.u']
- mimetypes = ['application/x-urbiscript']
-
- flags = re.DOTALL
-
- # TODO
- # - handle Experimental and deprecated tags with specific tokens
- # - handle Angles and Durations with specific tokens
-
- def blob_callback(lexer, match, ctx):
- text_before_blob = match.group(1)
- blob_start = match.group(2)
- blob_size_str = match.group(3)
- blob_size = int(blob_size_str)
- yield match.start(), String, text_before_blob
- ctx.pos += len(text_before_blob)
-
- # if blob size doesn't match blob format (example : "\B(2)(aaa)")
- # yield blob as a string
- if ctx.text[match.end() + blob_size] != ")":
- result = "\\B(" + blob_size_str + ")("
- yield match.start(), String, result
- ctx.pos += len(result)
- return
-
-        # if blob is well formatted, yield as Escape
- blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
- yield match.start(), String.Escape, blob_text
- ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # comments
- (r'//.*?\n', Comment),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
- (words((
- 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
- 'continue', 'default', 'else', 'enum', 'every', 'external',
- 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
- 'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
- 'waituntil', 'whenever', 'while'), suffix=r'\b'),
- Keyword),
- (words((
- 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
- 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
- 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
- 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
- 'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
- 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
- Keyword.Reserved),
-            # deprecated keywords, use a meaningful token when available
- (r'(emit|foreach|internal|loopn|static)\b', Keyword),
-            # ignored keywords, use a meaningful token when available
- (r'(private|protected|public)\b', Keyword),
- (r'(var|do|const|function|class)\b', Keyword.Declaration),
- (r'(true|false|nil|void)\b', Keyword.Constant),
- (words((
- 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
- 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
- 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
- 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
- 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
- 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
- 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
- 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
- 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
- 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
- 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
- 'UValue', 'UVar'), suffix=r'\b'),
- Name.Builtin),
- (r'(?:this)\b', Name.Builtin.Pseudo),
- # don't match single | and &
- (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
- (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
- Operator.Word),
- (r'[{}\[\]()]+', Punctuation),
- (r'(?:;|\||,|&|\?|!)+', Punctuation),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # Float, Integer, Angle and Duration
- (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
- r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
- # handle binary blob in strings
- (r'"', String.Double, "string.double"),
- (r"'", String.Single, "string.single"),
- ],
- 'string.double': [
- (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
- (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
- ],
- 'string.single': [
- (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
- (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
- ],
- # from http://pygments.org/docs/lexerdevelopment/#changing-states
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ]
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['UrbiscriptLexer']
+
+
+class UrbiscriptLexer(ExtendedRegexLexer):
+ """
+ For UrbiScript source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'UrbiScript'
+ aliases = ['urbiscript']
+ filenames = ['*.u']
+ mimetypes = ['application/x-urbiscript']
+
+ flags = re.DOTALL
+
+ # TODO
+ # - handle Experimental and deprecated tags with specific tokens
+ # - handle Angles and Durations with specific tokens
+
+ def blob_callback(lexer, match, ctx):
+ text_before_blob = match.group(1)
+ blob_start = match.group(2)
+ blob_size_str = match.group(3)
+ blob_size = int(blob_size_str)
+ yield match.start(), String, text_before_blob
+ ctx.pos += len(text_before_blob)
+
+ # if blob size doesn't match blob format (example : "\B(2)(aaa)")
+ # yield blob as a string
+ if ctx.text[match.end() + blob_size] != ")":
+ result = "\\B(" + blob_size_str + ")("
+ yield match.start(), String, result
+ ctx.pos += len(result)
+ return
+
+        # if blob is well formatted, yield as Escape
+ blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
+ yield match.start(), String.Escape, blob_text
+ ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # comments
+ (r'//.*?\n', Comment),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
+ (words((
+ 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
+ 'continue', 'default', 'else', 'enum', 'every', 'external',
+ 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
+ 'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
+ 'waituntil', 'whenever', 'while'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
+ 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
+ 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
+ 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
+ 'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
+ 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
+ Keyword.Reserved),
+            # deprecated keywords, use a meaningful token when available
+ (r'(emit|foreach|internal|loopn|static)\b', Keyword),
+            # ignored keywords, use a meaningful token when available
+ (r'(private|protected|public)\b', Keyword),
+ (r'(var|do|const|function|class)\b', Keyword.Declaration),
+ (r'(true|false|nil|void)\b', Keyword.Constant),
+ (words((
+ 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
+ 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
+ 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
+ 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
+ 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
+ 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
+ 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
+ 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
+ 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
+ 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
+ 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
+ 'UValue', 'UVar'), suffix=r'\b'),
+ Name.Builtin),
+ (r'(?:this)\b', Name.Builtin.Pseudo),
+ # don't match single | and &
+ (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
+ (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
+ Operator.Word),
+ (r'[{}\[\]()]+', Punctuation),
+ (r'(?:;|\||,|&|\?|!)+', Punctuation),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # Float, Integer, Angle and Duration
+ (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
+ r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
+ # handle binary blob in strings
+ (r'"', String.Double, "string.double"),
+ (r"'", String.Single, "string.single"),
+ ],
+ 'string.double': [
+ (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
+ (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
+ ],
+ 'string.single': [
+ (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
+ (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
+ ],
+ # from http://pygments.org/docs/lexerdevelopment/#changing-states
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ]
+ }
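A minimal usage sketch for the UrbiscriptLexer restored above (illustrative only; it assumes the lexer is registered under the 'urbiscript' alias declared in the class, as in upstream Pygments). blob_callback treats "\B(<size>)(<bytes>)" sequences inside string literals as escapes when the byte count matches:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    # "\B(4)(abcd)" is a well-formed blob: 4 payload bytes followed by ")"
    code = 'var msg = "payload: \\B(4)(abcd)";\n'
    print(highlight(code, get_lexer_by_name('urbiscript'), TerminalFormatter()))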
diff --git a/contrib/python/Pygments/py2/pygments/lexers/web.py b/contrib/python/Pygments/py2/pygments/lexers/web.py
index 587b0cde7e..f5187a8c99 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/web.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/web.py
@@ -1,24 +1,24 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.web
- ~~~~~~~~~~~~~~~~~~~
-
- Just export previously exported lexers.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.web
+ ~~~~~~~~~~~~~~~~~~~
+
+ Just export previously exported lexers.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
- HamlLexer, ScamlLexer, JadeLexer
-from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
-from pygments.lexers.javascript import JavascriptLexer, LiveScriptLexer, \
- DartLexer, TypeScriptLexer, LassoLexer, ObjectiveJLexer, CoffeeScriptLexer
-from pygments.lexers.actionscript import ActionScriptLexer, \
- ActionScript3Lexer, MxmlLexer
-from pygments.lexers.php import PhpLexer
-from pygments.lexers.webmisc import DuelLexer, XQueryLexer, SlimLexer, QmlLexer
-from pygments.lexers.data import JsonLexer
-JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
+ HamlLexer, ScamlLexer, JadeLexer
+from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
+from pygments.lexers.javascript import JavascriptLexer, LiveScriptLexer, \
+ DartLexer, TypeScriptLexer, LassoLexer, ObjectiveJLexer, CoffeeScriptLexer
+from pygments.lexers.actionscript import ActionScriptLexer, \
+ ActionScript3Lexer, MxmlLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.webmisc import DuelLexer, XQueryLexer, SlimLexer, QmlLexer
+from pygments.lexers.data import JsonLexer
+JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
+
+__all__ = []
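The web.py shim above only re-exports lexers that now live in dedicated modules, keeping pre-2.0 import paths (and the Pygments 1.5 JSONLexer alias) working. A small illustrative sketch of what that preserves:

    from pygments.lexers.web import HtmlLexer, PhpLexer, JSONLexer
    from pygments.lexers.data import JsonLexer

    assert JSONLexer is JsonLexer    # alias kept for Pygments 1.5 compatibility
    print(HtmlLexer().name)          # the class itself is defined in pygments.lexers.html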
diff --git a/contrib/python/Pygments/py2/pygments/lexers/webmisc.py b/contrib/python/Pygments/py2/pygments/lexers/webmisc.py
index b39334bc43..8733ae4e21 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/webmisc.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/webmisc.py
@@ -1,989 +1,989 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.webmisc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for misc. web stuff.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.webmisc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc. web stuff.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- default, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-from pygments.util import unirange
-
-from pygments.lexers.css import _indentation, _starts_block
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.ruby import RubyLexer
-
-__all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
-
-
-class DuelLexer(RegexLexer):
- """
- Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
- See http://duelengine.org/.
- See http://jsonml.org/jbst/.
-
- .. versionadded:: 1.4
- """
-
- name = 'Duel'
- aliases = ['duel', 'jbst', 'jsonml+bst']
- filenames = ['*.duel', '*.jbst']
- mimetypes = ['text/x-duel', 'text/x-jbst']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#!:]?)(.*?)(%>)',
- bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
- (r'(<%\$)(.*?)(:)(.*?)(%>)',
- bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
- (r'(<%--)(.*?)(--%>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)',
- bygroups(using(HtmlLexer),
- using(JavascriptLexer), using(HtmlLexer))),
- (r'(.+?)(?=<)', using(HtmlLexer)),
- (r'.+', using(HtmlLexer)),
- ],
- }
-
-
-class XQueryLexer(ExtendedRegexLexer):
- """
- An XQuery lexer, parsing a stream and outputting the tokens needed to
- highlight xquery code.
-
- .. versionadded:: 1.4
- """
- name = 'XQuery'
- aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
- filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
- mimetypes = ['text/xquery', 'application/xquery']
-
- xquery_parse_state = []
-
- # FIX UNICODE LATER
- # ncnamestartchar = (
- # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
- # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
- # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
- # ur"[\u10000-\uEFFFF]"
- # )
- ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
- # FIX UNICODE LATER
- # ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
- # ur"[\u203F-\u2040]")
- ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
- ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
- pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
- pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
- pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
- prefixedname = "%s:%s" % (ncname, ncname)
- unprefixedname = ncname
- qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
-
- entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
- charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
-
- stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
- stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
-
- # FIX UNICODE LATER
- # elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
- # quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
- # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
- # aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]'
-
- # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
- # aposattrcontentchar
- # x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
-
- flags = re.DOTALL | re.MULTILINE | re.UNICODE
-
- def punctuation_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def operator_root_callback(lexer, match, ctx):
- yield match.start(), Operator, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def popstate_tag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- next_state = lexer.xquery_parse_state.pop()
- if next_state == 'occurrenceindicator':
- if re.match("[?*+]+", match.group(2)):
- yield match.start(), Punctuation, match.group(2)
- ctx.stack.append('operator')
- ctx.pos = match.end()
- else:
- ctx.stack.append('operator')
- ctx.pos = match.end(1)
- else:
- ctx.stack.append(next_state)
- ctx.pos = match.end(1)
-
- def popstate_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # if we have run out of our state stack, pop whatever is on the pygments
- # state stack
- if len(lexer.xquery_parse_state) == 0:
- ctx.stack.pop()
- elif len(ctx.stack) > 1:
- ctx.stack.append(lexer.xquery_parse_state.pop())
- else:
- # i don't know if i'll need this, but in case, default back to root
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_element_content_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('element_content')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.pos = match.end()
-
- def pushstate_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_order_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_map_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate_withmode(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Keyword, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('kindtest')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtestforpi')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('occurrenceindicator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_operator_root_construct_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- cur_state = ctx.stack.pop()
- lexer.xquery_parse_state.append(cur_state)
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_operator_attribute_callback(lexer, match, ctx):
- yield match.start(), Name.Attribute, match.group(1)
- ctx.stack.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- tokens = {
- 'comment': [
- # xquery comments
- (r'(:\))', Comment, '#pop'),
- (r'(\(:)', Comment, '#push'),
- (r'[^:)]', Comment),
- (r'([^:)]|:|\))', Comment),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'operator': [
- include('whitespace'),
- (r'(\})', popstate_callback),
- (r'\(:', Comment, 'comment'),
-
- (r'(\{)', pushstate_root_callback),
- (r'then|else|external|at|div|except', Keyword, 'root'),
- (r'order by', Keyword, 'root'),
- (r'group by', Keyword, 'root'),
- (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
- (r'and|or', Operator.Word, 'root'),
- (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
- Operator.Word, 'root'),
- (r'return|satisfies|to|union|where|count|preserve\s+strip',
- Keyword, 'root'),
- (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=|!)',
- operator_root_callback),
- (r'(::|:|;|\[|//|/|,)',
- punctuation_root_callback),
- (r'(castable|cast)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(instance)(\s+)(of)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(treat)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+from pygments.util import unirange
+
+from pygments.lexers.css import _indentation, _starts_block
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
+
+
+class DuelLexer(RegexLexer):
+ """
+ Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
+ See http://duelengine.org/.
+ See http://jsonml.org/jbst/.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Duel'
+ aliases = ['duel', 'jbst', 'jsonml+bst']
+ filenames = ['*.duel', '*.jbst']
+ mimetypes = ['text/x-duel', 'text/x-jbst']
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'(<%[@=#!:]?)(.*?)(%>)',
+ bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
+ (r'(<%\$)(.*?)(:)(.*?)(%>)',
+ bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
+ (r'(<%--)(.*?)(--%>)',
+ bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+ (r'(<script.*?>)(.*?)(</script>)',
+ bygroups(using(HtmlLexer),
+ using(JavascriptLexer), using(HtmlLexer))),
+ (r'(.+?)(?=<)', using(HtmlLexer)),
+ (r'.+', using(HtmlLexer)),
+ ],
+ }
+
+
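A short sketch of what the DuelLexer above produces (illustrative only; the 'duel' alias is declared in the class): <% ... %> blocks are delegated to the JavascriptLexer, while the surrounding markup is handed to the HtmlLexer.

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    sample = '<div class="msg"><%= data.message %></div>\n<%-- server-side comment --%>\n'
    print(highlight(sample, get_lexer_by_name('duel'), HtmlFormatter()))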
+class XQueryLexer(ExtendedRegexLexer):
+ """
+ An XQuery lexer, parsing a stream and outputting the tokens needed to
+ highlight xquery code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'XQuery'
+ aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
+ filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
+ mimetypes = ['text/xquery', 'application/xquery']
+
+ xquery_parse_state = []
+
+ # FIX UNICODE LATER
+ # ncnamestartchar = (
+ # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
+ # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
+ # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
+ # ur"[\u10000-\uEFFFF]"
+ # )
+ ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
+ # FIX UNICODE LATER
+ # ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
+ # ur"[\u203F-\u2040]")
+ ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
+ ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
+ pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
+ pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
+ pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
+ prefixedname = "%s:%s" % (ncname, ncname)
+ unprefixedname = ncname
+ qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
+
+ entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
+ charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
+
+ stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
+ stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
+
+ # FIX UNICODE LATER
+ # elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
+ # quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
+ # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
+ # aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]'
+
+ # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
+ # aposattrcontentchar
+ # x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+
+ flags = re.DOTALL | re.MULTILINE | re.UNICODE
+
+ def punctuation_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def operator_root_callback(lexer, match, ctx):
+ yield match.start(), Operator, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def popstate_tag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ ctx.pos = match.end()
+
+ def popstate_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ ctx.pos = match.end()
+
+ def popstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ next_state = lexer.xquery_parse_state.pop()
+ if next_state == 'occurrenceindicator':
+ if re.match("[?*+]+", match.group(2)):
+ yield match.start(), Punctuation, match.group(2)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+ else:
+ ctx.stack.append('operator')
+ ctx.pos = match.end(1)
+ else:
+ ctx.stack.append(next_state)
+ ctx.pos = match.end(1)
+
+ def popstate_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # if we have run out of our state stack, pop whatever is on the pygments
+ # state stack
+ if len(lexer.xquery_parse_state) == 0:
+ ctx.stack.pop()
+ elif len(ctx.stack) > 1:
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ else:
+ # i don't know if i'll need this, but in case, default back to root
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_element_content_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('element_content')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.pos = match.end()
+
+ def pushstate_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_order_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_map_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate_withmode(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Keyword, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('kindtest')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtestforpi')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('occurrenceindicator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_construct_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ cur_state = ctx.stack.pop()
+ lexer.xquery_parse_state.append(cur_state)
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_attribute_callback(lexer, match, ctx):
+ yield match.start(), Name.Attribute, match.group(1)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ tokens = {
+ 'comment': [
+ # xquery comments
+ (r'(:\))', Comment, '#pop'),
+ (r'(\(:)', Comment, '#push'),
+ (r'[^:)]', Comment),
+ (r'([^:)]|:|\))', Comment),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'operator': [
+ include('whitespace'),
+ (r'(\})', popstate_callback),
+ (r'\(:', Comment, 'comment'),
+
+ (r'(\{)', pushstate_root_callback),
+ (r'then|else|external|at|div|except', Keyword, 'root'),
+ (r'order by', Keyword, 'root'),
+ (r'group by', Keyword, 'root'),
+ (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
+ (r'and|or', Operator.Word, 'root'),
+ (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
+ Operator.Word, 'root'),
+ (r'return|satisfies|to|union|where|count|preserve\s+strip',
+ Keyword, 'root'),
+ (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=|!)',
+ operator_root_callback),
+ (r'(::|:|;|\[|//|/|,)',
+ punctuation_root_callback),
+ (r'(castable|cast)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(instance)(\s+)(of)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
+ (r'(treat)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
(r'(case)(\s+)(' + stringdouble + ')',
bygroups(Keyword, Text, String.Double), 'itemtype'),
(r'(case)(\s+)(' + stringsingle + ')',
bygroups(Keyword, Text, String.Single), 'itemtype'),
- (r'(case|as)\b', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)',
- bygroups(Punctuation, Text, Keyword), 'itemtype'),
- (r'\$', Name.Variable, 'varname'),
- (r'(for|let|previous|next)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
+ (r'(case|as)\b', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)',
+ bygroups(Punctuation, Text, Keyword), 'itemtype'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(for|let|previous|next)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable),
'varname'),
- # (r'\)|\?|\]', Punctuation, '#push'),
- (r'\)|\?|\]', Punctuation),
- (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
- (r'ascending|descending|default', Keyword, '#push'),
- (r'(allowing)(\s+)(empty)', bygroups(Keyword, Text, Keyword)),
- (r'external', Keyword),
- (r'(start|when|end)', Keyword, 'root'),
- (r'(only)(\s+)(end)', bygroups(Keyword, Text, Keyword), 'root'),
- (r'collation', Keyword, 'uritooperator'),
-
- # eXist specific XQUF
- (r'(into|following|preceding|with)', Keyword, 'root'),
-
- # support for current context on rhs of Simple Map Operator
- (r'\.', Operator),
-
- # finally catch all string literals and stay in operator state
- (stringdouble, String.Double),
- (stringsingle, String.Single),
-
- (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
- ],
- 'uritooperator': [
- (stringdouble, String.Double, '#pop'),
- (stringsingle, String.Single, '#pop'),
- ],
- 'namespacedecl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
- (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r',', Punctuation),
- (r'=', Operator),
- (r';', Punctuation, 'root'),
- (ncname, Name.Namespace),
- ],
- 'namespacekeyword': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double, 'namespacedecl'),
- (stringsingle, String.Single, 'namespacedecl'),
- (r'inherit|no-inherit', Keyword, 'root'),
- (r'namespace', Keyword, 'namespacedecl'),
- (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
- (r'preserve|no-preserve', Keyword),
- (r',', Punctuation),
- ],
+ # (r'\)|\?|\]', Punctuation, '#push'),
+ (r'\)|\?|\]', Punctuation),
+ (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
+ (r'ascending|descending|default', Keyword, '#push'),
+ (r'(allowing)(\s+)(empty)', bygroups(Keyword, Text, Keyword)),
+ (r'external', Keyword),
+ (r'(start|when|end)', Keyword, 'root'),
+ (r'(only)(\s+)(end)', bygroups(Keyword, Text, Keyword), 'root'),
+ (r'collation', Keyword, 'uritooperator'),
+
+ # eXist specific XQUF
+ (r'(into|following|preceding|with)', Keyword, 'root'),
+
+ # support for current context on rhs of Simple Map Operator
+ (r'\.', Operator),
+
+ # finally catch all string literals and stay in operator state
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+
+ (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
+ ],
+ 'uritooperator': [
+ (stringdouble, String.Double, '#pop'),
+ (stringsingle, String.Single, '#pop'),
+ ],
+ 'namespacedecl': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
+ (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r',', Punctuation),
+ (r'=', Operator),
+ (r';', Punctuation, 'root'),
+ (ncname, Name.Namespace),
+ ],
+ 'namespacekeyword': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double, 'namespacedecl'),
+ (stringsingle, String.Single, 'namespacedecl'),
+ (r'inherit|no-inherit', Keyword, 'root'),
+ (r'namespace', Keyword, 'namespacedecl'),
+ (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
+ (r'preserve|no-preserve', Keyword),
+ (r',', Punctuation),
+ ],
'annotationname': [
- (r'\(:', Comment, 'comment'),
- (qname, Name.Decorator),
- (r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)),
- (r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)),
+ (r'\(:', Comment, 'comment'),
+ (qname, Name.Decorator),
+ (r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)),
+ (r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)),
(r'(\,)(\s+)(' + stringdouble + ')',
bygroups(Punctuation, Text, String.Double)),
(r'(\,)(\s+)(' + stringsingle + ')',
bygroups(Punctuation, Text, String.Single)),
- (r'\)', Punctuation),
- (r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'),
+ (r'\)', Punctuation),
+ (r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'),
(r'(\s+)(variable)(\s+)(\$)',
bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
(r'(\s+)(function)(\s+)',
bygroups(Text, Keyword.Declaration, Text), 'root')
- ],
- 'varname': [
- (r'\(:', Comment, 'comment'),
+ ],
+ 'varname': [
+ (r'\(:', Comment, 'comment'),
(r'(' + qname + r')(\()?', bygroups(Name, Punctuation), 'operator'),
- ],
- 'singletype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (ncname + r'(:\*)', Name.Variable, 'operator'),
- (qname, Name.Variable, 'operator'),
- ],
- 'itemtype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\$', Name.Variable, 'varname'),
- (r'(void)(\s*)(\()(\s*)(\))',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
- (r'(element|attribute|schema-element|schema-attribute|comment|text|'
- r'node|binary|document-node|empty-sequence)(\s*)(\()',
- pushstate_occurrenceindicator_kindtest_callback),
- # Marklogic specific type?
- (r'(processing-instruction)(\s*)(\()',
- bygroups(Keyword, Text, Punctuation),
- ('occurrenceindicator', 'kindtestforpi')),
- (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation),
- 'occurrenceindicator'),
- (r'(\(\#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
- (r';', Punctuation, '#pop'),
- (r'then|else', Keyword, '#pop'),
- (r'(at)(\s+)(' + stringdouble + ')',
- bygroups(Keyword, Text, String.Double), 'namespacedecl'),
- (r'(at)(\s+)(' + stringsingle + ')',
- bygroups(Keyword, Text, String.Single), 'namespacedecl'),
- (r'except|intersect|in|is|return|satisfies|to|union|where|count',
- Keyword, 'root'),
- (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
- (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
- (r'external|at', Keyword, 'root'),
- (r'(stable)(\s+)(order)(\s+)(by)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
- (r'(castable|cast)(\s+)(as)',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
- (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
+ ],
+ 'singletype': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (ncname + r'(:\*)', Name.Variable, 'operator'),
+ (qname, Name.Variable, 'operator'),
+ ],
+ 'itemtype': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(void)(\s*)(\()(\s*)(\))',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
+ (r'(element|attribute|schema-element|schema-attribute|comment|text|'
+ r'node|binary|document-node|empty-sequence)(\s*)(\()',
+ pushstate_occurrenceindicator_kindtest_callback),
+ # Marklogic specific type?
+ (r'(processing-instruction)(\s*)(\()',
+ bygroups(Keyword, Text, Punctuation),
+ ('occurrenceindicator', 'kindtestforpi')),
+ (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation),
+ 'occurrenceindicator'),
+ (r'(\(\#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
+ (r';', Punctuation, '#pop'),
+ (r'then|else', Keyword, '#pop'),
+ (r'(at)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'namespacedecl'),
+ (r'(at)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'namespacedecl'),
+ (r'except|intersect|in|is|return|satisfies|to|union|where|count',
+ Keyword, 'root'),
+ (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
+ (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
+ (r'external|at', Keyword, 'root'),
+ (r'(stable)(\s+)(order)(\s+)(by)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
+ (r'(castable|cast)(\s+)(as)',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
+ (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
(r'(case)(\s+)(' + stringdouble + ')',
bygroups(Keyword, Text, String.Double), 'itemtype'),
(r'(case)(\s+)(' + stringsingle + ')',
bygroups(Keyword, Text, String.Single), 'itemtype'),
- (r'case|as', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
- (ncname + r':\*', Keyword.Type, 'operator'),
- (r'(function|map|array)(\()', bygroups(Keyword.Type, Punctuation)),
- (qname, Keyword.Type, 'occurrenceindicator'),
- ],
- 'kindtest': [
- (r'\(:', Comment, 'comment'),
- (r'\{', Punctuation, 'root'),
- (r'(\))([*+?]?)', popstate_kindtest_callback),
- (r'\*', Name, 'closekindtest'),
- (qname, Name, 'closekindtest'),
- (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
- ],
- 'kindtestforpi': [
- (r'\(:', Comment, 'comment'),
- (r'\)', Punctuation, '#pop'),
- (ncname, Name.Variable),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- ],
- 'closekindtest': [
- (r'\(:', Comment, 'comment'),
- (r'(\))', popstate_callback),
- (r',', Punctuation),
- (r'(\{)', pushstate_operator_root_callback),
- (r'\?', Punctuation),
- ],
- 'xml_comment': [
- (r'(-->)', popstate_xmlcomment_callback),
- (r'[^-]{1,2}', Literal),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'processing_instruction': [
- (r'\s+', Text, 'processing_instruction_content'),
- (r'\?>', String.Doc, '#pop'),
- (pitarget, Name),
- ],
- 'processing_instruction_content': [
- (r'\?>', String.Doc, '#pop'),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'cdata_section': [
- (r']]>', String.Doc, '#pop'),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'start_tag': [
- include('whitespace'),
- (r'(/>)', popstate_tag_callback),
- (r'>', Name.Tag, 'element_content'),
- (r'"', Punctuation, 'quot_attribute_content'),
- (r"'", Punctuation, 'apos_attribute_content'),
- (r'=', Operator),
- (qname, Name.Tag),
- ],
- 'quot_attribute_content': [
- (r'"', Punctuation, 'start_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'""', Name.Attribute),
- (quotattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'apos_attribute_content': [
- (r"'", Punctuation, 'start_tag'),
- (r'\{', Punctuation, 'root'),
- (r"''", Name.Attribute),
- (aposattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'element_content': [
- (r'</', Name.Tag, 'end_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'(<!--)', pushstate_element_content_xmlcomment_callback),
- (r'(<\?)', pushstate_element_content_processing_instruction_callback),
- (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
- (r'(<)', pushstate_element_content_starttag_callback),
- (elementcontentchar, Literal),
- (entityref, Literal),
- (charref, Literal),
- (r'\{\{|\}\}', Literal),
- ],
- 'end_tag': [
- include('whitespace'),
- (r'(>)', popstate_tag_callback),
- (qname, Name.Tag),
- ],
- 'xmlspace_decl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'preserve|strip', Keyword, '#pop'),
- ],
- 'declareordering': [
- (r'\(:', Comment, 'comment'),
- include('whitespace'),
- (r'ordered|unordered', Keyword, '#pop'),
- ],
- 'xqueryversion': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r'encoding', Keyword),
- (r';', Punctuation, '#pop'),
- ],
- 'pragma': [
- (qname, Name.Variable, 'pragmacontents'),
- ],
- 'pragmacontents': [
- (r'#\)', Punctuation, 'operator'),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- (r'(\s+)', Text),
- ],
- 'occurrenceindicator': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\*|\?|\+', Operator, 'operator'),
- (r':=', Operator, 'root'),
- default('operator'),
- ],
- 'option': [
- include('whitespace'),
- (qname, Name.Variable, '#pop'),
- ],
- 'qname_braren': [
- include('whitespace'),
- (r'(\{)', pushstate_operator_root_callback),
- (r'(\()', Punctuation, 'root'),
- ],
- 'element_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'attribute_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'root': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
-
- # handle operator state
- # order on numbers matters - handle most complex first
- (r'\d+(\.\d*)?[eE][+-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+)[eE][+-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
- (r'(\d+)', Number.Integer, 'operator'),
- (r'(\.\.|\.|\))', Punctuation, 'operator'),
- (r'(declare)(\s+)(construction)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (r'(declare)(\s+)(default)(\s+)(order)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (r'(declare)(\s+)(context)(\s+)(item)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
+ (r'case|as', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+ (ncname + r':\*', Keyword.Type, 'operator'),
+ (r'(function|map|array)(\()', bygroups(Keyword.Type, Punctuation)),
+ (qname, Keyword.Type, 'occurrenceindicator'),
+ ],
+ 'kindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'\{', Punctuation, 'root'),
+ (r'(\))([*+?]?)', popstate_kindtest_callback),
+ (r'\*', Name, 'closekindtest'),
+ (qname, Name, 'closekindtest'),
+ (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
+ ],
+ 'kindtestforpi': [
+ (r'\(:', Comment, 'comment'),
+ (r'\)', Punctuation, '#pop'),
+ (ncname, Name.Variable),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ ],
+ 'closekindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'(\))', popstate_callback),
+ (r',', Punctuation),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'\?', Punctuation),
+ ],
+ 'xml_comment': [
+ (r'(-->)', popstate_xmlcomment_callback),
+ (r'[^-]{1,2}', Literal),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'processing_instruction': [
+ (r'\s+', Text, 'processing_instruction_content'),
+ (r'\?>', String.Doc, '#pop'),
+ (pitarget, Name),
+ ],
+ 'processing_instruction_content': [
+ (r'\?>', String.Doc, '#pop'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'cdata_section': [
+ (r']]>', String.Doc, '#pop'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'start_tag': [
+ include('whitespace'),
+ (r'(/>)', popstate_tag_callback),
+ (r'>', Name.Tag, 'element_content'),
+ (r'"', Punctuation, 'quot_attribute_content'),
+ (r"'", Punctuation, 'apos_attribute_content'),
+ (r'=', Operator),
+ (qname, Name.Tag),
+ ],
+ 'quot_attribute_content': [
+ (r'"', Punctuation, 'start_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'""', Name.Attribute),
+ (quotattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'apos_attribute_content': [
+ (r"'", Punctuation, 'start_tag'),
+ (r'\{', Punctuation, 'root'),
+ (r"''", Name.Attribute),
+ (aposattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'element_content': [
+ (r'</', Name.Tag, 'end_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'(<!--)', pushstate_element_content_xmlcomment_callback),
+ (r'(<\?)', pushstate_element_content_processing_instruction_callback),
+ (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
+ (r'(<)', pushstate_element_content_starttag_callback),
+ (elementcontentchar, Literal),
+ (entityref, Literal),
+ (charref, Literal),
+ (r'\{\{|\}\}', Literal),
+ ],
+ 'end_tag': [
+ include('whitespace'),
+ (r'(>)', popstate_tag_callback),
+ (qname, Name.Tag),
+ ],
+ 'xmlspace_decl': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'preserve|strip', Keyword, '#pop'),
+ ],
+ 'declareordering': [
+ (r'\(:', Comment, 'comment'),
+ include('whitespace'),
+ (r'ordered|unordered', Keyword, '#pop'),
+ ],
+ 'xqueryversion': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r'encoding', Keyword),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'pragma': [
+ (qname, Name.Variable, 'pragmacontents'),
+ ],
+ 'pragmacontents': [
+ (r'#\)', Punctuation, 'operator'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ (r'(\s+)', Text),
+ ],
+ 'occurrenceindicator': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\*|\?|\+', Operator, 'operator'),
+ (r':=', Operator, 'root'),
+ default('operator'),
+ ],
+ 'option': [
+ include('whitespace'),
+ (qname, Name.Variable, '#pop'),
+ ],
+ 'qname_braren': [
+ include('whitespace'),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'(\()', Punctuation, 'root'),
+ ],
+ 'element_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'attribute_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+
+ # handle operator state
+ # order on numbers matters - handle most complex first
+ (r'\d+(\.\d*)?[eE][+-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+)[eE][+-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
+ (r'(\d+)', Number.Integer, 'operator'),
+ (r'(\.\.|\.|\))', Punctuation, 'operator'),
+ (r'(declare)(\s+)(construction)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
+ (r'(declare)(\s+)(default)(\s+)(order)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
+ (r'(declare)(\s+)(context)(\s+)(item)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
(ncname + r':\*', Name, 'operator'),
(r'\*:'+ncname, Name.Tag, 'operator'),
(r'\*', Name.Tag, 'operator'),
- (stringdouble, String.Double, 'operator'),
- (stringsingle, String.Single, 'operator'),
-
- (r'(\}|\])', popstate_callback),
-
- # NAMESPACE DECL
- (r'(declare)(\s+)(default)(\s+)(collation)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration)),
- (r'(module|declare)(\s+)(namespace)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
- (r'(declare)(\s+)(base-uri)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
-
- # NAMESPACE KEYWORD
- (r'(declare)(\s+)(default)(\s+)(element|function)',
+ (stringdouble, String.Double, 'operator'),
+ (stringsingle, String.Single, 'operator'),
+
+ (r'(\}|\])', popstate_callback),
+
+ # NAMESPACE DECL
+ (r'(declare)(\s+)(default)(\s+)(collation)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration)),
+ (r'(module|declare)(\s+)(namespace)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
+ (r'(declare)(\s+)(base-uri)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
+
+ # NAMESPACE KEYWORD
+ (r'(declare)(\s+)(default)(\s+)(element|function)',
bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration),
'namespacekeyword'),
- (r'(import)(\s+)(schema|module)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
- (r'(declare)(\s+)(copy-namespaces)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'),
-
- # VARNAMEs
- (r'(for|let|some|every)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
- bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
- (r'\$', Name.Variable, 'varname'),
- (r'(declare)(\s+)(variable)(\s+)(\$)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
-
- # ANNOTATED GLOBAL VARIABLES AND FUNCTIONS
- (r'(declare)(\s+)(\%)', bygroups(Keyword.Declaration, Text, Name.Decorator), 'annotationname'),
-
- # ITEMTYPE
- (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
-
- (r'(element|attribute|schema-element|schema-attribute|comment|'
- r'text|node|document-node|empty-sequence)(\s+)(\()',
- pushstate_operator_kindtest_callback),
-
- (r'(processing-instruction)(\s+)(\()',
- pushstate_operator_kindtestforpi_callback),
-
- (r'(<!--)', pushstate_operator_xmlcomment_callback),
-
- (r'(<\?)', pushstate_operator_processing_instruction_callback),
-
- (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
-
- # (r'</', Name.Tag, 'end_tag'),
- (r'(<)', pushstate_operator_starttag_callback),
-
- (r'(declare)(\s+)(boundary-space)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'xmlspace_decl'),
-
- (r'(validate)(\s+)(lax|strict)',
- pushstate_operator_root_validate_withmode),
- (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
- (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(switch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(element|attribute|namespace)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
-
- (r'(document|text|processing-instruction|comment)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
- # ATTRIBUTE
- (r'(attribute)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Text), 'attribute_qname'),
- # ELEMENT
- (r'(element)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Text), 'element_qname'),
- # PROCESSING_INSTRUCTION
- (r'(processing-instruction|namespace)(\s+)(' + ncname + r')(\s*)(\{)',
- bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
- 'operator'),
-
- (r'(declare|define)(\s+)(function)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration)),
-
- (r'(\{|\[)', pushstate_operator_root_callback),
-
- (r'(unordered|ordered)(\s*)(\{)',
- pushstate_operator_order_callback),
-
- (r'(map|array)(\s*)(\{)',
- pushstate_operator_map_callback),
-
- (r'(declare)(\s+)(ordering)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'declareordering'),
-
- (r'(xquery)(\s+)(version)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
-
- (r'(\(#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
-
- # sometimes return can occur in root state
- (r'return', Keyword),
-
- (r'(declare)(\s+)(option)', bygroups(Keyword.Declaration, Text, Keyword.Declaration),
- 'option'),
-
- # URI LITERALS - single and double quoted
- (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
- (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
-
- (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
- bygroups(Keyword, Punctuation)),
- (r'(descendant|following-sibling|following|parent|preceding-sibling'
- r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
-
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
-
- (r'then|else', Keyword),
-
- # eXist specific XQUF
- (r'(update)(\s*)(insert|delete|replace|value|rename)', bygroups(Keyword, Text, Keyword)),
- (r'(into|following|preceding|with)', Keyword),
-
- # Marklogic specific
- (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
- (r'(catch)(\s*)(\()(\$)',
- bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
-
-
- (r'(@'+qname+')', Name.Attribute, 'operator'),
- (r'(@'+ncname+')', Name.Attribute, 'operator'),
- (r'@\*:'+ncname, Name.Attribute, 'operator'),
- (r'@\*', Name.Attribute, 'operator'),
- (r'(@)', Name.Attribute, 'operator'),
-
- (r'//|/|\+|-|;|,|\(|\)', Punctuation),
-
- # STANDALONE QNAMES
- (qname + r'(?=\s*\{)', Name.Tag, 'qname_braren'),
- (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
- (r'(' + qname + ')(#)([0-9]+)', bygroups(Name.Function, Keyword.Type, Number.Integer)),
- (qname, Name.Tag, 'operator'),
- ]
- }
-
-
-class QmlLexer(RegexLexer):
- """
- For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
-
- .. versionadded:: 1.6
- """
-
- # QML is based on javascript, so much of this is taken from the
- # JavascriptLexer above.
-
- name = 'QML'
- aliases = ['qml', 'qbs']
- filenames = ['*.qml', '*.qbs']
- mimetypes = ['application/x-qml', 'application/x-qt.qbs+qml']
-
- # pasted from JavascriptLexer, with some additions
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- # QML insertions
- (r'\bid\s*:\s*[A-Za-z][\w.]*', Keyword.Declaration,
- 'slashstartsregex'),
- (r'\b[A-Za-z][\w.]*\s*:', Keyword, 'slashstartsregex'),
-
- # the rest from JavascriptLexer
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class CirruLexer(RegexLexer):
+ (r'(import)(\s+)(schema|module)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
+ (r'(declare)(\s+)(copy-namespaces)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'),
+
+ # VARNAMEs
+ (r'(for|let|some|every)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(declare)(\s+)(variable)(\s+)(\$)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
+
+ # ANNOTATED GLOBAL VARIABLES AND FUNCTIONS
+ (r'(declare)(\s+)(\%)', bygroups(Keyword.Declaration, Text, Name.Decorator), 'annotationname'),
+
+ # ITEMTYPE
+ (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+
+ (r'(element|attribute|schema-element|schema-attribute|comment|'
+ r'text|node|document-node|empty-sequence)(\s+)(\()',
+ pushstate_operator_kindtest_callback),
+
+ (r'(processing-instruction)(\s+)(\()',
+ pushstate_operator_kindtestforpi_callback),
+
+ (r'(<!--)', pushstate_operator_xmlcomment_callback),
+
+ (r'(<\?)', pushstate_operator_processing_instruction_callback),
+
+ (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
+
+ # (r'</', Name.Tag, 'end_tag'),
+ (r'(<)', pushstate_operator_starttag_callback),
+
+ (r'(declare)(\s+)(boundary-space)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'xmlspace_decl'),
+
+ (r'(validate)(\s+)(lax|strict)',
+ pushstate_operator_root_validate_withmode),
+ (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
+ (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+ (r'(switch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+ (r'(element|attribute|namespace)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+
+ (r'(document|text|processing-instruction|comment)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+ # ATTRIBUTE
+ (r'(attribute)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'attribute_qname'),
+ # ELEMENT
+ (r'(element)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'element_qname'),
+ # PROCESSING_INSTRUCTION
+ (r'(processing-instruction|namespace)(\s+)(' + ncname + r')(\s*)(\{)',
+ bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
+ 'operator'),
+
+ (r'(declare|define)(\s+)(function)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration)),
+
+ (r'(\{|\[)', pushstate_operator_root_callback),
+
+ (r'(unordered|ordered)(\s*)(\{)',
+ pushstate_operator_order_callback),
+
+ (r'(map|array)(\s*)(\{)',
+ pushstate_operator_map_callback),
+
+ (r'(declare)(\s+)(ordering)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'declareordering'),
+
+ (r'(xquery)(\s+)(version)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
+
+ (r'(\(#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
+
+ # sometimes return can occur in root state
+ (r'return', Keyword),
+
+ (r'(declare)(\s+)(option)', bygroups(Keyword.Declaration, Text, Keyword.Declaration),
+ 'option'),
+
+ # URI LITERALS - single and double quoted
+ (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
+ (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
+
+ (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
+ bygroups(Keyword, Punctuation)),
+ (r'(descendant|following-sibling|following|parent|preceding-sibling'
+ r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
+
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+
+ (r'then|else', Keyword),
+
+ # eXist specific XQUF
+ (r'(update)(\s*)(insert|delete|replace|value|rename)', bygroups(Keyword, Text, Keyword)),
+ (r'(into|following|preceding|with)', Keyword),
+
+ # Marklogic specific
+ (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
+ (r'(catch)(\s*)(\()(\$)',
+ bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
+
+
+ (r'(@'+qname+')', Name.Attribute, 'operator'),
+ (r'(@'+ncname+')', Name.Attribute, 'operator'),
+ (r'@\*:'+ncname, Name.Attribute, 'operator'),
+ (r'@\*', Name.Attribute, 'operator'),
+ (r'(@)', Name.Attribute, 'operator'),
+
+ (r'//|/|\+|-|;|,|\(|\)', Punctuation),
+
+ # STANDALONE QNAMES
+ (qname + r'(?=\s*\{)', Name.Tag, 'qname_braren'),
+ (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
+ (r'(' + qname + ')(#)([0-9]+)', bygroups(Name.Function, Keyword.Type, Number.Integer)),
+ (qname, Name.Tag, 'operator'),
+ ]
+ }
+
+
+class QmlLexer(RegexLexer):
+ """
+ For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
+
+ .. versionadded:: 1.6
+ """
+
+ # QML is based on javascript, so much of this is taken from the
+ # JavascriptLexer above.
+
+ name = 'QML'
+ aliases = ['qml', 'qbs']
+ filenames = ['*.qml', '*.qbs']
+ mimetypes = ['application/x-qml', 'application/x-qt.qbs+qml']
+
+ # pasted from JavascriptLexer, with some additions
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ # QML insertions
+ (r'\bid\s*:\s*[A-Za-z][\w.]*', Keyword.Declaration,
+ 'slashstartsregex'),
+ (r'\b[A-Za-z][\w.]*\s*:', Keyword, 'slashstartsregex'),
+
+ # the rest from JavascriptLexer
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class CirruLexer(RegexLexer):
r"""
- Syntax rules of Cirru can be found at:
- http://cirru.org/
-
-    * using ``()`` for expressions, but restricted to a single line
- * using ``""`` for strings, with ``\`` for escaping chars
- * using ``$`` as folding operator
- * using ``,`` as unfolding operator
- * using indentations for nested blocks
-
- .. versionadded:: 2.0
- """
-
- name = 'Cirru'
- aliases = ['cirru']
- filenames = ['*.cirru']
- mimetypes = ['text/x-cirru']
- flags = re.MULTILINE
-
- tokens = {
- 'string': [
- (r'[^"\\\n]', String),
- (r'\\', String.Escape, 'escape'),
- (r'"', String, '#pop'),
- ],
- 'escape': [
- (r'.', String.Escape, '#pop'),
- ],
- 'function': [
- (r'\,', Operator, '#pop'),
- (r'[^\s"()]+', Name.Function, '#pop'),
- (r'\)', Operator, '#pop'),
- (r'(?=\n)', Text, '#pop'),
- (r'\(', Operator, '#push'),
- (r'"', String, ('#pop', 'string')),
- (r'[ ]+', Text.Whitespace),
- ],
- 'line': [
- (r'(?<!\w)\$(?!\w)', Operator, 'function'),
- (r'\(', Operator, 'function'),
- (r'\)', Operator),
- (r'\n', Text, '#pop'),
- (r'"', String, 'string'),
- (r'[ ]+', Text.Whitespace),
- (r'[+-]?[\d.]+\b', Number),
- (r'[^\s"()]+', Name.Variable)
- ],
- 'root': [
- (r'^\n+', Text.Whitespace),
- default(('line', 'function')),
- ]
- }
-
-
-class SlimLexer(ExtendedRegexLexer):
- """
- For Slim markup.
-
- .. versionadded:: 2.0
- """
-
- name = 'Slim'
- aliases = ['slim']
- filenames = ['*.slim']
- mimetypes = ['text/x-slim']
-
- flags = re.IGNORECASE
- _dot = r'(?: \|\n(?=.* \|)|.)'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'([ \t]*==?)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- (r'[ \t]+[\w:-]+(?==)', Name.Attribute, 'html-attributes'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
- (r'(-)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- (r'[ \t]+\n', Punctuation, '#pop:2'),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(.*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'=', Punctuation),
- (r'"[^"]+"', using(RubyLexer), 'tag'),
- (r'\'[^\']+\'', using(RubyLexer), 'tag'),
- (r'\w+', Text, 'tag'),
- ],
-
- 'slim-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
- }
+ Syntax rules of Cirru can be found at:
+ http://cirru.org/
+
+    * using ``()`` for expressions, but restricted to a single line
+ * using ``""`` for strings, with ``\`` for escaping chars
+ * using ``$`` as folding operator
+ * using ``,`` as unfolding operator
+ * using indentations for nested blocks
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Cirru'
+ aliases = ['cirru']
+ filenames = ['*.cirru']
+ mimetypes = ['text/x-cirru']
+ flags = re.MULTILINE
+
+ tokens = {
+ 'string': [
+ (r'[^"\\\n]', String),
+ (r'\\', String.Escape, 'escape'),
+ (r'"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'function': [
+ (r'\,', Operator, '#pop'),
+ (r'[^\s"()]+', Name.Function, '#pop'),
+ (r'\)', Operator, '#pop'),
+ (r'(?=\n)', Text, '#pop'),
+ (r'\(', Operator, '#push'),
+ (r'"', String, ('#pop', 'string')),
+ (r'[ ]+', Text.Whitespace),
+ ],
+ 'line': [
+ (r'(?<!\w)\$(?!\w)', Operator, 'function'),
+ (r'\(', Operator, 'function'),
+ (r'\)', Operator),
+ (r'\n', Text, '#pop'),
+ (r'"', String, 'string'),
+ (r'[ ]+', Text.Whitespace),
+ (r'[+-]?[\d.]+\b', Number),
+ (r'[^\s"()]+', Name.Variable)
+ ],
+ 'root': [
+ (r'^\n+', Text.Whitespace),
+ default(('line', 'function')),
+ ]
+ }
+
+
+class SlimLexer(ExtendedRegexLexer):
+ """
+ For Slim markup.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Slim'
+ aliases = ['slim']
+ filenames = ['*.slim']
+ mimetypes = ['text/x-slim']
+
+ flags = re.IGNORECASE
+ _dot = r'(?: \|\n(?=.* \|)|.)'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'([ \t]*==?)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ (r'[ \t]+[\w:-]+(?==)', Name.Attribute, 'html-attributes'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
+ (r'(-)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ (r'[ \t]+\n', Punctuation, '#pop:2'),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(.*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'=', Punctuation),
+ (r'"[^"]+"', using(RubyLexer), 'tag'),
+ (r'\'[^\']+\'', using(RubyLexer), 'tag'),
+ (r'\w+', Text, 'tag'),
+ ],
+
+ 'slim-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+ }
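
The lexer classes above are consumed through the standard Pygments API; a minimal usage sketch (the QML snippet is illustrative, not part of this diff):

    from pygments import highlight
    from pygments.lexers import QmlLexer
    from pygments.formatters import TerminalFormatter

    # Highlight a small QML snippet with the lexer defined above.
    qml = 'import QtQuick 2.0\nRectangle { id: root; width: 100 }\n'
    print(highlight(qml, QmlLexer(), TerminalFormatter()))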
diff --git a/contrib/python/Pygments/py2/pygments/lexers/x10.py b/contrib/python/Pygments/py2/pygments/lexers/x10.py
index eac87b1cff..cfabc44627 100644
--- a/contrib/python/Pygments/py2/pygments/lexers/x10.py
+++ b/contrib/python/Pygments/py2/pygments/lexers/x10.py
@@ -1,69 +1,69 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.x10
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the X10 programming language.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.x10
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the X10 programming language.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['X10Lexer']
-
-class X10Lexer(RegexLexer):
- """
- For the X10 language.
-
- .. versionadded:: 0.1
- """
-
- name = 'X10'
- aliases = ['x10', 'xten']
- filenames = ['*.x10']
- mimetypes = ['text/x-x10']
-
- keywords = (
- 'as', 'assert', 'async', 'at', 'athome', 'ateach', 'atomic',
- 'break', 'case', 'catch', 'class', 'clocked', 'continue',
- 'def', 'default', 'do', 'else', 'final', 'finally', 'finish',
- 'for', 'goto', 'haszero', 'here', 'if', 'import', 'in',
- 'instanceof', 'interface', 'isref', 'new', 'offer',
- 'operator', 'package', 'return', 'struct', 'switch', 'throw',
- 'try', 'type', 'val', 'var', 'when', 'while'
- )
-
- types = (
-        'void',
- )
-
- values = (
- 'false', 'null', 'self', 'super', 'this', 'true'
- )
-
- modifiers = (
- 'abstract', 'extends', 'implements', 'native', 'offers',
- 'private', 'property', 'protected', 'public', 'static',
- 'throws', 'transient'
- )
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
- (r'\b(%s)\b' % '|'.join(values), Keyword.Constant),
- (r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'.', Text)
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['X10Lexer']
+
+class X10Lexer(RegexLexer):
+ """
+ For the X10 language.
+
+ .. versionadded:: 0.1
+ """
+
+ name = 'X10'
+ aliases = ['x10', 'xten']
+ filenames = ['*.x10']
+ mimetypes = ['text/x-x10']
+
+ keywords = (
+ 'as', 'assert', 'async', 'at', 'athome', 'ateach', 'atomic',
+ 'break', 'case', 'catch', 'class', 'clocked', 'continue',
+ 'def', 'default', 'do', 'else', 'final', 'finally', 'finish',
+ 'for', 'goto', 'haszero', 'here', 'if', 'import', 'in',
+ 'instanceof', 'interface', 'isref', 'new', 'offer',
+ 'operator', 'package', 'return', 'struct', 'switch', 'throw',
+ 'try', 'type', 'val', 'var', 'when', 'while'
+ )
+
+ types = (
+        'void',
+ )
+
+ values = (
+ 'false', 'null', 'self', 'super', 'this', 'true'
+ )
+
+ modifiers = (
+ 'abstract', 'extends', 'implements', 'native', 'offers',
+ 'private', 'property', 'protected', 'public', 'static',
+ 'throws', 'transient'
+ )
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
+ (r'\b(%s)\b' % '|'.join(values), Keyword.Constant),
+ (r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ (r'.', Text)
+ ],
+ }
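
The root state builds each keyword class by joining the tuples above into a single word-bounded alternation; a quick sketch of the resulting pattern (the three keywords are an excerpt, not the full tuple):

    import re

    keywords = ('as', 'assert', 'async')              # excerpt of the tuple above
    pattern = re.compile(r'\b(%s)\b' % '|'.join(keywords))

    assert pattern.match('assert')                    # whole word matches
    assert not pattern.match('asserted')              # \b rejects partial words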
diff --git a/contrib/python/Pygments/py2/pygments/modeline.py b/contrib/python/Pygments/py2/pygments/modeline.py
index 31b2e7fb0d..ec3b9e7a5f 100644
--- a/contrib/python/Pygments/py2/pygments/modeline.py
+++ b/contrib/python/Pygments/py2/pygments/modeline.py
@@ -1,44 +1,44 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.modeline
- ~~~~~~~~~~~~~~~~~
-
- A simple modeline parser (based on pymodeline).
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.modeline
+ ~~~~~~~~~~~~~~~~~
+
+ A simple modeline parser (based on pymodeline).
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-__all__ = ['get_filetype_from_buffer']
-
-
-modeline_re = re.compile(r'''
- (?: vi | vim | ex ) (?: [<=>]? \d* )? :
- .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
-''', re.VERBOSE)
-
-
-def get_filetype_from_line(l):
- m = modeline_re.search(l)
- if m:
- return m.group(1)
-
-
-def get_filetype_from_buffer(buf, max_lines=5):
- """
- Scan the buffer for modelines and return filetype if one is found.
- """
- lines = buf.splitlines()
- for l in lines[-1:-max_lines-1:-1]:
- ret = get_filetype_from_line(l)
- if ret:
- return ret
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+__all__ = ['get_filetype_from_buffer']
+
+
+modeline_re = re.compile(r'''
+ (?: vi | vim | ex ) (?: [<=>]? \d* )? :
+ .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
+''', re.VERBOSE)
+
+
+def get_filetype_from_line(l):
+ m = modeline_re.search(l)
+ if m:
+ return m.group(1)
+
+
+def get_filetype_from_buffer(buf, max_lines=5):
+ """
+ Scan the buffer for modelines and return filetype if one is found.
+ """
+ lines = buf.splitlines()
+ for l in lines[-1:-max_lines-1:-1]:
+ ret = get_filetype_from_line(l)
+ if ret:
+ return ret
for i in range(max_lines, -1, -1):
if i < len(lines):
ret = get_filetype_from_line(lines[i])
if ret:
return ret
-
- return None
+
+ return None
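
A minimal sketch of how the helper above is typically used (the buffer text is illustrative):

    from pygments.modeline import get_filetype_from_buffer
    from pygments.lexers import get_lexer_by_name

    # The last line carries a Vim modeline naming the filetype.
    buf = "print('hello')\n# vim: set ft=python :\n"

    ft = get_filetype_from_buffer(buf)    # -> 'python'
    if ft is not None:
        lexer = get_lexer_by_name(ft)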
diff --git a/contrib/python/Pygments/py2/pygments/plugin.py b/contrib/python/Pygments/py2/pygments/plugin.py
index 3d185efc10..22c848bd22 100644
--- a/contrib/python/Pygments/py2/pygments/plugin.py
+++ b/contrib/python/Pygments/py2/pygments/plugin.py
@@ -1,46 +1,46 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.plugin
- ~~~~~~~~~~~~~~~
-
- Pygments setuptools plugin interface. The methods defined
- here also work if setuptools isn't installed but they just
- return nothing.
-
- lexer plugins::
-
- [pygments.lexers]
- yourlexer = yourmodule:YourLexer
-
- formatter plugins::
-
- [pygments.formatters]
- yourformatter = yourformatter:YourFormatter
- /.ext = yourformatter:YourFormatter
-
- As you can see, you can define extensions for the formatter
- with a leading slash.
-
- syntax plugins::
-
- [pygments.styles]
- yourstyle = yourstyle:YourStyle
-
- filter plugin::
-
- [pygments.filter]
- yourfilter = yourfilter:YourFilter
-
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.plugin
+ ~~~~~~~~~~~~~~~
+
+ Pygments setuptools plugin interface. The methods defined
+ here also work if setuptools isn't installed but they just
+ return nothing.
+
+ lexer plugins::
+
+ [pygments.lexers]
+ yourlexer = yourmodule:YourLexer
+
+ formatter plugins::
+
+ [pygments.formatters]
+ yourformatter = yourformatter:YourFormatter
+ /.ext = yourformatter:YourFormatter
+
+ As you can see, you can define extensions for the formatter
+ with a leading slash.
+
+ syntax plugins::
+
+ [pygments.styles]
+ yourstyle = yourstyle:YourStyle
+
+ filter plugin::
+
+ [pygments.filter]
+ yourfilter = yourfilter:YourFilter
+
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-LEXER_ENTRY_POINT = 'pygments.lexers'
-FORMATTER_ENTRY_POINT = 'pygments.formatters'
-STYLE_ENTRY_POINT = 'pygments.styles'
-FILTER_ENTRY_POINT = 'pygments.filters'
-
-
+ :license: BSD, see LICENSE for details.
+"""
+LEXER_ENTRY_POINT = 'pygments.lexers'
+FORMATTER_ENTRY_POINT = 'pygments.formatters'
+STYLE_ENTRY_POINT = 'pygments.styles'
+FILTER_ENTRY_POINT = 'pygments.filters'
+
+
def iter_entry_points(group_name):
try:
import pkg_resources
@@ -50,21 +50,21 @@ def iter_entry_points(group_name):
return pkg_resources.iter_entry_points(group_name)
-def find_plugin_lexers():
+def find_plugin_lexers():
for entrypoint in iter_entry_points(LEXER_ENTRY_POINT):
- yield entrypoint.load()
-
-
-def find_plugin_formatters():
+ yield entrypoint.load()
+
+
+def find_plugin_formatters():
for entrypoint in iter_entry_points(FORMATTER_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_styles():
+ yield entrypoint.name, entrypoint.load()
+
+
+def find_plugin_styles():
for entrypoint in iter_entry_points(STYLE_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_filters():
+ yield entrypoint.name, entrypoint.load()
+
+
+def find_plugin_filters():
for entrypoint in iter_entry_points(FILTER_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
+ yield entrypoint.name, entrypoint.load()
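
As the module docstring describes, third-party components are discovered through setuptools entry points; a minimal setup.py sketch for a lexer plugin (package, module and class names are placeholders):

    from setuptools import setup

    setup(
        name='yourpackage',
        py_modules=['yourmodule'],
        entry_points={
            'pygments.lexers': [
                'yourlexer = yourmodule:YourLexer',
            ],
        },
    )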
diff --git a/contrib/python/Pygments/py2/pygments/regexopt.py b/contrib/python/Pygments/py2/pygments/regexopt.py
index 59d77ee064..0529778606 100644
--- a/contrib/python/Pygments/py2/pygments/regexopt.py
+++ b/contrib/python/Pygments/py2/pygments/regexopt.py
@@ -1,92 +1,92 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.regexopt
- ~~~~~~~~~~~~~~~~~
-
- An algorithm that generates optimized regexes for matching long lists of
- literal strings.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.regexopt
+ ~~~~~~~~~~~~~~~~~
+
+ An algorithm that generates optimized regexes for matching long lists of
+ literal strings.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from re import escape
-from os.path import commonprefix
-from itertools import groupby
-from operator import itemgetter
-
-CS_ESCAPE = re.compile(r'[\^\\\-\]]')
-FIRST_ELEMENT = itemgetter(0)
-
-
-def make_charset(letters):
- return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
-
-
-def regex_opt_inner(strings, open_paren):
- """Return a regex that matches any string in the sorted list of strings."""
- close_paren = open_paren and ')' or ''
- # print strings, repr(open_paren)
- if not strings:
- # print '-> nothing left'
- return ''
- first = strings[0]
- if len(strings) == 1:
- # print '-> only 1 string'
- return open_paren + escape(first) + close_paren
- if not first:
- # print '-> first string empty'
- return open_paren + regex_opt_inner(strings[1:], '(?:') \
- + '?' + close_paren
- if len(first) == 1:
- # multiple one-char strings? make a charset
- oneletter = []
- rest = []
- for s in strings:
- if len(s) == 1:
- oneletter.append(s)
- else:
- rest.append(s)
- if len(oneletter) > 1: # do we have more than one oneletter string?
- if rest:
- # print '-> 1-character + rest'
- return open_paren + regex_opt_inner(rest, '') + '|' \
- + make_charset(oneletter) + close_paren
- # print '-> only 1-character'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from re import escape
+from os.path import commonprefix
+from itertools import groupby
+from operator import itemgetter
+
+CS_ESCAPE = re.compile(r'[\^\\\-\]]')
+FIRST_ELEMENT = itemgetter(0)
+
+
+def make_charset(letters):
+ return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
+
+
+def regex_opt_inner(strings, open_paren):
+ """Return a regex that matches any string in the sorted list of strings."""
+ close_paren = open_paren and ')' or ''
+ # print strings, repr(open_paren)
+ if not strings:
+ # print '-> nothing left'
+ return ''
+ first = strings[0]
+ if len(strings) == 1:
+ # print '-> only 1 string'
+ return open_paren + escape(first) + close_paren
+ if not first:
+ # print '-> first string empty'
+ return open_paren + regex_opt_inner(strings[1:], '(?:') \
+ + '?' + close_paren
+ if len(first) == 1:
+ # multiple one-char strings? make a charset
+ oneletter = []
+ rest = []
+ for s in strings:
+ if len(s) == 1:
+ oneletter.append(s)
+ else:
+ rest.append(s)
+ if len(oneletter) > 1: # do we have more than one oneletter string?
+ if rest:
+ # print '-> 1-character + rest'
+ return open_paren + regex_opt_inner(rest, '') + '|' \
+ + make_charset(oneletter) + close_paren
+ # print '-> only 1-character'
return open_paren + make_charset(oneletter) + close_paren
- prefix = commonprefix(strings)
- if prefix:
- plen = len(prefix)
- # we have a prefix for all strings
- # print '-> prefix:', prefix
- return open_paren + escape(prefix) \
- + regex_opt_inner([s[plen:] for s in strings], '(?:') \
- + close_paren
- # is there a suffix?
- strings_rev = [s[::-1] for s in strings]
- suffix = commonprefix(strings_rev)
- if suffix:
- slen = len(suffix)
- # print '-> suffix:', suffix[::-1]
- return open_paren \
- + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
- + escape(suffix[::-1]) + close_paren
- # recurse on common 1-string prefixes
- # print '-> last resort'
- return open_paren + \
- '|'.join(regex_opt_inner(list(group[1]), '')
- for group in groupby(strings, lambda s: s[0] == first[0])) \
- + close_paren
-
-
-def regex_opt(strings, prefix='', suffix=''):
- """Return a compiled regex that matches any string in the given list.
-
- The strings to match must be literal strings, not regexes. They will be
- regex-escaped.
-
- *prefix* and *suffix* are pre- and appended to the final regex.
- """
- strings = sorted(strings)
- return prefix + regex_opt_inner(strings, '(') + suffix
+ prefix = commonprefix(strings)
+ if prefix:
+ plen = len(prefix)
+ # we have a prefix for all strings
+ # print '-> prefix:', prefix
+ return open_paren + escape(prefix) \
+ + regex_opt_inner([s[plen:] for s in strings], '(?:') \
+ + close_paren
+ # is there a suffix?
+ strings_rev = [s[::-1] for s in strings]
+ suffix = commonprefix(strings_rev)
+ if suffix:
+ slen = len(suffix)
+ # print '-> suffix:', suffix[::-1]
+ return open_paren \
+ + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
+ + escape(suffix[::-1]) + close_paren
+ # recurse on common 1-string prefixes
+ # print '-> last resort'
+ return open_paren + \
+ '|'.join(regex_opt_inner(list(group[1]), '')
+ for group in groupby(strings, lambda s: s[0] == first[0])) \
+ + close_paren
+
+
+def regex_opt(strings, prefix='', suffix=''):
+ """Return a compiled regex that matches any string in the given list.
+
+ The strings to match must be literal strings, not regexes. They will be
+ regex-escaped.
+
+ *prefix* and *suffix* are pre- and appended to the final regex.
+ """
+ strings = sorted(strings)
+ return prefix + regex_opt_inner(strings, '(') + suffix
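
A short sketch of regex_opt in use (the keyword list is illustrative):

    import re
    from pygments.regexopt import regex_opt

    # One optimized, word-bounded alternation for a list of literal keywords.
    pattern = regex_opt(['if', 'in', 'import', 'is'], prefix=r'\b', suffix=r'\b')
    words_re = re.compile(pattern)

    assert words_re.match('import')
    assert not words_re.match('imports')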
diff --git a/contrib/python/Pygments/py2/pygments/scanner.py b/contrib/python/Pygments/py2/pygments/scanner.py
index bcb19ed9c6..0810bf3268 100644
--- a/contrib/python/Pygments/py2/pygments/scanner.py
+++ b/contrib/python/Pygments/py2/pygments/scanner.py
@@ -1,105 +1,105 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.scanner
- ~~~~~~~~~~~~~~~~
-
- This library implements a regex based scanner. Some languages
- like Pascal are easy to parse but have some keywords that
- depend on the context. Because of this it's impossible to lex
- that just by using a regular expression lexer like the
- `RegexLexer`.
-
- Have a look at the `DelphiLexer` to get an idea of how to use
- this scanner.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.scanner
+ ~~~~~~~~~~~~~~~~
+
+ This library implements a regex based scanner. Some languages
+ like Pascal are easy to parse but have some keywords that
+ depend on the context. Because of this it's impossible to lex
+ that just by using a regular expression lexer like the
+ `RegexLexer`.
+
+ Have a look at the `DelphiLexer` to get an idea of how to use
+ this scanner.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-
-class EndOfText(RuntimeError):
- """
- Raise if end of text is reached and the user
- tried to call a match function.
- """
-
-
-class Scanner(object):
- """
- Simple scanner
-
- All method patterns are regular expression strings (not
- compiled expressions!)
- """
-
- def __init__(self, text, flags=0):
- """
- :param text: The text which should be scanned
- :param flags: default regular expression flags
- """
- self.data = text
- self.data_length = len(text)
- self.start_pos = 0
- self.pos = 0
- self.flags = flags
- self.last = None
- self.match = None
- self._re_cache = {}
-
- def eos(self):
- """`True` if the scanner reached the end of text."""
- return self.pos >= self.data_length
- eos = property(eos, eos.__doc__)
-
- def check(self, pattern):
- """
- Apply `pattern` on the current position and return
- the match object. (Doesn't touch pos). Use this for
- lookahead.
- """
- if self.eos:
- raise EndOfText()
- if pattern not in self._re_cache:
- self._re_cache[pattern] = re.compile(pattern, self.flags)
- return self._re_cache[pattern].match(self.data, self.pos)
-
- def test(self, pattern):
- """Apply a pattern on the current position and check
+ :license: BSD, see LICENSE for details.
+"""
+import re
+
+
+class EndOfText(RuntimeError):
+ """
+ Raise if end of text is reached and the user
+ tried to call a match function.
+ """
+
+
+class Scanner(object):
+ """
+ Simple scanner
+
+ All method patterns are regular expression strings (not
+ compiled expressions!)
+ """
+
+ def __init__(self, text, flags=0):
+ """
+ :param text: The text which should be scanned
+ :param flags: default regular expression flags
+ """
+ self.data = text
+ self.data_length = len(text)
+ self.start_pos = 0
+ self.pos = 0
+ self.flags = flags
+ self.last = None
+ self.match = None
+ self._re_cache = {}
+
+ def eos(self):
+ """`True` if the scanner reached the end of text."""
+ return self.pos >= self.data_length
+ eos = property(eos, eos.__doc__)
+
+ def check(self, pattern):
+ """
+ Apply `pattern` on the current position and return
+ the match object. (Doesn't touch pos). Use this for
+ lookahead.
+ """
+ if self.eos:
+ raise EndOfText()
+ if pattern not in self._re_cache:
+ self._re_cache[pattern] = re.compile(pattern, self.flags)
+ return self._re_cache[pattern].match(self.data, self.pos)
+
+ def test(self, pattern):
+ """Apply a pattern on the current position and check
         if it matches. Doesn't touch pos.
"""
- return self.check(pattern) is not None
-
- def scan(self, pattern):
- """
- Scan the text for the given pattern and update pos/match
-        and related fields. The return value is a boolean that
- indicates if the pattern matched. The matched value is
- stored on the instance as ``match``, the last value is
- stored as ``last``. ``start_pos`` is the position of the
- pointer before the pattern was matched, ``pos`` is the
- end position.
- """
- if self.eos:
- raise EndOfText()
- if pattern not in self._re_cache:
- self._re_cache[pattern] = re.compile(pattern, self.flags)
- self.last = self.match
- m = self._re_cache[pattern].match(self.data, self.pos)
- if m is None:
- return False
- self.start_pos = m.start()
- self.pos = m.end()
- self.match = m.group()
- return True
-
- def get_char(self):
- """Scan exactly one char."""
- self.scan('.')
-
- def __repr__(self):
- return '<%s %d/%d>' % (
- self.__class__.__name__,
- self.pos,
- self.data_length
- )
+ return self.check(pattern) is not None
+
+ def scan(self, pattern):
+ """
+ Scan the text for the given pattern and update pos/match
+        and related fields. The return value is a boolean that
+ indicates if the pattern matched. The matched value is
+ stored on the instance as ``match``, the last value is
+ stored as ``last``. ``start_pos`` is the position of the
+ pointer before the pattern was matched, ``pos`` is the
+ end position.
+ """
+ if self.eos:
+ raise EndOfText()
+ if pattern not in self._re_cache:
+ self._re_cache[pattern] = re.compile(pattern, self.flags)
+ self.last = self.match
+ m = self._re_cache[pattern].match(self.data, self.pos)
+ if m is None:
+ return False
+ self.start_pos = m.start()
+ self.pos = m.end()
+ self.match = m.group()
+ return True
+
+ def get_char(self):
+ """Scan exactly one char."""
+ self.scan('.')
+
+ def __repr__(self):
+ return '<%s %d/%d>' % (
+ self.__class__.__name__,
+ self.pos,
+ self.data_length
+ )
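
A minimal sketch of driving the scanner by hand (the input and token labels are illustrative; the DelphiLexer mentioned above is the real consumer):

    from pygments.scanner import Scanner

    s = Scanner('begin x := 1 end')
    tokens = []
    while not s.eos:
        if s.scan(r'\s+'):
            continue                          # skip whitespace
        if s.scan(r'[a-zA-Z_]\w*'):
            tokens.append(('name', s.match))
        elif s.scan(r':=|\d+'):
            tokens.append(('op/num', s.match))
        else:
            s.get_char()                      # always make progress
    # tokens == [('name', 'begin'), ('name', 'x'), ('op/num', ':='), ...]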
diff --git a/contrib/python/Pygments/py2/pygments/sphinxext.py b/contrib/python/Pygments/py2/pygments/sphinxext.py
index 282693e7e9..161e7e9328 100644
--- a/contrib/python/Pygments/py2/pygments/sphinxext.py
+++ b/contrib/python/Pygments/py2/pygments/sphinxext.py
@@ -1,158 +1,158 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.sphinxext
- ~~~~~~~~~~~~~~~~~~
-
- Sphinx extension to generate automatic documentation of lexers,
- formatters and filters.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.sphinxext
+ ~~~~~~~~~~~~~~~~~~
+
+ Sphinx extension to generate automatic documentation of lexers,
+ formatters and filters.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-import sys
-
-from docutils import nodes
-from docutils.statemachine import ViewList
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+import sys
+
+from docutils import nodes
+from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive
-from sphinx.util.nodes import nested_parse_with_titles
-
-
-MODULEDOC = '''
-.. module:: %s
-
-%s
-%s
-'''
-
-LEXERDOC = '''
-.. class:: %s
-
- :Short names: %s
- :Filenames: %s
- :MIME types: %s
-
- %s
-
-'''
-
-FMTERDOC = '''
-.. class:: %s
-
- :Short names: %s
- :Filenames: %s
-
- %s
-
-'''
-
-FILTERDOC = '''
-.. class:: %s
-
- :Name: %s
-
- %s
-
-'''
-
-
-class PygmentsDoc(Directive):
- """
- A directive to collect all lexers/formatters/filters and generate
- autoclass directives for them.
- """
- has_content = False
- required_arguments = 1
- optional_arguments = 0
- final_argument_whitespace = False
- option_spec = {}
-
- def run(self):
- self.filenames = set()
- if self.arguments[0] == 'lexers':
- out = self.document_lexers()
- elif self.arguments[0] == 'formatters':
- out = self.document_formatters()
- elif self.arguments[0] == 'filters':
- out = self.document_filters()
- else:
- raise Exception('invalid argument for "pygmentsdoc" directive')
- node = nodes.compound()
- vl = ViewList(out.split('\n'), source='')
- nested_parse_with_titles(self.state, vl, node)
- for fn in self.filenames:
- self.state.document.settings.record_dependencies.add(fn)
- return node.children
-
- def document_lexers(self):
- from pygments.lexers._mapping import LEXERS
- out = []
- modules = {}
- moduledocstrings = {}
- for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
- module = data[0]
- mod = __import__(module, None, None, [classname])
- self.filenames.add(mod.__file__)
- cls = getattr(mod, classname)
- if not cls.__doc__:
- print("Warning: %s does not have a docstring." % classname)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- modules.setdefault(module, []).append((
- classname,
- ', '.join(data[2]) or 'None',
- ', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None',
- ', '.join(data[4]) or 'None',
- docstring))
- if module not in moduledocstrings:
- moddoc = mod.__doc__
- if isinstance(moddoc, bytes):
- moddoc = moddoc.decode('utf8')
- moduledocstrings[module] = moddoc
-
- for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
- if moduledocstrings[module] is None:
- raise Exception("Missing docstring for %s" % (module,))
- heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
- out.append(MODULEDOC % (module, heading, '-'*len(heading)))
- for data in lexers:
- out.append(LEXERDOC % data)
-
- return ''.join(out)
-
- def document_formatters(self):
- from pygments.formatters import FORMATTERS
-
- out = []
- for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
- module = data[0]
- mod = __import__(module, None, None, [classname])
- self.filenames.add(mod.__file__)
- cls = getattr(mod, classname)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- heading = cls.__name__
- out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
- ', '.join(data[3]).replace('*', '\\*') or 'None',
- docstring))
- return ''.join(out)
-
- def document_filters(self):
- from pygments.filters import FILTERS
-
- out = []
- for name, cls in FILTERS.items():
- self.filenames.add(sys.modules[cls.__module__].__file__)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- out.append(FILTERDOC % (cls.__name__, name, docstring))
- return ''.join(out)
-
-
-def setup(app):
- app.add_directive('pygmentsdoc', PygmentsDoc)
+from sphinx.util.nodes import nested_parse_with_titles
+
+
+MODULEDOC = '''
+.. module:: %s
+
+%s
+%s
+'''
+
+LEXERDOC = '''
+.. class:: %s
+
+ :Short names: %s
+ :Filenames: %s
+ :MIME types: %s
+
+ %s
+
+'''
+
+FMTERDOC = '''
+.. class:: %s
+
+ :Short names: %s
+ :Filenames: %s
+
+ %s
+
+'''
+
+FILTERDOC = '''
+.. class:: %s
+
+ :Name: %s
+
+ %s
+
+'''
+
+
+class PygmentsDoc(Directive):
+ """
+ A directive to collect all lexers/formatters/filters and generate
+ autoclass directives for them.
+ """
+ has_content = False
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+ option_spec = {}
+
+ def run(self):
+ self.filenames = set()
+ if self.arguments[0] == 'lexers':
+ out = self.document_lexers()
+ elif self.arguments[0] == 'formatters':
+ out = self.document_formatters()
+ elif self.arguments[0] == 'filters':
+ out = self.document_filters()
+ else:
+ raise Exception('invalid argument for "pygmentsdoc" directive')
+ node = nodes.compound()
+ vl = ViewList(out.split('\n'), source='')
+ nested_parse_with_titles(self.state, vl, node)
+ for fn in self.filenames:
+ self.state.document.settings.record_dependencies.add(fn)
+ return node.children
+
+ def document_lexers(self):
+ from pygments.lexers._mapping import LEXERS
+ out = []
+ modules = {}
+ moduledocstrings = {}
+ for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
+ module = data[0]
+ mod = __import__(module, None, None, [classname])
+ self.filenames.add(mod.__file__)
+ cls = getattr(mod, classname)
+ if not cls.__doc__:
+ print("Warning: %s does not have a docstring." % classname)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ modules.setdefault(module, []).append((
+ classname,
+ ', '.join(data[2]) or 'None',
+ ', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None',
+ ', '.join(data[4]) or 'None',
+ docstring))
+ if module not in moduledocstrings:
+ moddoc = mod.__doc__
+ if isinstance(moddoc, bytes):
+ moddoc = moddoc.decode('utf8')
+ moduledocstrings[module] = moddoc
+
+ for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
+ if moduledocstrings[module] is None:
+ raise Exception("Missing docstring for %s" % (module,))
+ heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
+ out.append(MODULEDOC % (module, heading, '-'*len(heading)))
+ for data in lexers:
+ out.append(LEXERDOC % data)
+
+ return ''.join(out)
+
+ def document_formatters(self):
+ from pygments.formatters import FORMATTERS
+
+ out = []
+ for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
+ module = data[0]
+ mod = __import__(module, None, None, [classname])
+ self.filenames.add(mod.__file__)
+ cls = getattr(mod, classname)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ heading = cls.__name__
+ out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
+ ', '.join(data[3]).replace('*', '\\*') or 'None',
+ docstring))
+ return ''.join(out)
+
+ def document_filters(self):
+ from pygments.filters import FILTERS
+
+ out = []
+ for name, cls in FILTERS.items():
+ self.filenames.add(sys.modules[cls.__module__].__file__)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ out.append(FILTERDOC % (cls.__name__, name, docstring))
+ return ''.join(out)
+
+
+def setup(app):
+ app.add_directive('pygmentsdoc', PygmentsDoc)
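
A sketch of enabling the extension in a Sphinx project, assuming the default module path (the commented directive line shows how a document would invoke it):

    # conf.py
    extensions = ['pygments.sphinxext']

    # then, in an .rst document:
    #   .. pygmentsdoc:: lexers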
diff --git a/contrib/python/Pygments/py2/pygments/style.py b/contrib/python/Pygments/py2/pygments/style.py
index 7326457774..7975d4cffd 100644
--- a/contrib/python/Pygments/py2/pygments/style.py
+++ b/contrib/python/Pygments/py2/pygments/style.py
@@ -1,17 +1,17 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.style
- ~~~~~~~~~~~~~~
-
- Basic style object.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.style
+ ~~~~~~~~~~~~~~
+
+ Basic style object.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.token import Token, STANDARD_TYPES
-from pygments.util import add_metaclass
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.token import Token, STANDARD_TYPES
+from pygments.util import add_metaclass
+
# Default mapping of ansixxx to RGB colors.
_ansimap = {
# dark
@@ -55,78 +55,78 @@ _deprecated_ansicolors = {
'#ansiwhite': 'ansiwhite',
}
ansicolors = set(_ansimap)
-
-
-class StyleMeta(type):
-
- def __new__(mcs, name, bases, dct):
- obj = type.__new__(mcs, name, bases, dct)
- for token in STANDARD_TYPES:
- if token not in obj.styles:
- obj.styles[token] = ''
-
- def colorformat(text):
+
+
+class StyleMeta(type):
+
+ def __new__(mcs, name, bases, dct):
+ obj = type.__new__(mcs, name, bases, dct)
+ for token in STANDARD_TYPES:
+ if token not in obj.styles:
+ obj.styles[token] = ''
+
+ def colorformat(text):
if text in ansicolors:
return text
- if text[0:1] == '#':
- col = text[1:]
- if len(col) == 6:
- return col
- elif len(col) == 3:
+ if text[0:1] == '#':
+ col = text[1:]
+ if len(col) == 6:
+ return col
+ elif len(col) == 3:
return col[0] * 2 + col[1] * 2 + col[2] * 2
- elif text == '':
- return ''
+ elif text == '':
+ return ''
elif text.startswith('var') or text.startswith('calc'):
return text
- assert False, "wrong color format %r" % text
-
- _styles = obj._styles = {}
-
- for ttype in obj.styles:
- for token in ttype.split():
- if token in _styles:
- continue
- ndef = _styles.get(token.parent, None)
- styledefs = obj.styles.get(token, '').split()
- if not ndef or token is None:
- ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
- elif 'noinherit' in styledefs and token is not Token:
- ndef = _styles[Token][:]
- else:
- ndef = ndef[:]
- _styles[token] = ndef
- for styledef in obj.styles.get(token, '').split():
- if styledef == 'noinherit':
- pass
- elif styledef == 'bold':
- ndef[1] = 1
- elif styledef == 'nobold':
- ndef[1] = 0
- elif styledef == 'italic':
- ndef[2] = 1
- elif styledef == 'noitalic':
- ndef[2] = 0
- elif styledef == 'underline':
- ndef[3] = 1
- elif styledef == 'nounderline':
- ndef[3] = 0
- elif styledef[:3] == 'bg:':
- ndef[4] = colorformat(styledef[3:])
- elif styledef[:7] == 'border:':
- ndef[5] = colorformat(styledef[7:])
- elif styledef == 'roman':
- ndef[6] = 1
- elif styledef == 'sans':
- ndef[7] = 1
- elif styledef == 'mono':
- ndef[8] = 1
- else:
- ndef[0] = colorformat(styledef)
-
- return obj
-
- def style_for_token(cls, token):
- t = cls._styles[token]
+ assert False, "wrong color format %r" % text
+
+ _styles = obj._styles = {}
+
+ for ttype in obj.styles:
+ for token in ttype.split():
+ if token in _styles:
+ continue
+ ndef = _styles.get(token.parent, None)
+ styledefs = obj.styles.get(token, '').split()
+ if not ndef or token is None:
+ ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
+ elif 'noinherit' in styledefs and token is not Token:
+ ndef = _styles[Token][:]
+ else:
+ ndef = ndef[:]
+ _styles[token] = ndef
+ for styledef in obj.styles.get(token, '').split():
+ if styledef == 'noinherit':
+ pass
+ elif styledef == 'bold':
+ ndef[1] = 1
+ elif styledef == 'nobold':
+ ndef[1] = 0
+ elif styledef == 'italic':
+ ndef[2] = 1
+ elif styledef == 'noitalic':
+ ndef[2] = 0
+ elif styledef == 'underline':
+ ndef[3] = 1
+ elif styledef == 'nounderline':
+ ndef[3] = 0
+ elif styledef[:3] == 'bg:':
+ ndef[4] = colorformat(styledef[3:])
+ elif styledef[:7] == 'border:':
+ ndef[5] = colorformat(styledef[7:])
+ elif styledef == 'roman':
+ ndef[6] = 1
+ elif styledef == 'sans':
+ ndef[7] = 1
+ elif styledef == 'mono':
+ ndef[8] = 1
+ else:
+ ndef[0] = colorformat(styledef)
+
+ return obj
+
+ def style_for_token(cls, token):
+ t = cls._styles[token]
ansicolor = bgansicolor = None
color = t[0]
if color in _deprecated_ansicolors:
@@ -141,42 +141,42 @@ class StyleMeta(type):
bgansicolor = bgcolor
bgcolor = _ansimap[bgcolor]
- return {
+ return {
'color': color or None,
- 'bold': bool(t[1]),
- 'italic': bool(t[2]),
- 'underline': bool(t[3]),
+ 'bold': bool(t[1]),
+ 'italic': bool(t[2]),
+ 'underline': bool(t[3]),
'bgcolor': bgcolor or None,
- 'border': t[5] or None,
- 'roman': bool(t[6]) or None,
- 'sans': bool(t[7]) or None,
- 'mono': bool(t[8]) or None,
+ 'border': t[5] or None,
+ 'roman': bool(t[6]) or None,
+ 'sans': bool(t[7]) or None,
+ 'mono': bool(t[8]) or None,
'ansicolor': ansicolor,
'bgansicolor': bgansicolor,
- }
-
- def list_styles(cls):
- return list(cls)
-
- def styles_token(cls, ttype):
- return ttype in cls._styles
-
- def __iter__(cls):
- for token in cls._styles:
- yield token, cls.style_for_token(token)
-
- def __len__(cls):
- return len(cls._styles)
-
-
-@add_metaclass(StyleMeta)
-class Style(object):
-
- #: overall background color (``None`` means transparent)
- background_color = '#ffffff'
-
- #: highlight background color
- highlight_color = '#ffffcc'
-
- #: Style definitions for individual token types.
- styles = {}
+ }
+
+ def list_styles(cls):
+ return list(cls)
+
+ def styles_token(cls, ttype):
+ return ttype in cls._styles
+
+ def __iter__(cls):
+ for token in cls._styles:
+ yield token, cls.style_for_token(token)
+
+ def __len__(cls):
+ return len(cls._styles)
+
+
+@add_metaclass(StyleMeta)
+class Style(object):
+
+ #: overall background color (``None`` means transparent)
+ background_color = '#ffffff'
+
+ #: highlight background color
+ highlight_color = '#ffffcc'
+
+ #: Style definitions for individual token types.
+ styles = {}
diff --git a/contrib/python/Pygments/py2/pygments/styles/__init__.py b/contrib/python/Pygments/py2/pygments/styles/__init__.py
index c0614718a2..dbdd9486b0 100644
--- a/contrib/python/Pygments/py2/pygments/styles/__init__.py
+++ b/contrib/python/Pygments/py2/pygments/styles/__init__.py
@@ -1,46 +1,46 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles
- ~~~~~~~~~~~~~~~
-
- Contains built-in styles.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles
+ ~~~~~~~~~~~~~~~
+
+ Contains built-in styles.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.plugin import find_plugin_styles
-from pygments.util import ClassNotFound
-
-
-#: Maps style names to 'submodule::classname'.
-STYLE_MAP = {
- 'default': 'default::DefaultStyle',
- 'emacs': 'emacs::EmacsStyle',
- 'friendly': 'friendly::FriendlyStyle',
- 'colorful': 'colorful::ColorfulStyle',
- 'autumn': 'autumn::AutumnStyle',
- 'murphy': 'murphy::MurphyStyle',
- 'manni': 'manni::ManniStyle',
- 'monokai': 'monokai::MonokaiStyle',
- 'perldoc': 'perldoc::PerldocStyle',
- 'pastie': 'pastie::PastieStyle',
- 'borland': 'borland::BorlandStyle',
- 'trac': 'trac::TracStyle',
- 'native': 'native::NativeStyle',
- 'fruity': 'fruity::FruityStyle',
- 'bw': 'bw::BlackWhiteStyle',
- 'vim': 'vim::VimStyle',
- 'vs': 'vs::VisualStudioStyle',
- 'tango': 'tango::TangoStyle',
- 'rrt': 'rrt::RrtStyle',
- 'xcode': 'xcode::XcodeStyle',
- 'igor': 'igor::IgorStyle',
- 'paraiso-light': 'paraiso_light::ParaisoLightStyle',
- 'paraiso-dark': 'paraiso_dark::ParaisoDarkStyle',
- 'lovelace': 'lovelace::LovelaceStyle',
- 'algol': 'algol::AlgolStyle',
- 'algol_nu': 'algol_nu::Algol_NuStyle',
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.plugin import find_plugin_styles
+from pygments.util import ClassNotFound
+
+
+#: Maps style names to 'submodule::classname'.
+STYLE_MAP = {
+ 'default': 'default::DefaultStyle',
+ 'emacs': 'emacs::EmacsStyle',
+ 'friendly': 'friendly::FriendlyStyle',
+ 'colorful': 'colorful::ColorfulStyle',
+ 'autumn': 'autumn::AutumnStyle',
+ 'murphy': 'murphy::MurphyStyle',
+ 'manni': 'manni::ManniStyle',
+ 'monokai': 'monokai::MonokaiStyle',
+ 'perldoc': 'perldoc::PerldocStyle',
+ 'pastie': 'pastie::PastieStyle',
+ 'borland': 'borland::BorlandStyle',
+ 'trac': 'trac::TracStyle',
+ 'native': 'native::NativeStyle',
+ 'fruity': 'fruity::FruityStyle',
+ 'bw': 'bw::BlackWhiteStyle',
+ 'vim': 'vim::VimStyle',
+ 'vs': 'vs::VisualStudioStyle',
+ 'tango': 'tango::TangoStyle',
+ 'rrt': 'rrt::RrtStyle',
+ 'xcode': 'xcode::XcodeStyle',
+ 'igor': 'igor::IgorStyle',
+ 'paraiso-light': 'paraiso_light::ParaisoLightStyle',
+ 'paraiso-dark': 'paraiso_dark::ParaisoDarkStyle',
+ 'lovelace': 'lovelace::LovelaceStyle',
+ 'algol': 'algol::AlgolStyle',
+ 'algol_nu': 'algol_nu::Algol_NuStyle',
'arduino': 'arduino::ArduinoStyle',
'rainbow_dash': 'rainbow_dash::RainbowDashStyle',
'abap': 'abap::AbapStyle',
@@ -51,37 +51,37 @@ STYLE_MAP = {
'stata-light': 'stata_light::StataLightStyle',
'stata-dark': 'stata_dark::StataDarkStyle',
'inkpot': 'inkpot::InkPotStyle',
-}
-
-
-def get_style_by_name(name):
- if name in STYLE_MAP:
- mod, cls = STYLE_MAP[name].split('::')
- builtin = "yes"
- else:
- for found_name, style in find_plugin_styles():
- if name == found_name:
- return style
- # perhaps it got dropped into our styles package
- builtin = ""
- mod = name
- cls = name.title() + "Style"
-
- try:
- mod = __import__('pygments.styles.' + mod, None, None, [cls])
- except ImportError:
- raise ClassNotFound("Could not find style module %r" % mod +
- (builtin and ", though it should be builtin") + ".")
- try:
- return getattr(mod, cls)
- except AttributeError:
- raise ClassNotFound("Could not find style class %r in style module." % cls)
-
-
-def get_all_styles():
- """Return an generator for all styles by name,
- both builtin and plugin."""
- for name in STYLE_MAP:
- yield name
- for name, _ in find_plugin_styles():
- yield name
+}
+
+
+def get_style_by_name(name):
+ if name in STYLE_MAP:
+ mod, cls = STYLE_MAP[name].split('::')
+ builtin = "yes"
+ else:
+ for found_name, style in find_plugin_styles():
+ if name == found_name:
+ return style
+ # perhaps it got dropped into our styles package
+ builtin = ""
+ mod = name
+ cls = name.title() + "Style"
+
+ try:
+ mod = __import__('pygments.styles.' + mod, None, None, [cls])
+ except ImportError:
+ raise ClassNotFound("Could not find style module %r" % mod +
+ (builtin and ", though it should be builtin") + ".")
+ try:
+ return getattr(mod, cls)
+ except AttributeError:
+ raise ClassNotFound("Could not find style class %r in style module." % cls)
+
+
+def get_all_styles():
+ """Return an generator for all styles by name,
+ both builtin and plugin."""
+ for name in STYLE_MAP:
+ yield name
+ for name, _ in find_plugin_styles():
+ yield name
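
A short sketch of the lookup helpers above (style name and formatter choice are illustrative):

    from pygments.styles import get_all_styles, get_style_by_name
    from pygments.formatters import HtmlFormatter

    print(sorted(get_all_styles()))            # builtin and plugin style names
    style_cls = get_style_by_name('monokai')   # -> MonokaiStyle
    formatter = HtmlFormatter(style=style_cls)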
diff --git a/contrib/python/Pygments/py2/pygments/styles/algol.py b/contrib/python/Pygments/py2/pygments/styles/algol.py
index d4d91870d7..318a287d1d 100644
--- a/contrib/python/Pygments/py2/pygments/styles/algol.py
+++ b/contrib/python/Pygments/py2/pygments/styles/algol.py
@@ -1,63 +1,63 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.algol
- ~~~~~~~~~~~~~~~~~~~~~
-
- Algol publication style.
-
- This style renders source code for publication of algorithms in
- scientific papers and academic texts, where its format is frequently used.
-
- It is based on the style of the revised Algol-60 language report[1].
-
- o No colours, only black, white and shades of grey are used.
- o Keywords are rendered in lowercase underline boldface.
- o Builtins are rendered in lowercase boldface italic.
- o Docstrings and pragmas are rendered in dark grey boldface.
- o Library identifiers are rendered in dark grey boldface italic.
- o Comments are rendered in grey italic.
-
- To render keywords without underlining, refer to the `Algol_Nu` style.
-
- For lowercase conversion of keywords and builtins in languages where
- these are not or might not be lowercase, a supporting lexer is required.
- The Algol and Modula-2 lexers automatically convert to lowercase whenever
- this style is selected.
-
- [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.algol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Algol publication style.
+
+ This style renders source code for publication of algorithms in
+ scientific papers and academic texts, where its format is frequently used.
+
+ It is based on the style of the revised Algol-60 language report[1].
+
+ o No colours, only black, white and shades of grey are used.
+ o Keywords are rendered in lowercase underline boldface.
+ o Builtins are rendered in lowercase boldface italic.
+ o Docstrings and pragmas are rendered in dark grey boldface.
+ o Library identifiers are rendered in dark grey boldface italic.
+ o Comments are rendered in grey italic.
+
+ To render keywords without underlining, refer to the `Algol_Nu` style.
+
+ For lowercase conversion of keywords and builtins in languages where
+ these are not or might not be lowercase, a supporting lexer is required.
+ The Algol and Modula-2 lexers automatically convert to lowercase whenever
+ this style is selected.
+
+ [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Operator
-
-
-class AlgolStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "italic #888",
- Comment.Preproc: "bold noitalic #888",
- Comment.Special: "bold noitalic #888",
-
- Keyword: "underline bold",
- Keyword.Declaration: "italic",
-
- Name.Builtin: "bold italic",
- Name.Builtin.Pseudo: "bold italic",
- Name.Namespace: "bold italic #666",
- Name.Class: "bold italic #666",
- Name.Function: "bold italic #666",
- Name.Variable: "bold italic #666",
- Name.Constant: "bold italic #666",
-
- Operator.Word: "bold",
-
- String: "italic #666",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Operator
+
+
+class AlgolStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #888",
+ Comment.Preproc: "bold noitalic #888",
+ Comment.Special: "bold noitalic #888",
+
+ Keyword: "underline bold",
+ Keyword.Declaration: "italic",
+
+ Name.Builtin: "bold italic",
+ Name.Builtin.Pseudo: "bold italic",
+ Name.Namespace: "bold italic #666",
+ Name.Class: "bold italic #666",
+ Name.Function: "bold italic #666",
+ Name.Variable: "bold italic #666",
+ Name.Constant: "bold italic #666",
+
+ Operator.Word: "bold",
+
+ String: "italic #666",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/algol_nu.py b/contrib/python/Pygments/py2/pygments/styles/algol_nu.py
index 09d69452d9..f6709f4b76 100644
--- a/contrib/python/Pygments/py2/pygments/styles/algol_nu.py
+++ b/contrib/python/Pygments/py2/pygments/styles/algol_nu.py
@@ -1,63 +1,63 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.algol_nu
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Algol publication style without underlining of keywords.
-
- This style renders source code for publication of algorithms in
- scientific papers and academic texts, where its format is frequently used.
-
- It is based on the style of the revised Algol-60 language report[1].
-
- o No colours, only black, white and shades of grey are used.
- o Keywords are rendered in lowercase boldface.
- o Builtins are rendered in lowercase boldface italic.
- o Docstrings and pragmas are rendered in dark grey boldface.
- o Library identifiers are rendered in dark grey boldface italic.
- o Comments are rendered in grey italic.
-
- To render keywords with underlining, refer to the `Algol` style.
-
- For lowercase conversion of keywords and builtins in languages where
- these are not or might not be lowercase, a supporting lexer is required.
- The Algol and Modula-2 lexers automatically convert to lowercase whenever
- this style is selected.
-
- [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.algol_nu
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Algol publication style without underlining of keywords.
+
+ This style renders source code for publication of algorithms in
+ scientific papers and academic texts, where its format is frequently used.
+
+ It is based on the style of the revised Algol-60 language report[1].
+
+ o No colours, only black, white and shades of grey are used.
+ o Keywords are rendered in lowercase boldface.
+ o Builtins are rendered in lowercase boldface italic.
+ o Docstrings and pragmas are rendered in dark grey boldface.
+ o Library identifiers are rendered in dark grey boldface italic.
+ o Comments are rendered in grey italic.
+
+ To render keywords with underlining, refer to the `Algol` style.
+
+ For lowercase conversion of keywords and builtins in languages where
+ these are not or might not be lowercase, a supporting lexer is required.
+ The Algol and Modula-2 lexers automatically convert to lowercase whenever
+ this style is selected.
+
+ [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Operator
-
-
-class Algol_NuStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "italic #888",
- Comment.Preproc: "bold noitalic #888",
- Comment.Special: "bold noitalic #888",
-
- Keyword: "bold",
- Keyword.Declaration: "italic",
-
- Name.Builtin: "bold italic",
- Name.Builtin.Pseudo: "bold italic",
- Name.Namespace: "bold italic #666",
- Name.Class: "bold italic #666",
- Name.Function: "bold italic #666",
- Name.Variable: "bold italic #666",
- Name.Constant: "bold italic #666",
-
- Operator.Word: "bold",
-
- String: "italic #666",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Operator
+
+
+class Algol_NuStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #888",
+ Comment.Preproc: "bold noitalic #888",
+ Comment.Special: "bold noitalic #888",
+
+ Keyword: "bold",
+ Keyword.Declaration: "italic",
+
+ Name.Builtin: "bold italic",
+ Name.Builtin.Pseudo: "bold italic",
+ Name.Namespace: "bold italic #666",
+ Name.Class: "bold italic #666",
+ Name.Function: "bold italic #666",
+ Name.Variable: "bold italic #666",
+ Name.Constant: "bold italic #666",
+
+ Operator.Word: "bold",
+
+ String: "italic #666",
+
+ Error: "border:#FF0000"
+ }
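
As the two hunks show, the only substantive difference between AlgolStyle and Algol_NuStyle is the Keyword entry ("underline bold" versus "bold"). A small, hedged check of that difference, assuming both restored modules import from a regular Pygments tree:

from pygments.styles.algol import AlgolStyle
from pygments.styles.algol_nu import Algol_NuStyle
from pygments.token import Keyword

# style_for_token() resolves a token type to a dict of concrete attributes.
print(AlgolStyle.style_for_token(Keyword)['underline'])     # expected: True
print(Algol_NuStyle.style_for_token(Keyword)['underline'])  # expected: False
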
diff --git a/contrib/python/Pygments/py2/pygments/styles/arduino.py b/contrib/python/Pygments/py2/pygments/styles/arduino.py
index 160ca0947d..b47a9cfbd8 100644
--- a/contrib/python/Pygments/py2/pygments/styles/arduino.py
+++ b/contrib/python/Pygments/py2/pygments/styles/arduino.py
@@ -1,98 +1,98 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.arduino
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Arduino® Syntax highlighting style.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.arduino
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Arduino® Syntax highlighting style.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
-
-
-class ArduinoStyle(Style):
+
+
+class ArduinoStyle(Style):
u"""
- The Arduino® language style. This style is designed to highlight the
-    Arduino source code, so expect the best results with it.
- """
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Whitespace: "", # class: 'w'
- Error: "#a61717", # class: 'err'
-
- Comment: "#95a5a6", # class: 'c'
- Comment.Multiline: "", # class: 'cm'
+ The Arduino® language style. This style is designed to highlight the
+    Arduino source code, so expect the best results with it.

+ """
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Whitespace: "", # class: 'w'
+ Error: "#a61717", # class: 'err'
+
+ Comment: "#95a5a6", # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
Comment.Preproc: "#728E00", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: "#728E00", # class: 'k'
- Keyword.Constant: "#00979D", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: "", # class: 'kn'
- Keyword.Pseudo: "#00979D", # class: 'kp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: "#728E00", # class: 'k'
+ Keyword.Constant: "#00979D", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: "", # class: 'kn'
+ Keyword.Pseudo: "#00979D", # class: 'kp'
Keyword.Reserved: "#00979D", # class: 'kr'
- Keyword.Type: "#00979D", # class: 'kt'
-
+ Keyword.Type: "#00979D", # class: 'kt'
+
Operator: "#728E00", # class: 'o'
- Operator.Word: "", # class: 'ow'
-
- Name: "#434f54", # class: 'n'
- Name.Attribute: "", # class: 'na'
+ Operator.Word: "", # class: 'ow'
+
+ Name: "#434f54", # class: 'n'
+ Name.Attribute: "", # class: 'na'
Name.Builtin: "#728E00", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: "", # class: 'nc'
- Name.Constant: "", # class: 'no'
- Name.Decorator: "", # class: 'nd'
- Name.Entity: "", # class: 'ni'
- Name.Exception: "", # class: 'ne'
- Name.Function: "#D35400", # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: "", # class: 'nn'
- Name.Other: "#728E00", # class: 'nx'
- Name.Tag: "", # class: 'nt'
- Name.Variable: "", # class: 'nv'
- Name.Variable.Class: "", # class: 'vc'
- Name.Variable.Global: "", # class: 'vg'
- Name.Variable.Instance: "", # class: 'vi'
-
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: "", # class: 'nc'
+ Name.Constant: "", # class: 'no'
+ Name.Decorator: "", # class: 'nd'
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: "", # class: 'ne'
+ Name.Function: "#D35400", # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: "", # class: 'nn'
+ Name.Other: "#728E00", # class: 'nx'
+ Name.Tag: "", # class: 'nt'
+ Name.Variable: "", # class: 'nv'
+ Name.Variable.Class: "", # class: 'vc'
+ Name.Variable.Global: "", # class: 'vg'
+ Name.Variable.Instance: "", # class: 'vi'
+
Number: "#8A7B52", # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- String: "#7F8C8D", # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: "", # class: 'sc'
- String.Doc: "", # class: 'sd'
- String.Double: "", # class: 's2'
- String.Escape: "", # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: "", # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: "", # class: 'gd',
- Generic.Emph: "", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "", # class: 'gh'
- Generic.Inserted: "", # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "", # class: 'gp'
- Generic.Strong: "", # class: 'gs'
- Generic.Subheading: "", # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ String: "#7F8C8D", # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: "", # class: 'sc'
+ String.Doc: "", # class: 'sd'
+ String.Double: "", # class: 's2'
+ String.Escape: "", # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: "", # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: "", # class: 'gd',
+ Generic.Emph: "", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "", # class: 'gh'
+ Generic.Inserted: "", # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "", # class: 'gp'
+ Generic.Strong: "", # class: 'gs'
+ Generic.Subheading: "", # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
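
Many entries in the ArduinoStyle table are empty strings (for example Comment.Multiline); an empty definition adds no rules of its own, so the subtype falls back to the style of its parent token. A short sketch of that fallback, under the assumption of a stock Pygments install:

from pygments.styles.arduino import ArduinoStyle
from pygments.token import Comment

parent = ArduinoStyle.style_for_token(Comment)            # defined above as "#95a5a6"
child = ArduinoStyle.style_for_token(Comment.Multiline)   # defined above as ""
print(parent['color'], child['color'])                    # both should resolve to the same grey
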
diff --git a/contrib/python/Pygments/py2/pygments/styles/autumn.py b/contrib/python/Pygments/py2/pygments/styles/autumn.py
index d5ee045d43..6e5d7225a3 100644
--- a/contrib/python/Pygments/py2/pygments/styles/autumn.py
+++ b/contrib/python/Pygments/py2/pygments/styles/autumn.py
@@ -1,65 +1,65 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.autumn
- ~~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by the terminal highlighting style.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.autumn
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by the terminal highlighting style.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class AutumnStyle(Style):
- """
- A colorful style, inspired by the terminal highlighting style.
- """
-
- default_style = ""
-
- styles = {
- Whitespace: '#bbbbbb',
-
- Comment: 'italic #aaaaaa',
- Comment.Preproc: 'noitalic #4c8317',
- Comment.Special: 'italic #0000aa',
-
- Keyword: '#0000aa',
- Keyword.Type: '#00aaaa',
-
- Operator.Word: '#0000aa',
-
- Name.Builtin: '#00aaaa',
- Name.Function: '#00aa00',
- Name.Class: 'underline #00aa00',
- Name.Namespace: 'underline #00aaaa',
- Name.Variable: '#aa0000',
- Name.Constant: '#aa0000',
- Name.Entity: 'bold #800',
- Name.Attribute: '#1e90ff',
- Name.Tag: 'bold #1e90ff',
- Name.Decorator: '#888888',
-
- String: '#aa5500',
- String.Symbol: '#0000aa',
- String.Regex: '#009999',
-
- Number: '#009999',
-
- Generic.Heading: 'bold #000080',
- Generic.Subheading: 'bold #800080',
- Generic.Deleted: '#aa0000',
- Generic.Inserted: '#00aa00',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: '#F00 bg:#FAA'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class AutumnStyle(Style):
+ """
+ A colorful style, inspired by the terminal highlighting style.
+ """
+
+ default_style = ""
+
+ styles = {
+ Whitespace: '#bbbbbb',
+
+ Comment: 'italic #aaaaaa',
+ Comment.Preproc: 'noitalic #4c8317',
+ Comment.Special: 'italic #0000aa',
+
+ Keyword: '#0000aa',
+ Keyword.Type: '#00aaaa',
+
+ Operator.Word: '#0000aa',
+
+ Name.Builtin: '#00aaaa',
+ Name.Function: '#00aa00',
+ Name.Class: 'underline #00aa00',
+ Name.Namespace: 'underline #00aaaa',
+ Name.Variable: '#aa0000',
+ Name.Constant: '#aa0000',
+ Name.Entity: 'bold #800',
+ Name.Attribute: '#1e90ff',
+ Name.Tag: 'bold #1e90ff',
+ Name.Decorator: '#888888',
+
+ String: '#aa5500',
+ String.Symbol: '#0000aa',
+ String.Regex: '#009999',
+
+ Number: '#009999',
+
+ Generic.Heading: 'bold #000080',
+ Generic.Subheading: 'bold #800080',
+ Generic.Deleted: '#aa0000',
+ Generic.Inserted: '#00aa00',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: '#F00 bg:#FAA'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/borland.py b/contrib/python/Pygments/py2/pygments/styles/borland.py
index e8ec1229fb..9187b35f13 100644
--- a/contrib/python/Pygments/py2/pygments/styles/borland.py
+++ b/contrib/python/Pygments/py2/pygments/styles/borland.py
@@ -1,51 +1,51 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.borland
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the style used in the Borland IDEs.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.borland
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the style used in the Borland IDEs.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class BorlandStyle(Style):
- """
- Style similar to the style used in the borland IDEs.
- """
-
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
-
- Comment: 'italic #008800',
- Comment.Preproc: 'noitalic #008080',
- Comment.Special: 'noitalic bold',
-
- String: '#0000FF',
- String.Char: '#800080',
- Number: '#0000FF',
- Keyword: 'bold #000080',
- Operator.Word: 'bold',
- Name.Tag: 'bold #000080',
- Name.Attribute: '#FF0000',
-
- Generic.Heading: '#999999',
- Generic.Subheading: '#aaaaaa',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class BorlandStyle(Style):
+ """
+ Style similar to the style used in the borland IDEs.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+
+ Comment: 'italic #008800',
+ Comment.Preproc: 'noitalic #008080',
+ Comment.Special: 'noitalic bold',
+
+ String: '#0000FF',
+ String.Char: '#800080',
+ Number: '#0000FF',
+ Keyword: 'bold #000080',
+ Operator.Word: 'bold',
+ Name.Tag: 'bold #000080',
+ Name.Attribute: '#FF0000',
+
+ Generic.Heading: '#999999',
+ Generic.Subheading: '#aaaaaa',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/bw.py b/contrib/python/Pygments/py2/pygments/styles/bw.py
index 54a653b6cb..a6a2946d5f 100644
--- a/contrib/python/Pygments/py2/pygments/styles/bw.py
+++ b/contrib/python/Pygments/py2/pygments/styles/bw.py
@@ -1,49 +1,49 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.bw
- ~~~~~~~~~~~~~~~~~~
-
- Simple black/white only style.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.bw
+ ~~~~~~~~~~~~~~~~~~
+
+ Simple black/white only style.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Operator, Generic
-
-
-class BlackWhiteStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "italic",
- Comment.Preproc: "noitalic",
-
- Keyword: "bold",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold",
-
- Operator.Word: "bold",
-
- Name.Class: "bold",
- Name.Namespace: "bold",
- Name.Exception: "bold",
- Name.Entity: "bold",
- Name.Tag: "bold",
-
- String: "italic",
- String.Interpol: "bold",
- String.Escape: "bold",
-
- Generic.Heading: "bold",
- Generic.Subheading: "bold",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Operator, Generic
+
+
+class BlackWhiteStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic",
+ Comment.Preproc: "noitalic",
+
+ Keyword: "bold",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "nobold",
+
+ Operator.Word: "bold",
+
+ Name.Class: "bold",
+ Name.Namespace: "bold",
+ Name.Exception: "bold",
+ Name.Entity: "bold",
+ Name.Tag: "bold",
+
+ String: "italic",
+ String.Interpol: "bold",
+ String.Escape: "bold",
+
+ Generic.Heading: "bold",
+ Generic.Subheading: "bold",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold",
+
+ Error: "border:#FF0000"
+ }
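
Each file in this commit registers its class under a short style name ('algol', 'algol_nu', 'arduino', 'autumn', 'borland', 'bw', ...). A hedged sketch of how those names are looked up with the standard registry helpers:

from pygments.styles import get_all_styles, get_style_by_name

print(sorted(get_all_styles()))    # style names, including the ones restored here
print(get_style_by_name('bw'))     # resolves to the BlackWhiteStyle class above
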
diff --git a/contrib/python/Pygments/py2/pygments/styles/colorful.py b/contrib/python/Pygments/py2/pygments/styles/colorful.py
index 6aa493c9fe..dec0af039c 100644
--- a/contrib/python/Pygments/py2/pygments/styles/colorful.py
+++ b/contrib/python/Pygments/py2/pygments/styles/colorful.py
@@ -1,81 +1,81 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.colorful
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by CodeRay.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.colorful
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by CodeRay.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class ColorfulStyle(Style):
- """
- A colorful style, inspired by CodeRay.
- """
-
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
-
- Comment: "#888",
- Comment.Preproc: "#579",
- Comment.Special: "bold #cc0000",
-
- Keyword: "bold #080",
- Keyword.Pseudo: "#038",
- Keyword.Type: "#339",
-
- Operator: "#333",
- Operator.Word: "bold #000",
-
- Name.Builtin: "#007020",
- Name.Function: "bold #06B",
- Name.Class: "bold #B06",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "bold #F00",
- Name.Variable: "#963",
- Name.Variable.Instance: "#33B",
- Name.Variable.Class: "#369",
- Name.Variable.Global: "bold #d70",
- Name.Constant: "bold #036",
- Name.Label: "bold #970",
- Name.Entity: "bold #800",
- Name.Attribute: "#00C",
- Name.Tag: "#070",
- Name.Decorator: "bold #555",
-
- String: "bg:#fff0f0",
- String.Char: "#04D bg:",
- String.Doc: "#D42 bg:",
- String.Interpol: "bg:#eee",
- String.Escape: "bold #666",
- String.Regex: "bg:#fff0ff #000",
- String.Symbol: "#A60 bg:",
- String.Other: "#D20",
-
- Number: "bold #60E",
- Number.Integer: "bold #00D",
- Number.Float: "bold #60E",
- Number.Hex: "bold #058",
- Number.Oct: "bold #40E",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "#F00 bg:#FAA"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class ColorfulStyle(Style):
+ """
+ A colorful style, inspired by CodeRay.
+ """
+
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+
+ Comment: "#888",
+ Comment.Preproc: "#579",
+ Comment.Special: "bold #cc0000",
+
+ Keyword: "bold #080",
+ Keyword.Pseudo: "#038",
+ Keyword.Type: "#339",
+
+ Operator: "#333",
+ Operator.Word: "bold #000",
+
+ Name.Builtin: "#007020",
+ Name.Function: "bold #06B",
+ Name.Class: "bold #B06",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "bold #F00",
+ Name.Variable: "#963",
+ Name.Variable.Instance: "#33B",
+ Name.Variable.Class: "#369",
+ Name.Variable.Global: "bold #d70",
+ Name.Constant: "bold #036",
+ Name.Label: "bold #970",
+ Name.Entity: "bold #800",
+ Name.Attribute: "#00C",
+ Name.Tag: "#070",
+ Name.Decorator: "bold #555",
+
+ String: "bg:#fff0f0",
+ String.Char: "#04D bg:",
+ String.Doc: "#D42 bg:",
+ String.Interpol: "bg:#eee",
+ String.Escape: "bold #666",
+ String.Regex: "bg:#fff0ff #000",
+ String.Symbol: "#A60 bg:",
+ String.Other: "#D20",
+
+ Number: "bold #60E",
+ Number.Integer: "bold #00D",
+ Number.Float: "bold #60E",
+ Number.Hex: "bold #058",
+ Number.Oct: "bold #40E",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "#F00 bg:#FAA"
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/default.py b/contrib/python/Pygments/py2/pygments/styles/default.py
index c93d29fcf3..91394d808e 100644
--- a/contrib/python/Pygments/py2/pygments/styles/default.py
+++ b/contrib/python/Pygments/py2/pygments/styles/default.py
@@ -1,73 +1,73 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.default
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- The default highlighting style.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.default
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ The default highlighting style.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class DefaultStyle(Style):
- """
- The default style (inspired by Emacs 22).
- """
-
- background_color = "#f8f8f8"
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #408080",
- Comment.Preproc: "noitalic #BC7A00",
-
- #Keyword: "bold #AA22FF",
- Keyword: "bold #008000",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold #B00040",
-
- Operator: "#666666",
- Operator.Word: "bold #AA22FF",
-
- Name.Builtin: "#008000",
- Name.Function: "#0000FF",
- Name.Class: "bold #0000FF",
- Name.Namespace: "bold #0000FF",
- Name.Exception: "bold #D2413A",
- Name.Variable: "#19177C",
- Name.Constant: "#880000",
- Name.Label: "#A0A000",
- Name.Entity: "bold #999999",
- Name.Attribute: "#7D9029",
- Name.Tag: "bold #008000",
- Name.Decorator: "#AA22FF",
-
- String: "#BA2121",
- String.Doc: "italic",
- String.Interpol: "bold #BB6688",
- String.Escape: "bold #BB6622",
- String.Regex: "#BB6688",
- #String.Symbol: "#B8860B",
- String.Symbol: "#19177C",
- String.Other: "#008000",
- Number: "#666666",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class DefaultStyle(Style):
+ """
+ The default style (inspired by Emacs 22).
+ """
+
+ background_color = "#f8f8f8"
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+ Comment: "italic #408080",
+ Comment.Preproc: "noitalic #BC7A00",
+
+ #Keyword: "bold #AA22FF",
+ Keyword: "bold #008000",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "nobold #B00040",
+
+ Operator: "#666666",
+ Operator.Word: "bold #AA22FF",
+
+ Name.Builtin: "#008000",
+ Name.Function: "#0000FF",
+ Name.Class: "bold #0000FF",
+ Name.Namespace: "bold #0000FF",
+ Name.Exception: "bold #D2413A",
+ Name.Variable: "#19177C",
+ Name.Constant: "#880000",
+ Name.Label: "#A0A000",
+ Name.Entity: "bold #999999",
+ Name.Attribute: "#7D9029",
+ Name.Tag: "bold #008000",
+ Name.Decorator: "#AA22FF",
+
+ String: "#BA2121",
+ String.Doc: "italic",
+ String.Interpol: "bold #BB6688",
+ String.Escape: "bold #BB6622",
+ String.Regex: "#BB6688",
+ #String.Symbol: "#B8860B",
+ String.Symbol: "#19177C",
+ String.Other: "#008000",
+ Number: "#666666",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
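
All of the restored modules follow the same pattern: subclass Style, optionally set background_color and default_style, and fill a styles dict mapping token types to definition strings. A hypothetical project-local style written the same way (every name below is invented, not part of this diff):

from pygments.style import Style
from pygments.token import Comment, Keyword, Name, String


class HouseStyle(Style):
    """Hypothetical example following the pattern of the files above."""
    background_color = "#fbfbfb"
    default_style = ""

    styles = {
        Comment:       "italic #777777",
        Keyword:       "bold #004488",
        Name.Function: "#006666",
        String:        "#883300",
    }

Such a class can also be passed directly to a formatter, e.g. HtmlFormatter(style=HouseStyle), without being registered under a name first.
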
diff --git a/contrib/python/Pygments/py2/pygments/styles/emacs.py b/contrib/python/Pygments/py2/pygments/styles/emacs.py
index 8408b09a5e..b954a99499 100644
--- a/contrib/python/Pygments/py2/pygments/styles/emacs.py
+++ b/contrib/python/Pygments/py2/pygments/styles/emacs.py
@@ -1,72 +1,72 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.emacs
- ~~~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by Emacs.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.emacs
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ A highlighting style for Pygments, inspired by Emacs.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class EmacsStyle(Style):
- """
- The default style (inspired by Emacs 22).
- """
-
- background_color = "#f8f8f8"
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #008800",
- Comment.Preproc: "noitalic",
- Comment.Special: "noitalic bold",
-
- Keyword: "bold #AA22FF",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "bold #00BB00",
-
- Operator: "#666666",
- Operator.Word: "bold #AA22FF",
-
- Name.Builtin: "#AA22FF",
- Name.Function: "#00A000",
- Name.Class: "#0000FF",
- Name.Namespace: "bold #0000FF",
- Name.Exception: "bold #D2413A",
- Name.Variable: "#B8860B",
- Name.Constant: "#880000",
- Name.Label: "#A0A000",
- Name.Entity: "bold #999999",
- Name.Attribute: "#BB4444",
- Name.Tag: "bold #008000",
- Name.Decorator: "#AA22FF",
-
- String: "#BB4444",
- String.Doc: "italic",
- String.Interpol: "bold #BB6688",
- String.Escape: "bold #BB6622",
- String.Regex: "#BB6688",
- String.Symbol: "#B8860B",
- String.Other: "#008000",
- Number: "#666666",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class EmacsStyle(Style):
+ """
+ The default style (inspired by Emacs 22).
+ """
+
+ background_color = "#f8f8f8"
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+ Comment: "italic #008800",
+ Comment.Preproc: "noitalic",
+ Comment.Special: "noitalic bold",
+
+ Keyword: "bold #AA22FF",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "bold #00BB00",
+
+ Operator: "#666666",
+ Operator.Word: "bold #AA22FF",
+
+ Name.Builtin: "#AA22FF",
+ Name.Function: "#00A000",
+ Name.Class: "#0000FF",
+ Name.Namespace: "bold #0000FF",
+ Name.Exception: "bold #D2413A",
+ Name.Variable: "#B8860B",
+ Name.Constant: "#880000",
+ Name.Label: "#A0A000",
+ Name.Entity: "bold #999999",
+ Name.Attribute: "#BB4444",
+ Name.Tag: "bold #008000",
+ Name.Decorator: "#AA22FF",
+
+ String: "#BB4444",
+ String.Doc: "italic",
+ String.Interpol: "bold #BB6688",
+ String.Escape: "bold #BB6622",
+ String.Regex: "#BB6688",
+ String.Symbol: "#B8860B",
+ String.Other: "#008000",
+ Number: "#666666",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/friendly.py b/contrib/python/Pygments/py2/pygments/styles/friendly.py
index 4184a8e3c4..c8083a8d1d 100644
--- a/contrib/python/Pygments/py2/pygments/styles/friendly.py
+++ b/contrib/python/Pygments/py2/pygments/styles/friendly.py
@@ -1,72 +1,72 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.friendly
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- A modern style based on the VIM pyte theme.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.friendly
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ A modern style based on the VIM pyte theme.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class FriendlyStyle(Style):
- """
- A modern style based on the VIM pyte theme.
- """
-
- background_color = "#f0f0f0"
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #60a0b0",
- Comment.Preproc: "noitalic #007020",
- Comment.Special: "noitalic bg:#fff0f0",
-
- Keyword: "bold #007020",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold #902000",
-
- Operator: "#666666",
- Operator.Word: "bold #007020",
-
- Name.Builtin: "#007020",
- Name.Function: "#06287e",
- Name.Class: "bold #0e84b5",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "#007020",
- Name.Variable: "#bb60d5",
- Name.Constant: "#60add5",
- Name.Label: "bold #002070",
- Name.Entity: "bold #d55537",
- Name.Attribute: "#4070a0",
- Name.Tag: "bold #062873",
- Name.Decorator: "bold #555555",
-
- String: "#4070a0",
- String.Doc: "italic",
- String.Interpol: "italic #70a0d0",
- String.Escape: "bold #4070a0",
- String.Regex: "#235388",
- String.Symbol: "#517918",
- String.Other: "#c65d09",
- Number: "#40a070",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class FriendlyStyle(Style):
+ """
+ A modern style based on the VIM pyte theme.
+ """
+
+ background_color = "#f0f0f0"
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+ Comment: "italic #60a0b0",
+ Comment.Preproc: "noitalic #007020",
+ Comment.Special: "noitalic bg:#fff0f0",
+
+ Keyword: "bold #007020",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "nobold #902000",
+
+ Operator: "#666666",
+ Operator.Word: "bold #007020",
+
+ Name.Builtin: "#007020",
+ Name.Function: "#06287e",
+ Name.Class: "bold #0e84b5",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "#007020",
+ Name.Variable: "#bb60d5",
+ Name.Constant: "#60add5",
+ Name.Label: "bold #002070",
+ Name.Entity: "bold #d55537",
+ Name.Attribute: "#4070a0",
+ Name.Tag: "bold #062873",
+ Name.Decorator: "bold #555555",
+
+ String: "#4070a0",
+ String.Doc: "italic",
+ String.Interpol: "italic #70a0d0",
+ String.Escape: "bold #4070a0",
+ String.Regex: "#235388",
+ String.Symbol: "#517918",
+ String.Other: "#c65d09",
+ Number: "#40a070",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/fruity.py b/contrib/python/Pygments/py2/pygments/styles/fruity.py
index 7f5c0e3f53..d4647d32cc 100644
--- a/contrib/python/Pygments/py2/pygments/styles/fruity.py
+++ b/contrib/python/Pygments/py2/pygments/styles/fruity.py
@@ -1,42 +1,42 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.fruity
- ~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of my "fruity" vim theme.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.fruity
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ pygments version of my "fruity" vim theme.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Comment, Name, Keyword, \
- Generic, Number, String, Whitespace
-
-class FruityStyle(Style):
- """
-    Pygments version of the "fruity" vim theme.
- """
-
- background_color = '#111111'
- highlight_color = '#333333'
-
- styles = {
- Whitespace: '#888888',
- Token: '#ffffff',
- Generic.Output: '#444444 bg:#222222',
- Keyword: '#fb660a bold',
- Keyword.Pseudo: 'nobold',
- Number: '#0086f7 bold',
- Name.Tag: '#fb660a bold',
- Name.Variable: '#fb660a',
- Comment: '#008800 bg:#0f140f italic',
- Name.Attribute: '#ff0086 bold',
- String: '#0086d2',
- Name.Function: '#ff0086 bold',
- Generic.Heading: '#ffffff bold',
- Keyword.Type: '#cdcaa9 bold',
- Generic.Subheading: '#ffffff bold',
- Name.Constant: '#0086d2',
- Comment.Preproc: '#ff0007 bold'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Token, Comment, Name, Keyword, \
+ Generic, Number, String, Whitespace
+
+class FruityStyle(Style):
+ """
+    Pygments version of the "fruity" vim theme.
+ """
+
+ background_color = '#111111'
+ highlight_color = '#333333'
+
+ styles = {
+ Whitespace: '#888888',
+ Token: '#ffffff',
+ Generic.Output: '#444444 bg:#222222',
+ Keyword: '#fb660a bold',
+ Keyword.Pseudo: 'nobold',
+ Number: '#0086f7 bold',
+ Name.Tag: '#fb660a bold',
+ Name.Variable: '#fb660a',
+ Comment: '#008800 bg:#0f140f italic',
+ Name.Attribute: '#ff0086 bold',
+ String: '#0086d2',
+ Name.Function: '#ff0086 bold',
+ Generic.Heading: '#ffffff bold',
+ Keyword.Type: '#cdcaa9 bold',
+ Generic.Subheading: '#ffffff bold',
+ Name.Constant: '#0086d2',
+ Comment.Preproc: '#ff0007 bold'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/igor.py b/contrib/python/Pygments/py2/pygments/styles/igor.py
index 70ee38bceb..de283a11b6 100644
--- a/contrib/python/Pygments/py2/pygments/styles/igor.py
+++ b/contrib/python/Pygments/py2/pygments/styles/igor.py
@@ -1,29 +1,29 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.igor
- ~~~~~~~~~~~~~~~~~~~~
-
- Igor Pro default style.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.igor
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Igor Pro default style.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String
-
-
-class IgorStyle(Style):
- """
- Pygments version of the official colors for Igor Pro procedures.
- """
- default_style = ""
-
- styles = {
- Comment: 'italic #FF0000',
- Keyword: '#0000FF',
- Name.Function: '#C34E00',
- Name.Decorator: '#CC00A3',
- Name.Class: '#007575',
- String: '#009C00'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String
+
+
+class IgorStyle(Style):
+ """
+ Pygments version of the official colors for Igor Pro procedures.
+ """
+ default_style = ""
+
+ styles = {
+ Comment: 'italic #FF0000',
+ Keyword: '#0000FF',
+ Name.Function: '#C34E00',
+ Name.Decorator: '#CC00A3',
+ Name.Class: '#007575',
+ String: '#009C00'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/lovelace.py b/contrib/python/Pygments/py2/pygments/styles/lovelace.py
index 65b641ec24..c8c404c37b 100644
--- a/contrib/python/Pygments/py2/pygments/styles/lovelace.py
+++ b/contrib/python/Pygments/py2/pygments/styles/lovelace.py
@@ -1,97 +1,97 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.lovelace
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lovelace by Miikka Salminen
-
- Pygments style by Miikka Salminen (https://github.com/miikkas)
- A desaturated, somewhat subdued style created for the Lovelace interactive
- learning environment.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.lovelace
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lovelace by Miikka Salminen
+
+ Pygments style by Miikka Salminen (https://github.com/miikkas)
+ A desaturated, somewhat subdued style created for the Lovelace interactive
+ learning environment.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Punctuation, Generic, Whitespace
-
-
-class LovelaceStyle(Style):
- """
- The style used in Lovelace interactive learning environment. Tries to avoid
- the "angry fruit salad" effect with desaturated and dim colours.
- """
- _KW_BLUE = '#2838b0'
- _NAME_GREEN = '#388038'
- _DOC_ORANGE = '#b85820'
- _OW_PURPLE = '#a848a8'
- _FUN_BROWN = '#785840'
- _STR_RED = '#b83838'
- _CLS_CYAN = '#287088'
- _ESCAPE_LIME = '#709030'
- _LABEL_CYAN = '#289870'
- _EXCEPT_YELLOW = '#908828'
-
- default_style = '#222222'
-
- styles = {
- Whitespace: '#a89028',
- Comment: 'italic #888888',
- Comment.Hashbang: _CLS_CYAN,
- Comment.Multiline: '#888888',
- Comment.Preproc: 'noitalic '+_LABEL_CYAN,
-
- Keyword: _KW_BLUE,
- Keyword.Constant: 'italic #444444',
- Keyword.Declaration: 'italic',
- Keyword.Type: 'italic',
-
- Operator: '#666666',
- Operator.Word: _OW_PURPLE,
-
- Punctuation: '#888888',
-
- Name.Attribute: _NAME_GREEN,
- Name.Builtin: _NAME_GREEN,
- Name.Builtin.Pseudo: 'italic',
- Name.Class: _CLS_CYAN,
- Name.Constant: _DOC_ORANGE,
- Name.Decorator: _CLS_CYAN,
- Name.Entity: _ESCAPE_LIME,
- Name.Exception: _EXCEPT_YELLOW,
- Name.Function: _FUN_BROWN,
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Punctuation, Generic, Whitespace
+
+
+class LovelaceStyle(Style):
+ """
+ The style used in Lovelace interactive learning environment. Tries to avoid
+ the "angry fruit salad" effect with desaturated and dim colours.
+ """
+ _KW_BLUE = '#2838b0'
+ _NAME_GREEN = '#388038'
+ _DOC_ORANGE = '#b85820'
+ _OW_PURPLE = '#a848a8'
+ _FUN_BROWN = '#785840'
+ _STR_RED = '#b83838'
+ _CLS_CYAN = '#287088'
+ _ESCAPE_LIME = '#709030'
+ _LABEL_CYAN = '#289870'
+ _EXCEPT_YELLOW = '#908828'
+
+ default_style = '#222222'
+
+ styles = {
+ Whitespace: '#a89028',
+ Comment: 'italic #888888',
+ Comment.Hashbang: _CLS_CYAN,
+ Comment.Multiline: '#888888',
+ Comment.Preproc: 'noitalic '+_LABEL_CYAN,
+
+ Keyword: _KW_BLUE,
+ Keyword.Constant: 'italic #444444',
+ Keyword.Declaration: 'italic',
+ Keyword.Type: 'italic',
+
+ Operator: '#666666',
+ Operator.Word: _OW_PURPLE,
+
+ Punctuation: '#888888',
+
+ Name.Attribute: _NAME_GREEN,
+ Name.Builtin: _NAME_GREEN,
+ Name.Builtin.Pseudo: 'italic',
+ Name.Class: _CLS_CYAN,
+ Name.Constant: _DOC_ORANGE,
+ Name.Decorator: _CLS_CYAN,
+ Name.Entity: _ESCAPE_LIME,
+ Name.Exception: _EXCEPT_YELLOW,
+ Name.Function: _FUN_BROWN,
Name.Function.Magic: _DOC_ORANGE,
- Name.Label: _LABEL_CYAN,
- Name.Namespace: _LABEL_CYAN,
- Name.Tag: _KW_BLUE,
- Name.Variable: '#b04040',
- Name.Variable.Global:_EXCEPT_YELLOW,
+ Name.Label: _LABEL_CYAN,
+ Name.Namespace: _LABEL_CYAN,
+ Name.Tag: _KW_BLUE,
+ Name.Variable: '#b04040',
+ Name.Variable.Global:_EXCEPT_YELLOW,
Name.Variable.Magic: _DOC_ORANGE,
-
- String: _STR_RED,
+
+ String: _STR_RED,
String.Affix: '#444444',
- String.Char: _OW_PURPLE,
+ String.Char: _OW_PURPLE,
String.Delimiter: _DOC_ORANGE,
- String.Doc: 'italic '+_DOC_ORANGE,
- String.Escape: _ESCAPE_LIME,
- String.Interpol: 'underline',
- String.Other: _OW_PURPLE,
- String.Regex: _OW_PURPLE,
-
- Number: '#444444',
-
- Generic.Deleted: '#c02828',
- Generic.Emph: 'italic',
- Generic.Error: '#c02828',
- Generic.Heading: '#666666',
- Generic.Subheading: '#444444',
- Generic.Inserted: _NAME_GREEN,
- Generic.Output: '#666666',
- Generic.Prompt: '#444444',
- Generic.Strong: 'bold',
- Generic.Traceback: _KW_BLUE,
-
- Error: 'bg:'+_OW_PURPLE,
- }
+ String.Doc: 'italic '+_DOC_ORANGE,
+ String.Escape: _ESCAPE_LIME,
+ String.Interpol: 'underline',
+ String.Other: _OW_PURPLE,
+ String.Regex: _OW_PURPLE,
+
+ Number: '#444444',
+
+ Generic.Deleted: '#c02828',
+ Generic.Emph: 'italic',
+ Generic.Error: '#c02828',
+ Generic.Heading: '#666666',
+ Generic.Subheading: '#444444',
+ Generic.Inserted: _NAME_GREEN,
+ Generic.Output: '#666666',
+ Generic.Prompt: '#444444',
+ Generic.Strong: 'bold',
+ Generic.Traceback: _KW_BLUE,
+
+ Error: 'bg:'+_OW_PURPLE,
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/manni.py b/contrib/python/Pygments/py2/pygments/styles/manni.py
index bd14f6a174..1f968f9bba 100644
--- a/contrib/python/Pygments/py2/pygments/styles/manni.py
+++ b/contrib/python/Pygments/py2/pygments/styles/manni.py
@@ -1,75 +1,75 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.manni
- ~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by the terminal highlighting style.
-
- This is a port of the style used in the `php port`_ of pygments
- by Manni. The style is called 'default' there.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.manni
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by the terminal highlighting style.
+
+ This is a port of the style used in the `php port`_ of pygments
+ by Manni. The style is called 'default' there.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class ManniStyle(Style):
- """
- A colorful style, inspired by the terminal highlighting style.
- """
-
- background_color = '#f0f3f3'
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: 'italic #0099FF',
- Comment.Preproc: 'noitalic #009999',
- Comment.Special: 'bold',
-
- Keyword: 'bold #006699',
- Keyword.Pseudo: 'nobold',
- Keyword.Type: '#007788',
-
- Operator: '#555555',
- Operator.Word: 'bold #000000',
-
- Name.Builtin: '#336666',
- Name.Function: '#CC00FF',
- Name.Class: 'bold #00AA88',
- Name.Namespace: 'bold #00CCFF',
- Name.Exception: 'bold #CC0000',
- Name.Variable: '#003333',
- Name.Constant: '#336600',
- Name.Label: '#9999FF',
- Name.Entity: 'bold #999999',
- Name.Attribute: '#330099',
- Name.Tag: 'bold #330099',
- Name.Decorator: '#9999FF',
-
- String: '#CC3300',
- String.Doc: 'italic',
- String.Interpol: '#AA0000',
- String.Escape: 'bold #CC3300',
- String.Regex: '#33AAAA',
- String.Symbol: '#FFCC33',
- String.Other: '#CC3300',
-
- Number: '#FF6600',
-
- Generic.Heading: 'bold #003300',
- Generic.Subheading: 'bold #003300',
- Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
- Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
- Generic.Error: '#FF0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: 'bold #000099',
- Generic.Output: '#AAAAAA',
- Generic.Traceback: '#99CC66',
-
- Error: 'bg:#FFAAAA #AA0000'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class ManniStyle(Style):
+ """
+ A colorful style, inspired by the terminal highlighting style.
+ """
+
+ background_color = '#f0f3f3'
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: 'italic #0099FF',
+ Comment.Preproc: 'noitalic #009999',
+ Comment.Special: 'bold',
+
+ Keyword: 'bold #006699',
+ Keyword.Pseudo: 'nobold',
+ Keyword.Type: '#007788',
+
+ Operator: '#555555',
+ Operator.Word: 'bold #000000',
+
+ Name.Builtin: '#336666',
+ Name.Function: '#CC00FF',
+ Name.Class: 'bold #00AA88',
+ Name.Namespace: 'bold #00CCFF',
+ Name.Exception: 'bold #CC0000',
+ Name.Variable: '#003333',
+ Name.Constant: '#336600',
+ Name.Label: '#9999FF',
+ Name.Entity: 'bold #999999',
+ Name.Attribute: '#330099',
+ Name.Tag: 'bold #330099',
+ Name.Decorator: '#9999FF',
+
+ String: '#CC3300',
+ String.Doc: 'italic',
+ String.Interpol: '#AA0000',
+ String.Escape: 'bold #CC3300',
+ String.Regex: '#33AAAA',
+ String.Symbol: '#FFCC33',
+ String.Other: '#CC3300',
+
+ Number: '#FF6600',
+
+ Generic.Heading: 'bold #003300',
+ Generic.Subheading: 'bold #003300',
+ Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
+ Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
+ Generic.Error: '#FF0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: 'bold #000099',
+ Generic.Output: '#AAAAAA',
+ Generic.Traceback: '#99CC66',
+
+ Error: 'bg:#FFAAAA #AA0000'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/monokai.py b/contrib/python/Pygments/py2/pygments/styles/monokai.py
index c9db9f2218..cc44ef8704 100644
--- a/contrib/python/Pygments/py2/pygments/styles/monokai.py
+++ b/contrib/python/Pygments/py2/pygments/styles/monokai.py
@@ -1,107 +1,107 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.monokai
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Mimic the Monokai color scheme. Based on tango.py.
-
- http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.monokai
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Mimic the Monokai color scheme. Based on tango.py.
+
+ http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-class MonokaiStyle(Style):
- """
- This style mimics the Monokai color scheme.
- """
-
- background_color = "#272822"
- highlight_color = "#49483e"
-
- styles = {
- # No corresponding class for the following:
- Text: "#f8f8f2", # class: ''
- Whitespace: "", # class: 'w'
- Error: "#960050 bg:#1e0010", # class: 'err'
- Other: "", # class 'x'
-
- Comment: "#75715e", # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: "#66d9ef", # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: "#f92672", # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: "", # class: 'kt'
-
- Operator: "#f92672", # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: "#f8f8f2", # class: 'p'
-
- Name: "#f8f8f2", # class: 'n'
- Name.Attribute: "#a6e22e", # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: "#a6e22e", # class: 'nc' - to be revised
- Name.Constant: "#66d9ef", # class: 'no' - to be revised
- Name.Decorator: "#a6e22e", # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: "#a6e22e", # class: 'ne'
- Name.Function: "#a6e22e", # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: "", # class: 'nn' - to be revised
- Name.Other: "#a6e22e", # class: 'nx'
- Name.Tag: "#f92672", # class: 'nt' - like a keyword
- Name.Variable: "", # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: "#ae81ff", # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: "#ae81ff", # class: 'l'
- Literal.Date: "#e6db74", # class: 'ld'
-
- String: "#e6db74", # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: "", # class: 'sc'
- String.Doc: "", # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: "#ae81ff", # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: "", # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
-
- Generic: "", # class: 'g'
- Generic.Deleted: "#f92672", # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "", # class: 'gh'
- Generic.Inserted: "#a6e22e", # class: 'gi'
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Text, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+class MonokaiStyle(Style):
+ """
+ This style mimics the Monokai color scheme.
+ """
+
+ background_color = "#272822"
+ highlight_color = "#49483e"
+
+ styles = {
+ # No corresponding class for the following:
+ Text: "#f8f8f2", # class: ''
+ Whitespace: "", # class: 'w'
+ Error: "#960050 bg:#1e0010", # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: "#75715e", # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: "#66d9ef", # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: "#f92672", # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: "", # class: 'kt'
+
+ Operator: "#f92672", # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: "#f8f8f2", # class: 'p'
+
+ Name: "#f8f8f2", # class: 'n'
+ Name.Attribute: "#a6e22e", # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: "#a6e22e", # class: 'nc' - to be revised
+ Name.Constant: "#66d9ef", # class: 'no' - to be revised
+ Name.Decorator: "#a6e22e", # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: "#a6e22e", # class: 'ne'
+ Name.Function: "#a6e22e", # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: "", # class: 'nn' - to be revised
+ Name.Other: "#a6e22e", # class: 'nx'
+ Name.Tag: "#f92672", # class: 'nt' - like a keyword
+ Name.Variable: "", # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: "#ae81ff", # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: "#ae81ff", # class: 'l'
+ Literal.Date: "#e6db74", # class: 'ld'
+
+ String: "#e6db74", # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: "", # class: 'sc'
+ String.Doc: "", # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: "#ae81ff", # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: "", # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: "#f92672", # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "", # class: 'gh'
+ Generic.Inserted: "#a6e22e", # class: 'gi'
Generic.Output: "#66d9ef", # class: 'go'
Generic.Prompt: "bold #f92672", # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.Subheading: "#75715e", # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "#75715e", # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
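
Monokai is a dark-background scheme (background_color "#272822"), so it is commonly paired with a terminal formatter rather than the HTML one. A minimal, hedged sketch using only stock Pygments APIs:

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import Terminal256Formatter

src = "def add(a, b):\n    return a + b\n"   # illustrative snippet
print(highlight(src, PythonLexer(), Terminal256Formatter(style='monokai')))
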
diff --git a/contrib/python/Pygments/py2/pygments/styles/murphy.py b/contrib/python/Pygments/py2/pygments/styles/murphy.py
index e9b50d7a88..dccd3783ec 100644
--- a/contrib/python/Pygments/py2/pygments/styles/murphy.py
+++ b/contrib/python/Pygments/py2/pygments/styles/murphy.py
@@ -1,80 +1,80 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.murphy
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Murphy's style from CodeRay.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.murphy
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Murphy's style from CodeRay.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class MurphyStyle(Style):
- """
- Murphy's style from CodeRay.
- """
-
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "#666 italic",
- Comment.Preproc: "#579 noitalic",
- Comment.Special: "#c00 bold",
-
- Keyword: "bold #289",
- Keyword.Pseudo: "#08f",
- Keyword.Type: "#66f",
-
- Operator: "#333",
- Operator.Word: "bold #000",
-
- Name.Builtin: "#072",
- Name.Function: "bold #5ed",
- Name.Class: "bold #e9e",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "bold #F00",
- Name.Variable: "#036",
- Name.Variable.Instance: "#aaf",
- Name.Variable.Class: "#ccf",
- Name.Variable.Global: "#f84",
- Name.Constant: "bold #5ed",
- Name.Label: "bold #970",
- Name.Entity: "#800",
- Name.Attribute: "#007",
- Name.Tag: "#070",
- Name.Decorator: "bold #555",
-
- String: "bg:#e0e0ff",
- String.Char: "#88F bg:",
- String.Doc: "#D42 bg:",
- String.Interpol: "bg:#eee",
- String.Escape: "bold #666",
- String.Regex: "bg:#e0e0ff #000",
- String.Symbol: "#fc8 bg:",
- String.Other: "#f88",
-
- Number: "bold #60E",
- Number.Integer: "bold #66f",
- Number.Float: "bold #60E",
- Number.Hex: "bold #058",
- Number.Oct: "bold #40E",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "#F00 bg:#FAA"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class MurphyStyle(Style):
+ """
+ Murphy's style from CodeRay.
+ """
+
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+ Comment: "#666 italic",
+ Comment.Preproc: "#579 noitalic",
+ Comment.Special: "#c00 bold",
+
+ Keyword: "bold #289",
+ Keyword.Pseudo: "#08f",
+ Keyword.Type: "#66f",
+
+ Operator: "#333",
+ Operator.Word: "bold #000",
+
+ Name.Builtin: "#072",
+ Name.Function: "bold #5ed",
+ Name.Class: "bold #e9e",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "bold #F00",
+ Name.Variable: "#036",
+ Name.Variable.Instance: "#aaf",
+ Name.Variable.Class: "#ccf",
+ Name.Variable.Global: "#f84",
+ Name.Constant: "bold #5ed",
+ Name.Label: "bold #970",
+ Name.Entity: "#800",
+ Name.Attribute: "#007",
+ Name.Tag: "#070",
+ Name.Decorator: "bold #555",
+
+ String: "bg:#e0e0ff",
+ String.Char: "#88F bg:",
+ String.Doc: "#D42 bg:",
+ String.Interpol: "bg:#eee",
+ String.Escape: "bold #666",
+ String.Regex: "bg:#e0e0ff #000",
+ String.Symbol: "#fc8 bg:",
+ String.Other: "#f88",
+
+ Number: "bold #60E",
+ Number.Integer: "bold #66f",
+ Number.Float: "bold #60E",
+ Number.Hex: "bold #058",
+ Number.Oct: "bold #40E",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "#F00 bg:#FAA"
+ }
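The style modules restored in this commit only declare a token-to-colour mapping; rendering is done by a lexer plus a formatter. As a minimal sketch using the standard Pygments API (illustrative only, not part of this diff), the murphy style above can be exercised like this:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    # Bind a formatter to the 'murphy' style, dump the CSS generated from
    # MurphyStyle.styles, then highlight a small snippet with it.
    formatter = HtmlFormatter(style='murphy')
    print(formatter.get_style_defs('.highlight'))
    print(highlight('print("hello")', PythonLexer(), formatter))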
diff --git a/contrib/python/Pygments/py2/pygments/styles/native.py b/contrib/python/Pygments/py2/pygments/styles/native.py
index a41852ac86..d5f6a0668b 100644
--- a/contrib/python/Pygments/py2/pygments/styles/native.py
+++ b/contrib/python/Pygments/py2/pygments/styles/native.py
@@ -1,65 +1,65 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.native
- ~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of my "native" vim theme.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.native
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ pygments version of my "native" vim theme.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-
-
-class NativeStyle(Style):
- """
- Pygments version of the "native" vim theme.
- """
-
- background_color = '#202020'
- highlight_color = '#404040'
-
- styles = {
- Token: '#d0d0d0',
- Whitespace: '#666666',
-
- Comment: 'italic #999999',
- Comment.Preproc: 'noitalic bold #cd2828',
- Comment.Special: 'noitalic bold #e50808 bg:#520000',
-
- Keyword: 'bold #6ab825',
- Keyword.Pseudo: 'nobold',
- Operator.Word: 'bold #6ab825',
-
- String: '#ed9d13',
- String.Other: '#ffa500',
-
- Number: '#3677a9',
-
- Name.Builtin: '#24909d',
- Name.Variable: '#40ffff',
- Name.Constant: '#40ffff',
- Name.Class: 'underline #447fcf',
- Name.Function: '#447fcf',
- Name.Namespace: 'underline #447fcf',
- Name.Exception: '#bbbbbb',
- Name.Tag: 'bold #6ab825',
- Name.Attribute: '#bbbbbb',
- Name.Decorator: '#ffa500',
-
- Generic.Heading: 'bold #ffffff',
- Generic.Subheading: 'underline #ffffff',
- Generic.Deleted: '#d22323',
- Generic.Inserted: '#589819',
- Generic.Error: '#d22323',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#aaaaaa',
- Generic.Output: '#cccccc',
- Generic.Traceback: '#d22323',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Token, Whitespace
+
+
+class NativeStyle(Style):
+ """
+ Pygments version of the "native" vim theme.
+ """
+
+ background_color = '#202020'
+ highlight_color = '#404040'
+
+ styles = {
+ Token: '#d0d0d0',
+ Whitespace: '#666666',
+
+ Comment: 'italic #999999',
+ Comment.Preproc: 'noitalic bold #cd2828',
+ Comment.Special: 'noitalic bold #e50808 bg:#520000',
+
+ Keyword: 'bold #6ab825',
+ Keyword.Pseudo: 'nobold',
+ Operator.Word: 'bold #6ab825',
+
+ String: '#ed9d13',
+ String.Other: '#ffa500',
+
+ Number: '#3677a9',
+
+ Name.Builtin: '#24909d',
+ Name.Variable: '#40ffff',
+ Name.Constant: '#40ffff',
+ Name.Class: 'underline #447fcf',
+ Name.Function: '#447fcf',
+ Name.Namespace: 'underline #447fcf',
+ Name.Exception: '#bbbbbb',
+ Name.Tag: 'bold #6ab825',
+ Name.Attribute: '#bbbbbb',
+ Name.Decorator: '#ffa500',
+
+ Generic.Heading: 'bold #ffffff',
+ Generic.Subheading: 'underline #ffffff',
+ Generic.Deleted: '#d22323',
+ Generic.Inserted: '#589819',
+ Generic.Error: '#d22323',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#aaaaaa',
+ Generic.Output: '#cccccc',
+ Generic.Traceback: '#d22323',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/paraiso_dark.py b/contrib/python/Pygments/py2/pygments/styles/paraiso_dark.py
index 50afc37ea2..08b5d0f9a0 100644
--- a/contrib/python/Pygments/py2/pygments/styles/paraiso_dark.py
+++ b/contrib/python/Pygments/py2/pygments/styles/paraiso_dark.py
@@ -1,125 +1,125 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.paraiso_dark
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Paraíso (Dark) by Jan T. Sott
-
- Pygments template by Jan T. Sott (https://github.com/idleberg)
- Created with Base16 Builder by Chris Kempson
- (https://github.com/chriskempson/base16-builder).
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.paraiso_dark
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Paraíso (Dark) by Jan T. Sott
+
+ Pygments template by Jan T. Sott (https://github.com/idleberg)
+ Created with Base16 Builder by Chris Kempson
+ (https://github.com/chriskempson/base16-builder).
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-BACKGROUND = "#2f1e2e"
-CURRENT_LINE = "#41323f"
-SELECTION = "#4f424c"
-FOREGROUND = "#e7e9db"
-COMMENT = "#776e71"
-RED = "#ef6155"
-ORANGE = "#f99b15"
-YELLOW = "#fec418"
-GREEN = "#48b685"
-AQUA = "#5bc4bf"
-BLUE = "#06b6ef"
-PURPLE = "#815ba4"
-
-
-class ParaisoDarkStyle(Style):
-
- default_style = ''
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- styles = {
- # No corresponding class for the following:
- Text: FOREGROUND, # class: ''
- Whitespace: "", # class: 'w'
- Error: RED, # class: 'err'
- Other: "", # class 'x'
-
- Comment: COMMENT, # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: PURPLE, # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: AQUA, # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: YELLOW, # class: 'kt'
-
- Operator: AQUA, # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: FOREGROUND, # class: 'p'
-
- Name: FOREGROUND, # class: 'n'
- Name.Attribute: BLUE, # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: YELLOW, # class: 'nc' - to be revised
- Name.Constant: RED, # class: 'no' - to be revised
- Name.Decorator: AQUA, # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: RED, # class: 'ne'
- Name.Function: BLUE, # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: YELLOW, # class: 'nn' - to be revised
- Name.Other: BLUE, # class: 'nx'
- Name.Tag: AQUA, # class: 'nt' - like a keyword
- Name.Variable: RED, # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: ORANGE, # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: ORANGE, # class: 'l'
- Literal.Date: GREEN, # class: 'ld'
-
- String: GREEN, # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: FOREGROUND, # class: 'sc'
- String.Doc: COMMENT, # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: ORANGE, # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: ORANGE, # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: RED, # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
- Generic.Inserted: GREEN, # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "bold " + COMMENT, # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.Subheading: "bold " + AQUA, # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Text, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+BACKGROUND = "#2f1e2e"
+CURRENT_LINE = "#41323f"
+SELECTION = "#4f424c"
+FOREGROUND = "#e7e9db"
+COMMENT = "#776e71"
+RED = "#ef6155"
+ORANGE = "#f99b15"
+YELLOW = "#fec418"
+GREEN = "#48b685"
+AQUA = "#5bc4bf"
+BLUE = "#06b6ef"
+PURPLE = "#815ba4"
+
+
+class ParaisoDarkStyle(Style):
+
+ default_style = ''
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ styles = {
+ # No corresponding class for the following:
+ Text: FOREGROUND, # class: ''
+ Whitespace: "", # class: 'w'
+ Error: RED, # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: COMMENT, # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: PURPLE, # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: AQUA, # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: YELLOW, # class: 'kt'
+
+ Operator: AQUA, # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: FOREGROUND, # class: 'p'
+
+ Name: FOREGROUND, # class: 'n'
+ Name.Attribute: BLUE, # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: YELLOW, # class: 'nc' - to be revised
+ Name.Constant: RED, # class: 'no' - to be revised
+ Name.Decorator: AQUA, # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: RED, # class: 'ne'
+ Name.Function: BLUE, # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: YELLOW, # class: 'nn' - to be revised
+ Name.Other: BLUE, # class: 'nx'
+ Name.Tag: AQUA, # class: 'nt' - like a keyword
+ Name.Variable: RED, # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: ORANGE, # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: ORANGE, # class: 'l'
+ Literal.Date: GREEN, # class: 'ld'
+
+ String: GREEN, # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: FOREGROUND, # class: 'sc'
+ String.Doc: COMMENT, # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: ORANGE, # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: ORANGE, # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: RED, # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
+ Generic.Inserted: GREEN, # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "bold " + COMMENT, # class: 'gp'
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "bold " + AQUA, # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
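Both Paraíso variants follow the Base16 template: a module-level palette of named colour constants feeding one styles dictionary (the repeated background_color/highlight_color assignments above are redundant but harmless). A minimal sketch of the same pattern, with a hypothetical palette rather than the Paraíso colours:

    from pygments.style import Style
    from pygments.token import Comment, Keyword, String, Error

    # Hypothetical palette; only the structure mirrors the files above.
    BACKGROUND = "#1d1f21"
    COMMENT = "#969896"
    PURPLE = "#b294bb"
    GREEN = "#b5bd68"
    RED = "#cc6666"

    class ExampleDarkStyle(Style):
        background_color = BACKGROUND
        styles = {
            Comment: COMMENT,
            Keyword: PURPLE,
            String: GREEN,
            Error: RED,
        }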
diff --git a/contrib/python/Pygments/py2/pygments/styles/paraiso_light.py b/contrib/python/Pygments/py2/pygments/styles/paraiso_light.py
index a5f357ba72..af153afb0b 100644
--- a/contrib/python/Pygments/py2/pygments/styles/paraiso_light.py
+++ b/contrib/python/Pygments/py2/pygments/styles/paraiso_light.py
@@ -1,125 +1,125 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.paraiso_light
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Paraíso (Light) by Jan T. Sott
-
- Pygments template by Jan T. Sott (https://github.com/idleberg)
- Created with Base16 Builder by Chris Kempson
- (https://github.com/chriskempson/base16-builder).
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.paraiso_light
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Paraíso (Light) by Jan T. Sott
+
+ Pygments template by Jan T. Sott (https://github.com/idleberg)
+ Created with Base16 Builder by Chris Kempson
+ (https://github.com/chriskempson/base16-builder).
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-BACKGROUND = "#e7e9db"
-CURRENT_LINE = "#b9b6b0"
-SELECTION = "#a39e9b"
-FOREGROUND = "#2f1e2e"
-COMMENT = "#8d8687"
-RED = "#ef6155"
-ORANGE = "#f99b15"
-YELLOW = "#fec418"
-GREEN = "#48b685"
-AQUA = "#5bc4bf"
-BLUE = "#06b6ef"
-PURPLE = "#815ba4"
-
-
-class ParaisoLightStyle(Style):
-
- default_style = ''
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- styles = {
- # No corresponding class for the following:
- Text: FOREGROUND, # class: ''
- Whitespace: "", # class: 'w'
- Error: RED, # class: 'err'
- Other: "", # class 'x'
-
- Comment: COMMENT, # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: PURPLE, # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: AQUA, # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: YELLOW, # class: 'kt'
-
- Operator: AQUA, # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: FOREGROUND, # class: 'p'
-
- Name: FOREGROUND, # class: 'n'
- Name.Attribute: BLUE, # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: YELLOW, # class: 'nc' - to be revised
- Name.Constant: RED, # class: 'no' - to be revised
- Name.Decorator: AQUA, # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: RED, # class: 'ne'
- Name.Function: BLUE, # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: YELLOW, # class: 'nn' - to be revised
- Name.Other: BLUE, # class: 'nx'
- Name.Tag: AQUA, # class: 'nt' - like a keyword
- Name.Variable: RED, # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: ORANGE, # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: ORANGE, # class: 'l'
- Literal.Date: GREEN, # class: 'ld'
-
- String: GREEN, # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: FOREGROUND, # class: 'sc'
- String.Doc: COMMENT, # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: ORANGE, # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: ORANGE, # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: RED, # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
- Generic.Inserted: GREEN, # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "bold " + COMMENT, # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.Subheading: "bold " + AQUA, # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Text, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+BACKGROUND = "#e7e9db"
+CURRENT_LINE = "#b9b6b0"
+SELECTION = "#a39e9b"
+FOREGROUND = "#2f1e2e"
+COMMENT = "#8d8687"
+RED = "#ef6155"
+ORANGE = "#f99b15"
+YELLOW = "#fec418"
+GREEN = "#48b685"
+AQUA = "#5bc4bf"
+BLUE = "#06b6ef"
+PURPLE = "#815ba4"
+
+
+class ParaisoLightStyle(Style):
+
+ default_style = ''
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ styles = {
+ # No corresponding class for the following:
+ Text: FOREGROUND, # class: ''
+ Whitespace: "", # class: 'w'
+ Error: RED, # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: COMMENT, # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: PURPLE, # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: AQUA, # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: YELLOW, # class: 'kt'
+
+ Operator: AQUA, # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: FOREGROUND, # class: 'p'
+
+ Name: FOREGROUND, # class: 'n'
+ Name.Attribute: BLUE, # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: YELLOW, # class: 'nc' - to be revised
+ Name.Constant: RED, # class: 'no' - to be revised
+ Name.Decorator: AQUA, # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: RED, # class: 'ne'
+ Name.Function: BLUE, # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: YELLOW, # class: 'nn' - to be revised
+ Name.Other: BLUE, # class: 'nx'
+ Name.Tag: AQUA, # class: 'nt' - like a keyword
+ Name.Variable: RED, # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: ORANGE, # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: ORANGE, # class: 'l'
+ Literal.Date: GREEN, # class: 'ld'
+
+ String: GREEN, # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: FOREGROUND, # class: 'sc'
+ String.Doc: COMMENT, # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: ORANGE, # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: ORANGE, # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: RED, # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
+ Generic.Inserted: GREEN, # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "bold " + COMMENT, # class: 'gp'
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "bold " + AQUA, # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/pastie.py b/contrib/python/Pygments/py2/pygments/styles/pastie.py
index 6b022025ba..d19214a646 100644
--- a/contrib/python/Pygments/py2/pygments/styles/pastie.py
+++ b/contrib/python/Pygments/py2/pygments/styles/pastie.py
@@ -1,75 +1,75 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.pastie
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the `pastie`_ default style.
-
- .. _pastie: http://pastie.caboo.se/
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.pastie
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the `pastie`_ default style.
+
+ .. _pastie: http://pastie.caboo.se/
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class PastieStyle(Style):
- """
- Style similar to the pastie default style.
- """
-
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: '#888888',
- Comment.Preproc: 'bold #cc0000',
- Comment.Special: 'bg:#fff0f0 bold #cc0000',
-
- String: 'bg:#fff0f0 #dd2200',
- String.Regex: 'bg:#fff0ff #008800',
- String.Other: 'bg:#f0fff0 #22bb22',
- String.Symbol: '#aa6600',
- String.Interpol: '#3333bb',
- String.Escape: '#0044dd',
-
- Operator.Word: '#008800',
-
- Keyword: 'bold #008800',
- Keyword.Pseudo: 'nobold',
- Keyword.Type: '#888888',
-
- Name.Class: 'bold #bb0066',
- Name.Exception: 'bold #bb0066',
- Name.Function: 'bold #0066bb',
- Name.Property: 'bold #336699',
- Name.Namespace: 'bold #bb0066',
- Name.Builtin: '#003388',
- Name.Variable: '#336699',
- Name.Variable.Class: '#336699',
- Name.Variable.Instance: '#3333bb',
- Name.Variable.Global: '#dd7700',
- Name.Constant: 'bold #003366',
- Name.Tag: 'bold #bb0066',
- Name.Attribute: '#336699',
- Name.Decorator: '#555555',
- Name.Label: 'italic #336699',
-
- Number: 'bold #0000DD',
-
- Generic.Heading: '#333',
- Generic.Subheading: '#666',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class PastieStyle(Style):
+ """
+ Style similar to the pastie default style.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: '#888888',
+ Comment.Preproc: 'bold #cc0000',
+ Comment.Special: 'bg:#fff0f0 bold #cc0000',
+
+ String: 'bg:#fff0f0 #dd2200',
+ String.Regex: 'bg:#fff0ff #008800',
+ String.Other: 'bg:#f0fff0 #22bb22',
+ String.Symbol: '#aa6600',
+ String.Interpol: '#3333bb',
+ String.Escape: '#0044dd',
+
+ Operator.Word: '#008800',
+
+ Keyword: 'bold #008800',
+ Keyword.Pseudo: 'nobold',
+ Keyword.Type: '#888888',
+
+ Name.Class: 'bold #bb0066',
+ Name.Exception: 'bold #bb0066',
+ Name.Function: 'bold #0066bb',
+ Name.Property: 'bold #336699',
+ Name.Namespace: 'bold #bb0066',
+ Name.Builtin: '#003388',
+ Name.Variable: '#336699',
+ Name.Variable.Class: '#336699',
+ Name.Variable.Instance: '#3333bb',
+ Name.Variable.Global: '#dd7700',
+ Name.Constant: 'bold #003366',
+ Name.Tag: 'bold #bb0066',
+ Name.Attribute: '#336699',
+ Name.Decorator: '#555555',
+ Name.Label: 'italic #336699',
+
+ Number: 'bold #0000DD',
+
+ Generic.Heading: '#333',
+ Generic.Subheading: '#666',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/perldoc.py b/contrib/python/Pygments/py2/pygments/styles/perldoc.py
index a6c21e908a..9d526f532f 100644
--- a/contrib/python/Pygments/py2/pygments/styles/perldoc.py
+++ b/contrib/python/Pygments/py2/pygments/styles/perldoc.py
@@ -1,69 +1,69 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.perldoc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the style used in the `perldoc`_ code blocks.
-
- .. _perldoc: http://perldoc.perl.org/
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.perldoc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the style used in the `perldoc`_ code blocks.
+
+ .. _perldoc: http://perldoc.perl.org/
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class PerldocStyle(Style):
- """
- Style similar to the style used in the perldoc code blocks.
- """
-
- background_color = '#eeeedd'
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: '#228B22',
- Comment.Preproc: '#1e889b',
- Comment.Special: '#8B008B bold',
-
- String: '#CD5555',
- String.Heredoc: '#1c7e71 italic',
- String.Regex: '#B452CD',
- String.Other: '#cb6c20',
- String.Regex: '#1c7e71',
-
- Number: '#B452CD',
-
- Operator.Word: '#8B008B',
-
- Keyword: '#8B008B bold',
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class PerldocStyle(Style):
+ """
+ Style similar to the style used in the perldoc code blocks.
+ """
+
+ background_color = '#eeeedd'
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: '#228B22',
+ Comment.Preproc: '#1e889b',
+ Comment.Special: '#8B008B bold',
+
+ String: '#CD5555',
+ String.Heredoc: '#1c7e71 italic',
+ String.Regex: '#B452CD',
+ String.Other: '#cb6c20',
+ String.Regex: '#1c7e71',
+
+ Number: '#B452CD',
+
+ Operator.Word: '#8B008B',
+
+ Keyword: '#8B008B bold',
Keyword.Type: '#00688B',
-
- Name.Class: '#008b45 bold',
- Name.Exception: '#008b45 bold',
- Name.Function: '#008b45',
- Name.Namespace: '#008b45 underline',
- Name.Variable: '#00688B',
- Name.Constant: '#00688B',
- Name.Decorator: '#707a7c',
- Name.Tag: '#8B008B bold',
- Name.Attribute: '#658b00',
- Name.Builtin: '#658b00',
-
- Generic.Heading: 'bold #000080',
- Generic.Subheading: 'bold #800080',
- Generic.Deleted: '#aa0000',
- Generic.Inserted: '#00aa00',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+
+ Name.Class: '#008b45 bold',
+ Name.Exception: '#008b45 bold',
+ Name.Function: '#008b45',
+ Name.Namespace: '#008b45 underline',
+ Name.Variable: '#00688B',
+ Name.Constant: '#00688B',
+ Name.Decorator: '#707a7c',
+ Name.Tag: '#8B008B bold',
+ Name.Attribute: '#658b00',
+ Name.Builtin: '#658b00',
+
+ Generic.Heading: 'bold #000080',
+ Generic.Subheading: 'bold #800080',
+ Generic.Deleted: '#aa0000',
+ Generic.Inserted: '#00aa00',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/rrt.py b/contrib/python/Pygments/py2/pygments/styles/rrt.py
index d61085e0eb..e700f410a3 100644
--- a/contrib/python/Pygments/py2/pygments/styles/rrt.py
+++ b/contrib/python/Pygments/py2/pygments/styles/rrt.py
@@ -1,33 +1,33 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.rrt
- ~~~~~~~~~~~~~~~~~~~
-
- pygments "rrt" theme, based on Zap and Emacs defaults.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.rrt
+ ~~~~~~~~~~~~~~~~~~~
+
+ pygments "rrt" theme, based on Zap and Emacs defaults.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Comment, Name, Keyword, String
-
-
-class RrtStyle(Style):
- """
- Minimalistic "rrt" theme, based on Zap and Emacs defaults.
- """
-
- background_color = '#000000'
- highlight_color = '#0000ff'
-
- styles = {
- Comment: '#00ff00',
- Name.Function: '#ffff00',
- Name.Variable: '#eedd82',
- Name.Constant: '#7fffd4',
- Keyword: '#ff0000',
- Comment.Preproc: '#e5e5e5',
- String: '#87ceeb',
- Keyword.Type: '#ee82ee',
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Comment, Name, Keyword, String
+
+
+class RrtStyle(Style):
+ """
+ Minimalistic "rrt" theme, based on Zap and Emacs defaults.
+ """
+
+ background_color = '#000000'
+ highlight_color = '#0000ff'
+
+ styles = {
+ Comment: '#00ff00',
+ Name.Function: '#ffff00',
+ Name.Variable: '#eedd82',
+ Name.Constant: '#7fffd4',
+ Keyword: '#ff0000',
+ Comment.Preproc: '#e5e5e5',
+ String: '#87ceeb',
+ Keyword.Type: '#ee82ee',
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/tango.py b/contrib/python/Pygments/py2/pygments/styles/tango.py
index 5205244cf1..4827a24dca 100644
--- a/contrib/python/Pygments/py2/pygments/styles/tango.py
+++ b/contrib/python/Pygments/py2/pygments/styles/tango.py
@@ -1,141 +1,141 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.tango
- ~~~~~~~~~~~~~~~~~~~~~
-
- The Crunchy default Style inspired from the color palette from
- the Tango Icon Theme Guidelines.
-
- http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines
-
- Butter: #fce94f #edd400 #c4a000
- Orange: #fcaf3e #f57900 #ce5c00
- Chocolate: #e9b96e #c17d11 #8f5902
- Chameleon: #8ae234 #73d216 #4e9a06
- Sky Blue: #729fcf #3465a4 #204a87
- Plum: #ad7fa8 #75507b #5c35cc
- Scarlet Red:#ef2929 #cc0000 #a40000
- Aluminium: #eeeeec #d3d7cf #babdb6
- #888a85 #555753 #2e3436
-
- Not all of the above colors are used; other colors added:
- very light grey: #f8f8f8 (for background)
-
- This style can be used as a template as it includes all the known
- Token types, unlike most (if not all) of the styles included in the
- Pygments distribution.
-
- However, since Crunchy is intended to be used by beginners, we have strived
- to create a style that glosses over subtle distinctions between different
- categories.
-
- Taking Python for example, comments (Comment.*) and docstrings (String.Doc)
- have been chosen to have the same style. Similarly, keywords (Keyword.*),
- and Operator.Word (and, or, in) have been assigned the same style.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.tango
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ The Crunchy default Style inspired from the color palette from
+ the Tango Icon Theme Guidelines.
+
+ http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines
+
+ Butter: #fce94f #edd400 #c4a000
+ Orange: #fcaf3e #f57900 #ce5c00
+ Chocolate: #e9b96e #c17d11 #8f5902
+ Chameleon: #8ae234 #73d216 #4e9a06
+ Sky Blue: #729fcf #3465a4 #204a87
+ Plum: #ad7fa8 #75507b #5c35cc
+ Scarlet Red:#ef2929 #cc0000 #a40000
+ Aluminium: #eeeeec #d3d7cf #babdb6
+ #888a85 #555753 #2e3436
+
+ Not all of the above colors are used; other colors added:
+ very light grey: #f8f8f8 (for background)
+
+ This style can be used as a template as it includes all the known
+ Token types, unlike most (if not all) of the styles included in the
+ Pygments distribution.
+
+ However, since Crunchy is intended to be used by beginners, we have strived
+ to create a style that glosses over subtle distinctions between different
+ categories.
+
+ Taking Python for example, comments (Comment.*) and docstrings (String.Doc)
+ have been chosen to have the same style. Similarly, keywords (Keyword.*),
+ and Operator.Word (and, or, in) have been assigned the same style.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-class TangoStyle(Style):
- """
- The Crunchy default Style inspired from the color palette from
- the Tango Icon Theme Guidelines.
- """
-
- # work in progress...
-
- background_color = "#f8f8f8"
- default_style = ""
-
- styles = {
- # No corresponding class for the following:
- #Text: "", # class: ''
- Whitespace: "underline #f8f8f8", # class: 'w'
- Error: "#a40000 border:#ef2929", # class: 'err'
- Other: "#000000", # class 'x'
-
- Comment: "italic #8f5902", # class: 'c'
- Comment.Multiline: "italic #8f5902", # class: 'cm'
- Comment.Preproc: "italic #8f5902", # class: 'cp'
- Comment.Single: "italic #8f5902", # class: 'c1'
- Comment.Special: "italic #8f5902", # class: 'cs'
-
- Keyword: "bold #204a87", # class: 'k'
- Keyword.Constant: "bold #204a87", # class: 'kc'
- Keyword.Declaration: "bold #204a87", # class: 'kd'
- Keyword.Namespace: "bold #204a87", # class: 'kn'
- Keyword.Pseudo: "bold #204a87", # class: 'kp'
- Keyword.Reserved: "bold #204a87", # class: 'kr'
- Keyword.Type: "bold #204a87", # class: 'kt'
-
- Operator: "bold #ce5c00", # class: 'o'
- Operator.Word: "bold #204a87", # class: 'ow' - like keywords
-
- Punctuation: "bold #000000", # class: 'p'
-
- # because special names such as Name.Class, Name.Function, etc.
- # are not recognized as such later in the parsing, we choose them
- # to look the same as ordinary variables.
- Name: "#000000", # class: 'n'
- Name.Attribute: "#c4a000", # class: 'na' - to be revised
- Name.Builtin: "#204a87", # class: 'nb'
- Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
- Name.Class: "#000000", # class: 'nc' - to be revised
- Name.Constant: "#000000", # class: 'no' - to be revised
- Name.Decorator: "bold #5c35cc", # class: 'nd' - to be revised
- Name.Entity: "#ce5c00", # class: 'ni'
- Name.Exception: "bold #cc0000", # class: 'ne'
- Name.Function: "#000000", # class: 'nf'
- Name.Property: "#000000", # class: 'py'
- Name.Label: "#f57900", # class: 'nl'
- Name.Namespace: "#000000", # class: 'nn' - to be revised
- Name.Other: "#000000", # class: 'nx'
- Name.Tag: "bold #204a87", # class: 'nt' - like a keyword
- Name.Variable: "#000000", # class: 'nv' - to be revised
- Name.Variable.Class: "#000000", # class: 'vc' - to be revised
- Name.Variable.Global: "#000000", # class: 'vg' - to be revised
- Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
-
- # since the tango light blue does not show up well in text, we choose
- # a pure blue instead.
- Number: "bold #0000cf", # class: 'm'
- Number.Float: "bold #0000cf", # class: 'mf'
- Number.Hex: "bold #0000cf", # class: 'mh'
- Number.Integer: "bold #0000cf", # class: 'mi'
- Number.Integer.Long: "bold #0000cf", # class: 'il'
- Number.Oct: "bold #0000cf", # class: 'mo'
-
- Literal: "#000000", # class: 'l'
- Literal.Date: "#000000", # class: 'ld'
-
- String: "#4e9a06", # class: 's'
- String.Backtick: "#4e9a06", # class: 'sb'
- String.Char: "#4e9a06", # class: 'sc'
- String.Doc: "italic #8f5902", # class: 'sd' - like a comment
- String.Double: "#4e9a06", # class: 's2'
- String.Escape: "#4e9a06", # class: 'se'
- String.Heredoc: "#4e9a06", # class: 'sh'
- String.Interpol: "#4e9a06", # class: 'si'
- String.Other: "#4e9a06", # class: 'sx'
- String.Regex: "#4e9a06", # class: 'sr'
- String.Single: "#4e9a06", # class: 's1'
- String.Symbol: "#4e9a06", # class: 'ss'
-
- Generic: "#000000", # class: 'g'
- Generic.Deleted: "#a40000", # class: 'gd'
- Generic.Emph: "italic #000000", # class: 'ge'
- Generic.Error: "#ef2929", # class: 'gr'
- Generic.Heading: "bold #000080", # class: 'gh'
- Generic.Inserted: "#00A000", # class: 'gi'
- Generic.Output: "italic #000000", # class: 'go'
- Generic.Prompt: "#8f5902", # class: 'gp'
- Generic.Strong: "bold #000000", # class: 'gs'
- Generic.Subheading: "bold #800080", # class: 'gu'
- Generic.Traceback: "bold #a40000", # class: 'gt'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+class TangoStyle(Style):
+ """
+ The Crunchy default Style inspired from the color palette from
+ the Tango Icon Theme Guidelines.
+ """
+
+ # work in progress...
+
+ background_color = "#f8f8f8"
+ default_style = ""
+
+ styles = {
+ # No corresponding class for the following:
+ #Text: "", # class: ''
+ Whitespace: "underline #f8f8f8", # class: 'w'
+ Error: "#a40000 border:#ef2929", # class: 'err'
+ Other: "#000000", # class 'x'
+
+ Comment: "italic #8f5902", # class: 'c'
+ Comment.Multiline: "italic #8f5902", # class: 'cm'
+ Comment.Preproc: "italic #8f5902", # class: 'cp'
+ Comment.Single: "italic #8f5902", # class: 'c1'
+ Comment.Special: "italic #8f5902", # class: 'cs'
+
+ Keyword: "bold #204a87", # class: 'k'
+ Keyword.Constant: "bold #204a87", # class: 'kc'
+ Keyword.Declaration: "bold #204a87", # class: 'kd'
+ Keyword.Namespace: "bold #204a87", # class: 'kn'
+ Keyword.Pseudo: "bold #204a87", # class: 'kp'
+ Keyword.Reserved: "bold #204a87", # class: 'kr'
+ Keyword.Type: "bold #204a87", # class: 'kt'
+
+ Operator: "bold #ce5c00", # class: 'o'
+ Operator.Word: "bold #204a87", # class: 'ow' - like keywords
+
+ Punctuation: "bold #000000", # class: 'p'
+
+ # because special names such as Name.Class, Name.Function, etc.
+ # are not recognized as such later in the parsing, we choose them
+ # to look the same as ordinary variables.
+ Name: "#000000", # class: 'n'
+ Name.Attribute: "#c4a000", # class: 'na' - to be revised
+ Name.Builtin: "#204a87", # class: 'nb'
+ Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
+ Name.Class: "#000000", # class: 'nc' - to be revised
+ Name.Constant: "#000000", # class: 'no' - to be revised
+ Name.Decorator: "bold #5c35cc", # class: 'nd' - to be revised
+ Name.Entity: "#ce5c00", # class: 'ni'
+ Name.Exception: "bold #cc0000", # class: 'ne'
+ Name.Function: "#000000", # class: 'nf'
+ Name.Property: "#000000", # class: 'py'
+ Name.Label: "#f57900", # class: 'nl'
+ Name.Namespace: "#000000", # class: 'nn' - to be revised
+ Name.Other: "#000000", # class: 'nx'
+ Name.Tag: "bold #204a87", # class: 'nt' - like a keyword
+ Name.Variable: "#000000", # class: 'nv' - to be revised
+ Name.Variable.Class: "#000000", # class: 'vc' - to be revised
+ Name.Variable.Global: "#000000", # class: 'vg' - to be revised
+ Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
+
+ # since the tango light blue does not show up well in text, we choose
+ # a pure blue instead.
+ Number: "bold #0000cf", # class: 'm'
+ Number.Float: "bold #0000cf", # class: 'mf'
+ Number.Hex: "bold #0000cf", # class: 'mh'
+ Number.Integer: "bold #0000cf", # class: 'mi'
+ Number.Integer.Long: "bold #0000cf", # class: 'il'
+ Number.Oct: "bold #0000cf", # class: 'mo'
+
+ Literal: "#000000", # class: 'l'
+ Literal.Date: "#000000", # class: 'ld'
+
+ String: "#4e9a06", # class: 's'
+ String.Backtick: "#4e9a06", # class: 'sb'
+ String.Char: "#4e9a06", # class: 'sc'
+ String.Doc: "italic #8f5902", # class: 'sd' - like a comment
+ String.Double: "#4e9a06", # class: 's2'
+ String.Escape: "#4e9a06", # class: 'se'
+ String.Heredoc: "#4e9a06", # class: 'sh'
+ String.Interpol: "#4e9a06", # class: 'si'
+ String.Other: "#4e9a06", # class: 'sx'
+ String.Regex: "#4e9a06", # class: 'sr'
+ String.Single: "#4e9a06", # class: 's1'
+ String.Symbol: "#4e9a06", # class: 'ss'
+
+ Generic: "#000000", # class: 'g'
+ Generic.Deleted: "#a40000", # class: 'gd'
+ Generic.Emph: "italic #000000", # class: 'ge'
+ Generic.Error: "#ef2929", # class: 'gr'
+ Generic.Heading: "bold #000080", # class: 'gh'
+ Generic.Inserted: "#00A000", # class: 'gi'
+ Generic.Output: "italic #000000", # class: 'go'
+ Generic.Prompt: "#8f5902", # class: 'gp'
+ Generic.Strong: "bold #000000", # class: 'gs'
+ Generic.Subheading: "bold #800080", # class: 'gu'
+ Generic.Traceback: "bold #a40000", # class: 'gt'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/trac.py b/contrib/python/Pygments/py2/pygments/styles/trac.py
index 1e96584db2..ecbbe30cc4 100644
--- a/contrib/python/Pygments/py2/pygments/styles/trac.py
+++ b/contrib/python/Pygments/py2/pygments/styles/trac.py
@@ -1,63 +1,63 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.trac
- ~~~~~~~~~~~~~~~~~~~~
-
- Port of the default trac highlighter design.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.trac
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Port of the default trac highlighter design.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class TracStyle(Style):
- """
- Port of the default trac highlighter design.
- """
-
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: 'italic #999988',
- Comment.Preproc: 'bold noitalic #999999',
- Comment.Special: 'bold #999999',
-
- Operator: 'bold',
-
- String: '#bb8844',
- String.Regex: '#808000',
-
- Number: '#009999',
-
- Keyword: 'bold',
- Keyword.Type: '#445588',
-
- Name.Builtin: '#999999',
- Name.Function: 'bold #990000',
- Name.Class: 'bold #445588',
- Name.Exception: 'bold #990000',
- Name.Namespace: '#555555',
- Name.Variable: '#008080',
- Name.Constant: '#008080',
- Name.Tag: '#000080',
- Name.Attribute: '#008080',
- Name.Entity: '#800080',
-
- Generic.Heading: '#999999',
- Generic.Subheading: '#aaaaaa',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class TracStyle(Style):
+ """
+ Port of the default trac highlighter design.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: 'italic #999988',
+ Comment.Preproc: 'bold noitalic #999999',
+ Comment.Special: 'bold #999999',
+
+ Operator: 'bold',
+
+ String: '#bb8844',
+ String.Regex: '#808000',
+
+ Number: '#009999',
+
+ Keyword: 'bold',
+ Keyword.Type: '#445588',
+
+ Name.Builtin: '#999999',
+ Name.Function: 'bold #990000',
+ Name.Class: 'bold #445588',
+ Name.Exception: 'bold #990000',
+ Name.Namespace: '#555555',
+ Name.Variable: '#008080',
+ Name.Constant: '#008080',
+ Name.Tag: '#000080',
+ Name.Attribute: '#008080',
+ Name.Entity: '#800080',
+
+ Generic.Heading: '#999999',
+ Generic.Subheading: '#aaaaaa',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/vim.py b/contrib/python/Pygments/py2/pygments/styles/vim.py
index 954e91d5a0..15e1936615 100644
--- a/contrib/python/Pygments/py2/pygments/styles/vim.py
+++ b/contrib/python/Pygments/py2/pygments/styles/vim.py
@@ -1,63 +1,63 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.vim
- ~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by vim.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.vim
+ ~~~~~~~~~~~~~~~~~~~
+
+ A highlighting style for Pygments, inspired by vim.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace, Token
-
-
-class VimStyle(Style):
- """
- Styles somewhat like vim 7.0
- """
-
- background_color = "#000000"
- highlight_color = "#222222"
- default_style = "#cccccc"
-
- styles = {
- Token: "#cccccc",
- Whitespace: "",
- Comment: "#000080",
- Comment.Preproc: "",
- Comment.Special: "bold #cd0000",
-
- Keyword: "#cdcd00",
- Keyword.Declaration: "#00cd00",
- Keyword.Namespace: "#cd00cd",
- Keyword.Pseudo: "",
- Keyword.Type: "#00cd00",
-
- Operator: "#3399cc",
- Operator.Word: "#cdcd00",
-
- Name: "",
- Name.Class: "#00cdcd",
- Name.Builtin: "#cd00cd",
- Name.Exception: "bold #666699",
- Name.Variable: "#00cdcd",
-
- String: "#cd0000",
- Number: "#cd00cd",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#cd0000",
- Generic.Inserted: "#00cd00",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace, Token
+
+
+class VimStyle(Style):
+ """
+ Styles somewhat like vim 7.0
+ """
+
+ background_color = "#000000"
+ highlight_color = "#222222"
+ default_style = "#cccccc"
+
+ styles = {
+ Token: "#cccccc",
+ Whitespace: "",
+ Comment: "#000080",
+ Comment.Preproc: "",
+ Comment.Special: "bold #cd0000",
+
+ Keyword: "#cdcd00",
+ Keyword.Declaration: "#00cd00",
+ Keyword.Namespace: "#cd00cd",
+ Keyword.Pseudo: "",
+ Keyword.Type: "#00cd00",
+
+ Operator: "#3399cc",
+ Operator.Word: "#cdcd00",
+
+ Name: "",
+ Name.Class: "#00cdcd",
+ Name.Builtin: "#cd00cd",
+ Name.Exception: "bold #666699",
+ Name.Variable: "#00cdcd",
+
+ String: "#cd0000",
+ Number: "#cd00cd",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#cd0000",
+ Generic.Inserted: "#00cd00",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py2/pygments/styles/vs.py b/contrib/python/Pygments/py2/pygments/styles/vs.py
index 111e4aed07..ccfcb870eb 100644
--- a/contrib/python/Pygments/py2/pygments/styles/vs.py
+++ b/contrib/python/Pygments/py2/pygments/styles/vs.py
@@ -1,38 +1,38 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.vs
- ~~~~~~~~~~~~~~~~~~
-
- Simple style with MS Visual Studio colors.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.vs
+ ~~~~~~~~~~~~~~~~~~
+
+ Simple style with MS Visual Studio colors.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Operator, Generic
-
-
-class VisualStudioStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "#008000",
- Comment.Preproc: "#0000ff",
- Keyword: "#0000ff",
- Operator.Word: "#0000ff",
- Keyword.Type: "#2b91af",
- Name.Class: "#2b91af",
- String: "#a31515",
-
- Generic.Heading: "bold",
- Generic.Subheading: "bold",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Operator, Generic
+
+
+class VisualStudioStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "#008000",
+ Comment.Preproc: "#0000ff",
+ Keyword: "#0000ff",
+ Operator.Word: "#0000ff",
+ Keyword.Type: "#2b91af",
+ Name.Class: "#2b91af",
+ String: "#a31515",
+
+ Generic.Heading: "bold",
+ Generic.Subheading: "bold",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold",
+
+ Error: "border:#FF0000"
+ }
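Styles such as vs, vim and tango are normally looked up by name through the style registry rather than imported directly. A short illustration with the standard pygments.styles helpers (assumed API, not part of this diff):

    from pygments.styles import get_all_styles, get_style_by_name

    print(sorted(get_all_styles()))         # registry names, e.g. 'tango', 'vim', 'vs', ...
    VisualStudio = get_style_by_name('vs')  # resolves to the VisualStudioStyle above
    print(VisualStudio.background_color)    # '#ffffff'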
diff --git a/contrib/python/Pygments/py2/pygments/styles/xcode.py b/contrib/python/Pygments/py2/pygments/styles/xcode.py
index 7e87d0837f..71ff49c144 100644
--- a/contrib/python/Pygments/py2/pygments/styles/xcode.py
+++ b/contrib/python/Pygments/py2/pygments/styles/xcode.py
@@ -1,51 +1,51 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.styles.xcode
- ~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the `Xcode` default theme.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.styles.xcode
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the `Xcode` default theme.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Literal
-
-
-class XcodeStyle(Style):
- """
- Style similar to the Xcode default colouring theme.
- """
-
- default_style = ''
-
- styles = {
- Comment: '#177500',
- Comment.Preproc: '#633820',
-
- String: '#C41A16',
- String.Char: '#2300CE',
-
- Operator: '#000000',
-
- Keyword: '#A90D91',
-
- Name: '#000000',
- Name.Attribute: '#836C28',
- Name.Class: '#3F6E75',
- Name.Function: '#000000',
- Name.Builtin: '#A90D91',
- # In Obj-C code this token is used to colour Cocoa types
- Name.Builtin.Pseudo: '#5B269A',
- Name.Variable: '#000000',
- Name.Tag: '#000000',
- Name.Decorator: '#000000',
- # Workaround for a BUG here: lexer treats multiline method signatures as labels
- Name.Label: '#000000',
-
- Literal: '#1C01CE',
- Number: '#1C01CE',
- Error: '#000000',
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Literal
+
+
+class XcodeStyle(Style):
+ """
+ Style similar to the Xcode default colouring theme.
+ """
+
+ default_style = ''
+
+ styles = {
+ Comment: '#177500',
+ Comment.Preproc: '#633820',
+
+ String: '#C41A16',
+ String.Char: '#2300CE',
+
+ Operator: '#000000',
+
+ Keyword: '#A90D91',
+
+ Name: '#000000',
+ Name.Attribute: '#836C28',
+ Name.Class: '#3F6E75',
+ Name.Function: '#000000',
+ Name.Builtin: '#A90D91',
+ # In Obj-C code this token is used to colour Cocoa types
+ Name.Builtin.Pseudo: '#5B269A',
+ Name.Variable: '#000000',
+ Name.Tag: '#000000',
+ Name.Decorator: '#000000',
+ # Workaround for a BUG here: lexer treats multiline method signatures as labels
+ Name.Label: '#000000',
+
+ Literal: '#1C01CE',
+ Number: '#1C01CE',
+ Error: '#000000',
+ }
diff --git a/contrib/python/Pygments/py2/pygments/token.py b/contrib/python/Pygments/py2/pygments/token.py
index 5c30eb4665..3ddec95f3b 100644
--- a/contrib/python/Pygments/py2/pygments/token.py
+++ b/contrib/python/Pygments/py2/pygments/token.py
@@ -1,213 +1,213 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.token
- ~~~~~~~~~~~~~~
-
- Basic token types and the standard tokens.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.token
+ ~~~~~~~~~~~~~~
+
+ Basic token types and the standard tokens.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-class _TokenType(tuple):
- parent = None
-
- def split(self):
- buf = []
- node = self
- while node is not None:
- buf.append(node)
- node = node.parent
- buf.reverse()
- return buf
-
- def __init__(self, *args):
- # no need to call super.__init__
- self.subtypes = set()
-
- def __contains__(self, val):
- return self is val or (
- type(val) is self.__class__ and
- val[:len(self)] == self
- )
-
- def __getattr__(self, val):
- if not val or not val[0].isupper():
- return tuple.__getattribute__(self, val)
- new = _TokenType(self + (val,))
- setattr(self, val, new)
- self.subtypes.add(new)
- new.parent = self
- return new
-
- def __repr__(self):
- return 'Token' + (self and '.' or '') + '.'.join(self)
-
+ :license: BSD, see LICENSE for details.
+"""
+
+
+class _TokenType(tuple):
+ parent = None
+
+ def split(self):
+ buf = []
+ node = self
+ while node is not None:
+ buf.append(node)
+ node = node.parent
+ buf.reverse()
+ return buf
+
+ def __init__(self, *args):
+ # no need to call super.__init__
+ self.subtypes = set()
+
+ def __contains__(self, val):
+ return self is val or (
+ type(val) is self.__class__ and
+ val[:len(self)] == self
+ )
+
+ def __getattr__(self, val):
+ if not val or not val[0].isupper():
+ return tuple.__getattribute__(self, val)
+ new = _TokenType(self + (val,))
+ setattr(self, val, new)
+ self.subtypes.add(new)
+ new.parent = self
+ return new
+
+ def __repr__(self):
+ return 'Token' + (self and '.' or '') + '.'.join(self)
+
def __copy__(self):
# These instances are supposed to be singletons
return self
-
+
def __deepcopy__(self, memo):
# These instances are supposed to be singletons
return self
-
+
Token = _TokenType()
-# Special token types
+# Special token types
Text = Token.Text
Whitespace = Text.Whitespace
Escape = Token.Escape
Error = Token.Error
-# Text that doesn't belong to this lexer (e.g. HTML in PHP)
+# Text that doesn't belong to this lexer (e.g. HTML in PHP)
Other = Token.Other
-
-# Common token types for source code
+
+# Common token types for source code
Keyword = Token.Keyword
Name = Token.Name
Literal = Token.Literal
String = Literal.String
Number = Literal.Number
-Punctuation = Token.Punctuation
+Punctuation = Token.Punctuation
Operator = Token.Operator
Comment = Token.Comment
-
-# Generic types for non-source code
+
+# Generic types for non-source code
Generic = Token.Generic
-
+
# String and some others are not direct children of Token.
-# alias them:
-Token.Token = Token
-Token.String = String
-Token.Number = Number
-
-
-def is_token_subtype(ttype, other):
- """
- Return True if ``ttype`` is a subtype of ``other``.
-
- exists for backwards compatibility. use ``ttype in other`` now.
- """
- return ttype in other
-
-
-def string_to_tokentype(s):
- """
- Convert a string into a token type::
-
- >>> string_to_token('String.Double')
- Token.Literal.String.Double
- >>> string_to_token('Token.Literal.Number')
- Token.Literal.Number
- >>> string_to_token('')
- Token
-
- Tokens that are already tokens are returned unchanged:
-
- >>> string_to_token(String)
- Token.Literal.String
- """
- if isinstance(s, _TokenType):
- return s
- if not s:
- return Token
- node = Token
- for item in s.split('.'):
- node = getattr(node, item)
- return node
-
-
-# Map standard token types to short names, used in CSS class naming.
-# If you add a new item, please be sure to run this file to perform
-# a consistency check for duplicate values.
-STANDARD_TYPES = {
- Token: '',
-
- Text: '',
- Whitespace: 'w',
- Escape: 'esc',
- Error: 'err',
- Other: 'x',
-
- Keyword: 'k',
- Keyword.Constant: 'kc',
- Keyword.Declaration: 'kd',
- Keyword.Namespace: 'kn',
- Keyword.Pseudo: 'kp',
- Keyword.Reserved: 'kr',
- Keyword.Type: 'kt',
-
- Name: 'n',
- Name.Attribute: 'na',
- Name.Builtin: 'nb',
- Name.Builtin.Pseudo: 'bp',
- Name.Class: 'nc',
- Name.Constant: 'no',
- Name.Decorator: 'nd',
- Name.Entity: 'ni',
- Name.Exception: 'ne',
- Name.Function: 'nf',
+# alias them:
+Token.Token = Token
+Token.String = String
+Token.Number = Number
+
+
+def is_token_subtype(ttype, other):
+ """
+ Return True if ``ttype`` is a subtype of ``other``.
+
+ exists for backwards compatibility. use ``ttype in other`` now.
+ """
+ return ttype in other
+
+
+def string_to_tokentype(s):
+ """
+ Convert a string into a token type::
+
+ >>> string_to_token('String.Double')
+ Token.Literal.String.Double
+ >>> string_to_token('Token.Literal.Number')
+ Token.Literal.Number
+ >>> string_to_token('')
+ Token
+
+ Tokens that are already tokens are returned unchanged:
+
+ >>> string_to_token(String)
+ Token.Literal.String
+ """
+ if isinstance(s, _TokenType):
+ return s
+ if not s:
+ return Token
+ node = Token
+ for item in s.split('.'):
+ node = getattr(node, item)
+ return node
+
+
+# Map standard token types to short names, used in CSS class naming.
+# If you add a new item, please be sure to run this file to perform
+# a consistency check for duplicate values.
+STANDARD_TYPES = {
+ Token: '',
+
+ Text: '',
+ Whitespace: 'w',
+ Escape: 'esc',
+ Error: 'err',
+ Other: 'x',
+
+ Keyword: 'k',
+ Keyword.Constant: 'kc',
+ Keyword.Declaration: 'kd',
+ Keyword.Namespace: 'kn',
+ Keyword.Pseudo: 'kp',
+ Keyword.Reserved: 'kr',
+ Keyword.Type: 'kt',
+
+ Name: 'n',
+ Name.Attribute: 'na',
+ Name.Builtin: 'nb',
+ Name.Builtin.Pseudo: 'bp',
+ Name.Class: 'nc',
+ Name.Constant: 'no',
+ Name.Decorator: 'nd',
+ Name.Entity: 'ni',
+ Name.Exception: 'ne',
+ Name.Function: 'nf',
Name.Function.Magic: 'fm',
- Name.Property: 'py',
- Name.Label: 'nl',
- Name.Namespace: 'nn',
- Name.Other: 'nx',
- Name.Tag: 'nt',
- Name.Variable: 'nv',
- Name.Variable.Class: 'vc',
- Name.Variable.Global: 'vg',
- Name.Variable.Instance: 'vi',
+ Name.Property: 'py',
+ Name.Label: 'nl',
+ Name.Namespace: 'nn',
+ Name.Other: 'nx',
+ Name.Tag: 'nt',
+ Name.Variable: 'nv',
+ Name.Variable.Class: 'vc',
+ Name.Variable.Global: 'vg',
+ Name.Variable.Instance: 'vi',
Name.Variable.Magic: 'vm',
-
- Literal: 'l',
- Literal.Date: 'ld',
-
- String: 's',
+
+ Literal: 'l',
+ Literal.Date: 'ld',
+
+ String: 's',
String.Affix: 'sa',
- String.Backtick: 'sb',
- String.Char: 'sc',
+ String.Backtick: 'sb',
+ String.Char: 'sc',
String.Delimiter: 'dl',
- String.Doc: 'sd',
- String.Double: 's2',
- String.Escape: 'se',
- String.Heredoc: 'sh',
- String.Interpol: 'si',
- String.Other: 'sx',
- String.Regex: 'sr',
- String.Single: 's1',
- String.Symbol: 'ss',
-
- Number: 'm',
- Number.Bin: 'mb',
- Number.Float: 'mf',
- Number.Hex: 'mh',
- Number.Integer: 'mi',
- Number.Integer.Long: 'il',
- Number.Oct: 'mo',
-
- Operator: 'o',
- Operator.Word: 'ow',
-
- Punctuation: 'p',
-
- Comment: 'c',
- Comment.Hashbang: 'ch',
- Comment.Multiline: 'cm',
- Comment.Preproc: 'cp',
- Comment.PreprocFile: 'cpf',
- Comment.Single: 'c1',
- Comment.Special: 'cs',
-
- Generic: 'g',
- Generic.Deleted: 'gd',
- Generic.Emph: 'ge',
- Generic.Error: 'gr',
- Generic.Heading: 'gh',
- Generic.Inserted: 'gi',
- Generic.Output: 'go',
- Generic.Prompt: 'gp',
- Generic.Strong: 'gs',
- Generic.Subheading: 'gu',
- Generic.Traceback: 'gt',
-}
+ String.Doc: 'sd',
+ String.Double: 's2',
+ String.Escape: 'se',
+ String.Heredoc: 'sh',
+ String.Interpol: 'si',
+ String.Other: 'sx',
+ String.Regex: 'sr',
+ String.Single: 's1',
+ String.Symbol: 'ss',
+
+ Number: 'm',
+ Number.Bin: 'mb',
+ Number.Float: 'mf',
+ Number.Hex: 'mh',
+ Number.Integer: 'mi',
+ Number.Integer.Long: 'il',
+ Number.Oct: 'mo',
+
+ Operator: 'o',
+ Operator.Word: 'ow',
+
+ Punctuation: 'p',
+
+ Comment: 'c',
+ Comment.Hashbang: 'ch',
+ Comment.Multiline: 'cm',
+ Comment.Preproc: 'cp',
+ Comment.PreprocFile: 'cpf',
+ Comment.Single: 'c1',
+ Comment.Special: 'cs',
+
+ Generic: 'g',
+ Generic.Deleted: 'gd',
+ Generic.Emph: 'ge',
+ Generic.Error: 'gr',
+ Generic.Heading: 'gh',
+ Generic.Inserted: 'gi',
+ Generic.Output: 'go',
+ Generic.Prompt: 'gp',
+ Generic.Strong: 'gs',
+ Generic.Subheading: 'gu',
+ Generic.Traceback: 'gt',
+}
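
For reference, the helpers restored above are the public Pygments API for converting between dotted token names, token-type objects, and the short CSS class names in STANDARD_TYPES. A minimal usage sketch (not part of this change, assuming a standard Pygments installation is importable):

    from pygments.token import (
        STANDARD_TYPES, String, Token, is_token_subtype, string_to_tokentype,
    )

    # Dotted names are resolved attribute by attribute, starting from Token.
    ttype = string_to_tokentype('Literal.String.Double')
    assert ttype is Token.Literal.String.Double

    # Existing token types pass through unchanged; subtype checks use ``in``.
    assert string_to_tokentype(String) is String
    assert is_token_subtype(ttype, String)

    # STANDARD_TYPES maps each standard token type to its short CSS class name.
    print(STANDARD_TYPES[String.Double])   # 's2'

The unistring module diffed next exposes one string per Unicode category (Lu, Ll, Nd, xid_start, ...), and such strings are typically dropped into regex character classes. A small sketch of that pattern, again assuming the module is importable as pygments.unistring:

    import re

    from pygments import unistring as uni

    # A crude Unicode-aware identifier matcher built from the category strings.
    ident = re.compile(u'[%s][%s]*' % (uni.xid_start, uni.xid_continue), re.UNICODE)
    assert ident.match(u'переменная1')
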
diff --git a/contrib/python/Pygments/py2/pygments/unistring.py b/contrib/python/Pygments/py2/pygments/unistring.py
index dd011cf0bf..f1e9ffa5c3 100644
--- a/contrib/python/Pygments/py2/pygments/unistring.py
+++ b/contrib/python/Pygments/py2/pygments/unistring.py
@@ -1,221 +1,221 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.unistring
- ~~~~~~~~~~~~~~~~~~
-
- Strings of all Unicode characters of a certain category.
- Used for matching in Unicode-aware languages. Run to regenerate.
-
- Inspired by chartypes_create.py from the MoinMoin project.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.unistring
+ ~~~~~~~~~~~~~~~~~~
+
+ Strings of all Unicode characters of a certain category.
+ Used for matching in Unicode-aware languages. Run to regenerate.
+
+ Inspired by chartypes_create.py from the MoinMoin project.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-
-Cc = u'\x00-\x1f\x7f-\x9f'
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import sys
+
+Cc = u'\x00-\x1f\x7f-\x9f'
+
Cf = u'\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb'
-
+
Cn = u'\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\u
fefe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff'
-
-Co = u'\ue000-\uf8ff'
-
-try:
- Cs = eval(r"u'\ud800-\udbff\\\udc00\udc01-\udfff'")
-except UnicodeDecodeError:
- Cs = '' # Jython can't handle isolated surrogates
-
+
+Co = u'\ue000-\uf8ff'
+
+try:
+ Cs = eval(r"u'\ud800-\udbff\\\udc00\udc01-\udfff'")
+except UnicodeDecodeError:
+ Cs = '' # Jython can't handle isolated surrogates
+
Ll = u'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\
ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a'
-
+
Lm = u'\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f'
-
+
Lo = u'\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc'
-
-Lt = u'\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
-
+
+Lt = u'\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
+
Lu = u'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\u
a724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a'
-
+
Mc = u'\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec'
-
+
Me = u'\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
-
+
Mn = u'\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f'
-
+
Nd = u'0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19'
-
-Nl = u'\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef'
-
+
+Nl = u'\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef'
+
No = u'\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835'
-
-Pc = u'_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
-
+
+Pc = u'_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
+
Pd = u'\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
-
+
Pe = u')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
-
-Pf = u'\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
-
-Pi = u'\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
-
+
+Pf = u'\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
+
+Pi = u'\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
+
Po = u"!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65"
-
+
Ps = u'(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
-
+
Sc = u'$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6'
-
+
Sk = u'\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3'
-
-Sm = u'+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec'
-
+
+Sm = u'+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec'
+
So = u'\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd'
-
-Zl = u'\u2028'
-
-Zp = u'\u2029'
-
-Zs = u' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
-
+
+Zl = u'\u2028'
+
+Zp = u'\u2029'
+
+Zs = u' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
+
xid_continue = u'0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1
fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc'
-
+
xid_start = u'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801
\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc'
-
-if sys.maxunicode > 0xFFFF:
- # non-BMP characters, use only on wide Unicode builds
+
+if sys.maxunicode > 0xFFFF:
+ # non-BMP characters, use only on wide Unicode builds
Cf += u'\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
-
+
Cn += u'\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U0001d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a
1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
-
- Co += u'\U000f0000-\U000ffffd\U00100000-\U0010fffd'
-
+
+ Co += u'\U000f0000-\U000ffffd\U00100000-\U0010fffd'
+
Ll += u'\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943'
-
+
Lm += u'\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1'
-
+
Lo += u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
-
+
Lu += u'\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921'
-
+
Mc += u'\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
-
+
Mn += u'\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef'
-
+
Nd += u'\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959'
-
+
Nl += u'\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e'
-
+
No += u'\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c'
-
+
Po += u'\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f'
-
+
Sc += u'\U0001ecb0'
Sk += u'\U0001f3fb-\U0001f3ff'
- Sm += u'\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
-
+ Sm += u'\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
+
So += u'\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d'
-
+
xid_continue += u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc
90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
-
+
xid_start += u'\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U00
01ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
-
-cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
-
+
+cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
+
# Generated from unidata 11.0.0
-
-def combine(*args):
- return u''.join(globals()[cat] for cat in args)
-
-
-def allexcept(*args):
- newcats = cats[:]
- for arg in args:
- newcats.remove(arg)
- return u''.join(globals()[cat] for cat in newcats)
-
-
-def _handle_runs(char_list): # pragma: no cover
- buf = []
- for c in char_list:
- if len(c) == 1:
- if buf and buf[-1][1] == chr(ord(c)-1):
- buf[-1] = (buf[-1][0], c)
- else:
- buf.append((c, c))
- else:
- buf.append((c, c))
- for a, b in buf:
- if a == b:
- yield a
- else:
- yield u'%s-%s' % (a, b)
-
-
-if __name__ == '__main__': # pragma: no cover
- import unicodedata
-
- # we need Py3 for the determination of the XID_* properties
- if sys.version_info[:2] < (3, 3):
- raise RuntimeError('this file must be regenerated with Python 3.3+')
-
- categories_bmp = {'xid_start': [], 'xid_continue': []}
- categories_nonbmp = {'xid_start': [], 'xid_continue': []}
-
- with open(__file__) as fp:
- content = fp.read()
-
- header = content[:content.find('Cc =')]
- footer = content[content.find("def combine("):]
-
- for code in range(0x110000):
- c = chr(code)
- cat = unicodedata.category(c)
- if ord(c) == 0xdc00:
- # Hack to avoid combining this with the preceding high
- # surrogate, 0xdbff, when doing a repr.
- c = u'\\' + c
- elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
- # Escape regex metachars.
- c = u'\\' + c
- cat_dic = categories_bmp if code < 0x10000 else categories_nonbmp
- cat_dic.setdefault(cat, []).append(c)
- # XID_START and XID_CONTINUE are special categories used for matching
- # identifiers in Python 3.
- if c.isidentifier():
- cat_dic['xid_start'].append(c)
- if ('a' + c).isidentifier():
- cat_dic['xid_continue'].append(c)
-
- with open(__file__, 'w') as fp:
- fp.write(header)
-
- for cat in sorted(categories_bmp):
- val = u''.join(_handle_runs(categories_bmp[cat]))
- if cat == 'Cs':
- # Jython can't handle isolated surrogates
- fp.write("""\
-try:
- Cs = eval(r"u%s")
-except UnicodeDecodeError:
- Cs = '' # Jython can't handle isolated surrogates\n\n""" % ascii(val))
- else:
- fp.write('%s = u%a\n\n' % (cat, val))
-
- fp.write('if sys.maxunicode > 0xFFFF:\n')
- fp.write(' # non-BMP characters, use only on wide Unicode builds\n')
- for cat in sorted(categories_nonbmp):
- # no special case for Cs needed, since there are no surrogates
- # in the higher planes
- val = u''.join(_handle_runs(categories_nonbmp[cat]))
- fp.write(' %s += u%a\n\n' % (cat, val))
-
- cats = sorted(categories_bmp)
- cats.remove('xid_start')
- cats.remove('xid_continue')
- fp.write('cats = %r\n\n' % cats)
-
- fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
-
- fp.write(footer)
+
+def combine(*args):
+ return u''.join(globals()[cat] for cat in args)
+
+
+def allexcept(*args):
+ newcats = cats[:]
+ for arg in args:
+ newcats.remove(arg)
+ return u''.join(globals()[cat] for cat in newcats)
+
+
+def _handle_runs(char_list): # pragma: no cover
+ buf = []
+ for c in char_list:
+ if len(c) == 1:
+ if buf and buf[-1][1] == chr(ord(c)-1):
+ buf[-1] = (buf[-1][0], c)
+ else:
+ buf.append((c, c))
+ else:
+ buf.append((c, c))
+ for a, b in buf:
+ if a == b:
+ yield a
+ else:
+ yield u'%s-%s' % (a, b)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import unicodedata
+
+ # we need Py3 for the determination of the XID_* properties
+ if sys.version_info[:2] < (3, 3):
+ raise RuntimeError('this file must be regenerated with Python 3.3+')
+
+ categories_bmp = {'xid_start': [], 'xid_continue': []}
+ categories_nonbmp = {'xid_start': [], 'xid_continue': []}
+
+ with open(__file__) as fp:
+ content = fp.read()
+
+ header = content[:content.find('Cc =')]
+ footer = content[content.find("def combine("):]
+
+ for code in range(0x110000):
+ c = chr(code)
+ cat = unicodedata.category(c)
+ if ord(c) == 0xdc00:
+ # Hack to avoid combining this with the preceding high
+ # surrogate, 0xdbff, when doing a repr.
+ c = u'\\' + c
+ elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
+ # Escape regex metachars.
+ c = u'\\' + c
+ cat_dic = categories_bmp if code < 0x10000 else categories_nonbmp
+ cat_dic.setdefault(cat, []).append(c)
+ # XID_START and XID_CONTINUE are special categories used for matching
+ # identifiers in Python 3.
+ if c.isidentifier():
+ cat_dic['xid_start'].append(c)
+ if ('a' + c).isidentifier():
+ cat_dic['xid_continue'].append(c)
+
+ with open(__file__, 'w') as fp:
+ fp.write(header)
+
+ for cat in sorted(categories_bmp):
+ val = u''.join(_handle_runs(categories_bmp[cat]))
+ if cat == 'Cs':
+ # Jython can't handle isolated surrogates
+ fp.write("""\
+try:
+ Cs = eval(r"u%s")
+except UnicodeDecodeError:
+ Cs = '' # Jython can't handle isolated surrogates\n\n""" % ascii(val))
+ else:
+ fp.write('%s = u%a\n\n' % (cat, val))
+
+ fp.write('if sys.maxunicode > 0xFFFF:\n')
+ fp.write(' # non-BMP characters, use only on wide Unicode builds\n')
+ for cat in sorted(categories_nonbmp):
+ # no special case for Cs needed, since there are no surrogates
+ # in the higher planes
+ val = u''.join(_handle_runs(categories_nonbmp[cat]))
+ fp.write(' %s += u%a\n\n' % (cat, val))
+
+ cats = sorted(categories_bmp)
+ cats.remove('xid_start')
+ cats.remove('xid_continue')
+ fp.write('cats = %r\n\n' % cats)
+
+ fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
+
+ fp.write(footer)
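The regeneration block restored above emits the category strings (Lu, Ll, ..., xid_start, xid_continue), compressed into 'a-z' style ranges by _handle_runs(), together with the combine()/allexcept() helpers that lexers use to build character classes. A rough sketch of how these tables are typically consumed — the import path and helper names follow the file in this diff, while the sample string is invented:

import re
from pygments import unistring as uni

# One XID_START character followed by any number of XID_CONTINUE characters,
# the usual Unicode identifier pattern; the generated strings are already
# escaped for use inside a character class.
ident_re = re.compile('[%s][%s]*' % (uni.xid_start, uni.xid_continue), re.UNICODE)

# combine() concatenates the range strings of the named categories;
# allexcept() concatenates every category string except the named ones.
letters = uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo')
non_control = uni.allexcept('Cc', 'Cf', 'Cn', 'Co', 'Cs')

print(bool(ident_re.match(u'önsöz')))  # True (assuming a wide Unicode build)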
diff --git a/contrib/python/Pygments/py2/pygments/util.py b/contrib/python/Pygments/py2/pygments/util.py
index 054b705e85..e4d4815bf1 100644
--- a/contrib/python/Pygments/py2/pygments/util.py
+++ b/contrib/python/Pygments/py2/pygments/util.py
@@ -1,388 +1,388 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.util
- ~~~~~~~~~~~~~
-
- Utility functions.
-
+# -*- coding: utf-8 -*-
+"""
+ pygments.util
+ ~~~~~~~~~~~~~
+
+ Utility functions.
+
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-
-
-split_path_re = re.compile(r'[/\\ ]')
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import sys
+
+
+split_path_re = re.compile(r'[/\\ ]')
doctype_lookup_re = re.compile(r'''
- (<\?.*?\?>)?\s*
- <!DOCTYPE\s+(
- [a-zA-Z_][a-zA-Z0-9]*
- (?: \s+ # optional in HTML5
- [a-zA-Z_][a-zA-Z0-9]*\s+
- "[^"]*")?
- )
- [^>]*>
+ (<\?.*?\?>)?\s*
+ <!DOCTYPE\s+(
+ [a-zA-Z_][a-zA-Z0-9]*
+ (?: \s+ # optional in HTML5
+ [a-zA-Z_][a-zA-Z0-9]*\s+
+ "[^"]*")?
+ )
+ [^>]*>
''', re.DOTALL | re.MULTILINE | re.VERBOSE)
tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>',
re.UNICODE | re.IGNORECASE | re.DOTALL | re.MULTILINE)
-xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)
-
-
-class ClassNotFound(ValueError):
- """Raised if one of the lookup functions didn't find a matching class."""
-
-
-class OptionError(Exception):
- pass
-
-
-def get_choice_opt(options, optname, allowed, default=None, normcase=False):
- string = options.get(optname, default)
- if normcase:
- string = string.lower()
- if string not in allowed:
- raise OptionError('Value for option %s must be one of %s' %
- (optname, ', '.join(map(str, allowed))))
- return string
-
-
-def get_bool_opt(options, optname, default=None):
- string = options.get(optname, default)
- if isinstance(string, bool):
- return string
- elif isinstance(string, int):
- return bool(string)
- elif not isinstance(string, string_types):
- raise OptionError('Invalid type %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
- elif string.lower() in ('1', 'yes', 'true', 'on'):
- return True
- elif string.lower() in ('0', 'no', 'false', 'off'):
- return False
- else:
- raise OptionError('Invalid value %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
-
-
-def get_int_opt(options, optname, default=None):
- string = options.get(optname, default)
- try:
- return int(string)
- except TypeError:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
- except ValueError:
- raise OptionError('Invalid value %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
-
-
-def get_list_opt(options, optname, default=None):
- val = options.get(optname, default)
- if isinstance(val, string_types):
- return val.split()
- elif isinstance(val, (list, tuple)):
- return list(val)
- else:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give a list value' % (
- val, optname))
-
-
-def docstring_headline(obj):
- if not obj.__doc__:
- return ''
- res = []
- for line in obj.__doc__.strip().splitlines():
- if line.strip():
- res.append(" " + line.strip())
- else:
- break
- return ''.join(res).lstrip()
-
-
-def make_analysator(f):
- """Return a static text analyser function that returns float values."""
- def text_analyse(text):
- try:
- rv = f(text)
- except Exception:
- return 0.0
- if not rv:
- return 0.0
- try:
- return min(1.0, max(0.0, float(rv)))
- except (ValueError, TypeError):
- return 0.0
- text_analyse.__doc__ = f.__doc__
- return staticmethod(text_analyse)
-
-
-def shebang_matches(text, regex):
- r"""Check if the given regular expression matches the last part of the
- shebang if one exists.
-
- >>> from pygments.util import shebang_matches
- >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
- True
- >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
- True
- >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
- False
- >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
- False
- >>> shebang_matches('#!/usr/bin/startsomethingwith python',
- ... r'python(2\.\d)?')
- True
-
- It also checks for common Windows executable file extensions::
-
- >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
- True
-
- Parameters such as ``'-f'`` or ``'--foo'`` are ignored, so ``'perl'``
- matches the same as ``'perl -e'``.
-
- Note that this method automatically matches against the whole string
- (i.e. the regular expression is wrapped in ``'^$'``).
- """
- index = text.find('\n')
- if index >= 0:
- first_line = text[:index].lower()
- else:
- first_line = text.lower()
- if first_line.startswith('#!'):
- try:
- found = [x for x in split_path_re.split(first_line[2:].strip())
- if x and not x.startswith('-')][-1]
- except IndexError:
- return False
- regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
- if regex.search(found) is not None:
- return True
- return False
-
-
-def doctype_matches(text, regex):
- """Check if the doctype matches a regular expression (if present).
-
- Note that this method only checks the first part of a DOCTYPE.
- eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
- """
- m = doctype_lookup_re.match(text)
- if m is None:
- return False
- doctype = m.group(2)
- return re.compile(regex, re.I).match(doctype.strip()) is not None
-
-
-def html_doctype_matches(text):
- """Check if the file looks like it has a html doctype."""
- return doctype_matches(text, r'html')
-
-
-_looks_like_xml_cache = {}
-
-
-def looks_like_xml(text):
- """Check if a doctype exists or if we have some tags."""
- if xml_decl_re.match(text):
- return True
- key = hash(text)
- try:
- return _looks_like_xml_cache[key]
- except KeyError:
- m = doctype_lookup_re.match(text)
- if m is not None:
- return True
- rv = tag_re.search(text[:1000]) is not None
- _looks_like_xml_cache[key] = rv
- return rv
-
-
-# Python narrow build compatibility
-
-def _surrogatepair(c):
- # Given a unicode character code
- # with length greater than 16 bits,
- # return the corresponding pair of 16-bit surrogates.
- # From example D28 of:
- # http://www.unicode.org/book/ch03.pdf
- return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
-
-
-def unirange(a, b):
- """Returns a regular expression string to match the given non-BMP range."""
- if b < a:
- raise ValueError("Bad character range")
- if a < 0x10000 or b < 0x10000:
- raise ValueError("unirange is only defined for non-BMP ranges")
-
- if sys.maxunicode > 0xffff:
- # wide build
- return u'[%s-%s]' % (unichr(a), unichr(b))
- else:
- # narrow build stores surrogates, and the 're' module handles them
- # (incorrectly) as characters. Since there is still ordering among
- # these characters, expand the range to one that it understands. Some
- # background in http://bugs.python.org/issue3665 and
- # http://bugs.python.org/issue12749
- #
- # Additionally, the lower constants are using unichr rather than
- # literals because jython [which uses the wide path] can't load this
- # file if they are literals.
- ah, al = _surrogatepair(a)
- bh, bl = _surrogatepair(b)
- if ah == bh:
- return u'(?:%s[%s-%s])' % (unichr(ah), unichr(al), unichr(bl))
- else:
- buf = []
- buf.append(u'%s[%s-%s]' %
- (unichr(ah), unichr(al),
- ah == bh and unichr(bl) or unichr(0xdfff)))
- if bh - ah > 1:
- buf.append(u'[%s-%s][%s-%s]' %
- (unichr(ah+1), unichr(bh-1), unichr(0xdc00), unichr(0xdfff)))
- if ah != bh:
- buf.append(u'%s[%s-%s]' %
- (unichr(bh), unichr(0xdc00), unichr(bl)))
-
- return u'(?:' + u'|'.join(buf) + u')'
-
-
-def format_lines(var_name, seq, raw=False, indent_level=0):
- """Formats a sequence of strings for output."""
- lines = []
- base_indent = ' ' * indent_level * 4
- inner_indent = ' ' * (indent_level + 1) * 4
- lines.append(base_indent + var_name + ' = (')
- if raw:
- # These should be preformatted reprs of, say, tuples.
- for i in seq:
- lines.append(inner_indent + i + ',')
- else:
- for i in seq:
- # Force use of single quotes
- r = repr(i + '"')
- lines.append(inner_indent + r[:-2] + r[-1] + ',')
- lines.append(base_indent + ')')
- return '\n'.join(lines)
-
-
-def duplicates_removed(it, already_seen=()):
- """
- Returns a list with duplicates removed from the iterable `it`.
-
- Order is preserved.
- """
- lst = []
- seen = set()
- for i in it:
- if i in seen or i in already_seen:
- continue
- lst.append(i)
- seen.add(i)
- return lst
-
-
-class Future(object):
- """Generic class to defer some work.
-
- Handled specially in RegexLexerMeta, to support regex string construction at
- first use.
- """
- def get(self):
- raise NotImplementedError
-
-
-def guess_decode(text):
- """Decode *text* with guessed encoding.
-
- First try UTF-8; this should fail for non-UTF-8 encodings.
- Then try the preferred locale encoding.
- Fall back to latin-1, which always works.
- """
- try:
- text = text.decode('utf-8')
- return text, 'utf-8'
- except UnicodeDecodeError:
- try:
- import locale
- prefencoding = locale.getpreferredencoding()
- text = text.decode()
- return text, prefencoding
- except (UnicodeDecodeError, LookupError):
- text = text.decode('latin1')
- return text, 'latin1'
-
-
-def guess_decode_from_terminal(text, term):
- """Decode *text* coming from terminal *term*.
-
- First try the terminal encoding, if given.
- Then try UTF-8. Then try the preferred locale encoding.
- Fall back to latin-1, which always works.
- """
- if getattr(term, 'encoding', None):
- try:
- text = text.decode(term.encoding)
- except UnicodeDecodeError:
- pass
- else:
- return text, term.encoding
- return guess_decode(text)
-
-
-def terminal_encoding(term):
- """Return our best guess of encoding for the given *term*."""
- if getattr(term, 'encoding', None):
- return term.encoding
- import locale
- return locale.getpreferredencoding()
-
-
-# Python 2/3 compatibility
-
-if sys.version_info < (3, 0):
- unichr = unichr
- xrange = xrange
- string_types = (str, unicode)
- text_type = unicode
- u_prefix = 'u'
- iteritems = dict.iteritems
- itervalues = dict.itervalues
- import StringIO
- import cStringIO
- # unfortunately, io.StringIO in Python 2 doesn't accept str at all
- StringIO = StringIO.StringIO
- BytesIO = cStringIO.StringIO
-else:
- unichr = chr
- xrange = range
- string_types = (str,)
- text_type = str
- u_prefix = ''
- iteritems = dict.items
- itervalues = dict.values
- from io import StringIO, BytesIO, TextIOWrapper
-
- class UnclosingTextIOWrapper(TextIOWrapper):
- # Don't close underlying buffer on destruction.
- def close(self):
- self.flush()
-
-
-def add_metaclass(metaclass):
- """Class decorator for creating a class with a metaclass."""
- def wrapper(cls):
- orig_vars = cls.__dict__.copy()
- orig_vars.pop('__dict__', None)
- orig_vars.pop('__weakref__', None)
- for slots_var in orig_vars.get('__slots__', ()):
- orig_vars.pop(slots_var)
- return metaclass(cls.__name__, cls.__bases__, orig_vars)
- return wrapper
+xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)
+
+
+class ClassNotFound(ValueError):
+ """Raised if one of the lookup functions didn't find a matching class."""
+
+
+class OptionError(Exception):
+ pass
+
+
+def get_choice_opt(options, optname, allowed, default=None, normcase=False):
+ string = options.get(optname, default)
+ if normcase:
+ string = string.lower()
+ if string not in allowed:
+ raise OptionError('Value for option %s must be one of %s' %
+ (optname, ', '.join(map(str, allowed))))
+ return string
+
+
+def get_bool_opt(options, optname, default=None):
+ string = options.get(optname, default)
+ if isinstance(string, bool):
+ return string
+ elif isinstance(string, int):
+ return bool(string)
+ elif not isinstance(string, string_types):
+ raise OptionError('Invalid type %r for option %s; use '
+ '1/0, yes/no, true/false, on/off' % (
+ string, optname))
+ elif string.lower() in ('1', 'yes', 'true', 'on'):
+ return True
+ elif string.lower() in ('0', 'no', 'false', 'off'):
+ return False
+ else:
+ raise OptionError('Invalid value %r for option %s; use '
+ '1/0, yes/no, true/false, on/off' % (
+ string, optname))
+
+
+def get_int_opt(options, optname, default=None):
+ string = options.get(optname, default)
+ try:
+ return int(string)
+ except TypeError:
+ raise OptionError('Invalid type %r for option %s; you '
+ 'must give an integer value' % (
+ string, optname))
+ except ValueError:
+ raise OptionError('Invalid value %r for option %s; you '
+ 'must give an integer value' % (
+ string, optname))
+
+
+def get_list_opt(options, optname, default=None):
+ val = options.get(optname, default)
+ if isinstance(val, string_types):
+ return val.split()
+ elif isinstance(val, (list, tuple)):
+ return list(val)
+ else:
+ raise OptionError('Invalid type %r for option %s; you '
+ 'must give a list value' % (
+ val, optname))
+
+
+def docstring_headline(obj):
+ if not obj.__doc__:
+ return ''
+ res = []
+ for line in obj.__doc__.strip().splitlines():
+ if line.strip():
+ res.append(" " + line.strip())
+ else:
+ break
+ return ''.join(res).lstrip()
+
+
+def make_analysator(f):
+ """Return a static text analyser function that returns float values."""
+ def text_analyse(text):
+ try:
+ rv = f(text)
+ except Exception:
+ return 0.0
+ if not rv:
+ return 0.0
+ try:
+ return min(1.0, max(0.0, float(rv)))
+ except (ValueError, TypeError):
+ return 0.0
+ text_analyse.__doc__ = f.__doc__
+ return staticmethod(text_analyse)
+
+
+def shebang_matches(text, regex):
+ r"""Check if the given regular expression matches the last part of the
+ shebang if one exists.
+
+ >>> from pygments.util import shebang_matches
+ >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
+ True
+ >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
+ True
+ >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
+ False
+ >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
+ False
+ >>> shebang_matches('#!/usr/bin/startsomethingwith python',
+ ... r'python(2\.\d)?')
+ True
+
+ It also checks for common Windows executable file extensions::
+
+ >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
+ True
+
+ Parameters such as ``'-f'`` or ``'--foo'`` are ignored, so ``'perl'``
+ matches the same as ``'perl -e'``.
+
+ Note that this method automatically matches against the whole string
+ (i.e. the regular expression is wrapped in ``'^$'``).
+ """
+ index = text.find('\n')
+ if index >= 0:
+ first_line = text[:index].lower()
+ else:
+ first_line = text.lower()
+ if first_line.startswith('#!'):
+ try:
+ found = [x for x in split_path_re.split(first_line[2:].strip())
+ if x and not x.startswith('-')][-1]
+ except IndexError:
+ return False
+ regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
+ if regex.search(found) is not None:
+ return True
+ return False
+
+
+def doctype_matches(text, regex):
+ """Check if the doctype matches a regular expression (if present).
+
+ Note that this method only checks the first part of a DOCTYPE.
+ eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
+ """
+ m = doctype_lookup_re.match(text)
+ if m is None:
+ return False
+ doctype = m.group(2)
+ return re.compile(regex, re.I).match(doctype.strip()) is not None
+
+
+def html_doctype_matches(text):
+ """Check if the file looks like it has a html doctype."""
+ return doctype_matches(text, r'html')
+
+
+_looks_like_xml_cache = {}
+
+
+def looks_like_xml(text):
+ """Check if a doctype exists or if we have some tags."""
+ if xml_decl_re.match(text):
+ return True
+ key = hash(text)
+ try:
+ return _looks_like_xml_cache[key]
+ except KeyError:
+ m = doctype_lookup_re.match(text)
+ if m is not None:
+ return True
+ rv = tag_re.search(text[:1000]) is not None
+ _looks_like_xml_cache[key] = rv
+ return rv
+
+
+# Python narrow build compatibility
+
+def _surrogatepair(c):
+ # Given a unicode character code
+ # with length greater than 16 bits,
+ # return the corresponding pair of 16-bit surrogates.
+ # From example D28 of:
+ # http://www.unicode.org/book/ch03.pdf
+ return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
+
+
+def unirange(a, b):
+ """Returns a regular expression string to match the given non-BMP range."""
+ if b < a:
+ raise ValueError("Bad character range")
+ if a < 0x10000 or b < 0x10000:
+ raise ValueError("unirange is only defined for non-BMP ranges")
+
+ if sys.maxunicode > 0xffff:
+ # wide build
+ return u'[%s-%s]' % (unichr(a), unichr(b))
+ else:
+ # narrow build stores surrogates, and the 're' module handles them
+ # (incorrectly) as characters. Since there is still ordering among
+ # these characters, expand the range to one that it understands. Some
+ # background in http://bugs.python.org/issue3665 and
+ # http://bugs.python.org/issue12749
+ #
+ # Additionally, the lower constants are using unichr rather than
+ # literals because jython [which uses the wide path] can't load this
+ # file if they are literals.
+ ah, al = _surrogatepair(a)
+ bh, bl = _surrogatepair(b)
+ if ah == bh:
+ return u'(?:%s[%s-%s])' % (unichr(ah), unichr(al), unichr(bl))
+ else:
+ buf = []
+ buf.append(u'%s[%s-%s]' %
+ (unichr(ah), unichr(al),
+ ah == bh and unichr(bl) or unichr(0xdfff)))
+ if bh - ah > 1:
+ buf.append(u'[%s-%s][%s-%s]' %
+ (unichr(ah+1), unichr(bh-1), unichr(0xdc00), unichr(0xdfff)))
+ if ah != bh:
+ buf.append(u'%s[%s-%s]' %
+ (unichr(bh), unichr(0xdc00), unichr(bl)))
+
+ return u'(?:' + u'|'.join(buf) + u')'
+
+
+def format_lines(var_name, seq, raw=False, indent_level=0):
+ """Formats a sequence of strings for output."""
+ lines = []
+ base_indent = ' ' * indent_level * 4
+ inner_indent = ' ' * (indent_level + 1) * 4
+ lines.append(base_indent + var_name + ' = (')
+ if raw:
+ # These should be preformatted reprs of, say, tuples.
+ for i in seq:
+ lines.append(inner_indent + i + ',')
+ else:
+ for i in seq:
+ # Force use of single quotes
+ r = repr(i + '"')
+ lines.append(inner_indent + r[:-2] + r[-1] + ',')
+ lines.append(base_indent + ')')
+ return '\n'.join(lines)
+
+
+def duplicates_removed(it, already_seen=()):
+ """
+ Returns a list with duplicates removed from the iterable `it`.
+
+ Order is preserved.
+ """
+ lst = []
+ seen = set()
+ for i in it:
+ if i in seen or i in already_seen:
+ continue
+ lst.append(i)
+ seen.add(i)
+ return lst
+
+
+class Future(object):
+ """Generic class to defer some work.
+
+ Handled specially in RegexLexerMeta, to support regex string construction at
+ first use.
+ """
+ def get(self):
+ raise NotImplementedError
+
+
+def guess_decode(text):
+ """Decode *text* with guessed encoding.
+
+ First try UTF-8; this should fail for non-UTF-8 encodings.
+ Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ try:
+ text = text.decode('utf-8')
+ return text, 'utf-8'
+ except UnicodeDecodeError:
+ try:
+ import locale
+ prefencoding = locale.getpreferredencoding()
+ text = text.decode()
+ return text, prefencoding
+ except (UnicodeDecodeError, LookupError):
+ text = text.decode('latin1')
+ return text, 'latin1'
+
+
+def guess_decode_from_terminal(text, term):
+ """Decode *text* coming from terminal *term*.
+
+ First try the terminal encoding, if given.
+ Then try UTF-8. Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ if getattr(term, 'encoding', None):
+ try:
+ text = text.decode(term.encoding)
+ except UnicodeDecodeError:
+ pass
+ else:
+ return text, term.encoding
+ return guess_decode(text)
+
+
+def terminal_encoding(term):
+ """Return our best guess of encoding for the given *term*."""
+ if getattr(term, 'encoding', None):
+ return term.encoding
+ import locale
+ return locale.getpreferredencoding()
+
+
+# Python 2/3 compatibility
+
+if sys.version_info < (3, 0):
+ unichr = unichr
+ xrange = xrange
+ string_types = (str, unicode)
+ text_type = unicode
+ u_prefix = 'u'
+ iteritems = dict.iteritems
+ itervalues = dict.itervalues
+ import StringIO
+ import cStringIO
+ # unfortunately, io.StringIO in Python 2 doesn't accept str at all
+ StringIO = StringIO.StringIO
+ BytesIO = cStringIO.StringIO
+else:
+ unichr = chr
+ xrange = range
+ string_types = (str,)
+ text_type = str
+ u_prefix = ''
+ iteritems = dict.items
+ itervalues = dict.values
+ from io import StringIO, BytesIO, TextIOWrapper
+
+ class UnclosingTextIOWrapper(TextIOWrapper):
+ # Don't close underlying buffer on destruction.
+ def close(self):
+ self.flush()
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ for slots_var in orig_vars.get('__slots__', ()):
+ orig_vars.pop(slots_var)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
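The util module restored above bundles the small helpers the rest of Pygments leans on for option parsing, shebang sniffing and encoding guesses. A hedged usage sketch — the helper names come from the file in this diff, while the option dictionary and its keys are invented for illustration:

from pygments.util import (get_bool_opt, get_int_opt, get_list_opt,
                           shebang_matches, guess_decode)

options = {'stripnl': 'yes', 'tabsize': '8', 'filenames': '*.py *.pyw'}

strip_newlines = get_bool_opt(options, 'stripnl', True)   # -> True
tab_size = get_int_opt(options, 'tabsize', 4)             # -> 8
patterns = get_list_opt(options, 'filenames', [])         # -> ['*.py', '*.pyw']

# shebang_matches() anchors the regex itself, so only the interpreter name
# (plus an optional version suffix) needs to be supplied.
assert shebang_matches('#!/usr/bin/env python2.7\nprint(1)\n', r'python(2\.\d)?')

# guess_decode() takes bytes and returns (text, encoding_used).
text, enc = guess_decode(b'caf\xc3\xa9')                  # -> ('café', 'utf-8')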
diff --git a/contrib/python/Pygments/py2/ya.make b/contrib/python/Pygments/py2/ya.make
index 978caf3d68..aaede6838c 100644
--- a/contrib/python/Pygments/py2/ya.make
+++ b/contrib/python/Pygments/py2/ya.make
@@ -1,7 +1,7 @@
# Generated by devtools/yamaker (pypi).
PY2_LIBRARY()
-
+
OWNER(blinkov g:python-contrib)
VERSION(2.5.2)
@@ -14,231 +14,231 @@ NO_CHECK_IMPORTS(
pygments.sphinxext
)
-PY_SRCS(
- TOP_LEVEL
- pygments/__init__.py
+PY_SRCS(
+ TOP_LEVEL
+ pygments/__init__.py
pygments/__main__.py
- pygments/cmdline.py
- pygments/console.py
- pygments/filter.py
- pygments/filters/__init__.py
- pygments/formatter.py
- pygments/formatters/__init__.py
- pygments/formatters/_mapping.py
- pygments/formatters/bbcode.py
- pygments/formatters/html.py
- pygments/formatters/img.py
- pygments/formatters/irc.py
- pygments/formatters/latex.py
- pygments/formatters/other.py
- pygments/formatters/rtf.py
- pygments/formatters/svg.py
- pygments/formatters/terminal.py
- pygments/formatters/terminal256.py
- pygments/lexer.py
- pygments/lexers/__init__.py
- pygments/lexers/_asy_builtins.py
- pygments/lexers/_cl_builtins.py
- pygments/lexers/_cocoa_builtins.py
- pygments/lexers/_csound_builtins.py
- pygments/lexers/_lasso_builtins.py
- pygments/lexers/_lua_builtins.py
- pygments/lexers/_mapping.py
- pygments/lexers/_mql_builtins.py
- pygments/lexers/_openedge_builtins.py
- pygments/lexers/_php_builtins.py
- pygments/lexers/_postgres_builtins.py
- pygments/lexers/_scilab_builtins.py
- pygments/lexers/_sourcemod_builtins.py
- pygments/lexers/_stan_builtins.py
+ pygments/cmdline.py
+ pygments/console.py
+ pygments/filter.py
+ pygments/filters/__init__.py
+ pygments/formatter.py
+ pygments/formatters/__init__.py
+ pygments/formatters/_mapping.py
+ pygments/formatters/bbcode.py
+ pygments/formatters/html.py
+ pygments/formatters/img.py
+ pygments/formatters/irc.py
+ pygments/formatters/latex.py
+ pygments/formatters/other.py
+ pygments/formatters/rtf.py
+ pygments/formatters/svg.py
+ pygments/formatters/terminal.py
+ pygments/formatters/terminal256.py
+ pygments/lexer.py
+ pygments/lexers/__init__.py
+ pygments/lexers/_asy_builtins.py
+ pygments/lexers/_cl_builtins.py
+ pygments/lexers/_cocoa_builtins.py
+ pygments/lexers/_csound_builtins.py
+ pygments/lexers/_lasso_builtins.py
+ pygments/lexers/_lua_builtins.py
+ pygments/lexers/_mapping.py
+ pygments/lexers/_mql_builtins.py
+ pygments/lexers/_openedge_builtins.py
+ pygments/lexers/_php_builtins.py
+ pygments/lexers/_postgres_builtins.py
+ pygments/lexers/_scilab_builtins.py
+ pygments/lexers/_sourcemod_builtins.py
+ pygments/lexers/_stan_builtins.py
pygments/lexers/_stata_builtins.py
pygments/lexers/_tsql_builtins.py
pygments/lexers/_vbscript_builtins.py
- pygments/lexers/_vim_builtins.py
- pygments/lexers/actionscript.py
- pygments/lexers/agile.py
- pygments/lexers/algebra.py
- pygments/lexers/ambient.py
+ pygments/lexers/_vim_builtins.py
+ pygments/lexers/actionscript.py
+ pygments/lexers/agile.py
+ pygments/lexers/algebra.py
+ pygments/lexers/ambient.py
pygments/lexers/ampl.py
- pygments/lexers/apl.py
- pygments/lexers/archetype.py
- pygments/lexers/asm.py
- pygments/lexers/automation.py
- pygments/lexers/basic.py
+ pygments/lexers/apl.py
+ pygments/lexers/archetype.py
+ pygments/lexers/asm.py
+ pygments/lexers/automation.py
+ pygments/lexers/basic.py
pygments/lexers/bibtex.py
pygments/lexers/boa.py
- pygments/lexers/business.py
- pygments/lexers/c_cpp.py
- pygments/lexers/c_like.py
+ pygments/lexers/business.py
+ pygments/lexers/c_cpp.py
+ pygments/lexers/c_like.py
pygments/lexers/capnproto.py
- pygments/lexers/chapel.py
+ pygments/lexers/chapel.py
pygments/lexers/clean.py
- pygments/lexers/compiled.py
- pygments/lexers/configs.py
- pygments/lexers/console.py
+ pygments/lexers/compiled.py
+ pygments/lexers/configs.py
+ pygments/lexers/console.py
pygments/lexers/crystal.py
- pygments/lexers/csound.py
- pygments/lexers/css.py
- pygments/lexers/d.py
- pygments/lexers/dalvik.py
- pygments/lexers/data.py
- pygments/lexers/diff.py
- pygments/lexers/dotnet.py
- pygments/lexers/dsls.py
- pygments/lexers/dylan.py
- pygments/lexers/ecl.py
- pygments/lexers/eiffel.py
- pygments/lexers/elm.py
+ pygments/lexers/csound.py
+ pygments/lexers/css.py
+ pygments/lexers/d.py
+ pygments/lexers/dalvik.py
+ pygments/lexers/data.py
+ pygments/lexers/diff.py
+ pygments/lexers/dotnet.py
+ pygments/lexers/dsls.py
+ pygments/lexers/dylan.py
+ pygments/lexers/ecl.py
+ pygments/lexers/eiffel.py
+ pygments/lexers/elm.py
pygments/lexers/email.py
- pygments/lexers/erlang.py
- pygments/lexers/esoteric.py
- pygments/lexers/ezhil.py
- pygments/lexers/factor.py
- pygments/lexers/fantom.py
- pygments/lexers/felix.py
+ pygments/lexers/erlang.py
+ pygments/lexers/esoteric.py
+ pygments/lexers/ezhil.py
+ pygments/lexers/factor.py
+ pygments/lexers/fantom.py
+ pygments/lexers/felix.py
pygments/lexers/floscript.py
pygments/lexers/forth.py
- pygments/lexers/fortran.py
- pygments/lexers/foxpro.py
+ pygments/lexers/fortran.py
+ pygments/lexers/foxpro.py
pygments/lexers/freefem.py
- pygments/lexers/functional.py
- pygments/lexers/go.py
- pygments/lexers/grammar_notation.py
- pygments/lexers/graph.py
- pygments/lexers/graphics.py
- pygments/lexers/haskell.py
- pygments/lexers/haxe.py
- pygments/lexers/hdl.py
- pygments/lexers/hexdump.py
- pygments/lexers/html.py
- pygments/lexers/idl.py
- pygments/lexers/igor.py
- pygments/lexers/inferno.py
- pygments/lexers/installers.py
- pygments/lexers/int_fiction.py
- pygments/lexers/iolang.py
- pygments/lexers/j.py
- pygments/lexers/javascript.py
- pygments/lexers/julia.py
- pygments/lexers/jvm.py
- pygments/lexers/lisp.py
- pygments/lexers/make.py
- pygments/lexers/markup.py
- pygments/lexers/math.py
- pygments/lexers/matlab.py
+ pygments/lexers/functional.py
+ pygments/lexers/go.py
+ pygments/lexers/grammar_notation.py
+ pygments/lexers/graph.py
+ pygments/lexers/graphics.py
+ pygments/lexers/haskell.py
+ pygments/lexers/haxe.py
+ pygments/lexers/hdl.py
+ pygments/lexers/hexdump.py
+ pygments/lexers/html.py
+ pygments/lexers/idl.py
+ pygments/lexers/igor.py
+ pygments/lexers/inferno.py
+ pygments/lexers/installers.py
+ pygments/lexers/int_fiction.py
+ pygments/lexers/iolang.py
+ pygments/lexers/j.py
+ pygments/lexers/javascript.py
+ pygments/lexers/julia.py
+ pygments/lexers/jvm.py
+ pygments/lexers/lisp.py
+ pygments/lexers/make.py
+ pygments/lexers/markup.py
+ pygments/lexers/math.py
+ pygments/lexers/matlab.py
pygments/lexers/mime.py
- pygments/lexers/ml.py
- pygments/lexers/modeling.py
- pygments/lexers/modula2.py
+ pygments/lexers/ml.py
+ pygments/lexers/modeling.py
+ pygments/lexers/modula2.py
pygments/lexers/monte.py
pygments/lexers/ncl.py
- pygments/lexers/nimrod.py
- pygments/lexers/nit.py
- pygments/lexers/nix.py
- pygments/lexers/oberon.py
- pygments/lexers/objective.py
- pygments/lexers/ooc.py
- pygments/lexers/other.py
- pygments/lexers/parasail.py
- pygments/lexers/parsers.py
- pygments/lexers/pascal.py
- pygments/lexers/pawn.py
- pygments/lexers/perl.py
- pygments/lexers/php.py
+ pygments/lexers/nimrod.py
+ pygments/lexers/nit.py
+ pygments/lexers/nix.py
+ pygments/lexers/oberon.py
+ pygments/lexers/objective.py
+ pygments/lexers/ooc.py
+ pygments/lexers/other.py
+ pygments/lexers/parasail.py
+ pygments/lexers/parsers.py
+ pygments/lexers/pascal.py
+ pygments/lexers/pawn.py
+ pygments/lexers/perl.py
+ pygments/lexers/php.py
pygments/lexers/pony.py
- pygments/lexers/praat.py
- pygments/lexers/prolog.py
- pygments/lexers/python.py
- pygments/lexers/qvt.py
- pygments/lexers/r.py
- pygments/lexers/rdf.py
- pygments/lexers/rebol.py
- pygments/lexers/resource.py
+ pygments/lexers/praat.py
+ pygments/lexers/prolog.py
+ pygments/lexers/python.py
+ pygments/lexers/qvt.py
+ pygments/lexers/r.py
+ pygments/lexers/rdf.py
+ pygments/lexers/rebol.py
+ pygments/lexers/resource.py
pygments/lexers/rnc.py
- pygments/lexers/roboconf.py
- pygments/lexers/robotframework.py
- pygments/lexers/ruby.py
- pygments/lexers/rust.py
+ pygments/lexers/roboconf.py
+ pygments/lexers/robotframework.py
+ pygments/lexers/ruby.py
+ pygments/lexers/rust.py
pygments/lexers/sas.py
pygments/lexers/scdoc.py
- pygments/lexers/scripting.py
+ pygments/lexers/scripting.py
pygments/lexers/sgf.py
- pygments/lexers/shell.py
+ pygments/lexers/shell.py
pygments/lexers/slash.py
- pygments/lexers/smalltalk.py
+ pygments/lexers/smalltalk.py
pygments/lexers/smv.py
- pygments/lexers/snobol.py
+ pygments/lexers/snobol.py
pygments/lexers/solidity.py
- pygments/lexers/special.py
- pygments/lexers/sql.py
+ pygments/lexers/special.py
+ pygments/lexers/sql.py
pygments/lexers/stata.py
- pygments/lexers/supercollider.py
- pygments/lexers/tcl.py
- pygments/lexers/templates.py
+ pygments/lexers/supercollider.py
+ pygments/lexers/tcl.py
+ pygments/lexers/templates.py
pygments/lexers/teraterm.py
- pygments/lexers/testing.py
- pygments/lexers/text.py
- pygments/lexers/textedit.py
- pygments/lexers/textfmts.py
- pygments/lexers/theorem.py
- pygments/lexers/trafficscript.py
+ pygments/lexers/testing.py
+ pygments/lexers/text.py
+ pygments/lexers/textedit.py
+ pygments/lexers/textfmts.py
+ pygments/lexers/theorem.py
+ pygments/lexers/trafficscript.py
pygments/lexers/typoscript.py
pygments/lexers/unicon.py
- pygments/lexers/urbi.py
+ pygments/lexers/urbi.py
pygments/lexers/varnish.py
pygments/lexers/verification.py
- pygments/lexers/web.py
- pygments/lexers/webmisc.py
+ pygments/lexers/web.py
+ pygments/lexers/webmisc.py
pygments/lexers/whiley.py
- pygments/lexers/x10.py
+ pygments/lexers/x10.py
pygments/lexers/xorg.py
pygments/lexers/zig.py
- pygments/modeline.py
- pygments/plugin.py
- pygments/regexopt.py
- pygments/scanner.py
- pygments/sphinxext.py
- pygments/style.py
- pygments/styles/__init__.py
+ pygments/modeline.py
+ pygments/plugin.py
+ pygments/regexopt.py
+ pygments/scanner.py
+ pygments/sphinxext.py
+ pygments/style.py
+ pygments/styles/__init__.py
pygments/styles/abap.py
- pygments/styles/algol.py
- pygments/styles/algol_nu.py
- pygments/styles/arduino.py
- pygments/styles/autumn.py
- pygments/styles/borland.py
- pygments/styles/bw.py
- pygments/styles/colorful.py
- pygments/styles/default.py
- pygments/styles/emacs.py
- pygments/styles/friendly.py
- pygments/styles/fruity.py
- pygments/styles/igor.py
+ pygments/styles/algol.py
+ pygments/styles/algol_nu.py
+ pygments/styles/arduino.py
+ pygments/styles/autumn.py
+ pygments/styles/borland.py
+ pygments/styles/bw.py
+ pygments/styles/colorful.py
+ pygments/styles/default.py
+ pygments/styles/emacs.py
+ pygments/styles/friendly.py
+ pygments/styles/fruity.py
+ pygments/styles/igor.py
pygments/styles/inkpot.py
- pygments/styles/lovelace.py
- pygments/styles/manni.py
- pygments/styles/monokai.py
- pygments/styles/murphy.py
- pygments/styles/native.py
- pygments/styles/paraiso_dark.py
- pygments/styles/paraiso_light.py
- pygments/styles/pastie.py
- pygments/styles/perldoc.py
+ pygments/styles/lovelace.py
+ pygments/styles/manni.py
+ pygments/styles/monokai.py
+ pygments/styles/murphy.py
+ pygments/styles/native.py
+ pygments/styles/paraiso_dark.py
+ pygments/styles/paraiso_light.py
+ pygments/styles/pastie.py
+ pygments/styles/perldoc.py
pygments/styles/rainbow_dash.py
- pygments/styles/rrt.py
+ pygments/styles/rrt.py
pygments/styles/sas.py
pygments/styles/solarized.py
pygments/styles/stata_dark.py
pygments/styles/stata_light.py
- pygments/styles/tango.py
- pygments/styles/trac.py
- pygments/styles/vim.py
- pygments/styles/vs.py
- pygments/styles/xcode.py
- pygments/token.py
- pygments/unistring.py
- pygments/util.py
-)
-
+ pygments/styles/tango.py
+ pygments/styles/trac.py
+ pygments/styles/vim.py
+ pygments/styles/vs.py
+ pygments/styles/xcode.py
+ pygments/token.py
+ pygments/unistring.py
+ pygments/util.py
+)
+
RESOURCE_FILES(
PREFIX contrib/python/Pygments/py2/
.dist-info/METADATA
@@ -246,4 +246,4 @@ RESOURCE_FILES(
.dist-info/top_level.txt
)
-END()
+END()
diff --git a/contrib/python/Pygments/py3/LICENSE b/contrib/python/Pygments/py3/LICENSE
index e1b15663d9..816d50bfb6 100644
--- a/contrib/python/Pygments/py3/LICENSE
+++ b/contrib/python/Pygments/py3/LICENSE
@@ -1,25 +1,25 @@
Copyright (c) 2006-2021 by the respective authors (see AUTHORS file).
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-* Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/contrib/python/Pygments/py3/pygments/__init__.py b/contrib/python/Pygments/py3/pygments/__init__.py
index 22c50b356a..958ecca008 100644
--- a/contrib/python/Pygments/py3/pygments/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/__init__.py
@@ -1,83 +1,83 @@
-"""
- Pygments
- ~~~~~~~~
-
- Pygments is a syntax highlighting package written in Python.
-
- It is a generic syntax highlighter for general use in all kinds of software
- such as forum systems, wikis or other applications that need to prettify
- source code. Highlights are:
-
- * a wide range of common languages and markup formats is supported
- * special attention is paid to details, increasing quality by a fair amount
- * support for new languages and formats is added easily
- * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
- formats that PIL supports, and ANSI sequences
- * it is usable as a command-line tool and as a library
- * ... and it highlights even Brainfuck!
-
+"""
+ Pygments
+ ~~~~~~~~
+
+ Pygments is a syntax highlighting package written in Python.
+
+ It is a generic syntax highlighter for general use in all kinds of software
+ such as forum systems, wikis or other applications that need to prettify
+ source code. Highlights are:
+
+ * a wide range of common languages and markup formats is supported
+ * special attention is paid to details, increasing quality by a fair amount
+ * support for new languages and formats is added easily
+ * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
+ formats that PIL supports, and ANSI sequences
+ * it is usable as a command-line tool and as a library
+ * ... and it highlights even Brainfuck!
+
The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``.
-
+
.. _Pygments master branch:
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
-
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
+ :license: BSD, see LICENSE for details.
+"""
from io import StringIO, BytesIO
-
+
__version__ = '2.11.2'
-__docformat__ = 'restructuredtext'
-
-__all__ = ['lex', 'format', 'highlight']
-
-
-def lex(code, lexer):
- """
- Lex ``code`` with ``lexer`` and return an iterable of tokens.
- """
- try:
- return lexer.get_tokens(code)
- except TypeError as err:
+__docformat__ = 'restructuredtext'
+
+__all__ = ['lex', 'format', 'highlight']
+
+
+def lex(code, lexer):
+ """
+ Lex ``code`` with ``lexer`` and return an iterable of tokens.
+ """
+ try:
+ return lexer.get_tokens(code)
+ except TypeError as err:
if (isinstance(err.args[0], str) and
('unbound method get_tokens' in err.args[0] or
'missing 1 required positional argument' in err.args[0])):
- raise TypeError('lex() argument must be a lexer instance, '
- 'not a class')
- raise
-
-
-def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin
- """
- Format a tokenlist ``tokens`` with the formatter ``formatter``.
-
- If ``outfile`` is given and a valid file object (an object
- with a ``write`` method), the result will be written to it, otherwise
- it is returned as a string.
- """
- try:
- if not outfile:
- realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
- formatter.format(tokens, realoutfile)
- return realoutfile.getvalue()
- else:
- formatter.format(tokens, outfile)
- except TypeError as err:
+ raise TypeError('lex() argument must be a lexer instance, '
+ 'not a class')
+ raise
+
+
+def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin
+ """
+ Format a tokenlist ``tokens`` with the formatter ``formatter``.
+
+ If ``outfile`` is given and a valid file object (an object
+ with a ``write`` method), the result will be written to it, otherwise
+ it is returned as a string.
+ """
+ try:
+ if not outfile:
+ realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO()
+ formatter.format(tokens, realoutfile)
+ return realoutfile.getvalue()
+ else:
+ formatter.format(tokens, outfile)
+ except TypeError as err:
if (isinstance(err.args[0], str) and
('unbound method format' in err.args[0] or
'missing 1 required positional argument' in err.args[0])):
- raise TypeError('format() argument must be a formatter instance, '
- 'not a class')
- raise
-
-
-def highlight(code, lexer, formatter, outfile=None):
- """
- Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
-
- If ``outfile`` is given and a valid file object (an object
- with a ``write`` method), the result will be written to it, otherwise
- it is returned as a string.
- """
- return format(lex(code, lexer), formatter, outfile)
-
+ raise TypeError('format() argument must be a formatter instance, '
+ 'not a class')
+ raise
+
+
+def highlight(code, lexer, formatter, outfile=None):
+ """
+ Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.
+
+ If ``outfile`` is given and a valid file object (an object
+ with a ``write`` method), the result will be written to it, otherwise
+ it is returned as a string.
+ """
+ return format(lex(code, lexer), formatter, outfile)
+
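The three functions restored above are the whole public surface of the package: highlight() is simply format(lex(code, lexer), formatter, outfile). A minimal sketch, assuming the stock PythonLexer and HtmlFormatter that ship with Pygments; the sample source string is invented:

from pygments import highlight, lex, format
from pygments.lexers.python import PythonLexer
from pygments.formatters.html import HtmlFormatter

source = "def greet(name):\n    return 'hello ' + name\n"

# One-shot form: lex + format in a single call, returned as a string
# because no outfile is given.
html = highlight(source, PythonLexer(), HtmlFormatter())

# The same result in two explicit steps.
tokens = lex(source, PythonLexer())
html2 = format(tokens, HtmlFormatter())
assert html == html2

# Passing a class instead of an instance triggers the TypeError guarded
# against in the code above:
# highlight(source, PythonLexer, HtmlFormatter())  # -> TypeError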
diff --git a/contrib/python/Pygments/py3/pygments/cmdline.py b/contrib/python/Pygments/py3/pygments/cmdline.py
index 4f688c7f8e..927b2b0ae9 100644
--- a/contrib/python/Pygments/py3/pygments/cmdline.py
+++ b/contrib/python/Pygments/py3/pygments/cmdline.py
@@ -1,140 +1,140 @@
-"""
- pygments.cmdline
- ~~~~~~~~~~~~~~~~
-
- Command line interface.
-
+"""
+ pygments.cmdline
+ ~~~~~~~~~~~~~~~~
+
+ Command line interface.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import os
-import sys
+import sys
import shutil
import argparse
-from textwrap import dedent
-
-from pygments import __version__, highlight
-from pygments.util import ClassNotFound, OptionError, docstring_headline, \
+from textwrap import dedent
+
+from pygments import __version__, highlight
+from pygments.util import ClassNotFound, OptionError, docstring_headline, \
guess_decode, guess_decode_from_terminal, terminal_encoding, \
UnclosingTextIOWrapper
-from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
+from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
-from pygments.lexers.special import TextLexer
-from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
-from pygments.formatters import get_all_formatters, get_formatter_by_name, \
+from pygments.lexers.special import TextLexer
+from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
+from pygments.formatters import get_all_formatters, get_formatter_by_name, \
load_formatter_from_file, get_formatter_for_filename, find_formatter_class
-from pygments.formatters.terminal import TerminalFormatter
+from pygments.formatters.terminal import TerminalFormatter
from pygments.formatters.terminal256 import Terminal256Formatter
-from pygments.filters import get_all_filters, find_filter_class
-from pygments.styles import get_all_styles, get_style_by_name
-
-
-def _parse_options(o_strs):
- opts = {}
- if not o_strs:
- return opts
- for o_str in o_strs:
- if not o_str.strip():
- continue
- o_args = o_str.split(',')
- for o_arg in o_args:
- o_arg = o_arg.strip()
- try:
- o_key, o_val = o_arg.split('=', 1)
- o_key = o_key.strip()
- o_val = o_val.strip()
- except ValueError:
- opts[o_arg] = True
- else:
- opts[o_key] = o_val
- return opts
-
-
-def _parse_filters(f_strs):
- filters = []
- if not f_strs:
- return filters
- for f_str in f_strs:
- if ':' in f_str:
- fname, fopts = f_str.split(':', 1)
- filters.append((fname, _parse_options([fopts])))
- else:
- filters.append((f_str, {}))
- return filters
-
-
-def _print_help(what, name):
- try:
- if what == 'lexer':
- cls = get_lexer_by_name(name)
- print("Help on the %s lexer:" % cls.name)
- print(dedent(cls.__doc__))
- elif what == 'formatter':
- cls = find_formatter_class(name)
- print("Help on the %s formatter:" % cls.name)
- print(dedent(cls.__doc__))
- elif what == 'filter':
- cls = find_filter_class(name)
- print("Help on the %s filter:" % name)
- print(dedent(cls.__doc__))
- return 0
- except (AttributeError, ValueError):
- print("%s not found!" % what, file=sys.stderr)
- return 1
-
-
-def _print_list(what):
- if what == 'lexer':
- print()
- print("Lexers:")
- print("~~~~~~~")
-
- info = []
- for fullname, names, exts, _ in get_all_lexers():
- tup = (', '.join(names)+':', fullname,
- exts and '(filenames ' + ', '.join(exts) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* %s\n %s %s') % i)
-
- elif what == 'formatter':
- print()
- print("Formatters:")
- print("~~~~~~~~~~~")
-
- info = []
- for cls in get_all_formatters():
- doc = docstring_headline(cls)
- tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
- '(filenames ' + ', '.join(cls.filenames) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* %s\n %s %s') % i)
-
- elif what == 'filter':
- print()
- print("Filters:")
- print("~~~~~~~~")
-
- for name in get_all_filters():
- cls = find_filter_class(name)
- print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
-
- elif what == 'style':
- print()
- print("Styles:")
- print("~~~~~~~")
-
- for name in get_all_styles():
- cls = get_style_by_name(name)
- print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
-
-
+from pygments.filters import get_all_filters, find_filter_class
+from pygments.styles import get_all_styles, get_style_by_name
+
+
+def _parse_options(o_strs):
+ opts = {}
+ if not o_strs:
+ return opts
+ for o_str in o_strs:
+ if not o_str.strip():
+ continue
+ o_args = o_str.split(',')
+ for o_arg in o_args:
+ o_arg = o_arg.strip()
+ try:
+ o_key, o_val = o_arg.split('=', 1)
+ o_key = o_key.strip()
+ o_val = o_val.strip()
+ except ValueError:
+ opts[o_arg] = True
+ else:
+ opts[o_key] = o_val
+ return opts
+
+
+def _parse_filters(f_strs):
+ filters = []
+ if not f_strs:
+ return filters
+ for f_str in f_strs:
+ if ':' in f_str:
+ fname, fopts = f_str.split(':', 1)
+ filters.append((fname, _parse_options([fopts])))
+ else:
+ filters.append((f_str, {}))
+ return filters
+
+
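# Usage sketch (illustrative, not part of this module): _parse_options turns the
# repeated "-O key=val,flag" strings into a dict, and _parse_filters turns
# "-F name:key=val" strings into (name, options) pairs.
#
#     _parse_options(['style=monokai,linenos'])
#         -> {'style': 'monokai', 'linenos': True}
#     _parse_filters(['keywordcase:case=upper', 'tokenmerge'])
#         -> [('keywordcase', {'case': 'upper'}), ('tokenmerge', {})]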
+def _print_help(what, name):
+ try:
+ if what == 'lexer':
+ cls = get_lexer_by_name(name)
+ print("Help on the %s lexer:" % cls.name)
+ print(dedent(cls.__doc__))
+ elif what == 'formatter':
+ cls = find_formatter_class(name)
+ print("Help on the %s formatter:" % cls.name)
+ print(dedent(cls.__doc__))
+ elif what == 'filter':
+ cls = find_filter_class(name)
+ print("Help on the %s filter:" % name)
+ print(dedent(cls.__doc__))
+ return 0
+ except (AttributeError, ValueError):
+ print("%s not found!" % what, file=sys.stderr)
+ return 1
+
+
+def _print_list(what):
+ if what == 'lexer':
+ print()
+ print("Lexers:")
+ print("~~~~~~~")
+
+ info = []
+ for fullname, names, exts, _ in get_all_lexers():
+ tup = (', '.join(names)+':', fullname,
+ exts and '(filenames ' + ', '.join(exts) + ')' or '')
+ info.append(tup)
+ info.sort()
+ for i in info:
+ print(('* %s\n %s %s') % i)
+
+ elif what == 'formatter':
+ print()
+ print("Formatters:")
+ print("~~~~~~~~~~~")
+
+ info = []
+ for cls in get_all_formatters():
+ doc = docstring_headline(cls)
+ tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
+ '(filenames ' + ', '.join(cls.filenames) + ')' or '')
+ info.append(tup)
+ info.sort()
+ for i in info:
+ print(('* %s\n %s %s') % i)
+
+ elif what == 'filter':
+ print()
+ print("Filters:")
+ print("~~~~~~~~")
+
+ for name in get_all_filters():
+ cls = find_filter_class(name)
+ print("* " + name + ':')
+ print(" %s" % docstring_headline(cls))
+
+ elif what == 'style':
+ print()
+ print("Styles:")
+ print("~~~~~~~")
+
+ for name in get_all_styles():
+ cls = get_style_by_name(name)
+ print("* " + name + ':')
+ print(" %s" % docstring_headline(cls))
+
+
def _print_list_as_json(requested_items):
import json
result = {}
@@ -182,17 +182,17 @@ def _print_list_as_json(requested_items):
def main_inner(parser, argns):
if argns.help:
parser.print_help()
- return 0
-
+ return 0
+
if argns.V:
print('Pygments version %s, (c) 2006-2021 by Georg Brandl, Matthäus '
'Chajdas and contributors.' % __version__)
- return 0
-
+ return 0
+
def is_only_option(opt):
return not any(v for (k, v) in vars(argns).items() if k != opt)
- # handle ``pygmentize -L``
+ # handle ``pygmentize -L``
if argns.L is not None:
arg_set = set()
for k, v in vars(argns).items():
@@ -204,9 +204,9 @@ def main_inner(parser, argns):
if arg_set:
parser.print_help(sys.stderr)
- return 2
+ return 2
- # print version
+ # print version
if not argns.json:
main(['', '-V'])
allowed_types = {'lexer', 'formatter', 'filter', 'style'}
@@ -221,44 +221,44 @@ def main_inner(parser, argns):
_print_list(arg)
else:
_print_list_as_json(largs)
- return 0
-
- # handle ``pygmentize -H``
+ return 0
+
+ # handle ``pygmentize -H``
if argns.H:
if not is_only_option('H'):
parser.print_help(sys.stderr)
- return 2
+ return 2
what, name = argns.H
- if what not in ('lexer', 'formatter', 'filter'):
+ if what not in ('lexer', 'formatter', 'filter'):
parser.print_help(sys.stderr)
- return 2
- return _print_help(what, name)
-
- # parse -O options
+ return 2
+ return _print_help(what, name)
+
+ # parse -O options
parsed_opts = _parse_options(argns.O or [])
-
- # parse -P options
+
+ # parse -P options
for p_opt in argns.P or []:
- try:
- name, value = p_opt.split('=', 1)
- except ValueError:
- parsed_opts[p_opt] = True
- else:
- parsed_opts[name] = value
-
- # encodings
- inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
- outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
-
- # handle ``pygmentize -N``
+ try:
+ name, value = p_opt.split('=', 1)
+ except ValueError:
+ parsed_opts[p_opt] = True
+ else:
+ parsed_opts[name] = value
+
+ # encodings
+ inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
+ outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
+
+ # handle ``pygmentize -N``
if argns.N:
lexer = find_lexer_class_for_filename(argns.N)
- if lexer is None:
- lexer = TextLexer
-
- print(lexer.aliases[0])
- return 0
-
+ if lexer is None:
+ lexer = TextLexer
+
+ print(lexer.aliases[0])
+ return 0
+
# handle ``pygmentize -C``
if argns.C:
inp = sys.stdin.buffer.read()
@@ -270,45 +270,45 @@ def main_inner(parser, argns):
print(lexer.aliases[0])
return 0
- # handle ``pygmentize -S``
+ # handle ``pygmentize -S``
S_opt = argns.S
a_opt = argns.a
- if S_opt is not None:
+ if S_opt is not None:
f_opt = argns.f
- if not f_opt:
+ if not f_opt:
parser.print_help(sys.stderr)
- return 2
+ return 2
if argns.l or argns.INPUTFILE:
parser.print_help(sys.stderr)
- return 2
-
- try:
- parsed_opts['style'] = S_opt
- fmter = get_formatter_by_name(f_opt, **parsed_opts)
- except ClassNotFound as err:
- print(err, file=sys.stderr)
- return 1
-
- print(fmter.get_style_defs(a_opt or ''))
- return 0
-
- # if no -S is given, -a is not allowed
+ return 2
+
+ try:
+ parsed_opts['style'] = S_opt
+ fmter = get_formatter_by_name(f_opt, **parsed_opts)
+ except ClassNotFound as err:
+ print(err, file=sys.stderr)
+ return 1
+
+ print(fmter.get_style_defs(a_opt or ''))
+ return 0
+
+ # if no -S is given, -a is not allowed
if argns.a is not None:
parser.print_help(sys.stderr)
- return 2
-
- # parse -F options
+ return 2
+
+ # parse -F options
F_opts = _parse_filters(argns.F or [])
-
+
# -x: allow custom (eXternal) lexers and formatters
allow_custom_lexer_formatter = bool(argns.x)
- # select lexer
- lexer = None
-
- # given by name?
+ # select lexer
+ lexer = None
+
+ # given by name?
lexername = argns.l
- if lexername:
+ if lexername:
# custom lexer, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in lexername:
try:
@@ -336,74 +336,74 @@ def main_inner(parser, argns):
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1
-
- # read input code
- code = None
-
+
+ # read input code
+ code = None
+
if argns.INPUTFILE:
if argns.s:
- print('Error: -s option not usable when input file specified',
- file=sys.stderr)
- return 2
-
+ print('Error: -s option not usable when input file specified',
+ file=sys.stderr)
+ return 2
+
infn = argns.INPUTFILE
- try:
- with open(infn, 'rb') as infp:
- code = infp.read()
- except Exception as err:
- print('Error: cannot read infile:', err, file=sys.stderr)
- return 1
- if not inencoding:
- code, inencoding = guess_decode(code)
-
- # do we have to guess the lexer?
- if not lexer:
- try:
- lexer = get_lexer_for_filename(infn, code, **parsed_opts)
- except ClassNotFound as err:
+ try:
+ with open(infn, 'rb') as infp:
+ code = infp.read()
+ except Exception as err:
+ print('Error: cannot read infile:', err, file=sys.stderr)
+ return 1
+ if not inencoding:
+ code, inencoding = guess_decode(code)
+
+ # do we have to guess the lexer?
+ if not lexer:
+ try:
+ lexer = get_lexer_for_filename(infn, code, **parsed_opts)
+ except ClassNotFound as err:
if argns.g:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
- else:
- print('Error:', err, file=sys.stderr)
- return 1
- except OptionError as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
+ try:
+ lexer = guess_lexer(code, **parsed_opts)
+ except ClassNotFound:
+ lexer = TextLexer(**parsed_opts)
+ else:
+ print('Error:', err, file=sys.stderr)
+ return 1
+ except OptionError as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+
elif not argns.s: # treat stdin as full file (-s support is later)
- # read code from terminal, always in binary mode since we want to
- # decode ourselves and be tolerant with it
+ # read code from terminal, always in binary mode since we want to
+ # decode ourselves and be tolerant with it
code = sys.stdin.buffer.read() # use .buffer to get a binary stream
- if not inencoding:
- code, inencoding = guess_decode_from_terminal(code, sys.stdin)
- # else the lexer will do the decoding
- if not lexer:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
-
- else: # -s option needs a lexer with -l
- if not lexer:
- print('Error: when using -s a lexer has to be selected with -l',
- file=sys.stderr)
- return 2
-
- # process filters
- for fname, fopts in F_opts:
- try:
- lexer.add_filter(fname, **fopts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- # select formatter
+ if not inencoding:
+ code, inencoding = guess_decode_from_terminal(code, sys.stdin)
+ # else the lexer will do the decoding
+ if not lexer:
+ try:
+ lexer = guess_lexer(code, **parsed_opts)
+ except ClassNotFound:
+ lexer = TextLexer(**parsed_opts)
+
+ else: # -s option needs a lexer with -l
+ if not lexer:
+ print('Error: when using -s a lexer has to be selected with -l',
+ file=sys.stderr)
+ return 2
+
+ # process filters
+ for fname, fopts in F_opts:
+ try:
+ lexer.add_filter(fname, **fopts)
+ except ClassNotFound as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+
+ # select formatter
outfn = argns.o
fmter = argns.f
- if fmter:
+ if fmter:
# custom formatter, located relative to user's cwd
if allow_custom_lexer_formatter and '.py' in fmter:
try:
@@ -430,88 +430,88 @@ def main_inner(parser, argns):
except (OptionError, ClassNotFound) as err:
print('Error:', err, file=sys.stderr)
return 1
-
- if outfn:
- if not fmter:
- try:
- fmter = get_formatter_for_filename(outfn, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
- try:
- outfile = open(outfn, 'wb')
- except Exception as err:
- print('Error: cannot open outfile:', err, file=sys.stderr)
- return 1
- else:
- if not fmter:
+
+ if outfn:
+ if not fmter:
+ try:
+ fmter = get_formatter_for_filename(outfn, **parsed_opts)
+ except (OptionError, ClassNotFound) as err:
+ print('Error:', err, file=sys.stderr)
+ return 1
+ try:
+ outfile = open(outfn, 'wb')
+ except Exception as err:
+ print('Error: cannot open outfile:', err, file=sys.stderr)
+ return 1
+ else:
+ if not fmter:
if '256' in os.environ.get('TERM', ''):
fmter = Terminal256Formatter(**parsed_opts)
else:
fmter = TerminalFormatter(**parsed_opts)
outfile = sys.stdout.buffer
-
- # determine output encoding if not explicitly selected
- if not outencoding:
- if outfn:
- # output file? use lexer encoding for now (can still be None)
- fmter.encoding = inencoding
- else:
- # else use terminal encoding
- fmter.encoding = terminal_encoding(sys.stdout)
-
- # provide coloring under Windows, if possible
- if not outfn and sys.platform in ('win32', 'cygwin') and \
- fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
- # unfortunately colorama doesn't support binary streams on Py3
+
+ # determine output encoding if not explicitly selected
+ if not outencoding:
+ if outfn:
+ # output file? use lexer encoding for now (can still be None)
+ fmter.encoding = inencoding
+ else:
+ # else use terminal encoding
+ fmter.encoding = terminal_encoding(sys.stdout)
+
+ # provide coloring under Windows, if possible
+ if not outfn and sys.platform in ('win32', 'cygwin') and \
+ fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
+ # unfortunately colorama doesn't support binary streams on Py3
outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
fmter.encoding = None
- try:
- import colorama.initialise
- except ImportError:
- pass
- else:
- outfile = colorama.initialise.wrap_stream(
- outfile, convert=None, strip=None, autoreset=False, wrap=True)
-
- # When using the LaTeX formatter and the option `escapeinside` is
- # specified, we need a special lexer which collects escaped text
- # before running the chosen language lexer.
- escapeinside = parsed_opts.get('escapeinside', '')
- if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
- left = escapeinside[0]
- right = escapeinside[1]
- lexer = LatexEmbeddedLexer(left, right, lexer)
-
- # ... and do it!
+ try:
+ import colorama.initialise
+ except ImportError:
+ pass
+ else:
+ outfile = colorama.initialise.wrap_stream(
+ outfile, convert=None, strip=None, autoreset=False, wrap=True)
+
+ # When using the LaTeX formatter and the option `escapeinside` is
+ # specified, we need a special lexer which collects escaped text
+ # before running the chosen language lexer.
+ escapeinside = parsed_opts.get('escapeinside', '')
+ if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
+ left = escapeinside[0]
+ right = escapeinside[1]
+ lexer = LatexEmbeddedLexer(left, right, lexer)
+
+ # ... and do it!
if not argns.s:
- # process whole input as per normal...
+ # process whole input as per normal...
try:
highlight(code, lexer, fmter, outfile)
finally:
if outfn:
outfile.close()
- return 0
- else:
- # line by line processing of stdin (eg: for 'tail -f')...
- try:
- while 1:
+ return 0
+ else:
+ # line by line processing of stdin (eg: for 'tail -f')...
+ try:
+ while 1:
line = sys.stdin.buffer.readline()
- if not line:
- break
- if not inencoding:
- line = guess_decode_from_terminal(line, sys.stdin)[0]
- highlight(line, lexer, fmter, outfile)
- if hasattr(outfile, 'flush'):
- outfile.flush()
- return 0
- except KeyboardInterrupt: # pragma: no cover
- return 0
+ if not line:
+ break
+ if not inencoding:
+ line = guess_decode_from_terminal(line, sys.stdin)[0]
+ highlight(line, lexer, fmter, outfile)
+ if hasattr(outfile, 'flush'):
+ outfile.flush()
+ return 0
+ except KeyboardInterrupt: # pragma: no cover
+ return 0
finally:
if outfn:
outfile.close()
-
-
+
+
class HelpFormatter(argparse.HelpFormatter):
def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
if width is None:
@@ -523,14 +523,14 @@ class HelpFormatter(argparse.HelpFormatter):
max_help_position, width)
-def main(args=sys.argv):
- """
- Main command line entry point.
- """
+def main(args=sys.argv):
+ """
+ Main command line entry point.
+ """
desc = "Highlight an input file and write the result to an output file."
parser = argparse.ArgumentParser(description=desc, add_help=False,
formatter_class=HelpFormatter)
-
+
operation = parser.add_argument_group('Main operation')
lexersel = operation.add_mutually_exclusive_group()
lexersel.add_argument(
@@ -564,7 +564,7 @@ def main(args=sys.argv):
operation.add_argument(
'-o', metavar='OUTPUTFILE',
help='Where to write the output. Defaults to standard output.')
-
+
operation.add_argument(
'INPUTFILE', nargs='?',
help='Where to read the input. Defaults to standard input.')
@@ -634,30 +634,30 @@ def main(args=sys.argv):
argns = parser.parse_args(args[1:])
- try:
+ try:
return main_inner(parser, argns)
- except Exception:
+ except Exception:
if argns.v:
- print(file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print('An unhandled exception occurred while highlighting.',
- file=sys.stderr)
- print('Please report the whole traceback to the issue tracker at',
- file=sys.stderr)
+ print(file=sys.stderr)
+ print('*' * 65, file=sys.stderr)
+ print('An unhandled exception occurred while highlighting.',
+ file=sys.stderr)
+ print('Please report the whole traceback to the issue tracker at',
+ file=sys.stderr)
print('<https://github.com/pygments/pygments/issues>.',
- file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print(file=sys.stderr)
- raise
- import traceback
- info = traceback.format_exception(*sys.exc_info())
- msg = info[-1].strip()
- if len(info) >= 3:
- # extract relevant file and position info
- msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
- print(file=sys.stderr)
- print('*** Error while highlighting:', file=sys.stderr)
- print(msg, file=sys.stderr)
- print('*** If this is a bug you want to report, please rerun with -v.',
- file=sys.stderr)
- return 1
+ file=sys.stderr)
+ print('*' * 65, file=sys.stderr)
+ print(file=sys.stderr)
+ raise
+ import traceback
+ info = traceback.format_exception(*sys.exc_info())
+ msg = info[-1].strip()
+ if len(info) >= 3:
+ # extract relevant file and position info
+ msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
+ print(file=sys.stderr)
+ print('*** Error while highlighting:', file=sys.stderr)
+ print(msg, file=sys.stderr)
+ print('*** If this is a bug you want to report, please rerun with -v.',
+ file=sys.stderr)
+ return 1
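The hunk above ends with main(), which parses pygmentize-style arguments and dispatches to main_inner(). As a rough sketch of two of the code paths it restores (dumping style definitions with -S/-f, and guessing a lexer from a file name with -N), assuming Pygments is importable:

    from pygments.cmdline import main

    # Dump CSS for the "default" style via the HTML formatter (the -S branch above).
    main(['pygmentize', '-S', 'default', '-f', 'html'])

    # Print the alias of the lexer guessed from a file name (the -N branch above).
    main(['pygmentize', '-N', 'setup.py'])   # expected to print "python"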
diff --git a/contrib/python/Pygments/py3/pygments/console.py b/contrib/python/Pygments/py3/pygments/console.py
index 8dd08abebc..728ca1f6d7 100644
--- a/contrib/python/Pygments/py3/pygments/console.py
+++ b/contrib/python/Pygments/py3/pygments/console.py
@@ -1,70 +1,70 @@
-"""
- pygments.console
- ~~~~~~~~~~~~~~~~
-
- Format colored console output.
-
+"""
+ pygments.console
+ ~~~~~~~~~~~~~~~~
+
+ Format colored console output.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-esc = "\x1b["
-
-codes = {}
+ :license: BSD, see LICENSE for details.
+"""
+
+esc = "\x1b["
+
+codes = {}
codes[""] = ""
codes["reset"] = esc + "39;49;00m"
-
+
codes["bold"] = esc + "01m"
codes["faint"] = esc + "02m"
codes["standout"] = esc + "03m"
-codes["underline"] = esc + "04m"
+codes["underline"] = esc + "04m"
codes["blink"] = esc + "05m"
codes["overline"] = esc + "06m"
-
+
dark_colors = ["black", "red", "green", "yellow", "blue",
"magenta", "cyan", "gray"]
light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue",
"brightmagenta", "brightcyan", "white"]
-
-x = 30
-for d, l in zip(dark_colors, light_colors):
- codes[d] = esc + "%im" % x
+
+x = 30
+for d, l in zip(dark_colors, light_colors):
+ codes[d] = esc + "%im" % x
codes[l] = esc + "%im" % (60 + x)
- x += 1
-
-del d, l, x
-
+ x += 1
+
+del d, l, x
+
codes["white"] = codes["bold"]
-
-
-def reset_color():
- return codes["reset"]
-
-
-def colorize(color_key, text):
- return codes[color_key] + text + codes["reset"]
-
-
-def ansiformat(attr, text):
- """
- Format ``text`` with a color and/or some attributes::
-
- color normal color
- *color* bold color
- _color_ underlined color
- +color+ blinking color
- """
- result = []
- if attr[:1] == attr[-1:] == '+':
- result.append(codes['blink'])
- attr = attr[1:-1]
- if attr[:1] == attr[-1:] == '*':
- result.append(codes['bold'])
- attr = attr[1:-1]
- if attr[:1] == attr[-1:] == '_':
- result.append(codes['underline'])
- attr = attr[1:-1]
- result.append(codes[attr])
- result.append(text)
- result.append(codes['reset'])
- return ''.join(result)
+
+
+def reset_color():
+ return codes["reset"]
+
+
+def colorize(color_key, text):
+ return codes[color_key] + text + codes["reset"]
+
+
+def ansiformat(attr, text):
+ """
+ Format ``text`` with a color and/or some attributes::
+
+ color normal color
+ *color* bold color
+ _color_ underlined color
+ +color+ blinking color
+ """
+ result = []
+ if attr[:1] == attr[-1:] == '+':
+ result.append(codes['blink'])
+ attr = attr[1:-1]
+ if attr[:1] == attr[-1:] == '*':
+ result.append(codes['bold'])
+ attr = attr[1:-1]
+ if attr[:1] == attr[-1:] == '_':
+ result.append(codes['underline'])
+ attr = attr[1:-1]
+ result.append(codes[attr])
+ result.append(text)
+ result.append(codes['reset'])
+ return ''.join(result)
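console.py above builds a small table of ANSI escape codes and exposes reset_color(), colorize() and ansiformat(). A brief usage sketch:

    from pygments.console import ansiformat, colorize, reset_color

    # colorize wraps text in one color code plus the reset sequence.
    print(colorize('red', 'plain red text'))

    # ansiformat layers attributes around a color: *...* bold, _..._ underline, +...+ blink.
    print(ansiformat('*red*', 'bold red text'))
    print(ansiformat('_brightblue_', 'underlined bright blue text'))

    # reset_color() returns just the reset sequence, e.g. to restore the terminal manually.
    print(reset_color(), end='')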
diff --git a/contrib/python/Pygments/py3/pygments/filter.py b/contrib/python/Pygments/py3/pygments/filter.py
index 85b4829878..255b0f9aa9 100644
--- a/contrib/python/Pygments/py3/pygments/filter.py
+++ b/contrib/python/Pygments/py3/pygments/filter.py
@@ -1,71 +1,71 @@
-"""
- pygments.filter
- ~~~~~~~~~~~~~~~
-
- Module that implements the default filter.
-
+"""
+ pygments.filter
+ ~~~~~~~~~~~~~~~
+
+ Module that implements the default filter.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-def apply_filters(stream, filters, lexer=None):
- """
- Use this method to apply an iterable of filters to
- a stream. If lexer is given it's forwarded to the
- filter, otherwise the filter receives `None`.
- """
- def _apply(filter_, stream):
+ :license: BSD, see LICENSE for details.
+"""
+
+
+def apply_filters(stream, filters, lexer=None):
+ """
+ Use this method to apply an iterable of filters to
+ a stream. If lexer is given it's forwarded to the
+ filter, otherwise the filter receives `None`.
+ """
+ def _apply(filter_, stream):
yield from filter_.filter(lexer, stream)
- for filter_ in filters:
- stream = _apply(filter_, stream)
- return stream
-
-
-def simplefilter(f):
- """
- Decorator that converts a function into a filter::
-
- @simplefilter
- def lowercase(self, lexer, stream, options):
- for ttype, value in stream:
- yield ttype, value.lower()
- """
- return type(f.__name__, (FunctionFilter,), {
+ for filter_ in filters:
+ stream = _apply(filter_, stream)
+ return stream
+
+
+def simplefilter(f):
+ """
+ Decorator that converts a function into a filter::
+
+ @simplefilter
+ def lowercase(self, lexer, stream, options):
+ for ttype, value in stream:
+ yield ttype, value.lower()
+ """
+ return type(f.__name__, (FunctionFilter,), {
'__module__': getattr(f, '__module__'),
'__doc__': f.__doc__,
'function': f,
})
-
-
+
+
class Filter:
- """
- Default filter. Subclass this class or use the `simplefilter`
- decorator to create your own filters.
- """
-
- def __init__(self, **options):
- self.options = options
-
- def filter(self, lexer, stream):
- raise NotImplementedError()
-
-
-class FunctionFilter(Filter):
- """
- Abstract class used by `simplefilter` to create simple
- function filters on the fly. The `simplefilter` decorator
- automatically creates subclasses of this class for
- functions passed to it.
- """
- function = None
-
- def __init__(self, **options):
- if not hasattr(self, 'function'):
- raise TypeError('%r used without bound function' %
- self.__class__.__name__)
- Filter.__init__(self, **options)
-
- def filter(self, lexer, stream):
- # pylint: disable=not-callable
+ """
+ Default filter. Subclass this class or use the `simplefilter`
+ decorator to create your own filters.
+ """
+
+ def __init__(self, **options):
+ self.options = options
+
+ def filter(self, lexer, stream):
+ raise NotImplementedError()
+
+
+class FunctionFilter(Filter):
+ """
+ Abstract class used by `simplefilter` to create simple
+ function filters on the fly. The `simplefilter` decorator
+ automatically creates subclasses of this class for
+ functions passed to it.
+ """
+ function = None
+
+ def __init__(self, **options):
+ if not hasattr(self, 'function'):
+ raise TypeError('%r used without bound function' %
+ self.__class__.__name__)
+ Filter.__init__(self, **options)
+
+ def filter(self, lexer, stream):
+ # pylint: disable=not-callable
yield from self.function(lexer, stream, self.options)
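filter.py above defines the Filter base class, FunctionFilter, the simplefilter decorator and apply_filters(). A minimal sketch of a function-based filter applied to a hand-written token stream:

    from pygments.filter import apply_filters, simplefilter
    from pygments.token import Keyword, Name

    @simplefilter
    def uppercase_keywords(self, lexer, stream, options):
        # Upper-case every Keyword token value; pass everything else through.
        for ttype, value in stream:
            if ttype in Keyword:
                yield ttype, value.upper()
            else:
                yield ttype, value

    tokens = [(Keyword, 'def'), (Name, 'foo')]
    # apply_filters() chains filter instances over a token stream; lexer may be None.
    print(list(apply_filters(tokens, [uppercase_keywords()])))
    # -> [(Token.Keyword, 'DEF'), (Token.Name, 'foo')]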
diff --git a/contrib/python/Pygments/py3/pygments/filters/__init__.py b/contrib/python/Pygments/py3/pygments/filters/__init__.py
index 930ff64e05..11bcb5f1a5 100644
--- a/contrib/python/Pygments/py3/pygments/filters/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/filters/__init__.py
@@ -1,96 +1,96 @@
-"""
- pygments.filters
- ~~~~~~~~~~~~~~~~
-
- Module containing filter lookup functions and default
- filters.
-
+"""
+ pygments.filters
+ ~~~~~~~~~~~~~~~~
+
+ Module containing filter lookup functions and default
+ filters.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
- string_to_tokentype
-from pygments.filter import Filter
-from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
+ string_to_tokentype
+from pygments.filter import Filter
+from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
get_choice_opt, ClassNotFound, OptionError
-from pygments.plugin import find_plugin_filters
-
-
-def find_filter_class(filtername):
- """Lookup a filter by name. Return None if not found."""
- if filtername in FILTERS:
- return FILTERS[filtername]
- for name, cls in find_plugin_filters():
- if name == filtername:
- return cls
- return None
-
-
-def get_filter_by_name(filtername, **options):
- """Return an instantiated filter.
-
- Options are passed to the filter initializer if wanted.
- Raise a ClassNotFound if not found.
- """
- cls = find_filter_class(filtername)
- if cls:
- return cls(**options)
- else:
- raise ClassNotFound('filter %r not found' % filtername)
-
-
-def get_all_filters():
- """Return a generator of all filter names."""
+from pygments.plugin import find_plugin_filters
+
+
+def find_filter_class(filtername):
+ """Lookup a filter by name. Return None if not found."""
+ if filtername in FILTERS:
+ return FILTERS[filtername]
+ for name, cls in find_plugin_filters():
+ if name == filtername:
+ return cls
+ return None
+
+
+def get_filter_by_name(filtername, **options):
+ """Return an instantiated filter.
+
+ Options are passed to the filter initializer if wanted.
+ Raise a ClassNotFound if not found.
+ """
+ cls = find_filter_class(filtername)
+ if cls:
+ return cls(**options)
+ else:
+ raise ClassNotFound('filter %r not found' % filtername)
+
+
+def get_all_filters():
+ """Return a generator of all filter names."""
yield from FILTERS
- for name, _ in find_plugin_filters():
- yield name
-
-
-def _replace_special(ttype, value, regex, specialttype,
- replacefunc=lambda x: x):
- last = 0
- for match in regex.finditer(value):
- start, end = match.start(), match.end()
- if start != last:
- yield ttype, value[last:start]
- yield specialttype, replacefunc(value[start:end])
- last = end
- if last != len(value):
- yield ttype, value[last:]
-
-
-class CodeTagFilter(Filter):
- """Highlight special code tags in comments and docstrings.
-
- Options accepted:
-
- `codetags` : list of strings
- A list of strings that are flagged as code tags. The default is to
- highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- tags = get_list_opt(options, 'codetags',
- ['XXX', 'TODO', 'BUG', 'NOTE'])
- self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
- re.escape(tag) for tag in tags if tag
- ]))
-
- def filter(self, lexer, stream):
- regex = self.tag_re
- for ttype, value in stream:
- if ttype in String.Doc or \
- ttype in Comment and \
- ttype not in Comment.Preproc:
+ for name, _ in find_plugin_filters():
+ yield name
+
+
+def _replace_special(ttype, value, regex, specialttype,
+ replacefunc=lambda x: x):
+ last = 0
+ for match in regex.finditer(value):
+ start, end = match.start(), match.end()
+ if start != last:
+ yield ttype, value[last:start]
+ yield specialttype, replacefunc(value[start:end])
+ last = end
+ if last != len(value):
+ yield ttype, value[last:]
+
+
+class CodeTagFilter(Filter):
+ """Highlight special code tags in comments and docstrings.
+
+ Options accepted:
+
+ `codetags` : list of strings
+ A list of strings that are flagged as code tags. The default is to
+ highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ tags = get_list_opt(options, 'codetags',
+ ['XXX', 'TODO', 'BUG', 'NOTE'])
+ self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
+ re.escape(tag) for tag in tags if tag
+ ]))
+
+ def filter(self, lexer, stream):
+ regex = self.tag_re
+ for ttype, value in stream:
+ if ttype in String.Doc or \
+ ttype in Comment and \
+ ttype not in Comment.Preproc:
yield from _replace_special(ttype, value, regex, Comment.Special)
- else:
- yield ttype, value
-
-
+ else:
+ yield ttype, value
+
+
class SymbolFilter(Filter):
"""Convert mathematical symbols such as \\<longrightarrow> in Isabelle
or \\longrightarrow in LaTeX into Unicode characters.
@@ -681,257 +681,257 @@ class SymbolFilter(Filter):
yield ttype, value
-class KeywordCaseFilter(Filter):
- """Convert keywords to lowercase or uppercase or capitalize them, which
- means first letter uppercase, rest lowercase.
-
- This can be useful e.g. if you highlight Pascal code and want to adapt the
- code to your styleguide.
-
- Options accepted:
-
- `case` : string
- The casing to convert keywords to. Must be one of ``'lower'``,
- ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- case = get_choice_opt(options, 'case',
- ['lower', 'upper', 'capitalize'], 'lower')
+class KeywordCaseFilter(Filter):
+ """Convert keywords to lowercase or uppercase or capitalize them, which
+ means first letter uppercase, rest lowercase.
+
+ This can be useful e.g. if you highlight Pascal code and want to adapt the
+ code to your styleguide.
+
+ Options accepted:
+
+ `case` : string
+ The casing to convert keywords to. Must be one of ``'lower'``,
+ ``'upper'`` or ``'capitalize'``. The default is ``'lower'``.
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ case = get_choice_opt(options, 'case',
+ ['lower', 'upper', 'capitalize'], 'lower')
self.convert = getattr(str, case)
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype in Keyword:
- yield ttype, self.convert(value)
- else:
- yield ttype, value
-
-
-class NameHighlightFilter(Filter):
- """Highlight a normal Name (and Name.*) token with a different token type.
-
- Example::
-
- filter = NameHighlightFilter(
- names=['foo', 'bar', 'baz'],
- tokentype=Name.Function,
- )
-
- This would highlight the names "foo", "bar" and "baz"
- as functions. `Name.Function` is the default token type.
-
- Options accepted:
-
- `names` : list of strings
- A list of names that should be given the different token type.
- There is no default.
- `tokentype` : TokenType or string
- A token type or a string containing a token type name that is
- used for highlighting the strings in `names`. The default is
- `Name.Function`.
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.names = set(get_list_opt(options, 'names', []))
- tokentype = options.get('tokentype')
- if tokentype:
- self.tokentype = string_to_tokentype(tokentype)
- else:
- self.tokentype = Name.Function
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype in Name and value in self.names:
- yield self.tokentype, value
- else:
- yield ttype, value
-
-
-class ErrorToken(Exception):
- pass
-
-
-class RaiseOnErrorTokenFilter(Filter):
- """Raise an exception when the lexer generates an error token.
-
- Options accepted:
-
- `excclass` : Exception class
- The exception class to raise.
- The default is `pygments.filters.ErrorToken`.
-
- .. versionadded:: 0.8
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.exception = options.get('excclass', ErrorToken)
- try:
- # issubclass() will raise TypeError if first argument is not a class
- if not issubclass(self.exception, Exception):
- raise TypeError
- except TypeError:
- raise OptionError('excclass option is not an exception class')
-
- def filter(self, lexer, stream):
- for ttype, value in stream:
- if ttype is Error:
- raise self.exception(value)
- yield ttype, value
-
-
-class VisibleWhitespaceFilter(Filter):
- """Convert tabs, newlines and/or spaces to visible characters.
-
- Options accepted:
-
- `spaces` : string or bool
- If this is a one-character string, spaces will be replaced by this string.
- If it is another true value, spaces will be replaced by ``·`` (unicode
- MIDDLE DOT). If it is a false value, spaces will not be replaced. The
- default is ``False``.
- `tabs` : string or bool
- The same as for `spaces`, but the default replacement character is ``»``
- (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
- is ``False``. Note: this will not work if the `tabsize` option for the
- lexer is nonzero, as tabs will already have been expanded then.
- `tabsize` : int
- If tabs are to be replaced by this filter (see the `tabs` option), this
- is the total number of characters that a tab should be expanded to.
- The default is ``8``.
- `newlines` : string or bool
- The same as for `spaces`, but the default replacement character is ``¶``
- (unicode PILCROW SIGN). The default value is ``False``.
- `wstokentype` : bool
- If true, give whitespace the special `Whitespace` token type. This allows
- styling the visible whitespace differently (e.g. greyed out), but it can
- disrupt background colors. The default is ``True``.
-
- .. versionadded:: 0.8
- """
-
- def __init__(self, **options):
- Filter.__init__(self, **options)
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if ttype in Keyword:
+ yield ttype, self.convert(value)
+ else:
+ yield ttype, value
+
+
+class NameHighlightFilter(Filter):
+ """Highlight a normal Name (and Name.*) token with a different token type.
+
+ Example::
+
+ filter = NameHighlightFilter(
+ names=['foo', 'bar', 'baz'],
+ tokentype=Name.Function,
+ )
+
+ This would highlight the names "foo", "bar" and "baz"
+ as functions. `Name.Function` is the default token type.
+
+ Options accepted:
+
+ `names` : list of strings
+ A list of names that should be given the different token type.
+ There is no default.
+ `tokentype` : TokenType or string
+ A token type or a string containing a token type name that is
+ used for highlighting the strings in `names`. The default is
+ `Name.Function`.
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ self.names = set(get_list_opt(options, 'names', []))
+ tokentype = options.get('tokentype')
+ if tokentype:
+ self.tokentype = string_to_tokentype(tokentype)
+ else:
+ self.tokentype = Name.Function
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if ttype in Name and value in self.names:
+ yield self.tokentype, value
+ else:
+ yield ttype, value
+
+
+class ErrorToken(Exception):
+ pass
+
+
+class RaiseOnErrorTokenFilter(Filter):
+ """Raise an exception when the lexer generates an error token.
+
+ Options accepted:
+
+ `excclass` : Exception class
+ The exception class to raise.
+ The default is `pygments.filters.ErrorToken`.
+
+ .. versionadded:: 0.8
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ self.exception = options.get('excclass', ErrorToken)
+ try:
+ # issubclass() will raise TypeError if first argument is not a class
+ if not issubclass(self.exception, Exception):
+ raise TypeError
+ except TypeError:
+ raise OptionError('excclass option is not an exception class')
+
+ def filter(self, lexer, stream):
+ for ttype, value in stream:
+ if ttype is Error:
+ raise self.exception(value)
+ yield ttype, value
+
+
+class VisibleWhitespaceFilter(Filter):
+ """Convert tabs, newlines and/or spaces to visible characters.
+
+ Options accepted:
+
+ `spaces` : string or bool
+ If this is a one-character string, spaces will be replaced by this string.
+ If it is another true value, spaces will be replaced by ``·`` (unicode
+ MIDDLE DOT). If it is a false value, spaces will not be replaced. The
+ default is ``False``.
+ `tabs` : string or bool
+ The same as for `spaces`, but the default replacement character is ``»``
+ (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value
+ is ``False``. Note: this will not work if the `tabsize` option for the
+ lexer is nonzero, as tabs will already have been expanded then.
+ `tabsize` : int
+ If tabs are to be replaced by this filter (see the `tabs` option), this
+ is the total number of characters that a tab should be expanded to.
+ The default is ``8``.
+ `newlines` : string or bool
+ The same as for `spaces`, but the default replacement character is ``¶``
+ (unicode PILCROW SIGN). The default value is ``False``.
+ `wstokentype` : bool
+ If true, give whitespace the special `Whitespace` token type. This allows
+ styling the visible whitespace differently (e.g. greyed out), but it can
+ disrupt background colors. The default is ``True``.
+
+ .. versionadded:: 0.8
+ """
+
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
for name, default in [('spaces', '·'),
('tabs', '»'),
('newlines', '¶')]:
- opt = options.get(name, False)
+ opt = options.get(name, False)
if isinstance(opt, str) and len(opt) == 1:
- setattr(self, name, opt)
- else:
- setattr(self, name, (opt and default or ''))
- tabsize = get_int_opt(options, 'tabsize', 8)
- if self.tabs:
- self.tabs += ' ' * (tabsize - 1)
- if self.newlines:
- self.newlines += '\n'
- self.wstt = get_bool_opt(options, 'wstokentype', True)
-
- def filter(self, lexer, stream):
- if self.wstt:
+ setattr(self, name, opt)
+ else:
+ setattr(self, name, (opt and default or ''))
+ tabsize = get_int_opt(options, 'tabsize', 8)
+ if self.tabs:
+ self.tabs += ' ' * (tabsize - 1)
+ if self.newlines:
+ self.newlines += '\n'
+ self.wstt = get_bool_opt(options, 'wstokentype', True)
+
+ def filter(self, lexer, stream):
+ if self.wstt:
spaces = self.spaces or ' '
tabs = self.tabs or '\t'
newlines = self.newlines or '\n'
- regex = re.compile(r'\s')
-
- def replacefunc(wschar):
- if wschar == ' ':
- return spaces
- elif wschar == '\t':
- return tabs
- elif wschar == '\n':
- return newlines
- return wschar
-
- for ttype, value in stream:
+ regex = re.compile(r'\s')
+
+ def replacefunc(wschar):
+ if wschar == ' ':
+ return spaces
+ elif wschar == '\t':
+ return tabs
+ elif wschar == '\n':
+ return newlines
+ return wschar
+
+ for ttype, value in stream:
yield from _replace_special(ttype, value, regex, Whitespace,
replacefunc)
- else:
- spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
- # simpler processing
- for ttype, value in stream:
- if spaces:
- value = value.replace(' ', spaces)
- if tabs:
- value = value.replace('\t', tabs)
- if newlines:
- value = value.replace('\n', newlines)
- yield ttype, value
-
-
-class GobbleFilter(Filter):
- """Gobbles source code lines (eats initial characters).
-
- This filter drops the first ``n`` characters off every line of code. This
- may be useful when the source code fed to the lexer is indented by a fixed
- amount of space that isn't desired in the output.
-
- Options accepted:
-
- `n` : int
- The number of characters to gobble.
-
- .. versionadded:: 1.2
- """
- def __init__(self, **options):
- Filter.__init__(self, **options)
- self.n = get_int_opt(options, 'n', 0)
-
- def gobble(self, value, left):
- if left < len(value):
- return value[left:], 0
- else:
+ else:
+ spaces, tabs, newlines = self.spaces, self.tabs, self.newlines
+ # simpler processing
+ for ttype, value in stream:
+ if spaces:
+ value = value.replace(' ', spaces)
+ if tabs:
+ value = value.replace('\t', tabs)
+ if newlines:
+ value = value.replace('\n', newlines)
+ yield ttype, value
+
+
+class GobbleFilter(Filter):
+ """Gobbles source code lines (eats initial characters).
+
+ This filter drops the first ``n`` characters off every line of code. This
+ may be useful when the source code fed to the lexer is indented by a fixed
+ amount of space that isn't desired in the output.
+
+ Options accepted:
+
+ `n` : int
+ The number of characters to gobble.
+
+ .. versionadded:: 1.2
+ """
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+ self.n = get_int_opt(options, 'n', 0)
+
+ def gobble(self, value, left):
+ if left < len(value):
+ return value[left:], 0
+ else:
return '', left - len(value)
-
- def filter(self, lexer, stream):
- n = self.n
+
+ def filter(self, lexer, stream):
+ n = self.n
left = n # How many characters left to gobble.
- for ttype, value in stream:
- # Remove ``left`` tokens from first line, ``n`` from all others.
- parts = value.split('\n')
- (parts[0], left) = self.gobble(parts[0], left)
- for i in range(1, len(parts)):
- (parts[i], left) = self.gobble(parts[i], n)
+ for ttype, value in stream:
+ # Remove ``left`` tokens from first line, ``n`` from all others.
+ parts = value.split('\n')
+ (parts[0], left) = self.gobble(parts[0], left)
+ for i in range(1, len(parts)):
+ (parts[i], left) = self.gobble(parts[i], n)
value = '\n'.join(parts)
-
- if value != '':
- yield ttype, value
-
-
-class TokenMergeFilter(Filter):
- """Merges consecutive tokens with the same token type in the output
- stream of a lexer.
-
- .. versionadded:: 1.2
- """
- def __init__(self, **options):
- Filter.__init__(self, **options)
-
- def filter(self, lexer, stream):
- current_type = None
- current_value = None
- for ttype, value in stream:
- if ttype is current_type:
- current_value += value
- else:
- if current_type is not None:
- yield current_type, current_value
- current_type = ttype
- current_value = value
- if current_type is not None:
- yield current_type, current_value
-
-
-FILTERS = {
- 'codetagify': CodeTagFilter,
- 'keywordcase': KeywordCaseFilter,
- 'highlight': NameHighlightFilter,
- 'raiseonerror': RaiseOnErrorTokenFilter,
- 'whitespace': VisibleWhitespaceFilter,
- 'gobble': GobbleFilter,
- 'tokenmerge': TokenMergeFilter,
+
+ if value != '':
+ yield ttype, value
+
+
+class TokenMergeFilter(Filter):
+ """Merges consecutive tokens with the same token type in the output
+ stream of a lexer.
+
+ .. versionadded:: 1.2
+ """
+ def __init__(self, **options):
+ Filter.__init__(self, **options)
+
+ def filter(self, lexer, stream):
+ current_type = None
+ current_value = None
+ for ttype, value in stream:
+ if ttype is current_type:
+ current_value += value
+ else:
+ if current_type is not None:
+ yield current_type, current_value
+ current_type = ttype
+ current_value = value
+ if current_type is not None:
+ yield current_type, current_value
+
+
+FILTERS = {
+ 'codetagify': CodeTagFilter,
+ 'keywordcase': KeywordCaseFilter,
+ 'highlight': NameHighlightFilter,
+ 'raiseonerror': RaiseOnErrorTokenFilter,
+ 'whitespace': VisibleWhitespaceFilter,
+ 'gobble': GobbleFilter,
+ 'tokenmerge': TokenMergeFilter,
'symbols': SymbolFilter,
-}
+}
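The filters/__init__.py hunk registers the built-in filters above in the FILTERS mapping and provides name-based lookup. A short sketch of that API; the lexer-side add_filter() calls are assumptions based on the standard Pygments lexer interface rather than code shown in this diff:

    from pygments.filters import get_all_filters, get_filter_by_name
    from pygments.lexers import PythonLexer

    print(sorted(get_all_filters()))   # built-in names: 'codetagify', 'gobble', 'keywordcase', ...

    # Instantiate a filter by name and attach it to a lexer; unknown names raise ClassNotFound.
    lexer = PythonLexer()
    lexer.add_filter(get_filter_by_name('keywordcase', case='upper'))
    lexer.add_filter('codetagify', codetags=['FIXME', 'TODO'])   # string form also accepted

    for ttype, value in lexer.get_tokens('def foo():  # TODO rename\n    pass\n'):
        print(ttype, repr(value))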
diff --git a/contrib/python/Pygments/py3/pygments/formatter.py b/contrib/python/Pygments/py3/pygments/formatter.py
index c3fe68d3ef..3b3fbc8761 100644
--- a/contrib/python/Pygments/py3/pygments/formatter.py
+++ b/contrib/python/Pygments/py3/pygments/formatter.py
@@ -1,94 +1,94 @@
-"""
- pygments.formatter
- ~~~~~~~~~~~~~~~~~~
-
- Base formatter class.
-
+"""
+ pygments.formatter
+ ~~~~~~~~~~~~~~~~~~
+
+ Base formatter class.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import codecs
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import codecs
+
from pygments.util import get_bool_opt
-from pygments.styles import get_style_by_name
-
-__all__ = ['Formatter']
-
-
-def _lookup_style(style):
+from pygments.styles import get_style_by_name
+
+__all__ = ['Formatter']
+
+
+def _lookup_style(style):
if isinstance(style, str):
- return get_style_by_name(style)
- return style
-
-
+ return get_style_by_name(style)
+ return style
+
+
class Formatter:
- """
- Converts a token stream to text.
-
- Options accepted:
-
- ``style``
- The style to use, can be a string or a Style subclass
- (default: "default"). Not used by e.g. the
- TerminalFormatter.
- ``full``
- Tells the formatter to output a "full" document, i.e.
- a complete self-contained document. This doesn't have
- any effect for some formatters (default: false).
- ``title``
- If ``full`` is true, the title that should be used to
- caption the document (default: '').
- ``encoding``
- If given, must be an encoding name. This will be used to
- convert the Unicode token strings to byte strings in the
- output. If it is "" or None, Unicode strings will be written
- to the output file, which most file-like objects do not
- support (default: None).
- ``outencoding``
- Overrides ``encoding`` if given.
- """
-
- #: Name of the formatter
- name = None
-
- #: Shortcuts for the formatter
- aliases = []
-
- #: fn match rules
- filenames = []
-
- #: If True, this formatter outputs Unicode strings when no encoding
- #: option is given.
- unicodeoutput = True
-
- def __init__(self, **options):
- self.style = _lookup_style(options.get('style', 'default'))
+ """
+ Converts a token stream to text.
+
+ Options accepted:
+
+ ``style``
+ The style to use, can be a string or a Style subclass
+ (default: "default"). Not used by e.g. the
+ TerminalFormatter.
+ ``full``
+ Tells the formatter to output a "full" document, i.e.
+ a complete self-contained document. This doesn't have
+ any effect for some formatters (default: false).
+ ``title``
+ If ``full`` is true, the title that should be used to
+ caption the document (default: '').
+ ``encoding``
+ If given, must be an encoding name. This will be used to
+ convert the Unicode token strings to byte strings in the
+ output. If it is "" or None, Unicode strings will be written
+ to the output file, which most file-like objects do not
+ support (default: None).
+ ``outencoding``
+ Overrides ``encoding`` if given.
+ """
+
+ #: Name of the formatter
+ name = None
+
+ #: Shortcuts for the formatter
+ aliases = []
+
+ #: fn match rules
+ filenames = []
+
+ #: If True, this formatter outputs Unicode strings when no encoding
+ #: option is given.
+ unicodeoutput = True
+
+ def __init__(self, **options):
+ self.style = _lookup_style(options.get('style', 'default'))
self.full = get_bool_opt(options, 'full', False)
- self.title = options.get('title', '')
- self.encoding = options.get('encoding', None) or None
- if self.encoding in ('guess', 'chardet'):
- # can happen for e.g. pygmentize -O encoding=guess
- self.encoding = 'utf-8'
- self.encoding = options.get('outencoding') or self.encoding
- self.options = options
-
- def get_style_defs(self, arg=''):
- """
- Return the style definitions for the current style as a string.
-
- ``arg`` is an additional argument whose meaning depends on the
- formatter used. Note that ``arg`` can also be a list or tuple
- for some formatters like the html formatter.
- """
- return ''
-
- def format(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
- """
- if self.encoding:
- # wrap the outfile in a StreamWriter
- outfile = codecs.lookup(self.encoding)[3](outfile)
- return self.format_unencoded(tokensource, outfile)
+ self.title = options.get('title', '')
+ self.encoding = options.get('encoding', None) or None
+ if self.encoding in ('guess', 'chardet'):
+ # can happen for e.g. pygmentize -O encoding=guess
+ self.encoding = 'utf-8'
+ self.encoding = options.get('outencoding') or self.encoding
+ self.options = options
+
+ def get_style_defs(self, arg=''):
+ """
+ Return the style definitions for the current style as a string.
+
+ ``arg`` is an additional argument whose meaning depends on the
+ formatter used. Note that ``arg`` can also be a list or tuple
+ for some formatters like the html formatter.
+ """
+ return ''
+
+ def format(self, tokensource, outfile):
+ """
+ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
+ tuples and write it into ``outfile``.
+ """
+ if self.encoding:
+ # wrap the outfile in a StreamWriter
+ outfile = codecs.lookup(self.encoding)[3](outfile)
+ return self.format_unencoded(tokensource, outfile)
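The Formatter base class above owns option handling and output encoding; concrete formatters implement format_unencoded(). A minimal toy subclass, as a sketch (the token tuples are made up for illustration):

    import sys
    from pygments.formatter import Formatter
    from pygments.token import Name, Number, Operator, Text

    class NamesOnlyFormatter(Formatter):
        """Toy formatter: write one '<tokentype> <repr(value)>' pair per line."""
        name = 'Names only'
        aliases = ['namesonly']

        def format_unencoded(self, tokensource, outfile):
            for ttype, value in tokensource:
                outfile.write('%s %r\n' % (ttype, value))

    tokens = [(Name, 'x'), (Operator, '='), (Number, '1'), (Text, '\n')]
    # format() only wraps outfile in a codecs StreamWriter when an encoding is set;
    # with no encoding it calls format_unencoded() directly.
    NamesOnlyFormatter().format(tokens, sys.stdout)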
diff --git a/contrib/python/Pygments/py3/pygments/formatters/__init__.py b/contrib/python/Pygments/py3/pygments/formatters/__init__.py
index 66c9e9d404..4d64cf9ca6 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/__init__.py
@@ -1,83 +1,83 @@
-"""
- pygments.formatters
- ~~~~~~~~~~~~~~~~~~~
-
- Pygments formatters.
-
+"""
+ pygments.formatters
+ ~~~~~~~~~~~~~~~~~~~
+
+ Pygments formatters.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import types
-import fnmatch
-from os.path import basename
-
-from pygments.formatters._mapping import FORMATTERS
-from pygments.plugin import find_plugin_formatters
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import sys
+import types
+import fnmatch
+from os.path import basename
+
+from pygments.formatters._mapping import FORMATTERS
+from pygments.plugin import find_plugin_formatters
from pygments.util import ClassNotFound
-
-__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
+
+__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS)
-
-_formatter_cache = {} # classes by name
-_pattern_cache = {}
-
-
-def _fn_matches(fn, glob):
- """Return whether the supplied file name fn matches pattern filename."""
- if glob not in _pattern_cache:
- pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
- return pattern.match(fn)
- return _pattern_cache[glob].match(fn)
-
-
-def _load_formatters(module_name):
- """Load a formatter (and all others in the module too)."""
- mod = __import__(module_name, None, None, ['__all__'])
- for formatter_name in mod.__all__:
- cls = getattr(mod, formatter_name)
- _formatter_cache[cls.name] = cls
-
-
-def get_all_formatters():
- """Return a generator for all formatter classes."""
- # NB: this returns formatter classes, not info like get_all_lexers().
+
+_formatter_cache = {} # classes by name
+_pattern_cache = {}
+
+
+def _fn_matches(fn, glob):
+ """Return whether the supplied file name fn matches pattern filename."""
+ if glob not in _pattern_cache:
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
+
+
+def _load_formatters(module_name):
+ """Load a formatter (and all others in the module too)."""
+ mod = __import__(module_name, None, None, ['__all__'])
+ for formatter_name in mod.__all__:
+ cls = getattr(mod, formatter_name)
+ _formatter_cache[cls.name] = cls
+
+
+def get_all_formatters():
+ """Return a generator for all formatter classes."""
+ # NB: this returns formatter classes, not info like get_all_lexers().
for info in FORMATTERS.values():
- if info[1] not in _formatter_cache:
- _load_formatters(info[0])
- yield _formatter_cache[info[1]]
- for _, formatter in find_plugin_formatters():
- yield formatter
-
-
-def find_formatter_class(alias):
- """Lookup a formatter by alias.
-
- Returns None if not found.
- """
+ if info[1] not in _formatter_cache:
+ _load_formatters(info[0])
+ yield _formatter_cache[info[1]]
+ for _, formatter in find_plugin_formatters():
+ yield formatter
+
+
+def find_formatter_class(alias):
+ """Lookup a formatter by alias.
+
+ Returns None if not found.
+ """
for module_name, name, aliases, _, _ in FORMATTERS.values():
- if alias in aliases:
- if name not in _formatter_cache:
- _load_formatters(module_name)
- return _formatter_cache[name]
- for _, cls in find_plugin_formatters():
- if alias in cls.aliases:
- return cls
-
-
-def get_formatter_by_name(_alias, **options):
- """Lookup and instantiate a formatter by alias.
-
- Raises ClassNotFound if not found.
- """
- cls = find_formatter_class(_alias)
- if cls is None:
- raise ClassNotFound("no formatter found for name %r" % _alias)
- return cls(**options)
-
-
+ if alias in aliases:
+ if name not in _formatter_cache:
+ _load_formatters(module_name)
+ return _formatter_cache[name]
+ for _, cls in find_plugin_formatters():
+ if alias in cls.aliases:
+ return cls
+
+
+def get_formatter_by_name(_alias, **options):
+ """Lookup and instantiate a formatter by alias.
+
+ Raises ClassNotFound if not found.
+ """
+ cls = find_formatter_class(_alias)
+ if cls is None:
+ raise ClassNotFound("no formatter found for name %r" % _alias)
+ return cls(**options)
+
+
def load_formatter_from_file(filename, formattername="CustomFormatter",
**options):
"""Load a formatter from a file.
@@ -114,40 +114,40 @@ def load_formatter_from_file(filename, formattername="CustomFormatter",
raise ClassNotFound('error when loading custom formatter: %s' % err)
-def get_formatter_for_filename(fn, **options):
- """Lookup and instantiate a formatter by filename pattern.
-
- Raises ClassNotFound if not found.
- """
- fn = basename(fn)
+def get_formatter_for_filename(fn, **options):
+ """Lookup and instantiate a formatter by filename pattern.
+
+ Raises ClassNotFound if not found.
+ """
+ fn = basename(fn)
for modname, name, _, filenames, _ in FORMATTERS.values():
- for filename in filenames:
- if _fn_matches(fn, filename):
- if name not in _formatter_cache:
- _load_formatters(modname)
- return _formatter_cache[name](**options)
- for cls in find_plugin_formatters():
- for filename in cls.filenames:
- if _fn_matches(fn, filename):
- return cls(**options)
- raise ClassNotFound("no formatter found for file name %r" % fn)
-
-
-class _automodule(types.ModuleType):
- """Automatically import formatters."""
-
- def __getattr__(self, name):
- info = FORMATTERS.get(name)
- if info:
- _load_formatters(info[0])
- cls = _formatter_cache[info[1]]
- setattr(self, name, cls)
- return cls
- raise AttributeError(name)
-
-
-oldmod = sys.modules[__name__]
-newmod = _automodule(__name__)
-newmod.__dict__.update(oldmod.__dict__)
-sys.modules[__name__] = newmod
-del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
+ for filename in filenames:
+ if _fn_matches(fn, filename):
+ if name not in _formatter_cache:
+ _load_formatters(modname)
+ return _formatter_cache[name](**options)
+ for cls in find_plugin_formatters():
+ for filename in cls.filenames:
+ if _fn_matches(fn, filename):
+ return cls(**options)
+ raise ClassNotFound("no formatter found for file name %r" % fn)
+
+
+class _automodule(types.ModuleType):
+ """Automatically import formatters."""
+
+ def __getattr__(self, name):
+ info = FORMATTERS.get(name)
+ if info:
+ _load_formatters(info[0])
+ cls = _formatter_cache[info[1]]
+ setattr(self, name, cls)
+ return cls
+ raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
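formatters/__init__.py above provides alias- and filename-based formatter lookup plus the lazy _automodule import hook. A brief sketch of the lookup functions, using aliases and patterns listed in the _mapping.py hunk below:

    from pygments.formatters import get_formatter_by_name, get_formatter_for_filename
    from pygments.util import ClassNotFound

    print(type(get_formatter_by_name('html')).__name__)          # HtmlFormatter ('html' alias)
    print(type(get_formatter_for_filename('out.tex')).__name__)  # LatexFormatter ('*.tex' pattern)

    # Unknown aliases raise ClassNotFound rather than returning None.
    try:
        get_formatter_by_name('no-such-formatter')
    except ClassNotFound as err:
        print('lookup failed:', err)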
diff --git a/contrib/python/Pygments/py3/pygments/formatters/_mapping.py b/contrib/python/Pygments/py3/pygments/formatters/_mapping.py
index 8b5e478e39..216850db97 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/_mapping.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/_mapping.py
@@ -1,84 +1,84 @@
-"""
- pygments.formatters._mapping
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter mapping definitions. This file is generated by itself. Every time
- you change something in a builtin formatter definition, run this script from
- the formatters folder to update it.
-
- Do not alter the FORMATTERS dictionary by hand.
-
+"""
+ pygments.formatters._mapping
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter mapping definitions. This file is generated by itself. Every time
+ you change something in a builtin formatter definition, run this script from
+ the formatters folder to update it.
+
+ Do not alter the FORMATTERS dictionary by hand.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-FORMATTERS = {
- 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
- 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ :license: BSD, see LICENSE for details.
+"""
+
+FORMATTERS = {
+ 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
+ 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'),
- 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
- 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
- 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
- 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
+ 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
+ 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'),
+ 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
+ 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'),
- 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
- 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
- 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
- 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
-}
-
-if __name__ == '__main__': # pragma: no cover
- import sys
- import os
-
- # lookup formatters
- found_formatters = []
- imports = []
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
- from pygments.util import docstring_headline
-
- for root, dirs, files in os.walk('.'):
- for filename in files:
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.formatters%s.%s' % (
- root[1:].replace('/', '.'), filename[:-3])
- print(module_name)
- module = __import__(module_name, None, None, [''])
- for formatter_name in module.__all__:
- formatter = getattr(module, formatter_name)
- found_formatters.append(
- '%r: %r' % (formatter_name,
- (module_name,
- formatter.name,
- tuple(formatter.aliases),
- tuple(formatter.filenames),
- docstring_headline(formatter))))
- # sort them to make the diff minimal
- found_formatters.sort()
-
- # extract useful sourcecode from this file
- with open(__file__) as fp:
- content = fp.read()
- # replace crnl to nl for Windows.
- #
- # Note that, originally, contributors should keep nl of master
- # repository, for example by using some kind of automatic
- # management EOL, like `EolExtension
- # <https://www.mercurial-scm.org/wiki/EolExtension>`.
- content = content.replace("\r\n", "\n")
- header = content[:content.find('FORMATTERS = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- # write new file
+ 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
+ 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'),
+ 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
+ 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
+}
+
+if __name__ == '__main__': # pragma: no cover
+ import sys
+ import os
+
+ # lookup formatters
+ found_formatters = []
+ imports = []
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+ from pygments.util import docstring_headline
+
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.formatters%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for formatter_name in module.__all__:
+ formatter = getattr(module, formatter_name)
+ found_formatters.append(
+ '%r: %r' % (formatter_name,
+ (module_name,
+ formatter.name,
+ tuple(formatter.aliases),
+ tuple(formatter.filenames),
+ docstring_headline(formatter))))
+ # sort them to make the diff minimal
+ found_formatters.sort()
+
+ # extract useful sourcecode from this file
+ with open(__file__) as fp:
+ content = fp.read()
+ # replace crnl to nl for Windows.
+ #
+ # Note that, originally, contributors should keep nl of master
+ # repository, for example by using some kind of automatic
+ # management EOL, like `EolExtension
+ # <https://www.mercurial-scm.org/wiki/EolExtension>`.
+ content = content.replace("\r\n", "\n")
+ header = content[:content.find('FORMATTERS = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ # write new file
with open(__file__, 'w') as fp:
- fp.write(header)
- fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
- fp.write(footer)
-
- print ('=== %d formatters processed.' % len(found_formatters))
+ fp.write(header)
+ fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
+ fp.write(footer)
+
+ print ('=== %d formatters processed.' % len(found_formatters))
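The FORMATTERS table above maps each formatter class name to a tuple of (module path, pretty name, aliases, filename patterns, docstring headline); the __main__ block regenerates it by walking the formatters package. A rough sketch of resolving an alias against this table (illustrative; find_formatter_class is a hypothetical helper written for this note, the library's real entry point is pygments.formatters.get_formatter_by_name):

    import importlib

    from pygments.formatters._mapping import FORMATTERS

    def find_formatter_class(alias):
        # Return the formatter class registered under `alias`, or None.
        for class_name, (module_name, _pretty, aliases, _patterns, _doc) in FORMATTERS.items():
            if alias in aliases:
                module = importlib.import_module(module_name)
                return getattr(module, class_name)
        return None

    cls = find_formatter_class('html')   # -> pygments.formatters.html.HtmlFormatter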
diff --git a/contrib/python/Pygments/py3/pygments/formatters/bbcode.py b/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
index 586a8925c6..2d4dbbd1d6 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
@@ -1,108 +1,108 @@
-"""
- pygments.formatters.bbcode
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- BBcode formatter.
-
+"""
+ pygments.formatters.bbcode
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ BBcode formatter.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt
-
-__all__ = ['BBCodeFormatter']
-
-
-class BBCodeFormatter(Formatter):
- """
- Format tokens with BBcodes. These formatting codes are used by many
- bulletin boards, so you can highlight your sourcecode with pygments before
- posting it there.
-
- This formatter has no support for background colors and borders, as there
- are no common BBcode tags for that.
-
- Some board systems (e.g. phpBB) don't support colors in their [code] tag,
- so you can't use the highlighting together with that tag.
- Text in a [code] tag usually is shown with a monospace font (which this
- formatter can do with the ``monofont`` option) and no spaces (which you
- need for indentation) are removed.
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `codetag`
- If set to true, put the output into ``[code]`` tags (default:
- ``false``)
-
- `monofont`
- If set to true, add a tag to show the code with a monospace font
- (default: ``false``).
- """
- name = 'BBCode'
- aliases = ['bbcode', 'bb']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self._code = get_bool_opt(options, 'codetag', False)
- self._mono = get_bool_opt(options, 'monofont', False)
-
- self.styles = {}
- self._make_styles()
-
- def _make_styles(self):
- for ttype, ndef in self.style:
- start = end = ''
- if ndef['color']:
- start += '[color=#%s]' % ndef['color']
- end = '[/color]' + end
- if ndef['bold']:
- start += '[b]'
- end = '[/b]' + end
- if ndef['italic']:
- start += '[i]'
- end = '[/i]' + end
- if ndef['underline']:
- start += '[u]'
- end = '[/u]' + end
- # there are no common BBcodes for background-color and border
-
- self.styles[ttype] = start, end
-
- def format_unencoded(self, tokensource, outfile):
- if self._code:
- outfile.write('[code]')
- if self._mono:
- outfile.write('[font=monospace]')
-
- lastval = ''
- lasttype = None
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- if ttype == lasttype:
- lastval += value
- else:
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
- lastval = value
- lasttype = ttype
-
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
-
- if self._mono:
- outfile.write('[/font]')
- if self._code:
- outfile.write('[/code]')
- if self._code or self._mono:
- outfile.write('\n')
+ :license: BSD, see LICENSE for details.
+"""
+
+
+from pygments.formatter import Formatter
+from pygments.util import get_bool_opt
+
+__all__ = ['BBCodeFormatter']
+
+
+class BBCodeFormatter(Formatter):
+ """
+ Format tokens with BBcodes. These formatting codes are used by many
+ bulletin boards, so you can highlight your sourcecode with pygments before
+ posting it there.
+
+ This formatter has no support for background colors and borders, as there
+ are no common BBcode tags for that.
+
+ Some board systems (e.g. phpBB) don't support colors in their [code] tag,
+ so you can't use the highlighting together with that tag.
+ Text in a [code] tag usually is shown with a monospace font (which this
+ formatter can do with the ``monofont`` option) and no spaces (which you
+ need for indentation) are removed.
+
+ Additional options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+
+ `codetag`
+ If set to true, put the output into ``[code]`` tags (default:
+ ``false``)
+
+ `monofont`
+ If set to true, add a tag to show the code with a monospace font
+ (default: ``false``).
+ """
+ name = 'BBCode'
+ aliases = ['bbcode', 'bb']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self._code = get_bool_opt(options, 'codetag', False)
+ self._mono = get_bool_opt(options, 'monofont', False)
+
+ self.styles = {}
+ self._make_styles()
+
+ def _make_styles(self):
+ for ttype, ndef in self.style:
+ start = end = ''
+ if ndef['color']:
+ start += '[color=#%s]' % ndef['color']
+ end = '[/color]' + end
+ if ndef['bold']:
+ start += '[b]'
+ end = '[/b]' + end
+ if ndef['italic']:
+ start += '[i]'
+ end = '[/i]' + end
+ if ndef['underline']:
+ start += '[u]'
+ end = '[/u]' + end
+ # there are no common BBcodes for background-color and border
+
+ self.styles[ttype] = start, end
+
+ def format_unencoded(self, tokensource, outfile):
+ if self._code:
+ outfile.write('[code]')
+ if self._mono:
+ outfile.write('[font=monospace]')
+
+ lastval = ''
+ lasttype = None
+
+ for ttype, value in tokensource:
+ while ttype not in self.styles:
+ ttype = ttype.parent
+ if ttype == lasttype:
+ lastval += value
+ else:
+ if lastval:
+ start, end = self.styles[lasttype]
+ outfile.write(''.join((start, lastval, end)))
+ lastval = value
+ lasttype = ttype
+
+ if lastval:
+ start, end = self.styles[lasttype]
+ outfile.write(''.join((start, lastval, end)))
+
+ if self._mono:
+ outfile.write('[/font]')
+ if self._code:
+ outfile.write('[/code]')
+ if self._code or self._mono:
+ outfile.write('\n')
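The BBCodeFormatter above wraps tokens in BBCode color/style tags, optionally inside [code] and [font=monospace] blocks controlled by the codetag and monofont options. A short usage sketch (illustrative only, not part of the diff; it assumes a standard Pygments installation):

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import BBCodeFormatter

    snippet = "def foo(bar):\n    return bar * 2\n"
    bbcode = highlight(snippet, PythonLexer(),
                       BBCodeFormatter(codetag=True, monofont=True))
    print(bbcode)   # e.g. [code][font=monospace]...[/font][/code]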
diff --git a/contrib/python/Pygments/py3/pygments/formatters/html.py b/contrib/python/Pygments/py3/pygments/formatters/html.py
index f3a77a2ddf..8baac4e48c 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/html.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/html.py
@@ -1,352 +1,352 @@
-"""
- pygments.formatters.html
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for HTML output.
-
+"""
+ pygments.formatters.html
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for HTML output.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import functools
-import os
-import sys
-import os.path
+import os
+import sys
+import os.path
from io import StringIO
-
-from pygments.formatter import Formatter
-from pygments.token import Token, Text, STANDARD_TYPES
+
+from pygments.formatter import Formatter
+from pygments.token import Token, Text, STANDARD_TYPES
from pygments.util import get_bool_opt, get_int_opt, get_list_opt
-
-try:
- import ctags
-except ImportError:
- ctags = None
-
-__all__ = ['HtmlFormatter']
-
-
-_escape_html_table = {
+
+try:
+ import ctags
+except ImportError:
+ ctags = None
+
+__all__ = ['HtmlFormatter']
+
+
+_escape_html_table = {
ord('&'): '&amp;',
ord('<'): '&lt;',
ord('>'): '&gt;',
ord('"'): '&quot;',
ord("'"): '&#39;',
-}
-
-
-def escape_html(text, table=_escape_html_table):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.translate(table)
-
+}
+
+
+def escape_html(text, table=_escape_html_table):
+ """Escape &, <, > as well as single and double quotes for HTML."""
+ return text.translate(table)
+
def webify(color):
if color.startswith('calc') or color.startswith('var'):
return color
else:
return '#' + color
-
-
-def _get_ttype_class(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = '-' + ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-CSSFILE_TEMPLATE = '''\
+
+
+def _get_ttype_class(ttype):
+ fname = STANDARD_TYPES.get(ttype)
+ if fname:
+ return fname
+ aname = ''
+ while fname is None:
+ aname = '-' + ttype[-1] + aname
+ ttype = ttype.parent
+ fname = STANDARD_TYPES.get(ttype)
+ return fname + aname
+
+
+CSSFILE_TEMPLATE = '''\
/*
generated by Pygments <https://pygments.org/>
Copyright 2006-2021 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
*/
-%(styledefs)s
-'''
-
-DOC_HEADER = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
+%(styledefs)s
+'''
+
+DOC_HEADER = '''\
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
+ "http://www.w3.org/TR/html4/strict.dtd">
<!--
generated by Pygments <https://pygments.org/>
Copyright 2006-2021 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
-->
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <style type="text/css">
-''' + CSSFILE_TEMPLATE + '''
- </style>
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_HEADER_EXTERNALCSS = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
-
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <link rel="stylesheet" href="%(cssfile)s" type="text/css">
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_FOOTER = '''\
-</body>
-</html>
-'''
-
-
-class HtmlFormatter(Formatter):
- r"""
- Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped
- in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass`
- option.
-
- If the `linenos` option is set to ``"table"``, the ``<pre>`` is
- additionally wrapped inside a ``<table>`` which has one row and two
- cells: one containing the line numbers and one containing the code.
- Example:
-
- .. sourcecode:: html
-
- <div class="highlight" >
- <table><tr>
- <td class="linenos" title="click to toggle"
- onclick="with (this.firstChild.style)
- { display = (display == '') ? 'none' : '' }">
- <pre>1
- 2</pre>
- </td>
- <td class="code">
- <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
- <span class="Ke">pass</span>
- </pre>
- </td>
- </tr></table></div>
-
- (whitespace added to improve clarity).
-
- Wrapping can be disabled using the `nowrap` option.
-
- A list of lines can be specified using the `hl_lines` option to make these
- lines highlighted (as of Pygments 0.11).
-
- With the `full` option, a complete HTML 4 document is output, including
- the style definitions inside a ``<style>`` tag, or in a separate file if
- the `cssfile` option is given.
-
- When `tagsfile` is set to the path of a ctags index file, it is used to
- generate hyperlinks from names to their definition. You must enable
- `lineanchors` and run ctags with the `-n` option for this to work. The
- `python-ctags` module from PyPI must be installed to use this feature;
- otherwise a `RuntimeError` will be raised.
-
- The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
- containing CSS rules for the CSS classes used by the formatter. The
- argument `arg` can be used to specify additional CSS selectors that
- are prepended to the classes. A call `fmter.get_style_defs('td .code')`
- would result in the following CSS classes:
-
- .. sourcecode:: css
-
- td .code .kw { font-weight: bold; color: #00FF00 }
- td .code .cm { color: #999999 }
- ...
-
- If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
- `get_style_defs()` method to request multiple prefixes for the tokens:
-
- .. sourcecode:: python
-
- formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
-
- The output would then look like this:
-
- .. sourcecode:: css
-
- div.syntax pre .kw,
- pre.syntax .kw { font-weight: bold; color: #00FF00 }
- div.syntax pre .cm,
- pre.syntax .cm { color: #999999 }
- ...
-
- Additional options accepted:
-
- `nowrap`
- If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>``
- tag. This disables most other options (default: ``False``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``). This option has no effect if the `cssfile`
- and `noclobber_cssfile` option are given and the file specified in
- `cssfile` exists.
-
- `noclasses`
+<html>
+<head>
+ <title>%(title)s</title>
+ <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
+ <style type="text/css">
+''' + CSSFILE_TEMPLATE + '''
+ </style>
+</head>
+<body>
+<h2>%(title)s</h2>
+
+'''
+
+DOC_HEADER_EXTERNALCSS = '''\
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
+ "http://www.w3.org/TR/html4/strict.dtd">
+
+<html>
+<head>
+ <title>%(title)s</title>
+ <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
+ <link rel="stylesheet" href="%(cssfile)s" type="text/css">
+</head>
+<body>
+<h2>%(title)s</h2>
+
+'''
+
+DOC_FOOTER = '''\
+</body>
+</html>
+'''
+
+
+class HtmlFormatter(Formatter):
+ r"""
+ Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped
+ in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass`
+ option.
+
+ If the `linenos` option is set to ``"table"``, the ``<pre>`` is
+ additionally wrapped inside a ``<table>`` which has one row and two
+ cells: one containing the line numbers and one containing the code.
+ Example:
+
+ .. sourcecode:: html
+
+ <div class="highlight" >
+ <table><tr>
+ <td class="linenos" title="click to toggle"
+ onclick="with (this.firstChild.style)
+ { display = (display == '') ? 'none' : '' }">
+ <pre>1
+ 2</pre>
+ </td>
+ <td class="code">
+ <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
+ <span class="Ke">pass</span>
+ </pre>
+ </td>
+ </tr></table></div>
+
+ (whitespace added to improve clarity).
+
+ Wrapping can be disabled using the `nowrap` option.
+
+ A list of lines can be specified using the `hl_lines` option to make these
+ lines highlighted (as of Pygments 0.11).
+
+ With the `full` option, a complete HTML 4 document is output, including
+ the style definitions inside a ``<style>`` tag, or in a separate file if
+ the `cssfile` option is given.
+
+ When `tagsfile` is set to the path of a ctags index file, it is used to
+ generate hyperlinks from names to their definition. You must enable
+ `lineanchors` and run ctags with the `-n` option for this to work. The
+ `python-ctags` module from PyPI must be installed to use this feature;
+ otherwise a `RuntimeError` will be raised.
+
+ The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
+ containing CSS rules for the CSS classes used by the formatter. The
+ argument `arg` can be used to specify additional CSS selectors that
+ are prepended to the classes. A call `fmter.get_style_defs('td .code')`
+ would result in the following CSS classes:
+
+ .. sourcecode:: css
+
+ td .code .kw { font-weight: bold; color: #00FF00 }
+ td .code .cm { color: #999999 }
+ ...
+
+ If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
+ `get_style_defs()` method to request multiple prefixes for the tokens:
+
+ .. sourcecode:: python
+
+ formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
+
+ The output would then look like this:
+
+ .. sourcecode:: css
+
+ div.syntax pre .kw,
+ pre.syntax .kw { font-weight: bold; color: #00FF00 }
+ div.syntax pre .cm,
+ pre.syntax .cm { color: #999999 }
+ ...
+
+ Additional options accepted:
+
+ `nowrap`
+ If set to ``True``, don't wrap the tokens at all, not even inside a ``<pre>``
+ tag. This disables most other options (default: ``False``).
+
+ `full`
+ Tells the formatter to output a "full" document, i.e. a complete
+ self-contained document (default: ``False``).
+
+ `title`
+ If `full` is true, the title that should be used to caption the
+ document (default: ``''``).
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``). This option has no effect if the `cssfile`
+ and `noclobber_cssfile` option are given and the file specified in
+ `cssfile` exists.
+
+ `noclasses`
If set to true, token ``<span>`` tags (as well as line number elements)
will not use CSS classes, but inline styles. This is not recommended
for larger pieces of code since it increases output size by quite a bit
(default: ``False``).
-
- `classprefix`
- Since the token types use relatively short class names, they may clash
- with some of your own class names. In this case you can use the
- `classprefix` option to give a string to prepend to all Pygments-generated
- CSS class names for token types.
- Note that this option also affects the output of `get_style_defs()`.
-
- `cssclass`
- CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
- If you set this option, the default selector for `get_style_defs()`
- will be this class.
-
- .. versionadded:: 0.9
- If you select the ``'table'`` line numbers, the wrapping table will
- have a CSS class of this string plus ``'table'``, the default is
- accordingly ``'highlighttable'``.
-
- `cssstyles`
- Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
-
- `prestyles`
- Inline CSS styles for the ``<pre>`` tag (default: ``''``).
-
- .. versionadded:: 0.11
-
- `cssfile`
- If the `full` option is true and this option is given, it must be the
- name of an external file. If the filename does not include an absolute
- path, the file's path will be assumed to be relative to the main output
- file's path, if the latter can be found. The stylesheet is then written
- to this file instead of the HTML file.
-
- .. versionadded:: 0.6
-
- `noclobber_cssfile`
- If `cssfile` is given and the specified file exists, the css file will
- not be overwritten. This allows the use of the `full` option in
- combination with a user specified css file. Default is ``False``.
-
- .. versionadded:: 1.1
-
- `linenos`
- If set to ``'table'``, output line numbers as a table with two cells,
- one containing the line numbers, the other the whole code. This is
- copy-and-paste-friendly, but may cause alignment problems with some
- browsers or fonts. If set to ``'inline'``, the line numbers will be
- integrated in the ``<pre>`` tag that contains the code (that setting
- is *new in Pygments 0.8*).
-
- For compatibility with Pygments 0.7 and earlier, every true value
- except ``'inline'`` means the same as ``'table'`` (in particular, that
- means also ``True``).
-
- The default value is ``False``, which means no line numbers at all.
-
- **Note:** with the default ("table") line number mechanism, the line
- numbers and code can have different line heights in Internet Explorer
- unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
- CSS property (you get the default line spacing with ``line-height:
- 125%``).
-
- `hl_lines`
+
+ `classprefix`
+ Since the token types use relatively short class names, they may clash
+ with some of your own class names. In this case you can use the
+ `classprefix` option to give a string to prepend to all Pygments-generated
+ CSS class names for token types.
+ Note that this option also affects the output of `get_style_defs()`.
+
+ `cssclass`
+ CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
+ If you set this option, the default selector for `get_style_defs()`
+ will be this class.
+
+ .. versionadded:: 0.9
+ If you select the ``'table'`` line numbers, the wrapping table will
+ have a CSS class of this string plus ``'table'``, the default is
+ accordingly ``'highlighttable'``.
+
+ `cssstyles`
+ Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
+
+ `prestyles`
+ Inline CSS styles for the ``<pre>`` tag (default: ``''``).
+
+ .. versionadded:: 0.11
+
+ `cssfile`
+ If the `full` option is true and this option is given, it must be the
+ name of an external file. If the filename does not include an absolute
+ path, the file's path will be assumed to be relative to the main output
+ file's path, if the latter can be found. The stylesheet is then written
+ to this file instead of the HTML file.
+
+ .. versionadded:: 0.6
+
+ `noclobber_cssfile`
+ If `cssfile` is given and the specified file exists, the css file will
+ not be overwritten. This allows the use of the `full` option in
+ combination with a user specified css file. Default is ``False``.
+
+ .. versionadded:: 1.1
+
+ `linenos`
+ If set to ``'table'``, output line numbers as a table with two cells,
+ one containing the line numbers, the other the whole code. This is
+ copy-and-paste-friendly, but may cause alignment problems with some
+ browsers or fonts. If set to ``'inline'``, the line numbers will be
+ integrated in the ``<pre>`` tag that contains the code (that setting
+ is *new in Pygments 0.8*).
+
+ For compatibility with Pygments 0.7 and earlier, every true value
+ except ``'inline'`` means the same as ``'table'`` (in particular, that
+ means also ``True``).
+
+ The default value is ``False``, which means no line numbers at all.
+
+ **Note:** with the default ("table") line number mechanism, the line
+ numbers and code can have different line heights in Internet Explorer
+ unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
+ CSS property (you get the default line spacing with ``line-height:
+ 125%``).
+
+ `hl_lines`
Specify a list of lines to be highlighted. The line numbers are always
relative to the input (i.e. the first line is line 1) and are
independent of `linenostart`.
-
- .. versionadded:: 0.11
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `linenospecial`
- If set to a number n > 0, every nth line number is given the CSS
- class ``"special"`` (default: ``0``).
-
- `nobackground`
- If set to ``True``, the formatter won't output the background color
- for the wrapping element (this automatically defaults to ``False``
- when there is no wrapping element [eg: no argument for the
- `get_syntax_defs` method given]) (default: ``False``).
-
- .. versionadded:: 0.6
-
- `lineseparator`
- This string is output between lines of code. It defaults to ``"\n"``,
- which is enough to break a line inside ``<pre>`` tags, but you can
- e.g. set it to ``"<br>"`` to get HTML line breaks.
-
- .. versionadded:: 0.7
-
- `lineanchors`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
+
+ .. versionadded:: 0.11
+
+ `linenostart`
+ The line number for the first line (default: ``1``).
+
+ `linenostep`
+ If set to a number n > 1, only every nth line number is printed.
+
+ `linenospecial`
+ If set to a number n > 0, every nth line number is given the CSS
+ class ``"special"`` (default: ``0``).
+
+ `nobackground`
+ If set to ``True``, the formatter won't output the background color
+ for the wrapping element (this automatically defaults to ``False``
+ when there is no wrapping element [eg: no argument for the
+ `get_syntax_defs` method given]) (default: ``False``).
+
+ .. versionadded:: 0.6
+
+ `lineseparator`
+ This string is output between lines of code. It defaults to ``"\n"``,
+ which is enough to break a line inside ``<pre>`` tags, but you can
+ e.g. set it to ``"<br>"`` to get HTML line breaks.
+
+ .. versionadded:: 0.7
+
+ `lineanchors`
+ If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
output line in an anchor tag with an ``id`` (and `name`) of ``foo-linenumber``.
- This allows easy linking to certain lines.
-
- .. versionadded:: 0.9
-
- `linespans`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in a span tag with an ``id`` of ``foo-linenumber``.
- This allows easy access to lines via javascript.
-
- .. versionadded:: 1.6
-
- `anchorlinenos`
- If set to `True`, will wrap line numbers in <a> tags. Used in
- combination with `linenos` and `lineanchors`.
-
- `tagsfile`
- If set to the path of a ctags file, wrap names in anchor tags that
- link to their definitions. `lineanchors` should be used, and the
- tags file should specify line numbers (see the `-n` option to ctags).
-
- .. versionadded:: 1.6
-
- `tagurlformat`
- A string formatting pattern used to generate links to ctags definitions.
- Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
- Defaults to an empty string, resulting in just `#prefix-number` links.
-
- .. versionadded:: 1.6
-
- `filename`
+ This allows easy linking to certain lines.
+
+ .. versionadded:: 0.9
+
+ `linespans`
+ If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
+ output line in a span tag with an ``id`` of ``foo-linenumber``.
+ This allows easy access to lines via javascript.
+
+ .. versionadded:: 1.6
+
+ `anchorlinenos`
+ If set to `True`, will wrap line numbers in <a> tags. Used in
+ combination with `linenos` and `lineanchors`.
+
+ `tagsfile`
+ If set to the path of a ctags file, wrap names in anchor tags that
+ link to their definitions. `lineanchors` should be used, and the
+ tags file should specify line numbers (see the `-n` option to ctags).
+
+ .. versionadded:: 1.6
+
+ `tagurlformat`
+ A string formatting pattern used to generate links to ctags definitions.
+ Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
+ Defaults to an empty string, resulting in just `#prefix-number` links.
+
+ .. versionadded:: 1.6
+
+ `filename`
A string used to generate a filename when rendering ``<pre>`` blocks,
for example if displaying source code. If `linenos` is set to
``'table'`` then the filename will be rendered in an initial row
containing a single `<th>` which spans both columns.
-
- .. versionadded:: 2.1
-
+
+ .. versionadded:: 2.1
+
`wrapcode`
Wrap the code inside ``<pre>`` blocks using ``<code>``, as recommended
by the HTML5 specification.
-
+
.. versionadded:: 2.4
`debug_token_types`
@@ -356,124 +356,124 @@ class HtmlFormatter(Formatter):
.. versionadded:: 2.10
- **Subclassing the HTML formatter**
-
- .. versionadded:: 0.7
-
- The HTML formatter is now built in a way that allows easy subclassing, thus
- customizing the output HTML code. The `format()` method calls
- `self._format_lines()` which returns a generator that yields tuples of ``(1,
- line)``, where the ``1`` indicates that the ``line`` is a line of the
- formatted source code.
-
- If the `nowrap` option is set, the generator is then iterated over and the
- resulting HTML is output.
-
- Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
- other generators. These may add some HTML code to the one generated by
- `_format_lines()`, either by modifying the lines generated by the latter,
- then yielding them again with ``(1, line)``, and/or by yielding other HTML
- code before or after the lines, with ``(0, html)``. The distinction between
- source lines and other code makes it possible to wrap the generator multiple
- times.
-
- The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
-
- A custom `HtmlFormatter` subclass could look like this:
-
- .. sourcecode:: python
-
- class CodeHtmlFormatter(HtmlFormatter):
-
- def wrap(self, source, outfile):
- return self._wrap_code(source)
-
- def _wrap_code(self, source):
- yield 0, '<code>'
- for i, t in source:
- if i == 1:
- # it's a line of formatted code
- t += '<br>'
- yield i, t
- yield 0, '</code>'
-
- This results in wrapping the formatted lines with a ``<code>`` tag, where the
- source lines are broken using ``<br>`` tags.
-
- After calling `wrap()`, the `format()` method also adds the "line numbers"
- and/or "full document" wrappers if the respective options are set. Then, all
- HTML yielded by the wrapped generator is output.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.title = self._decodeifneeded(self.title)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.noclasses = get_bool_opt(options, 'noclasses', False)
- self.classprefix = options.get('classprefix', '')
- self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
- self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
- self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
- self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
- self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
- self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
- self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
- self.filename = self._decodeifneeded(options.get('filename', ''))
+ **Subclassing the HTML formatter**
+
+ .. versionadded:: 0.7
+
+ The HTML formatter is now built in a way that allows easy subclassing, thus
+ customizing the output HTML code. The `format()` method calls
+ `self._format_lines()` which returns a generator that yields tuples of ``(1,
+ line)``, where the ``1`` indicates that the ``line`` is a line of the
+ formatted source code.
+
+ If the `nowrap` option is set, the generator is then iterated over and the
+ resulting HTML is output.
+
+ Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
+ other generators. These may add some HTML code to the one generated by
+ `_format_lines()`, either by modifying the lines generated by the latter,
+ then yielding them again with ``(1, line)``, and/or by yielding other HTML
+ code before or after the lines, with ``(0, html)``. The distinction between
+ source lines and other code makes it possible to wrap the generator multiple
+ times.
+
+ The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
+
+ A custom `HtmlFormatter` subclass could look like this:
+
+ .. sourcecode:: python
+
+ class CodeHtmlFormatter(HtmlFormatter):
+
+ def wrap(self, source, outfile):
+ return self._wrap_code(source)
+
+ def _wrap_code(self, source):
+ yield 0, '<code>'
+ for i, t in source:
+ if i == 1:
+ # it's a line of formatted code
+ t += '<br>'
+ yield i, t
+ yield 0, '</code>'
+
+ This results in wrapping the formatted lines with a ``<code>`` tag, where the
+ source lines are broken using ``<br>`` tags.
+
+ After calling `wrap()`, the `format()` method also adds the "line numbers"
+ and/or "full document" wrappers if the respective options are set. Then, all
+ HTML yielded by the wrapped generator is output.
+ """
+
+ name = 'HTML'
+ aliases = ['html']
+ filenames = ['*.html', '*.htm']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.title = self._decodeifneeded(self.title)
+ self.nowrap = get_bool_opt(options, 'nowrap', False)
+ self.noclasses = get_bool_opt(options, 'noclasses', False)
+ self.classprefix = options.get('classprefix', '')
+ self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
+ self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
+ self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
+ self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
+ self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
+ self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
+ self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
+ self.filename = self._decodeifneeded(options.get('filename', ''))
self.wrapcode = get_bool_opt(options, 'wrapcode', False)
self.span_element_openers = {}
self.debug_token_types = get_bool_opt(options, 'debug_token_types', False)
-
- if self.tagsfile:
- if not ctags:
- raise RuntimeError('The "ctags" package must be installed '
- 'to be able to use the "tagsfile" feature.')
- self._ctags = ctags.CTags(self.tagsfile)
-
- linenos = options.get('linenos', False)
- if linenos == 'inline':
- self.linenos = 2
- elif linenos:
- # compatibility with <= 0.7
- self.linenos = 1
- else:
- self.linenos = 0
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
- self.nobackground = get_bool_opt(options, 'nobackground', False)
+
+ if self.tagsfile:
+ if not ctags:
+ raise RuntimeError('The "ctags" package must be installed '
+ 'to be able to use the "tagsfile" feature.')
+ self._ctags = ctags.CTags(self.tagsfile)
+
+ linenos = options.get('linenos', False)
+ if linenos == 'inline':
+ self.linenos = 2
+ elif linenos:
+ # compatibility with <= 0.7
+ self.linenos = 1
+ else:
+ self.linenos = 0
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
+ self.nobackground = get_bool_opt(options, 'nobackground', False)
self.lineseparator = options.get('lineseparator', '\n')
- self.lineanchors = options.get('lineanchors', '')
- self.linespans = options.get('linespans', '')
+ self.lineanchors = options.get('lineanchors', '')
+ self.linespans = options.get('linespans', '')
self.anchorlinenos = get_bool_opt(options, 'anchorlinenos', False)
- self.hl_lines = set()
- for lineno in get_list_opt(options, 'hl_lines', []):
- try:
- self.hl_lines.add(int(lineno))
- except ValueError:
- pass
-
- self._create_stylesheet()
-
- def _get_css_class(self, ttype):
- """Return the css class of this token type prefixed with
- the classprefix option."""
- ttypeclass = _get_ttype_class(ttype)
- if ttypeclass:
- return self.classprefix + ttypeclass
- return ''
-
- def _get_css_classes(self, ttype):
+ self.hl_lines = set()
+ for lineno in get_list_opt(options, 'hl_lines', []):
+ try:
+ self.hl_lines.add(int(lineno))
+ except ValueError:
+ pass
+
+ self._create_stylesheet()
+
+ def _get_css_class(self, ttype):
+ """Return the css class of this token type prefixed with
+ the classprefix option."""
+ ttypeclass = _get_ttype_class(ttype)
+ if ttypeclass:
+ return self.classprefix + ttypeclass
+ return ''
+
+ def _get_css_classes(self, ttype):
"""Return the CSS classes of this token type prefixed with the classprefix option."""
- cls = self._get_css_class(ttype)
- while ttype not in STANDARD_TYPES:
- ttype = ttype.parent
- cls = self._get_css_class(ttype) + ' ' + cls
+ cls = self._get_css_class(ttype)
+ while ttype not in STANDARD_TYPES:
+ ttype = ttype.parent
+ cls = self._get_css_class(ttype) + ' ' + cls
return cls or ''
-
+
def _get_css_inline_styles(self, ttype):
"""Return the inline CSS styles for this token type."""
cclass = self.ttype2class.get(ttype)
@@ -482,36 +482,36 @@ class HtmlFormatter(Formatter):
cclass = self.ttype2class.get(ttype)
return cclass or ''
- def _create_stylesheet(self):
- t2c = self.ttype2class = {Token: ''}
- c2s = self.class2style = {}
- for ttype, ndef in self.style:
- name = self._get_css_class(ttype)
- style = ''
- if ndef['color']:
+ def _create_stylesheet(self):
+ t2c = self.ttype2class = {Token: ''}
+ c2s = self.class2style = {}
+ for ttype, ndef in self.style:
+ name = self._get_css_class(ttype)
+ style = ''
+ if ndef['color']:
style += 'color: %s; ' % webify(ndef['color'])
- if ndef['bold']:
- style += 'font-weight: bold; '
- if ndef['italic']:
- style += 'font-style: italic; '
- if ndef['underline']:
- style += 'text-decoration: underline; '
- if ndef['bgcolor']:
+ if ndef['bold']:
+ style += 'font-weight: bold; '
+ if ndef['italic']:
+ style += 'font-style: italic; '
+ if ndef['underline']:
+ style += 'text-decoration: underline; '
+ if ndef['bgcolor']:
style += 'background-color: %s; ' % webify(ndef['bgcolor'])
- if ndef['border']:
+ if ndef['border']:
style += 'border: 1px solid %s; ' % webify(ndef['border'])
- if style:
- t2c[ttype] = name
- # save len(ttype) to enable ordering the styles by
- # hierarchy (necessary for CSS cascading rules!)
- c2s[name] = (style[:-2], ttype, len(ttype))
-
- def get_style_defs(self, arg=None):
- """
- Return CSS style definitions for the classes produced by the current
- highlighting style. ``arg`` can be a string or list of selectors to
- insert before the token type classes.
- """
+ if style:
+ t2c[ttype] = name
+ # save len(ttype) to enable ordering the styles by
+ # hierarchy (necessary for CSS cascading rules!)
+ c2s[name] = (style[:-2], ttype, len(ttype))
+
+ def get_style_defs(self, arg=None):
+ """
+ Return CSS style definitions for the classes produced by the current
+ highlighting style. ``arg`` can be a string or list of selectors to
+ insert before the token type classes.
+ """
style_lines = []
style_lines.extend(self.get_linenos_style_defs())
@@ -572,23 +572,23 @@ class HtmlFormatter(Formatter):
return lines
def get_css_prefix(self, arg):
- if arg is None:
- arg = ('cssclass' in self.options and '.'+self.cssclass or '')
+ if arg is None:
+ arg = ('cssclass' in self.options and '.'+self.cssclass or '')
if isinstance(arg, str):
- args = [arg]
- else:
- args = list(arg)
-
- def prefix(cls):
- if cls:
- cls = '.' + cls
- tmp = []
- for arg in args:
- tmp.append((arg and arg + ' ' or '') + cls)
- return ', '.join(tmp)
-
+ args = [arg]
+ else:
+ args = list(arg)
+
+ def prefix(cls):
+ if cls:
+ cls = '.' + cls
+ tmp = []
+ for arg in args:
+ tmp.append((arg and arg + ' ' or '') + cls)
+ return ', '.join(tmp)
+
return prefix
-
+
@property
def _pre_style(self):
return 'line-height: 125%;'
@@ -607,70 +607,70 @@ class HtmlFormatter(Formatter):
self.style.line_number_special_background_color
)
- def _decodeifneeded(self, value):
- if isinstance(value, bytes):
- if self.encoding:
- return value.decode(self.encoding)
- return value.decode()
- return value
-
- def _wrap_full(self, inner, outfile):
- if self.cssfile:
- if os.path.isabs(self.cssfile):
- # it's an absolute filename
- cssfilename = self.cssfile
- else:
- try:
- filename = outfile.name
- if not filename or filename[0] == '<':
- # pseudo files, e.g. name == '<fdopen>'
- raise AttributeError
- cssfilename = os.path.join(os.path.dirname(filename),
- self.cssfile)
- except AttributeError:
- print('Note: Cannot determine output file name, '
- 'using current directory as base for the CSS file name',
- file=sys.stderr)
- cssfilename = self.cssfile
- # write CSS file only if noclobber_cssfile isn't given as an option.
- try:
- if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
+ def _decodeifneeded(self, value):
+ if isinstance(value, bytes):
+ if self.encoding:
+ return value.decode(self.encoding)
+ return value.decode()
+ return value
+
+ def _wrap_full(self, inner, outfile):
+ if self.cssfile:
+ if os.path.isabs(self.cssfile):
+ # it's an absolute filename
+ cssfilename = self.cssfile
+ else:
+ try:
+ filename = outfile.name
+ if not filename or filename[0] == '<':
+ # pseudo files, e.g. name == '<fdopen>'
+ raise AttributeError
+ cssfilename = os.path.join(os.path.dirname(filename),
+ self.cssfile)
+ except AttributeError:
+ print('Note: Cannot determine output file name, '
+ 'using current directory as base for the CSS file name',
+ file=sys.stderr)
+ cssfilename = self.cssfile
+ # write CSS file only if noclobber_cssfile isn't given as an option.
+ try:
+ if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
with open(cssfilename, "w") as cf:
cf.write(CSSFILE_TEMPLATE %
{'styledefs': self.get_style_defs('body')})
except OSError as err:
- err.strerror = 'Error writing CSS file: ' + err.strerror
- raise
-
- yield 0, (DOC_HEADER_EXTERNALCSS %
- dict(title=self.title,
- cssfile=self.cssfile,
- encoding=self.encoding))
- else:
- yield 0, (DOC_HEADER %
- dict(title=self.title,
- styledefs=self.get_style_defs('body'),
- encoding=self.encoding))
-
+ err.strerror = 'Error writing CSS file: ' + err.strerror
+ raise
+
+ yield 0, (DOC_HEADER_EXTERNALCSS %
+ dict(title=self.title,
+ cssfile=self.cssfile,
+ encoding=self.encoding))
+ else:
+ yield 0, (DOC_HEADER %
+ dict(title=self.title,
+ styledefs=self.get_style_defs('body'),
+ encoding=self.encoding))
+
yield from inner
- yield 0, DOC_FOOTER
-
- def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO()
- lncount = 0
- for t, line in inner:
- if t:
- lncount += 1
- dummyoutfile.write(line)
-
- fl = self.linenostart
- mw = len(str(lncount + fl - 1))
- sp = self.linenospecial
- st = self.linenostep
- la = self.lineanchors
- aln = self.anchorlinenos
- nocls = self.noclasses
-
+ yield 0, DOC_FOOTER
+
+ def _wrap_tablelinenos(self, inner):
+ dummyoutfile = StringIO()
+ lncount = 0
+ for t, line in inner:
+ if t:
+ lncount += 1
+ dummyoutfile.write(line)
+
+ fl = self.linenostart
+ mw = len(str(lncount + fl - 1))
+ sp = self.linenospecial
+ st = self.linenostep
+ la = self.lineanchors
+ aln = self.anchorlinenos
+ nocls = self.noclasses
+
lines = []
for i in range(fl, fl+lncount):
@@ -687,14 +687,14 @@ class HtmlFormatter(Formatter):
if nocls:
if special_line:
style = ' style="%s"' % self._linenos_special_style
- else:
+ else:
style = ' style="%s"' % self._linenos_style
else:
if special_line:
style = ' class="special"'
- else:
+ else:
style = ' class="normal"'
-
+
if style:
line = '<span%s>%s</span>' % (style, line)
@@ -711,37 +711,37 @@ class HtmlFormatter(Formatter):
'<span class="filename">' + self.filename + '</span></div>'
'</th></tr>')
- # in case you wonder about the seemingly redundant <div> here: since the
- # content in the other cell also is wrapped in a div, some browsers in
- # some configurations seem to mess up the formatting...
+ # in case you wonder about the seemingly redundant <div> here: since the
+ # content in the other cell also is wrapped in a div, some browsers in
+ # some configurations seem to mess up the formatting...
yield 0, (
'<table class="%stable">' % self.cssclass + filename_tr +
'<tr><td class="linenos"><div class="linenodiv"><pre>' +
ls + '</pre></div></td><td class="code">'
)
- yield 0, dummyoutfile.getvalue()
- yield 0, '</td></tr></table>'
-
- def _wrap_inlinelinenos(self, inner):
- # need a list of lines since we need the width of a single number :(
+ yield 0, dummyoutfile.getvalue()
+ yield 0, '</td></tr></table>'
+
+ def _wrap_inlinelinenos(self, inner):
+ # need a list of lines since we need the width of a single number :(
inner_lines = list(inner)
- sp = self.linenospecial
- st = self.linenostep
- num = self.linenostart
+ sp = self.linenospecial
+ st = self.linenostep
+ num = self.linenostart
mw = len(str(len(inner_lines) + num - 1))
la = self.lineanchors
aln = self.anchorlinenos
nocls = self.noclasses
-
+
for _, inner_line in inner_lines:
print_line = num % st == 0
special_line = sp and num % sp == 0
if print_line:
line = '%*d' % (mw, num)
- else:
+ else:
line = ' ' * mw
-
+
if nocls:
if special_line:
style = ' style="%s"' % self._linenos_special_style
@@ -765,59 +765,59 @@ class HtmlFormatter(Formatter):
yield 1, linenos + inner_line
num += 1
- def _wrap_lineanchors(self, inner):
- s = self.lineanchors
- # subtract 1 since we have to increment i *before* yielding
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
+ def _wrap_lineanchors(self, inner):
+ s = self.lineanchors
+ # subtract 1 since we have to increment i *before* yielding
+ i = self.linenostart - 1
+ for t, line in inner:
+ if t:
+ i += 1
href = "" if self.linenos else ' href="#%s-%d"' % (s, i)
yield 1, '<a id="%s-%d" name="%s-%d"%s></a>' % (s, i, s, i, href) + line
- else:
- yield 0, line
-
- def _wrap_linespans(self, inner):
- s = self.linespans
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
- yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
- else:
- yield 0, line
-
- def _wrap_div(self, inner):
- style = []
- if (self.noclasses and not self.nobackground and
- self.style.background_color is not None):
- style.append('background: %s' % (self.style.background_color,))
- if self.cssstyles:
- style.append(self.cssstyles)
- style = '; '.join(style)
-
- yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
- (style and (' style="%s"' % style)) + '>')
+ else:
+ yield 0, line
+
+ def _wrap_linespans(self, inner):
+ s = self.linespans
+ i = self.linenostart - 1
+ for t, line in inner:
+ if t:
+ i += 1
+ yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
+ else:
+ yield 0, line
+
+ def _wrap_div(self, inner):
+ style = []
+ if (self.noclasses and not self.nobackground and
+ self.style.background_color is not None):
+ style.append('background: %s' % (self.style.background_color,))
+ if self.cssstyles:
+ style.append(self.cssstyles)
+ style = '; '.join(style)
+
+ yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
+ (style and (' style="%s"' % style)) + '>')
yield from inner
- yield 0, '</div>\n'
-
- def _wrap_pre(self, inner):
- style = []
- if self.prestyles:
- style.append(self.prestyles)
- if self.noclasses:
+ yield 0, '</div>\n'
+
+ def _wrap_pre(self, inner):
+ style = []
+ if self.prestyles:
+ style.append(self.prestyles)
+ if self.noclasses:
style.append(self._pre_style)
- style = '; '.join(style)
-
+ style = '; '.join(style)
+
if self.filename and self.linenos != 1:
- yield 0, ('<span class="filename">' + self.filename + '</span>')
-
+ yield 0, ('<span class="filename">' + self.filename + '</span>')
+
# the empty span here is to keep leading empty lines from being
# ignored by HTML parsers
yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
yield from inner
- yield 0, '</pre>'
-
+ yield 0, '</pre>'
+
def _wrap_code(self, inner):
yield 0, '<code>'
yield from inner
@@ -828,18 +828,18 @@ class HtmlFormatter(Formatter):
"""HTML-escape a value and split it by newlines."""
return value.translate(_escape_html_table).split('\n')
- def _format_lines(self, tokensource):
- """
- Just format the tokens, without any wrapping tags.
- Yield individual lines.
- """
- nocls = self.noclasses
- lsep = self.lineseparator
- tagsfile = self.tagsfile
-
- lspan = ''
- line = []
- for ttype, value in tokensource:
+ def _format_lines(self, tokensource):
+ """
+ Just format the tokens, without any wrapping tags.
+ Yield individual lines.
+ """
+ nocls = self.noclasses
+ lsep = self.lineseparator
+ tagsfile = self.tagsfile
+
+ lspan = ''
+ line = []
+ for ttype, value in tokensource:
try:
cspan = self.span_element_openers[ttype]
except KeyError:
@@ -858,126 +858,126 @@ class HtmlFormatter(Formatter):
else:
cspan = ''
self.span_element_openers[ttype] = cspan
-
+
parts = self._translate_parts(value)
-
- if tagsfile and ttype in Token.Name:
- filename, linenumber = self._lookup_ctag(value)
- if linenumber:
- base, filename = os.path.split(filename)
- if base:
- base += '/'
- filename, extension = os.path.splitext(filename)
- url = self.tagurlformat % {'path': base, 'fname': filename,
- 'fext': extension}
- parts[0] = "<a href=\"%s#%s-%d\">%s" % \
- (url, self.lineanchors, linenumber, parts[0])
- parts[-1] = parts[-1] + "</a>"
-
- # for all but the last line
- for part in parts[:-1]:
- if line:
- if lspan != cspan:
- line.extend(((lspan and '</span>'), cspan, part,
- (cspan and '</span>'), lsep))
- else: # both are the same
- line.extend((part, (lspan and '</span>'), lsep))
- yield 1, ''.join(line)
- line = []
- elif part:
- yield 1, ''.join((cspan, part, (cspan and '</span>'), lsep))
- else:
- yield 1, lsep
- # for the last line
- if line and parts[-1]:
- if lspan != cspan:
- line.extend(((lspan and '</span>'), cspan, parts[-1]))
- lspan = cspan
- else:
- line.append(parts[-1])
- elif parts[-1]:
- line = [cspan, parts[-1]]
- lspan = cspan
- # else we neither have to open a new span nor set lspan
-
- if line:
- line.extend(((lspan and '</span>'), lsep))
- yield 1, ''.join(line)
-
- def _lookup_ctag(self, token):
- entry = ctags.TagEntry()
+
+ if tagsfile and ttype in Token.Name:
+ filename, linenumber = self._lookup_ctag(value)
+ if linenumber:
+ base, filename = os.path.split(filename)
+ if base:
+ base += '/'
+ filename, extension = os.path.splitext(filename)
+ url = self.tagurlformat % {'path': base, 'fname': filename,
+ 'fext': extension}
+ parts[0] = "<a href=\"%s#%s-%d\">%s" % \
+ (url, self.lineanchors, linenumber, parts[0])
+ parts[-1] = parts[-1] + "</a>"
+
+ # for all but the last line
+ for part in parts[:-1]:
+ if line:
+ if lspan != cspan:
+ line.extend(((lspan and '</span>'), cspan, part,
+ (cspan and '</span>'), lsep))
+ else: # both are the same
+ line.extend((part, (lspan and '</span>'), lsep))
+ yield 1, ''.join(line)
+ line = []
+ elif part:
+ yield 1, ''.join((cspan, part, (cspan and '</span>'), lsep))
+ else:
+ yield 1, lsep
+ # for the last line
+ if line and parts[-1]:
+ if lspan != cspan:
+ line.extend(((lspan and '</span>'), cspan, parts[-1]))
+ lspan = cspan
+ else:
+ line.append(parts[-1])
+ elif parts[-1]:
+ line = [cspan, parts[-1]]
+ lspan = cspan
+ # else we neither have to open a new span nor set lspan
+
+ if line:
+ line.extend(((lspan and '</span>'), lsep))
+ yield 1, ''.join(line)
+
+ def _lookup_ctag(self, token):
+ entry = ctags.TagEntry()
if self._ctags.find(entry, token.encode(), 0):
- return entry['file'], entry['lineNumber']
- else:
- return None, None
-
- def _highlight_lines(self, tokensource):
- """
-        Highlight the lines specified in the `hl_lines` option by
- post-processing the token stream coming from `_format_lines`.
- """
- hls = self.hl_lines
-
- for i, (t, value) in enumerate(tokensource):
- if t != 1:
- yield t, value
- if i + 1 in hls: # i + 1 because Python indexes start at 0
- if self.noclasses:
- style = ''
- if self.style.highlight_color is not None:
- style = (' style="background-color: %s"' %
- (self.style.highlight_color,))
- yield 1, '<span%s>%s</span>' % (style, value)
- else:
- yield 1, '<span class="hll">%s</span>' % value
- else:
- yield 1, value
-
- def wrap(self, source, outfile):
- """
- Wrap the ``source``, which is a generator yielding
- individual lines, in custom generators. See docstring
- for `format`. Can be overridden.
- """
+ return entry['file'], entry['lineNumber']
+ else:
+ return None, None
+
+ def _highlight_lines(self, tokensource):
+ """
+        Highlight the lines specified in the `hl_lines` option by
+ post-processing the token stream coming from `_format_lines`.
+ """
+ hls = self.hl_lines
+
+ for i, (t, value) in enumerate(tokensource):
+ if t != 1:
+ yield t, value
+ if i + 1 in hls: # i + 1 because Python indexes start at 0
+ if self.noclasses:
+ style = ''
+ if self.style.highlight_color is not None:
+ style = (' style="background-color: %s"' %
+ (self.style.highlight_color,))
+ yield 1, '<span%s>%s</span>' % (style, value)
+ else:
+ yield 1, '<span class="hll">%s</span>' % value
+ else:
+ yield 1, value
+
+ def wrap(self, source, outfile):
+ """
+ Wrap the ``source``, which is a generator yielding
+ individual lines, in custom generators. See docstring
+ for `format`. Can be overridden.
+ """
if self.wrapcode:
return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
else:
return self._wrap_div(self._wrap_pre(source))
-
- def format_unencoded(self, tokensource, outfile):
- """
- The formatting process uses several nested generators; which of
- them are used is determined by the user's options.
-
- Each generator should take at least one argument, ``inner``,
- and wrap the pieces of text generated by this.
-
- Always yield 2-tuples: (code, text). If "code" is 1, the text
- is part of the original tokensource being highlighted, if it's
- 0, the text is some piece of wrapping. This makes it possible to
- use several different wrappers that process the original source
- linewise, e.g. line number generators.
- """
- source = self._format_lines(tokensource)
+
+ def format_unencoded(self, tokensource, outfile):
+ """
+ The formatting process uses several nested generators; which of
+ them are used is determined by the user's options.
+
+ Each generator should take at least one argument, ``inner``,
+ and wrap the pieces of text generated by this.
+
+ Always yield 2-tuples: (code, text). If "code" is 1, the text
+ is part of the original tokensource being highlighted, if it's
+ 0, the text is some piece of wrapping. This makes it possible to
+ use several different wrappers that process the original source
+ linewise, e.g. line number generators.
+ """
+ source = self._format_lines(tokensource)
# As a special case, we wrap line numbers before line highlighting
# so the line numbers get wrapped in the highlighting tag.
if not self.nowrap and self.linenos == 2:
source = self._wrap_inlinelinenos(source)
- if self.hl_lines:
- source = self._highlight_lines(source)
-
- if not self.nowrap:
- if self.lineanchors:
- source = self._wrap_lineanchors(source)
- if self.linespans:
- source = self._wrap_linespans(source)
- source = self.wrap(source, outfile)
- if self.linenos == 1:
- source = self._wrap_tablelinenos(source)
- if self.full:
- source = self._wrap_full(source, outfile)
-
- for t, piece in source:
- outfile.write(piece)
+ if self.hl_lines:
+ source = self._highlight_lines(source)
+
+ if not self.nowrap:
+ if self.lineanchors:
+ source = self._wrap_lineanchors(source)
+ if self.linespans:
+ source = self._wrap_linespans(source)
+ source = self.wrap(source, outfile)
+ if self.linenos == 1:
+ source = self._wrap_tablelinenos(source)
+ if self.full:
+ source = self._wrap_full(source, outfile)
+
+ for t, piece in source:
+ outfile.write(piece)
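
The `format_unencoded` docstring above describes the wrapper contract: every stage yields `(code, text)` pairs, where `1` marks highlighted source text and `0` marks wrapping markup. A small sketch of a custom wrapper built on that contract (the subclass and its extra `<section>` wrapper are hypothetical; assumes Pygments is importable):

    # Hypothetical illustration of the (0/1, text) protocol described above.
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import HtmlFormatter

    class SectionedHtmlFormatter(HtmlFormatter):
        def wrap(self, source, outfile):
            return self._wrap_section(super().wrap(source, outfile))

        def _wrap_section(self, inner):
            yield 0, '<section class="snippet">'   # 0 = wrapping markup
            yield from inner                       # 1-tuples pass through untouched
            yield 0, '</section>'

    print(highlight('x = 1', PythonLexer(), SectionedHtmlFormatter()))
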
diff --git a/contrib/python/Pygments/py3/pygments/formatters/img.py b/contrib/python/Pygments/py3/pygments/formatters/img.py
index f481afc4a4..4536755a0c 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/img.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/img.py
@@ -1,92 +1,92 @@
-"""
- pygments.formatters.img
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for Pixmap output.
-
+"""
+ pygments.formatters.img
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for Pixmap output.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import os
-import sys
-
-from pygments.formatter import Formatter
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
+import sys
+
+from pygments.formatter import Formatter
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
get_choice_opt
-
-import subprocess
-
-# Import this carefully
-try:
- from PIL import Image, ImageDraw, ImageFont
- pil_available = True
-except ImportError:
- pil_available = False
-
-try:
- import _winreg
-except ImportError:
- try:
- import winreg as _winreg
- except ImportError:
- _winreg = None
-
-__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
- 'BmpImageFormatter']
-
-
-# For some unknown reason every font calls it something different
-STYLES = {
- 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
- 'ITALIC': ['Oblique', 'Italic'],
- 'BOLD': ['Bold'],
- 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
-}
-
-# A sane default for modern systems
+
+import subprocess
+
+# Import this carefully
+try:
+ from PIL import Image, ImageDraw, ImageFont
+ pil_available = True
+except ImportError:
+ pil_available = False
+
+try:
+ import _winreg
+except ImportError:
+ try:
+ import winreg as _winreg
+ except ImportError:
+ _winreg = None
+
+__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
+ 'BmpImageFormatter']
+
+
+# For some unknown reason every font calls it something different
+STYLES = {
+ 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
+ 'ITALIC': ['Oblique', 'Italic'],
+ 'BOLD': ['Bold'],
+ 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
+}
+
+# A sane default for modern systems
DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
-DEFAULT_FONT_NAME_WIN = 'Courier New'
+DEFAULT_FONT_NAME_WIN = 'Courier New'
DEFAULT_FONT_NAME_MAC = 'Menlo'
-
-
-class PilNotAvailable(ImportError):
- """When Python imaging library is not available"""
-
-
-class FontNotFound(Exception):
- """When there are no usable fonts specified"""
-
-
+
+
+class PilNotAvailable(ImportError):
+ """When Python imaging library is not available"""
+
+
+class FontNotFound(Exception):
+ """When there are no usable fonts specified"""
+
+
class FontManager:
- """
- Manages a set of fonts: normal, italic, bold, etc...
- """
-
- def __init__(self, font_name, font_size=14):
- self.font_name = font_name
- self.font_size = font_size
- self.fonts = {}
- self.encoding = None
- if sys.platform.startswith('win'):
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_WIN
- self._create_win()
+ """
+ Manages a set of fonts: normal, italic, bold, etc...
+ """
+
+ def __init__(self, font_name, font_size=14):
+ self.font_name = font_name
+ self.font_size = font_size
+ self.fonts = {}
+ self.encoding = None
+ if sys.platform.startswith('win'):
+ if not font_name:
+ self.font_name = DEFAULT_FONT_NAME_WIN
+ self._create_win()
elif sys.platform.startswith('darwin'):
if not font_name:
self.font_name = DEFAULT_FONT_NAME_MAC
self._create_mac()
- else:
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_NIX
- self._create_nix()
-
- def _get_nix_font_path(self, name, style):
- proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'],
- stdout=subprocess.PIPE, stderr=None)
- stdout, _ = proc.communicate()
- if proc.returncode == 0:
- lines = stdout.splitlines()
+ else:
+ if not font_name:
+ self.font_name = DEFAULT_FONT_NAME_NIX
+ self._create_nix()
+
+ def _get_nix_font_path(self, name, style):
+ proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'],
+ stdout=subprocess.PIPE, stderr=None)
+ stdout, _ = proc.communicate()
+ if proc.returncode == 0:
+ lines = stdout.splitlines()
for line in lines:
if line.startswith(b'Fontconfig warning:'):
continue
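
The lookup above shells out to fontconfig. A standalone sketch of the same query (editorial note; assumes the `fc-list` command is available, as on most Linux systems):

    # Standalone sketch of the fc-list query used by _get_nix_font_path() above.
    import subprocess

    def find_font_file(name, style='Regular'):
        proc = subprocess.run(['fc-list', '%s:style=%s' % (name, style), 'file'],
                              capture_output=True)
        if proc.returncode != 0:
            return None
        for line in proc.stdout.splitlines():
            if line.startswith(b'Fontconfig warning:'):
                continue
            # Output lines look roughly like b'/usr/share/fonts/.../DejaVuSansMono.ttf: '
            path = line.decode().strip().rstrip(':').strip()
            if path:
                return path
        return None

    print(find_font_file('DejaVu Sans Mono'))
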
@@ -94,28 +94,28 @@ class FontManager:
if path:
return path
return None
-
- def _create_nix(self):
- for name in STYLES['NORMAL']:
- path = self._get_nix_font_path(self.font_name, name)
- if path is not None:
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- break
- else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- for stylename in STYLES[style]:
- path = self._get_nix_font_path(self.font_name, stylename)
- if path is not None:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- break
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
-
+
+ def _create_nix(self):
+ for name in STYLES['NORMAL']:
+ path = self._get_nix_font_path(self.font_name, name)
+ if path is not None:
+ self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
+ break
+ else:
+ raise FontNotFound('No usable fonts named: "%s"' %
+ self.font_name)
+ for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
+ for stylename in STYLES[style]:
+ path = self._get_nix_font_path(self.font_name, stylename)
+ if path is not None:
+ self.fonts[style] = ImageFont.truetype(path, self.font_size)
+ break
+ else:
+ if style == 'BOLDITALIC':
+ self.fonts[style] = self.fonts['BOLD']
+ else:
+ self.fonts[style] = self.fonts['NORMAL']
+
def _get_mac_font_path(self, font_map, name, style):
return font_map.get((name + ' ' + style).strip().lower())
@@ -148,29 +148,29 @@ class FontManager:
else:
self.fonts[style] = self.fonts['NORMAL']
- def _lookup_win(self, key, basename, styles, fail=False):
- for suffix in ('', ' (TrueType)'):
- for style in styles:
- try:
- valname = '%s%s%s' % (basename, style and ' '+style, suffix)
- val, _ = _winreg.QueryValueEx(key, valname)
- return val
+ def _lookup_win(self, key, basename, styles, fail=False):
+ for suffix in ('', ' (TrueType)'):
+ for style in styles:
+ try:
+ valname = '%s%s%s' % (basename, style and ' '+style, suffix)
+ val, _ = _winreg.QueryValueEx(key, valname)
+ return val
except OSError:
- continue
- else:
- if fail:
- raise FontNotFound('Font %s (%s) not found in registry' %
- (basename, styles[0]))
- return None
-
- def _create_win(self):
+ continue
+ else:
+ if fail:
+ raise FontNotFound('Font %s (%s) not found in registry' %
+ (basename, styles[0]))
+ return None
+
+ def _create_win(self):
lookuperror = None
keynames = [ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
(_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Fonts'),
(_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
(_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') ]
for keyname in keynames:
- try:
+ try:
key = _winreg.OpenKey(*keyname)
try:
path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
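
The registry probe in `_lookup_win` boils down to a `winreg` query like the following (Windows-only sketch; the value name 'Courier New (TrueType)' is just an example of the basename + style + suffix pattern built above):

    # Windows-only sketch of the registry lookup performed by _lookup_win().
    import winreg

    key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
                         r'Software\Microsoft\Windows NT\CurrentVersion\Fonts')
    try:
        filename, _ = winreg.QueryValueEx(key, 'Courier New (TrueType)')
        print(filename)        # typically a file name such as 'cour.ttf'
    except OSError:
        print('font not registered under that name')
    finally:
        winreg.CloseKey(key)
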
@@ -201,257 +201,257 @@ class FontManager:
if lookuperror:
raise lookuperror
raise FontNotFound('Can\'t open Windows font registry key')
-
- def get_char_size(self):
- """
- Get the character size.
- """
- return self.fonts['NORMAL'].getsize('M')
-
+
+ def get_char_size(self):
+ """
+ Get the character size.
+ """
+ return self.fonts['NORMAL'].getsize('M')
+
def get_text_size(self, text):
"""
Get the text size(width, height).
"""
return self.fonts['NORMAL'].getsize(text)
- def get_font(self, bold, oblique):
- """
- Get the font based on bold and italic flags.
- """
- if bold and oblique:
- return self.fonts['BOLDITALIC']
- elif bold:
- return self.fonts['BOLD']
- elif oblique:
- return self.fonts['ITALIC']
- else:
- return self.fonts['NORMAL']
-
-
-class ImageFormatter(Formatter):
- """
- Create a PNG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 0.10
-
- Additional options accepted:
-
- `image_format`
-        An image format to output to that is recognised by PIL; these include:
-
- * "PNG" (default)
- * "JPEG"
- * "BMP"
- * "GIF"
-
- `line_pad`
- The extra spacing (in pixels) between each line of text.
-
- Default: 2
-
- `font_name`
- The font name to be used as the base font from which others, such as
- bold and italic fonts will be generated. This really should be a
- monospace font to look sane.
-
+ def get_font(self, bold, oblique):
+ """
+ Get the font based on bold and italic flags.
+ """
+ if bold and oblique:
+ return self.fonts['BOLDITALIC']
+ elif bold:
+ return self.fonts['BOLD']
+ elif oblique:
+ return self.fonts['ITALIC']
+ else:
+ return self.fonts['NORMAL']
+
+
+class ImageFormatter(Formatter):
+ """
+ Create a PNG image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 0.10
+
+ Additional options accepted:
+
+ `image_format`
+        An image format to output to that is recognised by PIL; these include:
+
+ * "PNG" (default)
+ * "JPEG"
+ * "BMP"
+ * "GIF"
+
+ `line_pad`
+ The extra spacing (in pixels) between each line of text.
+
+ Default: 2
+
+ `font_name`
+ The font name to be used as the base font from which others, such as
+ bold and italic fonts will be generated. This really should be a
+ monospace font to look sane.
+
Default: "Courier New" on Windows, "Menlo" on Mac OS, and
"DejaVu Sans Mono" on \\*nix
-
- `font_size`
- The font size in points to be used.
-
- Default: 14
-
- `image_pad`
- The padding, in pixels to be used at each edge of the resulting image.
-
- Default: 10
-
- `line_numbers`
- Whether line numbers should be shown: True/False
-
- Default: True
-
- `line_number_start`
- The line number of the first line.
-
- Default: 1
-
- `line_number_step`
- The step used when printing line numbers.
-
- Default: 1
-
- `line_number_bg`
- The background colour (in "#123456" format) of the line number bar, or
- None to use the style background color.
-
- Default: "#eed"
-
- `line_number_fg`
- The text color of the line numbers (in "#123456"-like format).
-
- Default: "#886"
-
- `line_number_chars`
- The number of columns of line numbers allowable in the line number
- margin.
-
- Default: 2
-
- `line_number_bold`
- Whether line numbers will be bold: True/False
-
- Default: False
-
- `line_number_italic`
- Whether line numbers will be italicized: True/False
-
- Default: False
-
- `line_number_separator`
- Whether a line will be drawn between the line number area and the
- source code area: True/False
-
- Default: True
-
- `line_number_pad`
- The horizontal padding (in pixels) between the line number margin, and
- the source code area.
-
- Default: 6
-
- `hl_lines`
- Specify a list of lines to be highlighted.
-
- .. versionadded:: 1.2
-
- Default: empty list
-
- `hl_color`
- Specify the color for highlighting lines.
-
- .. versionadded:: 1.2
-
- Default: highlight color of the selected style
- """
-
- # Required by the pygments mapper
- name = 'img'
- aliases = ['img', 'IMG', 'png']
- filenames = ['*.png']
-
- unicodeoutput = False
-
- default_image_format = 'png'
-
- def __init__(self, **options):
- """
- See the class docstring for explanation of options.
- """
- if not pil_available:
- raise PilNotAvailable(
- 'Python Imaging Library is required for this formatter')
- Formatter.__init__(self, **options)
- self.encoding = 'latin1' # let pygments.format() do the right thing
- # Read the style
- self.styles = dict(self.style)
- if self.style.background_color is None:
- self.background_color = '#fff'
- else:
- self.background_color = self.style.background_color
- # Image options
- self.image_format = get_choice_opt(
- options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
- self.default_image_format, normcase=True)
- self.image_pad = get_int_opt(options, 'image_pad', 10)
- self.line_pad = get_int_opt(options, 'line_pad', 2)
- # The fonts
- fontsize = get_int_opt(options, 'font_size', 14)
- self.fonts = FontManager(options.get('font_name', ''), fontsize)
- self.fontw, self.fonth = self.fonts.get_char_size()
- # Line number options
- self.line_number_fg = options.get('line_number_fg', '#886')
- self.line_number_bg = options.get('line_number_bg', '#eed')
- self.line_number_chars = get_int_opt(options,
- 'line_number_chars', 2)
- self.line_number_bold = get_bool_opt(options,
- 'line_number_bold', False)
- self.line_number_italic = get_bool_opt(options,
- 'line_number_italic', False)
- self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
- self.line_numbers = get_bool_opt(options, 'line_numbers', True)
- self.line_number_separator = get_bool_opt(options,
- 'line_number_separator', True)
- self.line_number_step = get_int_opt(options, 'line_number_step', 1)
- self.line_number_start = get_int_opt(options, 'line_number_start', 1)
- if self.line_numbers:
- self.line_number_width = (self.fontw * self.line_number_chars +
- self.line_number_pad * 2)
- else:
- self.line_number_width = 0
- self.hl_lines = []
- hl_lines_str = get_list_opt(options, 'hl_lines', [])
- for line in hl_lines_str:
- try:
- self.hl_lines.append(int(line))
- except ValueError:
- pass
- self.hl_color = options.get('hl_color',
- self.style.highlight_color) or '#f90'
- self.drawables = []
-
- def get_style_defs(self, arg=''):
- raise NotImplementedError('The -S option is meaningless for the image '
- 'formatter. Use -O style=<stylename> instead.')
-
- def _get_line_height(self):
- """
- Get the height of a line.
- """
- return self.fonth + self.line_pad
-
- def _get_line_y(self, lineno):
- """
- Get the Y coordinate of a line number.
- """
- return lineno * self._get_line_height() + self.image_pad
-
- def _get_char_width(self):
- """
- Get the width of a character.
- """
- return self.fontw
-
+
+ `font_size`
+ The font size in points to be used.
+
+ Default: 14
+
+ `image_pad`
+ The padding, in pixels to be used at each edge of the resulting image.
+
+ Default: 10
+
+ `line_numbers`
+ Whether line numbers should be shown: True/False
+
+ Default: True
+
+ `line_number_start`
+ The line number of the first line.
+
+ Default: 1
+
+ `line_number_step`
+ The step used when printing line numbers.
+
+ Default: 1
+
+ `line_number_bg`
+ The background colour (in "#123456" format) of the line number bar, or
+ None to use the style background color.
+
+ Default: "#eed"
+
+ `line_number_fg`
+ The text color of the line numbers (in "#123456"-like format).
+
+ Default: "#886"
+
+ `line_number_chars`
+ The number of columns of line numbers allowable in the line number
+ margin.
+
+ Default: 2
+
+ `line_number_bold`
+ Whether line numbers will be bold: True/False
+
+ Default: False
+
+ `line_number_italic`
+ Whether line numbers will be italicized: True/False
+
+ Default: False
+
+ `line_number_separator`
+ Whether a line will be drawn between the line number area and the
+ source code area: True/False
+
+ Default: True
+
+ `line_number_pad`
+ The horizontal padding (in pixels) between the line number margin, and
+ the source code area.
+
+ Default: 6
+
+ `hl_lines`
+ Specify a list of lines to be highlighted.
+
+ .. versionadded:: 1.2
+
+ Default: empty list
+
+ `hl_color`
+ Specify the color for highlighting lines.
+
+ .. versionadded:: 1.2
+
+ Default: highlight color of the selected style
+ """
+
+ # Required by the pygments mapper
+ name = 'img'
+ aliases = ['img', 'IMG', 'png']
+ filenames = ['*.png']
+
+ unicodeoutput = False
+
+ default_image_format = 'png'
+
+ def __init__(self, **options):
+ """
+ See the class docstring for explanation of options.
+ """
+ if not pil_available:
+ raise PilNotAvailable(
+ 'Python Imaging Library is required for this formatter')
+ Formatter.__init__(self, **options)
+ self.encoding = 'latin1' # let pygments.format() do the right thing
+ # Read the style
+ self.styles = dict(self.style)
+ if self.style.background_color is None:
+ self.background_color = '#fff'
+ else:
+ self.background_color = self.style.background_color
+ # Image options
+ self.image_format = get_choice_opt(
+ options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
+ self.default_image_format, normcase=True)
+ self.image_pad = get_int_opt(options, 'image_pad', 10)
+ self.line_pad = get_int_opt(options, 'line_pad', 2)
+ # The fonts
+ fontsize = get_int_opt(options, 'font_size', 14)
+ self.fonts = FontManager(options.get('font_name', ''), fontsize)
+ self.fontw, self.fonth = self.fonts.get_char_size()
+ # Line number options
+ self.line_number_fg = options.get('line_number_fg', '#886')
+ self.line_number_bg = options.get('line_number_bg', '#eed')
+ self.line_number_chars = get_int_opt(options,
+ 'line_number_chars', 2)
+ self.line_number_bold = get_bool_opt(options,
+ 'line_number_bold', False)
+ self.line_number_italic = get_bool_opt(options,
+ 'line_number_italic', False)
+ self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
+ self.line_numbers = get_bool_opt(options, 'line_numbers', True)
+ self.line_number_separator = get_bool_opt(options,
+ 'line_number_separator', True)
+ self.line_number_step = get_int_opt(options, 'line_number_step', 1)
+ self.line_number_start = get_int_opt(options, 'line_number_start', 1)
+ if self.line_numbers:
+ self.line_number_width = (self.fontw * self.line_number_chars +
+ self.line_number_pad * 2)
+ else:
+ self.line_number_width = 0
+ self.hl_lines = []
+ hl_lines_str = get_list_opt(options, 'hl_lines', [])
+ for line in hl_lines_str:
+ try:
+ self.hl_lines.append(int(line))
+ except ValueError:
+ pass
+ self.hl_color = options.get('hl_color',
+ self.style.highlight_color) or '#f90'
+ self.drawables = []
+
+ def get_style_defs(self, arg=''):
+ raise NotImplementedError('The -S option is meaningless for the image '
+ 'formatter. Use -O style=<stylename> instead.')
+
+ def _get_line_height(self):
+ """
+ Get the height of a line.
+ """
+ return self.fonth + self.line_pad
+
+ def _get_line_y(self, lineno):
+ """
+ Get the Y coordinate of a line number.
+ """
+ return lineno * self._get_line_height() + self.image_pad
+
+ def _get_char_width(self):
+ """
+ Get the width of a character.
+ """
+ return self.fontw
+
def _get_char_x(self, linelength):
- """
- Get the X coordinate of a character position.
- """
+ """
+ Get the X coordinate of a character position.
+ """
return linelength + self.image_pad + self.line_number_width
-
+
def _get_text_pos(self, linelength, lineno):
- """
- Get the actual position for a character and line position.
- """
+ """
+ Get the actual position for a character and line position.
+ """
return self._get_char_x(linelength), self._get_line_y(lineno)
-
- def _get_linenumber_pos(self, lineno):
- """
- Get the actual position for the start of a line number.
- """
- return (self.image_pad, self._get_line_y(lineno))
-
- def _get_text_color(self, style):
- """
- Get the correct color for the token from the style.
- """
- if style['color'] is not None:
- fill = '#' + style['color']
- else:
- fill = '#000'
- return fill
-
+
+ def _get_linenumber_pos(self, lineno):
+ """
+ Get the actual position for the start of a line number.
+ """
+ return (self.image_pad, self._get_line_y(lineno))
+
+ def _get_text_color(self, style):
+ """
+ Get the correct color for the token from the style.
+ """
+ if style['color'] is not None:
+ fill = '#' + style['color']
+ else:
+ fill = '#000'
+ return fill
+
def _get_text_bg_color(self, style):
"""
Get the correct background color for the token from the style.
@@ -462,180 +462,180 @@ class ImageFormatter(Formatter):
bg_color = None
return bg_color
- def _get_style_font(self, style):
- """
- Get the correct font for the style.
- """
- return self.fonts.get_font(style['bold'], style['italic'])
-
+ def _get_style_font(self, style):
+ """
+ Get the correct font for the style.
+ """
+ return self.fonts.get_font(style['bold'], style['italic'])
+
def _get_image_size(self, maxlinelength, maxlineno):
- """
- Get the required image size.
- """
+ """
+ Get the required image size.
+ """
return (self._get_char_x(maxlinelength) + self.image_pad,
- self._get_line_y(maxlineno + 0) + self.image_pad)
-
- def _draw_linenumber(self, posno, lineno):
- """
- Remember a line number drawable to paint later.
- """
- self._draw_text(
- self._get_linenumber_pos(posno),
- str(lineno).rjust(self.line_number_chars),
- font=self.fonts.get_font(self.line_number_bold,
- self.line_number_italic),
+ self._get_line_y(maxlineno + 0) + self.image_pad)
+
+ def _draw_linenumber(self, posno, lineno):
+ """
+ Remember a line number drawable to paint later.
+ """
+ self._draw_text(
+ self._get_linenumber_pos(posno),
+ str(lineno).rjust(self.line_number_chars),
+ font=self.fonts.get_font(self.line_number_bold,
+ self.line_number_italic),
text_fg=self.line_number_fg,
text_bg=None,
- )
-
+ )
+
def _draw_text(self, pos, text, font, text_fg, text_bg):
- """
- Remember a single drawable tuple to paint later.
- """
+ """
+ Remember a single drawable tuple to paint later.
+ """
self.drawables.append((pos, text, font, text_fg, text_bg))
-
- def _create_drawables(self, tokensource):
- """
- Create drawables for the token content.
- """
- lineno = charno = maxcharno = 0
+
+ def _create_drawables(self, tokensource):
+ """
+ Create drawables for the token content.
+ """
+ lineno = charno = maxcharno = 0
maxlinelength = linelength = 0
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- style = self.styles[ttype]
- # TODO: make sure tab expansion happens earlier in the chain. It
- # really ought to be done on the input, as to do it right here is
- # quite complex.
- value = value.expandtabs(4)
- lines = value.splitlines(True)
- # print lines
- for i, line in enumerate(lines):
- temp = line.rstrip('\n')
- if temp:
- self._draw_text(
+ for ttype, value in tokensource:
+ while ttype not in self.styles:
+ ttype = ttype.parent
+ style = self.styles[ttype]
+ # TODO: make sure tab expansion happens earlier in the chain. It
+ # really ought to be done on the input, as to do it right here is
+ # quite complex.
+ value = value.expandtabs(4)
+ lines = value.splitlines(True)
+ # print lines
+ for i, line in enumerate(lines):
+ temp = line.rstrip('\n')
+ if temp:
+ self._draw_text(
self._get_text_pos(linelength, lineno),
- temp,
- font = self._get_style_font(style),
+ temp,
+ font = self._get_style_font(style),
text_fg = self._get_text_color(style),
text_bg = self._get_text_bg_color(style),
- )
+ )
temp_width, temp_hight = self.fonts.get_text_size(temp)
linelength += temp_width
maxlinelength = max(maxlinelength, linelength)
- charno += len(temp)
- maxcharno = max(maxcharno, charno)
- if line.endswith('\n'):
- # add a line for each extra line in the value
+ charno += len(temp)
+ maxcharno = max(maxcharno, charno)
+ if line.endswith('\n'):
+ # add a line for each extra line in the value
linelength = 0
- charno = 0
- lineno += 1
+ charno = 0
+ lineno += 1
self.maxlinelength = maxlinelength
- self.maxcharno = maxcharno
- self.maxlineno = lineno
-
- def _draw_line_numbers(self):
- """
- Create drawables for the line numbers.
- """
- if not self.line_numbers:
- return
+ self.maxcharno = maxcharno
+ self.maxlineno = lineno
+
+ def _draw_line_numbers(self):
+ """
+ Create drawables for the line numbers.
+ """
+ if not self.line_numbers:
+ return
for p in range(self.maxlineno):
- n = p + self.line_number_start
- if (n % self.line_number_step) == 0:
- self._draw_linenumber(p, n)
-
- def _paint_line_number_bg(self, im):
- """
- Paint the line number background on the image.
- """
- if not self.line_numbers:
- return
- if self.line_number_fg is None:
- return
- draw = ImageDraw.Draw(im)
- recth = im.size[-1]
- rectw = self.image_pad + self.line_number_width - self.line_number_pad
- draw.rectangle([(0, 0), (rectw, recth)],
- fill=self.line_number_bg)
+ n = p + self.line_number_start
+ if (n % self.line_number_step) == 0:
+ self._draw_linenumber(p, n)
+
+ def _paint_line_number_bg(self, im):
+ """
+ Paint the line number background on the image.
+ """
+ if not self.line_numbers:
+ return
+ if self.line_number_fg is None:
+ return
+ draw = ImageDraw.Draw(im)
+ recth = im.size[-1]
+ rectw = self.image_pad + self.line_number_width - self.line_number_pad
+ draw.rectangle([(0, 0), (rectw, recth)],
+ fill=self.line_number_bg)
if self.line_number_separator:
draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
- del draw
-
- def format(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- This implementation calculates where it should draw each token on the
- pixmap, then calculates the required pixmap size and draws the items.
- """
- self._create_drawables(tokensource)
- self._draw_line_numbers()
- im = Image.new(
- 'RGB',
+ del draw
+
+ def format(self, tokensource, outfile):
+ """
+ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
+ tuples and write it into ``outfile``.
+
+ This implementation calculates where it should draw each token on the
+ pixmap, then calculates the required pixmap size and draws the items.
+ """
+ self._create_drawables(tokensource)
+ self._draw_line_numbers()
+ im = Image.new(
+ 'RGB',
self._get_image_size(self.maxlinelength, self.maxlineno),
- self.background_color
- )
- self._paint_line_number_bg(im)
- draw = ImageDraw.Draw(im)
- # Highlight
- if self.hl_lines:
- x = self.image_pad + self.line_number_width - self.line_number_pad + 1
- recth = self._get_line_height()
- rectw = im.size[0] - x
- for linenumber in self.hl_lines:
- y = self._get_line_y(linenumber - 1)
- draw.rectangle([(x, y), (x + rectw, y + recth)],
- fill=self.hl_color)
+ self.background_color
+ )
+ self._paint_line_number_bg(im)
+ draw = ImageDraw.Draw(im)
+ # Highlight
+ if self.hl_lines:
+ x = self.image_pad + self.line_number_width - self.line_number_pad + 1
+ recth = self._get_line_height()
+ rectw = im.size[0] - x
+ for linenumber in self.hl_lines:
+ y = self._get_line_y(linenumber - 1)
+ draw.rectangle([(x, y), (x + rectw, y + recth)],
+ fill=self.hl_color)
for pos, value, font, text_fg, text_bg in self.drawables:
if text_bg:
text_size = draw.textsize(text=value, font=font)
draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
draw.text(pos, value, font=font, fill=text_fg)
- im.save(outfile, self.image_format.upper())
-
-
-# Add one formatter per format, so that the "-f gif" option gives the correct result
-# when used in pygmentize.
-
-class GifImageFormatter(ImageFormatter):
- """
- Create a GIF image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_gif'
- aliases = ['gif']
- filenames = ['*.gif']
- default_image_format = 'gif'
-
-
-class JpgImageFormatter(ImageFormatter):
- """
- Create a JPEG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_jpg'
- aliases = ['jpg', 'jpeg']
- filenames = ['*.jpg']
- default_image_format = 'jpeg'
-
-
-class BmpImageFormatter(ImageFormatter):
- """
- Create a bitmap image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_bmp'
- aliases = ['bmp', 'bitmap']
- filenames = ['*.bmp']
- default_image_format = 'bmp'
+ im.save(outfile, self.image_format.upper())
+
+
+# Add one formatter per format, so that the "-f gif" option gives the correct result
+# when used in pygmentize.
+
+class GifImageFormatter(ImageFormatter):
+ """
+ Create a GIF image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'img_gif'
+ aliases = ['gif']
+ filenames = ['*.gif']
+ default_image_format = 'gif'
+
+
+class JpgImageFormatter(ImageFormatter):
+ """
+ Create a JPEG image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'img_jpg'
+ aliases = ['jpg', 'jpeg']
+ filenames = ['*.jpg']
+ default_image_format = 'jpeg'
+
+
+class BmpImageFormatter(ImageFormatter):
+ """
+ Create a bitmap image from source code. This uses the Python Imaging Library to
+ generate a pixmap from the source code.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'img_bmp'
+ aliases = ['bmp', 'bitmap']
+ filenames = ['*.bmp']
+ default_image_format = 'bmp'
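
A usage sketch for the image formatters above (editorial note, not part of the diff; assumes Pygments and Pillow are installed and a usable monospace font can be found):

    # Render a snippet to PNG, then to GIF via the alias of the subclass above.
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import ImageFormatter, get_formatter_by_name

    code = 'def answer():\n    return 42\n'

    with open('snippet.png', 'wb') as f:
        highlight(code, PythonLexer(),
                  ImageFormatter(line_numbers=True, hl_lines=[2]), f)

    # The "gif" alias resolves to GifImageFormatter defined above; likewise
    # "jpg"/"jpeg" and "bmp" for the other subclasses.
    with open('snippet.gif', 'wb') as f:
        highlight(code, PythonLexer(), get_formatter_by_name('gif'), f)
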
diff --git a/contrib/python/Pygments/py3/pygments/formatters/irc.py b/contrib/python/Pygments/py3/pygments/formatters/irc.py
index d8da7a39de..6d1e1f7b19 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/irc.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/irc.py
@@ -1,27 +1,27 @@
-"""
- pygments.formatters.irc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for IRC output
-
+"""
+ pygments.formatters.irc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for IRC output
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pygments.util import get_choice_opt
-
-
-__all__ = ['IRCFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-IRC_COLORS = {
- Token: ('', ''),
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Token, Whitespace
+from pygments.util import get_choice_opt
+
+
+__all__ = ['IRCFormatter']
+
+
+#: Map token types to a tuple of color values for light and dark
+#: backgrounds.
+IRC_COLORS = {
+ Token: ('', ''),
+
Whitespace: ('gray', 'brightblack'),
Comment: ('gray', 'brightblack'),
Comment.Preproc: ('cyan', 'brightcyan'),
@@ -40,140 +40,140 @@ IRC_COLORS = {
Name.Tag: ('brightblue', 'brightblue'),
String: ('yellow', 'yellow'),
Number: ('blue', 'brightblue'),
-
+
Generic.Deleted: ('brightred', 'brightred'),
Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
+ Generic.Heading: ('**', '**'),
Generic.Subheading: ('*magenta*', '*brightmagenta*'),
Generic.Error: ('brightred', 'brightred'),
-
+
Error: ('_brightred_', '_brightred_'),
-}
-
-
-IRC_COLOR_MAP = {
- 'white': 0,
- 'black': 1,
+}
+
+
+IRC_COLOR_MAP = {
+ 'white': 0,
+ 'black': 1,
'blue': 2,
'brightgreen': 3,
'brightred': 4,
'yellow': 5,
'magenta': 6,
- 'orange': 7,
+ 'orange': 7,
'green': 7, #compat w/ ansi
'brightyellow': 8,
- 'lightgreen': 9,
+ 'lightgreen': 9,
'brightcyan': 9, # compat w/ ansi
'cyan': 10,
- 'lightblue': 11,
+ 'lightblue': 11,
'red': 11, # compat w/ ansi
'brightblue': 12,
'brightmagenta': 13,
'brightblack': 14,
'gray': 15,
-}
-
-def ircformat(color, text):
- if len(color) < 1:
- return text
- add = sub = ''
- if '_' in color: # italic
- add += '\x1D'
- sub = '\x1D' + sub
- color = color.strip('_')
- if '*' in color: # bold
- add += '\x02'
- sub = '\x02' + sub
- color = color.strip('*')
- # underline (\x1F) not supported
- # backgrounds (\x03FF,BB) not supported
- if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff
- add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2)
- sub = '\x03' + sub
- return add + text + sub
- return '<'+add+'>'+text+'</'+sub+'>'
-
-
-class IRCFormatter(Formatter):
- r"""
- Format tokens with IRC color sequences
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers in the output as well
- (default: ``False`` = no line numbers).
- """
- name = 'IRC'
- aliases = ['irc', 'IRC']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or IRC_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("\n%04d: " % self._lineno)
-
- def _format_unencoded_with_lineno(self, tokensource, outfile):
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- if value.endswith("\n"):
- self._write_lineno(outfile)
- value = value[:-1]
- color = self.colorscheme.get(ttype)
- while color is None:
+}
+
+def ircformat(color, text):
+ if len(color) < 1:
+ return text
+ add = sub = ''
+ if '_' in color: # italic
+ add += '\x1D'
+ sub = '\x1D' + sub
+ color = color.strip('_')
+ if '*' in color: # bold
+ add += '\x02'
+ sub = '\x02' + sub
+ color = color.strip('*')
+ # underline (\x1F) not supported
+ # backgrounds (\x03FF,BB) not supported
+ if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff
+ add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2)
+ sub = '\x03' + sub
+ return add + text + sub
+ return '<'+add+'>'+text+'</'+sub+'>'
+
+
+class IRCFormatter(Formatter):
+ r"""
+ Format tokens with IRC color sequences
+
+ The `get_style_defs()` method doesn't do anything special since there is
+ no support for common styles.
+
+ Options accepted:
+
+ `bg`
+ Set to ``"light"`` or ``"dark"`` depending on the terminal's background
+ (default: ``"light"``).
+
+ `colorscheme`
+ A dictionary mapping token types to (lightbg, darkbg) color names or
+ ``None`` (default: ``None`` = use builtin colorscheme).
+
+ `linenos`
+ Set to ``True`` to have line numbers in the output as well
+ (default: ``False`` = no line numbers).
+ """
+ name = 'IRC'
+ aliases = ['irc', 'IRC']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.darkbg = get_choice_opt(options, 'bg',
+ ['light', 'dark'], 'light') == 'dark'
+ self.colorscheme = options.get('colorscheme', None) or IRC_COLORS
+ self.linenos = options.get('linenos', False)
+ self._lineno = 0
+
+ def _write_lineno(self, outfile):
+ self._lineno += 1
+ outfile.write("\n%04d: " % self._lineno)
+
+ def _format_unencoded_with_lineno(self, tokensource, outfile):
+ self._write_lineno(outfile)
+
+ for ttype, value in tokensource:
+ if value.endswith("\n"):
+ self._write_lineno(outfile)
+ value = value[:-1]
+ color = self.colorscheme.get(ttype)
+ while color is None:
ttype = ttype.parent
- color = self.colorscheme.get(ttype)
- if color:
- color = color[self.darkbg]
- spl = value.split('\n')
- for line in spl[:-1]:
- self._write_lineno(outfile)
- if line:
- outfile.write(ircformat(color, line[:-1]))
- if spl[-1]:
- outfile.write(ircformat(color, spl[-1]))
- else:
- outfile.write(value)
-
- outfile.write("\n")
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._format_unencoded_with_lineno(tokensource, outfile)
- return
-
- for ttype, value in tokensource:
- color = self.colorscheme.get(ttype)
- while color is None:
- ttype = ttype[:-1]
- color = self.colorscheme.get(ttype)
- if color:
- color = color[self.darkbg]
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(ircformat(color, line))
- outfile.write('\n')
- if spl[-1]:
- outfile.write(ircformat(color, spl[-1]))
- else:
- outfile.write(value)
+ color = self.colorscheme.get(ttype)
+ if color:
+ color = color[self.darkbg]
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ self._write_lineno(outfile)
+ if line:
+ outfile.write(ircformat(color, line[:-1]))
+ if spl[-1]:
+ outfile.write(ircformat(color, spl[-1]))
+ else:
+ outfile.write(value)
+
+ outfile.write("\n")
+
+ def format_unencoded(self, tokensource, outfile):
+ if self.linenos:
+ self._format_unencoded_with_lineno(tokensource, outfile)
+ return
+
+ for ttype, value in tokensource:
+ color = self.colorscheme.get(ttype)
+ while color is None:
+ ttype = ttype[:-1]
+ color = self.colorscheme.get(ttype)
+ if color:
+ color = color[self.darkbg]
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write(ircformat(color, line))
+ outfile.write('\n')
+ if spl[-1]:
+ outfile.write(ircformat(color, spl[-1]))
+ else:
+ outfile.write(value)
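
A usage sketch for the IRC formatter above (editorial note, not part of the diff; assumes Pygments is importable). The output interleaves the token text with mIRC control codes, \x03 for colour, \x02 for bold and \x1D for italics, exactly as built by `ircformat`:

    # Sketch: highlight a snippet with the IRCFormatter defined above.
    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import IRCFormatter

    irc_text = highlight('def foo():\n    return 42\n',
                         PythonLexer(),
                         IRCFormatter(bg='dark', linenos=True))
    print(repr(irc_text))   # repr() makes the \x03NN colour codes visible
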
diff --git a/contrib/python/Pygments/py3/pygments/formatters/latex.py b/contrib/python/Pygments/py3/pygments/formatters/latex.py
index e32fcebc5a..41e5343bd4 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/latex.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/latex.py
@@ -1,416 +1,416 @@
-"""
- pygments.formatters.latex
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for LaTeX fancyvrb output.
-
+"""
+ pygments.formatters.latex
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for LaTeX fancyvrb output.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from io import StringIO
-
-from pygments.formatter import Formatter
+
+from pygments.formatter import Formatter
from pygments.lexer import Lexer, do_insertions
-from pygments.token import Token, STANDARD_TYPES
+from pygments.token import Token, STANDARD_TYPES
from pygments.util import get_bool_opt, get_int_opt
-
-
-__all__ = ['LatexFormatter']
-
-
-def escape_tex(text, commandprefix):
- return text.replace('\\', '\x00'). \
- replace('{', '\x01'). \
- replace('}', '\x02'). \
- replace('\x00', r'\%sZbs{}' % commandprefix). \
- replace('\x01', r'\%sZob{}' % commandprefix). \
- replace('\x02', r'\%sZcb{}' % commandprefix). \
- replace('^', r'\%sZca{}' % commandprefix). \
- replace('_', r'\%sZus{}' % commandprefix). \
- replace('&', r'\%sZam{}' % commandprefix). \
- replace('<', r'\%sZlt{}' % commandprefix). \
- replace('>', r'\%sZgt{}' % commandprefix). \
- replace('#', r'\%sZsh{}' % commandprefix). \
- replace('%', r'\%sZpc{}' % commandprefix). \
- replace('$', r'\%sZdl{}' % commandprefix). \
- replace('-', r'\%sZhy{}' % commandprefix). \
- replace("'", r'\%sZsq{}' % commandprefix). \
- replace('"', r'\%sZdq{}' % commandprefix). \
- replace('~', r'\%sZti{}' % commandprefix)
-
-
-DOC_TEMPLATE = r'''
-\documentclass{%(docclass)s}
-\usepackage{fancyvrb}
-\usepackage{color}
-\usepackage[%(encoding)s]{inputenc}
-%(preamble)s
-
-%(styledefs)s
-
-\begin{document}
-
-\section*{%(title)s}
-
-%(code)s
-\end{document}
-'''
-
-## Small explanation of the mess below :)
-#
-# The previous version of the LaTeX formatter just assigned a command to
-# each token type defined in the current style. That obviously is
-# problematic if the highlighted code is produced for a different style
-# than the style commands themselves.
-#
-# This version works much like the HTML formatter which assigns multiple
-# CSS classes to each <span> tag, from the most specific to the least
-# specific token type, thus falling back to the parent token type if one
-# is not defined. Here, the classes are there too and use the same short
-# forms given in token.STANDARD_TYPES.
-#
-# Highlighted code now only uses one custom command, which by default is
-# \PY and selectable by the commandprefix option (and in addition the
-# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
-# backwards compatibility purposes).
-#
-# \PY has two arguments: the classes, separated by +, and the text to
-# render in that style. The classes are resolved into the respective
-# style commands by magic, which serves to ignore unknown classes.
-#
-# The magic macros are:
-# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
-# to render in \PY@do. Their definition determines the style.
-# * \PY@reset resets \PY@it etc. to do nothing.
-# * \PY@toks parses the list of classes, using magic inspired by the
-# keyval package (but modified to use plusses instead of commas
-# because fancyvrb redefines commas inside its environments).
-# * \PY@tok processes one class, calling the \PY@tok@classname command
-# if it exists.
-# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
-# for its class.
-# * \PY resets the style, parses the classnames and then calls \PY@do.
-#
-# Tip: to read this code, print it out in substituted form using e.g.
-# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
-
-STYLE_TEMPLATE = r'''
-\makeatletter
-\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
- \let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
- \let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
-\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
-\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
- \%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
-\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
- \%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
-\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
-
-%(styles)s
-
-\def\%(cp)sZbs{\char`\\}
-\def\%(cp)sZus{\char`\_}
-\def\%(cp)sZob{\char`\{}
-\def\%(cp)sZcb{\char`\}}
-\def\%(cp)sZca{\char`\^}
-\def\%(cp)sZam{\char`\&}
-\def\%(cp)sZlt{\char`\<}
-\def\%(cp)sZgt{\char`\>}
-\def\%(cp)sZsh{\char`\#}
-\def\%(cp)sZpc{\char`\%%}
-\def\%(cp)sZdl{\char`\$}
-\def\%(cp)sZhy{\char`\-}
-\def\%(cp)sZsq{\char`\'}
-\def\%(cp)sZdq{\char`\"}
-\def\%(cp)sZti{\char`\~}
-%% for compatibility with earlier versions
-\def\%(cp)sZat{@}
-\def\%(cp)sZlb{[}
-\def\%(cp)sZrb{]}
-\makeatother
-'''
-
-
-def _get_ttype_name(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-class LatexFormatter(Formatter):
- r"""
- Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
- standard packages.
-
- Without the `full` option, code is formatted as one ``Verbatim``
- environment, like this:
-
- .. sourcecode:: latex
-
- \begin{Verbatim}[commandchars=\\\{\}]
- \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
- \PY{k}{pass}
- \end{Verbatim}
-
- The special command used here (``\PY``) and all the other macros it needs
- are output by the `get_style_defs` method.
-
- With the `full` option, a complete LaTeX document is output, including
- the command definitions in the preamble.
-
- The `get_style_defs()` method of a `LatexFormatter` returns a string
- containing ``\def`` commands defining the macros needed inside the
- ``Verbatim`` environments.
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `docclass`
- If the `full` option is enabled, this is the document class to use
- (default: ``'article'``).
-
- `preamble`
- If the `full` option is enabled, this can be further preamble commands,
- e.g. ``\usepackage`` (default: ``''``).
-
- `linenos`
- If set to ``True``, output line numbers (default: ``False``).
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `verboptions`
- Additional options given to the Verbatim environment (see the *fancyvrb*
- docs for possible values) (default: ``''``).
-
- `commandprefix`
- The LaTeX commands used to produce colored output are constructed
- using this prefix and some letters (default: ``'PY'``).
-
- .. versionadded:: 0.7
- .. versionchanged:: 0.10
- The default is now ``'PY'`` instead of ``'C'``.
-
- `texcomments`
-        If set to ``True``, enables LaTeX comment lines. That is, LaTeX markup
- in comment tokens is not escaped so that LaTeX can render it (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `mathescape`
- If set to ``True``, enables LaTeX math mode escape in comments. That
- is, ``'$...$'`` inside a comment will trigger math mode (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `escapeinside`
- If set to a string of length 2, enables escaping to LaTeX. Text
- delimited by these 2 characters is read as LaTeX code and
- typeset accordingly. It has no effect in string literals. It has
- no effect in comments if `texcomments` or `mathescape` is
- set. (default: ``''``).
-
- .. versionadded:: 2.0
-
- `envname`
- Allows you to pick an alternative environment name replacing Verbatim.
- The alternate environment still has to support Verbatim's option syntax.
- (default: ``'Verbatim'``).
-
- .. versionadded:: 2.0
- """
- name = 'LaTeX'
- aliases = ['latex', 'tex']
- filenames = ['*.tex']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.docclass = options.get('docclass', 'article')
- self.preamble = options.get('preamble', '')
- self.linenos = get_bool_opt(options, 'linenos', False)
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.verboptions = options.get('verboptions', '')
- self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.commandprefix = options.get('commandprefix', 'PY')
- self.texcomments = get_bool_opt(options, 'texcomments', False)
- self.mathescape = get_bool_opt(options, 'mathescape', False)
- self.escapeinside = options.get('escapeinside', '')
- if len(self.escapeinside) == 2:
- self.left = self.escapeinside[0]
- self.right = self.escapeinside[1]
- else:
- self.escapeinside = ''
+
+
+__all__ = ['LatexFormatter']
+
+
+def escape_tex(text, commandprefix):
+ return text.replace('\\', '\x00'). \
+ replace('{', '\x01'). \
+ replace('}', '\x02'). \
+ replace('\x00', r'\%sZbs{}' % commandprefix). \
+ replace('\x01', r'\%sZob{}' % commandprefix). \
+ replace('\x02', r'\%sZcb{}' % commandprefix). \
+ replace('^', r'\%sZca{}' % commandprefix). \
+ replace('_', r'\%sZus{}' % commandprefix). \
+ replace('&', r'\%sZam{}' % commandprefix). \
+ replace('<', r'\%sZlt{}' % commandprefix). \
+ replace('>', r'\%sZgt{}' % commandprefix). \
+ replace('#', r'\%sZsh{}' % commandprefix). \
+ replace('%', r'\%sZpc{}' % commandprefix). \
+ replace('$', r'\%sZdl{}' % commandprefix). \
+ replace('-', r'\%sZhy{}' % commandprefix). \
+ replace("'", r'\%sZsq{}' % commandprefix). \
+ replace('"', r'\%sZdq{}' % commandprefix). \
+ replace('~', r'\%sZti{}' % commandprefix)
+
+
+DOC_TEMPLATE = r'''
+\documentclass{%(docclass)s}
+\usepackage{fancyvrb}
+\usepackage{color}
+\usepackage[%(encoding)s]{inputenc}
+%(preamble)s
+
+%(styledefs)s
+
+\begin{document}
+
+\section*{%(title)s}
+
+%(code)s
+\end{document}
+'''
+
+## Small explanation of the mess below :)
+#
+# The previous version of the LaTeX formatter just assigned a command to
+# each token type defined in the current style. That obviously is
+# problematic if the highlighted code is produced for a different style
+# than the style commands themselves.
+#
+# This version works much like the HTML formatter which assigns multiple
+# CSS classes to each <span> tag, from the most specific to the least
+# specific token type, thus falling back to the parent token type if one
+# is not defined. Here, the classes are there too and use the same short
+# forms given in token.STANDARD_TYPES.
+#
+# Highlighted code now only uses one custom command, which by default is
+# \PY and selectable by the commandprefix option (and in addition the
+# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
+# backwards compatibility purposes).
+#
+# \PY has two arguments: the classes, separated by +, and the text to
+# render in that style. The classes are resolved into the respective
+# style commands by magic, which serves to ignore unknown classes.
+#
+# The magic macros are:
+# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
+# to render in \PY@do. Their definition determines the style.
+# * \PY@reset resets \PY@it etc. to do nothing.
+# * \PY@toks parses the list of classes, using magic inspired by the
+# keyval package (but modified to use plusses instead of commas
+# because fancyvrb redefines commas inside its environments).
+# * \PY@tok processes one class, calling the \PY@tok@classname command
+# if it exists.
+# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
+# for its class.
+# * \PY resets the style, parses the classnames and then calls \PY@do.
+#
+# Tip: to read this code, print it out in substituted form using e.g.
+# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
+
+STYLE_TEMPLATE = r'''
+\makeatletter
+\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
+ \let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
+ \let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
+\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
+\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
+ \%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
+\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
+ \%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
+\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
+
+%(styles)s
+
+\def\%(cp)sZbs{\char`\\}
+\def\%(cp)sZus{\char`\_}
+\def\%(cp)sZob{\char`\{}
+\def\%(cp)sZcb{\char`\}}
+\def\%(cp)sZca{\char`\^}
+\def\%(cp)sZam{\char`\&}
+\def\%(cp)sZlt{\char`\<}
+\def\%(cp)sZgt{\char`\>}
+\def\%(cp)sZsh{\char`\#}
+\def\%(cp)sZpc{\char`\%%}
+\def\%(cp)sZdl{\char`\$}
+\def\%(cp)sZhy{\char`\-}
+\def\%(cp)sZsq{\char`\'}
+\def\%(cp)sZdq{\char`\"}
+\def\%(cp)sZti{\char`\~}
+%% for compatibility with earlier versions
+\def\%(cp)sZat{@}
+\def\%(cp)sZlb{[}
+\def\%(cp)sZrb{]}
+\makeatother
+'''
+
+
+def _get_ttype_name(ttype):
+ fname = STANDARD_TYPES.get(ttype)
+ if fname:
+ return fname
+ aname = ''
+ while fname is None:
+ aname = ttype[-1] + aname
+ ttype = ttype.parent
+ fname = STANDARD_TYPES.get(ttype)
+ return fname + aname
+
+
+class LatexFormatter(Formatter):
+ r"""
+ Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
+ standard packages.
+
+ Without the `full` option, code is formatted as one ``Verbatim``
+ environment, like this:
+
+ .. sourcecode:: latex
+
+ \begin{Verbatim}[commandchars=\\\{\}]
+ \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
+ \PY{k}{pass}
+ \end{Verbatim}
+
+ The special command used here (``\PY``) and all the other macros it needs
+ are output by the `get_style_defs` method.
+
+ With the `full` option, a complete LaTeX document is output, including
+ the command definitions in the preamble.
+
+ The `get_style_defs()` method of a `LatexFormatter` returns a string
+ containing ``\def`` commands defining the macros needed inside the
+ ``Verbatim`` environments.
+
+ Additional options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+
+ `full`
+ Tells the formatter to output a "full" document, i.e. a complete
+ self-contained document (default: ``False``).
+
+ `title`
+ If `full` is true, the title that should be used to caption the
+ document (default: ``''``).
+
+ `docclass`
+ If the `full` option is enabled, this is the document class to use
+ (default: ``'article'``).
+
+ `preamble`
+ If the `full` option is enabled, this can be further preamble commands,
+ e.g. ``\usepackage`` (default: ``''``).
+
+ `linenos`
+ If set to ``True``, output line numbers (default: ``False``).
+
+ `linenostart`
+ The line number for the first line (default: ``1``).
+
+ `linenostep`
+ If set to a number n > 1, only every nth line number is printed.
+
+ `verboptions`
+ Additional options given to the Verbatim environment (see the *fancyvrb*
+ docs for possible values) (default: ``''``).
+
+ `commandprefix`
+ The LaTeX commands used to produce colored output are constructed
+ using this prefix and some letters (default: ``'PY'``).
+
+ .. versionadded:: 0.7
+ .. versionchanged:: 0.10
+ The default is now ``'PY'`` instead of ``'C'``.
+
+ `texcomments`
+ If set to ``True``, enables LaTeX comment lines. That is, LaTeX markup
+ in comment tokens is not escaped so that LaTeX can render it (default:
+ ``False``).
+
+ .. versionadded:: 1.2
+
+ `mathescape`
+ If set to ``True``, enables LaTeX math mode escape in comments. That
+ is, ``'$...$'`` inside a comment will trigger math mode (default:
+ ``False``).
+
+ .. versionadded:: 1.2
+
+ `escapeinside`
+ If set to a string of length 2, enables escaping to LaTeX. Text
+ delimited by these 2 characters is read as LaTeX code and
+ typeset accordingly. It has no effect in string literals. It has
+ no effect in comments if `texcomments` or `mathescape` is
+ set. (default: ``''``).
+
+ .. versionadded:: 2.0
+
+ `envname`
+ Allows you to pick an alternative environment name replacing Verbatim.
+ The alternate environment still has to support Verbatim's option syntax.
+ (default: ``'Verbatim'``).
+
+ .. versionadded:: 2.0
+ """
+ name = 'LaTeX'
+ aliases = ['latex', 'tex']
+ filenames = ['*.tex']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.docclass = options.get('docclass', 'article')
+ self.preamble = options.get('preamble', '')
+ self.linenos = get_bool_opt(options, 'linenos', False)
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.verboptions = options.get('verboptions', '')
+ self.nobackground = get_bool_opt(options, 'nobackground', False)
+ self.commandprefix = options.get('commandprefix', 'PY')
+ self.texcomments = get_bool_opt(options, 'texcomments', False)
+ self.mathescape = get_bool_opt(options, 'mathescape', False)
+ self.escapeinside = options.get('escapeinside', '')
+ if len(self.escapeinside) == 2:
+ self.left = self.escapeinside[0]
+ self.right = self.escapeinside[1]
+ else:
+ self.escapeinside = ''
self.envname = options.get('envname', 'Verbatim')
-
- self._create_stylesheet()
-
- def _create_stylesheet(self):
- t2n = self.ttype2name = {Token: ''}
- c2d = self.cmd2def = {}
- cp = self.commandprefix
-
- def rgbcolor(col):
- if col:
- return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
- for i in (0, 2, 4)])
- else:
- return '1,1,1'
-
- for ttype, ndef in self.style:
- name = _get_ttype_name(ttype)
- cmndef = ''
- if ndef['bold']:
- cmndef += r'\let\$$@bf=\textbf'
- if ndef['italic']:
- cmndef += r'\let\$$@it=\textit'
- if ndef['underline']:
- cmndef += r'\let\$$@ul=\underline'
- if ndef['roman']:
- cmndef += r'\let\$$@ff=\textrm'
- if ndef['sans']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['mono']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['color']:
- cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
- rgbcolor(ndef['color']))
- if ndef['border']:
+
+ self._create_stylesheet()
+
+ def _create_stylesheet(self):
+ t2n = self.ttype2name = {Token: ''}
+ c2d = self.cmd2def = {}
+ cp = self.commandprefix
+
+ def rgbcolor(col):
+ if col:
+ return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
+ for i in (0, 2, 4)])
+ else:
+ return '1,1,1'
+
+ for ttype, ndef in self.style:
+ name = _get_ttype_name(ttype)
+ cmndef = ''
+ if ndef['bold']:
+ cmndef += r'\let\$$@bf=\textbf'
+ if ndef['italic']:
+ cmndef += r'\let\$$@it=\textit'
+ if ndef['underline']:
+ cmndef += r'\let\$$@ul=\underline'
+ if ndef['roman']:
+ cmndef += r'\let\$$@ff=\textrm'
+ if ndef['sans']:
+ cmndef += r'\let\$$@ff=\textsf'
+ if ndef['mono']:
+ cmndef += r'\let\$$@ff=\textsf'
+ if ndef['color']:
+ cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
+ rgbcolor(ndef['color']))
+ if ndef['border']:
cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{\string -\fboxrule}'
r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}}' %
- (rgbcolor(ndef['border']),
- rgbcolor(ndef['bgcolor'])))
- elif ndef['bgcolor']:
+ (rgbcolor(ndef['border']),
+ rgbcolor(ndef['bgcolor'])))
+ elif ndef['bgcolor']:
cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{0pt}'
r'\colorbox[rgb]{%s}{\strut ##1}}}' %
- rgbcolor(ndef['bgcolor']))
- if cmndef == '':
- continue
- cmndef = cmndef.replace('$$', cp)
- t2n[ttype] = name
- c2d[name] = cmndef
-
- def get_style_defs(self, arg=''):
- """
- Return the command sequences needed to define the commands
- used to format text in the verbatim environment. ``arg`` is ignored.
- """
- cp = self.commandprefix
- styles = []
+ rgbcolor(ndef['bgcolor']))
+ if cmndef == '':
+ continue
+ cmndef = cmndef.replace('$$', cp)
+ t2n[ttype] = name
+ c2d[name] = cmndef
+
+ def get_style_defs(self, arg=''):
+ """
+ Return the command sequences needed to define the commands
+ used to format text in the verbatim environment. ``arg`` is ignored.
+ """
+ cp = self.commandprefix
+ styles = []
for name, definition in self.cmd2def.items():
styles.append(r'\@namedef{%s@tok@%s}{%s}' % (cp, name, definition))
- return STYLE_TEMPLATE % {'cp': self.commandprefix,
- 'styles': '\n'.join(styles)}
-
- def format_unencoded(self, tokensource, outfile):
- # TODO: add support for background colors
- t2n = self.ttype2name
- cp = self.commandprefix
-
- if self.full:
- realoutfile = outfile
- outfile = StringIO()
-
+ return STYLE_TEMPLATE % {'cp': self.commandprefix,
+ 'styles': '\n'.join(styles)}
+
+ def format_unencoded(self, tokensource, outfile):
+ # TODO: add support for background colors
+ t2n = self.ttype2name
+ cp = self.commandprefix
+
+ if self.full:
+ realoutfile = outfile
+ outfile = StringIO()
+
outfile.write('\\begin{' + self.envname + '}[commandchars=\\\\\\{\\}')
- if self.linenos:
- start, step = self.linenostart, self.linenostep
+ if self.linenos:
+ start, step = self.linenostart, self.linenostep
outfile.write(',numbers=left' +
(start and ',firstnumber=%d' % start or '') +
(step and ',stepnumber=%d' % step or ''))
- if self.mathescape or self.texcomments or self.escapeinside:
+ if self.mathescape or self.texcomments or self.escapeinside:
outfile.write(',codes={\\catcode`\\$=3\\catcode`\\^=7'
'\\catcode`\\_=8\\relax}')
- if self.verboptions:
+ if self.verboptions:
outfile.write(',' + self.verboptions)
outfile.write(']\n')
-
- for ttype, value in tokensource:
- if ttype in Token.Comment:
- if self.texcomments:
- # Try to guess comment starting lexeme and escape it ...
- start = value[0:1]
+
+ for ttype, value in tokensource:
+ if ttype in Token.Comment:
+ if self.texcomments:
+ # Try to guess comment starting lexeme and escape it ...
+ start = value[0:1]
for i in range(1, len(value)):
- if start[0] != value[i]:
- break
- start += value[i]
-
- value = value[len(start):]
- start = escape_tex(start, cp)
-
- # ... but do not escape inside comment.
- value = start + value
- elif self.mathescape:
- # Only escape parts not inside a math environment.
- parts = value.split('$')
- in_math = False
- for i, part in enumerate(parts):
- if not in_math:
- parts[i] = escape_tex(part, cp)
- in_math = not in_math
- value = '$'.join(parts)
- elif self.escapeinside:
- text = value
- value = ''
- while text:
- a, sep1, text = text.partition(self.left)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- value += escape_tex(a, cp) + b
- else:
- value += escape_tex(a + sep1 + b, cp)
- else:
- value += escape_tex(a, cp)
- else:
- value = escape_tex(value, cp)
- elif ttype not in Token.Escape:
- value = escape_tex(value, cp)
- styles = []
- while ttype is not Token:
- try:
- styles.append(t2n[ttype])
- except KeyError:
- # not in current style
- styles.append(_get_ttype_name(ttype))
- ttype = ttype.parent
- styleval = '+'.join(reversed(styles))
- if styleval:
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
- outfile.write('\n')
- if spl[-1]:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
- else:
- outfile.write(value)
-
+ if start[0] != value[i]:
+ break
+ start += value[i]
+
+ value = value[len(start):]
+ start = escape_tex(start, cp)
+
+ # ... but do not escape inside comment.
+ value = start + value
+ elif self.mathescape:
+ # Only escape parts not inside a math environment.
+ parts = value.split('$')
+ in_math = False
+ for i, part in enumerate(parts):
+ if not in_math:
+ parts[i] = escape_tex(part, cp)
+ in_math = not in_math
+ value = '$'.join(parts)
+ elif self.escapeinside:
+ text = value
+ value = ''
+ while text:
+ a, sep1, text = text.partition(self.left)
+ if sep1:
+ b, sep2, text = text.partition(self.right)
+ if sep2:
+ value += escape_tex(a, cp) + b
+ else:
+ value += escape_tex(a + sep1 + b, cp)
+ else:
+ value += escape_tex(a, cp)
+ else:
+ value = escape_tex(value, cp)
+ elif ttype not in Token.Escape:
+ value = escape_tex(value, cp)
+ styles = []
+ while ttype is not Token:
+ try:
+ styles.append(t2n[ttype])
+ except KeyError:
+ # not in current style
+ styles.append(_get_ttype_name(ttype))
+ ttype = ttype.parent
+ styleval = '+'.join(reversed(styles))
+ if styleval:
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
+ outfile.write('\n')
+ if spl[-1]:
+ outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
+ else:
+ outfile.write(value)
+
outfile.write('\\end{' + self.envname + '}\n')
-
- if self.full:
+
+ if self.full:
encoding = self.encoding or 'utf8'
# map known existings encodings from LaTeX distribution
encoding = {
@@ -418,33 +418,33 @@ class LatexFormatter(Formatter):
'latin_1': 'latin1',
'iso_8859_1': 'latin1',
}.get(encoding.replace('-', '_'), encoding)
- realoutfile.write(DOC_TEMPLATE %
- dict(docclass = self.docclass,
- preamble = self.preamble,
- title = self.title,
+ realoutfile.write(DOC_TEMPLATE %
+ dict(docclass = self.docclass,
+ preamble = self.preamble,
+ title = self.title,
encoding = encoding,
- styledefs = self.get_style_defs(),
- code = outfile.getvalue()))
-
-
-class LatexEmbeddedLexer(Lexer):
- """
- This lexer takes one lexer as argument, the lexer for the language
- being formatted, and the left and right delimiters for escaped text.
-
- First everything is scanned using the language lexer to obtain
- strings and comments. All other consecutive tokens are merged and
- the resulting text is scanned for escaped segments, which are given
- the Token.Escape type. Finally text that is not escaped is scanned
- again with the language lexer.
- """
- def __init__(self, left, right, lang, **options):
- self.left = left
- self.right = right
- self.lang = lang
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
+ styledefs = self.get_style_defs(),
+ code = outfile.getvalue()))
+
+
+class LatexEmbeddedLexer(Lexer):
+ """
+ This lexer takes one lexer as argument, the lexer for the language
+ being formatted, and the left and right delimiters for escaped text.
+
+ First everything is scanned using the language lexer to obtain
+ strings and comments. All other consecutive tokens are merged and
+ the resulting text is scanned for escaped segments, which are given
+ the Token.Escape type. Finally text that is not escaped is scanned
+ again with the language lexer.
+ """
+ def __init__(self, left, right, lang, **options):
+ self.left = left
+ self.right = right
+ self.lang = lang
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
# find and remove all the escape tokens (replace with an empty string)
# this is very similar to DelegatingLexer.get_tokens_unprocessed.
buffered = ''
@@ -477,35 +477,35 @@ class LatexEmbeddedLexer(Lexer):
def _filter_to(self, it, pred):
""" Keep only the tokens that match `pred`, merge the others together """
- buf = ''
- idx = 0
+ buf = ''
+ idx = 0
for i, t, v in it:
if pred(t):
- if buf:
+ if buf:
yield idx, None, buf
- buf = ''
- yield i, t, v
- else:
- if not buf:
- idx = i
- buf += v
- if buf:
+ buf = ''
+ yield i, t, v
+ else:
+ if not buf:
+ idx = i
+ buf += v
+ if buf:
yield idx, None, buf
-
+
def _find_escape_tokens(self, text):
""" Find escape tokens within text, give token=None otherwise """
index = 0
- while text:
- a, sep1, text = text.partition(self.left)
- if a:
+ while text:
+ a, sep1, text = text.partition(self.left)
+ if a:
yield index, None, a
index += len(a)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- yield index + len(sep1), Token.Escape, b
- index += len(sep1) + len(b) + len(sep2)
- else:
- yield index, Token.Error, sep1
- index += len(sep1)
- text = b
+ if sep1:
+ b, sep2, text = text.partition(self.right)
+ if sep2:
+ yield index + len(sep1), Token.Escape, b
+ index += len(sep1) + len(b) + len(sep2)
+ else:
+ yield index, Token.Error, sep1
+ index += len(sep1)
+ text = b
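
Aside (not part of the patch): a short sketch of how the LatexFormatter defined above is normally driven; the file name is a placeholder.

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import LatexFormatter

code = 'def foo(bar):\n    pass\n'

# full=True wraps the Verbatim block in a complete, standalone LaTeX document.
with open('example.tex', 'w') as f:  # placeholder path
    highlight(code, PythonLexer(), LatexFormatter(full=True, linenos=True), f)

# For embedding in an existing document, emit the macro definitions for the
# preamble and the Verbatim body separately.
preamble = LatexFormatter().get_style_defs()
body = highlight(code, PythonLexer(), LatexFormatter())
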
diff --git a/contrib/python/Pygments/py3/pygments/formatters/other.py b/contrib/python/Pygments/py3/pygments/formatters/other.py
index 1a12c42b96..71971bba23 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/other.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/other.py
@@ -1,161 +1,161 @@
-"""
- pygments.formatters.other
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Other formatters: NullFormatter, RawTokenFormatter.
-
+"""
+ pygments.formatters.other
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Other formatters: NullFormatter, RawTokenFormatter.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
from pygments.util import get_choice_opt
-from pygments.token import Token
-from pygments.console import colorize
-
-__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
-
-
-class NullFormatter(Formatter):
- """
- Output the text unchanged without any formatting.
- """
- name = 'Text only'
- aliases = ['text', 'null']
- filenames = ['*.txt']
-
- def format(self, tokensource, outfile):
- enc = self.encoding
- for ttype, value in tokensource:
- if enc:
- outfile.write(value.encode(enc))
- else:
- outfile.write(value)
-
-
-class RawTokenFormatter(Formatter):
- r"""
- Format tokens as a raw representation for storing token streams.
-
- The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
- be converted to a token stream with the `RawTokenLexer`, described in the
- :doc:`lexer list <lexers>`.
-
- Only two options are accepted:
-
- `compress`
- If set to ``'gz'`` or ``'bz2'``, compress the output with the given
- compression algorithm after encoding (default: ``''``).
- `error_color`
- If set to a color name, highlight error tokens using that color. If
- set but with no value, defaults to ``'red'``.
-
- .. versionadded:: 0.11
-
- """
- name = 'Raw tokens'
- aliases = ['raw', 'tokens']
- filenames = ['*.raw']
-
- unicodeoutput = False
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- # We ignore self.encoding if it is set, since it gets set for lexer
- # and formatter if given with -Oencoding on the command line.
- # The RawTokenFormatter outputs only ASCII. Override here.
- self.encoding = 'ascii' # let pygments.format() do the right thing
- self.compress = get_choice_opt(options, 'compress',
- ['', 'none', 'gz', 'bz2'], '')
- self.error_color = options.get('error_color', None)
- if self.error_color is True:
- self.error_color = 'red'
- if self.error_color is not None:
- try:
- colorize(self.error_color, '')
- except KeyError:
- raise ValueError("Invalid color %r specified" %
- self.error_color)
-
- def format(self, tokensource, outfile):
- try:
- outfile.write(b'')
- except TypeError:
- raise TypeError('The raw tokens formatter needs a binary '
- 'output file')
- if self.compress == 'gz':
- import gzip
- outfile = gzip.GzipFile('', 'wb', 9, outfile)
+from pygments.token import Token
+from pygments.console import colorize
+
+__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
+
+
+class NullFormatter(Formatter):
+ """
+ Output the text unchanged without any formatting.
+ """
+ name = 'Text only'
+ aliases = ['text', 'null']
+ filenames = ['*.txt']
+
+ def format(self, tokensource, outfile):
+ enc = self.encoding
+ for ttype, value in tokensource:
+ if enc:
+ outfile.write(value.encode(enc))
+ else:
+ outfile.write(value)
+
+
+class RawTokenFormatter(Formatter):
+ r"""
+ Format tokens as a raw representation for storing token streams.
+
+ The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
+ be converted to a token stream with the `RawTokenLexer`, described in the
+ :doc:`lexer list <lexers>`.
+
+ Only two options are accepted:
+
+ `compress`
+ If set to ``'gz'`` or ``'bz2'``, compress the output with the given
+ compression algorithm after encoding (default: ``''``).
+ `error_color`
+ If set to a color name, highlight error tokens using that color. If
+ set but with no value, defaults to ``'red'``.
+
+ .. versionadded:: 0.11
+
+ """
+ name = 'Raw tokens'
+ aliases = ['raw', 'tokens']
+ filenames = ['*.raw']
+
+ unicodeoutput = False
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ # We ignore self.encoding if it is set, since it gets set for lexer
+ # and formatter if given with -Oencoding on the command line.
+ # The RawTokenFormatter outputs only ASCII. Override here.
+ self.encoding = 'ascii' # let pygments.format() do the right thing
+ self.compress = get_choice_opt(options, 'compress',
+ ['', 'none', 'gz', 'bz2'], '')
+ self.error_color = options.get('error_color', None)
+ if self.error_color is True:
+ self.error_color = 'red'
+ if self.error_color is not None:
+ try:
+ colorize(self.error_color, '')
+ except KeyError:
+ raise ValueError("Invalid color %r specified" %
+ self.error_color)
+
+ def format(self, tokensource, outfile):
+ try:
+ outfile.write(b'')
+ except TypeError:
+ raise TypeError('The raw tokens formatter needs a binary '
+ 'output file')
+ if self.compress == 'gz':
+ import gzip
+ outfile = gzip.GzipFile('', 'wb', 9, outfile)
write = outfile.write
flush = outfile.close
- elif self.compress == 'bz2':
- import bz2
- compressor = bz2.BZ2Compressor(9)
+ elif self.compress == 'bz2':
+ import bz2
+ compressor = bz2.BZ2Compressor(9)
- def write(text):
+ def write(text):
outfile.write(compressor.compress(text))
- def flush():
- outfile.write(compressor.flush())
- outfile.flush()
- else:
+ def flush():
+ outfile.write(compressor.flush())
+ outfile.flush()
+ else:
write = outfile.write
- flush = outfile.flush
-
- if self.error_color:
- for ttype, value in tokensource:
+ flush = outfile.flush
+
+ if self.error_color:
+ for ttype, value in tokensource:
line = b"%r\t%r\n" % (ttype, value)
- if ttype is Token.Error:
- write(colorize(self.error_color, line))
- else:
- write(line)
- else:
- for ttype, value in tokensource:
+ if ttype is Token.Error:
+ write(colorize(self.error_color, line))
+ else:
+ write(line)
+ else:
+ for ttype, value in tokensource:
write(b"%r\t%r\n" % (ttype, value))
- flush()
-
+ flush()
+
TESTCASE_BEFORE = '''\
def testNeedsName(lexer):
- fragment = %r
- tokens = [
-'''
+ fragment = %r
+ tokens = [
+'''
TESTCASE_AFTER = '''\
- ]
+ ]
assert list(lexer.get_tokens(fragment)) == tokens
-'''
-
-
-class TestcaseFormatter(Formatter):
- """
- Format tokens as appropriate for a new testcase.
-
- .. versionadded:: 2.0
- """
- name = 'Testcase'
- aliases = ['testcase']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- if self.encoding is not None and self.encoding != 'utf-8':
- raise ValueError("Only None and utf-8 are allowed encodings.")
-
- def format(self, tokensource, outfile):
- indentation = ' ' * 12
- rawbuf = []
- outbuf = []
- for ttype, value in tokensource:
- rawbuf.append(value)
- outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
-
+'''
+
+
+class TestcaseFormatter(Formatter):
+ """
+ Format tokens as appropriate for a new testcase.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Testcase'
+ aliases = ['testcase']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ if self.encoding is not None and self.encoding != 'utf-8':
+ raise ValueError("Only None and utf-8 are allowed encodings.")
+
+ def format(self, tokensource, outfile):
+ indentation = ' ' * 12
+ rawbuf = []
+ outbuf = []
+ for ttype, value in tokensource:
+ rawbuf.append(value)
+ outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
+
before = TESTCASE_BEFORE % (''.join(rawbuf),)
during = ''.join(outbuf)
- after = TESTCASE_AFTER
- if self.encoding is None:
- outfile.write(before + during + after)
- else:
- outfile.write(before.encode('utf-8'))
- outfile.write(during.encode('utf-8'))
- outfile.write(after.encode('utf-8'))
- outfile.flush()
+ after = TESTCASE_AFTER
+ if self.encoding is None:
+ outfile.write(before + during + after)
+ else:
+ outfile.write(before.encode('utf-8'))
+ outfile.write(during.encode('utf-8'))
+ outfile.write(after.encode('utf-8'))
+ outfile.flush()
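
Aside (not part of the patch): a sketch of two of the formatters above in use. The RawTokenFormatter writes bytes, so its output file must be opened in binary mode; the file name is a placeholder.

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import NullFormatter, RawTokenFormatter

code = 'print("hello")\n'

# NullFormatter reproduces the token values unchanged - useful as a baseline.
plain = highlight(code, PythonLexer(), NullFormatter())

# RawTokenFormatter dumps "tokentype<TAB>repr(value)" lines as ASCII bytes.
with open('tokens.raw', 'wb') as f:  # placeholder path
    highlight(code, PythonLexer(), RawTokenFormatter(compress=''), f)
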
diff --git a/contrib/python/Pygments/py3/pygments/formatters/rtf.py b/contrib/python/Pygments/py3/pygments/formatters/rtf.py
index ba071c78f4..fb4b4c69a4 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/rtf.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/rtf.py
@@ -1,146 +1,146 @@
-"""
- pygments.formatters.rtf
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- A formatter that generates RTF files.
-
+"""
+ pygments.formatters.rtf
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ A formatter that generates RTF files.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
from pygments.util import get_int_opt, surrogatepair
-
-
-__all__ = ['RtfFormatter']
-
-
-class RtfFormatter(Formatter):
- """
- Format tokens as RTF markup. This formatter automatically outputs full RTF
- documents with color information and other useful stuff. Perfect for Copy and
- Paste into Microsoft(R) Word(R) documents.
-
- Please note that ``encoding`` and ``outencoding`` options are ignored.
- The RTF format is ASCII natively, but handles unicode characters correctly
- thanks to escape sequences.
-
- .. versionadded:: 0.6
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `fontface`
+
+
+__all__ = ['RtfFormatter']
+
+
+class RtfFormatter(Formatter):
+ """
+ Format tokens as RTF markup. This formatter automatically outputs full RTF
+ documents with color information and other useful stuff. Perfect for Copy and
+ Paste into Microsoft(R) Word(R) documents.
+
+ Please note that ``encoding`` and ``outencoding`` options are ignored.
+ The RTF format is ASCII natively, but handles unicode characters correctly
+ thanks to escape sequences.
+
+ .. versionadded:: 0.6
+
+ Additional options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+
+ `fontface`
The used font family, for example ``Bitstream Vera Sans``. Defaults to
- some generic font which is supposed to have fixed width.
-
- `fontsize`
- Size of the font used. Size is specified in half points. The
- default is 24 half-points, giving a size 12 font.
-
- .. versionadded:: 2.0
- """
- name = 'RTF'
- aliases = ['rtf']
- filenames = ['*.rtf']
-
- def __init__(self, **options):
- r"""
- Additional options accepted:
-
- ``fontface``
- Name of the font used. Could for example be ``'Courier New'``
- to further specify the default which is ``'\fmodern'``. The RTF
- specification claims that ``\fmodern`` are "Fixed-pitch serif
- and sans serif fonts". Hope every RTF implementation thinks
- the same about modern...
-
- """
- Formatter.__init__(self, **options)
- self.fontface = options.get('fontface') or ''
- self.fontsize = get_int_opt(options, 'fontsize', 0)
-
- def _escape(self, text):
+ some generic font which is supposed to have fixed width.
+
+ `fontsize`
+ Size of the font used. Size is specified in half points. The
+ default is 24 half-points, giving a size 12 font.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RTF'
+ aliases = ['rtf']
+ filenames = ['*.rtf']
+
+ def __init__(self, **options):
+ r"""
+ Additional options accepted:
+
+ ``fontface``
+ Name of the font used. Could for example be ``'Courier New'``
+ to further specify the default which is ``'\fmodern'``. The RTF
+ specification claims that ``\fmodern`` are "Fixed-pitch serif
+ and sans serif fonts". Hope every RTF implementation thinks
+ the same about modern...
+
+ """
+ Formatter.__init__(self, **options)
+ self.fontface = options.get('fontface') or ''
+ self.fontsize = get_int_opt(options, 'fontsize', 0)
+
+ def _escape(self, text):
return text.replace('\\', '\\\\') \
.replace('{', '\\{') \
.replace('}', '\\}')
-
- def _escape_text(self, text):
+
+ def _escape_text(self, text):
# empty strings, should give a small performance improvement
- if not text:
+ if not text:
return ''
-
- # escape text
- text = self._escape(text)
-
- buf = []
- for c in text:
- cn = ord(c)
- if cn < (2**7):
- # ASCII character
- buf.append(str(c))
- elif (2**7) <= cn < (2**16):
- # single unicode escape sequence
+
+ # escape text
+ text = self._escape(text)
+
+ buf = []
+ for c in text:
+ cn = ord(c)
+ if cn < (2**7):
+ # ASCII character
+ buf.append(str(c))
+ elif (2**7) <= cn < (2**16):
+ # single unicode escape sequence
buf.append('{\\u%d}' % cn)
- elif (2**16) <= cn:
- # RTF limits unicode to 16 bits.
- # Force surrogate pairs
+ elif (2**16) <= cn:
+ # RTF limits unicode to 16 bits.
+ # Force surrogate pairs
buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn))
-
+
return ''.join(buf).replace('\n', '\\par\n')
-
- def format_unencoded(self, tokensource, outfile):
- # rtf 1.8 header
+
+ def format_unencoded(self, tokensource, outfile):
+ # rtf 1.8 header
outfile.write('{\\rtf1\\ansi\\uc0\\deff0'
'{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
'{\\colortbl;' % (self.fontface and
' ' + self._escape(self.fontface) or
''))
-
- # convert colors and save them in a mapping to access them later.
- color_mapping = {}
- offset = 1
- for _, style in self.style:
- for color in style['color'], style['bgcolor'], style['border']:
- if color and color not in color_mapping:
- color_mapping[color] = offset
+
+ # convert colors and save them in a mapping to access them later.
+ color_mapping = {}
+ offset = 1
+ for _, style in self.style:
+ for color in style['color'], style['bgcolor'], style['border']:
+ if color and color not in color_mapping:
+ color_mapping[color] = offset
outfile.write('\\red%d\\green%d\\blue%d;' % (
- int(color[0:2], 16),
- int(color[2:4], 16),
- int(color[4:6], 16)
- ))
- offset += 1
+ int(color[0:2], 16),
+ int(color[2:4], 16),
+ int(color[4:6], 16)
+ ))
+ offset += 1
outfile.write('}\\f0 ')
- if self.fontsize:
+ if self.fontsize:
outfile.write('\\fs%d' % self.fontsize)
-
- # highlight stream
- for ttype, value in tokensource:
- while not self.style.styles_token(ttype) and ttype.parent:
- ttype = ttype.parent
- style = self.style.style_for_token(ttype)
- buf = []
- if style['bgcolor']:
+
+ # highlight stream
+ for ttype, value in tokensource:
+ while not self.style.styles_token(ttype) and ttype.parent:
+ ttype = ttype.parent
+ style = self.style.style_for_token(ttype)
+ buf = []
+ if style['bgcolor']:
buf.append('\\cb%d' % color_mapping[style['bgcolor']])
- if style['color']:
+ if style['color']:
buf.append('\\cf%d' % color_mapping[style['color']])
- if style['bold']:
+ if style['bold']:
buf.append('\\b')
- if style['italic']:
+ if style['italic']:
buf.append('\\i')
- if style['underline']:
+ if style['underline']:
buf.append('\\ul')
- if style['border']:
+ if style['border']:
buf.append('\\chbrdr\\chcfpat%d' %
- color_mapping[style['border']])
+ color_mapping[style['border']])
start = ''.join(buf)
- if start:
+ if start:
outfile.write('{%s ' % start)
- outfile.write(self._escape_text(value))
- if start:
+ outfile.write(self._escape_text(value))
+ if start:
outfile.write('}')
-
+
outfile.write('}')
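
Aside (not part of the patch): minimal usage of the RtfFormatter above. Note that fontsize is given in half-points, so 24 yields a 12pt font; the output path is a placeholder.

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import RtfFormatter

code = 'def foo(bar):\n    pass\n'

formatter = RtfFormatter(fontface='Courier New', fontsize=24)
with open('example.rtf', 'w') as f:  # placeholder path
    highlight(code, PythonLexer(), formatter, f)
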
diff --git a/contrib/python/Pygments/py3/pygments/formatters/svg.py b/contrib/python/Pygments/py3/pygments/formatters/svg.py
index 547a7bbcdd..e761244178 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/svg.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/svg.py
@@ -1,57 +1,57 @@
-"""
- pygments.formatters.svg
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for SVG output.
-
+"""
+ pygments.formatters.svg
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for SVG output.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
from pygments.token import Comment
-from pygments.util import get_bool_opt, get_int_opt
-
-__all__ = ['SvgFormatter']
-
-
-def escape_html(text):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.replace('&', '&amp;'). \
- replace('<', '&lt;'). \
- replace('>', '&gt;'). \
- replace('"', '&quot;'). \
- replace("'", '&#39;')
-
-
-class2style = {}
-
-class SvgFormatter(Formatter):
- """
- Format tokens as an SVG graphics file. This formatter is still experimental.
- Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
- coordinates containing ``<tspan>`` elements with the individual token styles.
-
- By default, this formatter outputs a full SVG document including doctype
- declaration and the ``<svg>`` root element.
-
- .. versionadded:: 0.9
-
- Additional options accepted:
-
- `nowrap`
- Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
- don't add a XML declaration and a doctype. If true, the `fontfamily`
- and `fontsize` options are ignored. Defaults to ``False``.
-
- `fontfamily`
- The value to give the wrapping ``<g>`` element's ``font-family``
- attribute, defaults to ``"monospace"``.
-
- `fontsize`
- The value to give the wrapping ``<g>`` element's ``font-size``
- attribute, defaults to ``"14px"``.
-
+from pygments.util import get_bool_opt, get_int_opt
+
+__all__ = ['SvgFormatter']
+
+
+def escape_html(text):
+ """Escape &, <, > as well as single and double quotes for HTML."""
+ return text.replace('&', '&amp;'). \
+ replace('<', '&lt;'). \
+ replace('>', '&gt;'). \
+ replace('"', '&quot;'). \
+ replace("'", '&#39;')
+
+
+class2style = {}
+
+class SvgFormatter(Formatter):
+ """
+ Format tokens as an SVG graphics file. This formatter is still experimental.
+ Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
+ coordinates containing ``<tspan>`` elements with the individual token styles.
+
+ By default, this formatter outputs a full SVG document including doctype
+ declaration and the ``<svg>`` root element.
+
+ .. versionadded:: 0.9
+
+ Additional options accepted:
+
+ `nowrap`
+ Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
+ don't add a XML declaration and a doctype. If true, the `fontfamily`
+ and `fontsize` options are ignored. Defaults to ``False``.
+
+ `fontfamily`
+ The value to give the wrapping ``<g>`` element's ``font-family``
+ attribute, defaults to ``"monospace"``.
+
+ `fontsize`
+ The value to give the wrapping ``<g>`` element's ``font-size``
+ attribute, defaults to ``"14px"``.
+
`linenos`
If ``True``, add line numbers (default: ``False``).
@@ -65,73 +65,73 @@ class SvgFormatter(Formatter):
Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
for up to 4-digit line numbers. Increase width for longer code blocks).
- `xoffset`
- Starting offset in X direction, defaults to ``0``.
-
- `yoffset`
- Starting offset in Y direction, defaults to the font size if it is given
- in pixels, or ``20`` else. (This is necessary since text coordinates
- refer to the text baseline, not the top edge.)
-
- `ystep`
- Offset to add to the Y coordinate for each subsequent line. This should
- roughly be the text size plus 5. It defaults to that value if the text
- size is given in pixels, or ``25`` else.
-
- `spacehack`
- Convert spaces in the source to ``&#160;``, which are non-breaking
- spaces. SVG provides the ``xml:space`` attribute to control how
- whitespace inside tags is handled, in theory, the ``preserve`` value
- could be used to keep all whitespace as-is. However, many current SVG
- viewers don't obey that rule, so this option is provided as a workaround
- and defaults to ``True``.
- """
- name = 'SVG'
- aliases = ['svg']
- filenames = ['*.svg']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.fontfamily = options.get('fontfamily', 'monospace')
- self.fontsize = options.get('fontsize', '14px')
- self.xoffset = get_int_opt(options, 'xoffset', 0)
- fs = self.fontsize.strip()
- if fs.endswith('px'): fs = fs[:-2].strip()
- try:
- int_fs = int(fs)
- except:
- int_fs = 20
- self.yoffset = get_int_opt(options, 'yoffset', int_fs)
- self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
- self.spacehack = get_bool_opt(options, 'spacehack', True)
+ `xoffset`
+ Starting offset in X direction, defaults to ``0``.
+
+ `yoffset`
+ Starting offset in Y direction, defaults to the font size if it is given
+ in pixels, or ``20`` else. (This is necessary since text coordinates
+ refer to the text baseline, not the top edge.)
+
+ `ystep`
+ Offset to add to the Y coordinate for each subsequent line. This should
+ roughly be the text size plus 5. It defaults to that value if the text
+ size is given in pixels, or ``25`` else.
+
+ `spacehack`
+ Convert spaces in the source to ``&#160;``, which are non-breaking
+ spaces. SVG provides the ``xml:space`` attribute to control how
+ whitespace inside tags is handled, in theory, the ``preserve`` value
+ could be used to keep all whitespace as-is. However, many current SVG
+ viewers don't obey that rule, so this option is provided as a workaround
+ and defaults to ``True``.
+ """
+ name = 'SVG'
+ aliases = ['svg']
+ filenames = ['*.svg']
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.nowrap = get_bool_opt(options, 'nowrap', False)
+ self.fontfamily = options.get('fontfamily', 'monospace')
+ self.fontsize = options.get('fontsize', '14px')
+ self.xoffset = get_int_opt(options, 'xoffset', 0)
+ fs = self.fontsize.strip()
+ if fs.endswith('px'): fs = fs[:-2].strip()
+ try:
+ int_fs = int(fs)
+ except:
+ int_fs = 20
+ self.yoffset = get_int_opt(options, 'yoffset', int_fs)
+ self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
+ self.spacehack = get_bool_opt(options, 'spacehack', True)
self.linenos = get_bool_opt(options,'linenos',False)
self.linenostart = get_int_opt(options,'linenostart',1)
self.linenostep = get_int_opt(options,'linenostep',1)
self.linenowidth = get_int_opt(options,'linenowidth', 3*self.ystep)
- self._stylecache = {}
-
- def format_unencoded(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- For our implementation we put all lines in their own 'line group'.
- """
- x = self.xoffset
- y = self.yoffset
- if not self.nowrap:
- if self.encoding:
- outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
- self.encoding)
- else:
- outfile.write('<?xml version="1.0"?>\n')
- outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
- '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
- 'svg10.dtd">\n')
- outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
- outfile.write('<g font-family="%s" font-size="%s">\n' %
- (self.fontfamily, self.fontsize))
+ self._stylecache = {}
+
+ def format_unencoded(self, tokensource, outfile):
+ """
+ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
+ tuples and write it into ``outfile``.
+
+ For our implementation we put all lines in their own 'line group'.
+ """
+ x = self.xoffset
+ y = self.yoffset
+ if not self.nowrap:
+ if self.encoding:
+ outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
+ self.encoding)
+ else:
+ outfile.write('<?xml version="1.0"?>\n')
+ outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
+ '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
+ 'svg10.dtd">\n')
+ outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
+ outfile.write('<g font-family="%s" font-size="%s">\n' %
+ (self.fontfamily, self.fontsize))
counter = self.linenostart
counter_step = self.linenostep
@@ -146,17 +146,17 @@ class SvgFormatter(Formatter):
counter += 1
outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
- for ttype, value in tokensource:
- style = self._get_style(ttype)
- tspan = style and '<tspan' + style + '>' or ''
- tspanend = tspan and '</tspan>' or ''
- value = escape_html(value)
- if self.spacehack:
- value = value.expandtabs().replace(' ', '&#160;')
- parts = value.split('\n')
- for part in parts[:-1]:
- outfile.write(tspan + part + tspanend)
- y += self.ystep
+ for ttype, value in tokensource:
+ style = self._get_style(ttype)
+ tspan = style and '<tspan' + style + '>' or ''
+ tspanend = tspan and '</tspan>' or ''
+ value = escape_html(value)
+ if self.spacehack:
+ value = value.expandtabs().replace(' ', '&#160;')
+ parts = value.split('\n')
+ for part in parts[:-1]:
+ outfile.write(tspan + part + tspanend)
+ y += self.ystep
outfile.write('</text>\n')
if self.linenos and counter % counter_step == 0:
outfile.write('<text x="%s" y="%s" text-anchor="end" %s>%s</text>' %
@@ -164,25 +164,25 @@ class SvgFormatter(Formatter):
counter += 1
outfile.write('<text x="%s" y="%s" ' 'xml:space="preserve">' % (line_x,y))
- outfile.write(tspan + parts[-1] + tspanend)
- outfile.write('</text>')
-
- if not self.nowrap:
- outfile.write('</g></svg>\n')
-
- def _get_style(self, tokentype):
- if tokentype in self._stylecache:
- return self._stylecache[tokentype]
- otokentype = tokentype
- while not self.style.styles_token(tokentype):
- tokentype = tokentype.parent
- value = self.style.style_for_token(tokentype)
- result = ''
- if value['color']:
- result = ' fill="#' + value['color'] + '"'
- if value['bold']:
- result += ' font-weight="bold"'
- if value['italic']:
- result += ' font-style="italic"'
- self._stylecache[otokentype] = result
- return result
+ outfile.write(tspan + parts[-1] + tspanend)
+ outfile.write('</text>')
+
+ if not self.nowrap:
+ outfile.write('</g></svg>\n')
+
+ def _get_style(self, tokentype):
+ if tokentype in self._stylecache:
+ return self._stylecache[tokentype]
+ otokentype = tokentype
+ while not self.style.styles_token(tokentype):
+ tokentype = tokentype.parent
+ value = self.style.style_for_token(tokentype)
+ result = ''
+ if value['color']:
+ result = ' fill="#' + value['color'] + '"'
+ if value['bold']:
+ result += ' font-weight="bold"'
+ if value['italic']:
+ result += ' font-style="italic"'
+ self._stylecache[otokentype] = result
+ return result
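
Aside (not part of the patch): a sketch of the SvgFormatter above, including the linenos/linenostart options this patch carries. The output path is a placeholder.

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import SvgFormatter

code = 'def foo(bar):\n    pass\n'

# Each source line becomes its own <text> element; ystep controls the spacing.
svg = highlight(code, PythonLexer(),
                SvgFormatter(fontsize='14px', linenos=True, linenostart=1))
with open('example.svg', 'w') as f:  # placeholder path
    f.write(svg)
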
diff --git a/contrib/python/Pygments/py3/pygments/formatters/terminal.py b/contrib/python/Pygments/py3/pygments/formatters/terminal.py
index a27594d0df..d216897533 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/terminal.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/terminal.py
@@ -1,28 +1,28 @@
-"""
- pygments.formatters.terminal
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for terminal output with ANSI sequences.
-
+"""
+ pygments.formatters.terminal
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for terminal output with ANSI sequences.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.formatter import Formatter
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pygments.console import ansiformat
-from pygments.util import get_choice_opt
-
-
-__all__ = ['TerminalFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-TERMINAL_COLORS = {
- Token: ('', ''),
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.formatter import Formatter
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Token, Whitespace
+from pygments.console import ansiformat
+from pygments.util import get_choice_opt
+
+
+__all__ = ['TerminalFormatter']
+
+
+#: Map token types to a tuple of color values for light and dark
+#: backgrounds.
+TERMINAL_COLORS = {
+ Token: ('', ''),
+
Whitespace: ('gray', 'brightblack'),
Comment: ('gray', 'brightblack'),
Comment.Preproc: ('cyan', 'brightcyan'),
@@ -41,87 +41,87 @@ TERMINAL_COLORS = {
Name.Tag: ('brightblue', 'brightblue'),
String: ('yellow', 'yellow'),
Number: ('blue', 'brightblue'),
-
+
Generic.Deleted: ('brightred', 'brightred'),
Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
+ Generic.Heading: ('**', '**'),
Generic.Subheading: ('*magenta*', '*brightmagenta*'),
- Generic.Prompt: ('**', '**'),
+ Generic.Prompt: ('**', '**'),
Generic.Error: ('brightred', 'brightred'),
-
+
Error: ('_brightred_', '_brightred_'),
-}
-
-
-class TerminalFormatter(Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a text console.
- Color sequences are terminated at newlines, so that paging the output
- works correctly.
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers on the terminal output as well
- (default: ``False`` = no line numbers).
- """
- name = 'Terminal'
- aliases = ['terminal', 'console']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def format(self, tokensource, outfile):
- return Formatter.format(self, tokensource, outfile)
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
-
- def _get_color(self, ttype):
- # self.colorscheme is a dict containing usually generic types, so we
- # have to walk the tree of dots. The base Token type must be a key,
- # even if it's empty string, as in the default above.
- colors = self.colorscheme.get(ttype)
- while colors is None:
- ttype = ttype.parent
- colors = self.colorscheme.get(ttype)
- return colors[self.darkbg]
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- color = self._get_color(ttype)
-
- for line in value.splitlines(True):
- if color:
- outfile.write(ansiformat(color, line.rstrip('\n')))
- else:
- outfile.write(line.rstrip('\n'))
- if line.endswith('\n'):
- if self.linenos:
- self._write_lineno(outfile)
- else:
- outfile.write('\n')
-
- if self.linenos:
- outfile.write("\n")
+}
+
+
+class TerminalFormatter(Formatter):
+ r"""
+ Format tokens with ANSI color sequences, for output in a text console.
+ Color sequences are terminated at newlines, so that paging the output
+ works correctly.
+
+ The `get_style_defs()` method doesn't do anything special since there is
+ no support for common styles.
+
+ Options accepted:
+
+ `bg`
+ Set to ``"light"`` or ``"dark"`` depending on the terminal's background
+ (default: ``"light"``).
+
+ `colorscheme`
+ A dictionary mapping token types to (lightbg, darkbg) color names or
+ ``None`` (default: ``None`` = use builtin colorscheme).
+
+ `linenos`
+ Set to ``True`` to have line numbers on the terminal output as well
+ (default: ``False`` = no line numbers).
+ """
+ name = 'Terminal'
+ aliases = ['terminal', 'console']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+ self.darkbg = get_choice_opt(options, 'bg',
+ ['light', 'dark'], 'light') == 'dark'
+ self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
+ self.linenos = options.get('linenos', False)
+ self._lineno = 0
+
+ def format(self, tokensource, outfile):
+ return Formatter.format(self, tokensource, outfile)
+
+ def _write_lineno(self, outfile):
+ self._lineno += 1
+ outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
+
+ def _get_color(self, ttype):
+ # self.colorscheme is a dict containing usually generic types, so we
+ # have to walk the tree of dots. The base Token type must be a key,
+ # even if it's empty string, as in the default above.
+ colors = self.colorscheme.get(ttype)
+ while colors is None:
+ ttype = ttype.parent
+ colors = self.colorscheme.get(ttype)
+ return colors[self.darkbg]
+
+ def format_unencoded(self, tokensource, outfile):
+ if self.linenos:
+ self._write_lineno(outfile)
+
+ for ttype, value in tokensource:
+ color = self._get_color(ttype)
+
+ for line in value.splitlines(True):
+ if color:
+ outfile.write(ansiformat(color, line.rstrip('\n')))
+ else:
+ outfile.write(line.rstrip('\n'))
+ if line.endswith('\n'):
+ if self.linenos:
+ self._write_lineno(outfile)
+ else:
+ outfile.write('\n')
+
+ if self.linenos:
+ outfile.write("\n")
diff --git a/contrib/python/Pygments/py3/pygments/formatters/terminal256.py b/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
index be6f890f44..0ec735a83a 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
@@ -1,52 +1,52 @@
-"""
- pygments.formatters.terminal256
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for 256-color terminal output with ANSI sequences.
-
- RGB-to-XTERM color conversion routines adapted from xterm256-conv
- tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
- by Wolfgang Frisch.
-
- Formatter version 1.
-
+"""
+ pygments.formatters.terminal256
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Formatter for 256-color terminal output with ANSI sequences.
+
+ RGB-to-XTERM color conversion routines adapted from xterm256-conv
+ tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
+ by Wolfgang Frisch.
+
+ Formatter version 1.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# TODO:
-# - Options to map style's bold/underline/italic/border attributes
-# to some ANSI attrbutes (something like 'italic=underline')
-# - An option to output "style RGB to xterm RGB/index" conversion table
-# - An option to indicate that we are running in "reverse background"
-# xterm. This means that default colors are white-on-black, not
-# black-on-while, so colors like "white background" need to be converted
-# to "white background, black foreground", etc...
-
-from pygments.formatter import Formatter
+ :license: BSD, see LICENSE for details.
+"""
+
+# TODO:
+# - Options to map style's bold/underline/italic/border attributes
+# to some ANSI attributes (something like 'italic=underline')
+# - An option to output "style RGB to xterm RGB/index" conversion table
+# - An option to indicate that we are running in "reverse background"
+# xterm. This means that default colors are white-on-black, not
+# black-on-white, so colors like "white background" need to be converted
+# to "white background, black foreground", etc...
+
+from pygments.formatter import Formatter
from pygments.console import codes
from pygments.style import ansicolors
-
-
-__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
-
-
-class EscapeSequence:
+
+
+__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
+
+
+class EscapeSequence:
def __init__(self, fg=None, bg=None, bold=False, underline=False, italic=False):
- self.fg = fg
- self.bg = bg
- self.bold = bold
- self.underline = underline
+ self.fg = fg
+ self.bg = bg
+ self.bold = bold
+ self.underline = underline
self.italic = italic
-
- def escape(self, attrs):
- if len(attrs):
- return "\x1b[" + ";".join(attrs) + "m"
- return ""
-
- def color_string(self):
- attrs = []
- if self.fg is not None:
+
+ def escape(self, attrs):
+ if len(attrs):
+ return "\x1b[" + ";".join(attrs) + "m"
+ return ""
+
+ def color_string(self):
+ attrs = []
+ if self.fg is not None:
if self.fg in ansicolors:
esc = codes[self.fg.replace('ansi','')]
if ';01m' in esc:
@@ -55,58 +55,58 @@ class EscapeSequence:
attrs.append(esc[2:4])
else:
attrs.extend(("38", "5", "%i" % self.fg))
- if self.bg is not None:
+ if self.bg is not None:
if self.bg in ansicolors:
esc = codes[self.bg.replace('ansi','')]
# extract fg color code, add 10 for bg.
attrs.append(str(int(esc[2:4])+10))
else:
attrs.extend(("48", "5", "%i" % self.bg))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
+ if self.bold:
+ attrs.append("01")
+ if self.underline:
+ attrs.append("04")
if self.italic:
attrs.append("03")
- return self.escape(attrs)
-
- def true_color_string(self):
- attrs = []
- if self.fg:
- attrs.extend(("38", "2", str(self.fg[0]), str(self.fg[1]), str(self.fg[2])))
- if self.bg:
- attrs.extend(("48", "2", str(self.bg[0]), str(self.bg[1]), str(self.bg[2])))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
+ return self.escape(attrs)
+
+ def true_color_string(self):
+ attrs = []
+ if self.fg:
+ attrs.extend(("38", "2", str(self.fg[0]), str(self.fg[1]), str(self.fg[2])))
+ if self.bg:
+ attrs.extend(("48", "2", str(self.bg[0]), str(self.bg[1]), str(self.bg[2])))
+ if self.bold:
+ attrs.append("01")
+ if self.underline:
+ attrs.append("04")
if self.italic:
attrs.append("03")
- return self.escape(attrs)
-
- def reset_string(self):
- attrs = []
- if self.fg is not None:
- attrs.append("39")
- if self.bg is not None:
- attrs.append("49")
+ return self.escape(attrs)
+
+ def reset_string(self):
+ attrs = []
+ if self.fg is not None:
+ attrs.append("39")
+ if self.bg is not None:
+ attrs.append("49")
if self.bold or self.underline or self.italic:
- attrs.append("00")
- return self.escape(attrs)
-
-
-class Terminal256Formatter(Formatter):
- """
- Format tokens with ANSI color sequences, for output in a 256-color
- terminal or console. Like in `TerminalFormatter` color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- The formatter takes colors from a style defined by the `style` option
- and converts them to nearest ANSI 256-color escape sequences. Bold and
- underline attributes from the style are preserved (and displayed).
-
- .. versionadded:: 0.9
-
+ attrs.append("00")
+ return self.escape(attrs)
+
+
+class Terminal256Formatter(Formatter):
+ """
+ Format tokens with ANSI color sequences, for output in a 256-color
+ terminal or console. Like in `TerminalFormatter` color sequences
+ are terminated at newlines, so that paging the output works correctly.
+
+ The formatter takes colors from a style defined by the `style` option
+ and converts them to nearest ANSI 256-color escape sequences. Bold and
+ underline attributes from the style are preserved (and displayed).
+
+ .. versionadded:: 0.9
+
.. versionchanged:: 2.2
If the used style defines foreground colors in the form ``#ansi*``, then
`Terminal256Formatter` will map these to non extended foreground color.
@@ -118,221 +118,221 @@ class Terminal256Formatter(Formatter):
See :ref:`this table <new-ansi-color-names>` for more information.
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
+ Options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
`linenos`
Set to ``True`` to have line numbers on the terminal output as well
(default: ``False`` = no line numbers).
- """
- name = 'Terminal256'
- aliases = ['terminal256', 'console256', '256']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.xterm_colors = []
- self.best_match = {}
- self.style_string = {}
-
- self.usebold = 'nobold' not in options
- self.useunderline = 'nounderline' not in options
+ """
+ name = 'Terminal256'
+ aliases = ['terminal256', 'console256', '256']
+ filenames = []
+
+ def __init__(self, **options):
+ Formatter.__init__(self, **options)
+
+ self.xterm_colors = []
+ self.best_match = {}
+ self.style_string = {}
+
+ self.usebold = 'nobold' not in options
+ self.useunderline = 'nounderline' not in options
self.useitalic = 'noitalic' not in options
-
- self._build_color_table() # build an RGB-to-256 color conversion table
- self._setup_styles() # convert selected style's colors to term. colors
-
+
+ self._build_color_table() # build an RGB-to-256 color conversion table
+ self._setup_styles() # convert selected style's colors to term. colors
+
self.linenos = options.get('linenos', False)
self._lineno = 0
- def _build_color_table(self):
- # colors 0..15: 16 basic colors
-
- self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
- self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
- self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
- self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
- self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
- self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
- self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
- self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
- self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
- self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
- self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
- self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
- self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
- self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
- self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
- self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
-
- # colors 16..232: the 6x6x6 color cube
-
- valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
-
- for i in range(217):
- r = valuerange[(i // 36) % 6]
- g = valuerange[(i // 6) % 6]
- b = valuerange[i % 6]
- self.xterm_colors.append((r, g, b))
-
- # colors 233..253: grayscale
-
- for i in range(1, 22):
- v = 8 + i * 10
- self.xterm_colors.append((v, v, v))
-
- def _closest_color(self, r, g, b):
- distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
- match = 0
-
- for i in range(0, 254):
- values = self.xterm_colors[i]
-
- rd = r - values[0]
- gd = g - values[1]
- bd = b - values[2]
- d = rd*rd + gd*gd + bd*bd
-
- if d < distance:
- match = i
- distance = d
- return match
-
- def _color_index(self, color):
- index = self.best_match.get(color, None)
+ def _build_color_table(self):
+ # colors 0..15: 16 basic colors
+
+ self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
+ self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
+ self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
+ self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
+ self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
+ self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
+ self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
+ self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
+ self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
+ self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
+ self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
+ self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
+ self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
+ self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
+ self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
+ self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
+
+ # colors 16..232: the 6x6x6 color cube
+
+ valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
+
+ for i in range(217):
+ r = valuerange[(i // 36) % 6]
+ g = valuerange[(i // 6) % 6]
+ b = valuerange[i % 6]
+ self.xterm_colors.append((r, g, b))
+
+ # colors 233..253: grayscale
+
+ for i in range(1, 22):
+ v = 8 + i * 10
+ self.xterm_colors.append((v, v, v))
+
+ def _closest_color(self, r, g, b):
+ distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
+ match = 0
+
+ for i in range(0, 254):
+ values = self.xterm_colors[i]
+
+ rd = r - values[0]
+ gd = g - values[1]
+ bd = b - values[2]
+ d = rd*rd + gd*gd + bd*bd
+
+ if d < distance:
+ match = i
+ distance = d
+ return match
+
+ def _color_index(self, color):
+ index = self.best_match.get(color, None)
if color in ansicolors:
# strip the `ansi/#ansi` part and look up code
index = color
self.best_match[color] = index
- if index is None:
- try:
- rgb = int(str(color), 16)
- except ValueError:
- rgb = 0
-
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- index = self._closest_color(r, g, b)
- self.best_match[color] = index
- return index
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
+ if index is None:
+ try:
+ rgb = int(str(color), 16)
+ except ValueError:
+ rgb = 0
+
+ r = (rgb >> 16) & 0xff
+ g = (rgb >> 8) & 0xff
+ b = rgb & 0xff
+ index = self._closest_color(r, g, b)
+ self.best_match[color] = index
+ return index
+
+ def _setup_styles(self):
+ for ttype, ndef in self.style:
+ escape = EscapeSequence()
# get foreground from ansicolor if set
if ndef['ansicolor']:
escape.fg = self._color_index(ndef['ansicolor'])
elif ndef['color']:
- escape.fg = self._color_index(ndef['color'])
+ escape.fg = self._color_index(ndef['color'])
if ndef['bgansicolor']:
escape.bg = self._color_index(ndef['bgansicolor'])
elif ndef['bgcolor']:
- escape.bg = self._color_index(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
+ escape.bg = self._color_index(ndef['bgcolor'])
+ if self.usebold and ndef['bold']:
+ escape.bold = True
+ if self.useunderline and ndef['underline']:
+ escape.underline = True
if self.useitalic and ndef['italic']:
escape.italic = True
- self.style_string[str(ttype)] = (escape.color_string(),
- escape.reset_string())
-
+ self.style_string[str(ttype)] = (escape.color_string(),
+ escape.reset_string())
+
def _write_lineno(self, outfile):
self._lineno += 1
outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
- def format(self, tokensource, outfile):
- return Formatter.format(self, tokensource, outfile)
-
- def format_unencoded(self, tokensource, outfile):
+ def format(self, tokensource, outfile):
+ return Formatter.format(self, tokensource, outfile)
+
+ def format_unencoded(self, tokensource, outfile):
if self.linenos:
self._write_lineno(outfile)
- for ttype, value in tokensource:
- not_found = True
- while ttype and not_found:
- try:
- # outfile.write( "<" + str(ttype) + ">" )
- on, off = self.style_string[str(ttype)]
-
- # Like TerminalFormatter, add "reset colors" escape sequence
- # on newline.
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(on + line + off)
+ for ttype, value in tokensource:
+ not_found = True
+ while ttype and not_found:
+ try:
+ # outfile.write( "<" + str(ttype) + ">" )
+ on, off = self.style_string[str(ttype)]
+
+ # Like TerminalFormatter, add "reset colors" escape sequence
+ # on newline.
+ spl = value.split('\n')
+ for line in spl[:-1]:
+ if line:
+ outfile.write(on + line + off)
if self.linenos:
self._write_lineno(outfile)
else:
outfile.write('\n')
- if spl[-1]:
- outfile.write(on + spl[-1] + off)
-
- not_found = False
- # outfile.write( '#' + str(ttype) + '#' )
-
- except KeyError:
- # ottype = ttype
+ if spl[-1]:
+ outfile.write(on + spl[-1] + off)
+
+ not_found = False
+ # outfile.write( '#' + str(ttype) + '#' )
+
+ except KeyError:
+ # ottype = ttype
ttype = ttype.parent
- # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
-
- if not_found:
- outfile.write(value)
-
+ # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
+
+ if not_found:
+ outfile.write(value)
+
if self.linenos:
outfile.write("\n")
-
-
-
-class TerminalTrueColorFormatter(Terminal256Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a true-color
- terminal or console. Like in `TerminalFormatter` color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- .. versionadded:: 2.1
-
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
- """
- name = 'TerminalTrueColor'
- aliases = ['terminal16m', 'console16m', '16m']
- filenames = []
-
- def _build_color_table(self):
- pass
-
- def _color_tuple(self, color):
- try:
- rgb = int(str(color), 16)
- except ValueError:
- return None
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- return (r, g, b)
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
- if ndef['color']:
- escape.fg = self._color_tuple(ndef['color'])
- if ndef['bgcolor']:
- escape.bg = self._color_tuple(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
+
+
+
+class TerminalTrueColorFormatter(Terminal256Formatter):
+ r"""
+ Format tokens with ANSI color sequences, for output in a true-color
+ terminal or console. Like in `TerminalFormatter` color sequences
+ are terminated at newlines, so that paging the output works correctly.
+
+ .. versionadded:: 2.1
+
+ Options accepted:
+
+ `style`
+ The style to use, can be a string or a Style subclass (default:
+ ``'default'``).
+ """
+ name = 'TerminalTrueColor'
+ aliases = ['terminal16m', 'console16m', '16m']
+ filenames = []
+
+ def _build_color_table(self):
+ pass
+
+ def _color_tuple(self, color):
+ try:
+ rgb = int(str(color), 16)
+ except ValueError:
+ return None
+ r = (rgb >> 16) & 0xff
+ g = (rgb >> 8) & 0xff
+ b = rgb & 0xff
+ return (r, g, b)
+
+ def _setup_styles(self):
+ for ttype, ndef in self.style:
+ escape = EscapeSequence()
+ if ndef['color']:
+ escape.fg = self._color_tuple(ndef['color'])
+ if ndef['bgcolor']:
+ escape.bg = self._color_tuple(ndef['bgcolor'])
+ if self.usebold and ndef['bold']:
+ escape.bold = True
+ if self.useunderline and ndef['underline']:
+ escape.underline = True
if self.useitalic and ndef['italic']:
escape.italic = True
- self.style_string[str(ttype)] = (escape.true_color_string(),
- escape.reset_string())
+ self.style_string[str(ttype)] = (escape.true_color_string(),
+ escape.reset_string())
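
The Terminal256Formatter and TerminalTrueColorFormatter above differ only in how a style color becomes an escape sequence: the 256-color variant precomputes the xterm palette (16 basic colors, a 6x6x6 color cube, a grayscale ramp) and picks the entry with the smallest squared RGB distance, while the true-color variant emits the RGB triple directly. A minimal standalone sketch of that nearest-color lookup is shown below; the helper names are illustrative only and are not part of Pygments or of this change.

# Sketch of xterm-256 color approximation (illustrative, not Pygments API).
def build_xterm_palette():
    # Indices 16..231: the 6x6x6 color cube; 232..255: the grayscale ramp.
    levels = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
    cube = [(r, g, b) for r in levels for g in levels for b in levels]
    gray = [(8 + i * 10,) * 3 for i in range(24)]
    return cube + gray  # the 16 basic terminal colors are omitted here

def closest_xterm_index(r, g, b, palette=None):
    # Brute-force nearest neighbour by squared RGB distance, the same idea
    # as the formatter's _closest_color() above.
    palette = palette or build_xterm_palette()
    best, best_dist = 0, float('inf')
    for i, (pr, pg, pb) in enumerate(palette):
        d = (r - pr) ** 2 + (g - pg) ** 2 + (b - pb) ** 2
        if d < best_dist:
            best, best_dist = i, d
    return best + 16  # offset past the 16 basic colors

print(closest_xterm_index(0xd7, 0x87, 0x00))  # -> 172, an orange in the cube
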
diff --git a/contrib/python/Pygments/py3/pygments/lexer.py b/contrib/python/Pygments/py3/pygments/lexer.py
index 33d738a8d6..224ace5d7e 100644
--- a/contrib/python/Pygments/py3/pygments/lexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexer.py
@@ -1,651 +1,651 @@
-"""
- pygments.lexer
- ~~~~~~~~~~~~~~
-
- Base lexer classes.
-
+"""
+ pygments.lexer
+ ~~~~~~~~~~~~~~
+
+ Base lexer classes.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import time
-
-from pygments.filter import apply_filters, Filter
-from pygments.filters import get_filter_by_name
-from pygments.token import Error, Text, Other, _TokenType
-from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import sys
+import time
+
+from pygments.filter import apply_filters, Filter
+from pygments.filters import get_filter_by_name
+from pygments.token import Error, Text, Other, _TokenType
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
make_analysator, Future, guess_decode
-from pygments.regexopt import regex_opt
-
-__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
- 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
- 'default', 'words']
-
-
-_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
- (b'\xff\xfe\0\0', 'utf-32'),
- (b'\0\0\xfe\xff', 'utf-32be'),
- (b'\xff\xfe', 'utf-16'),
- (b'\xfe\xff', 'utf-16be')]
-
-_default_analyse = staticmethod(lambda x: 0.0)
-
-
-class LexerMeta(type):
- """
- This metaclass automagically converts ``analyse_text`` methods into
- static methods which always return float values.
- """
-
- def __new__(mcs, name, bases, d):
- if 'analyse_text' in d:
- d['analyse_text'] = make_analysator(d['analyse_text'])
- return type.__new__(mcs, name, bases, d)
-
-
+from pygments.regexopt import regex_opt
+
+__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
+ 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
+ 'default', 'words']
+
+
+_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
+ (b'\xff\xfe\0\0', 'utf-32'),
+ (b'\0\0\xfe\xff', 'utf-32be'),
+ (b'\xff\xfe', 'utf-16'),
+ (b'\xfe\xff', 'utf-16be')]
+
+_default_analyse = staticmethod(lambda x: 0.0)
+
+
+class LexerMeta(type):
+ """
+ This metaclass automagically converts ``analyse_text`` methods into
+ static methods which always return float values.
+ """
+
+ def __new__(mcs, name, bases, d):
+ if 'analyse_text' in d:
+ d['analyse_text'] = make_analysator(d['analyse_text'])
+ return type.__new__(mcs, name, bases, d)
+
+
class Lexer(metaclass=LexerMeta):
- """
- Lexer for a specific language.
-
- Basic options recognized:
- ``stripnl``
- Strip leading and trailing newlines from the input (default: True).
- ``stripall``
- Strip all leading and trailing whitespace from the input
- (default: False).
- ``ensurenl``
- Make sure that the input ends with a newline (default: True). This
- is required for some lexers that consume input linewise.
-
- .. versionadded:: 1.3
-
- ``tabsize``
- If given and greater than 0, expand tabs in the input (default: 0).
- ``encoding``
- If given, must be an encoding name. This encoding will be used to
- convert the input string to Unicode, if it is not already a Unicode
- string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
- Latin1 detection. Can also be ``'chardet'`` to use the chardet
- library, if it is installed.
- ``inencoding``
- Overrides the ``encoding`` if given.
- """
-
- #: Name of the lexer
- name = None
-
- #: Shortcuts for the lexer
- aliases = []
-
- #: File name globs
- filenames = []
-
- #: Secondary file name globs
- alias_filenames = []
-
- #: MIME types
- mimetypes = []
-
- #: Priority, should multiple lexers match and no content is provided
- priority = 0
-
- def __init__(self, **options):
- self.options = options
- self.stripnl = get_bool_opt(options, 'stripnl', True)
- self.stripall = get_bool_opt(options, 'stripall', False)
- self.ensurenl = get_bool_opt(options, 'ensurenl', True)
- self.tabsize = get_int_opt(options, 'tabsize', 0)
- self.encoding = options.get('encoding', 'guess')
- self.encoding = options.get('inencoding') or self.encoding
- self.filters = []
- for filter_ in get_list_opt(options, 'filters', ()):
- self.add_filter(filter_)
-
- def __repr__(self):
- if self.options:
- return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
- self.options)
- else:
- return '<pygments.lexers.%s>' % self.__class__.__name__
-
- def add_filter(self, filter_, **options):
- """
- Add a new stream filter to this lexer.
- """
- if not isinstance(filter_, Filter):
- filter_ = get_filter_by_name(filter_, **options)
- self.filters.append(filter_)
-
- def analyse_text(text):
- """
- Has to return a float between ``0`` and ``1`` that indicates
- if a lexer wants to highlight this text. Used by ``guess_lexer``.
- If this method returns ``0`` it won't highlight it in any case, if
- it returns ``1`` highlighting with this lexer is guaranteed.
-
- The `LexerMeta` metaclass automatically wraps this function so
- that it works like a static method (no ``self`` or ``cls``
- parameter) and the return value is automatically converted to
- `float`. If the return value is an object that is boolean `False`
- it's the same as if the return values was ``0.0``.
- """
-
- def get_tokens(self, text, unfiltered=False):
- """
- Return an iterable of (tokentype, value) pairs generated from
- `text`. If `unfiltered` is set to `True`, the filtering mechanism
- is bypassed even if filters are defined.
-
- Also preprocess the text, i.e. expand tabs and strip it if
-        wanted, and apply registered filters.
- """
+ """
+ Lexer for a specific language.
+
+ Basic options recognized:
+ ``stripnl``
+ Strip leading and trailing newlines from the input (default: True).
+ ``stripall``
+ Strip all leading and trailing whitespace from the input
+ (default: False).
+ ``ensurenl``
+ Make sure that the input ends with a newline (default: True). This
+ is required for some lexers that consume input linewise.
+
+ .. versionadded:: 1.3
+
+ ``tabsize``
+ If given and greater than 0, expand tabs in the input (default: 0).
+ ``encoding``
+ If given, must be an encoding name. This encoding will be used to
+ convert the input string to Unicode, if it is not already a Unicode
+ string (default: ``'guess'``, which uses a simple UTF-8 / Locale /
+ Latin1 detection. Can also be ``'chardet'`` to use the chardet
+ library, if it is installed.
+ ``inencoding``
+ Overrides the ``encoding`` if given.
+ """
+
+ #: Name of the lexer
+ name = None
+
+ #: Shortcuts for the lexer
+ aliases = []
+
+ #: File name globs
+ filenames = []
+
+ #: Secondary file name globs
+ alias_filenames = []
+
+ #: MIME types
+ mimetypes = []
+
+ #: Priority, should multiple lexers match and no content is provided
+ priority = 0
+
+ def __init__(self, **options):
+ self.options = options
+ self.stripnl = get_bool_opt(options, 'stripnl', True)
+ self.stripall = get_bool_opt(options, 'stripall', False)
+ self.ensurenl = get_bool_opt(options, 'ensurenl', True)
+ self.tabsize = get_int_opt(options, 'tabsize', 0)
+ self.encoding = options.get('encoding', 'guess')
+ self.encoding = options.get('inencoding') or self.encoding
+ self.filters = []
+ for filter_ in get_list_opt(options, 'filters', ()):
+ self.add_filter(filter_)
+
+ def __repr__(self):
+ if self.options:
+ return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
+ self.options)
+ else:
+ return '<pygments.lexers.%s>' % self.__class__.__name__
+
+ def add_filter(self, filter_, **options):
+ """
+ Add a new stream filter to this lexer.
+ """
+ if not isinstance(filter_, Filter):
+ filter_ = get_filter_by_name(filter_, **options)
+ self.filters.append(filter_)
+
+ def analyse_text(text):
+ """
+ Has to return a float between ``0`` and ``1`` that indicates
+ if a lexer wants to highlight this text. Used by ``guess_lexer``.
+ If this method returns ``0`` it won't highlight it in any case, if
+ it returns ``1`` highlighting with this lexer is guaranteed.
+
+ The `LexerMeta` metaclass automatically wraps this function so
+ that it works like a static method (no ``self`` or ``cls``
+ parameter) and the return value is automatically converted to
+ `float`. If the return value is an object that is boolean `False`
+ it's the same as if the return values was ``0.0``.
+ """
+
+ def get_tokens(self, text, unfiltered=False):
+ """
+ Return an iterable of (tokentype, value) pairs generated from
+ `text`. If `unfiltered` is set to `True`, the filtering mechanism
+ is bypassed even if filters are defined.
+
+ Also preprocess the text, i.e. expand tabs and strip it if
+        wanted, and apply registered filters.
+ """
if not isinstance(text, str):
- if self.encoding == 'guess':
- text, _ = guess_decode(text)
- elif self.encoding == 'chardet':
- try:
- import chardet
+ if self.encoding == 'guess':
+ text, _ = guess_decode(text)
+ elif self.encoding == 'chardet':
+ try:
+ import chardet
except ImportError as e:
- raise ImportError('To enable chardet encoding guessing, '
- 'please install the chardet library '
+ raise ImportError('To enable chardet encoding guessing, '
+ 'please install the chardet library '
'from http://chardet.feedparser.org/') from e
- # check for BOM first
- decoded = None
- for bom, encoding in _encoding_map:
- if text.startswith(bom):
- decoded = text[len(bom):].decode(encoding, 'replace')
- break
- # no BOM found, so use chardet
- if decoded is None:
- enc = chardet.detect(text[:1024]) # Guess using first 1KB
- decoded = text.decode(enc.get('encoding') or 'utf-8',
- 'replace')
- text = decoded
- else:
- text = text.decode(self.encoding)
+ # check for BOM first
+ decoded = None
+ for bom, encoding in _encoding_map:
+ if text.startswith(bom):
+ decoded = text[len(bom):].decode(encoding, 'replace')
+ break
+ # no BOM found, so use chardet
+ if decoded is None:
+ enc = chardet.detect(text[:1024]) # Guess using first 1KB
+ decoded = text.decode(enc.get('encoding') or 'utf-8',
+ 'replace')
+ text = decoded
+ else:
+ text = text.decode(self.encoding)
if text.startswith('\ufeff'):
text = text[len('\ufeff'):]
- else:
+ else:
if text.startswith('\ufeff'):
text = text[len('\ufeff'):]
-
- # text now *is* a unicode string
- text = text.replace('\r\n', '\n')
- text = text.replace('\r', '\n')
- if self.stripall:
- text = text.strip()
- elif self.stripnl:
- text = text.strip('\n')
- if self.tabsize > 0:
- text = text.expandtabs(self.tabsize)
- if self.ensurenl and not text.endswith('\n'):
- text += '\n'
-
- def streamer():
- for _, t, v in self.get_tokens_unprocessed(text):
- yield t, v
- stream = streamer()
- if not unfiltered:
- stream = apply_filters(stream, self.filters, self)
- return stream
-
- def get_tokens_unprocessed(self, text):
- """
- Return an iterable of (index, tokentype, value) pairs where "index"
- is the starting position of the token within the input text.
-
- In subclasses, implement this method as a generator to
- maximize effectiveness.
- """
- raise NotImplementedError
-
-
-class DelegatingLexer(Lexer):
- """
-    This lexer takes two lexers as arguments. A root lexer and
- a language lexer. First everything is scanned using the language
- lexer, afterwards all ``Other`` tokens are lexed using the root
- lexer.
-
- The lexers from the ``template`` lexer package use this base lexer.
- """
-
- def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
- self.root_lexer = _root_lexer(**options)
- self.language_lexer = _language_lexer(**options)
- self.needle = _needle
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- buffered = ''
- insertions = []
- lng_buffer = []
- for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
- if t is self.needle:
- if lng_buffer:
- insertions.append((len(buffered), lng_buffer))
- lng_buffer = []
- buffered += v
- else:
- lng_buffer.append((i, t, v))
- if lng_buffer:
- insertions.append((len(buffered), lng_buffer))
- return do_insertions(insertions,
- self.root_lexer.get_tokens_unprocessed(buffered))
-
-
-# ------------------------------------------------------------------------------
-# RegexLexer and ExtendedRegexLexer
-#
-
-
-class include(str): # pylint: disable=invalid-name
- """
- Indicates that a state should include rules from another state.
- """
- pass
-
-
+
+ # text now *is* a unicode string
+ text = text.replace('\r\n', '\n')
+ text = text.replace('\r', '\n')
+ if self.stripall:
+ text = text.strip()
+ elif self.stripnl:
+ text = text.strip('\n')
+ if self.tabsize > 0:
+ text = text.expandtabs(self.tabsize)
+ if self.ensurenl and not text.endswith('\n'):
+ text += '\n'
+
+ def streamer():
+ for _, t, v in self.get_tokens_unprocessed(text):
+ yield t, v
+ stream = streamer()
+ if not unfiltered:
+ stream = apply_filters(stream, self.filters, self)
+ return stream
+
+ def get_tokens_unprocessed(self, text):
+ """
+ Return an iterable of (index, tokentype, value) pairs where "index"
+ is the starting position of the token within the input text.
+
+ In subclasses, implement this method as a generator to
+ maximize effectiveness.
+ """
+ raise NotImplementedError
+
+
+class DelegatingLexer(Lexer):
+ """
+    This lexer takes two lexers as arguments. A root lexer and
+ a language lexer. First everything is scanned using the language
+ lexer, afterwards all ``Other`` tokens are lexed using the root
+ lexer.
+
+ The lexers from the ``template`` lexer package use this base lexer.
+ """
+
+ def __init__(self, _root_lexer, _language_lexer, _needle=Other, **options):
+ self.root_lexer = _root_lexer(**options)
+ self.language_lexer = _language_lexer(**options)
+ self.needle = _needle
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ buffered = ''
+ insertions = []
+ lng_buffer = []
+ for i, t, v in self.language_lexer.get_tokens_unprocessed(text):
+ if t is self.needle:
+ if lng_buffer:
+ insertions.append((len(buffered), lng_buffer))
+ lng_buffer = []
+ buffered += v
+ else:
+ lng_buffer.append((i, t, v))
+ if lng_buffer:
+ insertions.append((len(buffered), lng_buffer))
+ return do_insertions(insertions,
+ self.root_lexer.get_tokens_unprocessed(buffered))
+
+
+# ------------------------------------------------------------------------------
+# RegexLexer and ExtendedRegexLexer
+#
+
+
+class include(str): # pylint: disable=invalid-name
+ """
+ Indicates that a state should include rules from another state.
+ """
+ pass
+
+
class _inherit:
- """
-    Indicates that a state should inherit from its superclass.
- """
- def __repr__(self):
- return 'inherit'
-
-inherit = _inherit() # pylint: disable=invalid-name
-
-
-class combined(tuple): # pylint: disable=invalid-name
- """
- Indicates a state combined from multiple states.
- """
-
- def __new__(cls, *args):
- return tuple.__new__(cls, args)
-
- def __init__(self, *args):
- # tuple.__init__ doesn't do anything
- pass
-
-
+ """
+    Indicates that a state should inherit from its superclass.
+ """
+ def __repr__(self):
+ return 'inherit'
+
+inherit = _inherit() # pylint: disable=invalid-name
+
+
+class combined(tuple): # pylint: disable=invalid-name
+ """
+ Indicates a state combined from multiple states.
+ """
+
+ def __new__(cls, *args):
+ return tuple.__new__(cls, args)
+
+ def __init__(self, *args):
+ # tuple.__init__ doesn't do anything
+ pass
+
+
class _PseudoMatch:
- """
- A pseudo match object constructed from a string.
- """
-
- def __init__(self, start, text):
- self._text = text
- self._start = start
-
- def start(self, arg=None):
- return self._start
-
- def end(self, arg=None):
- return self._start + len(self._text)
-
- def group(self, arg=None):
- if arg:
- raise IndexError('No such group')
- return self._text
-
- def groups(self):
- return (self._text,)
-
- def groupdict(self):
- return {}
-
-
-def bygroups(*args):
- """
- Callback that yields multiple actions for each group in the match.
- """
- def callback(lexer, match, ctx=None):
- for i, action in enumerate(args):
- if action is None:
- continue
- elif type(action) is _TokenType:
- data = match.group(i + 1)
- if data:
- yield match.start(i + 1), action, data
- else:
- data = match.group(i + 1)
- if data is not None:
- if ctx:
- ctx.pos = match.start(i + 1)
+ """
+ A pseudo match object constructed from a string.
+ """
+
+ def __init__(self, start, text):
+ self._text = text
+ self._start = start
+
+ def start(self, arg=None):
+ return self._start
+
+ def end(self, arg=None):
+ return self._start + len(self._text)
+
+ def group(self, arg=None):
+ if arg:
+ raise IndexError('No such group')
+ return self._text
+
+ def groups(self):
+ return (self._text,)
+
+ def groupdict(self):
+ return {}
+
+
+def bygroups(*args):
+ """
+ Callback that yields multiple actions for each group in the match.
+ """
+ def callback(lexer, match, ctx=None):
+ for i, action in enumerate(args):
+ if action is None:
+ continue
+ elif type(action) is _TokenType:
+ data = match.group(i + 1)
+ if data:
+ yield match.start(i + 1), action, data
+ else:
+ data = match.group(i + 1)
+ if data is not None:
+ if ctx:
+ ctx.pos = match.start(i + 1)
for item in action(lexer,
_PseudoMatch(match.start(i + 1), data), ctx):
- if item:
- yield item
- if ctx:
- ctx.pos = match.end()
- return callback
-
-
+ if item:
+ yield item
+ if ctx:
+ ctx.pos = match.end()
+ return callback
+
+
class _This:
- """
- Special singleton used for indicating the caller class.
- Used by ``using``.
- """
-
-this = _This()
-
-
-def using(_other, **kwargs):
- """
- Callback that processes the match with a different lexer.
-
- The keyword arguments are forwarded to the lexer, except `state` which
- is handled separately.
-
- `state` specifies the state that the new lexer will start in, and can
- be an enumerable such as ('root', 'inline', 'string') or a simple
- string which is assumed to be on top of the root state.
-
- Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
- """
- gt_kwargs = {}
- if 'state' in kwargs:
- s = kwargs.pop('state')
- if isinstance(s, (list, tuple)):
- gt_kwargs['stack'] = s
- else:
- gt_kwargs['stack'] = ('root', s)
-
- if _other is this:
- def callback(lexer, match, ctx=None):
- # if keyword arguments are given the callback
- # function has to create a new lexer instance
- if kwargs:
- # XXX: cache that somehow
- kwargs.update(lexer.options)
- lx = lexer.__class__(**kwargs)
- else:
- lx = lexer
- s = match.start()
- for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
- yield i + s, t, v
- if ctx:
- ctx.pos = match.end()
- else:
- def callback(lexer, match, ctx=None):
- # XXX: cache that somehow
- kwargs.update(lexer.options)
- lx = _other(**kwargs)
-
- s = match.start()
- for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
- yield i + s, t, v
- if ctx:
- ctx.pos = match.end()
- return callback
-
-
-class default:
- """
- Indicates a state or state action (e.g. #pop) to apply.
- For example default('#pop') is equivalent to ('', Token, '#pop')
- Note that state tuples may be used as well.
-
- .. versionadded:: 2.0
- """
- def __init__(self, state):
- self.state = state
-
-
-class words(Future):
- """
- Indicates a list of literal words that is transformed into an optimized
- regex that matches any of the words.
-
- .. versionadded:: 2.0
- """
- def __init__(self, words, prefix='', suffix=''):
- self.words = words
- self.prefix = prefix
- self.suffix = suffix
-
- def get(self):
- return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
-
-
-class RegexLexerMeta(LexerMeta):
- """
- Metaclass for RegexLexer, creates the self._tokens attribute from
- self.tokens on the first instantiation.
- """
-
- def _process_regex(cls, regex, rflags, state):
- """Preprocess the regular expression component of a token definition."""
- if isinstance(regex, Future):
- regex = regex.get()
- return re.compile(regex, rflags).match
-
- def _process_token(cls, token):
- """Preprocess the token component of a token definition."""
- assert type(token) is _TokenType or callable(token), \
- 'token type must be simple type or callable, not %r' % (token,)
- return token
-
- def _process_new_state(cls, new_state, unprocessed, processed):
- """Preprocess the state transition action of a token definition."""
- if isinstance(new_state, str):
- # an existing state
- if new_state == '#pop':
- return -1
- elif new_state in unprocessed:
- return (new_state,)
- elif new_state == '#push':
- return new_state
- elif new_state[:5] == '#pop:':
- return -int(new_state[5:])
- else:
- assert False, 'unknown new state %r' % new_state
- elif isinstance(new_state, combined):
- # combine a new state from existing ones
- tmp_state = '_tmp_%d' % cls._tmpname
- cls._tmpname += 1
- itokens = []
- for istate in new_state:
- assert istate != new_state, 'circular state ref %r' % istate
- itokens.extend(cls._process_state(unprocessed,
- processed, istate))
- processed[tmp_state] = itokens
- return (tmp_state,)
- elif isinstance(new_state, tuple):
- # push more than one state
- for istate in new_state:
- assert (istate in unprocessed or
- istate in ('#pop', '#push')), \
- 'unknown new state ' + istate
- return new_state
- else:
- assert False, 'unknown new state def %r' % new_state
-
- def _process_state(cls, unprocessed, processed, state):
- """Preprocess a single state definition."""
+ """
+ Special singleton used for indicating the caller class.
+ Used by ``using``.
+ """
+
+this = _This()
+
+
+def using(_other, **kwargs):
+ """
+ Callback that processes the match with a different lexer.
+
+ The keyword arguments are forwarded to the lexer, except `state` which
+ is handled separately.
+
+ `state` specifies the state that the new lexer will start in, and can
+ be an enumerable such as ('root', 'inline', 'string') or a simple
+ string which is assumed to be on top of the root state.
+
+ Note: For that to work, `_other` must not be an `ExtendedRegexLexer`.
+ """
+ gt_kwargs = {}
+ if 'state' in kwargs:
+ s = kwargs.pop('state')
+ if isinstance(s, (list, tuple)):
+ gt_kwargs['stack'] = s
+ else:
+ gt_kwargs['stack'] = ('root', s)
+
+ if _other is this:
+ def callback(lexer, match, ctx=None):
+ # if keyword arguments are given the callback
+ # function has to create a new lexer instance
+ if kwargs:
+ # XXX: cache that somehow
+ kwargs.update(lexer.options)
+ lx = lexer.__class__(**kwargs)
+ else:
+ lx = lexer
+ s = match.start()
+ for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
+ yield i + s, t, v
+ if ctx:
+ ctx.pos = match.end()
+ else:
+ def callback(lexer, match, ctx=None):
+ # XXX: cache that somehow
+ kwargs.update(lexer.options)
+ lx = _other(**kwargs)
+
+ s = match.start()
+ for i, t, v in lx.get_tokens_unprocessed(match.group(), **gt_kwargs):
+ yield i + s, t, v
+ if ctx:
+ ctx.pos = match.end()
+ return callback
+
+
+class default:
+ """
+ Indicates a state or state action (e.g. #pop) to apply.
+ For example default('#pop') is equivalent to ('', Token, '#pop')
+ Note that state tuples may be used as well.
+
+ .. versionadded:: 2.0
+ """
+ def __init__(self, state):
+ self.state = state
+
+
+class words(Future):
+ """
+ Indicates a list of literal words that is transformed into an optimized
+ regex that matches any of the words.
+
+ .. versionadded:: 2.0
+ """
+ def __init__(self, words, prefix='', suffix=''):
+ self.words = words
+ self.prefix = prefix
+ self.suffix = suffix
+
+ def get(self):
+ return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
+
+
+class RegexLexerMeta(LexerMeta):
+ """
+ Metaclass for RegexLexer, creates the self._tokens attribute from
+ self.tokens on the first instantiation.
+ """
+
+ def _process_regex(cls, regex, rflags, state):
+ """Preprocess the regular expression component of a token definition."""
+ if isinstance(regex, Future):
+ regex = regex.get()
+ return re.compile(regex, rflags).match
+
+ def _process_token(cls, token):
+ """Preprocess the token component of a token definition."""
+ assert type(token) is _TokenType or callable(token), \
+ 'token type must be simple type or callable, not %r' % (token,)
+ return token
+
+ def _process_new_state(cls, new_state, unprocessed, processed):
+ """Preprocess the state transition action of a token definition."""
+ if isinstance(new_state, str):
+ # an existing state
+ if new_state == '#pop':
+ return -1
+ elif new_state in unprocessed:
+ return (new_state,)
+ elif new_state == '#push':
+ return new_state
+ elif new_state[:5] == '#pop:':
+ return -int(new_state[5:])
+ else:
+ assert False, 'unknown new state %r' % new_state
+ elif isinstance(new_state, combined):
+ # combine a new state from existing ones
+ tmp_state = '_tmp_%d' % cls._tmpname
+ cls._tmpname += 1
+ itokens = []
+ for istate in new_state:
+ assert istate != new_state, 'circular state ref %r' % istate
+ itokens.extend(cls._process_state(unprocessed,
+ processed, istate))
+ processed[tmp_state] = itokens
+ return (tmp_state,)
+ elif isinstance(new_state, tuple):
+ # push more than one state
+ for istate in new_state:
+ assert (istate in unprocessed or
+ istate in ('#pop', '#push')), \
+ 'unknown new state ' + istate
+ return new_state
+ else:
+ assert False, 'unknown new state def %r' % new_state
+
+ def _process_state(cls, unprocessed, processed, state):
+ """Preprocess a single state definition."""
assert type(state) is str, "wrong state name %r" % state
- assert state[0] != '#', "invalid state name %r" % state
- if state in processed:
- return processed[state]
- tokens = processed[state] = []
- rflags = cls.flags
- for tdef in unprocessed[state]:
- if isinstance(tdef, include):
- # it's a state reference
- assert tdef != state, "circular state reference %r" % state
- tokens.extend(cls._process_state(unprocessed, processed,
- str(tdef)))
- continue
- if isinstance(tdef, _inherit):
- # should be processed already, but may not in the case of:
- # 1. the state has no counterpart in any parent
- # 2. the state includes more than one 'inherit'
- continue
- if isinstance(tdef, default):
- new_state = cls._process_new_state(tdef.state, unprocessed, processed)
- tokens.append((re.compile('').match, None, new_state))
- continue
-
- assert type(tdef) is tuple, "wrong rule def %r" % tdef
-
- try:
- rex = cls._process_regex(tdef[0], rflags, state)
- except Exception as err:
- raise ValueError("uncompilable regex %r in state %r of %r: %s" %
+ assert state[0] != '#', "invalid state name %r" % state
+ if state in processed:
+ return processed[state]
+ tokens = processed[state] = []
+ rflags = cls.flags
+ for tdef in unprocessed[state]:
+ if isinstance(tdef, include):
+ # it's a state reference
+ assert tdef != state, "circular state reference %r" % state
+ tokens.extend(cls._process_state(unprocessed, processed,
+ str(tdef)))
+ continue
+ if isinstance(tdef, _inherit):
+ # should be processed already, but may not in the case of:
+ # 1. the state has no counterpart in any parent
+ # 2. the state includes more than one 'inherit'
+ continue
+ if isinstance(tdef, default):
+ new_state = cls._process_new_state(tdef.state, unprocessed, processed)
+ tokens.append((re.compile('').match, None, new_state))
+ continue
+
+ assert type(tdef) is tuple, "wrong rule def %r" % tdef
+
+ try:
+ rex = cls._process_regex(tdef[0], rflags, state)
+ except Exception as err:
+ raise ValueError("uncompilable regex %r in state %r of %r: %s" %
(tdef[0], state, cls, err)) from err
-
- token = cls._process_token(tdef[1])
-
- if len(tdef) == 2:
- new_state = None
- else:
- new_state = cls._process_new_state(tdef[2],
- unprocessed, processed)
-
- tokens.append((rex, token, new_state))
- return tokens
-
- def process_tokendef(cls, name, tokendefs=None):
- """Preprocess a dictionary of token definitions."""
- processed = cls._all_tokens[name] = {}
- tokendefs = tokendefs or cls.tokens[name]
- for state in list(tokendefs):
- cls._process_state(tokendefs, processed, state)
- return processed
-
- def get_tokendefs(cls):
- """
- Merge tokens from superclasses in MRO order, returning a single tokendef
- dictionary.
-
- Any state that is not defined by a subclass will be inherited
- automatically. States that *are* defined by subclasses will, by
- default, override that state in the superclass. If a subclass wishes to
- inherit definitions from a superclass, it can use the special value
- "inherit", which will cause the superclass' state definition to be
- included at that point in the state.
- """
- tokens = {}
- inheritable = {}
- for c in cls.__mro__:
- toks = c.__dict__.get('tokens', {})
-
+
+ token = cls._process_token(tdef[1])
+
+ if len(tdef) == 2:
+ new_state = None
+ else:
+ new_state = cls._process_new_state(tdef[2],
+ unprocessed, processed)
+
+ tokens.append((rex, token, new_state))
+ return tokens
+
+ def process_tokendef(cls, name, tokendefs=None):
+ """Preprocess a dictionary of token definitions."""
+ processed = cls._all_tokens[name] = {}
+ tokendefs = tokendefs or cls.tokens[name]
+ for state in list(tokendefs):
+ cls._process_state(tokendefs, processed, state)
+ return processed
+
+ def get_tokendefs(cls):
+ """
+ Merge tokens from superclasses in MRO order, returning a single tokendef
+ dictionary.
+
+ Any state that is not defined by a subclass will be inherited
+ automatically. States that *are* defined by subclasses will, by
+ default, override that state in the superclass. If a subclass wishes to
+ inherit definitions from a superclass, it can use the special value
+ "inherit", which will cause the superclass' state definition to be
+ included at that point in the state.
+ """
+ tokens = {}
+ inheritable = {}
+ for c in cls.__mro__:
+ toks = c.__dict__.get('tokens', {})
+
for state, items in toks.items():
- curitems = tokens.get(state)
- if curitems is None:
- # N.b. because this is assigned by reference, sufficiently
- # deep hierarchies are processed incrementally (e.g. for
- # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
- # will not see any inherits in B).
- tokens[state] = items
- try:
- inherit_ndx = items.index(inherit)
- except ValueError:
- continue
- inheritable[state] = inherit_ndx
- continue
-
- inherit_ndx = inheritable.pop(state, None)
- if inherit_ndx is None:
- continue
-
- # Replace the "inherit" value with the items
- curitems[inherit_ndx:inherit_ndx+1] = items
- try:
- # N.b. this is the index in items (that is, the superclass
- # copy), so offset required when storing below.
- new_inh_ndx = items.index(inherit)
- except ValueError:
- pass
- else:
- inheritable[state] = inherit_ndx + new_inh_ndx
-
- return tokens
-
- def __call__(cls, *args, **kwds):
- """Instantiate cls after preprocessing its token definitions."""
- if '_tokens' not in cls.__dict__:
- cls._all_tokens = {}
- cls._tmpname = 0
- if hasattr(cls, 'token_variants') and cls.token_variants:
- # don't process yet
- pass
- else:
- cls._tokens = cls.process_tokendef('', cls.get_tokendefs())
-
- return type.__call__(cls, *args, **kwds)
-
-
+ curitems = tokens.get(state)
+ if curitems is None:
+ # N.b. because this is assigned by reference, sufficiently
+ # deep hierarchies are processed incrementally (e.g. for
+ # A(B), B(C), C(RegexLexer), B will be premodified so X(B)
+ # will not see any inherits in B).
+ tokens[state] = items
+ try:
+ inherit_ndx = items.index(inherit)
+ except ValueError:
+ continue
+ inheritable[state] = inherit_ndx
+ continue
+
+ inherit_ndx = inheritable.pop(state, None)
+ if inherit_ndx is None:
+ continue
+
+ # Replace the "inherit" value with the items
+ curitems[inherit_ndx:inherit_ndx+1] = items
+ try:
+ # N.b. this is the index in items (that is, the superclass
+ # copy), so offset required when storing below.
+ new_inh_ndx = items.index(inherit)
+ except ValueError:
+ pass
+ else:
+ inheritable[state] = inherit_ndx + new_inh_ndx
+
+ return tokens
+
+ def __call__(cls, *args, **kwds):
+ """Instantiate cls after preprocessing its token definitions."""
+ if '_tokens' not in cls.__dict__:
+ cls._all_tokens = {}
+ cls._tmpname = 0
+ if hasattr(cls, 'token_variants') and cls.token_variants:
+ # don't process yet
+ pass
+ else:
+ cls._tokens = cls.process_tokendef('', cls.get_tokendefs())
+
+ return type.__call__(cls, *args, **kwds)
+
+
class RegexLexer(Lexer, metaclass=RegexLexerMeta):
- """
- Base for simple stateful regular expression-based lexers.
- Simplifies the lexing process so that you need only
- provide a list of states and regular expressions.
- """
-
- #: Flags for compiling the regular expressions.
- #: Defaults to MULTILINE.
- flags = re.MULTILINE
-
+ """
+ Base for simple stateful regular expression-based lexers.
+ Simplifies the lexing process so that you need only
+ provide a list of states and regular expressions.
+ """
+
+ #: Flags for compiling the regular expressions.
+ #: Defaults to MULTILINE.
+ flags = re.MULTILINE
+
    #: At all times there is a stack of states. Initially, the stack contains
#: a single state 'root'. The top of the stack is called "the current state".
#:
- #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
- #:
- #: ``new_state`` can be omitted to signify no state transition.
+ #: Dict of ``{'state': [(regex, tokentype, new_state), ...], ...}``
+ #:
+ #: ``new_state`` can be omitted to signify no state transition.
    #: If ``new_state`` is a string, it is pushed on the stack. This ensures
#: the new current state is ``new_state``.
#: If ``new_state`` is a tuple of strings, all of those strings are pushed
#: on the stack and the current state will be the last element of the list.
#: ``new_state`` can also be ``combined('state1', 'state2', ...)``
- #: to signify a new, anonymous state combined from the rules of two
- #: or more existing ones.
- #: Furthermore, it can be '#pop' to signify going back one step in
- #: the state stack, or '#push' to push the current state on the stack
+ #: to signify a new, anonymous state combined from the rules of two
+ #: or more existing ones.
+ #: Furthermore, it can be '#pop' to signify going back one step in
+ #: the state stack, or '#push' to push the current state on the stack
#: again. Note that if you push while in a combined state, the combined
#: state itself is pushed, and not only the state in which the rule is
#: defined.
- #:
- #: The tuple can also be replaced with ``include('state')``, in which
- #: case the rules from the state named by the string are included in the
- #: current one.
- tokens = {}
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- """
- Split ``text`` into (tokentype, text) pairs.
-
-        ``stack`` is the initial stack (default: ``['root']``)
- """
- pos = 0
- tokendefs = self._tokens
- statestack = list(stack)
- statetokens = tokendefs[statestack[-1]]
- while 1:
- for rexmatch, action, new_state in statetokens:
- m = rexmatch(text, pos)
- if m:
- if action is not None:
- if type(action) is _TokenType:
- yield pos, action, m.group()
- else:
+ #:
+ #: The tuple can also be replaced with ``include('state')``, in which
+ #: case the rules from the state named by the string are included in the
+ #: current one.
+ tokens = {}
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+
+        ``stack`` is the initial stack (default: ``['root']``)
+ """
+ pos = 0
+ tokendefs = self._tokens
+ statestack = list(stack)
+ statetokens = tokendefs[statestack[-1]]
+ while 1:
+ for rexmatch, action, new_state in statetokens:
+ m = rexmatch(text, pos)
+ if m:
+ if action is not None:
+ if type(action) is _TokenType:
+ yield pos, action, m.group()
+ else:
yield from action(self, m)
- pos = m.end()
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
+ pos = m.end()
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ for state in new_state:
+ if state == '#pop':
if len(statestack) > 1:
statestack.pop()
- elif state == '#push':
- statestack.append(statestack[-1])
- else:
- statestack.append(state)
- elif isinstance(new_state, int):
+ elif state == '#push':
+ statestack.append(statestack[-1])
+ else:
+ statestack.append(state)
+ elif isinstance(new_state, int):
# pop, but keep at least one state on the stack
# (random code leading to unexpected pops should
# not allow exceptions)
@@ -653,227 +653,227 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
del statestack[1:]
else:
del statestack[new_state:]
- elif new_state == '#push':
- statestack.append(statestack[-1])
- else:
- assert False, "wrong state def: %r" % new_state
- statetokens = tokendefs[statestack[-1]]
- break
- else:
- # We are here only if all state tokens have been considered
- # and there was not a match on any of them.
- try:
- if text[pos] == '\n':
- # at EOL, reset state to "root"
- statestack = ['root']
- statetokens = tokendefs['root']
+ elif new_state == '#push':
+ statestack.append(statestack[-1])
+ else:
+ assert False, "wrong state def: %r" % new_state
+ statetokens = tokendefs[statestack[-1]]
+ break
+ else:
+ # We are here only if all state tokens have been considered
+ # and there was not a match on any of them.
+ try:
+ if text[pos] == '\n':
+ # at EOL, reset state to "root"
+ statestack = ['root']
+ statetokens = tokendefs['root']
yield pos, Text, '\n'
- pos += 1
- continue
- yield pos, Error, text[pos]
- pos += 1
- except IndexError:
- break
-
-
+ pos += 1
+ continue
+ yield pos, Error, text[pos]
+ pos += 1
+ except IndexError:
+ break
+
+
class LexerContext:
- """
- A helper object that holds lexer position data.
- """
-
- def __init__(self, text, pos, stack=None, end=None):
- self.text = text
- self.pos = pos
- self.end = end or len(text) # end=0 not supported ;-)
- self.stack = stack or ['root']
-
- def __repr__(self):
- return 'LexerContext(%r, %r, %r)' % (
- self.text, self.pos, self.stack)
-
-
-class ExtendedRegexLexer(RegexLexer):
- """
- A RegexLexer that uses a context object to store its state.
- """
-
- def get_tokens_unprocessed(self, text=None, context=None):
- """
- Split ``text`` into (tokentype, text) pairs.
- If ``context`` is given, use this lexer context instead.
- """
- tokendefs = self._tokens
- if not context:
- ctx = LexerContext(text, 0)
- statetokens = tokendefs['root']
- else:
- ctx = context
- statetokens = tokendefs[ctx.stack[-1]]
- text = ctx.text
- while 1:
- for rexmatch, action, new_state in statetokens:
- m = rexmatch(text, ctx.pos, ctx.end)
- if m:
- if action is not None:
- if type(action) is _TokenType:
- yield ctx.pos, action, m.group()
- ctx.pos = m.end()
- else:
+ """
+ A helper object that holds lexer position data.
+ """
+
+ def __init__(self, text, pos, stack=None, end=None):
+ self.text = text
+ self.pos = pos
+ self.end = end or len(text) # end=0 not supported ;-)
+ self.stack = stack or ['root']
+
+ def __repr__(self):
+ return 'LexerContext(%r, %r, %r)' % (
+ self.text, self.pos, self.stack)
+
+
+class ExtendedRegexLexer(RegexLexer):
+ """
+ A RegexLexer that uses a context object to store its state.
+ """
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+ If ``context`` is given, use this lexer context instead.
+ """
+ tokendefs = self._tokens
+ if not context:
+ ctx = LexerContext(text, 0)
+ statetokens = tokendefs['root']
+ else:
+ ctx = context
+ statetokens = tokendefs[ctx.stack[-1]]
+ text = ctx.text
+ while 1:
+ for rexmatch, action, new_state in statetokens:
+ m = rexmatch(text, ctx.pos, ctx.end)
+ if m:
+ if action is not None:
+ if type(action) is _TokenType:
+ yield ctx.pos, action, m.group()
+ ctx.pos = m.end()
+ else:
yield from action(self, m, ctx)
- if not new_state:
- # altered the state stack?
- statetokens = tokendefs[ctx.stack[-1]]
- # CAUTION: callback must set ctx.pos!
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
+ if not new_state:
+ # altered the state stack?
+ statetokens = tokendefs[ctx.stack[-1]]
+ # CAUTION: callback must set ctx.pos!
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ for state in new_state:
+ if state == '#pop':
if len(ctx.stack) > 1:
ctx.stack.pop()
- elif state == '#push':
- ctx.stack.append(ctx.stack[-1])
- else:
- ctx.stack.append(state)
- elif isinstance(new_state, int):
+ elif state == '#push':
+ ctx.stack.append(ctx.stack[-1])
+ else:
+ ctx.stack.append(state)
+ elif isinstance(new_state, int):
# see RegexLexer for why this check is made
if abs(new_state) >= len(ctx.stack):
del ctx.state[1:]
else:
del ctx.stack[new_state:]
- elif new_state == '#push':
- ctx.stack.append(ctx.stack[-1])
- else:
- assert False, "wrong state def: %r" % new_state
- statetokens = tokendefs[ctx.stack[-1]]
- break
- else:
- try:
- if ctx.pos >= ctx.end:
- break
- if text[ctx.pos] == '\n':
- # at EOL, reset state to "root"
- ctx.stack = ['root']
- statetokens = tokendefs['root']
+ elif new_state == '#push':
+ ctx.stack.append(ctx.stack[-1])
+ else:
+ assert False, "wrong state def: %r" % new_state
+ statetokens = tokendefs[ctx.stack[-1]]
+ break
+ else:
+ try:
+ if ctx.pos >= ctx.end:
+ break
+ if text[ctx.pos] == '\n':
+ # at EOL, reset state to "root"
+ ctx.stack = ['root']
+ statetokens = tokendefs['root']
yield ctx.pos, Text, '\n'
- ctx.pos += 1
- continue
- yield ctx.pos, Error, text[ctx.pos]
- ctx.pos += 1
- except IndexError:
- break
-
-
-def do_insertions(insertions, tokens):
- """
- Helper for lexers which must combine the results of several
- sublexers.
-
- ``insertions`` is a list of ``(index, itokens)`` pairs.
- Each ``itokens`` iterable should be inserted at position
- ``index`` into the token stream given by the ``tokens``
- argument.
-
- The result is a combined token stream.
-
- TODO: clean up the code here.
- """
- insertions = iter(insertions)
- try:
- index, itokens = next(insertions)
- except StopIteration:
- # no insertions
+ ctx.pos += 1
+ continue
+ yield ctx.pos, Error, text[ctx.pos]
+ ctx.pos += 1
+ except IndexError:
+ break
+
+
+def do_insertions(insertions, tokens):
+ """
+ Helper for lexers which must combine the results of several
+ sublexers.
+
+ ``insertions`` is a list of ``(index, itokens)`` pairs.
+ Each ``itokens`` iterable should be inserted at position
+ ``index`` into the token stream given by the ``tokens``
+ argument.
+
+ The result is a combined token stream.
+
+ TODO: clean up the code here.
+ """
+ insertions = iter(insertions)
+ try:
+ index, itokens = next(insertions)
+ except StopIteration:
+ # no insertions
yield from tokens
- return
-
- realpos = None
- insleft = True
-
- # iterate over the token stream where we want to insert
- # the tokens from the insertion list.
- for i, t, v in tokens:
-        # first iteration. store the position of the first item
- if realpos is None:
- realpos = i
- oldi = 0
- while insleft and i + len(v) >= index:
- tmpval = v[oldi:index - i]
+ return
+
+ realpos = None
+ insleft = True
+
+ # iterate over the token stream where we want to insert
+ # the tokens from the insertion list.
+ for i, t, v in tokens:
+        # first iteration. store the position of the first item
+ if realpos is None:
+ realpos = i
+ oldi = 0
+ while insleft and i + len(v) >= index:
+ tmpval = v[oldi:index - i]
if tmpval:
yield realpos, t, tmpval
realpos += len(tmpval)
- for it_index, it_token, it_value in itokens:
- yield realpos, it_token, it_value
- realpos += len(it_value)
- oldi = index - i
- try:
- index, itokens = next(insertions)
- except StopIteration:
- insleft = False
- break # not strictly necessary
+ for it_index, it_token, it_value in itokens:
+ yield realpos, it_token, it_value
+ realpos += len(it_value)
+ oldi = index - i
+ try:
+ index, itokens = next(insertions)
+ except StopIteration:
+ insleft = False
+ break # not strictly necessary
if oldi < len(v):
yield realpos, t, v[oldi:]
realpos += len(v) - oldi
-
- # leftover tokens
- while insleft:
- # no normal tokens, set realpos to zero
- realpos = realpos or 0
- for p, t, v in itokens:
- yield realpos, t, v
- realpos += len(v)
- try:
- index, itokens = next(insertions)
- except StopIteration:
- insleft = False
- break # not strictly necessary
-
-
-class ProfilingRegexLexerMeta(RegexLexerMeta):
- """Metaclass for ProfilingRegexLexer, collects regex timing info."""
-
- def _process_regex(cls, regex, rflags, state):
- if isinstance(regex, words):
- rex = regex_opt(regex.words, prefix=regex.prefix,
- suffix=regex.suffix)
- else:
- rex = regex
- compiled = re.compile(rex, rflags)
-
- def match_func(text, pos, endpos=sys.maxsize):
- info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
- t0 = time.time()
- res = compiled.match(text, pos, endpos)
- t1 = time.time()
- info[0] += 1
- info[1] += t1 - t0
- return res
- return match_func
-
-
+
+ # leftover tokens
+ while insleft:
+ # no normal tokens, set realpos to zero
+ realpos = realpos or 0
+ for p, t, v in itokens:
+ yield realpos, t, v
+ realpos += len(v)
+ try:
+ index, itokens = next(insertions)
+ except StopIteration:
+ insleft = False
+ break # not strictly necessary
+
+
+class ProfilingRegexLexerMeta(RegexLexerMeta):
+ """Metaclass for ProfilingRegexLexer, collects regex timing info."""
+
+ def _process_regex(cls, regex, rflags, state):
+ if isinstance(regex, words):
+ rex = regex_opt(regex.words, prefix=regex.prefix,
+ suffix=regex.suffix)
+ else:
+ rex = regex
+ compiled = re.compile(rex, rflags)
+
+ def match_func(text, pos, endpos=sys.maxsize):
+ info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
+ t0 = time.time()
+ res = compiled.match(text, pos, endpos)
+ t1 = time.time()
+ info[0] += 1
+ info[1] += t1 - t0
+ return res
+ return match_func
+
+
class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
- """Drop-in replacement for RegexLexer that does profiling of its regexes."""
-
- _prof_data = []
- _prof_sort_index = 4 # defaults to time per call
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- # this needs to be a stack, since using(this) will produce nested calls
- self.__class__._prof_data.append({})
+ """Drop-in replacement for RegexLexer that does profiling of its regexes."""
+
+ _prof_data = []
+ _prof_sort_index = 4 # defaults to time per call
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ # this needs to be a stack, since using(this) will produce nested calls
+ self.__class__._prof_data.append({})
yield from RegexLexer.get_tokens_unprocessed(self, text, stack)
- rawdata = self.__class__._prof_data.pop()
- data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
- n, 1000 * t, 1000 * t / n)
- for ((s, r), (n, t)) in rawdata.items()),
- key=lambda x: x[self._prof_sort_index],
- reverse=True)
- sum_total = sum(x[3] for x in data)
-
- print()
- print('Profiling result for %s lexing %d chars in %.3f ms' %
- (self.__class__.__name__, len(text), sum_total))
- print('=' * 110)
- print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
- print('-' * 110)
- for d in data:
- print('%-20s %-65s %5d %8.4f %8.4f' % d)
- print('=' * 110)
+ rawdata = self.__class__._prof_data.pop()
+ data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
+ n, 1000 * t, 1000 * t / n)
+ for ((s, r), (n, t)) in rawdata.items()),
+ key=lambda x: x[self._prof_sort_index],
+ reverse=True)
+ sum_total = sum(x[3] for x in data)
+
+ print()
+ print('Profiling result for %s lexing %d chars in %.3f ms' %
+ (self.__class__.__name__, len(text), sum_total))
+ print('=' * 110)
+ print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
+ print('-' * 110)
+ for d in data:
+ print('%-20s %-65s %5d %8.4f %8.4f' % d)
+ print('=' * 110)
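
The RegexLexer restored above is driven entirely by a class-level ``tokens`` dictionary: each state maps to a list of (regex, tokentype, new_state) rules, ``bygroups`` assigns one token type per regex group, and strings such as '#pop' and '#push' manipulate the state stack. A small self-contained lexer in that style is sketched below; the TinyConfLexer grammar is invented for illustration and is not part of Pygments or of this change.

# Sketch of a minimal RegexLexer subclass (illustrative grammar only).
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Name, Operator, String, Text

class TinyConfLexer(RegexLexer):
    name = 'TinyConf'          # hypothetical lexer, for illustration
    aliases = ['tinyconf']
    filenames = []

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'#.*?$', Comment.Single),
            # key = value: two groups, two token types, then push 'value'
            (r'(\w+)(\s*=\s*)', bygroups(Name.Attribute, Operator), 'value'),
        ],
        'value': [
            (r'[^\n]+', String),      # the value itself
            (r'\n', Text, '#pop'),    # end of line: back to 'root'
        ],
    }

if __name__ == '__main__':
    for ttype, value in TinyConfLexer().get_tokens('# demo\nretries = 3\n'):
        print(ttype, repr(value))
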
diff --git a/contrib/python/Pygments/py3/pygments/lexers/__init__.py b/contrib/python/Pygments/py3/pygments/lexers/__init__.py
index 9b89b6da3f..515535dd91 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/__init__.py
@@ -1,80 +1,80 @@
-"""
- pygments.lexers
- ~~~~~~~~~~~~~~~
-
- Pygments lexers.
-
+"""
+ pygments.lexers
+ ~~~~~~~~~~~~~~~
+
+ Pygments lexers.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import sys
-import types
-import fnmatch
-from os.path import basename
-
-from pygments.lexers._mapping import LEXERS
-from pygments.modeline import get_filetype_from_buffer
-from pygments.plugin import find_plugin_lexers
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import sys
+import types
+import fnmatch
+from os.path import basename
+
+from pygments.lexers._mapping import LEXERS
+from pygments.modeline import get_filetype_from_buffer
+from pygments.plugin import find_plugin_lexers
from pygments.util import ClassNotFound, guess_decode
-
+
COMPAT = {
'Python3Lexer': 'PythonLexer',
'Python3TracebackLexer': 'PythonTracebackLexer',
}
-
-__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
+
+__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT)
-
-_lexer_cache = {}
-_pattern_cache = {}
-
-
-def _fn_matches(fn, glob):
- """Return whether the supplied file name fn matches pattern filename."""
- if glob not in _pattern_cache:
- pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
- return pattern.match(fn)
- return _pattern_cache[glob].match(fn)
-
-
-def _load_lexers(module_name):
- """Load a lexer (and all others in the module too)."""
- mod = __import__(module_name, None, None, ['__all__'])
- for lexer_name in mod.__all__:
- cls = getattr(mod, lexer_name)
- _lexer_cache[cls.name] = cls
-
-
-def get_all_lexers():
- """Return a generator of tuples in the form ``(name, aliases,
-    filenames, mimetypes)`` of all known lexers.
- """
+
+_lexer_cache = {}
+_pattern_cache = {}
+
+
+def _fn_matches(fn, glob):
+ """Return whether the supplied file name fn matches pattern filename."""
+ if glob not in _pattern_cache:
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
+
+
+def _load_lexers(module_name):
+ """Load a lexer (and all others in the module too)."""
+ mod = __import__(module_name, None, None, ['__all__'])
+ for lexer_name in mod.__all__:
+ cls = getattr(mod, lexer_name)
+ _lexer_cache[cls.name] = cls
+
+
+def get_all_lexers():
+ """Return a generator of tuples in the form ``(name, aliases,
+    filenames, mimetypes)`` of all known lexers.
+ """
for item in LEXERS.values():
- yield item[1:]
- for lexer in find_plugin_lexers():
- yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
-
-
-def find_lexer_class(name):
- """Lookup a lexer class by name.
-
- Return None if not found.
- """
- if name in _lexer_cache:
- return _lexer_cache[name]
- # lookup builtin lexers
+ yield item[1:]
+ for lexer in find_plugin_lexers():
+ yield lexer.name, lexer.aliases, lexer.filenames, lexer.mimetypes
+
+
+def find_lexer_class(name):
+ """Lookup a lexer class by name.
+
+ Return None if not found.
+ """
+ if name in _lexer_cache:
+ return _lexer_cache[name]
+ # lookup builtin lexers
for module_name, lname, aliases, _, _ in LEXERS.values():
- if name == lname:
- _load_lexers(module_name)
- return _lexer_cache[name]
- # continue with lexers from setuptools entrypoints
- for cls in find_plugin_lexers():
- if cls.name == name:
- return cls
-
-
+ if name == lname:
+ _load_lexers(module_name)
+ return _lexer_cache[name]
+ # continue with lexers from setuptools entrypoints
+ for cls in find_plugin_lexers():
+ if cls.name == name:
+ return cls
+
+
def find_lexer_class_by_name(_alias):
"""Lookup a lexer class by alias.
@@ -97,27 +97,27 @@ def find_lexer_class_by_name(_alias):
raise ClassNotFound('no lexer for alias %r found' % _alias)
-def get_lexer_by_name(_alias, **options):
- """Get a lexer by an alias.
-
- Raises ClassNotFound if not found.
- """
- if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
-
- # lookup builtin lexers
+def get_lexer_by_name(_alias, **options):
+ """Get a lexer by an alias.
+
+ Raises ClassNotFound if not found.
+ """
+ if not _alias:
+ raise ClassNotFound('no lexer for alias %r found' % _alias)
+
+ # lookup builtin lexers
for module_name, name, aliases, _, _ in LEXERS.values():
- if _alias.lower() in aliases:
- if name not in _lexer_cache:
- _load_lexers(module_name)
- return _lexer_cache[name](**options)
- # continue with lexers from setuptools entrypoints
- for cls in find_plugin_lexers():
- if _alias.lower() in cls.aliases:
- return cls(**options)
- raise ClassNotFound('no lexer for alias %r found' % _alias)
-
-
+ if _alias.lower() in aliases:
+ if name not in _lexer_cache:
+ _load_lexers(module_name)
+ return _lexer_cache[name](**options)
+ # continue with lexers from setuptools entrypoints
+ for cls in find_plugin_lexers():
+ if _alias.lower() in cls.aliases:
+ return cls(**options)
+ raise ClassNotFound('no lexer for alias %r found' % _alias)
+
+
def load_lexer_from_file(filename, lexername="CustomLexer", **options):
"""Load a lexer from a file.
@@ -153,144 +153,144 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options):
raise ClassNotFound('error when loading custom lexer: %s' % err)
-def find_lexer_class_for_filename(_fn, code=None):
- """Get a lexer for a filename.
-
- If multiple lexers match the filename pattern, use ``analyse_text()`` to
- figure out which one is more appropriate.
-
- Returns None if not found.
- """
- matches = []
- fn = basename(_fn)
+def find_lexer_class_for_filename(_fn, code=None):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Returns None if not found.
+ """
+ matches = []
+ fn = basename(_fn)
for modname, name, _, filenames, _ in LEXERS.values():
- for filename in filenames:
- if _fn_matches(fn, filename):
- if name not in _lexer_cache:
- _load_lexers(modname)
- matches.append((_lexer_cache[name], filename))
- for cls in find_plugin_lexers():
- for filename in cls.filenames:
- if _fn_matches(fn, filename):
- matches.append((cls, filename))
-
+ for filename in filenames:
+ if _fn_matches(fn, filename):
+ if name not in _lexer_cache:
+ _load_lexers(modname)
+ matches.append((_lexer_cache[name], filename))
+ for cls in find_plugin_lexers():
+ for filename in cls.filenames:
+ if _fn_matches(fn, filename):
+ matches.append((cls, filename))
+
if isinstance(code, bytes):
- # decode it, since all analyse_text functions expect unicode
- code = guess_decode(code)
-
- def get_rating(info):
- cls, filename = info
- # explicit patterns get a bonus
- bonus = '*' not in filename and 0.5 or 0
- # The class _always_ defines analyse_text because it's included in
- # the Lexer class. The default implementation returns None which
- # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
- # to find lexers which need it overridden.
- if code:
+ # decode it, since all analyse_text functions expect unicode
+ code = guess_decode(code)
+
+ def get_rating(info):
+ cls, filename = info
+ # explicit patterns get a bonus
+ bonus = '*' not in filename and 0.5 or 0
+ # The class _always_ defines analyse_text because it's included in
+ # the Lexer class. The default implementation returns None which
+ # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
+ # to find lexers which need it overridden.
+ if code:
return cls.analyse_text(code) + bonus, cls.__name__
return cls.priority + bonus, cls.__name__
-
- if matches:
- matches.sort(key=get_rating)
- # print "Possible lexers, after sort:", matches
- return matches[-1][0]
-
-
-def get_lexer_for_filename(_fn, code=None, **options):
- """Get a lexer for a filename.
-
- If multiple lexers match the filename pattern, use ``analyse_text()`` to
- figure out which one is more appropriate.
-
- Raises ClassNotFound if not found.
- """
- res = find_lexer_class_for_filename(_fn, code)
- if not res:
- raise ClassNotFound('no lexer for filename %r found' % _fn)
- return res(**options)
-
-
-def get_lexer_for_mimetype(_mime, **options):
- """Get a lexer for a mimetype.
-
- Raises ClassNotFound if not found.
- """
+
+ if matches:
+ matches.sort(key=get_rating)
+ # print "Possible lexers, after sort:", matches
+ return matches[-1][0]
+
+
+def get_lexer_for_filename(_fn, code=None, **options):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Raises ClassNotFound if not found.
+ """
+ res = find_lexer_class_for_filename(_fn, code)
+ if not res:
+ raise ClassNotFound('no lexer for filename %r found' % _fn)
+ return res(**options)
+
+
+def get_lexer_for_mimetype(_mime, **options):
+ """Get a lexer for a mimetype.
+
+ Raises ClassNotFound if not found.
+ """
for modname, name, _, _, mimetypes in LEXERS.values():
- if _mime in mimetypes:
- if name not in _lexer_cache:
- _load_lexers(modname)
- return _lexer_cache[name](**options)
- for cls in find_plugin_lexers():
- if _mime in cls.mimetypes:
- return cls(**options)
- raise ClassNotFound('no lexer for mimetype %r found' % _mime)
-
-
-def _iter_lexerclasses(plugins=True):
- """Return an iterator over all lexer classes."""
- for key in sorted(LEXERS):
- module_name, name = LEXERS[key][:2]
- if name not in _lexer_cache:
- _load_lexers(module_name)
- yield _lexer_cache[name]
- if plugins:
+ if _mime in mimetypes:
+ if name not in _lexer_cache:
+ _load_lexers(modname)
+ return _lexer_cache[name](**options)
+ for cls in find_plugin_lexers():
+ if _mime in cls.mimetypes:
+ return cls(**options)
+ raise ClassNotFound('no lexer for mimetype %r found' % _mime)
+
+
+def _iter_lexerclasses(plugins=True):
+ """Return an iterator over all lexer classes."""
+ for key in sorted(LEXERS):
+ module_name, name = LEXERS[key][:2]
+ if name not in _lexer_cache:
+ _load_lexers(module_name)
+ yield _lexer_cache[name]
+ if plugins:
yield from find_plugin_lexers()
-
-
-def guess_lexer_for_filename(_fn, _text, **options):
- """
- Lookup all lexers that handle those filenames primary (``filenames``)
- or secondary (``alias_filenames``). Then run a text analysis for those
- lexers and choose the best result.
-
- usage::
-
- >>> from pygments.lexers import guess_lexer_for_filename
- >>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
- <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
- >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
- <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
- >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
- <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
- """
- fn = basename(_fn)
- primary = {}
- matching_lexers = set()
- for lexer in _iter_lexerclasses():
- for filename in lexer.filenames:
- if _fn_matches(fn, filename):
- matching_lexers.add(lexer)
- primary[lexer] = True
- for filename in lexer.alias_filenames:
- if _fn_matches(fn, filename):
- matching_lexers.add(lexer)
- primary[lexer] = False
- if not matching_lexers:
- raise ClassNotFound('no lexer for filename %r found' % fn)
- if len(matching_lexers) == 1:
- return matching_lexers.pop()(**options)
- result = []
- for lexer in matching_lexers:
- rv = lexer.analyse_text(_text)
- if rv == 1.0:
- return lexer(**options)
- result.append((rv, lexer))
-
- def type_sort(t):
- # sort by:
- # - analyse score
- # - is primary filename pattern?
- # - priority
- # - last resort: class name
- return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
- result.sort(key=type_sort)
-
- return result[-1][1](**options)
-
-
-def guess_lexer(_text, **options):
- """Guess a lexer by strong distinctions in the text (eg, shebang)."""
-
+
+
+def guess_lexer_for_filename(_fn, _text, **options):
+ """
+ Lookup all lexers that handle those filenames primary (``filenames``)
+ or secondary (``alias_filenames``). Then run a text analysis for those
+ lexers and choose the best result.
+
+ usage::
+
+ >>> from pygments.lexers import guess_lexer_for_filename
+ >>> guess_lexer_for_filename('hello.html', '<%= @foo %>')
+ <pygments.lexers.templates.RhtmlLexer object at 0xb7d2f32c>
+ >>> guess_lexer_for_filename('hello.html', '<h1>{{ title|e }}</h1>')
+ <pygments.lexers.templates.HtmlDjangoLexer object at 0xb7d2f2ac>
+ >>> guess_lexer_for_filename('style.css', 'a { color: <?= $link ?> }')
+ <pygments.lexers.templates.CssPhpLexer object at 0xb7ba518c>
+ """
+ fn = basename(_fn)
+ primary = {}
+ matching_lexers = set()
+ for lexer in _iter_lexerclasses():
+ for filename in lexer.filenames:
+ if _fn_matches(fn, filename):
+ matching_lexers.add(lexer)
+ primary[lexer] = True
+ for filename in lexer.alias_filenames:
+ if _fn_matches(fn, filename):
+ matching_lexers.add(lexer)
+ primary[lexer] = False
+ if not matching_lexers:
+ raise ClassNotFound('no lexer for filename %r found' % fn)
+ if len(matching_lexers) == 1:
+ return matching_lexers.pop()(**options)
+ result = []
+ for lexer in matching_lexers:
+ rv = lexer.analyse_text(_text)
+ if rv == 1.0:
+ return lexer(**options)
+ result.append((rv, lexer))
+
+ def type_sort(t):
+ # sort by:
+ # - analyse score
+ # - is primary filename pattern?
+ # - priority
+ # - last resort: class name
+ return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
+ result.sort(key=type_sort)
+
+ return result[-1][1](**options)
+
+
+def guess_lexer(_text, **options):
+ """Guess a lexer by strong distinctions in the text (eg, shebang)."""
+
if not isinstance(_text, str):
inencoding = options.get('inencoding', options.get('encoding'))
if inencoding:
@@ -298,44 +298,44 @@ def guess_lexer(_text, **options):
else:
_text, _ = guess_decode(_text)
- # try to get a vim modeline first
- ft = get_filetype_from_buffer(_text)
-
- if ft is not None:
- try:
- return get_lexer_by_name(ft, **options)
- except ClassNotFound:
- pass
-
- best_lexer = [0.0, None]
- for lexer in _iter_lexerclasses():
- rv = lexer.analyse_text(_text)
- if rv == 1.0:
- return lexer(**options)
- if rv > best_lexer[0]:
- best_lexer[:] = (rv, lexer)
- if not best_lexer[0] or best_lexer[1] is None:
- raise ClassNotFound('no lexer matching the text found')
- return best_lexer[1](**options)
-
-
-class _automodule(types.ModuleType):
- """Automatically import lexers."""
-
- def __getattr__(self, name):
- info = LEXERS.get(name)
- if info:
- _load_lexers(info[0])
- cls = _lexer_cache[info[1]]
- setattr(self, name, cls)
- return cls
+ # try to get a vim modeline first
+ ft = get_filetype_from_buffer(_text)
+
+ if ft is not None:
+ try:
+ return get_lexer_by_name(ft, **options)
+ except ClassNotFound:
+ pass
+
+ best_lexer = [0.0, None]
+ for lexer in _iter_lexerclasses():
+ rv = lexer.analyse_text(_text)
+ if rv == 1.0:
+ return lexer(**options)
+ if rv > best_lexer[0]:
+ best_lexer[:] = (rv, lexer)
+ if not best_lexer[0] or best_lexer[1] is None:
+ raise ClassNotFound('no lexer matching the text found')
+ return best_lexer[1](**options)
+
+
+class _automodule(types.ModuleType):
+ """Automatically import lexers."""
+
+ def __getattr__(self, name):
+ info = LEXERS.get(name)
+ if info:
+ _load_lexers(info[0])
+ cls = _lexer_cache[info[1]]
+ setattr(self, name, cls)
+ return cls
if name in COMPAT:
return getattr(self, COMPAT[name])
- raise AttributeError(name)
-
-
-oldmod = sys.modules[__name__]
-newmod = _automodule(__name__)
-newmod.__dict__.update(oldmod.__dict__)
-sys.modules[__name__] = newmod
-del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
+ raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
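The pygments.lexers module shown in the hunk above is the public entry point for lexer lookup. A minimal usage sketch, assuming a stock Pygments installation (the 'stripall' option is a standard Lexer option, not something introduced by this patch):

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename, guess_lexer
    from pygments.util import ClassNotFound

    try:
        # look up by alias; extra keyword arguments are passed to the lexer
        lexer = get_lexer_by_name('python', stripall=True)

        # pick a lexer from the filename, optionally disambiguating with the code itself
        lexer = get_lexer_for_filename('example.py', code='print("hi")')

        # fall back to content-based guessing (vim modelines, analyse_text scores)
        lexer = guess_lexer('#!/usr/bin/env python\nprint("hi")\n')
    except ClassNotFound:
        # each helper raises ClassNotFound when no lexer matches
        lexer = None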
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
index 74e057ef40..de46cbe3e8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
@@ -1,1644 +1,1644 @@
-"""
- pygments.lexers._asy_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the asy-function names and asy-variable names of
- Asymptote.
-
- Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
- TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
- for function and variable names.
-
+"""
+ pygments.lexers._asy_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the asy-function names and asy-variable names of
+ Asymptote.
+
+ Do not edit the ASYFUNCNAME and ASYVARNAME sets by hand.
+ TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
+ for function and variable names.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
ASYFUNCNAME = {
- 'AND',
- 'Arc',
- 'ArcArrow',
- 'ArcArrows',
- 'Arrow',
- 'Arrows',
- 'Automatic',
- 'AvantGarde',
- 'BBox',
- 'BWRainbow',
- 'BWRainbow2',
- 'Bar',
- 'Bars',
- 'BeginArcArrow',
- 'BeginArrow',
- 'BeginBar',
- 'BeginDotMargin',
- 'BeginMargin',
- 'BeginPenMargin',
- 'Blank',
- 'Bookman',
- 'Bottom',
- 'BottomTop',
- 'Bounds',
- 'Break',
- 'Broken',
- 'BrokenLog',
- 'Ceil',
- 'Circle',
- 'CircleBarIntervalMarker',
- 'Cos',
- 'Courier',
- 'CrossIntervalMarker',
- 'DefaultFormat',
- 'DefaultLogFormat',
- 'Degrees',
- 'Dir',
- 'DotMargin',
- 'DotMargins',
- 'Dotted',
- 'Draw',
- 'Drawline',
- 'Embed',
- 'EndArcArrow',
- 'EndArrow',
- 'EndBar',
- 'EndDotMargin',
- 'EndMargin',
- 'EndPenMargin',
- 'Fill',
- 'FillDraw',
- 'Floor',
- 'Format',
- 'Full',
- 'Gaussian',
- 'Gaussrand',
- 'Gaussrandpair',
- 'Gradient',
- 'Grayscale',
- 'Helvetica',
- 'Hermite',
- 'HookHead',
- 'InOutTicks',
- 'InTicks',
- 'J',
- 'Label',
- 'Landscape',
- 'Left',
- 'LeftRight',
- 'LeftTicks',
- 'Legend',
- 'Linear',
- 'Link',
- 'Log',
- 'LogFormat',
- 'Margin',
- 'Margins',
- 'Mark',
- 'MidArcArrow',
- 'MidArrow',
- 'NOT',
- 'NewCenturySchoolBook',
- 'NoBox',
- 'NoMargin',
- 'NoModifier',
- 'NoTicks',
- 'NoTicks3',
- 'NoZero',
- 'NoZeroFormat',
- 'None',
- 'OR',
- 'OmitFormat',
- 'OmitTick',
- 'OutTicks',
- 'Ox',
- 'Oy',
- 'Palatino',
- 'PaletteTicks',
- 'Pen',
- 'PenMargin',
- 'PenMargins',
- 'Pentype',
- 'Portrait',
- 'RadialShade',
- 'Rainbow',
- 'Range',
- 'Relative',
- 'Right',
- 'RightTicks',
- 'Rotate',
- 'Round',
- 'SQR',
- 'Scale',
- 'ScaleX',
- 'ScaleY',
- 'ScaleZ',
- 'Seascape',
- 'Shift',
- 'Sin',
- 'Slant',
- 'Spline',
- 'StickIntervalMarker',
- 'Straight',
- 'Symbol',
- 'Tan',
- 'TeXify',
- 'Ticks',
- 'Ticks3',
- 'TildeIntervalMarker',
- 'TimesRoman',
- 'Top',
- 'TrueMargin',
- 'UnFill',
- 'UpsideDown',
- 'Wheel',
- 'X',
- 'XEquals',
- 'XOR',
- 'XY',
- 'XYEquals',
- 'XYZero',
- 'XYgrid',
- 'XZEquals',
- 'XZZero',
- 'XZero',
- 'XZgrid',
- 'Y',
- 'YEquals',
- 'YXgrid',
- 'YZ',
- 'YZEquals',
- 'YZZero',
- 'YZero',
- 'YZgrid',
- 'Z',
- 'ZX',
- 'ZXgrid',
- 'ZYgrid',
- 'ZapfChancery',
- 'ZapfDingbats',
- '_cputime',
- '_draw',
- '_eval',
- '_image',
- '_labelpath',
- '_projection',
- '_strokepath',
- '_texpath',
- 'aCos',
- 'aSin',
- 'aTan',
- 'abort',
- 'abs',
- 'accel',
- 'acos',
- 'acosh',
- 'acot',
- 'acsc',
- 'add',
- 'addArrow',
- 'addMargins',
- 'addSaveFunction',
- 'addnode',
- 'addnodes',
- 'addpenarc',
- 'addpenline',
- 'addseg',
- 'adjust',
- 'alias',
- 'align',
- 'all',
- 'altitude',
- 'angabscissa',
- 'angle',
- 'angpoint',
- 'animate',
- 'annotate',
- 'anticomplementary',
- 'antipedal',
- 'apply',
- 'approximate',
- 'arc',
- 'arcarrowsize',
- 'arccircle',
- 'arcdir',
- 'arcfromcenter',
- 'arcfromfocus',
- 'arclength',
- 'arcnodesnumber',
- 'arcpoint',
- 'arcsubtended',
- 'arcsubtendedcenter',
- 'arctime',
- 'arctopath',
- 'array',
- 'arrow',
- 'arrow2',
- 'arrowbase',
- 'arrowbasepoints',
- 'arrowsize',
- 'asec',
- 'asin',
- 'asinh',
- 'ask',
- 'assert',
- 'asy',
- 'asycode',
- 'asydir',
- 'asyfigure',
- 'asyfilecode',
- 'asyinclude',
- 'asywrite',
- 'atan',
- 'atan2',
- 'atanh',
- 'atbreakpoint',
- 'atexit',
- 'atime',
- 'attach',
- 'attract',
- 'atupdate',
- 'autoformat',
- 'autoscale',
- 'autoscale3',
- 'axes',
- 'axes3',
- 'axialshade',
- 'axis',
- 'axiscoverage',
- 'azimuth',
- 'babel',
- 'background',
- 'bangles',
- 'bar',
- 'barmarksize',
- 'barsize',
- 'basealign',
- 'baseline',
- 'bbox',
- 'beep',
- 'begin',
- 'beginclip',
- 'begingroup',
- 'beginpoint',
- 'between',
- 'bevel',
- 'bezier',
- 'bezierP',
- 'bezierPP',
- 'bezierPPP',
- 'bezulate',
- 'bibliography',
- 'bibliographystyle',
- 'binarytree',
- 'binarytreeNode',
- 'binomial',
- 'binput',
- 'bins',
- 'bisector',
- 'bisectorpoint',
- 'blend',
- 'boutput',
- 'box',
- 'bqe',
- 'breakpoint',
- 'breakpoints',
- 'brick',
- 'buildRestoreDefaults',
- 'buildRestoreThunk',
- 'buildcycle',
- 'bulletcolor',
- 'canonical',
- 'canonicalcartesiansystem',
- 'cartesiansystem',
- 'case1',
- 'case2',
- 'case3',
- 'cbrt',
- 'cd',
- 'ceil',
- 'center',
- 'centerToFocus',
- 'centroid',
- 'cevian',
- 'change2',
- 'changecoordsys',
- 'checkSegment',
- 'checkconditionlength',
- 'checker',
- 'checklengths',
- 'checkposition',
- 'checktriangle',
- 'choose',
- 'circle',
- 'circlebarframe',
- 'circlemarkradius',
- 'circlenodesnumber',
- 'circumcenter',
- 'circumcircle',
- 'clamped',
- 'clear',
- 'clip',
- 'clipdraw',
- 'close',
- 'cmyk',
- 'code',
- 'colatitude',
- 'collect',
- 'collinear',
- 'color',
- 'colorless',
- 'colors',
- 'colorspace',
- 'comma',
- 'compassmark',
- 'complement',
- 'complementary',
- 'concat',
- 'concurrent',
- 'cone',
- 'conic',
- 'conicnodesnumber',
- 'conictype',
- 'conj',
- 'connect',
- 'containmentTree',
- 'contains',
- 'contour',
- 'contour3',
- 'controlSpecifier',
- 'convert',
- 'coordinates',
- 'coordsys',
- 'copy',
- 'cos',
- 'cosh',
- 'cot',
- 'countIntersections',
- 'cputime',
- 'crop',
- 'cropcode',
- 'cross',
- 'crossframe',
- 'crosshatch',
- 'crossmarksize',
- 'csc',
- 'cubicroots',
- 'curabscissa',
- 'curlSpecifier',
- 'curpoint',
- 'currentarrow',
- 'currentexitfunction',
- 'currentmomarrow',
- 'currentpolarconicroutine',
- 'curve',
- 'cut',
- 'cutafter',
- 'cutbefore',
- 'cyclic',
- 'cylinder',
- 'debugger',
- 'deconstruct',
- 'defaultdir',
- 'defaultformat',
- 'defaultpen',
- 'defined',
- 'degenerate',
- 'degrees',
- 'delete',
- 'deletepreamble',
- 'determinant',
- 'diagonal',
- 'diamond',
- 'diffdiv',
- 'dir',
- 'dirSpecifier',
- 'dirtime',
- 'display',
- 'distance',
- 'divisors',
- 'do_overpaint',
- 'dot',
- 'dotframe',
- 'dotsize',
- 'downcase',
- 'draw',
- 'drawAll',
- 'drawDoubleLine',
- 'drawFermion',
- 'drawGhost',
- 'drawGluon',
- 'drawMomArrow',
- 'drawPhoton',
- 'drawScalar',
- 'drawVertex',
- 'drawVertexBox',
- 'drawVertexBoxO',
- 'drawVertexBoxX',
- 'drawVertexO',
- 'drawVertexOX',
- 'drawVertexTriangle',
- 'drawVertexTriangleO',
- 'drawVertexX',
- 'drawarrow',
- 'drawarrow2',
- 'drawline',
- 'drawtick',
- 'duplicate',
- 'elle',
- 'ellipse',
- 'ellipsenodesnumber',
- 'embed',
- 'embed3',
- 'empty',
- 'enclose',
- 'end',
- 'endScript',
- 'endclip',
- 'endgroup',
- 'endl',
- 'endpoint',
- 'endpoints',
- 'eof',
- 'eol',
- 'equation',
- 'equations',
- 'erase',
- 'erasestep',
- 'erf',
- 'erfc',
- 'error',
- 'errorbar',
- 'errorbars',
- 'eval',
- 'excenter',
- 'excircle',
- 'exit',
- 'exitXasyMode',
- 'exitfunction',
- 'exp',
- 'expfactors',
- 'expi',
- 'expm1',
- 'exradius',
- 'extend',
- 'extension',
- 'extouch',
- 'fabs',
- 'factorial',
- 'fermat',
- 'fft',
- 'fhorner',
- 'figure',
- 'file',
- 'filecode',
- 'fill',
- 'filldraw',
- 'filloutside',
- 'fillrule',
- 'filltype',
- 'find',
- 'finite',
- 'finiteDifferenceJacobian',
- 'firstcut',
- 'firstframe',
- 'fit',
- 'fit2',
- 'fixedscaling',
- 'floor',
- 'flush',
- 'fmdefaults',
- 'fmod',
- 'focusToCenter',
- 'font',
- 'fontcommand',
- 'fontsize',
- 'foot',
- 'format',
- 'frac',
- 'frequency',
- 'fromCenter',
- 'fromFocus',
- 'fspline',
- 'functionshade',
- 'gamma',
- 'generate_random_backtrace',
- 'generateticks',
- 'gergonne',
- 'getc',
- 'getint',
- 'getpair',
- 'getreal',
- 'getstring',
- 'gettriple',
- 'gluon',
- 'gouraudshade',
- 'graph',
- 'graphic',
- 'gray',
- 'grestore',
- 'grid',
- 'grid3',
- 'gsave',
- 'halfbox',
- 'hatch',
- 'hdiffdiv',
- 'hermite',
- 'hex',
- 'histogram',
- 'history',
- 'hline',
- 'hprojection',
- 'hsv',
- 'hyperbola',
- 'hyperbolanodesnumber',
- 'hyperlink',
- 'hypot',
- 'identity',
- 'image',
- 'incenter',
- 'incentral',
- 'incircle',
- 'increasing',
- 'incrementposition',
- 'indexedTransform',
- 'indexedfigure',
- 'initXasyMode',
- 'initdefaults',
- 'input',
- 'inradius',
- 'insert',
- 'inside',
- 'integrate',
- 'interactive',
- 'interior',
- 'interp',
- 'interpolate',
- 'intersect',
- 'intersection',
- 'intersectionpoint',
- 'intersectionpoints',
- 'intersections',
- 'intouch',
- 'inverse',
- 'inversion',
- 'invisible',
- 'is3D',
- 'isDuplicate',
- 'isogonal',
- 'isogonalconjugate',
- 'isotomic',
- 'isotomicconjugate',
- 'isparabola',
- 'italic',
- 'item',
- 'key',
- 'kurtosis',
- 'kurtosisexcess',
- 'label',
- 'labelaxis',
- 'labelmargin',
- 'labelpath',
- 'labels',
- 'labeltick',
- 'labelx',
- 'labelx3',
- 'labely',
- 'labely3',
- 'labelz',
- 'labelz3',
- 'lastcut',
- 'latex',
- 'latitude',
- 'latticeshade',
- 'layer',
- 'layout',
- 'ldexp',
- 'leastsquares',
- 'legend',
- 'legenditem',
- 'length',
- 'lift',
- 'light',
- 'limits',
- 'line',
- 'linear',
- 'linecap',
- 'lineinversion',
- 'linejoin',
- 'linemargin',
- 'lineskip',
- 'linetype',
- 'linewidth',
- 'link',
- 'list',
- 'lm_enorm',
- 'lm_evaluate_default',
- 'lm_lmdif',
- 'lm_lmpar',
- 'lm_minimize',
- 'lm_print_default',
- 'lm_print_quiet',
- 'lm_qrfac',
- 'lm_qrsolv',
- 'locale',
- 'locate',
- 'locatefile',
- 'location',
- 'log',
- 'log10',
- 'log1p',
- 'logaxiscoverage',
- 'longitude',
- 'lookup',
- 'magnetize',
- 'makeNode',
- 'makedraw',
- 'makepen',
- 'map',
- 'margin',
- 'markangle',
- 'markangleradius',
- 'markanglespace',
- 'markarc',
- 'marker',
- 'markinterval',
- 'marknodes',
- 'markrightangle',
- 'markuniform',
- 'mass',
- 'masscenter',
- 'massformat',
- 'math',
- 'max',
- 'max3',
- 'maxbezier',
- 'maxbound',
- 'maxcoords',
- 'maxlength',
- 'maxratio',
- 'maxtimes',
- 'mean',
- 'medial',
- 'median',
- 'midpoint',
- 'min',
- 'min3',
- 'minbezier',
- 'minbound',
- 'minipage',
- 'minratio',
- 'mintimes',
- 'miterlimit',
- 'momArrowPath',
- 'momarrowsize',
- 'monotonic',
- 'multifigure',
- 'nativeformat',
- 'natural',
- 'needshipout',
- 'newl',
- 'newpage',
- 'newslide',
- 'newton',
- 'newtree',
- 'nextframe',
- 'nextnormal',
- 'nextpage',
- 'nib',
- 'nodabscissa',
- 'none',
- 'norm',
- 'normalvideo',
- 'notaknot',
- 'nowarn',
- 'numberpage',
- 'nurb',
- 'object',
- 'offset',
- 'onpath',
- 'opacity',
- 'opposite',
- 'orientation',
- 'orig_circlenodesnumber',
- 'orig_circlenodesnumber1',
- 'orig_draw',
- 'orig_ellipsenodesnumber',
- 'orig_ellipsenodesnumber1',
- 'orig_hyperbolanodesnumber',
- 'orig_parabolanodesnumber',
- 'origin',
- 'orthic',
- 'orthocentercenter',
- 'outformat',
- 'outline',
- 'outprefix',
- 'output',
- 'overloadedMessage',
- 'overwrite',
- 'pack',
- 'pad',
- 'pairs',
- 'palette',
- 'parabola',
- 'parabolanodesnumber',
- 'parallel',
- 'partialsum',
- 'path',
- 'path3',
- 'pattern',
- 'pause',
- 'pdf',
- 'pedal',
- 'periodic',
- 'perp',
- 'perpendicular',
- 'perpendicularmark',
- 'phantom',
- 'phi1',
- 'phi2',
- 'phi3',
- 'photon',
- 'piecewisestraight',
- 'point',
- 'polar',
- 'polarconicroutine',
- 'polargraph',
- 'polygon',
- 'postcontrol',
- 'postscript',
- 'pow10',
- 'ppoint',
- 'prc',
- 'prc0',
- 'precision',
- 'precontrol',
- 'prepend',
- 'print_random_addresses',
- 'project',
- 'projection',
- 'purge',
- 'pwhermite',
- 'quadrant',
- 'quadraticroots',
- 'quantize',
- 'quarticroots',
- 'quotient',
- 'radialshade',
- 'radians',
- 'radicalcenter',
- 'radicalline',
- 'radius',
- 'rand',
- 'randompath',
- 'rd',
- 'readline',
- 'realmult',
- 'realquarticroots',
- 'rectangle',
- 'rectangular',
- 'rectify',
- 'reflect',
- 'relabscissa',
- 'relative',
- 'relativedistance',
- 'reldir',
- 'relpoint',
- 'reltime',
- 'remainder',
- 'remark',
- 'removeDuplicates',
- 'rename',
- 'replace',
- 'report',
- 'resetdefaultpen',
- 'restore',
- 'restoredefaults',
- 'reverse',
- 'reversevideo',
- 'rf',
- 'rfind',
- 'rgb',
- 'rgba',
- 'rgbint',
- 'rms',
- 'rotate',
- 'rotateO',
- 'rotation',
- 'round',
- 'roundbox',
- 'roundedpath',
- 'roundrectangle',
- 'samecoordsys',
- 'sameside',
- 'sample',
- 'save',
- 'savedefaults',
- 'saveline',
- 'scale',
- 'scale3',
- 'scaleO',
- 'scaleT',
- 'scaleless',
- 'scientific',
- 'search',
- 'searchtree',
- 'sec',
- 'secondaryX',
- 'secondaryY',
- 'seconds',
- 'section',
- 'sector',
- 'seek',
- 'seekeof',
- 'segment',
- 'sequence',
- 'setpens',
- 'sgn',
- 'sgnd',
- 'sharpangle',
- 'sharpdegrees',
- 'shift',
- 'shiftless',
- 'shipout',
- 'shipout3',
- 'show',
- 'side',
- 'simeq',
- 'simpson',
- 'sin',
- 'single',
- 'sinh',
- 'size',
- 'size3',
- 'skewness',
- 'skip',
- 'slant',
- 'sleep',
- 'slope',
- 'slopefield',
- 'solve',
- 'solveBVP',
- 'sort',
- 'sourceline',
- 'sphere',
- 'split',
- 'sqrt',
- 'square',
- 'srand',
- 'standardizecoordsys',
- 'startScript',
- 'startTrembling',
- 'stdev',
- 'step',
- 'stickframe',
- 'stickmarksize',
- 'stickmarkspace',
- 'stop',
- 'straight',
- 'straightness',
- 'string',
- 'stripdirectory',
- 'stripextension',
- 'stripfile',
- 'strokepath',
- 'subdivide',
- 'subitem',
- 'subpath',
- 'substr',
- 'sum',
- 'surface',
- 'symmedial',
- 'symmedian',
- 'system',
- 'tab',
- 'tableau',
- 'tan',
- 'tangent',
- 'tangential',
- 'tangents',
- 'tanh',
- 'tell',
- 'tensionSpecifier',
- 'tensorshade',
- 'tex',
- 'texcolor',
- 'texify',
- 'texpath',
- 'texpreamble',
- 'texreset',
- 'texshipout',
- 'texsize',
- 'textpath',
- 'thick',
- 'thin',
- 'tick',
- 'tickMax',
- 'tickMax3',
- 'tickMin',
- 'tickMin3',
- 'ticklabelshift',
- 'ticklocate',
- 'tildeframe',
- 'tildemarksize',
- 'tile',
- 'tiling',
- 'time',
- 'times',
- 'title',
- 'titlepage',
- 'topbox',
- 'transform',
- 'transformation',
- 'transpose',
- 'tremble',
- 'trembleFuzz',
- 'tremble_circlenodesnumber',
- 'tremble_circlenodesnumber1',
- 'tremble_draw',
- 'tremble_ellipsenodesnumber',
- 'tremble_ellipsenodesnumber1',
- 'tremble_hyperbolanodesnumber',
- 'tremble_marknodes',
- 'tremble_markuniform',
- 'tremble_parabolanodesnumber',
- 'triangle',
- 'triangleAbc',
- 'triangleabc',
- 'triangulate',
- 'tricoef',
- 'tridiagonal',
- 'trilinear',
- 'trim',
- 'trueMagnetize',
- 'truepoint',
- 'tube',
- 'uncycle',
- 'unfill',
- 'uniform',
- 'unit',
- 'unitrand',
- 'unitsize',
- 'unityroot',
- 'unstraighten',
- 'upcase',
- 'updatefunction',
- 'uperiodic',
- 'upscale',
- 'uptodate',
- 'usepackage',
- 'usersetting',
- 'usetypescript',
- 'usleep',
- 'value',
- 'variance',
- 'variancebiased',
- 'vbox',
- 'vector',
- 'vectorfield',
- 'verbatim',
- 'view',
- 'vline',
- 'vperiodic',
- 'vprojection',
- 'warn',
- 'warning',
- 'windingnumber',
- 'write',
- 'xaxis',
- 'xaxis3',
- 'xaxis3At',
- 'xaxisAt',
- 'xequals',
- 'xinput',
- 'xlimits',
- 'xoutput',
- 'xpart',
- 'xscale',
- 'xscaleO',
- 'xtick',
- 'xtick3',
- 'xtrans',
- 'yaxis',
- 'yaxis3',
- 'yaxis3At',
- 'yaxisAt',
- 'yequals',
- 'ylimits',
- 'ypart',
- 'yscale',
- 'yscaleO',
- 'ytick',
- 'ytick3',
- 'ytrans',
- 'zaxis3',
- 'zaxis3At',
- 'zero',
- 'zero3',
- 'zlimits',
- 'zpart',
- 'ztick',
- 'ztick3',
- 'ztrans'
+ 'AND',
+ 'Arc',
+ 'ArcArrow',
+ 'ArcArrows',
+ 'Arrow',
+ 'Arrows',
+ 'Automatic',
+ 'AvantGarde',
+ 'BBox',
+ 'BWRainbow',
+ 'BWRainbow2',
+ 'Bar',
+ 'Bars',
+ 'BeginArcArrow',
+ 'BeginArrow',
+ 'BeginBar',
+ 'BeginDotMargin',
+ 'BeginMargin',
+ 'BeginPenMargin',
+ 'Blank',
+ 'Bookman',
+ 'Bottom',
+ 'BottomTop',
+ 'Bounds',
+ 'Break',
+ 'Broken',
+ 'BrokenLog',
+ 'Ceil',
+ 'Circle',
+ 'CircleBarIntervalMarker',
+ 'Cos',
+ 'Courier',
+ 'CrossIntervalMarker',
+ 'DefaultFormat',
+ 'DefaultLogFormat',
+ 'Degrees',
+ 'Dir',
+ 'DotMargin',
+ 'DotMargins',
+ 'Dotted',
+ 'Draw',
+ 'Drawline',
+ 'Embed',
+ 'EndArcArrow',
+ 'EndArrow',
+ 'EndBar',
+ 'EndDotMargin',
+ 'EndMargin',
+ 'EndPenMargin',
+ 'Fill',
+ 'FillDraw',
+ 'Floor',
+ 'Format',
+ 'Full',
+ 'Gaussian',
+ 'Gaussrand',
+ 'Gaussrandpair',
+ 'Gradient',
+ 'Grayscale',
+ 'Helvetica',
+ 'Hermite',
+ 'HookHead',
+ 'InOutTicks',
+ 'InTicks',
+ 'J',
+ 'Label',
+ 'Landscape',
+ 'Left',
+ 'LeftRight',
+ 'LeftTicks',
+ 'Legend',
+ 'Linear',
+ 'Link',
+ 'Log',
+ 'LogFormat',
+ 'Margin',
+ 'Margins',
+ 'Mark',
+ 'MidArcArrow',
+ 'MidArrow',
+ 'NOT',
+ 'NewCenturySchoolBook',
+ 'NoBox',
+ 'NoMargin',
+ 'NoModifier',
+ 'NoTicks',
+ 'NoTicks3',
+ 'NoZero',
+ 'NoZeroFormat',
+ 'None',
+ 'OR',
+ 'OmitFormat',
+ 'OmitTick',
+ 'OutTicks',
+ 'Ox',
+ 'Oy',
+ 'Palatino',
+ 'PaletteTicks',
+ 'Pen',
+ 'PenMargin',
+ 'PenMargins',
+ 'Pentype',
+ 'Portrait',
+ 'RadialShade',
+ 'Rainbow',
+ 'Range',
+ 'Relative',
+ 'Right',
+ 'RightTicks',
+ 'Rotate',
+ 'Round',
+ 'SQR',
+ 'Scale',
+ 'ScaleX',
+ 'ScaleY',
+ 'ScaleZ',
+ 'Seascape',
+ 'Shift',
+ 'Sin',
+ 'Slant',
+ 'Spline',
+ 'StickIntervalMarker',
+ 'Straight',
+ 'Symbol',
+ 'Tan',
+ 'TeXify',
+ 'Ticks',
+ 'Ticks3',
+ 'TildeIntervalMarker',
+ 'TimesRoman',
+ 'Top',
+ 'TrueMargin',
+ 'UnFill',
+ 'UpsideDown',
+ 'Wheel',
+ 'X',
+ 'XEquals',
+ 'XOR',
+ 'XY',
+ 'XYEquals',
+ 'XYZero',
+ 'XYgrid',
+ 'XZEquals',
+ 'XZZero',
+ 'XZero',
+ 'XZgrid',
+ 'Y',
+ 'YEquals',
+ 'YXgrid',
+ 'YZ',
+ 'YZEquals',
+ 'YZZero',
+ 'YZero',
+ 'YZgrid',
+ 'Z',
+ 'ZX',
+ 'ZXgrid',
+ 'ZYgrid',
+ 'ZapfChancery',
+ 'ZapfDingbats',
+ '_cputime',
+ '_draw',
+ '_eval',
+ '_image',
+ '_labelpath',
+ '_projection',
+ '_strokepath',
+ '_texpath',
+ 'aCos',
+ 'aSin',
+ 'aTan',
+ 'abort',
+ 'abs',
+ 'accel',
+ 'acos',
+ 'acosh',
+ 'acot',
+ 'acsc',
+ 'add',
+ 'addArrow',
+ 'addMargins',
+ 'addSaveFunction',
+ 'addnode',
+ 'addnodes',
+ 'addpenarc',
+ 'addpenline',
+ 'addseg',
+ 'adjust',
+ 'alias',
+ 'align',
+ 'all',
+ 'altitude',
+ 'angabscissa',
+ 'angle',
+ 'angpoint',
+ 'animate',
+ 'annotate',
+ 'anticomplementary',
+ 'antipedal',
+ 'apply',
+ 'approximate',
+ 'arc',
+ 'arcarrowsize',
+ 'arccircle',
+ 'arcdir',
+ 'arcfromcenter',
+ 'arcfromfocus',
+ 'arclength',
+ 'arcnodesnumber',
+ 'arcpoint',
+ 'arcsubtended',
+ 'arcsubtendedcenter',
+ 'arctime',
+ 'arctopath',
+ 'array',
+ 'arrow',
+ 'arrow2',
+ 'arrowbase',
+ 'arrowbasepoints',
+ 'arrowsize',
+ 'asec',
+ 'asin',
+ 'asinh',
+ 'ask',
+ 'assert',
+ 'asy',
+ 'asycode',
+ 'asydir',
+ 'asyfigure',
+ 'asyfilecode',
+ 'asyinclude',
+ 'asywrite',
+ 'atan',
+ 'atan2',
+ 'atanh',
+ 'atbreakpoint',
+ 'atexit',
+ 'atime',
+ 'attach',
+ 'attract',
+ 'atupdate',
+ 'autoformat',
+ 'autoscale',
+ 'autoscale3',
+ 'axes',
+ 'axes3',
+ 'axialshade',
+ 'axis',
+ 'axiscoverage',
+ 'azimuth',
+ 'babel',
+ 'background',
+ 'bangles',
+ 'bar',
+ 'barmarksize',
+ 'barsize',
+ 'basealign',
+ 'baseline',
+ 'bbox',
+ 'beep',
+ 'begin',
+ 'beginclip',
+ 'begingroup',
+ 'beginpoint',
+ 'between',
+ 'bevel',
+ 'bezier',
+ 'bezierP',
+ 'bezierPP',
+ 'bezierPPP',
+ 'bezulate',
+ 'bibliography',
+ 'bibliographystyle',
+ 'binarytree',
+ 'binarytreeNode',
+ 'binomial',
+ 'binput',
+ 'bins',
+ 'bisector',
+ 'bisectorpoint',
+ 'blend',
+ 'boutput',
+ 'box',
+ 'bqe',
+ 'breakpoint',
+ 'breakpoints',
+ 'brick',
+ 'buildRestoreDefaults',
+ 'buildRestoreThunk',
+ 'buildcycle',
+ 'bulletcolor',
+ 'canonical',
+ 'canonicalcartesiansystem',
+ 'cartesiansystem',
+ 'case1',
+ 'case2',
+ 'case3',
+ 'cbrt',
+ 'cd',
+ 'ceil',
+ 'center',
+ 'centerToFocus',
+ 'centroid',
+ 'cevian',
+ 'change2',
+ 'changecoordsys',
+ 'checkSegment',
+ 'checkconditionlength',
+ 'checker',
+ 'checklengths',
+ 'checkposition',
+ 'checktriangle',
+ 'choose',
+ 'circle',
+ 'circlebarframe',
+ 'circlemarkradius',
+ 'circlenodesnumber',
+ 'circumcenter',
+ 'circumcircle',
+ 'clamped',
+ 'clear',
+ 'clip',
+ 'clipdraw',
+ 'close',
+ 'cmyk',
+ 'code',
+ 'colatitude',
+ 'collect',
+ 'collinear',
+ 'color',
+ 'colorless',
+ 'colors',
+ 'colorspace',
+ 'comma',
+ 'compassmark',
+ 'complement',
+ 'complementary',
+ 'concat',
+ 'concurrent',
+ 'cone',
+ 'conic',
+ 'conicnodesnumber',
+ 'conictype',
+ 'conj',
+ 'connect',
+ 'containmentTree',
+ 'contains',
+ 'contour',
+ 'contour3',
+ 'controlSpecifier',
+ 'convert',
+ 'coordinates',
+ 'coordsys',
+ 'copy',
+ 'cos',
+ 'cosh',
+ 'cot',
+ 'countIntersections',
+ 'cputime',
+ 'crop',
+ 'cropcode',
+ 'cross',
+ 'crossframe',
+ 'crosshatch',
+ 'crossmarksize',
+ 'csc',
+ 'cubicroots',
+ 'curabscissa',
+ 'curlSpecifier',
+ 'curpoint',
+ 'currentarrow',
+ 'currentexitfunction',
+ 'currentmomarrow',
+ 'currentpolarconicroutine',
+ 'curve',
+ 'cut',
+ 'cutafter',
+ 'cutbefore',
+ 'cyclic',
+ 'cylinder',
+ 'debugger',
+ 'deconstruct',
+ 'defaultdir',
+ 'defaultformat',
+ 'defaultpen',
+ 'defined',
+ 'degenerate',
+ 'degrees',
+ 'delete',
+ 'deletepreamble',
+ 'determinant',
+ 'diagonal',
+ 'diamond',
+ 'diffdiv',
+ 'dir',
+ 'dirSpecifier',
+ 'dirtime',
+ 'display',
+ 'distance',
+ 'divisors',
+ 'do_overpaint',
+ 'dot',
+ 'dotframe',
+ 'dotsize',
+ 'downcase',
+ 'draw',
+ 'drawAll',
+ 'drawDoubleLine',
+ 'drawFermion',
+ 'drawGhost',
+ 'drawGluon',
+ 'drawMomArrow',
+ 'drawPhoton',
+ 'drawScalar',
+ 'drawVertex',
+ 'drawVertexBox',
+ 'drawVertexBoxO',
+ 'drawVertexBoxX',
+ 'drawVertexO',
+ 'drawVertexOX',
+ 'drawVertexTriangle',
+ 'drawVertexTriangleO',
+ 'drawVertexX',
+ 'drawarrow',
+ 'drawarrow2',
+ 'drawline',
+ 'drawtick',
+ 'duplicate',
+ 'elle',
+ 'ellipse',
+ 'ellipsenodesnumber',
+ 'embed',
+ 'embed3',
+ 'empty',
+ 'enclose',
+ 'end',
+ 'endScript',
+ 'endclip',
+ 'endgroup',
+ 'endl',
+ 'endpoint',
+ 'endpoints',
+ 'eof',
+ 'eol',
+ 'equation',
+ 'equations',
+ 'erase',
+ 'erasestep',
+ 'erf',
+ 'erfc',
+ 'error',
+ 'errorbar',
+ 'errorbars',
+ 'eval',
+ 'excenter',
+ 'excircle',
+ 'exit',
+ 'exitXasyMode',
+ 'exitfunction',
+ 'exp',
+ 'expfactors',
+ 'expi',
+ 'expm1',
+ 'exradius',
+ 'extend',
+ 'extension',
+ 'extouch',
+ 'fabs',
+ 'factorial',
+ 'fermat',
+ 'fft',
+ 'fhorner',
+ 'figure',
+ 'file',
+ 'filecode',
+ 'fill',
+ 'filldraw',
+ 'filloutside',
+ 'fillrule',
+ 'filltype',
+ 'find',
+ 'finite',
+ 'finiteDifferenceJacobian',
+ 'firstcut',
+ 'firstframe',
+ 'fit',
+ 'fit2',
+ 'fixedscaling',
+ 'floor',
+ 'flush',
+ 'fmdefaults',
+ 'fmod',
+ 'focusToCenter',
+ 'font',
+ 'fontcommand',
+ 'fontsize',
+ 'foot',
+ 'format',
+ 'frac',
+ 'frequency',
+ 'fromCenter',
+ 'fromFocus',
+ 'fspline',
+ 'functionshade',
+ 'gamma',
+ 'generate_random_backtrace',
+ 'generateticks',
+ 'gergonne',
+ 'getc',
+ 'getint',
+ 'getpair',
+ 'getreal',
+ 'getstring',
+ 'gettriple',
+ 'gluon',
+ 'gouraudshade',
+ 'graph',
+ 'graphic',
+ 'gray',
+ 'grestore',
+ 'grid',
+ 'grid3',
+ 'gsave',
+ 'halfbox',
+ 'hatch',
+ 'hdiffdiv',
+ 'hermite',
+ 'hex',
+ 'histogram',
+ 'history',
+ 'hline',
+ 'hprojection',
+ 'hsv',
+ 'hyperbola',
+ 'hyperbolanodesnumber',
+ 'hyperlink',
+ 'hypot',
+ 'identity',
+ 'image',
+ 'incenter',
+ 'incentral',
+ 'incircle',
+ 'increasing',
+ 'incrementposition',
+ 'indexedTransform',
+ 'indexedfigure',
+ 'initXasyMode',
+ 'initdefaults',
+ 'input',
+ 'inradius',
+ 'insert',
+ 'inside',
+ 'integrate',
+ 'interactive',
+ 'interior',
+ 'interp',
+ 'interpolate',
+ 'intersect',
+ 'intersection',
+ 'intersectionpoint',
+ 'intersectionpoints',
+ 'intersections',
+ 'intouch',
+ 'inverse',
+ 'inversion',
+ 'invisible',
+ 'is3D',
+ 'isDuplicate',
+ 'isogonal',
+ 'isogonalconjugate',
+ 'isotomic',
+ 'isotomicconjugate',
+ 'isparabola',
+ 'italic',
+ 'item',
+ 'key',
+ 'kurtosis',
+ 'kurtosisexcess',
+ 'label',
+ 'labelaxis',
+ 'labelmargin',
+ 'labelpath',
+ 'labels',
+ 'labeltick',
+ 'labelx',
+ 'labelx3',
+ 'labely',
+ 'labely3',
+ 'labelz',
+ 'labelz3',
+ 'lastcut',
+ 'latex',
+ 'latitude',
+ 'latticeshade',
+ 'layer',
+ 'layout',
+ 'ldexp',
+ 'leastsquares',
+ 'legend',
+ 'legenditem',
+ 'length',
+ 'lift',
+ 'light',
+ 'limits',
+ 'line',
+ 'linear',
+ 'linecap',
+ 'lineinversion',
+ 'linejoin',
+ 'linemargin',
+ 'lineskip',
+ 'linetype',
+ 'linewidth',
+ 'link',
+ 'list',
+ 'lm_enorm',
+ 'lm_evaluate_default',
+ 'lm_lmdif',
+ 'lm_lmpar',
+ 'lm_minimize',
+ 'lm_print_default',
+ 'lm_print_quiet',
+ 'lm_qrfac',
+ 'lm_qrsolv',
+ 'locale',
+ 'locate',
+ 'locatefile',
+ 'location',
+ 'log',
+ 'log10',
+ 'log1p',
+ 'logaxiscoverage',
+ 'longitude',
+ 'lookup',
+ 'magnetize',
+ 'makeNode',
+ 'makedraw',
+ 'makepen',
+ 'map',
+ 'margin',
+ 'markangle',
+ 'markangleradius',
+ 'markanglespace',
+ 'markarc',
+ 'marker',
+ 'markinterval',
+ 'marknodes',
+ 'markrightangle',
+ 'markuniform',
+ 'mass',
+ 'masscenter',
+ 'massformat',
+ 'math',
+ 'max',
+ 'max3',
+ 'maxbezier',
+ 'maxbound',
+ 'maxcoords',
+ 'maxlength',
+ 'maxratio',
+ 'maxtimes',
+ 'mean',
+ 'medial',
+ 'median',
+ 'midpoint',
+ 'min',
+ 'min3',
+ 'minbezier',
+ 'minbound',
+ 'minipage',
+ 'minratio',
+ 'mintimes',
+ 'miterlimit',
+ 'momArrowPath',
+ 'momarrowsize',
+ 'monotonic',
+ 'multifigure',
+ 'nativeformat',
+ 'natural',
+ 'needshipout',
+ 'newl',
+ 'newpage',
+ 'newslide',
+ 'newton',
+ 'newtree',
+ 'nextframe',
+ 'nextnormal',
+ 'nextpage',
+ 'nib',
+ 'nodabscissa',
+ 'none',
+ 'norm',
+ 'normalvideo',
+ 'notaknot',
+ 'nowarn',
+ 'numberpage',
+ 'nurb',
+ 'object',
+ 'offset',
+ 'onpath',
+ 'opacity',
+ 'opposite',
+ 'orientation',
+ 'orig_circlenodesnumber',
+ 'orig_circlenodesnumber1',
+ 'orig_draw',
+ 'orig_ellipsenodesnumber',
+ 'orig_ellipsenodesnumber1',
+ 'orig_hyperbolanodesnumber',
+ 'orig_parabolanodesnumber',
+ 'origin',
+ 'orthic',
+ 'orthocentercenter',
+ 'outformat',
+ 'outline',
+ 'outprefix',
+ 'output',
+ 'overloadedMessage',
+ 'overwrite',
+ 'pack',
+ 'pad',
+ 'pairs',
+ 'palette',
+ 'parabola',
+ 'parabolanodesnumber',
+ 'parallel',
+ 'partialsum',
+ 'path',
+ 'path3',
+ 'pattern',
+ 'pause',
+ 'pdf',
+ 'pedal',
+ 'periodic',
+ 'perp',
+ 'perpendicular',
+ 'perpendicularmark',
+ 'phantom',
+ 'phi1',
+ 'phi2',
+ 'phi3',
+ 'photon',
+ 'piecewisestraight',
+ 'point',
+ 'polar',
+ 'polarconicroutine',
+ 'polargraph',
+ 'polygon',
+ 'postcontrol',
+ 'postscript',
+ 'pow10',
+ 'ppoint',
+ 'prc',
+ 'prc0',
+ 'precision',
+ 'precontrol',
+ 'prepend',
+ 'print_random_addresses',
+ 'project',
+ 'projection',
+ 'purge',
+ 'pwhermite',
+ 'quadrant',
+ 'quadraticroots',
+ 'quantize',
+ 'quarticroots',
+ 'quotient',
+ 'radialshade',
+ 'radians',
+ 'radicalcenter',
+ 'radicalline',
+ 'radius',
+ 'rand',
+ 'randompath',
+ 'rd',
+ 'readline',
+ 'realmult',
+ 'realquarticroots',
+ 'rectangle',
+ 'rectangular',
+ 'rectify',
+ 'reflect',
+ 'relabscissa',
+ 'relative',
+ 'relativedistance',
+ 'reldir',
+ 'relpoint',
+ 'reltime',
+ 'remainder',
+ 'remark',
+ 'removeDuplicates',
+ 'rename',
+ 'replace',
+ 'report',
+ 'resetdefaultpen',
+ 'restore',
+ 'restoredefaults',
+ 'reverse',
+ 'reversevideo',
+ 'rf',
+ 'rfind',
+ 'rgb',
+ 'rgba',
+ 'rgbint',
+ 'rms',
+ 'rotate',
+ 'rotateO',
+ 'rotation',
+ 'round',
+ 'roundbox',
+ 'roundedpath',
+ 'roundrectangle',
+ 'samecoordsys',
+ 'sameside',
+ 'sample',
+ 'save',
+ 'savedefaults',
+ 'saveline',
+ 'scale',
+ 'scale3',
+ 'scaleO',
+ 'scaleT',
+ 'scaleless',
+ 'scientific',
+ 'search',
+ 'searchtree',
+ 'sec',
+ 'secondaryX',
+ 'secondaryY',
+ 'seconds',
+ 'section',
+ 'sector',
+ 'seek',
+ 'seekeof',
+ 'segment',
+ 'sequence',
+ 'setpens',
+ 'sgn',
+ 'sgnd',
+ 'sharpangle',
+ 'sharpdegrees',
+ 'shift',
+ 'shiftless',
+ 'shipout',
+ 'shipout3',
+ 'show',
+ 'side',
+ 'simeq',
+ 'simpson',
+ 'sin',
+ 'single',
+ 'sinh',
+ 'size',
+ 'size3',
+ 'skewness',
+ 'skip',
+ 'slant',
+ 'sleep',
+ 'slope',
+ 'slopefield',
+ 'solve',
+ 'solveBVP',
+ 'sort',
+ 'sourceline',
+ 'sphere',
+ 'split',
+ 'sqrt',
+ 'square',
+ 'srand',
+ 'standardizecoordsys',
+ 'startScript',
+ 'startTrembling',
+ 'stdev',
+ 'step',
+ 'stickframe',
+ 'stickmarksize',
+ 'stickmarkspace',
+ 'stop',
+ 'straight',
+ 'straightness',
+ 'string',
+ 'stripdirectory',
+ 'stripextension',
+ 'stripfile',
+ 'strokepath',
+ 'subdivide',
+ 'subitem',
+ 'subpath',
+ 'substr',
+ 'sum',
+ 'surface',
+ 'symmedial',
+ 'symmedian',
+ 'system',
+ 'tab',
+ 'tableau',
+ 'tan',
+ 'tangent',
+ 'tangential',
+ 'tangents',
+ 'tanh',
+ 'tell',
+ 'tensionSpecifier',
+ 'tensorshade',
+ 'tex',
+ 'texcolor',
+ 'texify',
+ 'texpath',
+ 'texpreamble',
+ 'texreset',
+ 'texshipout',
+ 'texsize',
+ 'textpath',
+ 'thick',
+ 'thin',
+ 'tick',
+ 'tickMax',
+ 'tickMax3',
+ 'tickMin',
+ 'tickMin3',
+ 'ticklabelshift',
+ 'ticklocate',
+ 'tildeframe',
+ 'tildemarksize',
+ 'tile',
+ 'tiling',
+ 'time',
+ 'times',
+ 'title',
+ 'titlepage',
+ 'topbox',
+ 'transform',
+ 'transformation',
+ 'transpose',
+ 'tremble',
+ 'trembleFuzz',
+ 'tremble_circlenodesnumber',
+ 'tremble_circlenodesnumber1',
+ 'tremble_draw',
+ 'tremble_ellipsenodesnumber',
+ 'tremble_ellipsenodesnumber1',
+ 'tremble_hyperbolanodesnumber',
+ 'tremble_marknodes',
+ 'tremble_markuniform',
+ 'tremble_parabolanodesnumber',
+ 'triangle',
+ 'triangleAbc',
+ 'triangleabc',
+ 'triangulate',
+ 'tricoef',
+ 'tridiagonal',
+ 'trilinear',
+ 'trim',
+ 'trueMagnetize',
+ 'truepoint',
+ 'tube',
+ 'uncycle',
+ 'unfill',
+ 'uniform',
+ 'unit',
+ 'unitrand',
+ 'unitsize',
+ 'unityroot',
+ 'unstraighten',
+ 'upcase',
+ 'updatefunction',
+ 'uperiodic',
+ 'upscale',
+ 'uptodate',
+ 'usepackage',
+ 'usersetting',
+ 'usetypescript',
+ 'usleep',
+ 'value',
+ 'variance',
+ 'variancebiased',
+ 'vbox',
+ 'vector',
+ 'vectorfield',
+ 'verbatim',
+ 'view',
+ 'vline',
+ 'vperiodic',
+ 'vprojection',
+ 'warn',
+ 'warning',
+ 'windingnumber',
+ 'write',
+ 'xaxis',
+ 'xaxis3',
+ 'xaxis3At',
+ 'xaxisAt',
+ 'xequals',
+ 'xinput',
+ 'xlimits',
+ 'xoutput',
+ 'xpart',
+ 'xscale',
+ 'xscaleO',
+ 'xtick',
+ 'xtick3',
+ 'xtrans',
+ 'yaxis',
+ 'yaxis3',
+ 'yaxis3At',
+ 'yaxisAt',
+ 'yequals',
+ 'ylimits',
+ 'ypart',
+ 'yscale',
+ 'yscaleO',
+ 'ytick',
+ 'ytick3',
+ 'ytrans',
+ 'zaxis3',
+ 'zaxis3At',
+ 'zero',
+ 'zero3',
+ 'zlimits',
+ 'zpart',
+ 'ztick',
+ 'ztick3',
+ 'ztrans'
}
-
+
ASYVARNAME = {
- 'AliceBlue',
- 'Align',
- 'Allow',
- 'AntiqueWhite',
- 'Apricot',
- 'Aqua',
- 'Aquamarine',
- 'Aspect',
- 'Azure',
- 'BeginPoint',
- 'Beige',
- 'Bisque',
- 'Bittersweet',
- 'Black',
- 'BlanchedAlmond',
- 'Blue',
- 'BlueGreen',
- 'BlueViolet',
- 'Both',
- 'Break',
- 'BrickRed',
- 'Brown',
- 'BurlyWood',
- 'BurntOrange',
- 'CCW',
- 'CW',
- 'CadetBlue',
- 'CarnationPink',
- 'Center',
- 'Centered',
- 'Cerulean',
- 'Chartreuse',
- 'Chocolate',
- 'Coeff',
- 'Coral',
- 'CornflowerBlue',
- 'Cornsilk',
- 'Crimson',
- 'Crop',
- 'Cyan',
- 'Dandelion',
- 'DarkBlue',
- 'DarkCyan',
- 'DarkGoldenrod',
- 'DarkGray',
- 'DarkGreen',
- 'DarkKhaki',
- 'DarkMagenta',
- 'DarkOliveGreen',
- 'DarkOrange',
- 'DarkOrchid',
- 'DarkRed',
- 'DarkSalmon',
- 'DarkSeaGreen',
- 'DarkSlateBlue',
- 'DarkSlateGray',
- 'DarkTurquoise',
- 'DarkViolet',
- 'DeepPink',
- 'DeepSkyBlue',
- 'DefaultHead',
- 'DimGray',
- 'DodgerBlue',
- 'Dotted',
- 'Draw',
- 'E',
- 'ENE',
- 'EPS',
- 'ESE',
- 'E_Euler',
- 'E_PC',
- 'E_RK2',
- 'E_RK3BS',
- 'Emerald',
- 'EndPoint',
- 'Euler',
- 'Fill',
- 'FillDraw',
- 'FireBrick',
- 'FloralWhite',
- 'ForestGreen',
- 'Fuchsia',
- 'Gainsboro',
- 'GhostWhite',
- 'Gold',
- 'Goldenrod',
- 'Gray',
- 'Green',
- 'GreenYellow',
- 'Honeydew',
- 'HookHead',
- 'Horizontal',
- 'HotPink',
- 'I',
- 'IgnoreAspect',
- 'IndianRed',
- 'Indigo',
- 'Ivory',
- 'JOIN_IN',
- 'JOIN_OUT',
- 'JungleGreen',
- 'Khaki',
- 'LM_DWARF',
- 'LM_MACHEP',
- 'LM_SQRT_DWARF',
- 'LM_SQRT_GIANT',
- 'LM_USERTOL',
- 'Label',
- 'Lavender',
- 'LavenderBlush',
- 'LawnGreen',
- 'LeftJustified',
- 'LeftSide',
- 'LemonChiffon',
- 'LightBlue',
- 'LightCoral',
- 'LightCyan',
- 'LightGoldenrodYellow',
- 'LightGreen',
- 'LightGrey',
- 'LightPink',
- 'LightSalmon',
- 'LightSeaGreen',
- 'LightSkyBlue',
- 'LightSlateGray',
- 'LightSteelBlue',
- 'LightYellow',
- 'Lime',
- 'LimeGreen',
- 'Linear',
- 'Linen',
- 'Log',
- 'Logarithmic',
- 'Magenta',
- 'Mahogany',
- 'Mark',
- 'MarkFill',
- 'Maroon',
- 'Max',
- 'MediumAquamarine',
- 'MediumBlue',
- 'MediumOrchid',
- 'MediumPurple',
- 'MediumSeaGreen',
- 'MediumSlateBlue',
- 'MediumSpringGreen',
- 'MediumTurquoise',
- 'MediumVioletRed',
- 'Melon',
- 'MidPoint',
- 'MidnightBlue',
- 'Min',
- 'MintCream',
- 'MistyRose',
- 'Moccasin',
- 'Move',
- 'MoveQuiet',
- 'Mulberry',
- 'N',
- 'NE',
- 'NNE',
- 'NNW',
- 'NW',
- 'NavajoWhite',
- 'Navy',
- 'NavyBlue',
- 'NoAlign',
- 'NoCrop',
- 'NoFill',
- 'NoSide',
- 'OldLace',
- 'Olive',
- 'OliveDrab',
- 'OliveGreen',
- 'Orange',
- 'OrangeRed',
- 'Orchid',
- 'Ox',
- 'Oy',
- 'PC',
- 'PaleGoldenrod',
- 'PaleGreen',
- 'PaleTurquoise',
- 'PaleVioletRed',
- 'PapayaWhip',
- 'Peach',
- 'PeachPuff',
- 'Periwinkle',
- 'Peru',
- 'PineGreen',
- 'Pink',
- 'Plum',
- 'PowderBlue',
- 'ProcessBlue',
- 'Purple',
- 'RK2',
- 'RK3',
- 'RK3BS',
- 'RK4',
- 'RK5',
- 'RK5DP',
- 'RK5F',
- 'RawSienna',
- 'Red',
- 'RedOrange',
- 'RedViolet',
- 'Rhodamine',
- 'RightJustified',
- 'RightSide',
- 'RosyBrown',
- 'RoyalBlue',
- 'RoyalPurple',
- 'RubineRed',
- 'S',
- 'SE',
- 'SSE',
- 'SSW',
- 'SW',
- 'SaddleBrown',
- 'Salmon',
- 'SandyBrown',
- 'SeaGreen',
- 'Seashell',
- 'Sepia',
- 'Sienna',
- 'Silver',
- 'SimpleHead',
- 'SkyBlue',
- 'SlateBlue',
- 'SlateGray',
- 'Snow',
- 'SpringGreen',
- 'SteelBlue',
- 'Suppress',
- 'SuppressQuiet',
- 'Tan',
- 'TeXHead',
- 'Teal',
- 'TealBlue',
- 'Thistle',
- 'Ticksize',
- 'Tomato',
- 'Turquoise',
- 'UnFill',
- 'VERSION',
- 'Value',
- 'Vertical',
- 'Violet',
- 'VioletRed',
- 'W',
- 'WNW',
- 'WSW',
- 'Wheat',
- 'White',
- 'WhiteSmoke',
- 'WildStrawberry',
- 'XYAlign',
- 'YAlign',
- 'Yellow',
- 'YellowGreen',
- 'YellowOrange',
- 'addpenarc',
- 'addpenline',
- 'align',
- 'allowstepping',
- 'angularsystem',
- 'animationdelay',
- 'appendsuffix',
- 'arcarrowangle',
- 'arcarrowfactor',
- 'arrow2sizelimit',
- 'arrowangle',
- 'arrowbarb',
- 'arrowdir',
- 'arrowfactor',
- 'arrowhookfactor',
- 'arrowlength',
- 'arrowsizelimit',
- 'arrowtexfactor',
- 'authorpen',
- 'axis',
- 'axiscoverage',
- 'axislabelfactor',
- 'background',
- 'backgroundcolor',
- 'backgroundpen',
- 'barfactor',
- 'barmarksizefactor',
- 'basealign',
- 'baselinetemplate',
- 'beveljoin',
- 'bigvertexpen',
- 'bigvertexsize',
- 'black',
- 'blue',
- 'bm',
- 'bottom',
- 'bp',
- 'brown',
- 'bullet',
- 'byfoci',
- 'byvertices',
- 'camerafactor',
- 'chartreuse',
- 'circlemarkradiusfactor',
- 'circlenodesnumberfactor',
- 'circleprecision',
- 'circlescale',
- 'cm',
- 'codefile',
- 'codepen',
- 'codeskip',
- 'colorPen',
- 'coloredNodes',
- 'coloredSegments',
- 'conditionlength',
- 'conicnodesfactor',
- 'count',
- 'cputimeformat',
- 'crossmarksizefactor',
- 'currentcoordsys',
- 'currentlight',
- 'currentpatterns',
- 'currentpen',
- 'currentpicture',
- 'currentposition',
- 'currentprojection',
- 'curvilinearsystem',
- 'cuttings',
- 'cyan',
- 'darkblue',
- 'darkbrown',
- 'darkcyan',
- 'darkgray',
- 'darkgreen',
- 'darkgrey',
- 'darkmagenta',
- 'darkolive',
- 'darkred',
- 'dashdotted',
- 'dashed',
- 'datepen',
- 'dateskip',
- 'debuggerlines',
- 'debugging',
- 'deepblue',
- 'deepcyan',
- 'deepgray',
- 'deepgreen',
- 'deepgrey',
- 'deepmagenta',
- 'deepred',
- 'default',
- 'defaultControl',
- 'defaultS',
- 'defaultbackpen',
- 'defaultcoordsys',
- 'defaultfilename',
- 'defaultformat',
- 'defaultmassformat',
- 'defaultpen',
- 'diagnostics',
- 'differentlengths',
- 'dot',
- 'dotfactor',
- 'dotframe',
- 'dotted',
- 'doublelinepen',
- 'doublelinespacing',
- 'down',
- 'duplicateFuzz',
- 'ellipsenodesnumberfactor',
- 'eps',
- 'epsgeo',
- 'epsilon',
- 'evenodd',
- 'extendcap',
- 'fermionpen',
- 'figureborder',
- 'figuremattpen',
- 'firstnode',
- 'firststep',
- 'foregroundcolor',
- 'fuchsia',
- 'fuzz',
- 'gapfactor',
- 'ghostpen',
- 'gluonamplitude',
- 'gluonpen',
- 'gluonratio',
- 'gray',
- 'green',
- 'grey',
- 'hatchepsilon',
- 'havepagenumber',
- 'heavyblue',
- 'heavycyan',
- 'heavygray',
- 'heavygreen',
- 'heavygrey',
- 'heavymagenta',
- 'heavyred',
- 'hline',
- 'hwratio',
- 'hyperbolanodesnumberfactor',
- 'identity4',
- 'ignore',
- 'inXasyMode',
- 'inch',
- 'inches',
- 'includegraphicscommand',
- 'inf',
- 'infinity',
- 'institutionpen',
- 'intMax',
- 'intMin',
- 'invert',
- 'invisible',
- 'itempen',
- 'itemskip',
- 'itemstep',
- 'labelmargin',
- 'landscape',
- 'lastnode',
- 'left',
- 'legendhskip',
- 'legendlinelength',
- 'legendmargin',
- 'legendmarkersize',
- 'legendmaxrelativewidth',
- 'legendvskip',
- 'lightblue',
- 'lightcyan',
- 'lightgray',
- 'lightgreen',
- 'lightgrey',
- 'lightmagenta',
- 'lightolive',
- 'lightred',
- 'lightyellow',
- 'linemargin',
- 'lm_infmsg',
- 'lm_shortmsg',
- 'longdashdotted',
- 'longdashed',
- 'magenta',
- 'magneticPoints',
- 'magneticRadius',
- 'mantissaBits',
- 'markangleradius',
- 'markangleradiusfactor',
- 'markanglespace',
- 'markanglespacefactor',
- 'mediumblue',
- 'mediumcyan',
- 'mediumgray',
- 'mediumgreen',
- 'mediumgrey',
- 'mediummagenta',
- 'mediumred',
- 'mediumyellow',
- 'middle',
- 'minDistDefault',
- 'minblockheight',
- 'minblockwidth',
- 'mincirclediameter',
- 'minipagemargin',
- 'minipagewidth',
- 'minvertexangle',
- 'miterjoin',
- 'mm',
- 'momarrowfactor',
- 'momarrowlength',
- 'momarrowmargin',
- 'momarrowoffset',
- 'momarrowpen',
- 'monoPen',
- 'morepoints',
- 'nCircle',
- 'newbulletcolor',
- 'ngraph',
- 'nil',
- 'nmesh',
- 'nobasealign',
- 'nodeMarginDefault',
- 'nodesystem',
- 'nomarker',
- 'nopoint',
- 'noprimary',
- 'nullpath',
- 'nullpen',
- 'numarray',
- 'ocgindex',
- 'oldbulletcolor',
- 'olive',
- 'orange',
- 'origin',
- 'overpaint',
- 'page',
- 'pageheight',
- 'pagemargin',
- 'pagenumberalign',
- 'pagenumberpen',
- 'pagenumberposition',
- 'pagewidth',
- 'paleblue',
- 'palecyan',
- 'palegray',
- 'palegreen',
- 'palegrey',
- 'palemagenta',
- 'palered',
- 'paleyellow',
- 'parabolanodesnumberfactor',
- 'perpfactor',
- 'phi',
- 'photonamplitude',
- 'photonpen',
- 'photonratio',
- 'pi',
- 'pink',
- 'plain',
- 'plus',
- 'preamblenodes',
- 'pt',
- 'purple',
- 'r3',
- 'r4a',
- 'r4b',
- 'randMax',
- 'realDigits',
- 'realEpsilon',
- 'realMax',
- 'realMin',
- 'red',
- 'relativesystem',
- 'reverse',
- 'right',
- 'roundcap',
- 'roundjoin',
- 'royalblue',
- 'salmon',
- 'saveFunctions',
- 'scalarpen',
- 'sequencereal',
- 'settings',
- 'shipped',
- 'signedtrailingzero',
- 'solid',
- 'springgreen',
- 'sqrtEpsilon',
- 'squarecap',
- 'squarepen',
- 'startposition',
- 'stdin',
- 'stdout',
- 'stepfactor',
- 'stepfraction',
- 'steppagenumberpen',
- 'stepping',
- 'stickframe',
- 'stickmarksizefactor',
- 'stickmarkspacefactor',
- 'textpen',
- 'ticksize',
- 'tildeframe',
- 'tildemarksizefactor',
- 'tinv',
- 'titlealign',
- 'titlepagepen',
- 'titlepageposition',
- 'titlepen',
- 'titleskip',
- 'top',
- 'trailingzero',
- 'treeLevelStep',
- 'treeMinNodeWidth',
- 'treeNodeStep',
- 'trembleAngle',
- 'trembleFrequency',
- 'trembleRandom',
- 'tremblingMode',
- 'undefined',
- 'unitcircle',
- 'unitsquare',
- 'up',
- 'urlpen',
- 'urlskip',
- 'version',
- 'vertexpen',
- 'vertexsize',
- 'viewportmargin',
- 'viewportsize',
- 'vline',
- 'white',
- 'wye',
- 'xformStack',
- 'yellow',
- 'ylabelwidth',
- 'zerotickfuzz',
- 'zerowinding'
+ 'AliceBlue',
+ 'Align',
+ 'Allow',
+ 'AntiqueWhite',
+ 'Apricot',
+ 'Aqua',
+ 'Aquamarine',
+ 'Aspect',
+ 'Azure',
+ 'BeginPoint',
+ 'Beige',
+ 'Bisque',
+ 'Bittersweet',
+ 'Black',
+ 'BlanchedAlmond',
+ 'Blue',
+ 'BlueGreen',
+ 'BlueViolet',
+ 'Both',
+ 'Break',
+ 'BrickRed',
+ 'Brown',
+ 'BurlyWood',
+ 'BurntOrange',
+ 'CCW',
+ 'CW',
+ 'CadetBlue',
+ 'CarnationPink',
+ 'Center',
+ 'Centered',
+ 'Cerulean',
+ 'Chartreuse',
+ 'Chocolate',
+ 'Coeff',
+ 'Coral',
+ 'CornflowerBlue',
+ 'Cornsilk',
+ 'Crimson',
+ 'Crop',
+ 'Cyan',
+ 'Dandelion',
+ 'DarkBlue',
+ 'DarkCyan',
+ 'DarkGoldenrod',
+ 'DarkGray',
+ 'DarkGreen',
+ 'DarkKhaki',
+ 'DarkMagenta',
+ 'DarkOliveGreen',
+ 'DarkOrange',
+ 'DarkOrchid',
+ 'DarkRed',
+ 'DarkSalmon',
+ 'DarkSeaGreen',
+ 'DarkSlateBlue',
+ 'DarkSlateGray',
+ 'DarkTurquoise',
+ 'DarkViolet',
+ 'DeepPink',
+ 'DeepSkyBlue',
+ 'DefaultHead',
+ 'DimGray',
+ 'DodgerBlue',
+ 'Dotted',
+ 'Draw',
+ 'E',
+ 'ENE',
+ 'EPS',
+ 'ESE',
+ 'E_Euler',
+ 'E_PC',
+ 'E_RK2',
+ 'E_RK3BS',
+ 'Emerald',
+ 'EndPoint',
+ 'Euler',
+ 'Fill',
+ 'FillDraw',
+ 'FireBrick',
+ 'FloralWhite',
+ 'ForestGreen',
+ 'Fuchsia',
+ 'Gainsboro',
+ 'GhostWhite',
+ 'Gold',
+ 'Goldenrod',
+ 'Gray',
+ 'Green',
+ 'GreenYellow',
+ 'Honeydew',
+ 'HookHead',
+ 'Horizontal',
+ 'HotPink',
+ 'I',
+ 'IgnoreAspect',
+ 'IndianRed',
+ 'Indigo',
+ 'Ivory',
+ 'JOIN_IN',
+ 'JOIN_OUT',
+ 'JungleGreen',
+ 'Khaki',
+ 'LM_DWARF',
+ 'LM_MACHEP',
+ 'LM_SQRT_DWARF',
+ 'LM_SQRT_GIANT',
+ 'LM_USERTOL',
+ 'Label',
+ 'Lavender',
+ 'LavenderBlush',
+ 'LawnGreen',
+ 'LeftJustified',
+ 'LeftSide',
+ 'LemonChiffon',
+ 'LightBlue',
+ 'LightCoral',
+ 'LightCyan',
+ 'LightGoldenrodYellow',
+ 'LightGreen',
+ 'LightGrey',
+ 'LightPink',
+ 'LightSalmon',
+ 'LightSeaGreen',
+ 'LightSkyBlue',
+ 'LightSlateGray',
+ 'LightSteelBlue',
+ 'LightYellow',
+ 'Lime',
+ 'LimeGreen',
+ 'Linear',
+ 'Linen',
+ 'Log',
+ 'Logarithmic',
+ 'Magenta',
+ 'Mahogany',
+ 'Mark',
+ 'MarkFill',
+ 'Maroon',
+ 'Max',
+ 'MediumAquamarine',
+ 'MediumBlue',
+ 'MediumOrchid',
+ 'MediumPurple',
+ 'MediumSeaGreen',
+ 'MediumSlateBlue',
+ 'MediumSpringGreen',
+ 'MediumTurquoise',
+ 'MediumVioletRed',
+ 'Melon',
+ 'MidPoint',
+ 'MidnightBlue',
+ 'Min',
+ 'MintCream',
+ 'MistyRose',
+ 'Moccasin',
+ 'Move',
+ 'MoveQuiet',
+ 'Mulberry',
+ 'N',
+ 'NE',
+ 'NNE',
+ 'NNW',
+ 'NW',
+ 'NavajoWhite',
+ 'Navy',
+ 'NavyBlue',
+ 'NoAlign',
+ 'NoCrop',
+ 'NoFill',
+ 'NoSide',
+ 'OldLace',
+ 'Olive',
+ 'OliveDrab',
+ 'OliveGreen',
+ 'Orange',
+ 'OrangeRed',
+ 'Orchid',
+ 'Ox',
+ 'Oy',
+ 'PC',
+ 'PaleGoldenrod',
+ 'PaleGreen',
+ 'PaleTurquoise',
+ 'PaleVioletRed',
+ 'PapayaWhip',
+ 'Peach',
+ 'PeachPuff',
+ 'Periwinkle',
+ 'Peru',
+ 'PineGreen',
+ 'Pink',
+ 'Plum',
+ 'PowderBlue',
+ 'ProcessBlue',
+ 'Purple',
+ 'RK2',
+ 'RK3',
+ 'RK3BS',
+ 'RK4',
+ 'RK5',
+ 'RK5DP',
+ 'RK5F',
+ 'RawSienna',
+ 'Red',
+ 'RedOrange',
+ 'RedViolet',
+ 'Rhodamine',
+ 'RightJustified',
+ 'RightSide',
+ 'RosyBrown',
+ 'RoyalBlue',
+ 'RoyalPurple',
+ 'RubineRed',
+ 'S',
+ 'SE',
+ 'SSE',
+ 'SSW',
+ 'SW',
+ 'SaddleBrown',
+ 'Salmon',
+ 'SandyBrown',
+ 'SeaGreen',
+ 'Seashell',
+ 'Sepia',
+ 'Sienna',
+ 'Silver',
+ 'SimpleHead',
+ 'SkyBlue',
+ 'SlateBlue',
+ 'SlateGray',
+ 'Snow',
+ 'SpringGreen',
+ 'SteelBlue',
+ 'Suppress',
+ 'SuppressQuiet',
+ 'Tan',
+ 'TeXHead',
+ 'Teal',
+ 'TealBlue',
+ 'Thistle',
+ 'Ticksize',
+ 'Tomato',
+ 'Turquoise',
+ 'UnFill',
+ 'VERSION',
+ 'Value',
+ 'Vertical',
+ 'Violet',
+ 'VioletRed',
+ 'W',
+ 'WNW',
+ 'WSW',
+ 'Wheat',
+ 'White',
+ 'WhiteSmoke',
+ 'WildStrawberry',
+ 'XYAlign',
+ 'YAlign',
+ 'Yellow',
+ 'YellowGreen',
+ 'YellowOrange',
+ 'addpenarc',
+ 'addpenline',
+ 'align',
+ 'allowstepping',
+ 'angularsystem',
+ 'animationdelay',
+ 'appendsuffix',
+ 'arcarrowangle',
+ 'arcarrowfactor',
+ 'arrow2sizelimit',
+ 'arrowangle',
+ 'arrowbarb',
+ 'arrowdir',
+ 'arrowfactor',
+ 'arrowhookfactor',
+ 'arrowlength',
+ 'arrowsizelimit',
+ 'arrowtexfactor',
+ 'authorpen',
+ 'axis',
+ 'axiscoverage',
+ 'axislabelfactor',
+ 'background',
+ 'backgroundcolor',
+ 'backgroundpen',
+ 'barfactor',
+ 'barmarksizefactor',
+ 'basealign',
+ 'baselinetemplate',
+ 'beveljoin',
+ 'bigvertexpen',
+ 'bigvertexsize',
+ 'black',
+ 'blue',
+ 'bm',
+ 'bottom',
+ 'bp',
+ 'brown',
+ 'bullet',
+ 'byfoci',
+ 'byvertices',
+ 'camerafactor',
+ 'chartreuse',
+ 'circlemarkradiusfactor',
+ 'circlenodesnumberfactor',
+ 'circleprecision',
+ 'circlescale',
+ 'cm',
+ 'codefile',
+ 'codepen',
+ 'codeskip',
+ 'colorPen',
+ 'coloredNodes',
+ 'coloredSegments',
+ 'conditionlength',
+ 'conicnodesfactor',
+ 'count',
+ 'cputimeformat',
+ 'crossmarksizefactor',
+ 'currentcoordsys',
+ 'currentlight',
+ 'currentpatterns',
+ 'currentpen',
+ 'currentpicture',
+ 'currentposition',
+ 'currentprojection',
+ 'curvilinearsystem',
+ 'cuttings',
+ 'cyan',
+ 'darkblue',
+ 'darkbrown',
+ 'darkcyan',
+ 'darkgray',
+ 'darkgreen',
+ 'darkgrey',
+ 'darkmagenta',
+ 'darkolive',
+ 'darkred',
+ 'dashdotted',
+ 'dashed',
+ 'datepen',
+ 'dateskip',
+ 'debuggerlines',
+ 'debugging',
+ 'deepblue',
+ 'deepcyan',
+ 'deepgray',
+ 'deepgreen',
+ 'deepgrey',
+ 'deepmagenta',
+ 'deepred',
+ 'default',
+ 'defaultControl',
+ 'defaultS',
+ 'defaultbackpen',
+ 'defaultcoordsys',
+ 'defaultfilename',
+ 'defaultformat',
+ 'defaultmassformat',
+ 'defaultpen',
+ 'diagnostics',
+ 'differentlengths',
+ 'dot',
+ 'dotfactor',
+ 'dotframe',
+ 'dotted',
+ 'doublelinepen',
+ 'doublelinespacing',
+ 'down',
+ 'duplicateFuzz',
+ 'ellipsenodesnumberfactor',
+ 'eps',
+ 'epsgeo',
+ 'epsilon',
+ 'evenodd',
+ 'extendcap',
+ 'fermionpen',
+ 'figureborder',
+ 'figuremattpen',
+ 'firstnode',
+ 'firststep',
+ 'foregroundcolor',
+ 'fuchsia',
+ 'fuzz',
+ 'gapfactor',
+ 'ghostpen',
+ 'gluonamplitude',
+ 'gluonpen',
+ 'gluonratio',
+ 'gray',
+ 'green',
+ 'grey',
+ 'hatchepsilon',
+ 'havepagenumber',
+ 'heavyblue',
+ 'heavycyan',
+ 'heavygray',
+ 'heavygreen',
+ 'heavygrey',
+ 'heavymagenta',
+ 'heavyred',
+ 'hline',
+ 'hwratio',
+ 'hyperbolanodesnumberfactor',
+ 'identity4',
+ 'ignore',
+ 'inXasyMode',
+ 'inch',
+ 'inches',
+ 'includegraphicscommand',
+ 'inf',
+ 'infinity',
+ 'institutionpen',
+ 'intMax',
+ 'intMin',
+ 'invert',
+ 'invisible',
+ 'itempen',
+ 'itemskip',
+ 'itemstep',
+ 'labelmargin',
+ 'landscape',
+ 'lastnode',
+ 'left',
+ 'legendhskip',
+ 'legendlinelength',
+ 'legendmargin',
+ 'legendmarkersize',
+ 'legendmaxrelativewidth',
+ 'legendvskip',
+ 'lightblue',
+ 'lightcyan',
+ 'lightgray',
+ 'lightgreen',
+ 'lightgrey',
+ 'lightmagenta',
+ 'lightolive',
+ 'lightred',
+ 'lightyellow',
+ 'linemargin',
+ 'lm_infmsg',
+ 'lm_shortmsg',
+ 'longdashdotted',
+ 'longdashed',
+ 'magenta',
+ 'magneticPoints',
+ 'magneticRadius',
+ 'mantissaBits',
+ 'markangleradius',
+ 'markangleradiusfactor',
+ 'markanglespace',
+ 'markanglespacefactor',
+ 'mediumblue',
+ 'mediumcyan',
+ 'mediumgray',
+ 'mediumgreen',
+ 'mediumgrey',
+ 'mediummagenta',
+ 'mediumred',
+ 'mediumyellow',
+ 'middle',
+ 'minDistDefault',
+ 'minblockheight',
+ 'minblockwidth',
+ 'mincirclediameter',
+ 'minipagemargin',
+ 'minipagewidth',
+ 'minvertexangle',
+ 'miterjoin',
+ 'mm',
+ 'momarrowfactor',
+ 'momarrowlength',
+ 'momarrowmargin',
+ 'momarrowoffset',
+ 'momarrowpen',
+ 'monoPen',
+ 'morepoints',
+ 'nCircle',
+ 'newbulletcolor',
+ 'ngraph',
+ 'nil',
+ 'nmesh',
+ 'nobasealign',
+ 'nodeMarginDefault',
+ 'nodesystem',
+ 'nomarker',
+ 'nopoint',
+ 'noprimary',
+ 'nullpath',
+ 'nullpen',
+ 'numarray',
+ 'ocgindex',
+ 'oldbulletcolor',
+ 'olive',
+ 'orange',
+ 'origin',
+ 'overpaint',
+ 'page',
+ 'pageheight',
+ 'pagemargin',
+ 'pagenumberalign',
+ 'pagenumberpen',
+ 'pagenumberposition',
+ 'pagewidth',
+ 'paleblue',
+ 'palecyan',
+ 'palegray',
+ 'palegreen',
+ 'palegrey',
+ 'palemagenta',
+ 'palered',
+ 'paleyellow',
+ 'parabolanodesnumberfactor',
+ 'perpfactor',
+ 'phi',
+ 'photonamplitude',
+ 'photonpen',
+ 'photonratio',
+ 'pi',
+ 'pink',
+ 'plain',
+ 'plus',
+ 'preamblenodes',
+ 'pt',
+ 'purple',
+ 'r3',
+ 'r4a',
+ 'r4b',
+ 'randMax',
+ 'realDigits',
+ 'realEpsilon',
+ 'realMax',
+ 'realMin',
+ 'red',
+ 'relativesystem',
+ 'reverse',
+ 'right',
+ 'roundcap',
+ 'roundjoin',
+ 'royalblue',
+ 'salmon',
+ 'saveFunctions',
+ 'scalarpen',
+ 'sequencereal',
+ 'settings',
+ 'shipped',
+ 'signedtrailingzero',
+ 'solid',
+ 'springgreen',
+ 'sqrtEpsilon',
+ 'squarecap',
+ 'squarepen',
+ 'startposition',
+ 'stdin',
+ 'stdout',
+ 'stepfactor',
+ 'stepfraction',
+ 'steppagenumberpen',
+ 'stepping',
+ 'stickframe',
+ 'stickmarksizefactor',
+ 'stickmarkspacefactor',
+ 'textpen',
+ 'ticksize',
+ 'tildeframe',
+ 'tildemarksizefactor',
+ 'tinv',
+ 'titlealign',
+ 'titlepagepen',
+ 'titlepageposition',
+ 'titlepen',
+ 'titleskip',
+ 'top',
+ 'trailingzero',
+ 'treeLevelStep',
+ 'treeMinNodeWidth',
+ 'treeNodeStep',
+ 'trembleAngle',
+ 'trembleFrequency',
+ 'trembleRandom',
+ 'tremblingMode',
+ 'undefined',
+ 'unitcircle',
+ 'unitsquare',
+ 'up',
+ 'urlpen',
+ 'urlskip',
+ 'version',
+ 'vertexpen',
+ 'vertexsize',
+ 'viewportmargin',
+ 'viewportsize',
+ 'vline',
+ 'white',
+ 'wye',
+ 'xformStack',
+ 'yellow',
+ 'ylabelwidth',
+ 'zerotickfuzz',
+ 'zerowinding'
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
index 08d70e1549..a9e01d6586 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
@@ -1,231 +1,231 @@
-"""
- pygments.lexers._cl_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- ANSI Common Lisp builtins.
-
+"""
+ pygments.lexers._cl_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ ANSI Common Lisp builtins.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
BUILTIN_FUNCTIONS = { # 638 functions
- '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
- 'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
- 'adjustable-array-p', 'adjust-array', 'allocate-instance',
- 'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
- 'apropos-list', 'aref', 'arithmetic-error-operands',
- 'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
- 'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
- 'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
- 'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
- 'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
- 'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
- 'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
- 'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
- 'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
- 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
- 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
- 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
- 'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
- 'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
- 'characterp', 'char-code', 'char-downcase', 'char-equal',
- 'char-greaterp', 'char-int', 'char-lessp', 'char-name',
- 'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
- 'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
- 'close', 'clrhash', 'code-char', 'coerce', 'compile',
- 'compiled-function-p', 'compile-file', 'compile-file-pathname',
- 'compiler-macro-function', 'complement', 'complex', 'complexp',
- 'compute-applicable-methods', 'compute-restarts', 'concatenate',
- 'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
- 'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
- 'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
- 'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
- 'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
- 'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
- 'delete-package', 'denominator', 'deposit-field', 'describe',
- 'describe-object', 'digit-char', 'digit-char-p', 'directory',
- 'directory-namestring', 'disassemble', 'documentation', 'dpb',
- 'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
- 'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
- 'enough-namestring', 'ensure-directories-exist',
- 'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
- 'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
- 'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
- 'file-error-pathname', 'file-length', 'file-namestring',
- 'file-position', 'file-string-length', 'file-write-date',
- 'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
- 'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
- 'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
- 'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
- 'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
- 'fround', 'ftruncate', 'funcall', 'function-keywords',
- 'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
- 'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
- 'gethash', 'get-internal-real-time', 'get-internal-run-time',
- 'get-macro-character', 'get-output-stream-string', 'get-properties',
- 'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
- 'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
- 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
- 'host-namestring', 'identity', 'imagpart', 'import',
- 'initialize-instance', 'input-stream-p', 'inspect',
- 'integer-decode-float', 'integer-length', 'integerp',
- 'interactive-stream-p', 'intern', 'intersection',
- 'invalid-method-error', 'invoke-debugger', 'invoke-restart',
- 'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
- 'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
- 'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
- 'listen', 'list-length', 'listp', 'load',
- 'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
- 'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
- 'logical-pathname-translations', 'logior', 'lognand', 'lognor',
- 'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
- 'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
- 'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
- 'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
- 'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
- 'make-instance', 'make-instances-obsolete', 'make-list',
- 'make-load-form', 'make-load-form-saving-slots', 'make-package',
- 'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
- 'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
- 'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
- 'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
- 'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
- 'merge', 'merge-pathnames', 'method-combination-error',
- 'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
- 'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
- 'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
- 'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
- 'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
- 'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
- 'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
- 'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
- 'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
- 'package-name', 'package-nicknames', 'packagep',
- 'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
- 'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
- 'pathname-device', 'pathname-directory', 'pathname-host',
- 'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
- 'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
- 'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
- 'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
- 'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
- 'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
- 'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
- 'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
- 'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
- 'read-from-string', 'read-line', 'read-preserving-whitespace',
- 'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
- 'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
- 'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
- 'remprop', 'rename-file', 'rename-package', 'replace', 'require',
- 'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
- 'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
- 'search', 'second', 'set', 'set-difference',
- 'set-dispatch-macro-character', 'set-exclusive-or',
- 'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
- 'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
- 'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
- 'simple-condition-format-arguments', 'simple-condition-format-control',
- 'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
- 'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
- 'slot-unbound', 'slot-value', 'software-type', 'software-version',
- 'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
- 'standard-char-p', 'store-value', 'stream-element-type',
- 'stream-error-stream', 'stream-external-format', 'streamp', 'string',
- 'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
- 'string-capitalize', 'string-downcase', 'string-equal',
- 'string-greaterp', 'string-left-trim', 'string-lessp',
- 'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
- 'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
- 'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
- 'substitute', 'substitute-if', 'substitute-if-not', 'subtypep','svref',
- 'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
- 'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
- 'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
- 'translate-logical-pathname', 'translate-pathname', 'tree-equal',
- 'truename', 'truncate', 'two-way-stream-input-stream',
- 'two-way-stream-output-stream', 'type-error-datum',
- 'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
- 'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
- 'update-instance-for-different-class',
- 'update-instance-for-redefined-class', 'upgraded-array-element-type',
- 'upgraded-complex-part-type', 'upper-case-p', 'use-package',
- 'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
- 'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
- 'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
- 'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
- 'y-or-n-p', 'zerop',
+ '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
+ 'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
+ 'adjustable-array-p', 'adjust-array', 'allocate-instance',
+ 'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
+ 'apropos-list', 'aref', 'arithmetic-error-operands',
+ 'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
+ 'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
+ 'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
+ 'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
+ 'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
+ 'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
+ 'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
+ 'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
+ 'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
+ 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
+ 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
+ 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
+ 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
+ 'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
+ 'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
+ 'characterp', 'char-code', 'char-downcase', 'char-equal',
+ 'char-greaterp', 'char-int', 'char-lessp', 'char-name',
+ 'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
+ 'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
+ 'close', 'clrhash', 'code-char', 'coerce', 'compile',
+ 'compiled-function-p', 'compile-file', 'compile-file-pathname',
+ 'compiler-macro-function', 'complement', 'complex', 'complexp',
+ 'compute-applicable-methods', 'compute-restarts', 'concatenate',
+ 'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
+ 'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
+ 'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
+ 'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
+ 'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
+ 'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
+ 'delete-package', 'denominator', 'deposit-field', 'describe',
+ 'describe-object', 'digit-char', 'digit-char-p', 'directory',
+ 'directory-namestring', 'disassemble', 'documentation', 'dpb',
+ 'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
+ 'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
+ 'enough-namestring', 'ensure-directories-exist',
+ 'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
+ 'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
+ 'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
+ 'file-error-pathname', 'file-length', 'file-namestring',
+ 'file-position', 'file-string-length', 'file-write-date',
+ 'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
+ 'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
+ 'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
+ 'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
+ 'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
+ 'fround', 'ftruncate', 'funcall', 'function-keywords',
+ 'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
+ 'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
+ 'gethash', 'get-internal-real-time', 'get-internal-run-time',
+ 'get-macro-character', 'get-output-stream-string', 'get-properties',
+ 'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
+ 'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
+ 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
+ 'host-namestring', 'identity', 'imagpart', 'import',
+ 'initialize-instance', 'input-stream-p', 'inspect',
+ 'integer-decode-float', 'integer-length', 'integerp',
+ 'interactive-stream-p', 'intern', 'intersection',
+ 'invalid-method-error', 'invoke-debugger', 'invoke-restart',
+ 'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
+ 'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
+ 'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
+ 'listen', 'list-length', 'listp', 'load',
+ 'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
+ 'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
+ 'logical-pathname-translations', 'logior', 'lognand', 'lognor',
+ 'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
+ 'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
+ 'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
+ 'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
+ 'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
+ 'make-instance', 'make-instances-obsolete', 'make-list',
+ 'make-load-form', 'make-load-form-saving-slots', 'make-package',
+ 'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
+ 'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
+ 'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
+ 'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
+ 'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
+ 'merge', 'merge-pathnames', 'method-combination-error',
+ 'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
+ 'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
+ 'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
+ 'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
+ 'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
+ 'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
+ 'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
+ 'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
+ 'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
+ 'package-name', 'package-nicknames', 'packagep',
+ 'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
+ 'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
+ 'pathname-device', 'pathname-directory', 'pathname-host',
+ 'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
+ 'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
+ 'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
+ 'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
+ 'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
+ 'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
+ 'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
+ 'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
+ 'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
+ 'read-from-string', 'read-line', 'read-preserving-whitespace',
+ 'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
+ 'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
+ 'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
+ 'remprop', 'rename-file', 'rename-package', 'replace', 'require',
+ 'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
+ 'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
+ 'search', 'second', 'set', 'set-difference',
+ 'set-dispatch-macro-character', 'set-exclusive-or',
+ 'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
+ 'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
+ 'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
+ 'simple-condition-format-arguments', 'simple-condition-format-control',
+ 'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
+ 'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
+ 'slot-unbound', 'slot-value', 'software-type', 'software-version',
+ 'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
+ 'standard-char-p', 'store-value', 'stream-element-type',
+ 'stream-error-stream', 'stream-external-format', 'streamp', 'string',
+ 'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
+ 'string-capitalize', 'string-downcase', 'string-equal',
+ 'string-greaterp', 'string-left-trim', 'string-lessp',
+ 'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
+ 'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
+ 'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
+ 'substitute', 'substitute-if', 'substitute-if-not', 'subtypep','svref',
+ 'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
+ 'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
+ 'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
+ 'translate-logical-pathname', 'translate-pathname', 'tree-equal',
+ 'truename', 'truncate', 'two-way-stream-input-stream',
+ 'two-way-stream-output-stream', 'type-error-datum',
+ 'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
+ 'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
+ 'update-instance-for-different-class',
+ 'update-instance-for-redefined-class', 'upgraded-array-element-type',
+ 'upgraded-complex-part-type', 'upper-case-p', 'use-package',
+ 'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
+ 'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
+ 'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
+ 'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
+ 'y-or-n-p', 'zerop',
}
-
+
SPECIAL_FORMS = {
- 'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
- 'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
- 'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
- 'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
- 'unwind-protect',
+ 'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
+ 'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
+ 'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
+ 'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
+ 'unwind-protect',
}
-
+
MACROS = {
- 'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
- 'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
- 'define-compiler-macro', 'define-condition', 'define-method-combination',
- 'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
- 'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
- 'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
- 'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
- 'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
- 'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
- 'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
- 'multiple-value-setq', 'nth-value', 'or', 'pop',
- 'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
- 'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
- 'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
- 'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
- 'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
- 'with-condition-restarts', 'with-hash-table-iterator',
- 'with-input-from-string', 'with-open-file', 'with-open-stream',
- 'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
- 'with-slots', 'with-standard-io-syntax',
+ 'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
+ 'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
+ 'define-compiler-macro', 'define-condition', 'define-method-combination',
+ 'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
+ 'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
+ 'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
+ 'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
+ 'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
+ 'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
+ 'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
+ 'multiple-value-setq', 'nth-value', 'or', 'pop',
+ 'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
+ 'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
+ 'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
+ 'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
+ 'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
+ 'with-condition-restarts', 'with-hash-table-iterator',
+ 'with-input-from-string', 'with-open-file', 'with-open-stream',
+ 'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
+ 'with-slots', 'with-standard-io-syntax',
}
-
+
LAMBDA_LIST_KEYWORDS = {
- '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
- '&rest', '&whole',
+ '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
+ '&rest', '&whole',
}
-
+
DECLARATIONS = {
- 'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
- 'ignorable', 'notinline', 'type',
+ 'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
+ 'ignorable', 'notinline', 'type',
}
-
+
BUILTIN_TYPES = {
- 'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
- 'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
- 'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
- 'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
- 'simple-vector', 'standard-char', 'unsigned-byte',
-
- # Condition Types
- 'arithmetic-error', 'cell-error', 'condition', 'control-error',
- 'division-by-zero', 'end-of-file', 'error', 'file-error',
- 'floating-point-inexact', 'floating-point-overflow',
- 'floating-point-underflow', 'floating-point-invalid-operation',
- 'parse-error', 'package-error', 'print-not-readable', 'program-error',
- 'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
- 'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
- 'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
- 'undefined-function', 'warning',
+ 'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
+ 'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
+ 'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
+ 'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
+ 'simple-vector', 'standard-char', 'unsigned-byte',
+
+ # Condition Types
+ 'arithmetic-error', 'cell-error', 'condition', 'control-error',
+ 'division-by-zero', 'end-of-file', 'error', 'file-error',
+ 'floating-point-inexact', 'floating-point-overflow',
+ 'floating-point-underflow', 'floating-point-invalid-operation',
+ 'parse-error', 'package-error', 'print-not-readable', 'program-error',
+ 'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
+ 'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
+ 'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
+ 'undefined-function', 'warning',
}
-
+
BUILTIN_CLASSES = {
- 'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
- 'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
- 'file-stream', 'float', 'function', 'generic-function', 'hash-table',
- 'integer', 'list', 'logical-pathname', 'method-combination', 'method',
- 'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
- 'real', 'random-state', 'restart', 'sequence', 'standard-class',
- 'standard-generic-function', 'standard-method', 'standard-object',
- 'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
- 'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
+ 'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
+ 'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
+ 'file-stream', 'float', 'function', 'generic-function', 'hash-table',
+ 'integer', 'list', 'logical-pathname', 'method-combination', 'method',
+ 'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
+ 'real', 'random-state', 'restart', 'sequence', 'standard-class',
+ 'standard-generic-function', 'standard-method', 'standard-object',
+ 'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
+ 'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
index 72d86db1e7..4535c4fa05 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
@@ -1,41 +1,41 @@
-"""
- pygments.lexers._cocoa_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file defines a set of types used across Cocoa frameworks from Apple.
- There is a list of @interfaces, @protocols and some other (structs, unions)
-
- File may be also used as standalone generator for aboves.
-
+"""
+ pygments.lexers._cocoa_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file defines a set of types used across Cocoa frameworks from Apple.
+ There is a list of @interfaces, @protocols and some other (structs, unions)
+
+ File may be also used as standalone generator for aboves.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
COCOA_INTERFACES = {'AAAttribution', 'ABNewPersonViewController', 'ABPeoplePickerNavigationController', 'ABPersonViewController', 'ABUnknownPersonViewController', 'ACAccount', 'ACAccountCredential', 'ACAccountStore', 'ACAccountType', 'ADBannerView', 'ADClient', 'ADInterstitialAd', 'ADInterstitialAdPresentationViewController', 'AEAssessmentConfiguration', 'AEAssessmentSession', 'ALAsset', 'ALAssetRepresentation', 'ALAssetsFilter', 'ALAssetsGroup', 'ALAssetsLibrary', 'APActivationPayload', 'ARAnchor', 'ARAppClipCodeAnchor', 'ARBody2D', 'ARBodyAnchor', 'ARBodyTrackingConfiguration', 'ARCamera', 'ARCoachingOverlayView', 'ARCollaborationData', 'ARConfiguration', 'ARDepthData', 'ARDirectionalLightEstimate', 'AREnvironmentProbeAnchor', 'ARFaceAnchor', 'ARFaceGeometry', 'ARFaceTrackingConfiguration', 'ARFrame', 'ARGeoAnchor', 'ARGeoTrackingConfiguration', 'ARGeoTrackingStatus', 'ARGeometryElement', 'ARGeometrySource', 'ARHitTestResult', 'ARImageAnchor', 'ARImageTrackingConfiguration', 'ARLightEstimate', 'ARMatteGenerator', 'ARMeshAnchor', 'ARMeshGeometry', 'ARObjectAnchor', 'ARObjectScanningConfiguration', 'AROrientationTrackingConfiguration', 'ARParticipantAnchor', 'ARPlaneAnchor', 'ARPlaneGeometry', 'ARPointCloud', 'ARPositionalTrackingConfiguration', 'ARQuickLookPreviewItem', 'ARRaycastQuery', 'ARRaycastResult', 'ARReferenceImage', 'ARReferenceObject', 'ARSCNFaceGeometry', 'ARSCNPlaneGeometry', 'ARSCNView', 'ARSKView', 'ARSession', 'ARSkeleton', 'ARSkeleton2D', 'ARSkeleton3D', 'ARSkeletonDefinition', 'ARTrackedRaycast', 'ARVideoFormat', 'ARView', 'ARWorldMap', 'ARWorldTrackingConfiguration', 'ASAccountAuthenticationModificationController', 'ASAccountAuthenticationModificationExtensionContext', 'ASAccountAuthenticationModificationReplacePasswordWithSignInWithAppleRequest', 'ASAccountAuthenticationModificationRequest', 'ASAccountAuthenticationModificationUpgradePasswordToStrongPasswordRequest', 'ASAccountAuthenticationModificationViewController', 'ASAuthorization', 'ASAuthorizationAppleIDButton', 'ASAuthorizationAppleIDCredential', 'ASAuthorizationAppleIDProvider', 'ASAuthorizationAppleIDRequest', 'ASAuthorizationController', 'ASAuthorizationOpenIDRequest', 'ASAuthorizationPasswordProvider', 'ASAuthorizationPasswordRequest', 'ASAuthorizationProviderExtensionAuthorizationRequest', 'ASAuthorizationRequest', 'ASAuthorizationSingleSignOnCredential', 'ASAuthorizationSingleSignOnProvider', 'ASAuthorizationSingleSignOnRequest', 'ASCredentialIdentityStore', 'ASCredentialIdentityStoreState', 'ASCredentialProviderExtensionContext', 'ASCredentialProviderViewController', 'ASCredentialServiceIdentifier', 'ASIdentifierManager', 'ASPasswordCredential', 'ASPasswordCredentialIdentity', 'ASWebAuthenticationSession', 'ASWebAuthenticationSessionRequest', 'ASWebAuthenticationSessionWebBrowserSessionManager', 'ATTrackingManager', 'AUAudioUnit', 'AUAudioUnitBus', 'AUAudioUnitBusArray', 'AUAudioUnitPreset', 'AUAudioUnitV2Bridge', 'AUAudioUnitViewConfiguration', 'AUParameter', 'AUParameterGroup', 'AUParameterNode', 'AUParameterTree', 'AUViewController', 'AVAggregateAssetDownloadTask', 'AVAsset', 'AVAssetCache', 'AVAssetDownloadStorageManagementPolicy', 'AVAssetDownloadStorageManager', 'AVAssetDownloadTask', 'AVAssetDownloadURLSession', 'AVAssetExportSession', 'AVAssetImageGenerator', 'AVAssetReader', 'AVAssetReaderAudioMixOutput', 'AVAssetReaderOutput', 'AVAssetReaderOutputMetadataAdaptor', 'AVAssetReaderSampleReferenceOutput', 'AVAssetReaderTrackOutput', 'AVAssetReaderVideoCompositionOutput', 'AVAssetResourceLoader', 
'AVAssetResourceLoadingContentInformationRequest', 'AVAssetResourceLoadingDataRequest', 'AVAssetResourceLoadingRequest', 'AVAssetResourceLoadingRequestor', 'AVAssetResourceRenewalRequest', 'AVAssetSegmentReport', 'AVAssetSegmentReportSampleInformation', 'AVAssetSegmentTrackReport', 'AVAssetTrack', 'AVAssetTrackGroup', 'AVAssetTrackSegment', 'AVAssetWriter', 'AVAssetWriterInput', 'AVAssetWriterInputGroup', 'AVAssetWriterInputMetadataAdaptor', 'AVAssetWriterInputPassDescription', 'AVAssetWriterInputPixelBufferAdaptor', 'AVAsynchronousCIImageFilteringRequest', 'AVAsynchronousVideoCompositionRequest', 'AVAudioMix', 'AVAudioMixInputParameters', 'AVAudioSession', 'AVCameraCalibrationData', 'AVCaptureAudioChannel', 'AVCaptureAudioDataOutput', 'AVCaptureAudioFileOutput', 'AVCaptureAudioPreviewOutput', 'AVCaptureAutoExposureBracketedStillImageSettings', 'AVCaptureBracketedStillImageSettings', 'AVCaptureConnection', 'AVCaptureDataOutputSynchronizer', 'AVCaptureDepthDataOutput', 'AVCaptureDevice', 'AVCaptureDeviceDiscoverySession', 'AVCaptureDeviceFormat', 'AVCaptureDeviceInput', 'AVCaptureDeviceInputSource', 'AVCaptureFileOutput', 'AVCaptureInput', 'AVCaptureInputPort', 'AVCaptureManualExposureBracketedStillImageSettings', 'AVCaptureMetadataInput', 'AVCaptureMetadataOutput', 'AVCaptureMovieFileOutput', 'AVCaptureMultiCamSession', 'AVCaptureOutput', 'AVCapturePhoto', 'AVCapturePhotoBracketSettings', 'AVCapturePhotoOutput', 'AVCapturePhotoSettings', 'AVCaptureResolvedPhotoSettings', 'AVCaptureScreenInput', 'AVCaptureSession', 'AVCaptureStillImageOutput', 'AVCaptureSynchronizedData', 'AVCaptureSynchronizedDataCollection', 'AVCaptureSynchronizedDepthData', 'AVCaptureSynchronizedMetadataObjectData', 'AVCaptureSynchronizedSampleBufferData', 'AVCaptureSystemPressureState', 'AVCaptureVideoDataOutput', 'AVCaptureVideoPreviewLayer', 'AVComposition', 'AVCompositionTrack', 'AVCompositionTrackFormatDescriptionReplacement', 'AVCompositionTrackSegment', 'AVContentKeyRequest', 'AVContentKeyResponse', 'AVContentKeySession', 'AVDateRangeMetadataGroup', 'AVDepthData', 'AVDisplayCriteria', 'AVFragmentedAsset', 'AVFragmentedAssetMinder', 'AVFragmentedAssetTrack', 'AVFragmentedMovie', 'AVFragmentedMovieMinder', 'AVFragmentedMovieTrack', 'AVFrameRateRange', 'AVMediaDataStorage', 'AVMediaSelection', 'AVMediaSelectionGroup', 'AVMediaSelectionOption', 'AVMetadataBodyObject', 'AVMetadataCatBodyObject', 'AVMetadataDogBodyObject', 'AVMetadataFaceObject', 'AVMetadataGroup', 'AVMetadataHumanBodyObject', 'AVMetadataItem', 'AVMetadataItemFilter', 'AVMetadataItemValueRequest', 'AVMetadataMachineReadableCodeObject', 'AVMetadataObject', 'AVMetadataSalientObject', 'AVMovie', 'AVMovieTrack', 'AVMutableAssetDownloadStorageManagementPolicy', 'AVMutableAudioMix', 'AVMutableAudioMixInputParameters', 'AVMutableComposition', 'AVMutableCompositionTrack', 'AVMutableDateRangeMetadataGroup', 'AVMutableMediaSelection', 'AVMutableMetadataItem', 'AVMutableMovie', 'AVMutableMovieTrack', 'AVMutableTimedMetadataGroup', 'AVMutableVideoComposition', 'AVMutableVideoCompositionInstruction', 'AVMutableVideoCompositionLayerInstruction', 'AVOutputSettingsAssistant', 'AVPersistableContentKeyRequest', 'AVPictureInPictureController', 'AVPlayer', 'AVPlayerItem', 'AVPlayerItemAccessLog', 'AVPlayerItemAccessLogEvent', 'AVPlayerItemErrorLog', 'AVPlayerItemErrorLogEvent', 'AVPlayerItemLegibleOutput', 'AVPlayerItemMediaDataCollector', 'AVPlayerItemMetadataCollector', 'AVPlayerItemMetadataOutput', 'AVPlayerItemOutput', 'AVPlayerItemTrack', 'AVPlayerItemVideoOutput', 
'AVPlayerLayer', 'AVPlayerLooper', 'AVPlayerMediaSelectionCriteria', 'AVPlayerViewController', 'AVPortraitEffectsMatte', 'AVQueuePlayer', 'AVRouteDetector', 'AVRoutePickerView', 'AVSampleBufferAudioRenderer', 'AVSampleBufferDisplayLayer', 'AVSampleBufferRenderSynchronizer', 'AVSemanticSegmentationMatte', 'AVSynchronizedLayer', 'AVTextStyleRule', 'AVTimedMetadataGroup', 'AVURLAsset', 'AVVideoComposition', 'AVVideoCompositionCoreAnimationTool', 'AVVideoCompositionInstruction', 'AVVideoCompositionLayerInstruction', 'AVVideoCompositionRenderContext', 'AVVideoCompositionRenderHint', 'AXCustomContent', 'BCChatAction', 'BCChatButton', 'BGAppRefreshTask', 'BGAppRefreshTaskRequest', 'BGProcessingTask', 'BGProcessingTaskRequest', 'BGTask', 'BGTaskRequest', 'BGTaskScheduler', 'CAAnimation', 'CAAnimationGroup', 'CABTMIDICentralViewController', 'CABTMIDILocalPeripheralViewController', 'CABasicAnimation', 'CADisplayLink', 'CAEAGLLayer', 'CAEmitterCell', 'CAEmitterLayer', 'CAGradientLayer', 'CAInterAppAudioSwitcherView', 'CAInterAppAudioTransportView', 'CAKeyframeAnimation', 'CALayer', 'CAMediaTimingFunction', 'CAMetalLayer', 'CAPropertyAnimation', 'CAReplicatorLayer', 'CAScrollLayer', 'CAShapeLayer', 'CASpringAnimation', 'CATextLayer', 'CATiledLayer', 'CATransaction', 'CATransformLayer', 'CATransition', 'CAValueFunction', 'CBATTRequest', 'CBAttribute', 'CBCentral', 'CBCentralManager', 'CBCharacteristic', 'CBDescriptor', 'CBL2CAPChannel', 'CBManager', 'CBMutableCharacteristic', 'CBMutableDescriptor', 'CBMutableService', 'CBPeer', 'CBPeripheral', 'CBPeripheralManager', 'CBService', 'CBUUID', 'CHHapticDynamicParameter', 'CHHapticEngine', 'CHHapticEvent', 'CHHapticEventParameter', 'CHHapticParameterCurve', 'CHHapticParameterCurveControlPoint', 'CHHapticPattern', 'CIAztecCodeDescriptor', 'CIBarcodeDescriptor', 'CIBlendKernel', 'CIColor', 'CIColorKernel', 'CIContext', 'CIDataMatrixCodeDescriptor', 'CIDetector', 'CIFaceFeature', 'CIFeature', 'CIFilter', 'CIFilterGenerator', 'CIFilterShape', 'CIImage', 'CIImageAccumulator', 'CIImageProcessorKernel', 'CIKernel', 'CIPDF417CodeDescriptor', 'CIPlugIn', 'CIQRCodeDescriptor', 'CIQRCodeFeature', 'CIRectangleFeature', 'CIRenderDestination', 'CIRenderInfo', 'CIRenderTask', 'CISampler', 'CITextFeature', 'CIVector', 'CIWarpKernel', 'CKAcceptSharesOperation', 'CKAsset', 'CKContainer', 'CKDatabase', 'CKDatabaseNotification', 'CKDatabaseOperation', 'CKDatabaseSubscription', 'CKDiscoverAllUserIdentitiesOperation', 'CKDiscoverUserIdentitiesOperation', 'CKFetchDatabaseChangesOperation', 'CKFetchNotificationChangesOperation', 'CKFetchRecordChangesOperation', 'CKFetchRecordZoneChangesConfiguration', 'CKFetchRecordZoneChangesOperation', 'CKFetchRecordZoneChangesOptions', 'CKFetchRecordZonesOperation', 'CKFetchRecordsOperation', 'CKFetchShareMetadataOperation', 'CKFetchShareParticipantsOperation', 'CKFetchSubscriptionsOperation', 'CKFetchWebAuthTokenOperation', 'CKLocationSortDescriptor', 'CKMarkNotificationsReadOperation', 'CKModifyBadgeOperation', 'CKModifyRecordZonesOperation', 'CKModifyRecordsOperation', 'CKModifySubscriptionsOperation', 'CKNotification', 'CKNotificationID', 'CKNotificationInfo', 'CKOperation', 'CKOperationConfiguration', 'CKOperationGroup', 'CKQuery', 'CKQueryCursor', 'CKQueryNotification', 'CKQueryOperation', 'CKQuerySubscription', 'CKRecord', 'CKRecordID', 'CKRecordZone', 'CKRecordZoneID', 'CKRecordZoneNotification', 'CKRecordZoneSubscription', 'CKReference', 'CKServerChangeToken', 'CKShare', 'CKShareMetadata', 'CKShareParticipant', 'CKSubscription', 
'CKUserIdentity', 'CKUserIdentityLookupInfo', 'CLBeacon', 'CLBeaconIdentityConstraint', 'CLBeaconRegion', 'CLCircularRegion', 'CLFloor', 'CLGeocoder', 'CLHeading', 'CLKComplication', 'CLKComplicationDescriptor', 'CLKComplicationServer', 'CLKComplicationTemplate', 'CLKComplicationTemplateCircularSmallRingImage', 'CLKComplicationTemplateCircularSmallRingText', 'CLKComplicationTemplateCircularSmallSimpleImage', 'CLKComplicationTemplateCircularSmallSimpleText', 'CLKComplicationTemplateCircularSmallStackImage', 'CLKComplicationTemplateCircularSmallStackText', 'CLKComplicationTemplateExtraLargeColumnsText', 'CLKComplicationTemplateExtraLargeRingImage', 'CLKComplicationTemplateExtraLargeRingText', 'CLKComplicationTemplateExtraLargeSimpleImage', 'CLKComplicationTemplateExtraLargeSimpleText', 'CLKComplicationTemplateExtraLargeStackImage', 'CLKComplicationTemplateExtraLargeStackText', 'CLKComplicationTemplateGraphicBezelCircularText', 'CLKComplicationTemplateGraphicCircular', 'CLKComplicationTemplateGraphicCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicCircularClosedGaugeText', 'CLKComplicationTemplateGraphicCircularImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicCircularStackImage', 'CLKComplicationTemplateGraphicCircularStackText', 'CLKComplicationTemplateGraphicCornerCircularImage', 'CLKComplicationTemplateGraphicCornerGaugeImage', 'CLKComplicationTemplateGraphicCornerGaugeText', 'CLKComplicationTemplateGraphicCornerStackText', 'CLKComplicationTemplateGraphicCornerTextImage', 'CLKComplicationTemplateGraphicExtraLargeCircular', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularClosedGaugeText', 'CLKComplicationTemplateGraphicExtraLargeCircularImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeImage', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeRangeText', 'CLKComplicationTemplateGraphicExtraLargeCircularOpenGaugeSimpleText', 'CLKComplicationTemplateGraphicExtraLargeCircularStackImage', 'CLKComplicationTemplateGraphicExtraLargeCircularStackText', 'CLKComplicationTemplateGraphicRectangularFullImage', 'CLKComplicationTemplateGraphicRectangularLargeImage', 'CLKComplicationTemplateGraphicRectangularStandardBody', 'CLKComplicationTemplateGraphicRectangularTextGauge', 'CLKComplicationTemplateModularLargeColumns', 'CLKComplicationTemplateModularLargeStandardBody', 'CLKComplicationTemplateModularLargeTable', 'CLKComplicationTemplateModularLargeTallBody', 'CLKComplicationTemplateModularSmallColumnsText', 'CLKComplicationTemplateModularSmallRingImage', 'CLKComplicationTemplateModularSmallRingText', 'CLKComplicationTemplateModularSmallSimpleImage', 'CLKComplicationTemplateModularSmallSimpleText', 'CLKComplicationTemplateModularSmallStackImage', 'CLKComplicationTemplateModularSmallStackText', 'CLKComplicationTemplateUtilitarianLargeFlat', 'CLKComplicationTemplateUtilitarianSmallFlat', 'CLKComplicationTemplateUtilitarianSmallRingImage', 'CLKComplicationTemplateUtilitarianSmallRingText', 'CLKComplicationTemplateUtilitarianSmallSquare', 'CLKComplicationTimelineEntry', 'CLKDateTextProvider', 'CLKFullColorImageProvider', 'CLKGaugeProvider', 'CLKImageProvider', 'CLKRelativeDateTextProvider', 'CLKSimpleGaugeProvider', 'CLKSimpleTextProvider', 'CLKTextProvider', 'CLKTimeIntervalGaugeProvider', 'CLKTimeIntervalTextProvider', 'CLKTimeTextProvider', 
'CLKWatchFaceLibrary', 'CLLocation', 'CLLocationManager', 'CLPlacemark', 'CLRegion', 'CLSActivity', 'CLSActivityItem', 'CLSBinaryItem', 'CLSContext', 'CLSDataStore', 'CLSObject', 'CLSProgressReportingCapability', 'CLSQuantityItem', 'CLSScoreItem', 'CLVisit', 'CMAccelerometerData', 'CMAltimeter', 'CMAltitudeData', 'CMAttitude', 'CMDeviceMotion', 'CMDyskineticSymptomResult', 'CMFallDetectionEvent', 'CMFallDetectionManager', 'CMGyroData', 'CMHeadphoneMotionManager', 'CMLogItem', 'CMMagnetometerData', 'CMMotionActivity', 'CMMotionActivityManager', 'CMMotionManager', 'CMMovementDisorderManager', 'CMPedometer', 'CMPedometerData', 'CMPedometerEvent', 'CMRecordedAccelerometerData', 'CMRecordedRotationRateData', 'CMRotationRateData', 'CMSensorDataList', 'CMSensorRecorder', 'CMStepCounter', 'CMTremorResult', 'CNChangeHistoryAddContactEvent', 'CNChangeHistoryAddGroupEvent', 'CNChangeHistoryAddMemberToGroupEvent', 'CNChangeHistoryAddSubgroupToGroupEvent', 'CNChangeHistoryDeleteContactEvent', 'CNChangeHistoryDeleteGroupEvent', 'CNChangeHistoryDropEverythingEvent', 'CNChangeHistoryEvent', 'CNChangeHistoryFetchRequest', 'CNChangeHistoryRemoveMemberFromGroupEvent', 'CNChangeHistoryRemoveSubgroupFromGroupEvent', 'CNChangeHistoryUpdateContactEvent', 'CNChangeHistoryUpdateGroupEvent', 'CNContact', 'CNContactFetchRequest', 'CNContactFormatter', 'CNContactPickerViewController', 'CNContactProperty', 'CNContactRelation', 'CNContactStore', 'CNContactVCardSerialization', 'CNContactViewController', 'CNContactsUserDefaults', 'CNContainer', 'CNFetchRequest', 'CNFetchResult', 'CNGroup', 'CNInstantMessageAddress', 'CNLabeledValue', 'CNMutableContact', 'CNMutableGroup', 'CNMutablePostalAddress', 'CNPhoneNumber', 'CNPostalAddress', 'CNPostalAddressFormatter', 'CNSaveRequest', 'CNSocialProfile', 'CPActionSheetTemplate', 'CPAlertAction', 'CPAlertTemplate', 'CPBarButton', 'CPButton', 'CPContact', 'CPContactCallButton', 'CPContactDirectionsButton', 'CPContactMessageButton', 'CPContactTemplate', 'CPDashboardButton', 'CPDashboardController', 'CPGridButton', 'CPGridTemplate', 'CPImageSet', 'CPInformationItem', 'CPInformationRatingItem', 'CPInformationTemplate', 'CPInterfaceController', 'CPListImageRowItem', 'CPListItem', 'CPListSection', 'CPListTemplate', 'CPManeuver', 'CPMapButton', 'CPMapTemplate', 'CPMessageComposeBarButton', 'CPMessageListItem', 'CPMessageListItemLeadingConfiguration', 'CPMessageListItemTrailingConfiguration', 'CPNavigationAlert', 'CPNavigationSession', 'CPNowPlayingAddToLibraryButton', 'CPNowPlayingButton', 'CPNowPlayingImageButton', 'CPNowPlayingMoreButton', 'CPNowPlayingPlaybackRateButton', 'CPNowPlayingRepeatButton', 'CPNowPlayingShuffleButton', 'CPNowPlayingTemplate', 'CPPointOfInterest', 'CPPointOfInterestTemplate', 'CPRouteChoice', 'CPSearchTemplate', 'CPSessionConfiguration', 'CPTabBarTemplate', 'CPTemplate', 'CPTemplateApplicationDashboardScene', 'CPTemplateApplicationScene', 'CPTextButton', 'CPTravelEstimates', 'CPTrip', 'CPTripPreviewTextConfiguration', 'CPVoiceControlState', 'CPVoiceControlTemplate', 'CPWindow', 'CSCustomAttributeKey', 'CSIndexExtensionRequestHandler', 'CSLocalizedString', 'CSPerson', 'CSSearchQuery', 'CSSearchableIndex', 'CSSearchableItem', 'CSSearchableItemAttributeSet', 'CTCall', 'CTCallCenter', 'CTCarrier', 'CTCellularData', 'CTCellularPlanProvisioning', 'CTCellularPlanProvisioningRequest', 'CTSubscriber', 'CTSubscriberInfo', 'CTTelephonyNetworkInfo', 'CXAction', 'CXAnswerCallAction', 'CXCall', 'CXCallAction', 'CXCallController', 'CXCallDirectoryExtensionContext', 
'CXCallDirectoryManager', 'CXCallDirectoryProvider', 'CXCallObserver', 'CXCallUpdate', 'CXEndCallAction', 'CXHandle', 'CXPlayDTMFCallAction', 'CXProvider', 'CXProviderConfiguration', 'CXSetGroupCallAction', 'CXSetHeldCallAction', 'CXSetMutedCallAction', 'CXStartCallAction', 'CXTransaction', 'DCAppAttestService', 'DCDevice', 'EAAccessory', 'EAAccessoryManager', 'EAGLContext', 'EAGLSharegroup', 'EASession', 'EAWiFiUnconfiguredAccessory', 'EAWiFiUnconfiguredAccessoryBrowser', 'EKAlarm', 'EKCalendar', 'EKCalendarChooser', 'EKCalendarItem', 'EKEvent', 'EKEventEditViewController', 'EKEventStore', 'EKEventViewController', 'EKObject', 'EKParticipant', 'EKRecurrenceDayOfWeek', 'EKRecurrenceEnd', 'EKRecurrenceRule', 'EKReminder', 'EKSource', 'EKStructuredLocation', 'ENExposureConfiguration', 'ENExposureDaySummary', 'ENExposureDetectionSummary', 'ENExposureInfo', 'ENExposureSummaryItem', 'ENExposureWindow', 'ENManager', 'ENScanInstance', 'ENTemporaryExposureKey', 'EntityRotationGestureRecognizer', 'EntityScaleGestureRecognizer', 'EntityTranslationGestureRecognizer', 'FPUIActionExtensionContext', 'FPUIActionExtensionViewController', 'GCColor', 'GCController', 'GCControllerAxisInput', 'GCControllerButtonInput', 'GCControllerDirectionPad', 'GCControllerElement', 'GCControllerTouchpad', 'GCDeviceBattery', 'GCDeviceCursor', 'GCDeviceHaptics', 'GCDeviceLight', 'GCDirectionalGamepad', 'GCDualShockGamepad', 'GCEventViewController', 'GCExtendedGamepad', 'GCExtendedGamepadSnapshot', 'GCGamepad', 'GCGamepadSnapshot', 'GCKeyboard', 'GCKeyboardInput', 'GCMicroGamepad', 'GCMicroGamepadSnapshot', 'GCMotion', 'GCMouse', 'GCMouseInput', 'GCPhysicalInputProfile', 'GCXboxGamepad', 'GKARC4RandomSource', 'GKAccessPoint', 'GKAchievement', 'GKAchievementChallenge', 'GKAchievementDescription', 'GKAchievementViewController', 'GKAgent', 'GKAgent2D', 'GKAgent3D', 'GKBasePlayer', 'GKBehavior', 'GKBillowNoiseSource', 'GKChallenge', 'GKChallengeEventHandler', 'GKCheckerboardNoiseSource', 'GKCircleObstacle', 'GKCloudPlayer', 'GKCoherentNoiseSource', 'GKComponent', 'GKComponentSystem', 'GKCompositeBehavior', 'GKConstantNoiseSource', 'GKCylindersNoiseSource', 'GKDecisionNode', 'GKDecisionTree', 'GKEntity', 'GKFriendRequestComposeViewController', 'GKGameCenterViewController', 'GKGameSession', 'GKGameSessionSharingViewController', 'GKGaussianDistribution', 'GKGoal', 'GKGraph', 'GKGraphNode', 'GKGraphNode2D', 'GKGraphNode3D', 'GKGridGraph', 'GKGridGraphNode', 'GKInvite', 'GKLeaderboard', 'GKLeaderboardEntry', 'GKLeaderboardScore', 'GKLeaderboardSet', 'GKLeaderboardViewController', 'GKLinearCongruentialRandomSource', 'GKLocalPlayer', 'GKMatch', 'GKMatchRequest', 'GKMatchmaker', 'GKMatchmakerViewController', 'GKMersenneTwisterRandomSource', 'GKMeshGraph', 'GKMinmaxStrategist', 'GKMonteCarloStrategist', 'GKNSPredicateRule', 'GKNoise', 'GKNoiseMap', 'GKNoiseSource', 'GKNotificationBanner', 'GKObstacle', 'GKObstacleGraph', 'GKOctree', 'GKOctreeNode', 'GKPath', 'GKPeerPickerController', 'GKPerlinNoiseSource', 'GKPlayer', 'GKPolygonObstacle', 'GKQuadtree', 'GKQuadtreeNode', 'GKRTree', 'GKRandomDistribution', 'GKRandomSource', 'GKRidgedNoiseSource', 'GKRule', 'GKRuleSystem', 'GKSCNNodeComponent', 'GKSKNodeComponent', 'GKSavedGame', 'GKScene', 'GKScore', 'GKScoreChallenge', 'GKSession', 'GKShuffledDistribution', 'GKSphereObstacle', 'GKSpheresNoiseSource', 'GKState', 'GKStateMachine', 'GKTurnBasedEventHandler', 'GKTurnBasedExchangeReply', 'GKTurnBasedMatch', 'GKTurnBasedMatchmakerViewController', 'GKTurnBasedParticipant', 'GKVoiceChat', 
'GKVoiceChatService', 'GKVoronoiNoiseSource', 'GLKBaseEffect', 'GLKEffectProperty', 'GLKEffectPropertyFog', 'GLKEffectPropertyLight', 'GLKEffectPropertyMaterial', 'GLKEffectPropertyTexture', 'GLKEffectPropertyTransform', 'GLKMesh', 'GLKMeshBuffer', 'GLKMeshBufferAllocator', 'GLKReflectionMapEffect', 'GLKSkyboxEffect', 'GLKSubmesh', 'GLKTextureInfo', 'GLKTextureLoader', 'GLKView', 'GLKViewController', 'HKActivityMoveModeObject', 'HKActivityRingView', 'HKActivitySummary', 'HKActivitySummaryQuery', 'HKActivitySummaryType', 'HKAnchoredObjectQuery', 'HKAudiogramSample', 'HKAudiogramSampleType', 'HKAudiogramSensitivityPoint', 'HKBiologicalSexObject', 'HKBloodTypeObject', 'HKCDADocument', 'HKCDADocumentSample', 'HKCategorySample', 'HKCategoryType', 'HKCharacteristicType', 'HKClinicalRecord', 'HKClinicalType', 'HKCorrelation', 'HKCorrelationQuery', 'HKCorrelationType', 'HKCumulativeQuantitySample', 'HKCumulativeQuantitySeriesSample', 'HKDeletedObject', 'HKDevice', 'HKDiscreteQuantitySample', 'HKDocumentQuery', 'HKDocumentSample', 'HKDocumentType', 'HKElectrocardiogram', 'HKElectrocardiogramQuery', 'HKElectrocardiogramType', 'HKElectrocardiogramVoltageMeasurement', 'HKFHIRResource', 'HKFHIRVersion', 'HKFitzpatrickSkinTypeObject', 'HKHealthStore', 'HKHeartbeatSeriesBuilder', 'HKHeartbeatSeriesQuery', 'HKHeartbeatSeriesSample', 'HKLiveWorkoutBuilder', 'HKLiveWorkoutDataSource', 'HKObject', 'HKObjectType', 'HKObserverQuery', 'HKQuantity', 'HKQuantitySample', 'HKQuantitySeriesSampleBuilder', 'HKQuantitySeriesSampleQuery', 'HKQuantityType', 'HKQuery', 'HKQueryAnchor', 'HKSample', 'HKSampleQuery', 'HKSampleType', 'HKSeriesBuilder', 'HKSeriesSample', 'HKSeriesType', 'HKSource', 'HKSourceQuery', 'HKSourceRevision', 'HKStatistics', 'HKStatisticsCollection', 'HKStatisticsCollectionQuery', 'HKStatisticsQuery', 'HKUnit', 'HKWheelchairUseObject', 'HKWorkout', 'HKWorkoutBuilder', 'HKWorkoutConfiguration', 'HKWorkoutEvent', 'HKWorkoutRoute', 'HKWorkoutRouteBuilder', 'HKWorkoutRouteQuery', 'HKWorkoutSession', 'HKWorkoutType', 'HMAccessControl', 'HMAccessory', 'HMAccessoryBrowser', 'HMAccessoryCategory', 'HMAccessoryOwnershipToken', 'HMAccessoryProfile', 'HMAccessorySetupPayload', 'HMAction', 'HMActionSet', 'HMAddAccessoryRequest', 'HMCalendarEvent', 'HMCameraAudioControl', 'HMCameraControl', 'HMCameraProfile', 'HMCameraSettingsControl', 'HMCameraSnapshot', 'HMCameraSnapshotControl', 'HMCameraSource', 'HMCameraStream', 'HMCameraStreamControl', 'HMCameraView', 'HMCharacteristic', 'HMCharacteristicEvent', 'HMCharacteristicMetadata', 'HMCharacteristicThresholdRangeEvent', 'HMCharacteristicWriteAction', 'HMDurationEvent', 'HMEvent', 'HMEventTrigger', 'HMHome', 'HMHomeAccessControl', 'HMHomeManager', 'HMLocationEvent', 'HMMutableCalendarEvent', 'HMMutableCharacteristicEvent', 'HMMutableCharacteristicThresholdRangeEvent', 'HMMutableDurationEvent', 'HMMutableLocationEvent', 'HMMutablePresenceEvent', 'HMMutableSignificantTimeEvent', 'HMNetworkConfigurationProfile', 'HMNumberRange', 'HMPresenceEvent', 'HMRoom', 'HMService', 'HMServiceGroup', 'HMSignificantTimeEvent', 'HMTimeEvent', 'HMTimerTrigger', 'HMTrigger', 'HMUser', 'HMZone', 'ICCameraDevice', 'ICCameraFile', 'ICCameraFolder', 'ICCameraItem', 'ICDevice', 'ICDeviceBrowser', 'ICScannerBandData', 'ICScannerDevice', 'ICScannerFeature', 'ICScannerFeatureBoolean', 'ICScannerFeatureEnumeration', 'ICScannerFeatureRange', 'ICScannerFeatureTemplate', 'ICScannerFunctionalUnit', 'ICScannerFunctionalUnitDocumentFeeder', 'ICScannerFunctionalUnitFlatbed', 
'ICScannerFunctionalUnitNegativeTransparency', 'ICScannerFunctionalUnitPositiveTransparency', 'ILCallClassificationRequest', 'ILCallCommunication', 'ILClassificationRequest', 'ILClassificationResponse', 'ILClassificationUIExtensionContext', 'ILClassificationUIExtensionViewController', 'ILCommunication', 'ILMessageClassificationRequest', 'ILMessageCommunication', 'ILMessageFilterExtension', 'ILMessageFilterExtensionContext', 'ILMessageFilterQueryRequest', 'ILMessageFilterQueryResponse', 'ILNetworkResponse', 'INAccountTypeResolutionResult', 'INActivateCarSignalIntent', 'INActivateCarSignalIntentResponse', 'INAddMediaIntent', 'INAddMediaIntentResponse', 'INAddMediaMediaDestinationResolutionResult', 'INAddMediaMediaItemResolutionResult', 'INAddTasksIntent', 'INAddTasksIntentResponse', 'INAddTasksTargetTaskListResolutionResult', 'INAddTasksTemporalEventTriggerResolutionResult', 'INAirline', 'INAirport', 'INAirportGate', 'INAppendToNoteIntent', 'INAppendToNoteIntentResponse', 'INBalanceAmount', 'INBalanceTypeResolutionResult', 'INBillDetails', 'INBillPayee', 'INBillPayeeResolutionResult', 'INBillTypeResolutionResult', 'INBoatReservation', 'INBoatTrip', 'INBookRestaurantReservationIntent', 'INBookRestaurantReservationIntentResponse', 'INBooleanResolutionResult', 'INBusReservation', 'INBusTrip', 'INCallCapabilityResolutionResult', 'INCallDestinationTypeResolutionResult', 'INCallRecord', 'INCallRecordFilter', 'INCallRecordResolutionResult', 'INCallRecordTypeOptionsResolutionResult', 'INCallRecordTypeResolutionResult', 'INCancelRideIntent', 'INCancelRideIntentResponse', 'INCancelWorkoutIntent', 'INCancelWorkoutIntentResponse', 'INCar', 'INCarAirCirculationModeResolutionResult', 'INCarAudioSourceResolutionResult', 'INCarDefrosterResolutionResult', 'INCarHeadUnit', 'INCarSeatResolutionResult', 'INCarSignalOptionsResolutionResult', 'INCreateNoteIntent', 'INCreateNoteIntentResponse', 'INCreateTaskListIntent', 'INCreateTaskListIntentResponse', 'INCurrencyAmount', 'INCurrencyAmountResolutionResult', 'INDailyRoutineRelevanceProvider', 'INDateComponentsRange', 'INDateComponentsRangeResolutionResult', 'INDateComponentsResolutionResult', 'INDateRelevanceProvider', 'INDateSearchTypeResolutionResult', 'INDefaultCardTemplate', 'INDeleteTasksIntent', 'INDeleteTasksIntentResponse', 'INDeleteTasksTaskListResolutionResult', 'INDeleteTasksTaskResolutionResult', 'INDoubleResolutionResult', 'INEndWorkoutIntent', 'INEndWorkoutIntentResponse', 'INEnergyResolutionResult', 'INEnumResolutionResult', 'INExtension', 'INFile', 'INFileResolutionResult', 'INFlight', 'INFlightReservation', 'INGetAvailableRestaurantReservationBookingDefaultsIntent', 'INGetAvailableRestaurantReservationBookingDefaultsIntentResponse', 'INGetAvailableRestaurantReservationBookingsIntent', 'INGetAvailableRestaurantReservationBookingsIntentResponse', 'INGetCarLockStatusIntent', 'INGetCarLockStatusIntentResponse', 'INGetCarPowerLevelStatusIntent', 'INGetCarPowerLevelStatusIntentResponse', 'INGetReservationDetailsIntent', 'INGetReservationDetailsIntentResponse', 'INGetRestaurantGuestIntent', 'INGetRestaurantGuestIntentResponse', 'INGetRideStatusIntent', 'INGetRideStatusIntentResponse', 'INGetUserCurrentRestaurantReservationBookingsIntent', 'INGetUserCurrentRestaurantReservationBookingsIntentResponse', 'INGetVisualCodeIntent', 'INGetVisualCodeIntentResponse', 'INImage', 'INImageNoteContent', 'INIntegerResolutionResult', 'INIntent', 'INIntentResolutionResult', 'INIntentResponse', 'INInteraction', 'INLengthResolutionResult', 'INListCarsIntent', 
'INListCarsIntentResponse', 'INListRideOptionsIntent', 'INListRideOptionsIntentResponse', 'INLocationRelevanceProvider', 'INLocationSearchTypeResolutionResult', 'INLodgingReservation', 'INMassResolutionResult', 'INMediaAffinityTypeResolutionResult', 'INMediaDestination', 'INMediaDestinationResolutionResult', 'INMediaItem', 'INMediaItemResolutionResult', 'INMediaSearch', 'INMediaUserContext', 'INMessage', 'INMessageAttributeOptionsResolutionResult', 'INMessageAttributeResolutionResult', 'INNote', 'INNoteContent', 'INNoteContentResolutionResult', 'INNoteContentTypeResolutionResult', 'INNoteResolutionResult', 'INNotebookItemTypeResolutionResult', 'INObject', 'INObjectCollection', 'INObjectResolutionResult', 'INObjectSection', 'INOutgoingMessageTypeResolutionResult', 'INParameter', 'INPauseWorkoutIntent', 'INPauseWorkoutIntentResponse', 'INPayBillIntent', 'INPayBillIntentResponse', 'INPaymentAccount', 'INPaymentAccountResolutionResult', 'INPaymentAmount', 'INPaymentAmountResolutionResult', 'INPaymentMethod', 'INPaymentMethodResolutionResult', 'INPaymentRecord', 'INPaymentStatusResolutionResult', 'INPerson', 'INPersonHandle', 'INPersonResolutionResult', 'INPlacemarkResolutionResult', 'INPlayMediaIntent', 'INPlayMediaIntentResponse', 'INPlayMediaMediaItemResolutionResult', 'INPlayMediaPlaybackSpeedResolutionResult', 'INPlaybackQueueLocationResolutionResult', 'INPlaybackRepeatModeResolutionResult', 'INPreferences', 'INPriceRange', 'INRadioTypeResolutionResult', 'INRecurrenceRule', 'INRelativeReferenceResolutionResult', 'INRelativeSettingResolutionResult', 'INRelevanceProvider', 'INRelevantShortcut', 'INRelevantShortcutStore', 'INRentalCar', 'INRentalCarReservation', 'INRequestPaymentCurrencyAmountResolutionResult', 'INRequestPaymentIntent', 'INRequestPaymentIntentResponse', 'INRequestPaymentPayerResolutionResult', 'INRequestRideIntent', 'INRequestRideIntentResponse', 'INReservation', 'INReservationAction', 'INRestaurant', 'INRestaurantGuest', 'INRestaurantGuestDisplayPreferences', 'INRestaurantGuestResolutionResult', 'INRestaurantOffer', 'INRestaurantReservation', 'INRestaurantReservationBooking', 'INRestaurantReservationUserBooking', 'INRestaurantResolutionResult', 'INResumeWorkoutIntent', 'INResumeWorkoutIntentResponse', 'INRideCompletionStatus', 'INRideDriver', 'INRideFareLineItem', 'INRideOption', 'INRidePartySizeOption', 'INRideStatus', 'INRideVehicle', 'INSaveProfileInCarIntent', 'INSaveProfileInCarIntentResponse', 'INSearchCallHistoryIntent', 'INSearchCallHistoryIntentResponse', 'INSearchForAccountsIntent', 'INSearchForAccountsIntentResponse', 'INSearchForBillsIntent', 'INSearchForBillsIntentResponse', 'INSearchForMediaIntent', 'INSearchForMediaIntentResponse', 'INSearchForMediaMediaItemResolutionResult', 'INSearchForMessagesIntent', 'INSearchForMessagesIntentResponse', 'INSearchForNotebookItemsIntent', 'INSearchForNotebookItemsIntentResponse', 'INSearchForPhotosIntent', 'INSearchForPhotosIntentResponse', 'INSeat', 'INSendMessageAttachment', 'INSendMessageIntent', 'INSendMessageIntentResponse', 'INSendMessageRecipientResolutionResult', 'INSendPaymentCurrencyAmountResolutionResult', 'INSendPaymentIntent', 'INSendPaymentIntentResponse', 'INSendPaymentPayeeResolutionResult', 'INSendRideFeedbackIntent', 'INSendRideFeedbackIntentResponse', 'INSetAudioSourceInCarIntent', 'INSetAudioSourceInCarIntentResponse', 'INSetCarLockStatusIntent', 'INSetCarLockStatusIntentResponse', 'INSetClimateSettingsInCarIntent', 'INSetClimateSettingsInCarIntentResponse', 'INSetDefrosterSettingsInCarIntent', 
'INSetDefrosterSettingsInCarIntentResponse', 'INSetMessageAttributeIntent', 'INSetMessageAttributeIntentResponse', 'INSetProfileInCarIntent', 'INSetProfileInCarIntentResponse', 'INSetRadioStationIntent', 'INSetRadioStationIntentResponse', 'INSetSeatSettingsInCarIntent', 'INSetSeatSettingsInCarIntentResponse', 'INSetTaskAttributeIntent', 'INSetTaskAttributeIntentResponse', 'INSetTaskAttributeTemporalEventTriggerResolutionResult', 'INShortcut', 'INSnoozeTasksIntent', 'INSnoozeTasksIntentResponse', 'INSnoozeTasksTaskResolutionResult', 'INSpatialEventTrigger', 'INSpatialEventTriggerResolutionResult', 'INSpeakableString', 'INSpeakableStringResolutionResult', 'INSpeedResolutionResult', 'INStartAudioCallIntent', 'INStartAudioCallIntentResponse', 'INStartCallCallCapabilityResolutionResult', 'INStartCallCallRecordToCallBackResolutionResult', 'INStartCallContactResolutionResult', 'INStartCallIntent', 'INStartCallIntentResponse', 'INStartPhotoPlaybackIntent', 'INStartPhotoPlaybackIntentResponse', 'INStartVideoCallIntent', 'INStartVideoCallIntentResponse', 'INStartWorkoutIntent', 'INStartWorkoutIntentResponse', 'INStringResolutionResult', 'INTask', 'INTaskList', 'INTaskListResolutionResult', 'INTaskPriorityResolutionResult', 'INTaskResolutionResult', 'INTaskStatusResolutionResult', 'INTemperatureResolutionResult', 'INTemporalEventTrigger', 'INTemporalEventTriggerResolutionResult', 'INTemporalEventTriggerTypeOptionsResolutionResult', 'INTermsAndConditions', 'INTextNoteContent', 'INTicketedEvent', 'INTicketedEventReservation', 'INTimeIntervalResolutionResult', 'INTrainReservation', 'INTrainTrip', 'INTransferMoneyIntent', 'INTransferMoneyIntentResponse', 'INUIAddVoiceShortcutButton', 'INUIAddVoiceShortcutViewController', 'INUIEditVoiceShortcutViewController', 'INURLResolutionResult', 'INUpcomingMediaManager', 'INUpdateMediaAffinityIntent', 'INUpdateMediaAffinityIntentResponse', 'INUpdateMediaAffinityMediaItemResolutionResult', 'INUserContext', 'INVisualCodeTypeResolutionResult', 'INVocabulary', 'INVoiceShortcut', 'INVoiceShortcutCenter', 'INVolumeResolutionResult', 'INWorkoutGoalUnitTypeResolutionResult', 'INWorkoutLocationTypeResolutionResult', 'IOSurface', 'JSContext', 'JSManagedValue', 'JSValue', 'JSVirtualMachine', 'LAContext', 'LPLinkMetadata', 'LPLinkView', 'LPMetadataProvider', 'MCAdvertiserAssistant', 'MCBrowserViewController', 'MCNearbyServiceAdvertiser', 'MCNearbyServiceBrowser', 'MCPeerID', 'MCSession', 'MDLAnimatedMatrix4x4', 'MDLAnimatedQuaternion', 'MDLAnimatedQuaternionArray', 'MDLAnimatedScalar', 'MDLAnimatedScalarArray', 'MDLAnimatedValue', 'MDLAnimatedVector2', 'MDLAnimatedVector3', 'MDLAnimatedVector3Array', 'MDLAnimatedVector4', 'MDLAnimationBindComponent', 'MDLAreaLight', 'MDLAsset', 'MDLBundleAssetResolver', 'MDLCamera', 'MDLCheckerboardTexture', 'MDLColorSwatchTexture', 'MDLLight', 'MDLLightProbe', 'MDLMaterial', 'MDLMaterialProperty', 'MDLMaterialPropertyConnection', 'MDLMaterialPropertyGraph', 'MDLMaterialPropertyNode', 'MDLMatrix4x4Array', 'MDLMesh', 'MDLMeshBufferData', 'MDLMeshBufferDataAllocator', 'MDLMeshBufferMap', 'MDLMeshBufferZoneDefault', 'MDLNoiseTexture', 'MDLNormalMapTexture', 'MDLObject', 'MDLObjectContainer', 'MDLPackedJointAnimation', 'MDLPathAssetResolver', 'MDLPhotometricLight', 'MDLPhysicallyPlausibleLight', 'MDLPhysicallyPlausibleScatteringFunction', 'MDLRelativeAssetResolver', 'MDLScatteringFunction', 'MDLSkeleton', 'MDLSkyCubeTexture', 'MDLStereoscopicCamera', 'MDLSubmesh', 'MDLSubmeshTopology', 'MDLTexture', 'MDLTextureFilter', 'MDLTextureSampler', 
'MDLTransform', 'MDLTransformMatrixOp', 'MDLTransformOrientOp', 'MDLTransformRotateOp', 'MDLTransformRotateXOp', 'MDLTransformRotateYOp', 'MDLTransformRotateZOp', 'MDLTransformScaleOp', 'MDLTransformStack', 'MDLTransformTranslateOp', 'MDLURLTexture', 'MDLVertexAttribute', 'MDLVertexAttributeData', 'MDLVertexBufferLayout', 'MDLVertexDescriptor', 'MDLVoxelArray', 'MFMailComposeViewController', 'MFMessageComposeViewController', 'MIDICIDeviceInfo', 'MIDICIDiscoveredNode', 'MIDICIDiscoveryManager', 'MIDICIProfile', 'MIDICIProfileState', 'MIDICIResponder', 'MIDICISession', 'MIDINetworkConnection', 'MIDINetworkHost', 'MIDINetworkSession', 'MKAnnotationView', 'MKCircle', 'MKCircleRenderer', 'MKCircleView', 'MKClusterAnnotation', 'MKCompassButton', 'MKDirections', 'MKDirectionsRequest', 'MKDirectionsResponse', 'MKDistanceFormatter', 'MKETAResponse', 'MKGeoJSONDecoder', 'MKGeoJSONFeature', 'MKGeodesicPolyline', 'MKGradientPolylineRenderer', 'MKLocalPointsOfInterestRequest', 'MKLocalSearch', 'MKLocalSearchCompleter', 'MKLocalSearchCompletion', 'MKLocalSearchRequest', 'MKLocalSearchResponse', 'MKMapCamera', 'MKMapCameraBoundary', 'MKMapCameraZoomRange', 'MKMapItem', 'MKMapSnapshot', 'MKMapSnapshotOptions', 'MKMapSnapshotter', 'MKMapView', 'MKMarkerAnnotationView', 'MKMultiPoint', 'MKMultiPolygon', 'MKMultiPolygonRenderer', 'MKMultiPolyline', 'MKMultiPolylineRenderer', 'MKOverlayPathRenderer', 'MKOverlayPathView', 'MKOverlayRenderer', 'MKOverlayView', 'MKPinAnnotationView', 'MKPitchControl', 'MKPlacemark', 'MKPointAnnotation', 'MKPointOfInterestFilter', 'MKPolygon', 'MKPolygonRenderer', 'MKPolygonView', 'MKPolyline', 'MKPolylineRenderer', 'MKPolylineView', 'MKReverseGeocoder', 'MKRoute', 'MKRouteStep', 'MKScaleView', 'MKShape', 'MKTileOverlay', 'MKTileOverlayRenderer', 'MKUserLocation', 'MKUserLocationView', 'MKUserTrackingBarButtonItem', 'MKUserTrackingButton', 'MKZoomControl', 'MLArrayBatchProvider', 'MLCActivationDescriptor', 'MLCActivationLayer', 'MLCArithmeticLayer', 'MLCBatchNormalizationLayer', 'MLCConcatenationLayer', 'MLCConvolutionDescriptor', 'MLCConvolutionLayer', 'MLCDevice', 'MLCDropoutLayer', 'MLCEmbeddingDescriptor', 'MLCEmbeddingLayer', 'MLCFullyConnectedLayer', 'MLCGramMatrixLayer', 'MLCGraph', 'MLCGroupNormalizationLayer', 'MLCInferenceGraph', 'MLCInstanceNormalizationLayer', 'MLCLSTMDescriptor', 'MLCLSTMLayer', 'MLCLayer', 'MLCLayerNormalizationLayer', 'MLCLossDescriptor', 'MLCLossLayer', 'MLCMatMulDescriptor', 'MLCMatMulLayer', 'MLCMultiheadAttentionDescriptor', 'MLCMultiheadAttentionLayer', 'MLCPaddingLayer', 'MLCPoolingDescriptor', 'MLCPoolingLayer', 'MLCReductionLayer', 'MLCReshapeLayer', 'MLCSliceLayer', 'MLCSoftmaxLayer', 'MLCSplitLayer', 'MLCTensor', 'MLCTensorData', 'MLCTensorDescriptor', 'MLCTensorOptimizerDeviceData', 'MLCTensorParameter', 'MLCTrainingGraph', 'MLCTransposeLayer', 'MLCUpsampleLayer', 'MLCYOLOLossDescriptor', 'MLCYOLOLossLayer', 'MLDictionaryConstraint', 'MLDictionaryFeatureProvider', 'MLFeatureDescription', 'MLFeatureValue', 'MLImageConstraint', 'MLImageSize', 'MLImageSizeConstraint', 'MLKey', 'MLMetricKey', 'MLModel', 'MLModelCollection', 'MLModelCollectionEntry', 'MLModelConfiguration', 'MLModelDescription', 'MLMultiArray', 'MLMultiArrayConstraint', 'MLMultiArrayShapeConstraint', 'MLNumericConstraint', 'MLParameterDescription', 'MLParameterKey', 'MLPredictionOptions', 'MLSequence', 'MLSequenceConstraint', 'MLTask', 'MLUpdateContext', 'MLUpdateProgressHandlers', 'MLUpdateTask', 'MPChangeLanguageOptionCommandEvent', 'MPChangePlaybackPositionCommand', 
'MPChangePlaybackPositionCommandEvent', 'MPChangePlaybackRateCommand', 'MPChangePlaybackRateCommandEvent', 'MPChangeRepeatModeCommand', 'MPChangeRepeatModeCommandEvent', 'MPChangeShuffleModeCommand', 'MPChangeShuffleModeCommandEvent', 'MPContentItem', 'MPFeedbackCommand', 'MPFeedbackCommandEvent', 'MPMediaEntity', 'MPMediaItem', 'MPMediaItemArtwork', 'MPMediaItemCollection', 'MPMediaLibrary', 'MPMediaPickerController', 'MPMediaPlaylist', 'MPMediaPlaylistCreationMetadata', 'MPMediaPredicate', 'MPMediaPropertyPredicate', 'MPMediaQuery', 'MPMediaQuerySection', 'MPMovieAccessLog', 'MPMovieAccessLogEvent', 'MPMovieErrorLog', 'MPMovieErrorLogEvent', 'MPMoviePlayerController', 'MPMoviePlayerViewController', 'MPMusicPlayerApplicationController', 'MPMusicPlayerController', 'MPMusicPlayerControllerMutableQueue', 'MPMusicPlayerControllerQueue', 'MPMusicPlayerMediaItemQueueDescriptor', 'MPMusicPlayerPlayParameters', 'MPMusicPlayerPlayParametersQueueDescriptor', 'MPMusicPlayerQueueDescriptor', 'MPMusicPlayerStoreQueueDescriptor', 'MPNowPlayingInfoCenter', 'MPNowPlayingInfoLanguageOption', 'MPNowPlayingInfoLanguageOptionGroup', 'MPNowPlayingSession', 'MPPlayableContentManager', 'MPPlayableContentManagerContext', 'MPRatingCommand', 'MPRatingCommandEvent', 'MPRemoteCommand', 'MPRemoteCommandCenter', 'MPRemoteCommandEvent', 'MPSGraph', 'MPSGraphConvolution2DOpDescriptor', 'MPSGraphDepthwiseConvolution2DOpDescriptor', 'MPSGraphDevice', 'MPSGraphExecutionDescriptor', 'MPSGraphOperation', 'MPSGraphPooling2DOpDescriptor', 'MPSGraphShapedType', 'MPSGraphTensor', 'MPSGraphTensorData', 'MPSGraphVariableOp', 'MPSeekCommandEvent', 'MPSkipIntervalCommand', 'MPSkipIntervalCommandEvent', 'MPTimedMetadata', 'MPVolumeView', 'MSConversation', 'MSMessage', 'MSMessageLayout', 'MSMessageLiveLayout', 'MSMessageTemplateLayout', 'MSMessagesAppViewController', 'MSServiceAccount', 'MSSession', 'MSSetupSession', 'MSSticker', 'MSStickerBrowserView', 'MSStickerBrowserViewController', 'MSStickerView', 'MTKMesh', 'MTKMeshBuffer', 'MTKMeshBufferAllocator', 'MTKSubmesh', 'MTKTextureLoader', 'MTKView', 'MTLAccelerationStructureBoundingBoxGeometryDescriptor', 'MTLAccelerationStructureDescriptor', 'MTLAccelerationStructureGeometryDescriptor', 'MTLAccelerationStructureTriangleGeometryDescriptor', 'MTLArgument', 'MTLArgumentDescriptor', 'MTLArrayType', 'MTLAttribute', 'MTLAttributeDescriptor', 'MTLAttributeDescriptorArray', 'MTLBinaryArchiveDescriptor', 'MTLBlitPassDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptor', 'MTLBlitPassSampleBufferAttachmentDescriptorArray', 'MTLBufferLayoutDescriptor', 'MTLBufferLayoutDescriptorArray', 'MTLCaptureDescriptor', 'MTLCaptureManager', 'MTLCommandBufferDescriptor', 'MTLCompileOptions', 'MTLComputePassDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptor', 'MTLComputePassSampleBufferAttachmentDescriptorArray', 'MTLComputePipelineDescriptor', 'MTLComputePipelineReflection', 'MTLCounterSampleBufferDescriptor', 'MTLDepthStencilDescriptor', 'MTLFunctionConstant', 'MTLFunctionConstantValues', 'MTLFunctionDescriptor', 'MTLHeapDescriptor', 'MTLIndirectCommandBufferDescriptor', 'MTLInstanceAccelerationStructureDescriptor', 'MTLIntersectionFunctionDescriptor', 'MTLIntersectionFunctionTableDescriptor', 'MTLLinkedFunctions', 'MTLPipelineBufferDescriptor', 'MTLPipelineBufferDescriptorArray', 'MTLPointerType', 'MTLPrimitiveAccelerationStructureDescriptor', 'MTLRasterizationRateLayerArray', 'MTLRasterizationRateLayerDescriptor', 'MTLRasterizationRateMapDescriptor', 'MTLRasterizationRateSampleArray', 
'MTLRenderPassAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptor', 'MTLRenderPassColorAttachmentDescriptorArray', 'MTLRenderPassDepthAttachmentDescriptor', 'MTLRenderPassDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptor', 'MTLRenderPassSampleBufferAttachmentDescriptorArray', 'MTLRenderPassStencilAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptor', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MTLRenderPipelineDescriptor', 'MTLRenderPipelineReflection', 'MTLResourceStatePassDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptor', 'MTLResourceStatePassSampleBufferAttachmentDescriptorArray', 'MTLSamplerDescriptor', 'MTLSharedEventHandle', 'MTLSharedEventListener', 'MTLSharedTextureHandle', 'MTLStageInputOutputDescriptor', 'MTLStencilDescriptor', 'MTLStructMember', 'MTLStructType', 'MTLTextureDescriptor', 'MTLTextureReferenceType', 'MTLTileRenderPipelineColorAttachmentDescriptor', 'MTLTileRenderPipelineColorAttachmentDescriptorArray', 'MTLTileRenderPipelineDescriptor', 'MTLType', 'MTLVertexAttribute', 'MTLVertexAttributeDescriptor', 'MTLVertexAttributeDescriptorArray', 'MTLVertexBufferLayoutDescriptor', 'MTLVertexBufferLayoutDescriptorArray', 'MTLVertexDescriptor', 'MTLVisibleFunctionTableDescriptor', 'MXAnimationMetric', 'MXAppExitMetric', 'MXAppLaunchMetric', 'MXAppResponsivenessMetric', 'MXAppRunTimeMetric', 'MXAverage', 'MXBackgroundExitData', 'MXCPUExceptionDiagnostic', 'MXCPUMetric', 'MXCallStackTree', 'MXCellularConditionMetric', 'MXCrashDiagnostic', 'MXDiagnostic', 'MXDiagnosticPayload', 'MXDiskIOMetric', 'MXDiskWriteExceptionDiagnostic', 'MXDisplayMetric', 'MXForegroundExitData', 'MXGPUMetric', 'MXHangDiagnostic', 'MXHistogram', 'MXHistogramBucket', 'MXLocationActivityMetric', 'MXMemoryMetric', 'MXMetaData', 'MXMetric', 'MXMetricManager', 'MXMetricPayload', 'MXNetworkTransferMetric', 'MXSignpostIntervalData', 'MXSignpostMetric', 'MXUnitAveragePixelLuminance', 'MXUnitSignalBars', 'MyClass', 'NCWidgetController', 'NEAppProxyFlow', 'NEAppProxyProvider', 'NEAppProxyProviderManager', 'NEAppProxyTCPFlow', 'NEAppProxyUDPFlow', 'NEAppPushManager', 'NEAppPushProvider', 'NEAppRule', 'NEDNSOverHTTPSSettings', 'NEDNSOverTLSSettings', 'NEDNSProxyManager', 'NEDNSProxyProvider', 'NEDNSProxyProviderProtocol', 'NEDNSSettings', 'NEDNSSettingsManager', 'NEEvaluateConnectionRule', 'NEFilterBrowserFlow', 'NEFilterControlProvider', 'NEFilterControlVerdict', 'NEFilterDataProvider', 'NEFilterDataVerdict', 'NEFilterFlow', 'NEFilterManager', 'NEFilterNewFlowVerdict', 'NEFilterPacketContext', 'NEFilterPacketProvider', 'NEFilterProvider', 'NEFilterProviderConfiguration', 'NEFilterRemediationVerdict', 'NEFilterReport', 'NEFilterRule', 'NEFilterSettings', 'NEFilterSocketFlow', 'NEFilterVerdict', 'NEFlowMetaData', 'NEHotspotConfiguration', 'NEHotspotConfigurationManager', 'NEHotspotEAPSettings', 'NEHotspotHS20Settings', 'NEHotspotHelper', 'NEHotspotHelperCommand', 'NEHotspotHelperResponse', 'NEHotspotNetwork', 'NEIPv4Route', 'NEIPv4Settings', 'NEIPv6Route', 'NEIPv6Settings', 'NENetworkRule', 'NEOnDemandRule', 'NEOnDemandRuleConnect', 'NEOnDemandRuleDisconnect', 'NEOnDemandRuleEvaluateConnection', 'NEOnDemandRuleIgnore', 'NEPacket', 'NEPacketTunnelFlow', 'NEPacketTunnelNetworkSettings', 'NEPacketTunnelProvider', 'NEProvider', 'NEProxyServer', 'NEProxySettings', 'NETransparentProxyManager', 'NETransparentProxyNetworkSettings', 'NETransparentProxyProvider', 'NETunnelNetworkSettings', 'NETunnelProvider', 'NETunnelProviderManager', 'NETunnelProviderProtocol', 
'NETunnelProviderSession', 'NEVPNConnection', 'NEVPNIKEv2SecurityAssociationParameters', 'NEVPNManager', 'NEVPNProtocol', 'NEVPNProtocolIKEv2', 'NEVPNProtocolIPSec', 'NFCISO15693CustomCommandConfiguration', 'NFCISO15693ReadMultipleBlocksConfiguration', 'NFCISO15693ReaderSession', 'NFCISO7816APDU', 'NFCNDEFMessage', 'NFCNDEFPayload', 'NFCNDEFReaderSession', 'NFCReaderSession', 'NFCTagCommandConfiguration', 'NFCTagReaderSession', 'NFCVASCommandConfiguration', 'NFCVASReaderSession', 'NFCVASResponse', 'NIConfiguration', 'NIDiscoveryToken', 'NINearbyObject', 'NINearbyPeerConfiguration', 'NISession', 'NKAssetDownload', 'NKIssue', 'NKLibrary', 'NLEmbedding', 'NLGazetteer', 'NLLanguageRecognizer', 'NLModel', 'NLModelConfiguration', 'NLTagger', 'NLTokenizer', 'NSArray', 'NSAssertionHandler', 'NSAsynchronousFetchRequest', 'NSAsynchronousFetchResult', 'NSAtomicStore', 'NSAtomicStoreCacheNode', 'NSAttributeDescription', 'NSAttributedString', 'NSAutoreleasePool', 'NSBatchDeleteRequest', 'NSBatchDeleteResult', 'NSBatchInsertRequest', 'NSBatchInsertResult', 'NSBatchUpdateRequest', 'NSBatchUpdateResult', 'NSBlockOperation', 'NSBundle', 'NSBundleResourceRequest', 'NSByteCountFormatter', 'NSCache', 'NSCachedURLResponse', 'NSCalendar', 'NSCharacterSet', 'NSCoder', 'NSCollectionLayoutAnchor', 'NSCollectionLayoutBoundarySupplementaryItem', 'NSCollectionLayoutDecorationItem', 'NSCollectionLayoutDimension', 'NSCollectionLayoutEdgeSpacing', 'NSCollectionLayoutGroup', 'NSCollectionLayoutGroupCustomItem', 'NSCollectionLayoutItem', 'NSCollectionLayoutSection', 'NSCollectionLayoutSize', 'NSCollectionLayoutSpacing', 'NSCollectionLayoutSupplementaryItem', 'NSComparisonPredicate', 'NSCompoundPredicate', 'NSCondition', 'NSConditionLock', 'NSConstantString', 'NSConstraintConflict', 'NSCoreDataCoreSpotlightDelegate', 'NSCountedSet', 'NSData', 'NSDataAsset', 'NSDataDetector', 'NSDate', 'NSDateComponents', 'NSDateComponentsFormatter', 'NSDateFormatter', 'NSDateInterval', 'NSDateIntervalFormatter', 'NSDecimalNumber', 'NSDecimalNumberHandler', 'NSDerivedAttributeDescription', 'NSDictionary', 'NSDiffableDataSourceSectionSnapshot', 'NSDiffableDataSourceSectionTransaction', 'NSDiffableDataSourceSnapshot', 'NSDiffableDataSourceTransaction', 'NSDimension', 'NSDirectoryEnumerator', 'NSEnergyFormatter', 'NSEntityDescription', 'NSEntityMapping', 'NSEntityMigrationPolicy', 'NSEnumerator', 'NSError', 'NSEvent', 'NSException', 'NSExpression', 'NSExpressionDescription', 'NSExtensionContext', 'NSExtensionItem', 'NSFetchIndexDescription', 'NSFetchIndexElementDescription', 'NSFetchRequest', 'NSFetchRequestExpression', 'NSFetchedPropertyDescription', 'NSFetchedResultsController', 'NSFileAccessIntent', 'NSFileCoordinator', 'NSFileHandle', 'NSFileManager', 'NSFileProviderDomain', 'NSFileProviderExtension', 'NSFileProviderManager', 'NSFileProviderService', 'NSFileSecurity', 'NSFileVersion', 'NSFileWrapper', 'NSFormatter', 'NSHTTPCookie', 'NSHTTPCookieStorage', 'NSHTTPURLResponse', 'NSHashTable', 'NSISO8601DateFormatter', 'NSIncrementalStore', 'NSIncrementalStoreNode', 'NSIndexPath', 'NSIndexSet', 'NSInputStream', 'NSInvocation', 'NSInvocationOperation', 'NSItemProvider', 'NSJSONSerialization', 'NSKeyedArchiver', 'NSKeyedUnarchiver', 'NSLayoutAnchor', 'NSLayoutConstraint', 'NSLayoutDimension', 'NSLayoutManager', 'NSLayoutXAxisAnchor', 'NSLayoutYAxisAnchor', 'NSLengthFormatter', 'NSLinguisticTagger', 'NSListFormatter', 'NSLocale', 'NSLock', 'NSMachPort', 'NSManagedObject', 'NSManagedObjectContext', 'NSManagedObjectID', 'NSManagedObjectModel', 
'NSMapTable', 'NSMappingModel', 'NSMassFormatter', 'NSMeasurement', 'NSMeasurementFormatter', 'NSMenuToolbarItem', 'NSMergeConflict', 'NSMergePolicy', 'NSMessagePort', 'NSMetadataItem', 'NSMetadataQuery', 'NSMetadataQueryAttributeValueTuple', 'NSMetadataQueryResultGroup', 'NSMethodSignature', 'NSMigrationManager', 'NSMutableArray', 'NSMutableAttributedString', 'NSMutableCharacterSet', 'NSMutableData', 'NSMutableDictionary', 'NSMutableIndexSet', 'NSMutableOrderedSet', 'NSMutableParagraphStyle', 'NSMutableSet', 'NSMutableString', 'NSMutableURLRequest', 'NSNetService', 'NSNetServiceBrowser', 'NSNotification', 'NSNotificationCenter', 'NSNotificationQueue', 'NSNull', 'NSNumber', 'NSNumberFormatter', 'NSObject', 'NSOperation', 'NSOperationQueue', 'NSOrderedCollectionChange', 'NSOrderedCollectionDifference', 'NSOrderedSet', 'NSOrthography', 'NSOutputStream', 'NSParagraphStyle', 'NSPersistentCloudKitContainer', 'NSPersistentCloudKitContainerEvent', 'NSPersistentCloudKitContainerEventRequest', 'NSPersistentCloudKitContainerEventResult', 'NSPersistentCloudKitContainerOptions', 'NSPersistentContainer', 'NSPersistentHistoryChange', 'NSPersistentHistoryChangeRequest', 'NSPersistentHistoryResult', 'NSPersistentHistoryToken', 'NSPersistentHistoryTransaction', 'NSPersistentStore', 'NSPersistentStoreAsynchronousResult', 'NSPersistentStoreCoordinator', 'NSPersistentStoreDescription', 'NSPersistentStoreRequest', 'NSPersistentStoreResult', 'NSPersonNameComponents', 'NSPersonNameComponentsFormatter', 'NSPipe', 'NSPointerArray', 'NSPointerFunctions', 'NSPort', 'NSPredicate', 'NSProcessInfo', 'NSProgress', 'NSPropertyDescription', 'NSPropertyListSerialization', 'NSPropertyMapping', 'NSProxy', 'NSPurgeableData', 'NSQueryGenerationToken', 'NSRecursiveLock', 'NSRegularExpression', 'NSRelationshipDescription', 'NSRelativeDateTimeFormatter', 'NSRunLoop', 'NSSaveChangesRequest', 'NSScanner', 'NSSecureUnarchiveFromDataTransformer', 'NSSet', 'NSShadow', 'NSSharingServicePickerToolbarItem', 'NSSharingServicePickerTouchBarItem', 'NSSimpleCString', 'NSSocketPort', 'NSSortDescriptor', 'NSStream', 'NSString', 'NSStringDrawingContext', 'NSTextAttachment', 'NSTextCheckingResult', 'NSTextContainer', 'NSTextStorage', 'NSTextTab', 'NSThread', 'NSTimeZone', 'NSTimer', 'NSToolbarItem', 'NSURL', 'NSURLAuthenticationChallenge', 'NSURLCache', 'NSURLComponents', 'NSURLConnection', 'NSURLCredential', 'NSURLCredentialStorage', 'NSURLProtectionSpace', 'NSURLProtocol', 'NSURLQueryItem', 'NSURLRequest', 'NSURLResponse', 'NSURLSession', 'NSURLSessionConfiguration', 'NSURLSessionDataTask', 'NSURLSessionDownloadTask', 'NSURLSessionStreamTask', 'NSURLSessionTask', 'NSURLSessionTaskMetrics', 'NSURLSessionTaskTransactionMetrics', 'NSURLSessionUploadTask', 'NSURLSessionWebSocketMessage', 'NSURLSessionWebSocketTask', 'NSUUID', 'NSUbiquitousKeyValueStore', 'NSUndoManager', 'NSUnit', 'NSUnitAcceleration', 'NSUnitAngle', 'NSUnitArea', 'NSUnitConcentrationMass', 'NSUnitConverter', 'NSUnitConverterLinear', 'NSUnitDispersion', 'NSUnitDuration', 'NSUnitElectricCharge', 'NSUnitElectricCurrent', 'NSUnitElectricPotentialDifference', 'NSUnitElectricResistance', 'NSUnitEnergy', 'NSUnitFrequency', 'NSUnitFuelEfficiency', 'NSUnitIlluminance', 'NSUnitInformationStorage', 'NSUnitLength', 'NSUnitMass', 'NSUnitPower', 'NSUnitPressure', 'NSUnitSpeed', 'NSUnitTemperature', 'NSUnitVolume', 'NSUserActivity', 'NSUserDefaults', 'NSValue', 'NSValueTransformer', 'NSXMLParser', 'NSXPCCoder', 'NSXPCConnection', 'NSXPCInterface', 'NSXPCListener', 'NSXPCListenerEndpoint', 
'NWBonjourServiceEndpoint', 'NWEndpoint', 'NWHostEndpoint', 'NWPath', 'NWTCPConnection', 'NWTLSParameters', 'NWUDPSession', 'OSLogEntry', 'OSLogEntryActivity', 'OSLogEntryBoundary', 'OSLogEntryLog', 'OSLogEntrySignpost', 'OSLogEnumerator', 'OSLogMessageComponent', 'OSLogPosition', 'OSLogStore', 'PDFAction', 'PDFActionGoTo', 'PDFActionNamed', 'PDFActionRemoteGoTo', 'PDFActionResetForm', 'PDFActionURL', 'PDFAnnotation', 'PDFAppearanceCharacteristics', 'PDFBorder', 'PDFDestination', 'PDFDocument', 'PDFOutline', 'PDFPage', 'PDFSelection', 'PDFThumbnailView', 'PDFView', 'PHAdjustmentData', 'PHAsset', 'PHAssetChangeRequest', 'PHAssetCollection', 'PHAssetCollectionChangeRequest', 'PHAssetCreationRequest', 'PHAssetResource', 'PHAssetResourceCreationOptions', 'PHAssetResourceManager', 'PHAssetResourceRequestOptions', 'PHCachingImageManager', 'PHChange', 'PHChangeRequest', 'PHCloudIdentifier', 'PHCollection', 'PHCollectionList', 'PHCollectionListChangeRequest', 'PHContentEditingInput', 'PHContentEditingInputRequestOptions', 'PHContentEditingOutput', 'PHEditingExtensionContext', 'PHFetchOptions', 'PHFetchResult', 'PHFetchResultChangeDetails', 'PHImageManager', 'PHImageRequestOptions', 'PHLivePhoto', 'PHLivePhotoEditingContext', 'PHLivePhotoRequestOptions', 'PHLivePhotoView', 'PHObject', 'PHObjectChangeDetails', 'PHObjectPlaceholder', 'PHPhotoLibrary', 'PHPickerConfiguration', 'PHPickerFilter', 'PHPickerResult', 'PHPickerViewController', 'PHProject', 'PHProjectChangeRequest', 'PHVideoRequestOptions', 'PKAddCarKeyPassConfiguration', 'PKAddPassButton', 'PKAddPassesViewController', 'PKAddPaymentPassRequest', 'PKAddPaymentPassRequestConfiguration', 'PKAddPaymentPassViewController', 'PKAddSecureElementPassConfiguration', 'PKAddSecureElementPassViewController', 'PKAddShareablePassConfiguration', 'PKBarcodeEventConfigurationRequest', 'PKBarcodeEventMetadataRequest', 'PKBarcodeEventMetadataResponse', 'PKBarcodeEventSignatureRequest', 'PKBarcodeEventSignatureResponse', 'PKCanvasView', 'PKContact', 'PKDisbursementAuthorizationController', 'PKDisbursementRequest', 'PKDisbursementVoucher', 'PKDrawing', 'PKEraserTool', 'PKFloatRange', 'PKInk', 'PKInkingTool', 'PKIssuerProvisioningExtensionHandler', 'PKIssuerProvisioningExtensionPassEntry', 'PKIssuerProvisioningExtensionPaymentPassEntry', 'PKIssuerProvisioningExtensionStatus', 'PKLabeledValue', 'PKLassoTool', 'PKObject', 'PKPass', 'PKPassLibrary', 'PKPayment', 'PKPaymentAuthorizationController', 'PKPaymentAuthorizationResult', 'PKPaymentAuthorizationViewController', 'PKPaymentButton', 'PKPaymentInformationEventExtension', 'PKPaymentMerchantSession', 'PKPaymentMethod', 'PKPaymentPass', 'PKPaymentRequest', 'PKPaymentRequestMerchantSessionUpdate', 'PKPaymentRequestPaymentMethodUpdate', 'PKPaymentRequestShippingContactUpdate', 'PKPaymentRequestShippingMethodUpdate', 'PKPaymentRequestUpdate', 'PKPaymentSummaryItem', 'PKPaymentToken', 'PKPushCredentials', 'PKPushPayload', 'PKPushRegistry', 'PKSecureElementPass', 'PKShareablePassMetadata', 'PKShippingMethod', 'PKStroke', 'PKStrokePath', 'PKStrokePoint', 'PKSuicaPassProperties', 'PKTool', 'PKToolPicker', 'PKTransitPassProperties', 'QLFileThumbnailRequest', 'QLPreviewController', 'QLThumbnailGenerationRequest', 'QLThumbnailGenerator', 'QLThumbnailProvider', 'QLThumbnailReply', 'QLThumbnailRepresentation', 'RPBroadcastActivityController', 'RPBroadcastActivityViewController', 'RPBroadcastConfiguration', 'RPBroadcastController', 'RPBroadcastHandler', 'RPBroadcastMP4ClipHandler', 'RPBroadcastSampleHandler', 
'RPPreviewViewController', 'RPScreenRecorder', 'RPSystemBroadcastPickerView', 'SCNAccelerationConstraint', 'SCNAction', 'SCNAnimation', 'SCNAnimationEvent', 'SCNAnimationPlayer', 'SCNAudioPlayer', 'SCNAudioSource', 'SCNAvoidOccluderConstraint', 'SCNBillboardConstraint', 'SCNBox', 'SCNCamera', 'SCNCameraController', 'SCNCapsule', 'SCNCone', 'SCNConstraint', 'SCNCylinder', 'SCNDistanceConstraint', 'SCNFloor', 'SCNGeometry', 'SCNGeometryElement', 'SCNGeometrySource', 'SCNGeometryTessellator', 'SCNHitTestResult', 'SCNIKConstraint', 'SCNLevelOfDetail', 'SCNLight', 'SCNLookAtConstraint', 'SCNMaterial', 'SCNMaterialProperty', 'SCNMorpher', 'SCNNode', 'SCNParticlePropertyController', 'SCNParticleSystem', 'SCNPhysicsBallSocketJoint', 'SCNPhysicsBehavior', 'SCNPhysicsBody', 'SCNPhysicsConeTwistJoint', 'SCNPhysicsContact', 'SCNPhysicsField', 'SCNPhysicsHingeJoint', 'SCNPhysicsShape', 'SCNPhysicsSliderJoint', 'SCNPhysicsVehicle', 'SCNPhysicsVehicleWheel', 'SCNPhysicsWorld', 'SCNPlane', 'SCNProgram', 'SCNPyramid', 'SCNReferenceNode', 'SCNRenderer', 'SCNReplicatorConstraint', 'SCNScene', 'SCNSceneSource', 'SCNShape', 'SCNSkinner', 'SCNSliderConstraint', 'SCNSphere', 'SCNTechnique', 'SCNText', 'SCNTimingFunction', 'SCNTorus', 'SCNTransaction', 'SCNTransformConstraint', 'SCNTube', 'SCNView', 'SFAcousticFeature', 'SFAuthenticationSession', 'SFContentBlockerManager', 'SFContentBlockerState', 'SFSafariViewController', 'SFSafariViewControllerConfiguration', 'SFSpeechAudioBufferRecognitionRequest', 'SFSpeechRecognitionRequest', 'SFSpeechRecognitionResult', 'SFSpeechRecognitionTask', 'SFSpeechRecognizer', 'SFSpeechURLRecognitionRequest', 'SFTranscription', 'SFTranscriptionSegment', 'SFVoiceAnalytics', 'SK3DNode', 'SKAction', 'SKAdNetwork', 'SKArcadeService', 'SKAttribute', 'SKAttributeValue', 'SKAudioNode', 'SKCameraNode', 'SKCloudServiceController', 'SKCloudServiceSetupViewController', 'SKConstraint', 'SKCropNode', 'SKDownload', 'SKEffectNode', 'SKEmitterNode', 'SKFieldNode', 'SKKeyframeSequence', 'SKLabelNode', 'SKLightNode', 'SKMutablePayment', 'SKMutableTexture', 'SKNode', 'SKOverlay', 'SKOverlayAppClipConfiguration', 'SKOverlayAppConfiguration', 'SKOverlayConfiguration', 'SKOverlayTransitionContext', 'SKPayment', 'SKPaymentDiscount', 'SKPaymentQueue', 'SKPaymentTransaction', 'SKPhysicsBody', 'SKPhysicsContact', 'SKPhysicsJoint', 'SKPhysicsJointFixed', 'SKPhysicsJointLimit', 'SKPhysicsJointPin', 'SKPhysicsJointSliding', 'SKPhysicsJointSpring', 'SKPhysicsWorld', 'SKProduct', 'SKProductDiscount', 'SKProductStorePromotionController', 'SKProductSubscriptionPeriod', 'SKProductsRequest', 'SKProductsResponse', 'SKRange', 'SKReachConstraints', 'SKReceiptRefreshRequest', 'SKReferenceNode', 'SKRegion', 'SKRenderer', 'SKRequest', 'SKScene', 'SKShader', 'SKShapeNode', 'SKSpriteNode', 'SKStoreProductViewController', 'SKStoreReviewController', 'SKStorefront', 'SKTexture', 'SKTextureAtlas', 'SKTileDefinition', 'SKTileGroup', 'SKTileGroupRule', 'SKTileMapNode', 'SKTileSet', 'SKTransformNode', 'SKTransition', 'SKUniform', 'SKVideoNode', 'SKView', 'SKWarpGeometry', 'SKWarpGeometryGrid', 'SLComposeServiceViewController', 'SLComposeSheetConfigurationItem', 'SLComposeViewController', 'SLRequest', 'SNAudioFileAnalyzer', 'SNAudioStreamAnalyzer', 'SNClassification', 'SNClassificationResult', 'SNClassifySoundRequest', 'SRAmbientLightSample', 'SRApplicationUsage', 'SRDeletionRecord', 'SRDevice', 'SRDeviceUsageReport', 'SRFetchRequest', 'SRFetchResult', 'SRKeyboardMetrics', 'SRKeyboardProbabilityMetric', 'SRMessagesUsageReport', 
'SRNotificationUsage', 'SRPhoneUsageReport', 'SRSensorReader', 'SRVisit', 'SRWebUsage', 'SRWristDetection', 'SSReadingList', 'STScreenTimeConfiguration', 'STScreenTimeConfigurationObserver', 'STWebHistory', 'STWebpageController', 'TKBERTLVRecord', 'TKCompactTLVRecord', 'TKSimpleTLVRecord', 'TKSmartCard', 'TKSmartCardATR', 'TKSmartCardATRInterfaceGroup', 'TKSmartCardPINFormat', 'TKSmartCardSlot', 'TKSmartCardSlotManager', 'TKSmartCardToken', 'TKSmartCardTokenDriver', 'TKSmartCardTokenSession', 'TKSmartCardUserInteraction', 'TKSmartCardUserInteractionForPINOperation', 'TKSmartCardUserInteractionForSecurePINChange', 'TKSmartCardUserInteractionForSecurePINVerification', 'TKTLVRecord', 'TKToken', 'TKTokenAuthOperation', 'TKTokenConfiguration', 'TKTokenDriver', 'TKTokenDriverConfiguration', 'TKTokenKeyAlgorithm', 'TKTokenKeyExchangeParameters', 'TKTokenKeychainCertificate', 'TKTokenKeychainContents', 'TKTokenKeychainItem', 'TKTokenKeychainKey', 'TKTokenPasswordAuthOperation', 'TKTokenSession', 'TKTokenSmartCardPINAuthOperation', 'TKTokenWatcher', 'TWRequest', 'TWTweetComposeViewController', 'UIAcceleration', 'UIAccelerometer', 'UIAccessibilityCustomAction', 'UIAccessibilityCustomRotor', 'UIAccessibilityCustomRotorItemResult', 'UIAccessibilityCustomRotorSearchPredicate', 'UIAccessibilityElement', 'UIAccessibilityLocationDescriptor', 'UIAction', 'UIActionSheet', 'UIActivity', 'UIActivityIndicatorView', 'UIActivityItemProvider', 'UIActivityItemsConfiguration', 'UIActivityViewController', 'UIAlertAction', 'UIAlertController', 'UIAlertView', 'UIApplication', 'UIApplicationShortcutIcon', 'UIApplicationShortcutItem', 'UIAttachmentBehavior', 'UIBackgroundConfiguration', 'UIBarAppearance', 'UIBarButtonItem', 'UIBarButtonItemAppearance', 'UIBarButtonItemGroup', 'UIBarButtonItemStateAppearance', 'UIBarItem', 'UIBezierPath', 'UIBlurEffect', 'UIButton', 'UICellAccessory', 'UICellAccessoryCheckmark', 'UICellAccessoryCustomView', 'UICellAccessoryDelete', 'UICellAccessoryDisclosureIndicator', 'UICellAccessoryInsert', 'UICellAccessoryLabel', 'UICellAccessoryMultiselect', 'UICellAccessoryOutlineDisclosure', 'UICellAccessoryReorder', 'UICellConfigurationState', 'UICloudSharingController', 'UICollectionLayoutListConfiguration', 'UICollectionReusableView', 'UICollectionView', 'UICollectionViewCell', 'UICollectionViewCellRegistration', 'UICollectionViewCompositionalLayout', 'UICollectionViewCompositionalLayoutConfiguration', 'UICollectionViewController', 'UICollectionViewDiffableDataSource', 'UICollectionViewDiffableDataSourceReorderingHandlers', 'UICollectionViewDiffableDataSourceSectionSnapshotHandlers', 'UICollectionViewDropPlaceholder', 'UICollectionViewDropProposal', 'UICollectionViewFlowLayout', 'UICollectionViewFlowLayoutInvalidationContext', 'UICollectionViewFocusUpdateContext', 'UICollectionViewLayout', 'UICollectionViewLayoutAttributes', 'UICollectionViewLayoutInvalidationContext', 'UICollectionViewListCell', 'UICollectionViewPlaceholder', 'UICollectionViewSupplementaryRegistration', 'UICollectionViewTransitionLayout', 'UICollectionViewUpdateItem', 'UICollisionBehavior', 'UIColor', 'UIColorPickerViewController', 'UIColorWell', 'UICommand', 'UICommandAlternate', 'UIContextMenuConfiguration', 'UIContextMenuInteraction', 'UIContextualAction', 'UIControl', 'UICubicTimingParameters', 'UIDatePicker', 'UIDeferredMenuElement', 'UIDevice', 'UIDictationPhrase', 'UIDocument', 'UIDocumentBrowserAction', 'UIDocumentBrowserTransitionController', 'UIDocumentBrowserViewController', 'UIDocumentInteractionController', 
'UIDocumentMenuViewController', 'UIDocumentPickerExtensionViewController', 'UIDocumentPickerViewController', 'UIDragInteraction', 'UIDragItem', 'UIDragPreview', 'UIDragPreviewParameters', 'UIDragPreviewTarget', 'UIDropInteraction', 'UIDropProposal', 'UIDynamicAnimator', 'UIDynamicBehavior', 'UIDynamicItemBehavior', 'UIDynamicItemGroup', 'UIEvent', 'UIFeedbackGenerator', 'UIFieldBehavior', 'UIFocusAnimationCoordinator', 'UIFocusDebugger', 'UIFocusGuide', 'UIFocusMovementHint', 'UIFocusSystem', 'UIFocusUpdateContext', 'UIFont', 'UIFontDescriptor', 'UIFontMetrics', 'UIFontPickerViewController', 'UIFontPickerViewControllerConfiguration', 'UIGestureRecognizer', 'UIGraphicsImageRenderer', 'UIGraphicsImageRendererContext', 'UIGraphicsImageRendererFormat', 'UIGraphicsPDFRenderer', 'UIGraphicsPDFRendererContext', 'UIGraphicsPDFRendererFormat', 'UIGraphicsRenderer', 'UIGraphicsRendererContext', 'UIGraphicsRendererFormat', 'UIGravityBehavior', 'UIHoverGestureRecognizer', 'UIImage', 'UIImageAsset', 'UIImageConfiguration', 'UIImagePickerController', 'UIImageSymbolConfiguration', 'UIImageView', 'UIImpactFeedbackGenerator', 'UIIndirectScribbleInteraction', 'UIInputView', 'UIInputViewController', 'UIInterpolatingMotionEffect', 'UIKey', 'UIKeyCommand', 'UILabel', 'UILargeContentViewerInteraction', 'UILayoutGuide', 'UILexicon', 'UILexiconEntry', 'UIListContentConfiguration', 'UIListContentImageProperties', 'UIListContentTextProperties', 'UIListContentView', 'UILocalNotification', 'UILocalizedIndexedCollation', 'UILongPressGestureRecognizer', 'UIManagedDocument', 'UIMarkupTextPrintFormatter', 'UIMenu', 'UIMenuController', 'UIMenuElement', 'UIMenuItem', 'UIMenuSystem', 'UIMotionEffect', 'UIMotionEffectGroup', 'UIMutableApplicationShortcutItem', 'UIMutableUserNotificationAction', 'UIMutableUserNotificationCategory', 'UINavigationBar', 'UINavigationBarAppearance', 'UINavigationController', 'UINavigationItem', 'UINib', 'UINotificationFeedbackGenerator', 'UIOpenURLContext', 'UIPageControl', 'UIPageViewController', 'UIPanGestureRecognizer', 'UIPasteConfiguration', 'UIPasteboard', 'UIPencilInteraction', 'UIPercentDrivenInteractiveTransition', 'UIPickerView', 'UIPinchGestureRecognizer', 'UIPointerEffect', 'UIPointerHighlightEffect', 'UIPointerHoverEffect', 'UIPointerInteraction', 'UIPointerLiftEffect', 'UIPointerLockState', 'UIPointerRegion', 'UIPointerRegionRequest', 'UIPointerShape', 'UIPointerStyle', 'UIPopoverBackgroundView', 'UIPopoverController', 'UIPopoverPresentationController', 'UIPresentationController', 'UIPress', 'UIPressesEvent', 'UIPreviewAction', 'UIPreviewActionGroup', 'UIPreviewInteraction', 'UIPreviewParameters', 'UIPreviewTarget', 'UIPrintFormatter', 'UIPrintInfo', 'UIPrintInteractionController', 'UIPrintPageRenderer', 'UIPrintPaper', 'UIPrinter', 'UIPrinterPickerController', 'UIProgressView', 'UIPushBehavior', 'UIReferenceLibraryViewController', 'UIRefreshControl', 'UIRegion', 'UIResponder', 'UIRotationGestureRecognizer', 'UIScene', 'UISceneActivationConditions', 'UISceneActivationRequestOptions', 'UISceneConfiguration', 'UISceneConnectionOptions', 'UISceneDestructionRequestOptions', 'UISceneOpenExternalURLOptions', 'UISceneOpenURLOptions', 'UISceneSession', 'UISceneSizeRestrictions', 'UIScreen', 'UIScreenEdgePanGestureRecognizer', 'UIScreenMode', 'UIScreenshotService', 'UIScribbleInteraction', 'UIScrollView', 'UISearchBar', 'UISearchContainerViewController', 'UISearchController', 'UISearchDisplayController', 'UISearchSuggestionItem', 'UISearchTextField', 'UISearchToken', 'UISegmentedControl', 
'UISelectionFeedbackGenerator', 'UISimpleTextPrintFormatter', 'UISlider', 'UISnapBehavior', 'UISplitViewController', 'UISpringLoadedInteraction', 'UISpringTimingParameters', 'UIStackView', 'UIStatusBarManager', 'UIStepper', 'UIStoryboard', 'UIStoryboardPopoverSegue', 'UIStoryboardSegue', 'UIStoryboardUnwindSegueSource', 'UISwipeActionsConfiguration', 'UISwipeGestureRecognizer', 'UISwitch', 'UITabBar', 'UITabBarAppearance', 'UITabBarController', 'UITabBarItem', 'UITabBarItemAppearance', 'UITabBarItemStateAppearance', 'UITableView', 'UITableViewCell', 'UITableViewController', 'UITableViewDiffableDataSource', 'UITableViewDropPlaceholder', 'UITableViewDropProposal', 'UITableViewFocusUpdateContext', 'UITableViewHeaderFooterView', 'UITableViewPlaceholder', 'UITableViewRowAction', 'UITapGestureRecognizer', 'UITargetedDragPreview', 'UITargetedPreview', 'UITextChecker', 'UITextDragPreviewRenderer', 'UITextDropProposal', 'UITextField', 'UITextFormattingCoordinator', 'UITextInputAssistantItem', 'UITextInputMode', 'UITextInputPasswordRules', 'UITextInputStringTokenizer', 'UITextInteraction', 'UITextPlaceholder', 'UITextPosition', 'UITextRange', 'UITextSelectionRect', 'UITextView', 'UITitlebar', 'UIToolbar', 'UIToolbarAppearance', 'UITouch', 'UITraitCollection', 'UIUserNotificationAction', 'UIUserNotificationCategory', 'UIUserNotificationSettings', 'UIVibrancyEffect', 'UIVideoEditorController', 'UIView', 'UIViewConfigurationState', 'UIViewController', 'UIViewPrintFormatter', 'UIViewPropertyAnimator', 'UIVisualEffect', 'UIVisualEffectView', 'UIWebView', 'UIWindow', 'UIWindowScene', 'UIWindowSceneDestructionRequestOptions', 'UNCalendarNotificationTrigger', 'UNLocationNotificationTrigger', 'UNMutableNotificationContent', 'UNNotification', 'UNNotificationAction', 'UNNotificationAttachment', 'UNNotificationCategory', 'UNNotificationContent', 'UNNotificationRequest', 'UNNotificationResponse', 'UNNotificationServiceExtension', 'UNNotificationSettings', 'UNNotificationSound', 'UNNotificationTrigger', 'UNPushNotificationTrigger', 'UNTextInputNotificationAction', 'UNTextInputNotificationResponse', 'UNTimeIntervalNotificationTrigger', 'UNUserNotificationCenter', 'UTType', 'VNBarcodeObservation', 'VNCircle', 'VNClassificationObservation', 'VNClassifyImageRequest', 'VNContour', 'VNContoursObservation', 'VNCoreMLFeatureValueObservation', 'VNCoreMLModel', 'VNCoreMLRequest', 'VNDetectBarcodesRequest', 'VNDetectContoursRequest', 'VNDetectFaceCaptureQualityRequest', 'VNDetectFaceLandmarksRequest', 'VNDetectFaceRectanglesRequest', 'VNDetectHorizonRequest', 'VNDetectHumanBodyPoseRequest', 'VNDetectHumanHandPoseRequest', 'VNDetectHumanRectanglesRequest', 'VNDetectRectanglesRequest', 'VNDetectTextRectanglesRequest', 'VNDetectTrajectoriesRequest', 'VNDetectedObjectObservation', 'VNDetectedPoint', 'VNDocumentCameraScan', 'VNDocumentCameraViewController', 'VNFaceLandmarkRegion', 'VNFaceLandmarkRegion2D', 'VNFaceLandmarks', 'VNFaceLandmarks2D', 'VNFaceObservation', 'VNFeaturePrintObservation', 'VNGenerateAttentionBasedSaliencyImageRequest', 'VNGenerateImageFeaturePrintRequest', 'VNGenerateObjectnessBasedSaliencyImageRequest', 'VNGenerateOpticalFlowRequest', 'VNGeometryUtils', 'VNHomographicImageRegistrationRequest', 'VNHorizonObservation', 'VNHumanBodyPoseObservation', 'VNHumanHandPoseObservation', 'VNImageAlignmentObservation', 'VNImageBasedRequest', 'VNImageHomographicAlignmentObservation', 'VNImageRegistrationRequest', 'VNImageRequestHandler', 'VNImageTranslationAlignmentObservation', 'VNObservation', 
'VNPixelBufferObservation', 'VNPoint', 'VNRecognizeAnimalsRequest', 'VNRecognizeTextRequest', 'VNRecognizedObjectObservation', 'VNRecognizedPoint', 'VNRecognizedPointsObservation', 'VNRecognizedText', 'VNRecognizedTextObservation', 'VNRectangleObservation', 'VNRequest', 'VNSaliencyImageObservation', 'VNSequenceRequestHandler', 'VNStatefulRequest', 'VNTargetedImageRequest', 'VNTextObservation', 'VNTrackObjectRequest', 'VNTrackRectangleRequest', 'VNTrackingRequest', 'VNTrajectoryObservation', 'VNTranslationalImageRegistrationRequest', 'VNVector', 'VNVideoProcessor', 'VNVideoProcessorCadence', 'VNVideoProcessorFrameRateCadence', 'VNVideoProcessorRequestProcessingOptions', 'VNVideoProcessorTimeIntervalCadence', 'VSAccountApplicationProvider', 'VSAccountManager', 'VSAccountManagerResult', 'VSAccountMetadata', 'VSAccountMetadataRequest', 'VSAccountProviderResponse', 'VSSubscription', 'VSSubscriptionRegistrationCenter', 'WCSession', 'WCSessionFile', 'WCSessionFileTransfer', 'WCSessionUserInfoTransfer', 'WKBackForwardList', 'WKBackForwardListItem', 'WKContentRuleList', 'WKContentRuleListStore', 'WKContentWorld', 'WKContextMenuElementInfo', 'WKFindConfiguration', 'WKFindResult', 'WKFrameInfo', 'WKHTTPCookieStore', 'WKNavigation', 'WKNavigationAction', 'WKNavigationResponse', 'WKOpenPanelParameters', 'WKPDFConfiguration', 'WKPreferences', 'WKPreviewElementInfo', 'WKProcessPool', 'WKScriptMessage', 'WKSecurityOrigin', 'WKSnapshotConfiguration', 'WKUserContentController', 'WKUserScript', 'WKWebView', 'WKWebViewConfiguration', 'WKWebpagePreferences', 'WKWebsiteDataRecord', 'WKWebsiteDataStore', 'WKWindowFeatures', '__EntityAccessibilityWrapper'}
COCOA_PROTOCOLS = {'ABNewPersonViewControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'ABPersonViewControllerDelegate', 'ABUnknownPersonViewControllerDelegate', 'ADActionViewControllerChildInterface', 'ADActionViewControllerInterface', 'ADBannerViewDelegate', 'ADInterstitialAdDelegate', 'AEAssessmentSessionDelegate', 'ARAnchorCopying', 'ARCoachingOverlayViewDelegate', 'ARSCNViewDelegate', 'ARSKViewDelegate', 'ARSessionDelegate', 'ARSessionObserver', 'ARSessionProviding', 'ARTrackable', 'ASAccountAuthenticationModificationControllerDelegate', 'ASAccountAuthenticationModificationControllerPresentationContextProviding', 'ASAuthorizationControllerDelegate', 'ASAuthorizationControllerPresentationContextProviding', 'ASAuthorizationCredential', 'ASAuthorizationProvider', 'ASAuthorizationProviderExtensionAuthorizationRequestHandler', 'ASWebAuthenticationPresentationContextProviding', 'ASWebAuthenticationSessionRequestDelegate', 'ASWebAuthenticationSessionWebBrowserSessionHandling', 'AUAudioUnitFactory', 'AVAssetDownloadDelegate', 'AVAssetResourceLoaderDelegate', 'AVAssetWriterDelegate', 'AVAsynchronousKeyValueLoading', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'AVCaptureDataOutputSynchronizerDelegate', 'AVCaptureDepthDataOutputDelegate', 'AVCaptureFileOutputDelegate', 'AVCaptureFileOutputRecordingDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'AVCapturePhotoCaptureDelegate', 'AVCapturePhotoFileDataRepresentationCustomizer', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'AVContentKeyRecipient', 'AVContentKeySessionDelegate', 'AVFragmentMinding', 'AVPictureInPictureControllerDelegate', 'AVPlayerItemLegibleOutputPushDelegate', 'AVPlayerItemMetadataCollectorPushDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'AVPlayerItemOutputPullDelegate', 'AVPlayerItemOutputPushDelegate', 'AVPlayerViewControllerDelegate', 'AVQueuedSampleBufferRendering', 'AVRoutePickerViewDelegate', 'AVVideoCompositing', 'AVVideoCompositionInstruction', 'AVVideoCompositionValidationHandling', 'AXCustomContentProvider', 'CAAction', 'CAAnimationDelegate', 'CALayerDelegate', 'CAMediaTiming', 'CAMetalDrawable', 'CBCentralManagerDelegate', 'CBPeripheralDelegate', 'CBPeripheralManagerDelegate', 'CHHapticAdvancedPatternPlayer', 'CHHapticDeviceCapability', 'CHHapticParameterAttributes', 'CHHapticPatternPlayer', 'CIAccordionFoldTransition', 'CIAffineClamp', 'CIAffineTile', 'CIAreaAverage', 'CIAreaHistogram', 'CIAreaMaximum', 'CIAreaMaximumAlpha', 'CIAreaMinMax', 'CIAreaMinMaxRed', 'CIAreaMinimum', 'CIAreaMinimumAlpha', 'CIAreaReductionFilter', 'CIAttributedTextImageGenerator', 'CIAztecCodeGenerator', 'CIBarcodeGenerator', 'CIBarsSwipeTransition', 'CIBicubicScaleTransform', 'CIBlendWithMask', 'CIBloom', 'CIBokehBlur', 'CIBoxBlur', 'CIBumpDistortion', 'CIBumpDistortionLinear', 'CICMYKHalftone', 'CICheckerboardGenerator', 'CICircleSplashDistortion', 'CICircularScreen', 'CICircularWrap', 'CICode128BarcodeGenerator', 'CIColorAbsoluteDifference', 'CIColorClamp', 'CIColorControls', 'CIColorCrossPolynomial', 'CIColorCube', 'CIColorCubeWithColorSpace', 'CIColorCubesMixedWithMask', 'CIColorCurves', 'CIColorInvert', 'CIColorMap', 'CIColorMatrix', 'CIColorMonochrome', 'CIColorPolynomial', 'CIColorPosterize', 'CIColorThreshold', 'CIColorThresholdOtsu', 'CIColumnAverage', 'CIComicEffect', 'CICompositeOperation', 'CIConvolution', 'CICopyMachineTransition', 'CICoreMLModel', 'CICrystallize', 'CIDepthOfField', 'CIDepthToDisparity', 'CIDiscBlur', 'CIDisintegrateWithMaskTransition', 'CIDisparityToDepth', 
'CIDisplacementDistortion', 'CIDissolveTransition', 'CIDither', 'CIDocumentEnhancer', 'CIDotScreen', 'CIDroste', 'CIEdgePreserveUpsample', 'CIEdgeWork', 'CIEdges', 'CIEightfoldReflectedTile', 'CIExposureAdjust', 'CIFalseColor', 'CIFilter', 'CIFilterConstructor', 'CIFlashTransition', 'CIFourCoordinateGeometryFilter', 'CIFourfoldReflectedTile', 'CIFourfoldRotatedTile', 'CIFourfoldTranslatedTile', 'CIGaborGradients', 'CIGammaAdjust', 'CIGaussianBlur', 'CIGaussianGradient', 'CIGlassDistortion', 'CIGlassLozenge', 'CIGlideReflectedTile', 'CIGloom', 'CIHatchedScreen', 'CIHeightFieldFromMask', 'CIHexagonalPixellate', 'CIHighlightShadowAdjust', 'CIHistogramDisplay', 'CIHoleDistortion', 'CIHueAdjust', 'CIHueSaturationValueGradient', 'CIImageProcessorInput', 'CIImageProcessorOutput', 'CIKMeans', 'CIKaleidoscope', 'CIKeystoneCorrectionCombined', 'CIKeystoneCorrectionHorizontal', 'CIKeystoneCorrectionVertical', 'CILabDeltaE', 'CILanczosScaleTransform', 'CILenticularHaloGenerator', 'CILightTunnel', 'CILineOverlay', 'CILineScreen', 'CILinearGradient', 'CILinearToSRGBToneCurve', 'CIMaskToAlpha', 'CIMaskedVariableBlur', 'CIMaximumComponent', 'CIMedian', 'CIMeshGenerator', 'CIMinimumComponent', 'CIMix', 'CIModTransition', 'CIMorphologyGradient', 'CIMorphologyMaximum', 'CIMorphologyMinimum', 'CIMorphologyRectangleMaximum', 'CIMorphologyRectangleMinimum', 'CIMotionBlur', 'CINinePartStretched', 'CINinePartTiled', 'CINoiseReduction', 'CIOpTile', 'CIPDF417BarcodeGenerator', 'CIPageCurlTransition', 'CIPageCurlWithShadowTransition', 'CIPaletteCentroid', 'CIPalettize', 'CIParallelogramTile', 'CIPerspectiveCorrection', 'CIPerspectiveRotate', 'CIPerspectiveTile', 'CIPerspectiveTransform', 'CIPerspectiveTransformWithExtent', 'CIPhotoEffect', 'CIPinchDistortion', 'CIPixellate', 'CIPlugInRegistration', 'CIPointillize', 'CIQRCodeGenerator', 'CIRadialGradient', 'CIRandomGenerator', 'CIRippleTransition', 'CIRoundedRectangleGenerator', 'CIRowAverage', 'CISRGBToneCurveToLinear', 'CISaliencyMap', 'CISepiaTone', 'CIShadedMaterial', 'CISharpenLuminance', 'CISixfoldReflectedTile', 'CISixfoldRotatedTile', 'CISmoothLinearGradient', 'CISpotColor', 'CISpotLight', 'CIStarShineGenerator', 'CIStraighten', 'CIStretchCrop', 'CIStripesGenerator', 'CISunbeamsGenerator', 'CISwipeTransition', 'CITemperatureAndTint', 'CITextImageGenerator', 'CIThermal', 'CIToneCurve', 'CITorusLensDistortion', 'CITransitionFilter', 'CITriangleKaleidoscope', 'CITriangleTile', 'CITwelvefoldReflectedTile', 'CITwirlDistortion', 'CIUnsharpMask', 'CIVibrance', 'CIVignette', 'CIVignetteEffect', 'CIVortexDistortion', 'CIWhitePointAdjust', 'CIXRay', 'CIZoomBlur', 'CKRecordKeyValueSetting', 'CKRecordValue', 'CLKComplicationDataSource', 'CLLocationManagerDelegate', 'CLSContextProvider', 'CLSDataStoreDelegate', 'CMFallDetectionDelegate', 'CMHeadphoneMotionManagerDelegate', 'CNChangeHistoryEventVisitor', 'CNContactPickerDelegate', 'CNContactViewControllerDelegate', 'CNKeyDescriptor', 'CPApplicationDelegate', 'CPBarButtonProviding', 'CPInterfaceControllerDelegate', 'CPListTemplateDelegate', 'CPListTemplateItem', 'CPMapTemplateDelegate', 'CPNowPlayingTemplateObserver', 'CPPointOfInterestTemplateDelegate', 'CPSearchTemplateDelegate', 'CPSelectableListItem', 'CPSessionConfigurationDelegate', 'CPTabBarTemplateDelegate', 'CPTemplateApplicationDashboardSceneDelegate', 'CPTemplateApplicationSceneDelegate', 'CSSearchableIndexDelegate', 'CTSubscriberDelegate', 'CTTelephonyNetworkInfoDelegate', 'CXCallDirectoryExtensionContextDelegate', 'CXCallObserverDelegate', 'CXProviderDelegate', 
'EAAccessoryDelegate', 'EAGLDrawable', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'EKCalendarChooserDelegate', 'EKEventEditViewDelegate', 'EKEventViewDelegate', 'GCDevice', 'GKAchievementViewControllerDelegate', 'GKAgentDelegate', 'GKChallengeEventHandlerDelegate', 'GKChallengeListener', 'GKFriendRequestComposeViewControllerDelegate', 'GKGameCenterControllerDelegate', 'GKGameModel', 'GKGameModelPlayer', 'GKGameModelUpdate', 'GKGameSessionEventListener', 'GKGameSessionSharingViewControllerDelegate', 'GKInviteEventListener', 'GKLeaderboardViewControllerDelegate', 'GKLocalPlayerListener', 'GKMatchDelegate', 'GKMatchmakerViewControllerDelegate', 'GKPeerPickerControllerDelegate', 'GKRandom', 'GKSavedGameListener', 'GKSceneRootNodeType', 'GKSessionDelegate', 'GKStrategist', 'GKTurnBasedEventListener', 'GKTurnBasedMatchmakerViewControllerDelegate', 'GKVoiceChatClient', 'GLKNamedEffect', 'GLKViewControllerDelegate', 'GLKViewDelegate', 'HKLiveWorkoutBuilderDelegate', 'HKWorkoutSessionDelegate', 'HMAccessoryBrowserDelegate', 'HMAccessoryDelegate', 'HMCameraSnapshotControlDelegate', 'HMCameraStreamControlDelegate', 'HMHomeDelegate', 'HMHomeManagerDelegate', 'HMNetworkConfigurationProfileDelegate', 'ICCameraDeviceDelegate', 'ICCameraDeviceDownloadDelegate', 'ICDeviceBrowserDelegate', 'ICDeviceDelegate', 'ICScannerDeviceDelegate', 'ILMessageFilterQueryHandling', 'INActivateCarSignalIntentHandling', 'INAddMediaIntentHandling', 'INAddTasksIntentHandling', 'INAppendToNoteIntentHandling', 'INBookRestaurantReservationIntentHandling', 'INCallsDomainHandling', 'INCancelRideIntentHandling', 'INCancelWorkoutIntentHandling', 'INCarCommandsDomainHandling', 'INCarPlayDomainHandling', 'INCreateNoteIntentHandling', 'INCreateTaskListIntentHandling', 'INDeleteTasksIntentHandling', 'INEndWorkoutIntentHandling', 'INGetAvailableRestaurantReservationBookingDefaultsIntentHandling', 'INGetAvailableRestaurantReservationBookingsIntentHandling', 'INGetCarLockStatusIntentHandling', 'INGetCarPowerLevelStatusIntentHandling', 'INGetCarPowerLevelStatusIntentResponseObserver', 'INGetRestaurantGuestIntentHandling', 'INGetRideStatusIntentHandling', 'INGetRideStatusIntentResponseObserver', 'INGetUserCurrentRestaurantReservationBookingsIntentHandling', 'INGetVisualCodeIntentHandling', 'INIntentHandlerProviding', 'INListCarsIntentHandling', 'INListRideOptionsIntentHandling', 'INMessagesDomainHandling', 'INNotebookDomainHandling', 'INPauseWorkoutIntentHandling', 'INPayBillIntentHandling', 'INPaymentsDomainHandling', 'INPhotosDomainHandling', 'INPlayMediaIntentHandling', 'INRadioDomainHandling', 'INRequestPaymentIntentHandling', 'INRequestRideIntentHandling', 'INResumeWorkoutIntentHandling', 'INRidesharingDomainHandling', 'INSaveProfileInCarIntentHandling', 'INSearchCallHistoryIntentHandling', 'INSearchForAccountsIntentHandling', 'INSearchForBillsIntentHandling', 'INSearchForMediaIntentHandling', 'INSearchForMessagesIntentHandling', 'INSearchForNotebookItemsIntentHandling', 'INSearchForPhotosIntentHandling', 'INSendMessageIntentHandling', 'INSendPaymentIntentHandling', 'INSendRideFeedbackIntentHandling', 'INSetAudioSourceInCarIntentHandling', 'INSetCarLockStatusIntentHandling', 'INSetClimateSettingsInCarIntentHandling', 'INSetDefrosterSettingsInCarIntentHandling', 'INSetMessageAttributeIntentHandling', 'INSetProfileInCarIntentHandling', 'INSetRadioStationIntentHandling', 'INSetSeatSettingsInCarIntentHandling', 'INSetTaskAttributeIntentHandling', 'INSnoozeTasksIntentHandling', 'INSpeakable', 'INStartAudioCallIntentHandling', 
'INStartCallIntentHandling', 'INStartPhotoPlaybackIntentHandling', 'INStartVideoCallIntentHandling', 'INStartWorkoutIntentHandling', 'INTransferMoneyIntentHandling', 'INUIAddVoiceShortcutButtonDelegate', 'INUIAddVoiceShortcutViewControllerDelegate', 'INUIEditVoiceShortcutViewControllerDelegate', 'INUIHostedViewControlling', 'INUIHostedViewSiriProviding', 'INUpdateMediaAffinityIntentHandling', 'INVisualCodeDomainHandling', 'INWorkoutsDomainHandling', 'JSExport', 'MCAdvertiserAssistantDelegate', 'MCBrowserViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MCNearbyServiceBrowserDelegate', 'MCSessionDelegate', 'MDLAssetResolver', 'MDLComponent', 'MDLJointAnimation', 'MDLLightProbeIrradianceDataSource', 'MDLMeshBuffer', 'MDLMeshBufferAllocator', 'MDLMeshBufferZone', 'MDLNamed', 'MDLObjectContainerComponent', 'MDLTransformComponent', 'MDLTransformOp', 'MFMailComposeViewControllerDelegate', 'MFMessageComposeViewControllerDelegate', 'MIDICIProfileResponderDelegate', 'MKAnnotation', 'MKGeoJSONObject', 'MKLocalSearchCompleterDelegate', 'MKMapViewDelegate', 'MKOverlay', 'MKReverseGeocoderDelegate', 'MLBatchProvider', 'MLCustomLayer', 'MLCustomModel', 'MLFeatureProvider', 'MLWritable', 'MPMediaPickerControllerDelegate', 'MPMediaPlayback', 'MPNowPlayingSessionDelegate', 'MPPlayableContentDataSource', 'MPPlayableContentDelegate', 'MPSystemMusicPlayerController', 'MSAuthenticationPresentationContext', 'MSMessagesAppTranscriptPresentation', 'MSStickerBrowserViewDataSource', 'MTKViewDelegate', 'MTLAccelerationStructure', 'MTLAccelerationStructureCommandEncoder', 'MTLArgumentEncoder', 'MTLBinaryArchive', 'MTLBlitCommandEncoder', 'MTLBuffer', 'MTLCaptureScope', 'MTLCommandBuffer', 'MTLCommandBufferEncoderInfo', 'MTLCommandEncoder', 'MTLCommandQueue', 'MTLComputeCommandEncoder', 'MTLComputePipelineState', 'MTLCounter', 'MTLCounterSampleBuffer', 'MTLCounterSet', 'MTLDepthStencilState', 'MTLDevice', 'MTLDrawable', 'MTLDynamicLibrary', 'MTLEvent', 'MTLFence', 'MTLFunction', 'MTLFunctionHandle', 'MTLFunctionLog', 'MTLFunctionLogDebugLocation', 'MTLHeap', 'MTLIndirectCommandBuffer', 'MTLIndirectComputeCommand', 'MTLIndirectComputeCommandEncoder', 'MTLIndirectRenderCommand', 'MTLIndirectRenderCommandEncoder', 'MTLIntersectionFunctionTable', 'MTLLibrary', 'MTLLogContainer', 'MTLParallelRenderCommandEncoder', 'MTLRasterizationRateMap', 'MTLRenderCommandEncoder', 'MTLRenderPipelineState', 'MTLResource', 'MTLResourceStateCommandEncoder', 'MTLSamplerState', 'MTLSharedEvent', 'MTLTexture', 'MTLVisibleFunctionTable', 'MXMetricManagerSubscriber', 'MyClassJavaScriptMethods', 'NCWidgetProviding', 'NEAppPushDelegate', 'NFCFeliCaTag', 'NFCISO15693Tag', 'NFCISO7816Tag', 'NFCMiFareTag', 'NFCNDEFReaderSessionDelegate', 'NFCNDEFTag', 'NFCReaderSession', 'NFCReaderSessionDelegate', 'NFCTag', 'NFCTagReaderSessionDelegate', 'NFCVASReaderSessionDelegate', 'NISessionDelegate', 'NSCacheDelegate', 'NSCoding', 'NSCollectionLayoutContainer', 'NSCollectionLayoutEnvironment', 'NSCollectionLayoutVisibleItem', 'NSCopying', 'NSDecimalNumberBehaviors', 'NSDiscardableContent', 'NSExtensionRequestHandling', 'NSFastEnumeration', 'NSFetchRequestResult', 'NSFetchedResultsControllerDelegate', 'NSFetchedResultsSectionInfo', 'NSFileManagerDelegate', 'NSFilePresenter', 'NSFileProviderChangeObserver', 'NSFileProviderEnumerationObserver', 'NSFileProviderEnumerator', 'NSFileProviderItem', 'NSFileProviderServiceSource', 'NSItemProviderReading', 'NSItemProviderWriting', 'NSKeyedArchiverDelegate', 'NSKeyedUnarchiverDelegate', 
'NSLayoutManagerDelegate', 'NSLocking', 'NSMachPortDelegate', 'NSMetadataQueryDelegate', 'NSMutableCopying', 'NSNetServiceBrowserDelegate', 'NSNetServiceDelegate', 'NSPortDelegate', 'NSProgressReporting', 'NSSecureCoding', 'NSStreamDelegate', 'NSTextAttachmentContainer', 'NSTextLayoutOrientationProvider', 'NSTextStorageDelegate', 'NSURLAuthenticationChallengeSender', 'NSURLConnectionDataDelegate', 'NSURLConnectionDelegate', 'NSURLConnectionDownloadDelegate', 'NSURLProtocolClient', 'NSURLSessionDataDelegate', 'NSURLSessionDelegate', 'NSURLSessionDownloadDelegate', 'NSURLSessionStreamDelegate', 'NSURLSessionTaskDelegate', 'NSURLSessionWebSocketDelegate', 'NSUserActivityDelegate', 'NSXMLParserDelegate', 'NSXPCListenerDelegate', 'NSXPCProxyCreating', 'NWTCPConnectionAuthenticationDelegate', 'OSLogEntryFromProcess', 'OSLogEntryWithPayload', 'PDFDocumentDelegate', 'PDFViewDelegate', 'PHContentEditingController', 'PHLivePhotoFrame', 'PHLivePhotoViewDelegate', 'PHPhotoLibraryAvailabilityObserver', 'PHPhotoLibraryChangeObserver', 'PHPickerViewControllerDelegate', 'PKAddPassesViewControllerDelegate', 'PKAddPaymentPassViewControllerDelegate', 'PKAddSecureElementPassViewControllerDelegate', 'PKCanvasViewDelegate', 'PKDisbursementAuthorizationControllerDelegate', 'PKIssuerProvisioningExtensionAuthorizationProviding', 'PKPaymentAuthorizationControllerDelegate', 'PKPaymentAuthorizationViewControllerDelegate', 'PKPaymentInformationRequestHandling', 'PKPushRegistryDelegate', 'PKToolPickerObserver', 'PreviewDisplaying', 'QLPreviewControllerDataSource', 'QLPreviewControllerDelegate', 'QLPreviewItem', 'QLPreviewingController', 'RPBroadcastActivityControllerDelegate', 'RPBroadcastActivityViewControllerDelegate', 'RPBroadcastControllerDelegate', 'RPPreviewViewControllerDelegate', 'RPScreenRecorderDelegate', 'SCNActionable', 'SCNAnimatable', 'SCNAnimation', 'SCNAvoidOccluderConstraintDelegate', 'SCNBoundingVolume', 'SCNBufferStream', 'SCNCameraControlConfiguration', 'SCNCameraControllerDelegate', 'SCNNodeRendererDelegate', 'SCNPhysicsContactDelegate', 'SCNProgramDelegate', 'SCNSceneExportDelegate', 'SCNSceneRenderer', 'SCNSceneRendererDelegate', 'SCNShadable', 'SCNTechniqueSupport', 'SFSafariViewControllerDelegate', 'SFSpeechRecognitionTaskDelegate', 'SFSpeechRecognizerDelegate', 'SKCloudServiceSetupViewControllerDelegate', 'SKOverlayDelegate', 'SKPaymentQueueDelegate', 'SKPaymentTransactionObserver', 'SKPhysicsContactDelegate', 'SKProductsRequestDelegate', 'SKRequestDelegate', 'SKSceneDelegate', 'SKStoreProductViewControllerDelegate', 'SKViewDelegate', 'SKWarpable', 'SNRequest', 'SNResult', 'SNResultsObserving', 'SRSensorReaderDelegate', 'TKSmartCardTokenDriverDelegate', 'TKSmartCardUserInteractionDelegate', 'TKTokenDelegate', 'TKTokenDriverDelegate', 'TKTokenSessionDelegate', 'UIAccelerometerDelegate', 'UIAccessibilityContainerDataTable', 'UIAccessibilityContainerDataTableCell', 'UIAccessibilityContentSizeCategoryImageAdjusting', 'UIAccessibilityIdentification', 'UIAccessibilityReadingContent', 'UIActionSheetDelegate', 'UIActivityItemSource', 'UIActivityItemsConfigurationReading', 'UIAdaptivePresentationControllerDelegate', 'UIAlertViewDelegate', 'UIAppearance', 'UIAppearanceContainer', 'UIApplicationDelegate', 'UIBarPositioning', 'UIBarPositioningDelegate', 'UICloudSharingControllerDelegate', 'UICollectionViewDataSource', 'UICollectionViewDataSourcePrefetching', 'UICollectionViewDelegate', 'UICollectionViewDelegateFlowLayout', 'UICollectionViewDragDelegate', 'UICollectionViewDropCoordinator', 
'UICollectionViewDropDelegate', 'UICollectionViewDropItem', 'UICollectionViewDropPlaceholderContext', 'UICollisionBehaviorDelegate', 'UIColorPickerViewControllerDelegate', 'UIConfigurationState', 'UIContentConfiguration', 'UIContentContainer', 'UIContentSizeCategoryAdjusting', 'UIContentView', 'UIContextMenuInteractionAnimating', 'UIContextMenuInteractionCommitAnimating', 'UIContextMenuInteractionDelegate', 'UICoordinateSpace', 'UIDataSourceModelAssociation', 'UIDataSourceTranslating', 'UIDocumentBrowserViewControllerDelegate', 'UIDocumentInteractionControllerDelegate', 'UIDocumentMenuDelegate', 'UIDocumentPickerDelegate', 'UIDragAnimating', 'UIDragDropSession', 'UIDragInteractionDelegate', 'UIDragSession', 'UIDropInteractionDelegate', 'UIDropSession', 'UIDynamicAnimatorDelegate', 'UIDynamicItem', 'UIFocusAnimationContext', 'UIFocusDebuggerOutput', 'UIFocusEnvironment', 'UIFocusItem', 'UIFocusItemContainer', 'UIFocusItemScrollableContainer', 'UIFontPickerViewControllerDelegate', 'UIGestureRecognizerDelegate', 'UIGuidedAccessRestrictionDelegate', 'UIImageConfiguration', 'UIImagePickerControllerDelegate', 'UIIndirectScribbleInteractionDelegate', 'UIInputViewAudioFeedback', 'UIInteraction', 'UIItemProviderPresentationSizeProviding', 'UIKeyInput', 'UILargeContentViewerInteractionDelegate', 'UILargeContentViewerItem', 'UILayoutSupport', 'UIMenuBuilder', 'UINavigationBarDelegate', 'UINavigationControllerDelegate', 'UIObjectRestoration', 'UIPageViewControllerDataSource', 'UIPageViewControllerDelegate', 'UIPasteConfigurationSupporting', 'UIPencilInteractionDelegate', 'UIPickerViewAccessibilityDelegate', 'UIPickerViewDataSource', 'UIPickerViewDelegate', 'UIPointerInteractionAnimating', 'UIPointerInteractionDelegate', 'UIPopoverBackgroundViewMethods', 'UIPopoverControllerDelegate', 'UIPopoverPresentationControllerDelegate', 'UIPreviewActionItem', 'UIPreviewInteractionDelegate', 'UIPrintInteractionControllerDelegate', 'UIPrinterPickerControllerDelegate', 'UIResponderStandardEditActions', 'UISceneDelegate', 'UIScreenshotServiceDelegate', 'UIScribbleInteractionDelegate', 'UIScrollViewAccessibilityDelegate', 'UIScrollViewDelegate', 'UISearchBarDelegate', 'UISearchControllerDelegate', 'UISearchDisplayDelegate', 'UISearchResultsUpdating', 'UISearchSuggestion', 'UISearchTextFieldDelegate', 'UISearchTextFieldPasteItem', 'UISplitViewControllerDelegate', 'UISpringLoadedInteractionBehavior', 'UISpringLoadedInteractionContext', 'UISpringLoadedInteractionEffect', 'UISpringLoadedInteractionSupporting', 'UIStateRestoring', 'UITabBarControllerDelegate', 'UITabBarDelegate', 'UITableViewDataSource', 'UITableViewDataSourcePrefetching', 'UITableViewDelegate', 'UITableViewDragDelegate', 'UITableViewDropCoordinator', 'UITableViewDropDelegate', 'UITableViewDropItem', 'UITableViewDropPlaceholderContext', 'UITextDocumentProxy', 'UITextDragDelegate', 'UITextDragRequest', 'UITextDraggable', 'UITextDropDelegate', 'UITextDropRequest', 'UITextDroppable', 'UITextFieldDelegate', 'UITextFormattingCoordinatorDelegate', 'UITextInput', 'UITextInputDelegate', 'UITextInputTokenizer', 'UITextInputTraits', 'UITextInteractionDelegate', 'UITextPasteConfigurationSupporting', 'UITextPasteDelegate', 'UITextPasteItem', 'UITextSelecting', 'UITextViewDelegate', 'UITimingCurveProvider', 'UIToolbarDelegate', 'UITraitEnvironment', 'UIUserActivityRestoring', 'UIVideoEditorControllerDelegate', 'UIViewAnimating', 'UIViewControllerAnimatedTransitioning', 'UIViewControllerContextTransitioning', 'UIViewControllerInteractiveTransitioning', 
'UIViewControllerPreviewing', 'UIViewControllerPreviewingDelegate', 'UIViewControllerRestoration', 'UIViewControllerTransitionCoordinator', 'UIViewControllerTransitionCoordinatorContext', 'UIViewControllerTransitioningDelegate', 'UIViewImplicitlyAnimating', 'UIWebViewDelegate', 'UIWindowSceneDelegate', 'UNNotificationContentExtension', 'UNUserNotificationCenterDelegate', 'VNDocumentCameraViewControllerDelegate', 'VNFaceObservationAccepting', 'VNRequestProgressProviding', 'VNRequestRevisionProviding', 'VSAccountManagerDelegate', 'WCSessionDelegate', 'WKHTTPCookieStoreObserver', 'WKNavigationDelegate', 'WKPreviewActionItem', 'WKScriptMessageHandler', 'WKScriptMessageHandlerWithReply', 'WKUIDelegate', 'WKURLSchemeHandler', 'WKURLSchemeTask'}
COCOA_PRIMITIVES = {'ACErrorCode', 'ALCcontext_struct', 'ALCdevice_struct', 'ALMXGlyphEntry', 'ALMXHeader', 'API_UNAVAILABLE', 'AUChannelInfo', 'AUDependentParameter', 'AUDistanceAttenuationData', 'AUHostIdentifier', 'AUHostVersionIdentifier', 'AUInputSamplesInOutputCallbackStruct', 'AUMIDIEvent', 'AUMIDIOutputCallbackStruct', 'AUNodeInteraction', 'AUNodeRenderCallback', 'AUNumVersion', 'AUParameterAutomationEvent', 'AUParameterEvent', 'AUParameterMIDIMapping', 'AUPreset', 'AUPresetEvent', 'AURecordedParameterEvent', 'AURenderCallbackStruct', 'AURenderEventHeader', 'AUSamplerBankPresetData', 'AUSamplerInstrumentData', 'AnchorPoint', 'AnchorPointTable', 'AnkrTable', 'AudioBalanceFade', 'AudioBuffer', 'AudioBufferList', 'AudioBytePacketTranslation', 'AudioChannelDescription', 'AudioChannelLayout', 'AudioClassDescription', 'AudioCodecMagicCookieInfo', 'AudioCodecPrimeInfo', 'AudioComponentDescription', 'AudioComponentPlugInInterface', 'AudioConverterPrimeInfo', 'AudioFileMarker', 'AudioFileMarkerList', 'AudioFilePacketTableInfo', 'AudioFileRegion', 'AudioFileRegionList', 'AudioFileTypeAndFormatID', 'AudioFile_SMPTE_Time', 'AudioFormatInfo', 'AudioFormatListItem', 'AudioFramePacketTranslation', 'AudioIndependentPacketTranslation', 'AudioOutputUnitMIDICallbacks', 'AudioOutputUnitStartAtTimeParams', 'AudioPacketDependencyInfoTranslation', 'AudioPacketRangeByteCountTranslation', 'AudioPacketRollDistanceTranslation', 'AudioPanningInfo', 'AudioQueueBuffer', 'AudioQueueChannelAssignment', 'AudioQueueLevelMeterState', 'AudioQueueParameterEvent', 'AudioStreamBasicDescription', 'AudioStreamPacketDescription', 'AudioTimeStamp', 'AudioUnitCocoaViewInfo', 'AudioUnitConnection', 'AudioUnitExternalBuffer', 'AudioUnitFrequencyResponseBin', 'AudioUnitMIDIControlMapping', 'AudioUnitMeterClipping', 'AudioUnitNodeConnection', 'AudioUnitOtherPluginDesc', 'AudioUnitParameter', 'AudioUnitParameterEvent', 'AudioUnitParameterHistoryInfo', 'AudioUnitParameterInfo', 'AudioUnitParameterNameInfo', 'AudioUnitParameterStringFromValue', 'AudioUnitParameterValueFromString', 'AudioUnitParameterValueName', 'AudioUnitParameterValueTranslation', 'AudioUnitPresetMAS_SettingData', 'AudioUnitPresetMAS_Settings', 'AudioUnitProperty', 'AudioUnitRenderContext', 'AudioValueRange', 'AudioValueTranslation', 'AuthorizationOpaqueRef', 'BslnFormat0Part', 'BslnFormat1Part', 'BslnFormat2Part', 'BslnFormat3Part', 'BslnTable', 'CABarBeatTime', 'CAFAudioDescription', 'CAFChunkHeader', 'CAFDataChunk', 'CAFFileHeader', 'CAFInfoStrings', 'CAFInstrumentChunk', 'CAFMarker', 'CAFMarkerChunk', 'CAFOverviewChunk', 'CAFOverviewSample', 'CAFPacketTableHeader', 'CAFPeakChunk', 'CAFPositionPeak', 'CAFRegion', 'CAFRegionChunk', 'CAFStringID', 'CAFStrings', 'CAFUMIDChunk', 'CAF_SMPTE_Time', 'CAF_UUID_ChunkHeader', 'CA_BOXABLE', 'CFHostClientContext', 'CFNetServiceClientContext', 'CF_BRIDGED_MUTABLE_TYPE', 'CF_BRIDGED_TYPE', 'CF_RELATED_TYPE', 'CGAffineTransform', 'CGDataConsumerCallbacks', 'CGDataProviderDirectCallbacks', 'CGDataProviderSequentialCallbacks', 'CGFunctionCallbacks', 'CGPDFArray', 'CGPDFContentStream', 'CGPDFDictionary', 'CGPDFObject', 'CGPDFOperatorTable', 'CGPDFScanner', 'CGPDFStream', 'CGPDFString', 'CGPathElement', 'CGPatternCallbacks', 'CGVector', 'CG_BOXABLE', 'CLLocationCoordinate2D', 'CM_BRIDGED_TYPE', 'CTParagraphStyleSetting', 'CVPlanarComponentInfo', 'CVPlanarPixelBufferInfo', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'CVSMPTETime', 'CV_BRIDGED_TYPE', 'ComponentInstanceRecord', 
'ExtendedAudioFormatInfo', 'ExtendedControlEvent', 'ExtendedNoteOnEvent', 'ExtendedTempoEvent', 'FontVariation', 'GCQuaternion', 'GKBox', 'GKQuad', 'GKTriangle', 'GLKEffectPropertyPrv', 'HostCallbackInfo', 'IIO_BRIDGED_TYPE', 'IUnknownVTbl', 'JustDirectionTable', 'JustPCAction', 'JustPCActionSubrecord', 'JustPCConditionalAddAction', 'JustPCDecompositionAction', 'JustPCDuctilityAction', 'JustPCGlyphRepeatAddAction', 'JustPostcompTable', 'JustTable', 'JustWidthDeltaEntry', 'JustWidthDeltaGroup', 'KernIndexArrayHeader', 'KernKerningPair', 'KernOffsetTable', 'KernOrderedListEntry', 'KernOrderedListHeader', 'KernSimpleArrayHeader', 'KernStateEntry', 'KernStateHeader', 'KernSubtableHeader', 'KernTableHeader', 'KernVersion0Header', 'KernVersion0SubtableHeader', 'KerxAnchorPointAction', 'KerxControlPointAction', 'KerxControlPointEntry', 'KerxControlPointHeader', 'KerxCoordinateAction', 'KerxIndexArrayHeader', 'KerxKerningPair', 'KerxOrderedListEntry', 'KerxOrderedListHeader', 'KerxSimpleArrayHeader', 'KerxStateEntry', 'KerxStateHeader', 'KerxSubtableHeader', 'KerxTableHeader', 'LcarCaretClassEntry', 'LcarCaretTable', 'LtagStringRange', 'LtagTable', 'MDL_CLASS_EXPORT', 'MIDICIDeviceIdentification', 'MIDIChannelMessage', 'MIDIControlTransform', 'MIDIDriverInterface', 'MIDIEventList', 'MIDIEventPacket', 'MIDIIOErrorNotification', 'MIDIMessage_128', 'MIDIMessage_64', 'MIDIMessage_96', 'MIDIMetaEvent', 'MIDINoteMessage', 'MIDINotification', 'MIDIObjectAddRemoveNotification', 'MIDIObjectPropertyChangeNotification', 'MIDIPacket', 'MIDIPacketList', 'MIDIRawData', 'MIDISysexSendRequest', 'MIDIThruConnectionEndpoint', 'MIDIThruConnectionParams', 'MIDITransform', 'MIDIValueMap', 'MPSDeviceOptions', 'MixerDistanceParams', 'MortChain', 'MortContextualSubtable', 'MortFeatureEntry', 'MortInsertionSubtable', 'MortLigatureSubtable', 'MortRearrangementSubtable', 'MortSubtable', 'MortSwashSubtable', 'MortTable', 'MorxChain', 'MorxContextualSubtable', 'MorxInsertionSubtable', 'MorxLigatureSubtable', 'MorxRearrangementSubtable', 'MorxSubtable', 'MorxTable', 'MusicDeviceNoteParams', 'MusicDeviceStdNoteParams', 'MusicEventUserData', 'MusicTrackLoopInfo', 'NoteParamsControlValue', 'OpaqueAudioComponent', 'OpaqueAudioComponentInstance', 'OpaqueAudioConverter', 'OpaqueAudioQueue', 'OpaqueAudioQueueProcessingTap', 'OpaqueAudioQueueTimeline', 'OpaqueExtAudioFile', 'OpaqueJSClass', 'OpaqueJSContext', 'OpaqueJSContextGroup', 'OpaqueJSPropertyNameAccumulator', 'OpaqueJSPropertyNameArray', 'OpaqueJSString', 'OpaqueJSValue', 'OpaqueMusicEventIterator', 'OpaqueMusicPlayer', 'OpaqueMusicSequence', 'OpaqueMusicTrack', 'OpbdSideValues', 'OpbdTable', 'ParameterEvent', 'PropLookupSegment', 'PropLookupSingle', 'PropTable', 'ROTAGlyphEntry', 'ROTAHeader', 'SCNMatrix4', 'SCNVector3', 'SCNVector4', 'SFNTLookupArrayHeader', 'SFNTLookupBinarySearchHeader', 'SFNTLookupSegment', 'SFNTLookupSegmentHeader', 'SFNTLookupSingle', 'SFNTLookupSingleHeader', 'SFNTLookupTable', 'SFNTLookupTrimmedArrayHeader', 'SFNTLookupVectorHeader', 'SMPTETime', 'STClassTable', 'STEntryOne', 'STEntryTwo', 'STEntryZero', 'STHeader', 'STXEntryOne', 'STXEntryTwo', 'STXEntryZero', 'STXHeader', 'ScheduledAudioFileRegion', 'ScheduledAudioSlice', 'SecKeychainAttribute', 'SecKeychainAttributeInfo', 'SecKeychainAttributeList', 'TrakTable', 'TrakTableData', 'TrakTableEntry', 'UIAccessibility', 'VTDecompressionOutputCallbackRecord', 'VTInt32Point', 'VTInt32Size', '_CFHTTPAuthentication', '_GLKMatrix2', '_GLKMatrix3', '_GLKMatrix4', '_GLKQuaternion', '_GLKVector2', 
'_GLKVector3', '_GLKVector4', '_GLKVertexAttributeParameters', '_MTLAxisAlignedBoundingBox', '_MTLPackedFloat3', '_MTLPackedFloat4x3', '_NSRange', '_NSZone', '__CFHTTPMessage', '__CFHost', '__CFNetDiagnostic', '__CFNetService', '__CFNetServiceBrowser', '__CFNetServiceMonitor', '__CFXMLNode', '__CFXMLParser', '__GLsync', '__SecAccess', '__SecCertificate', '__SecIdentity', '__SecKey', '__SecRandom', '__attribute__', 'gss_OID_desc_struct', 'gss_OID_set_desc_struct', 'gss_auth_identity', 'gss_buffer_desc_struct', 'gss_buffer_set_desc_struct', 'gss_channel_bindings_struct', 'gss_cred_id_t_desc_struct', 'gss_ctx_id_t_desc_struct', 'gss_iov_buffer_desc_struct', 'gss_krb5_cfx_keydata', 'gss_krb5_lucid_context_v1', 'gss_krb5_lucid_context_version', 'gss_krb5_lucid_key', 'gss_krb5_rfc1964_keydata', 'gss_name_t_desc_struct', 'opaqueCMBufferQueueTriggerToken', 'sfntCMapEncoding', 'sfntCMapExtendedSubHeader', 'sfntCMapHeader', 'sfntCMapSubHeader', 'sfntDescriptorHeader', 'sfntDirectory', 'sfntDirectoryEntry', 'sfntFeatureHeader', 'sfntFeatureName', 'sfntFontDescriptor', 'sfntFontFeatureSetting', 'sfntFontRunFeature', 'sfntInstance', 'sfntNameHeader', 'sfntNameRecord', 'sfntVariationAxis', 'sfntVariationHeader'}
-
-if __name__ == '__main__': # pragma: no cover
- import os
- import re
-
+
+if __name__ == '__main__': # pragma: no cover
+ import os
+ import re
+
FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/'
- frameworks = os.listdir(FRAMEWORKS_PATH)
-
- all_interfaces = set()
- all_protocols = set()
- all_primitives = set()
- for framework in frameworks:
- frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
- if not os.path.exists(frameworkHeadersDir):
- continue
-
- headerFilenames = os.listdir(frameworkHeadersDir)
-
- for f in headerFilenames:
- if not f.endswith('.h'):
- continue
- headerFilePath = frameworkHeadersDir + f
+ frameworks = os.listdir(FRAMEWORKS_PATH)
+
+ all_interfaces = set()
+ all_protocols = set()
+ all_primitives = set()
+ for framework in frameworks:
+ frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
+ if not os.path.exists(frameworkHeadersDir):
+ continue
+
+ headerFilenames = os.listdir(frameworkHeadersDir)
+
+ for f in headerFilenames:
+ if not f.endswith('.h'):
+ continue
+ headerFilePath = frameworkHeadersDir + f
try:
with open(headerFilePath, encoding='utf-8') as f:
@@ -45,31 +45,31 @@ if __name__ == '__main__': # pragma: no cover
continue
res = re.findall(r'(?<=@interface )\w+', content)
- for r in res:
- all_interfaces.add(r)
-
+ for r in res:
+ all_interfaces.add(r)
+
res = re.findall(r'(?<=@protocol )\w+', content)
- for r in res:
- all_protocols.add(r)
-
+ for r in res:
+ all_protocols.add(r)
+
res = re.findall(r'(?<=typedef enum )\w+', content)
- for r in res:
- all_primitives.add(r)
-
+ for r in res:
+ all_primitives.add(r)
+
res = re.findall(r'(?<=typedef struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
+ for r in res:
+ all_primitives.add(r)
+
res = re.findall(r'(?<=typedef const struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
-
- print("ALL interfaces: \n")
+ for r in res:
+ all_primitives.add(r)
+
+
+ print("ALL interfaces: \n")
print(sorted(list(all_interfaces)))
-
- print("\nALL protocols: \n")
+
+ print("\nALL protocols: \n")
print(sorted(list(all_protocols)))
-
- print("\nALL primitives: \n")
+
+ print("\nALL primitives: \n")
print(sorted(list(all_primitives)))
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
index e7e395dc6a..1a605f36a1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
@@ -1,11 +1,11 @@
-"""
- pygments.lexers._csound_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
+"""
+ pygments.lexers._csound_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
REMOVED_OPCODES = set('''
OSCsendA
beadsynt
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
index 8fd0ff1be2..9605e078c7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
@@ -1,5326 +1,5326 @@
-"""
- pygments.lexers._lasso_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Built-in Lasso types, traits, methods, and members.
-
+"""
+ pygments.lexers._lasso_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Built-in Lasso types, traits, methods, and members.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-BUILTINS = {
- 'Types': (
- 'array',
- 'atbegin',
- 'boolean',
- 'bson_iter',
- 'bson',
- 'bytes_document_body',
- 'bytes',
- 'cache_server_element',
- 'cache_server',
- 'capture',
- 'client_address',
- 'client_ip',
- 'component_container',
- 'component_render_state',
- 'component',
- 'curl',
- 'curltoken',
- 'currency',
- 'custom',
- 'data_document',
- 'database_registry',
- 'date',
- 'dateandtime',
- 'dbgp_packet',
- 'dbgp_server',
- 'debugging_stack',
- 'decimal',
- 'delve',
- 'dir',
- 'dirdesc',
- 'dns_response',
- 'document_base',
- 'document_body',
- 'document_header',
- 'dsinfo',
- 'duration',
- 'eacher',
- 'email_compose',
- 'email_parse',
- 'email_pop',
- 'email_queue_impl_base',
- 'email_queue_impl',
- 'email_smtp',
- 'email_stage_impl_base',
- 'email_stage_impl',
- 'fastcgi_each_fcgi_param',
- 'fastcgi_server',
- 'fcgi_record',
- 'fcgi_request',
- 'file',
- 'filedesc',
- 'filemaker_datasource',
- 'generateforeachkeyed',
- 'generateforeachunkeyed',
- 'generateseries',
- 'hash_map',
- 'html_atomic_element',
- 'html_attr',
- 'html_base',
- 'html_binary',
- 'html_br',
- 'html_cdata',
- 'html_container_element',
- 'html_div',
- 'html_document_body',
- 'html_document_head',
- 'html_eol',
- 'html_fieldset',
- 'html_form',
- 'html_h1',
- 'html_h2',
- 'html_h3',
- 'html_h4',
- 'html_h5',
- 'html_h6',
- 'html_hr',
- 'html_img',
- 'html_input',
- 'html_json',
- 'html_label',
- 'html_legend',
- 'html_link',
- 'html_meta',
- 'html_object',
- 'html_option',
- 'html_raw',
- 'html_script',
- 'html_select',
- 'html_span',
- 'html_style',
- 'html_table',
- 'html_td',
- 'html_text',
- 'html_th',
- 'html_tr',
- 'http_document_header',
- 'http_document',
- 'http_error',
- 'http_header_field',
- 'http_server_connection_handler_globals',
- 'http_server_connection_handler',
- 'http_server_request_logger_thread',
- 'http_server_web_connection',
- 'http_server',
- 'image',
- 'include_cache',
- 'inline_type',
- 'integer',
- 'java_jnienv',
- 'jbyte',
- 'jbytearray',
- 'jchar',
- 'jchararray',
- 'jfieldid',
- 'jfloat',
- 'jint',
- 'jmethodid',
- 'jobject',
- 'jshort',
- 'json_decode',
- 'json_encode',
- 'json_literal',
- 'json_object',
- 'keyword',
- 'lassoapp_compiledsrc_appsource',
- 'lassoapp_compiledsrc_fileresource',
- 'lassoapp_content_rep_halt',
- 'lassoapp_dirsrc_appsource',
- 'lassoapp_dirsrc_fileresource',
- 'lassoapp_installer',
- 'lassoapp_livesrc_appsource',
- 'lassoapp_livesrc_fileresource',
- 'lassoapp_long_expiring_bytes',
- 'lassoapp_manualsrc_appsource',
- 'lassoapp_zip_file_server',
- 'lassoapp_zipsrc_appsource',
- 'lassoapp_zipsrc_fileresource',
- 'ldap',
- 'library_thread_loader',
- 'list_node',
- 'list',
- 'locale',
- 'log_impl_base',
- 'log_impl',
- 'magick_image',
- 'map_node',
- 'map',
- 'memberstream',
- 'memory_session_driver_impl_entry',
- 'memory_session_driver_impl',
- 'memory_session_driver',
- 'mime_reader',
- 'mongo_client',
- 'mongo_collection',
- 'mongo_cursor',
- 'mustache_ctx',
- 'mysql_session_driver_impl',
- 'mysql_session_driver',
- 'net_named_pipe',
- 'net_tcp_ssl',
- 'net_tcp',
- 'net_udp_packet',
- 'net_udp',
- 'null',
- 'odbc_session_driver_impl',
- 'odbc_session_driver',
- 'opaque',
- 'os_process',
- 'pair_compare',
- 'pair',
- 'pairup',
- 'pdf_barcode',
- 'pdf_chunk',
- 'pdf_color',
- 'pdf_doc',
- 'pdf_font',
- 'pdf_hyphenator',
- 'pdf_image',
- 'pdf_list',
- 'pdf_paragraph',
- 'pdf_phrase',
- 'pdf_read',
- 'pdf_table',
- 'pdf_text',
- 'pdf_typebase',
- 'percent',
- 'portal_impl',
- 'queriable_groupby',
- 'queriable_grouping',
- 'queriable_groupjoin',
- 'queriable_join',
- 'queriable_orderby',
- 'queriable_orderbydescending',
- 'queriable_select',
- 'queriable_selectmany',
- 'queriable_skip',
- 'queriable_take',
- 'queriable_thenby',
- 'queriable_thenbydescending',
- 'queriable_where',
- 'queue',
- 'raw_document_body',
- 'regexp',
- 'repeat',
- 'scientific',
- 'security_registry',
- 'serialization_element',
- 'serialization_object_identity_compare',
- 'serialization_reader',
- 'serialization_writer_ref',
- 'serialization_writer_standin',
- 'serialization_writer',
- 'session_delete_expired_thread',
- 'set',
- 'signature',
- 'sourcefile',
- 'sqlite_column',
- 'sqlite_currentrow',
- 'sqlite_db',
- 'sqlite_results',
- 'sqlite_session_driver_impl_entry',
- 'sqlite_session_driver_impl',
- 'sqlite_session_driver',
- 'sqlite_table',
- 'sqlite3_stmt',
- 'sqlite3',
- 'staticarray',
- 'string',
- 'sys_process',
- 'tag',
- 'text_document',
- 'tie',
- 'timeonly',
- 'trait',
- 'tree_base',
- 'tree_node',
- 'tree_nullnode',
- 'ucal',
- 'usgcpu',
- 'usgvm',
- 'void',
- 'web_error_atend',
- 'web_node_base',
- 'web_node_content_representation_css_specialized',
- 'web_node_content_representation_html_specialized',
- 'web_node_content_representation_js_specialized',
- 'web_node_content_representation_xhr_container',
- 'web_node_echo',
- 'web_node_root',
- 'web_request_impl',
- 'web_request',
- 'web_response_impl',
- 'web_response',
- 'web_router',
- 'websocket_handler',
- 'worker_pool',
- 'xml_attr',
- 'xml_cdatasection',
- 'xml_characterdata',
- 'xml_comment',
- 'xml_document',
- 'xml_documentfragment',
- 'xml_documenttype',
- 'xml_domimplementation',
- 'xml_element',
- 'xml_entity',
- 'xml_entityreference',
- 'xml_namednodemap_attr',
- 'xml_namednodemap_ht',
- 'xml_namednodemap',
- 'xml_node',
- 'xml_nodelist',
- 'xml_notation',
- 'xml_processinginstruction',
- 'xml_text',
- 'xmlstream',
- 'zip_file_impl',
- 'zip_file',
- 'zip_impl',
- 'zip',
- ),
- 'Traits': (
- 'any',
- 'formattingbase',
- 'html_attributed',
- 'html_element_coreattrs',
- 'html_element_eventsattrs',
- 'html_element_i18nattrs',
- 'lassoapp_capabilities',
- 'lassoapp_resource',
- 'lassoapp_source',
- 'queriable_asstring',
- 'session_driver',
- 'trait_array',
- 'trait_asstring',
- 'trait_backcontractible',
- 'trait_backended',
- 'trait_backexpandable',
- 'trait_close',
- 'trait_contractible',
- 'trait_decompose_assignment',
- 'trait_doubleended',
- 'trait_each_sub',
- 'trait_encodeurl',
- 'trait_endedfullymutable',
- 'trait_expandable',
- 'trait_file',
- 'trait_finite',
- 'trait_finiteforeach',
- 'trait_foreach',
- 'trait_foreachtextelement',
- 'trait_frontcontractible',
- 'trait_frontended',
- 'trait_frontexpandable',
- 'trait_fullymutable',
- 'trait_generator',
- 'trait_generatorcentric',
- 'trait_hashable',
- 'trait_json_serialize',
- 'trait_keyed',
- 'trait_keyedfinite',
- 'trait_keyedforeach',
- 'trait_keyedmutable',
- 'trait_list',
- 'trait_map',
- 'trait_net',
- 'trait_pathcomponents',
- 'trait_positionallykeyed',
- 'trait_positionallysearchable',
- 'trait_queriable',
- 'trait_queriablelambda',
- 'trait_readbytes',
- 'trait_readstring',
- 'trait_scalar',
- 'trait_searchable',
- 'trait_serializable',
- 'trait_setencoding',
- 'trait_setoperations',
- 'trait_stack',
- 'trait_treenode',
- 'trait_writebytes',
- 'trait_writestring',
- 'trait_xml_elementcompat',
- 'trait_xml_nodecompat',
- 'web_connection',
- 'web_node_container',
- 'web_node_content_css_specialized',
- 'web_node_content_document',
- 'web_node_content_html_specialized',
- 'web_node_content_js_specialized',
- 'web_node_content_json_specialized',
- 'web_node_content_representation',
- 'web_node_content',
- 'web_node_postable',
- 'web_node',
- ),
- 'Unbound Methods': (
- 'abort_clear',
- 'abort_now',
- 'abort',
- 'action_param',
- 'action_params',
- 'action_statement',
- 'admin_authorization',
- 'admin_currentgroups',
- 'admin_currentuserid',
- 'admin_currentusername',
- 'admin_getpref',
- 'admin_initialize',
- 'admin_lassoservicepath',
- 'admin_removepref',
- 'admin_setpref',
- 'admin_userexists',
- 'all',
- 'auth_admin',
- 'auth_check',
- 'auth_custom',
- 'auth_group',
- 'auth_prompt',
- 'auth_user',
- 'bom_utf16be',
- 'bom_utf16le',
- 'bom_utf32be',
- 'bom_utf32le',
- 'bom_utf8',
- 'bw',
- 'capture_nearestloopabort',
- 'capture_nearestloopcontinue',
- 'capture_nearestloopcount',
- 'checked',
- 'cipher_decrypt_private',
- 'cipher_decrypt_public',
- 'cipher_decrypt',
- 'cipher_digest',
- 'cipher_encrypt_private',
- 'cipher_encrypt_public',
- 'cipher_encrypt',
- 'cipher_generate_key',
- 'cipher_hmac',
- 'cipher_keylength',
- 'cipher_list',
- 'cipher_open',
- 'cipher_seal',
- 'cipher_sign',
- 'cipher_verify',
- 'client_addr',
- 'client_authorization',
- 'client_browser',
- 'client_contentlength',
- 'client_contenttype',
- 'client_cookielist',
- 'client_cookies',
- 'client_encoding',
- 'client_formmethod',
- 'client_getargs',
- 'client_getparam',
- 'client_getparams',
- 'client_headers',
- 'client_integertoip',
- 'client_iptointeger',
- 'client_password',
- 'client_postargs',
- 'client_postparam',
- 'client_postparams',
- 'client_type',
- 'client_url',
- 'client_username',
- 'cn',
- 'column_name',
- 'column_names',
- 'column_type',
- 'column',
- 'compress',
- 'content_addheader',
- 'content_body',
- 'content_encoding',
- 'content_header',
- 'content_replaceheader',
- 'content_type',
- 'cookie_set',
- 'cookie',
- 'curl_easy_cleanup',
- 'curl_easy_duphandle',
- 'curl_easy_getinfo',
- 'curl_easy_init',
- 'curl_easy_reset',
- 'curl_easy_setopt',
- 'curl_easy_strerror',
- 'curl_getdate',
- 'curl_http_version_1_0',
- 'curl_http_version_1_1',
- 'curl_http_version_none',
- 'curl_ipresolve_v4',
- 'curl_ipresolve_v6',
- 'curl_ipresolve_whatever',
- 'curl_multi_perform',
- 'curl_multi_result',
- 'curl_netrc_ignored',
- 'curl_netrc_optional',
- 'curl_netrc_required',
+ :license: BSD, see LICENSE for details.
+"""
+
+BUILTINS = {
+ 'Types': (
+ 'array',
+ 'atbegin',
+ 'boolean',
+ 'bson_iter',
+ 'bson',
+ 'bytes_document_body',
+ 'bytes',
+ 'cache_server_element',
+ 'cache_server',
+ 'capture',
+ 'client_address',
+ 'client_ip',
+ 'component_container',
+ 'component_render_state',
+ 'component',
+ 'curl',
+ 'curltoken',
+ 'currency',
+ 'custom',
+ 'data_document',
+ 'database_registry',
+ 'date',
+ 'dateandtime',
+ 'dbgp_packet',
+ 'dbgp_server',
+ 'debugging_stack',
+ 'decimal',
+ 'delve',
+ 'dir',
+ 'dirdesc',
+ 'dns_response',
+ 'document_base',
+ 'document_body',
+ 'document_header',
+ 'dsinfo',
+ 'duration',
+ 'eacher',
+ 'email_compose',
+ 'email_parse',
+ 'email_pop',
+ 'email_queue_impl_base',
+ 'email_queue_impl',
+ 'email_smtp',
+ 'email_stage_impl_base',
+ 'email_stage_impl',
+ 'fastcgi_each_fcgi_param',
+ 'fastcgi_server',
+ 'fcgi_record',
+ 'fcgi_request',
+ 'file',
+ 'filedesc',
+ 'filemaker_datasource',
+ 'generateforeachkeyed',
+ 'generateforeachunkeyed',
+ 'generateseries',
+ 'hash_map',
+ 'html_atomic_element',
+ 'html_attr',
+ 'html_base',
+ 'html_binary',
+ 'html_br',
+ 'html_cdata',
+ 'html_container_element',
+ 'html_div',
+ 'html_document_body',
+ 'html_document_head',
+ 'html_eol',
+ 'html_fieldset',
+ 'html_form',
+ 'html_h1',
+ 'html_h2',
+ 'html_h3',
+ 'html_h4',
+ 'html_h5',
+ 'html_h6',
+ 'html_hr',
+ 'html_img',
+ 'html_input',
+ 'html_json',
+ 'html_label',
+ 'html_legend',
+ 'html_link',
+ 'html_meta',
+ 'html_object',
+ 'html_option',
+ 'html_raw',
+ 'html_script',
+ 'html_select',
+ 'html_span',
+ 'html_style',
+ 'html_table',
+ 'html_td',
+ 'html_text',
+ 'html_th',
+ 'html_tr',
+ 'http_document_header',
+ 'http_document',
+ 'http_error',
+ 'http_header_field',
+ 'http_server_connection_handler_globals',
+ 'http_server_connection_handler',
+ 'http_server_request_logger_thread',
+ 'http_server_web_connection',
+ 'http_server',
+ 'image',
+ 'include_cache',
+ 'inline_type',
+ 'integer',
+ 'java_jnienv',
+ 'jbyte',
+ 'jbytearray',
+ 'jchar',
+ 'jchararray',
+ 'jfieldid',
+ 'jfloat',
+ 'jint',
+ 'jmethodid',
+ 'jobject',
+ 'jshort',
+ 'json_decode',
+ 'json_encode',
+ 'json_literal',
+ 'json_object',
+ 'keyword',
+ 'lassoapp_compiledsrc_appsource',
+ 'lassoapp_compiledsrc_fileresource',
+ 'lassoapp_content_rep_halt',
+ 'lassoapp_dirsrc_appsource',
+ 'lassoapp_dirsrc_fileresource',
+ 'lassoapp_installer',
+ 'lassoapp_livesrc_appsource',
+ 'lassoapp_livesrc_fileresource',
+ 'lassoapp_long_expiring_bytes',
+ 'lassoapp_manualsrc_appsource',
+ 'lassoapp_zip_file_server',
+ 'lassoapp_zipsrc_appsource',
+ 'lassoapp_zipsrc_fileresource',
+ 'ldap',
+ 'library_thread_loader',
+ 'list_node',
+ 'list',
+ 'locale',
+ 'log_impl_base',
+ 'log_impl',
+ 'magick_image',
+ 'map_node',
+ 'map',
+ 'memberstream',
+ 'memory_session_driver_impl_entry',
+ 'memory_session_driver_impl',
+ 'memory_session_driver',
+ 'mime_reader',
+ 'mongo_client',
+ 'mongo_collection',
+ 'mongo_cursor',
+ 'mustache_ctx',
+ 'mysql_session_driver_impl',
+ 'mysql_session_driver',
+ 'net_named_pipe',
+ 'net_tcp_ssl',
+ 'net_tcp',
+ 'net_udp_packet',
+ 'net_udp',
+ 'null',
+ 'odbc_session_driver_impl',
+ 'odbc_session_driver',
+ 'opaque',
+ 'os_process',
+ 'pair_compare',
+ 'pair',
+ 'pairup',
+ 'pdf_barcode',
+ 'pdf_chunk',
+ 'pdf_color',
+ 'pdf_doc',
+ 'pdf_font',
+ 'pdf_hyphenator',
+ 'pdf_image',
+ 'pdf_list',
+ 'pdf_paragraph',
+ 'pdf_phrase',
+ 'pdf_read',
+ 'pdf_table',
+ 'pdf_text',
+ 'pdf_typebase',
+ 'percent',
+ 'portal_impl',
+ 'queriable_groupby',
+ 'queriable_grouping',
+ 'queriable_groupjoin',
+ 'queriable_join',
+ 'queriable_orderby',
+ 'queriable_orderbydescending',
+ 'queriable_select',
+ 'queriable_selectmany',
+ 'queriable_skip',
+ 'queriable_take',
+ 'queriable_thenby',
+ 'queriable_thenbydescending',
+ 'queriable_where',
+ 'queue',
+ 'raw_document_body',
+ 'regexp',
+ 'repeat',
+ 'scientific',
+ 'security_registry',
+ 'serialization_element',
+ 'serialization_object_identity_compare',
+ 'serialization_reader',
+ 'serialization_writer_ref',
+ 'serialization_writer_standin',
+ 'serialization_writer',
+ 'session_delete_expired_thread',
+ 'set',
+ 'signature',
+ 'sourcefile',
+ 'sqlite_column',
+ 'sqlite_currentrow',
+ 'sqlite_db',
+ 'sqlite_results',
+ 'sqlite_session_driver_impl_entry',
+ 'sqlite_session_driver_impl',
+ 'sqlite_session_driver',
+ 'sqlite_table',
+ 'sqlite3_stmt',
+ 'sqlite3',
+ 'staticarray',
+ 'string',
+ 'sys_process',
+ 'tag',
+ 'text_document',
+ 'tie',
+ 'timeonly',
+ 'trait',
+ 'tree_base',
+ 'tree_node',
+ 'tree_nullnode',
+ 'ucal',
+ 'usgcpu',
+ 'usgvm',
+ 'void',
+ 'web_error_atend',
+ 'web_node_base',
+ 'web_node_content_representation_css_specialized',
+ 'web_node_content_representation_html_specialized',
+ 'web_node_content_representation_js_specialized',
+ 'web_node_content_representation_xhr_container',
+ 'web_node_echo',
+ 'web_node_root',
+ 'web_request_impl',
+ 'web_request',
+ 'web_response_impl',
+ 'web_response',
+ 'web_router',
+ 'websocket_handler',
+ 'worker_pool',
+ 'xml_attr',
+ 'xml_cdatasection',
+ 'xml_characterdata',
+ 'xml_comment',
+ 'xml_document',
+ 'xml_documentfragment',
+ 'xml_documenttype',
+ 'xml_domimplementation',
+ 'xml_element',
+ 'xml_entity',
+ 'xml_entityreference',
+ 'xml_namednodemap_attr',
+ 'xml_namednodemap_ht',
+ 'xml_namednodemap',
+ 'xml_node',
+ 'xml_nodelist',
+ 'xml_notation',
+ 'xml_processinginstruction',
+ 'xml_text',
+ 'xmlstream',
+ 'zip_file_impl',
+ 'zip_file',
+ 'zip_impl',
+ 'zip',
+ ),
+ 'Traits': (
+ 'any',
+ 'formattingbase',
+ 'html_attributed',
+ 'html_element_coreattrs',
+ 'html_element_eventsattrs',
+ 'html_element_i18nattrs',
+ 'lassoapp_capabilities',
+ 'lassoapp_resource',
+ 'lassoapp_source',
+ 'queriable_asstring',
+ 'session_driver',
+ 'trait_array',
+ 'trait_asstring',
+ 'trait_backcontractible',
+ 'trait_backended',
+ 'trait_backexpandable',
+ 'trait_close',
+ 'trait_contractible',
+ 'trait_decompose_assignment',
+ 'trait_doubleended',
+ 'trait_each_sub',
+ 'trait_encodeurl',
+ 'trait_endedfullymutable',
+ 'trait_expandable',
+ 'trait_file',
+ 'trait_finite',
+ 'trait_finiteforeach',
+ 'trait_foreach',
+ 'trait_foreachtextelement',
+ 'trait_frontcontractible',
+ 'trait_frontended',
+ 'trait_frontexpandable',
+ 'trait_fullymutable',
+ 'trait_generator',
+ 'trait_generatorcentric',
+ 'trait_hashable',
+ 'trait_json_serialize',
+ 'trait_keyed',
+ 'trait_keyedfinite',
+ 'trait_keyedforeach',
+ 'trait_keyedmutable',
+ 'trait_list',
+ 'trait_map',
+ 'trait_net',
+ 'trait_pathcomponents',
+ 'trait_positionallykeyed',
+ 'trait_positionallysearchable',
+ 'trait_queriable',
+ 'trait_queriablelambda',
+ 'trait_readbytes',
+ 'trait_readstring',
+ 'trait_scalar',
+ 'trait_searchable',
+ 'trait_serializable',
+ 'trait_setencoding',
+ 'trait_setoperations',
+ 'trait_stack',
+ 'trait_treenode',
+ 'trait_writebytes',
+ 'trait_writestring',
+ 'trait_xml_elementcompat',
+ 'trait_xml_nodecompat',
+ 'web_connection',
+ 'web_node_container',
+ 'web_node_content_css_specialized',
+ 'web_node_content_document',
+ 'web_node_content_html_specialized',
+ 'web_node_content_js_specialized',
+ 'web_node_content_json_specialized',
+ 'web_node_content_representation',
+ 'web_node_content',
+ 'web_node_postable',
+ 'web_node',
+ ),
+ 'Unbound Methods': (
+ 'abort_clear',
+ 'abort_now',
+ 'abort',
+ 'action_param',
+ 'action_params',
+ 'action_statement',
+ 'admin_authorization',
+ 'admin_currentgroups',
+ 'admin_currentuserid',
+ 'admin_currentusername',
+ 'admin_getpref',
+ 'admin_initialize',
+ 'admin_lassoservicepath',
+ 'admin_removepref',
+ 'admin_setpref',
+ 'admin_userexists',
+ 'all',
+ 'auth_admin',
+ 'auth_check',
+ 'auth_custom',
+ 'auth_group',
+ 'auth_prompt',
+ 'auth_user',
+ 'bom_utf16be',
+ 'bom_utf16le',
+ 'bom_utf32be',
+ 'bom_utf32le',
+ 'bom_utf8',
+ 'bw',
+ 'capture_nearestloopabort',
+ 'capture_nearestloopcontinue',
+ 'capture_nearestloopcount',
+ 'checked',
+ 'cipher_decrypt_private',
+ 'cipher_decrypt_public',
+ 'cipher_decrypt',
+ 'cipher_digest',
+ 'cipher_encrypt_private',
+ 'cipher_encrypt_public',
+ 'cipher_encrypt',
+ 'cipher_generate_key',
+ 'cipher_hmac',
+ 'cipher_keylength',
+ 'cipher_list',
+ 'cipher_open',
+ 'cipher_seal',
+ 'cipher_sign',
+ 'cipher_verify',
+ 'client_addr',
+ 'client_authorization',
+ 'client_browser',
+ 'client_contentlength',
+ 'client_contenttype',
+ 'client_cookielist',
+ 'client_cookies',
+ 'client_encoding',
+ 'client_formmethod',
+ 'client_getargs',
+ 'client_getparam',
+ 'client_getparams',
+ 'client_headers',
+ 'client_integertoip',
+ 'client_iptointeger',
+ 'client_password',
+ 'client_postargs',
+ 'client_postparam',
+ 'client_postparams',
+ 'client_type',
+ 'client_url',
+ 'client_username',
+ 'cn',
+ 'column_name',
+ 'column_names',
+ 'column_type',
+ 'column',
+ 'compress',
+ 'content_addheader',
+ 'content_body',
+ 'content_encoding',
+ 'content_header',
+ 'content_replaceheader',
+ 'content_type',
+ 'cookie_set',
+ 'cookie',
+ 'curl_easy_cleanup',
+ 'curl_easy_duphandle',
+ 'curl_easy_getinfo',
+ 'curl_easy_init',
+ 'curl_easy_reset',
+ 'curl_easy_setopt',
+ 'curl_easy_strerror',
+ 'curl_getdate',
+ 'curl_http_version_1_0',
+ 'curl_http_version_1_1',
+ 'curl_http_version_none',
+ 'curl_ipresolve_v4',
+ 'curl_ipresolve_v6',
+ 'curl_ipresolve_whatever',
+ 'curl_multi_perform',
+ 'curl_multi_result',
+ 'curl_netrc_ignored',
+ 'curl_netrc_optional',
+ 'curl_netrc_required',
'curl_sslversion_default',
'curl_sslversion_sslv2',
'curl_sslversion_sslv3',
'curl_sslversion_tlsv1',
- 'curl_version_asynchdns',
- 'curl_version_debug',
- 'curl_version_gssnegotiate',
- 'curl_version_idn',
- 'curl_version_info',
- 'curl_version_ipv6',
- 'curl_version_kerberos4',
- 'curl_version_largefile',
- 'curl_version_libz',
- 'curl_version_ntlm',
- 'curl_version_spnego',
- 'curl_version_ssl',
- 'curl_version',
- 'curlauth_any',
- 'curlauth_anysafe',
- 'curlauth_basic',
- 'curlauth_digest',
- 'curlauth_gssnegotiate',
- 'curlauth_none',
- 'curlauth_ntlm',
- 'curle_aborted_by_callback',
- 'curle_bad_calling_order',
- 'curle_bad_content_encoding',
- 'curle_bad_download_resume',
- 'curle_bad_function_argument',
- 'curle_bad_password_entered',
- 'curle_couldnt_connect',
- 'curle_couldnt_resolve_host',
- 'curle_couldnt_resolve_proxy',
- 'curle_failed_init',
- 'curle_file_couldnt_read_file',
- 'curle_filesize_exceeded',
- 'curle_ftp_access_denied',
- 'curle_ftp_cant_get_host',
- 'curle_ftp_cant_reconnect',
- 'curle_ftp_couldnt_get_size',
- 'curle_ftp_couldnt_retr_file',
- 'curle_ftp_couldnt_set_ascii',
- 'curle_ftp_couldnt_set_binary',
- 'curle_ftp_couldnt_use_rest',
- 'curle_ftp_port_failed',
- 'curle_ftp_quote_error',
- 'curle_ftp_ssl_failed',
- 'curle_ftp_user_password_incorrect',
- 'curle_ftp_weird_227_format',
- 'curle_ftp_weird_pass_reply',
- 'curle_ftp_weird_pasv_reply',
- 'curle_ftp_weird_server_reply',
- 'curle_ftp_weird_user_reply',
- 'curle_ftp_write_error',
- 'curle_function_not_found',
- 'curle_got_nothing',
- 'curle_http_post_error',
- 'curle_http_range_error',
- 'curle_http_returned_error',
- 'curle_interface_failed',
- 'curle_ldap_cannot_bind',
- 'curle_ldap_invalid_url',
- 'curle_ldap_search_failed',
- 'curle_library_not_found',
- 'curle_login_denied',
- 'curle_malformat_user',
- 'curle_obsolete',
- 'curle_ok',
- 'curle_operation_timeouted',
- 'curle_out_of_memory',
- 'curle_partial_file',
- 'curle_read_error',
- 'curle_recv_error',
- 'curle_send_error',
- 'curle_send_fail_rewind',
- 'curle_share_in_use',
- 'curle_ssl_cacert',
- 'curle_ssl_certproblem',
- 'curle_ssl_cipher',
- 'curle_ssl_connect_error',
- 'curle_ssl_engine_initfailed',
- 'curle_ssl_engine_notfound',
- 'curle_ssl_engine_setfailed',
- 'curle_ssl_peer_certificate',
- 'curle_telnet_option_syntax',
- 'curle_too_many_redirects',
- 'curle_unknown_telnet_option',
- 'curle_unsupported_protocol',
- 'curle_url_malformat_user',
- 'curle_url_malformat',
- 'curle_write_error',
- 'curlftpauth_default',
- 'curlftpauth_ssl',
- 'curlftpauth_tls',
- 'curlftpssl_all',
- 'curlftpssl_control',
- 'curlftpssl_last',
- 'curlftpssl_none',
- 'curlftpssl_try',
- 'curlinfo_connect_time',
- 'curlinfo_content_length_download',
- 'curlinfo_content_length_upload',
- 'curlinfo_content_type',
- 'curlinfo_effective_url',
- 'curlinfo_filetime',
- 'curlinfo_header_size',
- 'curlinfo_http_connectcode',
- 'curlinfo_httpauth_avail',
- 'curlinfo_namelookup_time',
- 'curlinfo_num_connects',
- 'curlinfo_os_errno',
- 'curlinfo_pretransfer_time',
- 'curlinfo_proxyauth_avail',
- 'curlinfo_redirect_count',
- 'curlinfo_redirect_time',
- 'curlinfo_request_size',
- 'curlinfo_response_code',
- 'curlinfo_size_download',
- 'curlinfo_size_upload',
- 'curlinfo_speed_download',
- 'curlinfo_speed_upload',
- 'curlinfo_ssl_engines',
- 'curlinfo_ssl_verifyresult',
- 'curlinfo_starttransfer_time',
- 'curlinfo_total_time',
- 'curlmsg_done',
- 'curlopt_autoreferer',
- 'curlopt_buffersize',
- 'curlopt_cainfo',
- 'curlopt_capath',
- 'curlopt_connecttimeout',
- 'curlopt_cookie',
- 'curlopt_cookiefile',
- 'curlopt_cookiejar',
- 'curlopt_cookiesession',
- 'curlopt_crlf',
- 'curlopt_customrequest',
- 'curlopt_dns_use_global_cache',
- 'curlopt_egdsocket',
- 'curlopt_encoding',
- 'curlopt_failonerror',
- 'curlopt_filetime',
- 'curlopt_followlocation',
- 'curlopt_forbid_reuse',
- 'curlopt_fresh_connect',
- 'curlopt_ftp_account',
- 'curlopt_ftp_create_missing_dirs',
- 'curlopt_ftp_response_timeout',
- 'curlopt_ftp_ssl',
- 'curlopt_ftp_use_eprt',
- 'curlopt_ftp_use_epsv',
- 'curlopt_ftpappend',
- 'curlopt_ftplistonly',
- 'curlopt_ftpport',
- 'curlopt_ftpsslauth',
- 'curlopt_header',
- 'curlopt_http_version',
- 'curlopt_http200aliases',
- 'curlopt_httpauth',
- 'curlopt_httpget',
- 'curlopt_httpheader',
- 'curlopt_httppost',
- 'curlopt_httpproxytunnel',
- 'curlopt_infilesize_large',
- 'curlopt_infilesize',
- 'curlopt_interface',
- 'curlopt_ipresolve',
- 'curlopt_krb4level',
- 'curlopt_low_speed_limit',
- 'curlopt_low_speed_time',
- 'curlopt_mail_from',
- 'curlopt_mail_rcpt',
- 'curlopt_maxconnects',
- 'curlopt_maxfilesize_large',
- 'curlopt_maxfilesize',
- 'curlopt_maxredirs',
- 'curlopt_netrc_file',
- 'curlopt_netrc',
- 'curlopt_nobody',
- 'curlopt_noprogress',
- 'curlopt_port',
- 'curlopt_post',
- 'curlopt_postfields',
- 'curlopt_postfieldsize_large',
- 'curlopt_postfieldsize',
- 'curlopt_postquote',
- 'curlopt_prequote',
- 'curlopt_proxy',
- 'curlopt_proxyauth',
- 'curlopt_proxyport',
- 'curlopt_proxytype',
- 'curlopt_proxyuserpwd',
- 'curlopt_put',
- 'curlopt_quote',
- 'curlopt_random_file',
- 'curlopt_range',
- 'curlopt_readdata',
- 'curlopt_referer',
- 'curlopt_resume_from_large',
- 'curlopt_resume_from',
- 'curlopt_ssl_cipher_list',
- 'curlopt_ssl_verifyhost',
- 'curlopt_ssl_verifypeer',
- 'curlopt_sslcert',
- 'curlopt_sslcerttype',
- 'curlopt_sslengine_default',
- 'curlopt_sslengine',
- 'curlopt_sslkey',
- 'curlopt_sslkeypasswd',
- 'curlopt_sslkeytype',
- 'curlopt_sslversion',
- 'curlopt_tcp_nodelay',
- 'curlopt_timecondition',
- 'curlopt_timeout',
- 'curlopt_timevalue',
- 'curlopt_transfertext',
- 'curlopt_unrestricted_auth',
- 'curlopt_upload',
- 'curlopt_url',
- 'curlopt_use_ssl',
- 'curlopt_useragent',
- 'curlopt_userpwd',
- 'curlopt_verbose',
- 'curlopt_writedata',
- 'curlproxy_http',
- 'curlproxy_socks4',
- 'curlproxy_socks5',
- 'database_adddefaultsqlitehost',
- 'database_database',
- 'database_initialize',
- 'database_name',
- 'database_qs',
- 'database_table_database_tables',
- 'database_table_datasource_databases',
- 'database_table_datasource_hosts',
- 'database_table_datasources',
- 'database_table_table_fields',
- 'database_util_cleanpath',
- 'dbgp_stop_stack_name',
- 'debugging_break',
- 'debugging_breakpoint_get',
- 'debugging_breakpoint_list',
- 'debugging_breakpoint_remove',
- 'debugging_breakpoint_set',
- 'debugging_breakpoint_update',
- 'debugging_context_locals',
- 'debugging_context_self',
- 'debugging_context_vars',
- 'debugging_detach',
- 'debugging_enabled',
- 'debugging_get_context',
- 'debugging_get_stack',
- 'debugging_run',
- 'debugging_step_in',
- 'debugging_step_out',
- 'debugging_step_over',
- 'debugging_stop',
- 'debugging_terminate',
- 'decimal_random',
- 'decompress',
- 'decrypt_blowfish',
- 'define_atbegin',
- 'define_atend',
- 'dns_default',
- 'dns_lookup',
- 'document',
- 'email_attachment_mime_type',
- 'email_batch',
- 'email_digestchallenge',
- 'email_digestresponse',
- 'email_extract',
- 'email_findemails',
- 'email_fix_address_list',
- 'email_fix_address',
- 'email_fs_error_clean',
- 'email_immediate',
- 'email_initialize',
- 'email_merge',
- 'email_mxlookup',
- 'email_pop_priv_extract',
- 'email_pop_priv_quote',
- 'email_pop_priv_substring',
- 'email_queue',
- 'email_result',
- 'email_safeemail',
- 'email_send',
- 'email_status',
- 'email_token',
- 'email_translatebreakstocrlf',
- 'encode_qheader',
- 'encoding_iso88591',
- 'encoding_utf8',
- 'encrypt_blowfish',
- 'encrypt_crammd5',
- 'encrypt_hmac',
- 'encrypt_md5',
- 'eol',
- 'eq',
- 'error_code_aborted',
- 'error_code_dividebyzero',
- 'error_code_filenotfound',
- 'error_code_invalidparameter',
- 'error_code_methodnotfound',
- 'error_code_networkerror',
- 'error_code_noerror',
- 'error_code_resnotfound',
- 'error_code_runtimeassertion',
- 'error_code',
- 'error_msg_aborted',
- 'error_msg_dividebyzero',
- 'error_msg_filenotfound',
- 'error_msg_invalidparameter',
- 'error_msg_methodnotfound',
- 'error_msg_networkerror',
- 'error_msg_noerror',
- 'error_msg_resnotfound',
- 'error_msg_runtimeassertion',
- 'error_msg',
- 'error_obj',
- 'error_pop',
- 'error_push',
- 'error_reset',
- 'error_stack',
- 'escape_tag',
- 'evdns_resolve_ipv4',
- 'evdns_resolve_ipv6',
- 'evdns_resolve_reverse_ipv6',
- 'evdns_resolve_reverse',
- 'ew',
- 'fail_if',
- 'fail_ifnot',
- 'fail_now',
- 'fail',
- 'failure_clear',
- 'fastcgi_createfcgirequest',
- 'fastcgi_handlecon',
- 'fastcgi_handlereq',
- 'fastcgi_initialize',
- 'fastcgi_initiate_request',
- 'fcgi_abort_request',
- 'fcgi_authorize',
- 'fcgi_begin_request',
- 'fcgi_bodychunksize',
- 'fcgi_cant_mpx_conn',
- 'fcgi_data',
- 'fcgi_end_request',
- 'fcgi_filter',
- 'fcgi_get_values_result',
- 'fcgi_get_values',
- 'fcgi_keep_conn',
- 'fcgi_makeendrequestbody',
- 'fcgi_makestdoutbody',
- 'fcgi_max_conns',
- 'fcgi_max_reqs',
- 'fcgi_mpxs_conns',
- 'fcgi_null_request_id',
- 'fcgi_overloaded',
- 'fcgi_params',
- 'fcgi_read_timeout_seconds',
- 'fcgi_readparam',
- 'fcgi_request_complete',
- 'fcgi_responder',
- 'fcgi_stderr',
- 'fcgi_stdin',
- 'fcgi_stdout',
- 'fcgi_unknown_role',
- 'fcgi_unknown_type',
- 'fcgi_version_1',
- 'fcgi_x_stdin',
- 'field_name',
- 'field_names',
- 'field',
- 'file_copybuffersize',
- 'file_defaultencoding',
- 'file_forceroot',
- 'file_modechar',
- 'file_modeline',
- 'file_stderr',
- 'file_stdin',
- 'file_stdout',
- 'file_tempfile',
- 'filemakerds_initialize',
- 'filemakerds',
- 'found_count',
- 'ft',
- 'ftp_deletefile',
- 'ftp_getdata',
- 'ftp_getfile',
- 'ftp_getlisting',
- 'ftp_putdata',
- 'ftp_putfile',
- 'full',
- 'generateforeach',
- 'gt',
- 'gte',
- 'handle_failure',
- 'handle',
- 'hash_primes',
- 'html_comment',
- 'http_char_colon',
- 'http_char_cr',
- 'http_char_htab',
- 'http_char_lf',
- 'http_char_question',
- 'http_char_space',
- 'http_default_files',
- 'http_read_headers',
- 'http_read_timeout_secs',
- 'http_server_apps_path',
- 'http_server_request_logger',
- 'if_empty',
- 'if_false',
- 'if_null',
- 'if_true',
- 'include_cache_compare',
- 'include_currentpath',
- 'include_filepath',
- 'include_localpath',
- 'include_once',
- 'include_path',
- 'include_raw',
- 'include_url',
- 'include',
- 'includes',
- 'inline_colinfo_name_pos',
- 'inline_colinfo_type_pos',
- 'inline_colinfo_valuelist_pos',
- 'inline_columninfo_pos',
- 'inline_foundcount_pos',
- 'inline_namedget',
- 'inline_namedput',
- 'inline_resultrows_pos',
- 'inline_scopeget',
- 'inline_scopepop',
- 'inline_scopepush',
- 'inline',
- 'integer_bitor',
- 'integer_random',
- 'io_dir_dt_blk',
- 'io_dir_dt_chr',
- 'io_dir_dt_dir',
- 'io_dir_dt_fifo',
- 'io_dir_dt_lnk',
- 'io_dir_dt_reg',
- 'io_dir_dt_sock',
- 'io_dir_dt_unknown',
- 'io_dir_dt_wht',
- 'io_file_access',
- 'io_file_chdir',
- 'io_file_chmod',
- 'io_file_chown',
- 'io_file_dirname',
- 'io_file_f_dupfd',
- 'io_file_f_getfd',
- 'io_file_f_getfl',
- 'io_file_f_getlk',
- 'io_file_f_rdlck',
- 'io_file_f_setfd',
- 'io_file_f_setfl',
- 'io_file_f_setlk',
- 'io_file_f_setlkw',
- 'io_file_f_test',
- 'io_file_f_tlock',
- 'io_file_f_ulock',
- 'io_file_f_unlck',
- 'io_file_f_wrlck',
- 'io_file_fd_cloexec',
- 'io_file_fioasync',
- 'io_file_fioclex',
- 'io_file_fiodtype',
- 'io_file_fiogetown',
- 'io_file_fionbio',
- 'io_file_fionclex',
- 'io_file_fionread',
- 'io_file_fiosetown',
- 'io_file_getcwd',
- 'io_file_lchown',
- 'io_file_link',
- 'io_file_lockf',
- 'io_file_lstat_atime',
- 'io_file_lstat_mode',
- 'io_file_lstat_mtime',
- 'io_file_lstat_size',
- 'io_file_mkdir',
- 'io_file_mkfifo',
- 'io_file_mkstemp',
- 'io_file_o_append',
- 'io_file_o_async',
- 'io_file_o_creat',
- 'io_file_o_excl',
- 'io_file_o_exlock',
- 'io_file_o_fsync',
- 'io_file_o_nofollow',
- 'io_file_o_nonblock',
- 'io_file_o_rdonly',
- 'io_file_o_rdwr',
- 'io_file_o_shlock',
- 'io_file_o_sync',
- 'io_file_o_trunc',
- 'io_file_o_wronly',
- 'io_file_pipe',
- 'io_file_readlink',
- 'io_file_realpath',
- 'io_file_remove',
- 'io_file_rename',
- 'io_file_rmdir',
- 'io_file_s_ifblk',
- 'io_file_s_ifchr',
- 'io_file_s_ifdir',
- 'io_file_s_ififo',
- 'io_file_s_iflnk',
- 'io_file_s_ifmt',
- 'io_file_s_ifreg',
- 'io_file_s_ifsock',
- 'io_file_s_irgrp',
- 'io_file_s_iroth',
- 'io_file_s_irusr',
- 'io_file_s_irwxg',
- 'io_file_s_irwxo',
- 'io_file_s_irwxu',
- 'io_file_s_isgid',
- 'io_file_s_isuid',
- 'io_file_s_isvtx',
- 'io_file_s_iwgrp',
- 'io_file_s_iwoth',
- 'io_file_s_iwusr',
- 'io_file_s_ixgrp',
- 'io_file_s_ixoth',
- 'io_file_s_ixusr',
- 'io_file_seek_cur',
- 'io_file_seek_end',
- 'io_file_seek_set',
- 'io_file_stat_atime',
- 'io_file_stat_mode',
- 'io_file_stat_mtime',
- 'io_file_stat_size',
- 'io_file_stderr',
- 'io_file_stdin',
- 'io_file_stdout',
- 'io_file_symlink',
- 'io_file_tempnam',
- 'io_file_truncate',
- 'io_file_umask',
- 'io_file_unlink',
- 'io_net_accept',
- 'io_net_af_inet',
- 'io_net_af_inet6',
- 'io_net_af_unix',
- 'io_net_bind',
- 'io_net_connect',
- 'io_net_getpeername',
- 'io_net_getsockname',
- 'io_net_ipproto_ip',
- 'io_net_ipproto_udp',
- 'io_net_listen',
- 'io_net_msg_oob',
- 'io_net_msg_peek',
- 'io_net_msg_waitall',
- 'io_net_recv',
- 'io_net_recvfrom',
- 'io_net_send',
- 'io_net_sendto',
- 'io_net_shut_rd',
- 'io_net_shut_rdwr',
- 'io_net_shut_wr',
- 'io_net_shutdown',
- 'io_net_so_acceptconn',
- 'io_net_so_broadcast',
- 'io_net_so_debug',
- 'io_net_so_dontroute',
- 'io_net_so_error',
- 'io_net_so_keepalive',
- 'io_net_so_linger',
- 'io_net_so_oobinline',
- 'io_net_so_rcvbuf',
- 'io_net_so_rcvlowat',
- 'io_net_so_rcvtimeo',
- 'io_net_so_reuseaddr',
- 'io_net_so_sndbuf',
- 'io_net_so_sndlowat',
- 'io_net_so_sndtimeo',
- 'io_net_so_timestamp',
- 'io_net_so_type',
- 'io_net_so_useloopback',
- 'io_net_sock_dgram',
- 'io_net_sock_raw',
- 'io_net_sock_rdm',
- 'io_net_sock_seqpacket',
- 'io_net_sock_stream',
- 'io_net_socket',
- 'io_net_sol_socket',
- 'io_net_ssl_accept',
- 'io_net_ssl_begin',
- 'io_net_ssl_connect',
- 'io_net_ssl_end',
- 'io_net_ssl_error',
- 'io_net_ssl_errorstring',
- 'io_net_ssl_funcerrorstring',
- 'io_net_ssl_liberrorstring',
- 'io_net_ssl_read',
- 'io_net_ssl_reasonerrorstring',
- 'io_net_ssl_setacceptstate',
- 'io_net_ssl_setconnectstate',
- 'io_net_ssl_setverifylocations',
- 'io_net_ssl_shutdown',
- 'io_net_ssl_usecertificatechainfile',
- 'io_net_ssl_useprivatekeyfile',
- 'io_net_ssl_write',
- 'java_jvm_create',
- 'java_jvm_getenv',
- 'jdbc_initialize',
- 'json_back_slash',
- 'json_back_space',
- 'json_close_array',
- 'json_close_object',
- 'json_colon',
- 'json_comma',
- 'json_consume_array',
- 'json_consume_object',
- 'json_consume_string',
- 'json_consume_token',
- 'json_cr',
- 'json_debug',
- 'json_deserialize',
- 'json_e_lower',
- 'json_e_upper',
- 'json_f_lower',
- 'json_form_feed',
- 'json_forward_slash',
- 'json_lf',
- 'json_n_lower',
- 'json_negative',
- 'json_open_array',
- 'json_open_object',
- 'json_period',
+ 'curl_version_asynchdns',
+ 'curl_version_debug',
+ 'curl_version_gssnegotiate',
+ 'curl_version_idn',
+ 'curl_version_info',
+ 'curl_version_ipv6',
+ 'curl_version_kerberos4',
+ 'curl_version_largefile',
+ 'curl_version_libz',
+ 'curl_version_ntlm',
+ 'curl_version_spnego',
+ 'curl_version_ssl',
+ 'curl_version',
+ 'curlauth_any',
+ 'curlauth_anysafe',
+ 'curlauth_basic',
+ 'curlauth_digest',
+ 'curlauth_gssnegotiate',
+ 'curlauth_none',
+ 'curlauth_ntlm',
+ 'curle_aborted_by_callback',
+ 'curle_bad_calling_order',
+ 'curle_bad_content_encoding',
+ 'curle_bad_download_resume',
+ 'curle_bad_function_argument',
+ 'curle_bad_password_entered',
+ 'curle_couldnt_connect',
+ 'curle_couldnt_resolve_host',
+ 'curle_couldnt_resolve_proxy',
+ 'curle_failed_init',
+ 'curle_file_couldnt_read_file',
+ 'curle_filesize_exceeded',
+ 'curle_ftp_access_denied',
+ 'curle_ftp_cant_get_host',
+ 'curle_ftp_cant_reconnect',
+ 'curle_ftp_couldnt_get_size',
+ 'curle_ftp_couldnt_retr_file',
+ 'curle_ftp_couldnt_set_ascii',
+ 'curle_ftp_couldnt_set_binary',
+ 'curle_ftp_couldnt_use_rest',
+ 'curle_ftp_port_failed',
+ 'curle_ftp_quote_error',
+ 'curle_ftp_ssl_failed',
+ 'curle_ftp_user_password_incorrect',
+ 'curle_ftp_weird_227_format',
+ 'curle_ftp_weird_pass_reply',
+ 'curle_ftp_weird_pasv_reply',
+ 'curle_ftp_weird_server_reply',
+ 'curle_ftp_weird_user_reply',
+ 'curle_ftp_write_error',
+ 'curle_function_not_found',
+ 'curle_got_nothing',
+ 'curle_http_post_error',
+ 'curle_http_range_error',
+ 'curle_http_returned_error',
+ 'curle_interface_failed',
+ 'curle_ldap_cannot_bind',
+ 'curle_ldap_invalid_url',
+ 'curle_ldap_search_failed',
+ 'curle_library_not_found',
+ 'curle_login_denied',
+ 'curle_malformat_user',
+ 'curle_obsolete',
+ 'curle_ok',
+ 'curle_operation_timeouted',
+ 'curle_out_of_memory',
+ 'curle_partial_file',
+ 'curle_read_error',
+ 'curle_recv_error',
+ 'curle_send_error',
+ 'curle_send_fail_rewind',
+ 'curle_share_in_use',
+ 'curle_ssl_cacert',
+ 'curle_ssl_certproblem',
+ 'curle_ssl_cipher',
+ 'curle_ssl_connect_error',
+ 'curle_ssl_engine_initfailed',
+ 'curle_ssl_engine_notfound',
+ 'curle_ssl_engine_setfailed',
+ 'curle_ssl_peer_certificate',
+ 'curle_telnet_option_syntax',
+ 'curle_too_many_redirects',
+ 'curle_unknown_telnet_option',
+ 'curle_unsupported_protocol',
+ 'curle_url_malformat_user',
+ 'curle_url_malformat',
+ 'curle_write_error',
+ 'curlftpauth_default',
+ 'curlftpauth_ssl',
+ 'curlftpauth_tls',
+ 'curlftpssl_all',
+ 'curlftpssl_control',
+ 'curlftpssl_last',
+ 'curlftpssl_none',
+ 'curlftpssl_try',
+ 'curlinfo_connect_time',
+ 'curlinfo_content_length_download',
+ 'curlinfo_content_length_upload',
+ 'curlinfo_content_type',
+ 'curlinfo_effective_url',
+ 'curlinfo_filetime',
+ 'curlinfo_header_size',
+ 'curlinfo_http_connectcode',
+ 'curlinfo_httpauth_avail',
+ 'curlinfo_namelookup_time',
+ 'curlinfo_num_connects',
+ 'curlinfo_os_errno',
+ 'curlinfo_pretransfer_time',
+ 'curlinfo_proxyauth_avail',
+ 'curlinfo_redirect_count',
+ 'curlinfo_redirect_time',
+ 'curlinfo_request_size',
+ 'curlinfo_response_code',
+ 'curlinfo_size_download',
+ 'curlinfo_size_upload',
+ 'curlinfo_speed_download',
+ 'curlinfo_speed_upload',
+ 'curlinfo_ssl_engines',
+ 'curlinfo_ssl_verifyresult',
+ 'curlinfo_starttransfer_time',
+ 'curlinfo_total_time',
+ 'curlmsg_done',
+ 'curlopt_autoreferer',
+ 'curlopt_buffersize',
+ 'curlopt_cainfo',
+ 'curlopt_capath',
+ 'curlopt_connecttimeout',
+ 'curlopt_cookie',
+ 'curlopt_cookiefile',
+ 'curlopt_cookiejar',
+ 'curlopt_cookiesession',
+ 'curlopt_crlf',
+ 'curlopt_customrequest',
+ 'curlopt_dns_use_global_cache',
+ 'curlopt_egdsocket',
+ 'curlopt_encoding',
+ 'curlopt_failonerror',
+ 'curlopt_filetime',
+ 'curlopt_followlocation',
+ 'curlopt_forbid_reuse',
+ 'curlopt_fresh_connect',
+ 'curlopt_ftp_account',
+ 'curlopt_ftp_create_missing_dirs',
+ 'curlopt_ftp_response_timeout',
+ 'curlopt_ftp_ssl',
+ 'curlopt_ftp_use_eprt',
+ 'curlopt_ftp_use_epsv',
+ 'curlopt_ftpappend',
+ 'curlopt_ftplistonly',
+ 'curlopt_ftpport',
+ 'curlopt_ftpsslauth',
+ 'curlopt_header',
+ 'curlopt_http_version',
+ 'curlopt_http200aliases',
+ 'curlopt_httpauth',
+ 'curlopt_httpget',
+ 'curlopt_httpheader',
+ 'curlopt_httppost',
+ 'curlopt_httpproxytunnel',
+ 'curlopt_infilesize_large',
+ 'curlopt_infilesize',
+ 'curlopt_interface',
+ 'curlopt_ipresolve',
+ 'curlopt_krb4level',
+ 'curlopt_low_speed_limit',
+ 'curlopt_low_speed_time',
+ 'curlopt_mail_from',
+ 'curlopt_mail_rcpt',
+ 'curlopt_maxconnects',
+ 'curlopt_maxfilesize_large',
+ 'curlopt_maxfilesize',
+ 'curlopt_maxredirs',
+ 'curlopt_netrc_file',
+ 'curlopt_netrc',
+ 'curlopt_nobody',
+ 'curlopt_noprogress',
+ 'curlopt_port',
+ 'curlopt_post',
+ 'curlopt_postfields',
+ 'curlopt_postfieldsize_large',
+ 'curlopt_postfieldsize',
+ 'curlopt_postquote',
+ 'curlopt_prequote',
+ 'curlopt_proxy',
+ 'curlopt_proxyauth',
+ 'curlopt_proxyport',
+ 'curlopt_proxytype',
+ 'curlopt_proxyuserpwd',
+ 'curlopt_put',
+ 'curlopt_quote',
+ 'curlopt_random_file',
+ 'curlopt_range',
+ 'curlopt_readdata',
+ 'curlopt_referer',
+ 'curlopt_resume_from_large',
+ 'curlopt_resume_from',
+ 'curlopt_ssl_cipher_list',
+ 'curlopt_ssl_verifyhost',
+ 'curlopt_ssl_verifypeer',
+ 'curlopt_sslcert',
+ 'curlopt_sslcerttype',
+ 'curlopt_sslengine_default',
+ 'curlopt_sslengine',
+ 'curlopt_sslkey',
+ 'curlopt_sslkeypasswd',
+ 'curlopt_sslkeytype',
+ 'curlopt_sslversion',
+ 'curlopt_tcp_nodelay',
+ 'curlopt_timecondition',
+ 'curlopt_timeout',
+ 'curlopt_timevalue',
+ 'curlopt_transfertext',
+ 'curlopt_unrestricted_auth',
+ 'curlopt_upload',
+ 'curlopt_url',
+ 'curlopt_use_ssl',
+ 'curlopt_useragent',
+ 'curlopt_userpwd',
+ 'curlopt_verbose',
+ 'curlopt_writedata',
+ 'curlproxy_http',
+ 'curlproxy_socks4',
+ 'curlproxy_socks5',
+ 'database_adddefaultsqlitehost',
+ 'database_database',
+ 'database_initialize',
+ 'database_name',
+ 'database_qs',
+ 'database_table_database_tables',
+ 'database_table_datasource_databases',
+ 'database_table_datasource_hosts',
+ 'database_table_datasources',
+ 'database_table_table_fields',
+ 'database_util_cleanpath',
+ 'dbgp_stop_stack_name',
+ 'debugging_break',
+ 'debugging_breakpoint_get',
+ 'debugging_breakpoint_list',
+ 'debugging_breakpoint_remove',
+ 'debugging_breakpoint_set',
+ 'debugging_breakpoint_update',
+ 'debugging_context_locals',
+ 'debugging_context_self',
+ 'debugging_context_vars',
+ 'debugging_detach',
+ 'debugging_enabled',
+ 'debugging_get_context',
+ 'debugging_get_stack',
+ 'debugging_run',
+ 'debugging_step_in',
+ 'debugging_step_out',
+ 'debugging_step_over',
+ 'debugging_stop',
+ 'debugging_terminate',
+ 'decimal_random',
+ 'decompress',
+ 'decrypt_blowfish',
+ 'define_atbegin',
+ 'define_atend',
+ 'dns_default',
+ 'dns_lookup',
+ 'document',
+ 'email_attachment_mime_type',
+ 'email_batch',
+ 'email_digestchallenge',
+ 'email_digestresponse',
+ 'email_extract',
+ 'email_findemails',
+ 'email_fix_address_list',
+ 'email_fix_address',
+ 'email_fs_error_clean',
+ 'email_immediate',
+ 'email_initialize',
+ 'email_merge',
+ 'email_mxlookup',
+ 'email_pop_priv_extract',
+ 'email_pop_priv_quote',
+ 'email_pop_priv_substring',
+ 'email_queue',
+ 'email_result',
+ 'email_safeemail',
+ 'email_send',
+ 'email_status',
+ 'email_token',
+ 'email_translatebreakstocrlf',
+ 'encode_qheader',
+ 'encoding_iso88591',
+ 'encoding_utf8',
+ 'encrypt_blowfish',
+ 'encrypt_crammd5',
+ 'encrypt_hmac',
+ 'encrypt_md5',
+ 'eol',
+ 'eq',
+ 'error_code_aborted',
+ 'error_code_dividebyzero',
+ 'error_code_filenotfound',
+ 'error_code_invalidparameter',
+ 'error_code_methodnotfound',
+ 'error_code_networkerror',
+ 'error_code_noerror',
+ 'error_code_resnotfound',
+ 'error_code_runtimeassertion',
+ 'error_code',
+ 'error_msg_aborted',
+ 'error_msg_dividebyzero',
+ 'error_msg_filenotfound',
+ 'error_msg_invalidparameter',
+ 'error_msg_methodnotfound',
+ 'error_msg_networkerror',
+ 'error_msg_noerror',
+ 'error_msg_resnotfound',
+ 'error_msg_runtimeassertion',
+ 'error_msg',
+ 'error_obj',
+ 'error_pop',
+ 'error_push',
+ 'error_reset',
+ 'error_stack',
+ 'escape_tag',
+ 'evdns_resolve_ipv4',
+ 'evdns_resolve_ipv6',
+ 'evdns_resolve_reverse_ipv6',
+ 'evdns_resolve_reverse',
+ 'ew',
+ 'fail_if',
+ 'fail_ifnot',
+ 'fail_now',
+ 'fail',
+ 'failure_clear',
+ 'fastcgi_createfcgirequest',
+ 'fastcgi_handlecon',
+ 'fastcgi_handlereq',
+ 'fastcgi_initialize',
+ 'fastcgi_initiate_request',
+ 'fcgi_abort_request',
+ 'fcgi_authorize',
+ 'fcgi_begin_request',
+ 'fcgi_bodychunksize',
+ 'fcgi_cant_mpx_conn',
+ 'fcgi_data',
+ 'fcgi_end_request',
+ 'fcgi_filter',
+ 'fcgi_get_values_result',
+ 'fcgi_get_values',
+ 'fcgi_keep_conn',
+ 'fcgi_makeendrequestbody',
+ 'fcgi_makestdoutbody',
+ 'fcgi_max_conns',
+ 'fcgi_max_reqs',
+ 'fcgi_mpxs_conns',
+ 'fcgi_null_request_id',
+ 'fcgi_overloaded',
+ 'fcgi_params',
+ 'fcgi_read_timeout_seconds',
+ 'fcgi_readparam',
+ 'fcgi_request_complete',
+ 'fcgi_responder',
+ 'fcgi_stderr',
+ 'fcgi_stdin',
+ 'fcgi_stdout',
+ 'fcgi_unknown_role',
+ 'fcgi_unknown_type',
+ 'fcgi_version_1',
+ 'fcgi_x_stdin',
+ 'field_name',
+ 'field_names',
+ 'field',
+ 'file_copybuffersize',
+ 'file_defaultencoding',
+ 'file_forceroot',
+ 'file_modechar',
+ 'file_modeline',
+ 'file_stderr',
+ 'file_stdin',
+ 'file_stdout',
+ 'file_tempfile',
+ 'filemakerds_initialize',
+ 'filemakerds',
+ 'found_count',
+ 'ft',
+ 'ftp_deletefile',
+ 'ftp_getdata',
+ 'ftp_getfile',
+ 'ftp_getlisting',
+ 'ftp_putdata',
+ 'ftp_putfile',
+ 'full',
+ 'generateforeach',
+ 'gt',
+ 'gte',
+ 'handle_failure',
+ 'handle',
+ 'hash_primes',
+ 'html_comment',
+ 'http_char_colon',
+ 'http_char_cr',
+ 'http_char_htab',
+ 'http_char_lf',
+ 'http_char_question',
+ 'http_char_space',
+ 'http_default_files',
+ 'http_read_headers',
+ 'http_read_timeout_secs',
+ 'http_server_apps_path',
+ 'http_server_request_logger',
+ 'if_empty',
+ 'if_false',
+ 'if_null',
+ 'if_true',
+ 'include_cache_compare',
+ 'include_currentpath',
+ 'include_filepath',
+ 'include_localpath',
+ 'include_once',
+ 'include_path',
+ 'include_raw',
+ 'include_url',
+ 'include',
+ 'includes',
+ 'inline_colinfo_name_pos',
+ 'inline_colinfo_type_pos',
+ 'inline_colinfo_valuelist_pos',
+ 'inline_columninfo_pos',
+ 'inline_foundcount_pos',
+ 'inline_namedget',
+ 'inline_namedput',
+ 'inline_resultrows_pos',
+ 'inline_scopeget',
+ 'inline_scopepop',
+ 'inline_scopepush',
+ 'inline',
+ 'integer_bitor',
+ 'integer_random',
+ 'io_dir_dt_blk',
+ 'io_dir_dt_chr',
+ 'io_dir_dt_dir',
+ 'io_dir_dt_fifo',
+ 'io_dir_dt_lnk',
+ 'io_dir_dt_reg',
+ 'io_dir_dt_sock',
+ 'io_dir_dt_unknown',
+ 'io_dir_dt_wht',
+ 'io_file_access',
+ 'io_file_chdir',
+ 'io_file_chmod',
+ 'io_file_chown',
+ 'io_file_dirname',
+ 'io_file_f_dupfd',
+ 'io_file_f_getfd',
+ 'io_file_f_getfl',
+ 'io_file_f_getlk',
+ 'io_file_f_rdlck',
+ 'io_file_f_setfd',
+ 'io_file_f_setfl',
+ 'io_file_f_setlk',
+ 'io_file_f_setlkw',
+ 'io_file_f_test',
+ 'io_file_f_tlock',
+ 'io_file_f_ulock',
+ 'io_file_f_unlck',
+ 'io_file_f_wrlck',
+ 'io_file_fd_cloexec',
+ 'io_file_fioasync',
+ 'io_file_fioclex',
+ 'io_file_fiodtype',
+ 'io_file_fiogetown',
+ 'io_file_fionbio',
+ 'io_file_fionclex',
+ 'io_file_fionread',
+ 'io_file_fiosetown',
+ 'io_file_getcwd',
+ 'io_file_lchown',
+ 'io_file_link',
+ 'io_file_lockf',
+ 'io_file_lstat_atime',
+ 'io_file_lstat_mode',
+ 'io_file_lstat_mtime',
+ 'io_file_lstat_size',
+ 'io_file_mkdir',
+ 'io_file_mkfifo',
+ 'io_file_mkstemp',
+ 'io_file_o_append',
+ 'io_file_o_async',
+ 'io_file_o_creat',
+ 'io_file_o_excl',
+ 'io_file_o_exlock',
+ 'io_file_o_fsync',
+ 'io_file_o_nofollow',
+ 'io_file_o_nonblock',
+ 'io_file_o_rdonly',
+ 'io_file_o_rdwr',
+ 'io_file_o_shlock',
+ 'io_file_o_sync',
+ 'io_file_o_trunc',
+ 'io_file_o_wronly',
+ 'io_file_pipe',
+ 'io_file_readlink',
+ 'io_file_realpath',
+ 'io_file_remove',
+ 'io_file_rename',
+ 'io_file_rmdir',
+ 'io_file_s_ifblk',
+ 'io_file_s_ifchr',
+ 'io_file_s_ifdir',
+ 'io_file_s_ififo',
+ 'io_file_s_iflnk',
+ 'io_file_s_ifmt',
+ 'io_file_s_ifreg',
+ 'io_file_s_ifsock',
+ 'io_file_s_irgrp',
+ 'io_file_s_iroth',
+ 'io_file_s_irusr',
+ 'io_file_s_irwxg',
+ 'io_file_s_irwxo',
+ 'io_file_s_irwxu',
+ 'io_file_s_isgid',
+ 'io_file_s_isuid',
+ 'io_file_s_isvtx',
+ 'io_file_s_iwgrp',
+ 'io_file_s_iwoth',
+ 'io_file_s_iwusr',
+ 'io_file_s_ixgrp',
+ 'io_file_s_ixoth',
+ 'io_file_s_ixusr',
+ 'io_file_seek_cur',
+ 'io_file_seek_end',
+ 'io_file_seek_set',
+ 'io_file_stat_atime',
+ 'io_file_stat_mode',
+ 'io_file_stat_mtime',
+ 'io_file_stat_size',
+ 'io_file_stderr',
+ 'io_file_stdin',
+ 'io_file_stdout',
+ 'io_file_symlink',
+ 'io_file_tempnam',
+ 'io_file_truncate',
+ 'io_file_umask',
+ 'io_file_unlink',
+ 'io_net_accept',
+ 'io_net_af_inet',
+ 'io_net_af_inet6',
+ 'io_net_af_unix',
+ 'io_net_bind',
+ 'io_net_connect',
+ 'io_net_getpeername',
+ 'io_net_getsockname',
+ 'io_net_ipproto_ip',
+ 'io_net_ipproto_udp',
+ 'io_net_listen',
+ 'io_net_msg_oob',
+ 'io_net_msg_peek',
+ 'io_net_msg_waitall',
+ 'io_net_recv',
+ 'io_net_recvfrom',
+ 'io_net_send',
+ 'io_net_sendto',
+ 'io_net_shut_rd',
+ 'io_net_shut_rdwr',
+ 'io_net_shut_wr',
+ 'io_net_shutdown',
+ 'io_net_so_acceptconn',
+ 'io_net_so_broadcast',
+ 'io_net_so_debug',
+ 'io_net_so_dontroute',
+ 'io_net_so_error',
+ 'io_net_so_keepalive',
+ 'io_net_so_linger',
+ 'io_net_so_oobinline',
+ 'io_net_so_rcvbuf',
+ 'io_net_so_rcvlowat',
+ 'io_net_so_rcvtimeo',
+ 'io_net_so_reuseaddr',
+ 'io_net_so_sndbuf',
+ 'io_net_so_sndlowat',
+ 'io_net_so_sndtimeo',
+ 'io_net_so_timestamp',
+ 'io_net_so_type',
+ 'io_net_so_useloopback',
+ 'io_net_sock_dgram',
+ 'io_net_sock_raw',
+ 'io_net_sock_rdm',
+ 'io_net_sock_seqpacket',
+ 'io_net_sock_stream',
+ 'io_net_socket',
+ 'io_net_sol_socket',
+ 'io_net_ssl_accept',
+ 'io_net_ssl_begin',
+ 'io_net_ssl_connect',
+ 'io_net_ssl_end',
+ 'io_net_ssl_error',
+ 'io_net_ssl_errorstring',
+ 'io_net_ssl_funcerrorstring',
+ 'io_net_ssl_liberrorstring',
+ 'io_net_ssl_read',
+ 'io_net_ssl_reasonerrorstring',
+ 'io_net_ssl_setacceptstate',
+ 'io_net_ssl_setconnectstate',
+ 'io_net_ssl_setverifylocations',
+ 'io_net_ssl_shutdown',
+ 'io_net_ssl_usecertificatechainfile',
+ 'io_net_ssl_useprivatekeyfile',
+ 'io_net_ssl_write',
+ 'java_jvm_create',
+ 'java_jvm_getenv',
+ 'jdbc_initialize',
+ 'json_back_slash',
+ 'json_back_space',
+ 'json_close_array',
+ 'json_close_object',
+ 'json_colon',
+ 'json_comma',
+ 'json_consume_array',
+ 'json_consume_object',
+ 'json_consume_string',
+ 'json_consume_token',
+ 'json_cr',
+ 'json_debug',
+ 'json_deserialize',
+ 'json_e_lower',
+ 'json_e_upper',
+ 'json_f_lower',
+ 'json_form_feed',
+ 'json_forward_slash',
+ 'json_lf',
+ 'json_n_lower',
+ 'json_negative',
+ 'json_open_array',
+ 'json_open_object',
+ 'json_period',
'json_positive',
- 'json_quote_double',
- 'json_rpccall',
- 'json_serialize',
- 'json_t_lower',
- 'json_tab',
- 'json_white_space',
- 'keycolumn_name',
- 'keycolumn_value',
- 'keyfield_name',
- 'keyfield_value',
- 'lasso_currentaction',
- 'lasso_errorreporting',
- 'lasso_executiontimelimit',
- 'lasso_methodexists',
- 'lasso_tagexists',
- 'lasso_uniqueid',
- 'lasso_version',
- 'lassoapp_current_app',
- 'lassoapp_current_include',
- 'lassoapp_do_with_include',
- 'lassoapp_exists',
- 'lassoapp_find_missing_file',
- 'lassoapp_format_mod_date',
- 'lassoapp_get_capabilities_name',
- 'lassoapp_include_current',
- 'lassoapp_include',
- 'lassoapp_initialize_db',
- 'lassoapp_initialize',
- 'lassoapp_invoke_resource',
- 'lassoapp_issourcefileextension',
- 'lassoapp_link',
- 'lassoapp_load_module',
- 'lassoapp_mime_get',
- 'lassoapp_mime_type_appcache',
- 'lassoapp_mime_type_css',
- 'lassoapp_mime_type_csv',
- 'lassoapp_mime_type_doc',
- 'lassoapp_mime_type_docx',
- 'lassoapp_mime_type_eof',
- 'lassoapp_mime_type_eot',
- 'lassoapp_mime_type_gif',
- 'lassoapp_mime_type_html',
- 'lassoapp_mime_type_ico',
- 'lassoapp_mime_type_jpg',
- 'lassoapp_mime_type_js',
- 'lassoapp_mime_type_lasso',
- 'lassoapp_mime_type_map',
- 'lassoapp_mime_type_pdf',
- 'lassoapp_mime_type_png',
- 'lassoapp_mime_type_ppt',
- 'lassoapp_mime_type_rss',
- 'lassoapp_mime_type_svg',
- 'lassoapp_mime_type_swf',
- 'lassoapp_mime_type_tif',
- 'lassoapp_mime_type_ttf',
- 'lassoapp_mime_type_txt',
- 'lassoapp_mime_type_woff',
- 'lassoapp_mime_type_xaml',
- 'lassoapp_mime_type_xap',
- 'lassoapp_mime_type_xbap',
- 'lassoapp_mime_type_xhr',
- 'lassoapp_mime_type_xml',
- 'lassoapp_mime_type_zip',
- 'lassoapp_path_to_method_name',
- 'lassoapp_settingsdb',
- 'layout_name',
- 'lcapi_datasourceadd',
- 'lcapi_datasourcecloseconnection',
- 'lcapi_datasourcedelete',
- 'lcapi_datasourceduplicate',
- 'lcapi_datasourceexecsql',
- 'lcapi_datasourcefindall',
- 'lcapi_datasourceimage',
- 'lcapi_datasourceinfo',
- 'lcapi_datasourceinit',
- 'lcapi_datasourcematchesname',
- 'lcapi_datasourcenames',
- 'lcapi_datasourcenothing',
- 'lcapi_datasourceopand',
- 'lcapi_datasourceopany',
- 'lcapi_datasourceopbw',
- 'lcapi_datasourceopct',
- 'lcapi_datasourceopeq',
- 'lcapi_datasourceopew',
- 'lcapi_datasourceopft',
- 'lcapi_datasourceopgt',
- 'lcapi_datasourceopgteq',
- 'lcapi_datasourceopin',
- 'lcapi_datasourceoplt',
- 'lcapi_datasourceoplteq',
- 'lcapi_datasourceopnbw',
- 'lcapi_datasourceopnct',
- 'lcapi_datasourceopneq',
- 'lcapi_datasourceopnew',
- 'lcapi_datasourceopnin',
- 'lcapi_datasourceopno',
- 'lcapi_datasourceopnot',
- 'lcapi_datasourceopnrx',
- 'lcapi_datasourceopor',
- 'lcapi_datasourceoprx',
- 'lcapi_datasourcepreparesql',
- 'lcapi_datasourceprotectionnone',
- 'lcapi_datasourceprotectionreadonly',
- 'lcapi_datasourcerandom',
- 'lcapi_datasourceschemanames',
- 'lcapi_datasourcescripts',
- 'lcapi_datasourcesearch',
- 'lcapi_datasourcesortascending',
- 'lcapi_datasourcesortcustom',
- 'lcapi_datasourcesortdescending',
- 'lcapi_datasourcetablenames',
- 'lcapi_datasourceterm',
- 'lcapi_datasourcetickle',
- 'lcapi_datasourcetypeblob',
- 'lcapi_datasourcetypeboolean',
- 'lcapi_datasourcetypedate',
- 'lcapi_datasourcetypedecimal',
- 'lcapi_datasourcetypeinteger',
- 'lcapi_datasourcetypestring',
- 'lcapi_datasourceunpreparesql',
- 'lcapi_datasourceupdate',
- 'lcapi_fourchartointeger',
- 'lcapi_listdatasources',
- 'lcapi_loadmodule',
- 'lcapi_loadmodules',
- 'lcapi_updatedatasourceslist',
- 'ldap_scope_base',
+ 'json_quote_double',
+ 'json_rpccall',
+ 'json_serialize',
+ 'json_t_lower',
+ 'json_tab',
+ 'json_white_space',
+ 'keycolumn_name',
+ 'keycolumn_value',
+ 'keyfield_name',
+ 'keyfield_value',
+ 'lasso_currentaction',
+ 'lasso_errorreporting',
+ 'lasso_executiontimelimit',
+ 'lasso_methodexists',
+ 'lasso_tagexists',
+ 'lasso_uniqueid',
+ 'lasso_version',
+ 'lassoapp_current_app',
+ 'lassoapp_current_include',
+ 'lassoapp_do_with_include',
+ 'lassoapp_exists',
+ 'lassoapp_find_missing_file',
+ 'lassoapp_format_mod_date',
+ 'lassoapp_get_capabilities_name',
+ 'lassoapp_include_current',
+ 'lassoapp_include',
+ 'lassoapp_initialize_db',
+ 'lassoapp_initialize',
+ 'lassoapp_invoke_resource',
+ 'lassoapp_issourcefileextension',
+ 'lassoapp_link',
+ 'lassoapp_load_module',
+ 'lassoapp_mime_get',
+ 'lassoapp_mime_type_appcache',
+ 'lassoapp_mime_type_css',
+ 'lassoapp_mime_type_csv',
+ 'lassoapp_mime_type_doc',
+ 'lassoapp_mime_type_docx',
+ 'lassoapp_mime_type_eof',
+ 'lassoapp_mime_type_eot',
+ 'lassoapp_mime_type_gif',
+ 'lassoapp_mime_type_html',
+ 'lassoapp_mime_type_ico',
+ 'lassoapp_mime_type_jpg',
+ 'lassoapp_mime_type_js',
+ 'lassoapp_mime_type_lasso',
+ 'lassoapp_mime_type_map',
+ 'lassoapp_mime_type_pdf',
+ 'lassoapp_mime_type_png',
+ 'lassoapp_mime_type_ppt',
+ 'lassoapp_mime_type_rss',
+ 'lassoapp_mime_type_svg',
+ 'lassoapp_mime_type_swf',
+ 'lassoapp_mime_type_tif',
+ 'lassoapp_mime_type_ttf',
+ 'lassoapp_mime_type_txt',
+ 'lassoapp_mime_type_woff',
+ 'lassoapp_mime_type_xaml',
+ 'lassoapp_mime_type_xap',
+ 'lassoapp_mime_type_xbap',
+ 'lassoapp_mime_type_xhr',
+ 'lassoapp_mime_type_xml',
+ 'lassoapp_mime_type_zip',
+ 'lassoapp_path_to_method_name',
+ 'lassoapp_settingsdb',
+ 'layout_name',
+ 'lcapi_datasourceadd',
+ 'lcapi_datasourcecloseconnection',
+ 'lcapi_datasourcedelete',
+ 'lcapi_datasourceduplicate',
+ 'lcapi_datasourceexecsql',
+ 'lcapi_datasourcefindall',
+ 'lcapi_datasourceimage',
+ 'lcapi_datasourceinfo',
+ 'lcapi_datasourceinit',
+ 'lcapi_datasourcematchesname',
+ 'lcapi_datasourcenames',
+ 'lcapi_datasourcenothing',
+ 'lcapi_datasourceopand',
+ 'lcapi_datasourceopany',
+ 'lcapi_datasourceopbw',
+ 'lcapi_datasourceopct',
+ 'lcapi_datasourceopeq',
+ 'lcapi_datasourceopew',
+ 'lcapi_datasourceopft',
+ 'lcapi_datasourceopgt',
+ 'lcapi_datasourceopgteq',
+ 'lcapi_datasourceopin',
+ 'lcapi_datasourceoplt',
+ 'lcapi_datasourceoplteq',
+ 'lcapi_datasourceopnbw',
+ 'lcapi_datasourceopnct',
+ 'lcapi_datasourceopneq',
+ 'lcapi_datasourceopnew',
+ 'lcapi_datasourceopnin',
+ 'lcapi_datasourceopno',
+ 'lcapi_datasourceopnot',
+ 'lcapi_datasourceopnrx',
+ 'lcapi_datasourceopor',
+ 'lcapi_datasourceoprx',
+ 'lcapi_datasourcepreparesql',
+ 'lcapi_datasourceprotectionnone',
+ 'lcapi_datasourceprotectionreadonly',
+ 'lcapi_datasourcerandom',
+ 'lcapi_datasourceschemanames',
+ 'lcapi_datasourcescripts',
+ 'lcapi_datasourcesearch',
+ 'lcapi_datasourcesortascending',
+ 'lcapi_datasourcesortcustom',
+ 'lcapi_datasourcesortdescending',
+ 'lcapi_datasourcetablenames',
+ 'lcapi_datasourceterm',
+ 'lcapi_datasourcetickle',
+ 'lcapi_datasourcetypeblob',
+ 'lcapi_datasourcetypeboolean',
+ 'lcapi_datasourcetypedate',
+ 'lcapi_datasourcetypedecimal',
+ 'lcapi_datasourcetypeinteger',
+ 'lcapi_datasourcetypestring',
+ 'lcapi_datasourceunpreparesql',
+ 'lcapi_datasourceupdate',
+ 'lcapi_fourchartointeger',
+ 'lcapi_listdatasources',
+ 'lcapi_loadmodule',
+ 'lcapi_loadmodules',
+ 'lcapi_updatedatasourceslist',
+ 'ldap_scope_base',
'ldap_scope_children',
- 'ldap_scope_onelevel',
- 'ldap_scope_subtree',
- 'library_once',
- 'library',
- 'ljapi_initialize',
- 'locale_availablelocales',
- 'locale_canada',
- 'locale_canadafrench',
- 'locale_china',
- 'locale_chinese',
- 'locale_default',
- 'locale_english',
- 'locale_format_style_date_time',
- 'locale_format_style_default',
- 'locale_format_style_full',
- 'locale_format_style_long',
- 'locale_format_style_medium',
- 'locale_format_style_none',
- 'locale_format_style_short',
- 'locale_format',
- 'locale_france',
- 'locale_french',
- 'locale_german',
- 'locale_germany',
- 'locale_isocountries',
- 'locale_isolanguages',
- 'locale_italian',
- 'locale_italy',
- 'locale_japan',
- 'locale_japanese',
- 'locale_korea',
- 'locale_korean',
- 'locale_prc',
- 'locale_setdefault',
- 'locale_simplifiedchinese',
- 'locale_taiwan',
- 'locale_traditionalchinese',
- 'locale_uk',
- 'locale_us',
- 'log_always',
- 'log_critical',
- 'log_deprecated',
- 'log_destination_console',
- 'log_destination_database',
- 'log_destination_file',
- 'log_detail',
- 'log_initialize',
- 'log_level_critical',
- 'log_level_deprecated',
- 'log_level_detail',
- 'log_level_sql',
- 'log_level_warning',
- 'log_max_file_size',
- 'log_setdestination',
- 'log_sql',
- 'log_trim_file_size',
- 'log_warning',
- 'log',
- 'loop_abort',
- 'loop_continue',
- 'loop_count',
- 'loop_key_pop',
- 'loop_key_push',
- 'loop_key',
- 'loop_pop',
- 'loop_push',
- 'loop_value_pop',
- 'loop_value_push',
- 'loop_value',
- 'loop',
- 'lt',
- 'lte',
- 'main_thread_only',
- 'max',
- 'maxrecords_value',
- 'median',
- 'method_name',
- 'micros',
- 'millis',
- 'min',
- 'minimal',
- 'mongo_insert_continue_on_error',
- 'mongo_insert_no_validate',
- 'mongo_insert_none',
- 'mongo_query_await_data',
- 'mongo_query_exhaust',
- 'mongo_query_no_cursor_timeout',
- 'mongo_query_none',
- 'mongo_query_oplog_replay',
- 'mongo_query_partial',
- 'mongo_query_slave_ok',
- 'mongo_query_tailable_cursor',
- 'mongo_remove_none',
- 'mongo_remove_single_remove',
- 'mongo_update_multi_update',
- 'mongo_update_no_validate',
- 'mongo_update_none',
- 'mongo_update_upsert',
- 'mustache_compile_file',
- 'mustache_compile_string',
- 'mustache_include',
- 'mysqlds',
- 'namespace_global',
- 'namespace_import',
- 'namespace_using',
- 'nbw',
- 'ncn',
- 'neq',
- 'net_connectinprogress',
- 'net_connectok',
- 'net_typessl',
- 'net_typessltcp',
- 'net_typessludp',
- 'net_typetcp',
- 'net_typeudp',
- 'net_waitread',
- 'net_waittimeout',
- 'net_waitwrite',
- 'new',
- 'none',
- 'nrx',
- 'nslookup',
- 'odbc_session_driver_mssql',
- 'odbc',
- 'output_none',
- 'output',
- 'pdf_package',
- 'pdf_rectangle',
- 'pdf_serve',
- 'pi',
- 'portal',
- 'postgresql',
- 'process',
- 'protect_now',
- 'protect',
- 'queriable_average',
- 'queriable_defaultcompare',
- 'queriable_do',
- 'queriable_internal_combinebindings',
- 'queriable_max',
- 'queriable_min',
- 'queriable_qsort',
- 'queriable_reversecompare',
- 'queriable_sum',
- 'random_seed',
- 'range',
- 'records_array',
- 'records_map',
- 'records',
- 'redirect_url',
- 'referer_url',
- 'referrer_url',
- 'register_thread',
- 'register',
- 'response_filepath',
- 'response_localpath',
- 'response_path',
- 'response_realm',
- 'response_root',
- 'resultset_count',
- 'resultset',
- 'resultsets',
- 'rows_array',
- 'rows_impl',
- 'rows',
- 'rx',
- 'schema_name',
- 'security_database',
- 'security_default_realm',
- 'security_initialize',
- 'security_table_groups',
- 'security_table_ug_map',
- 'security_table_users',
- 'selected',
- 'series',
- 'server_admin',
- 'server_ip',
- 'server_name',
- 'server_port',
- 'server_protocol',
- 'server_push',
- 'server_signature',
- 'server_software',
- 'session_abort',
- 'session_addvar',
- 'session_decorate',
- 'session_deleteexpired',
- 'session_end',
- 'session_getdefaultdriver',
- 'session_id',
- 'session_initialize',
- 'session_removevar',
- 'session_result',
- 'session_setdefaultdriver',
- 'session_start',
- 'shown_count',
- 'shown_first',
- 'shown_last',
- 'site_id',
- 'site_name',
- 'skiprecords_value',
- 'sleep',
- 'split_thread',
- 'sqlite_abort',
- 'sqlite_auth',
- 'sqlite_blob',
- 'sqlite_busy',
- 'sqlite_cantopen',
- 'sqlite_constraint',
- 'sqlite_corrupt',
- 'sqlite_createdb',
- 'sqlite_done',
- 'sqlite_empty',
- 'sqlite_error',
- 'sqlite_float',
- 'sqlite_format',
- 'sqlite_full',
- 'sqlite_integer',
- 'sqlite_internal',
- 'sqlite_interrupt',
- 'sqlite_ioerr',
- 'sqlite_locked',
- 'sqlite_mismatch',
- 'sqlite_misuse',
- 'sqlite_nolfs',
- 'sqlite_nomem',
- 'sqlite_notadb',
- 'sqlite_notfound',
- 'sqlite_null',
- 'sqlite_ok',
- 'sqlite_perm',
- 'sqlite_protocol',
- 'sqlite_range',
- 'sqlite_readonly',
- 'sqlite_row',
- 'sqlite_schema',
- 'sqlite_setsleepmillis',
- 'sqlite_setsleeptries',
- 'sqlite_text',
- 'sqlite_toobig',
- 'sqliteconnector',
- 'staticarray_join',
- 'stdout',
- 'stdoutnl',
- 'string_validcharset',
- 'suspend',
- 'sys_appspath',
- 'sys_chroot',
- 'sys_clock',
- 'sys_clockspersec',
- 'sys_credits',
- 'sys_databasespath',
- 'sys_detach_exec',
- 'sys_difftime',
- 'sys_dll_ext',
- 'sys_drand48',
- 'sys_environ',
- 'sys_eol',
- 'sys_erand48',
- 'sys_errno',
- 'sys_exec_pid_to_os_pid',
- 'sys_exec',
- 'sys_exit',
- 'sys_fork',
- 'sys_garbagecollect',
- 'sys_getbytessincegc',
- 'sys_getchar',
- 'sys_getegid',
- 'sys_getenv',
- 'sys_geteuid',
- 'sys_getgid',
- 'sys_getgrnam',
- 'sys_getheapfreebytes',
- 'sys_getheapsize',
- 'sys_getlogin',
- 'sys_getpid',
- 'sys_getppid',
- 'sys_getpwnam',
- 'sys_getpwuid',
- 'sys_getstartclock',
- 'sys_getthreadcount',
- 'sys_getuid',
- 'sys_growheapby',
- 'sys_homepath',
- 'sys_is_full_path',
- 'sys_is_windows',
- 'sys_isfullpath',
- 'sys_iswindows',
- 'sys_iterate',
- 'sys_jrand48',
- 'sys_kill_exec',
- 'sys_kill',
- 'sys_lcong48',
- 'sys_librariespath',
- 'sys_listtraits',
- 'sys_listtypes',
- 'sys_listunboundmethods',
- 'sys_loadlibrary',
- 'sys_lrand48',
- 'sys_masterhomepath',
- 'sys_mrand48',
- 'sys_nrand48',
- 'sys_pid_exec',
- 'sys_pointersize',
- 'sys_rand',
- 'sys_random',
- 'sys_seed48',
- 'sys_setenv',
- 'sys_setgid',
- 'sys_setsid',
- 'sys_setuid',
- 'sys_sigabrt',
- 'sys_sigalrm',
- 'sys_sigbus',
- 'sys_sigchld',
- 'sys_sigcont',
- 'sys_sigfpe',
- 'sys_sighup',
- 'sys_sigill',
- 'sys_sigint',
- 'sys_sigkill',
- 'sys_sigpipe',
- 'sys_sigprof',
- 'sys_sigquit',
- 'sys_sigsegv',
- 'sys_sigstop',
- 'sys_sigsys',
- 'sys_sigterm',
- 'sys_sigtrap',
- 'sys_sigtstp',
- 'sys_sigttin',
- 'sys_sigttou',
- 'sys_sigurg',
- 'sys_sigusr1',
- 'sys_sigusr2',
- 'sys_sigvtalrm',
- 'sys_sigxcpu',
- 'sys_sigxfsz',
- 'sys_srand',
- 'sys_srand48',
- 'sys_srandom',
- 'sys_strerror',
- 'sys_supportpath',
- 'sys_test_exec',
- 'sys_time',
- 'sys_uname',
- 'sys_unsetenv',
- 'sys_usercapimodulepath',
- 'sys_userstartuppath',
- 'sys_version',
- 'sys_wait_exec',
- 'sys_waitpid',
- 'sys_wcontinued',
- 'sys_while',
- 'sys_wnohang',
- 'sys_wuntraced',
- 'table_name',
- 'tag_exists',
- 'tag_name',
- 'thread_var_get',
- 'thread_var_pop',
- 'thread_var_push',
- 'threadvar_find',
- 'threadvar_get',
- 'threadvar_set_asrt',
- 'threadvar_set',
- 'timer',
- 'token_value',
- 'treemap',
- 'u_lb_alphabetic',
- 'u_lb_ambiguous',
- 'u_lb_break_after',
- 'u_lb_break_before',
- 'u_lb_break_both',
- 'u_lb_break_symbols',
- 'u_lb_carriage_return',
- 'u_lb_close_punctuation',
- 'u_lb_combining_mark',
- 'u_lb_complex_context',
- 'u_lb_contingent_break',
- 'u_lb_exclamation',
- 'u_lb_glue',
- 'u_lb_h2',
- 'u_lb_h3',
- 'u_lb_hyphen',
- 'u_lb_ideographic',
- 'u_lb_infix_numeric',
- 'u_lb_inseparable',
- 'u_lb_jl',
- 'u_lb_jt',
- 'u_lb_jv',
- 'u_lb_line_feed',
- 'u_lb_mandatory_break',
- 'u_lb_next_line',
- 'u_lb_nonstarter',
- 'u_lb_numeric',
- 'u_lb_open_punctuation',
- 'u_lb_postfix_numeric',
- 'u_lb_prefix_numeric',
- 'u_lb_quotation',
- 'u_lb_space',
- 'u_lb_surrogate',
- 'u_lb_unknown',
- 'u_lb_word_joiner',
- 'u_lb_zwspace',
- 'u_nt_decimal',
- 'u_nt_digit',
- 'u_nt_none',
- 'u_nt_numeric',
- 'u_sb_aterm',
- 'u_sb_close',
- 'u_sb_format',
- 'u_sb_lower',
- 'u_sb_numeric',
- 'u_sb_oletter',
- 'u_sb_other',
- 'u_sb_sep',
- 'u_sb_sp',
- 'u_sb_sterm',
- 'u_sb_upper',
- 'u_wb_aletter',
- 'u_wb_extendnumlet',
- 'u_wb_format',
- 'u_wb_katakana',
- 'u_wb_midletter',
- 'u_wb_midnum',
- 'u_wb_numeric',
- 'u_wb_other',
- 'ucal_ampm',
- 'ucal_dayofmonth',
- 'ucal_dayofweek',
- 'ucal_dayofweekinmonth',
- 'ucal_dayofyear',
- 'ucal_daysinfirstweek',
- 'ucal_dowlocal',
- 'ucal_dstoffset',
- 'ucal_era',
- 'ucal_extendedyear',
- 'ucal_firstdayofweek',
- 'ucal_hour',
- 'ucal_hourofday',
- 'ucal_julianday',
- 'ucal_lenient',
- 'ucal_listtimezones',
- 'ucal_millisecond',
- 'ucal_millisecondsinday',
- 'ucal_minute',
- 'ucal_month',
- 'ucal_second',
- 'ucal_weekofmonth',
- 'ucal_weekofyear',
- 'ucal_year',
- 'ucal_yearwoy',
- 'ucal_zoneoffset',
- 'uchar_age',
- 'uchar_alphabetic',
- 'uchar_ascii_hex_digit',
- 'uchar_bidi_class',
- 'uchar_bidi_control',
- 'uchar_bidi_mirrored',
- 'uchar_bidi_mirroring_glyph',
- 'uchar_block',
- 'uchar_canonical_combining_class',
- 'uchar_case_folding',
- 'uchar_case_sensitive',
- 'uchar_dash',
- 'uchar_decomposition_type',
- 'uchar_default_ignorable_code_point',
- 'uchar_deprecated',
- 'uchar_diacritic',
- 'uchar_east_asian_width',
- 'uchar_extender',
- 'uchar_full_composition_exclusion',
- 'uchar_general_category_mask',
- 'uchar_general_category',
- 'uchar_grapheme_base',
- 'uchar_grapheme_cluster_break',
- 'uchar_grapheme_extend',
- 'uchar_grapheme_link',
- 'uchar_hangul_syllable_type',
- 'uchar_hex_digit',
- 'uchar_hyphen',
- 'uchar_id_continue',
- 'uchar_ideographic',
- 'uchar_ids_binary_operator',
- 'uchar_ids_trinary_operator',
- 'uchar_iso_comment',
- 'uchar_join_control',
- 'uchar_joining_group',
- 'uchar_joining_type',
- 'uchar_lead_canonical_combining_class',
- 'uchar_line_break',
- 'uchar_logical_order_exception',
- 'uchar_lowercase_mapping',
- 'uchar_lowercase',
- 'uchar_math',
- 'uchar_name',
- 'uchar_nfc_inert',
- 'uchar_nfc_quick_check',
- 'uchar_nfd_inert',
- 'uchar_nfd_quick_check',
- 'uchar_nfkc_inert',
- 'uchar_nfkc_quick_check',
- 'uchar_nfkd_inert',
- 'uchar_nfkd_quick_check',
- 'uchar_noncharacter_code_point',
- 'uchar_numeric_type',
- 'uchar_numeric_value',
- 'uchar_pattern_syntax',
- 'uchar_pattern_white_space',
- 'uchar_posix_alnum',
- 'uchar_posix_blank',
- 'uchar_posix_graph',
- 'uchar_posix_print',
- 'uchar_posix_xdigit',
- 'uchar_quotation_mark',
- 'uchar_radical',
- 'uchar_s_term',
- 'uchar_script',
- 'uchar_segment_starter',
- 'uchar_sentence_break',
- 'uchar_simple_case_folding',
- 'uchar_simple_lowercase_mapping',
- 'uchar_simple_titlecase_mapping',
- 'uchar_simple_uppercase_mapping',
- 'uchar_soft_dotted',
- 'uchar_terminal_punctuation',
- 'uchar_titlecase_mapping',
- 'uchar_trail_canonical_combining_class',
- 'uchar_unicode_1_name',
- 'uchar_unified_ideograph',
- 'uchar_uppercase_mapping',
- 'uchar_uppercase',
- 'uchar_variation_selector',
- 'uchar_white_space',
- 'uchar_word_break',
- 'uchar_xid_continue',
- 'uncompress',
- 'usage',
- 'uuid_compare',
- 'uuid_copy',
- 'uuid_generate_random',
- 'uuid_generate_time',
- 'uuid_generate',
- 'uuid_is_null',
- 'uuid_parse',
- 'uuid_unparse_lower',
- 'uuid_unparse_upper',
- 'uuid_unparse',
- 'value_list',
- 'value_listitem',
- 'valuelistitem',
- 'var_keys',
- 'var_values',
- 'wap_isenabled',
- 'wap_maxbuttons',
- 'wap_maxcolumns',
- 'wap_maxhorzpixels',
- 'wap_maxrows',
- 'wap_maxvertpixels',
- 'web_handlefcgirequest',
- 'web_node_content_representation_css',
- 'web_node_content_representation_html',
- 'web_node_content_representation_js',
- 'web_node_content_representation_xhr',
- 'web_node_forpath',
- 'web_nodes_initialize',
- 'web_nodes_normalizeextension',
- 'web_nodes_processcontentnode',
- 'web_nodes_requesthandler',
- 'web_response_nodesentry',
- 'web_router_database',
- 'web_router_initialize',
- 'websocket_handler_timeout',
- 'wexitstatus',
- 'wifcontinued',
- 'wifexited',
- 'wifsignaled',
- 'wifstopped',
- 'wstopsig',
- 'wtermsig',
- 'xml_transform',
- 'xml',
- 'zip_add_dir',
- 'zip_add',
- 'zip_checkcons',
- 'zip_close',
- 'zip_cm_bzip2',
- 'zip_cm_default',
- 'zip_cm_deflate',
- 'zip_cm_deflate64',
- 'zip_cm_implode',
- 'zip_cm_pkware_implode',
- 'zip_cm_reduce_1',
- 'zip_cm_reduce_2',
- 'zip_cm_reduce_3',
- 'zip_cm_reduce_4',
- 'zip_cm_shrink',
- 'zip_cm_store',
- 'zip_create',
- 'zip_delete',
- 'zip_em_3des_112',
- 'zip_em_3des_168',
- 'zip_em_aes_128',
- 'zip_em_aes_192',
- 'zip_em_aes_256',
- 'zip_em_des',
- 'zip_em_none',
- 'zip_em_rc2_old',
- 'zip_em_rc2',
- 'zip_em_rc4',
- 'zip_em_trad_pkware',
- 'zip_em_unknown',
- 'zip_er_changed',
- 'zip_er_close',
- 'zip_er_compnotsupp',
- 'zip_er_crc',
- 'zip_er_deleted',
- 'zip_er_eof',
- 'zip_er_exists',
- 'zip_er_incons',
- 'zip_er_internal',
- 'zip_er_inval',
- 'zip_er_memory',
- 'zip_er_multidisk',
- 'zip_er_noent',
- 'zip_er_nozip',
- 'zip_er_ok',
- 'zip_er_open',
- 'zip_er_read',
- 'zip_er_remove',
- 'zip_er_rename',
- 'zip_er_seek',
- 'zip_er_tmpopen',
- 'zip_er_write',
- 'zip_er_zipclosed',
- 'zip_er_zlib',
- 'zip_error_get_sys_type',
- 'zip_error_get',
- 'zip_error_to_str',
- 'zip_et_none',
- 'zip_et_sys',
- 'zip_et_zlib',
- 'zip_excl',
- 'zip_fclose',
- 'zip_file_error_get',
- 'zip_file_strerror',
- 'zip_fl_compressed',
- 'zip_fl_nocase',
- 'zip_fl_nodir',
- 'zip_fl_unchanged',
- 'zip_fopen_index',
- 'zip_fopen',
- 'zip_fread',
- 'zip_get_archive_comment',
- 'zip_get_file_comment',
- 'zip_get_name',
- 'zip_get_num_files',
- 'zip_name_locate',
- 'zip_open',
- 'zip_rename',
- 'zip_replace',
- 'zip_set_archive_comment',
- 'zip_set_file_comment',
- 'zip_stat_index',
- 'zip_stat',
- 'zip_strerror',
- 'zip_unchange_all',
- 'zip_unchange_archive',
- 'zip_unchange',
- 'zlib_version',
- ),
- 'Lasso 8 Tags': (
- '__char',
- '__sync_timestamp__',
- '_admin_addgroup',
- '_admin_adduser',
- '_admin_defaultconnector',
- '_admin_defaultconnectornames',
- '_admin_defaultdatabase',
- '_admin_defaultfield',
- '_admin_defaultgroup',
- '_admin_defaulthost',
- '_admin_defaulttable',
- '_admin_defaultuser',
- '_admin_deleteconnector',
- '_admin_deletedatabase',
- '_admin_deletefield',
- '_admin_deletegroup',
- '_admin_deletehost',
- '_admin_deletetable',
- '_admin_deleteuser',
- '_admin_duplicategroup',
- '_admin_internaldatabase',
- '_admin_listconnectors',
- '_admin_listdatabases',
- '_admin_listfields',
- '_admin_listgroups',
- '_admin_listhosts',
- '_admin_listtables',
- '_admin_listusers',
- '_admin_refreshconnector',
- '_admin_refreshsecurity',
- '_admin_servicepath',
- '_admin_updateconnector',
- '_admin_updatedatabase',
- '_admin_updatefield',
- '_admin_updategroup',
- '_admin_updatehost',
- '_admin_updatetable',
- '_admin_updateuser',
- '_chartfx_activation_string',
- '_chartfx_getchallengestring',
- '_chop_args',
- '_chop_mimes',
- '_client_addr_old',
- '_client_address_old',
- '_client_ip_old',
- '_database_names',
- '_datasource_reload',
- '_date_current',
- '_date_format',
- '_date_msec',
- '_date_parse',
- '_execution_timelimit',
- '_file_chmod',
- '_initialize',
- '_jdbc_acceptsurl',
- '_jdbc_debug',
- '_jdbc_deletehost',
- '_jdbc_driverclasses',
- '_jdbc_driverinfo',
- '_jdbc_metainfo',
- '_jdbc_propertyinfo',
- '_jdbc_setdriver',
- '_lasso_param',
- '_log_helper',
- '_proc_noparam',
- '_proc_withparam',
- '_recursion_limit',
- '_request_param',
- '_security_binaryexpiration',
- '_security_flushcaches',
- '_security_isserialized',
- '_security_serialexpiration',
- '_srand',
- '_strict_literals',
- '_substring',
- '_xmlrpc_exconverter',
- '_xmlrpc_inconverter',
- '_xmlrpc_xmlinconverter',
- 'abort',
- 'action_addinfo',
- 'action_addrecord',
- 'action_param',
- 'action_params',
- 'action_setfoundcount',
- 'action_setrecordid',
- 'action_settotalcount',
- 'action_statement',
- 'admin_allowedfileroots',
- 'admin_changeuser',
- 'admin_createuser',
- 'admin_currentgroups',
- 'admin_currentuserid',
- 'admin_currentusername',
- 'admin_getpref',
- 'admin_groupassignuser',
- 'admin_grouplistusers',
- 'admin_groupremoveuser',
- 'admin_lassoservicepath',
- 'admin_listgroups',
- 'admin_refreshlicensing',
- 'admin_refreshsecurity',
- 'admin_reloaddatasource',
- 'admin_removepref',
- 'admin_setpref',
- 'admin_userexists',
- 'admin_userlistgroups',
- 'all',
- 'and',
- 'array',
- 'array_iterator',
- 'auth',
- 'auth_admin',
- 'auth_auth',
- 'auth_custom',
- 'auth_group',
- 'auth_prompt',
- 'auth_user',
- 'base64',
- 'bean',
- 'bigint',
- 'bom_utf16be',
- 'bom_utf16le',
- 'bom_utf32be',
- 'bom_utf32le',
- 'bom_utf8',
- 'boolean',
- 'bw',
- 'bytes',
- 'cache',
- 'cache_delete',
- 'cache_empty',
- 'cache_exists',
- 'cache_fetch',
- 'cache_internal',
- 'cache_maintenance',
- 'cache_object',
- 'cache_preferences',
- 'cache_store',
- 'case',
- 'chartfx',
- 'chartfx_records',
- 'chartfx_serve',
- 'checked',
- 'choice_list',
- 'choice_listitem',
- 'choicelistitem',
- 'cipher_decrypt',
- 'cipher_digest',
- 'cipher_encrypt',
- 'cipher_hmac',
- 'cipher_keylength',
- 'cipher_list',
- 'click_text',
- 'client_addr',
- 'client_address',
- 'client_authorization',
- 'client_browser',
- 'client_contentlength',
- 'client_contenttype',
- 'client_cookielist',
- 'client_cookies',
- 'client_encoding',
- 'client_formmethod',
- 'client_getargs',
- 'client_getparams',
- 'client_headers',
- 'client_ip',
- 'client_ipfrominteger',
- 'client_iptointeger',
- 'client_password',
- 'client_postargs',
- 'client_postparams',
- 'client_type',
- 'client_url',
- 'client_username',
- 'cn',
- 'column',
- 'column_name',
- 'column_names',
- 'compare_beginswith',
- 'compare_contains',
- 'compare_endswith',
- 'compare_equalto',
- 'compare_greaterthan',
- 'compare_greaterthanorequals',
- 'compare_greaterthanorequls',
- 'compare_lessthan',
- 'compare_lessthanorequals',
- 'compare_notbeginswith',
- 'compare_notcontains',
- 'compare_notendswith',
- 'compare_notequalto',
- 'compare_notregexp',
- 'compare_regexp',
- 'compare_strictequalto',
- 'compare_strictnotequalto',
- 'compiler_removecacheddoc',
- 'compiler_setdefaultparserflags',
- 'compress',
- 'content_body',
- 'content_encoding',
- 'content_header',
- 'content_type',
- 'cookie',
- 'cookie_set',
- 'curl_ftp_getfile',
- 'curl_ftp_getlisting',
- 'curl_ftp_putfile',
- 'curl_include_url',
- 'currency',
- 'database_changecolumn',
- 'database_changefield',
- 'database_createcolumn',
- 'database_createfield',
- 'database_createtable',
- 'database_fmcontainer',
- 'database_hostinfo',
- 'database_inline',
- 'database_name',
- 'database_nameitem',
- 'database_names',
- 'database_realname',
- 'database_removecolumn',
- 'database_removefield',
- 'database_removetable',
- 'database_repeating',
- 'database_repeating_valueitem',
- 'database_repeatingvalueitem',
- 'database_schemanameitem',
- 'database_schemanames',
- 'database_tablecolumn',
- 'database_tablenameitem',
- 'database_tablenames',
- 'datasource_name',
- 'datasource_register',
- 'date',
- 'date__date_current',
- 'date__date_format',
- 'date__date_msec',
- 'date__date_parse',
- 'date_add',
- 'date_date',
- 'date_difference',
- 'date_duration',
- 'date_format',
- 'date_getcurrentdate',
- 'date_getday',
- 'date_getdayofweek',
- 'date_gethour',
- 'date_getlocaltimezone',
- 'date_getminute',
- 'date_getmonth',
- 'date_getsecond',
- 'date_gettime',
- 'date_getyear',
- 'date_gmttolocal',
- 'date_localtogmt',
- 'date_maximum',
- 'date_minimum',
- 'date_msec',
- 'date_setformat',
- 'date_subtract',
- 'db_layoutnameitem',
- 'db_layoutnames',
- 'db_nameitem',
- 'db_names',
- 'db_tablenameitem',
- 'db_tablenames',
- 'dbi_column_names',
- 'dbi_field_names',
- 'decimal',
- 'decimal_setglobaldefaultprecision',
- 'decode_base64',
- 'decode_bheader',
- 'decode_hex',
- 'decode_html',
- 'decode_json',
- 'decode_qheader',
- 'decode_quotedprintable',
- 'decode_quotedprintablebytes',
- 'decode_url',
- 'decode_xml',
- 'decompress',
- 'decrypt_blowfish',
- 'decrypt_blowfish2',
- 'default',
- 'define_atbegin',
- 'define_atend',
- 'define_constant',
- 'define_prototype',
- 'define_tag',
- 'define_tagp',
- 'define_type',
- 'define_typep',
- 'deserialize',
- 'directory_directorynameitem',
- 'directory_lister',
- 'directory_nameitem',
- 'directorynameitem',
- 'dns_default',
- 'dns_lookup',
- 'dns_response',
- 'duration',
- 'else',
- 'email_batch',
- 'email_compose',
- 'email_digestchallenge',
- 'email_digestresponse',
- 'email_extract',
- 'email_findemails',
- 'email_immediate',
- 'email_merge',
- 'email_mxerror',
- 'email_mxlookup',
- 'email_parse',
- 'email_pop',
- 'email_queue',
- 'email_result',
- 'email_safeemail',
- 'email_send',
- 'email_smtp',
- 'email_status',
- 'email_token',
- 'email_translatebreakstocrlf',
- 'encode_base64',
- 'encode_bheader',
- 'encode_break',
- 'encode_breaks',
- 'encode_crc32',
- 'encode_hex',
- 'encode_html',
- 'encode_htmltoxml',
- 'encode_json',
- 'encode_qheader',
- 'encode_quotedprintable',
- 'encode_quotedprintablebytes',
- 'encode_set',
- 'encode_smart',
- 'encode_sql',
- 'encode_sql92',
- 'encode_stricturl',
- 'encode_url',
- 'encode_xml',
- 'encrypt_blowfish',
- 'encrypt_blowfish2',
- 'encrypt_crammd5',
- 'encrypt_hmac',
- 'encrypt_md5',
- 'eq',
- 'error_adderror',
- 'error_code',
- 'error_code_aborted',
- 'error_code_assert',
- 'error_code_bof',
- 'error_code_connectioninvalid',
- 'error_code_couldnotclosefile',
- 'error_code_couldnotcreateoropenfile',
- 'error_code_couldnotdeletefile',
- 'error_code_couldnotdisposememory',
- 'error_code_couldnotlockmemory',
- 'error_code_couldnotreadfromfile',
- 'error_code_couldnotunlockmemory',
- 'error_code_couldnotwritetofile',
- 'error_code_criterianotmet',
- 'error_code_datasourceerror',
- 'error_code_directoryfull',
- 'error_code_diskfull',
- 'error_code_dividebyzero',
- 'error_code_eof',
- 'error_code_failure',
- 'error_code_fieldrestriction',
- 'error_code_file',
- 'error_code_filealreadyexists',
- 'error_code_filecorrupt',
- 'error_code_fileinvalid',
- 'error_code_fileinvalidaccessmode',
- 'error_code_fileisclosed',
- 'error_code_fileisopen',
- 'error_code_filelocked',
- 'error_code_filenotfound',
- 'error_code_fileunlocked',
- 'error_code_httpfilenotfound',
- 'error_code_illegalinstruction',
- 'error_code_illegaluseoffrozeninstance',
- 'error_code_invaliddatabase',
- 'error_code_invalidfilename',
- 'error_code_invalidmemoryobject',
- 'error_code_invalidparameter',
- 'error_code_invalidpassword',
- 'error_code_invalidpathname',
- 'error_code_invalidusername',
- 'error_code_ioerror',
- 'error_code_loopaborted',
- 'error_code_memory',
- 'error_code_network',
- 'error_code_nilpointer',
- 'error_code_noerr',
- 'error_code_nopermission',
- 'error_code_outofmemory',
- 'error_code_outofstackspace',
- 'error_code_overflow',
- 'error_code_postconditionfailed',
- 'error_code_preconditionfailed',
- 'error_code_resnotfound',
- 'error_code_resource',
- 'error_code_streamreaderror',
- 'error_code_streamwriteerror',
- 'error_code_syntaxerror',
- 'error_code_tagnotfound',
- 'error_code_unknownerror',
- 'error_code_varnotfound',
- 'error_code_volumedoesnotexist',
- 'error_code_webactionnotsupported',
- 'error_code_webadderror',
- 'error_code_webdeleteerror',
- 'error_code_webmodulenotfound',
- 'error_code_webnosuchobject',
- 'error_code_webrepeatingrelatedfield',
- 'error_code_webrequiredfieldmissing',
- 'error_code_webtimeout',
- 'error_code_webupdateerror',
- 'error_columnrestriction',
- 'error_currenterror',
- 'error_databaseconnectionunavailable',
- 'error_databasetimeout',
- 'error_deleteerror',
- 'error_fieldrestriction',
- 'error_filenotfound',
- 'error_invaliddatabase',
- 'error_invalidpassword',
- 'error_invalidusername',
- 'error_modulenotfound',
- 'error_msg',
- 'error_msg_aborted',
- 'error_msg_assert',
- 'error_msg_bof',
- 'error_msg_connectioninvalid',
- 'error_msg_couldnotclosefile',
- 'error_msg_couldnotcreateoropenfile',
- 'error_msg_couldnotdeletefile',
- 'error_msg_couldnotdisposememory',
- 'error_msg_couldnotlockmemory',
- 'error_msg_couldnotreadfromfile',
- 'error_msg_couldnotunlockmemory',
- 'error_msg_couldnotwritetofile',
- 'error_msg_criterianotmet',
- 'error_msg_datasourceerror',
- 'error_msg_directoryfull',
- 'error_msg_diskfull',
- 'error_msg_dividebyzero',
- 'error_msg_eof',
- 'error_msg_failure',
- 'error_msg_fieldrestriction',
- 'error_msg_file',
- 'error_msg_filealreadyexists',
- 'error_msg_filecorrupt',
- 'error_msg_fileinvalid',
- 'error_msg_fileinvalidaccessmode',
- 'error_msg_fileisclosed',
- 'error_msg_fileisopen',
- 'error_msg_filelocked',
- 'error_msg_filenotfound',
- 'error_msg_fileunlocked',
- 'error_msg_httpfilenotfound',
- 'error_msg_illegalinstruction',
- 'error_msg_illegaluseoffrozeninstance',
- 'error_msg_invaliddatabase',
- 'error_msg_invalidfilename',
- 'error_msg_invalidmemoryobject',
- 'error_msg_invalidparameter',
- 'error_msg_invalidpassword',
- 'error_msg_invalidpathname',
- 'error_msg_invalidusername',
- 'error_msg_ioerror',
- 'error_msg_loopaborted',
- 'error_msg_memory',
- 'error_msg_network',
- 'error_msg_nilpointer',
- 'error_msg_noerr',
- 'error_msg_nopermission',
- 'error_msg_outofmemory',
- 'error_msg_outofstackspace',
- 'error_msg_overflow',
- 'error_msg_postconditionfailed',
- 'error_msg_preconditionfailed',
- 'error_msg_resnotfound',
- 'error_msg_resource',
- 'error_msg_streamreaderror',
- 'error_msg_streamwriteerror',
- 'error_msg_syntaxerror',
- 'error_msg_tagnotfound',
- 'error_msg_unknownerror',
- 'error_msg_varnotfound',
- 'error_msg_volumedoesnotexist',
- 'error_msg_webactionnotsupported',
- 'error_msg_webadderror',
- 'error_msg_webdeleteerror',
- 'error_msg_webmodulenotfound',
- 'error_msg_webnosuchobject',
- 'error_msg_webrepeatingrelatedfield',
- 'error_msg_webrequiredfieldmissing',
- 'error_msg_webtimeout',
- 'error_msg_webupdateerror',
- 'error_noerror',
- 'error_nopermission',
- 'error_norecordsfound',
- 'error_outofmemory',
- 'error_pop',
- 'error_push',
- 'error_reqcolumnmissing',
- 'error_reqfieldmissing',
- 'error_requiredcolumnmissing',
- 'error_requiredfieldmissing',
- 'error_reset',
- 'error_seterrorcode',
- 'error_seterrormessage',
- 'error_updateerror',
- 'euro',
- 'event_schedule',
- 'ew',
- 'fail',
- 'fail_if',
- 'false',
- 'field',
- 'field_name',
- 'field_names',
- 'file',
- 'file_autoresolvefullpaths',
- 'file_chmod',
- 'file_control',
- 'file_copy',
- 'file_create',
- 'file_creationdate',
- 'file_currenterror',
- 'file_delete',
- 'file_exists',
- 'file_getlinecount',
- 'file_getsize',
- 'file_isdirectory',
- 'file_listdirectory',
- 'file_moddate',
- 'file_modechar',
- 'file_modeline',
- 'file_move',
- 'file_openread',
- 'file_openreadwrite',
- 'file_openwrite',
- 'file_openwriteappend',
- 'file_openwritetruncate',
- 'file_probeeol',
- 'file_processuploads',
- 'file_read',
- 'file_readline',
- 'file_rename',
- 'file_serve',
- 'file_setsize',
- 'file_stream',
- 'file_streamcopy',
- 'file_uploads',
- 'file_waitread',
- 'file_waittimeout',
- 'file_waitwrite',
- 'file_write',
- 'find_soap_ops',
- 'form_param',
- 'found_count',
- 'ft',
- 'ftp_getfile',
- 'ftp_getlisting',
- 'ftp_putfile',
- 'full',
- 'global',
- 'global_defined',
- 'global_remove',
- 'global_reset',
- 'globals',
- 'gt',
- 'gte',
- 'handle',
- 'handle_error',
- 'header',
- 'html_comment',
- 'http_getfile',
- 'ical_alarm',
- 'ical_attribute',
- 'ical_calendar',
- 'ical_daylight',
- 'ical_event',
- 'ical_freebusy',
- 'ical_item',
- 'ical_journal',
- 'ical_parse',
- 'ical_standard',
- 'ical_timezone',
- 'ical_todo',
- 'if',
- 'if_empty',
- 'if_false',
- 'if_null',
- 'if_true',
- 'image',
- 'image_url',
- 'img',
- 'include',
- 'include_cgi',
- 'include_currentpath',
- 'include_once',
- 'include_raw',
- 'include_url',
- 'inline',
- 'integer',
- 'iterate',
- 'iterator',
- 'java',
- 'java_bean',
- 'json_records',
- 'json_rpccall',
- 'keycolumn_name',
- 'keycolumn_value',
- 'keyfield_name',
- 'keyfield_value',
- 'lasso_comment',
- 'lasso_currentaction',
- 'lasso_datasourceis',
- 'lasso_datasourceis4d',
- 'lasso_datasourceisfilemaker',
- 'lasso_datasourceisfilemaker7',
- 'lasso_datasourceisfilemaker9',
- 'lasso_datasourceisfilemakersa',
- 'lasso_datasourceisjdbc',
- 'lasso_datasourceislassomysql',
- 'lasso_datasourceismysql',
- 'lasso_datasourceisodbc',
- 'lasso_datasourceisopenbase',
- 'lasso_datasourceisoracle',
- 'lasso_datasourceispostgresql',
- 'lasso_datasourceisspotlight',
- 'lasso_datasourceissqlite',
- 'lasso_datasourceissqlserver',
- 'lasso_datasourcemodulename',
- 'lasso_datatype',
- 'lasso_disableondemand',
- 'lasso_errorreporting',
- 'lasso_executiontimelimit',
- 'lasso_parser',
- 'lasso_process',
- 'lasso_sessionid',
- 'lasso_siteid',
- 'lasso_siteisrunning',
- 'lasso_sitename',
- 'lasso_siterestart',
- 'lasso_sitestart',
- 'lasso_sitestop',
- 'lasso_tagexists',
- 'lasso_tagmodulename',
- 'lasso_uniqueid',
- 'lasso_updatecheck',
- 'lasso_uptime',
- 'lasso_version',
- 'lassoapp_create',
- 'lassoapp_dump',
- 'lassoapp_flattendir',
- 'lassoapp_getappdata',
- 'lassoapp_link',
- 'lassoapp_list',
- 'lassoapp_process',
- 'lassoapp_unitize',
- 'layout_name',
- 'ldap',
- 'ldap_scope_base',
- 'ldap_scope_onelevel',
- 'ldap_scope_subtree',
- 'ldml',
- 'ldml_ldml',
- 'library',
- 'library_once',
- 'link',
- 'link_currentaction',
- 'link_currentactionparams',
- 'link_currentactionurl',
- 'link_currentgroup',
- 'link_currentgroupparams',
- 'link_currentgroupurl',
- 'link_currentrecord',
- 'link_currentrecordparams',
- 'link_currentrecordurl',
- 'link_currentsearch',
- 'link_currentsearchparams',
- 'link_currentsearchurl',
- 'link_detail',
- 'link_detailparams',
- 'link_detailurl',
- 'link_firstgroup',
- 'link_firstgroupparams',
- 'link_firstgroupurl',
- 'link_firstrecord',
- 'link_firstrecordparams',
- 'link_firstrecordurl',
- 'link_lastgroup',
- 'link_lastgroupparams',
- 'link_lastgroupurl',
- 'link_lastrecord',
- 'link_lastrecordparams',
- 'link_lastrecordurl',
- 'link_nextgroup',
- 'link_nextgroupparams',
- 'link_nextgroupurl',
- 'link_nextrecord',
- 'link_nextrecordparams',
- 'link_nextrecordurl',
- 'link_params',
- 'link_prevgroup',
- 'link_prevgroupparams',
- 'link_prevgroupurl',
- 'link_prevrecord',
- 'link_prevrecordparams',
- 'link_prevrecordurl',
- 'link_setformat',
- 'link_url',
- 'list',
- 'list_additem',
- 'list_fromlist',
- 'list_fromstring',
- 'list_getitem',
- 'list_itemcount',
- 'list_iterator',
- 'list_removeitem',
- 'list_replaceitem',
- 'list_reverseiterator',
- 'list_tostring',
- 'literal',
- 'ljax_end',
- 'ljax_hastarget',
- 'ljax_include',
- 'ljax_start',
- 'ljax_target',
- 'local',
- 'local_defined',
- 'local_remove',
- 'local_reset',
- 'locale_format',
- 'locals',
- 'log',
- 'log_always',
- 'log_critical',
- 'log_deprecated',
- 'log_destination_console',
- 'log_destination_database',
- 'log_destination_file',
- 'log_detail',
- 'log_level_critical',
- 'log_level_deprecated',
- 'log_level_detail',
- 'log_level_sql',
- 'log_level_warning',
- 'log_setdestination',
- 'log_sql',
- 'log_warning',
- 'logicalop_value',
- 'logicaloperator_value',
- 'loop',
- 'loop_abort',
- 'loop_continue',
- 'loop_count',
- 'lt',
- 'lte',
- 'magick_image',
- 'map',
- 'map_iterator',
- 'match_comparator',
- 'match_notrange',
- 'match_notregexp',
- 'match_range',
- 'match_regexp',
- 'math_abs',
- 'math_acos',
- 'math_add',
- 'math_asin',
- 'math_atan',
- 'math_atan2',
- 'math_ceil',
- 'math_converteuro',
- 'math_cos',
- 'math_div',
- 'math_exp',
- 'math_floor',
- 'math_internal_rand',
- 'math_internal_randmax',
- 'math_internal_srand',
- 'math_ln',
- 'math_log',
- 'math_log10',
- 'math_max',
- 'math_min',
- 'math_mod',
- 'math_mult',
- 'math_pow',
- 'math_random',
- 'math_range',
- 'math_rint',
- 'math_roman',
- 'math_round',
- 'math_sin',
- 'math_sqrt',
- 'math_sub',
- 'math_tan',
- 'maxrecords_value',
- 'memory_session_driver',
- 'mime_type',
- 'minimal',
- 'misc__srand',
- 'misc_randomnumber',
- 'misc_roman',
- 'misc_valid_creditcard',
- 'mysql_session_driver',
- 'named_param',
- 'namespace_current',
- 'namespace_delimiter',
- 'namespace_exists',
- 'namespace_file_fullpathexists',
- 'namespace_global',
- 'namespace_import',
- 'namespace_load',
- 'namespace_page',
- 'namespace_unload',
- 'namespace_using',
- 'neq',
- 'net',
- 'net_connectinprogress',
- 'net_connectok',
- 'net_typessl',
- 'net_typessltcp',
- 'net_typessludp',
- 'net_typetcp',
- 'net_typeudp',
- 'net_waitread',
- 'net_waittimeout',
- 'net_waitwrite',
- 'no_default_output',
- 'none',
- 'noprocess',
- 'not',
- 'nrx',
- 'nslookup',
- 'null',
- 'object',
- 'once',
- 'oneoff',
- 'op_logicalvalue',
- 'operator_logicalvalue',
- 'option',
- 'or',
- 'os_process',
- 'output',
- 'output_none',
- 'pair',
- 'params_up',
- 'pdf_barcode',
- 'pdf_color',
- 'pdf_doc',
- 'pdf_font',
- 'pdf_image',
- 'pdf_list',
- 'pdf_read',
- 'pdf_serve',
- 'pdf_table',
- 'pdf_text',
- 'percent',
- 'portal',
- 'postcondition',
- 'precondition',
- 'prettyprintingnsmap',
- 'prettyprintingtypemap',
- 'priorityqueue',
- 'private',
- 'proc_convert',
- 'proc_convertbody',
- 'proc_convertone',
- 'proc_extract',
- 'proc_extractone',
- 'proc_find',
- 'proc_first',
- 'proc_foreach',
- 'proc_get',
- 'proc_join',
- 'proc_lasso',
- 'proc_last',
- 'proc_map_entry',
- 'proc_null',
- 'proc_regexp',
- 'proc_xml',
- 'proc_xslt',
- 'process',
- 'protect',
- 'queue',
- 'rand',
- 'randomnumber',
- 'raw',
- 'recid_value',
- 'record_count',
- 'recordcount',
- 'recordid_value',
- 'records',
- 'records_array',
- 'records_map',
- 'redirect_url',
- 'reference',
- 'referer',
- 'referer_url',
- 'referrer',
- 'referrer_url',
- 'regexp',
- 'repeating',
- 'repeating_valueitem',
- 'repeatingvalueitem',
- 'repetition',
- 'req_column',
- 'req_field',
- 'required_column',
- 'required_field',
- 'response_fileexists',
- 'response_filepath',
- 'response_localpath',
- 'response_path',
- 'response_realm',
- 'resultset',
- 'resultset_count',
- 'return',
- 'return_value',
- 'reverseiterator',
- 'roman',
- 'row_count',
- 'rows',
- 'rows_array',
- 'run_children',
- 'rx',
- 'schema_name',
- 'scientific',
- 'search_args',
- 'search_arguments',
- 'search_columnitem',
- 'search_fielditem',
- 'search_operatoritem',
- 'search_opitem',
- 'search_valueitem',
- 'searchfielditem',
- 'searchoperatoritem',
- 'searchopitem',
- 'searchvalueitem',
- 'select',
- 'selected',
- 'self',
- 'serialize',
- 'series',
- 'server_date',
- 'server_day',
- 'server_ip',
- 'server_name',
- 'server_port',
- 'server_push',
- 'server_siteisrunning',
- 'server_sitestart',
- 'server_sitestop',
- 'server_time',
- 'session_abort',
- 'session_addoutputfilter',
- 'session_addvar',
- 'session_addvariable',
- 'session_deleteexpired',
- 'session_driver',
- 'session_end',
- 'session_id',
- 'session_removevar',
- 'session_removevariable',
- 'session_result',
- 'session_setdriver',
- 'session_start',
- 'set',
- 'set_iterator',
- 'set_reverseiterator',
- 'shown_count',
- 'shown_first',
- 'shown_last',
- 'site_atbegin',
- 'site_id',
- 'site_name',
- 'site_restart',
- 'skiprecords_value',
- 'sleep',
- 'soap_convertpartstopairs',
- 'soap_definetag',
- 'soap_info',
- 'soap_lastrequest',
- 'soap_lastresponse',
- 'soap_stub',
- 'sort_args',
- 'sort_arguments',
- 'sort_columnitem',
- 'sort_fielditem',
- 'sort_orderitem',
- 'sortcolumnitem',
- 'sortfielditem',
- 'sortorderitem',
- 'sqlite_createdb',
- 'sqlite_session_driver',
- 'sqlite_setsleepmillis',
- 'sqlite_setsleeptries',
- 'srand',
- 'stack',
- 'stock_quote',
- 'string',
- 'string_charfromname',
- 'string_concatenate',
- 'string_countfields',
- 'string_endswith',
- 'string_extract',
- 'string_findposition',
- 'string_findregexp',
- 'string_fordigit',
- 'string_getfield',
- 'string_getunicodeversion',
- 'string_insert',
- 'string_isalpha',
- 'string_isalphanumeric',
- 'string_isdigit',
- 'string_ishexdigit',
- 'string_islower',
- 'string_isnumeric',
- 'string_ispunctuation',
- 'string_isspace',
- 'string_isupper',
- 'string_length',
- 'string_lowercase',
- 'string_remove',
- 'string_removeleading',
- 'string_removetrailing',
- 'string_replace',
- 'string_replaceregexp',
- 'string_todecimal',
- 'string_tointeger',
- 'string_uppercase',
- 'string_validcharset',
- 'table_name',
- 'table_realname',
- 'tag',
- 'tag_name',
- 'tags',
- 'tags_find',
- 'tags_list',
- 'tcp_close',
- 'tcp_open',
- 'tcp_send',
- 'tcp_tcp_close',
- 'tcp_tcp_open',
- 'tcp_tcp_send',
- 'thread_abort',
- 'thread_atomic',
- 'thread_event',
- 'thread_exists',
- 'thread_getcurrentid',
- 'thread_getpriority',
- 'thread_info',
- 'thread_list',
- 'thread_lock',
- 'thread_pipe',
- 'thread_priority_default',
- 'thread_priority_high',
- 'thread_priority_low',
- 'thread_rwlock',
- 'thread_semaphore',
- 'thread_setpriority',
- 'token_value',
- 'total_records',
- 'treemap',
- 'treemap_iterator',
- 'true',
- 'url_rewrite',
- 'valid_creditcard',
- 'valid_date',
- 'valid_email',
- 'valid_url',
- 'value_list',
- 'value_listitem',
- 'valuelistitem',
- 'var',
- 'var_defined',
- 'var_remove',
- 'var_reset',
- 'var_set',
- 'variable',
- 'variable_defined',
- 'variable_set',
- 'variables',
- 'variant_count',
- 'vars',
- 'wap_isenabled',
- 'wap_maxbuttons',
- 'wap_maxcolumns',
- 'wap_maxhorzpixels',
- 'wap_maxrows',
- 'wap_maxvertpixels',
- 'while',
- 'wsdl_extract',
- 'wsdl_getbinding',
- 'wsdl_getbindingforoperation',
- 'wsdl_getbindingoperations',
- 'wsdl_getmessagenamed',
- 'wsdl_getmessageparts',
- 'wsdl_getmessagetriofromporttype',
- 'wsdl_getopbodystyle',
- 'wsdl_getopbodyuse',
- 'wsdl_getoperation',
- 'wsdl_getoplocation',
- 'wsdl_getopmessagetypes',
- 'wsdl_getopsoapaction',
- 'wsdl_getportaddress',
- 'wsdl_getportsforservice',
- 'wsdl_getporttype',
- 'wsdl_getporttypeoperation',
- 'wsdl_getservicedocumentation',
- 'wsdl_getservices',
- 'wsdl_gettargetnamespace',
- 'wsdl_issoapoperation',
- 'wsdl_listoperations',
- 'wsdl_maketest',
- 'xml',
- 'xml_extract',
- 'xml_rpc',
- 'xml_rpccall',
- 'xml_rw',
- 'xml_serve',
- 'xml_transform',
- 'xml_xml',
- 'xml_xmlstream',
- 'xmlstream',
- 'xsd_attribute',
- 'xsd_blankarraybase',
- 'xsd_blankbase',
- 'xsd_buildtype',
- 'xsd_cache',
- 'xsd_checkcardinality',
- 'xsd_continueall',
- 'xsd_continueannotation',
- 'xsd_continueany',
- 'xsd_continueanyattribute',
- 'xsd_continueattribute',
- 'xsd_continueattributegroup',
- 'xsd_continuechoice',
- 'xsd_continuecomplexcontent',
- 'xsd_continuecomplextype',
- 'xsd_continuedocumentation',
- 'xsd_continueextension',
- 'xsd_continuegroup',
- 'xsd_continuekey',
- 'xsd_continuelist',
- 'xsd_continuerestriction',
- 'xsd_continuesequence',
- 'xsd_continuesimplecontent',
- 'xsd_continuesimpletype',
- 'xsd_continueunion',
- 'xsd_deserialize',
- 'xsd_fullyqualifyname',
- 'xsd_generate',
- 'xsd_generateblankfromtype',
- 'xsd_generateblanksimpletype',
- 'xsd_generatetype',
- 'xsd_getschematype',
- 'xsd_issimpletype',
- 'xsd_loadschema',
- 'xsd_lookupnamespaceuri',
- 'xsd_lookuptype',
- 'xsd_processany',
- 'xsd_processattribute',
- 'xsd_processattributegroup',
- 'xsd_processcomplextype',
- 'xsd_processelement',
- 'xsd_processgroup',
- 'xsd_processimport',
- 'xsd_processinclude',
- 'xsd_processschema',
- 'xsd_processsimpletype',
- 'xsd_ref',
- 'xsd_type',
- )
-}
-MEMBERS = {
- 'Member Methods': (
- 'abort',
- 'abs',
- 'accept_charset',
- 'accept',
- 'acceptconnections',
- 'acceptdeserializedelement',
- 'acceptnossl',
- 'acceptpost',
- 'accesskey',
- 'acos',
- 'acosh',
- 'action',
- 'actionparams',
- 'active_tick',
- 'add',
- 'addatend',
- 'addattachment',
- 'addbarcode',
- 'addchapter',
- 'addcheckbox',
- 'addcolumninfo',
- 'addcombobox',
- 'addcomment',
- 'addcomponent',
- 'addcomponents',
- 'addcss',
- 'adddatabasetable',
- 'adddatasource',
- 'adddatasourcedatabase',
- 'adddatasourcehost',
- 'adddir',
- 'adddirpath',
- 'addendjs',
- 'addendjstext',
- 'adderror',
- 'addfavicon',
- 'addfile',
- 'addgroup',
- 'addheader',
- 'addhiddenfield',
- 'addhtmlpart',
- 'addimage',
- 'addjavascript',
- 'addjs',
- 'addjstext',
- 'addlist',
- 'addmathfunctions',
- 'addmember',
- 'addoneheaderline',
- 'addpage',
- 'addparagraph',
- 'addpart',
- 'addpasswordfield',
- 'addphrase',
- 'addpostdispatch',
- 'addpredispatch',
- 'addradiobutton',
- 'addradiogroup',
- 'addresetbutton',
- 'addrow',
- 'addsection',
- 'addselectlist',
- 'addset',
- 'addsubmitbutton',
- 'addsubnode',
- 'addtable',
- 'addtask',
- 'addtext',
- 'addtextarea',
- 'addtextfield',
- 'addtextpart',
- 'addtobuffer',
- 'addtrait',
- 'adduser',
- 'addusertogroup',
- 'addwarning',
- 'addzip',
- 'allocobject',
- 'am',
- 'ampm',
- 'annotate',
- 'answer',
- 'apop',
- 'append',
- 'appendarray',
- 'appendarraybegin',
- 'appendarrayend',
- 'appendbool',
- 'appendbytes',
- 'appendchar',
- 'appendchild',
- 'appendcolon',
- 'appendcomma',
- 'appenddata',
- 'appenddatetime',
- 'appenddbpointer',
- 'appenddecimal',
- 'appenddocument',
- 'appendimagetolist',
- 'appendinteger',
- 'appendnowutc',
- 'appendnull',
- 'appendoid',
- 'appendregex',
- 'appendreplacement',
- 'appendstring',
- 'appendtail',
- 'appendtime',
- 'applyheatcolors',
- 'appmessage',
- 'appname',
- 'appprefix',
- 'appstatus',
- 'arc',
- 'archive',
- 'arguments',
- 'argumentvalue',
- 'asarray',
- 'asarraystring',
- 'asasync',
- 'asbytes',
- 'ascopy',
- 'ascopydeep',
- 'asdecimal',
- 'asgenerator',
- 'asin',
- 'asinh',
- 'asinteger',
- 'askeyedgenerator',
- 'aslazystring',
- 'aslist',
- 'asraw',
- 'asstaticarray',
- 'asstring',
- 'asstringhex',
- 'asstringoct',
- 'asxml',
- 'atan',
- 'atan2',
- 'atanh',
- 'atend',
- 'atends',
- 'atime',
- 'attributecount',
- 'attributes',
- 'attrs',
- 'auth',
- 'authenticate',
- 'authorize',
- 'autocollectbuffer',
- 'average',
- 'back',
- 'basename',
- 'basepaths',
- 'baseuri',
- 'bcc',
- 'beginssl',
- 'beginswith',
- 'begintls',
- 'bestcharset',
- 'bind_blob',
- 'bind_double',
- 'bind_int',
- 'bind_null',
- 'bind_parameter_index',
- 'bind_text',
- 'bind',
- 'bindcount',
- 'bindone',
- 'bindparam',
- 'bitand',
- 'bitclear',
- 'bitflip',
- 'bitformat',
- 'bitnot',
- 'bitor',
- 'bitset',
- 'bitshiftleft',
- 'bitshiftright',
- 'bittest',
- 'bitxor',
- 'blur',
- 'body',
- 'bodybytes',
- 'boundary',
- 'bptoxml',
- 'bptypetostr',
- 'bucketnumber',
- 'buff',
- 'buildquery',
- 'businessdaysbetween',
- 'by',
- 'bytes',
- 'cachedappprefix',
- 'cachedroot',
- 'callboolean',
- 'callbooleanmethod',
- 'callbytemethod',
- 'callcharmethod',
- 'calldoublemethod',
- 'calledname',
- 'callfirst',
- 'callfloat',
- 'callfloatmethod',
- 'callint',
- 'callintmethod',
- 'calllongmethod',
- 'callnonvirtualbooleanmethod',
- 'callnonvirtualbytemethod',
- 'callnonvirtualcharmethod',
- 'callnonvirtualdoublemethod',
- 'callnonvirtualfloatmethod',
- 'callnonvirtualintmethod',
- 'callnonvirtuallongmethod',
- 'callnonvirtualobjectmethod',
- 'callnonvirtualshortmethod',
- 'callnonvirtualvoidmethod',
- 'callobject',
- 'callobjectmethod',
- 'callshortmethod',
- 'callsite_col',
- 'callsite_file',
- 'callsite_line',
- 'callstack',
- 'callstaticboolean',
- 'callstaticbooleanmethod',
- 'callstaticbytemethod',
- 'callstaticcharmethod',
- 'callstaticdoublemethod',
- 'callstaticfloatmethod',
- 'callstaticint',
- 'callstaticintmethod',
- 'callstaticlongmethod',
- 'callstaticobject',
- 'callstaticobjectmethod',
- 'callstaticshortmethod',
- 'callstaticstring',
- 'callstaticvoidmethod',
- 'callstring',
- 'callvoid',
- 'callvoidmethod',
- 'cancel',
- 'cap',
- 'capa',
- 'capabilities',
- 'capi',
- 'cbrt',
- 'cc',
- 'ceil',
- 'chardigitvalue',
- 'charname',
- 'charset',
- 'chartype',
- 'checkdebugging',
- 'checked',
- 'checkuser',
- 'childnodes',
- 'chk',
- 'chmod',
- 'choosecolumntype',
- 'chown',
- 'chunked',
- 'circle',
- 'class',
- 'classid',
- 'clear',
- 'clonenode',
- 'close',
- 'closepath',
- 'closeprepared',
- 'closewrite',
- 'code',
- 'codebase',
- 'codetype',
- 'colmap',
- 'colorspace',
- 'column_blob',
- 'column_count',
- 'column_decltype',
- 'column_double',
- 'column_int64',
- 'column_name',
- 'column_text',
- 'column_type',
- 'command',
- 'comments',
- 'compare',
- 'comparecodepointorder',
- 'componentdelimiter',
- 'components',
- 'composite',
- 'compress',
- 'concat',
- 'condtoint',
- 'configureds',
- 'configuredskeys',
- 'connect',
- 'connection',
- 'connectionhandler',
- 'connhandler',
- 'consume_domain',
- 'consume_label',
- 'consume_message',
- 'consume_rdata',
- 'consume_string',
- 'contains',
- 'content_disposition',
- 'content_transfer_encoding',
- 'content_type',
- 'content',
- 'contentlength',
- 'contents',
- 'contenttype',
- 'continuation',
- 'continuationpacket',
- 'continuationpoint',
- 'continuationstack',
- 'continue',
- 'contrast',
- 'conventionaltop',
- 'convert',
- 'cookie',
- 'cookies',
- 'cookiesarray',
- 'cookiesary',
- 'copyto',
- 'cos',
- 'cosh',
- 'count',
- 'countkeys',
- 'country',
- 'countusersbygroup',
- 'crc',
- 'create',
- 'createattribute',
- 'createattributens',
- 'createcdatasection',
- 'createcomment',
- 'createdocument',
- 'createdocumentfragment',
- 'createdocumenttype',
- 'createelement',
- 'createelementns',
- 'createentityreference',
- 'createindex',
- 'createprocessinginstruction',
- 'createtable',
- 'createtextnode',
- 'criteria',
- 'crop',
- 'csscontent',
- 'curl',
- 'current',
- 'currentfile',
- 'curveto',
- 'd',
- 'data',
- 'databasecolumnnames',
- 'databasecolumns',
- 'databasemap',
- 'databasename',
- 'datasourcecolumnnames',
- 'datasourcecolumns',
- 'datasourcemap',
- 'date',
- 'day',
- 'dayofmonth',
- 'dayofweek',
- 'dayofweekinmonth',
- 'dayofyear',
- 'days',
- 'daysbetween',
- 'db',
- 'dbtablestable',
- 'debug',
- 'declare',
- 'decodebase64',
- 'decodehex',
- 'decodehtml',
- 'decodeqp',
- 'decodeurl',
- 'decodexml',
- 'decompose',
- 'decomposeassignment',
- 'defaultcontentrepresentation',
- 'defer',
- 'deg2rad',
- 'dele',
- 'delete',
- 'deletedata',
- 'deleteglobalref',
- 'deletelocalref',
- 'delim',
- 'depth',
- 'dereferencepointer',
- 'describe',
- 'description',
- 'deserialize',
- 'detach',
- 'detectcharset',
- 'didinclude',
- 'difference',
- 'digit',
- 'dir',
- 'displaycountry',
- 'displaylanguage',
- 'displayname',
- 'displayscript',
- 'displayvariant',
- 'div',
- 'dns_response',
- 'do',
- 'doatbegins',
- 'doatends',
- 'doccomment',
- 'doclose',
- 'doctype',
- 'document',
- 'documentelement',
- 'documentroot',
- 'domainbody',
- 'done',
- 'dosessions',
- 'dowithclose',
- 'dowlocal',
- 'download',
- 'drawtext',
- 'drop',
- 'dropindex',
- 'dsdbtable',
- 'dshoststable',
- 'dsinfo',
- 'dst',
- 'dstable',
- 'dstoffset',
- 'dtdid',
- 'dup',
- 'dup2',
- 'each',
- 'eachbyte',
- 'eachcharacter',
- 'eachchild',
- 'eachcomponent',
- 'eachdir',
- 'eachdirpath',
- 'eachdirpathrecursive',
- 'eachentry',
- 'eachfile',
- 'eachfilename',
- 'eachfilepath',
- 'eachfilepathrecursive',
- 'eachkey',
- 'eachline',
- 'eachlinebreak',
- 'eachmatch',
- 'eachnode',
- 'eachpair',
- 'eachpath',
- 'eachpathrecursive',
- 'eachrow',
- 'eachsub',
- 'eachword',
- 'eachwordbreak',
- 'element',
- 'eligiblepath',
- 'eligiblepaths',
- 'encodebase64',
- 'encodehex',
- 'encodehtml',
- 'encodehtmltoxml',
- 'encodemd5',
- 'encodepassword',
- 'encodeqp',
- 'encodesql',
- 'encodesql92',
- 'encodeurl',
- 'encodevalue',
- 'encodexml',
- 'encoding',
- 'enctype',
- 'end',
- 'endjs',
- 'endssl',
- 'endswith',
- 'endtls',
- 'enhance',
- 'ensurestopped',
- 'entities',
- 'entry',
- 'env',
- 'equals',
- 'era',
- 'erf',
- 'erfc',
- 'err',
- 'errcode',
- 'errmsg',
- 'error',
- 'errors',
- 'errstack',
- 'escape_member',
- 'establisherrorstate',
- 'exceptioncheck',
- 'exceptionclear',
- 'exceptiondescribe',
- 'exceptionoccurred',
- 'exchange',
- 'execinits',
- 'execinstalls',
- 'execute',
- 'executelazy',
- 'executenow',
- 'exists',
- 'exit',
- 'exitcode',
- 'exp',
- 'expire',
- 'expireminutes',
- 'expiresminutes',
- 'expm1',
- 'export16bits',
- 'export32bits',
- 'export64bits',
- 'export8bits',
- 'exportas',
- 'exportbytes',
- 'exportfdf',
- 'exportpointerbits',
- 'exportsigned16bits',
- 'exportsigned32bits',
- 'exportsigned64bits',
- 'exportsigned8bits',
- 'exportstring',
- 'expose',
- 'extendedyear',
- 'extensiondelimiter',
- 'extensions',
- 'extract',
- 'extractfast',
- 'extractfastone',
- 'extractimage',
- 'extractone',
- 'f',
- 'fabs',
- 'fail',
- 'failnoconnectionhandler',
- 'family',
- 'fatalerror',
- 'fcgireq',
- 'fchdir',
- 'fchmod',
- 'fchown',
- 'fd',
- 'features',
- 'fetchdata',
- 'fieldnames',
- 'fieldposition',
- 'fieldstable',
- 'fieldtype',
- 'fieldvalue',
- 'file',
- 'filename',
- 'filenames',
- 'filequeue',
- 'fileuploads',
- 'fileuploadsary',
- 'filterinputcolumn',
- 'finalize',
- 'find',
- 'findall',
- 'findandmodify',
- 'findbucket',
- 'findcase',
- 'findclass',
- 'findcount',
- 'finddescendant',
- 'findfirst',
- 'findinclude',
- 'findinctx',
- 'findindex',
- 'findlast',
- 'findpattern',
- 'findposition',
- 'findsymbols',
- 'first',
- 'firstchild',
- 'firstcomponent',
- 'firstdayofweek',
- 'firstnode',
- 'fixformat',
- 'flags',
- 'fliph',
- 'flipv',
- 'floor',
- 'flush',
- 'foldcase',
- 'foo',
- 'for',
- 'forcedrowid',
- 'foreach',
- 'foreachaccept',
- 'foreachbyte',
- 'foreachcharacter',
- 'foreachchild',
- 'foreachday',
- 'foreachentry',
- 'foreachfile',
- 'foreachfilename',
- 'foreachkey',
- 'foreachline',
- 'foreachlinebreak',
- 'foreachmatch',
- 'foreachnode',
- 'foreachpair',
- 'foreachpathcomponent',
- 'foreachrow',
- 'foreachspool',
- 'foreachsub',
- 'foreachwordbreak',
- 'form',
- 'format',
- 'formatas',
- 'formatcontextelement',
- 'formatcontextelements',
- 'formatnumber',
- 'free',
- 'frexp',
- 'from',
- 'fromname',
- 'fromport',
- 'fromreflectedfield',
- 'fromreflectedmethod',
- 'front',
- 'fsync',
- 'ftpdeletefile',
- 'ftpgetlisting',
- 'ftruncate',
- 'fullpath',
- 'fx',
- 'gamma',
- 'gatewayinterface',
- 'gen',
- 'generatechecksum',
- 'get',
- 'getabswidth',
- 'getalignment',
- 'getappsource',
- 'getarraylength',
- 'getattr',
- 'getattribute',
- 'getattributenamespace',
- 'getattributenode',
- 'getattributenodens',
- 'getattributens',
- 'getbarheight',
- 'getbarmultiplier',
- 'getbarwidth',
- 'getbaseline',
- 'getbold',
- 'getbooleanarrayelements',
- 'getbooleanarrayregion',
- 'getbooleanfield',
- 'getbordercolor',
- 'getborderwidth',
- 'getbytearrayelements',
- 'getbytearrayregion',
- 'getbytefield',
- 'getchararrayelements',
- 'getchararrayregion',
- 'getcharfield',
- 'getclass',
- 'getcode',
- 'getcolor',
- 'getcolumn',
- 'getcolumncount',
- 'getcolumns',
- 'getdatabasebyalias',
- 'getdatabasebyid',
- 'getdatabasebyname',
- 'getdatabasehost',
- 'getdatabasetable',
- 'getdatabasetablebyalias',
- 'getdatabasetablebyid',
- 'getdatabasetablepart',
- 'getdatasource',
- 'getdatasourcedatabase',
- 'getdatasourcedatabasebyid',
- 'getdatasourcehost',
- 'getdatasourceid',
- 'getdatasourcename',
- 'getdefaultstorage',
- 'getdoublearrayelements',
- 'getdoublearrayregion',
- 'getdoublefield',
- 'getelementbyid',
- 'getelementsbytagname',
- 'getelementsbytagnamens',
- 'getencoding',
- 'getface',
- 'getfield',
- 'getfieldid',
- 'getfile',
- 'getfloatarrayelements',
- 'getfloatarrayregion',
- 'getfloatfield',
- 'getfont',
- 'getformat',
- 'getfullfontname',
- 'getgroup',
- 'getgroupid',
- 'getheader',
- 'getheaders',
- 'gethostdatabase',
- 'gethtmlattr',
- 'gethtmlattrstring',
- 'getinclude',
- 'getintarrayelements',
- 'getintarrayregion',
- 'getintfield',
- 'getisocomment',
- 'getitalic',
- 'getlasterror',
- 'getlcapitype',
- 'getlibrary',
- 'getlongarrayelements',
- 'getlongarrayregion',
- 'getlongfield',
- 'getmargins',
- 'getmethodid',
- 'getmode',
- 'getnameditem',
- 'getnameditemns',
- 'getnode',
- 'getnumericvalue',
- 'getobjectarrayelement',
- 'getobjectclass',
- 'getobjectfield',
- 'getpadding',
- 'getpagenumber',
- 'getparts',
- 'getprefs',
- 'getpropertyvalue',
- 'getprowcount',
- 'getpsfontname',
- 'getrange',
- 'getrowcount',
- 'getset',
- 'getshortarrayelements',
- 'getshortarrayregion',
- 'getshortfield',
- 'getsize',
- 'getsortfieldspart',
- 'getspacing',
- 'getstaticbooleanfield',
- 'getstaticbytefield',
- 'getstaticcharfield',
- 'getstaticdoublefield',
- 'getstaticfieldid',
- 'getstaticfloatfield',
- 'getstaticintfield',
- 'getstaticlongfield',
- 'getstaticmethodid',
- 'getstaticobjectfield',
- 'getstaticshortfield',
- 'getstatus',
- 'getstringchars',
- 'getstringlength',
- 'getstyle',
- 'getsupportedencodings',
- 'gettablebyid',
- 'gettext',
- 'gettextalignment',
- 'gettextsize',
- 'gettrigger',
- 'gettype',
- 'getunderline',
- 'getuniquealiasname',
- 'getuser',
- 'getuserbykey',
- 'getuserid',
- 'getversion',
- 'getzipfilebytes',
- 'givenblock',
- 'gmt',
- 'gotconnection',
- 'gotfileupload',
- 'groupby',
- 'groupcolumns',
- 'groupcount',
- 'groupjoin',
- 'handlebreakpointget',
- 'handlebreakpointlist',
- 'handlebreakpointremove',
- 'handlebreakpointset',
- 'handlebreakpointupdate',
- 'handlecontextget',
- 'handlecontextnames',
- 'handlecontinuation',
- 'handledefinitionbody',
- 'handledefinitionhead',
- 'handledefinitionresource',
- 'handledevconnection',
- 'handleevalexpired',
- 'handlefeatureget',
- 'handlefeatureset',
- 'handlelassoappcontent',
- 'handlelassoappresponse',
- 'handlenested',
- 'handlenormalconnection',
- 'handlepop',
- 'handleresource',
- 'handlesource',
- 'handlestackget',
- 'handlestderr',
- 'handlestdin',
- 'handlestdout',
- 'handshake',
- 'hasattribute',
- 'hasattributens',
- 'hasattributes',
- 'hasbinaryproperty',
- 'haschildnodes',
- 'hasexpired',
- 'hasfeature',
- 'hasfield',
- 'hash',
- 'hashtmlattr',
- 'hasmethod',
- 'hastable',
- 'hastrailingcomponent',
- 'hasvalue',
- 'head',
- 'header',
- 'headerbytes',
- 'headers',
- 'headersarray',
- 'headersmap',
- 'height',
- 'histogram',
- 'home',
- 'host',
- 'hostcolumnnames',
- 'hostcolumnnames2',
- 'hostcolumns',
- 'hostcolumns2',
- 'hostdatasource',
- 'hostextra',
- 'hostid',
- 'hostisdynamic',
- 'hostmap',
- 'hostmap2',
- 'hostname',
- 'hostpassword',
- 'hostport',
- 'hostschema',
- 'hosttableencoding',
- 'hosttonet16',
- 'hosttonet32',
- 'hosttonet64',
- 'hostusername',
- 'hour',
- 'hourofampm',
- 'hourofday',
- 'hoursbetween',
- 'href',
- 'hreflang',
- 'htmlcontent',
- 'htmlizestacktrace',
- 'htmlizestacktracelink',
- 'httpaccept',
- 'httpacceptencoding',
- 'httpacceptlanguage',
- 'httpauthorization',
- 'httpcachecontrol',
- 'httpconnection',
- 'httpcookie',
- 'httpequiv',
- 'httphost',
- 'httpreferer',
- 'httpreferrer',
- 'httpuseragent',
- 'hypot',
- 'id',
- 'idealinmemory',
- 'idle',
- 'idmap',
- 'ifempty',
- 'ifkey',
- 'ifnotempty',
- 'ifnotkey',
- 'ignorecase',
- 'ilogb',
- 'imgptr',
- 'implementation',
- 'import16bits',
- 'import32bits',
- 'import64bits',
- 'import8bits',
- 'importas',
- 'importbytes',
- 'importfdf',
- 'importnode',
- 'importpointer',
- 'importstring',
- 'in',
- 'include',
- 'includebytes',
- 'includelibrary',
- 'includelibraryonce',
- 'includeonce',
- 'includes',
- 'includestack',
- 'indaylighttime',
- 'index',
- 'init',
- 'initialize',
- 'initrequest',
- 'inits',
- 'inneroncompare',
- 'input',
- 'inputcolumns',
- 'inputtype',
- 'insert',
- 'insertback',
- 'insertbefore',
- 'insertdata',
- 'insertfirst',
- 'insertfrom',
- 'insertfront',
- 'insertinternal',
- 'insertlast',
- 'insertpage',
- 'install',
- 'installs',
- 'integer',
- 'internalsubset',
- 'interrupt',
- 'intersection',
- 'inttocond',
- 'invoke',
- 'invokeautocollect',
- 'invokeuntil',
- 'invokewhile',
- 'ioctl',
- 'isa',
- 'isalive',
- 'isallof',
- 'isalnum',
- 'isalpha',
- 'isanyof',
- 'isbase',
- 'isblank',
- 'iscntrl',
- 'isdigit',
- 'isdir',
+ 'ldap_scope_onelevel',
+ 'ldap_scope_subtree',
+ 'library_once',
+ 'library',
+ 'ljapi_initialize',
+ 'locale_availablelocales',
+ 'locale_canada',
+ 'locale_canadafrench',
+ 'locale_china',
+ 'locale_chinese',
+ 'locale_default',
+ 'locale_english',
+ 'locale_format_style_date_time',
+ 'locale_format_style_default',
+ 'locale_format_style_full',
+ 'locale_format_style_long',
+ 'locale_format_style_medium',
+ 'locale_format_style_none',
+ 'locale_format_style_short',
+ 'locale_format',
+ 'locale_france',
+ 'locale_french',
+ 'locale_german',
+ 'locale_germany',
+ 'locale_isocountries',
+ 'locale_isolanguages',
+ 'locale_italian',
+ 'locale_italy',
+ 'locale_japan',
+ 'locale_japanese',
+ 'locale_korea',
+ 'locale_korean',
+ 'locale_prc',
+ 'locale_setdefault',
+ 'locale_simplifiedchinese',
+ 'locale_taiwan',
+ 'locale_traditionalchinese',
+ 'locale_uk',
+ 'locale_us',
+ 'log_always',
+ 'log_critical',
+ 'log_deprecated',
+ 'log_destination_console',
+ 'log_destination_database',
+ 'log_destination_file',
+ 'log_detail',
+ 'log_initialize',
+ 'log_level_critical',
+ 'log_level_deprecated',
+ 'log_level_detail',
+ 'log_level_sql',
+ 'log_level_warning',
+ 'log_max_file_size',
+ 'log_setdestination',
+ 'log_sql',
+ 'log_trim_file_size',
+ 'log_warning',
+ 'log',
+ 'loop_abort',
+ 'loop_continue',
+ 'loop_count',
+ 'loop_key_pop',
+ 'loop_key_push',
+ 'loop_key',
+ 'loop_pop',
+ 'loop_push',
+ 'loop_value_pop',
+ 'loop_value_push',
+ 'loop_value',
+ 'loop',
+ 'lt',
+ 'lte',
+ 'main_thread_only',
+ 'max',
+ 'maxrecords_value',
+ 'median',
+ 'method_name',
+ 'micros',
+ 'millis',
+ 'min',
+ 'minimal',
+ 'mongo_insert_continue_on_error',
+ 'mongo_insert_no_validate',
+ 'mongo_insert_none',
+ 'mongo_query_await_data',
+ 'mongo_query_exhaust',
+ 'mongo_query_no_cursor_timeout',
+ 'mongo_query_none',
+ 'mongo_query_oplog_replay',
+ 'mongo_query_partial',
+ 'mongo_query_slave_ok',
+ 'mongo_query_tailable_cursor',
+ 'mongo_remove_none',
+ 'mongo_remove_single_remove',
+ 'mongo_update_multi_update',
+ 'mongo_update_no_validate',
+ 'mongo_update_none',
+ 'mongo_update_upsert',
+ 'mustache_compile_file',
+ 'mustache_compile_string',
+ 'mustache_include',
+ 'mysqlds',
+ 'namespace_global',
+ 'namespace_import',
+ 'namespace_using',
+ 'nbw',
+ 'ncn',
+ 'neq',
+ 'net_connectinprogress',
+ 'net_connectok',
+ 'net_typessl',
+ 'net_typessltcp',
+ 'net_typessludp',
+ 'net_typetcp',
+ 'net_typeudp',
+ 'net_waitread',
+ 'net_waittimeout',
+ 'net_waitwrite',
+ 'new',
+ 'none',
+ 'nrx',
+ 'nslookup',
+ 'odbc_session_driver_mssql',
+ 'odbc',
+ 'output_none',
+ 'output',
+ 'pdf_package',
+ 'pdf_rectangle',
+ 'pdf_serve',
+ 'pi',
+ 'portal',
+ 'postgresql',
+ 'process',
+ 'protect_now',
+ 'protect',
+ 'queriable_average',
+ 'queriable_defaultcompare',
+ 'queriable_do',
+ 'queriable_internal_combinebindings',
+ 'queriable_max',
+ 'queriable_min',
+ 'queriable_qsort',
+ 'queriable_reversecompare',
+ 'queriable_sum',
+ 'random_seed',
+ 'range',
+ 'records_array',
+ 'records_map',
+ 'records',
+ 'redirect_url',
+ 'referer_url',
+ 'referrer_url',
+ 'register_thread',
+ 'register',
+ 'response_filepath',
+ 'response_localpath',
+ 'response_path',
+ 'response_realm',
+ 'response_root',
+ 'resultset_count',
+ 'resultset',
+ 'resultsets',
+ 'rows_array',
+ 'rows_impl',
+ 'rows',
+ 'rx',
+ 'schema_name',
+ 'security_database',
+ 'security_default_realm',
+ 'security_initialize',
+ 'security_table_groups',
+ 'security_table_ug_map',
+ 'security_table_users',
+ 'selected',
+ 'series',
+ 'server_admin',
+ 'server_ip',
+ 'server_name',
+ 'server_port',
+ 'server_protocol',
+ 'server_push',
+ 'server_signature',
+ 'server_software',
+ 'session_abort',
+ 'session_addvar',
+ 'session_decorate',
+ 'session_deleteexpired',
+ 'session_end',
+ 'session_getdefaultdriver',
+ 'session_id',
+ 'session_initialize',
+ 'session_removevar',
+ 'session_result',
+ 'session_setdefaultdriver',
+ 'session_start',
+ 'shown_count',
+ 'shown_first',
+ 'shown_last',
+ 'site_id',
+ 'site_name',
+ 'skiprecords_value',
+ 'sleep',
+ 'split_thread',
+ 'sqlite_abort',
+ 'sqlite_auth',
+ 'sqlite_blob',
+ 'sqlite_busy',
+ 'sqlite_cantopen',
+ 'sqlite_constraint',
+ 'sqlite_corrupt',
+ 'sqlite_createdb',
+ 'sqlite_done',
+ 'sqlite_empty',
+ 'sqlite_error',
+ 'sqlite_float',
+ 'sqlite_format',
+ 'sqlite_full',
+ 'sqlite_integer',
+ 'sqlite_internal',
+ 'sqlite_interrupt',
+ 'sqlite_ioerr',
+ 'sqlite_locked',
+ 'sqlite_mismatch',
+ 'sqlite_misuse',
+ 'sqlite_nolfs',
+ 'sqlite_nomem',
+ 'sqlite_notadb',
+ 'sqlite_notfound',
+ 'sqlite_null',
+ 'sqlite_ok',
+ 'sqlite_perm',
+ 'sqlite_protocol',
+ 'sqlite_range',
+ 'sqlite_readonly',
+ 'sqlite_row',
+ 'sqlite_schema',
+ 'sqlite_setsleepmillis',
+ 'sqlite_setsleeptries',
+ 'sqlite_text',
+ 'sqlite_toobig',
+ 'sqliteconnector',
+ 'staticarray_join',
+ 'stdout',
+ 'stdoutnl',
+ 'string_validcharset',
+ 'suspend',
+ 'sys_appspath',
+ 'sys_chroot',
+ 'sys_clock',
+ 'sys_clockspersec',
+ 'sys_credits',
+ 'sys_databasespath',
+ 'sys_detach_exec',
+ 'sys_difftime',
+ 'sys_dll_ext',
+ 'sys_drand48',
+ 'sys_environ',
+ 'sys_eol',
+ 'sys_erand48',
+ 'sys_errno',
+ 'sys_exec_pid_to_os_pid',
+ 'sys_exec',
+ 'sys_exit',
+ 'sys_fork',
+ 'sys_garbagecollect',
+ 'sys_getbytessincegc',
+ 'sys_getchar',
+ 'sys_getegid',
+ 'sys_getenv',
+ 'sys_geteuid',
+ 'sys_getgid',
+ 'sys_getgrnam',
+ 'sys_getheapfreebytes',
+ 'sys_getheapsize',
+ 'sys_getlogin',
+ 'sys_getpid',
+ 'sys_getppid',
+ 'sys_getpwnam',
+ 'sys_getpwuid',
+ 'sys_getstartclock',
+ 'sys_getthreadcount',
+ 'sys_getuid',
+ 'sys_growheapby',
+ 'sys_homepath',
+ 'sys_is_full_path',
+ 'sys_is_windows',
+ 'sys_isfullpath',
+ 'sys_iswindows',
+ 'sys_iterate',
+ 'sys_jrand48',
+ 'sys_kill_exec',
+ 'sys_kill',
+ 'sys_lcong48',
+ 'sys_librariespath',
+ 'sys_listtraits',
+ 'sys_listtypes',
+ 'sys_listunboundmethods',
+ 'sys_loadlibrary',
+ 'sys_lrand48',
+ 'sys_masterhomepath',
+ 'sys_mrand48',
+ 'sys_nrand48',
+ 'sys_pid_exec',
+ 'sys_pointersize',
+ 'sys_rand',
+ 'sys_random',
+ 'sys_seed48',
+ 'sys_setenv',
+ 'sys_setgid',
+ 'sys_setsid',
+ 'sys_setuid',
+ 'sys_sigabrt',
+ 'sys_sigalrm',
+ 'sys_sigbus',
+ 'sys_sigchld',
+ 'sys_sigcont',
+ 'sys_sigfpe',
+ 'sys_sighup',
+ 'sys_sigill',
+ 'sys_sigint',
+ 'sys_sigkill',
+ 'sys_sigpipe',
+ 'sys_sigprof',
+ 'sys_sigquit',
+ 'sys_sigsegv',
+ 'sys_sigstop',
+ 'sys_sigsys',
+ 'sys_sigterm',
+ 'sys_sigtrap',
+ 'sys_sigtstp',
+ 'sys_sigttin',
+ 'sys_sigttou',
+ 'sys_sigurg',
+ 'sys_sigusr1',
+ 'sys_sigusr2',
+ 'sys_sigvtalrm',
+ 'sys_sigxcpu',
+ 'sys_sigxfsz',
+ 'sys_srand',
+ 'sys_srand48',
+ 'sys_srandom',
+ 'sys_strerror',
+ 'sys_supportpath',
+ 'sys_test_exec',
+ 'sys_time',
+ 'sys_uname',
+ 'sys_unsetenv',
+ 'sys_usercapimodulepath',
+ 'sys_userstartuppath',
+ 'sys_version',
+ 'sys_wait_exec',
+ 'sys_waitpid',
+ 'sys_wcontinued',
+ 'sys_while',
+ 'sys_wnohang',
+ 'sys_wuntraced',
+ 'table_name',
+ 'tag_exists',
+ 'tag_name',
+ 'thread_var_get',
+ 'thread_var_pop',
+ 'thread_var_push',
+ 'threadvar_find',
+ 'threadvar_get',
+ 'threadvar_set_asrt',
+ 'threadvar_set',
+ 'timer',
+ 'token_value',
+ 'treemap',
+ 'u_lb_alphabetic',
+ 'u_lb_ambiguous',
+ 'u_lb_break_after',
+ 'u_lb_break_before',
+ 'u_lb_break_both',
+ 'u_lb_break_symbols',
+ 'u_lb_carriage_return',
+ 'u_lb_close_punctuation',
+ 'u_lb_combining_mark',
+ 'u_lb_complex_context',
+ 'u_lb_contingent_break',
+ 'u_lb_exclamation',
+ 'u_lb_glue',
+ 'u_lb_h2',
+ 'u_lb_h3',
+ 'u_lb_hyphen',
+ 'u_lb_ideographic',
+ 'u_lb_infix_numeric',
+ 'u_lb_inseparable',
+ 'u_lb_jl',
+ 'u_lb_jt',
+ 'u_lb_jv',
+ 'u_lb_line_feed',
+ 'u_lb_mandatory_break',
+ 'u_lb_next_line',
+ 'u_lb_nonstarter',
+ 'u_lb_numeric',
+ 'u_lb_open_punctuation',
+ 'u_lb_postfix_numeric',
+ 'u_lb_prefix_numeric',
+ 'u_lb_quotation',
+ 'u_lb_space',
+ 'u_lb_surrogate',
+ 'u_lb_unknown',
+ 'u_lb_word_joiner',
+ 'u_lb_zwspace',
+ 'u_nt_decimal',
+ 'u_nt_digit',
+ 'u_nt_none',
+ 'u_nt_numeric',
+ 'u_sb_aterm',
+ 'u_sb_close',
+ 'u_sb_format',
+ 'u_sb_lower',
+ 'u_sb_numeric',
+ 'u_sb_oletter',
+ 'u_sb_other',
+ 'u_sb_sep',
+ 'u_sb_sp',
+ 'u_sb_sterm',
+ 'u_sb_upper',
+ 'u_wb_aletter',
+ 'u_wb_extendnumlet',
+ 'u_wb_format',
+ 'u_wb_katakana',
+ 'u_wb_midletter',
+ 'u_wb_midnum',
+ 'u_wb_numeric',
+ 'u_wb_other',
+ 'ucal_ampm',
+ 'ucal_dayofmonth',
+ 'ucal_dayofweek',
+ 'ucal_dayofweekinmonth',
+ 'ucal_dayofyear',
+ 'ucal_daysinfirstweek',
+ 'ucal_dowlocal',
+ 'ucal_dstoffset',
+ 'ucal_era',
+ 'ucal_extendedyear',
+ 'ucal_firstdayofweek',
+ 'ucal_hour',
+ 'ucal_hourofday',
+ 'ucal_julianday',
+ 'ucal_lenient',
+ 'ucal_listtimezones',
+ 'ucal_millisecond',
+ 'ucal_millisecondsinday',
+ 'ucal_minute',
+ 'ucal_month',
+ 'ucal_second',
+ 'ucal_weekofmonth',
+ 'ucal_weekofyear',
+ 'ucal_year',
+ 'ucal_yearwoy',
+ 'ucal_zoneoffset',
+ 'uchar_age',
+ 'uchar_alphabetic',
+ 'uchar_ascii_hex_digit',
+ 'uchar_bidi_class',
+ 'uchar_bidi_control',
+ 'uchar_bidi_mirrored',
+ 'uchar_bidi_mirroring_glyph',
+ 'uchar_block',
+ 'uchar_canonical_combining_class',
+ 'uchar_case_folding',
+ 'uchar_case_sensitive',
+ 'uchar_dash',
+ 'uchar_decomposition_type',
+ 'uchar_default_ignorable_code_point',
+ 'uchar_deprecated',
+ 'uchar_diacritic',
+ 'uchar_east_asian_width',
+ 'uchar_extender',
+ 'uchar_full_composition_exclusion',
+ 'uchar_general_category_mask',
+ 'uchar_general_category',
+ 'uchar_grapheme_base',
+ 'uchar_grapheme_cluster_break',
+ 'uchar_grapheme_extend',
+ 'uchar_grapheme_link',
+ 'uchar_hangul_syllable_type',
+ 'uchar_hex_digit',
+ 'uchar_hyphen',
+ 'uchar_id_continue',
+ 'uchar_ideographic',
+ 'uchar_ids_binary_operator',
+ 'uchar_ids_trinary_operator',
+ 'uchar_iso_comment',
+ 'uchar_join_control',
+ 'uchar_joining_group',
+ 'uchar_joining_type',
+ 'uchar_lead_canonical_combining_class',
+ 'uchar_line_break',
+ 'uchar_logical_order_exception',
+ 'uchar_lowercase_mapping',
+ 'uchar_lowercase',
+ 'uchar_math',
+ 'uchar_name',
+ 'uchar_nfc_inert',
+ 'uchar_nfc_quick_check',
+ 'uchar_nfd_inert',
+ 'uchar_nfd_quick_check',
+ 'uchar_nfkc_inert',
+ 'uchar_nfkc_quick_check',
+ 'uchar_nfkd_inert',
+ 'uchar_nfkd_quick_check',
+ 'uchar_noncharacter_code_point',
+ 'uchar_numeric_type',
+ 'uchar_numeric_value',
+ 'uchar_pattern_syntax',
+ 'uchar_pattern_white_space',
+ 'uchar_posix_alnum',
+ 'uchar_posix_blank',
+ 'uchar_posix_graph',
+ 'uchar_posix_print',
+ 'uchar_posix_xdigit',
+ 'uchar_quotation_mark',
+ 'uchar_radical',
+ 'uchar_s_term',
+ 'uchar_script',
+ 'uchar_segment_starter',
+ 'uchar_sentence_break',
+ 'uchar_simple_case_folding',
+ 'uchar_simple_lowercase_mapping',
+ 'uchar_simple_titlecase_mapping',
+ 'uchar_simple_uppercase_mapping',
+ 'uchar_soft_dotted',
+ 'uchar_terminal_punctuation',
+ 'uchar_titlecase_mapping',
+ 'uchar_trail_canonical_combining_class',
+ 'uchar_unicode_1_name',
+ 'uchar_unified_ideograph',
+ 'uchar_uppercase_mapping',
+ 'uchar_uppercase',
+ 'uchar_variation_selector',
+ 'uchar_white_space',
+ 'uchar_word_break',
+ 'uchar_xid_continue',
+ 'uncompress',
+ 'usage',
+ 'uuid_compare',
+ 'uuid_copy',
+ 'uuid_generate_random',
+ 'uuid_generate_time',
+ 'uuid_generate',
+ 'uuid_is_null',
+ 'uuid_parse',
+ 'uuid_unparse_lower',
+ 'uuid_unparse_upper',
+ 'uuid_unparse',
+ 'value_list',
+ 'value_listitem',
+ 'valuelistitem',
+ 'var_keys',
+ 'var_values',
+ 'wap_isenabled',
+ 'wap_maxbuttons',
+ 'wap_maxcolumns',
+ 'wap_maxhorzpixels',
+ 'wap_maxrows',
+ 'wap_maxvertpixels',
+ 'web_handlefcgirequest',
+ 'web_node_content_representation_css',
+ 'web_node_content_representation_html',
+ 'web_node_content_representation_js',
+ 'web_node_content_representation_xhr',
+ 'web_node_forpath',
+ 'web_nodes_initialize',
+ 'web_nodes_normalizeextension',
+ 'web_nodes_processcontentnode',
+ 'web_nodes_requesthandler',
+ 'web_response_nodesentry',
+ 'web_router_database',
+ 'web_router_initialize',
+ 'websocket_handler_timeout',
+ 'wexitstatus',
+ 'wifcontinued',
+ 'wifexited',
+ 'wifsignaled',
+ 'wifstopped',
+ 'wstopsig',
+ 'wtermsig',
+ 'xml_transform',
+ 'xml',
+ 'zip_add_dir',
+ 'zip_add',
+ 'zip_checkcons',
+ 'zip_close',
+ 'zip_cm_bzip2',
+ 'zip_cm_default',
+ 'zip_cm_deflate',
+ 'zip_cm_deflate64',
+ 'zip_cm_implode',
+ 'zip_cm_pkware_implode',
+ 'zip_cm_reduce_1',
+ 'zip_cm_reduce_2',
+ 'zip_cm_reduce_3',
+ 'zip_cm_reduce_4',
+ 'zip_cm_shrink',
+ 'zip_cm_store',
+ 'zip_create',
+ 'zip_delete',
+ 'zip_em_3des_112',
+ 'zip_em_3des_168',
+ 'zip_em_aes_128',
+ 'zip_em_aes_192',
+ 'zip_em_aes_256',
+ 'zip_em_des',
+ 'zip_em_none',
+ 'zip_em_rc2_old',
+ 'zip_em_rc2',
+ 'zip_em_rc4',
+ 'zip_em_trad_pkware',
+ 'zip_em_unknown',
+ 'zip_er_changed',
+ 'zip_er_close',
+ 'zip_er_compnotsupp',
+ 'zip_er_crc',
+ 'zip_er_deleted',
+ 'zip_er_eof',
+ 'zip_er_exists',
+ 'zip_er_incons',
+ 'zip_er_internal',
+ 'zip_er_inval',
+ 'zip_er_memory',
+ 'zip_er_multidisk',
+ 'zip_er_noent',
+ 'zip_er_nozip',
+ 'zip_er_ok',
+ 'zip_er_open',
+ 'zip_er_read',
+ 'zip_er_remove',
+ 'zip_er_rename',
+ 'zip_er_seek',
+ 'zip_er_tmpopen',
+ 'zip_er_write',
+ 'zip_er_zipclosed',
+ 'zip_er_zlib',
+ 'zip_error_get_sys_type',
+ 'zip_error_get',
+ 'zip_error_to_str',
+ 'zip_et_none',
+ 'zip_et_sys',
+ 'zip_et_zlib',
+ 'zip_excl',
+ 'zip_fclose',
+ 'zip_file_error_get',
+ 'zip_file_strerror',
+ 'zip_fl_compressed',
+ 'zip_fl_nocase',
+ 'zip_fl_nodir',
+ 'zip_fl_unchanged',
+ 'zip_fopen_index',
+ 'zip_fopen',
+ 'zip_fread',
+ 'zip_get_archive_comment',
+ 'zip_get_file_comment',
+ 'zip_get_name',
+ 'zip_get_num_files',
+ 'zip_name_locate',
+ 'zip_open',
+ 'zip_rename',
+ 'zip_replace',
+ 'zip_set_archive_comment',
+ 'zip_set_file_comment',
+ 'zip_stat_index',
+ 'zip_stat',
+ 'zip_strerror',
+ 'zip_unchange_all',
+ 'zip_unchange_archive',
+ 'zip_unchange',
+ 'zlib_version',
+ ),
+ 'Lasso 8 Tags': (
+ '__char',
+ '__sync_timestamp__',
+ '_admin_addgroup',
+ '_admin_adduser',
+ '_admin_defaultconnector',
+ '_admin_defaultconnectornames',
+ '_admin_defaultdatabase',
+ '_admin_defaultfield',
+ '_admin_defaultgroup',
+ '_admin_defaulthost',
+ '_admin_defaulttable',
+ '_admin_defaultuser',
+ '_admin_deleteconnector',
+ '_admin_deletedatabase',
+ '_admin_deletefield',
+ '_admin_deletegroup',
+ '_admin_deletehost',
+ '_admin_deletetable',
+ '_admin_deleteuser',
+ '_admin_duplicategroup',
+ '_admin_internaldatabase',
+ '_admin_listconnectors',
+ '_admin_listdatabases',
+ '_admin_listfields',
+ '_admin_listgroups',
+ '_admin_listhosts',
+ '_admin_listtables',
+ '_admin_listusers',
+ '_admin_refreshconnector',
+ '_admin_refreshsecurity',
+ '_admin_servicepath',
+ '_admin_updateconnector',
+ '_admin_updatedatabase',
+ '_admin_updatefield',
+ '_admin_updategroup',
+ '_admin_updatehost',
+ '_admin_updatetable',
+ '_admin_updateuser',
+ '_chartfx_activation_string',
+ '_chartfx_getchallengestring',
+ '_chop_args',
+ '_chop_mimes',
+ '_client_addr_old',
+ '_client_address_old',
+ '_client_ip_old',
+ '_database_names',
+ '_datasource_reload',
+ '_date_current',
+ '_date_format',
+ '_date_msec',
+ '_date_parse',
+ '_execution_timelimit',
+ '_file_chmod',
+ '_initialize',
+ '_jdbc_acceptsurl',
+ '_jdbc_debug',
+ '_jdbc_deletehost',
+ '_jdbc_driverclasses',
+ '_jdbc_driverinfo',
+ '_jdbc_metainfo',
+ '_jdbc_propertyinfo',
+ '_jdbc_setdriver',
+ '_lasso_param',
+ '_log_helper',
+ '_proc_noparam',
+ '_proc_withparam',
+ '_recursion_limit',
+ '_request_param',
+ '_security_binaryexpiration',
+ '_security_flushcaches',
+ '_security_isserialized',
+ '_security_serialexpiration',
+ '_srand',
+ '_strict_literals',
+ '_substring',
+ '_xmlrpc_exconverter',
+ '_xmlrpc_inconverter',
+ '_xmlrpc_xmlinconverter',
+ 'abort',
+ 'action_addinfo',
+ 'action_addrecord',
+ 'action_param',
+ 'action_params',
+ 'action_setfoundcount',
+ 'action_setrecordid',
+ 'action_settotalcount',
+ 'action_statement',
+ 'admin_allowedfileroots',
+ 'admin_changeuser',
+ 'admin_createuser',
+ 'admin_currentgroups',
+ 'admin_currentuserid',
+ 'admin_currentusername',
+ 'admin_getpref',
+ 'admin_groupassignuser',
+ 'admin_grouplistusers',
+ 'admin_groupremoveuser',
+ 'admin_lassoservicepath',
+ 'admin_listgroups',
+ 'admin_refreshlicensing',
+ 'admin_refreshsecurity',
+ 'admin_reloaddatasource',
+ 'admin_removepref',
+ 'admin_setpref',
+ 'admin_userexists',
+ 'admin_userlistgroups',
+ 'all',
+ 'and',
+ 'array',
+ 'array_iterator',
+ 'auth',
+ 'auth_admin',
+ 'auth_auth',
+ 'auth_custom',
+ 'auth_group',
+ 'auth_prompt',
+ 'auth_user',
+ 'base64',
+ 'bean',
+ 'bigint',
+ 'bom_utf16be',
+ 'bom_utf16le',
+ 'bom_utf32be',
+ 'bom_utf32le',
+ 'bom_utf8',
+ 'boolean',
+ 'bw',
+ 'bytes',
+ 'cache',
+ 'cache_delete',
+ 'cache_empty',
+ 'cache_exists',
+ 'cache_fetch',
+ 'cache_internal',
+ 'cache_maintenance',
+ 'cache_object',
+ 'cache_preferences',
+ 'cache_store',
+ 'case',
+ 'chartfx',
+ 'chartfx_records',
+ 'chartfx_serve',
+ 'checked',
+ 'choice_list',
+ 'choice_listitem',
+ 'choicelistitem',
+ 'cipher_decrypt',
+ 'cipher_digest',
+ 'cipher_encrypt',
+ 'cipher_hmac',
+ 'cipher_keylength',
+ 'cipher_list',
+ 'click_text',
+ 'client_addr',
+ 'client_address',
+ 'client_authorization',
+ 'client_browser',
+ 'client_contentlength',
+ 'client_contenttype',
+ 'client_cookielist',
+ 'client_cookies',
+ 'client_encoding',
+ 'client_formmethod',
+ 'client_getargs',
+ 'client_getparams',
+ 'client_headers',
+ 'client_ip',
+ 'client_ipfrominteger',
+ 'client_iptointeger',
+ 'client_password',
+ 'client_postargs',
+ 'client_postparams',
+ 'client_type',
+ 'client_url',
+ 'client_username',
+ 'cn',
+ 'column',
+ 'column_name',
+ 'column_names',
+ 'compare_beginswith',
+ 'compare_contains',
+ 'compare_endswith',
+ 'compare_equalto',
+ 'compare_greaterthan',
+ 'compare_greaterthanorequals',
+ 'compare_greaterthanorequls',
+ 'compare_lessthan',
+ 'compare_lessthanorequals',
+ 'compare_notbeginswith',
+ 'compare_notcontains',
+ 'compare_notendswith',
+ 'compare_notequalto',
+ 'compare_notregexp',
+ 'compare_regexp',
+ 'compare_strictequalto',
+ 'compare_strictnotequalto',
+ 'compiler_removecacheddoc',
+ 'compiler_setdefaultparserflags',
+ 'compress',
+ 'content_body',
+ 'content_encoding',
+ 'content_header',
+ 'content_type',
+ 'cookie',
+ 'cookie_set',
+ 'curl_ftp_getfile',
+ 'curl_ftp_getlisting',
+ 'curl_ftp_putfile',
+ 'curl_include_url',
+ 'currency',
+ 'database_changecolumn',
+ 'database_changefield',
+ 'database_createcolumn',
+ 'database_createfield',
+ 'database_createtable',
+ 'database_fmcontainer',
+ 'database_hostinfo',
+ 'database_inline',
+ 'database_name',
+ 'database_nameitem',
+ 'database_names',
+ 'database_realname',
+ 'database_removecolumn',
+ 'database_removefield',
+ 'database_removetable',
+ 'database_repeating',
+ 'database_repeating_valueitem',
+ 'database_repeatingvalueitem',
+ 'database_schemanameitem',
+ 'database_schemanames',
+ 'database_tablecolumn',
+ 'database_tablenameitem',
+ 'database_tablenames',
+ 'datasource_name',
+ 'datasource_register',
+ 'date',
+ 'date__date_current',
+ 'date__date_format',
+ 'date__date_msec',
+ 'date__date_parse',
+ 'date_add',
+ 'date_date',
+ 'date_difference',
+ 'date_duration',
+ 'date_format',
+ 'date_getcurrentdate',
+ 'date_getday',
+ 'date_getdayofweek',
+ 'date_gethour',
+ 'date_getlocaltimezone',
+ 'date_getminute',
+ 'date_getmonth',
+ 'date_getsecond',
+ 'date_gettime',
+ 'date_getyear',
+ 'date_gmttolocal',
+ 'date_localtogmt',
+ 'date_maximum',
+ 'date_minimum',
+ 'date_msec',
+ 'date_setformat',
+ 'date_subtract',
+ 'db_layoutnameitem',
+ 'db_layoutnames',
+ 'db_nameitem',
+ 'db_names',
+ 'db_tablenameitem',
+ 'db_tablenames',
+ 'dbi_column_names',
+ 'dbi_field_names',
+ 'decimal',
+ 'decimal_setglobaldefaultprecision',
+ 'decode_base64',
+ 'decode_bheader',
+ 'decode_hex',
+ 'decode_html',
+ 'decode_json',
+ 'decode_qheader',
+ 'decode_quotedprintable',
+ 'decode_quotedprintablebytes',
+ 'decode_url',
+ 'decode_xml',
+ 'decompress',
+ 'decrypt_blowfish',
+ 'decrypt_blowfish2',
+ 'default',
+ 'define_atbegin',
+ 'define_atend',
+ 'define_constant',
+ 'define_prototype',
+ 'define_tag',
+ 'define_tagp',
+ 'define_type',
+ 'define_typep',
+ 'deserialize',
+ 'directory_directorynameitem',
+ 'directory_lister',
+ 'directory_nameitem',
+ 'directorynameitem',
+ 'dns_default',
+ 'dns_lookup',
+ 'dns_response',
+ 'duration',
+ 'else',
+ 'email_batch',
+ 'email_compose',
+ 'email_digestchallenge',
+ 'email_digestresponse',
+ 'email_extract',
+ 'email_findemails',
+ 'email_immediate',
+ 'email_merge',
+ 'email_mxerror',
+ 'email_mxlookup',
+ 'email_parse',
+ 'email_pop',
+ 'email_queue',
+ 'email_result',
+ 'email_safeemail',
+ 'email_send',
+ 'email_smtp',
+ 'email_status',
+ 'email_token',
+ 'email_translatebreakstocrlf',
+ 'encode_base64',
+ 'encode_bheader',
+ 'encode_break',
+ 'encode_breaks',
+ 'encode_crc32',
+ 'encode_hex',
+ 'encode_html',
+ 'encode_htmltoxml',
+ 'encode_json',
+ 'encode_qheader',
+ 'encode_quotedprintable',
+ 'encode_quotedprintablebytes',
+ 'encode_set',
+ 'encode_smart',
+ 'encode_sql',
+ 'encode_sql92',
+ 'encode_stricturl',
+ 'encode_url',
+ 'encode_xml',
+ 'encrypt_blowfish',
+ 'encrypt_blowfish2',
+ 'encrypt_crammd5',
+ 'encrypt_hmac',
+ 'encrypt_md5',
+ 'eq',
+ 'error_adderror',
+ 'error_code',
+ 'error_code_aborted',
+ 'error_code_assert',
+ 'error_code_bof',
+ 'error_code_connectioninvalid',
+ 'error_code_couldnotclosefile',
+ 'error_code_couldnotcreateoropenfile',
+ 'error_code_couldnotdeletefile',
+ 'error_code_couldnotdisposememory',
+ 'error_code_couldnotlockmemory',
+ 'error_code_couldnotreadfromfile',
+ 'error_code_couldnotunlockmemory',
+ 'error_code_couldnotwritetofile',
+ 'error_code_criterianotmet',
+ 'error_code_datasourceerror',
+ 'error_code_directoryfull',
+ 'error_code_diskfull',
+ 'error_code_dividebyzero',
+ 'error_code_eof',
+ 'error_code_failure',
+ 'error_code_fieldrestriction',
+ 'error_code_file',
+ 'error_code_filealreadyexists',
+ 'error_code_filecorrupt',
+ 'error_code_fileinvalid',
+ 'error_code_fileinvalidaccessmode',
+ 'error_code_fileisclosed',
+ 'error_code_fileisopen',
+ 'error_code_filelocked',
+ 'error_code_filenotfound',
+ 'error_code_fileunlocked',
+ 'error_code_httpfilenotfound',
+ 'error_code_illegalinstruction',
+ 'error_code_illegaluseoffrozeninstance',
+ 'error_code_invaliddatabase',
+ 'error_code_invalidfilename',
+ 'error_code_invalidmemoryobject',
+ 'error_code_invalidparameter',
+ 'error_code_invalidpassword',
+ 'error_code_invalidpathname',
+ 'error_code_invalidusername',
+ 'error_code_ioerror',
+ 'error_code_loopaborted',
+ 'error_code_memory',
+ 'error_code_network',
+ 'error_code_nilpointer',
+ 'error_code_noerr',
+ 'error_code_nopermission',
+ 'error_code_outofmemory',
+ 'error_code_outofstackspace',
+ 'error_code_overflow',
+ 'error_code_postconditionfailed',
+ 'error_code_preconditionfailed',
+ 'error_code_resnotfound',
+ 'error_code_resource',
+ 'error_code_streamreaderror',
+ 'error_code_streamwriteerror',
+ 'error_code_syntaxerror',
+ 'error_code_tagnotfound',
+ 'error_code_unknownerror',
+ 'error_code_varnotfound',
+ 'error_code_volumedoesnotexist',
+ 'error_code_webactionnotsupported',
+ 'error_code_webadderror',
+ 'error_code_webdeleteerror',
+ 'error_code_webmodulenotfound',
+ 'error_code_webnosuchobject',
+ 'error_code_webrepeatingrelatedfield',
+ 'error_code_webrequiredfieldmissing',
+ 'error_code_webtimeout',
+ 'error_code_webupdateerror',
+ 'error_columnrestriction',
+ 'error_currenterror',
+ 'error_databaseconnectionunavailable',
+ 'error_databasetimeout',
+ 'error_deleteerror',
+ 'error_fieldrestriction',
+ 'error_filenotfound',
+ 'error_invaliddatabase',
+ 'error_invalidpassword',
+ 'error_invalidusername',
+ 'error_modulenotfound',
+ 'error_msg',
+ 'error_msg_aborted',
+ 'error_msg_assert',
+ 'error_msg_bof',
+ 'error_msg_connectioninvalid',
+ 'error_msg_couldnotclosefile',
+ 'error_msg_couldnotcreateoropenfile',
+ 'error_msg_couldnotdeletefile',
+ 'error_msg_couldnotdisposememory',
+ 'error_msg_couldnotlockmemory',
+ 'error_msg_couldnotreadfromfile',
+ 'error_msg_couldnotunlockmemory',
+ 'error_msg_couldnotwritetofile',
+ 'error_msg_criterianotmet',
+ 'error_msg_datasourceerror',
+ 'error_msg_directoryfull',
+ 'error_msg_diskfull',
+ 'error_msg_dividebyzero',
+ 'error_msg_eof',
+ 'error_msg_failure',
+ 'error_msg_fieldrestriction',
+ 'error_msg_file',
+ 'error_msg_filealreadyexists',
+ 'error_msg_filecorrupt',
+ 'error_msg_fileinvalid',
+ 'error_msg_fileinvalidaccessmode',
+ 'error_msg_fileisclosed',
+ 'error_msg_fileisopen',
+ 'error_msg_filelocked',
+ 'error_msg_filenotfound',
+ 'error_msg_fileunlocked',
+ 'error_msg_httpfilenotfound',
+ 'error_msg_illegalinstruction',
+ 'error_msg_illegaluseoffrozeninstance',
+ 'error_msg_invaliddatabase',
+ 'error_msg_invalidfilename',
+ 'error_msg_invalidmemoryobject',
+ 'error_msg_invalidparameter',
+ 'error_msg_invalidpassword',
+ 'error_msg_invalidpathname',
+ 'error_msg_invalidusername',
+ 'error_msg_ioerror',
+ 'error_msg_loopaborted',
+ 'error_msg_memory',
+ 'error_msg_network',
+ 'error_msg_nilpointer',
+ 'error_msg_noerr',
+ 'error_msg_nopermission',
+ 'error_msg_outofmemory',
+ 'error_msg_outofstackspace',
+ 'error_msg_overflow',
+ 'error_msg_postconditionfailed',
+ 'error_msg_preconditionfailed',
+ 'error_msg_resnotfound',
+ 'error_msg_resource',
+ 'error_msg_streamreaderror',
+ 'error_msg_streamwriteerror',
+ 'error_msg_syntaxerror',
+ 'error_msg_tagnotfound',
+ 'error_msg_unknownerror',
+ 'error_msg_varnotfound',
+ 'error_msg_volumedoesnotexist',
+ 'error_msg_webactionnotsupported',
+ 'error_msg_webadderror',
+ 'error_msg_webdeleteerror',
+ 'error_msg_webmodulenotfound',
+ 'error_msg_webnosuchobject',
+ 'error_msg_webrepeatingrelatedfield',
+ 'error_msg_webrequiredfieldmissing',
+ 'error_msg_webtimeout',
+ 'error_msg_webupdateerror',
+ 'error_noerror',
+ 'error_nopermission',
+ 'error_norecordsfound',
+ 'error_outofmemory',
+ 'error_pop',
+ 'error_push',
+ 'error_reqcolumnmissing',
+ 'error_reqfieldmissing',
+ 'error_requiredcolumnmissing',
+ 'error_requiredfieldmissing',
+ 'error_reset',
+ 'error_seterrorcode',
+ 'error_seterrormessage',
+ 'error_updateerror',
+ 'euro',
+ 'event_schedule',
+ 'ew',
+ 'fail',
+ 'fail_if',
+ 'false',
+ 'field',
+ 'field_name',
+ 'field_names',
+ 'file',
+ 'file_autoresolvefullpaths',
+ 'file_chmod',
+ 'file_control',
+ 'file_copy',
+ 'file_create',
+ 'file_creationdate',
+ 'file_currenterror',
+ 'file_delete',
+ 'file_exists',
+ 'file_getlinecount',
+ 'file_getsize',
+ 'file_isdirectory',
+ 'file_listdirectory',
+ 'file_moddate',
+ 'file_modechar',
+ 'file_modeline',
+ 'file_move',
+ 'file_openread',
+ 'file_openreadwrite',
+ 'file_openwrite',
+ 'file_openwriteappend',
+ 'file_openwritetruncate',
+ 'file_probeeol',
+ 'file_processuploads',
+ 'file_read',
+ 'file_readline',
+ 'file_rename',
+ 'file_serve',
+ 'file_setsize',
+ 'file_stream',
+ 'file_streamcopy',
+ 'file_uploads',
+ 'file_waitread',
+ 'file_waittimeout',
+ 'file_waitwrite',
+ 'file_write',
+ 'find_soap_ops',
+ 'form_param',
+ 'found_count',
+ 'ft',
+ 'ftp_getfile',
+ 'ftp_getlisting',
+ 'ftp_putfile',
+ 'full',
+ 'global',
+ 'global_defined',
+ 'global_remove',
+ 'global_reset',
+ 'globals',
+ 'gt',
+ 'gte',
+ 'handle',
+ 'handle_error',
+ 'header',
+ 'html_comment',
+ 'http_getfile',
+ 'ical_alarm',
+ 'ical_attribute',
+ 'ical_calendar',
+ 'ical_daylight',
+ 'ical_event',
+ 'ical_freebusy',
+ 'ical_item',
+ 'ical_journal',
+ 'ical_parse',
+ 'ical_standard',
+ 'ical_timezone',
+ 'ical_todo',
+ 'if',
+ 'if_empty',
+ 'if_false',
+ 'if_null',
+ 'if_true',
+ 'image',
+ 'image_url',
+ 'img',
+ 'include',
+ 'include_cgi',
+ 'include_currentpath',
+ 'include_once',
+ 'include_raw',
+ 'include_url',
+ 'inline',
+ 'integer',
+ 'iterate',
+ 'iterator',
+ 'java',
+ 'java_bean',
+ 'json_records',
+ 'json_rpccall',
+ 'keycolumn_name',
+ 'keycolumn_value',
+ 'keyfield_name',
+ 'keyfield_value',
+ 'lasso_comment',
+ 'lasso_currentaction',
+ 'lasso_datasourceis',
+ 'lasso_datasourceis4d',
+ 'lasso_datasourceisfilemaker',
+ 'lasso_datasourceisfilemaker7',
+ 'lasso_datasourceisfilemaker9',
+ 'lasso_datasourceisfilemakersa',
+ 'lasso_datasourceisjdbc',
+ 'lasso_datasourceislassomysql',
+ 'lasso_datasourceismysql',
+ 'lasso_datasourceisodbc',
+ 'lasso_datasourceisopenbase',
+ 'lasso_datasourceisoracle',
+ 'lasso_datasourceispostgresql',
+ 'lasso_datasourceisspotlight',
+ 'lasso_datasourceissqlite',
+ 'lasso_datasourceissqlserver',
+ 'lasso_datasourcemodulename',
+ 'lasso_datatype',
+ 'lasso_disableondemand',
+ 'lasso_errorreporting',
+ 'lasso_executiontimelimit',
+ 'lasso_parser',
+ 'lasso_process',
+ 'lasso_sessionid',
+ 'lasso_siteid',
+ 'lasso_siteisrunning',
+ 'lasso_sitename',
+ 'lasso_siterestart',
+ 'lasso_sitestart',
+ 'lasso_sitestop',
+ 'lasso_tagexists',
+ 'lasso_tagmodulename',
+ 'lasso_uniqueid',
+ 'lasso_updatecheck',
+ 'lasso_uptime',
+ 'lasso_version',
+ 'lassoapp_create',
+ 'lassoapp_dump',
+ 'lassoapp_flattendir',
+ 'lassoapp_getappdata',
+ 'lassoapp_link',
+ 'lassoapp_list',
+ 'lassoapp_process',
+ 'lassoapp_unitize',
+ 'layout_name',
+ 'ldap',
+ 'ldap_scope_base',
+ 'ldap_scope_onelevel',
+ 'ldap_scope_subtree',
+ 'ldml',
+ 'ldml_ldml',
+ 'library',
+ 'library_once',
+ 'link',
+ 'link_currentaction',
+ 'link_currentactionparams',
+ 'link_currentactionurl',
+ 'link_currentgroup',
+ 'link_currentgroupparams',
+ 'link_currentgroupurl',
+ 'link_currentrecord',
+ 'link_currentrecordparams',
+ 'link_currentrecordurl',
+ 'link_currentsearch',
+ 'link_currentsearchparams',
+ 'link_currentsearchurl',
+ 'link_detail',
+ 'link_detailparams',
+ 'link_detailurl',
+ 'link_firstgroup',
+ 'link_firstgroupparams',
+ 'link_firstgroupurl',
+ 'link_firstrecord',
+ 'link_firstrecordparams',
+ 'link_firstrecordurl',
+ 'link_lastgroup',
+ 'link_lastgroupparams',
+ 'link_lastgroupurl',
+ 'link_lastrecord',
+ 'link_lastrecordparams',
+ 'link_lastrecordurl',
+ 'link_nextgroup',
+ 'link_nextgroupparams',
+ 'link_nextgroupurl',
+ 'link_nextrecord',
+ 'link_nextrecordparams',
+ 'link_nextrecordurl',
+ 'link_params',
+ 'link_prevgroup',
+ 'link_prevgroupparams',
+ 'link_prevgroupurl',
+ 'link_prevrecord',
+ 'link_prevrecordparams',
+ 'link_prevrecordurl',
+ 'link_setformat',
+ 'link_url',
+ 'list',
+ 'list_additem',
+ 'list_fromlist',
+ 'list_fromstring',
+ 'list_getitem',
+ 'list_itemcount',
+ 'list_iterator',
+ 'list_removeitem',
+ 'list_replaceitem',
+ 'list_reverseiterator',
+ 'list_tostring',
+ 'literal',
+ 'ljax_end',
+ 'ljax_hastarget',
+ 'ljax_include',
+ 'ljax_start',
+ 'ljax_target',
+ 'local',
+ 'local_defined',
+ 'local_remove',
+ 'local_reset',
+ 'locale_format',
+ 'locals',
+ 'log',
+ 'log_always',
+ 'log_critical',
+ 'log_deprecated',
+ 'log_destination_console',
+ 'log_destination_database',
+ 'log_destination_file',
+ 'log_detail',
+ 'log_level_critical',
+ 'log_level_deprecated',
+ 'log_level_detail',
+ 'log_level_sql',
+ 'log_level_warning',
+ 'log_setdestination',
+ 'log_sql',
+ 'log_warning',
+ 'logicalop_value',
+ 'logicaloperator_value',
+ 'loop',
+ 'loop_abort',
+ 'loop_continue',
+ 'loop_count',
+ 'lt',
+ 'lte',
+ 'magick_image',
+ 'map',
+ 'map_iterator',
+ 'match_comparator',
+ 'match_notrange',
+ 'match_notregexp',
+ 'match_range',
+ 'match_regexp',
+ 'math_abs',
+ 'math_acos',
+ 'math_add',
+ 'math_asin',
+ 'math_atan',
+ 'math_atan2',
+ 'math_ceil',
+ 'math_converteuro',
+ 'math_cos',
+ 'math_div',
+ 'math_exp',
+ 'math_floor',
+ 'math_internal_rand',
+ 'math_internal_randmax',
+ 'math_internal_srand',
+ 'math_ln',
+ 'math_log',
+ 'math_log10',
+ 'math_max',
+ 'math_min',
+ 'math_mod',
+ 'math_mult',
+ 'math_pow',
+ 'math_random',
+ 'math_range',
+ 'math_rint',
+ 'math_roman',
+ 'math_round',
+ 'math_sin',
+ 'math_sqrt',
+ 'math_sub',
+ 'math_tan',
+ 'maxrecords_value',
+ 'memory_session_driver',
+ 'mime_type',
+ 'minimal',
+ 'misc__srand',
+ 'misc_randomnumber',
+ 'misc_roman',
+ 'misc_valid_creditcard',
+ 'mysql_session_driver',
+ 'named_param',
+ 'namespace_current',
+ 'namespace_delimiter',
+ 'namespace_exists',
+ 'namespace_file_fullpathexists',
+ 'namespace_global',
+ 'namespace_import',
+ 'namespace_load',
+ 'namespace_page',
+ 'namespace_unload',
+ 'namespace_using',
+ 'neq',
+ 'net',
+ 'net_connectinprogress',
+ 'net_connectok',
+ 'net_typessl',
+ 'net_typessltcp',
+ 'net_typessludp',
+ 'net_typetcp',
+ 'net_typeudp',
+ 'net_waitread',
+ 'net_waittimeout',
+ 'net_waitwrite',
+ 'no_default_output',
+ 'none',
+ 'noprocess',
+ 'not',
+ 'nrx',
+ 'nslookup',
+ 'null',
+ 'object',
+ 'once',
+ 'oneoff',
+ 'op_logicalvalue',
+ 'operator_logicalvalue',
+ 'option',
+ 'or',
+ 'os_process',
+ 'output',
+ 'output_none',
+ 'pair',
+ 'params_up',
+ 'pdf_barcode',
+ 'pdf_color',
+ 'pdf_doc',
+ 'pdf_font',
+ 'pdf_image',
+ 'pdf_list',
+ 'pdf_read',
+ 'pdf_serve',
+ 'pdf_table',
+ 'pdf_text',
+ 'percent',
+ 'portal',
+ 'postcondition',
+ 'precondition',
+ 'prettyprintingnsmap',
+ 'prettyprintingtypemap',
+ 'priorityqueue',
+ 'private',
+ 'proc_convert',
+ 'proc_convertbody',
+ 'proc_convertone',
+ 'proc_extract',
+ 'proc_extractone',
+ 'proc_find',
+ 'proc_first',
+ 'proc_foreach',
+ 'proc_get',
+ 'proc_join',
+ 'proc_lasso',
+ 'proc_last',
+ 'proc_map_entry',
+ 'proc_null',
+ 'proc_regexp',
+ 'proc_xml',
+ 'proc_xslt',
+ 'process',
+ 'protect',
+ 'queue',
+ 'rand',
+ 'randomnumber',
+ 'raw',
+ 'recid_value',
+ 'record_count',
+ 'recordcount',
+ 'recordid_value',
+ 'records',
+ 'records_array',
+ 'records_map',
+ 'redirect_url',
+ 'reference',
+ 'referer',
+ 'referer_url',
+ 'referrer',
+ 'referrer_url',
+ 'regexp',
+ 'repeating',
+ 'repeating_valueitem',
+ 'repeatingvalueitem',
+ 'repetition',
+ 'req_column',
+ 'req_field',
+ 'required_column',
+ 'required_field',
+ 'response_fileexists',
+ 'response_filepath',
+ 'response_localpath',
+ 'response_path',
+ 'response_realm',
+ 'resultset',
+ 'resultset_count',
+ 'return',
+ 'return_value',
+ 'reverseiterator',
+ 'roman',
+ 'row_count',
+ 'rows',
+ 'rows_array',
+ 'run_children',
+ 'rx',
+ 'schema_name',
+ 'scientific',
+ 'search_args',
+ 'search_arguments',
+ 'search_columnitem',
+ 'search_fielditem',
+ 'search_operatoritem',
+ 'search_opitem',
+ 'search_valueitem',
+ 'searchfielditem',
+ 'searchoperatoritem',
+ 'searchopitem',
+ 'searchvalueitem',
+ 'select',
+ 'selected',
+ 'self',
+ 'serialize',
+ 'series',
+ 'server_date',
+ 'server_day',
+ 'server_ip',
+ 'server_name',
+ 'server_port',
+ 'server_push',
+ 'server_siteisrunning',
+ 'server_sitestart',
+ 'server_sitestop',
+ 'server_time',
+ 'session_abort',
+ 'session_addoutputfilter',
+ 'session_addvar',
+ 'session_addvariable',
+ 'session_deleteexpired',
+ 'session_driver',
+ 'session_end',
+ 'session_id',
+ 'session_removevar',
+ 'session_removevariable',
+ 'session_result',
+ 'session_setdriver',
+ 'session_start',
+ 'set',
+ 'set_iterator',
+ 'set_reverseiterator',
+ 'shown_count',
+ 'shown_first',
+ 'shown_last',
+ 'site_atbegin',
+ 'site_id',
+ 'site_name',
+ 'site_restart',
+ 'skiprecords_value',
+ 'sleep',
+ 'soap_convertpartstopairs',
+ 'soap_definetag',
+ 'soap_info',
+ 'soap_lastrequest',
+ 'soap_lastresponse',
+ 'soap_stub',
+ 'sort_args',
+ 'sort_arguments',
+ 'sort_columnitem',
+ 'sort_fielditem',
+ 'sort_orderitem',
+ 'sortcolumnitem',
+ 'sortfielditem',
+ 'sortorderitem',
+ 'sqlite_createdb',
+ 'sqlite_session_driver',
+ 'sqlite_setsleepmillis',
+ 'sqlite_setsleeptries',
+ 'srand',
+ 'stack',
+ 'stock_quote',
+ 'string',
+ 'string_charfromname',
+ 'string_concatenate',
+ 'string_countfields',
+ 'string_endswith',
+ 'string_extract',
+ 'string_findposition',
+ 'string_findregexp',
+ 'string_fordigit',
+ 'string_getfield',
+ 'string_getunicodeversion',
+ 'string_insert',
+ 'string_isalpha',
+ 'string_isalphanumeric',
+ 'string_isdigit',
+ 'string_ishexdigit',
+ 'string_islower',
+ 'string_isnumeric',
+ 'string_ispunctuation',
+ 'string_isspace',
+ 'string_isupper',
+ 'string_length',
+ 'string_lowercase',
+ 'string_remove',
+ 'string_removeleading',
+ 'string_removetrailing',
+ 'string_replace',
+ 'string_replaceregexp',
+ 'string_todecimal',
+ 'string_tointeger',
+ 'string_uppercase',
+ 'string_validcharset',
+ 'table_name',
+ 'table_realname',
+ 'tag',
+ 'tag_name',
+ 'tags',
+ 'tags_find',
+ 'tags_list',
+ 'tcp_close',
+ 'tcp_open',
+ 'tcp_send',
+ 'tcp_tcp_close',
+ 'tcp_tcp_open',
+ 'tcp_tcp_send',
+ 'thread_abort',
+ 'thread_atomic',
+ 'thread_event',
+ 'thread_exists',
+ 'thread_getcurrentid',
+ 'thread_getpriority',
+ 'thread_info',
+ 'thread_list',
+ 'thread_lock',
+ 'thread_pipe',
+ 'thread_priority_default',
+ 'thread_priority_high',
+ 'thread_priority_low',
+ 'thread_rwlock',
+ 'thread_semaphore',
+ 'thread_setpriority',
+ 'token_value',
+ 'total_records',
+ 'treemap',
+ 'treemap_iterator',
+ 'true',
+ 'url_rewrite',
+ 'valid_creditcard',
+ 'valid_date',
+ 'valid_email',
+ 'valid_url',
+ 'value_list',
+ 'value_listitem',
+ 'valuelistitem',
+ 'var',
+ 'var_defined',
+ 'var_remove',
+ 'var_reset',
+ 'var_set',
+ 'variable',
+ 'variable_defined',
+ 'variable_set',
+ 'variables',
+ 'variant_count',
+ 'vars',
+ 'wap_isenabled',
+ 'wap_maxbuttons',
+ 'wap_maxcolumns',
+ 'wap_maxhorzpixels',
+ 'wap_maxrows',
+ 'wap_maxvertpixels',
+ 'while',
+ 'wsdl_extract',
+ 'wsdl_getbinding',
+ 'wsdl_getbindingforoperation',
+ 'wsdl_getbindingoperations',
+ 'wsdl_getmessagenamed',
+ 'wsdl_getmessageparts',
+ 'wsdl_getmessagetriofromporttype',
+ 'wsdl_getopbodystyle',
+ 'wsdl_getopbodyuse',
+ 'wsdl_getoperation',
+ 'wsdl_getoplocation',
+ 'wsdl_getopmessagetypes',
+ 'wsdl_getopsoapaction',
+ 'wsdl_getportaddress',
+ 'wsdl_getportsforservice',
+ 'wsdl_getporttype',
+ 'wsdl_getporttypeoperation',
+ 'wsdl_getservicedocumentation',
+ 'wsdl_getservices',
+ 'wsdl_gettargetnamespace',
+ 'wsdl_issoapoperation',
+ 'wsdl_listoperations',
+ 'wsdl_maketest',
+ 'xml',
+ 'xml_extract',
+ 'xml_rpc',
+ 'xml_rpccall',
+ 'xml_rw',
+ 'xml_serve',
+ 'xml_transform',
+ 'xml_xml',
+ 'xml_xmlstream',
+ 'xmlstream',
+ 'xsd_attribute',
+ 'xsd_blankarraybase',
+ 'xsd_blankbase',
+ 'xsd_buildtype',
+ 'xsd_cache',
+ 'xsd_checkcardinality',
+ 'xsd_continueall',
+ 'xsd_continueannotation',
+ 'xsd_continueany',
+ 'xsd_continueanyattribute',
+ 'xsd_continueattribute',
+ 'xsd_continueattributegroup',
+ 'xsd_continuechoice',
+ 'xsd_continuecomplexcontent',
+ 'xsd_continuecomplextype',
+ 'xsd_continuedocumentation',
+ 'xsd_continueextension',
+ 'xsd_continuegroup',
+ 'xsd_continuekey',
+ 'xsd_continuelist',
+ 'xsd_continuerestriction',
+ 'xsd_continuesequence',
+ 'xsd_continuesimplecontent',
+ 'xsd_continuesimpletype',
+ 'xsd_continueunion',
+ 'xsd_deserialize',
+ 'xsd_fullyqualifyname',
+ 'xsd_generate',
+ 'xsd_generateblankfromtype',
+ 'xsd_generateblanksimpletype',
+ 'xsd_generatetype',
+ 'xsd_getschematype',
+ 'xsd_issimpletype',
+ 'xsd_loadschema',
+ 'xsd_lookupnamespaceuri',
+ 'xsd_lookuptype',
+ 'xsd_processany',
+ 'xsd_processattribute',
+ 'xsd_processattributegroup',
+ 'xsd_processcomplextype',
+ 'xsd_processelement',
+ 'xsd_processgroup',
+ 'xsd_processimport',
+ 'xsd_processinclude',
+ 'xsd_processschema',
+ 'xsd_processsimpletype',
+ 'xsd_ref',
+ 'xsd_type',
+ )
+}
+MEMBERS = {
+ 'Member Methods': (
+ 'abort',
+ 'abs',
+ 'accept_charset',
+ 'accept',
+ 'acceptconnections',
+ 'acceptdeserializedelement',
+ 'acceptnossl',
+ 'acceptpost',
+ 'accesskey',
+ 'acos',
+ 'acosh',
+ 'action',
+ 'actionparams',
+ 'active_tick',
+ 'add',
+ 'addatend',
+ 'addattachment',
+ 'addbarcode',
+ 'addchapter',
+ 'addcheckbox',
+ 'addcolumninfo',
+ 'addcombobox',
+ 'addcomment',
+ 'addcomponent',
+ 'addcomponents',
+ 'addcss',
+ 'adddatabasetable',
+ 'adddatasource',
+ 'adddatasourcedatabase',
+ 'adddatasourcehost',
+ 'adddir',
+ 'adddirpath',
+ 'addendjs',
+ 'addendjstext',
+ 'adderror',
+ 'addfavicon',
+ 'addfile',
+ 'addgroup',
+ 'addheader',
+ 'addhiddenfield',
+ 'addhtmlpart',
+ 'addimage',
+ 'addjavascript',
+ 'addjs',
+ 'addjstext',
+ 'addlist',
+ 'addmathfunctions',
+ 'addmember',
+ 'addoneheaderline',
+ 'addpage',
+ 'addparagraph',
+ 'addpart',
+ 'addpasswordfield',
+ 'addphrase',
+ 'addpostdispatch',
+ 'addpredispatch',
+ 'addradiobutton',
+ 'addradiogroup',
+ 'addresetbutton',
+ 'addrow',
+ 'addsection',
+ 'addselectlist',
+ 'addset',
+ 'addsubmitbutton',
+ 'addsubnode',
+ 'addtable',
+ 'addtask',
+ 'addtext',
+ 'addtextarea',
+ 'addtextfield',
+ 'addtextpart',
+ 'addtobuffer',
+ 'addtrait',
+ 'adduser',
+ 'addusertogroup',
+ 'addwarning',
+ 'addzip',
+ 'allocobject',
+ 'am',
+ 'ampm',
+ 'annotate',
+ 'answer',
+ 'apop',
+ 'append',
+ 'appendarray',
+ 'appendarraybegin',
+ 'appendarrayend',
+ 'appendbool',
+ 'appendbytes',
+ 'appendchar',
+ 'appendchild',
+ 'appendcolon',
+ 'appendcomma',
+ 'appenddata',
+ 'appenddatetime',
+ 'appenddbpointer',
+ 'appenddecimal',
+ 'appenddocument',
+ 'appendimagetolist',
+ 'appendinteger',
+ 'appendnowutc',
+ 'appendnull',
+ 'appendoid',
+ 'appendregex',
+ 'appendreplacement',
+ 'appendstring',
+ 'appendtail',
+ 'appendtime',
+ 'applyheatcolors',
+ 'appmessage',
+ 'appname',
+ 'appprefix',
+ 'appstatus',
+ 'arc',
+ 'archive',
+ 'arguments',
+ 'argumentvalue',
+ 'asarray',
+ 'asarraystring',
+ 'asasync',
+ 'asbytes',
+ 'ascopy',
+ 'ascopydeep',
+ 'asdecimal',
+ 'asgenerator',
+ 'asin',
+ 'asinh',
+ 'asinteger',
+ 'askeyedgenerator',
+ 'aslazystring',
+ 'aslist',
+ 'asraw',
+ 'asstaticarray',
+ 'asstring',
+ 'asstringhex',
+ 'asstringoct',
+ 'asxml',
+ 'atan',
+ 'atan2',
+ 'atanh',
+ 'atend',
+ 'atends',
+ 'atime',
+ 'attributecount',
+ 'attributes',
+ 'attrs',
+ 'auth',
+ 'authenticate',
+ 'authorize',
+ 'autocollectbuffer',
+ 'average',
+ 'back',
+ 'basename',
+ 'basepaths',
+ 'baseuri',
+ 'bcc',
+ 'beginssl',
+ 'beginswith',
+ 'begintls',
+ 'bestcharset',
+ 'bind_blob',
+ 'bind_double',
+ 'bind_int',
+ 'bind_null',
+ 'bind_parameter_index',
+ 'bind_text',
+ 'bind',
+ 'bindcount',
+ 'bindone',
+ 'bindparam',
+ 'bitand',
+ 'bitclear',
+ 'bitflip',
+ 'bitformat',
+ 'bitnot',
+ 'bitor',
+ 'bitset',
+ 'bitshiftleft',
+ 'bitshiftright',
+ 'bittest',
+ 'bitxor',
+ 'blur',
+ 'body',
+ 'bodybytes',
+ 'boundary',
+ 'bptoxml',
+ 'bptypetostr',
+ 'bucketnumber',
+ 'buff',
+ 'buildquery',
+ 'businessdaysbetween',
+ 'by',
+ 'bytes',
+ 'cachedappprefix',
+ 'cachedroot',
+ 'callboolean',
+ 'callbooleanmethod',
+ 'callbytemethod',
+ 'callcharmethod',
+ 'calldoublemethod',
+ 'calledname',
+ 'callfirst',
+ 'callfloat',
+ 'callfloatmethod',
+ 'callint',
+ 'callintmethod',
+ 'calllongmethod',
+ 'callnonvirtualbooleanmethod',
+ 'callnonvirtualbytemethod',
+ 'callnonvirtualcharmethod',
+ 'callnonvirtualdoublemethod',
+ 'callnonvirtualfloatmethod',
+ 'callnonvirtualintmethod',
+ 'callnonvirtuallongmethod',
+ 'callnonvirtualobjectmethod',
+ 'callnonvirtualshortmethod',
+ 'callnonvirtualvoidmethod',
+ 'callobject',
+ 'callobjectmethod',
+ 'callshortmethod',
+ 'callsite_col',
+ 'callsite_file',
+ 'callsite_line',
+ 'callstack',
+ 'callstaticboolean',
+ 'callstaticbooleanmethod',
+ 'callstaticbytemethod',
+ 'callstaticcharmethod',
+ 'callstaticdoublemethod',
+ 'callstaticfloatmethod',
+ 'callstaticint',
+ 'callstaticintmethod',
+ 'callstaticlongmethod',
+ 'callstaticobject',
+ 'callstaticobjectmethod',
+ 'callstaticshortmethod',
+ 'callstaticstring',
+ 'callstaticvoidmethod',
+ 'callstring',
+ 'callvoid',
+ 'callvoidmethod',
+ 'cancel',
+ 'cap',
+ 'capa',
+ 'capabilities',
+ 'capi',
+ 'cbrt',
+ 'cc',
+ 'ceil',
+ 'chardigitvalue',
+ 'charname',
+ 'charset',
+ 'chartype',
+ 'checkdebugging',
+ 'checked',
+ 'checkuser',
+ 'childnodes',
+ 'chk',
+ 'chmod',
+ 'choosecolumntype',
+ 'chown',
+ 'chunked',
+ 'circle',
+ 'class',
+ 'classid',
+ 'clear',
+ 'clonenode',
+ 'close',
+ 'closepath',
+ 'closeprepared',
+ 'closewrite',
+ 'code',
+ 'codebase',
+ 'codetype',
+ 'colmap',
+ 'colorspace',
+ 'column_blob',
+ 'column_count',
+ 'column_decltype',
+ 'column_double',
+ 'column_int64',
+ 'column_name',
+ 'column_text',
+ 'column_type',
+ 'command',
+ 'comments',
+ 'compare',
+ 'comparecodepointorder',
+ 'componentdelimiter',
+ 'components',
+ 'composite',
+ 'compress',
+ 'concat',
+ 'condtoint',
+ 'configureds',
+ 'configuredskeys',
+ 'connect',
+ 'connection',
+ 'connectionhandler',
+ 'connhandler',
+ 'consume_domain',
+ 'consume_label',
+ 'consume_message',
+ 'consume_rdata',
+ 'consume_string',
+ 'contains',
+ 'content_disposition',
+ 'content_transfer_encoding',
+ 'content_type',
+ 'content',
+ 'contentlength',
+ 'contents',
+ 'contenttype',
+ 'continuation',
+ 'continuationpacket',
+ 'continuationpoint',
+ 'continuationstack',
+ 'continue',
+ 'contrast',
+ 'conventionaltop',
+ 'convert',
+ 'cookie',
+ 'cookies',
+ 'cookiesarray',
+ 'cookiesary',
+ 'copyto',
+ 'cos',
+ 'cosh',
+ 'count',
+ 'countkeys',
+ 'country',
+ 'countusersbygroup',
+ 'crc',
+ 'create',
+ 'createattribute',
+ 'createattributens',
+ 'createcdatasection',
+ 'createcomment',
+ 'createdocument',
+ 'createdocumentfragment',
+ 'createdocumenttype',
+ 'createelement',
+ 'createelementns',
+ 'createentityreference',
+ 'createindex',
+ 'createprocessinginstruction',
+ 'createtable',
+ 'createtextnode',
+ 'criteria',
+ 'crop',
+ 'csscontent',
+ 'curl',
+ 'current',
+ 'currentfile',
+ 'curveto',
+ 'd',
+ 'data',
+ 'databasecolumnnames',
+ 'databasecolumns',
+ 'databasemap',
+ 'databasename',
+ 'datasourcecolumnnames',
+ 'datasourcecolumns',
+ 'datasourcemap',
+ 'date',
+ 'day',
+ 'dayofmonth',
+ 'dayofweek',
+ 'dayofweekinmonth',
+ 'dayofyear',
+ 'days',
+ 'daysbetween',
+ 'db',
+ 'dbtablestable',
+ 'debug',
+ 'declare',
+ 'decodebase64',
+ 'decodehex',
+ 'decodehtml',
+ 'decodeqp',
+ 'decodeurl',
+ 'decodexml',
+ 'decompose',
+ 'decomposeassignment',
+ 'defaultcontentrepresentation',
+ 'defer',
+ 'deg2rad',
+ 'dele',
+ 'delete',
+ 'deletedata',
+ 'deleteglobalref',
+ 'deletelocalref',
+ 'delim',
+ 'depth',
+ 'dereferencepointer',
+ 'describe',
+ 'description',
+ 'deserialize',
+ 'detach',
+ 'detectcharset',
+ 'didinclude',
+ 'difference',
+ 'digit',
+ 'dir',
+ 'displaycountry',
+ 'displaylanguage',
+ 'displayname',
+ 'displayscript',
+ 'displayvariant',
+ 'div',
+ 'dns_response',
+ 'do',
+ 'doatbegins',
+ 'doatends',
+ 'doccomment',
+ 'doclose',
+ 'doctype',
+ 'document',
+ 'documentelement',
+ 'documentroot',
+ 'domainbody',
+ 'done',
+ 'dosessions',
+ 'dowithclose',
+ 'dowlocal',
+ 'download',
+ 'drawtext',
+ 'drop',
+ 'dropindex',
+ 'dsdbtable',
+ 'dshoststable',
+ 'dsinfo',
+ 'dst',
+ 'dstable',
+ 'dstoffset',
+ 'dtdid',
+ 'dup',
+ 'dup2',
+ 'each',
+ 'eachbyte',
+ 'eachcharacter',
+ 'eachchild',
+ 'eachcomponent',
+ 'eachdir',
+ 'eachdirpath',
+ 'eachdirpathrecursive',
+ 'eachentry',
+ 'eachfile',
+ 'eachfilename',
+ 'eachfilepath',
+ 'eachfilepathrecursive',
+ 'eachkey',
+ 'eachline',
+ 'eachlinebreak',
+ 'eachmatch',
+ 'eachnode',
+ 'eachpair',
+ 'eachpath',
+ 'eachpathrecursive',
+ 'eachrow',
+ 'eachsub',
+ 'eachword',
+ 'eachwordbreak',
+ 'element',
+ 'eligiblepath',
+ 'eligiblepaths',
+ 'encodebase64',
+ 'encodehex',
+ 'encodehtml',
+ 'encodehtmltoxml',
+ 'encodemd5',
+ 'encodepassword',
+ 'encodeqp',
+ 'encodesql',
+ 'encodesql92',
+ 'encodeurl',
+ 'encodevalue',
+ 'encodexml',
+ 'encoding',
+ 'enctype',
+ 'end',
+ 'endjs',
+ 'endssl',
+ 'endswith',
+ 'endtls',
+ 'enhance',
+ 'ensurestopped',
+ 'entities',
+ 'entry',
+ 'env',
+ 'equals',
+ 'era',
+ 'erf',
+ 'erfc',
+ 'err',
+ 'errcode',
+ 'errmsg',
+ 'error',
+ 'errors',
+ 'errstack',
+ 'escape_member',
+ 'establisherrorstate',
+ 'exceptioncheck',
+ 'exceptionclear',
+ 'exceptiondescribe',
+ 'exceptionoccurred',
+ 'exchange',
+ 'execinits',
+ 'execinstalls',
+ 'execute',
+ 'executelazy',
+ 'executenow',
+ 'exists',
+ 'exit',
+ 'exitcode',
+ 'exp',
+ 'expire',
+ 'expireminutes',
+ 'expiresminutes',
+ 'expm1',
+ 'export16bits',
+ 'export32bits',
+ 'export64bits',
+ 'export8bits',
+ 'exportas',
+ 'exportbytes',
+ 'exportfdf',
+ 'exportpointerbits',
+ 'exportsigned16bits',
+ 'exportsigned32bits',
+ 'exportsigned64bits',
+ 'exportsigned8bits',
+ 'exportstring',
+ 'expose',
+ 'extendedyear',
+ 'extensiondelimiter',
+ 'extensions',
+ 'extract',
+ 'extractfast',
+ 'extractfastone',
+ 'extractimage',
+ 'extractone',
+ 'f',
+ 'fabs',
+ 'fail',
+ 'failnoconnectionhandler',
+ 'family',
+ 'fatalerror',
+ 'fcgireq',
+ 'fchdir',
+ 'fchmod',
+ 'fchown',
+ 'fd',
+ 'features',
+ 'fetchdata',
+ 'fieldnames',
+ 'fieldposition',
+ 'fieldstable',
+ 'fieldtype',
+ 'fieldvalue',
+ 'file',
+ 'filename',
+ 'filenames',
+ 'filequeue',
+ 'fileuploads',
+ 'fileuploadsary',
+ 'filterinputcolumn',
+ 'finalize',
+ 'find',
+ 'findall',
+ 'findandmodify',
+ 'findbucket',
+ 'findcase',
+ 'findclass',
+ 'findcount',
+ 'finddescendant',
+ 'findfirst',
+ 'findinclude',
+ 'findinctx',
+ 'findindex',
+ 'findlast',
+ 'findpattern',
+ 'findposition',
+ 'findsymbols',
+ 'first',
+ 'firstchild',
+ 'firstcomponent',
+ 'firstdayofweek',
+ 'firstnode',
+ 'fixformat',
+ 'flags',
+ 'fliph',
+ 'flipv',
+ 'floor',
+ 'flush',
+ 'foldcase',
+ 'foo',
+ 'for',
+ 'forcedrowid',
+ 'foreach',
+ 'foreachaccept',
+ 'foreachbyte',
+ 'foreachcharacter',
+ 'foreachchild',
+ 'foreachday',
+ 'foreachentry',
+ 'foreachfile',
+ 'foreachfilename',
+ 'foreachkey',
+ 'foreachline',
+ 'foreachlinebreak',
+ 'foreachmatch',
+ 'foreachnode',
+ 'foreachpair',
+ 'foreachpathcomponent',
+ 'foreachrow',
+ 'foreachspool',
+ 'foreachsub',
+ 'foreachwordbreak',
+ 'form',
+ 'format',
+ 'formatas',
+ 'formatcontextelement',
+ 'formatcontextelements',
+ 'formatnumber',
+ 'free',
+ 'frexp',
+ 'from',
+ 'fromname',
+ 'fromport',
+ 'fromreflectedfield',
+ 'fromreflectedmethod',
+ 'front',
+ 'fsync',
+ 'ftpdeletefile',
+ 'ftpgetlisting',
+ 'ftruncate',
+ 'fullpath',
+ 'fx',
+ 'gamma',
+ 'gatewayinterface',
+ 'gen',
+ 'generatechecksum',
+ 'get',
+ 'getabswidth',
+ 'getalignment',
+ 'getappsource',
+ 'getarraylength',
+ 'getattr',
+ 'getattribute',
+ 'getattributenamespace',
+ 'getattributenode',
+ 'getattributenodens',
+ 'getattributens',
+ 'getbarheight',
+ 'getbarmultiplier',
+ 'getbarwidth',
+ 'getbaseline',
+ 'getbold',
+ 'getbooleanarrayelements',
+ 'getbooleanarrayregion',
+ 'getbooleanfield',
+ 'getbordercolor',
+ 'getborderwidth',
+ 'getbytearrayelements',
+ 'getbytearrayregion',
+ 'getbytefield',
+ 'getchararrayelements',
+ 'getchararrayregion',
+ 'getcharfield',
+ 'getclass',
+ 'getcode',
+ 'getcolor',
+ 'getcolumn',
+ 'getcolumncount',
+ 'getcolumns',
+ 'getdatabasebyalias',
+ 'getdatabasebyid',
+ 'getdatabasebyname',
+ 'getdatabasehost',
+ 'getdatabasetable',
+ 'getdatabasetablebyalias',
+ 'getdatabasetablebyid',
+ 'getdatabasetablepart',
+ 'getdatasource',
+ 'getdatasourcedatabase',
+ 'getdatasourcedatabasebyid',
+ 'getdatasourcehost',
+ 'getdatasourceid',
+ 'getdatasourcename',
+ 'getdefaultstorage',
+ 'getdoublearrayelements',
+ 'getdoublearrayregion',
+ 'getdoublefield',
+ 'getelementbyid',
+ 'getelementsbytagname',
+ 'getelementsbytagnamens',
+ 'getencoding',
+ 'getface',
+ 'getfield',
+ 'getfieldid',
+ 'getfile',
+ 'getfloatarrayelements',
+ 'getfloatarrayregion',
+ 'getfloatfield',
+ 'getfont',
+ 'getformat',
+ 'getfullfontname',
+ 'getgroup',
+ 'getgroupid',
+ 'getheader',
+ 'getheaders',
+ 'gethostdatabase',
+ 'gethtmlattr',
+ 'gethtmlattrstring',
+ 'getinclude',
+ 'getintarrayelements',
+ 'getintarrayregion',
+ 'getintfield',
+ 'getisocomment',
+ 'getitalic',
+ 'getlasterror',
+ 'getlcapitype',
+ 'getlibrary',
+ 'getlongarrayelements',
+ 'getlongarrayregion',
+ 'getlongfield',
+ 'getmargins',
+ 'getmethodid',
+ 'getmode',
+ 'getnameditem',
+ 'getnameditemns',
+ 'getnode',
+ 'getnumericvalue',
+ 'getobjectarrayelement',
+ 'getobjectclass',
+ 'getobjectfield',
+ 'getpadding',
+ 'getpagenumber',
+ 'getparts',
+ 'getprefs',
+ 'getpropertyvalue',
+ 'getprowcount',
+ 'getpsfontname',
+ 'getrange',
+ 'getrowcount',
+ 'getset',
+ 'getshortarrayelements',
+ 'getshortarrayregion',
+ 'getshortfield',
+ 'getsize',
+ 'getsortfieldspart',
+ 'getspacing',
+ 'getstaticbooleanfield',
+ 'getstaticbytefield',
+ 'getstaticcharfield',
+ 'getstaticdoublefield',
+ 'getstaticfieldid',
+ 'getstaticfloatfield',
+ 'getstaticintfield',
+ 'getstaticlongfield',
+ 'getstaticmethodid',
+ 'getstaticobjectfield',
+ 'getstaticshortfield',
+ 'getstatus',
+ 'getstringchars',
+ 'getstringlength',
+ 'getstyle',
+ 'getsupportedencodings',
+ 'gettablebyid',
+ 'gettext',
+ 'gettextalignment',
+ 'gettextsize',
+ 'gettrigger',
+ 'gettype',
+ 'getunderline',
+ 'getuniquealiasname',
+ 'getuser',
+ 'getuserbykey',
+ 'getuserid',
+ 'getversion',
+ 'getzipfilebytes',
+ 'givenblock',
+ 'gmt',
+ 'gotconnection',
+ 'gotfileupload',
+ 'groupby',
+ 'groupcolumns',
+ 'groupcount',
+ 'groupjoin',
+ 'handlebreakpointget',
+ 'handlebreakpointlist',
+ 'handlebreakpointremove',
+ 'handlebreakpointset',
+ 'handlebreakpointupdate',
+ 'handlecontextget',
+ 'handlecontextnames',
+ 'handlecontinuation',
+ 'handledefinitionbody',
+ 'handledefinitionhead',
+ 'handledefinitionresource',
+ 'handledevconnection',
+ 'handleevalexpired',
+ 'handlefeatureget',
+ 'handlefeatureset',
+ 'handlelassoappcontent',
+ 'handlelassoappresponse',
+ 'handlenested',
+ 'handlenormalconnection',
+ 'handlepop',
+ 'handleresource',
+ 'handlesource',
+ 'handlestackget',
+ 'handlestderr',
+ 'handlestdin',
+ 'handlestdout',
+ 'handshake',
+ 'hasattribute',
+ 'hasattributens',
+ 'hasattributes',
+ 'hasbinaryproperty',
+ 'haschildnodes',
+ 'hasexpired',
+ 'hasfeature',
+ 'hasfield',
+ 'hash',
+ 'hashtmlattr',
+ 'hasmethod',
+ 'hastable',
+ 'hastrailingcomponent',
+ 'hasvalue',
+ 'head',
+ 'header',
+ 'headerbytes',
+ 'headers',
+ 'headersarray',
+ 'headersmap',
+ 'height',
+ 'histogram',
+ 'home',
+ 'host',
+ 'hostcolumnnames',
+ 'hostcolumnnames2',
+ 'hostcolumns',
+ 'hostcolumns2',
+ 'hostdatasource',
+ 'hostextra',
+ 'hostid',
+ 'hostisdynamic',
+ 'hostmap',
+ 'hostmap2',
+ 'hostname',
+ 'hostpassword',
+ 'hostport',
+ 'hostschema',
+ 'hosttableencoding',
+ 'hosttonet16',
+ 'hosttonet32',
+ 'hosttonet64',
+ 'hostusername',
+ 'hour',
+ 'hourofampm',
+ 'hourofday',
+ 'hoursbetween',
+ 'href',
+ 'hreflang',
+ 'htmlcontent',
+ 'htmlizestacktrace',
+ 'htmlizestacktracelink',
+ 'httpaccept',
+ 'httpacceptencoding',
+ 'httpacceptlanguage',
+ 'httpauthorization',
+ 'httpcachecontrol',
+ 'httpconnection',
+ 'httpcookie',
+ 'httpequiv',
+ 'httphost',
+ 'httpreferer',
+ 'httpreferrer',
+ 'httpuseragent',
+ 'hypot',
+ 'id',
+ 'idealinmemory',
+ 'idle',
+ 'idmap',
+ 'ifempty',
+ 'ifkey',
+ 'ifnotempty',
+ 'ifnotkey',
+ 'ignorecase',
+ 'ilogb',
+ 'imgptr',
+ 'implementation',
+ 'import16bits',
+ 'import32bits',
+ 'import64bits',
+ 'import8bits',
+ 'importas',
+ 'importbytes',
+ 'importfdf',
+ 'importnode',
+ 'importpointer',
+ 'importstring',
+ 'in',
+ 'include',
+ 'includebytes',
+ 'includelibrary',
+ 'includelibraryonce',
+ 'includeonce',
+ 'includes',
+ 'includestack',
+ 'indaylighttime',
+ 'index',
+ 'init',
+ 'initialize',
+ 'initrequest',
+ 'inits',
+ 'inneroncompare',
+ 'input',
+ 'inputcolumns',
+ 'inputtype',
+ 'insert',
+ 'insertback',
+ 'insertbefore',
+ 'insertdata',
+ 'insertfirst',
+ 'insertfrom',
+ 'insertfront',
+ 'insertinternal',
+ 'insertlast',
+ 'insertpage',
+ 'install',
+ 'installs',
+ 'integer',
+ 'internalsubset',
+ 'interrupt',
+ 'intersection',
+ 'inttocond',
+ 'invoke',
+ 'invokeautocollect',
+ 'invokeuntil',
+ 'invokewhile',
+ 'ioctl',
+ 'isa',
+ 'isalive',
+ 'isallof',
+ 'isalnum',
+ 'isalpha',
+ 'isanyof',
+ 'isbase',
+ 'isblank',
+ 'iscntrl',
+ 'isdigit',
+ 'isdir',
'isdirectory',
- 'isempty',
- 'isemptyelement',
- 'isfirststep',
- 'isfullpath',
- 'isgraph',
- 'ishttps',
- 'isidle',
- 'isinstanceof',
- 'islink',
- 'islower',
- 'ismultipart',
- 'isnan',
- 'isnota',
- 'isnotempty',
- 'isnothing',
- 'iso3country',
- 'iso3language',
- 'isopen',
- 'isprint',
- 'ispunct',
- 'issameobject',
- 'isset',
- 'issourcefile',
- 'isspace',
- 'isssl',
- 'issupported',
- 'istitle',
- 'istruetype',
- 'istype',
- 'isualphabetic',
- 'isulowercase',
- 'isupper',
- 'isuuppercase',
- 'isuwhitespace',
- 'isvalid',
- 'iswhitespace',
- 'isxdigit',
- 'isxhr',
- 'item',
- 'j0',
- 'j1',
- 'javascript',
- 'jbarcode',
- 'jcolor',
- 'jfont',
- 'jimage',
- 'jlist',
- 'jn',
- 'jobjectisa',
- 'join',
- 'jread',
- 'jscontent',
- 'jsonfornode',
- 'jsonhtml',
- 'jsonisleaf',
- 'jsonlabel',
- 'jtable',
- 'jtext',
- 'julianday',
- 'kernel',
- 'key',
- 'keycolumns',
- 'keys',
- 'keywords',
- 'kill',
- 'label',
- 'lang',
- 'language',
- 'last_insert_rowid',
- 'last',
- 'lastaccessdate',
- 'lastaccesstime',
- 'lastchild',
- 'lastcomponent',
- 'lasterror',
- 'lastinsertid',
- 'lastnode',
- 'lastpoint',
- 'lasttouched',
- 'lazyvalue',
- 'ldexp',
- 'leaveopen',
- 'left',
- 'length',
- 'lgamma',
- 'line',
- 'linediffers',
- 'linkto',
- 'linktype',
- 'list',
- 'listactivedatasources',
- 'listalldatabases',
- 'listalltables',
- 'listdatabasetables',
- 'listdatasourcedatabases',
- 'listdatasourcehosts',
- 'listdatasources',
- 'listen',
- 'listgroups',
- 'listgroupsbyuser',
- 'listhostdatabases',
- 'listhosts',
- 'listmethods',
- 'listnode',
- 'listusers',
- 'listusersbygroup',
- 'loadcerts',
- 'loaddatasourcehostinfo',
- 'loaddatasourceinfo',
- 'loadlibrary',
- 'localaddress',
- 'localname',
- 'locals',
- 'lock',
- 'log',
- 'log10',
- 'log1p',
- 'logb',
- 'lookupnamespace',
- 'lop',
- 'lowagiefont',
- 'lowercase',
- 'makecolor',
- 'makecolumnlist',
- 'makecolumnmap',
- 'makecookieyumyum',
- 'makefullpath',
- 'makeinheritedcopy',
- 'makenonrelative',
- 'makeurl',
- 'map',
- 'marker',
- 'matches',
- 'matchesstart',
- 'matchposition',
- 'matchstring',
- 'matchtriggers',
- 'max',
- 'maxinmemory',
- 'maxlength',
- 'maxrows',
- 'maxworkers',
- 'maybeslash',
- 'maybevalue',
- 'md5hex',
- 'media',
- 'members',
- 'merge',
- 'meta',
- 'method',
- 'methodname',
- 'millisecond',
- 'millisecondsinday',
- 'mime_boundary',
- 'mime_contenttype',
- 'mime_hdrs',
- 'mime',
- 'mimes',
- 'min',
- 'minute',
- 'minutesbetween',
- 'moddatestr',
- 'mode',
- 'modf',
- 'modificationdate',
- 'modificationtime',
- 'modulate',
- 'monitorenter',
- 'monitorexit',
- 'month',
- 'moveto',
- 'movetoattribute',
- 'movetoattributenamespace',
- 'movetoelement',
- 'movetofirstattribute',
- 'movetonextattribute',
- 'msg',
- 'mtime',
- 'multiple',
- 'n',
- 'name',
- 'named',
- 'namespaceuri',
- 'needinitialization',
- 'net',
- 'nettohost16',
- 'nettohost32',
- 'nettohost64',
- 'new',
- 'newbooleanarray',
- 'newbytearray',
- 'newchararray',
- 'newdoublearray',
- 'newfloatarray',
- 'newglobalref',
- 'newintarray',
- 'newlongarray',
- 'newobject',
- 'newobjectarray',
- 'newshortarray',
- 'newstring',
- 'next',
- 'nextafter',
- 'nextnode',
- 'nextprime',
- 'nextprune',
- 'nextprunedelta',
- 'nextsibling',
- 'nodeforpath',
- 'nodelist',
- 'nodename',
- 'nodetype',
- 'nodevalue',
- 'noop',
- 'normalize',
- 'notationname',
- 'notations',
- 'novaluelists',
- 'numsets',
- 'object',
- 'objects',
- 'objecttype',
- 'onclick',
- 'oncompare',
- 'oncomparestrict',
- 'onconvert',
- 'oncreate',
- 'ondblclick',
- 'onkeydown',
- 'onkeypress',
- 'onkeyup',
- 'onmousedown',
- 'onmousemove',
- 'onmouseout',
- 'onmouseover',
- 'onmouseup',
- 'onreset',
- 'onsubmit',
- 'ontop',
- 'open',
- 'openappend',
- 'openread',
- 'opentruncate',
- 'openwith',
- 'openwrite',
- 'openwriteonly',
- 'orderby',
- 'orderbydescending',
- 'out',
- 'output',
- 'outputencoding',
- 'ownerdocument',
- 'ownerelement',
- 'padleading',
- 'padtrailing',
- 'padzero',
- 'pagecount',
- 'pagerotation',
- 'pagesize',
- 'param',
- 'paramdescs',
- 'params',
- 'parent',
- 'parentdir',
- 'parentnode',
- 'parse_body',
- 'parse_boundary',
- 'parse_charset',
- 'parse_content_disposition',
- 'parse_content_transfer_encoding',
- 'parse_content_type',
- 'parse_hdrs',
- 'parse_mode',
- 'parse_msg',
- 'parse_parts',
- 'parse_rawhdrs',
- 'parse',
- 'parseas',
- 'parsedocument',
- 'parsenumber',
- 'parseoneheaderline',
- 'pass',
- 'path',
- 'pathinfo',
- 'pathtouri',
- 'pathtranslated',
- 'pause',
- 'payload',
- 'pdifference',
- 'perform',
- 'performonce',
- 'perms',
- 'pid',
- 'pixel',
- 'pm',
- 'polldbg',
- 'pollide',
- 'pop_capa',
- 'pop_cmd',
- 'pop_debug',
- 'pop_err',
- 'pop_get',
- 'pop_ids',
- 'pop_index',
- 'pop_log',
- 'pop_mode',
- 'pop_net',
- 'pop_res',
- 'pop_server',
- 'pop_timeout',
- 'pop_token',
- 'pop',
- 'popctx',
- 'popinclude',
- 'populate',
- 'port',
- 'position',
- 'postdispatch',
- 'postparam',
- 'postparams',
- 'postparamsary',
- 'poststring',
- 'pow',
- 'predispatch',
- 'prefix',
- 'preflight',
- 'prepare',
- 'prepared',
- 'pretty',
- 'prev',
- 'previoussibling',
- 'printsimplemsg',
- 'private_compare',
- 'private_find',
- 'private_findlast',
- 'private_merge',
- 'private_rebalanceforinsert',
- 'private_rebalanceforremove',
- 'private_replaceall',
- 'private_replacefirst',
- 'private_rotateleft',
- 'private_rotateright',
- 'private_setrange',
- 'private_split',
- 'probemimetype',
- 'provides',
- 'proxying',
- 'prune',
- 'publicid',
- 'pullhttpheader',
- 'pullmimepost',
- 'pulloneheaderline',
- 'pullpost',
- 'pullrawpost',
- 'pullrawpostchunks',
- 'pullrequest',
- 'pullrequestline',
- 'push',
- 'pushctx',
- 'pushinclude',
- 'qdarray',
- 'qdcount',
- 'queryparam',
- 'queryparams',
- 'queryparamsary',
- 'querystring',
- 'queue_maintenance',
- 'queue_messages',
- 'queue_status',
- 'queue',
- 'quit',
- 'r',
- 'raw',
- 'rawcontent',
- 'rawdiff',
- 'rawheader',
- 'rawheaders',
- 'rawinvokable',
- 'read',
- 'readattributevalue',
- 'readbytes',
- 'readbytesfully',
- 'readdestinations',
- 'readerror',
- 'readidobjects',
- 'readline',
- 'readmessage',
- 'readnumber',
- 'readobject',
- 'readobjecttcp',
- 'readpacket',
- 'readsomebytes',
- 'readstring',
- 'ready',
- 'realdoc',
- 'realpath',
- 'receivefd',
- 'recipients',
- 'recover',
- 'rect',
- 'rectype',
- 'red',
- 'redirectto',
- 'referrals',
- 'refid',
- 'refobj',
- 'refresh',
- 'rel',
- 'remainder',
- 'remoteaddr',
- 'remoteaddress',
- 'remoteport',
- 'remove',
- 'removeall',
- 'removeattribute',
- 'removeattributenode',
- 'removeattributens',
- 'removeback',
- 'removechild',
- 'removedatabasetable',
- 'removedatasource',
- 'removedatasourcedatabase',
- 'removedatasourcehost',
- 'removefield',
- 'removefirst',
- 'removefront',
- 'removegroup',
- 'removelast',
- 'removeleading',
- 'removenameditem',
- 'removenameditemns',
- 'removenode',
- 'removesubnode',
- 'removetrailing',
- 'removeuser',
- 'removeuserfromallgroups',
- 'removeuserfromgroup',
- 'rename',
- 'renderbytes',
- 'renderdocumentbytes',
- 'renderstring',
- 'replace',
- 'replaceall',
- 'replacechild',
- 'replacedata',
- 'replacefirst',
- 'replaceheader',
- 'replacepattern',
- 'representnode',
- 'representnoderesult',
- 'reqid',
- 'requestid',
- 'requestmethod',
- 'requestparams',
- 'requesturi',
- 'requires',
- 'reserve',
- 'reset',
- 'resize',
- 'resolutionh',
- 'resolutionv',
- 'resolvelinks',
- 'resourcedata',
- 'resourceinvokable',
- 'resourcename',
- 'resources',
- 'respond',
- 'restart',
- 'restname',
- 'result',
- 'results',
- 'resume',
- 'retr',
- 'retrieve',
- 'returncolumns',
- 'returntype',
- 'rev',
- 'reverse',
- 'rewind',
- 'right',
- 'rint',
- 'roll',
- 'root',
- 'rootmap',
- 'rotate',
- 'route',
- 'rowsfound',
- 'rset',
- 'rule',
- 'rules',
- 'run',
- 'running',
- 'runonce',
- 's',
- 'sa',
- 'safeexport8bits',
- 'sameas',
- 'save',
- 'savedata',
- 'scalb',
- 'scale',
- 'scanfordatasource',
- 'scantasks',
- 'scanworkers',
- 'schemaname',
- 'scheme',
- 'script',
- 'scriptextensions',
- 'scriptfilename',
- 'scriptname',
- 'scripttype',
- 'scripturi',
- 'scripturl',
- 'scrubkeywords',
- 'search',
- 'searchinbucket',
- 'searchurl',
- 'second',
- 'secondsbetween',
- 'seek',
- 'select',
- 'selected',
- 'selectmany',
- 'self',
- 'send',
- 'sendchunk',
- 'sendfd',
- 'sendfile',
- 'sendpacket',
- 'sendresponse',
- 'separator',
- 'serializationelements',
- 'serialize',
- 'serveraddr',
- 'serveradmin',
- 'servername',
- 'serverport',
- 'serverprotocol',
- 'serversignature',
- 'serversoftware',
- 'sessionsdump',
- 'sessionsmap',
- 'set',
- 'setalignment',
- 'setattr',
- 'setattribute',
- 'setattributenode',
- 'setattributenodens',
- 'setattributens',
- 'setbarheight',
- 'setbarmultiplier',
- 'setbarwidth',
- 'setbaseline',
- 'setbold',
- 'setbooleanarrayregion',
- 'setbooleanfield',
- 'setbordercolor',
- 'setborderwidth',
- 'setbytearrayregion',
- 'setbytefield',
- 'setchararrayregion',
- 'setcharfield',
- 'setcode',
- 'setcolor',
- 'setcolorspace',
- 'setcookie',
- 'setcwd',
- 'setdefaultstorage',
- 'setdestination',
- 'setdoublearrayregion',
- 'setdoublefield',
- 'setencoding',
- 'setface',
- 'setfieldvalue',
- 'setfindpattern',
- 'setfloatarrayregion',
- 'setfloatfield',
- 'setfont',
- 'setformat',
- 'setgeneratechecksum',
- 'setheaders',
- 'sethtmlattr',
- 'setignorecase',
- 'setinput',
- 'setintarrayregion',
- 'setintfield',
- 'setitalic',
- 'setlinewidth',
- 'setlongarrayregion',
- 'setlongfield',
- 'setmarker',
- 'setmaxfilesize',
- 'setmode',
- 'setname',
- 'setnameditem',
- 'setnameditemns',
- 'setobjectarrayelement',
- 'setobjectfield',
- 'setpadding',
- 'setpagenumber',
- 'setpagerange',
- 'setposition',
- 'setrange',
- 'setreplacepattern',
- 'setshortarrayregion',
- 'setshortfield',
- 'setshowchecksum',
- 'setsize',
- 'setspacing',
- 'setstaticbooleanfield',
- 'setstaticbytefield',
- 'setstaticcharfield',
- 'setstaticdoublefield',
- 'setstaticfloatfield',
- 'setstaticintfield',
- 'setstaticlongfield',
- 'setstaticobjectfield',
- 'setstaticshortfield',
- 'setstatus',
- 'settextalignment',
- 'settextsize',
- 'settimezone',
- 'settrait',
- 'setunderline',
- 'sharpen',
- 'shouldabort',
- 'shouldclose',
- 'showchecksum',
- 'showcode39startstop',
- 'showeanguardbars',
- 'shutdownrd',
- 'shutdownrdwr',
- 'shutdownwr',
- 'sin',
- 'sinh',
- 'size',
- 'skip',
- 'skiprows',
- 'sort',
- 'sortcolumns',
- 'source',
- 'sourcecolumn',
- 'sourcefile',
- 'sourceline',
- 'specified',
- 'split',
- 'splitconnection',
- 'splitdebuggingthread',
- 'splitextension',
- 'splittext',
- 'splitthread',
- 'splittoprivatedev',
- 'splituppath',
- 'sql',
- 'sqlite3',
- 'sqrt',
- 'src',
- 'srcpath',
- 'sslerrfail',
- 'stack',
- 'standby',
- 'start',
- 'startone',
- 'startup',
- 'stat',
- 'statement',
- 'statementonly',
- 'stats',
- 'status',
- 'statuscode',
- 'statusmsg',
- 'stdin',
- 'step',
- 'stls',
- 'stop',
- 'stoprunning',
- 'storedata',
- 'stripfirstcomponent',
- 'striplastcomponent',
- 'style',
- 'styletype',
- 'sub',
- 'subject',
- 'subnode',
- 'subnodes',
- 'substringdata',
- 'subtract',
- 'subtraits',
- 'sum',
- 'supportscontentrepresentation',
- 'swapbytes',
- 'systemid',
- 't',
- 'tabindex',
- 'table',
- 'tablecolumnnames',
- 'tablecolumns',
- 'tablehascolumn',
- 'tableizestacktrace',
- 'tableizestacktracelink',
- 'tablemap',
- 'tablename',
- 'tables',
- 'tabs',
- 'tabstr',
- 'tag',
- 'tagname',
- 'take',
- 'tan',
- 'tanh',
- 'target',
- 'tasks',
- 'tb',
- 'tell',
- 'testexitcode',
- 'testlock',
- 'textwidth',
- 'thenby',
- 'thenbydescending',
- 'threadreaddesc',
- 'throw',
- 'thrownew',
- 'time',
- 'timezone',
- 'title',
- 'titlecase',
- 'to',
- 'token',
- 'tolower',
- 'top',
- 'toreflectedfield',
- 'toreflectedmethod',
- 'total_changes',
- 'totitle',
- 'touch',
- 'toupper',
- 'toxmlstring',
- 'trace',
- 'trackingid',
- 'trait',
- 'transform',
- 'trigger',
- 'trim',
- 'trunk',
- 'tryfinderrorfile',
- 'trylock',
- 'tryreadobject',
- 'type',
- 'typename',
- 'uidl',
- 'uncompress',
- 'unescape',
- 'union',
- 'uniqueid',
- 'unlock',
- 'unspool',
- 'up',
- 'update',
- 'updategroup',
- 'upload',
- 'uppercase',
- 'url',
- 'used',
- 'usemap',
- 'user',
- 'usercolumns',
- 'valid',
- 'validate',
- 'validatesessionstable',
- 'value',
- 'values',
- 'valuetype',
- 'variant',
- 'version',
- 'wait',
- 'waitforcompletion',
- 'warnings',
- 'week',
- 'weekofmonth',
- 'weekofyear',
- 'where',
- 'width',
- 'workers',
- 'workinginputcolumns',
- 'workingkeycolumns',
- 'workingkeyfield_name',
- 'workingreturncolumns',
- 'workingsortcolumns',
- 'write',
- 'writebodybytes',
- 'writebytes',
- 'writeheader',
- 'writeheaderbytes',
- 'writeheaderline',
- 'writeid',
- 'writemessage',
- 'writeobject',
- 'writeobjecttcp',
- 'writestring',
- 'wroteheaders',
- 'xhtml',
- 'xmllang',
- 'y0',
- 'y1',
- 'year',
- 'yearwoy',
- 'yn',
- 'z',
- 'zip',
- 'zipfile',
- 'zipfilename',
- 'zipname',
- 'zips',
- 'zoneoffset',
- ),
- 'Lasso 8 Member Tags': (
- 'accept',
- 'add',
- 'addattachment',
- 'addattribute',
- 'addbarcode',
- 'addchapter',
- 'addcheckbox',
- 'addchild',
- 'addcombobox',
- 'addcomment',
- 'addcontent',
- 'addhiddenfield',
- 'addhtmlpart',
- 'addimage',
- 'addjavascript',
- 'addlist',
- 'addnamespace',
- 'addnextsibling',
- 'addpage',
- 'addparagraph',
- 'addparenttype',
- 'addpart',
- 'addpasswordfield',
- 'addphrase',
- 'addprevsibling',
- 'addradiobutton',
- 'addradiogroup',
- 'addresetbutton',
- 'addsection',
- 'addselectlist',
- 'addsibling',
- 'addsubmitbutton',
- 'addtable',
- 'addtext',
- 'addtextarea',
- 'addtextfield',
- 'addtextpart',
- 'alarms',
- 'annotate',
- 'answer',
- 'append',
- 'appendreplacement',
- 'appendtail',
- 'arc',
- 'asasync',
- 'astype',
- 'atbegin',
- 'atbottom',
- 'atend',
- 'atfarleft',
- 'atfarright',
- 'attop',
- 'attributecount',
- 'attributes',
- 'authenticate',
- 'authorize',
- 'backward',
- 'baseuri',
- 'bcc',
- 'beanproperties',
- 'beginswith',
- 'bind',
- 'bitand',
- 'bitclear',
- 'bitflip',
- 'bitformat',
- 'bitnot',
- 'bitor',
- 'bitset',
- 'bitshiftleft',
- 'bitshiftright',
- 'bittest',
- 'bitxor',
- 'blur',
- 'body',
- 'boundary',
- 'bytes',
- 'call',
- 'cancel',
- 'capabilities',
- 'cc',
- 'chardigitvalue',
- 'charname',
- 'charset',
- 'chartype',
- 'children',
- 'circle',
- 'close',
- 'closepath',
- 'closewrite',
- 'code',
- 'colorspace',
- 'command',
- 'comments',
- 'compare',
- 'comparecodepointorder',
- 'compile',
- 'composite',
- 'connect',
- 'contains',
- 'content_disposition',
- 'content_transfer_encoding',
- 'content_type',
- 'contents',
- 'contrast',
- 'convert',
- 'crop',
- 'curveto',
- 'data',
- 'date',
- 'day',
- 'daylights',
- 'dayofweek',
- 'dayofyear',
- 'decrement',
- 'delete',
- 'depth',
- 'describe',
- 'description',
- 'deserialize',
- 'detach',
- 'detachreference',
- 'difference',
- 'digit',
- 'document',
- 'down',
- 'drawtext',
- 'dst',
- 'dump',
- 'endswith',
- 'enhance',
- 'equals',
- 'errors',
- 'eval',
- 'events',
- 'execute',
- 'export16bits',
- 'export32bits',
- 'export64bits',
- 'export8bits',
- 'exportfdf',
- 'exportstring',
- 'extract',
- 'extractone',
- 'fieldnames',
- 'fieldtype',
- 'fieldvalue',
- 'file',
- 'find',
- 'findindex',
- 'findnamespace',
- 'findnamespacebyhref',
- 'findpattern',
- 'findposition',
- 'first',
- 'firstchild',
- 'fliph',
- 'flipv',
- 'flush',
- 'foldcase',
- 'foreach',
- 'format',
- 'forward',
- 'freebusies',
- 'freezetype',
- 'freezevalue',
- 'from',
- 'fulltype',
- 'generatechecksum',
- 'get',
- 'getabswidth',
- 'getalignment',
- 'getattribute',
- 'getattributenamespace',
- 'getbarheight',
- 'getbarmultiplier',
- 'getbarwidth',
- 'getbaseline',
- 'getbordercolor',
- 'getborderwidth',
- 'getcode',
- 'getcolor',
- 'getcolumncount',
- 'getencoding',
- 'getface',
- 'getfont',
- 'getformat',
- 'getfullfontname',
- 'getheaders',
- 'getmargins',
- 'getmethod',
- 'getnumericvalue',
- 'getpadding',
- 'getpagenumber',
- 'getparams',
- 'getproperty',
- 'getpsfontname',
- 'getrange',
- 'getrowcount',
- 'getsize',
- 'getspacing',
- 'getsupportedencodings',
- 'gettextalignment',
- 'gettextsize',
- 'gettype',
- 'gmt',
- 'groupcount',
- 'hasattribute',
- 'haschildren',
- 'hasvalue',
- 'header',
- 'headers',
- 'height',
- 'histogram',
- 'hosttonet16',
- 'hosttonet32',
- 'hour',
- 'id',
- 'ignorecase',
- 'import16bits',
- 'import32bits',
- 'import64bits',
- 'import8bits',
- 'importfdf',
- 'importstring',
- 'increment',
- 'input',
- 'insert',
- 'insertatcurrent',
- 'insertfirst',
- 'insertfrom',
- 'insertlast',
- 'insertpage',
- 'integer',
- 'intersection',
- 'invoke',
- 'isa',
- 'isalnum',
- 'isalpha',
- 'isbase',
- 'iscntrl',
- 'isdigit',
- 'isemptyelement',
- 'islower',
- 'isopen',
- 'isprint',
- 'isspace',
- 'istitle',
- 'istruetype',
- 'isualphabetic',
- 'isulowercase',
- 'isupper',
- 'isuuppercase',
- 'isuwhitespace',
- 'iswhitespace',
- 'iterator',
- 'javascript',
- 'join',
- 'journals',
- 'key',
- 'keys',
- 'last',
- 'lastchild',
- 'lasterror',
- 'left',
- 'length',
- 'line',
- 'listen',
- 'localaddress',
- 'localname',
- 'lock',
- 'lookupnamespace',
- 'lowercase',
- 'marker',
- 'matches',
- 'matchesstart',
- 'matchposition',
- 'matchstring',
- 'merge',
- 'millisecond',
- 'minute',
- 'mode',
- 'modulate',
- 'month',
- 'moveto',
- 'movetoattributenamespace',
- 'movetoelement',
- 'movetofirstattribute',
- 'movetonextattribute',
- 'name',
- 'namespaces',
- 'namespaceuri',
- 'nettohost16',
- 'nettohost32',
- 'newchild',
- 'next',
- 'nextsibling',
- 'nodetype',
- 'open',
- 'output',
- 'padleading',
- 'padtrailing',
- 'pagecount',
- 'pagesize',
- 'paraminfo',
- 'params',
- 'parent',
- 'path',
- 'pixel',
- 'position',
- 'prefix',
- 'previoussibling',
- 'properties',
- 'rawheaders',
- 'read',
- 'readattributevalue',
- 'readerror',
- 'readfrom',
- 'readline',
- 'readlock',
- 'readstring',
- 'readunlock',
- 'recipients',
- 'rect',
- 'refcount',
- 'referrals',
- 'remoteaddress',
- 'remove',
- 'removeall',
- 'removeattribute',
- 'removechild',
- 'removecurrent',
- 'removefirst',
- 'removelast',
- 'removeleading',
- 'removenamespace',
- 'removetrailing',
- 'render',
- 'replace',
- 'replaceall',
- 'replacefirst',
- 'replacepattern',
- 'replacewith',
- 'reserve',
- 'reset',
- 'resolutionh',
- 'resolutionv',
- 'response',
- 'results',
- 'retrieve',
- 'returntype',
- 'reverse',
- 'reverseiterator',
- 'right',
- 'rotate',
- 'run',
- 'save',
- 'scale',
- 'search',
- 'second',
- 'send',
- 'serialize',
- 'set',
- 'setalignment',
- 'setbarheight',
- 'setbarmultiplier',
- 'setbarwidth',
- 'setbaseline',
- 'setblocking',
- 'setbordercolor',
- 'setborderwidth',
- 'setbytes',
- 'setcode',
- 'setcolor',
- 'setcolorspace',
- 'setdatatype',
- 'setencoding',
- 'setface',
- 'setfieldvalue',
- 'setfont',
- 'setformat',
- 'setgeneratechecksum',
- 'setheight',
- 'setlassodata',
- 'setlinewidth',
- 'setmarker',
- 'setmode',
- 'setname',
- 'setpadding',
- 'setpagenumber',
- 'setpagerange',
- 'setposition',
- 'setproperty',
- 'setrange',
- 'setshowchecksum',
- 'setsize',
- 'setspacing',
- 'settemplate',
- 'settemplatestr',
- 'settextalignment',
- 'settextdata',
- 'settextsize',
- 'settype',
- 'setunderline',
- 'setwidth',
- 'setxmldata',
- 'sharpen',
- 'showchecksum',
- 'showcode39startstop',
- 'showeanguardbars',
- 'signal',
- 'signalall',
- 'size',
- 'smooth',
- 'sort',
- 'sortwith',
- 'split',
- 'standards',
- 'steal',
- 'subject',
- 'substring',
- 'subtract',
- 'swapbytes',
- 'textwidth',
- 'time',
- 'timezones',
- 'titlecase',
- 'to',
- 'todos',
- 'tolower',
- 'totitle',
- 'toupper',
- 'transform',
- 'trim',
- 'type',
- 'unescape',
- 'union',
- 'uniqueid',
- 'unlock',
- 'unserialize',
- 'up',
- 'uppercase',
- 'value',
- 'values',
- 'valuetype',
- 'wait',
- 'waskeyword',
- 'week',
- 'width',
- 'write',
- 'writelock',
- 'writeto',
- 'writeunlock',
- 'xmllang',
- 'xmlschematype',
- 'year',
- )
-}
+ 'isempty',
+ 'isemptyelement',
+ 'isfirststep',
+ 'isfullpath',
+ 'isgraph',
+ 'ishttps',
+ 'isidle',
+ 'isinstanceof',
+ 'islink',
+ 'islower',
+ 'ismultipart',
+ 'isnan',
+ 'isnota',
+ 'isnotempty',
+ 'isnothing',
+ 'iso3country',
+ 'iso3language',
+ 'isopen',
+ 'isprint',
+ 'ispunct',
+ 'issameobject',
+ 'isset',
+ 'issourcefile',
+ 'isspace',
+ 'isssl',
+ 'issupported',
+ 'istitle',
+ 'istruetype',
+ 'istype',
+ 'isualphabetic',
+ 'isulowercase',
+ 'isupper',
+ 'isuuppercase',
+ 'isuwhitespace',
+ 'isvalid',
+ 'iswhitespace',
+ 'isxdigit',
+ 'isxhr',
+ 'item',
+ 'j0',
+ 'j1',
+ 'javascript',
+ 'jbarcode',
+ 'jcolor',
+ 'jfont',
+ 'jimage',
+ 'jlist',
+ 'jn',
+ 'jobjectisa',
+ 'join',
+ 'jread',
+ 'jscontent',
+ 'jsonfornode',
+ 'jsonhtml',
+ 'jsonisleaf',
+ 'jsonlabel',
+ 'jtable',
+ 'jtext',
+ 'julianday',
+ 'kernel',
+ 'key',
+ 'keycolumns',
+ 'keys',
+ 'keywords',
+ 'kill',
+ 'label',
+ 'lang',
+ 'language',
+ 'last_insert_rowid',
+ 'last',
+ 'lastaccessdate',
+ 'lastaccesstime',
+ 'lastchild',
+ 'lastcomponent',
+ 'lasterror',
+ 'lastinsertid',
+ 'lastnode',
+ 'lastpoint',
+ 'lasttouched',
+ 'lazyvalue',
+ 'ldexp',
+ 'leaveopen',
+ 'left',
+ 'length',
+ 'lgamma',
+ 'line',
+ 'linediffers',
+ 'linkto',
+ 'linktype',
+ 'list',
+ 'listactivedatasources',
+ 'listalldatabases',
+ 'listalltables',
+ 'listdatabasetables',
+ 'listdatasourcedatabases',
+ 'listdatasourcehosts',
+ 'listdatasources',
+ 'listen',
+ 'listgroups',
+ 'listgroupsbyuser',
+ 'listhostdatabases',
+ 'listhosts',
+ 'listmethods',
+ 'listnode',
+ 'listusers',
+ 'listusersbygroup',
+ 'loadcerts',
+ 'loaddatasourcehostinfo',
+ 'loaddatasourceinfo',
+ 'loadlibrary',
+ 'localaddress',
+ 'localname',
+ 'locals',
+ 'lock',
+ 'log',
+ 'log10',
+ 'log1p',
+ 'logb',
+ 'lookupnamespace',
+ 'lop',
+ 'lowagiefont',
+ 'lowercase',
+ 'makecolor',
+ 'makecolumnlist',
+ 'makecolumnmap',
+ 'makecookieyumyum',
+ 'makefullpath',
+ 'makeinheritedcopy',
+ 'makenonrelative',
+ 'makeurl',
+ 'map',
+ 'marker',
+ 'matches',
+ 'matchesstart',
+ 'matchposition',
+ 'matchstring',
+ 'matchtriggers',
+ 'max',
+ 'maxinmemory',
+ 'maxlength',
+ 'maxrows',
+ 'maxworkers',
+ 'maybeslash',
+ 'maybevalue',
+ 'md5hex',
+ 'media',
+ 'members',
+ 'merge',
+ 'meta',
+ 'method',
+ 'methodname',
+ 'millisecond',
+ 'millisecondsinday',
+ 'mime_boundary',
+ 'mime_contenttype',
+ 'mime_hdrs',
+ 'mime',
+ 'mimes',
+ 'min',
+ 'minute',
+ 'minutesbetween',
+ 'moddatestr',
+ 'mode',
+ 'modf',
+ 'modificationdate',
+ 'modificationtime',
+ 'modulate',
+ 'monitorenter',
+ 'monitorexit',
+ 'month',
+ 'moveto',
+ 'movetoattribute',
+ 'movetoattributenamespace',
+ 'movetoelement',
+ 'movetofirstattribute',
+ 'movetonextattribute',
+ 'msg',
+ 'mtime',
+ 'multiple',
+ 'n',
+ 'name',
+ 'named',
+ 'namespaceuri',
+ 'needinitialization',
+ 'net',
+ 'nettohost16',
+ 'nettohost32',
+ 'nettohost64',
+ 'new',
+ 'newbooleanarray',
+ 'newbytearray',
+ 'newchararray',
+ 'newdoublearray',
+ 'newfloatarray',
+ 'newglobalref',
+ 'newintarray',
+ 'newlongarray',
+ 'newobject',
+ 'newobjectarray',
+ 'newshortarray',
+ 'newstring',
+ 'next',
+ 'nextafter',
+ 'nextnode',
+ 'nextprime',
+ 'nextprune',
+ 'nextprunedelta',
+ 'nextsibling',
+ 'nodeforpath',
+ 'nodelist',
+ 'nodename',
+ 'nodetype',
+ 'nodevalue',
+ 'noop',
+ 'normalize',
+ 'notationname',
+ 'notations',
+ 'novaluelists',
+ 'numsets',
+ 'object',
+ 'objects',
+ 'objecttype',
+ 'onclick',
+ 'oncompare',
+ 'oncomparestrict',
+ 'onconvert',
+ 'oncreate',
+ 'ondblclick',
+ 'onkeydown',
+ 'onkeypress',
+ 'onkeyup',
+ 'onmousedown',
+ 'onmousemove',
+ 'onmouseout',
+ 'onmouseover',
+ 'onmouseup',
+ 'onreset',
+ 'onsubmit',
+ 'ontop',
+ 'open',
+ 'openappend',
+ 'openread',
+ 'opentruncate',
+ 'openwith',
+ 'openwrite',
+ 'openwriteonly',
+ 'orderby',
+ 'orderbydescending',
+ 'out',
+ 'output',
+ 'outputencoding',
+ 'ownerdocument',
+ 'ownerelement',
+ 'padleading',
+ 'padtrailing',
+ 'padzero',
+ 'pagecount',
+ 'pagerotation',
+ 'pagesize',
+ 'param',
+ 'paramdescs',
+ 'params',
+ 'parent',
+ 'parentdir',
+ 'parentnode',
+ 'parse_body',
+ 'parse_boundary',
+ 'parse_charset',
+ 'parse_content_disposition',
+ 'parse_content_transfer_encoding',
+ 'parse_content_type',
+ 'parse_hdrs',
+ 'parse_mode',
+ 'parse_msg',
+ 'parse_parts',
+ 'parse_rawhdrs',
+ 'parse',
+ 'parseas',
+ 'parsedocument',
+ 'parsenumber',
+ 'parseoneheaderline',
+ 'pass',
+ 'path',
+ 'pathinfo',
+ 'pathtouri',
+ 'pathtranslated',
+ 'pause',
+ 'payload',
+ 'pdifference',
+ 'perform',
+ 'performonce',
+ 'perms',
+ 'pid',
+ 'pixel',
+ 'pm',
+ 'polldbg',
+ 'pollide',
+ 'pop_capa',
+ 'pop_cmd',
+ 'pop_debug',
+ 'pop_err',
+ 'pop_get',
+ 'pop_ids',
+ 'pop_index',
+ 'pop_log',
+ 'pop_mode',
+ 'pop_net',
+ 'pop_res',
+ 'pop_server',
+ 'pop_timeout',
+ 'pop_token',
+ 'pop',
+ 'popctx',
+ 'popinclude',
+ 'populate',
+ 'port',
+ 'position',
+ 'postdispatch',
+ 'postparam',
+ 'postparams',
+ 'postparamsary',
+ 'poststring',
+ 'pow',
+ 'predispatch',
+ 'prefix',
+ 'preflight',
+ 'prepare',
+ 'prepared',
+ 'pretty',
+ 'prev',
+ 'previoussibling',
+ 'printsimplemsg',
+ 'private_compare',
+ 'private_find',
+ 'private_findlast',
+ 'private_merge',
+ 'private_rebalanceforinsert',
+ 'private_rebalanceforremove',
+ 'private_replaceall',
+ 'private_replacefirst',
+ 'private_rotateleft',
+ 'private_rotateright',
+ 'private_setrange',
+ 'private_split',
+ 'probemimetype',
+ 'provides',
+ 'proxying',
+ 'prune',
+ 'publicid',
+ 'pullhttpheader',
+ 'pullmimepost',
+ 'pulloneheaderline',
+ 'pullpost',
+ 'pullrawpost',
+ 'pullrawpostchunks',
+ 'pullrequest',
+ 'pullrequestline',
+ 'push',
+ 'pushctx',
+ 'pushinclude',
+ 'qdarray',
+ 'qdcount',
+ 'queryparam',
+ 'queryparams',
+ 'queryparamsary',
+ 'querystring',
+ 'queue_maintenance',
+ 'queue_messages',
+ 'queue_status',
+ 'queue',
+ 'quit',
+ 'r',
+ 'raw',
+ 'rawcontent',
+ 'rawdiff',
+ 'rawheader',
+ 'rawheaders',
+ 'rawinvokable',
+ 'read',
+ 'readattributevalue',
+ 'readbytes',
+ 'readbytesfully',
+ 'readdestinations',
+ 'readerror',
+ 'readidobjects',
+ 'readline',
+ 'readmessage',
+ 'readnumber',
+ 'readobject',
+ 'readobjecttcp',
+ 'readpacket',
+ 'readsomebytes',
+ 'readstring',
+ 'ready',
+ 'realdoc',
+ 'realpath',
+ 'receivefd',
+ 'recipients',
+ 'recover',
+ 'rect',
+ 'rectype',
+ 'red',
+ 'redirectto',
+ 'referrals',
+ 'refid',
+ 'refobj',
+ 'refresh',
+ 'rel',
+ 'remainder',
+ 'remoteaddr',
+ 'remoteaddress',
+ 'remoteport',
+ 'remove',
+ 'removeall',
+ 'removeattribute',
+ 'removeattributenode',
+ 'removeattributens',
+ 'removeback',
+ 'removechild',
+ 'removedatabasetable',
+ 'removedatasource',
+ 'removedatasourcedatabase',
+ 'removedatasourcehost',
+ 'removefield',
+ 'removefirst',
+ 'removefront',
+ 'removegroup',
+ 'removelast',
+ 'removeleading',
+ 'removenameditem',
+ 'removenameditemns',
+ 'removenode',
+ 'removesubnode',
+ 'removetrailing',
+ 'removeuser',
+ 'removeuserfromallgroups',
+ 'removeuserfromgroup',
+ 'rename',
+ 'renderbytes',
+ 'renderdocumentbytes',
+ 'renderstring',
+ 'replace',
+ 'replaceall',
+ 'replacechild',
+ 'replacedata',
+ 'replacefirst',
+ 'replaceheader',
+ 'replacepattern',
+ 'representnode',
+ 'representnoderesult',
+ 'reqid',
+ 'requestid',
+ 'requestmethod',
+ 'requestparams',
+ 'requesturi',
+ 'requires',
+ 'reserve',
+ 'reset',
+ 'resize',
+ 'resolutionh',
+ 'resolutionv',
+ 'resolvelinks',
+ 'resourcedata',
+ 'resourceinvokable',
+ 'resourcename',
+ 'resources',
+ 'respond',
+ 'restart',
+ 'restname',
+ 'result',
+ 'results',
+ 'resume',
+ 'retr',
+ 'retrieve',
+ 'returncolumns',
+ 'returntype',
+ 'rev',
+ 'reverse',
+ 'rewind',
+ 'right',
+ 'rint',
+ 'roll',
+ 'root',
+ 'rootmap',
+ 'rotate',
+ 'route',
+ 'rowsfound',
+ 'rset',
+ 'rule',
+ 'rules',
+ 'run',
+ 'running',
+ 'runonce',
+ 's',
+ 'sa',
+ 'safeexport8bits',
+ 'sameas',
+ 'save',
+ 'savedata',
+ 'scalb',
+ 'scale',
+ 'scanfordatasource',
+ 'scantasks',
+ 'scanworkers',
+ 'schemaname',
+ 'scheme',
+ 'script',
+ 'scriptextensions',
+ 'scriptfilename',
+ 'scriptname',
+ 'scripttype',
+ 'scripturi',
+ 'scripturl',
+ 'scrubkeywords',
+ 'search',
+ 'searchinbucket',
+ 'searchurl',
+ 'second',
+ 'secondsbetween',
+ 'seek',
+ 'select',
+ 'selected',
+ 'selectmany',
+ 'self',
+ 'send',
+ 'sendchunk',
+ 'sendfd',
+ 'sendfile',
+ 'sendpacket',
+ 'sendresponse',
+ 'separator',
+ 'serializationelements',
+ 'serialize',
+ 'serveraddr',
+ 'serveradmin',
+ 'servername',
+ 'serverport',
+ 'serverprotocol',
+ 'serversignature',
+ 'serversoftware',
+ 'sessionsdump',
+ 'sessionsmap',
+ 'set',
+ 'setalignment',
+ 'setattr',
+ 'setattribute',
+ 'setattributenode',
+ 'setattributenodens',
+ 'setattributens',
+ 'setbarheight',
+ 'setbarmultiplier',
+ 'setbarwidth',
+ 'setbaseline',
+ 'setbold',
+ 'setbooleanarrayregion',
+ 'setbooleanfield',
+ 'setbordercolor',
+ 'setborderwidth',
+ 'setbytearrayregion',
+ 'setbytefield',
+ 'setchararrayregion',
+ 'setcharfield',
+ 'setcode',
+ 'setcolor',
+ 'setcolorspace',
+ 'setcookie',
+ 'setcwd',
+ 'setdefaultstorage',
+ 'setdestination',
+ 'setdoublearrayregion',
+ 'setdoublefield',
+ 'setencoding',
+ 'setface',
+ 'setfieldvalue',
+ 'setfindpattern',
+ 'setfloatarrayregion',
+ 'setfloatfield',
+ 'setfont',
+ 'setformat',
+ 'setgeneratechecksum',
+ 'setheaders',
+ 'sethtmlattr',
+ 'setignorecase',
+ 'setinput',
+ 'setintarrayregion',
+ 'setintfield',
+ 'setitalic',
+ 'setlinewidth',
+ 'setlongarrayregion',
+ 'setlongfield',
+ 'setmarker',
+ 'setmaxfilesize',
+ 'setmode',
+ 'setname',
+ 'setnameditem',
+ 'setnameditemns',
+ 'setobjectarrayelement',
+ 'setobjectfield',
+ 'setpadding',
+ 'setpagenumber',
+ 'setpagerange',
+ 'setposition',
+ 'setrange',
+ 'setreplacepattern',
+ 'setshortarrayregion',
+ 'setshortfield',
+ 'setshowchecksum',
+ 'setsize',
+ 'setspacing',
+ 'setstaticbooleanfield',
+ 'setstaticbytefield',
+ 'setstaticcharfield',
+ 'setstaticdoublefield',
+ 'setstaticfloatfield',
+ 'setstaticintfield',
+ 'setstaticlongfield',
+ 'setstaticobjectfield',
+ 'setstaticshortfield',
+ 'setstatus',
+ 'settextalignment',
+ 'settextsize',
+ 'settimezone',
+ 'settrait',
+ 'setunderline',
+ 'sharpen',
+ 'shouldabort',
+ 'shouldclose',
+ 'showchecksum',
+ 'showcode39startstop',
+ 'showeanguardbars',
+ 'shutdownrd',
+ 'shutdownrdwr',
+ 'shutdownwr',
+ 'sin',
+ 'sinh',
+ 'size',
+ 'skip',
+ 'skiprows',
+ 'sort',
+ 'sortcolumns',
+ 'source',
+ 'sourcecolumn',
+ 'sourcefile',
+ 'sourceline',
+ 'specified',
+ 'split',
+ 'splitconnection',
+ 'splitdebuggingthread',
+ 'splitextension',
+ 'splittext',
+ 'splitthread',
+ 'splittoprivatedev',
+ 'splituppath',
+ 'sql',
+ 'sqlite3',
+ 'sqrt',
+ 'src',
+ 'srcpath',
+ 'sslerrfail',
+ 'stack',
+ 'standby',
+ 'start',
+ 'startone',
+ 'startup',
+ 'stat',
+ 'statement',
+ 'statementonly',
+ 'stats',
+ 'status',
+ 'statuscode',
+ 'statusmsg',
+ 'stdin',
+ 'step',
+ 'stls',
+ 'stop',
+ 'stoprunning',
+ 'storedata',
+ 'stripfirstcomponent',
+ 'striplastcomponent',
+ 'style',
+ 'styletype',
+ 'sub',
+ 'subject',
+ 'subnode',
+ 'subnodes',
+ 'substringdata',
+ 'subtract',
+ 'subtraits',
+ 'sum',
+ 'supportscontentrepresentation',
+ 'swapbytes',
+ 'systemid',
+ 't',
+ 'tabindex',
+ 'table',
+ 'tablecolumnnames',
+ 'tablecolumns',
+ 'tablehascolumn',
+ 'tableizestacktrace',
+ 'tableizestacktracelink',
+ 'tablemap',
+ 'tablename',
+ 'tables',
+ 'tabs',
+ 'tabstr',
+ 'tag',
+ 'tagname',
+ 'take',
+ 'tan',
+ 'tanh',
+ 'target',
+ 'tasks',
+ 'tb',
+ 'tell',
+ 'testexitcode',
+ 'testlock',
+ 'textwidth',
+ 'thenby',
+ 'thenbydescending',
+ 'threadreaddesc',
+ 'throw',
+ 'thrownew',
+ 'time',
+ 'timezone',
+ 'title',
+ 'titlecase',
+ 'to',
+ 'token',
+ 'tolower',
+ 'top',
+ 'toreflectedfield',
+ 'toreflectedmethod',
+ 'total_changes',
+ 'totitle',
+ 'touch',
+ 'toupper',
+ 'toxmlstring',
+ 'trace',
+ 'trackingid',
+ 'trait',
+ 'transform',
+ 'trigger',
+ 'trim',
+ 'trunk',
+ 'tryfinderrorfile',
+ 'trylock',
+ 'tryreadobject',
+ 'type',
+ 'typename',
+ 'uidl',
+ 'uncompress',
+ 'unescape',
+ 'union',
+ 'uniqueid',
+ 'unlock',
+ 'unspool',
+ 'up',
+ 'update',
+ 'updategroup',
+ 'upload',
+ 'uppercase',
+ 'url',
+ 'used',
+ 'usemap',
+ 'user',
+ 'usercolumns',
+ 'valid',
+ 'validate',
+ 'validatesessionstable',
+ 'value',
+ 'values',
+ 'valuetype',
+ 'variant',
+ 'version',
+ 'wait',
+ 'waitforcompletion',
+ 'warnings',
+ 'week',
+ 'weekofmonth',
+ 'weekofyear',
+ 'where',
+ 'width',
+ 'workers',
+ 'workinginputcolumns',
+ 'workingkeycolumns',
+ 'workingkeyfield_name',
+ 'workingreturncolumns',
+ 'workingsortcolumns',
+ 'write',
+ 'writebodybytes',
+ 'writebytes',
+ 'writeheader',
+ 'writeheaderbytes',
+ 'writeheaderline',
+ 'writeid',
+ 'writemessage',
+ 'writeobject',
+ 'writeobjecttcp',
+ 'writestring',
+ 'wroteheaders',
+ 'xhtml',
+ 'xmllang',
+ 'y0',
+ 'y1',
+ 'year',
+ 'yearwoy',
+ 'yn',
+ 'z',
+ 'zip',
+ 'zipfile',
+ 'zipfilename',
+ 'zipname',
+ 'zips',
+ 'zoneoffset',
+ ),
+ 'Lasso 8 Member Tags': (
+ 'accept',
+ 'add',
+ 'addattachment',
+ 'addattribute',
+ 'addbarcode',
+ 'addchapter',
+ 'addcheckbox',
+ 'addchild',
+ 'addcombobox',
+ 'addcomment',
+ 'addcontent',
+ 'addhiddenfield',
+ 'addhtmlpart',
+ 'addimage',
+ 'addjavascript',
+ 'addlist',
+ 'addnamespace',
+ 'addnextsibling',
+ 'addpage',
+ 'addparagraph',
+ 'addparenttype',
+ 'addpart',
+ 'addpasswordfield',
+ 'addphrase',
+ 'addprevsibling',
+ 'addradiobutton',
+ 'addradiogroup',
+ 'addresetbutton',
+ 'addsection',
+ 'addselectlist',
+ 'addsibling',
+ 'addsubmitbutton',
+ 'addtable',
+ 'addtext',
+ 'addtextarea',
+ 'addtextfield',
+ 'addtextpart',
+ 'alarms',
+ 'annotate',
+ 'answer',
+ 'append',
+ 'appendreplacement',
+ 'appendtail',
+ 'arc',
+ 'asasync',
+ 'astype',
+ 'atbegin',
+ 'atbottom',
+ 'atend',
+ 'atfarleft',
+ 'atfarright',
+ 'attop',
+ 'attributecount',
+ 'attributes',
+ 'authenticate',
+ 'authorize',
+ 'backward',
+ 'baseuri',
+ 'bcc',
+ 'beanproperties',
+ 'beginswith',
+ 'bind',
+ 'bitand',
+ 'bitclear',
+ 'bitflip',
+ 'bitformat',
+ 'bitnot',
+ 'bitor',
+ 'bitset',
+ 'bitshiftleft',
+ 'bitshiftright',
+ 'bittest',
+ 'bitxor',
+ 'blur',
+ 'body',
+ 'boundary',
+ 'bytes',
+ 'call',
+ 'cancel',
+ 'capabilities',
+ 'cc',
+ 'chardigitvalue',
+ 'charname',
+ 'charset',
+ 'chartype',
+ 'children',
+ 'circle',
+ 'close',
+ 'closepath',
+ 'closewrite',
+ 'code',
+ 'colorspace',
+ 'command',
+ 'comments',
+ 'compare',
+ 'comparecodepointorder',
+ 'compile',
+ 'composite',
+ 'connect',
+ 'contains',
+ 'content_disposition',
+ 'content_transfer_encoding',
+ 'content_type',
+ 'contents',
+ 'contrast',
+ 'convert',
+ 'crop',
+ 'curveto',
+ 'data',
+ 'date',
+ 'day',
+ 'daylights',
+ 'dayofweek',
+ 'dayofyear',
+ 'decrement',
+ 'delete',
+ 'depth',
+ 'describe',
+ 'description',
+ 'deserialize',
+ 'detach',
+ 'detachreference',
+ 'difference',
+ 'digit',
+ 'document',
+ 'down',
+ 'drawtext',
+ 'dst',
+ 'dump',
+ 'endswith',
+ 'enhance',
+ 'equals',
+ 'errors',
+ 'eval',
+ 'events',
+ 'execute',
+ 'export16bits',
+ 'export32bits',
+ 'export64bits',
+ 'export8bits',
+ 'exportfdf',
+ 'exportstring',
+ 'extract',
+ 'extractone',
+ 'fieldnames',
+ 'fieldtype',
+ 'fieldvalue',
+ 'file',
+ 'find',
+ 'findindex',
+ 'findnamespace',
+ 'findnamespacebyhref',
+ 'findpattern',
+ 'findposition',
+ 'first',
+ 'firstchild',
+ 'fliph',
+ 'flipv',
+ 'flush',
+ 'foldcase',
+ 'foreach',
+ 'format',
+ 'forward',
+ 'freebusies',
+ 'freezetype',
+ 'freezevalue',
+ 'from',
+ 'fulltype',
+ 'generatechecksum',
+ 'get',
+ 'getabswidth',
+ 'getalignment',
+ 'getattribute',
+ 'getattributenamespace',
+ 'getbarheight',
+ 'getbarmultiplier',
+ 'getbarwidth',
+ 'getbaseline',
+ 'getbordercolor',
+ 'getborderwidth',
+ 'getcode',
+ 'getcolor',
+ 'getcolumncount',
+ 'getencoding',
+ 'getface',
+ 'getfont',
+ 'getformat',
+ 'getfullfontname',
+ 'getheaders',
+ 'getmargins',
+ 'getmethod',
+ 'getnumericvalue',
+ 'getpadding',
+ 'getpagenumber',
+ 'getparams',
+ 'getproperty',
+ 'getpsfontname',
+ 'getrange',
+ 'getrowcount',
+ 'getsize',
+ 'getspacing',
+ 'getsupportedencodings',
+ 'gettextalignment',
+ 'gettextsize',
+ 'gettype',
+ 'gmt',
+ 'groupcount',
+ 'hasattribute',
+ 'haschildren',
+ 'hasvalue',
+ 'header',
+ 'headers',
+ 'height',
+ 'histogram',
+ 'hosttonet16',
+ 'hosttonet32',
+ 'hour',
+ 'id',
+ 'ignorecase',
+ 'import16bits',
+ 'import32bits',
+ 'import64bits',
+ 'import8bits',
+ 'importfdf',
+ 'importstring',
+ 'increment',
+ 'input',
+ 'insert',
+ 'insertatcurrent',
+ 'insertfirst',
+ 'insertfrom',
+ 'insertlast',
+ 'insertpage',
+ 'integer',
+ 'intersection',
+ 'invoke',
+ 'isa',
+ 'isalnum',
+ 'isalpha',
+ 'isbase',
+ 'iscntrl',
+ 'isdigit',
+ 'isemptyelement',
+ 'islower',
+ 'isopen',
+ 'isprint',
+ 'isspace',
+ 'istitle',
+ 'istruetype',
+ 'isualphabetic',
+ 'isulowercase',
+ 'isupper',
+ 'isuuppercase',
+ 'isuwhitespace',
+ 'iswhitespace',
+ 'iterator',
+ 'javascript',
+ 'join',
+ 'journals',
+ 'key',
+ 'keys',
+ 'last',
+ 'lastchild',
+ 'lasterror',
+ 'left',
+ 'length',
+ 'line',
+ 'listen',
+ 'localaddress',
+ 'localname',
+ 'lock',
+ 'lookupnamespace',
+ 'lowercase',
+ 'marker',
+ 'matches',
+ 'matchesstart',
+ 'matchposition',
+ 'matchstring',
+ 'merge',
+ 'millisecond',
+ 'minute',
+ 'mode',
+ 'modulate',
+ 'month',
+ 'moveto',
+ 'movetoattributenamespace',
+ 'movetoelement',
+ 'movetofirstattribute',
+ 'movetonextattribute',
+ 'name',
+ 'namespaces',
+ 'namespaceuri',
+ 'nettohost16',
+ 'nettohost32',
+ 'newchild',
+ 'next',
+ 'nextsibling',
+ 'nodetype',
+ 'open',
+ 'output',
+ 'padleading',
+ 'padtrailing',
+ 'pagecount',
+ 'pagesize',
+ 'paraminfo',
+ 'params',
+ 'parent',
+ 'path',
+ 'pixel',
+ 'position',
+ 'prefix',
+ 'previoussibling',
+ 'properties',
+ 'rawheaders',
+ 'read',
+ 'readattributevalue',
+ 'readerror',
+ 'readfrom',
+ 'readline',
+ 'readlock',
+ 'readstring',
+ 'readunlock',
+ 'recipients',
+ 'rect',
+ 'refcount',
+ 'referrals',
+ 'remoteaddress',
+ 'remove',
+ 'removeall',
+ 'removeattribute',
+ 'removechild',
+ 'removecurrent',
+ 'removefirst',
+ 'removelast',
+ 'removeleading',
+ 'removenamespace',
+ 'removetrailing',
+ 'render',
+ 'replace',
+ 'replaceall',
+ 'replacefirst',
+ 'replacepattern',
+ 'replacewith',
+ 'reserve',
+ 'reset',
+ 'resolutionh',
+ 'resolutionv',
+ 'response',
+ 'results',
+ 'retrieve',
+ 'returntype',
+ 'reverse',
+ 'reverseiterator',
+ 'right',
+ 'rotate',
+ 'run',
+ 'save',
+ 'scale',
+ 'search',
+ 'second',
+ 'send',
+ 'serialize',
+ 'set',
+ 'setalignment',
+ 'setbarheight',
+ 'setbarmultiplier',
+ 'setbarwidth',
+ 'setbaseline',
+ 'setblocking',
+ 'setbordercolor',
+ 'setborderwidth',
+ 'setbytes',
+ 'setcode',
+ 'setcolor',
+ 'setcolorspace',
+ 'setdatatype',
+ 'setencoding',
+ 'setface',
+ 'setfieldvalue',
+ 'setfont',
+ 'setformat',
+ 'setgeneratechecksum',
+ 'setheight',
+ 'setlassodata',
+ 'setlinewidth',
+ 'setmarker',
+ 'setmode',
+ 'setname',
+ 'setpadding',
+ 'setpagenumber',
+ 'setpagerange',
+ 'setposition',
+ 'setproperty',
+ 'setrange',
+ 'setshowchecksum',
+ 'setsize',
+ 'setspacing',
+ 'settemplate',
+ 'settemplatestr',
+ 'settextalignment',
+ 'settextdata',
+ 'settextsize',
+ 'settype',
+ 'setunderline',
+ 'setwidth',
+ 'setxmldata',
+ 'sharpen',
+ 'showchecksum',
+ 'showcode39startstop',
+ 'showeanguardbars',
+ 'signal',
+ 'signalall',
+ 'size',
+ 'smooth',
+ 'sort',
+ 'sortwith',
+ 'split',
+ 'standards',
+ 'steal',
+ 'subject',
+ 'substring',
+ 'subtract',
+ 'swapbytes',
+ 'textwidth',
+ 'time',
+ 'timezones',
+ 'titlecase',
+ 'to',
+ 'todos',
+ 'tolower',
+ 'totitle',
+ 'toupper',
+ 'transform',
+ 'trim',
+ 'type',
+ 'unescape',
+ 'union',
+ 'uniqueid',
+ 'unlock',
+ 'unserialize',
+ 'up',
+ 'uppercase',
+ 'value',
+ 'values',
+ 'valuetype',
+ 'wait',
+ 'waskeyword',
+ 'week',
+ 'width',
+ 'write',
+ 'writelock',
+ 'writeto',
+ 'writeunlock',
+ 'xmllang',
+ 'xmlschematype',
+ 'year',
+ )
+}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
index f6a9b796ee..9f07d191f7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
@@ -1,41 +1,41 @@
-"""
- pygments.lexers._lua_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names and modules of lua functions
- It is able to re-generate itself, but for adding new functions you
- probably have to add some callbacks (see function module_callbacks).
-
- Do not edit the MODULES dict by hand.
-
+"""
+ pygments.lexers._lua_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names and modules of lua functions
+ It is able to re-generate itself, but for adding new functions you
+ probably have to add some callbacks (see function module_callbacks).
+
+ Do not edit the MODULES dict by hand.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-MODULES = {'basic': ('_G',
- '_VERSION',
- 'assert',
- 'collectgarbage',
- 'dofile',
- 'error',
- 'getmetatable',
- 'ipairs',
- 'load',
- 'loadfile',
- 'next',
- 'pairs',
- 'pcall',
- 'print',
- 'rawequal',
- 'rawget',
+ :license: BSD, see LICENSE for details.
+"""
+
+MODULES = {'basic': ('_G',
+ '_VERSION',
+ 'assert',
+ 'collectgarbage',
+ 'dofile',
+ 'error',
+ 'getmetatable',
+ 'ipairs',
+ 'load',
+ 'loadfile',
+ 'next',
+ 'pairs',
+ 'pcall',
+ 'print',
+ 'rawequal',
+ 'rawget',
'rawlen',
- 'rawset',
- 'select',
- 'setmetatable',
- 'tonumber',
- 'tostring',
- 'type',
- 'xpcall'),
+ 'rawset',
+ 'select',
+ 'setmetatable',
+ 'tonumber',
+ 'tostring',
+ 'type',
+ 'xpcall'),
'bit32': ('bit32.arshift',
'bit32.band',
'bit32.bnot',
@@ -48,119 +48,119 @@ MODULES = {'basic': ('_G',
'bit32.replace',
'bit32.rrotate',
'bit32.rshift'),
- 'coroutine': ('coroutine.create',
+ 'coroutine': ('coroutine.create',
'coroutine.isyieldable',
- 'coroutine.resume',
- 'coroutine.running',
- 'coroutine.status',
- 'coroutine.wrap',
- 'coroutine.yield'),
- 'debug': ('debug.debug',
- 'debug.gethook',
- 'debug.getinfo',
- 'debug.getlocal',
- 'debug.getmetatable',
- 'debug.getregistry',
- 'debug.getupvalue',
+ 'coroutine.resume',
+ 'coroutine.running',
+ 'coroutine.status',
+ 'coroutine.wrap',
+ 'coroutine.yield'),
+ 'debug': ('debug.debug',
+ 'debug.gethook',
+ 'debug.getinfo',
+ 'debug.getlocal',
+ 'debug.getmetatable',
+ 'debug.getregistry',
+ 'debug.getupvalue',
'debug.getuservalue',
- 'debug.sethook',
- 'debug.setlocal',
- 'debug.setmetatable',
- 'debug.setupvalue',
+ 'debug.sethook',
+ 'debug.setlocal',
+ 'debug.setmetatable',
+ 'debug.setupvalue',
'debug.setuservalue',
'debug.traceback',
'debug.upvalueid',
'debug.upvaluejoin'),
- 'io': ('io.close',
- 'io.flush',
- 'io.input',
- 'io.lines',
- 'io.open',
- 'io.output',
- 'io.popen',
- 'io.read',
+ 'io': ('io.close',
+ 'io.flush',
+ 'io.input',
+ 'io.lines',
+ 'io.open',
+ 'io.output',
+ 'io.popen',
+ 'io.read',
'io.stderr',
'io.stdin',
'io.stdout',
- 'io.tmpfile',
- 'io.type',
- 'io.write'),
- 'math': ('math.abs',
- 'math.acos',
- 'math.asin',
+ 'io.tmpfile',
+ 'io.type',
+ 'io.write'),
+ 'math': ('math.abs',
+ 'math.acos',
+ 'math.asin',
'math.atan',
- 'math.atan2',
- 'math.ceil',
+ 'math.atan2',
+ 'math.ceil',
'math.cos',
- 'math.cosh',
- 'math.deg',
- 'math.exp',
- 'math.floor',
- 'math.fmod',
- 'math.frexp',
- 'math.huge',
- 'math.ldexp',
- 'math.log',
- 'math.max',
+ 'math.cosh',
+ 'math.deg',
+ 'math.exp',
+ 'math.floor',
+ 'math.fmod',
+ 'math.frexp',
+ 'math.huge',
+ 'math.ldexp',
+ 'math.log',
+ 'math.max',
'math.maxinteger',
- 'math.min',
+ 'math.min',
'math.mininteger',
- 'math.modf',
- 'math.pi',
- 'math.pow',
- 'math.rad',
- 'math.random',
- 'math.randomseed',
+ 'math.modf',
+ 'math.pi',
+ 'math.pow',
+ 'math.rad',
+ 'math.random',
+ 'math.randomseed',
'math.sin',
- 'math.sinh',
- 'math.sqrt',
+ 'math.sinh',
+ 'math.sqrt',
'math.tan',
- 'math.tanh',
+ 'math.tanh',
'math.tointeger',
'math.type',
'math.ult'),
'modules': ('package.config',
- 'package.cpath',
- 'package.loaded',
- 'package.loadlib',
- 'package.path',
- 'package.preload',
+ 'package.cpath',
+ 'package.loaded',
+ 'package.loadlib',
+ 'package.path',
+ 'package.preload',
'package.searchers',
'package.searchpath',
'require'),
- 'os': ('os.clock',
- 'os.date',
- 'os.difftime',
- 'os.execute',
- 'os.exit',
- 'os.getenv',
- 'os.remove',
- 'os.rename',
- 'os.setlocale',
- 'os.time',
- 'os.tmpname'),
- 'string': ('string.byte',
- 'string.char',
- 'string.dump',
- 'string.find',
- 'string.format',
- 'string.gmatch',
- 'string.gsub',
- 'string.len',
- 'string.lower',
- 'string.match',
+ 'os': ('os.clock',
+ 'os.date',
+ 'os.difftime',
+ 'os.execute',
+ 'os.exit',
+ 'os.getenv',
+ 'os.remove',
+ 'os.rename',
+ 'os.setlocale',
+ 'os.time',
+ 'os.tmpname'),
+ 'string': ('string.byte',
+ 'string.char',
+ 'string.dump',
+ 'string.find',
+ 'string.format',
+ 'string.gmatch',
+ 'string.gsub',
+ 'string.len',
+ 'string.lower',
+ 'string.match',
'string.pack',
'string.packsize',
- 'string.rep',
- 'string.reverse',
- 'string.sub',
+ 'string.rep',
+ 'string.reverse',
+ 'string.sub',
'string.unpack',
- 'string.upper'),
- 'table': ('table.concat',
- 'table.insert',
+ 'string.upper'),
+ 'table': ('table.concat',
+ 'table.insert',
'table.move',
'table.pack',
- 'table.remove',
+ 'table.remove',
'table.sort',
'table.unpack'),
'utf8': ('utf8.char',
@@ -169,9 +169,9 @@ MODULES = {'basic': ('_G',
'utf8.codes',
'utf8.len',
'utf8.offset')}
-
-if __name__ == '__main__': # pragma: no cover
- import re
+
+if __name__ == '__main__': # pragma: no cover
+ import re
import sys
# urllib ends up wanting to import a module called 'math' -- if
@@ -180,96 +180,96 @@ if __name__ == '__main__': # pragma: no cover
if sys.path[i].endswith('/lexers'):
del sys.path[i]
- try:
- from urllib import urlopen
- except ImportError:
- from urllib.request import urlopen
- import pprint
-
- # you can't generally find out what module a function belongs to if you
- # have only its name. Because of this, here are some callback functions
-    # that recognize if a given function belongs to a specific module
- def module_callbacks():
- def is_in_coroutine_module(name):
- return name.startswith('coroutine.')
-
- def is_in_modules_module(name):
- if name in ['require', 'module'] or name.startswith('package'):
- return True
- else:
- return False
-
- def is_in_string_module(name):
- return name.startswith('string.')
-
- def is_in_table_module(name):
- return name.startswith('table.')
-
- def is_in_math_module(name):
- return name.startswith('math')
-
- def is_in_io_module(name):
- return name.startswith('io.')
-
- def is_in_os_module(name):
- return name.startswith('os.')
-
- def is_in_debug_module(name):
- return name.startswith('debug.')
-
- return {'coroutine': is_in_coroutine_module,
- 'modules': is_in_modules_module,
- 'string': is_in_string_module,
- 'table': is_in_table_module,
- 'math': is_in_math_module,
- 'io': is_in_io_module,
- 'os': is_in_os_module,
- 'debug': is_in_debug_module}
-
-
-
- def get_newest_version():
- f = urlopen('http://www.lua.org/manual/')
+ try:
+ from urllib import urlopen
+ except ImportError:
+ from urllib.request import urlopen
+ import pprint
+
+ # you can't generally find out what module a function belongs to if you
+ # have only its name. Because of this, here are some callback functions
+    # that recognize if a given function belongs to a specific module
+ def module_callbacks():
+ def is_in_coroutine_module(name):
+ return name.startswith('coroutine.')
+
+ def is_in_modules_module(name):
+ if name in ['require', 'module'] or name.startswith('package'):
+ return True
+ else:
+ return False
+
+ def is_in_string_module(name):
+ return name.startswith('string.')
+
+ def is_in_table_module(name):
+ return name.startswith('table.')
+
+ def is_in_math_module(name):
+ return name.startswith('math')
+
+ def is_in_io_module(name):
+ return name.startswith('io.')
+
+ def is_in_os_module(name):
+ return name.startswith('os.')
+
+ def is_in_debug_module(name):
+ return name.startswith('debug.')
+
+ return {'coroutine': is_in_coroutine_module,
+ 'modules': is_in_modules_module,
+ 'string': is_in_string_module,
+ 'table': is_in_table_module,
+ 'math': is_in_math_module,
+ 'io': is_in_io_module,
+ 'os': is_in_os_module,
+ 'debug': is_in_debug_module}
+
+
+
+ def get_newest_version():
+ f = urlopen('http://www.lua.org/manual/')
r = re.compile(r'^<A HREF="(\d\.\d)/">(Lua )?\1</A>')
- for line in f:
- m = r.match(line)
- if m is not None:
- return m.groups()[0]
-
- def get_lua_functions(version):
- f = urlopen('http://www.lua.org/manual/%s/' % version)
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ return m.groups()[0]
+
+ def get_lua_functions(version):
+ f = urlopen('http://www.lua.org/manual/%s/' % version)
r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
- functions = []
- for line in f:
- m = r.match(line)
- if m is not None:
- functions.append(m.groups()[0])
- return functions
-
- def get_function_module(name):
- for mod, cb in module_callbacks().items():
- if cb(name):
- return mod
- if '.' in name:
- return name.split('.')[0]
- else:
- return 'basic'
-
- def regenerate(filename, modules):
- with open(filename) as fp:
- content = fp.read()
-
- header = content[:content.find('MODULES = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
-
- with open(filename, 'w') as fp:
- fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
- fp.write(footer)
-
- def run():
- version = get_newest_version()
+ functions = []
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ functions.append(m.groups()[0])
+ return functions
+
+ def get_function_module(name):
+ for mod, cb in module_callbacks().items():
+ if cb(name):
+ return mod
+ if '.' in name:
+ return name.split('.')[0]
+ else:
+ return 'basic'
+
+ def regenerate(filename, modules):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('MODULES = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(footer)
+
+ def run():
+ version = get_newest_version()
functions = set()
for v in ('5.2', version):
print('> Downloading function index for Lua %s' % v)
@@ -277,16 +277,16 @@ if __name__ == '__main__': # pragma: no cover
print('> %d functions found, %d new:' %
(len(f), len(set(f) - functions)))
functions |= set(f)
-
+
functions = sorted(functions)
- modules = {}
- for full_function_name in functions:
- print('>> %s' % full_function_name)
- m = get_function_module(full_function_name)
- modules.setdefault(m, []).append(full_function_name)
+ modules = {}
+ for full_function_name in functions:
+ print('>> %s' % full_function_name)
+ m = get_function_module(full_function_name)
+ modules.setdefault(m, []).append(full_function_name)
modules = {k: tuple(v) for k, v in modules.items()}
-
- regenerate(__file__, modules)
-
- run()
+
+ regenerate(__file__, modules)
+
+ run()
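
The regenerated file above keeps MODULES as a plain dict mapping a module name ('basic', 'string', 'table', ...) to a tuple of fully qualified builtin names, which is typically consumed through simple membership tests. A minimal sketch of such a lookup, assuming only that pygments.lexers._lua_builtins is importable; the find_module helper itself is hypothetical and not part of Pygments:

    # Illustrative only: not part of Pygments, just a consumer of the generated table.
    from pygments.lexers._lua_builtins import MODULES

    def find_module(name):
        """Return the Lua module a builtin belongs to, or None if it is unknown."""
        for module, functions in MODULES.items():
            if name in functions:
                return module
        return None

    if __name__ == '__main__':
        print(find_module('string.format'))    # -> 'string'
        print(find_module('print'))            # -> 'basic'
        print(find_module('no_such_builtin'))  # -> None

This is the reverse of get_function_module() in the regeneration block above, which assigns a freshly scraped name to a module via the module_callbacks() heuristics and otherwise falls back to splitting on '.' (defaulting to 'basic').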
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mapping.py b/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
index a7b1b2d96a..ec9fbd067b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
@@ -1,580 +1,580 @@
-"""
- pygments.lexers._mapping
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer mapping definitions. This file is generated by itself. Every time
- you change something on a builtin lexer definition, run this script from
- the lexers folder to update it.
-
- Do not alter the LEXERS dictionary by hand.
-
+"""
+ pygments.lexers._mapping
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexer mapping definitions. This file is generated by itself. Every time
+ you change something on a builtin lexer definition, run this script from
+ the lexers folder to update it.
+
+ Do not alter the LEXERS dictionary by hand.
+
:copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-LEXERS = {
- 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
+ :license: BSD, see LICENSE for details.
+"""
+
+LEXERS = {
+ 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
'AMDGPULexer': ('pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()),
'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()),
- 'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
+ 'AbnfLexer': ('pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
- 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
- 'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
- 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
+ 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
+ 'AdlLexer': ('pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
+ 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
'AheuiLexer': ('pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
- 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
+ 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)),
'AmplLexer': ('pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
'Angular2HtmlLexer': ('pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
'Angular2Lexer': ('pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()),
- 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
- 'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
- 'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
- 'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
- 'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
- 'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
- 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
- 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
- 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
- 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
- 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
+ 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
+ 'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
+ 'AntlrJavaLexer': ('pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
+ 'AntlrLexer': ('pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
+ 'AntlrObjectiveCLexer': ('pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
+ 'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
+ 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
+ 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
+ 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
+ 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
+ 'ArduinoLexer': ('pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
'ArrowLexer': ('pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
'AscLexer': ('pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')),
- 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
+ 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
'AugeasLexer': ('pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
- 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
+ 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
- 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
+ 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
'BBCBasicLexer': ('pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
- 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
- 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
+ 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
+ 'BCLexer': ('pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
'BareLexer': ('pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
- 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
+ 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
- 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
+ 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BddLexer': ('pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)),
- 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+ 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
'BibTeXLexer': ('pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)),
- 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
- 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
- 'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
+ 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
+ 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
+ 'BnfLexer': ('pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
'BoaLexer': ('pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()),
- 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
+ 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
'BoogieLexer': ('pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()),
- 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
- 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
- 'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
+ 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
+ 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
+ 'CAmkESLexer': ('pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc', '*.x[bp]m'), ('text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap')),
- 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
- 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
- 'CPSALexer': ('pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()),
- 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
+ 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
+ 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
+ 'CPSALexer': ('pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()),
+ 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#', 'cs'), ('*.cs',), ('text/x-csharp',)),
- 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
- 'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
+ 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
+ 'CadlLexer': ('pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
'CapDLLexer': ('pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
'CapnProtoLexer': ('pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
- 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
+ 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CddlLexer': ('pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)),
- 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
- 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
+ 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
+ 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
- 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
+ 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CharmciLexer': ('pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
- 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
+ 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
- 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
- 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
- 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
- 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
+ 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
+ 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
+ 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
+ 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
'CleanLexer': ('pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()),
- 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
- 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
- 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
- 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
+ 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
+ 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
+ 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
+ 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
- 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
- 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
- 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
- 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
- 'ComponentPascalLexer': ('pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)),
- 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
- 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
- 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
- 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
- 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
- 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
+ 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
+ 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
+ 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
+ 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
+ 'ComponentPascalLexer': ('pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)),
+ 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
+ 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
+ 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
+ 'CrmshLexer': ('pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
+ 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
+ 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
'CrystalLexer': ('pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
- 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
+ 'CsoundDocumentLexer': ('pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
'CsoundOrchestraLexer': ('pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
- 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
- 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
+ 'CsoundScoreLexer': ('pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
+ 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)),
- 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
- 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
- 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
- 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
- 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
- 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
- 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
- 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
- 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
- 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
- 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
+ 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
+ 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
+ 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
+ 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
+ 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
+ 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
+ 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
+ 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
+ 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
+ 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
+ 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
'Dasm16Lexer': ('pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
'DevicetreeLexer': ('pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
- 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
- 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
- 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
- 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
- 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
- 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
- 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
- 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
- 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
- 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
- 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
- 'EarlGreyLexer': ('pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)),
- 'EasytrieveLexer': ('pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)),
- 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
- 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
- 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
+ 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
+ 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
+ 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
+ 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
+ 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
+ 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
+ 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
+ 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
+ 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
+ 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
+ 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
+ 'EarlGreyLexer': ('pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)),
+ 'EasytrieveLexer': ('pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)),
+ 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
+ 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
+ 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)),
- 'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
+ 'ElmLexer': ('pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
'ElpiLexer': ('pygments.lexers.elpi', 'Elpi', ('elpi',), ('*.elpi',), ('text/x-elpi',)),
'EmacsLispLexer': ('pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
'EmailLexer': ('pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
- 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
- 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
- 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
- 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
- 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
- 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
+ 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
+ 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
+ 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
+ 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
+ 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
+ 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
'ExeclineLexer': ('pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
- 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
+ 'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
'FStarLexer': ('pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)),
- 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
- 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
- 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
- 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
+ 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
+ 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
+ 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
+ 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
'FennelLexer': ('pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()),
- 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
+ 'FishShellLexer': ('pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
'FlatlineLexer': ('pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
'FloScriptLexer': ('pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()),
'ForthLexer': ('pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
- 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
+ 'FortranFixedLexer': ('pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
- 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
+ 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
'FreeFemLexer': ('pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)),
'FutharkLexer': ('pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)),
- 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
+ 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
'GDScriptLexer': ('pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
- 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
+ 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
'GSQLLexer': ('pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()),
- 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
+ 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
'GcodeLexer': ('pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()),
- 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
- 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
- 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
+ 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
+ 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
+ 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
- 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
+ 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
'GoLexer': ('pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
- 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
- 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
- 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
- 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
+ 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
+ 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
+ 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
+ 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
'GraphvizLexer': ('pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')),
'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1-9]', '*.man', '*.1p', '*.3pm'), ('application/x-troff', 'text/troff')),
- 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
+ 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
'HLSLShaderLexer': ('pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
- 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
- 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
- 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
- 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
+ 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
+ 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
+ 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
+ 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
- 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
+ 'HexdumpLexer': ('pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
'HsailLexer': ('pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
'HspecLexer': ('pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()),
- 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
- 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
- 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
- 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
- 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
- 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
- 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
- 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
- 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
- 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
+ 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
+ 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
+ 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
+ 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
+ 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
+ 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
+ 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
+ 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
+ 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+ 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
'IconLexer': ('pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
- 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
- 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
- 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
- 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
- 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
+ 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
+ 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
+ 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
+ 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
+ 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '.editorconfig', '*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ('text/x-ini', 'text/inf')),
- 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
- 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
- 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
- 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
- 'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
+ 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
+ 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
+ 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
+ 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
+ 'JLexer': ('pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
'JSLTLexer': ('pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
- 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
- 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
- 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
+ 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
+ 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
+ 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
- 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
+ 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
- 'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
+ 'JclLexer': ('pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
'JsgfLexer': ('pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
'JsonBareObjectLexer': ('pygments.lexers.data', 'JSONBareObject', (), (), ()),
- 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
+ 'JsonLdLexer': ('pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
'JsonLexer': ('pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')),
- 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
+ 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
- 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
+ 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
'JuttleLexer': ('pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
- 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
+ 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
'KernelLogLexer': ('pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()),
- 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
+ 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)),
'KuinLexer': ('pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()),
- 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
- 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
- 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
+ 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
+ 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
+ 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
- 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
- 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
- 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
- 'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
+ 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
+ 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
+ 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
+ 'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
'LilyPondLexer': ('pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()),
- 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
- 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
+ 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
+ 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)),
'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)),
'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)),
'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)),
'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)),
- 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
+ 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
'LlvmMirBodyLexer': ('pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()),
'LlvmMirLexer': ('pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()),
- 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
- 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
- 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
+ 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
+ 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
+ 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
- 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
- 'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
- 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
- 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
- 'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
+ 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
+ 'MSDOSSessionLexer': ('pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
+ 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
+ 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
+ 'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
- 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
- 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
- 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+ 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
+ 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
+ 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
'MarkdownLexer': ('pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
- 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
- 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
- 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
- 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
- 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
+ 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
+ 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
+ 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
+ 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
+ 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
'MaximaLexer': ('pygments.lexers.maxima', 'Maxima', ('maxima', 'macsyma'), ('*.mac', '*.max'), ()),
'MesonLexer': ('pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)),
- 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
+ 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
'MiniScriptLexer': ('pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
- 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
- 'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
- 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
- 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
+ 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
+ 'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
+ 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
+ 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
'MoselLexer': ('pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()),
- 'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
- 'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
- 'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
- 'MozPreprocPercentLexer': ('pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
- 'MozPreprocXulLexer': ('pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
- 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
- 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
- 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
- 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
- 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
- 'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
- 'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
+ 'MozPreprocCssLexer': ('pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
+ 'MozPreprocHashLexer': ('pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
+ 'MozPreprocJavascriptLexer': ('pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
+ 'MozPreprocPercentLexer': ('pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
+ 'MozPreprocXulLexer': ('pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
+ 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
+ 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
+ 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
+ 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
+ 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
+ 'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
+ 'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
- 'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
- 'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
+ 'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
+ 'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
'NCLLexer': ('pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
- 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
- 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
- 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
- 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
- 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
+ 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
+ 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
+ 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
+ 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
+ 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
'NestedTextLexer': ('pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()),
'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
- 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
+ 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
- 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
- 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
+ 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
+ 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
'NodeConsoleLexer': ('pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)),
'NotmuchLexer': ('pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
'NuSMVLexer': ('pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
- 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
- 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
- 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
- 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
- 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
- 'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
- 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
- 'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
+ 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
+ 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
+ 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
+ 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
+ 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
+ 'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
+ 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
+ 'OdinLexer': ('pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
'OmgIdlLexer': ('pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()),
- 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
- 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
- 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+ 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
+ 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
+ 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
'OutputLexer': ('pygments.lexers.special', 'Text output', ('output',), (), ()),
- 'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
- 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
- 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
- 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
+ 'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
+ 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
+ 'ParaSailLexer': ('pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
+ 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
'PegLexer': ('pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
- 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
- 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
- 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
- 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
- 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
+ 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
+ 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
+ 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
+ 'PkgConfigLexer': ('pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
+ 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
'PointlessLexer': ('pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
'PonyLexer': ('pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
- 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
- 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
- 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
- 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
+ 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
+ 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
+ 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
+ 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
'PowerShellSessionLexer': ('pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()),
- 'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
+ 'PraatLexer': ('pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
'ProcfileLexer': ('pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
- 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+ 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
- 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
- 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
+ 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
+ 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
'PsyshConsoleLexer': ('pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()),
'PugLexer': ('pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
- 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
- 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
+ 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
+ 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
- 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
+ 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
- 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
- 'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
- 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
- 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
+ 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
+ 'QVToLexer': ('pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
+ 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
+ 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
'RNCCompactLexer': ('pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()),
- 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
- 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
- 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
- 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
- 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
- 'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
- 'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
- 'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
- 'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
- 'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
+ 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
+ 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
+ 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
+ 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
+ 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
+ 'RagelEmbeddedLexer': ('pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
+ 'RagelJavaLexer': ('pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
+ 'RagelLexer': ('pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
+ 'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
+ 'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)),
- 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
+ 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
'ReasonLexer': ('pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)),
- 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
- 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
- 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
- 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
+ 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
+ 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
+ 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
+ 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
'ResourceLexer': ('pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
- 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
- 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
+ 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
+ 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
'RideLexer': ('pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)),
'RitaLexer': ('pygments.lexers.rita', 'Rita', ('rita',), ('*.rita',), ('text/rita',)),
- 'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
- 'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
+ 'RoboconfGraphLexer': ('pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
+ 'RoboconfInstancesLexer': ('pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot',), ('text/x-robotframework',)),
- 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
- 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
+ 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
+ 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
'RtsLexer': ('pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()),
- 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
+ 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('ruby', 'rb', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile'), ('text/x-ruby', 'application/x-ruby')),
'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')),
'SASLexer': ('pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
- 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
- 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
+ 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
+ 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
'SarlLexer': ('pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)),
- 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
+ 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
'SaviLexer': ('pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()),
- 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
- 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
+ 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
+ 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
'ScdocLexer': ('pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
- 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
- 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
- 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
+ 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
+ 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
+ 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'SedLexer': ('pygments.lexers.textedit', 'Sed', ('sed', 'gsed', 'ssed'), ('*.sed', '*.[gs]sed'), ('text/x-sed',)),
'ShExCLexer': ('pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
- 'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
+ 'ShenLexer': ('pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
'SieveLexer': ('pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()),
'SilverLexer': ('pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
'SingularityLexer': ('pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()),
'SlashLexer': ('pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()),
- 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
+ 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
'SlurmBashLexer': ('pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()),
- 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
- 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
+ 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
+ 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartGameFormatLexer': ('pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
- 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
+ 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
'SmithyLexer': ('pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()),
- 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
+ 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
'SophiaLexer': ('pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()),
- 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
+ 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
- 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
+ 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
'SpiceLexer': ('pygments.lexers.spice', 'Spice', ('spice', 'spicelang'), ('*.spice',), ('text/x-spice',)),
- 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
- 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
- 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
+ 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
+ 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
+ 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
'SrcinfoLexer': ('pygments.lexers.srcinfo', 'Srcinfo', ('srcinfo',), ('.SRCINFO',), ()),
- 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
- 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
+ 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
+ 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
'StataLexer': ('pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
'SuperColliderLexer': ('pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
- 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
- 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
- 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
- 'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
+ 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
+ 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
+ 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
+ 'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
'TNTLexer': ('pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()),
- 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
+ 'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
- 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
- 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
- 'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
- 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
+ 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
+ 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
+ 'TcshSessionLexer': ('pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
+ 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
'TealLexer': ('pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()),
'TeraTermLexer': ('pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)),
- 'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
- 'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
- 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
- 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
- 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
+ 'TermcapLexer': ('pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
+ 'TerminfoLexer': ('pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
+ 'TerraformLexer': ('pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
+ 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
+ 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
'ThingsDBLexer': ('pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
- 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
+ 'ThriftLexer': ('pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
'TiddlyWiki5Lexer': ('pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
- 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
+ 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
'TransactSqlLexer': ('pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
- 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
- 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
- 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
- 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
+ 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
+ 'TurtleLexer': ('pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
+ 'TwigHtmlLexer': ('pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
+ 'TwigLexer': ('pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')),
'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
- 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
+ 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
'UsdLexer': ('pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()),
'VBScriptLexer': ('pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()),
'VCLLexer': ('pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
'VCLSnippetLexer': ('pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
- 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
- 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
- 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
- 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
- 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
- 'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
- 'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
- 'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
- 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
- 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
- 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
+ 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
+ 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
+ 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
+ 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
+ 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
+ 'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
+ 'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
+ 'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
+ 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
+ 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
+ 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
'WDiffLexer': ('pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
'WatLexer': ('pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
'WebIDLLexer': ('pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
'WhileyLexer': ('pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
- 'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
- 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
- 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
+ 'X10Lexer': ('pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
+ 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
+ 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)),
- 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
- 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
- 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
+ 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
+ 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
+ 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
'XorgLexer': ('pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()),
- 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
- 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
+ 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
+ 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
'XtlangLexer': ('pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
- 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
- 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
+ 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
+ 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
'YangLexer': ('pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)),
'ZeekLexer': ('pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
- 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
+ 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
'ZigLexer': ('pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
'apdlexer': ('pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()),
-}
-
-if __name__ == '__main__': # pragma: no cover
- import sys
- import os
-
- # lookup lexers
- found_lexers = []
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
- for root, dirs, files in os.walk('.'):
- for filename in files:
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.lexers%s.%s' % (
- root[1:].replace('/', '.'), filename[:-3])
- print(module_name)
- module = __import__(module_name, None, None, [''])
- for lexer_name in module.__all__:
- lexer = getattr(module, lexer_name)
- found_lexers.append(
- '%r: %r' % (lexer_name,
- (module_name,
- lexer.name,
- tuple(lexer.aliases),
- tuple(lexer.filenames),
- tuple(lexer.mimetypes))))
- # sort them to make the diff minimal
- found_lexers.sort()
-
- # extract useful sourcecode from this file
- with open(__file__) as fp:
- content = fp.read()
- # replace crnl to nl for Windows.
- #
- # Note that, originally, contributers should keep nl of master
- # repository, for example by using some kind of automatic
- # management EOL, like `EolExtension
- # <https://www.mercurial-scm.org/wiki/EolExtension>`.
- content = content.replace("\r\n", "\n")
- header = content[:content.find('LEXERS = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- # write new file
+}
+
+if __name__ == '__main__': # pragma: no cover
+ import sys
+ import os
+
+ # lookup lexers
+ found_lexers = []
+ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.lexers%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for lexer_name in module.__all__:
+ lexer = getattr(module, lexer_name)
+ found_lexers.append(
+ '%r: %r' % (lexer_name,
+ (module_name,
+ lexer.name,
+ tuple(lexer.aliases),
+ tuple(lexer.filenames),
+ tuple(lexer.mimetypes))))
+ # sort them to make the diff minimal
+ found_lexers.sort()
+
+ # extract useful sourcecode from this file
+ with open(__file__) as fp:
+ content = fp.read()
+ # replace crnl to nl for Windows.
+ #
+ # Note that, originally, contributers should keep nl of master
+ # repository, for example by using some kind of automatic
+ # management EOL, like `EolExtension
+ # <https://www.mercurial-scm.org/wiki/EolExtension>`.
+ content = content.replace("\r\n", "\n")
+ header = content[:content.find('LEXERS = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ # write new file
with open(__file__, 'w') as fp:
- fp.write(header)
- fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
- fp.write(footer)
-
- print ('=== %d lexers processed.' % len(found_lexers))
+ fp.write(header)
+ fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
+ fp.write(footer)
+
+ print ('=== %d lexers processed.' % len(found_lexers))
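For context, each entry in the LEXERS table restored above maps a lexer class name to a tuple of (module path, display name, aliases, filename patterns, MIME types), and the `__main__` block regenerates that table by walking the lexer modules. Below is a minimal sketch of how such entries are consumed at runtime; the helper names are the standard public `pygments.lexers` API, while the specific alias and filename are illustrative only:

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    # Aliases and filename patterns come straight from the LEXERS tuples:
    # 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', ...), ...)
    lexer = get_lexer_by_name('rust')               # resolved via the 'rust' alias
    same_lexer = get_lexer_for_filename('main.rs')  # resolved via the '*.rs' pattern
    print(lexer.name, same_lexer.name)              # both report 'Rust'

Both helpers import the target module lazily from the first tuple element, so the mapping file itself never imports every lexer.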
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
index 5cc50a940b..33d919fc27 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
@@ -1,1171 +1,1171 @@
-"""
- pygments.lexers._mql_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtins for the MqlLexer.
-
+"""
+ pygments.lexers._mql_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtins for the MqlLexer.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-types = (
- 'AccountBalance',
- 'AccountCompany',
- 'AccountCredit',
- 'AccountCurrency',
- 'AccountEquity',
- 'AccountFreeMarginCheck',
- 'AccountFreeMarginMode',
- 'AccountFreeMargin',
- 'AccountInfoDouble',
- 'AccountInfoInteger',
- 'AccountInfoString',
- 'AccountLeverage',
- 'AccountMargin',
- 'AccountName',
- 'AccountNumber',
- 'AccountProfit',
- 'AccountServer',
- 'AccountStopoutLevel',
- 'AccountStopoutMode',
- 'Alert',
- 'ArrayBsearch',
- 'ArrayCompare',
- 'ArrayCopyRates',
- 'ArrayCopySeries',
- 'ArrayCopy',
- 'ArrayDimension',
- 'ArrayFill',
- 'ArrayFree',
- 'ArrayGetAsSeries',
- 'ArrayInitialize',
- 'ArrayIsDynamic',
- 'ArrayIsSeries',
- 'ArrayMaximum',
- 'ArrayMinimum',
- 'ArrayRange',
- 'ArrayResize',
- 'ArraySetAsSeries',
- 'ArraySize',
- 'ArraySort',
- 'CharArrayToString',
- 'CharToString',
- 'CharToStr',
- 'CheckPointer',
- 'ColorToARGB',
- 'ColorToString',
- 'Comment',
- 'CopyClose',
- 'CopyHigh',
- 'CopyLow',
- 'CopyOpen',
- 'CopyRates',
- 'CopyRealVolume',
- 'CopySpread',
- 'CopyTickVolume',
- 'CopyTime',
- 'DayOfWeek',
- 'DayOfYear',
- 'Day',
- 'DebugBreak',
- 'Digits',
- 'DoubleToString',
- 'DoubleToStr',
- 'EnumToString',
- 'EventChartCustom',
- 'EventKillTimer',
- 'EventSetMillisecondTimer',
- 'EventSetTimer',
- 'ExpertRemove',
- 'FileClose',
- 'FileCopy',
- 'FileDelete',
- 'FileFindClose',
- 'FileFindFirst',
- 'FileFindNext',
- 'FileFlush',
- 'FileGetInteger',
- 'FileIsEnding',
- 'FileIsExist',
- 'FileIsLineEnding',
- 'FileMove',
- 'FileOpenHistory',
- 'FileOpen',
- 'FileReadArray',
- 'FileReadBool',
- 'FileReadDatetime',
- 'FileReadDouble',
- 'FileReadFloat',
- 'FileReadInteger',
- 'FileReadLong',
- 'FileReadNumber',
- 'FileReadString',
- 'FileReadStruct',
- 'FileSeek',
- 'FileSize',
- 'FileTell',
- 'FileWriteArray',
- 'FileWriteDouble',
- 'FileWriteFloat',
- 'FileWriteInteger',
- 'FileWriteLong',
- 'FileWriteString',
- 'FileWriteStruct',
- 'FileWrite',
- 'FolderClean',
- 'FolderCreate',
- 'FolderDelete',
- 'GetLastError',
- 'GetPointer',
- 'GetTickCount',
- 'GlobalVariableCheck',
- 'GlobalVariableDel',
- 'GlobalVariableGet',
- 'GlobalVariableName',
- 'GlobalVariableSetOnCondition',
- 'GlobalVariableSet',
- 'GlobalVariableTemp',
- 'GlobalVariableTime',
- 'GlobalVariablesDeleteAll',
- 'GlobalVariablesFlush',
- 'GlobalVariablesTotal',
- 'HideTestIndicators',
- 'Hour',
- 'IndicatorBuffers',
- 'IndicatorCounted',
- 'IndicatorDigits',
- 'IndicatorSetDouble',
- 'IndicatorSetInteger',
- 'IndicatorSetString',
- 'IndicatorShortName',
- 'IntegerToString',
- 'IsConnected',
- 'IsDemo',
- 'IsDllsAllowed',
- 'IsExpertEnabled',
- 'IsLibrariesAllowed',
- 'IsOptimization',
- 'IsStopped',
- 'IsTesting',
- 'IsTradeAllowed',
- 'IsTradeContextBusy',
- 'IsVisualMode',
- 'MQLInfoInteger',
- 'MQLInfoString',
- 'MarketInfo',
- 'MathAbs',
- 'MathArccos',
- 'MathArcsin',
- 'MathArctan',
- 'MathCeil',
- 'MathCos',
- 'MathExp',
- 'MathFloor',
- 'MathIsValidNumber',
- 'MathLog',
- 'MathMax',
- 'MathMin',
- 'MathMod',
- 'MathPow',
- 'MathRand',
- 'MathRound',
- 'MathSin',
- 'MathSqrt',
- 'MathSrand',
- 'MathTan',
- 'MessageBox',
- 'Minute',
- 'Month',
- 'NormalizeDouble',
- 'ObjectCreate',
- 'ObjectDelete',
- 'ObjectDescription',
- 'ObjectFind',
- 'ObjectGetDouble',
- 'ObjectGetFiboDescription',
- 'ObjectGetInteger',
- 'ObjectGetShiftByValue',
- 'ObjectGetString',
- 'ObjectGetTimeByValue',
- 'ObjectGetValueByShift',
- 'ObjectGetValueByTime',
- 'ObjectGet',
- 'ObjectMove',
- 'ObjectName',
- 'ObjectSetDouble',
- 'ObjectSetFiboDescription',
- 'ObjectSetInteger',
- 'ObjectSetString',
- 'ObjectSetText',
- 'ObjectSet',
- 'ObjectType',
- 'ObjectsDeleteAll',
- 'ObjectsTotal',
- 'OrderCloseBy',
- 'OrderClosePrice',
- 'OrderCloseTime',
- 'OrderClose',
- 'OrderComment',
- 'OrderCommission',
- 'OrderDelete',
- 'OrderExpiration',
- 'OrderLots',
- 'OrderMagicNumber',
- 'OrderModify',
- 'OrderOpenPrice',
- 'OrderOpenTime',
- 'OrderPrint',
- 'OrderProfit',
- 'OrderSelect',
- 'OrderSend',
- 'OrderStopLoss',
- 'OrderSwap',
- 'OrderSymbol',
- 'OrderTakeProfit',
- 'OrderTicket',
- 'OrderType',
- 'OrdersHistoryTotal',
- 'OrdersTotal',
- 'PeriodSeconds',
- 'Period',
- 'PlaySound',
- 'Point',
- 'PrintFormat',
- 'Print',
- 'RefreshRates',
- 'ResetLastError',
- 'ResourceCreate',
- 'ResourceFree',
- 'ResourceReadImage',
- 'ResourceSave',
- 'Seconds',
- 'SendFTP',
- 'SendMail',
- 'SendNotification',
- 'SeriesInfoInteger',
- 'SetIndexArrow',
- 'SetIndexBuffer',
- 'SetIndexDrawBegin',
- 'SetIndexEmptyValue',
- 'SetIndexLabel',
- 'SetIndexShift',
- 'SetIndexStyle',
- 'SetLevelStyle',
- 'SetLevelValue',
- 'ShortArrayToString',
- 'ShortToString',
- 'Sleep',
- 'StrToDouble',
- 'StrToInteger',
- 'StrToTime',
- 'StringAdd',
- 'StringBufferLen',
- 'StringCompare',
- 'StringConcatenate',
- 'StringFill',
- 'StringFind',
- 'StringFormat',
- 'StringGetCharacter',
- 'StringGetChar',
- 'StringInit',
- 'StringLen',
- 'StringReplace',
- 'StringSetCharacter',
- 'StringSetChar',
- 'StringSplit',
- 'StringSubstr',
- 'StringToCharArray',
- 'StringToColor',
- 'StringToDouble',
- 'StringToInteger',
- 'StringToLower',
- 'StringToShortArray',
- 'StringToTime',
- 'StringToUpper',
- 'StringTrimLeft',
- 'StringTrimRight',
- 'StructToTime',
- 'SymbolInfoDouble',
- 'SymbolInfoInteger',
- 'SymbolInfoSessionQuote',
- 'SymbolInfoSessionTrade',
- 'SymbolInfoString',
- 'SymbolInfoTick',
- 'SymbolIsSynchronized',
- 'SymbolName',
- 'SymbolSelect',
- 'SymbolsTotal',
- 'Symbol',
- 'TerminalClose',
- 'TerminalCompany',
- 'TerminalName',
- 'TerminalPath',
- 'TesterStatistics',
- 'TextGetSize',
- 'TextOut',
- 'TextSetFont',
- 'TimeCurrent',
- 'TimeDayOfWeek',
- 'TimeDayOfYear',
- 'TimeDaylightSavings',
- 'TimeDay',
- 'TimeGMTOffset',
- 'TimeGMT',
- 'TimeHour',
- 'TimeLocal',
- 'TimeMinute',
- 'TimeMonth',
- 'TimeSeconds',
- 'TimeToString',
- 'TimeToStruct',
- 'TimeToStr',
- 'TimeTradeServer',
- 'TimeYear',
- 'UninitializeReason',
- 'WindowBarsPerChart',
- 'WindowExpertName',
- 'WindowFind',
- 'WindowFirstVisibleBar',
- 'WindowHandle',
- 'WindowIsVisible',
- 'WindowOnDropped',
- 'WindowPriceMax',
- 'WindowPriceMin',
- 'WindowPriceOnDropped',
- 'WindowRedraw',
- 'WindowScreenShot',
- 'WindowTimeOnDropped',
- 'WindowXOnDropped',
- 'WindowYOnDropped',
- 'WindowsTotal',
- 'Year',
- 'ZeroMemory',
- 'iAC',
- 'iADX',
- 'iAD',
- 'iAO',
- 'iATR',
- 'iAlligator',
- 'iBWMFI',
- 'iBandsOnArray',
- 'iBands',
- 'iBarShift',
- 'iBars',
- 'iBearsPower',
- 'iBullsPower',
- 'iCCIOnArray',
- 'iCCI',
- 'iClose',
- 'iCustom',
- 'iDeMarker',
- 'iEnvelopesOnArray',
- 'iEnvelopes',
- 'iForce',
- 'iFractals',
- 'iGator',
- 'iHighest',
- 'iHigh',
- 'iIchimoku',
- 'iLowest',
- 'iLow',
- 'iMACD',
- 'iMAOnArray',
- 'iMA',
- 'iMFI',
- 'iMomentumOnArray',
- 'iMomentum',
- 'iOBV',
- 'iOpen',
- 'iOsMA',
- 'iRSIOnArray',
- 'iRSI',
- 'iRVI',
- 'iSAR',
- 'iStdDevOnArray',
- 'iStdDev',
- 'iStochastic',
- 'iTime',
- 'iVolume',
- 'iWPR',
-)
-
-constants = (
- 'ACCOUNT_BALANCE',
- 'ACCOUNT_COMPANY',
- 'ACCOUNT_CREDIT',
- 'ACCOUNT_CURRENCY',
- 'ACCOUNT_EQUITY',
- 'ACCOUNT_FREEMARGIN',
- 'ACCOUNT_LEVERAGE',
- 'ACCOUNT_LIMIT_ORDERS',
- 'ACCOUNT_LOGIN',
- 'ACCOUNT_MARGIN',
- 'ACCOUNT_MARGIN_LEVEL',
- 'ACCOUNT_MARGIN_SO_CALL',
- 'ACCOUNT_MARGIN_SO_MODE',
- 'ACCOUNT_MARGIN_SO_SO',
- 'ACCOUNT_NAME',
- 'ACCOUNT_PROFIT',
- 'ACCOUNT_SERVER',
- 'ACCOUNT_STOPOUT_MODE_MONEY',
- 'ACCOUNT_STOPOUT_MODE_PERCENT',
- 'ACCOUNT_TRADE_ALLOWED',
- 'ACCOUNT_TRADE_EXPERT',
- 'ACCOUNT_TRADE_MODE',
- 'ACCOUNT_TRADE_MODE_CONTEST',
- 'ACCOUNT_TRADE_MODE_DEMO',
- 'ACCOUNT_TRADE_MODE_REAL',
- 'ALIGN_CENTER',
- 'ALIGN_LEFT',
- 'ALIGN_RIGHT',
- 'ANCHOR_BOTTOM',
- 'ANCHOR_CENTER',
- 'ANCHOR_LEFT',
- 'ANCHOR_LEFT_LOWER',
- 'ANCHOR_LEFT_UPPER',
- 'ANCHOR_LOWER',
- 'ANCHOR_RIGHT',
- 'ANCHOR_RIGHT_LOWER',
- 'ANCHOR_RIGHT_UPPER',
- 'ANCHOR_TOP',
- 'ANCHOR_UPPER',
- 'BORDER_FLAT',
- 'BORDER_RAISED',
- 'BORDER_SUNKEN',
- 'CHARTEVENT_CHART_CHANGE',
- 'CHARTEVENT_CLICK',
- 'CHARTEVENT_CUSTOM',
- 'CHARTEVENT_CUSTOM_LAST',
- 'CHARTEVENT_KEYDOWN',
- 'CHARTEVENT_MOUSE_MOVE',
- 'CHARTEVENT_OBJECT_CHANGE',
- 'CHARTEVENT_OBJECT_CLICK',
- 'CHARTEVENT_OBJECT_CREATE',
- 'CHARTEVENT_OBJECT_DELETE',
- 'CHARTEVENT_OBJECT_DRAG',
- 'CHARTEVENT_OBJECT_ENDEDIT',
- 'CHARTS_MAX',
- 'CHART_AUTOSCROLL',
- 'CHART_BARS',
- 'CHART_BEGIN',
- 'CHART_BRING_TO_TOP',
- 'CHART_CANDLES',
- 'CHART_COLOR_ASK',
- 'CHART_COLOR_BACKGROUND',
- 'CHART_COLOR_BID',
- 'CHART_COLOR_CANDLE_BEAR',
- 'CHART_COLOR_CANDLE_BULL',
- 'CHART_COLOR_CHART_DOWN',
- 'CHART_COLOR_CHART_LINE',
- 'CHART_COLOR_CHART_UP',
- 'CHART_COLOR_FOREGROUND',
- 'CHART_COLOR_GRID',
- 'CHART_COLOR_LAST',
- 'CHART_COLOR_STOP_LEVEL',
- 'CHART_COLOR_VOLUME',
- 'CHART_COMMENT',
- 'CHART_CURRENT_POS',
- 'CHART_DRAG_TRADE_LEVELS',
- 'CHART_END',
- 'CHART_EVENT_MOUSE_MOVE',
- 'CHART_EVENT_OBJECT_CREATE',
- 'CHART_EVENT_OBJECT_DELETE',
- 'CHART_FIRST_VISIBLE_BAR',
- 'CHART_FIXED_MAX',
- 'CHART_FIXED_MIN',
- 'CHART_FIXED_POSITION',
- 'CHART_FOREGROUND',
- 'CHART_HEIGHT_IN_PIXELS',
- 'CHART_IS_OBJECT',
- 'CHART_LINE',
- 'CHART_MODE',
- 'CHART_MOUSE_SCROLL',
- 'CHART_POINTS_PER_BAR',
- 'CHART_PRICE_MAX',
- 'CHART_PRICE_MIN',
- 'CHART_SCALEFIX',
- 'CHART_SCALEFIX_11',
- 'CHART_SCALE',
- 'CHART_SCALE_PT_PER_BAR',
- 'CHART_SHIFT',
- 'CHART_SHIFT_SIZE',
- 'CHART_SHOW_ASK_LINE',
- 'CHART_SHOW_BID_LINE',
- 'CHART_SHOW_DATE_SCALE',
- 'CHART_SHOW_GRID',
- 'CHART_SHOW_LAST_LINE',
- 'CHART_SHOW_OBJECT_DESCR',
- 'CHART_SHOW_OHLC',
- 'CHART_SHOW_PERIOD_SEP',
- 'CHART_SHOW_PRICE_SCALE',
- 'CHART_SHOW_TRADE_LEVELS',
- 'CHART_SHOW_VOLUMES',
- 'CHART_VISIBLE_BARS',
- 'CHART_VOLUME_HIDE',
- 'CHART_VOLUME_REAL',
- 'CHART_VOLUME_TICK',
- 'CHART_WIDTH_IN_BARS',
- 'CHART_WIDTH_IN_PIXELS',
- 'CHART_WINDOWS_TOTAL',
- 'CHART_WINDOW_HANDLE',
- 'CHART_WINDOW_IS_VISIBLE',
- 'CHART_WINDOW_YDISTANCE',
- 'CHAR_MAX',
- 'CHAR_MIN',
- 'CLR_NONE',
- 'CORNER_LEFT_LOWER',
- 'CORNER_LEFT_UPPER',
- 'CORNER_RIGHT_LOWER',
- 'CORNER_RIGHT_UPPER',
- 'CP_ACP',
- 'CP_MACCP',
- 'CP_OEMCP',
- 'CP_SYMBOL',
- 'CP_THREAD_ACP',
- 'CP_UTF7',
- 'CP_UTF8',
- 'DBL_DIG',
- 'DBL_EPSILON',
- 'DBL_MANT_DIG',
- 'DBL_MAX',
- 'DBL_MAX_10_EXP',
- 'DBL_MAX_EXP',
- 'DBL_MIN',
- 'DBL_MIN_10_EXP',
- 'DBL_MIN_EXP',
- 'DRAW_ARROW',
- 'DRAW_FILLING',
- 'DRAW_HISTOGRAM',
- 'DRAW_LINE',
- 'DRAW_NONE',
- 'DRAW_SECTION',
- 'DRAW_ZIGZAG',
- 'EMPTY',
- 'EMPTY_VALUE',
- 'ERR_ACCOUNT_DISABLED',
- 'ERR_BROKER_BUSY',
- 'ERR_COMMON_ERROR',
- 'ERR_INVALID_ACCOUNT',
- 'ERR_INVALID_PRICE',
- 'ERR_INVALID_STOPS',
- 'ERR_INVALID_TRADE_PARAMETERS',
- 'ERR_INVALID_TRADE_VOLUME',
- 'ERR_LONG_POSITIONS_ONLY_ALLOWED',
- 'ERR_MALFUNCTIONAL_TRADE',
- 'ERR_MARKET_CLOSED',
- 'ERR_NOT_ENOUGH_MONEY',
- 'ERR_NOT_ENOUGH_RIGHTS',
- 'ERR_NO_CONNECTION',
- 'ERR_NO_ERROR',
- 'ERR_NO_RESULT',
- 'ERR_OFF_QUOTES',
- 'ERR_OLD_VERSION',
- 'ERR_ORDER_LOCKED',
- 'ERR_PRICE_CHANGED',
- 'ERR_REQUOTE',
- 'ERR_SERVER_BUSY',
- 'ERR_TOO_FREQUENT_REQUESTS',
- 'ERR_TOO_MANY_REQUESTS',
- 'ERR_TRADE_CONTEXT_BUSY',
- 'ERR_TRADE_DISABLED',
- 'ERR_TRADE_EXPIRATION_DENIED',
- 'ERR_TRADE_HEDGE_PROHIBITED',
- 'ERR_TRADE_MODIFY_DENIED',
- 'ERR_TRADE_PROHIBITED_BY_FIFO',
- 'ERR_TRADE_TIMEOUT',
- 'ERR_TRADE_TOO_MANY_ORDERS',
- 'FILE_ACCESS_DATE',
- 'FILE_ANSI',
- 'FILE_BIN',
- 'FILE_COMMON',
- 'FILE_CREATE_DATE',
- 'FILE_CSV',
- 'FILE_END',
- 'FILE_EXISTS',
- 'FILE_IS_ANSI',
- 'FILE_IS_BINARY',
- 'FILE_IS_COMMON',
- 'FILE_IS_CSV',
- 'FILE_IS_READABLE',
- 'FILE_IS_TEXT',
- 'FILE_IS_WRITABLE',
- 'FILE_LINE_END',
- 'FILE_MODIFY_DATE',
- 'FILE_POSITION',
- 'FILE_READ',
- 'FILE_REWRITE',
- 'FILE_SHARE_READ',
- 'FILE_SHARE_WRITE',
- 'FILE_SIZE',
- 'FILE_TXT',
- 'FILE_UNICODE',
- 'FILE_WRITE',
- 'FLT_DIG',
- 'FLT_EPSILON',
- 'FLT_MANT_DIG',
- 'FLT_MAX',
- 'FLT_MAX_10_EXP',
- 'FLT_MAX_EXP',
- 'FLT_MIN',
- 'FLT_MIN_10_EXP',
- 'FLT_MIN_EXP',
- 'FRIDAY',
- 'GANN_DOWN_TREND',
- 'GANN_UP_TREND',
- 'IDABORT',
- 'IDCANCEL',
- 'IDCONTINUE',
- 'IDIGNORE',
- 'IDNO',
- 'IDOK',
- 'IDRETRY',
- 'IDTRYAGAIN',
- 'IDYES',
- 'INDICATOR_CALCULATIONS',
- 'INDICATOR_COLOR_INDEX',
- 'INDICATOR_DATA',
- 'INDICATOR_DIGITS',
- 'INDICATOR_HEIGHT',
- 'INDICATOR_LEVELCOLOR',
- 'INDICATOR_LEVELSTYLE',
- 'INDICATOR_LEVELS',
- 'INDICATOR_LEVELTEXT',
- 'INDICATOR_LEVELVALUE',
- 'INDICATOR_LEVELWIDTH',
- 'INDICATOR_MAXIMUM',
- 'INDICATOR_MINIMUM',
- 'INDICATOR_SHORTNAME',
- 'INT_MAX',
- 'INT_MIN',
- 'INVALID_HANDLE',
- 'IS_DEBUG_MODE',
- 'IS_PROFILE_MODE',
- 'LICENSE_DEMO',
- 'LICENSE_FREE',
- 'LICENSE_FULL',
- 'LICENSE_TIME',
- 'LONG_MAX',
- 'LONG_MIN',
- 'MB_ABORTRETRYIGNORE',
- 'MB_CANCELTRYCONTINUE',
- 'MB_DEFBUTTON1',
- 'MB_DEFBUTTON2',
- 'MB_DEFBUTTON3',
- 'MB_DEFBUTTON4',
- 'MB_ICONASTERISK',
- 'MB_ICONERROR',
- 'MB_ICONEXCLAMATION',
- 'MB_ICONHAND',
- 'MB_ICONINFORMATION',
- 'MB_ICONQUESTION',
- 'MB_ICONSTOP',
- 'MB_ICONWARNING',
- 'MB_OKCANCEL',
- 'MB_OK',
- 'MB_RETRYCANCEL',
- 'MB_YESNOCANCEL',
- 'MB_YESNO',
- 'MODE_ASK',
- 'MODE_BID',
- 'MODE_CHINKOUSPAN',
- 'MODE_CLOSE',
- 'MODE_DIGITS',
- 'MODE_EMA',
- 'MODE_EXPIRATION',
- 'MODE_FREEZELEVEL',
- 'MODE_GATORJAW',
- 'MODE_GATORLIPS',
- 'MODE_GATORTEETH',
- 'MODE_HIGH',
- 'MODE_KIJUNSEN',
- 'MODE_LOTSIZE',
- 'MODE_LOTSTEP',
- 'MODE_LOWER',
- 'MODE_LOW',
- 'MODE_LWMA',
- 'MODE_MAIN',
- 'MODE_MARGINCALCMODE',
- 'MODE_MARGINHEDGED',
- 'MODE_MARGININIT',
- 'MODE_MARGINMAINTENANCE',
- 'MODE_MARGINREQUIRED',
- 'MODE_MAXLOT',
- 'MODE_MINLOT',
- 'MODE_MINUSDI',
- 'MODE_OPEN',
- 'MODE_PLUSDI',
- 'MODE_POINT',
- 'MODE_PROFITCALCMODE',
- 'MODE_SENKOUSPANA',
- 'MODE_SENKOUSPANB',
- 'MODE_SIGNAL',
- 'MODE_SMA',
- 'MODE_SMMA',
- 'MODE_SPREAD',
- 'MODE_STARTING',
- 'MODE_STOPLEVEL',
- 'MODE_SWAPLONG',
- 'MODE_SWAPSHORT',
- 'MODE_SWAPTYPE',
- 'MODE_TENKANSEN',
- 'MODE_TICKSIZE',
- 'MODE_TICKVALUE',
- 'MODE_TIME',
- 'MODE_TRADEALLOWED',
- 'MODE_UPPER',
- 'MODE_VOLUME',
- 'MONDAY',
- 'MQL_DEBUG',
- 'MQL_DLLS_ALLOWED',
- 'MQL_FRAME_MODE',
- 'MQL_LICENSE_TYPE',
- 'MQL_OPTIMIZATION',
- 'MQL_PROFILER',
- 'MQL_PROGRAM_NAME',
- 'MQL_PROGRAM_PATH',
- 'MQL_PROGRAM_TYPE',
- 'MQL_TESTER',
- 'MQL_TRADE_ALLOWED',
- 'MQL_VISUAL_MODE',
- 'M_1_PI',
- 'M_2_PI',
- 'M_2_SQRTPI',
- 'M_E',
- 'M_LN2',
- 'M_LN10',
- 'M_LOG2E',
- 'M_LOG10E',
- 'M_PI',
- 'M_PI_2',
- 'M_PI_4',
- 'M_SQRT1_2',
- 'M_SQRT2',
- 'NULL',
- 'OBJPROP_ALIGN',
- 'OBJPROP_ANCHOR',
- 'OBJPROP_ANGLE',
- 'OBJPROP_ARROWCODE',
- 'OBJPROP_BACK',
- 'OBJPROP_BGCOLOR',
- 'OBJPROP_BMPFILE',
- 'OBJPROP_BORDER_COLOR',
- 'OBJPROP_BORDER_TYPE',
- 'OBJPROP_CHART_ID',
- 'OBJPROP_CHART_SCALE',
- 'OBJPROP_COLOR',
- 'OBJPROP_CORNER',
- 'OBJPROP_CREATETIME',
- 'OBJPROP_DATE_SCALE',
- 'OBJPROP_DEVIATION',
- 'OBJPROP_DRAWLINES',
- 'OBJPROP_ELLIPSE',
- 'OBJPROP_FIBOLEVELS',
- 'OBJPROP_FILL',
- 'OBJPROP_FIRSTLEVEL',
- 'OBJPROP_FONTSIZE',
- 'OBJPROP_FONT',
- 'OBJPROP_HIDDEN',
- 'OBJPROP_LEVELCOLOR',
- 'OBJPROP_LEVELSTYLE',
- 'OBJPROP_LEVELS',
- 'OBJPROP_LEVELTEXT',
- 'OBJPROP_LEVELVALUE',
- 'OBJPROP_LEVELWIDTH',
- 'OBJPROP_NAME',
- 'OBJPROP_PERIOD',
- 'OBJPROP_PRICE1',
- 'OBJPROP_PRICE2',
- 'OBJPROP_PRICE3',
- 'OBJPROP_PRICE',
- 'OBJPROP_PRICE_SCALE',
- 'OBJPROP_RAY',
- 'OBJPROP_RAY_RIGHT',
- 'OBJPROP_READONLY',
- 'OBJPROP_SCALE',
- 'OBJPROP_SELECTABLE',
- 'OBJPROP_SELECTED',
- 'OBJPROP_STATE',
- 'OBJPROP_STYLE',
- 'OBJPROP_SYMBOL',
- 'OBJPROP_TEXT',
- 'OBJPROP_TIME1',
- 'OBJPROP_TIME2',
- 'OBJPROP_TIME3',
- 'OBJPROP_TIMEFRAMES',
- 'OBJPROP_TIME',
- 'OBJPROP_TOOLTIP',
- 'OBJPROP_TYPE',
- 'OBJPROP_WIDTH',
- 'OBJPROP_XDISTANCE',
- 'OBJPROP_XOFFSET',
- 'OBJPROP_XSIZE',
- 'OBJPROP_YDISTANCE',
- 'OBJPROP_YOFFSET',
- 'OBJPROP_YSIZE',
- 'OBJPROP_ZORDER',
- 'OBJ_ALL_PERIODS',
- 'OBJ_ARROW',
- 'OBJ_ARROW_BUY',
- 'OBJ_ARROW_CHECK',
- 'OBJ_ARROW_DOWN',
- 'OBJ_ARROW_LEFT_PRICE',
- 'OBJ_ARROW_RIGHT_PRICE',
- 'OBJ_ARROW_SELL',
- 'OBJ_ARROW_STOP',
- 'OBJ_ARROW_THUMB_DOWN',
- 'OBJ_ARROW_THUMB_UP',
- 'OBJ_ARROW_UP',
- 'OBJ_BITMAP',
- 'OBJ_BITMAP_LABEL',
- 'OBJ_BUTTON',
- 'OBJ_CHANNEL',
- 'OBJ_CYCLES',
- 'OBJ_EDIT',
- 'OBJ_ELLIPSE',
- 'OBJ_EVENT',
- 'OBJ_EXPANSION',
- 'OBJ_FIBOARC',
- 'OBJ_FIBOCHANNEL',
- 'OBJ_FIBOFAN',
- 'OBJ_FIBOTIMES',
- 'OBJ_FIBO',
- 'OBJ_GANNFAN',
- 'OBJ_GANNGRID',
- 'OBJ_GANNLINE',
- 'OBJ_HLINE',
- 'OBJ_LABEL',
- 'OBJ_NO_PERIODS',
- 'OBJ_PERIOD_D1',
- 'OBJ_PERIOD_H1',
- 'OBJ_PERIOD_H4',
- 'OBJ_PERIOD_M1',
- 'OBJ_PERIOD_M5',
- 'OBJ_PERIOD_M15',
- 'OBJ_PERIOD_M30',
- 'OBJ_PERIOD_MN1',
- 'OBJ_PERIOD_W1',
- 'OBJ_PITCHFORK',
- 'OBJ_RECTANGLE',
- 'OBJ_RECTANGLE_LABEL',
- 'OBJ_REGRESSION',
- 'OBJ_STDDEVCHANNEL',
- 'OBJ_TEXT',
- 'OBJ_TRENDBYANGLE',
- 'OBJ_TREND',
- 'OBJ_TRIANGLE',
- 'OBJ_VLINE',
- 'OP_BUYLIMIT',
- 'OP_BUYSTOP',
- 'OP_BUY',
- 'OP_SELLLIMIT',
- 'OP_SELLSTOP',
- 'OP_SELL',
- 'PERIOD_CURRENT',
- 'PERIOD_D1',
- 'PERIOD_H1',
- 'PERIOD_H2',
- 'PERIOD_H3',
- 'PERIOD_H4',
- 'PERIOD_H6',
- 'PERIOD_H8',
- 'PERIOD_H12',
- 'PERIOD_M1',
- 'PERIOD_M2',
- 'PERIOD_M3',
- 'PERIOD_M4',
- 'PERIOD_M5',
- 'PERIOD_M6',
- 'PERIOD_M10',
- 'PERIOD_M12',
- 'PERIOD_M15',
- 'PERIOD_M20',
- 'PERIOD_M30',
- 'PERIOD_MN1',
- 'PERIOD_W1',
- 'POINTER_AUTOMATIC',
- 'POINTER_DYNAMIC',
+ :license: BSD, see LICENSE for details.
+"""
+types = (
+ 'AccountBalance',
+ 'AccountCompany',
+ 'AccountCredit',
+ 'AccountCurrency',
+ 'AccountEquity',
+ 'AccountFreeMarginCheck',
+ 'AccountFreeMarginMode',
+ 'AccountFreeMargin',
+ 'AccountInfoDouble',
+ 'AccountInfoInteger',
+ 'AccountInfoString',
+ 'AccountLeverage',
+ 'AccountMargin',
+ 'AccountName',
+ 'AccountNumber',
+ 'AccountProfit',
+ 'AccountServer',
+ 'AccountStopoutLevel',
+ 'AccountStopoutMode',
+ 'Alert',
+ 'ArrayBsearch',
+ 'ArrayCompare',
+ 'ArrayCopyRates',
+ 'ArrayCopySeries',
+ 'ArrayCopy',
+ 'ArrayDimension',
+ 'ArrayFill',
+ 'ArrayFree',
+ 'ArrayGetAsSeries',
+ 'ArrayInitialize',
+ 'ArrayIsDynamic',
+ 'ArrayIsSeries',
+ 'ArrayMaximum',
+ 'ArrayMinimum',
+ 'ArrayRange',
+ 'ArrayResize',
+ 'ArraySetAsSeries',
+ 'ArraySize',
+ 'ArraySort',
+ 'CharArrayToString',
+ 'CharToString',
+ 'CharToStr',
+ 'CheckPointer',
+ 'ColorToARGB',
+ 'ColorToString',
+ 'Comment',
+ 'CopyClose',
+ 'CopyHigh',
+ 'CopyLow',
+ 'CopyOpen',
+ 'CopyRates',
+ 'CopyRealVolume',
+ 'CopySpread',
+ 'CopyTickVolume',
+ 'CopyTime',
+ 'DayOfWeek',
+ 'DayOfYear',
+ 'Day',
+ 'DebugBreak',
+ 'Digits',
+ 'DoubleToString',
+ 'DoubleToStr',
+ 'EnumToString',
+ 'EventChartCustom',
+ 'EventKillTimer',
+ 'EventSetMillisecondTimer',
+ 'EventSetTimer',
+ 'ExpertRemove',
+ 'FileClose',
+ 'FileCopy',
+ 'FileDelete',
+ 'FileFindClose',
+ 'FileFindFirst',
+ 'FileFindNext',
+ 'FileFlush',
+ 'FileGetInteger',
+ 'FileIsEnding',
+ 'FileIsExist',
+ 'FileIsLineEnding',
+ 'FileMove',
+ 'FileOpenHistory',
+ 'FileOpen',
+ 'FileReadArray',
+ 'FileReadBool',
+ 'FileReadDatetime',
+ 'FileReadDouble',
+ 'FileReadFloat',
+ 'FileReadInteger',
+ 'FileReadLong',
+ 'FileReadNumber',
+ 'FileReadString',
+ 'FileReadStruct',
+ 'FileSeek',
+ 'FileSize',
+ 'FileTell',
+ 'FileWriteArray',
+ 'FileWriteDouble',
+ 'FileWriteFloat',
+ 'FileWriteInteger',
+ 'FileWriteLong',
+ 'FileWriteString',
+ 'FileWriteStruct',
+ 'FileWrite',
+ 'FolderClean',
+ 'FolderCreate',
+ 'FolderDelete',
+ 'GetLastError',
+ 'GetPointer',
+ 'GetTickCount',
+ 'GlobalVariableCheck',
+ 'GlobalVariableDel',
+ 'GlobalVariableGet',
+ 'GlobalVariableName',
+ 'GlobalVariableSetOnCondition',
+ 'GlobalVariableSet',
+ 'GlobalVariableTemp',
+ 'GlobalVariableTime',
+ 'GlobalVariablesDeleteAll',
+ 'GlobalVariablesFlush',
+ 'GlobalVariablesTotal',
+ 'HideTestIndicators',
+ 'Hour',
+ 'IndicatorBuffers',
+ 'IndicatorCounted',
+ 'IndicatorDigits',
+ 'IndicatorSetDouble',
+ 'IndicatorSetInteger',
+ 'IndicatorSetString',
+ 'IndicatorShortName',
+ 'IntegerToString',
+ 'IsConnected',
+ 'IsDemo',
+ 'IsDllsAllowed',
+ 'IsExpertEnabled',
+ 'IsLibrariesAllowed',
+ 'IsOptimization',
+ 'IsStopped',
+ 'IsTesting',
+ 'IsTradeAllowed',
+ 'IsTradeContextBusy',
+ 'IsVisualMode',
+ 'MQLInfoInteger',
+ 'MQLInfoString',
+ 'MarketInfo',
+ 'MathAbs',
+ 'MathArccos',
+ 'MathArcsin',
+ 'MathArctan',
+ 'MathCeil',
+ 'MathCos',
+ 'MathExp',
+ 'MathFloor',
+ 'MathIsValidNumber',
+ 'MathLog',
+ 'MathMax',
+ 'MathMin',
+ 'MathMod',
+ 'MathPow',
+ 'MathRand',
+ 'MathRound',
+ 'MathSin',
+ 'MathSqrt',
+ 'MathSrand',
+ 'MathTan',
+ 'MessageBox',
+ 'Minute',
+ 'Month',
+ 'NormalizeDouble',
+ 'ObjectCreate',
+ 'ObjectDelete',
+ 'ObjectDescription',
+ 'ObjectFind',
+ 'ObjectGetDouble',
+ 'ObjectGetFiboDescription',
+ 'ObjectGetInteger',
+ 'ObjectGetShiftByValue',
+ 'ObjectGetString',
+ 'ObjectGetTimeByValue',
+ 'ObjectGetValueByShift',
+ 'ObjectGetValueByTime',
+ 'ObjectGet',
+ 'ObjectMove',
+ 'ObjectName',
+ 'ObjectSetDouble',
+ 'ObjectSetFiboDescription',
+ 'ObjectSetInteger',
+ 'ObjectSetString',
+ 'ObjectSetText',
+ 'ObjectSet',
+ 'ObjectType',
+ 'ObjectsDeleteAll',
+ 'ObjectsTotal',
+ 'OrderCloseBy',
+ 'OrderClosePrice',
+ 'OrderCloseTime',
+ 'OrderClose',
+ 'OrderComment',
+ 'OrderCommission',
+ 'OrderDelete',
+ 'OrderExpiration',
+ 'OrderLots',
+ 'OrderMagicNumber',
+ 'OrderModify',
+ 'OrderOpenPrice',
+ 'OrderOpenTime',
+ 'OrderPrint',
+ 'OrderProfit',
+ 'OrderSelect',
+ 'OrderSend',
+ 'OrderStopLoss',
+ 'OrderSwap',
+ 'OrderSymbol',
+ 'OrderTakeProfit',
+ 'OrderTicket',
+ 'OrderType',
+ 'OrdersHistoryTotal',
+ 'OrdersTotal',
+ 'PeriodSeconds',
+ 'Period',
+ 'PlaySound',
+ 'Point',
+ 'PrintFormat',
+ 'Print',
+ 'RefreshRates',
+ 'ResetLastError',
+ 'ResourceCreate',
+ 'ResourceFree',
+ 'ResourceReadImage',
+ 'ResourceSave',
+ 'Seconds',
+ 'SendFTP',
+ 'SendMail',
+ 'SendNotification',
+ 'SeriesInfoInteger',
+ 'SetIndexArrow',
+ 'SetIndexBuffer',
+ 'SetIndexDrawBegin',
+ 'SetIndexEmptyValue',
+ 'SetIndexLabel',
+ 'SetIndexShift',
+ 'SetIndexStyle',
+ 'SetLevelStyle',
+ 'SetLevelValue',
+ 'ShortArrayToString',
+ 'ShortToString',
+ 'Sleep',
+ 'StrToDouble',
+ 'StrToInteger',
+ 'StrToTime',
+ 'StringAdd',
+ 'StringBufferLen',
+ 'StringCompare',
+ 'StringConcatenate',
+ 'StringFill',
+ 'StringFind',
+ 'StringFormat',
+ 'StringGetCharacter',
+ 'StringGetChar',
+ 'StringInit',
+ 'StringLen',
+ 'StringReplace',
+ 'StringSetCharacter',
+ 'StringSetChar',
+ 'StringSplit',
+ 'StringSubstr',
+ 'StringToCharArray',
+ 'StringToColor',
+ 'StringToDouble',
+ 'StringToInteger',
+ 'StringToLower',
+ 'StringToShortArray',
+ 'StringToTime',
+ 'StringToUpper',
+ 'StringTrimLeft',
+ 'StringTrimRight',
+ 'StructToTime',
+ 'SymbolInfoDouble',
+ 'SymbolInfoInteger',
+ 'SymbolInfoSessionQuote',
+ 'SymbolInfoSessionTrade',
+ 'SymbolInfoString',
+ 'SymbolInfoTick',
+ 'SymbolIsSynchronized',
+ 'SymbolName',
+ 'SymbolSelect',
+ 'SymbolsTotal',
+ 'Symbol',
+ 'TerminalClose',
+ 'TerminalCompany',
+ 'TerminalName',
+ 'TerminalPath',
+ 'TesterStatistics',
+ 'TextGetSize',
+ 'TextOut',
+ 'TextSetFont',
+ 'TimeCurrent',
+ 'TimeDayOfWeek',
+ 'TimeDayOfYear',
+ 'TimeDaylightSavings',
+ 'TimeDay',
+ 'TimeGMTOffset',
+ 'TimeGMT',
+ 'TimeHour',
+ 'TimeLocal',
+ 'TimeMinute',
+ 'TimeMonth',
+ 'TimeSeconds',
+ 'TimeToString',
+ 'TimeToStruct',
+ 'TimeToStr',
+ 'TimeTradeServer',
+ 'TimeYear',
+ 'UninitializeReason',
+ 'WindowBarsPerChart',
+ 'WindowExpertName',
+ 'WindowFind',
+ 'WindowFirstVisibleBar',
+ 'WindowHandle',
+ 'WindowIsVisible',
+ 'WindowOnDropped',
+ 'WindowPriceMax',
+ 'WindowPriceMin',
+ 'WindowPriceOnDropped',
+ 'WindowRedraw',
+ 'WindowScreenShot',
+ 'WindowTimeOnDropped',
+ 'WindowXOnDropped',
+ 'WindowYOnDropped',
+ 'WindowsTotal',
+ 'Year',
+ 'ZeroMemory',
+ 'iAC',
+ 'iADX',
+ 'iAD',
+ 'iAO',
+ 'iATR',
+ 'iAlligator',
+ 'iBWMFI',
+ 'iBandsOnArray',
+ 'iBands',
+ 'iBarShift',
+ 'iBars',
+ 'iBearsPower',
+ 'iBullsPower',
+ 'iCCIOnArray',
+ 'iCCI',
+ 'iClose',
+ 'iCustom',
+ 'iDeMarker',
+ 'iEnvelopesOnArray',
+ 'iEnvelopes',
+ 'iForce',
+ 'iFractals',
+ 'iGator',
+ 'iHighest',
+ 'iHigh',
+ 'iIchimoku',
+ 'iLowest',
+ 'iLow',
+ 'iMACD',
+ 'iMAOnArray',
+ 'iMA',
+ 'iMFI',
+ 'iMomentumOnArray',
+ 'iMomentum',
+ 'iOBV',
+ 'iOpen',
+ 'iOsMA',
+ 'iRSIOnArray',
+ 'iRSI',
+ 'iRVI',
+ 'iSAR',
+ 'iStdDevOnArray',
+ 'iStdDev',
+ 'iStochastic',
+ 'iTime',
+ 'iVolume',
+ 'iWPR',
+)
+
+constants = (
+ 'ACCOUNT_BALANCE',
+ 'ACCOUNT_COMPANY',
+ 'ACCOUNT_CREDIT',
+ 'ACCOUNT_CURRENCY',
+ 'ACCOUNT_EQUITY',
+ 'ACCOUNT_FREEMARGIN',
+ 'ACCOUNT_LEVERAGE',
+ 'ACCOUNT_LIMIT_ORDERS',
+ 'ACCOUNT_LOGIN',
+ 'ACCOUNT_MARGIN',
+ 'ACCOUNT_MARGIN_LEVEL',
+ 'ACCOUNT_MARGIN_SO_CALL',
+ 'ACCOUNT_MARGIN_SO_MODE',
+ 'ACCOUNT_MARGIN_SO_SO',
+ 'ACCOUNT_NAME',
+ 'ACCOUNT_PROFIT',
+ 'ACCOUNT_SERVER',
+ 'ACCOUNT_STOPOUT_MODE_MONEY',
+ 'ACCOUNT_STOPOUT_MODE_PERCENT',
+ 'ACCOUNT_TRADE_ALLOWED',
+ 'ACCOUNT_TRADE_EXPERT',
+ 'ACCOUNT_TRADE_MODE',
+ 'ACCOUNT_TRADE_MODE_CONTEST',
+ 'ACCOUNT_TRADE_MODE_DEMO',
+ 'ACCOUNT_TRADE_MODE_REAL',
+ 'ALIGN_CENTER',
+ 'ALIGN_LEFT',
+ 'ALIGN_RIGHT',
+ 'ANCHOR_BOTTOM',
+ 'ANCHOR_CENTER',
+ 'ANCHOR_LEFT',
+ 'ANCHOR_LEFT_LOWER',
+ 'ANCHOR_LEFT_UPPER',
+ 'ANCHOR_LOWER',
+ 'ANCHOR_RIGHT',
+ 'ANCHOR_RIGHT_LOWER',
+ 'ANCHOR_RIGHT_UPPER',
+ 'ANCHOR_TOP',
+ 'ANCHOR_UPPER',
+ 'BORDER_FLAT',
+ 'BORDER_RAISED',
+ 'BORDER_SUNKEN',
+ 'CHARTEVENT_CHART_CHANGE',
+ 'CHARTEVENT_CLICK',
+ 'CHARTEVENT_CUSTOM',
+ 'CHARTEVENT_CUSTOM_LAST',
+ 'CHARTEVENT_KEYDOWN',
+ 'CHARTEVENT_MOUSE_MOVE',
+ 'CHARTEVENT_OBJECT_CHANGE',
+ 'CHARTEVENT_OBJECT_CLICK',
+ 'CHARTEVENT_OBJECT_CREATE',
+ 'CHARTEVENT_OBJECT_DELETE',
+ 'CHARTEVENT_OBJECT_DRAG',
+ 'CHARTEVENT_OBJECT_ENDEDIT',
+ 'CHARTS_MAX',
+ 'CHART_AUTOSCROLL',
+ 'CHART_BARS',
+ 'CHART_BEGIN',
+ 'CHART_BRING_TO_TOP',
+ 'CHART_CANDLES',
+ 'CHART_COLOR_ASK',
+ 'CHART_COLOR_BACKGROUND',
+ 'CHART_COLOR_BID',
+ 'CHART_COLOR_CANDLE_BEAR',
+ 'CHART_COLOR_CANDLE_BULL',
+ 'CHART_COLOR_CHART_DOWN',
+ 'CHART_COLOR_CHART_LINE',
+ 'CHART_COLOR_CHART_UP',
+ 'CHART_COLOR_FOREGROUND',
+ 'CHART_COLOR_GRID',
+ 'CHART_COLOR_LAST',
+ 'CHART_COLOR_STOP_LEVEL',
+ 'CHART_COLOR_VOLUME',
+ 'CHART_COMMENT',
+ 'CHART_CURRENT_POS',
+ 'CHART_DRAG_TRADE_LEVELS',
+ 'CHART_END',
+ 'CHART_EVENT_MOUSE_MOVE',
+ 'CHART_EVENT_OBJECT_CREATE',
+ 'CHART_EVENT_OBJECT_DELETE',
+ 'CHART_FIRST_VISIBLE_BAR',
+ 'CHART_FIXED_MAX',
+ 'CHART_FIXED_MIN',
+ 'CHART_FIXED_POSITION',
+ 'CHART_FOREGROUND',
+ 'CHART_HEIGHT_IN_PIXELS',
+ 'CHART_IS_OBJECT',
+ 'CHART_LINE',
+ 'CHART_MODE',
+ 'CHART_MOUSE_SCROLL',
+ 'CHART_POINTS_PER_BAR',
+ 'CHART_PRICE_MAX',
+ 'CHART_PRICE_MIN',
+ 'CHART_SCALEFIX',
+ 'CHART_SCALEFIX_11',
+ 'CHART_SCALE',
+ 'CHART_SCALE_PT_PER_BAR',
+ 'CHART_SHIFT',
+ 'CHART_SHIFT_SIZE',
+ 'CHART_SHOW_ASK_LINE',
+ 'CHART_SHOW_BID_LINE',
+ 'CHART_SHOW_DATE_SCALE',
+ 'CHART_SHOW_GRID',
+ 'CHART_SHOW_LAST_LINE',
+ 'CHART_SHOW_OBJECT_DESCR',
+ 'CHART_SHOW_OHLC',
+ 'CHART_SHOW_PERIOD_SEP',
+ 'CHART_SHOW_PRICE_SCALE',
+ 'CHART_SHOW_TRADE_LEVELS',
+ 'CHART_SHOW_VOLUMES',
+ 'CHART_VISIBLE_BARS',
+ 'CHART_VOLUME_HIDE',
+ 'CHART_VOLUME_REAL',
+ 'CHART_VOLUME_TICK',
+ 'CHART_WIDTH_IN_BARS',
+ 'CHART_WIDTH_IN_PIXELS',
+ 'CHART_WINDOWS_TOTAL',
+ 'CHART_WINDOW_HANDLE',
+ 'CHART_WINDOW_IS_VISIBLE',
+ 'CHART_WINDOW_YDISTANCE',
+ 'CHAR_MAX',
+ 'CHAR_MIN',
+ 'CLR_NONE',
+ 'CORNER_LEFT_LOWER',
+ 'CORNER_LEFT_UPPER',
+ 'CORNER_RIGHT_LOWER',
+ 'CORNER_RIGHT_UPPER',
+ 'CP_ACP',
+ 'CP_MACCP',
+ 'CP_OEMCP',
+ 'CP_SYMBOL',
+ 'CP_THREAD_ACP',
+ 'CP_UTF7',
+ 'CP_UTF8',
+ 'DBL_DIG',
+ 'DBL_EPSILON',
+ 'DBL_MANT_DIG',
+ 'DBL_MAX',
+ 'DBL_MAX_10_EXP',
+ 'DBL_MAX_EXP',
+ 'DBL_MIN',
+ 'DBL_MIN_10_EXP',
+ 'DBL_MIN_EXP',
+ 'DRAW_ARROW',
+ 'DRAW_FILLING',
+ 'DRAW_HISTOGRAM',
+ 'DRAW_LINE',
+ 'DRAW_NONE',
+ 'DRAW_SECTION',
+ 'DRAW_ZIGZAG',
+ 'EMPTY',
+ 'EMPTY_VALUE',
+ 'ERR_ACCOUNT_DISABLED',
+ 'ERR_BROKER_BUSY',
+ 'ERR_COMMON_ERROR',
+ 'ERR_INVALID_ACCOUNT',
+ 'ERR_INVALID_PRICE',
+ 'ERR_INVALID_STOPS',
+ 'ERR_INVALID_TRADE_PARAMETERS',
+ 'ERR_INVALID_TRADE_VOLUME',
+ 'ERR_LONG_POSITIONS_ONLY_ALLOWED',
+ 'ERR_MALFUNCTIONAL_TRADE',
+ 'ERR_MARKET_CLOSED',
+ 'ERR_NOT_ENOUGH_MONEY',
+ 'ERR_NOT_ENOUGH_RIGHTS',
+ 'ERR_NO_CONNECTION',
+ 'ERR_NO_ERROR',
+ 'ERR_NO_RESULT',
+ 'ERR_OFF_QUOTES',
+ 'ERR_OLD_VERSION',
+ 'ERR_ORDER_LOCKED',
+ 'ERR_PRICE_CHANGED',
+ 'ERR_REQUOTE',
+ 'ERR_SERVER_BUSY',
+ 'ERR_TOO_FREQUENT_REQUESTS',
+ 'ERR_TOO_MANY_REQUESTS',
+ 'ERR_TRADE_CONTEXT_BUSY',
+ 'ERR_TRADE_DISABLED',
+ 'ERR_TRADE_EXPIRATION_DENIED',
+ 'ERR_TRADE_HEDGE_PROHIBITED',
+ 'ERR_TRADE_MODIFY_DENIED',
+ 'ERR_TRADE_PROHIBITED_BY_FIFO',
+ 'ERR_TRADE_TIMEOUT',
+ 'ERR_TRADE_TOO_MANY_ORDERS',
+ 'FILE_ACCESS_DATE',
+ 'FILE_ANSI',
+ 'FILE_BIN',
+ 'FILE_COMMON',
+ 'FILE_CREATE_DATE',
+ 'FILE_CSV',
+ 'FILE_END',
+ 'FILE_EXISTS',
+ 'FILE_IS_ANSI',
+ 'FILE_IS_BINARY',
+ 'FILE_IS_COMMON',
+ 'FILE_IS_CSV',
+ 'FILE_IS_READABLE',
+ 'FILE_IS_TEXT',
+ 'FILE_IS_WRITABLE',
+ 'FILE_LINE_END',
+ 'FILE_MODIFY_DATE',
+ 'FILE_POSITION',
+ 'FILE_READ',
+ 'FILE_REWRITE',
+ 'FILE_SHARE_READ',
+ 'FILE_SHARE_WRITE',
+ 'FILE_SIZE',
+ 'FILE_TXT',
+ 'FILE_UNICODE',
+ 'FILE_WRITE',
+ 'FLT_DIG',
+ 'FLT_EPSILON',
+ 'FLT_MANT_DIG',
+ 'FLT_MAX',
+ 'FLT_MAX_10_EXP',
+ 'FLT_MAX_EXP',
+ 'FLT_MIN',
+ 'FLT_MIN_10_EXP',
+ 'FLT_MIN_EXP',
+ 'FRIDAY',
+ 'GANN_DOWN_TREND',
+ 'GANN_UP_TREND',
+ 'IDABORT',
+ 'IDCANCEL',
+ 'IDCONTINUE',
+ 'IDIGNORE',
+ 'IDNO',
+ 'IDOK',
+ 'IDRETRY',
+ 'IDTRYAGAIN',
+ 'IDYES',
+ 'INDICATOR_CALCULATIONS',
+ 'INDICATOR_COLOR_INDEX',
+ 'INDICATOR_DATA',
+ 'INDICATOR_DIGITS',
+ 'INDICATOR_HEIGHT',
+ 'INDICATOR_LEVELCOLOR',
+ 'INDICATOR_LEVELSTYLE',
+ 'INDICATOR_LEVELS',
+ 'INDICATOR_LEVELTEXT',
+ 'INDICATOR_LEVELVALUE',
+ 'INDICATOR_LEVELWIDTH',
+ 'INDICATOR_MAXIMUM',
+ 'INDICATOR_MINIMUM',
+ 'INDICATOR_SHORTNAME',
+ 'INT_MAX',
+ 'INT_MIN',
+ 'INVALID_HANDLE',
+ 'IS_DEBUG_MODE',
+ 'IS_PROFILE_MODE',
+ 'LICENSE_DEMO',
+ 'LICENSE_FREE',
+ 'LICENSE_FULL',
+ 'LICENSE_TIME',
+ 'LONG_MAX',
+ 'LONG_MIN',
+ 'MB_ABORTRETRYIGNORE',
+ 'MB_CANCELTRYCONTINUE',
+ 'MB_DEFBUTTON1',
+ 'MB_DEFBUTTON2',
+ 'MB_DEFBUTTON3',
+ 'MB_DEFBUTTON4',
+ 'MB_ICONASTERISK',
+ 'MB_ICONERROR',
+ 'MB_ICONEXCLAMATION',
+ 'MB_ICONHAND',
+ 'MB_ICONINFORMATION',
+ 'MB_ICONQUESTION',
+ 'MB_ICONSTOP',
+ 'MB_ICONWARNING',
+ 'MB_OKCANCEL',
+ 'MB_OK',
+ 'MB_RETRYCANCEL',
+ 'MB_YESNOCANCEL',
+ 'MB_YESNO',
+ 'MODE_ASK',
+ 'MODE_BID',
+ 'MODE_CHINKOUSPAN',
+ 'MODE_CLOSE',
+ 'MODE_DIGITS',
+ 'MODE_EMA',
+ 'MODE_EXPIRATION',
+ 'MODE_FREEZELEVEL',
+ 'MODE_GATORJAW',
+ 'MODE_GATORLIPS',
+ 'MODE_GATORTEETH',
+ 'MODE_HIGH',
+ 'MODE_KIJUNSEN',
+ 'MODE_LOTSIZE',
+ 'MODE_LOTSTEP',
+ 'MODE_LOWER',
+ 'MODE_LOW',
+ 'MODE_LWMA',
+ 'MODE_MAIN',
+ 'MODE_MARGINCALCMODE',
+ 'MODE_MARGINHEDGED',
+ 'MODE_MARGININIT',
+ 'MODE_MARGINMAINTENANCE',
+ 'MODE_MARGINREQUIRED',
+ 'MODE_MAXLOT',
+ 'MODE_MINLOT',
+ 'MODE_MINUSDI',
+ 'MODE_OPEN',
+ 'MODE_PLUSDI',
+ 'MODE_POINT',
+ 'MODE_PROFITCALCMODE',
+ 'MODE_SENKOUSPANA',
+ 'MODE_SENKOUSPANB',
+ 'MODE_SIGNAL',
+ 'MODE_SMA',
+ 'MODE_SMMA',
+ 'MODE_SPREAD',
+ 'MODE_STARTING',
+ 'MODE_STOPLEVEL',
+ 'MODE_SWAPLONG',
+ 'MODE_SWAPSHORT',
+ 'MODE_SWAPTYPE',
+ 'MODE_TENKANSEN',
+ 'MODE_TICKSIZE',
+ 'MODE_TICKVALUE',
+ 'MODE_TIME',
+ 'MODE_TRADEALLOWED',
+ 'MODE_UPPER',
+ 'MODE_VOLUME',
+ 'MONDAY',
+ 'MQL_DEBUG',
+ 'MQL_DLLS_ALLOWED',
+ 'MQL_FRAME_MODE',
+ 'MQL_LICENSE_TYPE',
+ 'MQL_OPTIMIZATION',
+ 'MQL_PROFILER',
+ 'MQL_PROGRAM_NAME',
+ 'MQL_PROGRAM_PATH',
+ 'MQL_PROGRAM_TYPE',
+ 'MQL_TESTER',
+ 'MQL_TRADE_ALLOWED',
+ 'MQL_VISUAL_MODE',
+ 'M_1_PI',
+ 'M_2_PI',
+ 'M_2_SQRTPI',
+ 'M_E',
+ 'M_LN2',
+ 'M_LN10',
+ 'M_LOG2E',
+ 'M_LOG10E',
+ 'M_PI',
+ 'M_PI_2',
+ 'M_PI_4',
+ 'M_SQRT1_2',
+ 'M_SQRT2',
+ 'NULL',
+ 'OBJPROP_ALIGN',
+ 'OBJPROP_ANCHOR',
+ 'OBJPROP_ANGLE',
+ 'OBJPROP_ARROWCODE',
+ 'OBJPROP_BACK',
+ 'OBJPROP_BGCOLOR',
+ 'OBJPROP_BMPFILE',
+ 'OBJPROP_BORDER_COLOR',
+ 'OBJPROP_BORDER_TYPE',
+ 'OBJPROP_CHART_ID',
+ 'OBJPROP_CHART_SCALE',
+ 'OBJPROP_COLOR',
+ 'OBJPROP_CORNER',
+ 'OBJPROP_CREATETIME',
+ 'OBJPROP_DATE_SCALE',
+ 'OBJPROP_DEVIATION',
+ 'OBJPROP_DRAWLINES',
+ 'OBJPROP_ELLIPSE',
+ 'OBJPROP_FIBOLEVELS',
+ 'OBJPROP_FILL',
+ 'OBJPROP_FIRSTLEVEL',
+ 'OBJPROP_FONTSIZE',
+ 'OBJPROP_FONT',
+ 'OBJPROP_HIDDEN',
+ 'OBJPROP_LEVELCOLOR',
+ 'OBJPROP_LEVELSTYLE',
+ 'OBJPROP_LEVELS',
+ 'OBJPROP_LEVELTEXT',
+ 'OBJPROP_LEVELVALUE',
+ 'OBJPROP_LEVELWIDTH',
+ 'OBJPROP_NAME',
+ 'OBJPROP_PERIOD',
+ 'OBJPROP_PRICE1',
+ 'OBJPROP_PRICE2',
+ 'OBJPROP_PRICE3',
+ 'OBJPROP_PRICE',
+ 'OBJPROP_PRICE_SCALE',
+ 'OBJPROP_RAY',
+ 'OBJPROP_RAY_RIGHT',
+ 'OBJPROP_READONLY',
+ 'OBJPROP_SCALE',
+ 'OBJPROP_SELECTABLE',
+ 'OBJPROP_SELECTED',
+ 'OBJPROP_STATE',
+ 'OBJPROP_STYLE',
+ 'OBJPROP_SYMBOL',
+ 'OBJPROP_TEXT',
+ 'OBJPROP_TIME1',
+ 'OBJPROP_TIME2',
+ 'OBJPROP_TIME3',
+ 'OBJPROP_TIMEFRAMES',
+ 'OBJPROP_TIME',
+ 'OBJPROP_TOOLTIP',
+ 'OBJPROP_TYPE',
+ 'OBJPROP_WIDTH',
+ 'OBJPROP_XDISTANCE',
+ 'OBJPROP_XOFFSET',
+ 'OBJPROP_XSIZE',
+ 'OBJPROP_YDISTANCE',
+ 'OBJPROP_YOFFSET',
+ 'OBJPROP_YSIZE',
+ 'OBJPROP_ZORDER',
+ 'OBJ_ALL_PERIODS',
+ 'OBJ_ARROW',
+ 'OBJ_ARROW_BUY',
+ 'OBJ_ARROW_CHECK',
+ 'OBJ_ARROW_DOWN',
+ 'OBJ_ARROW_LEFT_PRICE',
+ 'OBJ_ARROW_RIGHT_PRICE',
+ 'OBJ_ARROW_SELL',
+ 'OBJ_ARROW_STOP',
+ 'OBJ_ARROW_THUMB_DOWN',
+ 'OBJ_ARROW_THUMB_UP',
+ 'OBJ_ARROW_UP',
+ 'OBJ_BITMAP',
+ 'OBJ_BITMAP_LABEL',
+ 'OBJ_BUTTON',
+ 'OBJ_CHANNEL',
+ 'OBJ_CYCLES',
+ 'OBJ_EDIT',
+ 'OBJ_ELLIPSE',
+ 'OBJ_EVENT',
+ 'OBJ_EXPANSION',
+ 'OBJ_FIBOARC',
+ 'OBJ_FIBOCHANNEL',
+ 'OBJ_FIBOFAN',
+ 'OBJ_FIBOTIMES',
+ 'OBJ_FIBO',
+ 'OBJ_GANNFAN',
+ 'OBJ_GANNGRID',
+ 'OBJ_GANNLINE',
+ 'OBJ_HLINE',
+ 'OBJ_LABEL',
+ 'OBJ_NO_PERIODS',
+ 'OBJ_PERIOD_D1',
+ 'OBJ_PERIOD_H1',
+ 'OBJ_PERIOD_H4',
+ 'OBJ_PERIOD_M1',
+ 'OBJ_PERIOD_M5',
+ 'OBJ_PERIOD_M15',
+ 'OBJ_PERIOD_M30',
+ 'OBJ_PERIOD_MN1',
+ 'OBJ_PERIOD_W1',
+ 'OBJ_PITCHFORK',
+ 'OBJ_RECTANGLE',
+ 'OBJ_RECTANGLE_LABEL',
+ 'OBJ_REGRESSION',
+ 'OBJ_STDDEVCHANNEL',
+ 'OBJ_TEXT',
+ 'OBJ_TRENDBYANGLE',
+ 'OBJ_TREND',
+ 'OBJ_TRIANGLE',
+ 'OBJ_VLINE',
+ 'OP_BUYLIMIT',
+ 'OP_BUYSTOP',
+ 'OP_BUY',
+ 'OP_SELLLIMIT',
+ 'OP_SELLSTOP',
+ 'OP_SELL',
+ 'PERIOD_CURRENT',
+ 'PERIOD_D1',
+ 'PERIOD_H1',
+ 'PERIOD_H2',
+ 'PERIOD_H3',
+ 'PERIOD_H4',
+ 'PERIOD_H6',
+ 'PERIOD_H8',
+ 'PERIOD_H12',
+ 'PERIOD_M1',
+ 'PERIOD_M2',
+ 'PERIOD_M3',
+ 'PERIOD_M4',
+ 'PERIOD_M5',
+ 'PERIOD_M6',
+ 'PERIOD_M10',
+ 'PERIOD_M12',
+ 'PERIOD_M15',
+ 'PERIOD_M20',
+ 'PERIOD_M30',
+ 'PERIOD_MN1',
+ 'PERIOD_W1',
+ 'POINTER_AUTOMATIC',
+ 'POINTER_DYNAMIC',
'POINTER_INVALID',
- 'PRICE_CLOSE',
- 'PRICE_HIGH',
- 'PRICE_LOW',
- 'PRICE_MEDIAN',
- 'PRICE_OPEN',
- 'PRICE_TYPICAL',
- 'PRICE_WEIGHTED',
- 'PROGRAM_EXPERT',
- 'PROGRAM_INDICATOR',
- 'PROGRAM_SCRIPT',
- 'REASON_ACCOUNT',
- 'REASON_CHARTCHANGE',
- 'REASON_CHARTCLOSE',
- 'REASON_CLOSE',
- 'REASON_INITFAILED',
- 'REASON_PARAMETERS',
- 'REASON_PROGRAM',
- 'REASON_RECOMPILE',
- 'REASON_REMOVE',
- 'REASON_TEMPLATE',
- 'SATURDAY',
- 'SEEK_CUR',
- 'SEEK_END',
- 'SEEK_SET',
- 'SERIES_BARS_COUNT',
- 'SERIES_FIRSTDATE',
- 'SERIES_LASTBAR_DATE',
- 'SERIES_SERVER_FIRSTDATE',
- 'SERIES_SYNCHRONIZED',
- 'SERIES_TERMINAL_FIRSTDATE',
- 'SHORT_MAX',
- 'SHORT_MIN',
- 'STAT_BALANCEDD_PERCENT',
- 'STAT_BALANCEMIN',
- 'STAT_BALANCE_DDREL_PERCENT',
- 'STAT_BALANCE_DD',
- 'STAT_BALANCE_DD_RELATIVE',
- 'STAT_CONLOSSMAX',
- 'STAT_CONLOSSMAX_TRADES',
- 'STAT_CONPROFITMAX',
- 'STAT_CONPROFITMAX_TRADES',
- 'STAT_CUSTOM_ONTESTER',
- 'STAT_DEALS',
- 'STAT_EQUITYDD_PERCENT',
- 'STAT_EQUITYMIN',
- 'STAT_EQUITY_DDREL_PERCENT',
- 'STAT_EQUITY_DD',
- 'STAT_EQUITY_DD_RELATIVE',
- 'STAT_EXPECTED_PAYOFF',
- 'STAT_GROSS_LOSS',
- 'STAT_GROSS_PROFIT',
- 'STAT_INITIAL_DEPOSIT',
- 'STAT_LONG_TRADES',
- 'STAT_LOSSTRADES_AVGCON',
- 'STAT_LOSS_TRADES',
- 'STAT_MAX_CONLOSSES',
- 'STAT_MAX_CONLOSS_TRADES',
- 'STAT_MAX_CONPROFIT_TRADES',
- 'STAT_MAX_CONWINS',
- 'STAT_MAX_LOSSTRADE',
- 'STAT_MAX_PROFITTRADE',
- 'STAT_MIN_MARGINLEVEL',
- 'STAT_PROFITTRADES_AVGCON',
- 'STAT_PROFIT',
- 'STAT_PROFIT_FACTOR',
- 'STAT_PROFIT_LONGTRADES',
- 'STAT_PROFIT_SHORTTRADES',
- 'STAT_PROFIT_TRADES',
- 'STAT_RECOVERY_FACTOR',
- 'STAT_SHARPE_RATIO',
- 'STAT_SHORT_TRADES',
- 'STAT_TRADES',
- 'STAT_WITHDRAWAL',
- 'STO_CLOSECLOSE',
- 'STO_LOWHIGH',
- 'STYLE_DASHDOTDOT',
- 'STYLE_DASHDOT',
- 'STYLE_DASH',
- 'STYLE_DOT',
- 'STYLE_SOLID',
- 'SUNDAY',
- 'SYMBOL_ARROWDOWN',
- 'SYMBOL_ARROWUP',
- 'SYMBOL_CHECKSIGN',
- 'SYMBOL_LEFTPRICE',
- 'SYMBOL_RIGHTPRICE',
- 'SYMBOL_STOPSIGN',
- 'SYMBOL_THUMBSDOWN',
- 'SYMBOL_THUMBSUP',
- 'TERMINAL_BUILD',
- 'TERMINAL_CODEPAGE',
- 'TERMINAL_COMMONDATA_PATH',
- 'TERMINAL_COMPANY',
- 'TERMINAL_CONNECTED',
- 'TERMINAL_CPU_CORES',
- 'TERMINAL_DATA_PATH',
- 'TERMINAL_DISK_SPACE',
- 'TERMINAL_DLLS_ALLOWED',
- 'TERMINAL_EMAIL_ENABLED',
- 'TERMINAL_FTP_ENABLED',
- 'TERMINAL_LANGUAGE',
- 'TERMINAL_MAXBARS',
- 'TERMINAL_MEMORY_AVAILABLE',
- 'TERMINAL_MEMORY_PHYSICAL',
- 'TERMINAL_MEMORY_TOTAL',
- 'TERMINAL_MEMORY_USED',
- 'TERMINAL_NAME',
- 'TERMINAL_OPENCL_SUPPORT',
- 'TERMINAL_PATH',
- 'TERMINAL_TRADE_ALLOWED',
- 'TERMINAL_X64',
- 'THURSDAY',
- 'TRADE_ACTION_DEAL',
- 'TRADE_ACTION_MODIFY',
- 'TRADE_ACTION_PENDING',
- 'TRADE_ACTION_REMOVE',
- 'TRADE_ACTION_SLTP',
- 'TUESDAY',
- 'UCHAR_MAX',
- 'UINT_MAX',
- 'ULONG_MAX',
- 'USHORT_MAX',
- 'VOLUME_REAL',
- 'VOLUME_TICK',
- 'WEDNESDAY',
- 'WHOLE_ARRAY',
- 'WRONG_VALUE',
- 'clrNONE',
- '__DATETIME__',
- '__DATE__',
- '__FILE__',
- '__FUNCSIG__',
- '__FUNCTION__',
- '__LINE__',
- '__MQL4BUILD__',
- '__MQLBUILD__',
- '__PATH__',
-)
-
-colors = (
- 'AliceBlue',
- 'AntiqueWhite',
- 'Aquamarine',
- 'Aqua',
- 'Beige',
- 'Bisque',
- 'Black',
- 'BlanchedAlmond',
- 'BlueViolet',
- 'Blue',
- 'Brown',
- 'BurlyWood',
- 'CadetBlue',
- 'Chartreuse',
- 'Chocolate',
- 'Coral',
- 'CornflowerBlue',
- 'Cornsilk',
- 'Crimson',
- 'DarkBlue',
- 'DarkGoldenrod',
- 'DarkGray',
- 'DarkGreen',
- 'DarkKhaki',
- 'DarkOliveGreen',
- 'DarkOrange',
- 'DarkOrchid',
- 'DarkSalmon',
- 'DarkSeaGreen',
- 'DarkSlateBlue',
- 'DarkSlateGray',
- 'DarkTurquoise',
- 'DarkViolet',
- 'DeepPink',
- 'DeepSkyBlue',
- 'DimGray',
- 'DodgerBlue',
- 'FireBrick',
- 'ForestGreen',
- 'Gainsboro',
- 'Goldenrod',
- 'Gold',
- 'Gray',
- 'GreenYellow',
- 'Green',
- 'Honeydew',
- 'HotPink',
- 'IndianRed',
- 'Indigo',
- 'Ivory',
- 'Khaki',
- 'LavenderBlush',
- 'Lavender',
- 'LawnGreen',
- 'LemonChiffon',
- 'LightBlue',
- 'LightCoral',
- 'LightCyan',
- 'LightGoldenrod',
- 'LightGray',
- 'LightGreen',
- 'LightPink',
- 'LightSalmon',
- 'LightSeaGreen',
- 'LightSkyBlue',
- 'LightSlateGray',
- 'LightSteelBlue',
- 'LightYellow',
- 'LimeGreen',
- 'Lime',
- 'Linen',
- 'Magenta',
- 'Maroon',
- 'MediumAquamarine',
- 'MediumBlue',
- 'MediumOrchid',
- 'MediumPurple',
- 'MediumSeaGreen',
- 'MediumSlateBlue',
- 'MediumSpringGreen',
- 'MediumTurquoise',
- 'MediumVioletRed',
- 'MidnightBlue',
- 'MintCream',
- 'MistyRose',
- 'Moccasin',
- 'NavajoWhite',
- 'Navy',
- 'OldLace',
- 'OliveDrab',
- 'Olive',
- 'OrangeRed',
- 'Orange',
- 'Orchid',
- 'PaleGoldenrod',
- 'PaleGreen',
- 'PaleTurquoise',
- 'PaleVioletRed',
- 'PapayaWhip',
- 'PeachPuff',
- 'Peru',
- 'Pink',
- 'Plum',
- 'PowderBlue',
- 'Purple',
- 'Red',
- 'RosyBrown',
- 'RoyalBlue',
- 'SaddleBrown',
- 'Salmon',
- 'SandyBrown',
- 'SeaGreen',
- 'Seashell',
- 'Sienna',
- 'Silver',
- 'SkyBlue',
- 'SlateBlue',
- 'SlateGray',
- 'Snow',
- 'SpringGreen',
- 'SteelBlue',
- 'Tan',
- 'Teal',
- 'Thistle',
- 'Tomato',
- 'Turquoise',
- 'Violet',
- 'Wheat',
- 'WhiteSmoke',
- 'White',
- 'YellowGreen',
- 'Yellow',
-)
-
-keywords = (
- 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
- '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
- 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
- 'Volume',
-)
-c_types = (
- 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
- 'color', 'long', 'ulong', 'datetime', 'float', 'double',
- 'string',
-)
+ 'PRICE_CLOSE',
+ 'PRICE_HIGH',
+ 'PRICE_LOW',
+ 'PRICE_MEDIAN',
+ 'PRICE_OPEN',
+ 'PRICE_TYPICAL',
+ 'PRICE_WEIGHTED',
+ 'PROGRAM_EXPERT',
+ 'PROGRAM_INDICATOR',
+ 'PROGRAM_SCRIPT',
+ 'REASON_ACCOUNT',
+ 'REASON_CHARTCHANGE',
+ 'REASON_CHARTCLOSE',
+ 'REASON_CLOSE',
+ 'REASON_INITFAILED',
+ 'REASON_PARAMETERS',
+ 'REASON_PROGRAM',
+ 'REASON_RECOMPILE',
+ 'REASON_REMOVE',
+ 'REASON_TEMPLATE',
+ 'SATURDAY',
+ 'SEEK_CUR',
+ 'SEEK_END',
+ 'SEEK_SET',
+ 'SERIES_BARS_COUNT',
+ 'SERIES_FIRSTDATE',
+ 'SERIES_LASTBAR_DATE',
+ 'SERIES_SERVER_FIRSTDATE',
+ 'SERIES_SYNCHRONIZED',
+ 'SERIES_TERMINAL_FIRSTDATE',
+ 'SHORT_MAX',
+ 'SHORT_MIN',
+ 'STAT_BALANCEDD_PERCENT',
+ 'STAT_BALANCEMIN',
+ 'STAT_BALANCE_DDREL_PERCENT',
+ 'STAT_BALANCE_DD',
+ 'STAT_BALANCE_DD_RELATIVE',
+ 'STAT_CONLOSSMAX',
+ 'STAT_CONLOSSMAX_TRADES',
+ 'STAT_CONPROFITMAX',
+ 'STAT_CONPROFITMAX_TRADES',
+ 'STAT_CUSTOM_ONTESTER',
+ 'STAT_DEALS',
+ 'STAT_EQUITYDD_PERCENT',
+ 'STAT_EQUITYMIN',
+ 'STAT_EQUITY_DDREL_PERCENT',
+ 'STAT_EQUITY_DD',
+ 'STAT_EQUITY_DD_RELATIVE',
+ 'STAT_EXPECTED_PAYOFF',
+ 'STAT_GROSS_LOSS',
+ 'STAT_GROSS_PROFIT',
+ 'STAT_INITIAL_DEPOSIT',
+ 'STAT_LONG_TRADES',
+ 'STAT_LOSSTRADES_AVGCON',
+ 'STAT_LOSS_TRADES',
+ 'STAT_MAX_CONLOSSES',
+ 'STAT_MAX_CONLOSS_TRADES',
+ 'STAT_MAX_CONPROFIT_TRADES',
+ 'STAT_MAX_CONWINS',
+ 'STAT_MAX_LOSSTRADE',
+ 'STAT_MAX_PROFITTRADE',
+ 'STAT_MIN_MARGINLEVEL',
+ 'STAT_PROFITTRADES_AVGCON',
+ 'STAT_PROFIT',
+ 'STAT_PROFIT_FACTOR',
+ 'STAT_PROFIT_LONGTRADES',
+ 'STAT_PROFIT_SHORTTRADES',
+ 'STAT_PROFIT_TRADES',
+ 'STAT_RECOVERY_FACTOR',
+ 'STAT_SHARPE_RATIO',
+ 'STAT_SHORT_TRADES',
+ 'STAT_TRADES',
+ 'STAT_WITHDRAWAL',
+ 'STO_CLOSECLOSE',
+ 'STO_LOWHIGH',
+ 'STYLE_DASHDOTDOT',
+ 'STYLE_DASHDOT',
+ 'STYLE_DASH',
+ 'STYLE_DOT',
+ 'STYLE_SOLID',
+ 'SUNDAY',
+ 'SYMBOL_ARROWDOWN',
+ 'SYMBOL_ARROWUP',
+ 'SYMBOL_CHECKSIGN',
+ 'SYMBOL_LEFTPRICE',
+ 'SYMBOL_RIGHTPRICE',
+ 'SYMBOL_STOPSIGN',
+ 'SYMBOL_THUMBSDOWN',
+ 'SYMBOL_THUMBSUP',
+ 'TERMINAL_BUILD',
+ 'TERMINAL_CODEPAGE',
+ 'TERMINAL_COMMONDATA_PATH',
+ 'TERMINAL_COMPANY',
+ 'TERMINAL_CONNECTED',
+ 'TERMINAL_CPU_CORES',
+ 'TERMINAL_DATA_PATH',
+ 'TERMINAL_DISK_SPACE',
+ 'TERMINAL_DLLS_ALLOWED',
+ 'TERMINAL_EMAIL_ENABLED',
+ 'TERMINAL_FTP_ENABLED',
+ 'TERMINAL_LANGUAGE',
+ 'TERMINAL_MAXBARS',
+ 'TERMINAL_MEMORY_AVAILABLE',
+ 'TERMINAL_MEMORY_PHYSICAL',
+ 'TERMINAL_MEMORY_TOTAL',
+ 'TERMINAL_MEMORY_USED',
+ 'TERMINAL_NAME',
+ 'TERMINAL_OPENCL_SUPPORT',
+ 'TERMINAL_PATH',
+ 'TERMINAL_TRADE_ALLOWED',
+ 'TERMINAL_X64',
+ 'THURSDAY',
+ 'TRADE_ACTION_DEAL',
+ 'TRADE_ACTION_MODIFY',
+ 'TRADE_ACTION_PENDING',
+ 'TRADE_ACTION_REMOVE',
+ 'TRADE_ACTION_SLTP',
+ 'TUESDAY',
+ 'UCHAR_MAX',
+ 'UINT_MAX',
+ 'ULONG_MAX',
+ 'USHORT_MAX',
+ 'VOLUME_REAL',
+ 'VOLUME_TICK',
+ 'WEDNESDAY',
+ 'WHOLE_ARRAY',
+ 'WRONG_VALUE',
+ 'clrNONE',
+ '__DATETIME__',
+ '__DATE__',
+ '__FILE__',
+ '__FUNCSIG__',
+ '__FUNCTION__',
+ '__LINE__',
+ '__MQL4BUILD__',
+ '__MQLBUILD__',
+ '__PATH__',
+)
+
+colors = (
+ 'AliceBlue',
+ 'AntiqueWhite',
+ 'Aquamarine',
+ 'Aqua',
+ 'Beige',
+ 'Bisque',
+ 'Black',
+ 'BlanchedAlmond',
+ 'BlueViolet',
+ 'Blue',
+ 'Brown',
+ 'BurlyWood',
+ 'CadetBlue',
+ 'Chartreuse',
+ 'Chocolate',
+ 'Coral',
+ 'CornflowerBlue',
+ 'Cornsilk',
+ 'Crimson',
+ 'DarkBlue',
+ 'DarkGoldenrod',
+ 'DarkGray',
+ 'DarkGreen',
+ 'DarkKhaki',
+ 'DarkOliveGreen',
+ 'DarkOrange',
+ 'DarkOrchid',
+ 'DarkSalmon',
+ 'DarkSeaGreen',
+ 'DarkSlateBlue',
+ 'DarkSlateGray',
+ 'DarkTurquoise',
+ 'DarkViolet',
+ 'DeepPink',
+ 'DeepSkyBlue',
+ 'DimGray',
+ 'DodgerBlue',
+ 'FireBrick',
+ 'ForestGreen',
+ 'Gainsboro',
+ 'Goldenrod',
+ 'Gold',
+ 'Gray',
+ 'GreenYellow',
+ 'Green',
+ 'Honeydew',
+ 'HotPink',
+ 'IndianRed',
+ 'Indigo',
+ 'Ivory',
+ 'Khaki',
+ 'LavenderBlush',
+ 'Lavender',
+ 'LawnGreen',
+ 'LemonChiffon',
+ 'LightBlue',
+ 'LightCoral',
+ 'LightCyan',
+ 'LightGoldenrod',
+ 'LightGray',
+ 'LightGreen',
+ 'LightPink',
+ 'LightSalmon',
+ 'LightSeaGreen',
+ 'LightSkyBlue',
+ 'LightSlateGray',
+ 'LightSteelBlue',
+ 'LightYellow',
+ 'LimeGreen',
+ 'Lime',
+ 'Linen',
+ 'Magenta',
+ 'Maroon',
+ 'MediumAquamarine',
+ 'MediumBlue',
+ 'MediumOrchid',
+ 'MediumPurple',
+ 'MediumSeaGreen',
+ 'MediumSlateBlue',
+ 'MediumSpringGreen',
+ 'MediumTurquoise',
+ 'MediumVioletRed',
+ 'MidnightBlue',
+ 'MintCream',
+ 'MistyRose',
+ 'Moccasin',
+ 'NavajoWhite',
+ 'Navy',
+ 'OldLace',
+ 'OliveDrab',
+ 'Olive',
+ 'OrangeRed',
+ 'Orange',
+ 'Orchid',
+ 'PaleGoldenrod',
+ 'PaleGreen',
+ 'PaleTurquoise',
+ 'PaleVioletRed',
+ 'PapayaWhip',
+ 'PeachPuff',
+ 'Peru',
+ 'Pink',
+ 'Plum',
+ 'PowderBlue',
+ 'Purple',
+ 'Red',
+ 'RosyBrown',
+ 'RoyalBlue',
+ 'SaddleBrown',
+ 'Salmon',
+ 'SandyBrown',
+ 'SeaGreen',
+ 'Seashell',
+ 'Sienna',
+ 'Silver',
+ 'SkyBlue',
+ 'SlateBlue',
+ 'SlateGray',
+ 'Snow',
+ 'SpringGreen',
+ 'SteelBlue',
+ 'Tan',
+ 'Teal',
+ 'Thistle',
+ 'Tomato',
+ 'Turquoise',
+ 'Violet',
+ 'Wheat',
+ 'WhiteSmoke',
+ 'White',
+ 'YellowGreen',
+ 'Yellow',
+)
+
+keywords = (
+ 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
+ '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
+ 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
+ 'Volume',
+)
+c_types = (
+ 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
+ 'color', 'long', 'ulong', 'datetime', 'float', 'double',
+ 'string',
+)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
index 43611b29d0..0743c391d5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
@@ -1,1226 +1,1226 @@
-"""
- pygments.lexers._openedge_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the OpenEdgeLexer.
-
+"""
+ pygments.lexers._openedge_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtin list for the OpenEdgeLexer.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-OPENEDGEKEYWORDS = (
- 'ABS',
- 'ABSO',
- 'ABSOL',
- 'ABSOLU',
- 'ABSOLUT',
+ :license: BSD, see LICENSE for details.
+"""
+
+OPENEDGEKEYWORDS = (
+ 'ABS',
+ 'ABSO',
+ 'ABSOL',
+ 'ABSOLU',
+ 'ABSOLUT',
'ABSOLUTE',
'ABSTRACT',
- 'ACCELERATOR',
- 'ACCUM',
- 'ACCUMU',
- 'ACCUMUL',
- 'ACCUMULA',
- 'ACCUMULAT',
+ 'ACCELERATOR',
+ 'ACCUM',
+ 'ACCUMU',
+ 'ACCUMUL',
+ 'ACCUMULA',
+ 'ACCUMULAT',
'ACCUMULATE',
- 'ACTIVE-FORM',
- 'ACTIVE-WINDOW',
- 'ADD',
- 'ADD-BUFFER',
- 'ADD-CALC-COLUMN',
- 'ADD-COLUMNS-FROM',
- 'ADD-EVENTS-PROCEDURE',
- 'ADD-FIELDS-FROM',
- 'ADD-FIRST',
- 'ADD-INDEX-FIELD',
- 'ADD-LAST',
- 'ADD-LIKE-COLUMN',
- 'ADD-LIKE-FIELD',
- 'ADD-LIKE-INDEX',
- 'ADD-NEW-FIELD',
- 'ADD-NEW-INDEX',
- 'ADD-SCHEMA-LOCATION',
- 'ADD-SUPER-PROCEDURE',
- 'ADM-DATA',
- 'ADVISE',
- 'ALERT-BOX',
- 'ALIAS',
- 'ALL',
- 'ALLOW-COLUMN-SEARCHING',
- 'ALLOW-REPLICATION',
- 'ALTER',
- 'ALWAYS-ON-TOP',
- 'AMBIG',
- 'AMBIGU',
- 'AMBIGUO',
- 'AMBIGUOU',
+ 'ACTIVE-FORM',
+ 'ACTIVE-WINDOW',
+ 'ADD',
+ 'ADD-BUFFER',
+ 'ADD-CALC-COLUMN',
+ 'ADD-COLUMNS-FROM',
+ 'ADD-EVENTS-PROCEDURE',
+ 'ADD-FIELDS-FROM',
+ 'ADD-FIRST',
+ 'ADD-INDEX-FIELD',
+ 'ADD-LAST',
+ 'ADD-LIKE-COLUMN',
+ 'ADD-LIKE-FIELD',
+ 'ADD-LIKE-INDEX',
+ 'ADD-NEW-FIELD',
+ 'ADD-NEW-INDEX',
+ 'ADD-SCHEMA-LOCATION',
+ 'ADD-SUPER-PROCEDURE',
+ 'ADM-DATA',
+ 'ADVISE',
+ 'ALERT-BOX',
+ 'ALIAS',
+ 'ALL',
+ 'ALLOW-COLUMN-SEARCHING',
+ 'ALLOW-REPLICATION',
+ 'ALTER',
+ 'ALWAYS-ON-TOP',
+ 'AMBIG',
+ 'AMBIGU',
+ 'AMBIGUO',
+ 'AMBIGUOU',
'AMBIGUOUS',
'ANALYZ',
- 'ANALYZE',
- 'AND',
- 'ANSI-ONLY',
- 'ANY',
- 'ANYWHERE',
- 'APPEND',
- 'APPL-ALERT',
- 'APPL-ALERT-',
- 'APPL-ALERT-B',
- 'APPL-ALERT-BO',
- 'APPL-ALERT-BOX',
- 'APPL-ALERT-BOXE',
+ 'ANALYZE',
+ 'AND',
+ 'ANSI-ONLY',
+ 'ANY',
+ 'ANYWHERE',
+ 'APPEND',
+ 'APPL-ALERT',
+ 'APPL-ALERT-',
+ 'APPL-ALERT-B',
+ 'APPL-ALERT-BO',
+ 'APPL-ALERT-BOX',
+ 'APPL-ALERT-BOXE',
'APPL-ALERT-BOXES',
- 'APPL-CONTEXT-ID',
- 'APPLICATION',
- 'APPLY',
- 'APPSERVER-INFO',
- 'APPSERVER-PASSWORD',
- 'APPSERVER-USERID',
- 'ARRAY-MESSAGE',
- 'AS',
- 'ASC',
- 'ASCE',
- 'ASCEN',
- 'ASCEND',
- 'ASCENDI',
- 'ASCENDIN',
+ 'APPL-CONTEXT-ID',
+ 'APPLICATION',
+ 'APPLY',
+ 'APPSERVER-INFO',
+ 'APPSERVER-PASSWORD',
+ 'APPSERVER-USERID',
+ 'ARRAY-MESSAGE',
+ 'AS',
+ 'ASC',
+ 'ASCE',
+ 'ASCEN',
+ 'ASCEND',
+ 'ASCENDI',
+ 'ASCENDIN',
'ASCENDING',
- 'ASK-OVERWRITE',
- 'ASSEMBLY',
- 'ASSIGN',
- 'ASYNC-REQUEST-COUNT',
- 'ASYNC-REQUEST-HANDLE',
+ 'ASK-OVERWRITE',
+ 'ASSEMBLY',
+ 'ASSIGN',
+ 'ASYNC-REQUEST-COUNT',
+ 'ASYNC-REQUEST-HANDLE',
'ASYNCHRONOUS',
- 'AT',
- 'ATTACHED-PAIRLIST',
+ 'AT',
+ 'ATTACHED-PAIRLIST',
'ATTR',
- 'ATTR-SPACE',
- 'ATTRI',
- 'ATTRIB',
- 'ATTRIBU',
- 'ATTRIBUT',
- 'AUDIT-CONTROL',
- 'AUDIT-ENABLED',
- 'AUDIT-EVENT-CONTEXT',
- 'AUDIT-POLICY',
- 'AUTHENTICATION-FAILED',
- 'AUTHORIZATION',
- 'AUTO-COMP',
- 'AUTO-COMPL',
- 'AUTO-COMPLE',
- 'AUTO-COMPLET',
- 'AUTO-COMPLETI',
- 'AUTO-COMPLETIO',
+ 'ATTR-SPACE',
+ 'ATTRI',
+ 'ATTRIB',
+ 'ATTRIBU',
+ 'ATTRIBUT',
+ 'AUDIT-CONTROL',
+ 'AUDIT-ENABLED',
+ 'AUDIT-EVENT-CONTEXT',
+ 'AUDIT-POLICY',
+ 'AUTHENTICATION-FAILED',
+ 'AUTHORIZATION',
+ 'AUTO-COMP',
+ 'AUTO-COMPL',
+ 'AUTO-COMPLE',
+ 'AUTO-COMPLET',
+ 'AUTO-COMPLETI',
+ 'AUTO-COMPLETIO',
'AUTO-COMPLETION',
'AUTO-END-KEY',
- 'AUTO-ENDKEY',
- 'AUTO-GO',
- 'AUTO-IND',
- 'AUTO-INDE',
- 'AUTO-INDEN',
+ 'AUTO-ENDKEY',
+ 'AUTO-GO',
+ 'AUTO-IND',
+ 'AUTO-INDE',
+ 'AUTO-INDEN',
'AUTO-INDENT',
- 'AUTO-RESIZE',
- 'AUTO-RET',
- 'AUTO-RETU',
- 'AUTO-RETUR',
+ 'AUTO-RESIZE',
+ 'AUTO-RET',
+ 'AUTO-RETU',
+ 'AUTO-RETUR',
'AUTO-RETURN',
- 'AUTO-SYNCHRONIZE',
- 'AUTO-Z',
- 'AUTO-ZA',
+ 'AUTO-SYNCHRONIZE',
+ 'AUTO-Z',
+ 'AUTO-ZA',
'AUTO-ZAP',
'AUTOMATIC',
- 'AVAIL',
- 'AVAILA',
- 'AVAILAB',
- 'AVAILABL',
+ 'AVAIL',
+ 'AVAILA',
+ 'AVAILAB',
+ 'AVAILABL',
'AVAILABLE',
- 'AVAILABLE-FORMATS',
- 'AVE',
- 'AVER',
- 'AVERA',
- 'AVERAG',
+ 'AVAILABLE-FORMATS',
+ 'AVE',
+ 'AVER',
+ 'AVERA',
+ 'AVERAG',
'AVERAGE',
- 'AVG',
- 'BACK',
- 'BACKG',
- 'BACKGR',
- 'BACKGRO',
- 'BACKGROU',
- 'BACKGROUN',
+ 'AVG',
+ 'BACK',
+ 'BACKG',
+ 'BACKGR',
+ 'BACKGRO',
+ 'BACKGROU',
+ 'BACKGROUN',
'BACKGROUND',
'BACKWARD',
- 'BACKWARDS',
- 'BASE64-DECODE',
- 'BASE64-ENCODE',
- 'BASE-ADE',
- 'BASE-KEY',
- 'BATCH',
- 'BATCH-',
- 'BATCH-M',
- 'BATCH-MO',
- 'BATCH-MOD',
+ 'BACKWARDS',
+ 'BASE64-DECODE',
+ 'BASE64-ENCODE',
+ 'BASE-ADE',
+ 'BASE-KEY',
+ 'BATCH',
+ 'BATCH-',
+ 'BATCH-M',
+ 'BATCH-MO',
+ 'BATCH-MOD',
'BATCH-MODE',
- 'BATCH-SIZE',
- 'BEFORE-H',
- 'BEFORE-HI',
- 'BEFORE-HID',
+ 'BATCH-SIZE',
+ 'BEFORE-H',
+ 'BEFORE-HI',
+ 'BEFORE-HID',
'BEFORE-HIDE',
- 'BEGIN-EVENT-GROUP',
- 'BEGINS',
- 'BELL',
- 'BETWEEN',
- 'BGC',
- 'BGCO',
- 'BGCOL',
- 'BGCOLO',
+ 'BEGIN-EVENT-GROUP',
+ 'BEGINS',
+ 'BELL',
+ 'BETWEEN',
+ 'BGC',
+ 'BGCO',
+ 'BGCOL',
+ 'BGCOLO',
'BGCOLOR',
- 'BIG-ENDIAN',
- 'BINARY',
- 'BIND',
- 'BIND-WHERE',
- 'BLANK',
- 'BLOCK-ITERATION-DISPLAY',
+ 'BIG-ENDIAN',
+ 'BINARY',
+ 'BIND',
+ 'BIND-WHERE',
+ 'BLANK',
+ 'BLOCK-ITERATION-DISPLAY',
'BLOCK-LEVEL',
- 'BORDER-B',
- 'BORDER-BO',
- 'BORDER-BOT',
- 'BORDER-BOTT',
- 'BORDER-BOTTO',
+ 'BORDER-B',
+ 'BORDER-BO',
+ 'BORDER-BOT',
+ 'BORDER-BOTT',
+ 'BORDER-BOTTO',
'BORDER-BOTTOM-CHARS',
- 'BORDER-BOTTOM-P',
- 'BORDER-BOTTOM-PI',
- 'BORDER-BOTTOM-PIX',
- 'BORDER-BOTTOM-PIXE',
- 'BORDER-BOTTOM-PIXEL',
+ 'BORDER-BOTTOM-P',
+ 'BORDER-BOTTOM-PI',
+ 'BORDER-BOTTOM-PIX',
+ 'BORDER-BOTTOM-PIXE',
+ 'BORDER-BOTTOM-PIXEL',
'BORDER-BOTTOM-PIXELS',
- 'BORDER-L',
- 'BORDER-LE',
- 'BORDER-LEF',
- 'BORDER-LEFT',
- 'BORDER-LEFT-',
- 'BORDER-LEFT-C',
- 'BORDER-LEFT-CH',
- 'BORDER-LEFT-CHA',
- 'BORDER-LEFT-CHAR',
+ 'BORDER-L',
+ 'BORDER-LE',
+ 'BORDER-LEF',
+ 'BORDER-LEFT',
+ 'BORDER-LEFT-',
+ 'BORDER-LEFT-C',
+ 'BORDER-LEFT-CH',
+ 'BORDER-LEFT-CHA',
+ 'BORDER-LEFT-CHAR',
'BORDER-LEFT-CHARS',
- 'BORDER-LEFT-P',
- 'BORDER-LEFT-PI',
- 'BORDER-LEFT-PIX',
- 'BORDER-LEFT-PIXE',
- 'BORDER-LEFT-PIXEL',
+ 'BORDER-LEFT-P',
+ 'BORDER-LEFT-PI',
+ 'BORDER-LEFT-PIX',
+ 'BORDER-LEFT-PIXE',
+ 'BORDER-LEFT-PIXEL',
'BORDER-LEFT-PIXELS',
- 'BORDER-R',
- 'BORDER-RI',
- 'BORDER-RIG',
- 'BORDER-RIGH',
- 'BORDER-RIGHT',
- 'BORDER-RIGHT-',
- 'BORDER-RIGHT-C',
- 'BORDER-RIGHT-CH',
- 'BORDER-RIGHT-CHA',
- 'BORDER-RIGHT-CHAR',
+ 'BORDER-R',
+ 'BORDER-RI',
+ 'BORDER-RIG',
+ 'BORDER-RIGH',
+ 'BORDER-RIGHT',
+ 'BORDER-RIGHT-',
+ 'BORDER-RIGHT-C',
+ 'BORDER-RIGHT-CH',
+ 'BORDER-RIGHT-CHA',
+ 'BORDER-RIGHT-CHAR',
'BORDER-RIGHT-CHARS',
- 'BORDER-RIGHT-P',
- 'BORDER-RIGHT-PI',
- 'BORDER-RIGHT-PIX',
- 'BORDER-RIGHT-PIXE',
- 'BORDER-RIGHT-PIXEL',
+ 'BORDER-RIGHT-P',
+ 'BORDER-RIGHT-PI',
+ 'BORDER-RIGHT-PIX',
+ 'BORDER-RIGHT-PIXE',
+ 'BORDER-RIGHT-PIXEL',
'BORDER-RIGHT-PIXELS',
- 'BORDER-T',
- 'BORDER-TO',
- 'BORDER-TOP',
- 'BORDER-TOP-',
- 'BORDER-TOP-C',
- 'BORDER-TOP-CH',
- 'BORDER-TOP-CHA',
- 'BORDER-TOP-CHAR',
+ 'BORDER-T',
+ 'BORDER-TO',
+ 'BORDER-TOP',
+ 'BORDER-TOP-',
+ 'BORDER-TOP-C',
+ 'BORDER-TOP-CH',
+ 'BORDER-TOP-CHA',
+ 'BORDER-TOP-CHAR',
'BORDER-TOP-CHARS',
- 'BORDER-TOP-P',
- 'BORDER-TOP-PI',
- 'BORDER-TOP-PIX',
- 'BORDER-TOP-PIXE',
- 'BORDER-TOP-PIXEL',
+ 'BORDER-TOP-P',
+ 'BORDER-TOP-PI',
+ 'BORDER-TOP-PIX',
+ 'BORDER-TOP-PIXE',
+ 'BORDER-TOP-PIXEL',
'BORDER-TOP-PIXELS',
- 'BOX',
- 'BOX-SELECT',
- 'BOX-SELECTA',
- 'BOX-SELECTAB',
- 'BOX-SELECTABL',
+ 'BOX',
+ 'BOX-SELECT',
+ 'BOX-SELECTA',
+ 'BOX-SELECTAB',
+ 'BOX-SELECTABL',
'BOX-SELECTABLE',
- 'BREAK',
- 'BROWSE',
- 'BUFFER',
- 'BUFFER-CHARS',
- 'BUFFER-COMPARE',
- 'BUFFER-COPY',
- 'BUFFER-CREATE',
- 'BUFFER-DELETE',
- 'BUFFER-FIELD',
- 'BUFFER-HANDLE',
- 'BUFFER-LINES',
- 'BUFFER-NAME',
+ 'BREAK',
+ 'BROWSE',
+ 'BUFFER',
+ 'BUFFER-CHARS',
+ 'BUFFER-COMPARE',
+ 'BUFFER-COPY',
+ 'BUFFER-CREATE',
+ 'BUFFER-DELETE',
+ 'BUFFER-FIELD',
+ 'BUFFER-HANDLE',
+ 'BUFFER-LINES',
+ 'BUFFER-NAME',
'BUFFER-PARTITION-ID',
- 'BUFFER-RELEASE',
- 'BUFFER-VALUE',
- 'BUTTON',
- 'BUTTONS',
- 'BY',
- 'BY-POINTER',
- 'BY-VARIANT-POINTER',
- 'CACHE',
- 'CACHE-SIZE',
- 'CALL',
- 'CALL-NAME',
- 'CALL-TYPE',
- 'CAN-CREATE',
- 'CAN-DELETE',
- 'CAN-DO',
+ 'BUFFER-RELEASE',
+ 'BUFFER-VALUE',
+ 'BUTTON',
+ 'BUTTONS',
+ 'BY',
+ 'BY-POINTER',
+ 'BY-VARIANT-POINTER',
+ 'CACHE',
+ 'CACHE-SIZE',
+ 'CALL',
+ 'CALL-NAME',
+ 'CALL-TYPE',
+ 'CAN-CREATE',
+ 'CAN-DELETE',
+ 'CAN-DO',
'CAN-DO-DOMAIN-SUPPORT',
- 'CAN-FIND',
- 'CAN-QUERY',
- 'CAN-READ',
- 'CAN-SET',
- 'CAN-WRITE',
+ 'CAN-FIND',
+ 'CAN-QUERY',
+ 'CAN-READ',
+ 'CAN-SET',
+ 'CAN-WRITE',
'CANCEL-BREAK',
'CANCEL-BUTTON',
- 'CAPS',
- 'CAREFUL-PAINT',
- 'CASE',
- 'CASE-SEN',
- 'CASE-SENS',
- 'CASE-SENSI',
- 'CASE-SENSIT',
- 'CASE-SENSITI',
- 'CASE-SENSITIV',
+ 'CAPS',
+ 'CAREFUL-PAINT',
+ 'CASE',
+ 'CASE-SEN',
+ 'CASE-SENS',
+ 'CASE-SENSI',
+ 'CASE-SENSIT',
+ 'CASE-SENSITI',
+ 'CASE-SENSITIV',
'CASE-SENSITIVE',
- 'CAST',
- 'CATCH',
- 'CDECL',
- 'CENTER',
- 'CENTERE',
+ 'CAST',
+ 'CATCH',
+ 'CDECL',
+ 'CENTER',
+ 'CENTERE',
'CENTERED',
- 'CHAINED',
+ 'CHAINED',
'CHARACTER',
- 'CHARACTER_LENGTH',
- 'CHARSET',
- 'CHECK',
- 'CHECKED',
- 'CHOOSE',
- 'CHR',
- 'CLASS',
- 'CLASS-TYPE',
- 'CLEAR',
- 'CLEAR-APPL-CONTEXT',
- 'CLEAR-LOG',
- 'CLEAR-SELECT',
- 'CLEAR-SELECTI',
- 'CLEAR-SELECTIO',
+ 'CHARACTER_LENGTH',
+ 'CHARSET',
+ 'CHECK',
+ 'CHECKED',
+ 'CHOOSE',
+ 'CHR',
+ 'CLASS',
+ 'CLASS-TYPE',
+ 'CLEAR',
+ 'CLEAR-APPL-CONTEXT',
+ 'CLEAR-LOG',
+ 'CLEAR-SELECT',
+ 'CLEAR-SELECTI',
+ 'CLEAR-SELECTIO',
'CLEAR-SELECTION',
'CLEAR-SORT-ARROW',
- 'CLEAR-SORT-ARROWS',
- 'CLIENT-CONNECTION-ID',
- 'CLIENT-PRINCIPAL',
- 'CLIENT-TTY',
- 'CLIENT-TYPE',
- 'CLIENT-WORKSTATION',
- 'CLIPBOARD',
- 'CLOSE',
- 'CLOSE-LOG',
- 'CODE',
- 'CODEBASE-LOCATOR',
- 'CODEPAGE',
- 'CODEPAGE-CONVERT',
+ 'CLEAR-SORT-ARROWS',
+ 'CLIENT-CONNECTION-ID',
+ 'CLIENT-PRINCIPAL',
+ 'CLIENT-TTY',
+ 'CLIENT-TYPE',
+ 'CLIENT-WORKSTATION',
+ 'CLIPBOARD',
+ 'CLOSE',
+ 'CLOSE-LOG',
+ 'CODE',
+ 'CODEBASE-LOCATOR',
+ 'CODEPAGE',
+ 'CODEPAGE-CONVERT',
'COL',
'COL-OF',
- 'COLLATE',
- 'COLON',
- 'COLON-ALIGN',
- 'COLON-ALIGNE',
+ 'COLLATE',
+ 'COLON',
+ 'COLON-ALIGN',
+ 'COLON-ALIGNE',
'COLON-ALIGNED',
- 'COLOR',
- 'COLOR-TABLE',
- 'COLU',
- 'COLUM',
+ 'COLOR',
+ 'COLOR-TABLE',
+ 'COLU',
+ 'COLUM',
'COLUMN',
- 'COLUMN-BGCOLOR',
- 'COLUMN-DCOLOR',
- 'COLUMN-FGCOLOR',
- 'COLUMN-FONT',
- 'COLUMN-LAB',
- 'COLUMN-LABE',
+ 'COLUMN-BGCOLOR',
+ 'COLUMN-DCOLOR',
+ 'COLUMN-FGCOLOR',
+ 'COLUMN-FONT',
+ 'COLUMN-LAB',
+ 'COLUMN-LABE',
'COLUMN-LABEL',
- 'COLUMN-MOVABLE',
- 'COLUMN-OF',
- 'COLUMN-PFCOLOR',
- 'COLUMN-READ-ONLY',
- 'COLUMN-RESIZABLE',
+ 'COLUMN-MOVABLE',
+ 'COLUMN-OF',
+ 'COLUMN-PFCOLOR',
+ 'COLUMN-READ-ONLY',
+ 'COLUMN-RESIZABLE',
'COLUMN-SCROLLING',
- 'COLUMNS',
+ 'COLUMNS',
'COM-HANDLE',
'COM-SELF',
- 'COMBO-BOX',
- 'COMMAND',
- 'COMPARES',
- 'COMPILE',
- 'COMPILER',
- 'COMPLETE',
- 'CONFIG-NAME',
- 'CONNECT',
- 'CONNECTED',
- 'CONSTRUCTOR',
- 'CONTAINS',
- 'CONTENTS',
- 'CONTEXT',
- 'CONTEXT-HELP',
- 'CONTEXT-HELP-FILE',
- 'CONTEXT-HELP-ID',
- 'CONTEXT-POPUP',
- 'CONTROL',
- 'CONTROL-BOX',
- 'CONTROL-FRAME',
- 'CONVERT',
- 'CONVERT-3D-COLORS',
- 'CONVERT-TO-OFFS',
- 'CONVERT-TO-OFFSE',
+ 'COMBO-BOX',
+ 'COMMAND',
+ 'COMPARES',
+ 'COMPILE',
+ 'COMPILER',
+ 'COMPLETE',
+ 'CONFIG-NAME',
+ 'CONNECT',
+ 'CONNECTED',
+ 'CONSTRUCTOR',
+ 'CONTAINS',
+ 'CONTENTS',
+ 'CONTEXT',
+ 'CONTEXT-HELP',
+ 'CONTEXT-HELP-FILE',
+ 'CONTEXT-HELP-ID',
+ 'CONTEXT-POPUP',
+ 'CONTROL',
+ 'CONTROL-BOX',
+ 'CONTROL-FRAME',
+ 'CONVERT',
+ 'CONVERT-3D-COLORS',
+ 'CONVERT-TO-OFFS',
+ 'CONVERT-TO-OFFSE',
'CONVERT-TO-OFFSET',
- 'COPY-DATASET',
- 'COPY-LOB',
- 'COPY-SAX-ATTRIBUTES',
- 'COPY-TEMP-TABLE',
- 'COUNT',
- 'COUNT-OF',
- 'CPCASE',
- 'CPCOLL',
- 'CPINTERNAL',
- 'CPLOG',
- 'CPPRINT',
- 'CPRCODEIN',
- 'CPRCODEOUT',
- 'CPSTREAM',
- 'CPTERM',
- 'CRC-VALUE',
- 'CREATE',
- 'CREATE-LIKE',
- 'CREATE-LIKE-SEQUENTIAL',
- 'CREATE-NODE-NAMESPACE',
- 'CREATE-RESULT-LIST-ENTRY',
- 'CREATE-TEST-FILE',
- 'CURRENT',
- 'CURRENT-CHANGED',
- 'CURRENT-COLUMN',
- 'CURRENT-ENV',
- 'CURRENT-ENVI',
- 'CURRENT-ENVIR',
- 'CURRENT-ENVIRO',
- 'CURRENT-ENVIRON',
- 'CURRENT-ENVIRONM',
- 'CURRENT-ENVIRONME',
- 'CURRENT-ENVIRONMEN',
+ 'COPY-DATASET',
+ 'COPY-LOB',
+ 'COPY-SAX-ATTRIBUTES',
+ 'COPY-TEMP-TABLE',
+ 'COUNT',
+ 'COUNT-OF',
+ 'CPCASE',
+ 'CPCOLL',
+ 'CPINTERNAL',
+ 'CPLOG',
+ 'CPPRINT',
+ 'CPRCODEIN',
+ 'CPRCODEOUT',
+ 'CPSTREAM',
+ 'CPTERM',
+ 'CRC-VALUE',
+ 'CREATE',
+ 'CREATE-LIKE',
+ 'CREATE-LIKE-SEQUENTIAL',
+ 'CREATE-NODE-NAMESPACE',
+ 'CREATE-RESULT-LIST-ENTRY',
+ 'CREATE-TEST-FILE',
+ 'CURRENT',
+ 'CURRENT-CHANGED',
+ 'CURRENT-COLUMN',
+ 'CURRENT-ENV',
+ 'CURRENT-ENVI',
+ 'CURRENT-ENVIR',
+ 'CURRENT-ENVIRO',
+ 'CURRENT-ENVIRON',
+ 'CURRENT-ENVIRONM',
+ 'CURRENT-ENVIRONME',
+ 'CURRENT-ENVIRONMEN',
'CURRENT-ENVIRONMENT',
- 'CURRENT-ITERATION',
- 'CURRENT-LANG',
- 'CURRENT-LANGU',
- 'CURRENT-LANGUA',
- 'CURRENT-LANGUAG',
+ 'CURRENT-ITERATION',
+ 'CURRENT-LANG',
+ 'CURRENT-LANGU',
+ 'CURRENT-LANGUA',
+ 'CURRENT-LANGUAG',
'CURRENT-LANGUAGE',
- 'CURRENT-QUERY',
+ 'CURRENT-QUERY',
'CURRENT-REQUEST-INFO',
'CURRENT-RESPONSE-INFO',
- 'CURRENT-RESULT-ROW',
- 'CURRENT-ROW-MODIFIED',
- 'CURRENT-VALUE',
- 'CURRENT-WINDOW',
+ 'CURRENT-RESULT-ROW',
+ 'CURRENT-ROW-MODIFIED',
+ 'CURRENT-VALUE',
+ 'CURRENT-WINDOW',
'CURRENT_DATE',
- 'CURS',
- 'CURSO',
+ 'CURS',
+ 'CURSO',
'CURSOR',
- 'CURSOR-CHAR',
- 'CURSOR-LINE',
- 'CURSOR-OFFSET',
- 'DATA-BIND',
- 'DATA-ENTRY-RET',
- 'DATA-ENTRY-RETU',
- 'DATA-ENTRY-RETUR',
+ 'CURSOR-CHAR',
+ 'CURSOR-LINE',
+ 'CURSOR-OFFSET',
+ 'DATA-BIND',
+ 'DATA-ENTRY-RET',
+ 'DATA-ENTRY-RETU',
+ 'DATA-ENTRY-RETUR',
'DATA-ENTRY-RETURN',
- 'DATA-REL',
- 'DATA-RELA',
- 'DATA-RELAT',
- 'DATA-RELATI',
- 'DATA-RELATIO',
+ 'DATA-REL',
+ 'DATA-RELA',
+ 'DATA-RELAT',
+ 'DATA-RELATI',
+ 'DATA-RELATIO',
'DATA-RELATION',
- 'DATA-SOURCE',
- 'DATA-SOURCE-COMPLETE-MAP',
- 'DATA-SOURCE-MODIFIED',
- 'DATA-SOURCE-ROWID',
- 'DATA-T',
- 'DATA-TY',
- 'DATA-TYP',
+ 'DATA-SOURCE',
+ 'DATA-SOURCE-COMPLETE-MAP',
+ 'DATA-SOURCE-MODIFIED',
+ 'DATA-SOURCE-ROWID',
+ 'DATA-T',
+ 'DATA-TY',
+ 'DATA-TYP',
'DATA-TYPE',
'DATABASE',
'DATASERVERS',
'DATASET',
'DATASET-HANDLE',
'DATE',
- 'DATE-F',
- 'DATE-FO',
- 'DATE-FOR',
- 'DATE-FORM',
- 'DATE-FORMA',
+ 'DATE-F',
+ 'DATE-FO',
+ 'DATE-FOR',
+ 'DATE-FORM',
+ 'DATE-FORMA',
'DATE-FORMAT',
- 'DAY',
+ 'DAY',
'DB-CONTEXT',
'DB-REFERENCES',
- 'DBCODEPAGE',
- 'DBCOLLATION',
- 'DBNAME',
- 'DBPARAM',
- 'DBREST',
- 'DBRESTR',
- 'DBRESTRI',
- 'DBRESTRIC',
- 'DBRESTRICT',
- 'DBRESTRICTI',
- 'DBRESTRICTIO',
- 'DBRESTRICTION',
+ 'DBCODEPAGE',
+ 'DBCOLLATION',
+ 'DBNAME',
+ 'DBPARAM',
+ 'DBREST',
+ 'DBRESTR',
+ 'DBRESTRI',
+ 'DBRESTRIC',
+ 'DBRESTRICT',
+ 'DBRESTRICTI',
+ 'DBRESTRICTIO',
+ 'DBRESTRICTION',
'DBRESTRICTIONS',
- 'DBTASKID',
- 'DBTYPE',
- 'DBVERS',
- 'DBVERSI',
- 'DBVERSIO',
+ 'DBTASKID',
+ 'DBTYPE',
+ 'DBVERS',
+ 'DBVERSI',
+ 'DBVERSIO',
'DBVERSION',
- 'DCOLOR',
- 'DDE',
- 'DDE-ERROR',
+ 'DCOLOR',
+ 'DDE',
+ 'DDE-ERROR',
'DDE-I',
- 'DDE-ID',
- 'DDE-ITEM',
- 'DDE-NAME',
- 'DDE-TOPIC',
- 'DEBLANK',
+ 'DDE-ID',
+ 'DDE-ITEM',
+ 'DDE-NAME',
+ 'DDE-TOPIC',
+ 'DEBLANK',
'DEBU',
- 'DEBUG',
- 'DEBUG-ALERT',
+ 'DEBUG',
+ 'DEBUG-ALERT',
'DEBUG-LIST',
- 'DEBUGGER',
+ 'DEBUGGER',
'DECIMAL',
- 'DECIMALS',
- 'DECLARE',
- 'DECLARE-NAMESPACE',
- 'DECRYPT',
- 'DEFAULT',
+ 'DECIMALS',
+ 'DECLARE',
+ 'DECLARE-NAMESPACE',
+ 'DECRYPT',
+ 'DEFAULT',
'DEFAULT-B',
'DEFAULT-BU',
- 'DEFAULT-BUFFER-HANDLE',
+ 'DEFAULT-BUFFER-HANDLE',
'DEFAULT-BUT',
'DEFAULT-BUTT',
'DEFAULT-BUTTO',
- 'DEFAULT-BUTTON',
- 'DEFAULT-COMMIT',
- 'DEFAULT-EX',
- 'DEFAULT-EXT',
- 'DEFAULT-EXTE',
- 'DEFAULT-EXTEN',
- 'DEFAULT-EXTENS',
- 'DEFAULT-EXTENSI',
- 'DEFAULT-EXTENSIO',
+ 'DEFAULT-BUTTON',
+ 'DEFAULT-COMMIT',
+ 'DEFAULT-EX',
+ 'DEFAULT-EXT',
+ 'DEFAULT-EXTE',
+ 'DEFAULT-EXTEN',
+ 'DEFAULT-EXTENS',
+ 'DEFAULT-EXTENSI',
+ 'DEFAULT-EXTENSIO',
'DEFAULT-EXTENSION',
- 'DEFAULT-NOXL',
- 'DEFAULT-NOXLA',
- 'DEFAULT-NOXLAT',
+ 'DEFAULT-NOXL',
+ 'DEFAULT-NOXLA',
+ 'DEFAULT-NOXLAT',
'DEFAULT-NOXLATE',
- 'DEFAULT-VALUE',
- 'DEFAULT-WINDOW',
+ 'DEFAULT-VALUE',
+ 'DEFAULT-WINDOW',
'DEFINE',
'DEFINE-USER-EVENT-MANAGER',
- 'DEFINED',
- 'DEL',
- 'DELE',
+ 'DEFINED',
+ 'DEL',
+ 'DELE',
'DELEGATE',
- 'DELET',
+ 'DELET',
'DELETE PROCEDURE',
'DELETE',
- 'DELETE-CHAR',
- 'DELETE-CHARA',
- 'DELETE-CHARAC',
- 'DELETE-CHARACT',
- 'DELETE-CHARACTE',
+ 'DELETE-CHAR',
+ 'DELETE-CHARA',
+ 'DELETE-CHARAC',
+ 'DELETE-CHARACT',
+ 'DELETE-CHARACTE',
'DELETE-CHARACTER',
- 'DELETE-CURRENT-ROW',
- 'DELETE-LINE',
- 'DELETE-RESULT-LIST-ENTRY',
- 'DELETE-SELECTED-ROW',
- 'DELETE-SELECTED-ROWS',
- 'DELIMITER',
- 'DESC',
- 'DESCE',
- 'DESCEN',
- 'DESCEND',
- 'DESCENDI',
- 'DESCENDIN',
+ 'DELETE-CURRENT-ROW',
+ 'DELETE-LINE',
+ 'DELETE-RESULT-LIST-ENTRY',
+ 'DELETE-SELECTED-ROW',
+ 'DELETE-SELECTED-ROWS',
+ 'DELIMITER',
+ 'DESC',
+ 'DESCE',
+ 'DESCEN',
+ 'DESCEND',
+ 'DESCENDI',
+ 'DESCENDIN',
'DESCENDING',
- 'DESELECT-FOCUSED-ROW',
- 'DESELECT-ROWS',
- 'DESELECT-SELECTED-ROW',
+ 'DESELECT-FOCUSED-ROW',
+ 'DESELECT-ROWS',
+ 'DESELECT-SELECTED-ROW',
'DESELECTION',
- 'DESTRUCTOR',
- 'DIALOG-BOX',
- 'DICT',
- 'DICTI',
- 'DICTIO',
- 'DICTION',
- 'DICTIONA',
- 'DICTIONAR',
+ 'DESTRUCTOR',
+ 'DIALOG-BOX',
+ 'DICT',
+ 'DICTI',
+ 'DICTIO',
+ 'DICTION',
+ 'DICTIONA',
+ 'DICTIONAR',
'DICTIONARY',
- 'DIR',
- 'DISABLE',
- 'DISABLE-AUTO-ZAP',
- 'DISABLE-DUMP-TRIGGERS',
- 'DISABLE-LOAD-TRIGGERS',
+ 'DIR',
+ 'DISABLE',
+ 'DISABLE-AUTO-ZAP',
+ 'DISABLE-DUMP-TRIGGERS',
+ 'DISABLE-LOAD-TRIGGERS',
'DISABLED',
- 'DISCON',
- 'DISCONN',
- 'DISCONNE',
- 'DISCONNEC',
+ 'DISCON',
+ 'DISCONN',
+ 'DISCONNE',
+ 'DISCONNEC',
'DISCONNECT',
- 'DISP',
- 'DISPL',
- 'DISPLA',
+ 'DISP',
+ 'DISPL',
+ 'DISPLA',
'DISPLAY',
- 'DISPLAY-MESSAGE',
- 'DISPLAY-T',
- 'DISPLAY-TY',
- 'DISPLAY-TYP',
+ 'DISPLAY-MESSAGE',
+ 'DISPLAY-T',
+ 'DISPLAY-TY',
+ 'DISPLAY-TYP',
'DISPLAY-TYPE',
- 'DISTINCT',
- 'DO',
- 'DOMAIN-DESCRIPTION',
- 'DOMAIN-NAME',
- 'DOMAIN-TYPE',
- 'DOS',
- 'DOUBLE',
- 'DOWN',
- 'DRAG-ENABLED',
- 'DROP',
- 'DROP-DOWN',
- 'DROP-DOWN-LIST',
- 'DROP-FILE-NOTIFY',
- 'DROP-TARGET',
+ 'DISTINCT',
+ 'DO',
+ 'DOMAIN-DESCRIPTION',
+ 'DOMAIN-NAME',
+ 'DOMAIN-TYPE',
+ 'DOS',
+ 'DOUBLE',
+ 'DOWN',
+ 'DRAG-ENABLED',
+ 'DROP',
+ 'DROP-DOWN',
+ 'DROP-DOWN-LIST',
+ 'DROP-FILE-NOTIFY',
+ 'DROP-TARGET',
'DS-CLOSE-CURSOR',
'DSLOG-MANAGER',
- 'DUMP',
- 'DYNAMIC',
+ 'DUMP',
+ 'DYNAMIC',
'DYNAMIC-ENUM',
- 'DYNAMIC-FUNCTION',
+ 'DYNAMIC-FUNCTION',
'DYNAMIC-INVOKE',
- 'EACH',
- 'ECHO',
- 'EDGE',
- 'EDGE-',
- 'EDGE-C',
- 'EDGE-CH',
- 'EDGE-CHA',
- 'EDGE-CHAR',
+ 'EACH',
+ 'ECHO',
+ 'EDGE',
+ 'EDGE-',
+ 'EDGE-C',
+ 'EDGE-CH',
+ 'EDGE-CHA',
+ 'EDGE-CHAR',
'EDGE-CHARS',
- 'EDGE-P',
- 'EDGE-PI',
- 'EDGE-PIX',
- 'EDGE-PIXE',
- 'EDGE-PIXEL',
+ 'EDGE-P',
+ 'EDGE-PI',
+ 'EDGE-PIX',
+ 'EDGE-PIXE',
+ 'EDGE-PIXEL',
'EDGE-PIXELS',
- 'EDIT-CAN-PASTE',
- 'EDIT-CAN-UNDO',
- 'EDIT-CLEAR',
- 'EDIT-COPY',
- 'EDIT-CUT',
+ 'EDIT-CAN-PASTE',
+ 'EDIT-CAN-UNDO',
+ 'EDIT-CLEAR',
+ 'EDIT-COPY',
+ 'EDIT-CUT',
'EDIT-PASTE',
'EDIT-UNDO',
- 'EDITING',
- 'EDITOR',
- 'ELSE',
- 'EMPTY',
- 'EMPTY-TEMP-TABLE',
- 'ENABLE',
- 'ENABLED-FIELDS',
- 'ENCODE',
- 'ENCRYPT',
- 'ENCRYPT-AUDIT-MAC-KEY',
- 'ENCRYPTION-SALT',
- 'END',
- 'END-DOCUMENT',
- 'END-ELEMENT',
- 'END-EVENT-GROUP',
- 'END-FILE-DROP',
- 'END-KEY',
- 'END-MOVE',
- 'END-RESIZE',
- 'END-ROW-RESIZE',
- 'END-USER-PROMPT',
+ 'EDITING',
+ 'EDITOR',
+ 'ELSE',
+ 'EMPTY',
+ 'EMPTY-TEMP-TABLE',
+ 'ENABLE',
+ 'ENABLED-FIELDS',
+ 'ENCODE',
+ 'ENCRYPT',
+ 'ENCRYPT-AUDIT-MAC-KEY',
+ 'ENCRYPTION-SALT',
+ 'END',
+ 'END-DOCUMENT',
+ 'END-ELEMENT',
+ 'END-EVENT-GROUP',
+ 'END-FILE-DROP',
+ 'END-KEY',
+ 'END-MOVE',
+ 'END-RESIZE',
+ 'END-ROW-RESIZE',
+ 'END-USER-PROMPT',
'ENDKEY',
- 'ENTERED',
+ 'ENTERED',
'ENTITY-EXPANSION-LIMIT',
- 'ENTRY',
+ 'ENTRY',
'ENUM',
- 'EQ',
- 'ERROR',
- 'ERROR-COL',
- 'ERROR-COLU',
- 'ERROR-COLUM',
+ 'EQ',
+ 'ERROR',
+ 'ERROR-COL',
+ 'ERROR-COLU',
+ 'ERROR-COLUM',
'ERROR-COLUMN',
- 'ERROR-ROW',
- 'ERROR-STACK-TRACE',
- 'ERROR-STAT',
- 'ERROR-STATU',
+ 'ERROR-ROW',
+ 'ERROR-STACK-TRACE',
+ 'ERROR-STAT',
+ 'ERROR-STATU',
'ERROR-STATUS',
- 'ESCAPE',
- 'ETIME',
+ 'ESCAPE',
+ 'ETIME',
'EVENT',
- 'EVENT-GROUP-ID',
- 'EVENT-PROCEDURE',
- 'EVENT-PROCEDURE-CONTEXT',
- 'EVENT-T',
- 'EVENT-TY',
- 'EVENT-TYP',
+ 'EVENT-GROUP-ID',
+ 'EVENT-PROCEDURE',
+ 'EVENT-PROCEDURE-CONTEXT',
+ 'EVENT-T',
+ 'EVENT-TY',
+ 'EVENT-TYP',
'EVENT-TYPE',
'EVENTS',
- 'EXCEPT',
- 'EXCLUSIVE',
- 'EXCLUSIVE-',
+ 'EXCEPT',
+ 'EXCLUSIVE',
+ 'EXCLUSIVE-',
'EXCLUSIVE-ID',
- 'EXCLUSIVE-L',
- 'EXCLUSIVE-LO',
- 'EXCLUSIVE-LOC',
+ 'EXCLUSIVE-L',
+ 'EXCLUSIVE-LO',
+ 'EXCLUSIVE-LOC',
'EXCLUSIVE-LOCK',
- 'EXCLUSIVE-WEB-USER',
- 'EXECUTE',
- 'EXISTS',
- 'EXP',
- 'EXPAND',
- 'EXPANDABLE',
- 'EXPLICIT',
- 'EXPORT',
- 'EXPORT-PRINCIPAL',
- 'EXTENDED',
- 'EXTENT',
- 'EXTERNAL',
- 'FALSE',
- 'FETCH',
- 'FETCH-SELECTED-ROW',
- 'FGC',
- 'FGCO',
- 'FGCOL',
- 'FGCOLO',
+ 'EXCLUSIVE-WEB-USER',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXP',
+ 'EXPAND',
+ 'EXPANDABLE',
+ 'EXPLICIT',
+ 'EXPORT',
+ 'EXPORT-PRINCIPAL',
+ 'EXTENDED',
+ 'EXTENT',
+ 'EXTERNAL',
+ 'FALSE',
+ 'FETCH',
+ 'FETCH-SELECTED-ROW',
+ 'FGC',
+ 'FGCO',
+ 'FGCOL',
+ 'FGCOLO',
'FGCOLOR',
- 'FIELD',
- 'FIELDS',
- 'FILE',
- 'FILE-CREATE-DATE',
- 'FILE-CREATE-TIME',
- 'FILE-INFO',
- 'FILE-INFOR',
- 'FILE-INFORM',
- 'FILE-INFORMA',
- 'FILE-INFORMAT',
- 'FILE-INFORMATI',
- 'FILE-INFORMATIO',
+ 'FIELD',
+ 'FIELDS',
+ 'FILE',
+ 'FILE-CREATE-DATE',
+ 'FILE-CREATE-TIME',
+ 'FILE-INFO',
+ 'FILE-INFOR',
+ 'FILE-INFORM',
+ 'FILE-INFORMA',
+ 'FILE-INFORMAT',
+ 'FILE-INFORMATI',
+ 'FILE-INFORMATIO',
'FILE-INFORMATION',
- 'FILE-MOD-DATE',
- 'FILE-MOD-TIME',
- 'FILE-NAME',
- 'FILE-OFF',
- 'FILE-OFFS',
- 'FILE-OFFSE',
+ 'FILE-MOD-DATE',
+ 'FILE-MOD-TIME',
+ 'FILE-NAME',
+ 'FILE-OFF',
+ 'FILE-OFFS',
+ 'FILE-OFFSE',
'FILE-OFFSET',
- 'FILE-SIZE',
- 'FILE-TYPE',
+ 'FILE-SIZE',
+ 'FILE-TYPE',
'FILENAME',
- 'FILL',
+ 'FILL',
'FILL-IN',
- 'FILLED',
- 'FILTERS',
- 'FINAL',
- 'FINALLY',
- 'FIND',
- 'FIND-BY-ROWID',
- 'FIND-CASE-SENSITIVE',
- 'FIND-CURRENT',
- 'FIND-FIRST',
- 'FIND-GLOBAL',
- 'FIND-LAST',
- 'FIND-NEXT-OCCURRENCE',
- 'FIND-PREV-OCCURRENCE',
- 'FIND-SELECT',
- 'FIND-UNIQUE',
- 'FIND-WRAP-AROUND',
+ 'FILLED',
+ 'FILTERS',
+ 'FINAL',
+ 'FINALLY',
+ 'FIND',
+ 'FIND-BY-ROWID',
+ 'FIND-CASE-SENSITIVE',
+ 'FIND-CURRENT',
+ 'FIND-FIRST',
+ 'FIND-GLOBAL',
+ 'FIND-LAST',
+ 'FIND-NEXT-OCCURRENCE',
+ 'FIND-PREV-OCCURRENCE',
+ 'FIND-SELECT',
+ 'FIND-UNIQUE',
+ 'FIND-WRAP-AROUND',
'FINDER',
- 'FIRST',
- 'FIRST-ASYNCH-REQUEST',
- 'FIRST-CHILD',
- 'FIRST-COLUMN',
- 'FIRST-FORM',
- 'FIRST-OBJECT',
- 'FIRST-OF',
- 'FIRST-PROC',
- 'FIRST-PROCE',
- 'FIRST-PROCED',
- 'FIRST-PROCEDU',
- 'FIRST-PROCEDUR',
+ 'FIRST',
+ 'FIRST-ASYNCH-REQUEST',
+ 'FIRST-CHILD',
+ 'FIRST-COLUMN',
+ 'FIRST-FORM',
+ 'FIRST-OBJECT',
+ 'FIRST-OF',
+ 'FIRST-PROC',
+ 'FIRST-PROCE',
+ 'FIRST-PROCED',
+ 'FIRST-PROCEDU',
+ 'FIRST-PROCEDUR',
'FIRST-PROCEDURE',
- 'FIRST-SERVER',
- 'FIRST-TAB-I',
- 'FIRST-TAB-IT',
- 'FIRST-TAB-ITE',
+ 'FIRST-SERVER',
+ 'FIRST-TAB-I',
+ 'FIRST-TAB-IT',
+ 'FIRST-TAB-ITE',
'FIRST-TAB-ITEM',
- 'FIT-LAST-COLUMN',
- 'FIXED-ONLY',
- 'FLAT-BUTTON',
- 'FLOAT',
- 'FOCUS',
- 'FOCUSED-ROW',
- 'FOCUSED-ROW-SELECTED',
- 'FONT',
- 'FONT-TABLE',
- 'FOR',
- 'FORCE-FILE',
- 'FORE',
- 'FOREG',
- 'FOREGR',
- 'FOREGRO',
- 'FOREGROU',
- 'FOREGROUN',
+ 'FIT-LAST-COLUMN',
+ 'FIXED-ONLY',
+ 'FLAT-BUTTON',
+ 'FLOAT',
+ 'FOCUS',
+ 'FOCUSED-ROW',
+ 'FOCUSED-ROW-SELECTED',
+ 'FONT',
+ 'FONT-TABLE',
+ 'FOR',
+ 'FORCE-FILE',
+ 'FORE',
+ 'FOREG',
+ 'FOREGR',
+ 'FOREGRO',
+ 'FOREGROU',
+ 'FOREGROUN',
'FOREGROUND',
'FORM INPUT',
- 'FORM',
+ 'FORM',
'FORM-LONG-INPUT',
'FORMA',
- 'FORMAT',
+ 'FORMAT',
'FORMATTE',
- 'FORMATTED',
- 'FORWARD',
- 'FORWARDS',
+ 'FORMATTED',
+ 'FORWARD',
+ 'FORWARDS',
'FRAGMEN',
- 'FRAGMENT',
+ 'FRAGMENT',
'FRAM',
- 'FRAME',
- 'FRAME-COL',
- 'FRAME-DB',
- 'FRAME-DOWN',
- 'FRAME-FIELD',
- 'FRAME-FILE',
+ 'FRAME',
+ 'FRAME-COL',
+ 'FRAME-DB',
+ 'FRAME-DOWN',
+ 'FRAME-FIELD',
+ 'FRAME-FILE',
'FRAME-INDE',
- 'FRAME-INDEX',
- 'FRAME-LINE',
- 'FRAME-NAME',
- 'FRAME-ROW',
- 'FRAME-SPA',
- 'FRAME-SPAC',
- 'FRAME-SPACI',
- 'FRAME-SPACIN',
+ 'FRAME-INDEX',
+ 'FRAME-LINE',
+ 'FRAME-NAME',
+ 'FRAME-ROW',
+ 'FRAME-SPA',
+ 'FRAME-SPAC',
+ 'FRAME-SPACI',
+ 'FRAME-SPACIN',
'FRAME-SPACING',
- 'FRAME-VAL',
- 'FRAME-VALU',
+ 'FRAME-VAL',
+ 'FRAME-VALU',
'FRAME-VALUE',
- 'FRAME-X',
- 'FRAME-Y',
- 'FREQUENCY',
- 'FROM',
- 'FROM-C',
- 'FROM-CH',
- 'FROM-CHA',
- 'FROM-CHAR',
+ 'FRAME-X',
+ 'FRAME-Y',
+ 'FREQUENCY',
+ 'FROM',
+ 'FROM-C',
+ 'FROM-CH',
+ 'FROM-CHA',
+ 'FROM-CHAR',
'FROM-CHARS',
- 'FROM-CUR',
- 'FROM-CURR',
- 'FROM-CURRE',
- 'FROM-CURREN',
+ 'FROM-CUR',
+ 'FROM-CURR',
+ 'FROM-CURRE',
+ 'FROM-CURREN',
'FROM-CURRENT',
- 'FROM-P',
- 'FROM-PI',
- 'FROM-PIX',
- 'FROM-PIXE',
- 'FROM-PIXEL',
+ 'FROM-P',
+ 'FROM-PI',
+ 'FROM-PIX',
+ 'FROM-PIXE',
+ 'FROM-PIXEL',
'FROM-PIXELS',
- 'FULL-HEIGHT',
- 'FULL-HEIGHT-',
- 'FULL-HEIGHT-C',
- 'FULL-HEIGHT-CH',
- 'FULL-HEIGHT-CHA',
- 'FULL-HEIGHT-CHAR',
+ 'FULL-HEIGHT',
+ 'FULL-HEIGHT-',
+ 'FULL-HEIGHT-C',
+ 'FULL-HEIGHT-CH',
+ 'FULL-HEIGHT-CHA',
+ 'FULL-HEIGHT-CHAR',
'FULL-HEIGHT-CHARS',
- 'FULL-HEIGHT-P',
- 'FULL-HEIGHT-PI',
- 'FULL-HEIGHT-PIX',
- 'FULL-HEIGHT-PIXE',
- 'FULL-HEIGHT-PIXEL',
+ 'FULL-HEIGHT-P',
+ 'FULL-HEIGHT-PI',
+ 'FULL-HEIGHT-PIX',
+ 'FULL-HEIGHT-PIXE',
+ 'FULL-HEIGHT-PIXEL',
'FULL-HEIGHT-PIXELS',
- 'FULL-PATHN',
- 'FULL-PATHNA',
- 'FULL-PATHNAM',
+ 'FULL-PATHN',
+ 'FULL-PATHNA',
+ 'FULL-PATHNAM',
'FULL-PATHNAME',
- 'FULL-WIDTH',
- 'FULL-WIDTH-',
- 'FULL-WIDTH-C',
- 'FULL-WIDTH-CH',
- 'FULL-WIDTH-CHA',
- 'FULL-WIDTH-CHAR',
+ 'FULL-WIDTH',
+ 'FULL-WIDTH-',
+ 'FULL-WIDTH-C',
+ 'FULL-WIDTH-CH',
+ 'FULL-WIDTH-CHA',
+ 'FULL-WIDTH-CHAR',
'FULL-WIDTH-CHARS',
- 'FULL-WIDTH-P',
- 'FULL-WIDTH-PI',
- 'FULL-WIDTH-PIX',
- 'FULL-WIDTH-PIXE',
- 'FULL-WIDTH-PIXEL',
+ 'FULL-WIDTH-P',
+ 'FULL-WIDTH-PI',
+ 'FULL-WIDTH-PIX',
+ 'FULL-WIDTH-PIXE',
+ 'FULL-WIDTH-PIXEL',
'FULL-WIDTH-PIXELS',
- 'FUNCTION',
- 'FUNCTION-CALL-TYPE',
+ 'FUNCTION',
+ 'FUNCTION-CALL-TYPE',
'GATEWAY',
- 'GATEWAYS',
- 'GE',
- 'GENERATE-MD5',
- 'GENERATE-PBE-KEY',
- 'GENERATE-PBE-SALT',
- 'GENERATE-RANDOM-KEY',
- 'GENERATE-UUID',
- 'GET',
- 'GET-ATTR-CALL-TYPE',
- 'GET-ATTRIBUTE-NODE',
- 'GET-BINARY-DATA',
- 'GET-BLUE',
- 'GET-BLUE-',
- 'GET-BLUE-V',
- 'GET-BLUE-VA',
- 'GET-BLUE-VAL',
- 'GET-BLUE-VALU',
+ 'GATEWAYS',
+ 'GE',
+ 'GENERATE-MD5',
+ 'GENERATE-PBE-KEY',
+ 'GENERATE-PBE-SALT',
+ 'GENERATE-RANDOM-KEY',
+ 'GENERATE-UUID',
+ 'GET',
+ 'GET-ATTR-CALL-TYPE',
+ 'GET-ATTRIBUTE-NODE',
+ 'GET-BINARY-DATA',
+ 'GET-BLUE',
+ 'GET-BLUE-',
+ 'GET-BLUE-V',
+ 'GET-BLUE-VA',
+ 'GET-BLUE-VAL',
+ 'GET-BLUE-VALU',
'GET-BLUE-VALUE',
- 'GET-BROWSE-COLUMN',
+ 'GET-BROWSE-COLUMN',
'GET-BUFFER-HANDLE',
- 'GET-BYTE',
- 'GET-CALLBACK-PROC-CONTEXT',
- 'GET-CALLBACK-PROC-NAME',
- 'GET-CGI-LIST',
- 'GET-CGI-LONG-VALUE',
- 'GET-CGI-VALUE',
+ 'GET-BYTE',
+ 'GET-CALLBACK-PROC-CONTEXT',
+ 'GET-CALLBACK-PROC-NAME',
+ 'GET-CGI-LIST',
+ 'GET-CGI-LONG-VALUE',
+ 'GET-CGI-VALUE',
'GET-CLASS',
- 'GET-CODEPAGES',
- 'GET-COLLATIONS',
- 'GET-CONFIG-VALUE',
- 'GET-CURRENT',
- 'GET-DOUBLE',
- 'GET-DROPPED-FILE',
- 'GET-DYNAMIC',
- 'GET-ERROR-COLUMN',
- 'GET-ERROR-ROW',
- 'GET-FILE',
- 'GET-FILE-NAME',
+ 'GET-CODEPAGES',
+ 'GET-COLLATIONS',
+ 'GET-CONFIG-VALUE',
+ 'GET-CURRENT',
+ 'GET-DOUBLE',
+ 'GET-DROPPED-FILE',
+ 'GET-DYNAMIC',
+ 'GET-ERROR-COLUMN',
+ 'GET-ERROR-ROW',
+ 'GET-FILE',
+ 'GET-FILE-NAME',
'GET-FILE-OFFSE',
- 'GET-FILE-OFFSET',
- 'GET-FIRST',
- 'GET-FLOAT',
- 'GET-GREEN',
- 'GET-GREEN-',
- 'GET-GREEN-V',
- 'GET-GREEN-VA',
- 'GET-GREEN-VAL',
- 'GET-GREEN-VALU',
+ 'GET-FILE-OFFSET',
+ 'GET-FIRST',
+ 'GET-FLOAT',
+ 'GET-GREEN',
+ 'GET-GREEN-',
+ 'GET-GREEN-V',
+ 'GET-GREEN-VA',
+ 'GET-GREEN-VAL',
+ 'GET-GREEN-VALU',
'GET-GREEN-VALUE',
- 'GET-INDEX-BY-NAMESPACE-NAME',
- 'GET-INDEX-BY-QNAME',
- 'GET-INT64',
- 'GET-ITERATION',
- 'GET-KEY-VAL',
- 'GET-KEY-VALU',
+ 'GET-INDEX-BY-NAMESPACE-NAME',
+ 'GET-INDEX-BY-QNAME',
+ 'GET-INT64',
+ 'GET-ITERATION',
+ 'GET-KEY-VAL',
+ 'GET-KEY-VALU',
'GET-KEY-VALUE',
- 'GET-LAST',
- 'GET-LOCALNAME-BY-INDEX',
- 'GET-LONG',
- 'GET-MESSAGE',
- 'GET-NEXT',
- 'GET-NUMBER',
- 'GET-POINTER-VALUE',
- 'GET-PREV',
- 'GET-PRINTERS',
- 'GET-PROPERTY',
- 'GET-QNAME-BY-INDEX',
- 'GET-RED',
- 'GET-RED-',
- 'GET-RED-V',
- 'GET-RED-VA',
- 'GET-RED-VAL',
- 'GET-RED-VALU',
+ 'GET-LAST',
+ 'GET-LOCALNAME-BY-INDEX',
+ 'GET-LONG',
+ 'GET-MESSAGE',
+ 'GET-NEXT',
+ 'GET-NUMBER',
+ 'GET-POINTER-VALUE',
+ 'GET-PREV',
+ 'GET-PRINTERS',
+ 'GET-PROPERTY',
+ 'GET-QNAME-BY-INDEX',
+ 'GET-RED',
+ 'GET-RED-',
+ 'GET-RED-V',
+ 'GET-RED-VA',
+ 'GET-RED-VAL',
+ 'GET-RED-VALU',
'GET-RED-VALUE',
- 'GET-REPOSITIONED-ROW',
- 'GET-RGB-VALUE',
- 'GET-SELECTED',
- 'GET-SELECTED-',
- 'GET-SELECTED-W',
- 'GET-SELECTED-WI',
- 'GET-SELECTED-WID',
- 'GET-SELECTED-WIDG',
- 'GET-SELECTED-WIDGE',
+ 'GET-REPOSITIONED-ROW',
+ 'GET-RGB-VALUE',
+ 'GET-SELECTED',
+ 'GET-SELECTED-',
+ 'GET-SELECTED-W',
+ 'GET-SELECTED-WI',
+ 'GET-SELECTED-WID',
+ 'GET-SELECTED-WIDG',
+ 'GET-SELECTED-WIDGE',
'GET-SELECTED-WIDGET',
- 'GET-SHORT',
- 'GET-SIGNATURE',
- 'GET-SIZE',
- 'GET-STRING',
- 'GET-TAB-ITEM',
- 'GET-TEXT-HEIGHT',
- 'GET-TEXT-HEIGHT-',
- 'GET-TEXT-HEIGHT-C',
- 'GET-TEXT-HEIGHT-CH',
- 'GET-TEXT-HEIGHT-CHA',
- 'GET-TEXT-HEIGHT-CHAR',
+ 'GET-SHORT',
+ 'GET-SIGNATURE',
+ 'GET-SIZE',
+ 'GET-STRING',
+ 'GET-TAB-ITEM',
+ 'GET-TEXT-HEIGHT',
+ 'GET-TEXT-HEIGHT-',
+ 'GET-TEXT-HEIGHT-C',
+ 'GET-TEXT-HEIGHT-CH',
+ 'GET-TEXT-HEIGHT-CHA',
+ 'GET-TEXT-HEIGHT-CHAR',
'GET-TEXT-HEIGHT-CHARS',
- 'GET-TEXT-HEIGHT-P',
- 'GET-TEXT-HEIGHT-PI',
- 'GET-TEXT-HEIGHT-PIX',
- 'GET-TEXT-HEIGHT-PIXE',
- 'GET-TEXT-HEIGHT-PIXEL',
+ 'GET-TEXT-HEIGHT-P',
+ 'GET-TEXT-HEIGHT-PI',
+ 'GET-TEXT-HEIGHT-PIX',
+ 'GET-TEXT-HEIGHT-PIXE',
+ 'GET-TEXT-HEIGHT-PIXEL',
'GET-TEXT-HEIGHT-PIXELS',
- 'GET-TEXT-WIDTH',
- 'GET-TEXT-WIDTH-',
- 'GET-TEXT-WIDTH-C',
- 'GET-TEXT-WIDTH-CH',
- 'GET-TEXT-WIDTH-CHA',
- 'GET-TEXT-WIDTH-CHAR',
+ 'GET-TEXT-WIDTH',
+ 'GET-TEXT-WIDTH-',
+ 'GET-TEXT-WIDTH-C',
+ 'GET-TEXT-WIDTH-CH',
+ 'GET-TEXT-WIDTH-CHA',
+ 'GET-TEXT-WIDTH-CHAR',
'GET-TEXT-WIDTH-CHARS',
- 'GET-TEXT-WIDTH-P',
- 'GET-TEXT-WIDTH-PI',
- 'GET-TEXT-WIDTH-PIX',
- 'GET-TEXT-WIDTH-PIXE',
- 'GET-TEXT-WIDTH-PIXEL',
+ 'GET-TEXT-WIDTH-P',
+ 'GET-TEXT-WIDTH-PI',
+ 'GET-TEXT-WIDTH-PIX',
+ 'GET-TEXT-WIDTH-PIXE',
+ 'GET-TEXT-WIDTH-PIXEL',
'GET-TEXT-WIDTH-PIXELS',
- 'GET-TYPE-BY-INDEX',
- 'GET-TYPE-BY-NAMESPACE-NAME',
- 'GET-TYPE-BY-QNAME',
- 'GET-UNSIGNED-LONG',
- 'GET-UNSIGNED-SHORT',
- 'GET-URI-BY-INDEX',
- 'GET-VALUE-BY-INDEX',
- 'GET-VALUE-BY-NAMESPACE-NAME',
- 'GET-VALUE-BY-QNAME',
- 'GET-WAIT-STATE',
+ 'GET-TYPE-BY-INDEX',
+ 'GET-TYPE-BY-NAMESPACE-NAME',
+ 'GET-TYPE-BY-QNAME',
+ 'GET-UNSIGNED-LONG',
+ 'GET-UNSIGNED-SHORT',
+ 'GET-URI-BY-INDEX',
+ 'GET-VALUE-BY-INDEX',
+ 'GET-VALUE-BY-NAMESPACE-NAME',
+ 'GET-VALUE-BY-QNAME',
+ 'GET-WAIT-STATE',
'GETBYTE',
- 'GLOBAL',
- 'GO-ON',
- 'GO-PEND',
- 'GO-PENDI',
- 'GO-PENDIN',
+ 'GLOBAL',
+ 'GO-ON',
+ 'GO-PEND',
+ 'GO-PENDI',
+ 'GO-PENDIN',
'GO-PENDING',
- 'GRANT',
- 'GRAPHIC-E',
- 'GRAPHIC-ED',
- 'GRAPHIC-EDG',
+ 'GRANT',
+ 'GRAPHIC-E',
+ 'GRAPHIC-ED',
+ 'GRAPHIC-EDG',
'GRAPHIC-EDGE',
- 'GRID-FACTOR-H',
- 'GRID-FACTOR-HO',
- 'GRID-FACTOR-HOR',
- 'GRID-FACTOR-HORI',
- 'GRID-FACTOR-HORIZ',
- 'GRID-FACTOR-HORIZO',
- 'GRID-FACTOR-HORIZON',
- 'GRID-FACTOR-HORIZONT',
- 'GRID-FACTOR-HORIZONTA',
+ 'GRID-FACTOR-H',
+ 'GRID-FACTOR-HO',
+ 'GRID-FACTOR-HOR',
+ 'GRID-FACTOR-HORI',
+ 'GRID-FACTOR-HORIZ',
+ 'GRID-FACTOR-HORIZO',
+ 'GRID-FACTOR-HORIZON',
+ 'GRID-FACTOR-HORIZONT',
+ 'GRID-FACTOR-HORIZONTA',
'GRID-FACTOR-HORIZONTAL',
- 'GRID-FACTOR-V',
- 'GRID-FACTOR-VE',
- 'GRID-FACTOR-VER',
- 'GRID-FACTOR-VERT',
- 'GRID-FACTOR-VERTI',
- 'GRID-FACTOR-VERTIC',
- 'GRID-FACTOR-VERTICA',
+ 'GRID-FACTOR-V',
+ 'GRID-FACTOR-VE',
+ 'GRID-FACTOR-VER',
+ 'GRID-FACTOR-VERT',
+ 'GRID-FACTOR-VERTI',
+ 'GRID-FACTOR-VERTIC',
+ 'GRID-FACTOR-VERTICA',
'GRID-FACTOR-VERTICAL',
- 'GRID-SNAP',
- 'GRID-UNIT-HEIGHT',
- 'GRID-UNIT-HEIGHT-',
- 'GRID-UNIT-HEIGHT-C',
- 'GRID-UNIT-HEIGHT-CH',
- 'GRID-UNIT-HEIGHT-CHA',
+ 'GRID-SNAP',
+ 'GRID-UNIT-HEIGHT',
+ 'GRID-UNIT-HEIGHT-',
+ 'GRID-UNIT-HEIGHT-C',
+ 'GRID-UNIT-HEIGHT-CH',
+ 'GRID-UNIT-HEIGHT-CHA',
'GRID-UNIT-HEIGHT-CHARS',
- 'GRID-UNIT-HEIGHT-P',
- 'GRID-UNIT-HEIGHT-PI',
- 'GRID-UNIT-HEIGHT-PIX',
- 'GRID-UNIT-HEIGHT-PIXE',
- 'GRID-UNIT-HEIGHT-PIXEL',
+ 'GRID-UNIT-HEIGHT-P',
+ 'GRID-UNIT-HEIGHT-PI',
+ 'GRID-UNIT-HEIGHT-PIX',
+ 'GRID-UNIT-HEIGHT-PIXE',
+ 'GRID-UNIT-HEIGHT-PIXEL',
'GRID-UNIT-HEIGHT-PIXELS',
- 'GRID-UNIT-WIDTH',
- 'GRID-UNIT-WIDTH-',
- 'GRID-UNIT-WIDTH-C',
- 'GRID-UNIT-WIDTH-CH',
- 'GRID-UNIT-WIDTH-CHA',
- 'GRID-UNIT-WIDTH-CHAR',
+ 'GRID-UNIT-WIDTH',
+ 'GRID-UNIT-WIDTH-',
+ 'GRID-UNIT-WIDTH-C',
+ 'GRID-UNIT-WIDTH-CH',
+ 'GRID-UNIT-WIDTH-CHA',
+ 'GRID-UNIT-WIDTH-CHAR',
'GRID-UNIT-WIDTH-CHARS',
- 'GRID-UNIT-WIDTH-P',
- 'GRID-UNIT-WIDTH-PI',
- 'GRID-UNIT-WIDTH-PIX',
- 'GRID-UNIT-WIDTH-PIXE',
- 'GRID-UNIT-WIDTH-PIXEL',
+ 'GRID-UNIT-WIDTH-P',
+ 'GRID-UNIT-WIDTH-PI',
+ 'GRID-UNIT-WIDTH-PIX',
+ 'GRID-UNIT-WIDTH-PIXE',
+ 'GRID-UNIT-WIDTH-PIXEL',
'GRID-UNIT-WIDTH-PIXELS',
- 'GRID-VISIBLE',
- 'GROUP',
- 'GT',
- 'GUID',
+ 'GRID-VISIBLE',
+ 'GROUP',
+ 'GT',
+ 'GUID',
'HANDLE',
- 'HANDLER',
- 'HAS-RECORDS',
- 'HAVING',
- 'HEADER',
- 'HEIGHT',
- 'HEIGHT-',
- 'HEIGHT-C',
- 'HEIGHT-CH',
- 'HEIGHT-CHA',
- 'HEIGHT-CHAR',
+ 'HANDLER',
+ 'HAS-RECORDS',
+ 'HAVING',
+ 'HEADER',
+ 'HEIGHT',
+ 'HEIGHT-',
+ 'HEIGHT-C',
+ 'HEIGHT-CH',
+ 'HEIGHT-CHA',
+ 'HEIGHT-CHAR',
'HEIGHT-CHARS',
- 'HEIGHT-P',
- 'HEIGHT-PI',
- 'HEIGHT-PIX',
- 'HEIGHT-PIXE',
- 'HEIGHT-PIXEL',
+ 'HEIGHT-P',
+ 'HEIGHT-PI',
+ 'HEIGHT-PIX',
+ 'HEIGHT-PIXE',
+ 'HEIGHT-PIXEL',
'HEIGHT-PIXELS',
- 'HELP',
- 'HEX-DECODE',
- 'HEX-ENCODE',
- 'HIDDEN',
- 'HIDE',
- 'HORI',
- 'HORIZ',
- 'HORIZO',
- 'HORIZON',
- 'HORIZONT',
- 'HORIZONTA',
+ 'HELP',
+ 'HEX-DECODE',
+ 'HEX-ENCODE',
+ 'HIDDEN',
+ 'HIDE',
+ 'HORI',
+ 'HORIZ',
+ 'HORIZO',
+ 'HORIZON',
+ 'HORIZONT',
+ 'HORIZONTA',
'HORIZONTAL',
- 'HOST-BYTE-ORDER',
- 'HTML-CHARSET',
- 'HTML-END-OF-LINE',
- 'HTML-END-OF-PAGE',
- 'HTML-FRAME-BEGIN',
- 'HTML-FRAME-END',
- 'HTML-HEADER-BEGIN',
- 'HTML-HEADER-END',
- 'HTML-TITLE-BEGIN',
- 'HTML-TITLE-END',
- 'HWND',
- 'ICON',
- 'IF',
- 'IMAGE',
- 'IMAGE-DOWN',
- 'IMAGE-INSENSITIVE',
- 'IMAGE-SIZE',
- 'IMAGE-SIZE-C',
- 'IMAGE-SIZE-CH',
- 'IMAGE-SIZE-CHA',
- 'IMAGE-SIZE-CHAR',
+ 'HOST-BYTE-ORDER',
+ 'HTML-CHARSET',
+ 'HTML-END-OF-LINE',
+ 'HTML-END-OF-PAGE',
+ 'HTML-FRAME-BEGIN',
+ 'HTML-FRAME-END',
+ 'HTML-HEADER-BEGIN',
+ 'HTML-HEADER-END',
+ 'HTML-TITLE-BEGIN',
+ 'HTML-TITLE-END',
+ 'HWND',
+ 'ICON',
+ 'IF',
+ 'IMAGE',
+ 'IMAGE-DOWN',
+ 'IMAGE-INSENSITIVE',
+ 'IMAGE-SIZE',
+ 'IMAGE-SIZE-C',
+ 'IMAGE-SIZE-CH',
+ 'IMAGE-SIZE-CHA',
+ 'IMAGE-SIZE-CHAR',
'IMAGE-SIZE-CHARS',
- 'IMAGE-SIZE-P',
- 'IMAGE-SIZE-PI',
- 'IMAGE-SIZE-PIX',
- 'IMAGE-SIZE-PIXE',
- 'IMAGE-SIZE-PIXEL',
+ 'IMAGE-SIZE-P',
+ 'IMAGE-SIZE-PI',
+ 'IMAGE-SIZE-PIX',
+ 'IMAGE-SIZE-PIXE',
+ 'IMAGE-SIZE-PIXEL',
'IMAGE-SIZE-PIXELS',
- 'IMAGE-UP',
- 'IMMEDIATE-DISPLAY',
- 'IMPLEMENTS',
- 'IMPORT',
- 'IMPORT-PRINCIPAL',
- 'IN',
+ 'IMAGE-UP',
+ 'IMMEDIATE-DISPLAY',
+ 'IMPLEMENTS',
+ 'IMPORT',
+ 'IMPORT-PRINCIPAL',
+ 'IN',
'IN-HANDLE',
- 'INCREMENT-EXCLUSIVE-ID',
- 'INDEX',
- 'INDEX-HINT',
- 'INDEX-INFORMATION',
+ 'INCREMENT-EXCLUSIVE-ID',
+ 'INDEX',
+ 'INDEX-HINT',
+ 'INDEX-INFORMATION',
'INDEXED-REPOSITION',
- 'INDICATOR',
- 'INFO',
- 'INFOR',
- 'INFORM',
- 'INFORMA',
- 'INFORMAT',
- 'INFORMATI',
- 'INFORMATIO',
+ 'INDICATOR',
+ 'INFO',
+ 'INFOR',
+ 'INFORM',
+ 'INFORMA',
+ 'INFORMAT',
+ 'INFORMATI',
+ 'INFORMATIO',
'INFORMATION',
- 'INHERIT-BGC',
- 'INHERIT-BGCO',
- 'INHERIT-BGCOL',
- 'INHERIT-BGCOLO',
+ 'INHERIT-BGC',
+ 'INHERIT-BGCO',
+ 'INHERIT-BGCOL',
+ 'INHERIT-BGCOLO',
'INHERIT-BGCOLOR',
- 'INHERIT-FGC',
- 'INHERIT-FGCO',
- 'INHERIT-FGCOL',
- 'INHERIT-FGCOLO',
+ 'INHERIT-FGC',
+ 'INHERIT-FGCO',
+ 'INHERIT-FGCOL',
+ 'INHERIT-FGCOLO',
'INHERIT-FGCOLOR',
- 'INHERITS',
- 'INIT',
- 'INITI',
- 'INITIA',
+ 'INHERITS',
+ 'INIT',
+ 'INITI',
+ 'INITIA',
'INITIAL',
- 'INITIAL-DIR',
- 'INITIAL-FILTER',
- 'INITIALIZE-DOCUMENT-TYPE',
- 'INITIATE',
- 'INNER-CHARS',
- 'INNER-LINES',
- 'INPUT',
- 'INPUT-O',
- 'INPUT-OU',
- 'INPUT-OUT',
- 'INPUT-OUTP',
- 'INPUT-OUTPU',
+ 'INITIAL-DIR',
+ 'INITIAL-FILTER',
+ 'INITIALIZE-DOCUMENT-TYPE',
+ 'INITIATE',
+ 'INNER-CHARS',
+ 'INNER-LINES',
+ 'INPUT',
+ 'INPUT-O',
+ 'INPUT-OU',
+ 'INPUT-OUT',
+ 'INPUT-OUTP',
+ 'INPUT-OUTPU',
'INPUT-OUTPUT',
- 'INPUT-VALUE',
- 'INSERT',
- 'INSERT-ATTRIBUTE',
- 'INSERT-B',
- 'INSERT-BA',
- 'INSERT-BAC',
- 'INSERT-BACK',
- 'INSERT-BACKT',
- 'INSERT-BACKTA',
+ 'INPUT-VALUE',
+ 'INSERT',
+ 'INSERT-ATTRIBUTE',
+ 'INSERT-B',
+ 'INSERT-BA',
+ 'INSERT-BAC',
+ 'INSERT-BACK',
+ 'INSERT-BACKT',
+ 'INSERT-BACKTA',
'INSERT-BACKTAB',
- 'INSERT-FILE',
- 'INSERT-ROW',
- 'INSERT-STRING',
- 'INSERT-T',
- 'INSERT-TA',
+ 'INSERT-FILE',
+ 'INSERT-ROW',
+ 'INSERT-STRING',
+ 'INSERT-T',
+ 'INSERT-TA',
'INSERT-TAB',
'INT64',
'INT',
'INTEGER',
- 'INTERFACE',
- 'INTERNAL-ENTRIES',
- 'INTO',
- 'INVOKE',
- 'IS',
- 'IS-ATTR',
- 'IS-ATTR-',
- 'IS-ATTR-S',
- 'IS-ATTR-SP',
- 'IS-ATTR-SPA',
- 'IS-ATTR-SPAC',
+ 'INTERFACE',
+ 'INTERNAL-ENTRIES',
+ 'INTO',
+ 'INVOKE',
+ 'IS',
+ 'IS-ATTR',
+ 'IS-ATTR-',
+ 'IS-ATTR-S',
+ 'IS-ATTR-SP',
+ 'IS-ATTR-SPA',
+ 'IS-ATTR-SPAC',
'IS-ATTR-SPACE',
- 'IS-CLASS',
+ 'IS-CLASS',
'IS-JSON',
- 'IS-LEAD-BYTE',
- 'IS-OPEN',
- 'IS-PARAMETER-SET',
+ 'IS-LEAD-BYTE',
+ 'IS-OPEN',
+ 'IS-PARAMETER-SET',
'IS-PARTITIONED',
- 'IS-ROW-SELECTED',
- 'IS-SELECTED',
+ 'IS-ROW-SELECTED',
+ 'IS-SELECTED',
'IS-XML',
- 'ITEM',
- 'ITEMS-PER-ROW',
- 'JOIN',
- 'JOIN-BY-SQLDB',
- 'KBLABEL',
- 'KEEP-CONNECTION-OPEN',
- 'KEEP-FRAME-Z',
- 'KEEP-FRAME-Z-',
- 'KEEP-FRAME-Z-O',
- 'KEEP-FRAME-Z-OR',
- 'KEEP-FRAME-Z-ORD',
- 'KEEP-FRAME-Z-ORDE',
+ 'ITEM',
+ 'ITEMS-PER-ROW',
+ 'JOIN',
+ 'JOIN-BY-SQLDB',
+ 'KBLABEL',
+ 'KEEP-CONNECTION-OPEN',
+ 'KEEP-FRAME-Z',
+ 'KEEP-FRAME-Z-',
+ 'KEEP-FRAME-Z-O',
+ 'KEEP-FRAME-Z-OR',
+ 'KEEP-FRAME-Z-ORD',
+ 'KEEP-FRAME-Z-ORDE',
'KEEP-FRAME-Z-ORDER',
- 'KEEP-MESSAGES',
- 'KEEP-SECURITY-CACHE',
- 'KEEP-TAB-ORDER',
- 'KEY',
+ 'KEEP-MESSAGES',
+ 'KEEP-SECURITY-CACHE',
+ 'KEEP-TAB-ORDER',
+ 'KEY',
'KEY-CODE',
'KEY-FUNC',
'KEY-FUNCT',
@@ -1228,198 +1228,198 @@ OPENEDGEKEYWORDS = (
'KEY-FUNCTIO',
'KEY-FUNCTION',
'KEY-LABEL',
- 'KEYCODE',
- 'KEYFUNC',
- 'KEYFUNCT',
- 'KEYFUNCTI',
- 'KEYFUNCTIO',
+ 'KEYCODE',
+ 'KEYFUNC',
+ 'KEYFUNCT',
+ 'KEYFUNCTI',
+ 'KEYFUNCTIO',
'KEYFUNCTION',
- 'KEYLABEL',
- 'KEYS',
- 'KEYWORD',
- 'KEYWORD-ALL',
- 'LABEL',
- 'LABEL-BGC',
- 'LABEL-BGCO',
- 'LABEL-BGCOL',
- 'LABEL-BGCOLO',
+ 'KEYLABEL',
+ 'KEYS',
+ 'KEYWORD',
+ 'KEYWORD-ALL',
+ 'LABEL',
+ 'LABEL-BGC',
+ 'LABEL-BGCO',
+ 'LABEL-BGCOL',
+ 'LABEL-BGCOLO',
'LABEL-BGCOLOR',
- 'LABEL-DC',
- 'LABEL-DCO',
- 'LABEL-DCOL',
- 'LABEL-DCOLO',
+ 'LABEL-DC',
+ 'LABEL-DCO',
+ 'LABEL-DCOL',
+ 'LABEL-DCOLO',
'LABEL-DCOLOR',
- 'LABEL-FGC',
- 'LABEL-FGCO',
- 'LABEL-FGCOL',
- 'LABEL-FGCOLO',
+ 'LABEL-FGC',
+ 'LABEL-FGCO',
+ 'LABEL-FGCOL',
+ 'LABEL-FGCOLO',
'LABEL-FGCOLOR',
- 'LABEL-FONT',
- 'LABEL-PFC',
- 'LABEL-PFCO',
- 'LABEL-PFCOL',
- 'LABEL-PFCOLO',
+ 'LABEL-FONT',
+ 'LABEL-PFC',
+ 'LABEL-PFCO',
+ 'LABEL-PFCOL',
+ 'LABEL-PFCOLO',
'LABEL-PFCOLOR',
- 'LABELS',
+ 'LABELS',
'LABELS-HAVE-COLONS',
- 'LANDSCAPE',
+ 'LANDSCAPE',
'LANGUAGE',
- 'LANGUAGES',
- 'LARGE',
- 'LARGE-TO-SMALL',
- 'LAST',
- 'LAST-ASYNCH-REQUEST',
- 'LAST-BATCH',
- 'LAST-CHILD',
+ 'LANGUAGES',
+ 'LARGE',
+ 'LARGE-TO-SMALL',
+ 'LAST',
+ 'LAST-ASYNCH-REQUEST',
+ 'LAST-BATCH',
+ 'LAST-CHILD',
'LAST-EVEN',
- 'LAST-EVENT',
- 'LAST-FORM',
- 'LAST-KEY',
- 'LAST-OBJECT',
- 'LAST-OF',
- 'LAST-PROCE',
- 'LAST-PROCED',
- 'LAST-PROCEDU',
- 'LAST-PROCEDUR',
+ 'LAST-EVENT',
+ 'LAST-FORM',
+ 'LAST-KEY',
+ 'LAST-OBJECT',
+ 'LAST-OF',
+ 'LAST-PROCE',
+ 'LAST-PROCED',
+ 'LAST-PROCEDU',
+ 'LAST-PROCEDUR',
'LAST-PROCEDURE',
- 'LAST-SERVER',
- 'LAST-TAB-I',
- 'LAST-TAB-IT',
- 'LAST-TAB-ITE',
+ 'LAST-SERVER',
+ 'LAST-TAB-I',
+ 'LAST-TAB-IT',
+ 'LAST-TAB-ITE',
'LAST-TAB-ITEM',
'LASTKEY',
- 'LC',
- 'LDBNAME',
- 'LE',
- 'LEAVE',
- 'LEFT-ALIGN',
- 'LEFT-ALIGNE',
+ 'LC',
+ 'LDBNAME',
+ 'LE',
+ 'LEAVE',
+ 'LEFT-ALIGN',
+ 'LEFT-ALIGNE',
'LEFT-ALIGNED',
- 'LEFT-TRIM',
- 'LENGTH',
- 'LIBRARY',
- 'LIKE',
- 'LIKE-SEQUENTIAL',
- 'LINE',
- 'LINE-COUNT',
- 'LINE-COUNTE',
+ 'LEFT-TRIM',
+ 'LENGTH',
+ 'LIBRARY',
+ 'LIKE',
+ 'LIKE-SEQUENTIAL',
+ 'LINE',
+ 'LINE-COUNT',
+ 'LINE-COUNTE',
'LINE-COUNTER',
- 'LIST-EVENTS',
- 'LIST-ITEM-PAIRS',
- 'LIST-ITEMS',
- 'LIST-PROPERTY-NAMES',
- 'LIST-QUERY-ATTRS',
- 'LIST-SET-ATTRS',
- 'LIST-WIDGETS',
+ 'LIST-EVENTS',
+ 'LIST-ITEM-PAIRS',
+ 'LIST-ITEMS',
+ 'LIST-PROPERTY-NAMES',
+ 'LIST-QUERY-ATTRS',
+ 'LIST-SET-ATTRS',
+ 'LIST-WIDGETS',
'LISTI',
'LISTIN',
'LISTING',
- 'LITERAL-QUESTION',
- 'LITTLE-ENDIAN',
- 'LOAD',
- 'LOAD-DOMAINS',
- 'LOAD-ICON',
- 'LOAD-IMAGE',
- 'LOAD-IMAGE-DOWN',
- 'LOAD-IMAGE-INSENSITIVE',
- 'LOAD-IMAGE-UP',
- 'LOAD-MOUSE-P',
- 'LOAD-MOUSE-PO',
- 'LOAD-MOUSE-POI',
- 'LOAD-MOUSE-POIN',
- 'LOAD-MOUSE-POINT',
- 'LOAD-MOUSE-POINTE',
+ 'LITERAL-QUESTION',
+ 'LITTLE-ENDIAN',
+ 'LOAD',
+ 'LOAD-DOMAINS',
+ 'LOAD-ICON',
+ 'LOAD-IMAGE',
+ 'LOAD-IMAGE-DOWN',
+ 'LOAD-IMAGE-INSENSITIVE',
+ 'LOAD-IMAGE-UP',
+ 'LOAD-MOUSE-P',
+ 'LOAD-MOUSE-PO',
+ 'LOAD-MOUSE-POI',
+ 'LOAD-MOUSE-POIN',
+ 'LOAD-MOUSE-POINT',
+ 'LOAD-MOUSE-POINTE',
'LOAD-MOUSE-POINTER',
- 'LOAD-PICTURE',
- 'LOAD-SMALL-ICON',
- 'LOCAL-NAME',
+ 'LOAD-PICTURE',
+ 'LOAD-SMALL-ICON',
+ 'LOCAL-NAME',
'LOCAL-VERSION-INFO',
- 'LOCATOR-COLUMN-NUMBER',
- 'LOCATOR-LINE-NUMBER',
- 'LOCATOR-PUBLIC-ID',
- 'LOCATOR-SYSTEM-ID',
- 'LOCATOR-TYPE',
+ 'LOCATOR-COLUMN-NUMBER',
+ 'LOCATOR-LINE-NUMBER',
+ 'LOCATOR-PUBLIC-ID',
+ 'LOCATOR-SYSTEM-ID',
+ 'LOCATOR-TYPE',
'LOCK-REGISTRATION',
- 'LOCKED',
- 'LOG',
- 'LOG-AUDIT-EVENT',
+ 'LOCKED',
+ 'LOG',
+ 'LOG-AUDIT-EVENT',
'LOG-MANAGER',
'LOGICAL',
- 'LOGIN-EXPIRATION-TIMESTAMP',
- 'LOGIN-HOST',
- 'LOGIN-STATE',
- 'LOGOUT',
+ 'LOGIN-EXPIRATION-TIMESTAMP',
+ 'LOGIN-HOST',
+ 'LOGIN-STATE',
+ 'LOGOUT',
'LONGCHAR',
- 'LOOKAHEAD',
- 'LOOKUP',
- 'LT',
- 'MACHINE-CLASS',
- 'MANDATORY',
- 'MANUAL-HIGHLIGHT',
- 'MAP',
- 'MARGIN-EXTRA',
- 'MARGIN-HEIGHT',
- 'MARGIN-HEIGHT-',
- 'MARGIN-HEIGHT-C',
- 'MARGIN-HEIGHT-CH',
- 'MARGIN-HEIGHT-CHA',
- 'MARGIN-HEIGHT-CHAR',
+ 'LOOKAHEAD',
+ 'LOOKUP',
+ 'LT',
+ 'MACHINE-CLASS',
+ 'MANDATORY',
+ 'MANUAL-HIGHLIGHT',
+ 'MAP',
+ 'MARGIN-EXTRA',
+ 'MARGIN-HEIGHT',
+ 'MARGIN-HEIGHT-',
+ 'MARGIN-HEIGHT-C',
+ 'MARGIN-HEIGHT-CH',
+ 'MARGIN-HEIGHT-CHA',
+ 'MARGIN-HEIGHT-CHAR',
'MARGIN-HEIGHT-CHARS',
- 'MARGIN-HEIGHT-P',
- 'MARGIN-HEIGHT-PI',
- 'MARGIN-HEIGHT-PIX',
- 'MARGIN-HEIGHT-PIXE',
- 'MARGIN-HEIGHT-PIXEL',
+ 'MARGIN-HEIGHT-P',
+ 'MARGIN-HEIGHT-PI',
+ 'MARGIN-HEIGHT-PIX',
+ 'MARGIN-HEIGHT-PIXE',
+ 'MARGIN-HEIGHT-PIXEL',
'MARGIN-HEIGHT-PIXELS',
- 'MARGIN-WIDTH',
- 'MARGIN-WIDTH-',
- 'MARGIN-WIDTH-C',
- 'MARGIN-WIDTH-CH',
- 'MARGIN-WIDTH-CHA',
- 'MARGIN-WIDTH-CHAR',
+ 'MARGIN-WIDTH',
+ 'MARGIN-WIDTH-',
+ 'MARGIN-WIDTH-C',
+ 'MARGIN-WIDTH-CH',
+ 'MARGIN-WIDTH-CHA',
+ 'MARGIN-WIDTH-CHAR',
'MARGIN-WIDTH-CHARS',
- 'MARGIN-WIDTH-P',
- 'MARGIN-WIDTH-PI',
- 'MARGIN-WIDTH-PIX',
- 'MARGIN-WIDTH-PIXE',
- 'MARGIN-WIDTH-PIXEL',
+ 'MARGIN-WIDTH-P',
+ 'MARGIN-WIDTH-PI',
+ 'MARGIN-WIDTH-PIX',
+ 'MARGIN-WIDTH-PIXE',
+ 'MARGIN-WIDTH-PIXEL',
'MARGIN-WIDTH-PIXELS',
- 'MARK-NEW',
- 'MARK-ROW-STATE',
- 'MATCHES',
+ 'MARK-NEW',
+ 'MARK-ROW-STATE',
+ 'MATCHES',
'MAX',
- 'MAX-BUTTON',
- 'MAX-CHARS',
- 'MAX-DATA-GUESS',
- 'MAX-HEIGHT',
- 'MAX-HEIGHT-C',
- 'MAX-HEIGHT-CH',
- 'MAX-HEIGHT-CHA',
- 'MAX-HEIGHT-CHAR',
+ 'MAX-BUTTON',
+ 'MAX-CHARS',
+ 'MAX-DATA-GUESS',
+ 'MAX-HEIGHT',
+ 'MAX-HEIGHT-C',
+ 'MAX-HEIGHT-CH',
+ 'MAX-HEIGHT-CHA',
+ 'MAX-HEIGHT-CHAR',
'MAX-HEIGHT-CHARS',
- 'MAX-HEIGHT-P',
- 'MAX-HEIGHT-PI',
- 'MAX-HEIGHT-PIX',
- 'MAX-HEIGHT-PIXE',
- 'MAX-HEIGHT-PIXEL',
+ 'MAX-HEIGHT-P',
+ 'MAX-HEIGHT-PI',
+ 'MAX-HEIGHT-PIX',
+ 'MAX-HEIGHT-PIXE',
+ 'MAX-HEIGHT-PIXEL',
'MAX-HEIGHT-PIXELS',
- 'MAX-ROWS',
- 'MAX-SIZE',
- 'MAX-VAL',
- 'MAX-VALU',
+ 'MAX-ROWS',
+ 'MAX-SIZE',
+ 'MAX-VAL',
+ 'MAX-VALU',
'MAX-VALUE',
- 'MAX-WIDTH',
- 'MAX-WIDTH-',
- 'MAX-WIDTH-C',
- 'MAX-WIDTH-CH',
- 'MAX-WIDTH-CHA',
- 'MAX-WIDTH-CHAR',
+ 'MAX-WIDTH',
+ 'MAX-WIDTH-',
+ 'MAX-WIDTH-C',
+ 'MAX-WIDTH-CH',
+ 'MAX-WIDTH-CHA',
+ 'MAX-WIDTH-CHAR',
'MAX-WIDTH-CHARS',
- 'MAX-WIDTH-P',
- 'MAX-WIDTH-PI',
- 'MAX-WIDTH-PIX',
- 'MAX-WIDTH-PIXE',
- 'MAX-WIDTH-PIXEL',
+ 'MAX-WIDTH-P',
+ 'MAX-WIDTH-PI',
+ 'MAX-WIDTH-PIX',
+ 'MAX-WIDTH-PIXE',
+ 'MAX-WIDTH-PIXEL',
'MAX-WIDTH-PIXELS',
'MAXI',
'MAXIM',
@@ -1427,232 +1427,232 @@ OPENEDGEKEYWORDS = (
'MAXIMU',
'MAXIMUM',
'MAXIMUM-LEVEL',
- 'MD5-DIGEST',
- 'MEMBER',
- 'MEMPTR-TO-NODE-VALUE',
- 'MENU',
- 'MENU-BAR',
- 'MENU-ITEM',
- 'MENU-K',
- 'MENU-KE',
+ 'MD5-DIGEST',
+ 'MEMBER',
+ 'MEMPTR-TO-NODE-VALUE',
+ 'MENU',
+ 'MENU-BAR',
+ 'MENU-ITEM',
+ 'MENU-K',
+ 'MENU-KE',
'MENU-KEY',
- 'MENU-M',
- 'MENU-MO',
- 'MENU-MOU',
- 'MENU-MOUS',
+ 'MENU-M',
+ 'MENU-MO',
+ 'MENU-MOU',
+ 'MENU-MOUS',
'MENU-MOUSE',
'MENUBAR',
- 'MERGE-BY-FIELD',
- 'MESSAGE',
- 'MESSAGE-AREA',
- 'MESSAGE-AREA-FONT',
- 'MESSAGE-LINES',
- 'METHOD',
+ 'MERGE-BY-FIELD',
+ 'MESSAGE',
+ 'MESSAGE-AREA',
+ 'MESSAGE-AREA-FONT',
+ 'MESSAGE-LINES',
+ 'METHOD',
'MIN',
- 'MIN-BUTTON',
- 'MIN-COLUMN-WIDTH-C',
- 'MIN-COLUMN-WIDTH-CH',
- 'MIN-COLUMN-WIDTH-CHA',
- 'MIN-COLUMN-WIDTH-CHAR',
+ 'MIN-BUTTON',
+ 'MIN-COLUMN-WIDTH-C',
+ 'MIN-COLUMN-WIDTH-CH',
+ 'MIN-COLUMN-WIDTH-CHA',
+ 'MIN-COLUMN-WIDTH-CHAR',
'MIN-COLUMN-WIDTH-CHARS',
- 'MIN-COLUMN-WIDTH-P',
- 'MIN-COLUMN-WIDTH-PI',
- 'MIN-COLUMN-WIDTH-PIX',
- 'MIN-COLUMN-WIDTH-PIXE',
- 'MIN-COLUMN-WIDTH-PIXEL',
+ 'MIN-COLUMN-WIDTH-P',
+ 'MIN-COLUMN-WIDTH-PI',
+ 'MIN-COLUMN-WIDTH-PIX',
+ 'MIN-COLUMN-WIDTH-PIXE',
+ 'MIN-COLUMN-WIDTH-PIXEL',
'MIN-COLUMN-WIDTH-PIXELS',
- 'MIN-HEIGHT',
- 'MIN-HEIGHT-',
- 'MIN-HEIGHT-C',
- 'MIN-HEIGHT-CH',
- 'MIN-HEIGHT-CHA',
- 'MIN-HEIGHT-CHAR',
+ 'MIN-HEIGHT',
+ 'MIN-HEIGHT-',
+ 'MIN-HEIGHT-C',
+ 'MIN-HEIGHT-CH',
+ 'MIN-HEIGHT-CHA',
+ 'MIN-HEIGHT-CHAR',
'MIN-HEIGHT-CHARS',
- 'MIN-HEIGHT-P',
- 'MIN-HEIGHT-PI',
- 'MIN-HEIGHT-PIX',
- 'MIN-HEIGHT-PIXE',
- 'MIN-HEIGHT-PIXEL',
+ 'MIN-HEIGHT-P',
+ 'MIN-HEIGHT-PI',
+ 'MIN-HEIGHT-PIX',
+ 'MIN-HEIGHT-PIXE',
+ 'MIN-HEIGHT-PIXEL',
'MIN-HEIGHT-PIXELS',
- 'MIN-SIZE',
- 'MIN-VAL',
- 'MIN-VALU',
+ 'MIN-SIZE',
+ 'MIN-VAL',
+ 'MIN-VALU',
'MIN-VALUE',
- 'MIN-WIDTH',
- 'MIN-WIDTH-',
- 'MIN-WIDTH-C',
- 'MIN-WIDTH-CH',
- 'MIN-WIDTH-CHA',
- 'MIN-WIDTH-CHAR',
+ 'MIN-WIDTH',
+ 'MIN-WIDTH-',
+ 'MIN-WIDTH-C',
+ 'MIN-WIDTH-CH',
+ 'MIN-WIDTH-CHA',
+ 'MIN-WIDTH-CHAR',
'MIN-WIDTH-CHARS',
- 'MIN-WIDTH-P',
- 'MIN-WIDTH-PI',
- 'MIN-WIDTH-PIX',
- 'MIN-WIDTH-PIXE',
- 'MIN-WIDTH-PIXEL',
+ 'MIN-WIDTH-P',
+ 'MIN-WIDTH-PI',
+ 'MIN-WIDTH-PIX',
+ 'MIN-WIDTH-PIXE',
+ 'MIN-WIDTH-PIXEL',
'MIN-WIDTH-PIXELS',
'MINI',
'MINIM',
'MINIMU',
'MINIMUM',
'MOD',
- 'MODIFIED',
- 'MODU',
- 'MODUL',
+ 'MODIFIED',
+ 'MODU',
+ 'MODUL',
'MODULO',
- 'MONTH',
- 'MOUSE',
- 'MOUSE-P',
- 'MOUSE-PO',
- 'MOUSE-POI',
- 'MOUSE-POIN',
- 'MOUSE-POINT',
- 'MOUSE-POINTE',
+ 'MONTH',
+ 'MOUSE',
+ 'MOUSE-P',
+ 'MOUSE-PO',
+ 'MOUSE-POI',
+ 'MOUSE-POIN',
+ 'MOUSE-POINT',
+ 'MOUSE-POINTE',
'MOUSE-POINTER',
- 'MOVABLE',
- 'MOVE-AFTER',
- 'MOVE-AFTER-',
- 'MOVE-AFTER-T',
- 'MOVE-AFTER-TA',
- 'MOVE-AFTER-TAB',
- 'MOVE-AFTER-TAB-',
- 'MOVE-AFTER-TAB-I',
- 'MOVE-AFTER-TAB-IT',
- 'MOVE-AFTER-TAB-ITE',
+ 'MOVABLE',
+ 'MOVE-AFTER',
+ 'MOVE-AFTER-',
+ 'MOVE-AFTER-T',
+ 'MOVE-AFTER-TA',
+ 'MOVE-AFTER-TAB',
+ 'MOVE-AFTER-TAB-',
+ 'MOVE-AFTER-TAB-I',
+ 'MOVE-AFTER-TAB-IT',
+ 'MOVE-AFTER-TAB-ITE',
'MOVE-AFTER-TAB-ITEM',
- 'MOVE-BEFOR',
- 'MOVE-BEFORE',
- 'MOVE-BEFORE-',
- 'MOVE-BEFORE-T',
- 'MOVE-BEFORE-TA',
- 'MOVE-BEFORE-TAB',
- 'MOVE-BEFORE-TAB-',
- 'MOVE-BEFORE-TAB-I',
- 'MOVE-BEFORE-TAB-IT',
- 'MOVE-BEFORE-TAB-ITE',
+ 'MOVE-BEFOR',
+ 'MOVE-BEFORE',
+ 'MOVE-BEFORE-',
+ 'MOVE-BEFORE-T',
+ 'MOVE-BEFORE-TA',
+ 'MOVE-BEFORE-TAB',
+ 'MOVE-BEFORE-TAB-',
+ 'MOVE-BEFORE-TAB-I',
+ 'MOVE-BEFORE-TAB-IT',
+ 'MOVE-BEFORE-TAB-ITE',
'MOVE-BEFORE-TAB-ITEM',
- 'MOVE-COL',
- 'MOVE-COLU',
- 'MOVE-COLUM',
+ 'MOVE-COL',
+ 'MOVE-COLU',
+ 'MOVE-COLUM',
'MOVE-COLUMN',
- 'MOVE-TO-B',
- 'MOVE-TO-BO',
- 'MOVE-TO-BOT',
- 'MOVE-TO-BOTT',
- 'MOVE-TO-BOTTO',
+ 'MOVE-TO-B',
+ 'MOVE-TO-BO',
+ 'MOVE-TO-BOT',
+ 'MOVE-TO-BOTT',
+ 'MOVE-TO-BOTTO',
'MOVE-TO-BOTTOM',
- 'MOVE-TO-EOF',
- 'MOVE-TO-T',
- 'MOVE-TO-TO',
+ 'MOVE-TO-EOF',
+ 'MOVE-TO-T',
+ 'MOVE-TO-TO',
'MOVE-TO-TOP',
- 'MPE',
+ 'MPE',
'MTIME',
- 'MULTI-COMPILE',
- 'MULTIPLE',
- 'MULTIPLE-KEY',
- 'MULTITASKING-INTERVAL',
- 'MUST-EXIST',
- 'NAME',
- 'NAMESPACE-PREFIX',
- 'NAMESPACE-URI',
- 'NATIVE',
- 'NE',
- 'NEEDS-APPSERVER-PROMPT',
- 'NEEDS-PROMPT',
- 'NEW',
- 'NEW-INSTANCE',
- 'NEW-ROW',
- 'NEXT',
- 'NEXT-COLUMN',
- 'NEXT-PROMPT',
- 'NEXT-ROWID',
- 'NEXT-SIBLING',
- 'NEXT-TAB-I',
- 'NEXT-TAB-IT',
- 'NEXT-TAB-ITE',
+ 'MULTI-COMPILE',
+ 'MULTIPLE',
+ 'MULTIPLE-KEY',
+ 'MULTITASKING-INTERVAL',
+ 'MUST-EXIST',
+ 'NAME',
+ 'NAMESPACE-PREFIX',
+ 'NAMESPACE-URI',
+ 'NATIVE',
+ 'NE',
+ 'NEEDS-APPSERVER-PROMPT',
+ 'NEEDS-PROMPT',
+ 'NEW',
+ 'NEW-INSTANCE',
+ 'NEW-ROW',
+ 'NEXT',
+ 'NEXT-COLUMN',
+ 'NEXT-PROMPT',
+ 'NEXT-ROWID',
+ 'NEXT-SIBLING',
+ 'NEXT-TAB-I',
+ 'NEXT-TAB-IT',
+ 'NEXT-TAB-ITE',
'NEXT-TAB-ITEM',
- 'NEXT-VALUE',
- 'NO',
- 'NO-APPLY',
- 'NO-ARRAY-MESSAGE',
- 'NO-ASSIGN',
- 'NO-ATTR',
- 'NO-ATTR-',
- 'NO-ATTR-L',
- 'NO-ATTR-LI',
- 'NO-ATTR-LIS',
+ 'NEXT-VALUE',
+ 'NO',
+ 'NO-APPLY',
+ 'NO-ARRAY-MESSAGE',
+ 'NO-ASSIGN',
+ 'NO-ATTR',
+ 'NO-ATTR-',
+ 'NO-ATTR-L',
+ 'NO-ATTR-LI',
+ 'NO-ATTR-LIS',
'NO-ATTR-LIST',
- 'NO-ATTR-S',
- 'NO-ATTR-SP',
- 'NO-ATTR-SPA',
- 'NO-ATTR-SPAC',
+ 'NO-ATTR-S',
+ 'NO-ATTR-SP',
+ 'NO-ATTR-SPA',
+ 'NO-ATTR-SPAC',
'NO-ATTR-SPACE',
- 'NO-AUTO-VALIDATE',
- 'NO-BIND-WHERE',
- 'NO-BOX',
- 'NO-CONSOLE',
- 'NO-CONVERT',
- 'NO-CONVERT-3D-COLORS',
- 'NO-CURRENT-VALUE',
- 'NO-DEBUG',
- 'NO-DRAG',
- 'NO-ECHO',
- 'NO-EMPTY-SPACE',
- 'NO-ERROR',
- 'NO-F',
- 'NO-FI',
- 'NO-FIL',
+ 'NO-AUTO-VALIDATE',
+ 'NO-BIND-WHERE',
+ 'NO-BOX',
+ 'NO-CONSOLE',
+ 'NO-CONVERT',
+ 'NO-CONVERT-3D-COLORS',
+ 'NO-CURRENT-VALUE',
+ 'NO-DEBUG',
+ 'NO-DRAG',
+ 'NO-ECHO',
+ 'NO-EMPTY-SPACE',
+ 'NO-ERROR',
+ 'NO-F',
+ 'NO-FI',
+ 'NO-FIL',
'NO-FILL',
- 'NO-FOCUS',
- 'NO-HELP',
- 'NO-HIDE',
- 'NO-INDEX-HINT',
- 'NO-INHERIT-BGC',
- 'NO-INHERIT-BGCO',
+ 'NO-FOCUS',
+ 'NO-HELP',
+ 'NO-HIDE',
+ 'NO-INDEX-HINT',
+ 'NO-INHERIT-BGC',
+ 'NO-INHERIT-BGCO',
'NO-INHERIT-BGCOLOR',
- 'NO-INHERIT-FGC',
- 'NO-INHERIT-FGCO',
- 'NO-INHERIT-FGCOL',
- 'NO-INHERIT-FGCOLO',
+ 'NO-INHERIT-FGC',
+ 'NO-INHERIT-FGCO',
+ 'NO-INHERIT-FGCOL',
+ 'NO-INHERIT-FGCOLO',
'NO-INHERIT-FGCOLOR',
- 'NO-JOIN-BY-SQLDB',
+ 'NO-JOIN-BY-SQLDB',
'NO-LABE',
- 'NO-LABELS',
- 'NO-LOBS',
- 'NO-LOCK',
- 'NO-LOOKAHEAD',
- 'NO-MAP',
- 'NO-MES',
- 'NO-MESS',
- 'NO-MESSA',
- 'NO-MESSAG',
+ 'NO-LABELS',
+ 'NO-LOBS',
+ 'NO-LOCK',
+ 'NO-LOOKAHEAD',
+ 'NO-MAP',
+ 'NO-MES',
+ 'NO-MESS',
+ 'NO-MESSA',
+ 'NO-MESSAG',
'NO-MESSAGE',
- 'NO-PAUSE',
- 'NO-PREFE',
- 'NO-PREFET',
- 'NO-PREFETC',
+ 'NO-PAUSE',
+ 'NO-PREFE',
+ 'NO-PREFET',
+ 'NO-PREFETC',
'NO-PREFETCH',
- 'NO-ROW-MARKERS',
- 'NO-SCROLLBAR-VERTICAL',
- 'NO-SEPARATE-CONNECTION',
- 'NO-SEPARATORS',
- 'NO-TAB-STOP',
- 'NO-UND',
- 'NO-UNDE',
- 'NO-UNDER',
- 'NO-UNDERL',
- 'NO-UNDERLI',
- 'NO-UNDERLIN',
+ 'NO-ROW-MARKERS',
+ 'NO-SCROLLBAR-VERTICAL',
+ 'NO-SEPARATE-CONNECTION',
+ 'NO-SEPARATORS',
+ 'NO-TAB-STOP',
+ 'NO-UND',
+ 'NO-UNDE',
+ 'NO-UNDER',
+ 'NO-UNDERL',
+ 'NO-UNDERLI',
+ 'NO-UNDERLIN',
'NO-UNDERLINE',
- 'NO-UNDO',
- 'NO-VAL',
- 'NO-VALI',
- 'NO-VALID',
- 'NO-VALIDA',
- 'NO-VALIDAT',
+ 'NO-UNDO',
+ 'NO-VAL',
+ 'NO-VALI',
+ 'NO-VALID',
+ 'NO-VALIDA',
+ 'NO-VALIDAT',
'NO-VALIDATE',
- 'NO-WAIT',
- 'NO-WORD-WRAP',
+ 'NO-WAIT',
+ 'NO-WORD-WRAP',
'NODE-VALUE-TO-MEMPTR',
'NONAMESPACE-SCHEMA-LOCATION',
'NONE',
@@ -1660,55 +1660,55 @@ OPENEDGEKEYWORDS = (
'NOT',
'NOT-ACTIVE',
'NOW',
- 'NULL',
- 'NUM-ALI',
- 'NUM-ALIA',
- 'NUM-ALIAS',
- 'NUM-ALIASE',
+ 'NULL',
+ 'NUM-ALI',
+ 'NUM-ALIA',
+ 'NUM-ALIAS',
+ 'NUM-ALIASE',
'NUM-ALIASES',
- 'NUM-BUFFERS',
- 'NUM-BUT',
- 'NUM-BUTT',
- 'NUM-BUTTO',
- 'NUM-BUTTON',
+ 'NUM-BUFFERS',
+ 'NUM-BUT',
+ 'NUM-BUTT',
+ 'NUM-BUTTO',
+ 'NUM-BUTTON',
'NUM-BUTTONS',
- 'NUM-COL',
- 'NUM-COLU',
- 'NUM-COLUM',
- 'NUM-COLUMN',
+ 'NUM-COL',
+ 'NUM-COLU',
+ 'NUM-COLUM',
+ 'NUM-COLUMN',
'NUM-COLUMNS',
- 'NUM-COPIES',
- 'NUM-DBS',
- 'NUM-DROPPED-FILES',
- 'NUM-ENTRIES',
- 'NUM-FIELDS',
- 'NUM-FORMATS',
- 'NUM-ITEMS',
- 'NUM-ITERATIONS',
- 'NUM-LINES',
- 'NUM-LOCKED-COL',
- 'NUM-LOCKED-COLU',
- 'NUM-LOCKED-COLUM',
- 'NUM-LOCKED-COLUMN',
+ 'NUM-COPIES',
+ 'NUM-DBS',
+ 'NUM-DROPPED-FILES',
+ 'NUM-ENTRIES',
+ 'NUM-FIELDS',
+ 'NUM-FORMATS',
+ 'NUM-ITEMS',
+ 'NUM-ITERATIONS',
+ 'NUM-LINES',
+ 'NUM-LOCKED-COL',
+ 'NUM-LOCKED-COLU',
+ 'NUM-LOCKED-COLUM',
+ 'NUM-LOCKED-COLUMN',
'NUM-LOCKED-COLUMNS',
- 'NUM-MESSAGES',
- 'NUM-PARAMETERS',
- 'NUM-REFERENCES',
- 'NUM-REPLACED',
- 'NUM-RESULTS',
- 'NUM-SELECTED',
- 'NUM-SELECTED-',
+ 'NUM-MESSAGES',
+ 'NUM-PARAMETERS',
+ 'NUM-REFERENCES',
+ 'NUM-REPLACED',
+ 'NUM-RESULTS',
+ 'NUM-SELECTED',
+ 'NUM-SELECTED-',
'NUM-SELECTED-ROWS',
- 'NUM-SELECTED-W',
- 'NUM-SELECTED-WI',
- 'NUM-SELECTED-WID',
- 'NUM-SELECTED-WIDG',
- 'NUM-SELECTED-WIDGE',
- 'NUM-SELECTED-WIDGET',
+ 'NUM-SELECTED-W',
+ 'NUM-SELECTED-WI',
+ 'NUM-SELECTED-WID',
+ 'NUM-SELECTED-WIDG',
+ 'NUM-SELECTED-WIDGE',
+ 'NUM-SELECTED-WIDGET',
'NUM-SELECTED-WIDGETS',
- 'NUM-TABS',
- 'NUM-TO-RETAIN',
- 'NUM-VISIBLE-COLUMNS',
+ 'NUM-TABS',
+ 'NUM-TO-RETAIN',
+ 'NUM-VISIBLE-COLUMNS',
'NUMERIC',
'NUMERIC-F',
'NUMERIC-FO',
@@ -1716,146 +1716,146 @@ OPENEDGEKEYWORDS = (
'NUMERIC-FORM',
'NUMERIC-FORMA',
'NUMERIC-FORMAT',
- 'OCTET-LENGTH',
- 'OF',
- 'OFF',
- 'OK',
- 'OK-CANCEL',
- 'OLD',
- 'ON',
- 'ON-FRAME',
- 'ON-FRAME-',
- 'ON-FRAME-B',
- 'ON-FRAME-BO',
- 'ON-FRAME-BOR',
- 'ON-FRAME-BORD',
- 'ON-FRAME-BORDE',
+ 'OCTET-LENGTH',
+ 'OF',
+ 'OFF',
+ 'OK',
+ 'OK-CANCEL',
+ 'OLD',
+ 'ON',
+ 'ON-FRAME',
+ 'ON-FRAME-',
+ 'ON-FRAME-B',
+ 'ON-FRAME-BO',
+ 'ON-FRAME-BOR',
+ 'ON-FRAME-BORD',
+ 'ON-FRAME-BORDE',
'ON-FRAME-BORDER',
- 'OPEN',
- 'OPSYS',
- 'OPTION',
- 'OR',
- 'ORDERED-JOIN',
- 'ORDINAL',
- 'OS-APPEND',
- 'OS-COMMAND',
- 'OS-COPY',
- 'OS-CREATE-DIR',
- 'OS-DELETE',
- 'OS-DIR',
+ 'OPEN',
+ 'OPSYS',
+ 'OPTION',
+ 'OR',
+ 'ORDERED-JOIN',
+ 'ORDINAL',
+ 'OS-APPEND',
+ 'OS-COMMAND',
+ 'OS-COPY',
+ 'OS-CREATE-DIR',
+ 'OS-DELETE',
+ 'OS-DIR',
'OS-DRIVE',
- 'OS-DRIVES',
- 'OS-ERROR',
- 'OS-GETENV',
- 'OS-RENAME',
- 'OTHERWISE',
- 'OUTPUT',
- 'OVERLAY',
- 'OVERRIDE',
- 'OWNER',
- 'PAGE',
- 'PAGE-BOT',
- 'PAGE-BOTT',
- 'PAGE-BOTTO',
+ 'OS-DRIVES',
+ 'OS-ERROR',
+ 'OS-GETENV',
+ 'OS-RENAME',
+ 'OTHERWISE',
+ 'OUTPUT',
+ 'OVERLAY',
+ 'OVERRIDE',
+ 'OWNER',
+ 'PAGE',
+ 'PAGE-BOT',
+ 'PAGE-BOTT',
+ 'PAGE-BOTTO',
'PAGE-BOTTOM',
- 'PAGE-NUM',
- 'PAGE-NUMB',
- 'PAGE-NUMBE',
+ 'PAGE-NUM',
+ 'PAGE-NUMB',
+ 'PAGE-NUMBE',
'PAGE-NUMBER',
- 'PAGE-SIZE',
- 'PAGE-TOP',
- 'PAGE-WID',
- 'PAGE-WIDT',
+ 'PAGE-SIZE',
+ 'PAGE-TOP',
+ 'PAGE-WID',
+ 'PAGE-WIDT',
'PAGE-WIDTH',
'PAGED',
- 'PARAM',
- 'PARAME',
- 'PARAMET',
- 'PARAMETE',
+ 'PARAM',
+ 'PARAME',
+ 'PARAMET',
+ 'PARAMETE',
'PARAMETER',
- 'PARENT',
- 'PARSE-STATUS',
- 'PARTIAL-KEY',
- 'PASCAL',
- 'PASSWORD-FIELD',
- 'PATHNAME',
- 'PAUSE',
- 'PBE-HASH-ALG',
- 'PBE-HASH-ALGO',
- 'PBE-HASH-ALGOR',
- 'PBE-HASH-ALGORI',
- 'PBE-HASH-ALGORIT',
- 'PBE-HASH-ALGORITH',
+ 'PARENT',
+ 'PARSE-STATUS',
+ 'PARTIAL-KEY',
+ 'PASCAL',
+ 'PASSWORD-FIELD',
+ 'PATHNAME',
+ 'PAUSE',
+ 'PBE-HASH-ALG',
+ 'PBE-HASH-ALGO',
+ 'PBE-HASH-ALGOR',
+ 'PBE-HASH-ALGORI',
+ 'PBE-HASH-ALGORIT',
+ 'PBE-HASH-ALGORITH',
'PBE-HASH-ALGORITHM',
- 'PBE-KEY-ROUNDS',
- 'PDBNAME',
- 'PERSIST',
- 'PERSISTE',
- 'PERSISTEN',
+ 'PBE-KEY-ROUNDS',
+ 'PDBNAME',
+ 'PERSIST',
+ 'PERSISTE',
+ 'PERSISTEN',
'PERSISTENT',
- 'PERSISTENT-CACHE-DISABLED',
- 'PFC',
- 'PFCO',
- 'PFCOL',
- 'PFCOLO',
+ 'PERSISTENT-CACHE-DISABLED',
+ 'PFC',
+ 'PFCO',
+ 'PFCOL',
+ 'PFCOLO',
'PFCOLOR',
- 'PIXELS',
- 'PIXELS-PER-COL',
- 'PIXELS-PER-COLU',
- 'PIXELS-PER-COLUM',
+ 'PIXELS',
+ 'PIXELS-PER-COL',
+ 'PIXELS-PER-COLU',
+ 'PIXELS-PER-COLUM',
'PIXELS-PER-COLUMN',
- 'PIXELS-PER-ROW',
- 'POPUP-M',
- 'POPUP-ME',
- 'POPUP-MEN',
+ 'PIXELS-PER-ROW',
+ 'POPUP-M',
+ 'POPUP-ME',
+ 'POPUP-MEN',
'POPUP-MENU',
- 'POPUP-O',
- 'POPUP-ON',
- 'POPUP-ONL',
+ 'POPUP-O',
+ 'POPUP-ON',
+ 'POPUP-ONL',
'POPUP-ONLY',
- 'PORTRAIT',
- 'POSITION',
- 'PRECISION',
- 'PREFER-DATASET',
+ 'PORTRAIT',
+ 'POSITION',
+ 'PRECISION',
+ 'PREFER-DATASET',
'PREPARE-STRING',
- 'PREPARED',
- 'PREPROC',
- 'PREPROCE',
- 'PREPROCES',
+ 'PREPARED',
+ 'PREPROC',
+ 'PREPROCE',
+ 'PREPROCES',
'PREPROCESS',
- 'PRESEL',
- 'PRESELE',
- 'PRESELEC',
+ 'PRESEL',
+ 'PRESELE',
+ 'PRESELEC',
'PRESELECT',
- 'PREV',
- 'PREV-COLUMN',
- 'PREV-SIBLING',
- 'PREV-TAB-I',
- 'PREV-TAB-IT',
- 'PREV-TAB-ITE',
+ 'PREV',
+ 'PREV-COLUMN',
+ 'PREV-SIBLING',
+ 'PREV-TAB-I',
+ 'PREV-TAB-IT',
+ 'PREV-TAB-ITE',
'PREV-TAB-ITEM',
- 'PRIMARY',
- 'PRINTER',
- 'PRINTER-CONTROL-HANDLE',
- 'PRINTER-HDC',
- 'PRINTER-NAME',
- 'PRINTER-PORT',
- 'PRINTER-SETUP',
- 'PRIVATE',
- 'PRIVATE-D',
- 'PRIVATE-DA',
- 'PRIVATE-DAT',
+ 'PRIMARY',
+ 'PRINTER',
+ 'PRINTER-CONTROL-HANDLE',
+ 'PRINTER-HDC',
+ 'PRINTER-NAME',
+ 'PRINTER-PORT',
+ 'PRINTER-SETUP',
+ 'PRIVATE',
+ 'PRIVATE-D',
+ 'PRIVATE-DA',
+ 'PRIVATE-DAT',
'PRIVATE-DATA',
- 'PRIVILEGES',
- 'PROC-HA',
- 'PROC-HAN',
- 'PROC-HAND',
- 'PROC-HANDL',
+ 'PRIVILEGES',
+ 'PROC-HA',
+ 'PROC-HAN',
+ 'PROC-HAND',
+ 'PROC-HANDL',
'PROC-HANDLE',
- 'PROC-ST',
- 'PROC-STA',
- 'PROC-STAT',
- 'PROC-STATU',
+ 'PROC-ST',
+ 'PROC-STA',
+ 'PROC-STAT',
+ 'PROC-STATU',
'PROC-STATUS',
'PROC-TEXT',
'PROC-TEXT-BUFFER',
@@ -1867,188 +1867,188 @@ OPENEDGEKEYWORDS = (
'PROCEDURE-CALL-TYPE',
'PROCEDURE-TYPE',
'PROCESS',
- 'PROFILER',
- 'PROGRAM-NAME',
- 'PROGRESS',
- 'PROGRESS-S',
- 'PROGRESS-SO',
- 'PROGRESS-SOU',
- 'PROGRESS-SOUR',
- 'PROGRESS-SOURC',
+ 'PROFILER',
+ 'PROGRAM-NAME',
+ 'PROGRESS',
+ 'PROGRESS-S',
+ 'PROGRESS-SO',
+ 'PROGRESS-SOU',
+ 'PROGRESS-SOUR',
+ 'PROGRESS-SOURC',
'PROGRESS-SOURCE',
- 'PROMPT',
- 'PROMPT-F',
- 'PROMPT-FO',
+ 'PROMPT',
+ 'PROMPT-F',
+ 'PROMPT-FO',
'PROMPT-FOR',
- 'PROMSGS',
- 'PROPATH',
- 'PROPERTY',
- 'PROTECTED',
- 'PROVERS',
- 'PROVERSI',
- 'PROVERSIO',
+ 'PROMSGS',
+ 'PROPATH',
+ 'PROPERTY',
+ 'PROTECTED',
+ 'PROVERS',
+ 'PROVERSI',
+ 'PROVERSIO',
'PROVERSION',
- 'PROXY',
- 'PROXY-PASSWORD',
- 'PROXY-USERID',
- 'PUBLIC',
- 'PUBLIC-ID',
- 'PUBLISH',
- 'PUBLISHED-EVENTS',
- 'PUT',
- 'PUT-BYTE',
- 'PUT-DOUBLE',
- 'PUT-FLOAT',
- 'PUT-INT64',
- 'PUT-KEY-VAL',
- 'PUT-KEY-VALU',
+ 'PROXY',
+ 'PROXY-PASSWORD',
+ 'PROXY-USERID',
+ 'PUBLIC',
+ 'PUBLIC-ID',
+ 'PUBLISH',
+ 'PUBLISHED-EVENTS',
+ 'PUT',
+ 'PUT-BYTE',
+ 'PUT-DOUBLE',
+ 'PUT-FLOAT',
+ 'PUT-INT64',
+ 'PUT-KEY-VAL',
+ 'PUT-KEY-VALU',
'PUT-KEY-VALUE',
- 'PUT-LONG',
- 'PUT-SHORT',
- 'PUT-STRING',
- 'PUT-UNSIGNED-LONG',
+ 'PUT-LONG',
+ 'PUT-SHORT',
+ 'PUT-STRING',
+ 'PUT-UNSIGNED-LONG',
'PUTBYTE',
- 'QUERY',
- 'QUERY-CLOSE',
- 'QUERY-OFF-END',
- 'QUERY-OPEN',
- 'QUERY-PREPARE',
- 'QUERY-TUNING',
- 'QUESTION',
- 'QUIT',
- 'QUOTER',
+ 'QUERY',
+ 'QUERY-CLOSE',
+ 'QUERY-OFF-END',
+ 'QUERY-OPEN',
+ 'QUERY-PREPARE',
+ 'QUERY-TUNING',
+ 'QUESTION',
+ 'QUIT',
+ 'QUOTER',
'R-INDEX',
- 'RADIO-BUTTONS',
- 'RADIO-SET',
- 'RANDOM',
+ 'RADIO-BUTTONS',
+ 'RADIO-SET',
+ 'RANDOM',
'RAW',
- 'RAW-TRANSFER',
- 'RCODE-INFO',
- 'RCODE-INFOR',
- 'RCODE-INFORM',
- 'RCODE-INFORMA',
- 'RCODE-INFORMAT',
- 'RCODE-INFORMATI',
- 'RCODE-INFORMATIO',
+ 'RAW-TRANSFER',
+ 'RCODE-INFO',
+ 'RCODE-INFOR',
+ 'RCODE-INFORM',
+ 'RCODE-INFORMA',
+ 'RCODE-INFORMAT',
+ 'RCODE-INFORMATI',
+ 'RCODE-INFORMATIO',
'RCODE-INFORMATION',
- 'READ-AVAILABLE',
- 'READ-EXACT-NUM',
- 'READ-FILE',
+ 'READ-AVAILABLE',
+ 'READ-EXACT-NUM',
+ 'READ-FILE',
'READ-JSON',
- 'READ-ONLY',
- 'READ-XML',
- 'READ-XMLSCHEMA',
+ 'READ-ONLY',
+ 'READ-XML',
+ 'READ-XMLSCHEMA',
'READKEY',
- 'REAL',
+ 'REAL',
'RECID',
- 'RECORD-LENGTH',
- 'RECT',
- 'RECTA',
- 'RECTAN',
- 'RECTANG',
- 'RECTANGL',
+ 'RECORD-LENGTH',
+ 'RECT',
+ 'RECTA',
+ 'RECTAN',
+ 'RECTANG',
+ 'RECTANGL',
'RECTANGLE',
- 'RECURSIVE',
- 'REFERENCE-ONLY',
- 'REFRESH',
+ 'RECURSIVE',
+ 'REFERENCE-ONLY',
+ 'REFRESH',
'REFRESH-AUDIT-POLICY',
- 'REFRESHABLE',
- 'REGISTER-DOMAIN',
- 'RELEASE',
- 'REMOTE',
- 'REMOVE-EVENTS-PROCEDURE',
- 'REMOVE-SUPER-PROCEDURE',
- 'REPEAT',
- 'REPLACE',
- 'REPLACE-SELECTION-TEXT',
- 'REPOSITION',
- 'REPOSITION-BACKWARD',
- 'REPOSITION-FORWARD',
- 'REPOSITION-MODE',
- 'REPOSITION-TO-ROW',
- 'REPOSITION-TO-ROWID',
- 'REQUEST',
+ 'REFRESHABLE',
+ 'REGISTER-DOMAIN',
+ 'RELEASE',
+ 'REMOTE',
+ 'REMOVE-EVENTS-PROCEDURE',
+ 'REMOVE-SUPER-PROCEDURE',
+ 'REPEAT',
+ 'REPLACE',
+ 'REPLACE-SELECTION-TEXT',
+ 'REPOSITION',
+ 'REPOSITION-BACKWARD',
+ 'REPOSITION-FORWARD',
+ 'REPOSITION-MODE',
+ 'REPOSITION-TO-ROW',
+ 'REPOSITION-TO-ROWID',
+ 'REQUEST',
'REQUEST-INFO',
- 'RESET',
- 'RESIZA',
- 'RESIZAB',
- 'RESIZABL',
+ 'RESET',
+ 'RESIZA',
+ 'RESIZAB',
+ 'RESIZABL',
'RESIZABLE',
- 'RESIZE',
+ 'RESIZE',
'RESPONSE-INFO',
- 'RESTART-ROW',
- 'RESTART-ROWID',
- 'RETAIN',
- 'RETAIN-SHAPE',
- 'RETRY',
- 'RETRY-CANCEL',
- 'RETURN',
+ 'RESTART-ROW',
+ 'RESTART-ROWID',
+ 'RETAIN',
+ 'RETAIN-SHAPE',
+ 'RETRY',
+ 'RETRY-CANCEL',
+ 'RETURN',
'RETURN-ALIGN',
'RETURN-ALIGNE',
- 'RETURN-INS',
- 'RETURN-INSE',
- 'RETURN-INSER',
- 'RETURN-INSERT',
- 'RETURN-INSERTE',
+ 'RETURN-INS',
+ 'RETURN-INSE',
+ 'RETURN-INSER',
+ 'RETURN-INSERT',
+ 'RETURN-INSERTE',
'RETURN-INSERTED',
'RETURN-TO-START-DI',
- 'RETURN-TO-START-DIR',
- 'RETURN-VAL',
- 'RETURN-VALU',
+ 'RETURN-TO-START-DIR',
+ 'RETURN-VAL',
+ 'RETURN-VALU',
'RETURN-VALUE',
- 'RETURN-VALUE-DATA-TYPE',
+ 'RETURN-VALUE-DATA-TYPE',
'RETURNS',
- 'REVERSE-FROM',
- 'REVERT',
- 'REVOKE',
- 'RGB-VALUE',
- 'RIGHT-ALIGNED',
- 'RIGHT-TRIM',
- 'ROLES',
- 'ROUND',
- 'ROUTINE-LEVEL',
- 'ROW',
- 'ROW-HEIGHT-CHARS',
- 'ROW-HEIGHT-PIXELS',
- 'ROW-MARKERS',
- 'ROW-OF',
- 'ROW-RESIZABLE',
+ 'REVERSE-FROM',
+ 'REVERT',
+ 'REVOKE',
+ 'RGB-VALUE',
+ 'RIGHT-ALIGNED',
+ 'RIGHT-TRIM',
+ 'ROLES',
+ 'ROUND',
+ 'ROUTINE-LEVEL',
+ 'ROW',
+ 'ROW-HEIGHT-CHARS',
+ 'ROW-HEIGHT-PIXELS',
+ 'ROW-MARKERS',
+ 'ROW-OF',
+ 'ROW-RESIZABLE',
'ROWID',
- 'RULE',
- 'RUN',
- 'RUN-PROCEDURE',
+ 'RULE',
+ 'RUN',
+ 'RUN-PROCEDURE',
'SAVE CACHE',
- 'SAVE',
- 'SAVE-AS',
- 'SAVE-FILE',
- 'SAX-COMPLE',
- 'SAX-COMPLET',
+ 'SAVE',
+ 'SAVE-AS',
+ 'SAVE-FILE',
+ 'SAX-COMPLE',
+ 'SAX-COMPLET',
'SAX-COMPLETE',
- 'SAX-PARSE',
- 'SAX-PARSE-FIRST',
- 'SAX-PARSE-NEXT',
- 'SAX-PARSER-ERROR',
- 'SAX-RUNNING',
- 'SAX-UNINITIALIZED',
- 'SAX-WRITE-BEGIN',
- 'SAX-WRITE-COMPLETE',
- 'SAX-WRITE-CONTENT',
- 'SAX-WRITE-ELEMENT',
- 'SAX-WRITE-ERROR',
- 'SAX-WRITE-IDLE',
+ 'SAX-PARSE',
+ 'SAX-PARSE-FIRST',
+ 'SAX-PARSE-NEXT',
+ 'SAX-PARSER-ERROR',
+ 'SAX-RUNNING',
+ 'SAX-UNINITIALIZED',
+ 'SAX-WRITE-BEGIN',
+ 'SAX-WRITE-COMPLETE',
+ 'SAX-WRITE-CONTENT',
+ 'SAX-WRITE-ELEMENT',
+ 'SAX-WRITE-ERROR',
+ 'SAX-WRITE-IDLE',
'SAX-WRITE-TAG',
- 'SAX-WRITER',
- 'SCHEMA',
- 'SCHEMA-LOCATION',
- 'SCHEMA-MARSHAL',
- 'SCHEMA-PATH',
- 'SCREEN',
- 'SCREEN-IO',
- 'SCREEN-LINES',
- 'SCREEN-VAL',
- 'SCREEN-VALU',
+ 'SAX-WRITER',
+ 'SCHEMA',
+ 'SCHEMA-LOCATION',
+ 'SCHEMA-MARSHAL',
+ 'SCHEMA-PATH',
+ 'SCREEN',
+ 'SCREEN-IO',
+ 'SCREEN-LINES',
+ 'SCREEN-VAL',
+ 'SCREEN-VALU',
'SCREEN-VALUE',
- 'SCROLL',
+ 'SCROLL',
'SCROLL-BARS',
'SCROLL-DELTA',
'SCROLL-OFFSET',
@@ -2058,543 +2058,543 @@ OPENEDGEKEYWORDS = (
'SCROLL-TO-ITE',
'SCROLL-TO-ITEM',
'SCROLL-TO-SELECTED-ROW',
- 'SCROLLABLE',
- 'SCROLLBAR-H',
- 'SCROLLBAR-HO',
- 'SCROLLBAR-HOR',
- 'SCROLLBAR-HORI',
- 'SCROLLBAR-HORIZ',
- 'SCROLLBAR-HORIZO',
- 'SCROLLBAR-HORIZON',
- 'SCROLLBAR-HORIZONT',
- 'SCROLLBAR-HORIZONTA',
+ 'SCROLLABLE',
+ 'SCROLLBAR-H',
+ 'SCROLLBAR-HO',
+ 'SCROLLBAR-HOR',
+ 'SCROLLBAR-HORI',
+ 'SCROLLBAR-HORIZ',
+ 'SCROLLBAR-HORIZO',
+ 'SCROLLBAR-HORIZON',
+ 'SCROLLBAR-HORIZONT',
+ 'SCROLLBAR-HORIZONTA',
'SCROLLBAR-HORIZONTAL',
- 'SCROLLBAR-V',
- 'SCROLLBAR-VE',
- 'SCROLLBAR-VER',
- 'SCROLLBAR-VERT',
- 'SCROLLBAR-VERTI',
- 'SCROLLBAR-VERTIC',
- 'SCROLLBAR-VERTICA',
+ 'SCROLLBAR-V',
+ 'SCROLLBAR-VE',
+ 'SCROLLBAR-VER',
+ 'SCROLLBAR-VERT',
+ 'SCROLLBAR-VERTI',
+ 'SCROLLBAR-VERTIC',
+ 'SCROLLBAR-VERTICA',
'SCROLLBAR-VERTICAL',
- 'SCROLLED-ROW-POS',
- 'SCROLLED-ROW-POSI',
- 'SCROLLED-ROW-POSIT',
- 'SCROLLED-ROW-POSITI',
- 'SCROLLED-ROW-POSITIO',
+ 'SCROLLED-ROW-POS',
+ 'SCROLLED-ROW-POSI',
+ 'SCROLLED-ROW-POSIT',
+ 'SCROLLED-ROW-POSITI',
+ 'SCROLLED-ROW-POSITIO',
'SCROLLED-ROW-POSITION',
- 'SCROLLING',
- 'SDBNAME',
- 'SEAL',
- 'SEAL-TIMESTAMP',
- 'SEARCH',
- 'SEARCH-SELF',
- 'SEARCH-TARGET',
- 'SECTION',
- 'SECURITY-POLICY',
- 'SEEK',
- 'SELECT',
+ 'SCROLLING',
+ 'SDBNAME',
+ 'SEAL',
+ 'SEAL-TIMESTAMP',
+ 'SEARCH',
+ 'SEARCH-SELF',
+ 'SEARCH-TARGET',
+ 'SECTION',
+ 'SECURITY-POLICY',
+ 'SEEK',
+ 'SELECT',
'SELECT-ALL',
'SELECT-FOCUSED-ROW',
'SELECT-NEXT-ROW',
'SELECT-PREV-ROW',
'SELECT-ROW',
- 'SELECTABLE',
- 'SELECTED',
- 'SELECTION',
- 'SELECTION-END',
- 'SELECTION-LIST',
- 'SELECTION-START',
- 'SELECTION-TEXT',
- 'SELF',
- 'SEND',
+ 'SELECTABLE',
+ 'SELECTED',
+ 'SELECTION',
+ 'SELECTION-END',
+ 'SELECTION-LIST',
+ 'SELECTION-START',
+ 'SELECTION-TEXT',
+ 'SELF',
+ 'SEND',
'SEND-SQL-STATEMENT',
- 'SENSITIVE',
- 'SEPARATE-CONNECTION',
- 'SEPARATOR-FGCOLOR',
- 'SEPARATORS',
+ 'SENSITIVE',
+ 'SEPARATE-CONNECTION',
+ 'SEPARATOR-FGCOLOR',
+ 'SEPARATORS',
'SERIALIZABLE',
'SERIALIZE-HIDDEN',
'SERIALIZE-NAME',
- 'SERVER',
- 'SERVER-CONNECTION-BOUND',
- 'SERVER-CONNECTION-BOUND-REQUEST',
- 'SERVER-CONNECTION-CONTEXT',
- 'SERVER-CONNECTION-ID',
- 'SERVER-OPERATING-MODE',
- 'SESSION',
- 'SESSION-ID',
- 'SET',
- 'SET-APPL-CONTEXT',
- 'SET-ATTR-CALL-TYPE',
- 'SET-ATTRIBUTE-NODE',
- 'SET-BLUE',
- 'SET-BLUE-',
- 'SET-BLUE-V',
- 'SET-BLUE-VA',
- 'SET-BLUE-VAL',
- 'SET-BLUE-VALU',
+ 'SERVER',
+ 'SERVER-CONNECTION-BOUND',
+ 'SERVER-CONNECTION-BOUND-REQUEST',
+ 'SERVER-CONNECTION-CONTEXT',
+ 'SERVER-CONNECTION-ID',
+ 'SERVER-OPERATING-MODE',
+ 'SESSION',
+ 'SESSION-ID',
+ 'SET',
+ 'SET-APPL-CONTEXT',
+ 'SET-ATTR-CALL-TYPE',
+ 'SET-ATTRIBUTE-NODE',
+ 'SET-BLUE',
+ 'SET-BLUE-',
+ 'SET-BLUE-V',
+ 'SET-BLUE-VA',
+ 'SET-BLUE-VAL',
+ 'SET-BLUE-VALU',
'SET-BLUE-VALUE',
- 'SET-BREAK',
- 'SET-BUFFERS',
- 'SET-CALLBACK',
- 'SET-CLIENT',
- 'SET-COMMIT',
- 'SET-CONTENTS',
- 'SET-CURRENT-VALUE',
- 'SET-DB-CLIENT',
- 'SET-DYNAMIC',
- 'SET-EVENT-MANAGER-OPTION',
- 'SET-GREEN',
- 'SET-GREEN-',
- 'SET-GREEN-V',
- 'SET-GREEN-VA',
- 'SET-GREEN-VAL',
- 'SET-GREEN-VALU',
+ 'SET-BREAK',
+ 'SET-BUFFERS',
+ 'SET-CALLBACK',
+ 'SET-CLIENT',
+ 'SET-COMMIT',
+ 'SET-CONTENTS',
+ 'SET-CURRENT-VALUE',
+ 'SET-DB-CLIENT',
+ 'SET-DYNAMIC',
+ 'SET-EVENT-MANAGER-OPTION',
+ 'SET-GREEN',
+ 'SET-GREEN-',
+ 'SET-GREEN-V',
+ 'SET-GREEN-VA',
+ 'SET-GREEN-VAL',
+ 'SET-GREEN-VALU',
'SET-GREEN-VALUE',
- 'SET-INPUT-SOURCE',
- 'SET-OPTION',
- 'SET-OUTPUT-DESTINATION',
- 'SET-PARAMETER',
- 'SET-POINTER-VALUE',
- 'SET-PROPERTY',
- 'SET-RED',
- 'SET-RED-',
- 'SET-RED-V',
- 'SET-RED-VA',
- 'SET-RED-VAL',
- 'SET-RED-VALU',
+ 'SET-INPUT-SOURCE',
+ 'SET-OPTION',
+ 'SET-OUTPUT-DESTINATION',
+ 'SET-PARAMETER',
+ 'SET-POINTER-VALUE',
+ 'SET-PROPERTY',
+ 'SET-RED',
+ 'SET-RED-',
+ 'SET-RED-V',
+ 'SET-RED-VA',
+ 'SET-RED-VAL',
+ 'SET-RED-VALU',
'SET-RED-VALUE',
- 'SET-REPOSITIONED-ROW',
- 'SET-RGB-VALUE',
- 'SET-ROLLBACK',
- 'SET-SELECTION',
- 'SET-SIZE',
- 'SET-SORT-ARROW',
+ 'SET-REPOSITIONED-ROW',
+ 'SET-RGB-VALUE',
+ 'SET-ROLLBACK',
+ 'SET-SELECTION',
+ 'SET-SIZE',
+ 'SET-SORT-ARROW',
'SET-WAIT-STATE',
- 'SETUSER',
- 'SETUSERI',
+ 'SETUSER',
+ 'SETUSERI',
'SETUSERID',
- 'SHA1-DIGEST',
- 'SHARE',
- 'SHARE-',
- 'SHARE-L',
- 'SHARE-LO',
- 'SHARE-LOC',
+ 'SHA1-DIGEST',
+ 'SHARE',
+ 'SHARE-',
+ 'SHARE-L',
+ 'SHARE-LO',
+ 'SHARE-LOC',
'SHARE-LOCK',
'SHARED',
- 'SHOW-IN-TASKBAR',
+ 'SHOW-IN-TASKBAR',
'SHOW-STAT',
- 'SHOW-STATS',
+ 'SHOW-STATS',
'SIDE-LAB',
'SIDE-LABE',
'SIDE-LABEL',
- 'SIDE-LABEL-H',
- 'SIDE-LABEL-HA',
- 'SIDE-LABEL-HAN',
- 'SIDE-LABEL-HAND',
- 'SIDE-LABEL-HANDL',
+ 'SIDE-LABEL-H',
+ 'SIDE-LABEL-HA',
+ 'SIDE-LABEL-HAN',
+ 'SIDE-LABEL-HAND',
+ 'SIDE-LABEL-HANDL',
'SIDE-LABEL-HANDLE',
- 'SIDE-LABELS',
+ 'SIDE-LABELS',
'SIGNATURE',
- 'SILENT',
- 'SIMPLE',
- 'SINGLE',
+ 'SILENT',
+ 'SIMPLE',
+ 'SINGLE',
'SINGLE-RUN',
'SINGLETON',
- 'SIZE',
- 'SIZE-C',
- 'SIZE-CH',
- 'SIZE-CHA',
- 'SIZE-CHAR',
+ 'SIZE',
+ 'SIZE-C',
+ 'SIZE-CH',
+ 'SIZE-CHA',
+ 'SIZE-CHAR',
'SIZE-CHARS',
- 'SIZE-P',
- 'SIZE-PI',
- 'SIZE-PIX',
- 'SIZE-PIXE',
- 'SIZE-PIXEL',
+ 'SIZE-P',
+ 'SIZE-PI',
+ 'SIZE-PIX',
+ 'SIZE-PIXE',
+ 'SIZE-PIXEL',
'SIZE-PIXELS',
- 'SKIP',
- 'SKIP-DELETED-RECORD',
- 'SLIDER',
- 'SMALL-ICON',
+ 'SKIP',
+ 'SKIP-DELETED-RECORD',
+ 'SLIDER',
+ 'SMALL-ICON',
'SMALL-TITLE',
- 'SMALLINT',
- 'SOME',
- 'SORT',
- 'SORT-ASCENDING',
- 'SORT-NUMBER',
- 'SOURCE',
- 'SOURCE-PROCEDURE',
- 'SPACE',
- 'SQL',
- 'SQRT',
- 'SSL-SERVER-NAME',
- 'STANDALONE',
- 'START',
- 'START-DOCUMENT',
- 'START-ELEMENT',
- 'START-MOVE',
- 'START-RESIZE',
- 'START-ROW-RESIZE',
- 'STATE-DETAIL',
- 'STATIC',
- 'STATUS',
- 'STATUS-AREA',
- 'STATUS-AREA-FONT',
- 'STDCALL',
- 'STOP',
+ 'SMALLINT',
+ 'SOME',
+ 'SORT',
+ 'SORT-ASCENDING',
+ 'SORT-NUMBER',
+ 'SOURCE',
+ 'SOURCE-PROCEDURE',
+ 'SPACE',
+ 'SQL',
+ 'SQRT',
+ 'SSL-SERVER-NAME',
+ 'STANDALONE',
+ 'START',
+ 'START-DOCUMENT',
+ 'START-ELEMENT',
+ 'START-MOVE',
+ 'START-RESIZE',
+ 'START-ROW-RESIZE',
+ 'STATE-DETAIL',
+ 'STATIC',
+ 'STATUS',
+ 'STATUS-AREA',
+ 'STATUS-AREA-FONT',
+ 'STDCALL',
+ 'STOP',
'STOP-AFTER',
- 'STOP-PARSING',
+ 'STOP-PARSING',
'STOPPE',
- 'STOPPED',
- 'STORED-PROC',
- 'STORED-PROCE',
- 'STORED-PROCED',
- 'STORED-PROCEDU',
- 'STORED-PROCEDUR',
+ 'STOPPED',
+ 'STORED-PROC',
+ 'STORED-PROCE',
+ 'STORED-PROCED',
+ 'STORED-PROCEDU',
+ 'STORED-PROCEDUR',
'STORED-PROCEDURE',
- 'STREAM',
- 'STREAM-HANDLE',
- 'STREAM-IO',
- 'STRETCH-TO-FIT',
- 'STRICT',
+ 'STREAM',
+ 'STREAM-HANDLE',
+ 'STREAM-IO',
+ 'STRETCH-TO-FIT',
+ 'STRICT',
'STRICT-ENTITY-RESOLUTION',
- 'STRING',
- 'STRING-VALUE',
- 'STRING-XREF',
- 'SUB-AVE',
- 'SUB-AVER',
- 'SUB-AVERA',
- 'SUB-AVERAG',
+ 'STRING',
+ 'STRING-VALUE',
+ 'STRING-XREF',
+ 'SUB-AVE',
+ 'SUB-AVER',
+ 'SUB-AVERA',
+ 'SUB-AVERAG',
'SUB-AVERAGE',
- 'SUB-COUNT',
- 'SUB-MAXIMUM',
- 'SUB-MENU',
- 'SUB-MIN',
+ 'SUB-COUNT',
+ 'SUB-MAXIMUM',
+ 'SUB-MENU',
+ 'SUB-MIN',
'SUB-MINIMUM',
'SUB-TOTAL',
- 'SUBSCRIBE',
- 'SUBST',
- 'SUBSTI',
- 'SUBSTIT',
- 'SUBSTITU',
- 'SUBSTITUT',
+ 'SUBSCRIBE',
+ 'SUBST',
+ 'SUBSTI',
+ 'SUBSTIT',
+ 'SUBSTITU',
+ 'SUBSTITUT',
'SUBSTITUTE',
- 'SUBSTR',
- 'SUBSTRI',
- 'SUBSTRIN',
+ 'SUBSTR',
+ 'SUBSTRI',
+ 'SUBSTRIN',
'SUBSTRING',
- 'SUBTYPE',
- 'SUM',
+ 'SUBTYPE',
+ 'SUM',
'SUM-MAX',
'SUM-MAXI',
'SUM-MAXIM',
'SUM-MAXIMU',
- 'SUPER',
- 'SUPER-PROCEDURES',
- 'SUPPRESS-NAMESPACE-PROCESSING',
- 'SUPPRESS-W',
- 'SUPPRESS-WA',
- 'SUPPRESS-WAR',
- 'SUPPRESS-WARN',
- 'SUPPRESS-WARNI',
- 'SUPPRESS-WARNIN',
- 'SUPPRESS-WARNING',
+ 'SUPER',
+ 'SUPER-PROCEDURES',
+ 'SUPPRESS-NAMESPACE-PROCESSING',
+ 'SUPPRESS-W',
+ 'SUPPRESS-WA',
+ 'SUPPRESS-WAR',
+ 'SUPPRESS-WARN',
+ 'SUPPRESS-WARNI',
+ 'SUPPRESS-WARNIN',
+ 'SUPPRESS-WARNING',
'SUPPRESS-WARNINGS',
- 'SYMMETRIC-ENCRYPTION-ALGORITHM',
- 'SYMMETRIC-ENCRYPTION-IV',
- 'SYMMETRIC-ENCRYPTION-KEY',
- 'SYMMETRIC-SUPPORT',
- 'SYSTEM-ALERT',
- 'SYSTEM-ALERT-',
- 'SYSTEM-ALERT-B',
- 'SYSTEM-ALERT-BO',
- 'SYSTEM-ALERT-BOX',
- 'SYSTEM-ALERT-BOXE',
+ 'SYMMETRIC-ENCRYPTION-ALGORITHM',
+ 'SYMMETRIC-ENCRYPTION-IV',
+ 'SYMMETRIC-ENCRYPTION-KEY',
+ 'SYMMETRIC-SUPPORT',
+ 'SYSTEM-ALERT',
+ 'SYSTEM-ALERT-',
+ 'SYSTEM-ALERT-B',
+ 'SYSTEM-ALERT-BO',
+ 'SYSTEM-ALERT-BOX',
+ 'SYSTEM-ALERT-BOXE',
'SYSTEM-ALERT-BOXES',
- 'SYSTEM-DIALOG',
- 'SYSTEM-HELP',
- 'SYSTEM-ID',
+ 'SYSTEM-DIALOG',
+ 'SYSTEM-HELP',
+ 'SYSTEM-ID',
'TAB-POSITION',
'TAB-STOP',
- 'TABLE',
- 'TABLE-HANDLE',
- 'TABLE-NUMBER',
+ 'TABLE',
+ 'TABLE-HANDLE',
+ 'TABLE-NUMBER',
'TABLE-SCAN',
- 'TARGET',
- 'TARGET-PROCEDURE',
- 'TEMP-DIR',
- 'TEMP-DIRE',
- 'TEMP-DIREC',
- 'TEMP-DIRECT',
- 'TEMP-DIRECTO',
- 'TEMP-DIRECTOR',
+ 'TARGET',
+ 'TARGET-PROCEDURE',
+ 'TEMP-DIR',
+ 'TEMP-DIRE',
+ 'TEMP-DIREC',
+ 'TEMP-DIRECT',
+ 'TEMP-DIRECTO',
+ 'TEMP-DIRECTOR',
'TEMP-DIRECTORY',
- 'TEMP-TABLE',
- 'TEMP-TABLE-PREPARE',
- 'TERM',
- 'TERMI',
- 'TERMIN',
- 'TERMINA',
+ 'TEMP-TABLE',
+ 'TEMP-TABLE-PREPARE',
+ 'TERM',
+ 'TERMI',
+ 'TERMIN',
+ 'TERMINA',
'TERMINAL',
- 'TERMINATE',
- 'TEXT',
- 'TEXT-CURSOR',
- 'TEXT-SEG-GROW',
- 'TEXT-SELECTED',
- 'THEN',
- 'THIS-OBJECT',
- 'THIS-PROCEDURE',
+ 'TERMINATE',
+ 'TEXT',
+ 'TEXT-CURSOR',
+ 'TEXT-SEG-GROW',
+ 'TEXT-SELECTED',
+ 'THEN',
+ 'THIS-OBJECT',
+ 'THIS-PROCEDURE',
'THREAD-SAFE',
- 'THREE-D',
+ 'THREE-D',
'THROUGH',
- 'THROW',
- 'THRU',
- 'TIC-MARKS',
- 'TIME',
- 'TIME-SOURCE',
- 'TITLE',
- 'TITLE-BGC',
- 'TITLE-BGCO',
- 'TITLE-BGCOL',
- 'TITLE-BGCOLO',
+ 'THROW',
+ 'THRU',
+ 'TIC-MARKS',
+ 'TIME',
+ 'TIME-SOURCE',
+ 'TITLE',
+ 'TITLE-BGC',
+ 'TITLE-BGCO',
+ 'TITLE-BGCOL',
+ 'TITLE-BGCOLO',
'TITLE-BGCOLOR',
- 'TITLE-DC',
- 'TITLE-DCO',
- 'TITLE-DCOL',
- 'TITLE-DCOLO',
+ 'TITLE-DC',
+ 'TITLE-DCO',
+ 'TITLE-DCOL',
+ 'TITLE-DCOLO',
'TITLE-DCOLOR',
- 'TITLE-FGC',
- 'TITLE-FGCO',
- 'TITLE-FGCOL',
- 'TITLE-FGCOLO',
+ 'TITLE-FGC',
+ 'TITLE-FGCO',
+ 'TITLE-FGCOL',
+ 'TITLE-FGCOLO',
'TITLE-FGCOLOR',
- 'TITLE-FO',
- 'TITLE-FON',
+ 'TITLE-FO',
+ 'TITLE-FON',
'TITLE-FONT',
- 'TO',
+ 'TO',
'TO-ROWID',
- 'TODAY',
- 'TOGGLE-BOX',
- 'TOOLTIP',
- 'TOOLTIPS',
- 'TOP-NAV-QUERY',
- 'TOP-ONLY',
+ 'TODAY',
+ 'TOGGLE-BOX',
+ 'TOOLTIP',
+ 'TOOLTIPS',
+ 'TOP-NAV-QUERY',
+ 'TOP-ONLY',
'TOPIC',
- 'TOTAL',
- 'TRAILING',
- 'TRANS',
+ 'TOTAL',
+ 'TRAILING',
+ 'TRANS',
'TRANS-INIT-PROCEDURE',
- 'TRANSACTION',
- 'TRANSACTION-MODE',
- 'TRANSPARENT',
- 'TRIGGER',
- 'TRIGGERS',
- 'TRIM',
- 'TRUE',
- 'TRUNC',
- 'TRUNCA',
- 'TRUNCAT',
+ 'TRANSACTION',
+ 'TRANSACTION-MODE',
+ 'TRANSPARENT',
+ 'TRIGGER',
+ 'TRIGGERS',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNC',
+ 'TRUNCA',
+ 'TRUNCAT',
'TRUNCATE',
- 'TYPE',
- 'TYPE-OF',
- 'UNBOX',
- 'UNBUFF',
- 'UNBUFFE',
- 'UNBUFFER',
- 'UNBUFFERE',
+ 'TYPE',
+ 'TYPE-OF',
+ 'UNBOX',
+ 'UNBUFF',
+ 'UNBUFFE',
+ 'UNBUFFER',
+ 'UNBUFFERE',
'UNBUFFERED',
- 'UNDERL',
- 'UNDERLI',
- 'UNDERLIN',
+ 'UNDERL',
+ 'UNDERLI',
+ 'UNDERLIN',
'UNDERLINE',
- 'UNDO',
- 'UNFORM',
- 'UNFORMA',
- 'UNFORMAT',
- 'UNFORMATT',
- 'UNFORMATTE',
+ 'UNDO',
+ 'UNFORM',
+ 'UNFORMA',
+ 'UNFORMAT',
+ 'UNFORMATT',
+ 'UNFORMATTE',
'UNFORMATTED',
- 'UNION',
- 'UNIQUE',
- 'UNIQUE-ID',
- 'UNIQUE-MATCH',
- 'UNIX',
- 'UNLESS-HIDDEN',
- 'UNLOAD',
- 'UNSIGNED-LONG',
- 'UNSUBSCRIBE',
- 'UP',
- 'UPDATE',
- 'UPDATE-ATTRIBUTE',
- 'URL',
- 'URL-DECODE',
- 'URL-ENCODE',
- 'URL-PASSWORD',
- 'URL-USERID',
- 'USE',
- 'USE-DICT-EXPS',
- 'USE-FILENAME',
- 'USE-INDEX',
- 'USE-REVVIDEO',
- 'USE-TEXT',
- 'USE-UNDERLINE',
- 'USE-WIDGET-POOL',
+ 'UNION',
+ 'UNIQUE',
+ 'UNIQUE-ID',
+ 'UNIQUE-MATCH',
+ 'UNIX',
+ 'UNLESS-HIDDEN',
+ 'UNLOAD',
+ 'UNSIGNED-LONG',
+ 'UNSUBSCRIBE',
+ 'UP',
+ 'UPDATE',
+ 'UPDATE-ATTRIBUTE',
+ 'URL',
+ 'URL-DECODE',
+ 'URL-ENCODE',
+ 'URL-PASSWORD',
+ 'URL-USERID',
+ 'USE',
+ 'USE-DICT-EXPS',
+ 'USE-FILENAME',
+ 'USE-INDEX',
+ 'USE-REVVIDEO',
+ 'USE-TEXT',
+ 'USE-UNDERLINE',
+ 'USE-WIDGET-POOL',
'USER',
'USER-ID',
'USERID',
- 'USING',
- 'V6DISPLAY',
- 'V6FRAME',
+ 'USING',
+ 'V6DISPLAY',
+ 'V6FRAME',
'VALID-EVENT',
'VALID-HANDLE',
'VALID-OBJECT',
- 'VALIDATE',
- 'VALIDATE-EXPRESSION',
- 'VALIDATE-MESSAGE',
- 'VALIDATE-SEAL',
- 'VALIDATION-ENABLED',
- 'VALUE',
- 'VALUE-CHANGED',
- 'VALUES',
- 'VAR',
- 'VARI',
- 'VARIA',
- 'VARIAB',
- 'VARIABL',
+ 'VALIDATE',
+ 'VALIDATE-EXPRESSION',
+ 'VALIDATE-MESSAGE',
+ 'VALIDATE-SEAL',
+ 'VALIDATION-ENABLED',
+ 'VALUE',
+ 'VALUE-CHANGED',
+ 'VALUES',
+ 'VAR',
+ 'VARI',
+ 'VARIA',
+ 'VARIAB',
+ 'VARIABL',
'VARIABLE',
- 'VERBOSE',
- 'VERSION',
- 'VERT',
- 'VERTI',
- 'VERTIC',
- 'VERTICA',
+ 'VERBOSE',
+ 'VERSION',
+ 'VERT',
+ 'VERTI',
+ 'VERTIC',
+ 'VERTICA',
'VERTICAL',
- 'VIEW',
- 'VIEW-AS',
- 'VIEW-FIRST-COLUMN-ON-REOPEN',
- 'VIRTUAL-HEIGHT',
- 'VIRTUAL-HEIGHT-',
- 'VIRTUAL-HEIGHT-C',
- 'VIRTUAL-HEIGHT-CH',
- 'VIRTUAL-HEIGHT-CHA',
- 'VIRTUAL-HEIGHT-CHAR',
+ 'VIEW',
+ 'VIEW-AS',
+ 'VIEW-FIRST-COLUMN-ON-REOPEN',
+ 'VIRTUAL-HEIGHT',
+ 'VIRTUAL-HEIGHT-',
+ 'VIRTUAL-HEIGHT-C',
+ 'VIRTUAL-HEIGHT-CH',
+ 'VIRTUAL-HEIGHT-CHA',
+ 'VIRTUAL-HEIGHT-CHAR',
'VIRTUAL-HEIGHT-CHARS',
- 'VIRTUAL-HEIGHT-P',
- 'VIRTUAL-HEIGHT-PI',
- 'VIRTUAL-HEIGHT-PIX',
- 'VIRTUAL-HEIGHT-PIXE',
- 'VIRTUAL-HEIGHT-PIXEL',
+ 'VIRTUAL-HEIGHT-P',
+ 'VIRTUAL-HEIGHT-PI',
+ 'VIRTUAL-HEIGHT-PIX',
+ 'VIRTUAL-HEIGHT-PIXE',
+ 'VIRTUAL-HEIGHT-PIXEL',
'VIRTUAL-HEIGHT-PIXELS',
- 'VIRTUAL-WIDTH',
- 'VIRTUAL-WIDTH-',
- 'VIRTUAL-WIDTH-C',
- 'VIRTUAL-WIDTH-CH',
- 'VIRTUAL-WIDTH-CHA',
- 'VIRTUAL-WIDTH-CHAR',
+ 'VIRTUAL-WIDTH',
+ 'VIRTUAL-WIDTH-',
+ 'VIRTUAL-WIDTH-C',
+ 'VIRTUAL-WIDTH-CH',
+ 'VIRTUAL-WIDTH-CHA',
+ 'VIRTUAL-WIDTH-CHAR',
'VIRTUAL-WIDTH-CHARS',
- 'VIRTUAL-WIDTH-P',
- 'VIRTUAL-WIDTH-PI',
- 'VIRTUAL-WIDTH-PIX',
- 'VIRTUAL-WIDTH-PIXE',
- 'VIRTUAL-WIDTH-PIXEL',
+ 'VIRTUAL-WIDTH-P',
+ 'VIRTUAL-WIDTH-PI',
+ 'VIRTUAL-WIDTH-PIX',
+ 'VIRTUAL-WIDTH-PIXE',
+ 'VIRTUAL-WIDTH-PIXEL',
'VIRTUAL-WIDTH-PIXELS',
- 'VISIBLE',
- 'VOID',
- 'WAIT',
- 'WAIT-FOR',
- 'WARNING',
- 'WEB-CONTEXT',
- 'WEEKDAY',
- 'WHEN',
- 'WHERE',
- 'WHILE',
- 'WIDGET',
- 'WIDGET-E',
- 'WIDGET-EN',
- 'WIDGET-ENT',
- 'WIDGET-ENTE',
+ 'VISIBLE',
+ 'VOID',
+ 'WAIT',
+ 'WAIT-FOR',
+ 'WARNING',
+ 'WEB-CONTEXT',
+ 'WEEKDAY',
+ 'WHEN',
+ 'WHERE',
+ 'WHILE',
+ 'WIDGET',
+ 'WIDGET-E',
+ 'WIDGET-EN',
+ 'WIDGET-ENT',
+ 'WIDGET-ENTE',
'WIDGET-ENTER',
- 'WIDGET-ID',
- 'WIDGET-L',
- 'WIDGET-LE',
- 'WIDGET-LEA',
- 'WIDGET-LEAV',
+ 'WIDGET-ID',
+ 'WIDGET-L',
+ 'WIDGET-LE',
+ 'WIDGET-LEA',
+ 'WIDGET-LEAV',
'WIDGET-LEAVE',
- 'WIDGET-POOL',
- 'WIDTH',
- 'WIDTH-',
- 'WIDTH-C',
- 'WIDTH-CH',
- 'WIDTH-CHA',
- 'WIDTH-CHAR',
+ 'WIDGET-POOL',
+ 'WIDTH',
+ 'WIDTH-',
+ 'WIDTH-C',
+ 'WIDTH-CH',
+ 'WIDTH-CHA',
+ 'WIDTH-CHAR',
'WIDTH-CHARS',
- 'WIDTH-P',
- 'WIDTH-PI',
- 'WIDTH-PIX',
- 'WIDTH-PIXE',
- 'WIDTH-PIXEL',
+ 'WIDTH-P',
+ 'WIDTH-PI',
+ 'WIDTH-PIX',
+ 'WIDTH-PIXE',
+ 'WIDTH-PIXEL',
'WIDTH-PIXELS',
- 'WINDOW',
- 'WINDOW-MAXIM',
- 'WINDOW-MAXIMI',
- 'WINDOW-MAXIMIZ',
- 'WINDOW-MAXIMIZE',
+ 'WINDOW',
+ 'WINDOW-MAXIM',
+ 'WINDOW-MAXIMI',
+ 'WINDOW-MAXIMIZ',
+ 'WINDOW-MAXIMIZE',
'WINDOW-MAXIMIZED',
- 'WINDOW-MINIM',
- 'WINDOW-MINIMI',
- 'WINDOW-MINIMIZ',
- 'WINDOW-MINIMIZE',
+ 'WINDOW-MINIM',
+ 'WINDOW-MINIMI',
+ 'WINDOW-MINIMIZ',
+ 'WINDOW-MINIMIZE',
'WINDOW-MINIMIZED',
- 'WINDOW-NAME',
- 'WINDOW-NORMAL',
- 'WINDOW-STA',
- 'WINDOW-STAT',
+ 'WINDOW-NAME',
+ 'WINDOW-NORMAL',
+ 'WINDOW-STA',
+ 'WINDOW-STAT',
'WINDOW-STATE',
- 'WINDOW-SYSTEM',
- 'WITH',
- 'WORD-INDEX',
- 'WORD-WRAP',
- 'WORK-AREA-HEIGHT-PIXELS',
- 'WORK-AREA-WIDTH-PIXELS',
- 'WORK-AREA-X',
- 'WORK-AREA-Y',
- 'WORK-TAB',
- 'WORK-TABL',
+ 'WINDOW-SYSTEM',
+ 'WITH',
+ 'WORD-INDEX',
+ 'WORD-WRAP',
+ 'WORK-AREA-HEIGHT-PIXELS',
+ 'WORK-AREA-WIDTH-PIXELS',
+ 'WORK-AREA-X',
+ 'WORK-AREA-Y',
+ 'WORK-TAB',
+ 'WORK-TABL',
'WORK-TABLE',
'WORKFILE',
- 'WRITE',
- 'WRITE-CDATA',
- 'WRITE-CHARACTERS',
- 'WRITE-COMMENT',
- 'WRITE-DATA-ELEMENT',
- 'WRITE-EMPTY-ELEMENT',
- 'WRITE-ENTITY-REF',
- 'WRITE-EXTERNAL-DTD',
- 'WRITE-FRAGMENT',
+ 'WRITE',
+ 'WRITE-CDATA',
+ 'WRITE-CHARACTERS',
+ 'WRITE-COMMENT',
+ 'WRITE-DATA-ELEMENT',
+ 'WRITE-EMPTY-ELEMENT',
+ 'WRITE-ENTITY-REF',
+ 'WRITE-EXTERNAL-DTD',
+ 'WRITE-FRAGMENT',
'WRITE-JSON',
- 'WRITE-MESSAGE',
- 'WRITE-PROCESSING-INSTRUCTION',
- 'WRITE-STATUS',
- 'WRITE-XML',
- 'WRITE-XMLSCHEMA',
- 'X',
+ 'WRITE-MESSAGE',
+ 'WRITE-PROCESSING-INSTRUCTION',
+ 'WRITE-STATUS',
+ 'WRITE-XML',
+ 'WRITE-XMLSCHEMA',
+ 'X',
'X-OF',
- 'XCODE',
- 'XML-DATA-TYPE',
+ 'XCODE',
+ 'XML-DATA-TYPE',
'XML-ENTITY-EXPANSION-LIMIT',
- 'XML-NODE-TYPE',
- 'XML-SCHEMA-PATH',
+ 'XML-NODE-TYPE',
+ 'XML-SCHEMA-PATH',
'XML-STRICT-ENTITY-RESOLUTION',
- 'XML-SUPPRESS-NAMESPACE-PROCESSING',
- 'XREF',
- 'XREF-XML',
- 'Y',
+ 'XML-SUPPRESS-NAMESPACE-PROCESSING',
+ 'XREF',
+ 'XREF-XML',
+ 'Y',
'Y-OF',
- 'YEAR',
- 'YEAR-OFFSET',
- 'YES',
- 'YES-NO',
+ 'YEAR',
+ 'YEAR-OFFSET',
+ 'YES',
+ 'YES-NO',
'YES-NO-CANCEL'
-)
+)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
index 168cb4460b..2675e5cf76 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
@@ -1,4700 +1,4700 @@
-"""
- pygments.lexers._php_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file loads the function names and their modules from the
- php webpage and generates itself.
-
- Do not alter the MODULES dict by hand!
-
- WARNING: the generation transfers quite much data over your
- internet connection. don't run that at home, use
- a server ;-)
-
+"""
+ pygments.lexers._php_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file loads the function names and their modules from the
+ php webpage and generates itself.
+
+ Do not alter the MODULES dict by hand!
+
+ WARNING: the generation transfers quite much data over your
+ internet connection. don't run that at home, use
+ a server ;-)
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-MODULES = {'.NET': ('dotnet_load',),
- 'APC': ('apc_add',
- 'apc_bin_dump',
- 'apc_bin_dumpfile',
- 'apc_bin_load',
- 'apc_bin_loadfile',
- 'apc_cache_info',
- 'apc_cas',
- 'apc_clear_cache',
- 'apc_compile_file',
- 'apc_dec',
- 'apc_define_constants',
- 'apc_delete_file',
- 'apc_delete',
- 'apc_exists',
- 'apc_fetch',
- 'apc_inc',
- 'apc_load_constants',
- 'apc_sma_info',
- 'apc_store'),
- 'APD': ('apd_breakpoint',
- 'apd_callstack',
- 'apd_clunk',
- 'apd_continue',
- 'apd_croak',
- 'apd_dump_function_table',
- 'apd_dump_persistent_resources',
- 'apd_dump_regular_resources',
- 'apd_echo',
- 'apd_get_active_symbols',
- 'apd_set_pprof_trace',
- 'apd_set_session_trace_socket',
- 'apd_set_session_trace',
- 'apd_set_session',
- 'override_function',
- 'rename_function'),
- 'Aliases and deprecated Mysqli': ('mysqli_bind_param',
- 'mysqli_bind_result',
- 'mysqli_client_encoding',
- 'mysqli_connect',
- 'mysqli_disable_rpl_parse',
- 'mysqli_enable_reads_from_master',
- 'mysqli_enable_rpl_parse',
- 'mysqli_escape_string',
- 'mysqli_execute',
- 'mysqli_fetch',
- 'mysqli_get_cache_stats',
- 'mysqli_get_metadata',
- 'mysqli_master_query',
- 'mysqli_param_count',
- 'mysqli_report',
- 'mysqli_rpl_parse_enabled',
- 'mysqli_rpl_probe',
- 'mysqli_send_long_data',
- 'mysqli_slave_query'),
- 'Apache': ('apache_child_terminate',
- 'apache_get_modules',
- 'apache_get_version',
- 'apache_getenv',
- 'apache_lookup_uri',
- 'apache_note',
- 'apache_request_headers',
- 'apache_reset_timeout',
- 'apache_response_headers',
- 'apache_setenv',
- 'getallheaders',
- 'virtual'),
- 'Array': ('array_change_key_case',
- 'array_chunk',
- 'array_column',
- 'array_combine',
- 'array_count_values',
- 'array_diff_assoc',
- 'array_diff_key',
- 'array_diff_uassoc',
- 'array_diff_ukey',
- 'array_diff',
- 'array_fill_keys',
- 'array_fill',
- 'array_filter',
- 'array_flip',
- 'array_intersect_assoc',
- 'array_intersect_key',
- 'array_intersect_uassoc',
- 'array_intersect_ukey',
- 'array_intersect',
- 'array_key_exists',
- 'array_keys',
- 'array_map',
- 'array_merge_recursive',
- 'array_merge',
- 'array_multisort',
- 'array_pad',
- 'array_pop',
- 'array_product',
- 'array_push',
- 'array_rand',
- 'array_reduce',
- 'array_replace_recursive',
- 'array_replace',
- 'array_reverse',
- 'array_search',
- 'array_shift',
- 'array_slice',
- 'array_splice',
- 'array_sum',
- 'array_udiff_assoc',
- 'array_udiff_uassoc',
- 'array_udiff',
- 'array_uintersect_assoc',
- 'array_uintersect_uassoc',
- 'array_uintersect',
- 'array_unique',
- 'array_unshift',
- 'array_values',
- 'array_walk_recursive',
- 'array_walk',
- 'array',
- 'arsort',
- 'asort',
- 'compact',
- 'count',
- 'current',
- 'each',
- 'end',
- 'extract',
- 'in_array',
- 'key_exists',
- 'key',
- 'krsort',
- 'ksort',
- 'list',
- 'natcasesort',
- 'natsort',
- 'next',
- 'pos',
- 'prev',
- 'range',
- 'reset',
- 'rsort',
- 'shuffle',
- 'sizeof',
- 'sort',
- 'uasort',
- 'uksort',
- 'usort'),
- 'BBCode': ('bbcode_add_element',
- 'bbcode_add_smiley',
- 'bbcode_create',
- 'bbcode_destroy',
- 'bbcode_parse',
- 'bbcode_set_arg_parser',
- 'bbcode_set_flags'),
- 'BC Math': ('bcadd',
- 'bccomp',
- 'bcdiv',
- 'bcmod',
- 'bcmul',
- 'bcpow',
- 'bcpowmod',
- 'bcscale',
- 'bcsqrt',
- 'bcsub'),
- 'Blenc': ('blenc_encrypt',),
- 'Bzip2': ('bzclose',
- 'bzcompress',
- 'bzdecompress',
- 'bzerrno',
- 'bzerror',
- 'bzerrstr',
- 'bzflush',
- 'bzopen',
- 'bzread',
- 'bzwrite'),
- 'COM': ('com_addref',
- 'com_create_guid',
- 'com_event_sink',
- 'com_get_active_object',
- 'com_get',
- 'com_invoke',
- 'com_isenum',
- 'com_load_typelib',
- 'com_load',
- 'com_message_pump',
- 'com_print_typeinfo',
- 'com_propget',
- 'com_propput',
- 'com_propset',
- 'com_release',
- 'com_set',
- 'variant_abs',
- 'variant_add',
- 'variant_and',
- 'variant_cast',
- 'variant_cat',
- 'variant_cmp',
- 'variant_date_from_timestamp',
- 'variant_date_to_timestamp',
- 'variant_div',
- 'variant_eqv',
- 'variant_fix',
- 'variant_get_type',
- 'variant_idiv',
- 'variant_imp',
- 'variant_int',
- 'variant_mod',
- 'variant_mul',
- 'variant_neg',
- 'variant_not',
- 'variant_or',
- 'variant_pow',
- 'variant_round',
- 'variant_set_type',
- 'variant_set',
- 'variant_sub',
- 'variant_xor'),
- 'CUBRID': ('cubrid_bind',
- 'cubrid_close_prepare',
- 'cubrid_close_request',
- 'cubrid_col_get',
- 'cubrid_col_size',
- 'cubrid_column_names',
- 'cubrid_column_types',
- 'cubrid_commit',
- 'cubrid_connect_with_url',
- 'cubrid_connect',
- 'cubrid_current_oid',
- 'cubrid_disconnect',
- 'cubrid_drop',
- 'cubrid_error_code_facility',
- 'cubrid_error_code',
- 'cubrid_error_msg',
- 'cubrid_execute',
- 'cubrid_fetch',
- 'cubrid_free_result',
- 'cubrid_get_autocommit',
- 'cubrid_get_charset',
- 'cubrid_get_class_name',
- 'cubrid_get_client_info',
- 'cubrid_get_db_parameter',
- 'cubrid_get_query_timeout',
- 'cubrid_get_server_info',
- 'cubrid_get',
- 'cubrid_insert_id',
- 'cubrid_is_instance',
- 'cubrid_lob_close',
- 'cubrid_lob_export',
- 'cubrid_lob_get',
- 'cubrid_lob_send',
- 'cubrid_lob_size',
- 'cubrid_lob2_bind',
- 'cubrid_lob2_close',
- 'cubrid_lob2_export',
- 'cubrid_lob2_import',
- 'cubrid_lob2_new',
- 'cubrid_lob2_read',
- 'cubrid_lob2_seek64',
- 'cubrid_lob2_seek',
- 'cubrid_lob2_size64',
- 'cubrid_lob2_size',
- 'cubrid_lob2_tell64',
- 'cubrid_lob2_tell',
- 'cubrid_lob2_write',
- 'cubrid_lock_read',
- 'cubrid_lock_write',
- 'cubrid_move_cursor',
- 'cubrid_next_result',
- 'cubrid_num_cols',
- 'cubrid_num_rows',
- 'cubrid_pconnect_with_url',
- 'cubrid_pconnect',
- 'cubrid_prepare',
- 'cubrid_put',
- 'cubrid_rollback',
- 'cubrid_schema',
- 'cubrid_seq_drop',
- 'cubrid_seq_insert',
- 'cubrid_seq_put',
- 'cubrid_set_add',
- 'cubrid_set_autocommit',
- 'cubrid_set_db_parameter',
- 'cubrid_set_drop',
- 'cubrid_set_query_timeout',
- 'cubrid_version'),
- 'Cairo': ('cairo_create',
- 'cairo_font_face_get_type',
- 'cairo_font_face_status',
- 'cairo_font_options_create',
- 'cairo_font_options_equal',
- 'cairo_font_options_get_antialias',
- 'cairo_font_options_get_hint_metrics',
- 'cairo_font_options_get_hint_style',
- 'cairo_font_options_get_subpixel_order',
- 'cairo_font_options_hash',
- 'cairo_font_options_merge',
- 'cairo_font_options_set_antialias',
- 'cairo_font_options_set_hint_metrics',
- 'cairo_font_options_set_hint_style',
- 'cairo_font_options_set_subpixel_order',
- 'cairo_font_options_status',
- 'cairo_format_stride_for_width',
- 'cairo_image_surface_create_for_data',
- 'cairo_image_surface_create_from_png',
- 'cairo_image_surface_create',
- 'cairo_image_surface_get_data',
- 'cairo_image_surface_get_format',
- 'cairo_image_surface_get_height',
- 'cairo_image_surface_get_stride',
- 'cairo_image_surface_get_width',
- 'cairo_matrix_create_scale',
- 'cairo_matrix_create_translate',
- 'cairo_matrix_invert',
- 'cairo_matrix_multiply',
- 'cairo_matrix_rotate',
- 'cairo_matrix_transform_distance',
- 'cairo_matrix_transform_point',
- 'cairo_matrix_translate',
- 'cairo_pattern_add_color_stop_rgb',
- 'cairo_pattern_add_color_stop_rgba',
- 'cairo_pattern_create_for_surface',
- 'cairo_pattern_create_linear',
- 'cairo_pattern_create_radial',
- 'cairo_pattern_create_rgb',
- 'cairo_pattern_create_rgba',
- 'cairo_pattern_get_color_stop_count',
- 'cairo_pattern_get_color_stop_rgba',
- 'cairo_pattern_get_extend',
- 'cairo_pattern_get_filter',
- 'cairo_pattern_get_linear_points',
- 'cairo_pattern_get_matrix',
- 'cairo_pattern_get_radial_circles',
- 'cairo_pattern_get_rgba',
- 'cairo_pattern_get_surface',
- 'cairo_pattern_get_type',
- 'cairo_pattern_set_extend',
- 'cairo_pattern_set_filter',
- 'cairo_pattern_set_matrix',
- 'cairo_pattern_status',
- 'cairo_pdf_surface_create',
- 'cairo_pdf_surface_set_size',
- 'cairo_ps_get_levels',
- 'cairo_ps_level_to_string',
- 'cairo_ps_surface_create',
- 'cairo_ps_surface_dsc_begin_page_setup',
- 'cairo_ps_surface_dsc_begin_setup',
- 'cairo_ps_surface_dsc_comment',
- 'cairo_ps_surface_get_eps',
- 'cairo_ps_surface_restrict_to_level',
- 'cairo_ps_surface_set_eps',
- 'cairo_ps_surface_set_size',
- 'cairo_scaled_font_create',
- 'cairo_scaled_font_extents',
- 'cairo_scaled_font_get_ctm',
- 'cairo_scaled_font_get_font_face',
- 'cairo_scaled_font_get_font_matrix',
- 'cairo_scaled_font_get_font_options',
- 'cairo_scaled_font_get_scale_matrix',
- 'cairo_scaled_font_get_type',
- 'cairo_scaled_font_glyph_extents',
- 'cairo_scaled_font_status',
- 'cairo_scaled_font_text_extents',
- 'cairo_surface_copy_page',
- 'cairo_surface_create_similar',
- 'cairo_surface_finish',
- 'cairo_surface_flush',
- 'cairo_surface_get_content',
- 'cairo_surface_get_device_offset',
- 'cairo_surface_get_font_options',
- 'cairo_surface_get_type',
- 'cairo_surface_mark_dirty_rectangle',
- 'cairo_surface_mark_dirty',
- 'cairo_surface_set_device_offset',
- 'cairo_surface_set_fallback_resolution',
- 'cairo_surface_show_page',
- 'cairo_surface_status',
- 'cairo_surface_write_to_png',
- 'cairo_svg_surface_create',
- 'cairo_svg_surface_restrict_to_version',
- 'cairo_svg_version_to_string'),
- 'Calendar': ('cal_days_in_month',
- 'cal_from_jd',
- 'cal_info',
- 'cal_to_jd',
- 'easter_date',
- 'easter_days',
- 'FrenchToJD',
- 'GregorianToJD',
- 'JDDayOfWeek',
- 'JDMonthName',
- 'JDToFrench',
- 'JDToGregorian',
- 'jdtojewish',
- 'JDToJulian',
- 'jdtounix',
- 'JewishToJD',
- 'JulianToJD',
- 'unixtojd'),
- 'Classes/Object': ('__autoload',
- 'call_user_method_array',
- 'call_user_method',
- 'class_alias',
- 'class_exists',
- 'get_called_class',
- 'get_class_methods',
- 'get_class_vars',
- 'get_class',
- 'get_declared_classes',
- 'get_declared_interfaces',
- 'get_declared_traits',
- 'get_object_vars',
- 'get_parent_class',
- 'interface_exists',
- 'is_a',
- 'is_subclass_of',
- 'method_exists',
- 'property_exists',
- 'trait_exists'),
- 'Classkit': ('classkit_import',
- 'classkit_method_add',
- 'classkit_method_copy',
- 'classkit_method_redefine',
- 'classkit_method_remove',
- 'classkit_method_rename'),
- 'Crack': ('crack_check',
- 'crack_closedict',
- 'crack_getlastmessage',
- 'crack_opendict'),
- 'Ctype': ('ctype_alnum',
- 'ctype_alpha',
- 'ctype_cntrl',
- 'ctype_digit',
- 'ctype_graph',
- 'ctype_lower',
- 'ctype_print',
- 'ctype_punct',
- 'ctype_space',
- 'ctype_upper',
- 'ctype_xdigit'),
- 'Cyrus': ('cyrus_authenticate',
- 'cyrus_bind',
- 'cyrus_close',
- 'cyrus_connect',
- 'cyrus_query',
- 'cyrus_unbind'),
- 'DB++': ('dbplus_add',
- 'dbplus_aql',
- 'dbplus_chdir',
- 'dbplus_close',
- 'dbplus_curr',
- 'dbplus_errcode',
- 'dbplus_errno',
- 'dbplus_find',
- 'dbplus_first',
- 'dbplus_flush',
- 'dbplus_freealllocks',
- 'dbplus_freelock',
- 'dbplus_freerlocks',
- 'dbplus_getlock',
- 'dbplus_getunique',
- 'dbplus_info',
- 'dbplus_last',
- 'dbplus_lockrel',
- 'dbplus_next',
- 'dbplus_open',
- 'dbplus_prev',
- 'dbplus_rchperm',
- 'dbplus_rcreate',
- 'dbplus_rcrtexact',
- 'dbplus_rcrtlike',
- 'dbplus_resolve',
- 'dbplus_restorepos',
- 'dbplus_rkeys',
- 'dbplus_ropen',
- 'dbplus_rquery',
- 'dbplus_rrename',
- 'dbplus_rsecindex',
- 'dbplus_runlink',
- 'dbplus_rzap',
- 'dbplus_savepos',
- 'dbplus_setindex',
- 'dbplus_setindexbynumber',
- 'dbplus_sql',
- 'dbplus_tcl',
- 'dbplus_tremove',
- 'dbplus_undo',
- 'dbplus_undoprepare',
- 'dbplus_unlockrel',
- 'dbplus_unselect',
- 'dbplus_update',
- 'dbplus_xlockrel',
- 'dbplus_xunlockrel'),
- 'DBA': ('dba_close',
- 'dba_delete',
- 'dba_exists',
- 'dba_fetch',
- 'dba_firstkey',
- 'dba_handlers',
- 'dba_insert',
- 'dba_key_split',
- 'dba_list',
- 'dba_nextkey',
- 'dba_open',
- 'dba_optimize',
- 'dba_popen',
- 'dba_replace',
- 'dba_sync'),
- 'DOM': ('dom_import_simplexml',),
- 'Date/Time': ('checkdate',
- 'date_add',
- 'date_create_from_format',
- 'date_create_immutable_from_format',
- 'date_create_immutable',
- 'date_create',
- 'date_date_set',
- 'date_default_timezone_get',
- 'date_default_timezone_set',
- 'date_diff',
- 'date_format',
- 'date_get_last_errors',
- 'date_interval_create_from_date_string',
- 'date_interval_format',
- 'date_isodate_set',
- 'date_modify',
- 'date_offset_get',
- 'date_parse_from_format',
- 'date_parse',
- 'date_sub',
- 'date_sun_info',
- 'date_sunrise',
- 'date_sunset',
- 'date_time_set',
- 'date_timestamp_get',
- 'date_timestamp_set',
- 'date_timezone_get',
- 'date_timezone_set',
- 'date',
- 'getdate',
- 'gettimeofday',
- 'gmdate',
- 'gmmktime',
- 'gmstrftime',
- 'idate',
- 'localtime',
- 'microtime',
- 'mktime',
- 'strftime',
- 'strptime',
- 'strtotime',
- 'time',
- 'timezone_abbreviations_list',
- 'timezone_identifiers_list',
- 'timezone_location_get',
- 'timezone_name_from_abbr',
- 'timezone_name_get',
- 'timezone_offset_get',
- 'timezone_open',
- 'timezone_transitions_get',
- 'timezone_version_get'),
- 'Direct IO': ('dio_close',
- 'dio_fcntl',
- 'dio_open',
- 'dio_read',
- 'dio_seek',
- 'dio_stat',
- 'dio_tcsetattr',
- 'dio_truncate',
- 'dio_write'),
- 'Directory': ('chdir',
- 'chroot',
- 'closedir',
- 'dir',
- 'getcwd',
- 'opendir',
- 'readdir',
- 'rewinddir',
- 'scandir'),
- 'Eio': ('eio_busy',
- 'eio_cancel',
- 'eio_chmod',
- 'eio_chown',
- 'eio_close',
- 'eio_custom',
- 'eio_dup2',
- 'eio_event_loop',
- 'eio_fallocate',
- 'eio_fchmod',
- 'eio_fchown',
- 'eio_fdatasync',
- 'eio_fstat',
- 'eio_fstatvfs',
- 'eio_fsync',
- 'eio_ftruncate',
- 'eio_futime',
- 'eio_get_event_stream',
- 'eio_get_last_error',
- 'eio_grp_add',
- 'eio_grp_cancel',
- 'eio_grp_limit',
- 'eio_grp',
- 'eio_init',
- 'eio_link',
- 'eio_lstat',
- 'eio_mkdir',
- 'eio_mknod',
- 'eio_nop',
- 'eio_npending',
- 'eio_nready',
- 'eio_nreqs',
- 'eio_nthreads',
- 'eio_open',
- 'eio_poll',
- 'eio_read',
- 'eio_readahead',
- 'eio_readdir',
- 'eio_readlink',
- 'eio_realpath',
- 'eio_rename',
- 'eio_rmdir',
- 'eio_seek',
- 'eio_sendfile',
- 'eio_set_max_idle',
- 'eio_set_max_parallel',
- 'eio_set_max_poll_reqs',
- 'eio_set_max_poll_time',
- 'eio_set_min_parallel',
- 'eio_stat',
- 'eio_statvfs',
- 'eio_symlink',
- 'eio_sync_file_range',
- 'eio_sync',
- 'eio_syncfs',
- 'eio_truncate',
- 'eio_unlink',
- 'eio_utime',
- 'eio_write'),
- 'Enchant': ('enchant_broker_describe',
- 'enchant_broker_dict_exists',
- 'enchant_broker_free_dict',
- 'enchant_broker_free',
- 'enchant_broker_get_error',
- 'enchant_broker_init',
- 'enchant_broker_list_dicts',
- 'enchant_broker_request_dict',
- 'enchant_broker_request_pwl_dict',
- 'enchant_broker_set_ordering',
- 'enchant_dict_add_to_personal',
- 'enchant_dict_add_to_session',
- 'enchant_dict_check',
- 'enchant_dict_describe',
- 'enchant_dict_get_error',
- 'enchant_dict_is_in_session',
- 'enchant_dict_quick_check',
- 'enchant_dict_store_replacement',
- 'enchant_dict_suggest'),
- 'Error Handling': ('debug_backtrace',
- 'debug_print_backtrace',
- 'error_get_last',
- 'error_log',
- 'error_reporting',
- 'restore_error_handler',
- 'restore_exception_handler',
- 'set_error_handler',
- 'set_exception_handler',
- 'trigger_error',
- 'user_error'),
- 'Exif': ('exif_imagetype',
- 'exif_read_data',
- 'exif_tagname',
- 'exif_thumbnail',
- 'read_exif_data'),
- 'Expect': ('expect_expectl', 'expect_popen'),
- 'FAM': ('fam_cancel_monitor',
- 'fam_close',
- 'fam_monitor_collection',
- 'fam_monitor_directory',
- 'fam_monitor_file',
- 'fam_next_event',
- 'fam_open',
- 'fam_pending',
- 'fam_resume_monitor',
- 'fam_suspend_monitor'),
- 'FDF': ('fdf_add_doc_javascript',
- 'fdf_add_template',
- 'fdf_close',
- 'fdf_create',
- 'fdf_enum_values',
- 'fdf_errno',
- 'fdf_error',
- 'fdf_get_ap',
- 'fdf_get_attachment',
- 'fdf_get_encoding',
- 'fdf_get_file',
- 'fdf_get_flags',
- 'fdf_get_opt',
- 'fdf_get_status',
- 'fdf_get_value',
- 'fdf_get_version',
- 'fdf_header',
- 'fdf_next_field_name',
- 'fdf_open_string',
- 'fdf_open',
- 'fdf_remove_item',
- 'fdf_save_string',
- 'fdf_save',
- 'fdf_set_ap',
- 'fdf_set_encoding',
- 'fdf_set_file',
- 'fdf_set_flags',
- 'fdf_set_javascript_action',
- 'fdf_set_on_import_javascript',
- 'fdf_set_opt',
- 'fdf_set_status',
- 'fdf_set_submit_form_action',
- 'fdf_set_target_frame',
- 'fdf_set_value',
- 'fdf_set_version'),
- 'FPM': ('fastcgi_finish_request',),
- 'FTP': ('ftp_alloc',
- 'ftp_cdup',
- 'ftp_chdir',
- 'ftp_chmod',
- 'ftp_close',
- 'ftp_connect',
- 'ftp_delete',
- 'ftp_exec',
- 'ftp_fget',
- 'ftp_fput',
- 'ftp_get_option',
- 'ftp_get',
- 'ftp_login',
- 'ftp_mdtm',
- 'ftp_mkdir',
- 'ftp_nb_continue',
- 'ftp_nb_fget',
- 'ftp_nb_fput',
- 'ftp_nb_get',
- 'ftp_nb_put',
- 'ftp_nlist',
- 'ftp_pasv',
- 'ftp_put',
- 'ftp_pwd',
- 'ftp_quit',
- 'ftp_raw',
- 'ftp_rawlist',
- 'ftp_rename',
- 'ftp_rmdir',
- 'ftp_set_option',
- 'ftp_site',
- 'ftp_size',
- 'ftp_ssl_connect',
- 'ftp_systype'),
- 'Fann': ('fann_cascadetrain_on_data',
- 'fann_cascadetrain_on_file',
- 'fann_clear_scaling_params',
- 'fann_copy',
- 'fann_create_from_file',
- 'fann_create_shortcut_array',
- 'fann_create_shortcut',
- 'fann_create_sparse_array',
- 'fann_create_sparse',
- 'fann_create_standard_array',
- 'fann_create_standard',
- 'fann_create_train_from_callback',
- 'fann_create_train',
- 'fann_descale_input',
- 'fann_descale_output',
- 'fann_descale_train',
- 'fann_destroy_train',
- 'fann_destroy',
- 'fann_duplicate_train_data',
- 'fann_get_activation_function',
- 'fann_get_activation_steepness',
- 'fann_get_bias_array',
- 'fann_get_bit_fail_limit',
- 'fann_get_bit_fail',
- 'fann_get_cascade_activation_functions_count',
- 'fann_get_cascade_activation_functions',
- 'fann_get_cascade_activation_steepnesses_count',
- 'fann_get_cascade_activation_steepnesses',
- 'fann_get_cascade_candidate_change_fraction',
- 'fann_get_cascade_candidate_limit',
- 'fann_get_cascade_candidate_stagnation_epochs',
- 'fann_get_cascade_max_cand_epochs',
- 'fann_get_cascade_max_out_epochs',
- 'fann_get_cascade_min_cand_epochs',
- 'fann_get_cascade_min_out_epochs',
- 'fann_get_cascade_num_candidate_groups',
- 'fann_get_cascade_num_candidates',
- 'fann_get_cascade_output_change_fraction',
- 'fann_get_cascade_output_stagnation_epochs',
- 'fann_get_cascade_weight_multiplier',
- 'fann_get_connection_array',
- 'fann_get_connection_rate',
- 'fann_get_errno',
- 'fann_get_errstr',
- 'fann_get_layer_array',
- 'fann_get_learning_momentum',
- 'fann_get_learning_rate',
- 'fann_get_MSE',
- 'fann_get_network_type',
- 'fann_get_num_input',
- 'fann_get_num_layers',
- 'fann_get_num_output',
- 'fann_get_quickprop_decay',
- 'fann_get_quickprop_mu',
- 'fann_get_rprop_decrease_factor',
- 'fann_get_rprop_delta_max',
- 'fann_get_rprop_delta_min',
- 'fann_get_rprop_delta_zero',
- 'fann_get_rprop_increase_factor',
- 'fann_get_sarprop_step_error_shift',
- 'fann_get_sarprop_step_error_threshold_factor',
- 'fann_get_sarprop_temperature',
- 'fann_get_sarprop_weight_decay_shift',
- 'fann_get_total_connections',
- 'fann_get_total_neurons',
- 'fann_get_train_error_function',
- 'fann_get_train_stop_function',
- 'fann_get_training_algorithm',
- 'fann_init_weights',
- 'fann_length_train_data',
- 'fann_merge_train_data',
- 'fann_num_input_train_data',
- 'fann_num_output_train_data',
- 'fann_print_error',
- 'fann_randomize_weights',
- 'fann_read_train_from_file',
- 'fann_reset_errno',
- 'fann_reset_errstr',
- 'fann_reset_MSE',
- 'fann_run',
- 'fann_save_train',
- 'fann_save',
- 'fann_scale_input_train_data',
- 'fann_scale_input',
- 'fann_scale_output_train_data',
- 'fann_scale_output',
- 'fann_scale_train_data',
- 'fann_scale_train',
- 'fann_set_activation_function_hidden',
- 'fann_set_activation_function_layer',
- 'fann_set_activation_function_output',
- 'fann_set_activation_function',
- 'fann_set_activation_steepness_hidden',
- 'fann_set_activation_steepness_layer',
- 'fann_set_activation_steepness_output',
- 'fann_set_activation_steepness',
- 'fann_set_bit_fail_limit',
- 'fann_set_callback',
- 'fann_set_cascade_activation_functions',
- 'fann_set_cascade_activation_steepnesses',
- 'fann_set_cascade_candidate_change_fraction',
- 'fann_set_cascade_candidate_limit',
- 'fann_set_cascade_candidate_stagnation_epochs',
- 'fann_set_cascade_max_cand_epochs',
- 'fann_set_cascade_max_out_epochs',
- 'fann_set_cascade_min_cand_epochs',
- 'fann_set_cascade_min_out_epochs',
- 'fann_set_cascade_num_candidate_groups',
- 'fann_set_cascade_output_change_fraction',
- 'fann_set_cascade_output_stagnation_epochs',
- 'fann_set_cascade_weight_multiplier',
- 'fann_set_error_log',
- 'fann_set_input_scaling_params',
- 'fann_set_learning_momentum',
- 'fann_set_learning_rate',
- 'fann_set_output_scaling_params',
- 'fann_set_quickprop_decay',
- 'fann_set_quickprop_mu',
- 'fann_set_rprop_decrease_factor',
- 'fann_set_rprop_delta_max',
- 'fann_set_rprop_delta_min',
- 'fann_set_rprop_delta_zero',
- 'fann_set_rprop_increase_factor',
- 'fann_set_sarprop_step_error_shift',
- 'fann_set_sarprop_step_error_threshold_factor',
- 'fann_set_sarprop_temperature',
- 'fann_set_sarprop_weight_decay_shift',
- 'fann_set_scaling_params',
- 'fann_set_train_error_function',
- 'fann_set_train_stop_function',
- 'fann_set_training_algorithm',
- 'fann_set_weight_array',
- 'fann_set_weight',
- 'fann_shuffle_train_data',
- 'fann_subset_train_data',
- 'fann_test_data',
- 'fann_test',
- 'fann_train_epoch',
- 'fann_train_on_data',
- 'fann_train_on_file',
- 'fann_train'),
- 'Fileinfo': ('finfo_buffer',
- 'finfo_close',
- 'finfo_file',
- 'finfo_open',
- 'finfo_set_flags',
- 'mime_content_type'),
- 'Filesystem': ('basename',
- 'chgrp',
- 'chmod',
- 'chown',
- 'clearstatcache',
- 'copy',
- 'dirname',
- 'disk_free_space',
- 'disk_total_space',
- 'diskfreespace',
- 'fclose',
- 'feof',
- 'fflush',
- 'fgetc',
- 'fgetcsv',
- 'fgets',
- 'fgetss',
- 'file_exists',
- 'file_get_contents',
- 'file_put_contents',
- 'file',
- 'fileatime',
- 'filectime',
- 'filegroup',
- 'fileinode',
- 'filemtime',
- 'fileowner',
- 'fileperms',
- 'filesize',
- 'filetype',
- 'flock',
- 'fnmatch',
- 'fopen',
- 'fpassthru',
- 'fputcsv',
- 'fputs',
- 'fread',
- 'fscanf',
- 'fseek',
- 'fstat',
- 'ftell',
- 'ftruncate',
- 'fwrite',
- 'glob',
- 'is_dir',
- 'is_executable',
- 'is_file',
- 'is_link',
- 'is_readable',
- 'is_uploaded_file',
- 'is_writable',
- 'is_writeable',
- 'lchgrp',
- 'lchown',
- 'link',
- 'linkinfo',
- 'lstat',
- 'mkdir',
- 'move_uploaded_file',
- 'parse_ini_file',
- 'parse_ini_string',
- 'pathinfo',
- 'pclose',
- 'popen',
- 'readfile',
- 'readlink',
- 'realpath_cache_get',
- 'realpath_cache_size',
- 'realpath',
- 'rename',
- 'rewind',
- 'rmdir',
- 'set_file_buffer',
- 'stat',
- 'symlink',
- 'tempnam',
- 'tmpfile',
- 'touch',
- 'umask',
- 'unlink'),
- 'Filter': ('filter_has_var',
- 'filter_id',
- 'filter_input_array',
- 'filter_input',
- 'filter_list',
- 'filter_var_array',
- 'filter_var'),
- 'Firebird/InterBase': ('ibase_add_user',
- 'ibase_affected_rows',
- 'ibase_backup',
- 'ibase_blob_add',
- 'ibase_blob_cancel',
- 'ibase_blob_close',
- 'ibase_blob_create',
- 'ibase_blob_echo',
- 'ibase_blob_get',
- 'ibase_blob_import',
- 'ibase_blob_info',
- 'ibase_blob_open',
- 'ibase_close',
- 'ibase_commit_ret',
- 'ibase_commit',
- 'ibase_connect',
- 'ibase_db_info',
- 'ibase_delete_user',
- 'ibase_drop_db',
- 'ibase_errcode',
- 'ibase_errmsg',
- 'ibase_execute',
- 'ibase_fetch_assoc',
- 'ibase_fetch_object',
- 'ibase_fetch_row',
- 'ibase_field_info',
- 'ibase_free_event_handler',
- 'ibase_free_query',
- 'ibase_free_result',
- 'ibase_gen_id',
- 'ibase_maintain_db',
- 'ibase_modify_user',
- 'ibase_name_result',
- 'ibase_num_fields',
- 'ibase_num_params',
- 'ibase_param_info',
- 'ibase_pconnect',
- 'ibase_prepare',
- 'ibase_query',
- 'ibase_restore',
- 'ibase_rollback_ret',
- 'ibase_rollback',
- 'ibase_server_info',
- 'ibase_service_attach',
- 'ibase_service_detach',
- 'ibase_set_event_handler',
- 'ibase_trans',
- 'ibase_wait_event'),
- 'FriBiDi': ('fribidi_log2vis',),
- 'FrontBase': ('fbsql_affected_rows',
- 'fbsql_autocommit',
- 'fbsql_blob_size',
- 'fbsql_change_user',
- 'fbsql_clob_size',
- 'fbsql_close',
- 'fbsql_commit',
- 'fbsql_connect',
- 'fbsql_create_blob',
- 'fbsql_create_clob',
- 'fbsql_create_db',
- 'fbsql_data_seek',
- 'fbsql_database_password',
- 'fbsql_database',
- 'fbsql_db_query',
- 'fbsql_db_status',
- 'fbsql_drop_db',
- 'fbsql_errno',
- 'fbsql_error',
- 'fbsql_fetch_array',
- 'fbsql_fetch_assoc',
- 'fbsql_fetch_field',
- 'fbsql_fetch_lengths',
- 'fbsql_fetch_object',
- 'fbsql_fetch_row',
- 'fbsql_field_flags',
- 'fbsql_field_len',
- 'fbsql_field_name',
- 'fbsql_field_seek',
- 'fbsql_field_table',
- 'fbsql_field_type',
- 'fbsql_free_result',
- 'fbsql_get_autostart_info',
- 'fbsql_hostname',
- 'fbsql_insert_id',
- 'fbsql_list_dbs',
- 'fbsql_list_fields',
- 'fbsql_list_tables',
- 'fbsql_next_result',
- 'fbsql_num_fields',
- 'fbsql_num_rows',
- 'fbsql_password',
- 'fbsql_pconnect',
- 'fbsql_query',
- 'fbsql_read_blob',
- 'fbsql_read_clob',
- 'fbsql_result',
- 'fbsql_rollback',
- 'fbsql_rows_fetched',
- 'fbsql_select_db',
- 'fbsql_set_characterset',
- 'fbsql_set_lob_mode',
- 'fbsql_set_password',
- 'fbsql_set_transaction',
- 'fbsql_start_db',
- 'fbsql_stop_db',
- 'fbsql_table_name',
- 'fbsql_tablename',
- 'fbsql_username',
- 'fbsql_warnings'),
- 'Function handling': ('call_user_func_array',
- 'call_user_func',
- 'create_function',
- 'forward_static_call_array',
- 'forward_static_call',
- 'func_get_arg',
- 'func_get_args',
- 'func_num_args',
- 'function_exists',
- 'get_defined_functions',
- 'register_shutdown_function',
- 'register_tick_function',
- 'unregister_tick_function'),
- 'GD and Image': ('gd_info',
- 'getimagesize',
- 'getimagesizefromstring',
- 'image_type_to_extension',
- 'image_type_to_mime_type',
- 'image2wbmp',
- 'imageaffine',
- 'imageaffinematrixconcat',
- 'imageaffinematrixget',
- 'imagealphablending',
- 'imageantialias',
- 'imagearc',
- 'imagechar',
- 'imagecharup',
- 'imagecolorallocate',
- 'imagecolorallocatealpha',
- 'imagecolorat',
- 'imagecolorclosest',
- 'imagecolorclosestalpha',
- 'imagecolorclosesthwb',
- 'imagecolordeallocate',
- 'imagecolorexact',
- 'imagecolorexactalpha',
- 'imagecolormatch',
- 'imagecolorresolve',
- 'imagecolorresolvealpha',
- 'imagecolorset',
- 'imagecolorsforindex',
- 'imagecolorstotal',
- 'imagecolortransparent',
- 'imageconvolution',
- 'imagecopy',
- 'imagecopymerge',
- 'imagecopymergegray',
- 'imagecopyresampled',
- 'imagecopyresized',
- 'imagecreate',
- 'imagecreatefromgd2',
- 'imagecreatefromgd2part',
- 'imagecreatefromgd',
- 'imagecreatefromgif',
- 'imagecreatefromjpeg',
- 'imagecreatefrompng',
- 'imagecreatefromstring',
- 'imagecreatefromwbmp',
- 'imagecreatefromwebp',
- 'imagecreatefromxbm',
- 'imagecreatefromxpm',
- 'imagecreatetruecolor',
- 'imagecrop',
- 'imagecropauto',
- 'imagedashedline',
- 'imagedestroy',
- 'imageellipse',
- 'imagefill',
- 'imagefilledarc',
- 'imagefilledellipse',
- 'imagefilledpolygon',
- 'imagefilledrectangle',
- 'imagefilltoborder',
- 'imagefilter',
- 'imageflip',
- 'imagefontheight',
- 'imagefontwidth',
- 'imageftbbox',
- 'imagefttext',
- 'imagegammacorrect',
- 'imagegd2',
- 'imagegd',
- 'imagegif',
- 'imagegrabscreen',
- 'imagegrabwindow',
- 'imageinterlace',
- 'imageistruecolor',
- 'imagejpeg',
- 'imagelayereffect',
- 'imageline',
- 'imageloadfont',
- 'imagepalettecopy',
- 'imagepalettetotruecolor',
- 'imagepng',
- 'imagepolygon',
- 'imagepsbbox',
- 'imagepsencodefont',
- 'imagepsextendfont',
- 'imagepsfreefont',
- 'imagepsloadfont',
- 'imagepsslantfont',
- 'imagepstext',
- 'imagerectangle',
- 'imagerotate',
- 'imagesavealpha',
- 'imagescale',
- 'imagesetbrush',
- 'imagesetinterpolation',
- 'imagesetpixel',
- 'imagesetstyle',
- 'imagesetthickness',
- 'imagesettile',
- 'imagestring',
- 'imagestringup',
- 'imagesx',
- 'imagesy',
- 'imagetruecolortopalette',
- 'imagettfbbox',
- 'imagettftext',
- 'imagetypes',
- 'imagewbmp',
- 'imagewebp',
- 'imagexbm',
- 'iptcembed',
- 'iptcparse',
- 'jpeg2wbmp',
- 'png2wbmp'),
- 'GMP': ('gmp_abs',
- 'gmp_add',
- 'gmp_and',
- 'gmp_clrbit',
- 'gmp_cmp',
- 'gmp_com',
- 'gmp_div_q',
- 'gmp_div_qr',
- 'gmp_div_r',
- 'gmp_div',
- 'gmp_divexact',
- 'gmp_fact',
- 'gmp_gcd',
- 'gmp_gcdext',
- 'gmp_hamdist',
- 'gmp_init',
- 'gmp_intval',
- 'gmp_invert',
- 'gmp_jacobi',
- 'gmp_legendre',
- 'gmp_mod',
- 'gmp_mul',
- 'gmp_neg',
- 'gmp_nextprime',
- 'gmp_or',
- 'gmp_perfect_square',
- 'gmp_popcount',
- 'gmp_pow',
- 'gmp_powm',
- 'gmp_prob_prime',
- 'gmp_random',
- 'gmp_scan0',
- 'gmp_scan1',
- 'gmp_setbit',
- 'gmp_sign',
- 'gmp_sqrt',
- 'gmp_sqrtrem',
- 'gmp_strval',
- 'gmp_sub',
- 'gmp_testbit',
- 'gmp_xor'),
- 'GeoIP': ('geoip_asnum_by_name',
- 'geoip_continent_code_by_name',
- 'geoip_country_code_by_name',
- 'geoip_country_code3_by_name',
- 'geoip_country_name_by_name',
- 'geoip_database_info',
- 'geoip_db_avail',
- 'geoip_db_filename',
- 'geoip_db_get_all_info',
- 'geoip_domain_by_name',
- 'geoip_id_by_name',
- 'geoip_isp_by_name',
- 'geoip_netspeedcell_by_name',
- 'geoip_org_by_name',
- 'geoip_record_by_name',
- 'geoip_region_by_name',
- 'geoip_region_name_by_code',
- 'geoip_setup_custom_directory',
- 'geoip_time_zone_by_country_and_region'),
- 'Gettext': ('bind_textdomain_codeset',
- 'bindtextdomain',
- 'dcgettext',
- 'dcngettext',
- 'dgettext',
- 'dngettext',
- 'gettext',
- 'ngettext',
- 'textdomain'),
- 'GnuPG': ('gnupg_adddecryptkey',
- 'gnupg_addencryptkey',
- 'gnupg_addsignkey',
- 'gnupg_cleardecryptkeys',
- 'gnupg_clearencryptkeys',
- 'gnupg_clearsignkeys',
- 'gnupg_decrypt',
- 'gnupg_decryptverify',
- 'gnupg_encrypt',
- 'gnupg_encryptsign',
- 'gnupg_export',
- 'gnupg_geterror',
- 'gnupg_getprotocol',
- 'gnupg_import',
- 'gnupg_init',
- 'gnupg_keyinfo',
- 'gnupg_setarmor',
- 'gnupg_seterrormode',
- 'gnupg_setsignmode',
- 'gnupg_sign',
- 'gnupg_verify'),
- 'Gopher': ('gopher_parsedir',),
- 'Grapheme': ('grapheme_extract',
- 'grapheme_stripos',
- 'grapheme_stristr',
- 'grapheme_strlen',
- 'grapheme_strpos',
- 'grapheme_strripos',
- 'grapheme_strrpos',
- 'grapheme_strstr',
- 'grapheme_substr'),
- 'Gupnp': ('gupnp_context_get_host_ip',
- 'gupnp_context_get_port',
- 'gupnp_context_get_subscription_timeout',
- 'gupnp_context_host_path',
- 'gupnp_context_new',
- 'gupnp_context_set_subscription_timeout',
- 'gupnp_context_timeout_add',
- 'gupnp_context_unhost_path',
- 'gupnp_control_point_browse_start',
- 'gupnp_control_point_browse_stop',
- 'gupnp_control_point_callback_set',
- 'gupnp_control_point_new',
- 'gupnp_device_action_callback_set',
- 'gupnp_device_info_get_service',
- 'gupnp_device_info_get',
- 'gupnp_root_device_get_available',
- 'gupnp_root_device_get_relative_location',
- 'gupnp_root_device_new',
- 'gupnp_root_device_set_available',
- 'gupnp_root_device_start',
- 'gupnp_root_device_stop',
- 'gupnp_service_action_get',
- 'gupnp_service_action_return_error',
- 'gupnp_service_action_return',
- 'gupnp_service_action_set',
- 'gupnp_service_freeze_notify',
- 'gupnp_service_info_get_introspection',
- 'gupnp_service_info_get',
- 'gupnp_service_introspection_get_state_variable',
- 'gupnp_service_notify',
- 'gupnp_service_proxy_action_get',
- 'gupnp_service_proxy_action_set',
- 'gupnp_service_proxy_add_notify',
- 'gupnp_service_proxy_callback_set',
- 'gupnp_service_proxy_get_subscribed',
- 'gupnp_service_proxy_remove_notify',
- 'gupnp_service_proxy_set_subscribed',
- 'gupnp_service_thaw_notify'),
- 'HTTP': ('http_cache_etag',
- 'http_cache_last_modified',
- 'http_chunked_decode',
- 'http_deflate',
- 'http_inflate',
- 'http_build_cookie',
- 'http_date',
- 'http_get_request_body_stream',
- 'http_get_request_body',
- 'http_get_request_headers',
- 'http_match_etag',
- 'http_match_modified',
- 'http_match_request_header',
- 'http_support',
- 'http_negotiate_charset',
- 'http_negotiate_content_type',
- 'http_negotiate_language',
- 'ob_deflatehandler',
- 'ob_etaghandler',
- 'ob_inflatehandler',
- 'http_parse_cookie',
- 'http_parse_headers',
- 'http_parse_message',
- 'http_parse_params',
- 'http_persistent_handles_clean',
- 'http_persistent_handles_count',
- 'http_persistent_handles_ident',
- 'http_get',
- 'http_head',
- 'http_post_data',
- 'http_post_fields',
- 'http_put_data',
- 'http_put_file',
- 'http_put_stream',
- 'http_request_body_encode',
- 'http_request_method_exists',
- 'http_request_method_name',
- 'http_request_method_register',
- 'http_request_method_unregister',
- 'http_request',
- 'http_redirect',
- 'http_send_content_disposition',
- 'http_send_content_type',
- 'http_send_data',
- 'http_send_file',
- 'http_send_last_modified',
- 'http_send_status',
- 'http_send_stream',
- 'http_throttle',
- 'http_build_str',
- 'http_build_url'),
- 'Hash': ('hash_algos',
- 'hash_copy',
- 'hash_file',
- 'hash_final',
- 'hash_hmac_file',
- 'hash_hmac',
- 'hash_init',
- 'hash_pbkdf2',
- 'hash_update_file',
- 'hash_update_stream',
- 'hash_update',
- 'hash'),
- 'Hyperwave': ('hw_Array2Objrec',
- 'hw_changeobject',
- 'hw_Children',
- 'hw_ChildrenObj',
- 'hw_Close',
- 'hw_Connect',
- 'hw_connection_info',
- 'hw_cp',
- 'hw_Deleteobject',
- 'hw_DocByAnchor',
- 'hw_DocByAnchorObj',
- 'hw_Document_Attributes',
- 'hw_Document_BodyTag',
- 'hw_Document_Content',
- 'hw_Document_SetContent',
- 'hw_Document_Size',
- 'hw_dummy',
- 'hw_EditText',
- 'hw_Error',
- 'hw_ErrorMsg',
- 'hw_Free_Document',
- 'hw_GetAnchors',
- 'hw_GetAnchorsObj',
- 'hw_GetAndLock',
- 'hw_GetChildColl',
- 'hw_GetChildCollObj',
- 'hw_GetChildDocColl',
- 'hw_GetChildDocCollObj',
- 'hw_GetObject',
- 'hw_GetObjectByQuery',
- 'hw_GetObjectByQueryColl',
- 'hw_GetObjectByQueryCollObj',
- 'hw_GetObjectByQueryObj',
- 'hw_GetParents',
- 'hw_GetParentsObj',
- 'hw_getrellink',
- 'hw_GetRemote',
- 'hw_getremotechildren',
- 'hw_GetSrcByDestObj',
- 'hw_GetText',
- 'hw_getusername',
- 'hw_Identify',
- 'hw_InCollections',
- 'hw_Info',
- 'hw_InsColl',
- 'hw_InsDoc',
- 'hw_insertanchors',
- 'hw_InsertDocument',
- 'hw_InsertObject',
- 'hw_mapid',
- 'hw_Modifyobject',
- 'hw_mv',
- 'hw_New_Document',
- 'hw_objrec2array',
- 'hw_Output_Document',
- 'hw_pConnect',
- 'hw_PipeDocument',
- 'hw_Root',
- 'hw_setlinkroot',
- 'hw_stat',
- 'hw_Unlock',
- 'hw_Who'),
- 'Hyperwave API': ('hwapi_attribute_new',
- 'hwapi_content_new',
- 'hwapi_hgcsp',
- 'hwapi_object_new'),
- 'IBM DB2': ('db2_autocommit',
- 'db2_bind_param',
- 'db2_client_info',
- 'db2_close',
- 'db2_column_privileges',
- 'db2_columns',
- 'db2_commit',
- 'db2_conn_error',
- 'db2_conn_errormsg',
- 'db2_connect',
- 'db2_cursor_type',
- 'db2_escape_string',
- 'db2_exec',
- 'db2_execute',
- 'db2_fetch_array',
- 'db2_fetch_assoc',
- 'db2_fetch_both',
- 'db2_fetch_object',
- 'db2_fetch_row',
- 'db2_field_display_size',
- 'db2_field_name',
- 'db2_field_num',
- 'db2_field_precision',
- 'db2_field_scale',
- 'db2_field_type',
- 'db2_field_width',
- 'db2_foreign_keys',
- 'db2_free_result',
- 'db2_free_stmt',
- 'db2_get_option',
- 'db2_last_insert_id',
- 'db2_lob_read',
- 'db2_next_result',
- 'db2_num_fields',
- 'db2_num_rows',
- 'db2_pclose',
- 'db2_pconnect',
- 'db2_prepare',
- 'db2_primary_keys',
- 'db2_procedure_columns',
- 'db2_procedures',
- 'db2_result',
- 'db2_rollback',
- 'db2_server_info',
- 'db2_set_option',
- 'db2_special_columns',
- 'db2_statistics',
- 'db2_stmt_error',
- 'db2_stmt_errormsg',
- 'db2_table_privileges',
- 'db2_tables'),
- 'ID3': ('id3_get_frame_long_name',
- 'id3_get_frame_short_name',
- 'id3_get_genre_id',
- 'id3_get_genre_list',
- 'id3_get_genre_name',
- 'id3_get_tag',
- 'id3_get_version',
- 'id3_remove_tag',
- 'id3_set_tag'),
- 'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'),
- 'IIS': ('iis_add_server',
- 'iis_get_dir_security',
- 'iis_get_script_map',
- 'iis_get_server_by_comment',
- 'iis_get_server_by_path',
- 'iis_get_server_rights',
- 'iis_get_service_state',
- 'iis_remove_server',
- 'iis_set_app_settings',
- 'iis_set_dir_security',
- 'iis_set_script_map',
- 'iis_set_server_rights',
- 'iis_start_server',
- 'iis_start_service',
- 'iis_stop_server',
- 'iis_stop_service'),
- 'IMAP': ('imap_8bit',
- 'imap_alerts',
- 'imap_append',
- 'imap_base64',
- 'imap_binary',
- 'imap_body',
- 'imap_bodystruct',
- 'imap_check',
- 'imap_clearflag_full',
- 'imap_close',
- 'imap_create',
- 'imap_createmailbox',
- 'imap_delete',
- 'imap_deletemailbox',
- 'imap_errors',
- 'imap_expunge',
- 'imap_fetch_overview',
- 'imap_fetchbody',
- 'imap_fetchheader',
- 'imap_fetchmime',
- 'imap_fetchstructure',
- 'imap_fetchtext',
- 'imap_gc',
- 'imap_get_quota',
- 'imap_get_quotaroot',
- 'imap_getacl',
- 'imap_getmailboxes',
- 'imap_getsubscribed',
- 'imap_header',
- 'imap_headerinfo',
- 'imap_headers',
- 'imap_last_error',
- 'imap_list',
- 'imap_listmailbox',
- 'imap_listscan',
- 'imap_listsubscribed',
- 'imap_lsub',
- 'imap_mail_compose',
- 'imap_mail_copy',
- 'imap_mail_move',
- 'imap_mail',
- 'imap_mailboxmsginfo',
- 'imap_mime_header_decode',
- 'imap_msgno',
- 'imap_num_msg',
- 'imap_num_recent',
- 'imap_open',
- 'imap_ping',
- 'imap_qprint',
- 'imap_rename',
- 'imap_renamemailbox',
- 'imap_reopen',
- 'imap_rfc822_parse_adrlist',
- 'imap_rfc822_parse_headers',
- 'imap_rfc822_write_address',
- 'imap_savebody',
- 'imap_scan',
- 'imap_scanmailbox',
- 'imap_search',
- 'imap_set_quota',
- 'imap_setacl',
- 'imap_setflag_full',
- 'imap_sort',
- 'imap_status',
- 'imap_subscribe',
- 'imap_thread',
- 'imap_timeout',
- 'imap_uid',
- 'imap_undelete',
- 'imap_unsubscribe',
- 'imap_utf7_decode',
- 'imap_utf7_encode',
- 'imap_utf8'),
- 'Informix': ('ifx_affected_rows',
- 'ifx_blobinfile_mode',
- 'ifx_byteasvarchar',
- 'ifx_close',
- 'ifx_connect',
- 'ifx_copy_blob',
- 'ifx_create_blob',
- 'ifx_create_char',
- 'ifx_do',
- 'ifx_error',
- 'ifx_errormsg',
- 'ifx_fetch_row',
- 'ifx_fieldproperties',
- 'ifx_fieldtypes',
- 'ifx_free_blob',
- 'ifx_free_char',
- 'ifx_free_result',
- 'ifx_get_blob',
- 'ifx_get_char',
- 'ifx_getsqlca',
- 'ifx_htmltbl_result',
- 'ifx_nullformat',
- 'ifx_num_fields',
- 'ifx_num_rows',
- 'ifx_pconnect',
- 'ifx_prepare',
- 'ifx_query',
- 'ifx_textasvarchar',
- 'ifx_update_blob',
- 'ifx_update_char',
- 'ifxus_close_slob',
- 'ifxus_create_slob',
- 'ifxus_free_slob',
- 'ifxus_open_slob',
- 'ifxus_read_slob',
- 'ifxus_seek_slob',
- 'ifxus_tell_slob',
- 'ifxus_write_slob'),
- 'Ingres': ('ingres_autocommit_state',
- 'ingres_autocommit',
- 'ingres_charset',
- 'ingres_close',
- 'ingres_commit',
- 'ingres_connect',
- 'ingres_cursor',
- 'ingres_errno',
- 'ingres_error',
- 'ingres_errsqlstate',
- 'ingres_escape_string',
- 'ingres_execute',
- 'ingres_fetch_array',
- 'ingres_fetch_assoc',
- 'ingres_fetch_object',
- 'ingres_fetch_proc_return',
- 'ingres_fetch_row',
- 'ingres_field_length',
- 'ingres_field_name',
- 'ingres_field_nullable',
- 'ingres_field_precision',
- 'ingres_field_scale',
- 'ingres_field_type',
- 'ingres_free_result',
- 'ingres_next_error',
- 'ingres_num_fields',
- 'ingres_num_rows',
- 'ingres_pconnect',
- 'ingres_prepare',
- 'ingres_query',
- 'ingres_result_seek',
- 'ingres_rollback',
- 'ingres_set_environment',
- 'ingres_unbuffered_query'),
- 'Inotify': ('inotify_add_watch',
- 'inotify_init',
- 'inotify_queue_len',
- 'inotify_read',
- 'inotify_rm_watch'),
- 'JSON': ('json_decode',
- 'json_encode',
- 'json_last_error_msg',
- 'json_last_error'),
- 'Java': ('java_last_exception_clear', 'java_last_exception_get'),
- 'Judy': ('judy_type', 'judy_version'),
- 'KADM5': ('kadm5_chpass_principal',
- 'kadm5_create_principal',
- 'kadm5_delete_principal',
- 'kadm5_destroy',
- 'kadm5_flush',
- 'kadm5_get_policies',
- 'kadm5_get_principal',
- 'kadm5_get_principals',
- 'kadm5_init_with_password',
- 'kadm5_modify_principal'),
- 'LDAP': ('ldap_8859_to_t61',
- 'ldap_add',
- 'ldap_bind',
- 'ldap_close',
- 'ldap_compare',
- 'ldap_connect',
- 'ldap_control_paged_result_response',
- 'ldap_control_paged_result',
- 'ldap_count_entries',
- 'ldap_delete',
- 'ldap_dn2ufn',
- 'ldap_err2str',
- 'ldap_errno',
- 'ldap_error',
- 'ldap_explode_dn',
- 'ldap_first_attribute',
- 'ldap_first_entry',
- 'ldap_first_reference',
- 'ldap_free_result',
- 'ldap_get_attributes',
- 'ldap_get_dn',
- 'ldap_get_entries',
- 'ldap_get_option',
- 'ldap_get_values_len',
- 'ldap_get_values',
- 'ldap_list',
- 'ldap_mod_add',
- 'ldap_mod_del',
- 'ldap_mod_replace',
- 'ldap_modify',
- 'ldap_next_attribute',
- 'ldap_next_entry',
- 'ldap_next_reference',
- 'ldap_parse_reference',
- 'ldap_parse_result',
- 'ldap_read',
- 'ldap_rename',
- 'ldap_sasl_bind',
- 'ldap_search',
- 'ldap_set_option',
- 'ldap_set_rebind_proc',
- 'ldap_sort',
- 'ldap_start_tls',
- 'ldap_t61_to_8859',
- 'ldap_unbind'),
- 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
- 'Libevent': ('event_add',
- 'event_base_free',
- 'event_base_loop',
- 'event_base_loopbreak',
- 'event_base_loopexit',
- 'event_base_new',
- 'event_base_priority_init',
- 'event_base_set',
- 'event_buffer_base_set',
- 'event_buffer_disable',
- 'event_buffer_enable',
- 'event_buffer_fd_set',
- 'event_buffer_free',
- 'event_buffer_new',
- 'event_buffer_priority_set',
- 'event_buffer_read',
- 'event_buffer_set_callback',
- 'event_buffer_timeout_set',
- 'event_buffer_watermark_set',
- 'event_buffer_write',
- 'event_del',
- 'event_free',
- 'event_new',
- 'event_set'),
- 'Lotus Notes': ('notes_body',
- 'notes_copy_db',
- 'notes_create_db',
- 'notes_create_note',
- 'notes_drop_db',
- 'notes_find_note',
- 'notes_header_info',
- 'notes_list_msgs',
- 'notes_mark_read',
- 'notes_mark_unread',
- 'notes_nav_create',
- 'notes_search',
- 'notes_unread',
- 'notes_version'),
- 'MCVE': ('m_checkstatus',
- 'm_completeauthorizations',
- 'm_connect',
- 'm_connectionerror',
- 'm_deletetrans',
- 'm_destroyconn',
- 'm_destroyengine',
- 'm_getcell',
- 'm_getcellbynum',
- 'm_getcommadelimited',
- 'm_getheader',
- 'm_initconn',
- 'm_initengine',
- 'm_iscommadelimited',
- 'm_maxconntimeout',
- 'm_monitor',
- 'm_numcolumns',
- 'm_numrows',
- 'm_parsecommadelimited',
- 'm_responsekeys',
- 'm_responseparam',
- 'm_returnstatus',
- 'm_setblocking',
- 'm_setdropfile',
- 'm_setip',
- 'm_setssl_cafile',
- 'm_setssl_files',
- 'm_setssl',
- 'm_settimeout',
- 'm_sslcert_gen_hash',
- 'm_transactionssent',
- 'm_transinqueue',
- 'm_transkeyval',
- 'm_transnew',
- 'm_transsend',
- 'm_uwait',
- 'm_validateidentifier',
- 'm_verifyconnection',
- 'm_verifysslcert'),
- 'Mail': ('ezmlm_hash', 'mail'),
- 'Mailparse': ('mailparse_determine_best_xfer_encoding',
- 'mailparse_msg_create',
- 'mailparse_msg_extract_part_file',
- 'mailparse_msg_extract_part',
- 'mailparse_msg_extract_whole_part_file',
- 'mailparse_msg_free',
- 'mailparse_msg_get_part_data',
- 'mailparse_msg_get_part',
- 'mailparse_msg_get_structure',
- 'mailparse_msg_parse_file',
- 'mailparse_msg_parse',
- 'mailparse_rfc822_parse_addresses',
- 'mailparse_stream_encode',
- 'mailparse_uudecode_all'),
- 'Math': ('abs',
- 'acos',
- 'acosh',
- 'asin',
- 'asinh',
- 'atan2',
- 'atan',
- 'atanh',
- 'base_convert',
- 'bindec',
- 'ceil',
- 'cos',
- 'cosh',
- 'decbin',
- 'dechex',
- 'decoct',
- 'deg2rad',
- 'exp',
- 'expm1',
- 'floor',
- 'fmod',
- 'getrandmax',
- 'hexdec',
- 'hypot',
- 'is_finite',
- 'is_infinite',
- 'is_nan',
- 'lcg_value',
- 'log10',
- 'log1p',
- 'log',
- 'max',
- 'min',
- 'mt_getrandmax',
- 'mt_rand',
- 'mt_srand',
- 'octdec',
- 'pi',
- 'pow',
- 'rad2deg',
- 'rand',
- 'round',
- 'sin',
- 'sinh',
- 'sqrt',
- 'srand',
- 'tan',
- 'tanh'),
- 'MaxDB': ('maxdb_affected_rows',
- 'maxdb_autocommit',
- 'maxdb_bind_param',
- 'maxdb_bind_result',
- 'maxdb_change_user',
- 'maxdb_character_set_name',
- 'maxdb_client_encoding',
- 'maxdb_close_long_data',
- 'maxdb_close',
- 'maxdb_commit',
- 'maxdb_connect_errno',
- 'maxdb_connect_error',
- 'maxdb_connect',
- 'maxdb_data_seek',
- 'maxdb_debug',
- 'maxdb_disable_reads_from_master',
- 'maxdb_disable_rpl_parse',
- 'maxdb_dump_debug_info',
- 'maxdb_embedded_connect',
- 'maxdb_enable_reads_from_master',
- 'maxdb_enable_rpl_parse',
- 'maxdb_errno',
- 'maxdb_error',
- 'maxdb_escape_string',
- 'maxdb_execute',
- 'maxdb_fetch_array',
- 'maxdb_fetch_assoc',
- 'maxdb_fetch_field_direct',
- 'maxdb_fetch_field',
- 'maxdb_fetch_fields',
- 'maxdb_fetch_lengths',
- 'maxdb_fetch_object',
- 'maxdb_fetch_row',
- 'maxdb_fetch',
- 'maxdb_field_count',
- 'maxdb_field_seek',
- 'maxdb_field_tell',
- 'maxdb_free_result',
- 'maxdb_get_client_info',
- 'maxdb_get_client_version',
- 'maxdb_get_host_info',
- 'maxdb_get_metadata',
- 'maxdb_get_proto_info',
- 'maxdb_get_server_info',
- 'maxdb_get_server_version',
- 'maxdb_info',
- 'maxdb_init',
- 'maxdb_insert_id',
- 'maxdb_kill',
- 'maxdb_master_query',
- 'maxdb_more_results',
- 'maxdb_multi_query',
- 'maxdb_next_result',
- 'maxdb_num_fields',
- 'maxdb_num_rows',
- 'maxdb_options',
- 'maxdb_param_count',
- 'maxdb_ping',
- 'maxdb_prepare',
- 'maxdb_query',
- 'maxdb_real_connect',
- 'maxdb_real_escape_string',
- 'maxdb_real_query',
- 'maxdb_report',
- 'maxdb_rollback',
- 'maxdb_rpl_parse_enabled',
- 'maxdb_rpl_probe',
- 'maxdb_rpl_query_type',
- 'maxdb_select_db',
- 'maxdb_send_long_data',
- 'maxdb_send_query',
- 'maxdb_server_end',
- 'maxdb_server_init',
- 'maxdb_set_opt',
- 'maxdb_sqlstate',
- 'maxdb_ssl_set',
- 'maxdb_stat',
- 'maxdb_stmt_affected_rows',
- 'maxdb_stmt_bind_param',
- 'maxdb_stmt_bind_result',
- 'maxdb_stmt_close_long_data',
- 'maxdb_stmt_close',
- 'maxdb_stmt_data_seek',
- 'maxdb_stmt_errno',
- 'maxdb_stmt_error',
- 'maxdb_stmt_execute',
- 'maxdb_stmt_fetch',
- 'maxdb_stmt_free_result',
- 'maxdb_stmt_init',
- 'maxdb_stmt_num_rows',
- 'maxdb_stmt_param_count',
- 'maxdb_stmt_prepare',
- 'maxdb_stmt_reset',
- 'maxdb_stmt_result_metadata',
- 'maxdb_stmt_send_long_data',
- 'maxdb_stmt_sqlstate',
- 'maxdb_stmt_store_result',
- 'maxdb_store_result',
- 'maxdb_thread_id',
- 'maxdb_thread_safe',
- 'maxdb_use_result',
- 'maxdb_warning_count'),
- 'Mcrypt': ('mcrypt_cbc',
- 'mcrypt_cfb',
- 'mcrypt_create_iv',
- 'mcrypt_decrypt',
- 'mcrypt_ecb',
- 'mcrypt_enc_get_algorithms_name',
- 'mcrypt_enc_get_block_size',
- 'mcrypt_enc_get_iv_size',
- 'mcrypt_enc_get_key_size',
- 'mcrypt_enc_get_modes_name',
- 'mcrypt_enc_get_supported_key_sizes',
- 'mcrypt_enc_is_block_algorithm_mode',
- 'mcrypt_enc_is_block_algorithm',
- 'mcrypt_enc_is_block_mode',
- 'mcrypt_enc_self_test',
- 'mcrypt_encrypt',
- 'mcrypt_generic_deinit',
- 'mcrypt_generic_end',
- 'mcrypt_generic_init',
- 'mcrypt_generic',
- 'mcrypt_get_block_size',
- 'mcrypt_get_cipher_name',
- 'mcrypt_get_iv_size',
- 'mcrypt_get_key_size',
- 'mcrypt_list_algorithms',
- 'mcrypt_list_modes',
- 'mcrypt_module_close',
- 'mcrypt_module_get_algo_block_size',
- 'mcrypt_module_get_algo_key_size',
- 'mcrypt_module_get_supported_key_sizes',
- 'mcrypt_module_is_block_algorithm_mode',
- 'mcrypt_module_is_block_algorithm',
- 'mcrypt_module_is_block_mode',
- 'mcrypt_module_open',
- 'mcrypt_module_self_test',
- 'mcrypt_ofb',
- 'mdecrypt_generic'),
- 'Memcache': ('memcache_debug',),
- 'Mhash': ('mhash_count',
- 'mhash_get_block_size',
- 'mhash_get_hash_name',
- 'mhash_keygen_s2k',
- 'mhash'),
- 'Ming': ('ming_keypress',
- 'ming_setcubicthreshold',
- 'ming_setscale',
- 'ming_setswfcompression',
- 'ming_useconstants',
- 'ming_useswfversion'),
- 'Misc.': ('connection_aborted',
- 'connection_status',
- 'connection_timeout',
- 'constant',
- 'define',
- 'defined',
- 'die',
- 'eval',
- 'exit',
- 'get_browser',
- '__halt_compiler',
- 'highlight_file',
- 'highlight_string',
- 'ignore_user_abort',
- 'pack',
- 'php_check_syntax',
- 'php_strip_whitespace',
- 'show_source',
- 'sleep',
- 'sys_getloadavg',
- 'time_nanosleep',
- 'time_sleep_until',
- 'uniqid',
- 'unpack',
- 'usleep'),
- 'Mongo': ('bson_decode', 'bson_encode'),
- 'Msession': ('msession_connect',
- 'msession_count',
- 'msession_create',
- 'msession_destroy',
- 'msession_disconnect',
- 'msession_find',
- 'msession_get_array',
- 'msession_get_data',
- 'msession_get',
- 'msession_inc',
- 'msession_list',
- 'msession_listvar',
- 'msession_lock',
- 'msession_plugin',
- 'msession_randstr',
- 'msession_set_array',
- 'msession_set_data',
- 'msession_set',
- 'msession_timeout',
- 'msession_uniq',
- 'msession_unlock'),
- 'Mssql': ('mssql_bind',
- 'mssql_close',
- 'mssql_connect',
- 'mssql_data_seek',
- 'mssql_execute',
- 'mssql_fetch_array',
- 'mssql_fetch_assoc',
- 'mssql_fetch_batch',
- 'mssql_fetch_field',
- 'mssql_fetch_object',
- 'mssql_fetch_row',
- 'mssql_field_length',
- 'mssql_field_name',
- 'mssql_field_seek',
- 'mssql_field_type',
- 'mssql_free_result',
- 'mssql_free_statement',
- 'mssql_get_last_message',
- 'mssql_guid_string',
- 'mssql_init',
- 'mssql_min_error_severity',
- 'mssql_min_message_severity',
- 'mssql_next_result',
- 'mssql_num_fields',
- 'mssql_num_rows',
- 'mssql_pconnect',
- 'mssql_query',
- 'mssql_result',
- 'mssql_rows_affected',
- 'mssql_select_db'),
- 'Multibyte String': ('mb_check_encoding',
- 'mb_convert_case',
- 'mb_convert_encoding',
- 'mb_convert_kana',
- 'mb_convert_variables',
- 'mb_decode_mimeheader',
- 'mb_decode_numericentity',
- 'mb_detect_encoding',
- 'mb_detect_order',
- 'mb_encode_mimeheader',
- 'mb_encode_numericentity',
- 'mb_encoding_aliases',
- 'mb_ereg_match',
- 'mb_ereg_replace_callback',
- 'mb_ereg_replace',
- 'mb_ereg_search_getpos',
- 'mb_ereg_search_getregs',
- 'mb_ereg_search_init',
- 'mb_ereg_search_pos',
- 'mb_ereg_search_regs',
- 'mb_ereg_search_setpos',
- 'mb_ereg_search',
- 'mb_ereg',
- 'mb_eregi_replace',
- 'mb_eregi',
- 'mb_get_info',
- 'mb_http_input',
- 'mb_http_output',
- 'mb_internal_encoding',
- 'mb_language',
- 'mb_list_encodings',
- 'mb_output_handler',
- 'mb_parse_str',
- 'mb_preferred_mime_name',
- 'mb_regex_encoding',
- 'mb_regex_set_options',
- 'mb_send_mail',
- 'mb_split',
- 'mb_strcut',
- 'mb_strimwidth',
- 'mb_stripos',
- 'mb_stristr',
- 'mb_strlen',
- 'mb_strpos',
- 'mb_strrchr',
- 'mb_strrichr',
- 'mb_strripos',
- 'mb_strrpos',
- 'mb_strstr',
- 'mb_strtolower',
- 'mb_strtoupper',
- 'mb_strwidth',
- 'mb_substitute_character',
- 'mb_substr_count',
- 'mb_substr'),
- 'MySQL': ('mysql_affected_rows',
- 'mysql_client_encoding',
- 'mysql_close',
- 'mysql_connect',
- 'mysql_create_db',
- 'mysql_data_seek',
- 'mysql_db_name',
- 'mysql_db_query',
- 'mysql_drop_db',
- 'mysql_errno',
- 'mysql_error',
- 'mysql_escape_string',
- 'mysql_fetch_array',
- 'mysql_fetch_assoc',
- 'mysql_fetch_field',
- 'mysql_fetch_lengths',
- 'mysql_fetch_object',
- 'mysql_fetch_row',
- 'mysql_field_flags',
- 'mysql_field_len',
- 'mysql_field_name',
- 'mysql_field_seek',
- 'mysql_field_table',
- 'mysql_field_type',
- 'mysql_free_result',
- 'mysql_get_client_info',
- 'mysql_get_host_info',
- 'mysql_get_proto_info',
- 'mysql_get_server_info',
- 'mysql_info',
- 'mysql_insert_id',
- 'mysql_list_dbs',
- 'mysql_list_fields',
- 'mysql_list_processes',
- 'mysql_list_tables',
- 'mysql_num_fields',
- 'mysql_num_rows',
- 'mysql_pconnect',
- 'mysql_ping',
- 'mysql_query',
- 'mysql_real_escape_string',
- 'mysql_result',
- 'mysql_select_db',
- 'mysql_set_charset',
- 'mysql_stat',
- 'mysql_tablename',
- 'mysql_thread_id',
- 'mysql_unbuffered_query'),
- 'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'),
- 'Mysqlnd_ms': ('mysqlnd_ms_dump_servers',
- 'mysqlnd_ms_fabric_select_global',
- 'mysqlnd_ms_fabric_select_shard',
- 'mysqlnd_ms_get_last_gtid',
- 'mysqlnd_ms_get_last_used_connection',
- 'mysqlnd_ms_get_stats',
- 'mysqlnd_ms_match_wild',
- 'mysqlnd_ms_query_is_select',
- 'mysqlnd_ms_set_qos',
- 'mysqlnd_ms_set_user_pick_server'),
- 'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd',
- 'mysqlnd_uh_set_connection_proxy',
- 'mysqlnd_uh_set_statement_proxy'),
- 'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'),
- 'Ncurses': ('ncurses_addch',
- 'ncurses_addchnstr',
- 'ncurses_addchstr',
- 'ncurses_addnstr',
- 'ncurses_addstr',
- 'ncurses_assume_default_colors',
- 'ncurses_attroff',
- 'ncurses_attron',
- 'ncurses_attrset',
- 'ncurses_baudrate',
- 'ncurses_beep',
- 'ncurses_bkgd',
- 'ncurses_bkgdset',
- 'ncurses_border',
- 'ncurses_bottom_panel',
- 'ncurses_can_change_color',
- 'ncurses_cbreak',
- 'ncurses_clear',
- 'ncurses_clrtobot',
- 'ncurses_clrtoeol',
- 'ncurses_color_content',
- 'ncurses_color_set',
- 'ncurses_curs_set',
- 'ncurses_def_prog_mode',
- 'ncurses_def_shell_mode',
- 'ncurses_define_key',
- 'ncurses_del_panel',
- 'ncurses_delay_output',
- 'ncurses_delch',
- 'ncurses_deleteln',
- 'ncurses_delwin',
- 'ncurses_doupdate',
- 'ncurses_echo',
- 'ncurses_echochar',
- 'ncurses_end',
- 'ncurses_erase',
- 'ncurses_erasechar',
- 'ncurses_filter',
- 'ncurses_flash',
- 'ncurses_flushinp',
- 'ncurses_getch',
- 'ncurses_getmaxyx',
- 'ncurses_getmouse',
- 'ncurses_getyx',
- 'ncurses_halfdelay',
- 'ncurses_has_colors',
- 'ncurses_has_ic',
- 'ncurses_has_il',
- 'ncurses_has_key',
- 'ncurses_hide_panel',
- 'ncurses_hline',
- 'ncurses_inch',
- 'ncurses_init_color',
- 'ncurses_init_pair',
- 'ncurses_init',
- 'ncurses_insch',
- 'ncurses_insdelln',
- 'ncurses_insertln',
- 'ncurses_insstr',
- 'ncurses_instr',
- 'ncurses_isendwin',
- 'ncurses_keyok',
- 'ncurses_keypad',
- 'ncurses_killchar',
- 'ncurses_longname',
- 'ncurses_meta',
- 'ncurses_mouse_trafo',
- 'ncurses_mouseinterval',
- 'ncurses_mousemask',
- 'ncurses_move_panel',
- 'ncurses_move',
- 'ncurses_mvaddch',
- 'ncurses_mvaddchnstr',
- 'ncurses_mvaddchstr',
- 'ncurses_mvaddnstr',
- 'ncurses_mvaddstr',
- 'ncurses_mvcur',
- 'ncurses_mvdelch',
- 'ncurses_mvgetch',
- 'ncurses_mvhline',
- 'ncurses_mvinch',
- 'ncurses_mvvline',
- 'ncurses_mvwaddstr',
- 'ncurses_napms',
- 'ncurses_new_panel',
- 'ncurses_newpad',
- 'ncurses_newwin',
- 'ncurses_nl',
- 'ncurses_nocbreak',
- 'ncurses_noecho',
- 'ncurses_nonl',
- 'ncurses_noqiflush',
- 'ncurses_noraw',
- 'ncurses_pair_content',
- 'ncurses_panel_above',
- 'ncurses_panel_below',
- 'ncurses_panel_window',
- 'ncurses_pnoutrefresh',
- 'ncurses_prefresh',
- 'ncurses_putp',
- 'ncurses_qiflush',
- 'ncurses_raw',
- 'ncurses_refresh',
- 'ncurses_replace_panel',
- 'ncurses_reset_prog_mode',
- 'ncurses_reset_shell_mode',
- 'ncurses_resetty',
- 'ncurses_savetty',
- 'ncurses_scr_dump',
- 'ncurses_scr_init',
- 'ncurses_scr_restore',
- 'ncurses_scr_set',
- 'ncurses_scrl',
- 'ncurses_show_panel',
- 'ncurses_slk_attr',
- 'ncurses_slk_attroff',
- 'ncurses_slk_attron',
- 'ncurses_slk_attrset',
- 'ncurses_slk_clear',
- 'ncurses_slk_color',
- 'ncurses_slk_init',
- 'ncurses_slk_noutrefresh',
- 'ncurses_slk_refresh',
- 'ncurses_slk_restore',
- 'ncurses_slk_set',
- 'ncurses_slk_touch',
- 'ncurses_standend',
- 'ncurses_standout',
- 'ncurses_start_color',
- 'ncurses_termattrs',
- 'ncurses_termname',
- 'ncurses_timeout',
- 'ncurses_top_panel',
- 'ncurses_typeahead',
- 'ncurses_ungetch',
- 'ncurses_ungetmouse',
- 'ncurses_update_panels',
- 'ncurses_use_default_colors',
- 'ncurses_use_env',
- 'ncurses_use_extended_names',
- 'ncurses_vidattr',
- 'ncurses_vline',
- 'ncurses_waddch',
- 'ncurses_waddstr',
- 'ncurses_wattroff',
- 'ncurses_wattron',
- 'ncurses_wattrset',
- 'ncurses_wborder',
- 'ncurses_wclear',
- 'ncurses_wcolor_set',
- 'ncurses_werase',
- 'ncurses_wgetch',
- 'ncurses_whline',
- 'ncurses_wmouse_trafo',
- 'ncurses_wmove',
- 'ncurses_wnoutrefresh',
- 'ncurses_wrefresh',
- 'ncurses_wstandend',
- 'ncurses_wstandout',
- 'ncurses_wvline'),
- 'Network': ('checkdnsrr',
- 'closelog',
- 'define_syslog_variables',
- 'dns_check_record',
- 'dns_get_mx',
- 'dns_get_record',
- 'fsockopen',
- 'gethostbyaddr',
- 'gethostbyname',
- 'gethostbynamel',
- 'gethostname',
- 'getmxrr',
- 'getprotobyname',
- 'getprotobynumber',
- 'getservbyname',
- 'getservbyport',
- 'header_register_callback',
- 'header_remove',
- 'header',
- 'headers_list',
- 'headers_sent',
- 'http_response_code',
- 'inet_ntop',
- 'inet_pton',
- 'ip2long',
- 'long2ip',
- 'openlog',
- 'pfsockopen',
- 'setcookie',
- 'setrawcookie',
- 'socket_get_status',
- 'socket_set_blocking',
- 'socket_set_timeout',
- 'syslog'),
- 'Newt': ('newt_bell',
- 'newt_button_bar',
- 'newt_button',
- 'newt_centered_window',
- 'newt_checkbox_get_value',
- 'newt_checkbox_set_flags',
- 'newt_checkbox_set_value',
- 'newt_checkbox_tree_add_item',
- 'newt_checkbox_tree_find_item',
- 'newt_checkbox_tree_get_current',
- 'newt_checkbox_tree_get_entry_value',
- 'newt_checkbox_tree_get_multi_selection',
- 'newt_checkbox_tree_get_selection',
- 'newt_checkbox_tree_multi',
- 'newt_checkbox_tree_set_current',
- 'newt_checkbox_tree_set_entry_value',
- 'newt_checkbox_tree_set_entry',
- 'newt_checkbox_tree_set_width',
- 'newt_checkbox_tree',
- 'newt_checkbox',
- 'newt_clear_key_buffer',
- 'newt_cls',
- 'newt_compact_button',
- 'newt_component_add_callback',
- 'newt_component_takes_focus',
- 'newt_create_grid',
- 'newt_cursor_off',
- 'newt_cursor_on',
- 'newt_delay',
- 'newt_draw_form',
- 'newt_draw_root_text',
- 'newt_entry_get_value',
- 'newt_entry_set_filter',
- 'newt_entry_set_flags',
- 'newt_entry_set',
- 'newt_entry',
- 'newt_finished',
- 'newt_form_add_component',
- 'newt_form_add_components',
- 'newt_form_add_hot_key',
- 'newt_form_destroy',
- 'newt_form_get_current',
- 'newt_form_run',
- 'newt_form_set_background',
- 'newt_form_set_height',
- 'newt_form_set_size',
- 'newt_form_set_timer',
- 'newt_form_set_width',
- 'newt_form_watch_fd',
- 'newt_form',
- 'newt_get_screen_size',
- 'newt_grid_add_components_to_form',
- 'newt_grid_basic_window',
- 'newt_grid_free',
- 'newt_grid_get_size',
- 'newt_grid_h_close_stacked',
- 'newt_grid_h_stacked',
- 'newt_grid_place',
- 'newt_grid_set_field',
- 'newt_grid_simple_window',
- 'newt_grid_v_close_stacked',
- 'newt_grid_v_stacked',
- 'newt_grid_wrapped_window_at',
- 'newt_grid_wrapped_window',
- 'newt_init',
- 'newt_label_set_text',
- 'newt_label',
- 'newt_listbox_append_entry',
- 'newt_listbox_clear_selection',
- 'newt_listbox_clear',
- 'newt_listbox_delete_entry',
- 'newt_listbox_get_current',
- 'newt_listbox_get_selection',
- 'newt_listbox_insert_entry',
- 'newt_listbox_item_count',
- 'newt_listbox_select_item',
- 'newt_listbox_set_current_by_key',
- 'newt_listbox_set_current',
- 'newt_listbox_set_data',
- 'newt_listbox_set_entry',
- 'newt_listbox_set_width',
- 'newt_listbox',
- 'newt_listitem_get_data',
- 'newt_listitem_set',
- 'newt_listitem',
- 'newt_open_window',
- 'newt_pop_help_line',
- 'newt_pop_window',
- 'newt_push_help_line',
- 'newt_radio_get_current',
- 'newt_radiobutton',
- 'newt_redraw_help_line',
- 'newt_reflow_text',
- 'newt_refresh',
- 'newt_resize_screen',
- 'newt_resume',
- 'newt_run_form',
- 'newt_scale_set',
- 'newt_scale',
- 'newt_scrollbar_set',
- 'newt_set_help_callback',
- 'newt_set_suspend_callback',
- 'newt_suspend',
- 'newt_textbox_get_num_lines',
- 'newt_textbox_reflowed',
- 'newt_textbox_set_height',
- 'newt_textbox_set_text',
- 'newt_textbox',
- 'newt_vertical_scrollbar',
- 'newt_wait_for_key',
- 'newt_win_choice',
- 'newt_win_entries',
- 'newt_win_menu',
- 'newt_win_message',
- 'newt_win_messagev',
- 'newt_win_ternary'),
- 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
- 'OCI8': ('oci_bind_array_by_name',
- 'oci_bind_by_name',
- 'oci_cancel',
- 'oci_client_version',
- 'oci_close',
- 'oci_commit',
- 'oci_connect',
- 'oci_define_by_name',
- 'oci_error',
- 'oci_execute',
- 'oci_fetch_all',
- 'oci_fetch_array',
- 'oci_fetch_assoc',
- 'oci_fetch_object',
- 'oci_fetch_row',
- 'oci_fetch',
- 'oci_field_is_null',
- 'oci_field_name',
- 'oci_field_precision',
- 'oci_field_scale',
- 'oci_field_size',
- 'oci_field_type_raw',
- 'oci_field_type',
- 'oci_free_descriptor',
- 'oci_free_statement',
- 'oci_get_implicit_resultset',
- 'oci_internal_debug',
- 'oci_lob_copy',
- 'oci_lob_is_equal',
- 'oci_new_collection',
- 'oci_new_connect',
- 'oci_new_cursor',
- 'oci_new_descriptor',
- 'oci_num_fields',
- 'oci_num_rows',
- 'oci_parse',
- 'oci_password_change',
- 'oci_pconnect',
- 'oci_result',
- 'oci_rollback',
- 'oci_server_version',
- 'oci_set_action',
- 'oci_set_client_identifier',
- 'oci_set_client_info',
- 'oci_set_edition',
- 'oci_set_module_name',
- 'oci_set_prefetch',
- 'oci_statement_type'),
- 'ODBC': ('odbc_autocommit',
- 'odbc_binmode',
- 'odbc_close_all',
- 'odbc_close',
- 'odbc_columnprivileges',
- 'odbc_columns',
- 'odbc_commit',
- 'odbc_connect',
- 'odbc_cursor',
- 'odbc_data_source',
- 'odbc_do',
- 'odbc_error',
- 'odbc_errormsg',
- 'odbc_exec',
- 'odbc_execute',
- 'odbc_fetch_array',
- 'odbc_fetch_into',
- 'odbc_fetch_object',
- 'odbc_fetch_row',
- 'odbc_field_len',
- 'odbc_field_name',
- 'odbc_field_num',
- 'odbc_field_precision',
- 'odbc_field_scale',
- 'odbc_field_type',
- 'odbc_foreignkeys',
- 'odbc_free_result',
- 'odbc_gettypeinfo',
- 'odbc_longreadlen',
- 'odbc_next_result',
- 'odbc_num_fields',
- 'odbc_num_rows',
- 'odbc_pconnect',
- 'odbc_prepare',
- 'odbc_primarykeys',
- 'odbc_procedurecolumns',
- 'odbc_procedures',
- 'odbc_result_all',
- 'odbc_result',
- 'odbc_rollback',
- 'odbc_setoption',
- 'odbc_specialcolumns',
- 'odbc_statistics',
- 'odbc_tableprivileges',
- 'odbc_tables'),
- 'OPcache': ('opcache_compile_file',
- 'opcache_get_configuration',
- 'opcache_get_status',
- 'opcache_invalidate',
- 'opcache_reset'),
- 'Object Aggregation': ('aggregate_info',
- 'aggregate_methods_by_list',
- 'aggregate_methods_by_regexp',
- 'aggregate_methods',
- 'aggregate_properties_by_list',
- 'aggregate_properties_by_regexp',
- 'aggregate_properties',
- 'aggregate',
- 'aggregation_info',
- 'deaggregate'),
- 'OpenAL': ('openal_buffer_create',
- 'openal_buffer_data',
- 'openal_buffer_destroy',
- 'openal_buffer_get',
- 'openal_buffer_loadwav',
- 'openal_context_create',
- 'openal_context_current',
- 'openal_context_destroy',
- 'openal_context_process',
- 'openal_context_suspend',
- 'openal_device_close',
- 'openal_device_open',
- 'openal_listener_get',
- 'openal_listener_set',
- 'openal_source_create',
- 'openal_source_destroy',
- 'openal_source_get',
- 'openal_source_pause',
- 'openal_source_play',
- 'openal_source_rewind',
- 'openal_source_set',
- 'openal_source_stop',
- 'openal_stream'),
- 'OpenSSL': ('openssl_cipher_iv_length',
- 'openssl_csr_export_to_file',
- 'openssl_csr_export',
- 'openssl_csr_get_public_key',
- 'openssl_csr_get_subject',
- 'openssl_csr_new',
- 'openssl_csr_sign',
- 'openssl_decrypt',
- 'openssl_dh_compute_key',
- 'openssl_digest',
- 'openssl_encrypt',
- 'openssl_error_string',
- 'openssl_free_key',
- 'openssl_get_cipher_methods',
- 'openssl_get_md_methods',
- 'openssl_get_privatekey',
- 'openssl_get_publickey',
- 'openssl_open',
- 'openssl_pbkdf2',
- 'openssl_pkcs12_export_to_file',
- 'openssl_pkcs12_export',
- 'openssl_pkcs12_read',
- 'openssl_pkcs7_decrypt',
- 'openssl_pkcs7_encrypt',
- 'openssl_pkcs7_sign',
- 'openssl_pkcs7_verify',
- 'openssl_pkey_export_to_file',
- 'openssl_pkey_export',
- 'openssl_pkey_free',
- 'openssl_pkey_get_details',
- 'openssl_pkey_get_private',
- 'openssl_pkey_get_public',
- 'openssl_pkey_new',
- 'openssl_private_decrypt',
- 'openssl_private_encrypt',
- 'openssl_public_decrypt',
- 'openssl_public_encrypt',
- 'openssl_random_pseudo_bytes',
- 'openssl_seal',
- 'openssl_sign',
- 'openssl_spki_export_challenge',
- 'openssl_spki_export',
- 'openssl_spki_new',
- 'openssl_spki_verify',
- 'openssl_verify',
- 'openssl_x509_check_private_key',
- 'openssl_x509_checkpurpose',
- 'openssl_x509_export_to_file',
- 'openssl_x509_export',
- 'openssl_x509_free',
- 'openssl_x509_parse',
- 'openssl_x509_read'),
- 'Output Control': ('flush',
- 'ob_clean',
- 'ob_end_clean',
- 'ob_end_flush',
- 'ob_flush',
- 'ob_get_clean',
- 'ob_get_contents',
- 'ob_get_flush',
- 'ob_get_length',
- 'ob_get_level',
- 'ob_get_status',
- 'ob_gzhandler',
- 'ob_implicit_flush',
- 'ob_list_handlers',
- 'ob_start',
- 'output_add_rewrite_var',
- 'output_reset_rewrite_vars'),
- 'Ovrimos SQL': ('ovrimos_close',
- 'ovrimos_commit',
- 'ovrimos_connect',
- 'ovrimos_cursor',
- 'ovrimos_exec',
- 'ovrimos_execute',
- 'ovrimos_fetch_into',
- 'ovrimos_fetch_row',
- 'ovrimos_field_len',
- 'ovrimos_field_name',
- 'ovrimos_field_num',
- 'ovrimos_field_type',
- 'ovrimos_free_result',
- 'ovrimos_longreadlen',
- 'ovrimos_num_fields',
- 'ovrimos_num_rows',
- 'ovrimos_prepare',
- 'ovrimos_result_all',
- 'ovrimos_result',
- 'ovrimos_rollback'),
- 'PCNTL': ('pcntl_alarm',
- 'pcntl_errno',
- 'pcntl_exec',
- 'pcntl_fork',
- 'pcntl_get_last_error',
- 'pcntl_getpriority',
- 'pcntl_setpriority',
- 'pcntl_signal_dispatch',
- 'pcntl_signal',
- 'pcntl_sigprocmask',
- 'pcntl_sigtimedwait',
- 'pcntl_sigwaitinfo',
- 'pcntl_strerror',
- 'pcntl_wait',
- 'pcntl_waitpid',
- 'pcntl_wexitstatus',
- 'pcntl_wifexited',
- 'pcntl_wifsignaled',
- 'pcntl_wifstopped',
- 'pcntl_wstopsig',
- 'pcntl_wtermsig'),
- 'PCRE': ('preg_filter',
- 'preg_grep',
- 'preg_last_error',
- 'preg_match_all',
- 'preg_match',
- 'preg_quote',
- 'preg_replace_callback',
- 'preg_replace',
- 'preg_split'),
- 'PDF': ('PDF_activate_item',
- 'PDF_add_annotation',
- 'PDF_add_bookmark',
- 'PDF_add_launchlink',
- 'PDF_add_locallink',
- 'PDF_add_nameddest',
- 'PDF_add_note',
- 'PDF_add_outline',
- 'PDF_add_pdflink',
- 'PDF_add_table_cell',
- 'PDF_add_textflow',
- 'PDF_add_thumbnail',
- 'PDF_add_weblink',
- 'PDF_arc',
- 'PDF_arcn',
- 'PDF_attach_file',
- 'PDF_begin_document',
- 'PDF_begin_font',
- 'PDF_begin_glyph',
- 'PDF_begin_item',
- 'PDF_begin_layer',
- 'PDF_begin_page_ext',
- 'PDF_begin_page',
- 'PDF_begin_pattern',
- 'PDF_begin_template_ext',
- 'PDF_begin_template',
- 'PDF_circle',
- 'PDF_clip',
- 'PDF_close_image',
- 'PDF_close_pdi_page',
- 'PDF_close_pdi',
- 'PDF_close',
- 'PDF_closepath_fill_stroke',
- 'PDF_closepath_stroke',
- 'PDF_closepath',
- 'PDF_concat',
- 'PDF_continue_text',
- 'PDF_create_3dview',
- 'PDF_create_action',
- 'PDF_create_annotation',
- 'PDF_create_bookmark',
- 'PDF_create_field',
- 'PDF_create_fieldgroup',
- 'PDF_create_gstate',
- 'PDF_create_pvf',
- 'PDF_create_textflow',
- 'PDF_curveto',
- 'PDF_define_layer',
- 'PDF_delete_pvf',
- 'PDF_delete_table',
- 'PDF_delete_textflow',
- 'PDF_delete',
- 'PDF_encoding_set_char',
- 'PDF_end_document',
- 'PDF_end_font',
- 'PDF_end_glyph',
- 'PDF_end_item',
- 'PDF_end_layer',
- 'PDF_end_page_ext',
- 'PDF_end_page',
- 'PDF_end_pattern',
- 'PDF_end_template',
- 'PDF_endpath',
- 'PDF_fill_imageblock',
- 'PDF_fill_pdfblock',
- 'PDF_fill_stroke',
- 'PDF_fill_textblock',
- 'PDF_fill',
- 'PDF_findfont',
- 'PDF_fit_image',
- 'PDF_fit_pdi_page',
- 'PDF_fit_table',
- 'PDF_fit_textflow',
- 'PDF_fit_textline',
- 'PDF_get_apiname',
- 'PDF_get_buffer',
- 'PDF_get_errmsg',
- 'PDF_get_errnum',
- 'PDF_get_font',
- 'PDF_get_fontname',
- 'PDF_get_fontsize',
- 'PDF_get_image_height',
- 'PDF_get_image_width',
- 'PDF_get_majorversion',
- 'PDF_get_minorversion',
- 'PDF_get_parameter',
- 'PDF_get_pdi_parameter',
- 'PDF_get_pdi_value',
- 'PDF_get_value',
- 'PDF_info_font',
- 'PDF_info_matchbox',
- 'PDF_info_table',
- 'PDF_info_textflow',
- 'PDF_info_textline',
- 'PDF_initgraphics',
- 'PDF_lineto',
- 'PDF_load_3ddata',
- 'PDF_load_font',
- 'PDF_load_iccprofile',
- 'PDF_load_image',
- 'PDF_makespotcolor',
- 'PDF_moveto',
- 'PDF_new',
- 'PDF_open_ccitt',
- 'PDF_open_file',
- 'PDF_open_gif',
- 'PDF_open_image_file',
- 'PDF_open_image',
- 'PDF_open_jpeg',
- 'PDF_open_memory_image',
- 'PDF_open_pdi_document',
- 'PDF_open_pdi_page',
- 'PDF_open_pdi',
- 'PDF_open_tiff',
- 'PDF_pcos_get_number',
- 'PDF_pcos_get_stream',
- 'PDF_pcos_get_string',
- 'PDF_place_image',
- 'PDF_place_pdi_page',
- 'PDF_process_pdi',
- 'PDF_rect',
- 'PDF_restore',
- 'PDF_resume_page',
- 'PDF_rotate',
- 'PDF_save',
- 'PDF_scale',
- 'PDF_set_border_color',
- 'PDF_set_border_dash',
- 'PDF_set_border_style',
- 'PDF_set_char_spacing',
- 'PDF_set_duration',
- 'PDF_set_gstate',
- 'PDF_set_horiz_scaling',
- 'PDF_set_info_author',
- 'PDF_set_info_creator',
- 'PDF_set_info_keywords',
- 'PDF_set_info_subject',
- 'PDF_set_info_title',
- 'PDF_set_info',
- 'PDF_set_layer_dependency',
- 'PDF_set_leading',
- 'PDF_set_parameter',
- 'PDF_set_text_matrix',
- 'PDF_set_text_pos',
- 'PDF_set_text_rendering',
- 'PDF_set_text_rise',
- 'PDF_set_value',
- 'PDF_set_word_spacing',
- 'PDF_setcolor',
- 'PDF_setdash',
- 'PDF_setdashpattern',
- 'PDF_setflat',
- 'PDF_setfont',
- 'PDF_setgray_fill',
- 'PDF_setgray_stroke',
- 'PDF_setgray',
- 'PDF_setlinecap',
- 'PDF_setlinejoin',
- 'PDF_setlinewidth',
- 'PDF_setmatrix',
- 'PDF_setmiterlimit',
- 'PDF_setpolydash',
- 'PDF_setrgbcolor_fill',
- 'PDF_setrgbcolor_stroke',
- 'PDF_setrgbcolor',
- 'PDF_shading_pattern',
- 'PDF_shading',
- 'PDF_shfill',
- 'PDF_show_boxed',
- 'PDF_show_xy',
- 'PDF_show',
- 'PDF_skew',
- 'PDF_stringwidth',
- 'PDF_stroke',
- 'PDF_suspend_page',
- 'PDF_translate',
- 'PDF_utf16_to_utf8',
- 'PDF_utf32_to_utf16',
- 'PDF_utf8_to_utf16'),
- 'PHP Options/Info': ('assert_options',
- 'assert',
- 'cli_get_process_title',
- 'cli_set_process_title',
- 'dl',
- 'extension_loaded',
- 'gc_collect_cycles',
- 'gc_disable',
- 'gc_enable',
- 'gc_enabled',
- 'get_cfg_var',
- 'get_current_user',
- 'get_defined_constants',
- 'get_extension_funcs',
- 'get_include_path',
- 'get_included_files',
- 'get_loaded_extensions',
- 'get_magic_quotes_gpc',
- 'get_magic_quotes_runtime',
- 'get_required_files',
- 'getenv',
- 'getlastmod',
- 'getmygid',
- 'getmyinode',
- 'getmypid',
- 'getmyuid',
- 'getopt',
- 'getrusage',
- 'ini_alter',
- 'ini_get_all',
- 'ini_get',
- 'ini_restore',
- 'ini_set',
- 'magic_quotes_runtime',
- 'memory_get_peak_usage',
- 'memory_get_usage',
- 'php_ini_loaded_file',
- 'php_ini_scanned_files',
- 'php_logo_guid',
- 'php_sapi_name',
- 'php_uname',
- 'phpcredits',
- 'phpinfo',
- 'phpversion',
- 'putenv',
- 'restore_include_path',
- 'set_include_path',
- 'set_magic_quotes_runtime',
- 'set_time_limit',
- 'sys_get_temp_dir',
- 'version_compare',
- 'zend_logo_guid',
- 'zend_thread_id',
- 'zend_version'),
- 'POSIX': ('posix_access',
- 'posix_ctermid',
- 'posix_errno',
- 'posix_get_last_error',
- 'posix_getcwd',
- 'posix_getegid',
- 'posix_geteuid',
- 'posix_getgid',
- 'posix_getgrgid',
- 'posix_getgrnam',
- 'posix_getgroups',
- 'posix_getlogin',
- 'posix_getpgid',
- 'posix_getpgrp',
- 'posix_getpid',
- 'posix_getppid',
- 'posix_getpwnam',
- 'posix_getpwuid',
- 'posix_getrlimit',
- 'posix_getsid',
- 'posix_getuid',
- 'posix_initgroups',
- 'posix_isatty',
- 'posix_kill',
- 'posix_mkfifo',
- 'posix_mknod',
- 'posix_setegid',
- 'posix_seteuid',
- 'posix_setgid',
- 'posix_setpgid',
- 'posix_setsid',
- 'posix_setuid',
- 'posix_strerror',
- 'posix_times',
- 'posix_ttyname',
- 'posix_uname'),
- 'POSIX Regex': ('ereg_replace',
- 'ereg',
- 'eregi_replace',
- 'eregi',
- 'split',
- 'spliti',
- 'sql_regcase'),
- 'PS': ('ps_add_bookmark',
- 'ps_add_launchlink',
- 'ps_add_locallink',
- 'ps_add_note',
- 'ps_add_pdflink',
- 'ps_add_weblink',
- 'ps_arc',
- 'ps_arcn',
- 'ps_begin_page',
- 'ps_begin_pattern',
- 'ps_begin_template',
- 'ps_circle',
- 'ps_clip',
- 'ps_close_image',
- 'ps_close',
- 'ps_closepath_stroke',
- 'ps_closepath',
- 'ps_continue_text',
- 'ps_curveto',
- 'ps_delete',
- 'ps_end_page',
- 'ps_end_pattern',
- 'ps_end_template',
- 'ps_fill_stroke',
- 'ps_fill',
- 'ps_findfont',
- 'ps_get_buffer',
- 'ps_get_parameter',
- 'ps_get_value',
- 'ps_hyphenate',
- 'ps_include_file',
- 'ps_lineto',
- 'ps_makespotcolor',
- 'ps_moveto',
- 'ps_new',
- 'ps_open_file',
- 'ps_open_image_file',
- 'ps_open_image',
- 'ps_open_memory_image',
- 'ps_place_image',
- 'ps_rect',
- 'ps_restore',
- 'ps_rotate',
- 'ps_save',
- 'ps_scale',
- 'ps_set_border_color',
- 'ps_set_border_dash',
- 'ps_set_border_style',
- 'ps_set_info',
- 'ps_set_parameter',
- 'ps_set_text_pos',
- 'ps_set_value',
- 'ps_setcolor',
- 'ps_setdash',
- 'ps_setflat',
- 'ps_setfont',
- 'ps_setgray',
- 'ps_setlinecap',
- 'ps_setlinejoin',
- 'ps_setlinewidth',
- 'ps_setmiterlimit',
- 'ps_setoverprintmode',
- 'ps_setpolydash',
- 'ps_shading_pattern',
- 'ps_shading',
- 'ps_shfill',
- 'ps_show_boxed',
- 'ps_show_xy2',
- 'ps_show_xy',
- 'ps_show2',
- 'ps_show',
- 'ps_string_geometry',
- 'ps_stringwidth',
- 'ps_stroke',
- 'ps_symbol_name',
- 'ps_symbol_width',
- 'ps_symbol',
- 'ps_translate'),
- 'Paradox': ('px_close',
- 'px_create_fp',
- 'px_date2string',
- 'px_delete_record',
- 'px_delete',
- 'px_get_field',
- 'px_get_info',
- 'px_get_parameter',
- 'px_get_record',
- 'px_get_schema',
- 'px_get_value',
- 'px_insert_record',
- 'px_new',
- 'px_numfields',
- 'px_numrecords',
- 'px_open_fp',
- 'px_put_record',
- 'px_retrieve_record',
- 'px_set_blob_file',
- 'px_set_parameter',
- 'px_set_tablename',
- 'px_set_targetencoding',
- 'px_set_value',
- 'px_timestamp2string',
- 'px_update_record'),
- 'Parsekit': ('parsekit_compile_file',
- 'parsekit_compile_string',
- 'parsekit_func_arginfo'),
- 'Password Hashing': ('password_get_info',
- 'password_hash',
- 'password_needs_rehash',
- 'password_verify'),
- 'PostgreSQL': ('pg_affected_rows',
- 'pg_cancel_query',
- 'pg_client_encoding',
- 'pg_close',
- 'pg_connect',
- 'pg_connection_busy',
- 'pg_connection_reset',
- 'pg_connection_status',
- 'pg_convert',
- 'pg_copy_from',
- 'pg_copy_to',
- 'pg_dbname',
- 'pg_delete',
- 'pg_end_copy',
- 'pg_escape_bytea',
- 'pg_escape_identifier',
- 'pg_escape_literal',
- 'pg_escape_string',
- 'pg_execute',
- 'pg_fetch_all_columns',
- 'pg_fetch_all',
- 'pg_fetch_array',
- 'pg_fetch_assoc',
- 'pg_fetch_object',
- 'pg_fetch_result',
- 'pg_fetch_row',
- 'pg_field_is_null',
- 'pg_field_name',
- 'pg_field_num',
- 'pg_field_prtlen',
- 'pg_field_size',
- 'pg_field_table',
- 'pg_field_type_oid',
- 'pg_field_type',
- 'pg_free_result',
- 'pg_get_notify',
- 'pg_get_pid',
- 'pg_get_result',
- 'pg_host',
- 'pg_insert',
- 'pg_last_error',
- 'pg_last_notice',
- 'pg_last_oid',
- 'pg_lo_close',
- 'pg_lo_create',
- 'pg_lo_export',
- 'pg_lo_import',
- 'pg_lo_open',
- 'pg_lo_read_all',
- 'pg_lo_read',
- 'pg_lo_seek',
- 'pg_lo_tell',
- 'pg_lo_truncate',
- 'pg_lo_unlink',
- 'pg_lo_write',
- 'pg_meta_data',
- 'pg_num_fields',
- 'pg_num_rows',
- 'pg_options',
- 'pg_parameter_status',
- 'pg_pconnect',
- 'pg_ping',
- 'pg_port',
- 'pg_prepare',
- 'pg_put_line',
- 'pg_query_params',
- 'pg_query',
- 'pg_result_error_field',
- 'pg_result_error',
- 'pg_result_seek',
- 'pg_result_status',
- 'pg_select',
- 'pg_send_execute',
- 'pg_send_prepare',
- 'pg_send_query_params',
- 'pg_send_query',
- 'pg_set_client_encoding',
- 'pg_set_error_verbosity',
- 'pg_trace',
- 'pg_transaction_status',
- 'pg_tty',
- 'pg_unescape_bytea',
- 'pg_untrace',
- 'pg_update',
- 'pg_version'),
- 'Printer': ('printer_abort',
- 'printer_close',
- 'printer_create_brush',
- 'printer_create_dc',
- 'printer_create_font',
- 'printer_create_pen',
- 'printer_delete_brush',
- 'printer_delete_dc',
- 'printer_delete_font',
- 'printer_delete_pen',
- 'printer_draw_bmp',
- 'printer_draw_chord',
- 'printer_draw_elipse',
- 'printer_draw_line',
- 'printer_draw_pie',
- 'printer_draw_rectangle',
- 'printer_draw_roundrect',
- 'printer_draw_text',
- 'printer_end_doc',
- 'printer_end_page',
- 'printer_get_option',
- 'printer_list',
- 'printer_logical_fontheight',
- 'printer_open',
- 'printer_select_brush',
- 'printer_select_font',
- 'printer_select_pen',
- 'printer_set_option',
- 'printer_start_doc',
- 'printer_start_page',
- 'printer_write'),
- 'Proctitle': ('setproctitle', 'setthreadtitle'),
- 'Program execution': ('escapeshellarg',
- 'escapeshellcmd',
- 'exec',
- 'passthru',
- 'proc_close',
- 'proc_get_status',
- 'proc_nice',
- 'proc_open',
- 'proc_terminate',
- 'shell_exec',
- 'system'),
- 'Pspell': ('pspell_add_to_personal',
- 'pspell_add_to_session',
- 'pspell_check',
- 'pspell_clear_session',
- 'pspell_config_create',
- 'pspell_config_data_dir',
- 'pspell_config_dict_dir',
- 'pspell_config_ignore',
- 'pspell_config_mode',
- 'pspell_config_personal',
- 'pspell_config_repl',
- 'pspell_config_runtogether',
- 'pspell_config_save_repl',
- 'pspell_new_config',
- 'pspell_new_personal',
- 'pspell_new',
- 'pspell_save_wordlist',
- 'pspell_store_replacement',
- 'pspell_suggest'),
- 'RPM Reader': ('rpm_close',
- 'rpm_get_tag',
- 'rpm_is_valid',
- 'rpm_open',
- 'rpm_version'),
- 'RRD': ('rrd_create',
- 'rrd_error',
- 'rrd_fetch',
- 'rrd_first',
- 'rrd_graph',
- 'rrd_info',
- 'rrd_last',
- 'rrd_lastupdate',
- 'rrd_restore',
- 'rrd_tune',
- 'rrd_update',
- 'rrd_version',
- 'rrd_xport',
- 'rrdc_disconnect'),
- 'Radius': ('radius_acct_open',
- 'radius_add_server',
- 'radius_auth_open',
- 'radius_close',
- 'radius_config',
- 'radius_create_request',
- 'radius_cvt_addr',
- 'radius_cvt_int',
- 'radius_cvt_string',
- 'radius_demangle_mppe_key',
- 'radius_demangle',
- 'radius_get_attr',
- 'radius_get_tagged_attr_data',
- 'radius_get_tagged_attr_tag',
- 'radius_get_vendor_attr',
- 'radius_put_addr',
- 'radius_put_attr',
- 'radius_put_int',
- 'radius_put_string',
- 'radius_put_vendor_addr',
- 'radius_put_vendor_attr',
- 'radius_put_vendor_int',
- 'radius_put_vendor_string',
- 'radius_request_authenticator',
- 'radius_salt_encrypt_attr',
- 'radius_send_request',
- 'radius_server_secret',
- 'radius_strerror'),
- 'Rar': ('rar_wrapper_cache_stats',),
- 'Readline': ('readline_add_history',
- 'readline_callback_handler_install',
- 'readline_callback_handler_remove',
- 'readline_callback_read_char',
- 'readline_clear_history',
- 'readline_completion_function',
- 'readline_info',
- 'readline_list_history',
- 'readline_on_new_line',
- 'readline_read_history',
- 'readline_redisplay',
- 'readline_write_history',
- 'readline'),
- 'Recode': ('recode_file', 'recode_string', 'recode'),
- 'SNMP': ('snmp_get_quick_print',
- 'snmp_get_valueretrieval',
- 'snmp_read_mib',
- 'snmp_set_enum_print',
- 'snmp_set_oid_numeric_print',
- 'snmp_set_oid_output_format',
- 'snmp_set_quick_print',
- 'snmp_set_valueretrieval',
- 'snmp2_get',
- 'snmp2_getnext',
- 'snmp2_real_walk',
- 'snmp2_set',
- 'snmp2_walk',
- 'snmp3_get',
- 'snmp3_getnext',
- 'snmp3_real_walk',
- 'snmp3_set',
- 'snmp3_walk',
- 'snmpget',
- 'snmpgetnext',
- 'snmprealwalk',
- 'snmpset',
- 'snmpwalk',
- 'snmpwalkoid'),
- 'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
- 'SPL': ('class_implements',
- 'class_parents',
- 'class_uses',
- 'iterator_apply',
- 'iterator_count',
- 'iterator_to_array',
- 'spl_autoload_call',
- 'spl_autoload_extensions',
- 'spl_autoload_functions',
- 'spl_autoload_register',
- 'spl_autoload_unregister',
- 'spl_autoload',
- 'spl_classes',
- 'spl_object_hash'),
- 'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'),
- 'SQLSRV': ('sqlsrv_begin_transaction',
- 'sqlsrv_cancel',
- 'sqlsrv_client_info',
- 'sqlsrv_close',
- 'sqlsrv_commit',
- 'sqlsrv_configure',
- 'sqlsrv_connect',
- 'sqlsrv_errors',
- 'sqlsrv_execute',
- 'sqlsrv_fetch_array',
- 'sqlsrv_fetch_object',
- 'sqlsrv_fetch',
- 'sqlsrv_field_metadata',
- 'sqlsrv_free_stmt',
- 'sqlsrv_get_config',
- 'sqlsrv_get_field',
- 'sqlsrv_has_rows',
- 'sqlsrv_next_result',
- 'sqlsrv_num_fields',
- 'sqlsrv_num_rows',
- 'sqlsrv_prepare',
- 'sqlsrv_query',
- 'sqlsrv_rollback',
- 'sqlsrv_rows_affected',
- 'sqlsrv_send_stream_data',
- 'sqlsrv_server_info'),
- 'SQLite': ('sqlite_array_query',
- 'sqlite_busy_timeout',
- 'sqlite_changes',
- 'sqlite_close',
- 'sqlite_column',
- 'sqlite_create_aggregate',
- 'sqlite_create_function',
- 'sqlite_current',
- 'sqlite_error_string',
- 'sqlite_escape_string',
- 'sqlite_exec',
- 'sqlite_factory',
- 'sqlite_fetch_all',
- 'sqlite_fetch_array',
- 'sqlite_fetch_column_types',
- 'sqlite_fetch_object',
- 'sqlite_fetch_single',
- 'sqlite_fetch_string',
- 'sqlite_field_name',
- 'sqlite_has_more',
- 'sqlite_has_prev',
- 'sqlite_key',
- 'sqlite_last_error',
- 'sqlite_last_insert_rowid',
- 'sqlite_libencoding',
- 'sqlite_libversion',
- 'sqlite_next',
- 'sqlite_num_fields',
- 'sqlite_num_rows',
- 'sqlite_open',
- 'sqlite_popen',
- 'sqlite_prev',
- 'sqlite_query',
- 'sqlite_rewind',
- 'sqlite_seek',
- 'sqlite_single_query',
- 'sqlite_udf_decode_binary',
- 'sqlite_udf_encode_binary',
- 'sqlite_unbuffered_query',
- 'sqlite_valid'),
- 'SSH2': ('ssh2_auth_agent',
- 'ssh2_auth_hostbased_file',
- 'ssh2_auth_none',
- 'ssh2_auth_password',
- 'ssh2_auth_pubkey_file',
- 'ssh2_connect',
- 'ssh2_exec',
- 'ssh2_fetch_stream',
- 'ssh2_fingerprint',
- 'ssh2_methods_negotiated',
- 'ssh2_publickey_add',
- 'ssh2_publickey_init',
- 'ssh2_publickey_list',
- 'ssh2_publickey_remove',
- 'ssh2_scp_recv',
- 'ssh2_scp_send',
- 'ssh2_sftp_chmod',
- 'ssh2_sftp_lstat',
- 'ssh2_sftp_mkdir',
- 'ssh2_sftp_readlink',
- 'ssh2_sftp_realpath',
- 'ssh2_sftp_rename',
- 'ssh2_sftp_rmdir',
- 'ssh2_sftp_stat',
- 'ssh2_sftp_symlink',
- 'ssh2_sftp_unlink',
- 'ssh2_sftp',
- 'ssh2_shell',
- 'ssh2_tunnel'),
- 'SVN': ('svn_add',
- 'svn_auth_get_parameter',
- 'svn_auth_set_parameter',
- 'svn_blame',
- 'svn_cat',
- 'svn_checkout',
- 'svn_cleanup',
- 'svn_client_version',
- 'svn_commit',
- 'svn_delete',
- 'svn_diff',
- 'svn_export',
- 'svn_fs_abort_txn',
- 'svn_fs_apply_text',
- 'svn_fs_begin_txn2',
- 'svn_fs_change_node_prop',
- 'svn_fs_check_path',
- 'svn_fs_contents_changed',
- 'svn_fs_copy',
- 'svn_fs_delete',
- 'svn_fs_dir_entries',
- 'svn_fs_file_contents',
- 'svn_fs_file_length',
- 'svn_fs_is_dir',
- 'svn_fs_is_file',
- 'svn_fs_make_dir',
- 'svn_fs_make_file',
- 'svn_fs_node_created_rev',
- 'svn_fs_node_prop',
- 'svn_fs_props_changed',
- 'svn_fs_revision_prop',
- 'svn_fs_revision_root',
- 'svn_fs_txn_root',
- 'svn_fs_youngest_rev',
- 'svn_import',
- 'svn_log',
- 'svn_ls',
- 'svn_mkdir',
- 'svn_repos_create',
- 'svn_repos_fs_begin_txn_for_commit',
- 'svn_repos_fs_commit_txn',
- 'svn_repos_fs',
- 'svn_repos_hotcopy',
- 'svn_repos_open',
- 'svn_repos_recover',
- 'svn_revert',
- 'svn_status',
- 'svn_update'),
- 'SWF': ('swf_actiongeturl',
- 'swf_actiongotoframe',
- 'swf_actiongotolabel',
- 'swf_actionnextframe',
- 'swf_actionplay',
- 'swf_actionprevframe',
- 'swf_actionsettarget',
- 'swf_actionstop',
- 'swf_actiontogglequality',
- 'swf_actionwaitforframe',
- 'swf_addbuttonrecord',
- 'swf_addcolor',
- 'swf_closefile',
- 'swf_definebitmap',
- 'swf_definefont',
- 'swf_defineline',
- 'swf_definepoly',
- 'swf_definerect',
- 'swf_definetext',
- 'swf_endbutton',
- 'swf_enddoaction',
- 'swf_endshape',
- 'swf_endsymbol',
- 'swf_fontsize',
- 'swf_fontslant',
- 'swf_fonttracking',
- 'swf_getbitmapinfo',
- 'swf_getfontinfo',
- 'swf_getframe',
- 'swf_labelframe',
- 'swf_lookat',
- 'swf_modifyobject',
- 'swf_mulcolor',
- 'swf_nextid',
- 'swf_oncondition',
- 'swf_openfile',
- 'swf_ortho2',
- 'swf_ortho',
- 'swf_perspective',
- 'swf_placeobject',
- 'swf_polarview',
- 'swf_popmatrix',
- 'swf_posround',
- 'swf_pushmatrix',
- 'swf_removeobject',
- 'swf_rotate',
- 'swf_scale',
- 'swf_setfont',
- 'swf_setframe',
- 'swf_shapearc',
- 'swf_shapecurveto3',
- 'swf_shapecurveto',
- 'swf_shapefillbitmapclip',
- 'swf_shapefillbitmaptile',
- 'swf_shapefilloff',
- 'swf_shapefillsolid',
- 'swf_shapelinesolid',
- 'swf_shapelineto',
- 'swf_shapemoveto',
- 'swf_showframe',
- 'swf_startbutton',
- 'swf_startdoaction',
- 'swf_startshape',
- 'swf_startsymbol',
- 'swf_textwidth',
- 'swf_translate',
- 'swf_viewport'),
- 'Semaphore': ('ftok',
- 'msg_get_queue',
- 'msg_queue_exists',
- 'msg_receive',
- 'msg_remove_queue',
- 'msg_send',
- 'msg_set_queue',
- 'msg_stat_queue',
- 'sem_acquire',
- 'sem_get',
- 'sem_release',
- 'sem_remove',
- 'shm_attach',
- 'shm_detach',
- 'shm_get_var',
- 'shm_has_var',
- 'shm_put_var',
- 'shm_remove_var',
- 'shm_remove'),
- 'Session': ('session_cache_expire',
- 'session_cache_limiter',
- 'session_commit',
- 'session_decode',
- 'session_destroy',
- 'session_encode',
- 'session_get_cookie_params',
- 'session_id',
- 'session_is_registered',
- 'session_module_name',
- 'session_name',
- 'session_regenerate_id',
- 'session_register_shutdown',
- 'session_register',
- 'session_save_path',
- 'session_set_cookie_params',
- 'session_set_save_handler',
- 'session_start',
- 'session_status',
- 'session_unregister',
- 'session_unset',
- 'session_write_close'),
- 'Session PgSQL': ('session_pgsql_add_error',
- 'session_pgsql_get_error',
- 'session_pgsql_get_field',
- 'session_pgsql_reset',
- 'session_pgsql_set_field',
- 'session_pgsql_status'),
- 'Shared Memory': ('shmop_close',
- 'shmop_delete',
- 'shmop_open',
- 'shmop_read',
- 'shmop_size',
- 'shmop_write'),
- 'SimpleXML': ('simplexml_import_dom',
- 'simplexml_load_file',
- 'simplexml_load_string'),
- 'Socket': ('socket_accept',
- 'socket_bind',
- 'socket_clear_error',
- 'socket_close',
- 'socket_cmsg_space',
- 'socket_connect',
- 'socket_create_listen',
- 'socket_create_pair',
- 'socket_create',
- 'socket_get_option',
- 'socket_getpeername',
- 'socket_getsockname',
- 'socket_import_stream',
- 'socket_last_error',
- 'socket_listen',
- 'socket_read',
- 'socket_recv',
- 'socket_recvfrom',
- 'socket_recvmsg',
- 'socket_select',
- 'socket_send',
- 'socket_sendmsg',
- 'socket_sendto',
- 'socket_set_block',
- 'socket_set_nonblock',
- 'socket_set_option',
- 'socket_shutdown',
- 'socket_strerror',
- 'socket_write'),
- 'Solr': ('solr_get_version',),
- 'Statistic': ('stats_absolute_deviation',
- 'stats_cdf_beta',
- 'stats_cdf_binomial',
- 'stats_cdf_cauchy',
- 'stats_cdf_chisquare',
- 'stats_cdf_exponential',
- 'stats_cdf_f',
- 'stats_cdf_gamma',
- 'stats_cdf_laplace',
- 'stats_cdf_logistic',
- 'stats_cdf_negative_binomial',
- 'stats_cdf_noncentral_chisquare',
- 'stats_cdf_noncentral_f',
- 'stats_cdf_poisson',
- 'stats_cdf_t',
- 'stats_cdf_uniform',
- 'stats_cdf_weibull',
- 'stats_covariance',
- 'stats_den_uniform',
- 'stats_dens_beta',
- 'stats_dens_cauchy',
- 'stats_dens_chisquare',
- 'stats_dens_exponential',
- 'stats_dens_f',
- 'stats_dens_gamma',
- 'stats_dens_laplace',
- 'stats_dens_logistic',
- 'stats_dens_negative_binomial',
- 'stats_dens_normal',
- 'stats_dens_pmf_binomial',
- 'stats_dens_pmf_hypergeometric',
- 'stats_dens_pmf_poisson',
- 'stats_dens_t',
- 'stats_dens_weibull',
- 'stats_harmonic_mean',
- 'stats_kurtosis',
- 'stats_rand_gen_beta',
- 'stats_rand_gen_chisquare',
- 'stats_rand_gen_exponential',
- 'stats_rand_gen_f',
- 'stats_rand_gen_funiform',
- 'stats_rand_gen_gamma',
- 'stats_rand_gen_ibinomial_negative',
- 'stats_rand_gen_ibinomial',
- 'stats_rand_gen_int',
- 'stats_rand_gen_ipoisson',
- 'stats_rand_gen_iuniform',
- 'stats_rand_gen_noncenral_chisquare',
- 'stats_rand_gen_noncentral_f',
- 'stats_rand_gen_noncentral_t',
- 'stats_rand_gen_normal',
- 'stats_rand_gen_t',
- 'stats_rand_get_seeds',
- 'stats_rand_phrase_to_seeds',
- 'stats_rand_ranf',
- 'stats_rand_setall',
- 'stats_skew',
- 'stats_standard_deviation',
- 'stats_stat_binomial_coef',
- 'stats_stat_correlation',
- 'stats_stat_gennch',
- 'stats_stat_independent_t',
- 'stats_stat_innerproduct',
- 'stats_stat_noncentral_t',
- 'stats_stat_paired_t',
- 'stats_stat_percentile',
- 'stats_stat_powersum',
- 'stats_variance'),
- 'Stomp': ('stomp_connect_error', 'stomp_version'),
- 'Stream': ('set_socket_blocking',
- 'stream_bucket_append',
- 'stream_bucket_make_writeable',
- 'stream_bucket_new',
- 'stream_bucket_prepend',
- 'stream_context_create',
- 'stream_context_get_default',
- 'stream_context_get_options',
- 'stream_context_get_params',
- 'stream_context_set_default',
- 'stream_context_set_option',
- 'stream_context_set_params',
- 'stream_copy_to_stream',
- 'stream_encoding',
- 'stream_filter_append',
- 'stream_filter_prepend',
- 'stream_filter_register',
- 'stream_filter_remove',
- 'stream_get_contents',
- 'stream_get_filters',
- 'stream_get_line',
- 'stream_get_meta_data',
- 'stream_get_transports',
- 'stream_get_wrappers',
- 'stream_is_local',
- 'stream_notification_callback',
- 'stream_register_wrapper',
- 'stream_resolve_include_path',
- 'stream_select',
- 'stream_set_blocking',
- 'stream_set_chunk_size',
- 'stream_set_read_buffer',
- 'stream_set_timeout',
- 'stream_set_write_buffer',
- 'stream_socket_accept',
- 'stream_socket_client',
- 'stream_socket_enable_crypto',
- 'stream_socket_get_name',
- 'stream_socket_pair',
- 'stream_socket_recvfrom',
- 'stream_socket_sendto',
- 'stream_socket_server',
- 'stream_socket_shutdown',
- 'stream_supports_lock',
- 'stream_wrapper_register',
- 'stream_wrapper_restore',
- 'stream_wrapper_unregister'),
- 'String': ('addcslashes',
- 'addslashes',
- 'bin2hex',
- 'chop',
- 'chr',
- 'chunk_split',
- 'convert_cyr_string',
- 'convert_uudecode',
- 'convert_uuencode',
- 'count_chars',
- 'crc32',
- 'crypt',
- 'echo',
- 'explode',
- 'fprintf',
- 'get_html_translation_table',
- 'hebrev',
- 'hebrevc',
- 'hex2bin',
- 'html_entity_decode',
- 'htmlentities',
- 'htmlspecialchars_decode',
- 'htmlspecialchars',
- 'implode',
- 'join',
- 'lcfirst',
- 'levenshtein',
- 'localeconv',
- 'ltrim',
- 'md5_file',
- 'md5',
- 'metaphone',
- 'money_format',
- 'nl_langinfo',
- 'nl2br',
- 'number_format',
- 'ord',
- 'parse_str',
- 'print',
- 'printf',
- 'quoted_printable_decode',
- 'quoted_printable_encode',
- 'quotemeta',
- 'rtrim',
- 'setlocale',
- 'sha1_file',
- 'sha1',
- 'similar_text',
- 'soundex',
- 'sprintf',
- 'sscanf',
- 'str_getcsv',
- 'str_ireplace',
- 'str_pad',
- 'str_repeat',
- 'str_replace',
- 'str_rot13',
- 'str_shuffle',
- 'str_split',
- 'str_word_count',
- 'strcasecmp',
- 'strchr',
- 'strcmp',
- 'strcoll',
- 'strcspn',
- 'strip_tags',
- 'stripcslashes',
- 'stripos',
- 'stripslashes',
- 'stristr',
- 'strlen',
- 'strnatcasecmp',
- 'strnatcmp',
- 'strncasecmp',
- 'strncmp',
- 'strpbrk',
- 'strpos',
- 'strrchr',
- 'strrev',
- 'strripos',
- 'strrpos',
- 'strspn',
- 'strstr',
- 'strtok',
- 'strtolower',
- 'strtoupper',
- 'strtr',
- 'substr_compare',
- 'substr_count',
- 'substr_replace',
- 'substr',
- 'trim',
- 'ucfirst',
- 'ucwords',
- 'vfprintf',
- 'vprintf',
- 'vsprintf',
- 'wordwrap'),
- 'Sybase': ('sybase_affected_rows',
- 'sybase_close',
- 'sybase_connect',
- 'sybase_data_seek',
- 'sybase_deadlock_retry_count',
- 'sybase_fetch_array',
- 'sybase_fetch_assoc',
- 'sybase_fetch_field',
- 'sybase_fetch_object',
- 'sybase_fetch_row',
- 'sybase_field_seek',
- 'sybase_free_result',
- 'sybase_get_last_message',
- 'sybase_min_client_severity',
- 'sybase_min_error_severity',
- 'sybase_min_message_severity',
- 'sybase_min_server_severity',
- 'sybase_num_fields',
- 'sybase_num_rows',
- 'sybase_pconnect',
- 'sybase_query',
- 'sybase_result',
- 'sybase_select_db',
- 'sybase_set_message_handler',
- 'sybase_unbuffered_query'),
- 'TCP': ('tcpwrap_check',),
- 'Taint': ('is_tainted', 'taint', 'untaint'),
- 'Tidy': ('ob_tidyhandler',
- 'tidy_access_count',
- 'tidy_config_count',
- 'tidy_error_count',
- 'tidy_get_output',
- 'tidy_load_config',
- 'tidy_reset_config',
- 'tidy_save_config',
- 'tidy_set_encoding',
- 'tidy_setopt',
- 'tidy_warning_count'),
- 'Tokenizer': ('token_get_all', 'token_name'),
- 'Trader': ('trader_acos',
- 'trader_ad',
- 'trader_add',
- 'trader_adosc',
- 'trader_adx',
- 'trader_adxr',
- 'trader_apo',
- 'trader_aroon',
- 'trader_aroonosc',
- 'trader_asin',
- 'trader_atan',
- 'trader_atr',
- 'trader_avgprice',
- 'trader_bbands',
- 'trader_beta',
- 'trader_bop',
- 'trader_cci',
- 'trader_cdl2crows',
- 'trader_cdl3blackcrows',
- 'trader_cdl3inside',
- 'trader_cdl3linestrike',
- 'trader_cdl3outside',
- 'trader_cdl3starsinsouth',
- 'trader_cdl3whitesoldiers',
- 'trader_cdlabandonedbaby',
- 'trader_cdladvanceblock',
- 'trader_cdlbelthold',
- 'trader_cdlbreakaway',
- 'trader_cdlclosingmarubozu',
- 'trader_cdlconcealbabyswall',
- 'trader_cdlcounterattack',
- 'trader_cdldarkcloudcover',
- 'trader_cdldoji',
- 'trader_cdldojistar',
- 'trader_cdldragonflydoji',
- 'trader_cdlengulfing',
- 'trader_cdleveningdojistar',
- 'trader_cdleveningstar',
- 'trader_cdlgapsidesidewhite',
- 'trader_cdlgravestonedoji',
- 'trader_cdlhammer',
- 'trader_cdlhangingman',
- 'trader_cdlharami',
- 'trader_cdlharamicross',
- 'trader_cdlhighwave',
- 'trader_cdlhikkake',
- 'trader_cdlhikkakemod',
- 'trader_cdlhomingpigeon',
- 'trader_cdlidentical3crows',
- 'trader_cdlinneck',
- 'trader_cdlinvertedhammer',
- 'trader_cdlkicking',
- 'trader_cdlkickingbylength',
- 'trader_cdlladderbottom',
- 'trader_cdllongleggeddoji',
- 'trader_cdllongline',
- 'trader_cdlmarubozu',
- 'trader_cdlmatchinglow',
- 'trader_cdlmathold',
- 'trader_cdlmorningdojistar',
- 'trader_cdlmorningstar',
- 'trader_cdlonneck',
- 'trader_cdlpiercing',
- 'trader_cdlrickshawman',
- 'trader_cdlrisefall3methods',
- 'trader_cdlseparatinglines',
- 'trader_cdlshootingstar',
- 'trader_cdlshortline',
- 'trader_cdlspinningtop',
- 'trader_cdlstalledpattern',
- 'trader_cdlsticksandwich',
- 'trader_cdltakuri',
- 'trader_cdltasukigap',
- 'trader_cdlthrusting',
- 'trader_cdltristar',
- 'trader_cdlunique3river',
- 'trader_cdlupsidegap2crows',
- 'trader_cdlxsidegap3methods',
- 'trader_ceil',
- 'trader_cmo',
- 'trader_correl',
- 'trader_cos',
- 'trader_cosh',
- 'trader_dema',
- 'trader_div',
- 'trader_dx',
- 'trader_ema',
- 'trader_errno',
- 'trader_exp',
- 'trader_floor',
- 'trader_get_compat',
- 'trader_get_unstable_period',
- 'trader_ht_dcperiod',
- 'trader_ht_dcphase',
- 'trader_ht_phasor',
- 'trader_ht_sine',
- 'trader_ht_trendline',
- 'trader_ht_trendmode',
- 'trader_kama',
- 'trader_linearreg_angle',
- 'trader_linearreg_intercept',
- 'trader_linearreg_slope',
- 'trader_linearreg',
- 'trader_ln',
- 'trader_log10',
- 'trader_ma',
- 'trader_macd',
- 'trader_macdext',
- 'trader_macdfix',
- 'trader_mama',
- 'trader_mavp',
- 'trader_max',
- 'trader_maxindex',
- 'trader_medprice',
- 'trader_mfi',
- 'trader_midpoint',
- 'trader_midprice',
- 'trader_min',
- 'trader_minindex',
- 'trader_minmax',
- 'trader_minmaxindex',
- 'trader_minus_di',
- 'trader_minus_dm',
- 'trader_mom',
- 'trader_mult',
- 'trader_natr',
- 'trader_obv',
- 'trader_plus_di',
- 'trader_plus_dm',
- 'trader_ppo',
- 'trader_roc',
- 'trader_rocp',
- 'trader_rocr100',
- 'trader_rocr',
- 'trader_rsi',
- 'trader_sar',
- 'trader_sarext',
- 'trader_set_compat',
- 'trader_set_unstable_period',
- 'trader_sin',
- 'trader_sinh',
- 'trader_sma',
- 'trader_sqrt',
- 'trader_stddev',
- 'trader_stoch',
- 'trader_stochf',
- 'trader_stochrsi',
- 'trader_sub',
- 'trader_sum',
- 'trader_t3',
- 'trader_tan',
- 'trader_tanh',
- 'trader_tema',
- 'trader_trange',
- 'trader_trima',
- 'trader_trix',
- 'trader_tsf',
- 'trader_typprice',
- 'trader_ultosc',
- 'trader_var',
- 'trader_wclprice',
- 'trader_willr',
- 'trader_wma'),
- 'URL': ('base64_decode',
- 'base64_encode',
- 'get_headers',
- 'get_meta_tags',
- 'http_build_query',
- 'parse_url',
- 'rawurldecode',
- 'rawurlencode',
- 'urldecode',
- 'urlencode'),
- 'Uopz': ('uopz_backup',
- 'uopz_compose',
- 'uopz_copy',
- 'uopz_delete',
- 'uopz_extend',
- 'uopz_flags',
- 'uopz_function',
- 'uopz_implement',
- 'uopz_overload',
- 'uopz_redefine',
- 'uopz_rename',
- 'uopz_restore',
- 'uopz_undefine'),
- 'Variable handling': ('boolval',
- 'debug_zval_dump',
- 'doubleval',
- 'empty',
- 'floatval',
- 'get_defined_vars',
- 'get_resource_type',
- 'gettype',
- 'import_request_variables',
- 'intval',
- 'is_array',
- 'is_bool',
- 'is_callable',
- 'is_double',
- 'is_float',
- 'is_int',
- 'is_integer',
- 'is_long',
- 'is_null',
- 'is_numeric',
- 'is_object',
- 'is_real',
- 'is_resource',
- 'is_scalar',
- 'is_string',
- 'isset',
- 'print_r',
- 'serialize',
- 'settype',
- 'strval',
- 'unserialize',
- 'unset',
- 'var_dump',
- 'var_export'),
- 'W32api': ('w32api_deftype',
- 'w32api_init_dtype',
- 'w32api_invoke_function',
- 'w32api_register_function',
- 'w32api_set_call_method'),
- 'WDDX': ('wddx_add_vars',
- 'wddx_deserialize',
- 'wddx_packet_end',
- 'wddx_packet_start',
- 'wddx_serialize_value',
- 'wddx_serialize_vars'),
- 'WinCache': ('wincache_fcache_fileinfo',
- 'wincache_fcache_meminfo',
- 'wincache_lock',
- 'wincache_ocache_fileinfo',
- 'wincache_ocache_meminfo',
- 'wincache_refresh_if_changed',
- 'wincache_rplist_fileinfo',
- 'wincache_rplist_meminfo',
- 'wincache_scache_info',
- 'wincache_scache_meminfo',
- 'wincache_ucache_add',
- 'wincache_ucache_cas',
- 'wincache_ucache_clear',
- 'wincache_ucache_dec',
- 'wincache_ucache_delete',
- 'wincache_ucache_exists',
- 'wincache_ucache_get',
- 'wincache_ucache_inc',
- 'wincache_ucache_info',
- 'wincache_ucache_meminfo',
- 'wincache_ucache_set',
- 'wincache_unlock'),
- 'XML Parser': ('utf8_decode',
- 'utf8_encode',
- 'xml_error_string',
- 'xml_get_current_byte_index',
- 'xml_get_current_column_number',
- 'xml_get_current_line_number',
- 'xml_get_error_code',
- 'xml_parse_into_struct',
- 'xml_parse',
- 'xml_parser_create_ns',
- 'xml_parser_create',
- 'xml_parser_free',
- 'xml_parser_get_option',
- 'xml_parser_set_option',
- 'xml_set_character_data_handler',
- 'xml_set_default_handler',
- 'xml_set_element_handler',
- 'xml_set_end_namespace_decl_handler',
- 'xml_set_external_entity_ref_handler',
- 'xml_set_notation_decl_handler',
- 'xml_set_object',
- 'xml_set_processing_instruction_handler',
- 'xml_set_start_namespace_decl_handler',
- 'xml_set_unparsed_entity_decl_handler'),
- 'XML-RPC': ('xmlrpc_decode_request',
- 'xmlrpc_decode',
- 'xmlrpc_encode_request',
- 'xmlrpc_encode',
- 'xmlrpc_get_type',
- 'xmlrpc_is_fault',
- 'xmlrpc_parse_method_descriptions',
- 'xmlrpc_server_add_introspection_data',
- 'xmlrpc_server_call_method',
- 'xmlrpc_server_create',
- 'xmlrpc_server_destroy',
- 'xmlrpc_server_register_introspection_callback',
- 'xmlrpc_server_register_method',
- 'xmlrpc_set_type'),
- 'XSLT (PHP 4)': ('xslt_backend_info',
- 'xslt_backend_name',
- 'xslt_backend_version',
- 'xslt_create',
- 'xslt_errno',
- 'xslt_error',
- 'xslt_free',
- 'xslt_getopt',
- 'xslt_process',
- 'xslt_set_base',
- 'xslt_set_encoding',
- 'xslt_set_error_handler',
- 'xslt_set_log',
- 'xslt_set_object',
- 'xslt_set_sax_handler',
- 'xslt_set_sax_handlers',
- 'xslt_set_scheme_handler',
- 'xslt_set_scheme_handlers',
- 'xslt_setopt'),
- 'Xhprof': ('xhprof_disable',
- 'xhprof_enable',
- 'xhprof_sample_disable',
- 'xhprof_sample_enable'),
- 'YAZ': ('yaz_addinfo',
- 'yaz_ccl_conf',
- 'yaz_ccl_parse',
- 'yaz_close',
- 'yaz_connect',
- 'yaz_database',
- 'yaz_element',
- 'yaz_errno',
- 'yaz_error',
- 'yaz_es_result',
- 'yaz_es',
- 'yaz_get_option',
- 'yaz_hits',
- 'yaz_itemorder',
- 'yaz_present',
- 'yaz_range',
- 'yaz_record',
- 'yaz_scan_result',
- 'yaz_scan',
- 'yaz_schema',
- 'yaz_search',
- 'yaz_set_option',
- 'yaz_sort',
- 'yaz_syntax',
- 'yaz_wait'),
- 'YP/NIS': ('yp_all',
- 'yp_cat',
- 'yp_err_string',
- 'yp_errno',
- 'yp_first',
- 'yp_get_default_domain',
- 'yp_master',
- 'yp_match',
- 'yp_next',
- 'yp_order'),
- 'Yaml': ('yaml_emit_file',
- 'yaml_emit',
- 'yaml_parse_file',
- 'yaml_parse_url',
- 'yaml_parse'),
- 'Zip': ('zip_close',
- 'zip_entry_close',
- 'zip_entry_compressedsize',
- 'zip_entry_compressionmethod',
- 'zip_entry_filesize',
- 'zip_entry_name',
- 'zip_entry_open',
- 'zip_entry_read',
- 'zip_open',
- 'zip_read'),
- 'Zlib': ('gzclose',
- 'gzcompress',
- 'gzdecode',
- 'gzdeflate',
- 'gzencode',
- 'gzeof',
- 'gzfile',
- 'gzgetc',
- 'gzgets',
- 'gzgetss',
- 'gzinflate',
- 'gzopen',
- 'gzpassthru',
- 'gzputs',
- 'gzread',
- 'gzrewind',
- 'gzseek',
- 'gztell',
- 'gzuncompress',
- 'gzwrite',
- 'readgzfile',
- 'zlib_decode',
- 'zlib_encode',
- 'zlib_get_coding_type'),
- 'bcompiler': ('bcompiler_load_exe',
- 'bcompiler_load',
- 'bcompiler_parse_class',
- 'bcompiler_read',
- 'bcompiler_write_class',
- 'bcompiler_write_constant',
- 'bcompiler_write_exe_footer',
- 'bcompiler_write_file',
- 'bcompiler_write_footer',
- 'bcompiler_write_function',
- 'bcompiler_write_functions_from_file',
- 'bcompiler_write_header',
- 'bcompiler_write_included_filename'),
- 'cURL': ('curl_close',
- 'curl_copy_handle',
- 'curl_errno',
- 'curl_error',
- 'curl_escape',
- 'curl_exec',
- 'curl_file_create',
- 'curl_getinfo',
- 'curl_init',
- 'curl_multi_add_handle',
- 'curl_multi_close',
- 'curl_multi_exec',
- 'curl_multi_getcontent',
- 'curl_multi_info_read',
- 'curl_multi_init',
- 'curl_multi_remove_handle',
- 'curl_multi_select',
- 'curl_multi_setopt',
- 'curl_multi_strerror',
- 'curl_pause',
- 'curl_reset',
- 'curl_setopt_array',
- 'curl_setopt',
- 'curl_share_close',
- 'curl_share_init',
- 'curl_share_setopt',
- 'curl_strerror',
- 'curl_unescape',
- 'curl_version'),
- 'chdb': ('chdb_create',),
- 'dBase': ('dbase_add_record',
- 'dbase_close',
- 'dbase_create',
- 'dbase_delete_record',
- 'dbase_get_header_info',
- 'dbase_get_record_with_names',
- 'dbase_get_record',
- 'dbase_numfields',
- 'dbase_numrecords',
- 'dbase_open',
- 'dbase_pack',
- 'dbase_replace_record'),
- 'dbx': ('dbx_close',
- 'dbx_compare',
- 'dbx_connect',
- 'dbx_error',
- 'dbx_escape_string',
- 'dbx_fetch_row',
- 'dbx_query',
- 'dbx_sort'),
- 'filePro': ('filepro_fieldcount',
- 'filepro_fieldname',
- 'filepro_fieldtype',
- 'filepro_fieldwidth',
- 'filepro_retrieve',
- 'filepro_rowcount',
- 'filepro'),
- 'iconv': ('iconv_get_encoding',
- 'iconv_mime_decode_headers',
- 'iconv_mime_decode',
- 'iconv_mime_encode',
- 'iconv_set_encoding',
- 'iconv_strlen',
- 'iconv_strpos',
- 'iconv_strrpos',
- 'iconv_substr',
- 'iconv',
- 'ob_iconv_handler'),
- 'inclued': ('inclued_get_data',),
- 'intl': ('intl_error_name',
- 'intl_get_error_code',
- 'intl_get_error_message',
- 'intl_is_failure'),
- 'libxml': ('libxml_clear_errors',
- 'libxml_disable_entity_loader',
- 'libxml_get_errors',
- 'libxml_get_last_error',
- 'libxml_set_external_entity_loader',
- 'libxml_set_streams_context',
- 'libxml_use_internal_errors'),
- 'mSQL': ('msql_affected_rows',
- 'msql_close',
- 'msql_connect',
- 'msql_create_db',
- 'msql_createdb',
- 'msql_data_seek',
- 'msql_db_query',
- 'msql_dbname',
- 'msql_drop_db',
- 'msql_error',
- 'msql_fetch_array',
- 'msql_fetch_field',
- 'msql_fetch_object',
- 'msql_fetch_row',
- 'msql_field_flags',
- 'msql_field_len',
- 'msql_field_name',
- 'msql_field_seek',
- 'msql_field_table',
- 'msql_field_type',
- 'msql_fieldflags',
- 'msql_fieldlen',
- 'msql_fieldname',
- 'msql_fieldtable',
- 'msql_fieldtype',
- 'msql_free_result',
- 'msql_list_dbs',
- 'msql_list_fields',
- 'msql_list_tables',
- 'msql_num_fields',
- 'msql_num_rows',
- 'msql_numfields',
- 'msql_numrows',
- 'msql_pconnect',
- 'msql_query',
- 'msql_regcase',
- 'msql_result',
- 'msql_select_db',
- 'msql_tablename',
- 'msql'),
- 'mnoGoSearch': ('udm_add_search_limit',
- 'udm_alloc_agent_array',
- 'udm_alloc_agent',
- 'udm_api_version',
- 'udm_cat_list',
- 'udm_cat_path',
- 'udm_check_charset',
- 'udm_check_stored',
- 'udm_clear_search_limits',
- 'udm_close_stored',
- 'udm_crc32',
- 'udm_errno',
- 'udm_error',
- 'udm_find',
- 'udm_free_agent',
- 'udm_free_ispell_data',
- 'udm_free_res',
- 'udm_get_doc_count',
- 'udm_get_res_field',
- 'udm_get_res_param',
- 'udm_hash32',
- 'udm_load_ispell_data',
- 'udm_open_stored',
- 'udm_set_agent_param'),
- 'mqseries': ('mqseries_back',
- 'mqseries_begin',
- 'mqseries_close',
- 'mqseries_cmit',
- 'mqseries_conn',
- 'mqseries_connx',
- 'mqseries_disc',
- 'mqseries_get',
- 'mqseries_inq',
- 'mqseries_open',
- 'mqseries_put1',
- 'mqseries_put',
- 'mqseries_set',
- 'mqseries_strerror'),
- 'mysqlnd_qc': ('mysqlnd_qc_clear_cache',
- 'mysqlnd_qc_get_available_handlers',
- 'mysqlnd_qc_get_cache_info',
- 'mysqlnd_qc_get_core_stats',
- 'mysqlnd_qc_get_normalized_query_trace_log',
- 'mysqlnd_qc_get_query_trace_log',
- 'mysqlnd_qc_set_cache_condition',
- 'mysqlnd_qc_set_is_select',
- 'mysqlnd_qc_set_storage_handler',
- 'mysqlnd_qc_set_user_handlers'),
- 'qtdom': ('qdom_error', 'qdom_tree'),
- 'runkit': ('runkit_class_adopt',
- 'runkit_class_emancipate',
- 'runkit_constant_add',
- 'runkit_constant_redefine',
- 'runkit_constant_remove',
- 'runkit_function_add',
- 'runkit_function_copy',
- 'runkit_function_redefine',
- 'runkit_function_remove',
- 'runkit_function_rename',
- 'runkit_import',
- 'runkit_lint_file',
- 'runkit_lint',
- 'runkit_method_add',
- 'runkit_method_copy',
- 'runkit_method_redefine',
- 'runkit_method_remove',
- 'runkit_method_rename',
- 'runkit_return_value_used',
- 'runkit_sandbox_output_handler',
- 'runkit_superglobals'),
- 'ssdeep': ('ssdeep_fuzzy_compare',
- 'ssdeep_fuzzy_hash_filename',
- 'ssdeep_fuzzy_hash'),
- 'vpopmail': ('vpopmail_add_alias_domain_ex',
- 'vpopmail_add_alias_domain',
- 'vpopmail_add_domain_ex',
- 'vpopmail_add_domain',
- 'vpopmail_add_user',
- 'vpopmail_alias_add',
- 'vpopmail_alias_del_domain',
- 'vpopmail_alias_del',
- 'vpopmail_alias_get_all',
- 'vpopmail_alias_get',
- 'vpopmail_auth_user',
- 'vpopmail_del_domain_ex',
- 'vpopmail_del_domain',
- 'vpopmail_del_user',
- 'vpopmail_error',
- 'vpopmail_passwd',
- 'vpopmail_set_user_quota'),
- 'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'),
- 'win32service': ('win32_continue_service',
- 'win32_create_service',
- 'win32_delete_service',
- 'win32_get_last_control_message',
- 'win32_pause_service',
- 'win32_query_service_status',
- 'win32_set_service_status',
- 'win32_start_service_ctrl_dispatcher',
- 'win32_start_service',
- 'win32_stop_service'),
- 'xattr': ('xattr_get',
- 'xattr_list',
- 'xattr_remove',
- 'xattr_set',
- 'xattr_supported'),
- 'xdiff': ('xdiff_file_bdiff_size',
- 'xdiff_file_bdiff',
- 'xdiff_file_bpatch',
- 'xdiff_file_diff_binary',
- 'xdiff_file_diff',
- 'xdiff_file_merge3',
- 'xdiff_file_patch_binary',
- 'xdiff_file_patch',
- 'xdiff_file_rabdiff',
- 'xdiff_string_bdiff_size',
- 'xdiff_string_bdiff',
- 'xdiff_string_bpatch',
- 'xdiff_string_diff_binary',
- 'xdiff_string_diff',
- 'xdiff_string_merge3',
- 'xdiff_string_patch_binary',
- 'xdiff_string_patch',
- 'xdiff_string_rabdiff')}
-
-
-if __name__ == '__main__': # pragma: no cover
- import glob
- import os
- import pprint
- import re
- import shutil
- import tarfile
- try:
- from urllib import urlretrieve
- except ImportError:
- from urllib.request import urlretrieve
-
- PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
- PHP_MANUAL_DIR = './php-chunked-xhtml/'
- PHP_REFERENCE_GLOB = 'ref.*'
+ :license: BSD, see LICENSE for details.
+"""
+
+MODULES = {'.NET': ('dotnet_load',),
+ 'APC': ('apc_add',
+ 'apc_bin_dump',
+ 'apc_bin_dumpfile',
+ 'apc_bin_load',
+ 'apc_bin_loadfile',
+ 'apc_cache_info',
+ 'apc_cas',
+ 'apc_clear_cache',
+ 'apc_compile_file',
+ 'apc_dec',
+ 'apc_define_constants',
+ 'apc_delete_file',
+ 'apc_delete',
+ 'apc_exists',
+ 'apc_fetch',
+ 'apc_inc',
+ 'apc_load_constants',
+ 'apc_sma_info',
+ 'apc_store'),
+ 'APD': ('apd_breakpoint',
+ 'apd_callstack',
+ 'apd_clunk',
+ 'apd_continue',
+ 'apd_croak',
+ 'apd_dump_function_table',
+ 'apd_dump_persistent_resources',
+ 'apd_dump_regular_resources',
+ 'apd_echo',
+ 'apd_get_active_symbols',
+ 'apd_set_pprof_trace',
+ 'apd_set_session_trace_socket',
+ 'apd_set_session_trace',
+ 'apd_set_session',
+ 'override_function',
+ 'rename_function'),
+ 'Aliases and deprecated Mysqli': ('mysqli_bind_param',
+ 'mysqli_bind_result',
+ 'mysqli_client_encoding',
+ 'mysqli_connect',
+ 'mysqli_disable_rpl_parse',
+ 'mysqli_enable_reads_from_master',
+ 'mysqli_enable_rpl_parse',
+ 'mysqli_escape_string',
+ 'mysqli_execute',
+ 'mysqli_fetch',
+ 'mysqli_get_cache_stats',
+ 'mysqli_get_metadata',
+ 'mysqli_master_query',
+ 'mysqli_param_count',
+ 'mysqli_report',
+ 'mysqli_rpl_parse_enabled',
+ 'mysqli_rpl_probe',
+ 'mysqli_send_long_data',
+ 'mysqli_slave_query'),
+ 'Apache': ('apache_child_terminate',
+ 'apache_get_modules',
+ 'apache_get_version',
+ 'apache_getenv',
+ 'apache_lookup_uri',
+ 'apache_note',
+ 'apache_request_headers',
+ 'apache_reset_timeout',
+ 'apache_response_headers',
+ 'apache_setenv',
+ 'getallheaders',
+ 'virtual'),
+ 'Array': ('array_change_key_case',
+ 'array_chunk',
+ 'array_column',
+ 'array_combine',
+ 'array_count_values',
+ 'array_diff_assoc',
+ 'array_diff_key',
+ 'array_diff_uassoc',
+ 'array_diff_ukey',
+ 'array_diff',
+ 'array_fill_keys',
+ 'array_fill',
+ 'array_filter',
+ 'array_flip',
+ 'array_intersect_assoc',
+ 'array_intersect_key',
+ 'array_intersect_uassoc',
+ 'array_intersect_ukey',
+ 'array_intersect',
+ 'array_key_exists',
+ 'array_keys',
+ 'array_map',
+ 'array_merge_recursive',
+ 'array_merge',
+ 'array_multisort',
+ 'array_pad',
+ 'array_pop',
+ 'array_product',
+ 'array_push',
+ 'array_rand',
+ 'array_reduce',
+ 'array_replace_recursive',
+ 'array_replace',
+ 'array_reverse',
+ 'array_search',
+ 'array_shift',
+ 'array_slice',
+ 'array_splice',
+ 'array_sum',
+ 'array_udiff_assoc',
+ 'array_udiff_uassoc',
+ 'array_udiff',
+ 'array_uintersect_assoc',
+ 'array_uintersect_uassoc',
+ 'array_uintersect',
+ 'array_unique',
+ 'array_unshift',
+ 'array_values',
+ 'array_walk_recursive',
+ 'array_walk',
+ 'array',
+ 'arsort',
+ 'asort',
+ 'compact',
+ 'count',
+ 'current',
+ 'each',
+ 'end',
+ 'extract',
+ 'in_array',
+ 'key_exists',
+ 'key',
+ 'krsort',
+ 'ksort',
+ 'list',
+ 'natcasesort',
+ 'natsort',
+ 'next',
+ 'pos',
+ 'prev',
+ 'range',
+ 'reset',
+ 'rsort',
+ 'shuffle',
+ 'sizeof',
+ 'sort',
+ 'uasort',
+ 'uksort',
+ 'usort'),
+ 'BBCode': ('bbcode_add_element',
+ 'bbcode_add_smiley',
+ 'bbcode_create',
+ 'bbcode_destroy',
+ 'bbcode_parse',
+ 'bbcode_set_arg_parser',
+ 'bbcode_set_flags'),
+ 'BC Math': ('bcadd',
+ 'bccomp',
+ 'bcdiv',
+ 'bcmod',
+ 'bcmul',
+ 'bcpow',
+ 'bcpowmod',
+ 'bcscale',
+ 'bcsqrt',
+ 'bcsub'),
+ 'Blenc': ('blenc_encrypt',),
+ 'Bzip2': ('bzclose',
+ 'bzcompress',
+ 'bzdecompress',
+ 'bzerrno',
+ 'bzerror',
+ 'bzerrstr',
+ 'bzflush',
+ 'bzopen',
+ 'bzread',
+ 'bzwrite'),
+ 'COM': ('com_addref',
+ 'com_create_guid',
+ 'com_event_sink',
+ 'com_get_active_object',
+ 'com_get',
+ 'com_invoke',
+ 'com_isenum',
+ 'com_load_typelib',
+ 'com_load',
+ 'com_message_pump',
+ 'com_print_typeinfo',
+ 'com_propget',
+ 'com_propput',
+ 'com_propset',
+ 'com_release',
+ 'com_set',
+ 'variant_abs',
+ 'variant_add',
+ 'variant_and',
+ 'variant_cast',
+ 'variant_cat',
+ 'variant_cmp',
+ 'variant_date_from_timestamp',
+ 'variant_date_to_timestamp',
+ 'variant_div',
+ 'variant_eqv',
+ 'variant_fix',
+ 'variant_get_type',
+ 'variant_idiv',
+ 'variant_imp',
+ 'variant_int',
+ 'variant_mod',
+ 'variant_mul',
+ 'variant_neg',
+ 'variant_not',
+ 'variant_or',
+ 'variant_pow',
+ 'variant_round',
+ 'variant_set_type',
+ 'variant_set',
+ 'variant_sub',
+ 'variant_xor'),
+ 'CUBRID': ('cubrid_bind',
+ 'cubrid_close_prepare',
+ 'cubrid_close_request',
+ 'cubrid_col_get',
+ 'cubrid_col_size',
+ 'cubrid_column_names',
+ 'cubrid_column_types',
+ 'cubrid_commit',
+ 'cubrid_connect_with_url',
+ 'cubrid_connect',
+ 'cubrid_current_oid',
+ 'cubrid_disconnect',
+ 'cubrid_drop',
+ 'cubrid_error_code_facility',
+ 'cubrid_error_code',
+ 'cubrid_error_msg',
+ 'cubrid_execute',
+ 'cubrid_fetch',
+ 'cubrid_free_result',
+ 'cubrid_get_autocommit',
+ 'cubrid_get_charset',
+ 'cubrid_get_class_name',
+ 'cubrid_get_client_info',
+ 'cubrid_get_db_parameter',
+ 'cubrid_get_query_timeout',
+ 'cubrid_get_server_info',
+ 'cubrid_get',
+ 'cubrid_insert_id',
+ 'cubrid_is_instance',
+ 'cubrid_lob_close',
+ 'cubrid_lob_export',
+ 'cubrid_lob_get',
+ 'cubrid_lob_send',
+ 'cubrid_lob_size',
+ 'cubrid_lob2_bind',
+ 'cubrid_lob2_close',
+ 'cubrid_lob2_export',
+ 'cubrid_lob2_import',
+ 'cubrid_lob2_new',
+ 'cubrid_lob2_read',
+ 'cubrid_lob2_seek64',
+ 'cubrid_lob2_seek',
+ 'cubrid_lob2_size64',
+ 'cubrid_lob2_size',
+ 'cubrid_lob2_tell64',
+ 'cubrid_lob2_tell',
+ 'cubrid_lob2_write',
+ 'cubrid_lock_read',
+ 'cubrid_lock_write',
+ 'cubrid_move_cursor',
+ 'cubrid_next_result',
+ 'cubrid_num_cols',
+ 'cubrid_num_rows',
+ 'cubrid_pconnect_with_url',
+ 'cubrid_pconnect',
+ 'cubrid_prepare',
+ 'cubrid_put',
+ 'cubrid_rollback',
+ 'cubrid_schema',
+ 'cubrid_seq_drop',
+ 'cubrid_seq_insert',
+ 'cubrid_seq_put',
+ 'cubrid_set_add',
+ 'cubrid_set_autocommit',
+ 'cubrid_set_db_parameter',
+ 'cubrid_set_drop',
+ 'cubrid_set_query_timeout',
+ 'cubrid_version'),
+ 'Cairo': ('cairo_create',
+ 'cairo_font_face_get_type',
+ 'cairo_font_face_status',
+ 'cairo_font_options_create',
+ 'cairo_font_options_equal',
+ 'cairo_font_options_get_antialias',
+ 'cairo_font_options_get_hint_metrics',
+ 'cairo_font_options_get_hint_style',
+ 'cairo_font_options_get_subpixel_order',
+ 'cairo_font_options_hash',
+ 'cairo_font_options_merge',
+ 'cairo_font_options_set_antialias',
+ 'cairo_font_options_set_hint_metrics',
+ 'cairo_font_options_set_hint_style',
+ 'cairo_font_options_set_subpixel_order',
+ 'cairo_font_options_status',
+ 'cairo_format_stride_for_width',
+ 'cairo_image_surface_create_for_data',
+ 'cairo_image_surface_create_from_png',
+ 'cairo_image_surface_create',
+ 'cairo_image_surface_get_data',
+ 'cairo_image_surface_get_format',
+ 'cairo_image_surface_get_height',
+ 'cairo_image_surface_get_stride',
+ 'cairo_image_surface_get_width',
+ 'cairo_matrix_create_scale',
+ 'cairo_matrix_create_translate',
+ 'cairo_matrix_invert',
+ 'cairo_matrix_multiply',
+ 'cairo_matrix_rotate',
+ 'cairo_matrix_transform_distance',
+ 'cairo_matrix_transform_point',
+ 'cairo_matrix_translate',
+ 'cairo_pattern_add_color_stop_rgb',
+ 'cairo_pattern_add_color_stop_rgba',
+ 'cairo_pattern_create_for_surface',
+ 'cairo_pattern_create_linear',
+ 'cairo_pattern_create_radial',
+ 'cairo_pattern_create_rgb',
+ 'cairo_pattern_create_rgba',
+ 'cairo_pattern_get_color_stop_count',
+ 'cairo_pattern_get_color_stop_rgba',
+ 'cairo_pattern_get_extend',
+ 'cairo_pattern_get_filter',
+ 'cairo_pattern_get_linear_points',
+ 'cairo_pattern_get_matrix',
+ 'cairo_pattern_get_radial_circles',
+ 'cairo_pattern_get_rgba',
+ 'cairo_pattern_get_surface',
+ 'cairo_pattern_get_type',
+ 'cairo_pattern_set_extend',
+ 'cairo_pattern_set_filter',
+ 'cairo_pattern_set_matrix',
+ 'cairo_pattern_status',
+ 'cairo_pdf_surface_create',
+ 'cairo_pdf_surface_set_size',
+ 'cairo_ps_get_levels',
+ 'cairo_ps_level_to_string',
+ 'cairo_ps_surface_create',
+ 'cairo_ps_surface_dsc_begin_page_setup',
+ 'cairo_ps_surface_dsc_begin_setup',
+ 'cairo_ps_surface_dsc_comment',
+ 'cairo_ps_surface_get_eps',
+ 'cairo_ps_surface_restrict_to_level',
+ 'cairo_ps_surface_set_eps',
+ 'cairo_ps_surface_set_size',
+ 'cairo_scaled_font_create',
+ 'cairo_scaled_font_extents',
+ 'cairo_scaled_font_get_ctm',
+ 'cairo_scaled_font_get_font_face',
+ 'cairo_scaled_font_get_font_matrix',
+ 'cairo_scaled_font_get_font_options',
+ 'cairo_scaled_font_get_scale_matrix',
+ 'cairo_scaled_font_get_type',
+ 'cairo_scaled_font_glyph_extents',
+ 'cairo_scaled_font_status',
+ 'cairo_scaled_font_text_extents',
+ 'cairo_surface_copy_page',
+ 'cairo_surface_create_similar',
+ 'cairo_surface_finish',
+ 'cairo_surface_flush',
+ 'cairo_surface_get_content',
+ 'cairo_surface_get_device_offset',
+ 'cairo_surface_get_font_options',
+ 'cairo_surface_get_type',
+ 'cairo_surface_mark_dirty_rectangle',
+ 'cairo_surface_mark_dirty',
+ 'cairo_surface_set_device_offset',
+ 'cairo_surface_set_fallback_resolution',
+ 'cairo_surface_show_page',
+ 'cairo_surface_status',
+ 'cairo_surface_write_to_png',
+ 'cairo_svg_surface_create',
+ 'cairo_svg_surface_restrict_to_version',
+ 'cairo_svg_version_to_string'),
+ 'Calendar': ('cal_days_in_month',
+ 'cal_from_jd',
+ 'cal_info',
+ 'cal_to_jd',
+ 'easter_date',
+ 'easter_days',
+ 'FrenchToJD',
+ 'GregorianToJD',
+ 'JDDayOfWeek',
+ 'JDMonthName',
+ 'JDToFrench',
+ 'JDToGregorian',
+ 'jdtojewish',
+ 'JDToJulian',
+ 'jdtounix',
+ 'JewishToJD',
+ 'JulianToJD',
+ 'unixtojd'),
+ 'Classes/Object': ('__autoload',
+ 'call_user_method_array',
+ 'call_user_method',
+ 'class_alias',
+ 'class_exists',
+ 'get_called_class',
+ 'get_class_methods',
+ 'get_class_vars',
+ 'get_class',
+ 'get_declared_classes',
+ 'get_declared_interfaces',
+ 'get_declared_traits',
+ 'get_object_vars',
+ 'get_parent_class',
+ 'interface_exists',
+ 'is_a',
+ 'is_subclass_of',
+ 'method_exists',
+ 'property_exists',
+ 'trait_exists'),
+ 'Classkit': ('classkit_import',
+ 'classkit_method_add',
+ 'classkit_method_copy',
+ 'classkit_method_redefine',
+ 'classkit_method_remove',
+ 'classkit_method_rename'),
+ 'Crack': ('crack_check',
+ 'crack_closedict',
+ 'crack_getlastmessage',
+ 'crack_opendict'),
+ 'Ctype': ('ctype_alnum',
+ 'ctype_alpha',
+ 'ctype_cntrl',
+ 'ctype_digit',
+ 'ctype_graph',
+ 'ctype_lower',
+ 'ctype_print',
+ 'ctype_punct',
+ 'ctype_space',
+ 'ctype_upper',
+ 'ctype_xdigit'),
+ 'Cyrus': ('cyrus_authenticate',
+ 'cyrus_bind',
+ 'cyrus_close',
+ 'cyrus_connect',
+ 'cyrus_query',
+ 'cyrus_unbind'),
+ 'DB++': ('dbplus_add',
+ 'dbplus_aql',
+ 'dbplus_chdir',
+ 'dbplus_close',
+ 'dbplus_curr',
+ 'dbplus_errcode',
+ 'dbplus_errno',
+ 'dbplus_find',
+ 'dbplus_first',
+ 'dbplus_flush',
+ 'dbplus_freealllocks',
+ 'dbplus_freelock',
+ 'dbplus_freerlocks',
+ 'dbplus_getlock',
+ 'dbplus_getunique',
+ 'dbplus_info',
+ 'dbplus_last',
+ 'dbplus_lockrel',
+ 'dbplus_next',
+ 'dbplus_open',
+ 'dbplus_prev',
+ 'dbplus_rchperm',
+ 'dbplus_rcreate',
+ 'dbplus_rcrtexact',
+ 'dbplus_rcrtlike',
+ 'dbplus_resolve',
+ 'dbplus_restorepos',
+ 'dbplus_rkeys',
+ 'dbplus_ropen',
+ 'dbplus_rquery',
+ 'dbplus_rrename',
+ 'dbplus_rsecindex',
+ 'dbplus_runlink',
+ 'dbplus_rzap',
+ 'dbplus_savepos',
+ 'dbplus_setindex',
+ 'dbplus_setindexbynumber',
+ 'dbplus_sql',
+ 'dbplus_tcl',
+ 'dbplus_tremove',
+ 'dbplus_undo',
+ 'dbplus_undoprepare',
+ 'dbplus_unlockrel',
+ 'dbplus_unselect',
+ 'dbplus_update',
+ 'dbplus_xlockrel',
+ 'dbplus_xunlockrel'),
+ 'DBA': ('dba_close',
+ 'dba_delete',
+ 'dba_exists',
+ 'dba_fetch',
+ 'dba_firstkey',
+ 'dba_handlers',
+ 'dba_insert',
+ 'dba_key_split',
+ 'dba_list',
+ 'dba_nextkey',
+ 'dba_open',
+ 'dba_optimize',
+ 'dba_popen',
+ 'dba_replace',
+ 'dba_sync'),
+ 'DOM': ('dom_import_simplexml',),
+ 'Date/Time': ('checkdate',
+ 'date_add',
+ 'date_create_from_format',
+ 'date_create_immutable_from_format',
+ 'date_create_immutable',
+ 'date_create',
+ 'date_date_set',
+ 'date_default_timezone_get',
+ 'date_default_timezone_set',
+ 'date_diff',
+ 'date_format',
+ 'date_get_last_errors',
+ 'date_interval_create_from_date_string',
+ 'date_interval_format',
+ 'date_isodate_set',
+ 'date_modify',
+ 'date_offset_get',
+ 'date_parse_from_format',
+ 'date_parse',
+ 'date_sub',
+ 'date_sun_info',
+ 'date_sunrise',
+ 'date_sunset',
+ 'date_time_set',
+ 'date_timestamp_get',
+ 'date_timestamp_set',
+ 'date_timezone_get',
+ 'date_timezone_set',
+ 'date',
+ 'getdate',
+ 'gettimeofday',
+ 'gmdate',
+ 'gmmktime',
+ 'gmstrftime',
+ 'idate',
+ 'localtime',
+ 'microtime',
+ 'mktime',
+ 'strftime',
+ 'strptime',
+ 'strtotime',
+ 'time',
+ 'timezone_abbreviations_list',
+ 'timezone_identifiers_list',
+ 'timezone_location_get',
+ 'timezone_name_from_abbr',
+ 'timezone_name_get',
+ 'timezone_offset_get',
+ 'timezone_open',
+ 'timezone_transitions_get',
+ 'timezone_version_get'),
+ 'Direct IO': ('dio_close',
+ 'dio_fcntl',
+ 'dio_open',
+ 'dio_read',
+ 'dio_seek',
+ 'dio_stat',
+ 'dio_tcsetattr',
+ 'dio_truncate',
+ 'dio_write'),
+ 'Directory': ('chdir',
+ 'chroot',
+ 'closedir',
+ 'dir',
+ 'getcwd',
+ 'opendir',
+ 'readdir',
+ 'rewinddir',
+ 'scandir'),
+ 'Eio': ('eio_busy',
+ 'eio_cancel',
+ 'eio_chmod',
+ 'eio_chown',
+ 'eio_close',
+ 'eio_custom',
+ 'eio_dup2',
+ 'eio_event_loop',
+ 'eio_fallocate',
+ 'eio_fchmod',
+ 'eio_fchown',
+ 'eio_fdatasync',
+ 'eio_fstat',
+ 'eio_fstatvfs',
+ 'eio_fsync',
+ 'eio_ftruncate',
+ 'eio_futime',
+ 'eio_get_event_stream',
+ 'eio_get_last_error',
+ 'eio_grp_add',
+ 'eio_grp_cancel',
+ 'eio_grp_limit',
+ 'eio_grp',
+ 'eio_init',
+ 'eio_link',
+ 'eio_lstat',
+ 'eio_mkdir',
+ 'eio_mknod',
+ 'eio_nop',
+ 'eio_npending',
+ 'eio_nready',
+ 'eio_nreqs',
+ 'eio_nthreads',
+ 'eio_open',
+ 'eio_poll',
+ 'eio_read',
+ 'eio_readahead',
+ 'eio_readdir',
+ 'eio_readlink',
+ 'eio_realpath',
+ 'eio_rename',
+ 'eio_rmdir',
+ 'eio_seek',
+ 'eio_sendfile',
+ 'eio_set_max_idle',
+ 'eio_set_max_parallel',
+ 'eio_set_max_poll_reqs',
+ 'eio_set_max_poll_time',
+ 'eio_set_min_parallel',
+ 'eio_stat',
+ 'eio_statvfs',
+ 'eio_symlink',
+ 'eio_sync_file_range',
+ 'eio_sync',
+ 'eio_syncfs',
+ 'eio_truncate',
+ 'eio_unlink',
+ 'eio_utime',
+ 'eio_write'),
+ 'Enchant': ('enchant_broker_describe',
+ 'enchant_broker_dict_exists',
+ 'enchant_broker_free_dict',
+ 'enchant_broker_free',
+ 'enchant_broker_get_error',
+ 'enchant_broker_init',
+ 'enchant_broker_list_dicts',
+ 'enchant_broker_request_dict',
+ 'enchant_broker_request_pwl_dict',
+ 'enchant_broker_set_ordering',
+ 'enchant_dict_add_to_personal',
+ 'enchant_dict_add_to_session',
+ 'enchant_dict_check',
+ 'enchant_dict_describe',
+ 'enchant_dict_get_error',
+ 'enchant_dict_is_in_session',
+ 'enchant_dict_quick_check',
+ 'enchant_dict_store_replacement',
+ 'enchant_dict_suggest'),
+ 'Error Handling': ('debug_backtrace',
+ 'debug_print_backtrace',
+ 'error_get_last',
+ 'error_log',
+ 'error_reporting',
+ 'restore_error_handler',
+ 'restore_exception_handler',
+ 'set_error_handler',
+ 'set_exception_handler',
+ 'trigger_error',
+ 'user_error'),
+ 'Exif': ('exif_imagetype',
+ 'exif_read_data',
+ 'exif_tagname',
+ 'exif_thumbnail',
+ 'read_exif_data'),
+ 'Expect': ('expect_expectl', 'expect_popen'),
+ 'FAM': ('fam_cancel_monitor',
+ 'fam_close',
+ 'fam_monitor_collection',
+ 'fam_monitor_directory',
+ 'fam_monitor_file',
+ 'fam_next_event',
+ 'fam_open',
+ 'fam_pending',
+ 'fam_resume_monitor',
+ 'fam_suspend_monitor'),
+ 'FDF': ('fdf_add_doc_javascript',
+ 'fdf_add_template',
+ 'fdf_close',
+ 'fdf_create',
+ 'fdf_enum_values',
+ 'fdf_errno',
+ 'fdf_error',
+ 'fdf_get_ap',
+ 'fdf_get_attachment',
+ 'fdf_get_encoding',
+ 'fdf_get_file',
+ 'fdf_get_flags',
+ 'fdf_get_opt',
+ 'fdf_get_status',
+ 'fdf_get_value',
+ 'fdf_get_version',
+ 'fdf_header',
+ 'fdf_next_field_name',
+ 'fdf_open_string',
+ 'fdf_open',
+ 'fdf_remove_item',
+ 'fdf_save_string',
+ 'fdf_save',
+ 'fdf_set_ap',
+ 'fdf_set_encoding',
+ 'fdf_set_file',
+ 'fdf_set_flags',
+ 'fdf_set_javascript_action',
+ 'fdf_set_on_import_javascript',
+ 'fdf_set_opt',
+ 'fdf_set_status',
+ 'fdf_set_submit_form_action',
+ 'fdf_set_target_frame',
+ 'fdf_set_value',
+ 'fdf_set_version'),
+ 'FPM': ('fastcgi_finish_request',),
+ 'FTP': ('ftp_alloc',
+ 'ftp_cdup',
+ 'ftp_chdir',
+ 'ftp_chmod',
+ 'ftp_close',
+ 'ftp_connect',
+ 'ftp_delete',
+ 'ftp_exec',
+ 'ftp_fget',
+ 'ftp_fput',
+ 'ftp_get_option',
+ 'ftp_get',
+ 'ftp_login',
+ 'ftp_mdtm',
+ 'ftp_mkdir',
+ 'ftp_nb_continue',
+ 'ftp_nb_fget',
+ 'ftp_nb_fput',
+ 'ftp_nb_get',
+ 'ftp_nb_put',
+ 'ftp_nlist',
+ 'ftp_pasv',
+ 'ftp_put',
+ 'ftp_pwd',
+ 'ftp_quit',
+ 'ftp_raw',
+ 'ftp_rawlist',
+ 'ftp_rename',
+ 'ftp_rmdir',
+ 'ftp_set_option',
+ 'ftp_site',
+ 'ftp_size',
+ 'ftp_ssl_connect',
+ 'ftp_systype'),
+ 'Fann': ('fann_cascadetrain_on_data',
+ 'fann_cascadetrain_on_file',
+ 'fann_clear_scaling_params',
+ 'fann_copy',
+ 'fann_create_from_file',
+ 'fann_create_shortcut_array',
+ 'fann_create_shortcut',
+ 'fann_create_sparse_array',
+ 'fann_create_sparse',
+ 'fann_create_standard_array',
+ 'fann_create_standard',
+ 'fann_create_train_from_callback',
+ 'fann_create_train',
+ 'fann_descale_input',
+ 'fann_descale_output',
+ 'fann_descale_train',
+ 'fann_destroy_train',
+ 'fann_destroy',
+ 'fann_duplicate_train_data',
+ 'fann_get_activation_function',
+ 'fann_get_activation_steepness',
+ 'fann_get_bias_array',
+ 'fann_get_bit_fail_limit',
+ 'fann_get_bit_fail',
+ 'fann_get_cascade_activation_functions_count',
+ 'fann_get_cascade_activation_functions',
+ 'fann_get_cascade_activation_steepnesses_count',
+ 'fann_get_cascade_activation_steepnesses',
+ 'fann_get_cascade_candidate_change_fraction',
+ 'fann_get_cascade_candidate_limit',
+ 'fann_get_cascade_candidate_stagnation_epochs',
+ 'fann_get_cascade_max_cand_epochs',
+ 'fann_get_cascade_max_out_epochs',
+ 'fann_get_cascade_min_cand_epochs',
+ 'fann_get_cascade_min_out_epochs',
+ 'fann_get_cascade_num_candidate_groups',
+ 'fann_get_cascade_num_candidates',
+ 'fann_get_cascade_output_change_fraction',
+ 'fann_get_cascade_output_stagnation_epochs',
+ 'fann_get_cascade_weight_multiplier',
+ 'fann_get_connection_array',
+ 'fann_get_connection_rate',
+ 'fann_get_errno',
+ 'fann_get_errstr',
+ 'fann_get_layer_array',
+ 'fann_get_learning_momentum',
+ 'fann_get_learning_rate',
+ 'fann_get_MSE',
+ 'fann_get_network_type',
+ 'fann_get_num_input',
+ 'fann_get_num_layers',
+ 'fann_get_num_output',
+ 'fann_get_quickprop_decay',
+ 'fann_get_quickprop_mu',
+ 'fann_get_rprop_decrease_factor',
+ 'fann_get_rprop_delta_max',
+ 'fann_get_rprop_delta_min',
+ 'fann_get_rprop_delta_zero',
+ 'fann_get_rprop_increase_factor',
+ 'fann_get_sarprop_step_error_shift',
+ 'fann_get_sarprop_step_error_threshold_factor',
+ 'fann_get_sarprop_temperature',
+ 'fann_get_sarprop_weight_decay_shift',
+ 'fann_get_total_connections',
+ 'fann_get_total_neurons',
+ 'fann_get_train_error_function',
+ 'fann_get_train_stop_function',
+ 'fann_get_training_algorithm',
+ 'fann_init_weights',
+ 'fann_length_train_data',
+ 'fann_merge_train_data',
+ 'fann_num_input_train_data',
+ 'fann_num_output_train_data',
+ 'fann_print_error',
+ 'fann_randomize_weights',
+ 'fann_read_train_from_file',
+ 'fann_reset_errno',
+ 'fann_reset_errstr',
+ 'fann_reset_MSE',
+ 'fann_run',
+ 'fann_save_train',
+ 'fann_save',
+ 'fann_scale_input_train_data',
+ 'fann_scale_input',
+ 'fann_scale_output_train_data',
+ 'fann_scale_output',
+ 'fann_scale_train_data',
+ 'fann_scale_train',
+ 'fann_set_activation_function_hidden',
+ 'fann_set_activation_function_layer',
+ 'fann_set_activation_function_output',
+ 'fann_set_activation_function',
+ 'fann_set_activation_steepness_hidden',
+ 'fann_set_activation_steepness_layer',
+ 'fann_set_activation_steepness_output',
+ 'fann_set_activation_steepness',
+ 'fann_set_bit_fail_limit',
+ 'fann_set_callback',
+ 'fann_set_cascade_activation_functions',
+ 'fann_set_cascade_activation_steepnesses',
+ 'fann_set_cascade_candidate_change_fraction',
+ 'fann_set_cascade_candidate_limit',
+ 'fann_set_cascade_candidate_stagnation_epochs',
+ 'fann_set_cascade_max_cand_epochs',
+ 'fann_set_cascade_max_out_epochs',
+ 'fann_set_cascade_min_cand_epochs',
+ 'fann_set_cascade_min_out_epochs',
+ 'fann_set_cascade_num_candidate_groups',
+ 'fann_set_cascade_output_change_fraction',
+ 'fann_set_cascade_output_stagnation_epochs',
+ 'fann_set_cascade_weight_multiplier',
+ 'fann_set_error_log',
+ 'fann_set_input_scaling_params',
+ 'fann_set_learning_momentum',
+ 'fann_set_learning_rate',
+ 'fann_set_output_scaling_params',
+ 'fann_set_quickprop_decay',
+ 'fann_set_quickprop_mu',
+ 'fann_set_rprop_decrease_factor',
+ 'fann_set_rprop_delta_max',
+ 'fann_set_rprop_delta_min',
+ 'fann_set_rprop_delta_zero',
+ 'fann_set_rprop_increase_factor',
+ 'fann_set_sarprop_step_error_shift',
+ 'fann_set_sarprop_step_error_threshold_factor',
+ 'fann_set_sarprop_temperature',
+ 'fann_set_sarprop_weight_decay_shift',
+ 'fann_set_scaling_params',
+ 'fann_set_train_error_function',
+ 'fann_set_train_stop_function',
+ 'fann_set_training_algorithm',
+ 'fann_set_weight_array',
+ 'fann_set_weight',
+ 'fann_shuffle_train_data',
+ 'fann_subset_train_data',
+ 'fann_test_data',
+ 'fann_test',
+ 'fann_train_epoch',
+ 'fann_train_on_data',
+ 'fann_train_on_file',
+ 'fann_train'),
+ 'Fileinfo': ('finfo_buffer',
+ 'finfo_close',
+ 'finfo_file',
+ 'finfo_open',
+ 'finfo_set_flags',
+ 'mime_content_type'),
+ 'Filesystem': ('basename',
+ 'chgrp',
+ 'chmod',
+ 'chown',
+ 'clearstatcache',
+ 'copy',
+ 'dirname',
+ 'disk_free_space',
+ 'disk_total_space',
+ 'diskfreespace',
+ 'fclose',
+ 'feof',
+ 'fflush',
+ 'fgetc',
+ 'fgetcsv',
+ 'fgets',
+ 'fgetss',
+ 'file_exists',
+ 'file_get_contents',
+ 'file_put_contents',
+ 'file',
+ 'fileatime',
+ 'filectime',
+ 'filegroup',
+ 'fileinode',
+ 'filemtime',
+ 'fileowner',
+ 'fileperms',
+ 'filesize',
+ 'filetype',
+ 'flock',
+ 'fnmatch',
+ 'fopen',
+ 'fpassthru',
+ 'fputcsv',
+ 'fputs',
+ 'fread',
+ 'fscanf',
+ 'fseek',
+ 'fstat',
+ 'ftell',
+ 'ftruncate',
+ 'fwrite',
+ 'glob',
+ 'is_dir',
+ 'is_executable',
+ 'is_file',
+ 'is_link',
+ 'is_readable',
+ 'is_uploaded_file',
+ 'is_writable',
+ 'is_writeable',
+ 'lchgrp',
+ 'lchown',
+ 'link',
+ 'linkinfo',
+ 'lstat',
+ 'mkdir',
+ 'move_uploaded_file',
+ 'parse_ini_file',
+ 'parse_ini_string',
+ 'pathinfo',
+ 'pclose',
+ 'popen',
+ 'readfile',
+ 'readlink',
+ 'realpath_cache_get',
+ 'realpath_cache_size',
+ 'realpath',
+ 'rename',
+ 'rewind',
+ 'rmdir',
+ 'set_file_buffer',
+ 'stat',
+ 'symlink',
+ 'tempnam',
+ 'tmpfile',
+ 'touch',
+ 'umask',
+ 'unlink'),
+ 'Filter': ('filter_has_var',
+ 'filter_id',
+ 'filter_input_array',
+ 'filter_input',
+ 'filter_list',
+ 'filter_var_array',
+ 'filter_var'),
+ 'Firebird/InterBase': ('ibase_add_user',
+ 'ibase_affected_rows',
+ 'ibase_backup',
+ 'ibase_blob_add',
+ 'ibase_blob_cancel',
+ 'ibase_blob_close',
+ 'ibase_blob_create',
+ 'ibase_blob_echo',
+ 'ibase_blob_get',
+ 'ibase_blob_import',
+ 'ibase_blob_info',
+ 'ibase_blob_open',
+ 'ibase_close',
+ 'ibase_commit_ret',
+ 'ibase_commit',
+ 'ibase_connect',
+ 'ibase_db_info',
+ 'ibase_delete_user',
+ 'ibase_drop_db',
+ 'ibase_errcode',
+ 'ibase_errmsg',
+ 'ibase_execute',
+ 'ibase_fetch_assoc',
+ 'ibase_fetch_object',
+ 'ibase_fetch_row',
+ 'ibase_field_info',
+ 'ibase_free_event_handler',
+ 'ibase_free_query',
+ 'ibase_free_result',
+ 'ibase_gen_id',
+ 'ibase_maintain_db',
+ 'ibase_modify_user',
+ 'ibase_name_result',
+ 'ibase_num_fields',
+ 'ibase_num_params',
+ 'ibase_param_info',
+ 'ibase_pconnect',
+ 'ibase_prepare',
+ 'ibase_query',
+ 'ibase_restore',
+ 'ibase_rollback_ret',
+ 'ibase_rollback',
+ 'ibase_server_info',
+ 'ibase_service_attach',
+ 'ibase_service_detach',
+ 'ibase_set_event_handler',
+ 'ibase_trans',
+ 'ibase_wait_event'),
+ 'FriBiDi': ('fribidi_log2vis',),
+ 'FrontBase': ('fbsql_affected_rows',
+ 'fbsql_autocommit',
+ 'fbsql_blob_size',
+ 'fbsql_change_user',
+ 'fbsql_clob_size',
+ 'fbsql_close',
+ 'fbsql_commit',
+ 'fbsql_connect',
+ 'fbsql_create_blob',
+ 'fbsql_create_clob',
+ 'fbsql_create_db',
+ 'fbsql_data_seek',
+ 'fbsql_database_password',
+ 'fbsql_database',
+ 'fbsql_db_query',
+ 'fbsql_db_status',
+ 'fbsql_drop_db',
+ 'fbsql_errno',
+ 'fbsql_error',
+ 'fbsql_fetch_array',
+ 'fbsql_fetch_assoc',
+ 'fbsql_fetch_field',
+ 'fbsql_fetch_lengths',
+ 'fbsql_fetch_object',
+ 'fbsql_fetch_row',
+ 'fbsql_field_flags',
+ 'fbsql_field_len',
+ 'fbsql_field_name',
+ 'fbsql_field_seek',
+ 'fbsql_field_table',
+ 'fbsql_field_type',
+ 'fbsql_free_result',
+ 'fbsql_get_autostart_info',
+ 'fbsql_hostname',
+ 'fbsql_insert_id',
+ 'fbsql_list_dbs',
+ 'fbsql_list_fields',
+ 'fbsql_list_tables',
+ 'fbsql_next_result',
+ 'fbsql_num_fields',
+ 'fbsql_num_rows',
+ 'fbsql_password',
+ 'fbsql_pconnect',
+ 'fbsql_query',
+ 'fbsql_read_blob',
+ 'fbsql_read_clob',
+ 'fbsql_result',
+ 'fbsql_rollback',
+ 'fbsql_rows_fetched',
+ 'fbsql_select_db',
+ 'fbsql_set_characterset',
+ 'fbsql_set_lob_mode',
+ 'fbsql_set_password',
+ 'fbsql_set_transaction',
+ 'fbsql_start_db',
+ 'fbsql_stop_db',
+ 'fbsql_table_name',
+ 'fbsql_tablename',
+ 'fbsql_username',
+ 'fbsql_warnings'),
+ 'Function handling': ('call_user_func_array',
+ 'call_user_func',
+ 'create_function',
+ 'forward_static_call_array',
+ 'forward_static_call',
+ 'func_get_arg',
+ 'func_get_args',
+ 'func_num_args',
+ 'function_exists',
+ 'get_defined_functions',
+ 'register_shutdown_function',
+ 'register_tick_function',
+ 'unregister_tick_function'),
+ 'GD and Image': ('gd_info',
+ 'getimagesize',
+ 'getimagesizefromstring',
+ 'image_type_to_extension',
+ 'image_type_to_mime_type',
+ 'image2wbmp',
+ 'imageaffine',
+ 'imageaffinematrixconcat',
+ 'imageaffinematrixget',
+ 'imagealphablending',
+ 'imageantialias',
+ 'imagearc',
+ 'imagechar',
+ 'imagecharup',
+ 'imagecolorallocate',
+ 'imagecolorallocatealpha',
+ 'imagecolorat',
+ 'imagecolorclosest',
+ 'imagecolorclosestalpha',
+ 'imagecolorclosesthwb',
+ 'imagecolordeallocate',
+ 'imagecolorexact',
+ 'imagecolorexactalpha',
+ 'imagecolormatch',
+ 'imagecolorresolve',
+ 'imagecolorresolvealpha',
+ 'imagecolorset',
+ 'imagecolorsforindex',
+ 'imagecolorstotal',
+ 'imagecolortransparent',
+ 'imageconvolution',
+ 'imagecopy',
+ 'imagecopymerge',
+ 'imagecopymergegray',
+ 'imagecopyresampled',
+ 'imagecopyresized',
+ 'imagecreate',
+ 'imagecreatefromgd2',
+ 'imagecreatefromgd2part',
+ 'imagecreatefromgd',
+ 'imagecreatefromgif',
+ 'imagecreatefromjpeg',
+ 'imagecreatefrompng',
+ 'imagecreatefromstring',
+ 'imagecreatefromwbmp',
+ 'imagecreatefromwebp',
+ 'imagecreatefromxbm',
+ 'imagecreatefromxpm',
+ 'imagecreatetruecolor',
+ 'imagecrop',
+ 'imagecropauto',
+ 'imagedashedline',
+ 'imagedestroy',
+ 'imageellipse',
+ 'imagefill',
+ 'imagefilledarc',
+ 'imagefilledellipse',
+ 'imagefilledpolygon',
+ 'imagefilledrectangle',
+ 'imagefilltoborder',
+ 'imagefilter',
+ 'imageflip',
+ 'imagefontheight',
+ 'imagefontwidth',
+ 'imageftbbox',
+ 'imagefttext',
+ 'imagegammacorrect',
+ 'imagegd2',
+ 'imagegd',
+ 'imagegif',
+ 'imagegrabscreen',
+ 'imagegrabwindow',
+ 'imageinterlace',
+ 'imageistruecolor',
+ 'imagejpeg',
+ 'imagelayereffect',
+ 'imageline',
+ 'imageloadfont',
+ 'imagepalettecopy',
+ 'imagepalettetotruecolor',
+ 'imagepng',
+ 'imagepolygon',
+ 'imagepsbbox',
+ 'imagepsencodefont',
+ 'imagepsextendfont',
+ 'imagepsfreefont',
+ 'imagepsloadfont',
+ 'imagepsslantfont',
+ 'imagepstext',
+ 'imagerectangle',
+ 'imagerotate',
+ 'imagesavealpha',
+ 'imagescale',
+ 'imagesetbrush',
+ 'imagesetinterpolation',
+ 'imagesetpixel',
+ 'imagesetstyle',
+ 'imagesetthickness',
+ 'imagesettile',
+ 'imagestring',
+ 'imagestringup',
+ 'imagesx',
+ 'imagesy',
+ 'imagetruecolortopalette',
+ 'imagettfbbox',
+ 'imagettftext',
+ 'imagetypes',
+ 'imagewbmp',
+ 'imagewebp',
+ 'imagexbm',
+ 'iptcembed',
+ 'iptcparse',
+ 'jpeg2wbmp',
+ 'png2wbmp'),
+ 'GMP': ('gmp_abs',
+ 'gmp_add',
+ 'gmp_and',
+ 'gmp_clrbit',
+ 'gmp_cmp',
+ 'gmp_com',
+ 'gmp_div_q',
+ 'gmp_div_qr',
+ 'gmp_div_r',
+ 'gmp_div',
+ 'gmp_divexact',
+ 'gmp_fact',
+ 'gmp_gcd',
+ 'gmp_gcdext',
+ 'gmp_hamdist',
+ 'gmp_init',
+ 'gmp_intval',
+ 'gmp_invert',
+ 'gmp_jacobi',
+ 'gmp_legendre',
+ 'gmp_mod',
+ 'gmp_mul',
+ 'gmp_neg',
+ 'gmp_nextprime',
+ 'gmp_or',
+ 'gmp_perfect_square',
+ 'gmp_popcount',
+ 'gmp_pow',
+ 'gmp_powm',
+ 'gmp_prob_prime',
+ 'gmp_random',
+ 'gmp_scan0',
+ 'gmp_scan1',
+ 'gmp_setbit',
+ 'gmp_sign',
+ 'gmp_sqrt',
+ 'gmp_sqrtrem',
+ 'gmp_strval',
+ 'gmp_sub',
+ 'gmp_testbit',
+ 'gmp_xor'),
+ 'GeoIP': ('geoip_asnum_by_name',
+ 'geoip_continent_code_by_name',
+ 'geoip_country_code_by_name',
+ 'geoip_country_code3_by_name',
+ 'geoip_country_name_by_name',
+ 'geoip_database_info',
+ 'geoip_db_avail',
+ 'geoip_db_filename',
+ 'geoip_db_get_all_info',
+ 'geoip_domain_by_name',
+ 'geoip_id_by_name',
+ 'geoip_isp_by_name',
+ 'geoip_netspeedcell_by_name',
+ 'geoip_org_by_name',
+ 'geoip_record_by_name',
+ 'geoip_region_by_name',
+ 'geoip_region_name_by_code',
+ 'geoip_setup_custom_directory',
+ 'geoip_time_zone_by_country_and_region'),
+ 'Gettext': ('bind_textdomain_codeset',
+ 'bindtextdomain',
+ 'dcgettext',
+ 'dcngettext',
+ 'dgettext',
+ 'dngettext',
+ 'gettext',
+ 'ngettext',
+ 'textdomain'),
+ 'GnuPG': ('gnupg_adddecryptkey',
+ 'gnupg_addencryptkey',
+ 'gnupg_addsignkey',
+ 'gnupg_cleardecryptkeys',
+ 'gnupg_clearencryptkeys',
+ 'gnupg_clearsignkeys',
+ 'gnupg_decrypt',
+ 'gnupg_decryptverify',
+ 'gnupg_encrypt',
+ 'gnupg_encryptsign',
+ 'gnupg_export',
+ 'gnupg_geterror',
+ 'gnupg_getprotocol',
+ 'gnupg_import',
+ 'gnupg_init',
+ 'gnupg_keyinfo',
+ 'gnupg_setarmor',
+ 'gnupg_seterrormode',
+ 'gnupg_setsignmode',
+ 'gnupg_sign',
+ 'gnupg_verify'),
+ 'Gopher': ('gopher_parsedir',),
+ 'Grapheme': ('grapheme_extract',
+ 'grapheme_stripos',
+ 'grapheme_stristr',
+ 'grapheme_strlen',
+ 'grapheme_strpos',
+ 'grapheme_strripos',
+ 'grapheme_strrpos',
+ 'grapheme_strstr',
+ 'grapheme_substr'),
+ 'Gupnp': ('gupnp_context_get_host_ip',
+ 'gupnp_context_get_port',
+ 'gupnp_context_get_subscription_timeout',
+ 'gupnp_context_host_path',
+ 'gupnp_context_new',
+ 'gupnp_context_set_subscription_timeout',
+ 'gupnp_context_timeout_add',
+ 'gupnp_context_unhost_path',
+ 'gupnp_control_point_browse_start',
+ 'gupnp_control_point_browse_stop',
+ 'gupnp_control_point_callback_set',
+ 'gupnp_control_point_new',
+ 'gupnp_device_action_callback_set',
+ 'gupnp_device_info_get_service',
+ 'gupnp_device_info_get',
+ 'gupnp_root_device_get_available',
+ 'gupnp_root_device_get_relative_location',
+ 'gupnp_root_device_new',
+ 'gupnp_root_device_set_available',
+ 'gupnp_root_device_start',
+ 'gupnp_root_device_stop',
+ 'gupnp_service_action_get',
+ 'gupnp_service_action_return_error',
+ 'gupnp_service_action_return',
+ 'gupnp_service_action_set',
+ 'gupnp_service_freeze_notify',
+ 'gupnp_service_info_get_introspection',
+ 'gupnp_service_info_get',
+ 'gupnp_service_introspection_get_state_variable',
+ 'gupnp_service_notify',
+ 'gupnp_service_proxy_action_get',
+ 'gupnp_service_proxy_action_set',
+ 'gupnp_service_proxy_add_notify',
+ 'gupnp_service_proxy_callback_set',
+ 'gupnp_service_proxy_get_subscribed',
+ 'gupnp_service_proxy_remove_notify',
+ 'gupnp_service_proxy_set_subscribed',
+ 'gupnp_service_thaw_notify'),
+ 'HTTP': ('http_cache_etag',
+ 'http_cache_last_modified',
+ 'http_chunked_decode',
+ 'http_deflate',
+ 'http_inflate',
+ 'http_build_cookie',
+ 'http_date',
+ 'http_get_request_body_stream',
+ 'http_get_request_body',
+ 'http_get_request_headers',
+ 'http_match_etag',
+ 'http_match_modified',
+ 'http_match_request_header',
+ 'http_support',
+ 'http_negotiate_charset',
+ 'http_negotiate_content_type',
+ 'http_negotiate_language',
+ 'ob_deflatehandler',
+ 'ob_etaghandler',
+ 'ob_inflatehandler',
+ 'http_parse_cookie',
+ 'http_parse_headers',
+ 'http_parse_message',
+ 'http_parse_params',
+ 'http_persistent_handles_clean',
+ 'http_persistent_handles_count',
+ 'http_persistent_handles_ident',
+ 'http_get',
+ 'http_head',
+ 'http_post_data',
+ 'http_post_fields',
+ 'http_put_data',
+ 'http_put_file',
+ 'http_put_stream',
+ 'http_request_body_encode',
+ 'http_request_method_exists',
+ 'http_request_method_name',
+ 'http_request_method_register',
+ 'http_request_method_unregister',
+ 'http_request',
+ 'http_redirect',
+ 'http_send_content_disposition',
+ 'http_send_content_type',
+ 'http_send_data',
+ 'http_send_file',
+ 'http_send_last_modified',
+ 'http_send_status',
+ 'http_send_stream',
+ 'http_throttle',
+ 'http_build_str',
+ 'http_build_url'),
+ 'Hash': ('hash_algos',
+ 'hash_copy',
+ 'hash_file',
+ 'hash_final',
+ 'hash_hmac_file',
+ 'hash_hmac',
+ 'hash_init',
+ 'hash_pbkdf2',
+ 'hash_update_file',
+ 'hash_update_stream',
+ 'hash_update',
+ 'hash'),
+ 'Hyperwave': ('hw_Array2Objrec',
+ 'hw_changeobject',
+ 'hw_Children',
+ 'hw_ChildrenObj',
+ 'hw_Close',
+ 'hw_Connect',
+ 'hw_connection_info',
+ 'hw_cp',
+ 'hw_Deleteobject',
+ 'hw_DocByAnchor',
+ 'hw_DocByAnchorObj',
+ 'hw_Document_Attributes',
+ 'hw_Document_BodyTag',
+ 'hw_Document_Content',
+ 'hw_Document_SetContent',
+ 'hw_Document_Size',
+ 'hw_dummy',
+ 'hw_EditText',
+ 'hw_Error',
+ 'hw_ErrorMsg',
+ 'hw_Free_Document',
+ 'hw_GetAnchors',
+ 'hw_GetAnchorsObj',
+ 'hw_GetAndLock',
+ 'hw_GetChildColl',
+ 'hw_GetChildCollObj',
+ 'hw_GetChildDocColl',
+ 'hw_GetChildDocCollObj',
+ 'hw_GetObject',
+ 'hw_GetObjectByQuery',
+ 'hw_GetObjectByQueryColl',
+ 'hw_GetObjectByQueryCollObj',
+ 'hw_GetObjectByQueryObj',
+ 'hw_GetParents',
+ 'hw_GetParentsObj',
+ 'hw_getrellink',
+ 'hw_GetRemote',
+ 'hw_getremotechildren',
+ 'hw_GetSrcByDestObj',
+ 'hw_GetText',
+ 'hw_getusername',
+ 'hw_Identify',
+ 'hw_InCollections',
+ 'hw_Info',
+ 'hw_InsColl',
+ 'hw_InsDoc',
+ 'hw_insertanchors',
+ 'hw_InsertDocument',
+ 'hw_InsertObject',
+ 'hw_mapid',
+ 'hw_Modifyobject',
+ 'hw_mv',
+ 'hw_New_Document',
+ 'hw_objrec2array',
+ 'hw_Output_Document',
+ 'hw_pConnect',
+ 'hw_PipeDocument',
+ 'hw_Root',
+ 'hw_setlinkroot',
+ 'hw_stat',
+ 'hw_Unlock',
+ 'hw_Who'),
+ 'Hyperwave API': ('hwapi_attribute_new',
+ 'hwapi_content_new',
+ 'hwapi_hgcsp',
+ 'hwapi_object_new'),
+ 'IBM DB2': ('db2_autocommit',
+ 'db2_bind_param',
+ 'db2_client_info',
+ 'db2_close',
+ 'db2_column_privileges',
+ 'db2_columns',
+ 'db2_commit',
+ 'db2_conn_error',
+ 'db2_conn_errormsg',
+ 'db2_connect',
+ 'db2_cursor_type',
+ 'db2_escape_string',
+ 'db2_exec',
+ 'db2_execute',
+ 'db2_fetch_array',
+ 'db2_fetch_assoc',
+ 'db2_fetch_both',
+ 'db2_fetch_object',
+ 'db2_fetch_row',
+ 'db2_field_display_size',
+ 'db2_field_name',
+ 'db2_field_num',
+ 'db2_field_precision',
+ 'db2_field_scale',
+ 'db2_field_type',
+ 'db2_field_width',
+ 'db2_foreign_keys',
+ 'db2_free_result',
+ 'db2_free_stmt',
+ 'db2_get_option',
+ 'db2_last_insert_id',
+ 'db2_lob_read',
+ 'db2_next_result',
+ 'db2_num_fields',
+ 'db2_num_rows',
+ 'db2_pclose',
+ 'db2_pconnect',
+ 'db2_prepare',
+ 'db2_primary_keys',
+ 'db2_procedure_columns',
+ 'db2_procedures',
+ 'db2_result',
+ 'db2_rollback',
+ 'db2_server_info',
+ 'db2_set_option',
+ 'db2_special_columns',
+ 'db2_statistics',
+ 'db2_stmt_error',
+ 'db2_stmt_errormsg',
+ 'db2_table_privileges',
+ 'db2_tables'),
+ 'ID3': ('id3_get_frame_long_name',
+ 'id3_get_frame_short_name',
+ 'id3_get_genre_id',
+ 'id3_get_genre_list',
+ 'id3_get_genre_name',
+ 'id3_get_tag',
+ 'id3_get_version',
+ 'id3_remove_tag',
+ 'id3_set_tag'),
+ 'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'),
+ 'IIS': ('iis_add_server',
+ 'iis_get_dir_security',
+ 'iis_get_script_map',
+ 'iis_get_server_by_comment',
+ 'iis_get_server_by_path',
+ 'iis_get_server_rights',
+ 'iis_get_service_state',
+ 'iis_remove_server',
+ 'iis_set_app_settings',
+ 'iis_set_dir_security',
+ 'iis_set_script_map',
+ 'iis_set_server_rights',
+ 'iis_start_server',
+ 'iis_start_service',
+ 'iis_stop_server',
+ 'iis_stop_service'),
+ 'IMAP': ('imap_8bit',
+ 'imap_alerts',
+ 'imap_append',
+ 'imap_base64',
+ 'imap_binary',
+ 'imap_body',
+ 'imap_bodystruct',
+ 'imap_check',
+ 'imap_clearflag_full',
+ 'imap_close',
+ 'imap_create',
+ 'imap_createmailbox',
+ 'imap_delete',
+ 'imap_deletemailbox',
+ 'imap_errors',
+ 'imap_expunge',
+ 'imap_fetch_overview',
+ 'imap_fetchbody',
+ 'imap_fetchheader',
+ 'imap_fetchmime',
+ 'imap_fetchstructure',
+ 'imap_fetchtext',
+ 'imap_gc',
+ 'imap_get_quota',
+ 'imap_get_quotaroot',
+ 'imap_getacl',
+ 'imap_getmailboxes',
+ 'imap_getsubscribed',
+ 'imap_header',
+ 'imap_headerinfo',
+ 'imap_headers',
+ 'imap_last_error',
+ 'imap_list',
+ 'imap_listmailbox',
+ 'imap_listscan',
+ 'imap_listsubscribed',
+ 'imap_lsub',
+ 'imap_mail_compose',
+ 'imap_mail_copy',
+ 'imap_mail_move',
+ 'imap_mail',
+ 'imap_mailboxmsginfo',
+ 'imap_mime_header_decode',
+ 'imap_msgno',
+ 'imap_num_msg',
+ 'imap_num_recent',
+ 'imap_open',
+ 'imap_ping',
+ 'imap_qprint',
+ 'imap_rename',
+ 'imap_renamemailbox',
+ 'imap_reopen',
+ 'imap_rfc822_parse_adrlist',
+ 'imap_rfc822_parse_headers',
+ 'imap_rfc822_write_address',
+ 'imap_savebody',
+ 'imap_scan',
+ 'imap_scanmailbox',
+ 'imap_search',
+ 'imap_set_quota',
+ 'imap_setacl',
+ 'imap_setflag_full',
+ 'imap_sort',
+ 'imap_status',
+ 'imap_subscribe',
+ 'imap_thread',
+ 'imap_timeout',
+ 'imap_uid',
+ 'imap_undelete',
+ 'imap_unsubscribe',
+ 'imap_utf7_decode',
+ 'imap_utf7_encode',
+ 'imap_utf8'),
+ 'Informix': ('ifx_affected_rows',
+ 'ifx_blobinfile_mode',
+ 'ifx_byteasvarchar',
+ 'ifx_close',
+ 'ifx_connect',
+ 'ifx_copy_blob',
+ 'ifx_create_blob',
+ 'ifx_create_char',
+ 'ifx_do',
+ 'ifx_error',
+ 'ifx_errormsg',
+ 'ifx_fetch_row',
+ 'ifx_fieldproperties',
+ 'ifx_fieldtypes',
+ 'ifx_free_blob',
+ 'ifx_free_char',
+ 'ifx_free_result',
+ 'ifx_get_blob',
+ 'ifx_get_char',
+ 'ifx_getsqlca',
+ 'ifx_htmltbl_result',
+ 'ifx_nullformat',
+ 'ifx_num_fields',
+ 'ifx_num_rows',
+ 'ifx_pconnect',
+ 'ifx_prepare',
+ 'ifx_query',
+ 'ifx_textasvarchar',
+ 'ifx_update_blob',
+ 'ifx_update_char',
+ 'ifxus_close_slob',
+ 'ifxus_create_slob',
+ 'ifxus_free_slob',
+ 'ifxus_open_slob',
+ 'ifxus_read_slob',
+ 'ifxus_seek_slob',
+ 'ifxus_tell_slob',
+ 'ifxus_write_slob'),
+ 'Ingres': ('ingres_autocommit_state',
+ 'ingres_autocommit',
+ 'ingres_charset',
+ 'ingres_close',
+ 'ingres_commit',
+ 'ingres_connect',
+ 'ingres_cursor',
+ 'ingres_errno',
+ 'ingres_error',
+ 'ingres_errsqlstate',
+ 'ingres_escape_string',
+ 'ingres_execute',
+ 'ingres_fetch_array',
+ 'ingres_fetch_assoc',
+ 'ingres_fetch_object',
+ 'ingres_fetch_proc_return',
+ 'ingres_fetch_row',
+ 'ingres_field_length',
+ 'ingres_field_name',
+ 'ingres_field_nullable',
+ 'ingres_field_precision',
+ 'ingres_field_scale',
+ 'ingres_field_type',
+ 'ingres_free_result',
+ 'ingres_next_error',
+ 'ingres_num_fields',
+ 'ingres_num_rows',
+ 'ingres_pconnect',
+ 'ingres_prepare',
+ 'ingres_query',
+ 'ingres_result_seek',
+ 'ingres_rollback',
+ 'ingres_set_environment',
+ 'ingres_unbuffered_query'),
+ 'Inotify': ('inotify_add_watch',
+ 'inotify_init',
+ 'inotify_queue_len',
+ 'inotify_read',
+ 'inotify_rm_watch'),
+ 'JSON': ('json_decode',
+ 'json_encode',
+ 'json_last_error_msg',
+ 'json_last_error'),
+ 'Java': ('java_last_exception_clear', 'java_last_exception_get'),
+ 'Judy': ('judy_type', 'judy_version'),
+ 'KADM5': ('kadm5_chpass_principal',
+ 'kadm5_create_principal',
+ 'kadm5_delete_principal',
+ 'kadm5_destroy',
+ 'kadm5_flush',
+ 'kadm5_get_policies',
+ 'kadm5_get_principal',
+ 'kadm5_get_principals',
+ 'kadm5_init_with_password',
+ 'kadm5_modify_principal'),
+ 'LDAP': ('ldap_8859_to_t61',
+ 'ldap_add',
+ 'ldap_bind',
+ 'ldap_close',
+ 'ldap_compare',
+ 'ldap_connect',
+ 'ldap_control_paged_result_response',
+ 'ldap_control_paged_result',
+ 'ldap_count_entries',
+ 'ldap_delete',
+ 'ldap_dn2ufn',
+ 'ldap_err2str',
+ 'ldap_errno',
+ 'ldap_error',
+ 'ldap_explode_dn',
+ 'ldap_first_attribute',
+ 'ldap_first_entry',
+ 'ldap_first_reference',
+ 'ldap_free_result',
+ 'ldap_get_attributes',
+ 'ldap_get_dn',
+ 'ldap_get_entries',
+ 'ldap_get_option',
+ 'ldap_get_values_len',
+ 'ldap_get_values',
+ 'ldap_list',
+ 'ldap_mod_add',
+ 'ldap_mod_del',
+ 'ldap_mod_replace',
+ 'ldap_modify',
+ 'ldap_next_attribute',
+ 'ldap_next_entry',
+ 'ldap_next_reference',
+ 'ldap_parse_reference',
+ 'ldap_parse_result',
+ 'ldap_read',
+ 'ldap_rename',
+ 'ldap_sasl_bind',
+ 'ldap_search',
+ 'ldap_set_option',
+ 'ldap_set_rebind_proc',
+ 'ldap_sort',
+ 'ldap_start_tls',
+ 'ldap_t61_to_8859',
+ 'ldap_unbind'),
+ 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
+ 'Libevent': ('event_add',
+ 'event_base_free',
+ 'event_base_loop',
+ 'event_base_loopbreak',
+ 'event_base_loopexit',
+ 'event_base_new',
+ 'event_base_priority_init',
+ 'event_base_set',
+ 'event_buffer_base_set',
+ 'event_buffer_disable',
+ 'event_buffer_enable',
+ 'event_buffer_fd_set',
+ 'event_buffer_free',
+ 'event_buffer_new',
+ 'event_buffer_priority_set',
+ 'event_buffer_read',
+ 'event_buffer_set_callback',
+ 'event_buffer_timeout_set',
+ 'event_buffer_watermark_set',
+ 'event_buffer_write',
+ 'event_del',
+ 'event_free',
+ 'event_new',
+ 'event_set'),
+ 'Lotus Notes': ('notes_body',
+ 'notes_copy_db',
+ 'notes_create_db',
+ 'notes_create_note',
+ 'notes_drop_db',
+ 'notes_find_note',
+ 'notes_header_info',
+ 'notes_list_msgs',
+ 'notes_mark_read',
+ 'notes_mark_unread',
+ 'notes_nav_create',
+ 'notes_search',
+ 'notes_unread',
+ 'notes_version'),
+ 'MCVE': ('m_checkstatus',
+ 'm_completeauthorizations',
+ 'm_connect',
+ 'm_connectionerror',
+ 'm_deletetrans',
+ 'm_destroyconn',
+ 'm_destroyengine',
+ 'm_getcell',
+ 'm_getcellbynum',
+ 'm_getcommadelimited',
+ 'm_getheader',
+ 'm_initconn',
+ 'm_initengine',
+ 'm_iscommadelimited',
+ 'm_maxconntimeout',
+ 'm_monitor',
+ 'm_numcolumns',
+ 'm_numrows',
+ 'm_parsecommadelimited',
+ 'm_responsekeys',
+ 'm_responseparam',
+ 'm_returnstatus',
+ 'm_setblocking',
+ 'm_setdropfile',
+ 'm_setip',
+ 'm_setssl_cafile',
+ 'm_setssl_files',
+ 'm_setssl',
+ 'm_settimeout',
+ 'm_sslcert_gen_hash',
+ 'm_transactionssent',
+ 'm_transinqueue',
+ 'm_transkeyval',
+ 'm_transnew',
+ 'm_transsend',
+ 'm_uwait',
+ 'm_validateidentifier',
+ 'm_verifyconnection',
+ 'm_verifysslcert'),
+ 'Mail': ('ezmlm_hash', 'mail'),
+ 'Mailparse': ('mailparse_determine_best_xfer_encoding',
+ 'mailparse_msg_create',
+ 'mailparse_msg_extract_part_file',
+ 'mailparse_msg_extract_part',
+ 'mailparse_msg_extract_whole_part_file',
+ 'mailparse_msg_free',
+ 'mailparse_msg_get_part_data',
+ 'mailparse_msg_get_part',
+ 'mailparse_msg_get_structure',
+ 'mailparse_msg_parse_file',
+ 'mailparse_msg_parse',
+ 'mailparse_rfc822_parse_addresses',
+ 'mailparse_stream_encode',
+ 'mailparse_uudecode_all'),
+ 'Math': ('abs',
+ 'acos',
+ 'acosh',
+ 'asin',
+ 'asinh',
+ 'atan2',
+ 'atan',
+ 'atanh',
+ 'base_convert',
+ 'bindec',
+ 'ceil',
+ 'cos',
+ 'cosh',
+ 'decbin',
+ 'dechex',
+ 'decoct',
+ 'deg2rad',
+ 'exp',
+ 'expm1',
+ 'floor',
+ 'fmod',
+ 'getrandmax',
+ 'hexdec',
+ 'hypot',
+ 'is_finite',
+ 'is_infinite',
+ 'is_nan',
+ 'lcg_value',
+ 'log10',
+ 'log1p',
+ 'log',
+ 'max',
+ 'min',
+ 'mt_getrandmax',
+ 'mt_rand',
+ 'mt_srand',
+ 'octdec',
+ 'pi',
+ 'pow',
+ 'rad2deg',
+ 'rand',
+ 'round',
+ 'sin',
+ 'sinh',
+ 'sqrt',
+ 'srand',
+ 'tan',
+ 'tanh'),
+ 'MaxDB': ('maxdb_affected_rows',
+ 'maxdb_autocommit',
+ 'maxdb_bind_param',
+ 'maxdb_bind_result',
+ 'maxdb_change_user',
+ 'maxdb_character_set_name',
+ 'maxdb_client_encoding',
+ 'maxdb_close_long_data',
+ 'maxdb_close',
+ 'maxdb_commit',
+ 'maxdb_connect_errno',
+ 'maxdb_connect_error',
+ 'maxdb_connect',
+ 'maxdb_data_seek',
+ 'maxdb_debug',
+ 'maxdb_disable_reads_from_master',
+ 'maxdb_disable_rpl_parse',
+ 'maxdb_dump_debug_info',
+ 'maxdb_embedded_connect',
+ 'maxdb_enable_reads_from_master',
+ 'maxdb_enable_rpl_parse',
+ 'maxdb_errno',
+ 'maxdb_error',
+ 'maxdb_escape_string',
+ 'maxdb_execute',
+ 'maxdb_fetch_array',
+ 'maxdb_fetch_assoc',
+ 'maxdb_fetch_field_direct',
+ 'maxdb_fetch_field',
+ 'maxdb_fetch_fields',
+ 'maxdb_fetch_lengths',
+ 'maxdb_fetch_object',
+ 'maxdb_fetch_row',
+ 'maxdb_fetch',
+ 'maxdb_field_count',
+ 'maxdb_field_seek',
+ 'maxdb_field_tell',
+ 'maxdb_free_result',
+ 'maxdb_get_client_info',
+ 'maxdb_get_client_version',
+ 'maxdb_get_host_info',
+ 'maxdb_get_metadata',
+ 'maxdb_get_proto_info',
+ 'maxdb_get_server_info',
+ 'maxdb_get_server_version',
+ 'maxdb_info',
+ 'maxdb_init',
+ 'maxdb_insert_id',
+ 'maxdb_kill',
+ 'maxdb_master_query',
+ 'maxdb_more_results',
+ 'maxdb_multi_query',
+ 'maxdb_next_result',
+ 'maxdb_num_fields',
+ 'maxdb_num_rows',
+ 'maxdb_options',
+ 'maxdb_param_count',
+ 'maxdb_ping',
+ 'maxdb_prepare',
+ 'maxdb_query',
+ 'maxdb_real_connect',
+ 'maxdb_real_escape_string',
+ 'maxdb_real_query',
+ 'maxdb_report',
+ 'maxdb_rollback',
+ 'maxdb_rpl_parse_enabled',
+ 'maxdb_rpl_probe',
+ 'maxdb_rpl_query_type',
+ 'maxdb_select_db',
+ 'maxdb_send_long_data',
+ 'maxdb_send_query',
+ 'maxdb_server_end',
+ 'maxdb_server_init',
+ 'maxdb_set_opt',
+ 'maxdb_sqlstate',
+ 'maxdb_ssl_set',
+ 'maxdb_stat',
+ 'maxdb_stmt_affected_rows',
+ 'maxdb_stmt_bind_param',
+ 'maxdb_stmt_bind_result',
+ 'maxdb_stmt_close_long_data',
+ 'maxdb_stmt_close',
+ 'maxdb_stmt_data_seek',
+ 'maxdb_stmt_errno',
+ 'maxdb_stmt_error',
+ 'maxdb_stmt_execute',
+ 'maxdb_stmt_fetch',
+ 'maxdb_stmt_free_result',
+ 'maxdb_stmt_init',
+ 'maxdb_stmt_num_rows',
+ 'maxdb_stmt_param_count',
+ 'maxdb_stmt_prepare',
+ 'maxdb_stmt_reset',
+ 'maxdb_stmt_result_metadata',
+ 'maxdb_stmt_send_long_data',
+ 'maxdb_stmt_sqlstate',
+ 'maxdb_stmt_store_result',
+ 'maxdb_store_result',
+ 'maxdb_thread_id',
+ 'maxdb_thread_safe',
+ 'maxdb_use_result',
+ 'maxdb_warning_count'),
+ 'Mcrypt': ('mcrypt_cbc',
+ 'mcrypt_cfb',
+ 'mcrypt_create_iv',
+ 'mcrypt_decrypt',
+ 'mcrypt_ecb',
+ 'mcrypt_enc_get_algorithms_name',
+ 'mcrypt_enc_get_block_size',
+ 'mcrypt_enc_get_iv_size',
+ 'mcrypt_enc_get_key_size',
+ 'mcrypt_enc_get_modes_name',
+ 'mcrypt_enc_get_supported_key_sizes',
+ 'mcrypt_enc_is_block_algorithm_mode',
+ 'mcrypt_enc_is_block_algorithm',
+ 'mcrypt_enc_is_block_mode',
+ 'mcrypt_enc_self_test',
+ 'mcrypt_encrypt',
+ 'mcrypt_generic_deinit',
+ 'mcrypt_generic_end',
+ 'mcrypt_generic_init',
+ 'mcrypt_generic',
+ 'mcrypt_get_block_size',
+ 'mcrypt_get_cipher_name',
+ 'mcrypt_get_iv_size',
+ 'mcrypt_get_key_size',
+ 'mcrypt_list_algorithms',
+ 'mcrypt_list_modes',
+ 'mcrypt_module_close',
+ 'mcrypt_module_get_algo_block_size',
+ 'mcrypt_module_get_algo_key_size',
+ 'mcrypt_module_get_supported_key_sizes',
+ 'mcrypt_module_is_block_algorithm_mode',
+ 'mcrypt_module_is_block_algorithm',
+ 'mcrypt_module_is_block_mode',
+ 'mcrypt_module_open',
+ 'mcrypt_module_self_test',
+ 'mcrypt_ofb',
+ 'mdecrypt_generic'),
+ 'Memcache': ('memcache_debug',),
+ 'Mhash': ('mhash_count',
+ 'mhash_get_block_size',
+ 'mhash_get_hash_name',
+ 'mhash_keygen_s2k',
+ 'mhash'),
+ 'Ming': ('ming_keypress',
+ 'ming_setcubicthreshold',
+ 'ming_setscale',
+ 'ming_setswfcompression',
+ 'ming_useconstants',
+ 'ming_useswfversion'),
+ 'Misc.': ('connection_aborted',
+ 'connection_status',
+ 'connection_timeout',
+ 'constant',
+ 'define',
+ 'defined',
+ 'die',
+ 'eval',
+ 'exit',
+ 'get_browser',
+ '__halt_compiler',
+ 'highlight_file',
+ 'highlight_string',
+ 'ignore_user_abort',
+ 'pack',
+ 'php_check_syntax',
+ 'php_strip_whitespace',
+ 'show_source',
+ 'sleep',
+ 'sys_getloadavg',
+ 'time_nanosleep',
+ 'time_sleep_until',
+ 'uniqid',
+ 'unpack',
+ 'usleep'),
+ 'Mongo': ('bson_decode', 'bson_encode'),
+ 'Msession': ('msession_connect',
+ 'msession_count',
+ 'msession_create',
+ 'msession_destroy',
+ 'msession_disconnect',
+ 'msession_find',
+ 'msession_get_array',
+ 'msession_get_data',
+ 'msession_get',
+ 'msession_inc',
+ 'msession_list',
+ 'msession_listvar',
+ 'msession_lock',
+ 'msession_plugin',
+ 'msession_randstr',
+ 'msession_set_array',
+ 'msession_set_data',
+ 'msession_set',
+ 'msession_timeout',
+ 'msession_uniq',
+ 'msession_unlock'),
+ 'Mssql': ('mssql_bind',
+ 'mssql_close',
+ 'mssql_connect',
+ 'mssql_data_seek',
+ 'mssql_execute',
+ 'mssql_fetch_array',
+ 'mssql_fetch_assoc',
+ 'mssql_fetch_batch',
+ 'mssql_fetch_field',
+ 'mssql_fetch_object',
+ 'mssql_fetch_row',
+ 'mssql_field_length',
+ 'mssql_field_name',
+ 'mssql_field_seek',
+ 'mssql_field_type',
+ 'mssql_free_result',
+ 'mssql_free_statement',
+ 'mssql_get_last_message',
+ 'mssql_guid_string',
+ 'mssql_init',
+ 'mssql_min_error_severity',
+ 'mssql_min_message_severity',
+ 'mssql_next_result',
+ 'mssql_num_fields',
+ 'mssql_num_rows',
+ 'mssql_pconnect',
+ 'mssql_query',
+ 'mssql_result',
+ 'mssql_rows_affected',
+ 'mssql_select_db'),
+ 'Multibyte String': ('mb_check_encoding',
+ 'mb_convert_case',
+ 'mb_convert_encoding',
+ 'mb_convert_kana',
+ 'mb_convert_variables',
+ 'mb_decode_mimeheader',
+ 'mb_decode_numericentity',
+ 'mb_detect_encoding',
+ 'mb_detect_order',
+ 'mb_encode_mimeheader',
+ 'mb_encode_numericentity',
+ 'mb_encoding_aliases',
+ 'mb_ereg_match',
+ 'mb_ereg_replace_callback',
+ 'mb_ereg_replace',
+ 'mb_ereg_search_getpos',
+ 'mb_ereg_search_getregs',
+ 'mb_ereg_search_init',
+ 'mb_ereg_search_pos',
+ 'mb_ereg_search_regs',
+ 'mb_ereg_search_setpos',
+ 'mb_ereg_search',
+ 'mb_ereg',
+ 'mb_eregi_replace',
+ 'mb_eregi',
+ 'mb_get_info',
+ 'mb_http_input',
+ 'mb_http_output',
+ 'mb_internal_encoding',
+ 'mb_language',
+ 'mb_list_encodings',
+ 'mb_output_handler',
+ 'mb_parse_str',
+ 'mb_preferred_mime_name',
+ 'mb_regex_encoding',
+ 'mb_regex_set_options',
+ 'mb_send_mail',
+ 'mb_split',
+ 'mb_strcut',
+ 'mb_strimwidth',
+ 'mb_stripos',
+ 'mb_stristr',
+ 'mb_strlen',
+ 'mb_strpos',
+ 'mb_strrchr',
+ 'mb_strrichr',
+ 'mb_strripos',
+ 'mb_strrpos',
+ 'mb_strstr',
+ 'mb_strtolower',
+ 'mb_strtoupper',
+ 'mb_strwidth',
+ 'mb_substitute_character',
+ 'mb_substr_count',
+ 'mb_substr'),
+ 'MySQL': ('mysql_affected_rows',
+ 'mysql_client_encoding',
+ 'mysql_close',
+ 'mysql_connect',
+ 'mysql_create_db',
+ 'mysql_data_seek',
+ 'mysql_db_name',
+ 'mysql_db_query',
+ 'mysql_drop_db',
+ 'mysql_errno',
+ 'mysql_error',
+ 'mysql_escape_string',
+ 'mysql_fetch_array',
+ 'mysql_fetch_assoc',
+ 'mysql_fetch_field',
+ 'mysql_fetch_lengths',
+ 'mysql_fetch_object',
+ 'mysql_fetch_row',
+ 'mysql_field_flags',
+ 'mysql_field_len',
+ 'mysql_field_name',
+ 'mysql_field_seek',
+ 'mysql_field_table',
+ 'mysql_field_type',
+ 'mysql_free_result',
+ 'mysql_get_client_info',
+ 'mysql_get_host_info',
+ 'mysql_get_proto_info',
+ 'mysql_get_server_info',
+ 'mysql_info',
+ 'mysql_insert_id',
+ 'mysql_list_dbs',
+ 'mysql_list_fields',
+ 'mysql_list_processes',
+ 'mysql_list_tables',
+ 'mysql_num_fields',
+ 'mysql_num_rows',
+ 'mysql_pconnect',
+ 'mysql_ping',
+ 'mysql_query',
+ 'mysql_real_escape_string',
+ 'mysql_result',
+ 'mysql_select_db',
+ 'mysql_set_charset',
+ 'mysql_stat',
+ 'mysql_tablename',
+ 'mysql_thread_id',
+ 'mysql_unbuffered_query'),
+ 'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'),
+ 'Mysqlnd_ms': ('mysqlnd_ms_dump_servers',
+ 'mysqlnd_ms_fabric_select_global',
+ 'mysqlnd_ms_fabric_select_shard',
+ 'mysqlnd_ms_get_last_gtid',
+ 'mysqlnd_ms_get_last_used_connection',
+ 'mysqlnd_ms_get_stats',
+ 'mysqlnd_ms_match_wild',
+ 'mysqlnd_ms_query_is_select',
+ 'mysqlnd_ms_set_qos',
+ 'mysqlnd_ms_set_user_pick_server'),
+ 'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd',
+ 'mysqlnd_uh_set_connection_proxy',
+ 'mysqlnd_uh_set_statement_proxy'),
+ 'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'),
+ 'Ncurses': ('ncurses_addch',
+ 'ncurses_addchnstr',
+ 'ncurses_addchstr',
+ 'ncurses_addnstr',
+ 'ncurses_addstr',
+ 'ncurses_assume_default_colors',
+ 'ncurses_attroff',
+ 'ncurses_attron',
+ 'ncurses_attrset',
+ 'ncurses_baudrate',
+ 'ncurses_beep',
+ 'ncurses_bkgd',
+ 'ncurses_bkgdset',
+ 'ncurses_border',
+ 'ncurses_bottom_panel',
+ 'ncurses_can_change_color',
+ 'ncurses_cbreak',
+ 'ncurses_clear',
+ 'ncurses_clrtobot',
+ 'ncurses_clrtoeol',
+ 'ncurses_color_content',
+ 'ncurses_color_set',
+ 'ncurses_curs_set',
+ 'ncurses_def_prog_mode',
+ 'ncurses_def_shell_mode',
+ 'ncurses_define_key',
+ 'ncurses_del_panel',
+ 'ncurses_delay_output',
+ 'ncurses_delch',
+ 'ncurses_deleteln',
+ 'ncurses_delwin',
+ 'ncurses_doupdate',
+ 'ncurses_echo',
+ 'ncurses_echochar',
+ 'ncurses_end',
+ 'ncurses_erase',
+ 'ncurses_erasechar',
+ 'ncurses_filter',
+ 'ncurses_flash',
+ 'ncurses_flushinp',
+ 'ncurses_getch',
+ 'ncurses_getmaxyx',
+ 'ncurses_getmouse',
+ 'ncurses_getyx',
+ 'ncurses_halfdelay',
+ 'ncurses_has_colors',
+ 'ncurses_has_ic',
+ 'ncurses_has_il',
+ 'ncurses_has_key',
+ 'ncurses_hide_panel',
+ 'ncurses_hline',
+ 'ncurses_inch',
+ 'ncurses_init_color',
+ 'ncurses_init_pair',
+ 'ncurses_init',
+ 'ncurses_insch',
+ 'ncurses_insdelln',
+ 'ncurses_insertln',
+ 'ncurses_insstr',
+ 'ncurses_instr',
+ 'ncurses_isendwin',
+ 'ncurses_keyok',
+ 'ncurses_keypad',
+ 'ncurses_killchar',
+ 'ncurses_longname',
+ 'ncurses_meta',
+ 'ncurses_mouse_trafo',
+ 'ncurses_mouseinterval',
+ 'ncurses_mousemask',
+ 'ncurses_move_panel',
+ 'ncurses_move',
+ 'ncurses_mvaddch',
+ 'ncurses_mvaddchnstr',
+ 'ncurses_mvaddchstr',
+ 'ncurses_mvaddnstr',
+ 'ncurses_mvaddstr',
+ 'ncurses_mvcur',
+ 'ncurses_mvdelch',
+ 'ncurses_mvgetch',
+ 'ncurses_mvhline',
+ 'ncurses_mvinch',
+ 'ncurses_mvvline',
+ 'ncurses_mvwaddstr',
+ 'ncurses_napms',
+ 'ncurses_new_panel',
+ 'ncurses_newpad',
+ 'ncurses_newwin',
+ 'ncurses_nl',
+ 'ncurses_nocbreak',
+ 'ncurses_noecho',
+ 'ncurses_nonl',
+ 'ncurses_noqiflush',
+ 'ncurses_noraw',
+ 'ncurses_pair_content',
+ 'ncurses_panel_above',
+ 'ncurses_panel_below',
+ 'ncurses_panel_window',
+ 'ncurses_pnoutrefresh',
+ 'ncurses_prefresh',
+ 'ncurses_putp',
+ 'ncurses_qiflush',
+ 'ncurses_raw',
+ 'ncurses_refresh',
+ 'ncurses_replace_panel',
+ 'ncurses_reset_prog_mode',
+ 'ncurses_reset_shell_mode',
+ 'ncurses_resetty',
+ 'ncurses_savetty',
+ 'ncurses_scr_dump',
+ 'ncurses_scr_init',
+ 'ncurses_scr_restore',
+ 'ncurses_scr_set',
+ 'ncurses_scrl',
+ 'ncurses_show_panel',
+ 'ncurses_slk_attr',
+ 'ncurses_slk_attroff',
+ 'ncurses_slk_attron',
+ 'ncurses_slk_attrset',
+ 'ncurses_slk_clear',
+ 'ncurses_slk_color',
+ 'ncurses_slk_init',
+ 'ncurses_slk_noutrefresh',
+ 'ncurses_slk_refresh',
+ 'ncurses_slk_restore',
+ 'ncurses_slk_set',
+ 'ncurses_slk_touch',
+ 'ncurses_standend',
+ 'ncurses_standout',
+ 'ncurses_start_color',
+ 'ncurses_termattrs',
+ 'ncurses_termname',
+ 'ncurses_timeout',
+ 'ncurses_top_panel',
+ 'ncurses_typeahead',
+ 'ncurses_ungetch',
+ 'ncurses_ungetmouse',
+ 'ncurses_update_panels',
+ 'ncurses_use_default_colors',
+ 'ncurses_use_env',
+ 'ncurses_use_extended_names',
+ 'ncurses_vidattr',
+ 'ncurses_vline',
+ 'ncurses_waddch',
+ 'ncurses_waddstr',
+ 'ncurses_wattroff',
+ 'ncurses_wattron',
+ 'ncurses_wattrset',
+ 'ncurses_wborder',
+ 'ncurses_wclear',
+ 'ncurses_wcolor_set',
+ 'ncurses_werase',
+ 'ncurses_wgetch',
+ 'ncurses_whline',
+ 'ncurses_wmouse_trafo',
+ 'ncurses_wmove',
+ 'ncurses_wnoutrefresh',
+ 'ncurses_wrefresh',
+ 'ncurses_wstandend',
+ 'ncurses_wstandout',
+ 'ncurses_wvline'),
+ 'Network': ('checkdnsrr',
+ 'closelog',
+ 'define_syslog_variables',
+ 'dns_check_record',
+ 'dns_get_mx',
+ 'dns_get_record',
+ 'fsockopen',
+ 'gethostbyaddr',
+ 'gethostbyname',
+ 'gethostbynamel',
+ 'gethostname',
+ 'getmxrr',
+ 'getprotobyname',
+ 'getprotobynumber',
+ 'getservbyname',
+ 'getservbyport',
+ 'header_register_callback',
+ 'header_remove',
+ 'header',
+ 'headers_list',
+ 'headers_sent',
+ 'http_response_code',
+ 'inet_ntop',
+ 'inet_pton',
+ 'ip2long',
+ 'long2ip',
+ 'openlog',
+ 'pfsockopen',
+ 'setcookie',
+ 'setrawcookie',
+ 'socket_get_status',
+ 'socket_set_blocking',
+ 'socket_set_timeout',
+ 'syslog'),
+ 'Newt': ('newt_bell',
+ 'newt_button_bar',
+ 'newt_button',
+ 'newt_centered_window',
+ 'newt_checkbox_get_value',
+ 'newt_checkbox_set_flags',
+ 'newt_checkbox_set_value',
+ 'newt_checkbox_tree_add_item',
+ 'newt_checkbox_tree_find_item',
+ 'newt_checkbox_tree_get_current',
+ 'newt_checkbox_tree_get_entry_value',
+ 'newt_checkbox_tree_get_multi_selection',
+ 'newt_checkbox_tree_get_selection',
+ 'newt_checkbox_tree_multi',
+ 'newt_checkbox_tree_set_current',
+ 'newt_checkbox_tree_set_entry_value',
+ 'newt_checkbox_tree_set_entry',
+ 'newt_checkbox_tree_set_width',
+ 'newt_checkbox_tree',
+ 'newt_checkbox',
+ 'newt_clear_key_buffer',
+ 'newt_cls',
+ 'newt_compact_button',
+ 'newt_component_add_callback',
+ 'newt_component_takes_focus',
+ 'newt_create_grid',
+ 'newt_cursor_off',
+ 'newt_cursor_on',
+ 'newt_delay',
+ 'newt_draw_form',
+ 'newt_draw_root_text',
+ 'newt_entry_get_value',
+ 'newt_entry_set_filter',
+ 'newt_entry_set_flags',
+ 'newt_entry_set',
+ 'newt_entry',
+ 'newt_finished',
+ 'newt_form_add_component',
+ 'newt_form_add_components',
+ 'newt_form_add_hot_key',
+ 'newt_form_destroy',
+ 'newt_form_get_current',
+ 'newt_form_run',
+ 'newt_form_set_background',
+ 'newt_form_set_height',
+ 'newt_form_set_size',
+ 'newt_form_set_timer',
+ 'newt_form_set_width',
+ 'newt_form_watch_fd',
+ 'newt_form',
+ 'newt_get_screen_size',
+ 'newt_grid_add_components_to_form',
+ 'newt_grid_basic_window',
+ 'newt_grid_free',
+ 'newt_grid_get_size',
+ 'newt_grid_h_close_stacked',
+ 'newt_grid_h_stacked',
+ 'newt_grid_place',
+ 'newt_grid_set_field',
+ 'newt_grid_simple_window',
+ 'newt_grid_v_close_stacked',
+ 'newt_grid_v_stacked',
+ 'newt_grid_wrapped_window_at',
+ 'newt_grid_wrapped_window',
+ 'newt_init',
+ 'newt_label_set_text',
+ 'newt_label',
+ 'newt_listbox_append_entry',
+ 'newt_listbox_clear_selection',
+ 'newt_listbox_clear',
+ 'newt_listbox_delete_entry',
+ 'newt_listbox_get_current',
+ 'newt_listbox_get_selection',
+ 'newt_listbox_insert_entry',
+ 'newt_listbox_item_count',
+ 'newt_listbox_select_item',
+ 'newt_listbox_set_current_by_key',
+ 'newt_listbox_set_current',
+ 'newt_listbox_set_data',
+ 'newt_listbox_set_entry',
+ 'newt_listbox_set_width',
+ 'newt_listbox',
+ 'newt_listitem_get_data',
+ 'newt_listitem_set',
+ 'newt_listitem',
+ 'newt_open_window',
+ 'newt_pop_help_line',
+ 'newt_pop_window',
+ 'newt_push_help_line',
+ 'newt_radio_get_current',
+ 'newt_radiobutton',
+ 'newt_redraw_help_line',
+ 'newt_reflow_text',
+ 'newt_refresh',
+ 'newt_resize_screen',
+ 'newt_resume',
+ 'newt_run_form',
+ 'newt_scale_set',
+ 'newt_scale',
+ 'newt_scrollbar_set',
+ 'newt_set_help_callback',
+ 'newt_set_suspend_callback',
+ 'newt_suspend',
+ 'newt_textbox_get_num_lines',
+ 'newt_textbox_reflowed',
+ 'newt_textbox_set_height',
+ 'newt_textbox_set_text',
+ 'newt_textbox',
+ 'newt_vertical_scrollbar',
+ 'newt_wait_for_key',
+ 'newt_win_choice',
+ 'newt_win_entries',
+ 'newt_win_menu',
+ 'newt_win_message',
+ 'newt_win_messagev',
+ 'newt_win_ternary'),
+ 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
+ 'OCI8': ('oci_bind_array_by_name',
+ 'oci_bind_by_name',
+ 'oci_cancel',
+ 'oci_client_version',
+ 'oci_close',
+ 'oci_commit',
+ 'oci_connect',
+ 'oci_define_by_name',
+ 'oci_error',
+ 'oci_execute',
+ 'oci_fetch_all',
+ 'oci_fetch_array',
+ 'oci_fetch_assoc',
+ 'oci_fetch_object',
+ 'oci_fetch_row',
+ 'oci_fetch',
+ 'oci_field_is_null',
+ 'oci_field_name',
+ 'oci_field_precision',
+ 'oci_field_scale',
+ 'oci_field_size',
+ 'oci_field_type_raw',
+ 'oci_field_type',
+ 'oci_free_descriptor',
+ 'oci_free_statement',
+ 'oci_get_implicit_resultset',
+ 'oci_internal_debug',
+ 'oci_lob_copy',
+ 'oci_lob_is_equal',
+ 'oci_new_collection',
+ 'oci_new_connect',
+ 'oci_new_cursor',
+ 'oci_new_descriptor',
+ 'oci_num_fields',
+ 'oci_num_rows',
+ 'oci_parse',
+ 'oci_password_change',
+ 'oci_pconnect',
+ 'oci_result',
+ 'oci_rollback',
+ 'oci_server_version',
+ 'oci_set_action',
+ 'oci_set_client_identifier',
+ 'oci_set_client_info',
+ 'oci_set_edition',
+ 'oci_set_module_name',
+ 'oci_set_prefetch',
+ 'oci_statement_type'),
+ 'ODBC': ('odbc_autocommit',
+ 'odbc_binmode',
+ 'odbc_close_all',
+ 'odbc_close',
+ 'odbc_columnprivileges',
+ 'odbc_columns',
+ 'odbc_commit',
+ 'odbc_connect',
+ 'odbc_cursor',
+ 'odbc_data_source',
+ 'odbc_do',
+ 'odbc_error',
+ 'odbc_errormsg',
+ 'odbc_exec',
+ 'odbc_execute',
+ 'odbc_fetch_array',
+ 'odbc_fetch_into',
+ 'odbc_fetch_object',
+ 'odbc_fetch_row',
+ 'odbc_field_len',
+ 'odbc_field_name',
+ 'odbc_field_num',
+ 'odbc_field_precision',
+ 'odbc_field_scale',
+ 'odbc_field_type',
+ 'odbc_foreignkeys',
+ 'odbc_free_result',
+ 'odbc_gettypeinfo',
+ 'odbc_longreadlen',
+ 'odbc_next_result',
+ 'odbc_num_fields',
+ 'odbc_num_rows',
+ 'odbc_pconnect',
+ 'odbc_prepare',
+ 'odbc_primarykeys',
+ 'odbc_procedurecolumns',
+ 'odbc_procedures',
+ 'odbc_result_all',
+ 'odbc_result',
+ 'odbc_rollback',
+ 'odbc_setoption',
+ 'odbc_specialcolumns',
+ 'odbc_statistics',
+ 'odbc_tableprivileges',
+ 'odbc_tables'),
+ 'OPcache': ('opcache_compile_file',
+ 'opcache_get_configuration',
+ 'opcache_get_status',
+ 'opcache_invalidate',
+ 'opcache_reset'),
+ 'Object Aggregation': ('aggregate_info',
+ 'aggregate_methods_by_list',
+ 'aggregate_methods_by_regexp',
+ 'aggregate_methods',
+ 'aggregate_properties_by_list',
+ 'aggregate_properties_by_regexp',
+ 'aggregate_properties',
+ 'aggregate',
+ 'aggregation_info',
+ 'deaggregate'),
+ 'OpenAL': ('openal_buffer_create',
+ 'openal_buffer_data',
+ 'openal_buffer_destroy',
+ 'openal_buffer_get',
+ 'openal_buffer_loadwav',
+ 'openal_context_create',
+ 'openal_context_current',
+ 'openal_context_destroy',
+ 'openal_context_process',
+ 'openal_context_suspend',
+ 'openal_device_close',
+ 'openal_device_open',
+ 'openal_listener_get',
+ 'openal_listener_set',
+ 'openal_source_create',
+ 'openal_source_destroy',
+ 'openal_source_get',
+ 'openal_source_pause',
+ 'openal_source_play',
+ 'openal_source_rewind',
+ 'openal_source_set',
+ 'openal_source_stop',
+ 'openal_stream'),
+ 'OpenSSL': ('openssl_cipher_iv_length',
+ 'openssl_csr_export_to_file',
+ 'openssl_csr_export',
+ 'openssl_csr_get_public_key',
+ 'openssl_csr_get_subject',
+ 'openssl_csr_new',
+ 'openssl_csr_sign',
+ 'openssl_decrypt',
+ 'openssl_dh_compute_key',
+ 'openssl_digest',
+ 'openssl_encrypt',
+ 'openssl_error_string',
+ 'openssl_free_key',
+ 'openssl_get_cipher_methods',
+ 'openssl_get_md_methods',
+ 'openssl_get_privatekey',
+ 'openssl_get_publickey',
+ 'openssl_open',
+ 'openssl_pbkdf2',
+ 'openssl_pkcs12_export_to_file',
+ 'openssl_pkcs12_export',
+ 'openssl_pkcs12_read',
+ 'openssl_pkcs7_decrypt',
+ 'openssl_pkcs7_encrypt',
+ 'openssl_pkcs7_sign',
+ 'openssl_pkcs7_verify',
+ 'openssl_pkey_export_to_file',
+ 'openssl_pkey_export',
+ 'openssl_pkey_free',
+ 'openssl_pkey_get_details',
+ 'openssl_pkey_get_private',
+ 'openssl_pkey_get_public',
+ 'openssl_pkey_new',
+ 'openssl_private_decrypt',
+ 'openssl_private_encrypt',
+ 'openssl_public_decrypt',
+ 'openssl_public_encrypt',
+ 'openssl_random_pseudo_bytes',
+ 'openssl_seal',
+ 'openssl_sign',
+ 'openssl_spki_export_challenge',
+ 'openssl_spki_export',
+ 'openssl_spki_new',
+ 'openssl_spki_verify',
+ 'openssl_verify',
+ 'openssl_x509_check_private_key',
+ 'openssl_x509_checkpurpose',
+ 'openssl_x509_export_to_file',
+ 'openssl_x509_export',
+ 'openssl_x509_free',
+ 'openssl_x509_parse',
+ 'openssl_x509_read'),
+ 'Output Control': ('flush',
+ 'ob_clean',
+ 'ob_end_clean',
+ 'ob_end_flush',
+ 'ob_flush',
+ 'ob_get_clean',
+ 'ob_get_contents',
+ 'ob_get_flush',
+ 'ob_get_length',
+ 'ob_get_level',
+ 'ob_get_status',
+ 'ob_gzhandler',
+ 'ob_implicit_flush',
+ 'ob_list_handlers',
+ 'ob_start',
+ 'output_add_rewrite_var',
+ 'output_reset_rewrite_vars'),
+ 'Ovrimos SQL': ('ovrimos_close',
+ 'ovrimos_commit',
+ 'ovrimos_connect',
+ 'ovrimos_cursor',
+ 'ovrimos_exec',
+ 'ovrimos_execute',
+ 'ovrimos_fetch_into',
+ 'ovrimos_fetch_row',
+ 'ovrimos_field_len',
+ 'ovrimos_field_name',
+ 'ovrimos_field_num',
+ 'ovrimos_field_type',
+ 'ovrimos_free_result',
+ 'ovrimos_longreadlen',
+ 'ovrimos_num_fields',
+ 'ovrimos_num_rows',
+ 'ovrimos_prepare',
+ 'ovrimos_result_all',
+ 'ovrimos_result',
+ 'ovrimos_rollback'),
+ 'PCNTL': ('pcntl_alarm',
+ 'pcntl_errno',
+ 'pcntl_exec',
+ 'pcntl_fork',
+ 'pcntl_get_last_error',
+ 'pcntl_getpriority',
+ 'pcntl_setpriority',
+ 'pcntl_signal_dispatch',
+ 'pcntl_signal',
+ 'pcntl_sigprocmask',
+ 'pcntl_sigtimedwait',
+ 'pcntl_sigwaitinfo',
+ 'pcntl_strerror',
+ 'pcntl_wait',
+ 'pcntl_waitpid',
+ 'pcntl_wexitstatus',
+ 'pcntl_wifexited',
+ 'pcntl_wifsignaled',
+ 'pcntl_wifstopped',
+ 'pcntl_wstopsig',
+ 'pcntl_wtermsig'),
+ 'PCRE': ('preg_filter',
+ 'preg_grep',
+ 'preg_last_error',
+ 'preg_match_all',
+ 'preg_match',
+ 'preg_quote',
+ 'preg_replace_callback',
+ 'preg_replace',
+ 'preg_split'),
+ 'PDF': ('PDF_activate_item',
+ 'PDF_add_annotation',
+ 'PDF_add_bookmark',
+ 'PDF_add_launchlink',
+ 'PDF_add_locallink',
+ 'PDF_add_nameddest',
+ 'PDF_add_note',
+ 'PDF_add_outline',
+ 'PDF_add_pdflink',
+ 'PDF_add_table_cell',
+ 'PDF_add_textflow',
+ 'PDF_add_thumbnail',
+ 'PDF_add_weblink',
+ 'PDF_arc',
+ 'PDF_arcn',
+ 'PDF_attach_file',
+ 'PDF_begin_document',
+ 'PDF_begin_font',
+ 'PDF_begin_glyph',
+ 'PDF_begin_item',
+ 'PDF_begin_layer',
+ 'PDF_begin_page_ext',
+ 'PDF_begin_page',
+ 'PDF_begin_pattern',
+ 'PDF_begin_template_ext',
+ 'PDF_begin_template',
+ 'PDF_circle',
+ 'PDF_clip',
+ 'PDF_close_image',
+ 'PDF_close_pdi_page',
+ 'PDF_close_pdi',
+ 'PDF_close',
+ 'PDF_closepath_fill_stroke',
+ 'PDF_closepath_stroke',
+ 'PDF_closepath',
+ 'PDF_concat',
+ 'PDF_continue_text',
+ 'PDF_create_3dview',
+ 'PDF_create_action',
+ 'PDF_create_annotation',
+ 'PDF_create_bookmark',
+ 'PDF_create_field',
+ 'PDF_create_fieldgroup',
+ 'PDF_create_gstate',
+ 'PDF_create_pvf',
+ 'PDF_create_textflow',
+ 'PDF_curveto',
+ 'PDF_define_layer',
+ 'PDF_delete_pvf',
+ 'PDF_delete_table',
+ 'PDF_delete_textflow',
+ 'PDF_delete',
+ 'PDF_encoding_set_char',
+ 'PDF_end_document',
+ 'PDF_end_font',
+ 'PDF_end_glyph',
+ 'PDF_end_item',
+ 'PDF_end_layer',
+ 'PDF_end_page_ext',
+ 'PDF_end_page',
+ 'PDF_end_pattern',
+ 'PDF_end_template',
+ 'PDF_endpath',
+ 'PDF_fill_imageblock',
+ 'PDF_fill_pdfblock',
+ 'PDF_fill_stroke',
+ 'PDF_fill_textblock',
+ 'PDF_fill',
+ 'PDF_findfont',
+ 'PDF_fit_image',
+ 'PDF_fit_pdi_page',
+ 'PDF_fit_table',
+ 'PDF_fit_textflow',
+ 'PDF_fit_textline',
+ 'PDF_get_apiname',
+ 'PDF_get_buffer',
+ 'PDF_get_errmsg',
+ 'PDF_get_errnum',
+ 'PDF_get_font',
+ 'PDF_get_fontname',
+ 'PDF_get_fontsize',
+ 'PDF_get_image_height',
+ 'PDF_get_image_width',
+ 'PDF_get_majorversion',
+ 'PDF_get_minorversion',
+ 'PDF_get_parameter',
+ 'PDF_get_pdi_parameter',
+ 'PDF_get_pdi_value',
+ 'PDF_get_value',
+ 'PDF_info_font',
+ 'PDF_info_matchbox',
+ 'PDF_info_table',
+ 'PDF_info_textflow',
+ 'PDF_info_textline',
+ 'PDF_initgraphics',
+ 'PDF_lineto',
+ 'PDF_load_3ddata',
+ 'PDF_load_font',
+ 'PDF_load_iccprofile',
+ 'PDF_load_image',
+ 'PDF_makespotcolor',
+ 'PDF_moveto',
+ 'PDF_new',
+ 'PDF_open_ccitt',
+ 'PDF_open_file',
+ 'PDF_open_gif',
+ 'PDF_open_image_file',
+ 'PDF_open_image',
+ 'PDF_open_jpeg',
+ 'PDF_open_memory_image',
+ 'PDF_open_pdi_document',
+ 'PDF_open_pdi_page',
+ 'PDF_open_pdi',
+ 'PDF_open_tiff',
+ 'PDF_pcos_get_number',
+ 'PDF_pcos_get_stream',
+ 'PDF_pcos_get_string',
+ 'PDF_place_image',
+ 'PDF_place_pdi_page',
+ 'PDF_process_pdi',
+ 'PDF_rect',
+ 'PDF_restore',
+ 'PDF_resume_page',
+ 'PDF_rotate',
+ 'PDF_save',
+ 'PDF_scale',
+ 'PDF_set_border_color',
+ 'PDF_set_border_dash',
+ 'PDF_set_border_style',
+ 'PDF_set_char_spacing',
+ 'PDF_set_duration',
+ 'PDF_set_gstate',
+ 'PDF_set_horiz_scaling',
+ 'PDF_set_info_author',
+ 'PDF_set_info_creator',
+ 'PDF_set_info_keywords',
+ 'PDF_set_info_subject',
+ 'PDF_set_info_title',
+ 'PDF_set_info',
+ 'PDF_set_layer_dependency',
+ 'PDF_set_leading',
+ 'PDF_set_parameter',
+ 'PDF_set_text_matrix',
+ 'PDF_set_text_pos',
+ 'PDF_set_text_rendering',
+ 'PDF_set_text_rise',
+ 'PDF_set_value',
+ 'PDF_set_word_spacing',
+ 'PDF_setcolor',
+ 'PDF_setdash',
+ 'PDF_setdashpattern',
+ 'PDF_setflat',
+ 'PDF_setfont',
+ 'PDF_setgray_fill',
+ 'PDF_setgray_stroke',
+ 'PDF_setgray',
+ 'PDF_setlinecap',
+ 'PDF_setlinejoin',
+ 'PDF_setlinewidth',
+ 'PDF_setmatrix',
+ 'PDF_setmiterlimit',
+ 'PDF_setpolydash',
+ 'PDF_setrgbcolor_fill',
+ 'PDF_setrgbcolor_stroke',
+ 'PDF_setrgbcolor',
+ 'PDF_shading_pattern',
+ 'PDF_shading',
+ 'PDF_shfill',
+ 'PDF_show_boxed',
+ 'PDF_show_xy',
+ 'PDF_show',
+ 'PDF_skew',
+ 'PDF_stringwidth',
+ 'PDF_stroke',
+ 'PDF_suspend_page',
+ 'PDF_translate',
+ 'PDF_utf16_to_utf8',
+ 'PDF_utf32_to_utf16',
+ 'PDF_utf8_to_utf16'),
+ 'PHP Options/Info': ('assert_options',
+ 'assert',
+ 'cli_get_process_title',
+ 'cli_set_process_title',
+ 'dl',
+ 'extension_loaded',
+ 'gc_collect_cycles',
+ 'gc_disable',
+ 'gc_enable',
+ 'gc_enabled',
+ 'get_cfg_var',
+ 'get_current_user',
+ 'get_defined_constants',
+ 'get_extension_funcs',
+ 'get_include_path',
+ 'get_included_files',
+ 'get_loaded_extensions',
+ 'get_magic_quotes_gpc',
+ 'get_magic_quotes_runtime',
+ 'get_required_files',
+ 'getenv',
+ 'getlastmod',
+ 'getmygid',
+ 'getmyinode',
+ 'getmypid',
+ 'getmyuid',
+ 'getopt',
+ 'getrusage',
+ 'ini_alter',
+ 'ini_get_all',
+ 'ini_get',
+ 'ini_restore',
+ 'ini_set',
+ 'magic_quotes_runtime',
+ 'memory_get_peak_usage',
+ 'memory_get_usage',
+ 'php_ini_loaded_file',
+ 'php_ini_scanned_files',
+ 'php_logo_guid',
+ 'php_sapi_name',
+ 'php_uname',
+ 'phpcredits',
+ 'phpinfo',
+ 'phpversion',
+ 'putenv',
+ 'restore_include_path',
+ 'set_include_path',
+ 'set_magic_quotes_runtime',
+ 'set_time_limit',
+ 'sys_get_temp_dir',
+ 'version_compare',
+ 'zend_logo_guid',
+ 'zend_thread_id',
+ 'zend_version'),
+ 'POSIX': ('posix_access',
+ 'posix_ctermid',
+ 'posix_errno',
+ 'posix_get_last_error',
+ 'posix_getcwd',
+ 'posix_getegid',
+ 'posix_geteuid',
+ 'posix_getgid',
+ 'posix_getgrgid',
+ 'posix_getgrnam',
+ 'posix_getgroups',
+ 'posix_getlogin',
+ 'posix_getpgid',
+ 'posix_getpgrp',
+ 'posix_getpid',
+ 'posix_getppid',
+ 'posix_getpwnam',
+ 'posix_getpwuid',
+ 'posix_getrlimit',
+ 'posix_getsid',
+ 'posix_getuid',
+ 'posix_initgroups',
+ 'posix_isatty',
+ 'posix_kill',
+ 'posix_mkfifo',
+ 'posix_mknod',
+ 'posix_setegid',
+ 'posix_seteuid',
+ 'posix_setgid',
+ 'posix_setpgid',
+ 'posix_setsid',
+ 'posix_setuid',
+ 'posix_strerror',
+ 'posix_times',
+ 'posix_ttyname',
+ 'posix_uname'),
+ 'POSIX Regex': ('ereg_replace',
+ 'ereg',
+ 'eregi_replace',
+ 'eregi',
+ 'split',
+ 'spliti',
+ 'sql_regcase'),
+ 'PS': ('ps_add_bookmark',
+ 'ps_add_launchlink',
+ 'ps_add_locallink',
+ 'ps_add_note',
+ 'ps_add_pdflink',
+ 'ps_add_weblink',
+ 'ps_arc',
+ 'ps_arcn',
+ 'ps_begin_page',
+ 'ps_begin_pattern',
+ 'ps_begin_template',
+ 'ps_circle',
+ 'ps_clip',
+ 'ps_close_image',
+ 'ps_close',
+ 'ps_closepath_stroke',
+ 'ps_closepath',
+ 'ps_continue_text',
+ 'ps_curveto',
+ 'ps_delete',
+ 'ps_end_page',
+ 'ps_end_pattern',
+ 'ps_end_template',
+ 'ps_fill_stroke',
+ 'ps_fill',
+ 'ps_findfont',
+ 'ps_get_buffer',
+ 'ps_get_parameter',
+ 'ps_get_value',
+ 'ps_hyphenate',
+ 'ps_include_file',
+ 'ps_lineto',
+ 'ps_makespotcolor',
+ 'ps_moveto',
+ 'ps_new',
+ 'ps_open_file',
+ 'ps_open_image_file',
+ 'ps_open_image',
+ 'ps_open_memory_image',
+ 'ps_place_image',
+ 'ps_rect',
+ 'ps_restore',
+ 'ps_rotate',
+ 'ps_save',
+ 'ps_scale',
+ 'ps_set_border_color',
+ 'ps_set_border_dash',
+ 'ps_set_border_style',
+ 'ps_set_info',
+ 'ps_set_parameter',
+ 'ps_set_text_pos',
+ 'ps_set_value',
+ 'ps_setcolor',
+ 'ps_setdash',
+ 'ps_setflat',
+ 'ps_setfont',
+ 'ps_setgray',
+ 'ps_setlinecap',
+ 'ps_setlinejoin',
+ 'ps_setlinewidth',
+ 'ps_setmiterlimit',
+ 'ps_setoverprintmode',
+ 'ps_setpolydash',
+ 'ps_shading_pattern',
+ 'ps_shading',
+ 'ps_shfill',
+ 'ps_show_boxed',
+ 'ps_show_xy2',
+ 'ps_show_xy',
+ 'ps_show2',
+ 'ps_show',
+ 'ps_string_geometry',
+ 'ps_stringwidth',
+ 'ps_stroke',
+ 'ps_symbol_name',
+ 'ps_symbol_width',
+ 'ps_symbol',
+ 'ps_translate'),
+ 'Paradox': ('px_close',
+ 'px_create_fp',
+ 'px_date2string',
+ 'px_delete_record',
+ 'px_delete',
+ 'px_get_field',
+ 'px_get_info',
+ 'px_get_parameter',
+ 'px_get_record',
+ 'px_get_schema',
+ 'px_get_value',
+ 'px_insert_record',
+ 'px_new',
+ 'px_numfields',
+ 'px_numrecords',
+ 'px_open_fp',
+ 'px_put_record',
+ 'px_retrieve_record',
+ 'px_set_blob_file',
+ 'px_set_parameter',
+ 'px_set_tablename',
+ 'px_set_targetencoding',
+ 'px_set_value',
+ 'px_timestamp2string',
+ 'px_update_record'),
+ 'Parsekit': ('parsekit_compile_file',
+ 'parsekit_compile_string',
+ 'parsekit_func_arginfo'),
+ 'Password Hashing': ('password_get_info',
+ 'password_hash',
+ 'password_needs_rehash',
+ 'password_verify'),
+ 'PostgreSQL': ('pg_affected_rows',
+ 'pg_cancel_query',
+ 'pg_client_encoding',
+ 'pg_close',
+ 'pg_connect',
+ 'pg_connection_busy',
+ 'pg_connection_reset',
+ 'pg_connection_status',
+ 'pg_convert',
+ 'pg_copy_from',
+ 'pg_copy_to',
+ 'pg_dbname',
+ 'pg_delete',
+ 'pg_end_copy',
+ 'pg_escape_bytea',
+ 'pg_escape_identifier',
+ 'pg_escape_literal',
+ 'pg_escape_string',
+ 'pg_execute',
+ 'pg_fetch_all_columns',
+ 'pg_fetch_all',
+ 'pg_fetch_array',
+ 'pg_fetch_assoc',
+ 'pg_fetch_object',
+ 'pg_fetch_result',
+ 'pg_fetch_row',
+ 'pg_field_is_null',
+ 'pg_field_name',
+ 'pg_field_num',
+ 'pg_field_prtlen',
+ 'pg_field_size',
+ 'pg_field_table',
+ 'pg_field_type_oid',
+ 'pg_field_type',
+ 'pg_free_result',
+ 'pg_get_notify',
+ 'pg_get_pid',
+ 'pg_get_result',
+ 'pg_host',
+ 'pg_insert',
+ 'pg_last_error',
+ 'pg_last_notice',
+ 'pg_last_oid',
+ 'pg_lo_close',
+ 'pg_lo_create',
+ 'pg_lo_export',
+ 'pg_lo_import',
+ 'pg_lo_open',
+ 'pg_lo_read_all',
+ 'pg_lo_read',
+ 'pg_lo_seek',
+ 'pg_lo_tell',
+ 'pg_lo_truncate',
+ 'pg_lo_unlink',
+ 'pg_lo_write',
+ 'pg_meta_data',
+ 'pg_num_fields',
+ 'pg_num_rows',
+ 'pg_options',
+ 'pg_parameter_status',
+ 'pg_pconnect',
+ 'pg_ping',
+ 'pg_port',
+ 'pg_prepare',
+ 'pg_put_line',
+ 'pg_query_params',
+ 'pg_query',
+ 'pg_result_error_field',
+ 'pg_result_error',
+ 'pg_result_seek',
+ 'pg_result_status',
+ 'pg_select',
+ 'pg_send_execute',
+ 'pg_send_prepare',
+ 'pg_send_query_params',
+ 'pg_send_query',
+ 'pg_set_client_encoding',
+ 'pg_set_error_verbosity',
+ 'pg_trace',
+ 'pg_transaction_status',
+ 'pg_tty',
+ 'pg_unescape_bytea',
+ 'pg_untrace',
+ 'pg_update',
+ 'pg_version'),
+ 'Printer': ('printer_abort',
+ 'printer_close',
+ 'printer_create_brush',
+ 'printer_create_dc',
+ 'printer_create_font',
+ 'printer_create_pen',
+ 'printer_delete_brush',
+ 'printer_delete_dc',
+ 'printer_delete_font',
+ 'printer_delete_pen',
+ 'printer_draw_bmp',
+ 'printer_draw_chord',
+ 'printer_draw_elipse',
+ 'printer_draw_line',
+ 'printer_draw_pie',
+ 'printer_draw_rectangle',
+ 'printer_draw_roundrect',
+ 'printer_draw_text',
+ 'printer_end_doc',
+ 'printer_end_page',
+ 'printer_get_option',
+ 'printer_list',
+ 'printer_logical_fontheight',
+ 'printer_open',
+ 'printer_select_brush',
+ 'printer_select_font',
+ 'printer_select_pen',
+ 'printer_set_option',
+ 'printer_start_doc',
+ 'printer_start_page',
+ 'printer_write'),
+ 'Proctitle': ('setproctitle', 'setthreadtitle'),
+ 'Program execution': ('escapeshellarg',
+ 'escapeshellcmd',
+ 'exec',
+ 'passthru',
+ 'proc_close',
+ 'proc_get_status',
+ 'proc_nice',
+ 'proc_open',
+ 'proc_terminate',
+ 'shell_exec',
+ 'system'),
+ 'Pspell': ('pspell_add_to_personal',
+ 'pspell_add_to_session',
+ 'pspell_check',
+ 'pspell_clear_session',
+ 'pspell_config_create',
+ 'pspell_config_data_dir',
+ 'pspell_config_dict_dir',
+ 'pspell_config_ignore',
+ 'pspell_config_mode',
+ 'pspell_config_personal',
+ 'pspell_config_repl',
+ 'pspell_config_runtogether',
+ 'pspell_config_save_repl',
+ 'pspell_new_config',
+ 'pspell_new_personal',
+ 'pspell_new',
+ 'pspell_save_wordlist',
+ 'pspell_store_replacement',
+ 'pspell_suggest'),
+ 'RPM Reader': ('rpm_close',
+ 'rpm_get_tag',
+ 'rpm_is_valid',
+ 'rpm_open',
+ 'rpm_version'),
+ 'RRD': ('rrd_create',
+ 'rrd_error',
+ 'rrd_fetch',
+ 'rrd_first',
+ 'rrd_graph',
+ 'rrd_info',
+ 'rrd_last',
+ 'rrd_lastupdate',
+ 'rrd_restore',
+ 'rrd_tune',
+ 'rrd_update',
+ 'rrd_version',
+ 'rrd_xport',
+ 'rrdc_disconnect'),
+ 'Radius': ('radius_acct_open',
+ 'radius_add_server',
+ 'radius_auth_open',
+ 'radius_close',
+ 'radius_config',
+ 'radius_create_request',
+ 'radius_cvt_addr',
+ 'radius_cvt_int',
+ 'radius_cvt_string',
+ 'radius_demangle_mppe_key',
+ 'radius_demangle',
+ 'radius_get_attr',
+ 'radius_get_tagged_attr_data',
+ 'radius_get_tagged_attr_tag',
+ 'radius_get_vendor_attr',
+ 'radius_put_addr',
+ 'radius_put_attr',
+ 'radius_put_int',
+ 'radius_put_string',
+ 'radius_put_vendor_addr',
+ 'radius_put_vendor_attr',
+ 'radius_put_vendor_int',
+ 'radius_put_vendor_string',
+ 'radius_request_authenticator',
+ 'radius_salt_encrypt_attr',
+ 'radius_send_request',
+ 'radius_server_secret',
+ 'radius_strerror'),
+ 'Rar': ('rar_wrapper_cache_stats',),
+ 'Readline': ('readline_add_history',
+ 'readline_callback_handler_install',
+ 'readline_callback_handler_remove',
+ 'readline_callback_read_char',
+ 'readline_clear_history',
+ 'readline_completion_function',
+ 'readline_info',
+ 'readline_list_history',
+ 'readline_on_new_line',
+ 'readline_read_history',
+ 'readline_redisplay',
+ 'readline_write_history',
+ 'readline'),
+ 'Recode': ('recode_file', 'recode_string', 'recode'),
+ 'SNMP': ('snmp_get_quick_print',
+ 'snmp_get_valueretrieval',
+ 'snmp_read_mib',
+ 'snmp_set_enum_print',
+ 'snmp_set_oid_numeric_print',
+ 'snmp_set_oid_output_format',
+ 'snmp_set_quick_print',
+ 'snmp_set_valueretrieval',
+ 'snmp2_get',
+ 'snmp2_getnext',
+ 'snmp2_real_walk',
+ 'snmp2_set',
+ 'snmp2_walk',
+ 'snmp3_get',
+ 'snmp3_getnext',
+ 'snmp3_real_walk',
+ 'snmp3_set',
+ 'snmp3_walk',
+ 'snmpget',
+ 'snmpgetnext',
+ 'snmprealwalk',
+ 'snmpset',
+ 'snmpwalk',
+ 'snmpwalkoid'),
+ 'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
+ 'SPL': ('class_implements',
+ 'class_parents',
+ 'class_uses',
+ 'iterator_apply',
+ 'iterator_count',
+ 'iterator_to_array',
+ 'spl_autoload_call',
+ 'spl_autoload_extensions',
+ 'spl_autoload_functions',
+ 'spl_autoload_register',
+ 'spl_autoload_unregister',
+ 'spl_autoload',
+ 'spl_classes',
+ 'spl_object_hash'),
+ 'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'),
+ 'SQLSRV': ('sqlsrv_begin_transaction',
+ 'sqlsrv_cancel',
+ 'sqlsrv_client_info',
+ 'sqlsrv_close',
+ 'sqlsrv_commit',
+ 'sqlsrv_configure',
+ 'sqlsrv_connect',
+ 'sqlsrv_errors',
+ 'sqlsrv_execute',
+ 'sqlsrv_fetch_array',
+ 'sqlsrv_fetch_object',
+ 'sqlsrv_fetch',
+ 'sqlsrv_field_metadata',
+ 'sqlsrv_free_stmt',
+ 'sqlsrv_get_config',
+ 'sqlsrv_get_field',
+ 'sqlsrv_has_rows',
+ 'sqlsrv_next_result',
+ 'sqlsrv_num_fields',
+ 'sqlsrv_num_rows',
+ 'sqlsrv_prepare',
+ 'sqlsrv_query',
+ 'sqlsrv_rollback',
+ 'sqlsrv_rows_affected',
+ 'sqlsrv_send_stream_data',
+ 'sqlsrv_server_info'),
+ 'SQLite': ('sqlite_array_query',
+ 'sqlite_busy_timeout',
+ 'sqlite_changes',
+ 'sqlite_close',
+ 'sqlite_column',
+ 'sqlite_create_aggregate',
+ 'sqlite_create_function',
+ 'sqlite_current',
+ 'sqlite_error_string',
+ 'sqlite_escape_string',
+ 'sqlite_exec',
+ 'sqlite_factory',
+ 'sqlite_fetch_all',
+ 'sqlite_fetch_array',
+ 'sqlite_fetch_column_types',
+ 'sqlite_fetch_object',
+ 'sqlite_fetch_single',
+ 'sqlite_fetch_string',
+ 'sqlite_field_name',
+ 'sqlite_has_more',
+ 'sqlite_has_prev',
+ 'sqlite_key',
+ 'sqlite_last_error',
+ 'sqlite_last_insert_rowid',
+ 'sqlite_libencoding',
+ 'sqlite_libversion',
+ 'sqlite_next',
+ 'sqlite_num_fields',
+ 'sqlite_num_rows',
+ 'sqlite_open',
+ 'sqlite_popen',
+ 'sqlite_prev',
+ 'sqlite_query',
+ 'sqlite_rewind',
+ 'sqlite_seek',
+ 'sqlite_single_query',
+ 'sqlite_udf_decode_binary',
+ 'sqlite_udf_encode_binary',
+ 'sqlite_unbuffered_query',
+ 'sqlite_valid'),
+ 'SSH2': ('ssh2_auth_agent',
+ 'ssh2_auth_hostbased_file',
+ 'ssh2_auth_none',
+ 'ssh2_auth_password',
+ 'ssh2_auth_pubkey_file',
+ 'ssh2_connect',
+ 'ssh2_exec',
+ 'ssh2_fetch_stream',
+ 'ssh2_fingerprint',
+ 'ssh2_methods_negotiated',
+ 'ssh2_publickey_add',
+ 'ssh2_publickey_init',
+ 'ssh2_publickey_list',
+ 'ssh2_publickey_remove',
+ 'ssh2_scp_recv',
+ 'ssh2_scp_send',
+ 'ssh2_sftp_chmod',
+ 'ssh2_sftp_lstat',
+ 'ssh2_sftp_mkdir',
+ 'ssh2_sftp_readlink',
+ 'ssh2_sftp_realpath',
+ 'ssh2_sftp_rename',
+ 'ssh2_sftp_rmdir',
+ 'ssh2_sftp_stat',
+ 'ssh2_sftp_symlink',
+ 'ssh2_sftp_unlink',
+ 'ssh2_sftp',
+ 'ssh2_shell',
+ 'ssh2_tunnel'),
+ 'SVN': ('svn_add',
+ 'svn_auth_get_parameter',
+ 'svn_auth_set_parameter',
+ 'svn_blame',
+ 'svn_cat',
+ 'svn_checkout',
+ 'svn_cleanup',
+ 'svn_client_version',
+ 'svn_commit',
+ 'svn_delete',
+ 'svn_diff',
+ 'svn_export',
+ 'svn_fs_abort_txn',
+ 'svn_fs_apply_text',
+ 'svn_fs_begin_txn2',
+ 'svn_fs_change_node_prop',
+ 'svn_fs_check_path',
+ 'svn_fs_contents_changed',
+ 'svn_fs_copy',
+ 'svn_fs_delete',
+ 'svn_fs_dir_entries',
+ 'svn_fs_file_contents',
+ 'svn_fs_file_length',
+ 'svn_fs_is_dir',
+ 'svn_fs_is_file',
+ 'svn_fs_make_dir',
+ 'svn_fs_make_file',
+ 'svn_fs_node_created_rev',
+ 'svn_fs_node_prop',
+ 'svn_fs_props_changed',
+ 'svn_fs_revision_prop',
+ 'svn_fs_revision_root',
+ 'svn_fs_txn_root',
+ 'svn_fs_youngest_rev',
+ 'svn_import',
+ 'svn_log',
+ 'svn_ls',
+ 'svn_mkdir',
+ 'svn_repos_create',
+ 'svn_repos_fs_begin_txn_for_commit',
+ 'svn_repos_fs_commit_txn',
+ 'svn_repos_fs',
+ 'svn_repos_hotcopy',
+ 'svn_repos_open',
+ 'svn_repos_recover',
+ 'svn_revert',
+ 'svn_status',
+ 'svn_update'),
+ 'SWF': ('swf_actiongeturl',
+ 'swf_actiongotoframe',
+ 'swf_actiongotolabel',
+ 'swf_actionnextframe',
+ 'swf_actionplay',
+ 'swf_actionprevframe',
+ 'swf_actionsettarget',
+ 'swf_actionstop',
+ 'swf_actiontogglequality',
+ 'swf_actionwaitforframe',
+ 'swf_addbuttonrecord',
+ 'swf_addcolor',
+ 'swf_closefile',
+ 'swf_definebitmap',
+ 'swf_definefont',
+ 'swf_defineline',
+ 'swf_definepoly',
+ 'swf_definerect',
+ 'swf_definetext',
+ 'swf_endbutton',
+ 'swf_enddoaction',
+ 'swf_endshape',
+ 'swf_endsymbol',
+ 'swf_fontsize',
+ 'swf_fontslant',
+ 'swf_fonttracking',
+ 'swf_getbitmapinfo',
+ 'swf_getfontinfo',
+ 'swf_getframe',
+ 'swf_labelframe',
+ 'swf_lookat',
+ 'swf_modifyobject',
+ 'swf_mulcolor',
+ 'swf_nextid',
+ 'swf_oncondition',
+ 'swf_openfile',
+ 'swf_ortho2',
+ 'swf_ortho',
+ 'swf_perspective',
+ 'swf_placeobject',
+ 'swf_polarview',
+ 'swf_popmatrix',
+ 'swf_posround',
+ 'swf_pushmatrix',
+ 'swf_removeobject',
+ 'swf_rotate',
+ 'swf_scale',
+ 'swf_setfont',
+ 'swf_setframe',
+ 'swf_shapearc',
+ 'swf_shapecurveto3',
+ 'swf_shapecurveto',
+ 'swf_shapefillbitmapclip',
+ 'swf_shapefillbitmaptile',
+ 'swf_shapefilloff',
+ 'swf_shapefillsolid',
+ 'swf_shapelinesolid',
+ 'swf_shapelineto',
+ 'swf_shapemoveto',
+ 'swf_showframe',
+ 'swf_startbutton',
+ 'swf_startdoaction',
+ 'swf_startshape',
+ 'swf_startsymbol',
+ 'swf_textwidth',
+ 'swf_translate',
+ 'swf_viewport'),
+ 'Semaphore': ('ftok',
+ 'msg_get_queue',
+ 'msg_queue_exists',
+ 'msg_receive',
+ 'msg_remove_queue',
+ 'msg_send',
+ 'msg_set_queue',
+ 'msg_stat_queue',
+ 'sem_acquire',
+ 'sem_get',
+ 'sem_release',
+ 'sem_remove',
+ 'shm_attach',
+ 'shm_detach',
+ 'shm_get_var',
+ 'shm_has_var',
+ 'shm_put_var',
+ 'shm_remove_var',
+ 'shm_remove'),
+ 'Session': ('session_cache_expire',
+ 'session_cache_limiter',
+ 'session_commit',
+ 'session_decode',
+ 'session_destroy',
+ 'session_encode',
+ 'session_get_cookie_params',
+ 'session_id',
+ 'session_is_registered',
+ 'session_module_name',
+ 'session_name',
+ 'session_regenerate_id',
+ 'session_register_shutdown',
+ 'session_register',
+ 'session_save_path',
+ 'session_set_cookie_params',
+ 'session_set_save_handler',
+ 'session_start',
+ 'session_status',
+ 'session_unregister',
+ 'session_unset',
+ 'session_write_close'),
+ 'Session PgSQL': ('session_pgsql_add_error',
+ 'session_pgsql_get_error',
+ 'session_pgsql_get_field',
+ 'session_pgsql_reset',
+ 'session_pgsql_set_field',
+ 'session_pgsql_status'),
+ 'Shared Memory': ('shmop_close',
+ 'shmop_delete',
+ 'shmop_open',
+ 'shmop_read',
+ 'shmop_size',
+ 'shmop_write'),
+ 'SimpleXML': ('simplexml_import_dom',
+ 'simplexml_load_file',
+ 'simplexml_load_string'),
+ 'Socket': ('socket_accept',
+ 'socket_bind',
+ 'socket_clear_error',
+ 'socket_close',
+ 'socket_cmsg_space',
+ 'socket_connect',
+ 'socket_create_listen',
+ 'socket_create_pair',
+ 'socket_create',
+ 'socket_get_option',
+ 'socket_getpeername',
+ 'socket_getsockname',
+ 'socket_import_stream',
+ 'socket_last_error',
+ 'socket_listen',
+ 'socket_read',
+ 'socket_recv',
+ 'socket_recvfrom',
+ 'socket_recvmsg',
+ 'socket_select',
+ 'socket_send',
+ 'socket_sendmsg',
+ 'socket_sendto',
+ 'socket_set_block',
+ 'socket_set_nonblock',
+ 'socket_set_option',
+ 'socket_shutdown',
+ 'socket_strerror',
+ 'socket_write'),
+ 'Solr': ('solr_get_version',),
+ 'Statistic': ('stats_absolute_deviation',
+ 'stats_cdf_beta',
+ 'stats_cdf_binomial',
+ 'stats_cdf_cauchy',
+ 'stats_cdf_chisquare',
+ 'stats_cdf_exponential',
+ 'stats_cdf_f',
+ 'stats_cdf_gamma',
+ 'stats_cdf_laplace',
+ 'stats_cdf_logistic',
+ 'stats_cdf_negative_binomial',
+ 'stats_cdf_noncentral_chisquare',
+ 'stats_cdf_noncentral_f',
+ 'stats_cdf_poisson',
+ 'stats_cdf_t',
+ 'stats_cdf_uniform',
+ 'stats_cdf_weibull',
+ 'stats_covariance',
+ 'stats_den_uniform',
+ 'stats_dens_beta',
+ 'stats_dens_cauchy',
+ 'stats_dens_chisquare',
+ 'stats_dens_exponential',
+ 'stats_dens_f',
+ 'stats_dens_gamma',
+ 'stats_dens_laplace',
+ 'stats_dens_logistic',
+ 'stats_dens_negative_binomial',
+ 'stats_dens_normal',
+ 'stats_dens_pmf_binomial',
+ 'stats_dens_pmf_hypergeometric',
+ 'stats_dens_pmf_poisson',
+ 'stats_dens_t',
+ 'stats_dens_weibull',
+ 'stats_harmonic_mean',
+ 'stats_kurtosis',
+ 'stats_rand_gen_beta',
+ 'stats_rand_gen_chisquare',
+ 'stats_rand_gen_exponential',
+ 'stats_rand_gen_f',
+ 'stats_rand_gen_funiform',
+ 'stats_rand_gen_gamma',
+ 'stats_rand_gen_ibinomial_negative',
+ 'stats_rand_gen_ibinomial',
+ 'stats_rand_gen_int',
+ 'stats_rand_gen_ipoisson',
+ 'stats_rand_gen_iuniform',
+ 'stats_rand_gen_noncenral_chisquare',
+ 'stats_rand_gen_noncentral_f',
+ 'stats_rand_gen_noncentral_t',
+ 'stats_rand_gen_normal',
+ 'stats_rand_gen_t',
+ 'stats_rand_get_seeds',
+ 'stats_rand_phrase_to_seeds',
+ 'stats_rand_ranf',
+ 'stats_rand_setall',
+ 'stats_skew',
+ 'stats_standard_deviation',
+ 'stats_stat_binomial_coef',
+ 'stats_stat_correlation',
+ 'stats_stat_gennch',
+ 'stats_stat_independent_t',
+ 'stats_stat_innerproduct',
+ 'stats_stat_noncentral_t',
+ 'stats_stat_paired_t',
+ 'stats_stat_percentile',
+ 'stats_stat_powersum',
+ 'stats_variance'),
+ 'Stomp': ('stomp_connect_error', 'stomp_version'),
+ 'Stream': ('set_socket_blocking',
+ 'stream_bucket_append',
+ 'stream_bucket_make_writeable',
+ 'stream_bucket_new',
+ 'stream_bucket_prepend',
+ 'stream_context_create',
+ 'stream_context_get_default',
+ 'stream_context_get_options',
+ 'stream_context_get_params',
+ 'stream_context_set_default',
+ 'stream_context_set_option',
+ 'stream_context_set_params',
+ 'stream_copy_to_stream',
+ 'stream_encoding',
+ 'stream_filter_append',
+ 'stream_filter_prepend',
+ 'stream_filter_register',
+ 'stream_filter_remove',
+ 'stream_get_contents',
+ 'stream_get_filters',
+ 'stream_get_line',
+ 'stream_get_meta_data',
+ 'stream_get_transports',
+ 'stream_get_wrappers',
+ 'stream_is_local',
+ 'stream_notification_callback',
+ 'stream_register_wrapper',
+ 'stream_resolve_include_path',
+ 'stream_select',
+ 'stream_set_blocking',
+ 'stream_set_chunk_size',
+ 'stream_set_read_buffer',
+ 'stream_set_timeout',
+ 'stream_set_write_buffer',
+ 'stream_socket_accept',
+ 'stream_socket_client',
+ 'stream_socket_enable_crypto',
+ 'stream_socket_get_name',
+ 'stream_socket_pair',
+ 'stream_socket_recvfrom',
+ 'stream_socket_sendto',
+ 'stream_socket_server',
+ 'stream_socket_shutdown',
+ 'stream_supports_lock',
+ 'stream_wrapper_register',
+ 'stream_wrapper_restore',
+ 'stream_wrapper_unregister'),
+ 'String': ('addcslashes',
+ 'addslashes',
+ 'bin2hex',
+ 'chop',
+ 'chr',
+ 'chunk_split',
+ 'convert_cyr_string',
+ 'convert_uudecode',
+ 'convert_uuencode',
+ 'count_chars',
+ 'crc32',
+ 'crypt',
+ 'echo',
+ 'explode',
+ 'fprintf',
+ 'get_html_translation_table',
+ 'hebrev',
+ 'hebrevc',
+ 'hex2bin',
+ 'html_entity_decode',
+ 'htmlentities',
+ 'htmlspecialchars_decode',
+ 'htmlspecialchars',
+ 'implode',
+ 'join',
+ 'lcfirst',
+ 'levenshtein',
+ 'localeconv',
+ 'ltrim',
+ 'md5_file',
+ 'md5',
+ 'metaphone',
+ 'money_format',
+ 'nl_langinfo',
+ 'nl2br',
+ 'number_format',
+ 'ord',
+ 'parse_str',
+ 'print',
+ 'printf',
+ 'quoted_printable_decode',
+ 'quoted_printable_encode',
+ 'quotemeta',
+ 'rtrim',
+ 'setlocale',
+ 'sha1_file',
+ 'sha1',
+ 'similar_text',
+ 'soundex',
+ 'sprintf',
+ 'sscanf',
+ 'str_getcsv',
+ 'str_ireplace',
+ 'str_pad',
+ 'str_repeat',
+ 'str_replace',
+ 'str_rot13',
+ 'str_shuffle',
+ 'str_split',
+ 'str_word_count',
+ 'strcasecmp',
+ 'strchr',
+ 'strcmp',
+ 'strcoll',
+ 'strcspn',
+ 'strip_tags',
+ 'stripcslashes',
+ 'stripos',
+ 'stripslashes',
+ 'stristr',
+ 'strlen',
+ 'strnatcasecmp',
+ 'strnatcmp',
+ 'strncasecmp',
+ 'strncmp',
+ 'strpbrk',
+ 'strpos',
+ 'strrchr',
+ 'strrev',
+ 'strripos',
+ 'strrpos',
+ 'strspn',
+ 'strstr',
+ 'strtok',
+ 'strtolower',
+ 'strtoupper',
+ 'strtr',
+ 'substr_compare',
+ 'substr_count',
+ 'substr_replace',
+ 'substr',
+ 'trim',
+ 'ucfirst',
+ 'ucwords',
+ 'vfprintf',
+ 'vprintf',
+ 'vsprintf',
+ 'wordwrap'),
+ 'Sybase': ('sybase_affected_rows',
+ 'sybase_close',
+ 'sybase_connect',
+ 'sybase_data_seek',
+ 'sybase_deadlock_retry_count',
+ 'sybase_fetch_array',
+ 'sybase_fetch_assoc',
+ 'sybase_fetch_field',
+ 'sybase_fetch_object',
+ 'sybase_fetch_row',
+ 'sybase_field_seek',
+ 'sybase_free_result',
+ 'sybase_get_last_message',
+ 'sybase_min_client_severity',
+ 'sybase_min_error_severity',
+ 'sybase_min_message_severity',
+ 'sybase_min_server_severity',
+ 'sybase_num_fields',
+ 'sybase_num_rows',
+ 'sybase_pconnect',
+ 'sybase_query',
+ 'sybase_result',
+ 'sybase_select_db',
+ 'sybase_set_message_handler',
+ 'sybase_unbuffered_query'),
+ 'TCP': ('tcpwrap_check',),
+ 'Taint': ('is_tainted', 'taint', 'untaint'),
+ 'Tidy': ('ob_tidyhandler',
+ 'tidy_access_count',
+ 'tidy_config_count',
+ 'tidy_error_count',
+ 'tidy_get_output',
+ 'tidy_load_config',
+ 'tidy_reset_config',
+ 'tidy_save_config',
+ 'tidy_set_encoding',
+ 'tidy_setopt',
+ 'tidy_warning_count'),
+ 'Tokenizer': ('token_get_all', 'token_name'),
+ 'Trader': ('trader_acos',
+ 'trader_ad',
+ 'trader_add',
+ 'trader_adosc',
+ 'trader_adx',
+ 'trader_adxr',
+ 'trader_apo',
+ 'trader_aroon',
+ 'trader_aroonosc',
+ 'trader_asin',
+ 'trader_atan',
+ 'trader_atr',
+ 'trader_avgprice',
+ 'trader_bbands',
+ 'trader_beta',
+ 'trader_bop',
+ 'trader_cci',
+ 'trader_cdl2crows',
+ 'trader_cdl3blackcrows',
+ 'trader_cdl3inside',
+ 'trader_cdl3linestrike',
+ 'trader_cdl3outside',
+ 'trader_cdl3starsinsouth',
+ 'trader_cdl3whitesoldiers',
+ 'trader_cdlabandonedbaby',
+ 'trader_cdladvanceblock',
+ 'trader_cdlbelthold',
+ 'trader_cdlbreakaway',
+ 'trader_cdlclosingmarubozu',
+ 'trader_cdlconcealbabyswall',
+ 'trader_cdlcounterattack',
+ 'trader_cdldarkcloudcover',
+ 'trader_cdldoji',
+ 'trader_cdldojistar',
+ 'trader_cdldragonflydoji',
+ 'trader_cdlengulfing',
+ 'trader_cdleveningdojistar',
+ 'trader_cdleveningstar',
+ 'trader_cdlgapsidesidewhite',
+ 'trader_cdlgravestonedoji',
+ 'trader_cdlhammer',
+ 'trader_cdlhangingman',
+ 'trader_cdlharami',
+ 'trader_cdlharamicross',
+ 'trader_cdlhighwave',
+ 'trader_cdlhikkake',
+ 'trader_cdlhikkakemod',
+ 'trader_cdlhomingpigeon',
+ 'trader_cdlidentical3crows',
+ 'trader_cdlinneck',
+ 'trader_cdlinvertedhammer',
+ 'trader_cdlkicking',
+ 'trader_cdlkickingbylength',
+ 'trader_cdlladderbottom',
+ 'trader_cdllongleggeddoji',
+ 'trader_cdllongline',
+ 'trader_cdlmarubozu',
+ 'trader_cdlmatchinglow',
+ 'trader_cdlmathold',
+ 'trader_cdlmorningdojistar',
+ 'trader_cdlmorningstar',
+ 'trader_cdlonneck',
+ 'trader_cdlpiercing',
+ 'trader_cdlrickshawman',
+ 'trader_cdlrisefall3methods',
+ 'trader_cdlseparatinglines',
+ 'trader_cdlshootingstar',
+ 'trader_cdlshortline',
+ 'trader_cdlspinningtop',
+ 'trader_cdlstalledpattern',
+ 'trader_cdlsticksandwich',
+ 'trader_cdltakuri',
+ 'trader_cdltasukigap',
+ 'trader_cdlthrusting',
+ 'trader_cdltristar',
+ 'trader_cdlunique3river',
+ 'trader_cdlupsidegap2crows',
+ 'trader_cdlxsidegap3methods',
+ 'trader_ceil',
+ 'trader_cmo',
+ 'trader_correl',
+ 'trader_cos',
+ 'trader_cosh',
+ 'trader_dema',
+ 'trader_div',
+ 'trader_dx',
+ 'trader_ema',
+ 'trader_errno',
+ 'trader_exp',
+ 'trader_floor',
+ 'trader_get_compat',
+ 'trader_get_unstable_period',
+ 'trader_ht_dcperiod',
+ 'trader_ht_dcphase',
+ 'trader_ht_phasor',
+ 'trader_ht_sine',
+ 'trader_ht_trendline',
+ 'trader_ht_trendmode',
+ 'trader_kama',
+ 'trader_linearreg_angle',
+ 'trader_linearreg_intercept',
+ 'trader_linearreg_slope',
+ 'trader_linearreg',
+ 'trader_ln',
+ 'trader_log10',
+ 'trader_ma',
+ 'trader_macd',
+ 'trader_macdext',
+ 'trader_macdfix',
+ 'trader_mama',
+ 'trader_mavp',
+ 'trader_max',
+ 'trader_maxindex',
+ 'trader_medprice',
+ 'trader_mfi',
+ 'trader_midpoint',
+ 'trader_midprice',
+ 'trader_min',
+ 'trader_minindex',
+ 'trader_minmax',
+ 'trader_minmaxindex',
+ 'trader_minus_di',
+ 'trader_minus_dm',
+ 'trader_mom',
+ 'trader_mult',
+ 'trader_natr',
+ 'trader_obv',
+ 'trader_plus_di',
+ 'trader_plus_dm',
+ 'trader_ppo',
+ 'trader_roc',
+ 'trader_rocp',
+ 'trader_rocr100',
+ 'trader_rocr',
+ 'trader_rsi',
+ 'trader_sar',
+ 'trader_sarext',
+ 'trader_set_compat',
+ 'trader_set_unstable_period',
+ 'trader_sin',
+ 'trader_sinh',
+ 'trader_sma',
+ 'trader_sqrt',
+ 'trader_stddev',
+ 'trader_stoch',
+ 'trader_stochf',
+ 'trader_stochrsi',
+ 'trader_sub',
+ 'trader_sum',
+ 'trader_t3',
+ 'trader_tan',
+ 'trader_tanh',
+ 'trader_tema',
+ 'trader_trange',
+ 'trader_trima',
+ 'trader_trix',
+ 'trader_tsf',
+ 'trader_typprice',
+ 'trader_ultosc',
+ 'trader_var',
+ 'trader_wclprice',
+ 'trader_willr',
+ 'trader_wma'),
+ 'URL': ('base64_decode',
+ 'base64_encode',
+ 'get_headers',
+ 'get_meta_tags',
+ 'http_build_query',
+ 'parse_url',
+ 'rawurldecode',
+ 'rawurlencode',
+ 'urldecode',
+ 'urlencode'),
+ 'Uopz': ('uopz_backup',
+ 'uopz_compose',
+ 'uopz_copy',
+ 'uopz_delete',
+ 'uopz_extend',
+ 'uopz_flags',
+ 'uopz_function',
+ 'uopz_implement',
+ 'uopz_overload',
+ 'uopz_redefine',
+ 'uopz_rename',
+ 'uopz_restore',
+ 'uopz_undefine'),
+ 'Variable handling': ('boolval',
+ 'debug_zval_dump',
+ 'doubleval',
+ 'empty',
+ 'floatval',
+ 'get_defined_vars',
+ 'get_resource_type',
+ 'gettype',
+ 'import_request_variables',
+ 'intval',
+ 'is_array',
+ 'is_bool',
+ 'is_callable',
+ 'is_double',
+ 'is_float',
+ 'is_int',
+ 'is_integer',
+ 'is_long',
+ 'is_null',
+ 'is_numeric',
+ 'is_object',
+ 'is_real',
+ 'is_resource',
+ 'is_scalar',
+ 'is_string',
+ 'isset',
+ 'print_r',
+ 'serialize',
+ 'settype',
+ 'strval',
+ 'unserialize',
+ 'unset',
+ 'var_dump',
+ 'var_export'),
+ 'W32api': ('w32api_deftype',
+ 'w32api_init_dtype',
+ 'w32api_invoke_function',
+ 'w32api_register_function',
+ 'w32api_set_call_method'),
+ 'WDDX': ('wddx_add_vars',
+ 'wddx_deserialize',
+ 'wddx_packet_end',
+ 'wddx_packet_start',
+ 'wddx_serialize_value',
+ 'wddx_serialize_vars'),
+ 'WinCache': ('wincache_fcache_fileinfo',
+ 'wincache_fcache_meminfo',
+ 'wincache_lock',
+ 'wincache_ocache_fileinfo',
+ 'wincache_ocache_meminfo',
+ 'wincache_refresh_if_changed',
+ 'wincache_rplist_fileinfo',
+ 'wincache_rplist_meminfo',
+ 'wincache_scache_info',
+ 'wincache_scache_meminfo',
+ 'wincache_ucache_add',
+ 'wincache_ucache_cas',
+ 'wincache_ucache_clear',
+ 'wincache_ucache_dec',
+ 'wincache_ucache_delete',
+ 'wincache_ucache_exists',
+ 'wincache_ucache_get',
+ 'wincache_ucache_inc',
+ 'wincache_ucache_info',
+ 'wincache_ucache_meminfo',
+ 'wincache_ucache_set',
+ 'wincache_unlock'),
+ 'XML Parser': ('utf8_decode',
+ 'utf8_encode',
+ 'xml_error_string',
+ 'xml_get_current_byte_index',
+ 'xml_get_current_column_number',
+ 'xml_get_current_line_number',
+ 'xml_get_error_code',
+ 'xml_parse_into_struct',
+ 'xml_parse',
+ 'xml_parser_create_ns',
+ 'xml_parser_create',
+ 'xml_parser_free',
+ 'xml_parser_get_option',
+ 'xml_parser_set_option',
+ 'xml_set_character_data_handler',
+ 'xml_set_default_handler',
+ 'xml_set_element_handler',
+ 'xml_set_end_namespace_decl_handler',
+ 'xml_set_external_entity_ref_handler',
+ 'xml_set_notation_decl_handler',
+ 'xml_set_object',
+ 'xml_set_processing_instruction_handler',
+ 'xml_set_start_namespace_decl_handler',
+ 'xml_set_unparsed_entity_decl_handler'),
+ 'XML-RPC': ('xmlrpc_decode_request',
+ 'xmlrpc_decode',
+ 'xmlrpc_encode_request',
+ 'xmlrpc_encode',
+ 'xmlrpc_get_type',
+ 'xmlrpc_is_fault',
+ 'xmlrpc_parse_method_descriptions',
+ 'xmlrpc_server_add_introspection_data',
+ 'xmlrpc_server_call_method',
+ 'xmlrpc_server_create',
+ 'xmlrpc_server_destroy',
+ 'xmlrpc_server_register_introspection_callback',
+ 'xmlrpc_server_register_method',
+ 'xmlrpc_set_type'),
+ 'XSLT (PHP 4)': ('xslt_backend_info',
+ 'xslt_backend_name',
+ 'xslt_backend_version',
+ 'xslt_create',
+ 'xslt_errno',
+ 'xslt_error',
+ 'xslt_free',
+ 'xslt_getopt',
+ 'xslt_process',
+ 'xslt_set_base',
+ 'xslt_set_encoding',
+ 'xslt_set_error_handler',
+ 'xslt_set_log',
+ 'xslt_set_object',
+ 'xslt_set_sax_handler',
+ 'xslt_set_sax_handlers',
+ 'xslt_set_scheme_handler',
+ 'xslt_set_scheme_handlers',
+ 'xslt_setopt'),
+ 'Xhprof': ('xhprof_disable',
+ 'xhprof_enable',
+ 'xhprof_sample_disable',
+ 'xhprof_sample_enable'),
+ 'YAZ': ('yaz_addinfo',
+ 'yaz_ccl_conf',
+ 'yaz_ccl_parse',
+ 'yaz_close',
+ 'yaz_connect',
+ 'yaz_database',
+ 'yaz_element',
+ 'yaz_errno',
+ 'yaz_error',
+ 'yaz_es_result',
+ 'yaz_es',
+ 'yaz_get_option',
+ 'yaz_hits',
+ 'yaz_itemorder',
+ 'yaz_present',
+ 'yaz_range',
+ 'yaz_record',
+ 'yaz_scan_result',
+ 'yaz_scan',
+ 'yaz_schema',
+ 'yaz_search',
+ 'yaz_set_option',
+ 'yaz_sort',
+ 'yaz_syntax',
+ 'yaz_wait'),
+ 'YP/NIS': ('yp_all',
+ 'yp_cat',
+ 'yp_err_string',
+ 'yp_errno',
+ 'yp_first',
+ 'yp_get_default_domain',
+ 'yp_master',
+ 'yp_match',
+ 'yp_next',
+ 'yp_order'),
+ 'Yaml': ('yaml_emit_file',
+ 'yaml_emit',
+ 'yaml_parse_file',
+ 'yaml_parse_url',
+ 'yaml_parse'),
+ 'Zip': ('zip_close',
+ 'zip_entry_close',
+ 'zip_entry_compressedsize',
+ 'zip_entry_compressionmethod',
+ 'zip_entry_filesize',
+ 'zip_entry_name',
+ 'zip_entry_open',
+ 'zip_entry_read',
+ 'zip_open',
+ 'zip_read'),
+ 'Zlib': ('gzclose',
+ 'gzcompress',
+ 'gzdecode',
+ 'gzdeflate',
+ 'gzencode',
+ 'gzeof',
+ 'gzfile',
+ 'gzgetc',
+ 'gzgets',
+ 'gzgetss',
+ 'gzinflate',
+ 'gzopen',
+ 'gzpassthru',
+ 'gzputs',
+ 'gzread',
+ 'gzrewind',
+ 'gzseek',
+ 'gztell',
+ 'gzuncompress',
+ 'gzwrite',
+ 'readgzfile',
+ 'zlib_decode',
+ 'zlib_encode',
+ 'zlib_get_coding_type'),
+ 'bcompiler': ('bcompiler_load_exe',
+ 'bcompiler_load',
+ 'bcompiler_parse_class',
+ 'bcompiler_read',
+ 'bcompiler_write_class',
+ 'bcompiler_write_constant',
+ 'bcompiler_write_exe_footer',
+ 'bcompiler_write_file',
+ 'bcompiler_write_footer',
+ 'bcompiler_write_function',
+ 'bcompiler_write_functions_from_file',
+ 'bcompiler_write_header',
+ 'bcompiler_write_included_filename'),
+ 'cURL': ('curl_close',
+ 'curl_copy_handle',
+ 'curl_errno',
+ 'curl_error',
+ 'curl_escape',
+ 'curl_exec',
+ 'curl_file_create',
+ 'curl_getinfo',
+ 'curl_init',
+ 'curl_multi_add_handle',
+ 'curl_multi_close',
+ 'curl_multi_exec',
+ 'curl_multi_getcontent',
+ 'curl_multi_info_read',
+ 'curl_multi_init',
+ 'curl_multi_remove_handle',
+ 'curl_multi_select',
+ 'curl_multi_setopt',
+ 'curl_multi_strerror',
+ 'curl_pause',
+ 'curl_reset',
+ 'curl_setopt_array',
+ 'curl_setopt',
+ 'curl_share_close',
+ 'curl_share_init',
+ 'curl_share_setopt',
+ 'curl_strerror',
+ 'curl_unescape',
+ 'curl_version'),
+ 'chdb': ('chdb_create',),
+ 'dBase': ('dbase_add_record',
+ 'dbase_close',
+ 'dbase_create',
+ 'dbase_delete_record',
+ 'dbase_get_header_info',
+ 'dbase_get_record_with_names',
+ 'dbase_get_record',
+ 'dbase_numfields',
+ 'dbase_numrecords',
+ 'dbase_open',
+ 'dbase_pack',
+ 'dbase_replace_record'),
+ 'dbx': ('dbx_close',
+ 'dbx_compare',
+ 'dbx_connect',
+ 'dbx_error',
+ 'dbx_escape_string',
+ 'dbx_fetch_row',
+ 'dbx_query',
+ 'dbx_sort'),
+ 'filePro': ('filepro_fieldcount',
+ 'filepro_fieldname',
+ 'filepro_fieldtype',
+ 'filepro_fieldwidth',
+ 'filepro_retrieve',
+ 'filepro_rowcount',
+ 'filepro'),
+ 'iconv': ('iconv_get_encoding',
+ 'iconv_mime_decode_headers',
+ 'iconv_mime_decode',
+ 'iconv_mime_encode',
+ 'iconv_set_encoding',
+ 'iconv_strlen',
+ 'iconv_strpos',
+ 'iconv_strrpos',
+ 'iconv_substr',
+ 'iconv',
+ 'ob_iconv_handler'),
+ 'inclued': ('inclued_get_data',),
+ 'intl': ('intl_error_name',
+ 'intl_get_error_code',
+ 'intl_get_error_message',
+ 'intl_is_failure'),
+ 'libxml': ('libxml_clear_errors',
+ 'libxml_disable_entity_loader',
+ 'libxml_get_errors',
+ 'libxml_get_last_error',
+ 'libxml_set_external_entity_loader',
+ 'libxml_set_streams_context',
+ 'libxml_use_internal_errors'),
+ 'mSQL': ('msql_affected_rows',
+ 'msql_close',
+ 'msql_connect',
+ 'msql_create_db',
+ 'msql_createdb',
+ 'msql_data_seek',
+ 'msql_db_query',
+ 'msql_dbname',
+ 'msql_drop_db',
+ 'msql_error',
+ 'msql_fetch_array',
+ 'msql_fetch_field',
+ 'msql_fetch_object',
+ 'msql_fetch_row',
+ 'msql_field_flags',
+ 'msql_field_len',
+ 'msql_field_name',
+ 'msql_field_seek',
+ 'msql_field_table',
+ 'msql_field_type',
+ 'msql_fieldflags',
+ 'msql_fieldlen',
+ 'msql_fieldname',
+ 'msql_fieldtable',
+ 'msql_fieldtype',
+ 'msql_free_result',
+ 'msql_list_dbs',
+ 'msql_list_fields',
+ 'msql_list_tables',
+ 'msql_num_fields',
+ 'msql_num_rows',
+ 'msql_numfields',
+ 'msql_numrows',
+ 'msql_pconnect',
+ 'msql_query',
+ 'msql_regcase',
+ 'msql_result',
+ 'msql_select_db',
+ 'msql_tablename',
+ 'msql'),
+ 'mnoGoSearch': ('udm_add_search_limit',
+ 'udm_alloc_agent_array',
+ 'udm_alloc_agent',
+ 'udm_api_version',
+ 'udm_cat_list',
+ 'udm_cat_path',
+ 'udm_check_charset',
+ 'udm_check_stored',
+ 'udm_clear_search_limits',
+ 'udm_close_stored',
+ 'udm_crc32',
+ 'udm_errno',
+ 'udm_error',
+ 'udm_find',
+ 'udm_free_agent',
+ 'udm_free_ispell_data',
+ 'udm_free_res',
+ 'udm_get_doc_count',
+ 'udm_get_res_field',
+ 'udm_get_res_param',
+ 'udm_hash32',
+ 'udm_load_ispell_data',
+ 'udm_open_stored',
+ 'udm_set_agent_param'),
+ 'mqseries': ('mqseries_back',
+ 'mqseries_begin',
+ 'mqseries_close',
+ 'mqseries_cmit',
+ 'mqseries_conn',
+ 'mqseries_connx',
+ 'mqseries_disc',
+ 'mqseries_get',
+ 'mqseries_inq',
+ 'mqseries_open',
+ 'mqseries_put1',
+ 'mqseries_put',
+ 'mqseries_set',
+ 'mqseries_strerror'),
+ 'mysqlnd_qc': ('mysqlnd_qc_clear_cache',
+ 'mysqlnd_qc_get_available_handlers',
+ 'mysqlnd_qc_get_cache_info',
+ 'mysqlnd_qc_get_core_stats',
+ 'mysqlnd_qc_get_normalized_query_trace_log',
+ 'mysqlnd_qc_get_query_trace_log',
+ 'mysqlnd_qc_set_cache_condition',
+ 'mysqlnd_qc_set_is_select',
+ 'mysqlnd_qc_set_storage_handler',
+ 'mysqlnd_qc_set_user_handlers'),
+ 'qtdom': ('qdom_error', 'qdom_tree'),
+ 'runkit': ('runkit_class_adopt',
+ 'runkit_class_emancipate',
+ 'runkit_constant_add',
+ 'runkit_constant_redefine',
+ 'runkit_constant_remove',
+ 'runkit_function_add',
+ 'runkit_function_copy',
+ 'runkit_function_redefine',
+ 'runkit_function_remove',
+ 'runkit_function_rename',
+ 'runkit_import',
+ 'runkit_lint_file',
+ 'runkit_lint',
+ 'runkit_method_add',
+ 'runkit_method_copy',
+ 'runkit_method_redefine',
+ 'runkit_method_remove',
+ 'runkit_method_rename',
+ 'runkit_return_value_used',
+ 'runkit_sandbox_output_handler',
+ 'runkit_superglobals'),
+ 'ssdeep': ('ssdeep_fuzzy_compare',
+ 'ssdeep_fuzzy_hash_filename',
+ 'ssdeep_fuzzy_hash'),
+ 'vpopmail': ('vpopmail_add_alias_domain_ex',
+ 'vpopmail_add_alias_domain',
+ 'vpopmail_add_domain_ex',
+ 'vpopmail_add_domain',
+ 'vpopmail_add_user',
+ 'vpopmail_alias_add',
+ 'vpopmail_alias_del_domain',
+ 'vpopmail_alias_del',
+ 'vpopmail_alias_get_all',
+ 'vpopmail_alias_get',
+ 'vpopmail_auth_user',
+ 'vpopmail_del_domain_ex',
+ 'vpopmail_del_domain',
+ 'vpopmail_del_user',
+ 'vpopmail_error',
+ 'vpopmail_passwd',
+ 'vpopmail_set_user_quota'),
+ 'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'),
+ 'win32service': ('win32_continue_service',
+ 'win32_create_service',
+ 'win32_delete_service',
+ 'win32_get_last_control_message',
+ 'win32_pause_service',
+ 'win32_query_service_status',
+ 'win32_set_service_status',
+ 'win32_start_service_ctrl_dispatcher',
+ 'win32_start_service',
+ 'win32_stop_service'),
+ 'xattr': ('xattr_get',
+ 'xattr_list',
+ 'xattr_remove',
+ 'xattr_set',
+ 'xattr_supported'),
+ 'xdiff': ('xdiff_file_bdiff_size',
+ 'xdiff_file_bdiff',
+ 'xdiff_file_bpatch',
+ 'xdiff_file_diff_binary',
+ 'xdiff_file_diff',
+ 'xdiff_file_merge3',
+ 'xdiff_file_patch_binary',
+ 'xdiff_file_patch',
+ 'xdiff_file_rabdiff',
+ 'xdiff_string_bdiff_size',
+ 'xdiff_string_bdiff',
+ 'xdiff_string_bpatch',
+ 'xdiff_string_diff_binary',
+ 'xdiff_string_diff',
+ 'xdiff_string_merge3',
+ 'xdiff_string_patch_binary',
+ 'xdiff_string_patch',
+ 'xdiff_string_rabdiff')}
+
+
+if __name__ == '__main__': # pragma: no cover
+ import glob
+ import os
+ import pprint
+ import re
+ import shutil
+ import tarfile
+ try:
+ from urllib import urlretrieve
+ except ImportError:
+ from urllib.request import urlretrieve
+
+ PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
+ PHP_MANUAL_DIR = './php-chunked-xhtml/'
+ PHP_REFERENCE_GLOB = 'ref.*'
PHP_FUNCTION_RE = r'<a href="function\..*?\.html">(.*?)</a>'
- PHP_MODULE_RE = '<title>(.*?) Functions</title>'
-
- def get_php_functions():
- function_re = re.compile(PHP_FUNCTION_RE)
- module_re = re.compile(PHP_MODULE_RE)
- modules = {}
-
- for file in get_php_references():
- module = ''
+ PHP_MODULE_RE = '<title>(.*?) Functions</title>'
+
+ def get_php_functions():
+ function_re = re.compile(PHP_FUNCTION_RE)
+ module_re = re.compile(PHP_MODULE_RE)
+ modules = {}
+
+ for file in get_php_references():
+ module = ''
with open(file) as f:
for line in f:
if not module:
@@ -4702,51 +4702,51 @@ if __name__ == '__main__': # pragma: no cover
if search:
module = search.group(1)
modules[module] = []
-
+
elif 'href="function.' in line:
for match in function_re.finditer(line):
fn = match.group(1)
if '-&gt;' not in fn and '::' not in fn and fn not in modules[module]:
modules[module].append(fn)
-
- if module:
- # These are dummy manual pages, not actual functions
- if module == 'PHP Options/Info':
- modules[module].remove('main')
-
- if module == 'Filesystem':
- modules[module].remove('delete')
-
- if not modules[module]:
- del modules[module]
-
- return modules
-
- def get_php_references():
- download = urlretrieve(PHP_MANUAL_URL)
+
+ if module:
+ # These are dummy manual pages, not actual functions
+ if module == 'PHP Options/Info':
+ modules[module].remove('main')
+
+ if module == 'Filesystem':
+ modules[module].remove('delete')
+
+ if not modules[module]:
+ del modules[module]
+
+ return modules
+
+ def get_php_references():
+ download = urlretrieve(PHP_MANUAL_URL)
with tarfile.open(download[0]) as tar:
tar.extractall()
yield from glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB))
- os.remove(download[0])
-
- def regenerate(filename, modules):
- with open(filename) as fp:
- content = fp.read()
-
- header = content[:content.find('MODULES = {')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- with open(filename, 'w') as fp:
- fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
- fp.write(footer)
-
- def run():
- print('>> Downloading Function Index')
- modules = get_php_functions()
- total = sum(len(v) for v in modules.values())
- print('%d functions found' % total)
- regenerate(__file__, modules)
- shutil.rmtree(PHP_MANUAL_DIR)
-
- run()
+ os.remove(download[0])
+
+ def regenerate(filename, modules):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('MODULES = {')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(footer)
+
+ def run():
+ print('>> Downloading Function Index')
+ modules = get_php_functions()
+ total = sum(len(v) for v in modules.values())
+ print('%d functions found' % total)
+ regenerate(__file__, modules)
+ shutil.rmtree(PHP_MANUAL_DIR)
+
+ run()
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
index 93bc42059a..b4b1bd6c85 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
@@ -1,677 +1,677 @@
-"""
- pygments.lexers._postgres_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Self-updating data files for PostgreSQL lexer.
-
+"""
+ pygments.lexers._postgres_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Self-updating data files for PostgreSQL lexer.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-# Autogenerated: please edit them if you like wasting your time.
-
-KEYWORDS = (
- 'ABORT',
- 'ABSOLUTE',
- 'ACCESS',
- 'ACTION',
- 'ADD',
- 'ADMIN',
- 'AFTER',
- 'AGGREGATE',
- 'ALL',
- 'ALSO',
- 'ALTER',
- 'ALWAYS',
- 'ANALYSE',
- 'ANALYZE',
- 'AND',
- 'ANY',
- 'ARRAY',
- 'AS',
- 'ASC',
- 'ASSERTION',
- 'ASSIGNMENT',
- 'ASYMMETRIC',
- 'AT',
+ :license: BSD, see LICENSE for details.
+"""
+
+
+# Autogenerated: please edit them if you like wasting your time.
+
+KEYWORDS = (
+ 'ABORT',
+ 'ABSOLUTE',
+ 'ACCESS',
+ 'ACTION',
+ 'ADD',
+ 'ADMIN',
+ 'AFTER',
+ 'AGGREGATE',
+ 'ALL',
+ 'ALSO',
+ 'ALTER',
+ 'ALWAYS',
+ 'ANALYSE',
+ 'ANALYZE',
+ 'AND',
+ 'ANY',
+ 'ARRAY',
+ 'AS',
+ 'ASC',
+ 'ASSERTION',
+ 'ASSIGNMENT',
+ 'ASYMMETRIC',
+ 'AT',
'ATTACH',
- 'ATTRIBUTE',
- 'AUTHORIZATION',
- 'BACKWARD',
- 'BEFORE',
- 'BEGIN',
- 'BETWEEN',
- 'BIGINT',
- 'BINARY',
- 'BIT',
- 'BOOLEAN',
- 'BOTH',
- 'BY',
- 'CACHE',
+ 'ATTRIBUTE',
+ 'AUTHORIZATION',
+ 'BACKWARD',
+ 'BEFORE',
+ 'BEGIN',
+ 'BETWEEN',
+ 'BIGINT',
+ 'BINARY',
+ 'BIT',
+ 'BOOLEAN',
+ 'BOTH',
+ 'BY',
+ 'CACHE',
'CALL',
- 'CALLED',
- 'CASCADE',
- 'CASCADED',
- 'CASE',
- 'CAST',
- 'CATALOG',
- 'CHAIN',
- 'CHAR',
- 'CHARACTER',
- 'CHARACTERISTICS',
- 'CHECK',
- 'CHECKPOINT',
- 'CLASS',
- 'CLOSE',
- 'CLUSTER',
- 'COALESCE',
- 'COLLATE',
- 'COLLATION',
- 'COLUMN',
+ 'CALLED',
+ 'CASCADE',
+ 'CASCADED',
+ 'CASE',
+ 'CAST',
+ 'CATALOG',
+ 'CHAIN',
+ 'CHAR',
+ 'CHARACTER',
+ 'CHARACTERISTICS',
+ 'CHECK',
+ 'CHECKPOINT',
+ 'CLASS',
+ 'CLOSE',
+ 'CLUSTER',
+ 'COALESCE',
+ 'COLLATE',
+ 'COLLATION',
+ 'COLUMN',
'COLUMNS',
- 'COMMENT',
- 'COMMENTS',
- 'COMMIT',
- 'COMMITTED',
- 'CONCURRENTLY',
- 'CONFIGURATION',
+ 'COMMENT',
+ 'COMMENTS',
+ 'COMMIT',
+ 'COMMITTED',
+ 'CONCURRENTLY',
+ 'CONFIGURATION',
'CONFLICT',
- 'CONNECTION',
- 'CONSTRAINT',
- 'CONSTRAINTS',
- 'CONTENT',
- 'CONTINUE',
- 'CONVERSION',
- 'COPY',
- 'COST',
- 'CREATE',
- 'CROSS',
- 'CSV',
+ 'CONNECTION',
+ 'CONSTRAINT',
+ 'CONSTRAINTS',
+ 'CONTENT',
+ 'CONTINUE',
+ 'CONVERSION',
+ 'COPY',
+ 'COST',
+ 'CREATE',
+ 'CROSS',
+ 'CSV',
'CUBE',
- 'CURRENT',
- 'CURRENT_CATALOG',
- 'CURRENT_DATE',
- 'CURRENT_ROLE',
- 'CURRENT_SCHEMA',
- 'CURRENT_TIME',
- 'CURRENT_TIMESTAMP',
- 'CURRENT_USER',
- 'CURSOR',
- 'CYCLE',
- 'DATA',
- 'DATABASE',
- 'DAY',
- 'DEALLOCATE',
- 'DEC',
- 'DECIMAL',
- 'DECLARE',
- 'DEFAULT',
- 'DEFAULTS',
- 'DEFERRABLE',
- 'DEFERRED',
- 'DEFINER',
- 'DELETE',
- 'DELIMITER',
- 'DELIMITERS',
+ 'CURRENT',
+ 'CURRENT_CATALOG',
+ 'CURRENT_DATE',
+ 'CURRENT_ROLE',
+ 'CURRENT_SCHEMA',
+ 'CURRENT_TIME',
+ 'CURRENT_TIMESTAMP',
+ 'CURRENT_USER',
+ 'CURSOR',
+ 'CYCLE',
+ 'DATA',
+ 'DATABASE',
+ 'DAY',
+ 'DEALLOCATE',
+ 'DEC',
+ 'DECIMAL',
+ 'DECLARE',
+ 'DEFAULT',
+ 'DEFAULTS',
+ 'DEFERRABLE',
+ 'DEFERRED',
+ 'DEFINER',
+ 'DELETE',
+ 'DELIMITER',
+ 'DELIMITERS',
'DEPENDS',
- 'DESC',
+ 'DESC',
'DETACH',
- 'DICTIONARY',
- 'DISABLE',
- 'DISCARD',
- 'DISTINCT',
- 'DO',
- 'DOCUMENT',
- 'DOMAIN',
- 'DOUBLE',
- 'DROP',
- 'EACH',
- 'ELSE',
- 'ENABLE',
- 'ENCODING',
- 'ENCRYPTED',
- 'END',
- 'ENUM',
- 'ESCAPE',
- 'EVENT',
- 'EXCEPT',
- 'EXCLUDE',
- 'EXCLUDING',
- 'EXCLUSIVE',
- 'EXECUTE',
- 'EXISTS',
- 'EXPLAIN',
+ 'DICTIONARY',
+ 'DISABLE',
+ 'DISCARD',
+ 'DISTINCT',
+ 'DO',
+ 'DOCUMENT',
+ 'DOMAIN',
+ 'DOUBLE',
+ 'DROP',
+ 'EACH',
+ 'ELSE',
+ 'ENABLE',
+ 'ENCODING',
+ 'ENCRYPTED',
+ 'END',
+ 'ENUM',
+ 'ESCAPE',
+ 'EVENT',
+ 'EXCEPT',
+ 'EXCLUDE',
+ 'EXCLUDING',
+ 'EXCLUSIVE',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXPLAIN',
'EXPRESSION',
- 'EXTENSION',
- 'EXTERNAL',
- 'EXTRACT',
- 'FALSE',
- 'FAMILY',
- 'FETCH',
- 'FILTER',
- 'FIRST',
- 'FLOAT',
- 'FOLLOWING',
- 'FOR',
- 'FORCE',
- 'FOREIGN',
- 'FORWARD',
- 'FREEZE',
- 'FROM',
- 'FULL',
- 'FUNCTION',
- 'FUNCTIONS',
+ 'EXTENSION',
+ 'EXTERNAL',
+ 'EXTRACT',
+ 'FALSE',
+ 'FAMILY',
+ 'FETCH',
+ 'FILTER',
+ 'FIRST',
+ 'FLOAT',
+ 'FOLLOWING',
+ 'FOR',
+ 'FORCE',
+ 'FOREIGN',
+ 'FORWARD',
+ 'FREEZE',
+ 'FROM',
+ 'FULL',
+ 'FUNCTION',
+ 'FUNCTIONS',
'GENERATED',
- 'GLOBAL',
- 'GRANT',
- 'GRANTED',
- 'GREATEST',
- 'GROUP',
+ 'GLOBAL',
+ 'GRANT',
+ 'GRANTED',
+ 'GREATEST',
+ 'GROUP',
'GROUPING',
'GROUPS',
- 'HANDLER',
- 'HAVING',
- 'HEADER',
- 'HOLD',
- 'HOUR',
- 'IDENTITY',
- 'IF',
- 'ILIKE',
- 'IMMEDIATE',
- 'IMMUTABLE',
- 'IMPLICIT',
+ 'HANDLER',
+ 'HAVING',
+ 'HEADER',
+ 'HOLD',
+ 'HOUR',
+ 'IDENTITY',
+ 'IF',
+ 'ILIKE',
+ 'IMMEDIATE',
+ 'IMMUTABLE',
+ 'IMPLICIT',
'IMPORT',
- 'IN',
+ 'IN',
'INCLUDE',
- 'INCLUDING',
- 'INCREMENT',
- 'INDEX',
- 'INDEXES',
- 'INHERIT',
- 'INHERITS',
- 'INITIALLY',
- 'INLINE',
- 'INNER',
- 'INOUT',
- 'INPUT',
- 'INSENSITIVE',
- 'INSERT',
- 'INSTEAD',
- 'INT',
- 'INTEGER',
- 'INTERSECT',
- 'INTERVAL',
- 'INTO',
- 'INVOKER',
- 'IS',
- 'ISNULL',
- 'ISOLATION',
- 'JOIN',
- 'KEY',
- 'LABEL',
- 'LANGUAGE',
- 'LARGE',
- 'LAST',
- 'LATERAL',
- 'LEADING',
- 'LEAKPROOF',
- 'LEAST',
- 'LEFT',
- 'LEVEL',
- 'LIKE',
- 'LIMIT',
- 'LISTEN',
- 'LOAD',
- 'LOCAL',
- 'LOCALTIME',
- 'LOCALTIMESTAMP',
- 'LOCATION',
- 'LOCK',
+ 'INCLUDING',
+ 'INCREMENT',
+ 'INDEX',
+ 'INDEXES',
+ 'INHERIT',
+ 'INHERITS',
+ 'INITIALLY',
+ 'INLINE',
+ 'INNER',
+ 'INOUT',
+ 'INPUT',
+ 'INSENSITIVE',
+ 'INSERT',
+ 'INSTEAD',
+ 'INT',
+ 'INTEGER',
+ 'INTERSECT',
+ 'INTERVAL',
+ 'INTO',
+ 'INVOKER',
+ 'IS',
+ 'ISNULL',
+ 'ISOLATION',
+ 'JOIN',
+ 'KEY',
+ 'LABEL',
+ 'LANGUAGE',
+ 'LARGE',
+ 'LAST',
+ 'LATERAL',
+ 'LEADING',
+ 'LEAKPROOF',
+ 'LEAST',
+ 'LEFT',
+ 'LEVEL',
+ 'LIKE',
+ 'LIMIT',
+ 'LISTEN',
+ 'LOAD',
+ 'LOCAL',
+ 'LOCALTIME',
+ 'LOCALTIMESTAMP',
+ 'LOCATION',
+ 'LOCK',
'LOCKED',
'LOGGED',
- 'MAPPING',
- 'MATCH',
- 'MATERIALIZED',
- 'MAXVALUE',
+ 'MAPPING',
+ 'MATCH',
+ 'MATERIALIZED',
+ 'MAXVALUE',
'METHOD',
- 'MINUTE',
- 'MINVALUE',
- 'MODE',
- 'MONTH',
- 'MOVE',
- 'NAME',
- 'NAMES',
- 'NATIONAL',
- 'NATURAL',
- 'NCHAR',
+ 'MINUTE',
+ 'MINVALUE',
+ 'MODE',
+ 'MONTH',
+ 'MOVE',
+ 'NAME',
+ 'NAMES',
+ 'NATIONAL',
+ 'NATURAL',
+ 'NCHAR',
'NEW',
- 'NEXT',
+ 'NEXT',
'NFC',
'NFD',
'NFKC',
'NFKD',
- 'NO',
- 'NONE',
+ 'NO',
+ 'NONE',
'NORMALIZE',
'NORMALIZED',
- 'NOT',
- 'NOTHING',
- 'NOTIFY',
- 'NOTNULL',
- 'NOWAIT',
- 'NULL',
- 'NULLIF',
- 'NULLS',
- 'NUMERIC',
- 'OBJECT',
- 'OF',
- 'OFF',
- 'OFFSET',
- 'OIDS',
+ 'NOT',
+ 'NOTHING',
+ 'NOTIFY',
+ 'NOTNULL',
+ 'NOWAIT',
+ 'NULL',
+ 'NULLIF',
+ 'NULLS',
+ 'NUMERIC',
+ 'OBJECT',
+ 'OF',
+ 'OFF',
+ 'OFFSET',
+ 'OIDS',
'OLD',
- 'ON',
- 'ONLY',
- 'OPERATOR',
- 'OPTION',
- 'OPTIONS',
- 'OR',
- 'ORDER',
- 'ORDINALITY',
+ 'ON',
+ 'ONLY',
+ 'OPERATOR',
+ 'OPTION',
+ 'OPTIONS',
+ 'OR',
+ 'ORDER',
+ 'ORDINALITY',
'OTHERS',
- 'OUT',
- 'OUTER',
- 'OVER',
- 'OVERLAPS',
- 'OVERLAY',
+ 'OUT',
+ 'OUTER',
+ 'OVER',
+ 'OVERLAPS',
+ 'OVERLAY',
'OVERRIDING',
- 'OWNED',
- 'OWNER',
+ 'OWNED',
+ 'OWNER',
'PARALLEL',
- 'PARSER',
- 'PARTIAL',
- 'PARTITION',
- 'PASSING',
- 'PASSWORD',
- 'PLACING',
- 'PLANS',
- 'POLICY',
- 'POSITION',
- 'PRECEDING',
- 'PRECISION',
- 'PREPARE',
- 'PREPARED',
- 'PRESERVE',
- 'PRIMARY',
- 'PRIOR',
- 'PRIVILEGES',
- 'PROCEDURAL',
- 'PROCEDURE',
+ 'PARSER',
+ 'PARTIAL',
+ 'PARTITION',
+ 'PASSING',
+ 'PASSWORD',
+ 'PLACING',
+ 'PLANS',
+ 'POLICY',
+ 'POSITION',
+ 'PRECEDING',
+ 'PRECISION',
+ 'PREPARE',
+ 'PREPARED',
+ 'PRESERVE',
+ 'PRIMARY',
+ 'PRIOR',
+ 'PRIVILEGES',
+ 'PROCEDURAL',
+ 'PROCEDURE',
'PROCEDURES',
- 'PROGRAM',
+ 'PROGRAM',
'PUBLICATION',
- 'QUOTE',
- 'RANGE',
- 'READ',
- 'REAL',
- 'REASSIGN',
- 'RECHECK',
- 'RECURSIVE',
- 'REF',
- 'REFERENCES',
+ 'QUOTE',
+ 'RANGE',
+ 'READ',
+ 'REAL',
+ 'REASSIGN',
+ 'RECHECK',
+ 'RECURSIVE',
+ 'REF',
+ 'REFERENCES',
'REFERENCING',
- 'REFRESH',
- 'REINDEX',
- 'RELATIVE',
- 'RELEASE',
- 'RENAME',
- 'REPEATABLE',
- 'REPLACE',
- 'REPLICA',
- 'RESET',
- 'RESTART',
- 'RESTRICT',
- 'RETURNING',
- 'RETURNS',
- 'REVOKE',
- 'RIGHT',
- 'ROLE',
- 'ROLLBACK',
+ 'REFRESH',
+ 'REINDEX',
+ 'RELATIVE',
+ 'RELEASE',
+ 'RENAME',
+ 'REPEATABLE',
+ 'REPLACE',
+ 'REPLICA',
+ 'RESET',
+ 'RESTART',
+ 'RESTRICT',
+ 'RETURNING',
+ 'RETURNS',
+ 'REVOKE',
+ 'RIGHT',
+ 'ROLE',
+ 'ROLLBACK',
'ROLLUP',
'ROUTINE',
'ROUTINES',
- 'ROW',
- 'ROWS',
- 'RULE',
- 'SAVEPOINT',
- 'SCHEMA',
+ 'ROW',
+ 'ROWS',
+ 'RULE',
+ 'SAVEPOINT',
+ 'SCHEMA',
'SCHEMAS',
- 'SCROLL',
- 'SEARCH',
- 'SECOND',
- 'SECURITY',
- 'SELECT',
- 'SEQUENCE',
- 'SEQUENCES',
- 'SERIALIZABLE',
- 'SERVER',
- 'SESSION',
- 'SESSION_USER',
- 'SET',
- 'SETOF',
+ 'SCROLL',
+ 'SEARCH',
+ 'SECOND',
+ 'SECURITY',
+ 'SELECT',
+ 'SEQUENCE',
+ 'SEQUENCES',
+ 'SERIALIZABLE',
+ 'SERVER',
+ 'SESSION',
+ 'SESSION_USER',
+ 'SET',
+ 'SETOF',
'SETS',
- 'SHARE',
- 'SHOW',
- 'SIMILAR',
- 'SIMPLE',
+ 'SHARE',
+ 'SHOW',
+ 'SIMILAR',
+ 'SIMPLE',
'SKIP',
- 'SMALLINT',
- 'SNAPSHOT',
- 'SOME',
+ 'SMALLINT',
+ 'SNAPSHOT',
+ 'SOME',
'SQL',
- 'STABLE',
- 'STANDALONE',
- 'START',
- 'STATEMENT',
- 'STATISTICS',
- 'STDIN',
- 'STDOUT',
- 'STORAGE',
+ 'STABLE',
+ 'STANDALONE',
+ 'START',
+ 'STATEMENT',
+ 'STATISTICS',
+ 'STDIN',
+ 'STDOUT',
+ 'STORAGE',
'STORED',
- 'STRICT',
- 'STRIP',
+ 'STRICT',
+ 'STRIP',
'SUBSCRIPTION',
- 'SUBSTRING',
+ 'SUBSTRING',
'SUPPORT',
- 'SYMMETRIC',
- 'SYSID',
- 'SYSTEM',
- 'TABLE',
- 'TABLES',
+ 'SYMMETRIC',
+ 'SYSID',
+ 'SYSTEM',
+ 'TABLE',
+ 'TABLES',
'TABLESAMPLE',
- 'TABLESPACE',
- 'TEMP',
- 'TEMPLATE',
- 'TEMPORARY',
- 'TEXT',
- 'THEN',
+ 'TABLESPACE',
+ 'TEMP',
+ 'TEMPLATE',
+ 'TEMPORARY',
+ 'TEXT',
+ 'THEN',
'TIES',
- 'TIME',
- 'TIMESTAMP',
- 'TO',
- 'TRAILING',
- 'TRANSACTION',
+ 'TIME',
+ 'TIMESTAMP',
+ 'TO',
+ 'TRAILING',
+ 'TRANSACTION',
'TRANSFORM',
- 'TREAT',
- 'TRIGGER',
- 'TRIM',
- 'TRUE',
- 'TRUNCATE',
- 'TRUSTED',
- 'TYPE',
- 'TYPES',
+ 'TREAT',
+ 'TRIGGER',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNCATE',
+ 'TRUSTED',
+ 'TYPE',
+ 'TYPES',
'UESCAPE',
- 'UNBOUNDED',
- 'UNCOMMITTED',
- 'UNENCRYPTED',
- 'UNION',
- 'UNIQUE',
- 'UNKNOWN',
- 'UNLISTEN',
- 'UNLOGGED',
- 'UNTIL',
- 'UPDATE',
- 'USER',
- 'USING',
- 'VACUUM',
- 'VALID',
- 'VALIDATE',
- 'VALIDATOR',
- 'VALUE',
- 'VALUES',
- 'VARCHAR',
- 'VARIADIC',
- 'VARYING',
- 'VERBOSE',
- 'VERSION',
- 'VIEW',
- 'VIEWS',
- 'VOLATILE',
- 'WHEN',
- 'WHERE',
- 'WHITESPACE',
- 'WINDOW',
- 'WITH',
- 'WITHIN',
- 'WITHOUT',
- 'WORK',
- 'WRAPPER',
- 'WRITE',
- 'XML',
- 'XMLATTRIBUTES',
- 'XMLCONCAT',
- 'XMLELEMENT',
- 'XMLEXISTS',
- 'XMLFOREST',
+ 'UNBOUNDED',
+ 'UNCOMMITTED',
+ 'UNENCRYPTED',
+ 'UNION',
+ 'UNIQUE',
+ 'UNKNOWN',
+ 'UNLISTEN',
+ 'UNLOGGED',
+ 'UNTIL',
+ 'UPDATE',
+ 'USER',
+ 'USING',
+ 'VACUUM',
+ 'VALID',
+ 'VALIDATE',
+ 'VALIDATOR',
+ 'VALUE',
+ 'VALUES',
+ 'VARCHAR',
+ 'VARIADIC',
+ 'VARYING',
+ 'VERBOSE',
+ 'VERSION',
+ 'VIEW',
+ 'VIEWS',
+ 'VOLATILE',
+ 'WHEN',
+ 'WHERE',
+ 'WHITESPACE',
+ 'WINDOW',
+ 'WITH',
+ 'WITHIN',
+ 'WITHOUT',
+ 'WORK',
+ 'WRAPPER',
+ 'WRITE',
+ 'XML',
+ 'XMLATTRIBUTES',
+ 'XMLCONCAT',
+ 'XMLELEMENT',
+ 'XMLEXISTS',
+ 'XMLFOREST',
'XMLNAMESPACES',
- 'XMLPARSE',
- 'XMLPI',
- 'XMLROOT',
- 'XMLSERIALIZE',
+ 'XMLPARSE',
+ 'XMLPI',
+ 'XMLROOT',
+ 'XMLSERIALIZE',
'XMLTABLE',
- 'YEAR',
- 'YES',
- 'ZONE',
-)
-
-DATATYPES = (
- 'bigint',
- 'bigserial',
- 'bit',
- 'bit varying',
- 'bool',
- 'boolean',
- 'box',
- 'bytea',
- 'char',
- 'character',
- 'character varying',
- 'cidr',
- 'circle',
- 'date',
- 'decimal',
- 'double precision',
- 'float4',
- 'float8',
- 'inet',
- 'int',
- 'int2',
- 'int4',
- 'int8',
- 'integer',
- 'interval',
- 'json',
- 'jsonb',
- 'line',
- 'lseg',
- 'macaddr',
+ 'YEAR',
+ 'YES',
+ 'ZONE',
+)
+
+DATATYPES = (
+ 'bigint',
+ 'bigserial',
+ 'bit',
+ 'bit varying',
+ 'bool',
+ 'boolean',
+ 'box',
+ 'bytea',
+ 'char',
+ 'character',
+ 'character varying',
+ 'cidr',
+ 'circle',
+ 'date',
+ 'decimal',
+ 'double precision',
+ 'float4',
+ 'float8',
+ 'inet',
+ 'int',
+ 'int2',
+ 'int4',
+ 'int8',
+ 'integer',
+ 'interval',
+ 'json',
+ 'jsonb',
+ 'line',
+ 'lseg',
+ 'macaddr',
'macaddr8',
- 'money',
- 'numeric',
- 'path',
- 'pg_lsn',
+ 'money',
+ 'numeric',
+ 'path',
+ 'pg_lsn',
'pg_snapshot',
- 'point',
- 'polygon',
- 'real',
- 'serial',
- 'serial2',
- 'serial4',
- 'serial8',
- 'smallint',
- 'smallserial',
- 'text',
- 'time',
- 'timestamp',
- 'timestamptz',
- 'timetz',
- 'tsquery',
- 'tsvector',
- 'txid_snapshot',
- 'uuid',
- 'varbit',
- 'varchar',
- 'with time zone',
- 'without time zone',
- 'xml',
-)
-
-PSEUDO_TYPES = (
- 'any',
+ 'point',
+ 'polygon',
+ 'real',
+ 'serial',
+ 'serial2',
+ 'serial4',
+ 'serial8',
+ 'smallint',
+ 'smallserial',
+ 'text',
+ 'time',
+ 'timestamp',
+ 'timestamptz',
+ 'timetz',
+ 'tsquery',
+ 'tsvector',
+ 'txid_snapshot',
+ 'uuid',
+ 'varbit',
+ 'varchar',
+ 'with time zone',
+ 'without time zone',
+ 'xml',
+)
+
+PSEUDO_TYPES = (
+ 'any',
'anyarray',
'anycompatible',
'anycompatiblearray',
'anycompatiblenonarray',
'anycompatiblerange',
- 'anyelement',
+ 'anyelement',
'anyenum',
- 'anynonarray',
- 'anyrange',
- 'cstring',
+ 'anynonarray',
+ 'anyrange',
+ 'cstring',
'event_trigger',
'fdw_handler',
'index_am_handler',
- 'internal',
- 'language_handler',
+ 'internal',
+ 'language_handler',
'pg_ddl_command',
- 'record',
+ 'record',
'table_am_handler',
- 'trigger',
+ 'trigger',
'tsm_handler',
'unknown',
- 'void',
-)
-
-# Remove 'trigger' from types
-PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))))
-
-PLPGSQL_KEYWORDS = (
- 'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
- 'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
- 'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import re
- try:
- from urllib import urlopen
- except ImportError:
- from urllib.request import urlopen
-
- from pygments.util import format_lines
-
- # One man's constant is another man's variable.
- SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
+ 'void',
+)
+
+# Remove 'trigger' from types
+PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))))
+
+PLPGSQL_KEYWORDS = (
+ 'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
+ 'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
+ 'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ try:
+ from urllib import urlopen
+ except ImportError:
+ from urllib.request import urlopen
+
+ from pygments.util import format_lines
+
+ # One man's constant is another man's variable.
+ SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
KEYWORDS_URL = SOURCE_URL + '/src/include/parser/kwlist.h'
- DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
-
- def update_myself():
+ DATATYPES_URL = SOURCE_URL + '/doc/src/sgml/datatype.sgml'
+
+ def update_myself():
content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore')
data_file = list(content.splitlines())
- datatypes = parse_datatypes(data_file)
- pseudos = parse_pseudos(data_file)
-
+ datatypes = parse_datatypes(data_file)
+ pseudos = parse_pseudos(data_file)
+
content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore')
keywords = parse_keywords(content)
- update_consts(__file__, 'DATATYPES', datatypes)
- update_consts(__file__, 'PSEUDO_TYPES', pseudos)
- update_consts(__file__, 'KEYWORDS', keywords)
-
- def parse_keywords(f):
- kw = []
+ update_consts(__file__, 'DATATYPES', datatypes)
+ update_consts(__file__, 'PSEUDO_TYPES', pseudos)
+ update_consts(__file__, 'KEYWORDS', keywords)
+
+ def parse_keywords(f):
+ kw = []
for m in re.finditer(r'PG_KEYWORD\("(.+?)"', f):
kw.append(m.group(1).upper())
-
- if not kw:
- raise ValueError('no keyword found')
-
- kw.sort()
- return kw
-
- def parse_datatypes(f):
- dt = set()
- for line in f:
- if '<sect1' in line:
- break
- if '<entry><type>' not in line:
- continue
-
- # Parse a string such as
- # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
- # into types "time" and "without time zone"
-
- # remove all the tags
- line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
- line = re.sub("<[^>]+>", "", line)
-
- # Drop the parts containing braces
- for tmp in [t for tmp in line.split('[')
- for t in tmp.split(']') if "(" not in t]:
- for t in tmp.split(','):
- t = t.strip()
- if not t: continue
- dt.add(" ".join(t.split()))
-
- dt = list(dt)
- dt.sort()
- return dt
-
- def parse_pseudos(f):
- dt = []
- re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
+
+ if not kw:
+ raise ValueError('no keyword found')
+
+ kw.sort()
+ return kw
+
+ def parse_datatypes(f):
+ dt = set()
+ for line in f:
+ if '<sect1' in line:
+ break
+ if '<entry><type>' not in line:
+ continue
+
+ # Parse a string such as
+ # time [ (<replaceable>p</replaceable>) ] [ without time zone ]
+ # into types "time" and "without time zone"
+
+ # remove all the tags
+ line = re.sub("<replaceable>[^<]+</replaceable>", "", line)
+ line = re.sub("<[^>]+>", "", line)
+
+ # Drop the parts containing braces
+ for tmp in [t for tmp in line.split('[')
+ for t in tmp.split(']') if "(" not in t]:
+ for t in tmp.split(','):
+ t = t.strip()
+ if not t: continue
+ dt.add(" ".join(t.split()))
+
+ dt = list(dt)
+ dt.sort()
+ return dt
+
+ def parse_pseudos(f):
+ dt = []
+ re_start = re.compile(r'\s*<table id="datatype-pseudotypes-table">')
re_entry = re.compile(r'\s*<entry><type>(.+?)</type></entry>')
- re_end = re.compile(r'\s*</table>')
-
- f = iter(f)
- for line in f:
- if re_start.match(line) is not None:
- break
- else:
- raise ValueError('pseudo datatypes table not found')
-
- for line in f:
- m = re_entry.match(line)
- if m is not None:
- dt.append(m.group(1))
-
- if re_end.match(line) is not None:
- break
- else:
- raise ValueError('end of pseudo datatypes table not found')
-
- if not dt:
- raise ValueError('pseudo datatypes not found')
-
+ re_end = re.compile(r'\s*</table>')
+
+ f = iter(f)
+ for line in f:
+ if re_start.match(line) is not None:
+ break
+ else:
+ raise ValueError('pseudo datatypes table not found')
+
+ for line in f:
+ m = re_entry.match(line)
+ if m is not None:
+ dt.append(m.group(1))
+
+ if re_end.match(line) is not None:
+ break
+ else:
+ raise ValueError('end of pseudo datatypes table not found')
+
+ if not dt:
+ raise ValueError('pseudo datatypes not found')
+
dt.sort()
- return dt
-
- def update_consts(filename, constname, content):
- with open(filename) as f:
- data = f.read()
-
- # Line to start/end inserting
- re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
- m = re_match.search(data)
- if not m:
- raise ValueError('Could not find existing definition for %s' %
- (constname,))
-
- new_block = format_lines(constname, content)
- data = data[:m.start()] + new_block + data[m.end():]
-
+ return dt
+
+ def update_consts(filename, constname, content):
+ with open(filename) as f:
+ data = f.read()
+
+ # Line to start/end inserting
+ re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
+ m = re_match.search(data)
+ if not m:
+ raise ValueError('Could not find existing definition for %s' %
+ (constname,))
+
+ new_block = format_lines(constname, content)
+ data = data[:m.start()] + new_block + data[m.end():]
+
with open(filename, 'w', newline='\n') as f:
- f.write(data)
-
- update_myself()
+ f.write(data)
+
+ update_myself()
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
index e2cfcb9622..acdceb3827 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
@@ -1,3093 +1,3093 @@
-"""
- pygments.lexers._scilab_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the ScilabLexer.
-
+"""
+ pygments.lexers._scilab_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtin list for the ScilabLexer.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Autogenerated
-
-commands_kw = (
- 'abort',
- 'apropos',
- 'break',
- 'case',
- 'catch',
- 'continue',
- 'do',
- 'else',
- 'elseif',
- 'end',
- 'endfunction',
- 'for',
- 'function',
- 'help',
- 'if',
- 'pause',
- 'quit',
- 'select',
- 'then',
- 'try',
- 'while',
-)
-
-functions_kw = (
- '!!_invoke_',
- '%H5Object_e',
- '%H5Object_fieldnames',
- '%H5Object_p',
- '%XMLAttr_6',
- '%XMLAttr_e',
- '%XMLAttr_i_XMLElem',
- '%XMLAttr_length',
- '%XMLAttr_p',
- '%XMLAttr_size',
- '%XMLDoc_6',
- '%XMLDoc_e',
- '%XMLDoc_i_XMLList',
- '%XMLDoc_p',
- '%XMLElem_6',
- '%XMLElem_e',
- '%XMLElem_i_XMLDoc',
- '%XMLElem_i_XMLElem',
- '%XMLElem_i_XMLList',
- '%XMLElem_p',
- '%XMLList_6',
- '%XMLList_e',
- '%XMLList_i_XMLElem',
- '%XMLList_i_XMLList',
- '%XMLList_length',
- '%XMLList_p',
- '%XMLList_size',
- '%XMLNs_6',
- '%XMLNs_e',
- '%XMLNs_i_XMLElem',
- '%XMLNs_p',
- '%XMLSet_6',
- '%XMLSet_e',
- '%XMLSet_length',
- '%XMLSet_p',
- '%XMLSet_size',
- '%XMLValid_p',
- '%_EClass_6',
- '%_EClass_e',
- '%_EClass_p',
- '%_EObj_0',
- '%_EObj_1__EObj',
- '%_EObj_1_b',
- '%_EObj_1_c',
- '%_EObj_1_i',
- '%_EObj_1_s',
- '%_EObj_2__EObj',
- '%_EObj_2_b',
- '%_EObj_2_c',
- '%_EObj_2_i',
- '%_EObj_2_s',
- '%_EObj_3__EObj',
- '%_EObj_3_b',
- '%_EObj_3_c',
- '%_EObj_3_i',
- '%_EObj_3_s',
- '%_EObj_4__EObj',
- '%_EObj_4_b',
- '%_EObj_4_c',
- '%_EObj_4_i',
- '%_EObj_4_s',
- '%_EObj_5',
- '%_EObj_6',
- '%_EObj_a__EObj',
- '%_EObj_a_b',
- '%_EObj_a_c',
- '%_EObj_a_i',
- '%_EObj_a_s',
- '%_EObj_d__EObj',
- '%_EObj_d_b',
- '%_EObj_d_c',
- '%_EObj_d_i',
- '%_EObj_d_s',
- '%_EObj_disp',
- '%_EObj_e',
- '%_EObj_g__EObj',
- '%_EObj_g_b',
- '%_EObj_g_c',
- '%_EObj_g_i',
- '%_EObj_g_s',
- '%_EObj_h__EObj',
- '%_EObj_h_b',
- '%_EObj_h_c',
- '%_EObj_h_i',
- '%_EObj_h_s',
- '%_EObj_i__EObj',
- '%_EObj_j__EObj',
- '%_EObj_j_b',
- '%_EObj_j_c',
- '%_EObj_j_i',
- '%_EObj_j_s',
- '%_EObj_k__EObj',
- '%_EObj_k_b',
- '%_EObj_k_c',
- '%_EObj_k_i',
- '%_EObj_k_s',
- '%_EObj_l__EObj',
- '%_EObj_l_b',
- '%_EObj_l_c',
- '%_EObj_l_i',
- '%_EObj_l_s',
- '%_EObj_m__EObj',
- '%_EObj_m_b',
- '%_EObj_m_c',
- '%_EObj_m_i',
- '%_EObj_m_s',
- '%_EObj_n__EObj',
- '%_EObj_n_b',
- '%_EObj_n_c',
- '%_EObj_n_i',
- '%_EObj_n_s',
- '%_EObj_o__EObj',
- '%_EObj_o_b',
- '%_EObj_o_c',
- '%_EObj_o_i',
- '%_EObj_o_s',
- '%_EObj_p',
- '%_EObj_p__EObj',
- '%_EObj_p_b',
- '%_EObj_p_c',
- '%_EObj_p_i',
- '%_EObj_p_s',
- '%_EObj_q__EObj',
- '%_EObj_q_b',
- '%_EObj_q_c',
- '%_EObj_q_i',
- '%_EObj_q_s',
- '%_EObj_r__EObj',
- '%_EObj_r_b',
- '%_EObj_r_c',
- '%_EObj_r_i',
- '%_EObj_r_s',
- '%_EObj_s__EObj',
- '%_EObj_s_b',
- '%_EObj_s_c',
- '%_EObj_s_i',
- '%_EObj_s_s',
- '%_EObj_t',
- '%_EObj_x__EObj',
- '%_EObj_x_b',
- '%_EObj_x_c',
- '%_EObj_x_i',
- '%_EObj_x_s',
- '%_EObj_y__EObj',
- '%_EObj_y_b',
- '%_EObj_y_c',
- '%_EObj_y_i',
- '%_EObj_y_s',
- '%_EObj_z__EObj',
- '%_EObj_z_b',
- '%_EObj_z_c',
- '%_EObj_z_i',
- '%_EObj_z_s',
- '%_eigs',
- '%_load',
- '%b_1__EObj',
- '%b_2__EObj',
- '%b_3__EObj',
- '%b_4__EObj',
- '%b_a__EObj',
- '%b_d__EObj',
- '%b_g__EObj',
- '%b_h__EObj',
- '%b_i_XMLList',
- '%b_i__EObj',
- '%b_j__EObj',
- '%b_k__EObj',
- '%b_l__EObj',
- '%b_m__EObj',
- '%b_n__EObj',
- '%b_o__EObj',
- '%b_p__EObj',
- '%b_q__EObj',
- '%b_r__EObj',
- '%b_s__EObj',
- '%b_x__EObj',
- '%b_y__EObj',
- '%b_z__EObj',
- '%c_1__EObj',
- '%c_2__EObj',
- '%c_3__EObj',
- '%c_4__EObj',
- '%c_a__EObj',
- '%c_d__EObj',
- '%c_g__EObj',
- '%c_h__EObj',
- '%c_i_XMLAttr',
- '%c_i_XMLDoc',
- '%c_i_XMLElem',
- '%c_i_XMLList',
- '%c_i__EObj',
- '%c_j__EObj',
- '%c_k__EObj',
- '%c_l__EObj',
- '%c_m__EObj',
- '%c_n__EObj',
- '%c_o__EObj',
- '%c_p__EObj',
- '%c_q__EObj',
- '%c_r__EObj',
- '%c_s__EObj',
- '%c_x__EObj',
- '%c_y__EObj',
- '%c_z__EObj',
- '%ce_i_XMLList',
- '%fptr_i_XMLList',
- '%h_i_XMLList',
- '%hm_i_XMLList',
- '%i_1__EObj',
- '%i_2__EObj',
- '%i_3__EObj',
- '%i_4__EObj',
- '%i_a__EObj',
- '%i_abs',
- '%i_cumprod',
- '%i_cumsum',
- '%i_d__EObj',
- '%i_diag',
- '%i_g__EObj',
- '%i_h__EObj',
- '%i_i_XMLList',
- '%i_i__EObj',
- '%i_j__EObj',
- '%i_k__EObj',
- '%i_l__EObj',
- '%i_m__EObj',
- '%i_matrix',
- '%i_max',
- '%i_maxi',
- '%i_min',
- '%i_mini',
- '%i_mput',
- '%i_n__EObj',
- '%i_o__EObj',
- '%i_p',
- '%i_p__EObj',
- '%i_prod',
- '%i_q__EObj',
- '%i_r__EObj',
- '%i_s__EObj',
- '%i_sum',
- '%i_tril',
- '%i_triu',
- '%i_x__EObj',
- '%i_y__EObj',
- '%i_z__EObj',
- '%ip_i_XMLList',
- '%l_i_XMLList',
- '%l_i__EObj',
- '%lss_i_XMLList',
- '%mc_i_XMLList',
- '%msp_full',
- '%msp_i_XMLList',
- '%msp_spget',
- '%p_i_XMLList',
- '%ptr_i_XMLList',
- '%r_i_XMLList',
- '%s_1__EObj',
- '%s_2__EObj',
- '%s_3__EObj',
- '%s_4__EObj',
- '%s_a__EObj',
- '%s_d__EObj',
- '%s_g__EObj',
- '%s_h__EObj',
- '%s_i_XMLList',
- '%s_i__EObj',
- '%s_j__EObj',
- '%s_k__EObj',
- '%s_l__EObj',
- '%s_m__EObj',
- '%s_n__EObj',
- '%s_o__EObj',
- '%s_p__EObj',
- '%s_q__EObj',
- '%s_r__EObj',
- '%s_s__EObj',
- '%s_x__EObj',
- '%s_y__EObj',
- '%s_z__EObj',
- '%sp_i_XMLList',
- '%spb_i_XMLList',
- '%st_i_XMLList',
- 'Calendar',
- 'ClipBoard',
- 'Matplot',
- 'Matplot1',
- 'PlaySound',
- 'TCL_DeleteInterp',
- 'TCL_DoOneEvent',
- 'TCL_EvalFile',
- 'TCL_EvalStr',
- 'TCL_ExistArray',
- 'TCL_ExistInterp',
- 'TCL_ExistVar',
- 'TCL_GetVar',
- 'TCL_GetVersion',
- 'TCL_SetVar',
- 'TCL_UnsetVar',
- 'TCL_UpVar',
- '_',
- '_code2str',
- '_d',
- '_str2code',
- 'about',
- 'abs',
- 'acos',
- 'addModulePreferences',
- 'addcolor',
- 'addf',
- 'addhistory',
- 'addinter',
- 'addlocalizationdomain',
- 'amell',
- 'and',
- 'argn',
- 'arl2_ius',
- 'ascii',
- 'asin',
- 'atan',
- 'backslash',
- 'balanc',
- 'banner',
- 'base2dec',
- 'basename',
- 'bdiag',
- 'beep',
- 'besselh',
- 'besseli',
- 'besselj',
- 'besselk',
- 'bessely',
- 'beta',
- 'bezout',
- 'bfinit',
- 'blkfc1i',
- 'blkslvi',
- 'bool2s',
- 'browsehistory',
- 'browsevar',
- 'bsplin3val',
- 'buildDoc',
- 'buildouttb',
- 'bvode',
- 'c_link',
- 'call',
- 'callblk',
- 'captions',
- 'cd',
- 'cdfbet',
- 'cdfbin',
- 'cdfchi',
- 'cdfchn',
- 'cdff',
- 'cdffnc',
- 'cdfgam',
- 'cdfnbn',
- 'cdfnor',
- 'cdfpoi',
- 'cdft',
- 'ceil',
- 'champ',
- 'champ1',
- 'chdir',
- 'chol',
- 'clc',
- 'clean',
- 'clear',
- 'clearfun',
- 'clearglobal',
- 'closeEditor',
- 'closeEditvar',
- 'closeXcos',
- 'code2str',
- 'coeff',
- 'color',
- 'comp',
- 'completion',
- 'conj',
- 'contour2di',
- 'contr',
- 'conv2',
- 'convstr',
- 'copy',
- 'copyfile',
- 'corr',
- 'cos',
- 'coserror',
- 'createdir',
- 'cshep2d',
- 'csvDefault',
- 'csvIsnum',
- 'csvRead',
- 'csvStringToDouble',
- 'csvTextScan',
- 'csvWrite',
- 'ctree2',
- 'ctree3',
- 'ctree4',
- 'cumprod',
- 'cumsum',
- 'curblock',
- 'curblockc',
- 'daskr',
- 'dasrt',
- 'dassl',
- 'data2sig',
- 'datatipCreate',
- 'datatipManagerMode',
- 'datatipMove',
- 'datatipRemove',
- 'datatipSetDisplay',
- 'datatipSetInterp',
- 'datatipSetOrientation',
- 'datatipSetStyle',
- 'datatipToggle',
- 'dawson',
- 'dct',
- 'debug',
- 'dec2base',
- 'deff',
- 'definedfields',
- 'degree',
- 'delbpt',
- 'delete',
- 'deletefile',
- 'delip',
- 'delmenu',
- 'det',
- 'dgettext',
- 'dhinf',
- 'diag',
- 'diary',
- 'diffobjs',
- 'disp',
- 'dispbpt',
- 'displayhistory',
- 'disposefftwlibrary',
- 'dlgamma',
- 'dnaupd',
- 'dneupd',
- 'double',
- 'drawaxis',
- 'drawlater',
- 'drawnow',
- 'driver',
- 'dsaupd',
- 'dsearch',
- 'dseupd',
- 'dst',
- 'duplicate',
- 'editvar',
- 'emptystr',
- 'end_scicosim',
- 'ereduc',
- 'erf',
- 'erfc',
- 'erfcx',
- 'erfi',
- 'errcatch',
- 'errclear',
- 'error',
- 'eval_cshep2d',
- 'exec',
- 'execstr',
- 'exists',
- 'exit',
- 'exp',
- 'expm',
- 'exportUI',
- 'export_to_hdf5',
- 'eye',
- 'fadj2sp',
- 'fec',
- 'feval',
- 'fft',
- 'fftw',
- 'fftw_flags',
- 'fftw_forget_wisdom',
- 'fftwlibraryisloaded',
- 'figure',
- 'file',
- 'filebrowser',
- 'fileext',
- 'fileinfo',
- 'fileparts',
- 'filesep',
- 'find',
- 'findBD',
- 'findfiles',
- 'fire_closing_finished',
- 'floor',
- 'format',
- 'fort',
- 'fprintfMat',
- 'freq',
- 'frexp',
- 'fromc',
- 'fromjava',
- 'fscanfMat',
- 'fsolve',
- 'fstair',
- 'full',
- 'fullpath',
- 'funcprot',
- 'funptr',
- 'gamma',
- 'gammaln',
- 'geom3d',
- 'get',
- 'getURL',
- 'get_absolute_file_path',
- 'get_fftw_wisdom',
- 'getblocklabel',
- 'getcallbackobject',
- 'getdate',
- 'getdebuginfo',
- 'getdefaultlanguage',
- 'getdrives',
- 'getdynlibext',
- 'getenv',
- 'getfield',
- 'gethistory',
- 'gethistoryfile',
- 'getinstalledlookandfeels',
- 'getio',
- 'getlanguage',
- 'getlongpathname',
- 'getlookandfeel',
- 'getmd5',
- 'getmemory',
- 'getmodules',
- 'getos',
- 'getpid',
- 'getrelativefilename',
- 'getscicosvars',
- 'getscilabmode',
- 'getshortpathname',
- 'gettext',
- 'getvariablesonstack',
- 'getversion',
- 'glist',
- 'global',
- 'glue',
- 'grand',
- 'graphicfunction',
- 'grayplot',
- 'grep',
- 'gsort',
- 'gstacksize',
- 'h5attr',
- 'h5close',
- 'h5cp',
- 'h5dataset',
- 'h5dump',
- 'h5exists',
- 'h5flush',
- 'h5get',
- 'h5group',
- 'h5isArray',
- 'h5isAttr',
- 'h5isCompound',
- 'h5isFile',
- 'h5isGroup',
- 'h5isList',
- 'h5isRef',
- 'h5isSet',
- 'h5isSpace',
- 'h5isType',
- 'h5isVlen',
- 'h5label',
- 'h5ln',
- 'h5ls',
- 'h5mount',
- 'h5mv',
- 'h5open',
- 'h5read',
- 'h5readattr',
- 'h5rm',
- 'h5umount',
- 'h5write',
- 'h5writeattr',
- 'havewindow',
- 'helpbrowser',
- 'hess',
- 'hinf',
- 'historymanager',
- 'historysize',
- 'host',
- 'htmlDump',
- 'htmlRead',
- 'htmlReadStr',
- 'htmlWrite',
- 'iconvert',
- 'ieee',
- 'ilib_verbose',
- 'imag',
- 'impl',
- 'import_from_hdf5',
- 'imult',
- 'inpnvi',
- 'int',
- 'int16',
- 'int2d',
- 'int32',
- 'int3d',
- 'int8',
- 'interp',
- 'interp2d',
- 'interp3d',
- 'intg',
- 'intppty',
- 'inttype',
- 'inv',
- 'invoke_lu',
- 'is_handle_valid',
- 'is_hdf5_file',
- 'isalphanum',
- 'isascii',
- 'isdef',
- 'isdigit',
- 'isdir',
- 'isequal',
- 'isequalbitwise',
- 'iserror',
- 'isfile',
- 'isglobal',
- 'isletter',
- 'isnum',
- 'isreal',
- 'iswaitingforinput',
- 'jallowClassReloading',
- 'jarray',
- 'jautoTranspose',
- 'jautoUnwrap',
- 'javaclasspath',
- 'javalibrarypath',
- 'jcast',
- 'jcompile',
- 'jconvMatrixMethod',
- 'jcreatejar',
- 'jdeff',
- 'jdisableTrace',
- 'jenableTrace',
- 'jexists',
- 'jgetclassname',
- 'jgetfield',
- 'jgetfields',
- 'jgetinfo',
- 'jgetmethods',
- 'jimport',
- 'jinvoke',
- 'jinvoke_db',
- 'jnewInstance',
- 'jremove',
- 'jsetfield',
- 'junwrap',
- 'junwraprem',
- 'jwrap',
- 'jwrapinfloat',
- 'kron',
- 'lasterror',
- 'ldiv',
- 'ldivf',
- 'legendre',
- 'length',
- 'lib',
- 'librarieslist',
- 'libraryinfo',
- 'light',
- 'linear_interpn',
- 'lines',
- 'link',
- 'linmeq',
- 'list',
- 'listvar_in_hdf5',
- 'load',
- 'loadGui',
- 'loadScicos',
- 'loadXcos',
- 'loadfftwlibrary',
- 'loadhistory',
- 'log',
- 'log1p',
- 'lsq',
- 'lsq_splin',
- 'lsqrsolve',
- 'lsslist',
- 'lstcat',
- 'lstsize',
- 'ltitr',
- 'lu',
- 'ludel',
- 'lufact',
- 'luget',
- 'lusolve',
- 'macr2lst',
- 'macr2tree',
- 'matfile_close',
- 'matfile_listvar',
- 'matfile_open',
- 'matfile_varreadnext',
- 'matfile_varwrite',
- 'matrix',
- 'max',
- 'maxfiles',
- 'mclearerr',
- 'mclose',
- 'meof',
- 'merror',
- 'messagebox',
- 'mfprintf',
- 'mfscanf',
- 'mget',
- 'mgeti',
- 'mgetl',
- 'mgetstr',
- 'min',
- 'mlist',
- 'mode',
- 'model2blk',
- 'mopen',
- 'move',
- 'movefile',
- 'mprintf',
- 'mput',
- 'mputl',
- 'mputstr',
- 'mscanf',
- 'mseek',
- 'msprintf',
- 'msscanf',
- 'mtell',
- 'mtlb_mode',
- 'mtlb_sparse',
- 'mucomp',
- 'mulf',
- 'name2rgb',
- 'nearfloat',
- 'newaxes',
- 'newest',
- 'newfun',
- 'nnz',
- 'norm',
- 'notify',
- 'number_properties',
- 'ode',
- 'odedc',
- 'ones',
- 'openged',
- 'opentk',
- 'optim',
- 'or',
- 'ordmmd',
- 'parallel_concurrency',
- 'parallel_run',
- 'param3d',
- 'param3d1',
- 'part',
- 'pathconvert',
- 'pathsep',
- 'phase_simulation',
- 'plot2d',
- 'plot2d1',
- 'plot2d2',
- 'plot2d3',
- 'plot2d4',
- 'plot3d',
- 'plot3d1',
- 'plotbrowser',
- 'pointer_xproperty',
- 'poly',
- 'ppol',
- 'pppdiv',
- 'predef',
- 'preferences',
- 'print',
- 'printf',
- 'printfigure',
- 'printsetupbox',
- 'prod',
- 'progressionbar',
- 'prompt',
- 'pwd',
- 'qld',
- 'qp_solve',
- 'qr',
- 'raise_window',
- 'rand',
- 'rankqr',
- 'rat',
- 'rcond',
- 'rdivf',
- 'read',
- 'read4b',
- 'read_csv',
- 'readb',
- 'readgateway',
- 'readmps',
- 'real',
- 'realtime',
- 'realtimeinit',
- 'regexp',
- 'relocate_handle',
- 'remez',
- 'removeModulePreferences',
- 'removedir',
- 'removelinehistory',
- 'res_with_prec',
- 'resethistory',
- 'residu',
- 'resume',
- 'return',
- 'ricc',
- 'rlist',
- 'roots',
- 'rotate_axes',
- 'round',
- 'rpem',
- 'rtitr',
- 'rubberbox',
- 'save',
- 'saveGui',
- 'saveafterncommands',
- 'saveconsecutivecommands',
- 'savehistory',
- 'schur',
- 'sci_haltscicos',
- 'sci_tree2',
- 'sci_tree3',
- 'sci_tree4',
- 'sciargs',
- 'scicos_debug',
- 'scicos_debug_count',
- 'scicos_time',
- 'scicosim',
- 'scinotes',
- 'sctree',
- 'semidef',
- 'set',
- 'set_blockerror',
- 'set_fftw_wisdom',
- 'set_xproperty',
- 'setbpt',
- 'setdefaultlanguage',
- 'setenv',
- 'setfield',
- 'sethistoryfile',
- 'setlanguage',
- 'setlookandfeel',
- 'setmenu',
- 'sfact',
- 'sfinit',
- 'show_window',
- 'sident',
- 'sig2data',
- 'sign',
- 'simp',
- 'simp_mode',
- 'sin',
- 'size',
- 'slash',
- 'sleep',
- 'sorder',
- 'sparse',
- 'spchol',
- 'spcompack',
- 'spec',
- 'spget',
- 'splin',
- 'splin2d',
- 'splin3d',
- 'splitURL',
- 'spones',
- 'sprintf',
- 'sqrt',
- 'stacksize',
- 'str2code',
- 'strcat',
- 'strchr',
- 'strcmp',
- 'strcspn',
- 'strindex',
- 'string',
- 'stringbox',
- 'stripblanks',
- 'strncpy',
- 'strrchr',
- 'strrev',
- 'strsplit',
- 'strspn',
- 'strstr',
- 'strsubst',
- 'strtod',
- 'strtok',
- 'subf',
- 'sum',
- 'svd',
- 'swap_handles',
- 'symfcti',
- 'syredi',
- 'system_getproperty',
- 'system_setproperty',
- 'ta2lpd',
- 'tan',
- 'taucs_chdel',
- 'taucs_chfact',
- 'taucs_chget',
- 'taucs_chinfo',
- 'taucs_chsolve',
- 'tempname',
- 'testmatrix',
- 'timer',
- 'tlist',
- 'tohome',
- 'tokens',
- 'toolbar',
- 'toprint',
- 'tr_zer',
- 'tril',
- 'triu',
- 'type',
- 'typename',
- 'uiDisplayTree',
- 'uicontextmenu',
- 'uicontrol',
- 'uigetcolor',
- 'uigetdir',
- 'uigetfile',
- 'uigetfont',
- 'uimenu',
- 'uint16',
- 'uint32',
- 'uint8',
- 'uipopup',
- 'uiputfile',
- 'uiwait',
- 'ulink',
- 'umf_ludel',
- 'umf_lufact',
- 'umf_luget',
- 'umf_luinfo',
- 'umf_lusolve',
- 'umfpack',
- 'unglue',
- 'unix',
- 'unsetmenu',
- 'unzoom',
- 'updatebrowsevar',
- 'usecanvas',
- 'useeditor',
- 'user',
- 'var2vec',
- 'varn',
- 'vec2var',
- 'waitbar',
- 'warnBlockByUID',
- 'warning',
- 'what',
- 'where',
- 'whereis',
- 'who',
- 'winsid',
- 'with_module',
- 'writb',
- 'write',
- 'write4b',
- 'write_csv',
- 'x_choose',
- 'x_choose_modeless',
- 'x_dialog',
- 'x_mdialog',
- 'xarc',
- 'xarcs',
- 'xarrows',
- 'xchange',
- 'xchoicesi',
- 'xclick',
- 'xcos',
- 'xcosAddToolsMenu',
- 'xcosConfigureXmlFile',
- 'xcosDiagramToScilab',
- 'xcosPalCategoryAdd',
- 'xcosPalDelete',
- 'xcosPalDisable',
- 'xcosPalEnable',
- 'xcosPalGenerateIcon',
- 'xcosPalGet',
- 'xcosPalLoad',
- 'xcosPalMove',
- 'xcosSimulationStarted',
- 'xcosUpdateBlock',
- 'xdel',
- 'xend',
- 'xfarc',
- 'xfarcs',
- 'xfpoly',
- 'xfpolys',
- 'xfrect',
- 'xget',
- 'xgetmouse',
- 'xgraduate',
- 'xgrid',
- 'xinit',
- 'xlfont',
- 'xls_open',
- 'xls_read',
- 'xmlAddNs',
- 'xmlAppend',
- 'xmlAsNumber',
- 'xmlAsText',
- 'xmlDTD',
- 'xmlDelete',
- 'xmlDocument',
- 'xmlDump',
- 'xmlElement',
- 'xmlFormat',
- 'xmlGetNsByHref',
- 'xmlGetNsByPrefix',
- 'xmlGetOpenDocs',
- 'xmlIsValidObject',
- 'xmlName',
- 'xmlNs',
- 'xmlRead',
- 'xmlReadStr',
- 'xmlRelaxNG',
- 'xmlRemove',
- 'xmlSchema',
- 'xmlSetAttributes',
- 'xmlValidate',
- 'xmlWrite',
- 'xmlXPath',
- 'xname',
- 'xpause',
- 'xpoly',
- 'xpolys',
- 'xrect',
- 'xrects',
- 'xs2bmp',
- 'xs2emf',
- 'xs2eps',
- 'xs2gif',
- 'xs2jpg',
- 'xs2pdf',
- 'xs2png',
- 'xs2ppm',
- 'xs2ps',
- 'xs2svg',
- 'xsegs',
- 'xset',
- 'xstring',
- 'xstringb',
- 'xtitle',
- 'zeros',
- 'znaupd',
- 'zneupd',
- 'zoom_rect',
-)
-
-macros_kw = (
- '!_deff_wrapper',
- '%0_i_st',
- '%3d_i_h',
- '%Block_xcosUpdateBlock',
- '%TNELDER_p',
- '%TNELDER_string',
- '%TNMPLOT_p',
- '%TNMPLOT_string',
- '%TOPTIM_p',
- '%TOPTIM_string',
- '%TSIMPLEX_p',
- '%TSIMPLEX_string',
- '%_EVoid_p',
- '%_gsort',
- '%_listvarinfile',
- '%_rlist',
- '%_save',
- '%_sodload',
- '%_strsplit',
- '%_unwrap',
- '%ar_p',
- '%asn',
- '%b_a_b',
- '%b_a_s',
- '%b_c_s',
- '%b_c_spb',
- '%b_cumprod',
- '%b_cumsum',
- '%b_d_s',
- '%b_diag',
- '%b_e',
- '%b_f_s',
- '%b_f_spb',
- '%b_g_s',
- '%b_g_spb',
- '%b_grand',
- '%b_h_s',
- '%b_h_spb',
- '%b_i_b',
- '%b_i_ce',
- '%b_i_h',
- '%b_i_hm',
- '%b_i_s',
- '%b_i_sp',
- '%b_i_spb',
- '%b_i_st',
- '%b_iconvert',
- '%b_l_b',
- '%b_l_s',
- '%b_m_b',
- '%b_m_s',
- '%b_matrix',
- '%b_n_hm',
- '%b_o_hm',
- '%b_p_s',
- '%b_prod',
- '%b_r_b',
- '%b_r_s',
- '%b_s_b',
- '%b_s_s',
- '%b_string',
- '%b_sum',
- '%b_tril',
- '%b_triu',
- '%b_x_b',
- '%b_x_s',
- '%bicg',
- '%bicgstab',
- '%c_a_c',
- '%c_b_c',
- '%c_b_s',
- '%c_diag',
- '%c_dsearch',
- '%c_e',
- '%c_eye',
- '%c_f_s',
- '%c_grand',
- '%c_i_c',
- '%c_i_ce',
- '%c_i_h',
- '%c_i_hm',
- '%c_i_lss',
- '%c_i_r',
- '%c_i_s',
- '%c_i_st',
- '%c_matrix',
- '%c_n_l',
- '%c_n_st',
- '%c_o_l',
- '%c_o_st',
- '%c_ones',
- '%c_rand',
- '%c_tril',
- '%c_triu',
- '%cblock_c_cblock',
- '%cblock_c_s',
- '%cblock_e',
- '%cblock_f_cblock',
- '%cblock_p',
- '%cblock_size',
- '%ce_6',
- '%ce_c_ce',
- '%ce_e',
- '%ce_f_ce',
- '%ce_i_ce',
- '%ce_i_s',
- '%ce_i_st',
- '%ce_matrix',
- '%ce_p',
- '%ce_size',
- '%ce_string',
- '%ce_t',
- '%cgs',
- '%champdat_i_h',
- '%choose',
- '%diagram_xcos',
- '%dir_p',
- '%fptr_i_st',
- '%grand_perm',
- '%grayplot_i_h',
- '%h_i_st',
- '%hmS_k_hmS_generic',
- '%hm_1_hm',
- '%hm_1_s',
- '%hm_2_hm',
- '%hm_2_s',
- '%hm_3_hm',
- '%hm_3_s',
- '%hm_4_hm',
- '%hm_4_s',
- '%hm_5',
- '%hm_a_hm',
- '%hm_a_r',
- '%hm_a_s',
- '%hm_abs',
- '%hm_and',
- '%hm_bool2s',
- '%hm_c_hm',
- '%hm_ceil',
- '%hm_conj',
- '%hm_cos',
- '%hm_cumprod',
- '%hm_cumsum',
- '%hm_d_hm',
- '%hm_d_s',
- '%hm_degree',
- '%hm_dsearch',
- '%hm_e',
- '%hm_exp',
- '%hm_eye',
- '%hm_f_hm',
- '%hm_find',
- '%hm_floor',
- '%hm_g_hm',
- '%hm_grand',
- '%hm_gsort',
- '%hm_h_hm',
- '%hm_i_b',
- '%hm_i_ce',
- '%hm_i_h',
- '%hm_i_hm',
- '%hm_i_i',
- '%hm_i_p',
- '%hm_i_r',
- '%hm_i_s',
- '%hm_i_st',
- '%hm_iconvert',
- '%hm_imag',
- '%hm_int',
- '%hm_isnan',
- '%hm_isreal',
- '%hm_j_hm',
- '%hm_j_s',
- '%hm_k_hm',
- '%hm_k_s',
- '%hm_log',
- '%hm_m_p',
- '%hm_m_r',
- '%hm_m_s',
- '%hm_matrix',
- '%hm_max',
- '%hm_mean',
- '%hm_median',
- '%hm_min',
- '%hm_n_b',
- '%hm_n_c',
- '%hm_n_hm',
- '%hm_n_i',
- '%hm_n_p',
- '%hm_n_s',
- '%hm_o_b',
- '%hm_o_c',
- '%hm_o_hm',
- '%hm_o_i',
- '%hm_o_p',
- '%hm_o_s',
- '%hm_ones',
- '%hm_or',
- '%hm_p',
- '%hm_prod',
- '%hm_q_hm',
- '%hm_r_s',
- '%hm_rand',
- '%hm_real',
- '%hm_round',
- '%hm_s',
- '%hm_s_hm',
- '%hm_s_r',
- '%hm_s_s',
- '%hm_sign',
- '%hm_sin',
- '%hm_size',
- '%hm_sqrt',
- '%hm_stdev',
- '%hm_string',
- '%hm_sum',
- '%hm_x_hm',
- '%hm_x_p',
- '%hm_x_s',
- '%hm_zeros',
- '%i_1_s',
- '%i_2_s',
- '%i_3_s',
- '%i_4_s',
- '%i_Matplot',
- '%i_a_i',
- '%i_a_s',
- '%i_and',
- '%i_ascii',
- '%i_b_s',
- '%i_bezout',
- '%i_champ',
- '%i_champ1',
- '%i_contour',
- '%i_contour2d',
- '%i_d_i',
- '%i_d_s',
- '%i_dsearch',
- '%i_e',
- '%i_fft',
- '%i_g_i',
- '%i_gcd',
- '%i_grand',
- '%i_h_i',
- '%i_i_ce',
- '%i_i_h',
- '%i_i_hm',
- '%i_i_i',
- '%i_i_s',
- '%i_i_st',
- '%i_j_i',
- '%i_j_s',
- '%i_l_s',
- '%i_lcm',
- '%i_length',
- '%i_m_i',
- '%i_m_s',
- '%i_mfprintf',
- '%i_mprintf',
- '%i_msprintf',
- '%i_n_s',
- '%i_o_s',
- '%i_or',
- '%i_p_i',
- '%i_p_s',
- '%i_plot2d',
- '%i_plot2d1',
- '%i_plot2d2',
- '%i_q_s',
- '%i_r_i',
- '%i_r_s',
- '%i_round',
- '%i_s_i',
- '%i_s_s',
- '%i_sign',
- '%i_string',
- '%i_x_i',
- '%i_x_s',
- '%ip_a_s',
- '%ip_i_st',
- '%ip_m_s',
- '%ip_n_ip',
- '%ip_o_ip',
- '%ip_p',
- '%ip_part',
- '%ip_s_s',
- '%ip_string',
- '%k',
- '%l_i_h',
- '%l_i_s',
- '%l_i_st',
- '%l_isequal',
- '%l_n_c',
- '%l_n_l',
- '%l_n_m',
- '%l_n_p',
- '%l_n_s',
- '%l_n_st',
- '%l_o_c',
- '%l_o_l',
- '%l_o_m',
- '%l_o_p',
- '%l_o_s',
- '%l_o_st',
- '%lss_a_lss',
- '%lss_a_p',
- '%lss_a_r',
- '%lss_a_s',
- '%lss_c_lss',
- '%lss_c_p',
- '%lss_c_r',
- '%lss_c_s',
- '%lss_e',
- '%lss_eye',
- '%lss_f_lss',
- '%lss_f_p',
- '%lss_f_r',
- '%lss_f_s',
- '%lss_i_ce',
- '%lss_i_lss',
- '%lss_i_p',
- '%lss_i_r',
- '%lss_i_s',
- '%lss_i_st',
- '%lss_inv',
- '%lss_l_lss',
- '%lss_l_p',
- '%lss_l_r',
- '%lss_l_s',
- '%lss_m_lss',
- '%lss_m_p',
- '%lss_m_r',
- '%lss_m_s',
- '%lss_n_lss',
- '%lss_n_p',
- '%lss_n_r',
- '%lss_n_s',
- '%lss_norm',
- '%lss_o_lss',
- '%lss_o_p',
- '%lss_o_r',
- '%lss_o_s',
- '%lss_ones',
- '%lss_r_lss',
- '%lss_r_p',
- '%lss_r_r',
- '%lss_r_s',
- '%lss_rand',
- '%lss_s',
- '%lss_s_lss',
- '%lss_s_p',
- '%lss_s_r',
- '%lss_s_s',
- '%lss_size',
- '%lss_t',
- '%lss_v_lss',
- '%lss_v_p',
- '%lss_v_r',
- '%lss_v_s',
- '%lt_i_s',
- '%m_n_l',
- '%m_o_l',
- '%mc_i_h',
- '%mc_i_s',
- '%mc_i_st',
- '%mc_n_st',
- '%mc_o_st',
- '%mc_string',
- '%mps_p',
- '%mps_string',
- '%msp_a_s',
- '%msp_abs',
- '%msp_e',
- '%msp_find',
- '%msp_i_s',
- '%msp_i_st',
- '%msp_length',
- '%msp_m_s',
- '%msp_maxi',
- '%msp_n_msp',
- '%msp_nnz',
- '%msp_o_msp',
- '%msp_p',
- '%msp_sparse',
- '%msp_spones',
- '%msp_t',
- '%p_a_lss',
- '%p_a_r',
- '%p_c_lss',
- '%p_c_r',
- '%p_cumprod',
- '%p_cumsum',
- '%p_d_p',
- '%p_d_r',
- '%p_d_s',
- '%p_det',
- '%p_e',
- '%p_f_lss',
- '%p_f_r',
- '%p_grand',
- '%p_i_ce',
- '%p_i_h',
- '%p_i_hm',
- '%p_i_lss',
- '%p_i_p',
- '%p_i_r',
- '%p_i_s',
- '%p_i_st',
- '%p_inv',
- '%p_j_s',
- '%p_k_p',
- '%p_k_r',
- '%p_k_s',
- '%p_l_lss',
- '%p_l_p',
- '%p_l_r',
- '%p_l_s',
- '%p_m_hm',
- '%p_m_lss',
- '%p_m_r',
- '%p_matrix',
- '%p_n_l',
- '%p_n_lss',
- '%p_n_r',
- '%p_o_l',
- '%p_o_lss',
- '%p_o_r',
- '%p_o_sp',
- '%p_p_s',
- '%p_part',
- '%p_prod',
- '%p_q_p',
- '%p_q_r',
- '%p_q_s',
- '%p_r_lss',
- '%p_r_p',
- '%p_r_r',
- '%p_r_s',
- '%p_s_lss',
- '%p_s_r',
- '%p_simp',
- '%p_string',
- '%p_sum',
- '%p_v_lss',
- '%p_v_p',
- '%p_v_r',
- '%p_v_s',
- '%p_x_hm',
- '%p_x_r',
- '%p_y_p',
- '%p_y_r',
- '%p_y_s',
- '%p_z_p',
- '%p_z_r',
- '%p_z_s',
- '%pcg',
- '%plist_p',
- '%plist_string',
- '%r_0',
- '%r_a_hm',
- '%r_a_lss',
- '%r_a_p',
- '%r_a_r',
- '%r_a_s',
- '%r_c_lss',
- '%r_c_p',
- '%r_c_r',
- '%r_c_s',
- '%r_clean',
- '%r_cumprod',
- '%r_cumsum',
- '%r_d_p',
- '%r_d_r',
- '%r_d_s',
- '%r_det',
- '%r_diag',
- '%r_e',
- '%r_eye',
- '%r_f_lss',
- '%r_f_p',
- '%r_f_r',
- '%r_f_s',
- '%r_i_ce',
- '%r_i_hm',
- '%r_i_lss',
- '%r_i_p',
- '%r_i_r',
- '%r_i_s',
- '%r_i_st',
- '%r_inv',
- '%r_j_s',
- '%r_k_p',
- '%r_k_r',
- '%r_k_s',
- '%r_l_lss',
- '%r_l_p',
- '%r_l_r',
- '%r_l_s',
- '%r_m_hm',
- '%r_m_lss',
- '%r_m_p',
- '%r_m_r',
- '%r_m_s',
- '%r_matrix',
- '%r_n_lss',
- '%r_n_p',
- '%r_n_r',
- '%r_n_s',
- '%r_norm',
- '%r_o_lss',
- '%r_o_p',
- '%r_o_r',
- '%r_o_s',
- '%r_ones',
- '%r_p',
- '%r_p_s',
- '%r_prod',
- '%r_q_p',
- '%r_q_r',
- '%r_q_s',
- '%r_r_lss',
- '%r_r_p',
- '%r_r_r',
- '%r_r_s',
- '%r_rand',
- '%r_s',
- '%r_s_hm',
- '%r_s_lss',
- '%r_s_p',
- '%r_s_r',
- '%r_s_s',
- '%r_simp',
- '%r_size',
- '%r_string',
- '%r_sum',
- '%r_t',
- '%r_tril',
- '%r_triu',
- '%r_v_lss',
- '%r_v_p',
- '%r_v_r',
- '%r_v_s',
- '%r_varn',
- '%r_x_p',
- '%r_x_r',
- '%r_x_s',
- '%r_y_p',
- '%r_y_r',
- '%r_y_s',
- '%r_z_p',
- '%r_z_r',
- '%r_z_s',
- '%s_1_hm',
- '%s_1_i',
- '%s_2_hm',
- '%s_2_i',
- '%s_3_hm',
- '%s_3_i',
- '%s_4_hm',
- '%s_4_i',
- '%s_5',
- '%s_a_b',
- '%s_a_hm',
- '%s_a_i',
- '%s_a_ip',
- '%s_a_lss',
- '%s_a_msp',
- '%s_a_r',
- '%s_a_sp',
- '%s_and',
- '%s_b_i',
- '%s_b_s',
- '%s_bezout',
- '%s_c_b',
- '%s_c_cblock',
- '%s_c_lss',
- '%s_c_r',
- '%s_c_sp',
- '%s_d_b',
- '%s_d_i',
- '%s_d_p',
- '%s_d_r',
- '%s_d_sp',
- '%s_e',
- '%s_f_b',
- '%s_f_cblock',
- '%s_f_lss',
- '%s_f_r',
- '%s_f_sp',
- '%s_g_b',
- '%s_g_s',
- '%s_gcd',
- '%s_grand',
- '%s_h_b',
- '%s_h_s',
- '%s_i_b',
- '%s_i_c',
- '%s_i_ce',
- '%s_i_h',
- '%s_i_hm',
- '%s_i_i',
- '%s_i_lss',
- '%s_i_p',
- '%s_i_r',
- '%s_i_s',
- '%s_i_sp',
- '%s_i_spb',
- '%s_i_st',
- '%s_j_i',
- '%s_k_hm',
- '%s_k_p',
- '%s_k_r',
- '%s_k_sp',
- '%s_l_b',
- '%s_l_hm',
- '%s_l_i',
- '%s_l_lss',
- '%s_l_p',
- '%s_l_r',
- '%s_l_s',
- '%s_l_sp',
- '%s_lcm',
- '%s_m_b',
- '%s_m_hm',
- '%s_m_i',
- '%s_m_ip',
- '%s_m_lss',
- '%s_m_msp',
- '%s_m_r',
- '%s_matrix',
- '%s_n_hm',
- '%s_n_i',
- '%s_n_l',
- '%s_n_lss',
- '%s_n_r',
- '%s_n_st',
- '%s_o_hm',
- '%s_o_i',
- '%s_o_l',
- '%s_o_lss',
- '%s_o_r',
- '%s_o_st',
- '%s_or',
- '%s_p_b',
- '%s_p_i',
- '%s_pow',
- '%s_q_hm',
- '%s_q_i',
- '%s_q_p',
- '%s_q_r',
- '%s_q_sp',
- '%s_r_b',
- '%s_r_i',
- '%s_r_lss',
- '%s_r_p',
- '%s_r_r',
- '%s_r_s',
- '%s_r_sp',
- '%s_s_b',
- '%s_s_hm',
- '%s_s_i',
- '%s_s_ip',
- '%s_s_lss',
- '%s_s_r',
- '%s_s_sp',
- '%s_simp',
- '%s_v_lss',
- '%s_v_p',
- '%s_v_r',
- '%s_v_s',
- '%s_x_b',
- '%s_x_hm',
- '%s_x_i',
- '%s_x_r',
- '%s_y_p',
- '%s_y_r',
- '%s_y_sp',
- '%s_z_p',
- '%s_z_r',
- '%s_z_sp',
- '%sn',
- '%sp_a_s',
- '%sp_a_sp',
- '%sp_and',
- '%sp_c_s',
- '%sp_ceil',
- '%sp_conj',
- '%sp_cos',
- '%sp_cumprod',
- '%sp_cumsum',
- '%sp_d_s',
- '%sp_d_sp',
- '%sp_det',
- '%sp_diag',
- '%sp_e',
- '%sp_exp',
- '%sp_f_s',
- '%sp_floor',
- '%sp_grand',
- '%sp_gsort',
- '%sp_i_ce',
- '%sp_i_h',
- '%sp_i_s',
- '%sp_i_sp',
- '%sp_i_st',
- '%sp_int',
- '%sp_inv',
- '%sp_k_s',
- '%sp_k_sp',
- '%sp_l_s',
- '%sp_l_sp',
- '%sp_length',
- '%sp_max',
- '%sp_min',
- '%sp_norm',
- '%sp_or',
- '%sp_p_s',
- '%sp_prod',
- '%sp_q_s',
- '%sp_q_sp',
- '%sp_r_s',
- '%sp_r_sp',
- '%sp_round',
- '%sp_s_s',
- '%sp_s_sp',
- '%sp_sin',
- '%sp_sqrt',
- '%sp_string',
- '%sp_sum',
- '%sp_tril',
- '%sp_triu',
- '%sp_y_s',
- '%sp_y_sp',
- '%sp_z_s',
- '%sp_z_sp',
- '%spb_and',
- '%spb_c_b',
- '%spb_cumprod',
- '%spb_cumsum',
- '%spb_diag',
- '%spb_e',
- '%spb_f_b',
- '%spb_g_b',
- '%spb_g_spb',
- '%spb_h_b',
- '%spb_h_spb',
- '%spb_i_b',
- '%spb_i_ce',
- '%spb_i_h',
- '%spb_i_st',
- '%spb_or',
- '%spb_prod',
- '%spb_sum',
- '%spb_tril',
- '%spb_triu',
- '%st_6',
- '%st_c_st',
- '%st_e',
- '%st_f_st',
- '%st_i_b',
- '%st_i_c',
- '%st_i_fptr',
- '%st_i_h',
- '%st_i_i',
- '%st_i_ip',
- '%st_i_lss',
- '%st_i_msp',
- '%st_i_p',
- '%st_i_r',
- '%st_i_s',
- '%st_i_sp',
- '%st_i_spb',
- '%st_i_st',
- '%st_matrix',
- '%st_n_c',
- '%st_n_l',
- '%st_n_mc',
- '%st_n_p',
- '%st_n_s',
- '%st_o_c',
- '%st_o_l',
- '%st_o_mc',
- '%st_o_p',
- '%st_o_s',
- '%st_o_tl',
- '%st_p',
- '%st_size',
- '%st_string',
- '%st_t',
- '%ticks_i_h',
- '%xls_e',
- '%xls_p',
- '%xlssheet_e',
- '%xlssheet_p',
- '%xlssheet_size',
- '%xlssheet_string',
- 'DominationRank',
- 'G_make',
- 'IsAScalar',
- 'NDcost',
- 'OS_Version',
- 'PlotSparse',
- 'ReadHBSparse',
- 'TCL_CreateSlave',
- 'abcd',
- 'abinv',
- 'accept_func_default',
- 'accept_func_vfsa',
- 'acf',
- 'acosd',
- 'acosh',
- 'acoshm',
- 'acosm',
- 'acot',
- 'acotd',
- 'acoth',
- 'acsc',
- 'acscd',
- 'acsch',
- 'add_demo',
- 'add_help_chapter',
- 'add_module_help_chapter',
- 'add_param',
- 'add_profiling',
- 'adj2sp',
- 'aff2ab',
- 'ana_style',
- 'analpf',
- 'analyze',
- 'aplat',
- 'arhnk',
- 'arl2',
- 'arma2p',
- 'arma2ss',
- 'armac',
- 'armax',
- 'armax1',
- 'arobasestring2strings',
- 'arsimul',
- 'ascii2string',
- 'asciimat',
- 'asec',
- 'asecd',
- 'asech',
- 'asind',
- 'asinh',
- 'asinhm',
- 'asinm',
- 'assert_checkalmostequal',
- 'assert_checkequal',
- 'assert_checkerror',
- 'assert_checkfalse',
- 'assert_checkfilesequal',
- 'assert_checktrue',
- 'assert_comparecomplex',
- 'assert_computedigits',
- 'assert_cond2reltol',
- 'assert_cond2reqdigits',
- 'assert_generror',
- 'atand',
- 'atanh',
- 'atanhm',
- 'atanm',
- 'atomsAutoload',
- 'atomsAutoloadAdd',
- 'atomsAutoloadDel',
- 'atomsAutoloadList',
- 'atomsCategoryList',
- 'atomsCheckModule',
- 'atomsDepTreeShow',
- 'atomsGetConfig',
- 'atomsGetInstalled',
- 'atomsGetInstalledPath',
- 'atomsGetLoaded',
- 'atomsGetLoadedPath',
- 'atomsInstall',
- 'atomsIsInstalled',
- 'atomsIsLoaded',
- 'atomsList',
- 'atomsLoad',
- 'atomsQuit',
- 'atomsRemove',
- 'atomsRepositoryAdd',
- 'atomsRepositoryDel',
- 'atomsRepositoryList',
- 'atomsRestoreConfig',
- 'atomsSaveConfig',
- 'atomsSearch',
- 'atomsSetConfig',
- 'atomsShow',
- 'atomsSystemInit',
- 'atomsSystemUpdate',
- 'atomsTest',
- 'atomsUpdate',
- 'atomsVersion',
- 'augment',
- 'auread',
- 'auwrite',
- 'balreal',
- 'bench_run',
- 'bilin',
- 'bilt',
- 'bin2dec',
- 'binomial',
- 'bitand',
- 'bitcmp',
- 'bitget',
- 'bitor',
- 'bitset',
- 'bitxor',
- 'black',
- 'blanks',
- 'bloc2exp',
- 'bloc2ss',
- 'block_parameter_error',
- 'bode',
- 'bode_asymp',
- 'bstap',
- 'buttmag',
- 'bvodeS',
- 'bytecode',
- 'bytecodewalk',
- 'cainv',
- 'calendar',
- 'calerf',
- 'calfrq',
- 'canon',
- 'casc',
- 'cat',
- 'cat_code',
- 'cb_m2sci_gui',
- 'ccontrg',
- 'cell',
- 'cell2mat',
- 'cellstr',
- 'center',
- 'cepstrum',
- 'cfspec',
- 'char',
- 'chart',
- 'cheb1mag',
- 'cheb2mag',
- 'check_gateways',
- 'check_modules_xml',
- 'check_versions',
- 'chepol',
- 'chfact',
- 'chsolve',
- 'classmarkov',
- 'clean_help',
- 'clock',
- 'cls2dls',
- 'cmb_lin',
- 'cmndred',
- 'cmoment',
- 'coding_ga_binary',
- 'coding_ga_identity',
- 'coff',
- 'coffg',
- 'colcomp',
- 'colcompr',
- 'colinout',
- 'colregul',
- 'companion',
- 'complex',
- 'compute_initial_temp',
- 'cond',
- 'cond2sp',
- 'condestsp',
- 'configure_msifort',
- 'configure_msvc',
- 'conjgrad',
- 'cont_frm',
- 'cont_mat',
- 'contrss',
- 'conv',
- 'convert_to_float',
- 'convertindex',
- 'convol',
- 'convol2d',
- 'copfac',
- 'correl',
- 'cosd',
- 'cosh',
- 'coshm',
- 'cosm',
- 'cotd',
- 'cotg',
- 'coth',
- 'cothm',
- 'cov',
- 'covar',
- 'createXConfiguration',
- 'createfun',
- 'createstruct',
- 'cross',
- 'crossover_ga_binary',
- 'crossover_ga_default',
- 'csc',
- 'cscd',
- 'csch',
- 'csgn',
- 'csim',
- 'cspect',
- 'ctr_gram',
- 'czt',
- 'dae',
- 'daeoptions',
- 'damp',
- 'datafit',
- 'date',
- 'datenum',
- 'datevec',
- 'dbphi',
- 'dcf',
- 'ddp',
- 'dec2bin',
- 'dec2hex',
- 'dec2oct',
- 'del_help_chapter',
- 'del_module_help_chapter',
- 'demo_begin',
- 'demo_choose',
- 'demo_compiler',
- 'demo_end',
- 'demo_file_choice',
- 'demo_folder_choice',
- 'demo_function_choice',
- 'demo_gui',
- 'demo_run',
- 'demo_viewCode',
- 'denom',
- 'derivat',
- 'derivative',
- 'des2ss',
- 'des2tf',
- 'detectmsifort64tools',
- 'detectmsvc64tools',
- 'determ',
- 'detr',
- 'detrend',
- 'devtools_run_builder',
- 'dhnorm',
- 'diff',
- 'diophant',
- 'dir',
- 'dirname',
- 'dispfiles',
- 'dllinfo',
- 'dscr',
- 'dsimul',
- 'dt_ility',
- 'dtsi',
- 'edit',
- 'edit_error',
- 'editor',
- 'eigenmarkov',
- 'eigs',
- 'ell1mag',
- 'enlarge_shape',
- 'entropy',
- 'eomday',
- 'epred',
- 'eqfir',
- 'eqiir',
- 'equil',
- 'equil1',
- 'erfinv',
- 'etime',
- 'eval',
- 'evans',
- 'evstr',
- 'example_run',
- 'expression2code',
- 'extract_help_examples',
- 'factor',
- 'factorial',
- 'factors',
- 'faurre',
- 'ffilt',
- 'fft2',
- 'fftshift',
- 'fieldnames',
- 'filt_sinc',
- 'filter',
- 'findABCD',
- 'findAC',
- 'findBDK',
- 'findR',
- 'find_freq',
- 'find_links',
- 'find_scicos_version',
- 'findm',
- 'findmsifortcompiler',
- 'findmsvccompiler',
- 'findx0BD',
- 'firstnonsingleton',
- 'fix',
- 'fixedpointgcd',
- 'flipdim',
- 'flts',
- 'fminsearch',
- 'formatBlackTip',
- 'formatBodeMagTip',
- 'formatBodePhaseTip',
- 'formatGainplotTip',
- 'formatHallModuleTip',
- 'formatHallPhaseTip',
- 'formatNicholsGainTip',
- 'formatNicholsPhaseTip',
- 'formatNyquistTip',
- 'formatPhaseplotTip',
- 'formatSgridDampingTip',
- 'formatSgridFreqTip',
- 'formatZgridDampingTip',
- 'formatZgridFreqTip',
- 'format_txt',
- 'fourplan',
- 'frep2tf',
- 'freson',
- 'frfit',
- 'frmag',
- 'fseek_origin',
- 'fsfirlin',
- 'fspec',
- 'fspecg',
- 'fstabst',
- 'ftest',
- 'ftuneq',
- 'fullfile',
- 'fullrf',
- 'fullrfk',
- 'fun2string',
- 'g_margin',
- 'gainplot',
- 'gamitg',
- 'gcare',
- 'gcd',
- 'gencompilationflags_unix',
- 'generateBlockImage',
- 'generateBlockImages',
- 'generic_i_ce',
- 'generic_i_h',
- 'generic_i_hm',
- 'generic_i_s',
- 'generic_i_st',
- 'genlib',
- 'genmarkov',
- 'geomean',
- 'getDiagramVersion',
- 'getModelicaPath',
- 'getPreferencesValue',
- 'get_file_path',
- 'get_function_path',
- 'get_param',
- 'get_profile',
- 'get_scicos_version',
- 'getd',
- 'getscilabkeywords',
- 'getshell',
- 'gettklib',
- 'gfare',
- 'gfrancis',
- 'givens',
- 'glever',
- 'gmres',
- 'group',
- 'gschur',
- 'gspec',
- 'gtild',
- 'h2norm',
- 'h_cl',
- 'h_inf',
- 'h_inf_st',
- 'h_norm',
- 'hallchart',
- 'halt',
- 'hank',
- 'hankelsv',
- 'harmean',
- 'haveacompiler',
- 'head_comments',
- 'help_from_sci',
- 'help_skeleton',
- 'hermit',
- 'hex2dec',
- 'hilb',
- 'hilbert',
- 'histc',
- 'horner',
- 'householder',
- 'hrmt',
- 'htrianr',
- 'hypermat',
- 'idct',
- 'idst',
- 'ifft',
- 'ifftshift',
- 'iir',
- 'iirgroup',
- 'iirlp',
- 'iirmod',
- 'ilib_build',
- 'ilib_build_jar',
- 'ilib_compile',
- 'ilib_for_link',
- 'ilib_gen_Make',
- 'ilib_gen_Make_unix',
- 'ilib_gen_cleaner',
- 'ilib_gen_gateway',
- 'ilib_gen_loader',
- 'ilib_include_flag',
- 'ilib_mex_build',
- 'im_inv',
- 'importScicosDiagram',
- 'importScicosPal',
- 'importXcosDiagram',
- 'imrep2ss',
- 'ind2sub',
- 'inistate',
- 'init_ga_default',
- 'init_param',
- 'initial_scicos_tables',
- 'input',
- 'instruction2code',
- 'intc',
- 'intdec',
- 'integrate',
- 'interp1',
- 'interpln',
- 'intersect',
- 'intl',
- 'intsplin',
- 'inttrap',
- 'inv_coeff',
- 'invr',
- 'invrs',
- 'invsyslin',
- 'iqr',
- 'isLeapYear',
- 'is_absolute_path',
- 'is_param',
- 'iscell',
- 'iscellstr',
- 'iscolumn',
- 'isempty',
- 'isfield',
- 'isinf',
- 'ismatrix',
- 'isnan',
- 'isrow',
- 'isscalar',
- 'issparse',
- 'issquare',
- 'isstruct',
- 'isvector',
- 'jmat',
- 'justify',
- 'kalm',
- 'karmarkar',
- 'kernel',
- 'kpure',
- 'krac2',
- 'kroneck',
- 'lattn',
- 'lattp',
- 'launchtest',
- 'lcf',
- 'lcm',
- 'lcmdiag',
- 'leastsq',
- 'leqe',
- 'leqr',
- 'lev',
- 'levin',
- 'lex_sort',
- 'lft',
- 'lin',
- 'lin2mu',
- 'lincos',
- 'lindquist',
- 'linf',
- 'linfn',
- 'linsolve',
- 'linspace',
- 'list2vec',
- 'list_param',
- 'listfiles',
- 'listfunctions',
- 'listvarinfile',
- 'lmisolver',
- 'lmitool',
- 'loadXcosLibs',
- 'loadmatfile',
- 'loadwave',
- 'log10',
- 'log2',
- 'logm',
- 'logspace',
- 'lqe',
- 'lqg',
- 'lqg2stan',
- 'lqg_ltr',
- 'lqr',
- 'ls',
- 'lyap',
- 'm2sci_gui',
- 'm_circle',
- 'macglov',
- 'macrovar',
- 'mad',
- 'makecell',
- 'manedit',
- 'mapsound',
- 'markp2ss',
- 'matfile2sci',
- 'mdelete',
- 'mean',
- 'meanf',
- 'median',
- 'members',
- 'mese',
- 'meshgrid',
- 'mfft',
- 'mfile2sci',
- 'minreal',
- 'minss',
- 'mkdir',
- 'modulo',
- 'moment',
- 'mrfit',
- 'msd',
- 'mstr2sci',
- 'mtlb',
- 'mtlb_0',
- 'mtlb_a',
- 'mtlb_all',
- 'mtlb_any',
- 'mtlb_axes',
- 'mtlb_axis',
- 'mtlb_beta',
- 'mtlb_box',
- 'mtlb_choices',
- 'mtlb_close',
- 'mtlb_colordef',
- 'mtlb_cond',
- 'mtlb_cov',
- 'mtlb_cumprod',
- 'mtlb_cumsum',
- 'mtlb_dec2hex',
- 'mtlb_delete',
- 'mtlb_diag',
- 'mtlb_diff',
- 'mtlb_dir',
- 'mtlb_double',
- 'mtlb_e',
- 'mtlb_echo',
- 'mtlb_error',
- 'mtlb_eval',
- 'mtlb_exist',
- 'mtlb_eye',
- 'mtlb_false',
- 'mtlb_fft',
- 'mtlb_fftshift',
- 'mtlb_filter',
- 'mtlb_find',
- 'mtlb_findstr',
- 'mtlb_fliplr',
- 'mtlb_fopen',
- 'mtlb_format',
- 'mtlb_fprintf',
- 'mtlb_fread',
- 'mtlb_fscanf',
- 'mtlb_full',
- 'mtlb_fwrite',
- 'mtlb_get',
- 'mtlb_grid',
- 'mtlb_hold',
- 'mtlb_i',
- 'mtlb_ifft',
- 'mtlb_image',
- 'mtlb_imp',
- 'mtlb_int16',
- 'mtlb_int32',
- 'mtlb_int8',
- 'mtlb_is',
- 'mtlb_isa',
- 'mtlb_isfield',
- 'mtlb_isletter',
- 'mtlb_isspace',
- 'mtlb_l',
- 'mtlb_legendre',
- 'mtlb_linspace',
- 'mtlb_logic',
- 'mtlb_logical',
- 'mtlb_loglog',
- 'mtlb_lower',
- 'mtlb_max',
- 'mtlb_mean',
- 'mtlb_median',
- 'mtlb_mesh',
- 'mtlb_meshdom',
- 'mtlb_min',
- 'mtlb_more',
- 'mtlb_num2str',
- 'mtlb_ones',
- 'mtlb_pcolor',
- 'mtlb_plot',
- 'mtlb_prod',
- 'mtlb_qr',
- 'mtlb_qz',
- 'mtlb_rand',
- 'mtlb_randn',
- 'mtlb_rcond',
- 'mtlb_realmax',
- 'mtlb_realmin',
- 'mtlb_s',
- 'mtlb_semilogx',
- 'mtlb_semilogy',
- 'mtlb_setstr',
- 'mtlb_size',
- 'mtlb_sort',
- 'mtlb_sortrows',
- 'mtlb_sprintf',
- 'mtlb_sscanf',
- 'mtlb_std',
- 'mtlb_strcmp',
- 'mtlb_strcmpi',
- 'mtlb_strfind',
- 'mtlb_strrep',
- 'mtlb_subplot',
- 'mtlb_sum',
- 'mtlb_t',
- 'mtlb_toeplitz',
- 'mtlb_tril',
- 'mtlb_triu',
- 'mtlb_true',
- 'mtlb_type',
- 'mtlb_uint16',
- 'mtlb_uint32',
- 'mtlb_uint8',
- 'mtlb_upper',
- 'mtlb_var',
- 'mtlb_zeros',
- 'mu2lin',
- 'mutation_ga_binary',
- 'mutation_ga_default',
- 'mvcorrel',
- 'mvvacov',
- 'nancumsum',
- 'nand2mean',
- 'nanmax',
- 'nanmean',
- 'nanmeanf',
- 'nanmedian',
- 'nanmin',
- 'nanreglin',
- 'nanstdev',
- 'nansum',
- 'narsimul',
- 'ndgrid',
- 'ndims',
- 'nehari',
- 'neigh_func_csa',
- 'neigh_func_default',
- 'neigh_func_fsa',
- 'neigh_func_vfsa',
- 'neldermead_cget',
- 'neldermead_configure',
- 'neldermead_costf',
- 'neldermead_defaultoutput',
- 'neldermead_destroy',
- 'neldermead_function',
- 'neldermead_get',
- 'neldermead_log',
- 'neldermead_new',
- 'neldermead_restart',
- 'neldermead_search',
- 'neldermead_updatesimp',
- 'nextpow2',
- 'nfreq',
- 'nicholschart',
- 'nlev',
- 'nmplot_cget',
- 'nmplot_configure',
- 'nmplot_contour',
- 'nmplot_destroy',
- 'nmplot_function',
- 'nmplot_get',
- 'nmplot_historyplot',
- 'nmplot_log',
- 'nmplot_new',
- 'nmplot_outputcmd',
- 'nmplot_restart',
- 'nmplot_search',
- 'nmplot_simplexhistory',
- 'noisegen',
- 'nonreg_test_run',
- 'now',
- 'nthroot',
- 'null',
- 'num2cell',
- 'numderivative',
- 'numdiff',
- 'numer',
- 'nyquist',
- 'nyquistfrequencybounds',
- 'obs_gram',
- 'obscont',
- 'observer',
- 'obsv_mat',
- 'obsvss',
- 'oct2dec',
- 'odeoptions',
- 'optim_ga',
- 'optim_moga',
- 'optim_nsga',
- 'optim_nsga2',
- 'optim_sa',
- 'optimbase_cget',
- 'optimbase_checkbounds',
- 'optimbase_checkcostfun',
- 'optimbase_checkx0',
- 'optimbase_configure',
- 'optimbase_destroy',
- 'optimbase_function',
- 'optimbase_get',
- 'optimbase_hasbounds',
- 'optimbase_hasconstraints',
- 'optimbase_hasnlcons',
- 'optimbase_histget',
- 'optimbase_histset',
- 'optimbase_incriter',
- 'optimbase_isfeasible',
- 'optimbase_isinbounds',
- 'optimbase_isinnonlincons',
- 'optimbase_log',
- 'optimbase_logshutdown',
- 'optimbase_logstartup',
- 'optimbase_new',
- 'optimbase_outputcmd',
- 'optimbase_outstruct',
- 'optimbase_proj2bnds',
- 'optimbase_set',
- 'optimbase_stoplog',
- 'optimbase_terminate',
- 'optimget',
- 'optimplotfunccount',
- 'optimplotfval',
- 'optimplotx',
- 'optimset',
- 'optimsimplex_center',
- 'optimsimplex_check',
- 'optimsimplex_compsomefv',
- 'optimsimplex_computefv',
- 'optimsimplex_deltafv',
- 'optimsimplex_deltafvmax',
- 'optimsimplex_destroy',
- 'optimsimplex_dirmat',
- 'optimsimplex_fvmean',
- 'optimsimplex_fvstdev',
- 'optimsimplex_fvvariance',
- 'optimsimplex_getall',
- 'optimsimplex_getallfv',
- 'optimsimplex_getallx',
- 'optimsimplex_getfv',
- 'optimsimplex_getn',
- 'optimsimplex_getnbve',
- 'optimsimplex_getve',
- 'optimsimplex_getx',
- 'optimsimplex_gradientfv',
- 'optimsimplex_log',
- 'optimsimplex_new',
- 'optimsimplex_reflect',
- 'optimsimplex_setall',
- 'optimsimplex_setallfv',
- 'optimsimplex_setallx',
- 'optimsimplex_setfv',
- 'optimsimplex_setn',
- 'optimsimplex_setnbve',
- 'optimsimplex_setve',
- 'optimsimplex_setx',
- 'optimsimplex_shrink',
- 'optimsimplex_size',
- 'optimsimplex_sort',
- 'optimsimplex_xbar',
- 'orth',
- 'output_ga_default',
- 'output_moga_default',
- 'output_nsga2_default',
- 'output_nsga_default',
- 'p_margin',
- 'pack',
- 'pareto_filter',
- 'parrot',
- 'pbig',
- 'pca',
- 'pcg',
- 'pdiv',
- 'pen2ea',
- 'pencan',
- 'pencost',
- 'penlaur',
- 'perctl',
- 'perl',
- 'perms',
- 'permute',
- 'pertrans',
- 'pfactors',
- 'pfss',
- 'phasemag',
- 'phaseplot',
- 'phc',
- 'pinv',
- 'playsnd',
- 'plotprofile',
- 'plzr',
- 'pmodulo',
- 'pol2des',
- 'pol2str',
- 'polar',
- 'polfact',
- 'prbs_a',
- 'prettyprint',
- 'primes',
- 'princomp',
- 'profile',
- 'proj',
- 'projsl',
- 'projspec',
- 'psmall',
- 'pspect',
- 'qmr',
- 'qpsolve',
- 'quart',
- 'quaskro',
- 'rafiter',
- 'randpencil',
- 'range',
- 'rank',
- 'readxls',
- 'recompilefunction',
- 'recons',
- 'reglin',
- 'regress',
- 'remezb',
- 'remove_param',
- 'remove_profiling',
- 'repfreq',
- 'replace_Ix_by_Fx',
- 'repmat',
- 'reset_profiling',
- 'resize_matrix',
- 'returntoscilab',
- 'rhs2code',
- 'ric_desc',
- 'riccati',
- 'rmdir',
- 'routh_t',
- 'rowcomp',
- 'rowcompr',
- 'rowinout',
- 'rowregul',
- 'rowshuff',
- 'rref',
- 'sample',
- 'samplef',
- 'samwr',
- 'savematfile',
- 'savewave',
- 'scanf',
- 'sci2exp',
- 'sciGUI_init',
- 'sci_sparse',
- 'scicos_getvalue',
- 'scicos_simulate',
- 'scicos_workspace_init',
- 'scisptdemo',
- 'scitest',
- 'sdiff',
- 'sec',
- 'secd',
- 'sech',
- 'selection_ga_elitist',
- 'selection_ga_random',
- 'sensi',
- 'setPreferencesValue',
- 'set_param',
- 'setdiff',
- 'sgrid',
- 'show_margins',
- 'show_pca',
- 'showprofile',
- 'signm',
- 'sinc',
- 'sincd',
- 'sind',
- 'sinh',
- 'sinhm',
- 'sinm',
- 'sm2des',
- 'sm2ss',
- 'smga',
- 'smooth',
- 'solve',
- 'sound',
- 'soundsec',
- 'sp2adj',
- 'spaninter',
- 'spanplus',
- 'spantwo',
- 'specfact',
- 'speye',
- 'sprand',
- 'spzeros',
- 'sqroot',
- 'sqrtm',
- 'squarewave',
- 'squeeze',
- 'srfaur',
- 'srkf',
- 'ss2des',
- 'ss2ss',
- 'ss2tf',
- 'sskf',
- 'ssprint',
- 'ssrand',
- 'st_deviation',
- 'st_i_generic',
- 'st_ility',
- 'stabil',
- 'statgain',
- 'stdev',
- 'stdevf',
- 'steadycos',
- 'strange',
- 'strcmpi',
- 'struct',
- 'sub2ind',
- 'sva',
- 'svplot',
- 'sylm',
- 'sylv',
- 'sysconv',
- 'sysdiag',
- 'sysfact',
- 'syslin',
- 'syssize',
- 'system',
- 'systmat',
- 'tabul',
- 'tand',
- 'tanh',
- 'tanhm',
- 'tanm',
- 'tbx_build_blocks',
- 'tbx_build_cleaner',
- 'tbx_build_gateway',
- 'tbx_build_gateway_clean',
- 'tbx_build_gateway_loader',
- 'tbx_build_help',
- 'tbx_build_help_loader',
- 'tbx_build_loader',
- 'tbx_build_localization',
- 'tbx_build_macros',
- 'tbx_build_pal_loader',
- 'tbx_build_src',
- 'tbx_builder',
- 'tbx_builder_gateway',
- 'tbx_builder_gateway_lang',
- 'tbx_builder_help',
- 'tbx_builder_help_lang',
- 'tbx_builder_macros',
- 'tbx_builder_src',
- 'tbx_builder_src_lang',
- 'tbx_generate_pofile',
- 'temp_law_csa',
- 'temp_law_default',
- 'temp_law_fsa',
- 'temp_law_huang',
- 'temp_law_vfsa',
- 'test_clean',
- 'test_on_columns',
- 'test_run',
- 'test_run_level',
- 'testexamples',
- 'tf2des',
- 'tf2ss',
- 'thrownan',
- 'tic',
- 'time_id',
- 'toc',
- 'toeplitz',
- 'tokenpos',
- 'toolboxes',
- 'trace',
- 'trans',
- 'translatepaths',
- 'tree2code',
- 'trfmod',
- 'trianfml',
- 'trimmean',
- 'trisolve',
- 'trzeros',
- 'typeof',
- 'ui_observer',
- 'union',
- 'unique',
- 'unit_test_run',
- 'unix_g',
- 'unix_s',
- 'unix_w',
- 'unix_x',
- 'unobs',
- 'unpack',
- 'unwrap',
- 'variance',
- 'variancef',
- 'vec2list',
- 'vectorfind',
- 'ver',
- 'warnobsolete',
- 'wavread',
- 'wavwrite',
- 'wcenter',
- 'weekday',
- 'wfir',
- 'wfir_gui',
- 'whereami',
- 'who_user',
- 'whos',
- 'wiener',
- 'wigner',
- 'window',
- 'winlist',
- 'with_javasci',
- 'with_macros_source',
- 'with_modelica_compiler',
- 'with_tk',
- 'xcorr',
- 'xcosBlockEval',
- 'xcosBlockInterface',
- 'xcosCodeGeneration',
- 'xcosConfigureModelica',
- 'xcosPal',
- 'xcosPalAdd',
- 'xcosPalAddBlock',
- 'xcosPalExport',
- 'xcosPalGenerateAllIcons',
- 'xcosShowBlockWarning',
- 'xcosValidateBlockSet',
- 'xcosValidateCompareBlock',
- 'xcos_compile',
- 'xcos_debug_gui',
- 'xcos_run',
- 'xcos_simulate',
- 'xcov',
- 'xmltochm',
- 'xmltoformat',
- 'xmltohtml',
- 'xmltojar',
- 'xmltopdf',
- 'xmltops',
- 'xmltoweb',
- 'yulewalk',
- 'zeropen',
- 'zgrid',
- 'zpbutt',
- 'zpch1',
- 'zpch2',
- 'zpell',
-)
-
-variables_kw = (
- '$',
- '%F',
- '%T',
- '%e',
- '%eps',
- '%f',
- '%fftw',
- '%gui',
- '%i',
- '%inf',
- '%io',
- '%modalWarning',
- '%nan',
- '%pi',
- '%s',
- '%t',
- '%tk',
- '%toolboxes',
- '%toolboxes_dir',
- '%z',
- 'PWD',
- 'SCI',
- 'SCIHOME',
- 'TMPDIR',
- 'arnoldilib',
- 'assertlib',
- 'atomslib',
- 'cacsdlib',
- 'compatibility_functilib',
- 'corelib',
- 'data_structureslib',
- 'demo_toolslib',
- 'development_toolslib',
- 'differential_equationlib',
- 'dynamic_linklib',
- 'elementary_functionslib',
- 'enull',
- 'evoid',
- 'external_objectslib',
- 'fd',
- 'fileiolib',
- 'functionslib',
- 'genetic_algorithmslib',
- 'helptoolslib',
- 'home',
- 'integerlib',
- 'interpolationlib',
- 'iolib',
- 'jnull',
- 'jvoid',
- 'linear_algebralib',
- 'm2scilib',
- 'matiolib',
- 'modules_managerlib',
- 'neldermeadlib',
- 'optimbaselib',
- 'optimizationlib',
- 'optimsimplexlib',
- 'output_streamlib',
- 'overloadinglib',
- 'parameterslib',
- 'polynomialslib',
- 'preferenceslib',
- 'randliblib',
- 'scicos_autolib',
- 'scicos_utilslib',
- 'scinoteslib',
- 'signal_processinglib',
- 'simulated_annealinglib',
- 'soundlib',
- 'sparselib',
- 'special_functionslib',
- 'spreadsheetlib',
- 'statisticslib',
- 'stringlib',
- 'tclscilib',
- 'timelib',
- 'umfpacklib',
- 'xcoslib',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import subprocess
- from pygments.util import format_lines, duplicates_removed
-
- mapping = {'variables': 'builtin'}
-
- def extract_completion(var_type):
- s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- output = s.communicate('''\
-fd = mopen("/dev/stderr", "wt");
-mputl(strcat(completion("", "%s"), "||"), fd);
-mclose(fd)\n''' % var_type)
- if '||' not in output[1]:
- raise Exception(output[0])
- # Invalid DISPLAY causes this to be output:
- text = output[1].strip()
- if text.startswith('Error: unable to open display \n'):
- text = text[len('Error: unable to open display \n'):]
- return text.split('||')
-
- new_data = {}
- seen = set() # only keep first type for a given word
- for t in ('functions', 'commands', 'macros', 'variables'):
- new_data[t] = duplicates_removed(extract_completion(t), seen)
- seen.update(set(new_data[t]))
-
-
- with open(__file__) as f:
- content = f.read()
-
- header = content[:content.find('# Autogenerated')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
- with open(__file__, 'w') as f:
- f.write(header)
- f.write('# Autogenerated\n\n')
+ :license: BSD, see LICENSE for details.
+"""
+
+# Autogenerated
+
+commands_kw = (
+ 'abort',
+ 'apropos',
+ 'break',
+ 'case',
+ 'catch',
+ 'continue',
+ 'do',
+ 'else',
+ 'elseif',
+ 'end',
+ 'endfunction',
+ 'for',
+ 'function',
+ 'help',
+ 'if',
+ 'pause',
+ 'quit',
+ 'select',
+ 'then',
+ 'try',
+ 'while',
+)
+
+functions_kw = (
+ '!!_invoke_',
+ '%H5Object_e',
+ '%H5Object_fieldnames',
+ '%H5Object_p',
+ '%XMLAttr_6',
+ '%XMLAttr_e',
+ '%XMLAttr_i_XMLElem',
+ '%XMLAttr_length',
+ '%XMLAttr_p',
+ '%XMLAttr_size',
+ '%XMLDoc_6',
+ '%XMLDoc_e',
+ '%XMLDoc_i_XMLList',
+ '%XMLDoc_p',
+ '%XMLElem_6',
+ '%XMLElem_e',
+ '%XMLElem_i_XMLDoc',
+ '%XMLElem_i_XMLElem',
+ '%XMLElem_i_XMLList',
+ '%XMLElem_p',
+ '%XMLList_6',
+ '%XMLList_e',
+ '%XMLList_i_XMLElem',
+ '%XMLList_i_XMLList',
+ '%XMLList_length',
+ '%XMLList_p',
+ '%XMLList_size',
+ '%XMLNs_6',
+ '%XMLNs_e',
+ '%XMLNs_i_XMLElem',
+ '%XMLNs_p',
+ '%XMLSet_6',
+ '%XMLSet_e',
+ '%XMLSet_length',
+ '%XMLSet_p',
+ '%XMLSet_size',
+ '%XMLValid_p',
+ '%_EClass_6',
+ '%_EClass_e',
+ '%_EClass_p',
+ '%_EObj_0',
+ '%_EObj_1__EObj',
+ '%_EObj_1_b',
+ '%_EObj_1_c',
+ '%_EObj_1_i',
+ '%_EObj_1_s',
+ '%_EObj_2__EObj',
+ '%_EObj_2_b',
+ '%_EObj_2_c',
+ '%_EObj_2_i',
+ '%_EObj_2_s',
+ '%_EObj_3__EObj',
+ '%_EObj_3_b',
+ '%_EObj_3_c',
+ '%_EObj_3_i',
+ '%_EObj_3_s',
+ '%_EObj_4__EObj',
+ '%_EObj_4_b',
+ '%_EObj_4_c',
+ '%_EObj_4_i',
+ '%_EObj_4_s',
+ '%_EObj_5',
+ '%_EObj_6',
+ '%_EObj_a__EObj',
+ '%_EObj_a_b',
+ '%_EObj_a_c',
+ '%_EObj_a_i',
+ '%_EObj_a_s',
+ '%_EObj_d__EObj',
+ '%_EObj_d_b',
+ '%_EObj_d_c',
+ '%_EObj_d_i',
+ '%_EObj_d_s',
+ '%_EObj_disp',
+ '%_EObj_e',
+ '%_EObj_g__EObj',
+ '%_EObj_g_b',
+ '%_EObj_g_c',
+ '%_EObj_g_i',
+ '%_EObj_g_s',
+ '%_EObj_h__EObj',
+ '%_EObj_h_b',
+ '%_EObj_h_c',
+ '%_EObj_h_i',
+ '%_EObj_h_s',
+ '%_EObj_i__EObj',
+ '%_EObj_j__EObj',
+ '%_EObj_j_b',
+ '%_EObj_j_c',
+ '%_EObj_j_i',
+ '%_EObj_j_s',
+ '%_EObj_k__EObj',
+ '%_EObj_k_b',
+ '%_EObj_k_c',
+ '%_EObj_k_i',
+ '%_EObj_k_s',
+ '%_EObj_l__EObj',
+ '%_EObj_l_b',
+ '%_EObj_l_c',
+ '%_EObj_l_i',
+ '%_EObj_l_s',
+ '%_EObj_m__EObj',
+ '%_EObj_m_b',
+ '%_EObj_m_c',
+ '%_EObj_m_i',
+ '%_EObj_m_s',
+ '%_EObj_n__EObj',
+ '%_EObj_n_b',
+ '%_EObj_n_c',
+ '%_EObj_n_i',
+ '%_EObj_n_s',
+ '%_EObj_o__EObj',
+ '%_EObj_o_b',
+ '%_EObj_o_c',
+ '%_EObj_o_i',
+ '%_EObj_o_s',
+ '%_EObj_p',
+ '%_EObj_p__EObj',
+ '%_EObj_p_b',
+ '%_EObj_p_c',
+ '%_EObj_p_i',
+ '%_EObj_p_s',
+ '%_EObj_q__EObj',
+ '%_EObj_q_b',
+ '%_EObj_q_c',
+ '%_EObj_q_i',
+ '%_EObj_q_s',
+ '%_EObj_r__EObj',
+ '%_EObj_r_b',
+ '%_EObj_r_c',
+ '%_EObj_r_i',
+ '%_EObj_r_s',
+ '%_EObj_s__EObj',
+ '%_EObj_s_b',
+ '%_EObj_s_c',
+ '%_EObj_s_i',
+ '%_EObj_s_s',
+ '%_EObj_t',
+ '%_EObj_x__EObj',
+ '%_EObj_x_b',
+ '%_EObj_x_c',
+ '%_EObj_x_i',
+ '%_EObj_x_s',
+ '%_EObj_y__EObj',
+ '%_EObj_y_b',
+ '%_EObj_y_c',
+ '%_EObj_y_i',
+ '%_EObj_y_s',
+ '%_EObj_z__EObj',
+ '%_EObj_z_b',
+ '%_EObj_z_c',
+ '%_EObj_z_i',
+ '%_EObj_z_s',
+ '%_eigs',
+ '%_load',
+ '%b_1__EObj',
+ '%b_2__EObj',
+ '%b_3__EObj',
+ '%b_4__EObj',
+ '%b_a__EObj',
+ '%b_d__EObj',
+ '%b_g__EObj',
+ '%b_h__EObj',
+ '%b_i_XMLList',
+ '%b_i__EObj',
+ '%b_j__EObj',
+ '%b_k__EObj',
+ '%b_l__EObj',
+ '%b_m__EObj',
+ '%b_n__EObj',
+ '%b_o__EObj',
+ '%b_p__EObj',
+ '%b_q__EObj',
+ '%b_r__EObj',
+ '%b_s__EObj',
+ '%b_x__EObj',
+ '%b_y__EObj',
+ '%b_z__EObj',
+ '%c_1__EObj',
+ '%c_2__EObj',
+ '%c_3__EObj',
+ '%c_4__EObj',
+ '%c_a__EObj',
+ '%c_d__EObj',
+ '%c_g__EObj',
+ '%c_h__EObj',
+ '%c_i_XMLAttr',
+ '%c_i_XMLDoc',
+ '%c_i_XMLElem',
+ '%c_i_XMLList',
+ '%c_i__EObj',
+ '%c_j__EObj',
+ '%c_k__EObj',
+ '%c_l__EObj',
+ '%c_m__EObj',
+ '%c_n__EObj',
+ '%c_o__EObj',
+ '%c_p__EObj',
+ '%c_q__EObj',
+ '%c_r__EObj',
+ '%c_s__EObj',
+ '%c_x__EObj',
+ '%c_y__EObj',
+ '%c_z__EObj',
+ '%ce_i_XMLList',
+ '%fptr_i_XMLList',
+ '%h_i_XMLList',
+ '%hm_i_XMLList',
+ '%i_1__EObj',
+ '%i_2__EObj',
+ '%i_3__EObj',
+ '%i_4__EObj',
+ '%i_a__EObj',
+ '%i_abs',
+ '%i_cumprod',
+ '%i_cumsum',
+ '%i_d__EObj',
+ '%i_diag',
+ '%i_g__EObj',
+ '%i_h__EObj',
+ '%i_i_XMLList',
+ '%i_i__EObj',
+ '%i_j__EObj',
+ '%i_k__EObj',
+ '%i_l__EObj',
+ '%i_m__EObj',
+ '%i_matrix',
+ '%i_max',
+ '%i_maxi',
+ '%i_min',
+ '%i_mini',
+ '%i_mput',
+ '%i_n__EObj',
+ '%i_o__EObj',
+ '%i_p',
+ '%i_p__EObj',
+ '%i_prod',
+ '%i_q__EObj',
+ '%i_r__EObj',
+ '%i_s__EObj',
+ '%i_sum',
+ '%i_tril',
+ '%i_triu',
+ '%i_x__EObj',
+ '%i_y__EObj',
+ '%i_z__EObj',
+ '%ip_i_XMLList',
+ '%l_i_XMLList',
+ '%l_i__EObj',
+ '%lss_i_XMLList',
+ '%mc_i_XMLList',
+ '%msp_full',
+ '%msp_i_XMLList',
+ '%msp_spget',
+ '%p_i_XMLList',
+ '%ptr_i_XMLList',
+ '%r_i_XMLList',
+ '%s_1__EObj',
+ '%s_2__EObj',
+ '%s_3__EObj',
+ '%s_4__EObj',
+ '%s_a__EObj',
+ '%s_d__EObj',
+ '%s_g__EObj',
+ '%s_h__EObj',
+ '%s_i_XMLList',
+ '%s_i__EObj',
+ '%s_j__EObj',
+ '%s_k__EObj',
+ '%s_l__EObj',
+ '%s_m__EObj',
+ '%s_n__EObj',
+ '%s_o__EObj',
+ '%s_p__EObj',
+ '%s_q__EObj',
+ '%s_r__EObj',
+ '%s_s__EObj',
+ '%s_x__EObj',
+ '%s_y__EObj',
+ '%s_z__EObj',
+ '%sp_i_XMLList',
+ '%spb_i_XMLList',
+ '%st_i_XMLList',
+ 'Calendar',
+ 'ClipBoard',
+ 'Matplot',
+ 'Matplot1',
+ 'PlaySound',
+ 'TCL_DeleteInterp',
+ 'TCL_DoOneEvent',
+ 'TCL_EvalFile',
+ 'TCL_EvalStr',
+ 'TCL_ExistArray',
+ 'TCL_ExistInterp',
+ 'TCL_ExistVar',
+ 'TCL_GetVar',
+ 'TCL_GetVersion',
+ 'TCL_SetVar',
+ 'TCL_UnsetVar',
+ 'TCL_UpVar',
+ '_',
+ '_code2str',
+ '_d',
+ '_str2code',
+ 'about',
+ 'abs',
+ 'acos',
+ 'addModulePreferences',
+ 'addcolor',
+ 'addf',
+ 'addhistory',
+ 'addinter',
+ 'addlocalizationdomain',
+ 'amell',
+ 'and',
+ 'argn',
+ 'arl2_ius',
+ 'ascii',
+ 'asin',
+ 'atan',
+ 'backslash',
+ 'balanc',
+ 'banner',
+ 'base2dec',
+ 'basename',
+ 'bdiag',
+ 'beep',
+ 'besselh',
+ 'besseli',
+ 'besselj',
+ 'besselk',
+ 'bessely',
+ 'beta',
+ 'bezout',
+ 'bfinit',
+ 'blkfc1i',
+ 'blkslvi',
+ 'bool2s',
+ 'browsehistory',
+ 'browsevar',
+ 'bsplin3val',
+ 'buildDoc',
+ 'buildouttb',
+ 'bvode',
+ 'c_link',
+ 'call',
+ 'callblk',
+ 'captions',
+ 'cd',
+ 'cdfbet',
+ 'cdfbin',
+ 'cdfchi',
+ 'cdfchn',
+ 'cdff',
+ 'cdffnc',
+ 'cdfgam',
+ 'cdfnbn',
+ 'cdfnor',
+ 'cdfpoi',
+ 'cdft',
+ 'ceil',
+ 'champ',
+ 'champ1',
+ 'chdir',
+ 'chol',
+ 'clc',
+ 'clean',
+ 'clear',
+ 'clearfun',
+ 'clearglobal',
+ 'closeEditor',
+ 'closeEditvar',
+ 'closeXcos',
+ 'code2str',
+ 'coeff',
+ 'color',
+ 'comp',
+ 'completion',
+ 'conj',
+ 'contour2di',
+ 'contr',
+ 'conv2',
+ 'convstr',
+ 'copy',
+ 'copyfile',
+ 'corr',
+ 'cos',
+ 'coserror',
+ 'createdir',
+ 'cshep2d',
+ 'csvDefault',
+ 'csvIsnum',
+ 'csvRead',
+ 'csvStringToDouble',
+ 'csvTextScan',
+ 'csvWrite',
+ 'ctree2',
+ 'ctree3',
+ 'ctree4',
+ 'cumprod',
+ 'cumsum',
+ 'curblock',
+ 'curblockc',
+ 'daskr',
+ 'dasrt',
+ 'dassl',
+ 'data2sig',
+ 'datatipCreate',
+ 'datatipManagerMode',
+ 'datatipMove',
+ 'datatipRemove',
+ 'datatipSetDisplay',
+ 'datatipSetInterp',
+ 'datatipSetOrientation',
+ 'datatipSetStyle',
+ 'datatipToggle',
+ 'dawson',
+ 'dct',
+ 'debug',
+ 'dec2base',
+ 'deff',
+ 'definedfields',
+ 'degree',
+ 'delbpt',
+ 'delete',
+ 'deletefile',
+ 'delip',
+ 'delmenu',
+ 'det',
+ 'dgettext',
+ 'dhinf',
+ 'diag',
+ 'diary',
+ 'diffobjs',
+ 'disp',
+ 'dispbpt',
+ 'displayhistory',
+ 'disposefftwlibrary',
+ 'dlgamma',
+ 'dnaupd',
+ 'dneupd',
+ 'double',
+ 'drawaxis',
+ 'drawlater',
+ 'drawnow',
+ 'driver',
+ 'dsaupd',
+ 'dsearch',
+ 'dseupd',
+ 'dst',
+ 'duplicate',
+ 'editvar',
+ 'emptystr',
+ 'end_scicosim',
+ 'ereduc',
+ 'erf',
+ 'erfc',
+ 'erfcx',
+ 'erfi',
+ 'errcatch',
+ 'errclear',
+ 'error',
+ 'eval_cshep2d',
+ 'exec',
+ 'execstr',
+ 'exists',
+ 'exit',
+ 'exp',
+ 'expm',
+ 'exportUI',
+ 'export_to_hdf5',
+ 'eye',
+ 'fadj2sp',
+ 'fec',
+ 'feval',
+ 'fft',
+ 'fftw',
+ 'fftw_flags',
+ 'fftw_forget_wisdom',
+ 'fftwlibraryisloaded',
+ 'figure',
+ 'file',
+ 'filebrowser',
+ 'fileext',
+ 'fileinfo',
+ 'fileparts',
+ 'filesep',
+ 'find',
+ 'findBD',
+ 'findfiles',
+ 'fire_closing_finished',
+ 'floor',
+ 'format',
+ 'fort',
+ 'fprintfMat',
+ 'freq',
+ 'frexp',
+ 'fromc',
+ 'fromjava',
+ 'fscanfMat',
+ 'fsolve',
+ 'fstair',
+ 'full',
+ 'fullpath',
+ 'funcprot',
+ 'funptr',
+ 'gamma',
+ 'gammaln',
+ 'geom3d',
+ 'get',
+ 'getURL',
+ 'get_absolute_file_path',
+ 'get_fftw_wisdom',
+ 'getblocklabel',
+ 'getcallbackobject',
+ 'getdate',
+ 'getdebuginfo',
+ 'getdefaultlanguage',
+ 'getdrives',
+ 'getdynlibext',
+ 'getenv',
+ 'getfield',
+ 'gethistory',
+ 'gethistoryfile',
+ 'getinstalledlookandfeels',
+ 'getio',
+ 'getlanguage',
+ 'getlongpathname',
+ 'getlookandfeel',
+ 'getmd5',
+ 'getmemory',
+ 'getmodules',
+ 'getos',
+ 'getpid',
+ 'getrelativefilename',
+ 'getscicosvars',
+ 'getscilabmode',
+ 'getshortpathname',
+ 'gettext',
+ 'getvariablesonstack',
+ 'getversion',
+ 'glist',
+ 'global',
+ 'glue',
+ 'grand',
+ 'graphicfunction',
+ 'grayplot',
+ 'grep',
+ 'gsort',
+ 'gstacksize',
+ 'h5attr',
+ 'h5close',
+ 'h5cp',
+ 'h5dataset',
+ 'h5dump',
+ 'h5exists',
+ 'h5flush',
+ 'h5get',
+ 'h5group',
+ 'h5isArray',
+ 'h5isAttr',
+ 'h5isCompound',
+ 'h5isFile',
+ 'h5isGroup',
+ 'h5isList',
+ 'h5isRef',
+ 'h5isSet',
+ 'h5isSpace',
+ 'h5isType',
+ 'h5isVlen',
+ 'h5label',
+ 'h5ln',
+ 'h5ls',
+ 'h5mount',
+ 'h5mv',
+ 'h5open',
+ 'h5read',
+ 'h5readattr',
+ 'h5rm',
+ 'h5umount',
+ 'h5write',
+ 'h5writeattr',
+ 'havewindow',
+ 'helpbrowser',
+ 'hess',
+ 'hinf',
+ 'historymanager',
+ 'historysize',
+ 'host',
+ 'htmlDump',
+ 'htmlRead',
+ 'htmlReadStr',
+ 'htmlWrite',
+ 'iconvert',
+ 'ieee',
+ 'ilib_verbose',
+ 'imag',
+ 'impl',
+ 'import_from_hdf5',
+ 'imult',
+ 'inpnvi',
+ 'int',
+ 'int16',
+ 'int2d',
+ 'int32',
+ 'int3d',
+ 'int8',
+ 'interp',
+ 'interp2d',
+ 'interp3d',
+ 'intg',
+ 'intppty',
+ 'inttype',
+ 'inv',
+ 'invoke_lu',
+ 'is_handle_valid',
+ 'is_hdf5_file',
+ 'isalphanum',
+ 'isascii',
+ 'isdef',
+ 'isdigit',
+ 'isdir',
+ 'isequal',
+ 'isequalbitwise',
+ 'iserror',
+ 'isfile',
+ 'isglobal',
+ 'isletter',
+ 'isnum',
+ 'isreal',
+ 'iswaitingforinput',
+ 'jallowClassReloading',
+ 'jarray',
+ 'jautoTranspose',
+ 'jautoUnwrap',
+ 'javaclasspath',
+ 'javalibrarypath',
+ 'jcast',
+ 'jcompile',
+ 'jconvMatrixMethod',
+ 'jcreatejar',
+ 'jdeff',
+ 'jdisableTrace',
+ 'jenableTrace',
+ 'jexists',
+ 'jgetclassname',
+ 'jgetfield',
+ 'jgetfields',
+ 'jgetinfo',
+ 'jgetmethods',
+ 'jimport',
+ 'jinvoke',
+ 'jinvoke_db',
+ 'jnewInstance',
+ 'jremove',
+ 'jsetfield',
+ 'junwrap',
+ 'junwraprem',
+ 'jwrap',
+ 'jwrapinfloat',
+ 'kron',
+ 'lasterror',
+ 'ldiv',
+ 'ldivf',
+ 'legendre',
+ 'length',
+ 'lib',
+ 'librarieslist',
+ 'libraryinfo',
+ 'light',
+ 'linear_interpn',
+ 'lines',
+ 'link',
+ 'linmeq',
+ 'list',
+ 'listvar_in_hdf5',
+ 'load',
+ 'loadGui',
+ 'loadScicos',
+ 'loadXcos',
+ 'loadfftwlibrary',
+ 'loadhistory',
+ 'log',
+ 'log1p',
+ 'lsq',
+ 'lsq_splin',
+ 'lsqrsolve',
+ 'lsslist',
+ 'lstcat',
+ 'lstsize',
+ 'ltitr',
+ 'lu',
+ 'ludel',
+ 'lufact',
+ 'luget',
+ 'lusolve',
+ 'macr2lst',
+ 'macr2tree',
+ 'matfile_close',
+ 'matfile_listvar',
+ 'matfile_open',
+ 'matfile_varreadnext',
+ 'matfile_varwrite',
+ 'matrix',
+ 'max',
+ 'maxfiles',
+ 'mclearerr',
+ 'mclose',
+ 'meof',
+ 'merror',
+ 'messagebox',
+ 'mfprintf',
+ 'mfscanf',
+ 'mget',
+ 'mgeti',
+ 'mgetl',
+ 'mgetstr',
+ 'min',
+ 'mlist',
+ 'mode',
+ 'model2blk',
+ 'mopen',
+ 'move',
+ 'movefile',
+ 'mprintf',
+ 'mput',
+ 'mputl',
+ 'mputstr',
+ 'mscanf',
+ 'mseek',
+ 'msprintf',
+ 'msscanf',
+ 'mtell',
+ 'mtlb_mode',
+ 'mtlb_sparse',
+ 'mucomp',
+ 'mulf',
+ 'name2rgb',
+ 'nearfloat',
+ 'newaxes',
+ 'newest',
+ 'newfun',
+ 'nnz',
+ 'norm',
+ 'notify',
+ 'number_properties',
+ 'ode',
+ 'odedc',
+ 'ones',
+ 'openged',
+ 'opentk',
+ 'optim',
+ 'or',
+ 'ordmmd',
+ 'parallel_concurrency',
+ 'parallel_run',
+ 'param3d',
+ 'param3d1',
+ 'part',
+ 'pathconvert',
+ 'pathsep',
+ 'phase_simulation',
+ 'plot2d',
+ 'plot2d1',
+ 'plot2d2',
+ 'plot2d3',
+ 'plot2d4',
+ 'plot3d',
+ 'plot3d1',
+ 'plotbrowser',
+ 'pointer_xproperty',
+ 'poly',
+ 'ppol',
+ 'pppdiv',
+ 'predef',
+ 'preferences',
+ 'print',
+ 'printf',
+ 'printfigure',
+ 'printsetupbox',
+ 'prod',
+ 'progressionbar',
+ 'prompt',
+ 'pwd',
+ 'qld',
+ 'qp_solve',
+ 'qr',
+ 'raise_window',
+ 'rand',
+ 'rankqr',
+ 'rat',
+ 'rcond',
+ 'rdivf',
+ 'read',
+ 'read4b',
+ 'read_csv',
+ 'readb',
+ 'readgateway',
+ 'readmps',
+ 'real',
+ 'realtime',
+ 'realtimeinit',
+ 'regexp',
+ 'relocate_handle',
+ 'remez',
+ 'removeModulePreferences',
+ 'removedir',
+ 'removelinehistory',
+ 'res_with_prec',
+ 'resethistory',
+ 'residu',
+ 'resume',
+ 'return',
+ 'ricc',
+ 'rlist',
+ 'roots',
+ 'rotate_axes',
+ 'round',
+ 'rpem',
+ 'rtitr',
+ 'rubberbox',
+ 'save',
+ 'saveGui',
+ 'saveafterncommands',
+ 'saveconsecutivecommands',
+ 'savehistory',
+ 'schur',
+ 'sci_haltscicos',
+ 'sci_tree2',
+ 'sci_tree3',
+ 'sci_tree4',
+ 'sciargs',
+ 'scicos_debug',
+ 'scicos_debug_count',
+ 'scicos_time',
+ 'scicosim',
+ 'scinotes',
+ 'sctree',
+ 'semidef',
+ 'set',
+ 'set_blockerror',
+ 'set_fftw_wisdom',
+ 'set_xproperty',
+ 'setbpt',
+ 'setdefaultlanguage',
+ 'setenv',
+ 'setfield',
+ 'sethistoryfile',
+ 'setlanguage',
+ 'setlookandfeel',
+ 'setmenu',
+ 'sfact',
+ 'sfinit',
+ 'show_window',
+ 'sident',
+ 'sig2data',
+ 'sign',
+ 'simp',
+ 'simp_mode',
+ 'sin',
+ 'size',
+ 'slash',
+ 'sleep',
+ 'sorder',
+ 'sparse',
+ 'spchol',
+ 'spcompack',
+ 'spec',
+ 'spget',
+ 'splin',
+ 'splin2d',
+ 'splin3d',
+ 'splitURL',
+ 'spones',
+ 'sprintf',
+ 'sqrt',
+ 'stacksize',
+ 'str2code',
+ 'strcat',
+ 'strchr',
+ 'strcmp',
+ 'strcspn',
+ 'strindex',
+ 'string',
+ 'stringbox',
+ 'stripblanks',
+ 'strncpy',
+ 'strrchr',
+ 'strrev',
+ 'strsplit',
+ 'strspn',
+ 'strstr',
+ 'strsubst',
+ 'strtod',
+ 'strtok',
+ 'subf',
+ 'sum',
+ 'svd',
+ 'swap_handles',
+ 'symfcti',
+ 'syredi',
+ 'system_getproperty',
+ 'system_setproperty',
+ 'ta2lpd',
+ 'tan',
+ 'taucs_chdel',
+ 'taucs_chfact',
+ 'taucs_chget',
+ 'taucs_chinfo',
+ 'taucs_chsolve',
+ 'tempname',
+ 'testmatrix',
+ 'timer',
+ 'tlist',
+ 'tohome',
+ 'tokens',
+ 'toolbar',
+ 'toprint',
+ 'tr_zer',
+ 'tril',
+ 'triu',
+ 'type',
+ 'typename',
+ 'uiDisplayTree',
+ 'uicontextmenu',
+ 'uicontrol',
+ 'uigetcolor',
+ 'uigetdir',
+ 'uigetfile',
+ 'uigetfont',
+ 'uimenu',
+ 'uint16',
+ 'uint32',
+ 'uint8',
+ 'uipopup',
+ 'uiputfile',
+ 'uiwait',
+ 'ulink',
+ 'umf_ludel',
+ 'umf_lufact',
+ 'umf_luget',
+ 'umf_luinfo',
+ 'umf_lusolve',
+ 'umfpack',
+ 'unglue',
+ 'unix',
+ 'unsetmenu',
+ 'unzoom',
+ 'updatebrowsevar',
+ 'usecanvas',
+ 'useeditor',
+ 'user',
+ 'var2vec',
+ 'varn',
+ 'vec2var',
+ 'waitbar',
+ 'warnBlockByUID',
+ 'warning',
+ 'what',
+ 'where',
+ 'whereis',
+ 'who',
+ 'winsid',
+ 'with_module',
+ 'writb',
+ 'write',
+ 'write4b',
+ 'write_csv',
+ 'x_choose',
+ 'x_choose_modeless',
+ 'x_dialog',
+ 'x_mdialog',
+ 'xarc',
+ 'xarcs',
+ 'xarrows',
+ 'xchange',
+ 'xchoicesi',
+ 'xclick',
+ 'xcos',
+ 'xcosAddToolsMenu',
+ 'xcosConfigureXmlFile',
+ 'xcosDiagramToScilab',
+ 'xcosPalCategoryAdd',
+ 'xcosPalDelete',
+ 'xcosPalDisable',
+ 'xcosPalEnable',
+ 'xcosPalGenerateIcon',
+ 'xcosPalGet',
+ 'xcosPalLoad',
+ 'xcosPalMove',
+ 'xcosSimulationStarted',
+ 'xcosUpdateBlock',
+ 'xdel',
+ 'xend',
+ 'xfarc',
+ 'xfarcs',
+ 'xfpoly',
+ 'xfpolys',
+ 'xfrect',
+ 'xget',
+ 'xgetmouse',
+ 'xgraduate',
+ 'xgrid',
+ 'xinit',
+ 'xlfont',
+ 'xls_open',
+ 'xls_read',
+ 'xmlAddNs',
+ 'xmlAppend',
+ 'xmlAsNumber',
+ 'xmlAsText',
+ 'xmlDTD',
+ 'xmlDelete',
+ 'xmlDocument',
+ 'xmlDump',
+ 'xmlElement',
+ 'xmlFormat',
+ 'xmlGetNsByHref',
+ 'xmlGetNsByPrefix',
+ 'xmlGetOpenDocs',
+ 'xmlIsValidObject',
+ 'xmlName',
+ 'xmlNs',
+ 'xmlRead',
+ 'xmlReadStr',
+ 'xmlRelaxNG',
+ 'xmlRemove',
+ 'xmlSchema',
+ 'xmlSetAttributes',
+ 'xmlValidate',
+ 'xmlWrite',
+ 'xmlXPath',
+ 'xname',
+ 'xpause',
+ 'xpoly',
+ 'xpolys',
+ 'xrect',
+ 'xrects',
+ 'xs2bmp',
+ 'xs2emf',
+ 'xs2eps',
+ 'xs2gif',
+ 'xs2jpg',
+ 'xs2pdf',
+ 'xs2png',
+ 'xs2ppm',
+ 'xs2ps',
+ 'xs2svg',
+ 'xsegs',
+ 'xset',
+ 'xstring',
+ 'xstringb',
+ 'xtitle',
+ 'zeros',
+ 'znaupd',
+ 'zneupd',
+ 'zoom_rect',
+)
+
+macros_kw = (
+ '!_deff_wrapper',
+ '%0_i_st',
+ '%3d_i_h',
+ '%Block_xcosUpdateBlock',
+ '%TNELDER_p',
+ '%TNELDER_string',
+ '%TNMPLOT_p',
+ '%TNMPLOT_string',
+ '%TOPTIM_p',
+ '%TOPTIM_string',
+ '%TSIMPLEX_p',
+ '%TSIMPLEX_string',
+ '%_EVoid_p',
+ '%_gsort',
+ '%_listvarinfile',
+ '%_rlist',
+ '%_save',
+ '%_sodload',
+ '%_strsplit',
+ '%_unwrap',
+ '%ar_p',
+ '%asn',
+ '%b_a_b',
+ '%b_a_s',
+ '%b_c_s',
+ '%b_c_spb',
+ '%b_cumprod',
+ '%b_cumsum',
+ '%b_d_s',
+ '%b_diag',
+ '%b_e',
+ '%b_f_s',
+ '%b_f_spb',
+ '%b_g_s',
+ '%b_g_spb',
+ '%b_grand',
+ '%b_h_s',
+ '%b_h_spb',
+ '%b_i_b',
+ '%b_i_ce',
+ '%b_i_h',
+ '%b_i_hm',
+ '%b_i_s',
+ '%b_i_sp',
+ '%b_i_spb',
+ '%b_i_st',
+ '%b_iconvert',
+ '%b_l_b',
+ '%b_l_s',
+ '%b_m_b',
+ '%b_m_s',
+ '%b_matrix',
+ '%b_n_hm',
+ '%b_o_hm',
+ '%b_p_s',
+ '%b_prod',
+ '%b_r_b',
+ '%b_r_s',
+ '%b_s_b',
+ '%b_s_s',
+ '%b_string',
+ '%b_sum',
+ '%b_tril',
+ '%b_triu',
+ '%b_x_b',
+ '%b_x_s',
+ '%bicg',
+ '%bicgstab',
+ '%c_a_c',
+ '%c_b_c',
+ '%c_b_s',
+ '%c_diag',
+ '%c_dsearch',
+ '%c_e',
+ '%c_eye',
+ '%c_f_s',
+ '%c_grand',
+ '%c_i_c',
+ '%c_i_ce',
+ '%c_i_h',
+ '%c_i_hm',
+ '%c_i_lss',
+ '%c_i_r',
+ '%c_i_s',
+ '%c_i_st',
+ '%c_matrix',
+ '%c_n_l',
+ '%c_n_st',
+ '%c_o_l',
+ '%c_o_st',
+ '%c_ones',
+ '%c_rand',
+ '%c_tril',
+ '%c_triu',
+ '%cblock_c_cblock',
+ '%cblock_c_s',
+ '%cblock_e',
+ '%cblock_f_cblock',
+ '%cblock_p',
+ '%cblock_size',
+ '%ce_6',
+ '%ce_c_ce',
+ '%ce_e',
+ '%ce_f_ce',
+ '%ce_i_ce',
+ '%ce_i_s',
+ '%ce_i_st',
+ '%ce_matrix',
+ '%ce_p',
+ '%ce_size',
+ '%ce_string',
+ '%ce_t',
+ '%cgs',
+ '%champdat_i_h',
+ '%choose',
+ '%diagram_xcos',
+ '%dir_p',
+ '%fptr_i_st',
+ '%grand_perm',
+ '%grayplot_i_h',
+ '%h_i_st',
+ '%hmS_k_hmS_generic',
+ '%hm_1_hm',
+ '%hm_1_s',
+ '%hm_2_hm',
+ '%hm_2_s',
+ '%hm_3_hm',
+ '%hm_3_s',
+ '%hm_4_hm',
+ '%hm_4_s',
+ '%hm_5',
+ '%hm_a_hm',
+ '%hm_a_r',
+ '%hm_a_s',
+ '%hm_abs',
+ '%hm_and',
+ '%hm_bool2s',
+ '%hm_c_hm',
+ '%hm_ceil',
+ '%hm_conj',
+ '%hm_cos',
+ '%hm_cumprod',
+ '%hm_cumsum',
+ '%hm_d_hm',
+ '%hm_d_s',
+ '%hm_degree',
+ '%hm_dsearch',
+ '%hm_e',
+ '%hm_exp',
+ '%hm_eye',
+ '%hm_f_hm',
+ '%hm_find',
+ '%hm_floor',
+ '%hm_g_hm',
+ '%hm_grand',
+ '%hm_gsort',
+ '%hm_h_hm',
+ '%hm_i_b',
+ '%hm_i_ce',
+ '%hm_i_h',
+ '%hm_i_hm',
+ '%hm_i_i',
+ '%hm_i_p',
+ '%hm_i_r',
+ '%hm_i_s',
+ '%hm_i_st',
+ '%hm_iconvert',
+ '%hm_imag',
+ '%hm_int',
+ '%hm_isnan',
+ '%hm_isreal',
+ '%hm_j_hm',
+ '%hm_j_s',
+ '%hm_k_hm',
+ '%hm_k_s',
+ '%hm_log',
+ '%hm_m_p',
+ '%hm_m_r',
+ '%hm_m_s',
+ '%hm_matrix',
+ '%hm_max',
+ '%hm_mean',
+ '%hm_median',
+ '%hm_min',
+ '%hm_n_b',
+ '%hm_n_c',
+ '%hm_n_hm',
+ '%hm_n_i',
+ '%hm_n_p',
+ '%hm_n_s',
+ '%hm_o_b',
+ '%hm_o_c',
+ '%hm_o_hm',
+ '%hm_o_i',
+ '%hm_o_p',
+ '%hm_o_s',
+ '%hm_ones',
+ '%hm_or',
+ '%hm_p',
+ '%hm_prod',
+ '%hm_q_hm',
+ '%hm_r_s',
+ '%hm_rand',
+ '%hm_real',
+ '%hm_round',
+ '%hm_s',
+ '%hm_s_hm',
+ '%hm_s_r',
+ '%hm_s_s',
+ '%hm_sign',
+ '%hm_sin',
+ '%hm_size',
+ '%hm_sqrt',
+ '%hm_stdev',
+ '%hm_string',
+ '%hm_sum',
+ '%hm_x_hm',
+ '%hm_x_p',
+ '%hm_x_s',
+ '%hm_zeros',
+ '%i_1_s',
+ '%i_2_s',
+ '%i_3_s',
+ '%i_4_s',
+ '%i_Matplot',
+ '%i_a_i',
+ '%i_a_s',
+ '%i_and',
+ '%i_ascii',
+ '%i_b_s',
+ '%i_bezout',
+ '%i_champ',
+ '%i_champ1',
+ '%i_contour',
+ '%i_contour2d',
+ '%i_d_i',
+ '%i_d_s',
+ '%i_dsearch',
+ '%i_e',
+ '%i_fft',
+ '%i_g_i',
+ '%i_gcd',
+ '%i_grand',
+ '%i_h_i',
+ '%i_i_ce',
+ '%i_i_h',
+ '%i_i_hm',
+ '%i_i_i',
+ '%i_i_s',
+ '%i_i_st',
+ '%i_j_i',
+ '%i_j_s',
+ '%i_l_s',
+ '%i_lcm',
+ '%i_length',
+ '%i_m_i',
+ '%i_m_s',
+ '%i_mfprintf',
+ '%i_mprintf',
+ '%i_msprintf',
+ '%i_n_s',
+ '%i_o_s',
+ '%i_or',
+ '%i_p_i',
+ '%i_p_s',
+ '%i_plot2d',
+ '%i_plot2d1',
+ '%i_plot2d2',
+ '%i_q_s',
+ '%i_r_i',
+ '%i_r_s',
+ '%i_round',
+ '%i_s_i',
+ '%i_s_s',
+ '%i_sign',
+ '%i_string',
+ '%i_x_i',
+ '%i_x_s',
+ '%ip_a_s',
+ '%ip_i_st',
+ '%ip_m_s',
+ '%ip_n_ip',
+ '%ip_o_ip',
+ '%ip_p',
+ '%ip_part',
+ '%ip_s_s',
+ '%ip_string',
+ '%k',
+ '%l_i_h',
+ '%l_i_s',
+ '%l_i_st',
+ '%l_isequal',
+ '%l_n_c',
+ '%l_n_l',
+ '%l_n_m',
+ '%l_n_p',
+ '%l_n_s',
+ '%l_n_st',
+ '%l_o_c',
+ '%l_o_l',
+ '%l_o_m',
+ '%l_o_p',
+ '%l_o_s',
+ '%l_o_st',
+ '%lss_a_lss',
+ '%lss_a_p',
+ '%lss_a_r',
+ '%lss_a_s',
+ '%lss_c_lss',
+ '%lss_c_p',
+ '%lss_c_r',
+ '%lss_c_s',
+ '%lss_e',
+ '%lss_eye',
+ '%lss_f_lss',
+ '%lss_f_p',
+ '%lss_f_r',
+ '%lss_f_s',
+ '%lss_i_ce',
+ '%lss_i_lss',
+ '%lss_i_p',
+ '%lss_i_r',
+ '%lss_i_s',
+ '%lss_i_st',
+ '%lss_inv',
+ '%lss_l_lss',
+ '%lss_l_p',
+ '%lss_l_r',
+ '%lss_l_s',
+ '%lss_m_lss',
+ '%lss_m_p',
+ '%lss_m_r',
+ '%lss_m_s',
+ '%lss_n_lss',
+ '%lss_n_p',
+ '%lss_n_r',
+ '%lss_n_s',
+ '%lss_norm',
+ '%lss_o_lss',
+ '%lss_o_p',
+ '%lss_o_r',
+ '%lss_o_s',
+ '%lss_ones',
+ '%lss_r_lss',
+ '%lss_r_p',
+ '%lss_r_r',
+ '%lss_r_s',
+ '%lss_rand',
+ '%lss_s',
+ '%lss_s_lss',
+ '%lss_s_p',
+ '%lss_s_r',
+ '%lss_s_s',
+ '%lss_size',
+ '%lss_t',
+ '%lss_v_lss',
+ '%lss_v_p',
+ '%lss_v_r',
+ '%lss_v_s',
+ '%lt_i_s',
+ '%m_n_l',
+ '%m_o_l',
+ '%mc_i_h',
+ '%mc_i_s',
+ '%mc_i_st',
+ '%mc_n_st',
+ '%mc_o_st',
+ '%mc_string',
+ '%mps_p',
+ '%mps_string',
+ '%msp_a_s',
+ '%msp_abs',
+ '%msp_e',
+ '%msp_find',
+ '%msp_i_s',
+ '%msp_i_st',
+ '%msp_length',
+ '%msp_m_s',
+ '%msp_maxi',
+ '%msp_n_msp',
+ '%msp_nnz',
+ '%msp_o_msp',
+ '%msp_p',
+ '%msp_sparse',
+ '%msp_spones',
+ '%msp_t',
+ '%p_a_lss',
+ '%p_a_r',
+ '%p_c_lss',
+ '%p_c_r',
+ '%p_cumprod',
+ '%p_cumsum',
+ '%p_d_p',
+ '%p_d_r',
+ '%p_d_s',
+ '%p_det',
+ '%p_e',
+ '%p_f_lss',
+ '%p_f_r',
+ '%p_grand',
+ '%p_i_ce',
+ '%p_i_h',
+ '%p_i_hm',
+ '%p_i_lss',
+ '%p_i_p',
+ '%p_i_r',
+ '%p_i_s',
+ '%p_i_st',
+ '%p_inv',
+ '%p_j_s',
+ '%p_k_p',
+ '%p_k_r',
+ '%p_k_s',
+ '%p_l_lss',
+ '%p_l_p',
+ '%p_l_r',
+ '%p_l_s',
+ '%p_m_hm',
+ '%p_m_lss',
+ '%p_m_r',
+ '%p_matrix',
+ '%p_n_l',
+ '%p_n_lss',
+ '%p_n_r',
+ '%p_o_l',
+ '%p_o_lss',
+ '%p_o_r',
+ '%p_o_sp',
+ '%p_p_s',
+ '%p_part',
+ '%p_prod',
+ '%p_q_p',
+ '%p_q_r',
+ '%p_q_s',
+ '%p_r_lss',
+ '%p_r_p',
+ '%p_r_r',
+ '%p_r_s',
+ '%p_s_lss',
+ '%p_s_r',
+ '%p_simp',
+ '%p_string',
+ '%p_sum',
+ '%p_v_lss',
+ '%p_v_p',
+ '%p_v_r',
+ '%p_v_s',
+ '%p_x_hm',
+ '%p_x_r',
+ '%p_y_p',
+ '%p_y_r',
+ '%p_y_s',
+ '%p_z_p',
+ '%p_z_r',
+ '%p_z_s',
+ '%pcg',
+ '%plist_p',
+ '%plist_string',
+ '%r_0',
+ '%r_a_hm',
+ '%r_a_lss',
+ '%r_a_p',
+ '%r_a_r',
+ '%r_a_s',
+ '%r_c_lss',
+ '%r_c_p',
+ '%r_c_r',
+ '%r_c_s',
+ '%r_clean',
+ '%r_cumprod',
+ '%r_cumsum',
+ '%r_d_p',
+ '%r_d_r',
+ '%r_d_s',
+ '%r_det',
+ '%r_diag',
+ '%r_e',
+ '%r_eye',
+ '%r_f_lss',
+ '%r_f_p',
+ '%r_f_r',
+ '%r_f_s',
+ '%r_i_ce',
+ '%r_i_hm',
+ '%r_i_lss',
+ '%r_i_p',
+ '%r_i_r',
+ '%r_i_s',
+ '%r_i_st',
+ '%r_inv',
+ '%r_j_s',
+ '%r_k_p',
+ '%r_k_r',
+ '%r_k_s',
+ '%r_l_lss',
+ '%r_l_p',
+ '%r_l_r',
+ '%r_l_s',
+ '%r_m_hm',
+ '%r_m_lss',
+ '%r_m_p',
+ '%r_m_r',
+ '%r_m_s',
+ '%r_matrix',
+ '%r_n_lss',
+ '%r_n_p',
+ '%r_n_r',
+ '%r_n_s',
+ '%r_norm',
+ '%r_o_lss',
+ '%r_o_p',
+ '%r_o_r',
+ '%r_o_s',
+ '%r_ones',
+ '%r_p',
+ '%r_p_s',
+ '%r_prod',
+ '%r_q_p',
+ '%r_q_r',
+ '%r_q_s',
+ '%r_r_lss',
+ '%r_r_p',
+ '%r_r_r',
+ '%r_r_s',
+ '%r_rand',
+ '%r_s',
+ '%r_s_hm',
+ '%r_s_lss',
+ '%r_s_p',
+ '%r_s_r',
+ '%r_s_s',
+ '%r_simp',
+ '%r_size',
+ '%r_string',
+ '%r_sum',
+ '%r_t',
+ '%r_tril',
+ '%r_triu',
+ '%r_v_lss',
+ '%r_v_p',
+ '%r_v_r',
+ '%r_v_s',
+ '%r_varn',
+ '%r_x_p',
+ '%r_x_r',
+ '%r_x_s',
+ '%r_y_p',
+ '%r_y_r',
+ '%r_y_s',
+ '%r_z_p',
+ '%r_z_r',
+ '%r_z_s',
+ '%s_1_hm',
+ '%s_1_i',
+ '%s_2_hm',
+ '%s_2_i',
+ '%s_3_hm',
+ '%s_3_i',
+ '%s_4_hm',
+ '%s_4_i',
+ '%s_5',
+ '%s_a_b',
+ '%s_a_hm',
+ '%s_a_i',
+ '%s_a_ip',
+ '%s_a_lss',
+ '%s_a_msp',
+ '%s_a_r',
+ '%s_a_sp',
+ '%s_and',
+ '%s_b_i',
+ '%s_b_s',
+ '%s_bezout',
+ '%s_c_b',
+ '%s_c_cblock',
+ '%s_c_lss',
+ '%s_c_r',
+ '%s_c_sp',
+ '%s_d_b',
+ '%s_d_i',
+ '%s_d_p',
+ '%s_d_r',
+ '%s_d_sp',
+ '%s_e',
+ '%s_f_b',
+ '%s_f_cblock',
+ '%s_f_lss',
+ '%s_f_r',
+ '%s_f_sp',
+ '%s_g_b',
+ '%s_g_s',
+ '%s_gcd',
+ '%s_grand',
+ '%s_h_b',
+ '%s_h_s',
+ '%s_i_b',
+ '%s_i_c',
+ '%s_i_ce',
+ '%s_i_h',
+ '%s_i_hm',
+ '%s_i_i',
+ '%s_i_lss',
+ '%s_i_p',
+ '%s_i_r',
+ '%s_i_s',
+ '%s_i_sp',
+ '%s_i_spb',
+ '%s_i_st',
+ '%s_j_i',
+ '%s_k_hm',
+ '%s_k_p',
+ '%s_k_r',
+ '%s_k_sp',
+ '%s_l_b',
+ '%s_l_hm',
+ '%s_l_i',
+ '%s_l_lss',
+ '%s_l_p',
+ '%s_l_r',
+ '%s_l_s',
+ '%s_l_sp',
+ '%s_lcm',
+ '%s_m_b',
+ '%s_m_hm',
+ '%s_m_i',
+ '%s_m_ip',
+ '%s_m_lss',
+ '%s_m_msp',
+ '%s_m_r',
+ '%s_matrix',
+ '%s_n_hm',
+ '%s_n_i',
+ '%s_n_l',
+ '%s_n_lss',
+ '%s_n_r',
+ '%s_n_st',
+ '%s_o_hm',
+ '%s_o_i',
+ '%s_o_l',
+ '%s_o_lss',
+ '%s_o_r',
+ '%s_o_st',
+ '%s_or',
+ '%s_p_b',
+ '%s_p_i',
+ '%s_pow',
+ '%s_q_hm',
+ '%s_q_i',
+ '%s_q_p',
+ '%s_q_r',
+ '%s_q_sp',
+ '%s_r_b',
+ '%s_r_i',
+ '%s_r_lss',
+ '%s_r_p',
+ '%s_r_r',
+ '%s_r_s',
+ '%s_r_sp',
+ '%s_s_b',
+ '%s_s_hm',
+ '%s_s_i',
+ '%s_s_ip',
+ '%s_s_lss',
+ '%s_s_r',
+ '%s_s_sp',
+ '%s_simp',
+ '%s_v_lss',
+ '%s_v_p',
+ '%s_v_r',
+ '%s_v_s',
+ '%s_x_b',
+ '%s_x_hm',
+ '%s_x_i',
+ '%s_x_r',
+ '%s_y_p',
+ '%s_y_r',
+ '%s_y_sp',
+ '%s_z_p',
+ '%s_z_r',
+ '%s_z_sp',
+ '%sn',
+ '%sp_a_s',
+ '%sp_a_sp',
+ '%sp_and',
+ '%sp_c_s',
+ '%sp_ceil',
+ '%sp_conj',
+ '%sp_cos',
+ '%sp_cumprod',
+ '%sp_cumsum',
+ '%sp_d_s',
+ '%sp_d_sp',
+ '%sp_det',
+ '%sp_diag',
+ '%sp_e',
+ '%sp_exp',
+ '%sp_f_s',
+ '%sp_floor',
+ '%sp_grand',
+ '%sp_gsort',
+ '%sp_i_ce',
+ '%sp_i_h',
+ '%sp_i_s',
+ '%sp_i_sp',
+ '%sp_i_st',
+ '%sp_int',
+ '%sp_inv',
+ '%sp_k_s',
+ '%sp_k_sp',
+ '%sp_l_s',
+ '%sp_l_sp',
+ '%sp_length',
+ '%sp_max',
+ '%sp_min',
+ '%sp_norm',
+ '%sp_or',
+ '%sp_p_s',
+ '%sp_prod',
+ '%sp_q_s',
+ '%sp_q_sp',
+ '%sp_r_s',
+ '%sp_r_sp',
+ '%sp_round',
+ '%sp_s_s',
+ '%sp_s_sp',
+ '%sp_sin',
+ '%sp_sqrt',
+ '%sp_string',
+ '%sp_sum',
+ '%sp_tril',
+ '%sp_triu',
+ '%sp_y_s',
+ '%sp_y_sp',
+ '%sp_z_s',
+ '%sp_z_sp',
+ '%spb_and',
+ '%spb_c_b',
+ '%spb_cumprod',
+ '%spb_cumsum',
+ '%spb_diag',
+ '%spb_e',
+ '%spb_f_b',
+ '%spb_g_b',
+ '%spb_g_spb',
+ '%spb_h_b',
+ '%spb_h_spb',
+ '%spb_i_b',
+ '%spb_i_ce',
+ '%spb_i_h',
+ '%spb_i_st',
+ '%spb_or',
+ '%spb_prod',
+ '%spb_sum',
+ '%spb_tril',
+ '%spb_triu',
+ '%st_6',
+ '%st_c_st',
+ '%st_e',
+ '%st_f_st',
+ '%st_i_b',
+ '%st_i_c',
+ '%st_i_fptr',
+ '%st_i_h',
+ '%st_i_i',
+ '%st_i_ip',
+ '%st_i_lss',
+ '%st_i_msp',
+ '%st_i_p',
+ '%st_i_r',
+ '%st_i_s',
+ '%st_i_sp',
+ '%st_i_spb',
+ '%st_i_st',
+ '%st_matrix',
+ '%st_n_c',
+ '%st_n_l',
+ '%st_n_mc',
+ '%st_n_p',
+ '%st_n_s',
+ '%st_o_c',
+ '%st_o_l',
+ '%st_o_mc',
+ '%st_o_p',
+ '%st_o_s',
+ '%st_o_tl',
+ '%st_p',
+ '%st_size',
+ '%st_string',
+ '%st_t',
+ '%ticks_i_h',
+ '%xls_e',
+ '%xls_p',
+ '%xlssheet_e',
+ '%xlssheet_p',
+ '%xlssheet_size',
+ '%xlssheet_string',
+ 'DominationRank',
+ 'G_make',
+ 'IsAScalar',
+ 'NDcost',
+ 'OS_Version',
+ 'PlotSparse',
+ 'ReadHBSparse',
+ 'TCL_CreateSlave',
+ 'abcd',
+ 'abinv',
+ 'accept_func_default',
+ 'accept_func_vfsa',
+ 'acf',
+ 'acosd',
+ 'acosh',
+ 'acoshm',
+ 'acosm',
+ 'acot',
+ 'acotd',
+ 'acoth',
+ 'acsc',
+ 'acscd',
+ 'acsch',
+ 'add_demo',
+ 'add_help_chapter',
+ 'add_module_help_chapter',
+ 'add_param',
+ 'add_profiling',
+ 'adj2sp',
+ 'aff2ab',
+ 'ana_style',
+ 'analpf',
+ 'analyze',
+ 'aplat',
+ 'arhnk',
+ 'arl2',
+ 'arma2p',
+ 'arma2ss',
+ 'armac',
+ 'armax',
+ 'armax1',
+ 'arobasestring2strings',
+ 'arsimul',
+ 'ascii2string',
+ 'asciimat',
+ 'asec',
+ 'asecd',
+ 'asech',
+ 'asind',
+ 'asinh',
+ 'asinhm',
+ 'asinm',
+ 'assert_checkalmostequal',
+ 'assert_checkequal',
+ 'assert_checkerror',
+ 'assert_checkfalse',
+ 'assert_checkfilesequal',
+ 'assert_checktrue',
+ 'assert_comparecomplex',
+ 'assert_computedigits',
+ 'assert_cond2reltol',
+ 'assert_cond2reqdigits',
+ 'assert_generror',
+ 'atand',
+ 'atanh',
+ 'atanhm',
+ 'atanm',
+ 'atomsAutoload',
+ 'atomsAutoloadAdd',
+ 'atomsAutoloadDel',
+ 'atomsAutoloadList',
+ 'atomsCategoryList',
+ 'atomsCheckModule',
+ 'atomsDepTreeShow',
+ 'atomsGetConfig',
+ 'atomsGetInstalled',
+ 'atomsGetInstalledPath',
+ 'atomsGetLoaded',
+ 'atomsGetLoadedPath',
+ 'atomsInstall',
+ 'atomsIsInstalled',
+ 'atomsIsLoaded',
+ 'atomsList',
+ 'atomsLoad',
+ 'atomsQuit',
+ 'atomsRemove',
+ 'atomsRepositoryAdd',
+ 'atomsRepositoryDel',
+ 'atomsRepositoryList',
+ 'atomsRestoreConfig',
+ 'atomsSaveConfig',
+ 'atomsSearch',
+ 'atomsSetConfig',
+ 'atomsShow',
+ 'atomsSystemInit',
+ 'atomsSystemUpdate',
+ 'atomsTest',
+ 'atomsUpdate',
+ 'atomsVersion',
+ 'augment',
+ 'auread',
+ 'auwrite',
+ 'balreal',
+ 'bench_run',
+ 'bilin',
+ 'bilt',
+ 'bin2dec',
+ 'binomial',
+ 'bitand',
+ 'bitcmp',
+ 'bitget',
+ 'bitor',
+ 'bitset',
+ 'bitxor',
+ 'black',
+ 'blanks',
+ 'bloc2exp',
+ 'bloc2ss',
+ 'block_parameter_error',
+ 'bode',
+ 'bode_asymp',
+ 'bstap',
+ 'buttmag',
+ 'bvodeS',
+ 'bytecode',
+ 'bytecodewalk',
+ 'cainv',
+ 'calendar',
+ 'calerf',
+ 'calfrq',
+ 'canon',
+ 'casc',
+ 'cat',
+ 'cat_code',
+ 'cb_m2sci_gui',
+ 'ccontrg',
+ 'cell',
+ 'cell2mat',
+ 'cellstr',
+ 'center',
+ 'cepstrum',
+ 'cfspec',
+ 'char',
+ 'chart',
+ 'cheb1mag',
+ 'cheb2mag',
+ 'check_gateways',
+ 'check_modules_xml',
+ 'check_versions',
+ 'chepol',
+ 'chfact',
+ 'chsolve',
+ 'classmarkov',
+ 'clean_help',
+ 'clock',
+ 'cls2dls',
+ 'cmb_lin',
+ 'cmndred',
+ 'cmoment',
+ 'coding_ga_binary',
+ 'coding_ga_identity',
+ 'coff',
+ 'coffg',
+ 'colcomp',
+ 'colcompr',
+ 'colinout',
+ 'colregul',
+ 'companion',
+ 'complex',
+ 'compute_initial_temp',
+ 'cond',
+ 'cond2sp',
+ 'condestsp',
+ 'configure_msifort',
+ 'configure_msvc',
+ 'conjgrad',
+ 'cont_frm',
+ 'cont_mat',
+ 'contrss',
+ 'conv',
+ 'convert_to_float',
+ 'convertindex',
+ 'convol',
+ 'convol2d',
+ 'copfac',
+ 'correl',
+ 'cosd',
+ 'cosh',
+ 'coshm',
+ 'cosm',
+ 'cotd',
+ 'cotg',
+ 'coth',
+ 'cothm',
+ 'cov',
+ 'covar',
+ 'createXConfiguration',
+ 'createfun',
+ 'createstruct',
+ 'cross',
+ 'crossover_ga_binary',
+ 'crossover_ga_default',
+ 'csc',
+ 'cscd',
+ 'csch',
+ 'csgn',
+ 'csim',
+ 'cspect',
+ 'ctr_gram',
+ 'czt',
+ 'dae',
+ 'daeoptions',
+ 'damp',
+ 'datafit',
+ 'date',
+ 'datenum',
+ 'datevec',
+ 'dbphi',
+ 'dcf',
+ 'ddp',
+ 'dec2bin',
+ 'dec2hex',
+ 'dec2oct',
+ 'del_help_chapter',
+ 'del_module_help_chapter',
+ 'demo_begin',
+ 'demo_choose',
+ 'demo_compiler',
+ 'demo_end',
+ 'demo_file_choice',
+ 'demo_folder_choice',
+ 'demo_function_choice',
+ 'demo_gui',
+ 'demo_run',
+ 'demo_viewCode',
+ 'denom',
+ 'derivat',
+ 'derivative',
+ 'des2ss',
+ 'des2tf',
+ 'detectmsifort64tools',
+ 'detectmsvc64tools',
+ 'determ',
+ 'detr',
+ 'detrend',
+ 'devtools_run_builder',
+ 'dhnorm',
+ 'diff',
+ 'diophant',
+ 'dir',
+ 'dirname',
+ 'dispfiles',
+ 'dllinfo',
+ 'dscr',
+ 'dsimul',
+ 'dt_ility',
+ 'dtsi',
+ 'edit',
+ 'edit_error',
+ 'editor',
+ 'eigenmarkov',
+ 'eigs',
+ 'ell1mag',
+ 'enlarge_shape',
+ 'entropy',
+ 'eomday',
+ 'epred',
+ 'eqfir',
+ 'eqiir',
+ 'equil',
+ 'equil1',
+ 'erfinv',
+ 'etime',
+ 'eval',
+ 'evans',
+ 'evstr',
+ 'example_run',
+ 'expression2code',
+ 'extract_help_examples',
+ 'factor',
+ 'factorial',
+ 'factors',
+ 'faurre',
+ 'ffilt',
+ 'fft2',
+ 'fftshift',
+ 'fieldnames',
+ 'filt_sinc',
+ 'filter',
+ 'findABCD',
+ 'findAC',
+ 'findBDK',
+ 'findR',
+ 'find_freq',
+ 'find_links',
+ 'find_scicos_version',
+ 'findm',
+ 'findmsifortcompiler',
+ 'findmsvccompiler',
+ 'findx0BD',
+ 'firstnonsingleton',
+ 'fix',
+ 'fixedpointgcd',
+ 'flipdim',
+ 'flts',
+ 'fminsearch',
+ 'formatBlackTip',
+ 'formatBodeMagTip',
+ 'formatBodePhaseTip',
+ 'formatGainplotTip',
+ 'formatHallModuleTip',
+ 'formatHallPhaseTip',
+ 'formatNicholsGainTip',
+ 'formatNicholsPhaseTip',
+ 'formatNyquistTip',
+ 'formatPhaseplotTip',
+ 'formatSgridDampingTip',
+ 'formatSgridFreqTip',
+ 'formatZgridDampingTip',
+ 'formatZgridFreqTip',
+ 'format_txt',
+ 'fourplan',
+ 'frep2tf',
+ 'freson',
+ 'frfit',
+ 'frmag',
+ 'fseek_origin',
+ 'fsfirlin',
+ 'fspec',
+ 'fspecg',
+ 'fstabst',
+ 'ftest',
+ 'ftuneq',
+ 'fullfile',
+ 'fullrf',
+ 'fullrfk',
+ 'fun2string',
+ 'g_margin',
+ 'gainplot',
+ 'gamitg',
+ 'gcare',
+ 'gcd',
+ 'gencompilationflags_unix',
+ 'generateBlockImage',
+ 'generateBlockImages',
+ 'generic_i_ce',
+ 'generic_i_h',
+ 'generic_i_hm',
+ 'generic_i_s',
+ 'generic_i_st',
+ 'genlib',
+ 'genmarkov',
+ 'geomean',
+ 'getDiagramVersion',
+ 'getModelicaPath',
+ 'getPreferencesValue',
+ 'get_file_path',
+ 'get_function_path',
+ 'get_param',
+ 'get_profile',
+ 'get_scicos_version',
+ 'getd',
+ 'getscilabkeywords',
+ 'getshell',
+ 'gettklib',
+ 'gfare',
+ 'gfrancis',
+ 'givens',
+ 'glever',
+ 'gmres',
+ 'group',
+ 'gschur',
+ 'gspec',
+ 'gtild',
+ 'h2norm',
+ 'h_cl',
+ 'h_inf',
+ 'h_inf_st',
+ 'h_norm',
+ 'hallchart',
+ 'halt',
+ 'hank',
+ 'hankelsv',
+ 'harmean',
+ 'haveacompiler',
+ 'head_comments',
+ 'help_from_sci',
+ 'help_skeleton',
+ 'hermit',
+ 'hex2dec',
+ 'hilb',
+ 'hilbert',
+ 'histc',
+ 'horner',
+ 'householder',
+ 'hrmt',
+ 'htrianr',
+ 'hypermat',
+ 'idct',
+ 'idst',
+ 'ifft',
+ 'ifftshift',
+ 'iir',
+ 'iirgroup',
+ 'iirlp',
+ 'iirmod',
+ 'ilib_build',
+ 'ilib_build_jar',
+ 'ilib_compile',
+ 'ilib_for_link',
+ 'ilib_gen_Make',
+ 'ilib_gen_Make_unix',
+ 'ilib_gen_cleaner',
+ 'ilib_gen_gateway',
+ 'ilib_gen_loader',
+ 'ilib_include_flag',
+ 'ilib_mex_build',
+ 'im_inv',
+ 'importScicosDiagram',
+ 'importScicosPal',
+ 'importXcosDiagram',
+ 'imrep2ss',
+ 'ind2sub',
+ 'inistate',
+ 'init_ga_default',
+ 'init_param',
+ 'initial_scicos_tables',
+ 'input',
+ 'instruction2code',
+ 'intc',
+ 'intdec',
+ 'integrate',
+ 'interp1',
+ 'interpln',
+ 'intersect',
+ 'intl',
+ 'intsplin',
+ 'inttrap',
+ 'inv_coeff',
+ 'invr',
+ 'invrs',
+ 'invsyslin',
+ 'iqr',
+ 'isLeapYear',
+ 'is_absolute_path',
+ 'is_param',
+ 'iscell',
+ 'iscellstr',
+ 'iscolumn',
+ 'isempty',
+ 'isfield',
+ 'isinf',
+ 'ismatrix',
+ 'isnan',
+ 'isrow',
+ 'isscalar',
+ 'issparse',
+ 'issquare',
+ 'isstruct',
+ 'isvector',
+ 'jmat',
+ 'justify',
+ 'kalm',
+ 'karmarkar',
+ 'kernel',
+ 'kpure',
+ 'krac2',
+ 'kroneck',
+ 'lattn',
+ 'lattp',
+ 'launchtest',
+ 'lcf',
+ 'lcm',
+ 'lcmdiag',
+ 'leastsq',
+ 'leqe',
+ 'leqr',
+ 'lev',
+ 'levin',
+ 'lex_sort',
+ 'lft',
+ 'lin',
+ 'lin2mu',
+ 'lincos',
+ 'lindquist',
+ 'linf',
+ 'linfn',
+ 'linsolve',
+ 'linspace',
+ 'list2vec',
+ 'list_param',
+ 'listfiles',
+ 'listfunctions',
+ 'listvarinfile',
+ 'lmisolver',
+ 'lmitool',
+ 'loadXcosLibs',
+ 'loadmatfile',
+ 'loadwave',
+ 'log10',
+ 'log2',
+ 'logm',
+ 'logspace',
+ 'lqe',
+ 'lqg',
+ 'lqg2stan',
+ 'lqg_ltr',
+ 'lqr',
+ 'ls',
+ 'lyap',
+ 'm2sci_gui',
+ 'm_circle',
+ 'macglov',
+ 'macrovar',
+ 'mad',
+ 'makecell',
+ 'manedit',
+ 'mapsound',
+ 'markp2ss',
+ 'matfile2sci',
+ 'mdelete',
+ 'mean',
+ 'meanf',
+ 'median',
+ 'members',
+ 'mese',
+ 'meshgrid',
+ 'mfft',
+ 'mfile2sci',
+ 'minreal',
+ 'minss',
+ 'mkdir',
+ 'modulo',
+ 'moment',
+ 'mrfit',
+ 'msd',
+ 'mstr2sci',
+ 'mtlb',
+ 'mtlb_0',
+ 'mtlb_a',
+ 'mtlb_all',
+ 'mtlb_any',
+ 'mtlb_axes',
+ 'mtlb_axis',
+ 'mtlb_beta',
+ 'mtlb_box',
+ 'mtlb_choices',
+ 'mtlb_close',
+ 'mtlb_colordef',
+ 'mtlb_cond',
+ 'mtlb_cov',
+ 'mtlb_cumprod',
+ 'mtlb_cumsum',
+ 'mtlb_dec2hex',
+ 'mtlb_delete',
+ 'mtlb_diag',
+ 'mtlb_diff',
+ 'mtlb_dir',
+ 'mtlb_double',
+ 'mtlb_e',
+ 'mtlb_echo',
+ 'mtlb_error',
+ 'mtlb_eval',
+ 'mtlb_exist',
+ 'mtlb_eye',
+ 'mtlb_false',
+ 'mtlb_fft',
+ 'mtlb_fftshift',
+ 'mtlb_filter',
+ 'mtlb_find',
+ 'mtlb_findstr',
+ 'mtlb_fliplr',
+ 'mtlb_fopen',
+ 'mtlb_format',
+ 'mtlb_fprintf',
+ 'mtlb_fread',
+ 'mtlb_fscanf',
+ 'mtlb_full',
+ 'mtlb_fwrite',
+ 'mtlb_get',
+ 'mtlb_grid',
+ 'mtlb_hold',
+ 'mtlb_i',
+ 'mtlb_ifft',
+ 'mtlb_image',
+ 'mtlb_imp',
+ 'mtlb_int16',
+ 'mtlb_int32',
+ 'mtlb_int8',
+ 'mtlb_is',
+ 'mtlb_isa',
+ 'mtlb_isfield',
+ 'mtlb_isletter',
+ 'mtlb_isspace',
+ 'mtlb_l',
+ 'mtlb_legendre',
+ 'mtlb_linspace',
+ 'mtlb_logic',
+ 'mtlb_logical',
+ 'mtlb_loglog',
+ 'mtlb_lower',
+ 'mtlb_max',
+ 'mtlb_mean',
+ 'mtlb_median',
+ 'mtlb_mesh',
+ 'mtlb_meshdom',
+ 'mtlb_min',
+ 'mtlb_more',
+ 'mtlb_num2str',
+ 'mtlb_ones',
+ 'mtlb_pcolor',
+ 'mtlb_plot',
+ 'mtlb_prod',
+ 'mtlb_qr',
+ 'mtlb_qz',
+ 'mtlb_rand',
+ 'mtlb_randn',
+ 'mtlb_rcond',
+ 'mtlb_realmax',
+ 'mtlb_realmin',
+ 'mtlb_s',
+ 'mtlb_semilogx',
+ 'mtlb_semilogy',
+ 'mtlb_setstr',
+ 'mtlb_size',
+ 'mtlb_sort',
+ 'mtlb_sortrows',
+ 'mtlb_sprintf',
+ 'mtlb_sscanf',
+ 'mtlb_std',
+ 'mtlb_strcmp',
+ 'mtlb_strcmpi',
+ 'mtlb_strfind',
+ 'mtlb_strrep',
+ 'mtlb_subplot',
+ 'mtlb_sum',
+ 'mtlb_t',
+ 'mtlb_toeplitz',
+ 'mtlb_tril',
+ 'mtlb_triu',
+ 'mtlb_true',
+ 'mtlb_type',
+ 'mtlb_uint16',
+ 'mtlb_uint32',
+ 'mtlb_uint8',
+ 'mtlb_upper',
+ 'mtlb_var',
+ 'mtlb_zeros',
+ 'mu2lin',
+ 'mutation_ga_binary',
+ 'mutation_ga_default',
+ 'mvcorrel',
+ 'mvvacov',
+ 'nancumsum',
+ 'nand2mean',
+ 'nanmax',
+ 'nanmean',
+ 'nanmeanf',
+ 'nanmedian',
+ 'nanmin',
+ 'nanreglin',
+ 'nanstdev',
+ 'nansum',
+ 'narsimul',
+ 'ndgrid',
+ 'ndims',
+ 'nehari',
+ 'neigh_func_csa',
+ 'neigh_func_default',
+ 'neigh_func_fsa',
+ 'neigh_func_vfsa',
+ 'neldermead_cget',
+ 'neldermead_configure',
+ 'neldermead_costf',
+ 'neldermead_defaultoutput',
+ 'neldermead_destroy',
+ 'neldermead_function',
+ 'neldermead_get',
+ 'neldermead_log',
+ 'neldermead_new',
+ 'neldermead_restart',
+ 'neldermead_search',
+ 'neldermead_updatesimp',
+ 'nextpow2',
+ 'nfreq',
+ 'nicholschart',
+ 'nlev',
+ 'nmplot_cget',
+ 'nmplot_configure',
+ 'nmplot_contour',
+ 'nmplot_destroy',
+ 'nmplot_function',
+ 'nmplot_get',
+ 'nmplot_historyplot',
+ 'nmplot_log',
+ 'nmplot_new',
+ 'nmplot_outputcmd',
+ 'nmplot_restart',
+ 'nmplot_search',
+ 'nmplot_simplexhistory',
+ 'noisegen',
+ 'nonreg_test_run',
+ 'now',
+ 'nthroot',
+ 'null',
+ 'num2cell',
+ 'numderivative',
+ 'numdiff',
+ 'numer',
+ 'nyquist',
+ 'nyquistfrequencybounds',
+ 'obs_gram',
+ 'obscont',
+ 'observer',
+ 'obsv_mat',
+ 'obsvss',
+ 'oct2dec',
+ 'odeoptions',
+ 'optim_ga',
+ 'optim_moga',
+ 'optim_nsga',
+ 'optim_nsga2',
+ 'optim_sa',
+ 'optimbase_cget',
+ 'optimbase_checkbounds',
+ 'optimbase_checkcostfun',
+ 'optimbase_checkx0',
+ 'optimbase_configure',
+ 'optimbase_destroy',
+ 'optimbase_function',
+ 'optimbase_get',
+ 'optimbase_hasbounds',
+ 'optimbase_hasconstraints',
+ 'optimbase_hasnlcons',
+ 'optimbase_histget',
+ 'optimbase_histset',
+ 'optimbase_incriter',
+ 'optimbase_isfeasible',
+ 'optimbase_isinbounds',
+ 'optimbase_isinnonlincons',
+ 'optimbase_log',
+ 'optimbase_logshutdown',
+ 'optimbase_logstartup',
+ 'optimbase_new',
+ 'optimbase_outputcmd',
+ 'optimbase_outstruct',
+ 'optimbase_proj2bnds',
+ 'optimbase_set',
+ 'optimbase_stoplog',
+ 'optimbase_terminate',
+ 'optimget',
+ 'optimplotfunccount',
+ 'optimplotfval',
+ 'optimplotx',
+ 'optimset',
+ 'optimsimplex_center',
+ 'optimsimplex_check',
+ 'optimsimplex_compsomefv',
+ 'optimsimplex_computefv',
+ 'optimsimplex_deltafv',
+ 'optimsimplex_deltafvmax',
+ 'optimsimplex_destroy',
+ 'optimsimplex_dirmat',
+ 'optimsimplex_fvmean',
+ 'optimsimplex_fvstdev',
+ 'optimsimplex_fvvariance',
+ 'optimsimplex_getall',
+ 'optimsimplex_getallfv',
+ 'optimsimplex_getallx',
+ 'optimsimplex_getfv',
+ 'optimsimplex_getn',
+ 'optimsimplex_getnbve',
+ 'optimsimplex_getve',
+ 'optimsimplex_getx',
+ 'optimsimplex_gradientfv',
+ 'optimsimplex_log',
+ 'optimsimplex_new',
+ 'optimsimplex_reflect',
+ 'optimsimplex_setall',
+ 'optimsimplex_setallfv',
+ 'optimsimplex_setallx',
+ 'optimsimplex_setfv',
+ 'optimsimplex_setn',
+ 'optimsimplex_setnbve',
+ 'optimsimplex_setve',
+ 'optimsimplex_setx',
+ 'optimsimplex_shrink',
+ 'optimsimplex_size',
+ 'optimsimplex_sort',
+ 'optimsimplex_xbar',
+ 'orth',
+ 'output_ga_default',
+ 'output_moga_default',
+ 'output_nsga2_default',
+ 'output_nsga_default',
+ 'p_margin',
+ 'pack',
+ 'pareto_filter',
+ 'parrot',
+ 'pbig',
+ 'pca',
+ 'pcg',
+ 'pdiv',
+ 'pen2ea',
+ 'pencan',
+ 'pencost',
+ 'penlaur',
+ 'perctl',
+ 'perl',
+ 'perms',
+ 'permute',
+ 'pertrans',
+ 'pfactors',
+ 'pfss',
+ 'phasemag',
+ 'phaseplot',
+ 'phc',
+ 'pinv',
+ 'playsnd',
+ 'plotprofile',
+ 'plzr',
+ 'pmodulo',
+ 'pol2des',
+ 'pol2str',
+ 'polar',
+ 'polfact',
+ 'prbs_a',
+ 'prettyprint',
+ 'primes',
+ 'princomp',
+ 'profile',
+ 'proj',
+ 'projsl',
+ 'projspec',
+ 'psmall',
+ 'pspect',
+ 'qmr',
+ 'qpsolve',
+ 'quart',
+ 'quaskro',
+ 'rafiter',
+ 'randpencil',
+ 'range',
+ 'rank',
+ 'readxls',
+ 'recompilefunction',
+ 'recons',
+ 'reglin',
+ 'regress',
+ 'remezb',
+ 'remove_param',
+ 'remove_profiling',
+ 'repfreq',
+ 'replace_Ix_by_Fx',
+ 'repmat',
+ 'reset_profiling',
+ 'resize_matrix',
+ 'returntoscilab',
+ 'rhs2code',
+ 'ric_desc',
+ 'riccati',
+ 'rmdir',
+ 'routh_t',
+ 'rowcomp',
+ 'rowcompr',
+ 'rowinout',
+ 'rowregul',
+ 'rowshuff',
+ 'rref',
+ 'sample',
+ 'samplef',
+ 'samwr',
+ 'savematfile',
+ 'savewave',
+ 'scanf',
+ 'sci2exp',
+ 'sciGUI_init',
+ 'sci_sparse',
+ 'scicos_getvalue',
+ 'scicos_simulate',
+ 'scicos_workspace_init',
+ 'scisptdemo',
+ 'scitest',
+ 'sdiff',
+ 'sec',
+ 'secd',
+ 'sech',
+ 'selection_ga_elitist',
+ 'selection_ga_random',
+ 'sensi',
+ 'setPreferencesValue',
+ 'set_param',
+ 'setdiff',
+ 'sgrid',
+ 'show_margins',
+ 'show_pca',
+ 'showprofile',
+ 'signm',
+ 'sinc',
+ 'sincd',
+ 'sind',
+ 'sinh',
+ 'sinhm',
+ 'sinm',
+ 'sm2des',
+ 'sm2ss',
+ 'smga',
+ 'smooth',
+ 'solve',
+ 'sound',
+ 'soundsec',
+ 'sp2adj',
+ 'spaninter',
+ 'spanplus',
+ 'spantwo',
+ 'specfact',
+ 'speye',
+ 'sprand',
+ 'spzeros',
+ 'sqroot',
+ 'sqrtm',
+ 'squarewave',
+ 'squeeze',
+ 'srfaur',
+ 'srkf',
+ 'ss2des',
+ 'ss2ss',
+ 'ss2tf',
+ 'sskf',
+ 'ssprint',
+ 'ssrand',
+ 'st_deviation',
+ 'st_i_generic',
+ 'st_ility',
+ 'stabil',
+ 'statgain',
+ 'stdev',
+ 'stdevf',
+ 'steadycos',
+ 'strange',
+ 'strcmpi',
+ 'struct',
+ 'sub2ind',
+ 'sva',
+ 'svplot',
+ 'sylm',
+ 'sylv',
+ 'sysconv',
+ 'sysdiag',
+ 'sysfact',
+ 'syslin',
+ 'syssize',
+ 'system',
+ 'systmat',
+ 'tabul',
+ 'tand',
+ 'tanh',
+ 'tanhm',
+ 'tanm',
+ 'tbx_build_blocks',
+ 'tbx_build_cleaner',
+ 'tbx_build_gateway',
+ 'tbx_build_gateway_clean',
+ 'tbx_build_gateway_loader',
+ 'tbx_build_help',
+ 'tbx_build_help_loader',
+ 'tbx_build_loader',
+ 'tbx_build_localization',
+ 'tbx_build_macros',
+ 'tbx_build_pal_loader',
+ 'tbx_build_src',
+ 'tbx_builder',
+ 'tbx_builder_gateway',
+ 'tbx_builder_gateway_lang',
+ 'tbx_builder_help',
+ 'tbx_builder_help_lang',
+ 'tbx_builder_macros',
+ 'tbx_builder_src',
+ 'tbx_builder_src_lang',
+ 'tbx_generate_pofile',
+ 'temp_law_csa',
+ 'temp_law_default',
+ 'temp_law_fsa',
+ 'temp_law_huang',
+ 'temp_law_vfsa',
+ 'test_clean',
+ 'test_on_columns',
+ 'test_run',
+ 'test_run_level',
+ 'testexamples',
+ 'tf2des',
+ 'tf2ss',
+ 'thrownan',
+ 'tic',
+ 'time_id',
+ 'toc',
+ 'toeplitz',
+ 'tokenpos',
+ 'toolboxes',
+ 'trace',
+ 'trans',
+ 'translatepaths',
+ 'tree2code',
+ 'trfmod',
+ 'trianfml',
+ 'trimmean',
+ 'trisolve',
+ 'trzeros',
+ 'typeof',
+ 'ui_observer',
+ 'union',
+ 'unique',
+ 'unit_test_run',
+ 'unix_g',
+ 'unix_s',
+ 'unix_w',
+ 'unix_x',
+ 'unobs',
+ 'unpack',
+ 'unwrap',
+ 'variance',
+ 'variancef',
+ 'vec2list',
+ 'vectorfind',
+ 'ver',
+ 'warnobsolete',
+ 'wavread',
+ 'wavwrite',
+ 'wcenter',
+ 'weekday',
+ 'wfir',
+ 'wfir_gui',
+ 'whereami',
+ 'who_user',
+ 'whos',
+ 'wiener',
+ 'wigner',
+ 'window',
+ 'winlist',
+ 'with_javasci',
+ 'with_macros_source',
+ 'with_modelica_compiler',
+ 'with_tk',
+ 'xcorr',
+ 'xcosBlockEval',
+ 'xcosBlockInterface',
+ 'xcosCodeGeneration',
+ 'xcosConfigureModelica',
+ 'xcosPal',
+ 'xcosPalAdd',
+ 'xcosPalAddBlock',
+ 'xcosPalExport',
+ 'xcosPalGenerateAllIcons',
+ 'xcosShowBlockWarning',
+ 'xcosValidateBlockSet',
+ 'xcosValidateCompareBlock',
+ 'xcos_compile',
+ 'xcos_debug_gui',
+ 'xcos_run',
+ 'xcos_simulate',
+ 'xcov',
+ 'xmltochm',
+ 'xmltoformat',
+ 'xmltohtml',
+ 'xmltojar',
+ 'xmltopdf',
+ 'xmltops',
+ 'xmltoweb',
+ 'yulewalk',
+ 'zeropen',
+ 'zgrid',
+ 'zpbutt',
+ 'zpch1',
+ 'zpch2',
+ 'zpell',
+)
+
+variables_kw = (
+ '$',
+ '%F',
+ '%T',
+ '%e',
+ '%eps',
+ '%f',
+ '%fftw',
+ '%gui',
+ '%i',
+ '%inf',
+ '%io',
+ '%modalWarning',
+ '%nan',
+ '%pi',
+ '%s',
+ '%t',
+ '%tk',
+ '%toolboxes',
+ '%toolboxes_dir',
+ '%z',
+ 'PWD',
+ 'SCI',
+ 'SCIHOME',
+ 'TMPDIR',
+ 'arnoldilib',
+ 'assertlib',
+ 'atomslib',
+ 'cacsdlib',
+ 'compatibility_functilib',
+ 'corelib',
+ 'data_structureslib',
+ 'demo_toolslib',
+ 'development_toolslib',
+ 'differential_equationlib',
+ 'dynamic_linklib',
+ 'elementary_functionslib',
+ 'enull',
+ 'evoid',
+ 'external_objectslib',
+ 'fd',
+ 'fileiolib',
+ 'functionslib',
+ 'genetic_algorithmslib',
+ 'helptoolslib',
+ 'home',
+ 'integerlib',
+ 'interpolationlib',
+ 'iolib',
+ 'jnull',
+ 'jvoid',
+ 'linear_algebralib',
+ 'm2scilib',
+ 'matiolib',
+ 'modules_managerlib',
+ 'neldermeadlib',
+ 'optimbaselib',
+ 'optimizationlib',
+ 'optimsimplexlib',
+ 'output_streamlib',
+ 'overloadinglib',
+ 'parameterslib',
+ 'polynomialslib',
+ 'preferenceslib',
+ 'randliblib',
+ 'scicos_autolib',
+ 'scicos_utilslib',
+ 'scinoteslib',
+ 'signal_processinglib',
+ 'simulated_annealinglib',
+ 'soundlib',
+ 'sparselib',
+ 'special_functionslib',
+ 'spreadsheetlib',
+ 'statisticslib',
+ 'stringlib',
+ 'tclscilib',
+ 'timelib',
+ 'umfpacklib',
+ 'xcoslib',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import subprocess
+ from pygments.util import format_lines, duplicates_removed
+
+ mapping = {'variables': 'builtin'}
+
+ def extract_completion(var_type):
+ s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output = s.communicate('''\
+fd = mopen("/dev/stderr", "wt");
+mputl(strcat(completion("", "%s"), "||"), fd);
+mclose(fd)\n''' % var_type)
+ if '||' not in output[1]:
+ raise Exception(output[0])
+ # Invalid DISPLAY causes this to be output:
+ text = output[1].strip()
+ if text.startswith('Error: unable to open display \n'):
+ text = text[len('Error: unable to open display \n'):]
+ return text.split('||')
+
+ new_data = {}
+ seen = set() # only keep first type for a given word
+ for t in ('functions', 'commands', 'macros', 'variables'):
+ new_data[t] = duplicates_removed(extract_completion(t), seen)
+ seen.update(set(new_data[t]))
+
+
+ with open(__file__) as f:
+ content = f.read()
+
+ header = content[:content.find('# Autogenerated')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ with open(__file__, 'w') as f:
+ f.write(header)
+ f.write('# Autogenerated\n\n')
for k, v in sorted(new_data.items()):
- f.write(format_lines(k + '_kw', v) + '\n\n')
- f.write(footer)
+ f.write(format_lines(k + '_kw', v) + '\n\n')
+ f.write(footer)
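(Context, not part of the diff: the tuples above — commands_kw, functions_kw, macros_kw, variables_kw — are data consumed by Pygments' Scilab lexer, which turns them into word-boundary regexes via pygments.lexer.words(). The snippet below is a minimal illustrative sketch of that pattern, not the actual ScilabLexer definition; the class name and token choices are assumptions made for the example.)

    # Illustrative only: shows how the autogenerated keyword tuples are
    # typically wired into a RegexLexer with words(); the real rules live
    # in pygments/lexers/matlab.py and differ in detail.
    from pygments.lexer import RegexLexer, words
    from pygments.token import Name, Text
    from pygments.lexers import _scilab_builtins


    class TinyScilabKeywordLexer(RegexLexer):
        """Hypothetical lexer that only highlights the autogenerated builtins."""
        name = 'TinyScilabKeywords'
        tokens = {
            'root': [
                # commands, functions and macros become builtin-name tokens;
                # words() escapes each entry, so names like '%pi' are safe
                (words(_scilab_builtins.functions_kw
                       + _scilab_builtins.commands_kw
                       + _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
                # predefined variables such as SCIHOME or %eps
                (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }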
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
index 2977a2c04f..956bc7d1be 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
@@ -1,1160 +1,1160 @@
-"""
- pygments.lexers._sourcemod_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of SourceMod functions.
- It is able to re-generate itself.
-
- Do not edit the FUNCTIONS list by hand.
-
+"""
+ pygments.lexers._sourcemod_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names of SourceMod functions.
+ It is able to re-generate itself.
+
+ Do not edit the FUNCTIONS list by hand.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-FUNCTIONS = (
- 'OnEntityCreated',
- 'OnEntityDestroyed',
- 'OnGetGameDescription',
- 'OnLevelInit',
- 'SDKHook',
- 'SDKHookEx',
- 'SDKUnhook',
- 'SDKHooks_TakeDamage',
- 'SDKHooks_DropWeapon',
- 'TopMenuHandler',
- 'CreateTopMenu',
- 'LoadTopMenuConfig',
- 'AddToTopMenu',
- 'GetTopMenuInfoString',
- 'GetTopMenuObjName',
- 'RemoveFromTopMenu',
- 'DisplayTopMenu',
- 'DisplayTopMenuCategory',
- 'FindTopMenuCategory',
- 'SetTopMenuTitleCaching',
- 'OnAdminMenuCreated',
- 'OnAdminMenuReady',
- 'GetAdminTopMenu',
- 'AddTargetsToMenu',
- 'AddTargetsToMenu2',
- 'RedisplayAdminMenu',
- 'TEHook',
- 'AddTempEntHook',
- 'RemoveTempEntHook',
- 'TE_Start',
- 'TE_IsValidProp',
- 'TE_WriteNum',
- 'TE_ReadNum',
- 'TE_WriteFloat',
- 'TE_ReadFloat',
- 'TE_WriteVector',
- 'TE_ReadVector',
- 'TE_WriteAngles',
- 'TE_WriteFloatArray',
- 'TE_Send',
- 'TE_WriteEncodedEnt',
- 'TE_SendToAll',
- 'TE_SendToClient',
- 'CreateKeyValues',
- 'KvSetString',
- 'KvSetNum',
- 'KvSetUInt64',
- 'KvSetFloat',
- 'KvSetColor',
- 'KvSetVector',
- 'KvGetString',
- 'KvGetNum',
- 'KvGetFloat',
- 'KvGetColor',
- 'KvGetUInt64',
- 'KvGetVector',
- 'KvJumpToKey',
- 'KvJumpToKeySymbol',
- 'KvGotoFirstSubKey',
- 'KvGotoNextKey',
- 'KvSavePosition',
- 'KvDeleteKey',
- 'KvDeleteThis',
- 'KvGoBack',
- 'KvRewind',
- 'KvGetSectionName',
- 'KvSetSectionName',
- 'KvGetDataType',
- 'KeyValuesToFile',
- 'FileToKeyValues',
- 'StringToKeyValues',
- 'KvSetEscapeSequences',
- 'KvNodesInStack',
- 'KvCopySubkeys',
- 'KvFindKeyById',
- 'KvGetNameSymbol',
- 'KvGetSectionSymbol',
- 'TE_SetupSparks',
- 'TE_SetupSmoke',
- 'TE_SetupDust',
- 'TE_SetupMuzzleFlash',
- 'TE_SetupMetalSparks',
- 'TE_SetupEnergySplash',
- 'TE_SetupArmorRicochet',
- 'TE_SetupGlowSprite',
- 'TE_SetupExplosion',
- 'TE_SetupBloodSprite',
- 'TE_SetupBeamRingPoint',
- 'TE_SetupBeamPoints',
- 'TE_SetupBeamLaser',
- 'TE_SetupBeamRing',
- 'TE_SetupBeamFollow',
- 'HookEvent',
- 'HookEventEx',
- 'UnhookEvent',
- 'CreateEvent',
- 'FireEvent',
- 'CancelCreatedEvent',
- 'GetEventBool',
- 'SetEventBool',
- 'GetEventInt',
- 'SetEventInt',
- 'GetEventFloat',
- 'SetEventFloat',
- 'GetEventString',
- 'SetEventString',
- 'GetEventName',
- 'SetEventBroadcast',
- 'GetUserMessageType',
- 'GetUserMessageId',
- 'GetUserMessageName',
- 'StartMessage',
- 'StartMessageEx',
- 'EndMessage',
- 'MsgHook',
- 'MsgPostHook',
- 'HookUserMessage',
- 'UnhookUserMessage',
- 'StartMessageAll',
- 'StartMessageOne',
- 'InactivateClient',
- 'ReconnectClient',
- 'GetMaxEntities',
- 'GetEntityCount',
- 'IsValidEntity',
- 'IsValidEdict',
- 'IsEntNetworkable',
- 'CreateEdict',
- 'RemoveEdict',
- 'GetEdictFlags',
- 'SetEdictFlags',
- 'GetEdictClassname',
- 'GetEntityNetClass',
- 'ChangeEdictState',
- 'GetEntData',
- 'SetEntData',
- 'GetEntDataFloat',
- 'SetEntDataFloat',
- 'GetEntDataEnt2',
- 'SetEntDataEnt2',
- 'GetEntDataVector',
- 'SetEntDataVector',
- 'GetEntDataString',
- 'SetEntDataString',
- 'FindSendPropOffs',
- 'FindSendPropInfo',
- 'FindDataMapOffs',
- 'FindDataMapInfo',
- 'GetEntSendPropOffs',
- 'GetEntProp',
- 'SetEntProp',
- 'GetEntPropFloat',
- 'SetEntPropFloat',
- 'GetEntPropEnt',
- 'SetEntPropEnt',
- 'GetEntPropVector',
- 'SetEntPropVector',
- 'GetEntPropString',
- 'SetEntPropString',
- 'GetEntPropArraySize',
- 'GetEntDataArray',
- 'SetEntDataArray',
- 'GetEntityAddress',
- 'GetEntityClassname',
- 'float',
- 'FloatMul',
- 'FloatDiv',
- 'FloatAdd',
- 'FloatSub',
- 'FloatFraction',
- 'RoundToZero',
- 'RoundToCeil',
- 'RoundToFloor',
- 'RoundToNearest',
- 'FloatCompare',
- 'SquareRoot',
- 'Pow',
- 'Exponential',
- 'Logarithm',
- 'Sine',
- 'Cosine',
- 'Tangent',
- 'FloatAbs',
- 'ArcTangent',
- 'ArcCosine',
- 'ArcSine',
- 'ArcTangent2',
- 'RoundFloat',
- 'operator%',
- 'DegToRad',
- 'RadToDeg',
- 'GetURandomInt',
- 'GetURandomFloat',
- 'SetURandomSeed',
- 'SetURandomSeedSimple',
- 'RemovePlayerItem',
- 'GivePlayerItem',
- 'GetPlayerWeaponSlot',
- 'IgniteEntity',
- 'ExtinguishEntity',
- 'TeleportEntity',
- 'ForcePlayerSuicide',
- 'SlapPlayer',
- 'FindEntityByClassname',
- 'GetClientEyeAngles',
- 'CreateEntityByName',
- 'DispatchSpawn',
- 'DispatchKeyValue',
- 'DispatchKeyValueFloat',
- 'DispatchKeyValueVector',
- 'GetClientAimTarget',
- 'GetTeamCount',
- 'GetTeamName',
- 'GetTeamScore',
- 'SetTeamScore',
- 'GetTeamClientCount',
- 'SetEntityModel',
- 'GetPlayerDecalFile',
- 'GetPlayerJingleFile',
- 'GetServerNetStats',
- 'EquipPlayerWeapon',
- 'ActivateEntity',
- 'SetClientInfo',
- 'GivePlayerAmmo',
- 'SetClientListeningFlags',
- 'GetClientListeningFlags',
- 'SetListenOverride',
- 'GetListenOverride',
- 'IsClientMuted',
- 'TR_GetPointContents',
- 'TR_GetPointContentsEnt',
- 'TR_TraceRay',
- 'TR_TraceHull',
- 'TR_TraceRayFilter',
- 'TR_TraceHullFilter',
- 'TR_TraceRayEx',
- 'TR_TraceHullEx',
- 'TR_TraceRayFilterEx',
- 'TR_TraceHullFilterEx',
- 'TR_GetFraction',
- 'TR_GetEndPosition',
- 'TR_GetEntityIndex',
- 'TR_DidHit',
- 'TR_GetHitGroup',
- 'TR_GetPlaneNormal',
- 'TR_PointOutsideWorld',
- 'SortIntegers',
- 'SortFloats',
- 'SortStrings',
- 'SortFunc1D',
- 'SortCustom1D',
- 'SortCustom2D',
- 'SortADTArray',
- 'SortFuncADTArray',
- 'SortADTArrayCustom',
- 'CompileRegex',
- 'MatchRegex',
- 'GetRegexSubString',
- 'SimpleRegexMatch',
- 'TF2_GetPlayerClass',
- 'TF2_SetPlayerClass',
- 'TF2_RemoveWeaponSlot',
- 'TF2_RemoveAllWeapons',
- 'TF2_IsPlayerInCondition',
- 'TF2_GetObjectType',
- 'TF2_GetObjectMode',
- 'NominateMap',
- 'RemoveNominationByMap',
- 'RemoveNominationByOwner',
- 'GetExcludeMapList',
- 'GetNominatedMapList',
- 'CanMapChooserStartVote',
- 'InitiateMapChooserVote',
- 'HasEndOfMapVoteFinished',
- 'EndOfMapVoteEnabled',
- 'OnNominationRemoved',
- 'OnMapVoteStarted',
- 'CreateTimer',
- 'KillTimer',
- 'TriggerTimer',
- 'GetTickedTime',
- 'GetMapTimeLeft',
- 'GetMapTimeLimit',
- 'ExtendMapTimeLimit',
- 'GetTickInterval',
- 'OnMapTimeLeftChanged',
- 'IsServerProcessing',
- 'CreateDataTimer',
- 'ByteCountToCells',
- 'CreateArray',
- 'ClearArray',
- 'CloneArray',
- 'ResizeArray',
- 'GetArraySize',
- 'PushArrayCell',
- 'PushArrayString',
- 'PushArrayArray',
- 'GetArrayCell',
- 'GetArrayString',
- 'GetArrayArray',
- 'SetArrayCell',
- 'SetArrayString',
- 'SetArrayArray',
- 'ShiftArrayUp',
- 'RemoveFromArray',
- 'SwapArrayItems',
- 'FindStringInArray',
- 'FindValueInArray',
- 'ProcessTargetString',
- 'ReplyToTargetError',
- 'MultiTargetFilter',
- 'AddMultiTargetFilter',
- 'RemoveMultiTargetFilter',
- 'OnBanClient',
- 'OnBanIdentity',
- 'OnRemoveBan',
- 'BanClient',
- 'BanIdentity',
- 'RemoveBan',
- 'CreateTrie',
- 'SetTrieValue',
- 'SetTrieArray',
- 'SetTrieString',
- 'GetTrieValue',
- 'GetTrieArray',
- 'GetTrieString',
- 'RemoveFromTrie',
- 'ClearTrie',
- 'GetTrieSize',
- 'GetFunctionByName',
- 'CreateGlobalForward',
- 'CreateForward',
- 'GetForwardFunctionCount',
- 'AddToForward',
- 'RemoveFromForward',
- 'RemoveAllFromForward',
- 'Call_StartForward',
- 'Call_StartFunction',
- 'Call_PushCell',
- 'Call_PushCellRef',
- 'Call_PushFloat',
- 'Call_PushFloatRef',
- 'Call_PushArray',
- 'Call_PushArrayEx',
- 'Call_PushString',
- 'Call_PushStringEx',
- 'Call_Finish',
- 'Call_Cancel',
- 'NativeCall',
- 'CreateNative',
- 'ThrowNativeError',
- 'GetNativeStringLength',
- 'GetNativeString',
- 'SetNativeString',
- 'GetNativeCell',
- 'GetNativeCellRef',
- 'SetNativeCellRef',
- 'GetNativeArray',
- 'SetNativeArray',
- 'FormatNativeString',
- 'RequestFrameCallback',
- 'RequestFrame',
- 'OnRebuildAdminCache',
- 'DumpAdminCache',
- 'AddCommandOverride',
- 'GetCommandOverride',
- 'UnsetCommandOverride',
- 'CreateAdmGroup',
- 'FindAdmGroup',
- 'SetAdmGroupAddFlag',
- 'GetAdmGroupAddFlag',
- 'GetAdmGroupAddFlags',
- 'SetAdmGroupImmuneFrom',
- 'GetAdmGroupImmuneCount',
- 'GetAdmGroupImmuneFrom',
- 'AddAdmGroupCmdOverride',
- 'GetAdmGroupCmdOverride',
- 'RegisterAuthIdentType',
- 'CreateAdmin',
- 'GetAdminUsername',
- 'BindAdminIdentity',
- 'SetAdminFlag',
- 'GetAdminFlag',
- 'GetAdminFlags',
- 'AdminInheritGroup',
- 'GetAdminGroupCount',
- 'GetAdminGroup',
- 'SetAdminPassword',
- 'GetAdminPassword',
- 'FindAdminByIdentity',
- 'RemoveAdmin',
- 'FlagBitsToBitArray',
- 'FlagBitArrayToBits',
- 'FlagArrayToBits',
- 'FlagBitsToArray',
- 'FindFlagByName',
- 'FindFlagByChar',
- 'FindFlagChar',
- 'ReadFlagString',
- 'CanAdminTarget',
- 'CreateAuthMethod',
- 'SetAdmGroupImmunityLevel',
- 'GetAdmGroupImmunityLevel',
- 'SetAdminImmunityLevel',
- 'GetAdminImmunityLevel',
- 'FlagToBit',
- 'BitToFlag',
- 'ServerCommand',
- 'ServerCommandEx',
- 'InsertServerCommand',
- 'ServerExecute',
- 'ClientCommand',
- 'FakeClientCommand',
- 'FakeClientCommandEx',
- 'PrintToServer',
- 'PrintToConsole',
- 'ReplyToCommand',
- 'GetCmdReplySource',
- 'SetCmdReplySource',
- 'IsChatTrigger',
- 'ShowActivity2',
- 'ShowActivity',
- 'ShowActivityEx',
- 'FormatActivitySource',
- 'SrvCmd',
- 'RegServerCmd',
- 'ConCmd',
- 'RegConsoleCmd',
- 'RegAdminCmd',
- 'GetCmdArgs',
- 'GetCmdArg',
- 'GetCmdArgString',
- 'CreateConVar',
- 'FindConVar',
- 'ConVarChanged',
- 'HookConVarChange',
- 'UnhookConVarChange',
- 'GetConVarBool',
- 'SetConVarBool',
- 'GetConVarInt',
- 'SetConVarInt',
- 'GetConVarFloat',
- 'SetConVarFloat',
- 'GetConVarString',
- 'SetConVarString',
- 'ResetConVar',
- 'GetConVarDefault',
- 'GetConVarFlags',
- 'SetConVarFlags',
- 'GetConVarBounds',
- 'SetConVarBounds',
- 'GetConVarName',
- 'QueryClientConVar',
- 'GetCommandIterator',
- 'ReadCommandIterator',
- 'CheckCommandAccess',
- 'CheckAccess',
- 'IsValidConVarChar',
- 'GetCommandFlags',
- 'SetCommandFlags',
- 'FindFirstConCommand',
- 'FindNextConCommand',
- 'SendConVarValue',
- 'AddServerTag',
- 'RemoveServerTag',
- 'CommandListener',
- 'AddCommandListener',
- 'RemoveCommandListener',
- 'CommandExists',
- 'OnClientSayCommand',
- 'OnClientSayCommand_Post',
- 'TF2_IgnitePlayer',
- 'TF2_RespawnPlayer',
- 'TF2_RegeneratePlayer',
- 'TF2_AddCondition',
- 'TF2_RemoveCondition',
- 'TF2_SetPlayerPowerPlay',
- 'TF2_DisguisePlayer',
- 'TF2_RemovePlayerDisguise',
- 'TF2_StunPlayer',
- 'TF2_MakeBleed',
- 'TF2_GetClass',
- 'TF2_CalcIsAttackCritical',
- 'TF2_OnIsHolidayActive',
- 'TF2_IsHolidayActive',
- 'TF2_IsPlayerInDuel',
- 'TF2_RemoveWearable',
- 'TF2_OnConditionAdded',
- 'TF2_OnConditionRemoved',
- 'TF2_OnWaitingForPlayersStart',
- 'TF2_OnWaitingForPlayersEnd',
- 'TF2_OnPlayerTeleport',
- 'SQL_Connect',
- 'SQL_DefConnect',
- 'SQL_ConnectCustom',
- 'SQLite_UseDatabase',
- 'SQL_CheckConfig',
- 'SQL_GetDriver',
- 'SQL_ReadDriver',
- 'SQL_GetDriverIdent',
- 'SQL_GetDriverProduct',
- 'SQL_SetCharset',
- 'SQL_GetAffectedRows',
- 'SQL_GetInsertId',
- 'SQL_GetError',
- 'SQL_EscapeString',
- 'SQL_QuoteString',
- 'SQL_FastQuery',
- 'SQL_Query',
- 'SQL_PrepareQuery',
- 'SQL_FetchMoreResults',
- 'SQL_HasResultSet',
- 'SQL_GetRowCount',
- 'SQL_GetFieldCount',
- 'SQL_FieldNumToName',
- 'SQL_FieldNameToNum',
- 'SQL_FetchRow',
- 'SQL_MoreRows',
- 'SQL_Rewind',
- 'SQL_FetchString',
- 'SQL_FetchFloat',
- 'SQL_FetchInt',
- 'SQL_IsFieldNull',
- 'SQL_FetchSize',
- 'SQL_BindParamInt',
- 'SQL_BindParamFloat',
- 'SQL_BindParamString',
- 'SQL_Execute',
- 'SQL_LockDatabase',
- 'SQL_UnlockDatabase',
- 'SQLTCallback',
- 'SQL_IsSameConnection',
- 'SQL_TConnect',
- 'SQL_TQuery',
- 'SQL_CreateTransaction',
- 'SQL_AddQuery',
- 'SQLTxnSuccess',
- 'SQLTxnFailure',
- 'SQL_ExecuteTransaction',
- 'CloseHandle',
- 'CloneHandle',
- 'MenuHandler',
- 'CreateMenu',
- 'DisplayMenu',
- 'DisplayMenuAtItem',
- 'AddMenuItem',
- 'InsertMenuItem',
- 'RemoveMenuItem',
- 'RemoveAllMenuItems',
- 'GetMenuItem',
- 'GetMenuSelectionPosition',
- 'GetMenuItemCount',
- 'SetMenuPagination',
- 'GetMenuPagination',
- 'GetMenuStyle',
- 'SetMenuTitle',
- 'GetMenuTitle',
- 'CreatePanelFromMenu',
- 'GetMenuExitButton',
- 'SetMenuExitButton',
- 'GetMenuExitBackButton',
- 'SetMenuExitBackButton',
- 'SetMenuNoVoteButton',
- 'CancelMenu',
- 'GetMenuOptionFlags',
- 'SetMenuOptionFlags',
- 'IsVoteInProgress',
- 'CancelVote',
- 'VoteMenu',
- 'VoteMenuToAll',
- 'VoteHandler',
- 'SetVoteResultCallback',
- 'CheckVoteDelay',
- 'IsClientInVotePool',
- 'RedrawClientVoteMenu',
- 'GetMenuStyleHandle',
- 'CreatePanel',
- 'CreateMenuEx',
- 'GetClientMenu',
- 'CancelClientMenu',
- 'GetMaxPageItems',
- 'GetPanelStyle',
- 'SetPanelTitle',
- 'DrawPanelItem',
- 'DrawPanelText',
- 'CanPanelDrawFlags',
- 'SetPanelKeys',
- 'SendPanelToClient',
- 'GetPanelTextRemaining',
- 'GetPanelCurrentKey',
- 'SetPanelCurrentKey',
- 'RedrawMenuItem',
- 'InternalShowMenu',
- 'GetMenuVoteInfo',
- 'IsNewVoteAllowed',
- 'PrefetchSound',
- 'EmitAmbientSound',
- 'FadeClientVolume',
- 'StopSound',
- 'EmitSound',
- 'EmitSentence',
- 'GetDistGainFromSoundLevel',
- 'AmbientSHook',
- 'NormalSHook',
- 'AddAmbientSoundHook',
- 'AddNormalSoundHook',
- 'RemoveAmbientSoundHook',
- 'RemoveNormalSoundHook',
- 'EmitSoundToClient',
- 'EmitSoundToAll',
- 'ATTN_TO_SNDLEVEL',
- 'GetGameSoundParams',
- 'EmitGameSound',
- 'EmitAmbientGameSound',
- 'EmitGameSoundToClient',
- 'EmitGameSoundToAll',
- 'PrecacheScriptSound',
- 'strlen',
- 'StrContains',
- 'strcmp',
- 'strncmp',
- 'StrEqual',
- 'strcopy',
- 'Format',
- 'FormatEx',
- 'VFormat',
- 'StringToInt',
- 'StringToIntEx',
- 'IntToString',
- 'StringToFloat',
- 'StringToFloatEx',
- 'FloatToString',
- 'BreakString',
- 'TrimString',
- 'SplitString',
- 'ReplaceString',
- 'ReplaceStringEx',
- 'GetCharBytes',
- 'IsCharAlpha',
- 'IsCharNumeric',
- 'IsCharSpace',
- 'IsCharMB',
- 'IsCharUpper',
- 'IsCharLower',
- 'StripQuotes',
- 'CharToUpper',
- 'CharToLower',
- 'FindCharInString',
- 'StrCat',
- 'ExplodeString',
- 'ImplodeStrings',
- 'GetVectorLength',
- 'GetVectorDistance',
- 'GetVectorDotProduct',
- 'GetVectorCrossProduct',
- 'NormalizeVector',
- 'GetAngleVectors',
- 'GetVectorAngles',
- 'GetVectorVectors',
- 'AddVectors',
- 'SubtractVectors',
- 'ScaleVector',
- 'NegateVector',
- 'MakeVectorFromPoints',
- 'BaseComm_IsClientGagged',
- 'BaseComm_IsClientMuted',
- 'BaseComm_SetClientGag',
- 'BaseComm_SetClientMute',
- 'FormatUserLogText',
- 'FindPluginByFile',
- 'FindTarget',
- 'AcceptEntityInput',
- 'SetVariantBool',
- 'SetVariantString',
- 'SetVariantInt',
- 'SetVariantFloat',
- 'SetVariantVector3D',
- 'SetVariantPosVector3D',
- 'SetVariantColor',
- 'SetVariantEntity',
- 'GameRules_GetProp',
- 'GameRules_SetProp',
- 'GameRules_GetPropFloat',
- 'GameRules_SetPropFloat',
- 'GameRules_GetPropEnt',
- 'GameRules_SetPropEnt',
- 'GameRules_GetPropVector',
- 'GameRules_SetPropVector',
- 'GameRules_GetPropString',
- 'GameRules_SetPropString',
- 'GameRules_GetRoundState',
- 'OnClientConnect',
- 'OnClientConnected',
- 'OnClientPutInServer',
- 'OnClientDisconnect',
- 'OnClientDisconnect_Post',
- 'OnClientCommand',
- 'OnClientSettingsChanged',
- 'OnClientAuthorized',
- 'OnClientPreAdminCheck',
- 'OnClientPostAdminFilter',
- 'OnClientPostAdminCheck',
- 'GetMaxClients',
- 'GetMaxHumanPlayers',
- 'GetClientCount',
- 'GetClientName',
- 'GetClientIP',
- 'GetClientAuthString',
- 'GetClientAuthId',
- 'GetSteamAccountID',
- 'GetClientUserId',
- 'IsClientConnected',
- 'IsClientInGame',
- 'IsClientInKickQueue',
- 'IsClientAuthorized',
- 'IsFakeClient',
- 'IsClientSourceTV',
- 'IsClientReplay',
- 'IsClientObserver',
- 'IsPlayerAlive',
- 'GetClientInfo',
- 'GetClientTeam',
- 'SetUserAdmin',
- 'GetUserAdmin',
- 'AddUserFlags',
- 'RemoveUserFlags',
- 'SetUserFlagBits',
- 'GetUserFlagBits',
- 'CanUserTarget',
- 'RunAdminCacheChecks',
- 'NotifyPostAdminCheck',
- 'CreateFakeClient',
- 'SetFakeClientConVar',
- 'GetClientHealth',
- 'GetClientModel',
- 'GetClientWeapon',
- 'GetClientMaxs',
- 'GetClientMins',
- 'GetClientAbsAngles',
- 'GetClientAbsOrigin',
- 'GetClientArmor',
- 'GetClientDeaths',
- 'GetClientFrags',
- 'GetClientDataRate',
- 'IsClientTimingOut',
- 'GetClientTime',
- 'GetClientLatency',
- 'GetClientAvgLatency',
- 'GetClientAvgLoss',
- 'GetClientAvgChoke',
- 'GetClientAvgData',
- 'GetClientAvgPackets',
- 'GetClientOfUserId',
- 'KickClient',
- 'KickClientEx',
- 'ChangeClientTeam',
- 'GetClientSerial',
- 'GetClientFromSerial',
- 'FindStringTable',
- 'GetNumStringTables',
- 'GetStringTableNumStrings',
- 'GetStringTableMaxStrings',
- 'GetStringTableName',
- 'FindStringIndex',
- 'ReadStringTable',
- 'GetStringTableDataLength',
- 'GetStringTableData',
- 'SetStringTableData',
- 'AddToStringTable',
- 'LockStringTables',
- 'AddFileToDownloadsTable',
- 'GetEntityFlags',
- 'SetEntityFlags',
- 'GetEntityMoveType',
- 'SetEntityMoveType',
- 'GetEntityRenderMode',
- 'SetEntityRenderMode',
- 'GetEntityRenderFx',
- 'SetEntityRenderFx',
- 'SetEntityRenderColor',
- 'GetEntityGravity',
- 'SetEntityGravity',
- 'SetEntityHealth',
- 'GetClientButtons',
- 'EntityOutput',
- 'HookEntityOutput',
- 'UnhookEntityOutput',
- 'HookSingleEntityOutput',
- 'UnhookSingleEntityOutput',
- 'SMC_CreateParser',
- 'SMC_ParseFile',
- 'SMC_GetErrorString',
- 'SMC_ParseStart',
- 'SMC_SetParseStart',
- 'SMC_ParseEnd',
- 'SMC_SetParseEnd',
- 'SMC_NewSection',
- 'SMC_KeyValue',
- 'SMC_EndSection',
- 'SMC_SetReaders',
- 'SMC_RawLine',
- 'SMC_SetRawLine',
- 'BfWriteBool',
- 'BfWriteByte',
- 'BfWriteChar',
- 'BfWriteShort',
- 'BfWriteWord',
- 'BfWriteNum',
- 'BfWriteFloat',
- 'BfWriteString',
- 'BfWriteEntity',
- 'BfWriteAngle',
- 'BfWriteCoord',
- 'BfWriteVecCoord',
- 'BfWriteVecNormal',
- 'BfWriteAngles',
- 'BfReadBool',
- 'BfReadByte',
- 'BfReadChar',
- 'BfReadShort',
- 'BfReadWord',
- 'BfReadNum',
- 'BfReadFloat',
- 'BfReadString',
- 'BfReadEntity',
- 'BfReadAngle',
- 'BfReadCoord',
- 'BfReadVecCoord',
- 'BfReadVecNormal',
- 'BfReadAngles',
- 'BfGetNumBytesLeft',
- 'CreateProfiler',
- 'StartProfiling',
- 'StopProfiling',
- 'GetProfilerTime',
- 'OnPluginStart',
- 'AskPluginLoad2',
- 'OnPluginEnd',
- 'OnPluginPauseChange',
- 'OnGameFrame',
- 'OnMapStart',
- 'OnMapEnd',
- 'OnConfigsExecuted',
- 'OnAutoConfigsBuffered',
- 'OnAllPluginsLoaded',
- 'GetMyHandle',
- 'GetPluginIterator',
- 'MorePlugins',
- 'ReadPlugin',
- 'GetPluginStatus',
- 'GetPluginFilename',
- 'IsPluginDebugging',
- 'GetPluginInfo',
- 'FindPluginByNumber',
- 'SetFailState',
- 'ThrowError',
- 'GetTime',
- 'FormatTime',
- 'LoadGameConfigFile',
- 'GameConfGetOffset',
- 'GameConfGetKeyValue',
- 'GameConfGetAddress',
- 'GetSysTickCount',
- 'AutoExecConfig',
- 'RegPluginLibrary',
- 'LibraryExists',
- 'GetExtensionFileStatus',
- 'OnLibraryAdded',
- 'OnLibraryRemoved',
- 'ReadMapList',
- 'SetMapListCompatBind',
- 'OnClientFloodCheck',
- 'OnClientFloodResult',
- 'CanTestFeatures',
- 'GetFeatureStatus',
- 'RequireFeature',
- 'LoadFromAddress',
- 'StoreToAddress',
- 'CreateStack',
- 'PushStackCell',
- 'PushStackString',
- 'PushStackArray',
- 'PopStackCell',
- 'PopStackString',
- 'PopStackArray',
- 'IsStackEmpty',
- 'PopStack',
- 'OnPlayerRunCmd',
- 'BuildPath',
- 'OpenDirectory',
- 'ReadDirEntry',
- 'OpenFile',
- 'DeleteFile',
- 'ReadFileLine',
- 'ReadFile',
- 'ReadFileString',
- 'WriteFile',
- 'WriteFileString',
- 'WriteFileLine',
- 'ReadFileCell',
- 'WriteFileCell',
- 'IsEndOfFile',
- 'FileSeek',
- 'FilePosition',
- 'FileExists',
- 'RenameFile',
- 'DirExists',
- 'FileSize',
- 'FlushFile',
- 'RemoveDir',
- 'CreateDirectory',
- 'GetFileTime',
- 'LogToOpenFile',
- 'LogToOpenFileEx',
- 'PbReadInt',
- 'PbReadFloat',
- 'PbReadBool',
- 'PbReadString',
- 'PbReadColor',
- 'PbReadAngle',
- 'PbReadVector',
- 'PbReadVector2D',
- 'PbGetRepeatedFieldCount',
- 'PbSetInt',
- 'PbSetFloat',
- 'PbSetBool',
- 'PbSetString',
- 'PbSetColor',
- 'PbSetAngle',
- 'PbSetVector',
- 'PbSetVector2D',
- 'PbAddInt',
- 'PbAddFloat',
- 'PbAddBool',
- 'PbAddString',
- 'PbAddColor',
- 'PbAddAngle',
- 'PbAddVector',
- 'PbAddVector2D',
- 'PbRemoveRepeatedFieldValue',
- 'PbReadMessage',
- 'PbReadRepeatedMessage',
- 'PbAddMessage',
- 'SetNextMap',
- 'GetNextMap',
- 'ForceChangeLevel',
- 'GetMapHistorySize',
- 'GetMapHistory',
- 'GeoipCode2',
- 'GeoipCode3',
- 'GeoipCountry',
- 'MarkNativeAsOptional',
- 'RegClientCookie',
- 'FindClientCookie',
- 'SetClientCookie',
- 'GetClientCookie',
- 'SetAuthIdCookie',
- 'AreClientCookiesCached',
- 'OnClientCookiesCached',
- 'CookieMenuHandler',
- 'SetCookiePrefabMenu',
- 'SetCookieMenuItem',
- 'ShowCookieMenu',
- 'GetCookieIterator',
- 'ReadCookieIterator',
- 'GetCookieAccess',
- 'GetClientCookieTime',
- 'LoadTranslations',
- 'SetGlobalTransTarget',
- 'GetClientLanguage',
- 'GetServerLanguage',
- 'GetLanguageCount',
- 'GetLanguageInfo',
- 'SetClientLanguage',
- 'GetLanguageByCode',
- 'GetLanguageByName',
- 'CS_OnBuyCommand',
- 'CS_OnCSWeaponDrop',
- 'CS_OnGetWeaponPrice',
- 'CS_OnTerminateRound',
- 'CS_RespawnPlayer',
- 'CS_SwitchTeam',
- 'CS_DropWeapon',
- 'CS_TerminateRound',
- 'CS_GetTranslatedWeaponAlias',
- 'CS_GetWeaponPrice',
- 'CS_GetClientClanTag',
- 'CS_SetClientClanTag',
- 'CS_GetTeamScore',
- 'CS_SetTeamScore',
- 'CS_GetMVPCount',
- 'CS_SetMVPCount',
- 'CS_GetClientContributionScore',
- 'CS_SetClientContributionScore',
- 'CS_GetClientAssists',
- 'CS_SetClientAssists',
- 'CS_AliasToWeaponID',
- 'CS_WeaponIDToAlias',
- 'CS_IsValidWeaponID',
- 'CS_UpdateClientModel',
- 'LogToGame',
- 'SetRandomSeed',
- 'GetRandomFloat',
- 'GetRandomInt',
- 'IsMapValid',
- 'IsDedicatedServer',
- 'GetEngineTime',
- 'GetGameTime',
- 'GetGameTickCount',
- 'GetGameDescription',
- 'GetGameFolderName',
- 'GetCurrentMap',
- 'PrecacheModel',
- 'PrecacheSentenceFile',
- 'PrecacheDecal',
- 'PrecacheGeneric',
- 'IsModelPrecached',
- 'IsDecalPrecached',
- 'IsGenericPrecached',
- 'PrecacheSound',
- 'IsSoundPrecached',
- 'CreateDialog',
- 'GetEngineVersion',
- 'PrintToChat',
- 'PrintToChatAll',
- 'PrintCenterText',
- 'PrintCenterTextAll',
- 'PrintHintText',
- 'PrintHintTextToAll',
- 'ShowVGUIPanel',
- 'CreateHudSynchronizer',
- 'SetHudTextParams',
- 'SetHudTextParamsEx',
- 'ShowSyncHudText',
- 'ClearSyncHud',
- 'ShowHudText',
- 'ShowMOTDPanel',
- 'DisplayAskConnectBox',
- 'EntIndexToEntRef',
- 'EntRefToEntIndex',
- 'MakeCompatEntRef',
- 'SetClientViewEntity',
- 'SetLightStyle',
- 'GetClientEyePosition',
- 'CreateDataPack',
- 'WritePackCell',
- 'WritePackFloat',
- 'WritePackString',
- 'ReadPackCell',
- 'ReadPackFloat',
- 'ReadPackString',
- 'ResetPack',
- 'GetPackPosition',
- 'SetPackPosition',
- 'IsPackReadable',
- 'LogMessage',
- 'LogToFile',
- 'LogToFileEx',
- 'LogAction',
- 'LogError',
- 'OnLogAction',
- 'GameLogHook',
- 'AddGameLogHook',
- 'RemoveGameLogHook',
- 'FindTeamByName',
- 'StartPrepSDKCall',
- 'PrepSDKCall_SetVirtual',
- 'PrepSDKCall_SetSignature',
- 'PrepSDKCall_SetAddress',
- 'PrepSDKCall_SetFromConf',
- 'PrepSDKCall_SetReturnInfo',
- 'PrepSDKCall_AddParameter',
- 'EndPrepSDKCall',
- 'SDKCall',
- 'GetPlayerResourceEntity',
-)
-
-
-if __name__ == '__main__': # pragma: no cover
- import re
- import sys
- try:
- from urllib import FancyURLopener
- except ImportError:
- from urllib.request import FancyURLopener
-
- from pygments.util import format_lines
-
- # urllib ends up wanting to import a module called 'math' -- if
- # pygments/lexers is in the path, this ends badly.
- for i in range(len(sys.path)-1, -1, -1):
- if sys.path[i].endswith('/lexers'):
- del sys.path[i]
-
- class Opener(FancyURLopener):
- version = 'Mozilla/5.0 (Pygments Sourcemod Builtins Update)'
-
- opener = Opener()
-
- def get_version():
- f = opener.open('http://docs.sourcemod.net/api/index.php')
- r = re.compile(r'SourceMod v\.<b>([\d\.]+(?:-\w+)?)</td>')
- for line in f:
- m = r.search(line)
- if m is not None:
- return m.groups()[0]
- raise ValueError('No version in api docs')
-
- def get_sm_functions():
- f = opener.open('http://docs.sourcemod.net/api/SMfuncs.js')
- r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
- functions = []
- for line in f:
- m = r.match(line)
- if m is not None:
- functions.append(m.groups()[0])
- return functions
-
- def regenerate(filename, natives):
- with open(filename) as fp:
- content = fp.read()
-
- header = content[:content.find('FUNCTIONS = (')]
- footer = content[content.find("if __name__ == '__main__':")-1:]
-
-
- with open(filename, 'w') as fp:
- fp.write(header)
- fp.write(format_lines('FUNCTIONS', natives))
- fp.write(footer)
-
- def run():
- version = get_version()
- print('> Downloading function index for SourceMod %s' % version)
- functions = get_sm_functions()
- print('> %d functions found:' % len(functions))
-
- functionlist = []
- for full_function_name in functions:
- print('>> %s' % full_function_name)
- functionlist.append(full_function_name)
-
- regenerate(__file__, functionlist)
-
-
- run()
+ :license: BSD, see LICENSE for details.
+"""
+
+FUNCTIONS = (
+ 'OnEntityCreated',
+ 'OnEntityDestroyed',
+ 'OnGetGameDescription',
+ 'OnLevelInit',
+ 'SDKHook',
+ 'SDKHookEx',
+ 'SDKUnhook',
+ 'SDKHooks_TakeDamage',
+ 'SDKHooks_DropWeapon',
+ 'TopMenuHandler',
+ 'CreateTopMenu',
+ 'LoadTopMenuConfig',
+ 'AddToTopMenu',
+ 'GetTopMenuInfoString',
+ 'GetTopMenuObjName',
+ 'RemoveFromTopMenu',
+ 'DisplayTopMenu',
+ 'DisplayTopMenuCategory',
+ 'FindTopMenuCategory',
+ 'SetTopMenuTitleCaching',
+ 'OnAdminMenuCreated',
+ 'OnAdminMenuReady',
+ 'GetAdminTopMenu',
+ 'AddTargetsToMenu',
+ 'AddTargetsToMenu2',
+ 'RedisplayAdminMenu',
+ 'TEHook',
+ 'AddTempEntHook',
+ 'RemoveTempEntHook',
+ 'TE_Start',
+ 'TE_IsValidProp',
+ 'TE_WriteNum',
+ 'TE_ReadNum',
+ 'TE_WriteFloat',
+ 'TE_ReadFloat',
+ 'TE_WriteVector',
+ 'TE_ReadVector',
+ 'TE_WriteAngles',
+ 'TE_WriteFloatArray',
+ 'TE_Send',
+ 'TE_WriteEncodedEnt',
+ 'TE_SendToAll',
+ 'TE_SendToClient',
+ 'CreateKeyValues',
+ 'KvSetString',
+ 'KvSetNum',
+ 'KvSetUInt64',
+ 'KvSetFloat',
+ 'KvSetColor',
+ 'KvSetVector',
+ 'KvGetString',
+ 'KvGetNum',
+ 'KvGetFloat',
+ 'KvGetColor',
+ 'KvGetUInt64',
+ 'KvGetVector',
+ 'KvJumpToKey',
+ 'KvJumpToKeySymbol',
+ 'KvGotoFirstSubKey',
+ 'KvGotoNextKey',
+ 'KvSavePosition',
+ 'KvDeleteKey',
+ 'KvDeleteThis',
+ 'KvGoBack',
+ 'KvRewind',
+ 'KvGetSectionName',
+ 'KvSetSectionName',
+ 'KvGetDataType',
+ 'KeyValuesToFile',
+ 'FileToKeyValues',
+ 'StringToKeyValues',
+ 'KvSetEscapeSequences',
+ 'KvNodesInStack',
+ 'KvCopySubkeys',
+ 'KvFindKeyById',
+ 'KvGetNameSymbol',
+ 'KvGetSectionSymbol',
+ 'TE_SetupSparks',
+ 'TE_SetupSmoke',
+ 'TE_SetupDust',
+ 'TE_SetupMuzzleFlash',
+ 'TE_SetupMetalSparks',
+ 'TE_SetupEnergySplash',
+ 'TE_SetupArmorRicochet',
+ 'TE_SetupGlowSprite',
+ 'TE_SetupExplosion',
+ 'TE_SetupBloodSprite',
+ 'TE_SetupBeamRingPoint',
+ 'TE_SetupBeamPoints',
+ 'TE_SetupBeamLaser',
+ 'TE_SetupBeamRing',
+ 'TE_SetupBeamFollow',
+ 'HookEvent',
+ 'HookEventEx',
+ 'UnhookEvent',
+ 'CreateEvent',
+ 'FireEvent',
+ 'CancelCreatedEvent',
+ 'GetEventBool',
+ 'SetEventBool',
+ 'GetEventInt',
+ 'SetEventInt',
+ 'GetEventFloat',
+ 'SetEventFloat',
+ 'GetEventString',
+ 'SetEventString',
+ 'GetEventName',
+ 'SetEventBroadcast',
+ 'GetUserMessageType',
+ 'GetUserMessageId',
+ 'GetUserMessageName',
+ 'StartMessage',
+ 'StartMessageEx',
+ 'EndMessage',
+ 'MsgHook',
+ 'MsgPostHook',
+ 'HookUserMessage',
+ 'UnhookUserMessage',
+ 'StartMessageAll',
+ 'StartMessageOne',
+ 'InactivateClient',
+ 'ReconnectClient',
+ 'GetMaxEntities',
+ 'GetEntityCount',
+ 'IsValidEntity',
+ 'IsValidEdict',
+ 'IsEntNetworkable',
+ 'CreateEdict',
+ 'RemoveEdict',
+ 'GetEdictFlags',
+ 'SetEdictFlags',
+ 'GetEdictClassname',
+ 'GetEntityNetClass',
+ 'ChangeEdictState',
+ 'GetEntData',
+ 'SetEntData',
+ 'GetEntDataFloat',
+ 'SetEntDataFloat',
+ 'GetEntDataEnt2',
+ 'SetEntDataEnt2',
+ 'GetEntDataVector',
+ 'SetEntDataVector',
+ 'GetEntDataString',
+ 'SetEntDataString',
+ 'FindSendPropOffs',
+ 'FindSendPropInfo',
+ 'FindDataMapOffs',
+ 'FindDataMapInfo',
+ 'GetEntSendPropOffs',
+ 'GetEntProp',
+ 'SetEntProp',
+ 'GetEntPropFloat',
+ 'SetEntPropFloat',
+ 'GetEntPropEnt',
+ 'SetEntPropEnt',
+ 'GetEntPropVector',
+ 'SetEntPropVector',
+ 'GetEntPropString',
+ 'SetEntPropString',
+ 'GetEntPropArraySize',
+ 'GetEntDataArray',
+ 'SetEntDataArray',
+ 'GetEntityAddress',
+ 'GetEntityClassname',
+ 'float',
+ 'FloatMul',
+ 'FloatDiv',
+ 'FloatAdd',
+ 'FloatSub',
+ 'FloatFraction',
+ 'RoundToZero',
+ 'RoundToCeil',
+ 'RoundToFloor',
+ 'RoundToNearest',
+ 'FloatCompare',
+ 'SquareRoot',
+ 'Pow',
+ 'Exponential',
+ 'Logarithm',
+ 'Sine',
+ 'Cosine',
+ 'Tangent',
+ 'FloatAbs',
+ 'ArcTangent',
+ 'ArcCosine',
+ 'ArcSine',
+ 'ArcTangent2',
+ 'RoundFloat',
+ 'operator%',
+ 'DegToRad',
+ 'RadToDeg',
+ 'GetURandomInt',
+ 'GetURandomFloat',
+ 'SetURandomSeed',
+ 'SetURandomSeedSimple',
+ 'RemovePlayerItem',
+ 'GivePlayerItem',
+ 'GetPlayerWeaponSlot',
+ 'IgniteEntity',
+ 'ExtinguishEntity',
+ 'TeleportEntity',
+ 'ForcePlayerSuicide',
+ 'SlapPlayer',
+ 'FindEntityByClassname',
+ 'GetClientEyeAngles',
+ 'CreateEntityByName',
+ 'DispatchSpawn',
+ 'DispatchKeyValue',
+ 'DispatchKeyValueFloat',
+ 'DispatchKeyValueVector',
+ 'GetClientAimTarget',
+ 'GetTeamCount',
+ 'GetTeamName',
+ 'GetTeamScore',
+ 'SetTeamScore',
+ 'GetTeamClientCount',
+ 'SetEntityModel',
+ 'GetPlayerDecalFile',
+ 'GetPlayerJingleFile',
+ 'GetServerNetStats',
+ 'EquipPlayerWeapon',
+ 'ActivateEntity',
+ 'SetClientInfo',
+ 'GivePlayerAmmo',
+ 'SetClientListeningFlags',
+ 'GetClientListeningFlags',
+ 'SetListenOverride',
+ 'GetListenOverride',
+ 'IsClientMuted',
+ 'TR_GetPointContents',
+ 'TR_GetPointContentsEnt',
+ 'TR_TraceRay',
+ 'TR_TraceHull',
+ 'TR_TraceRayFilter',
+ 'TR_TraceHullFilter',
+ 'TR_TraceRayEx',
+ 'TR_TraceHullEx',
+ 'TR_TraceRayFilterEx',
+ 'TR_TraceHullFilterEx',
+ 'TR_GetFraction',
+ 'TR_GetEndPosition',
+ 'TR_GetEntityIndex',
+ 'TR_DidHit',
+ 'TR_GetHitGroup',
+ 'TR_GetPlaneNormal',
+ 'TR_PointOutsideWorld',
+ 'SortIntegers',
+ 'SortFloats',
+ 'SortStrings',
+ 'SortFunc1D',
+ 'SortCustom1D',
+ 'SortCustom2D',
+ 'SortADTArray',
+ 'SortFuncADTArray',
+ 'SortADTArrayCustom',
+ 'CompileRegex',
+ 'MatchRegex',
+ 'GetRegexSubString',
+ 'SimpleRegexMatch',
+ 'TF2_GetPlayerClass',
+ 'TF2_SetPlayerClass',
+ 'TF2_RemoveWeaponSlot',
+ 'TF2_RemoveAllWeapons',
+ 'TF2_IsPlayerInCondition',
+ 'TF2_GetObjectType',
+ 'TF2_GetObjectMode',
+ 'NominateMap',
+ 'RemoveNominationByMap',
+ 'RemoveNominationByOwner',
+ 'GetExcludeMapList',
+ 'GetNominatedMapList',
+ 'CanMapChooserStartVote',
+ 'InitiateMapChooserVote',
+ 'HasEndOfMapVoteFinished',
+ 'EndOfMapVoteEnabled',
+ 'OnNominationRemoved',
+ 'OnMapVoteStarted',
+ 'CreateTimer',
+ 'KillTimer',
+ 'TriggerTimer',
+ 'GetTickedTime',
+ 'GetMapTimeLeft',
+ 'GetMapTimeLimit',
+ 'ExtendMapTimeLimit',
+ 'GetTickInterval',
+ 'OnMapTimeLeftChanged',
+ 'IsServerProcessing',
+ 'CreateDataTimer',
+ 'ByteCountToCells',
+ 'CreateArray',
+ 'ClearArray',
+ 'CloneArray',
+ 'ResizeArray',
+ 'GetArraySize',
+ 'PushArrayCell',
+ 'PushArrayString',
+ 'PushArrayArray',
+ 'GetArrayCell',
+ 'GetArrayString',
+ 'GetArrayArray',
+ 'SetArrayCell',
+ 'SetArrayString',
+ 'SetArrayArray',
+ 'ShiftArrayUp',
+ 'RemoveFromArray',
+ 'SwapArrayItems',
+ 'FindStringInArray',
+ 'FindValueInArray',
+ 'ProcessTargetString',
+ 'ReplyToTargetError',
+ 'MultiTargetFilter',
+ 'AddMultiTargetFilter',
+ 'RemoveMultiTargetFilter',
+ 'OnBanClient',
+ 'OnBanIdentity',
+ 'OnRemoveBan',
+ 'BanClient',
+ 'BanIdentity',
+ 'RemoveBan',
+ 'CreateTrie',
+ 'SetTrieValue',
+ 'SetTrieArray',
+ 'SetTrieString',
+ 'GetTrieValue',
+ 'GetTrieArray',
+ 'GetTrieString',
+ 'RemoveFromTrie',
+ 'ClearTrie',
+ 'GetTrieSize',
+ 'GetFunctionByName',
+ 'CreateGlobalForward',
+ 'CreateForward',
+ 'GetForwardFunctionCount',
+ 'AddToForward',
+ 'RemoveFromForward',
+ 'RemoveAllFromForward',
+ 'Call_StartForward',
+ 'Call_StartFunction',
+ 'Call_PushCell',
+ 'Call_PushCellRef',
+ 'Call_PushFloat',
+ 'Call_PushFloatRef',
+ 'Call_PushArray',
+ 'Call_PushArrayEx',
+ 'Call_PushString',
+ 'Call_PushStringEx',
+ 'Call_Finish',
+ 'Call_Cancel',
+ 'NativeCall',
+ 'CreateNative',
+ 'ThrowNativeError',
+ 'GetNativeStringLength',
+ 'GetNativeString',
+ 'SetNativeString',
+ 'GetNativeCell',
+ 'GetNativeCellRef',
+ 'SetNativeCellRef',
+ 'GetNativeArray',
+ 'SetNativeArray',
+ 'FormatNativeString',
+ 'RequestFrameCallback',
+ 'RequestFrame',
+ 'OnRebuildAdminCache',
+ 'DumpAdminCache',
+ 'AddCommandOverride',
+ 'GetCommandOverride',
+ 'UnsetCommandOverride',
+ 'CreateAdmGroup',
+ 'FindAdmGroup',
+ 'SetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlags',
+ 'SetAdmGroupImmuneFrom',
+ 'GetAdmGroupImmuneCount',
+ 'GetAdmGroupImmuneFrom',
+ 'AddAdmGroupCmdOverride',
+ 'GetAdmGroupCmdOverride',
+ 'RegisterAuthIdentType',
+ 'CreateAdmin',
+ 'GetAdminUsername',
+ 'BindAdminIdentity',
+ 'SetAdminFlag',
+ 'GetAdminFlag',
+ 'GetAdminFlags',
+ 'AdminInheritGroup',
+ 'GetAdminGroupCount',
+ 'GetAdminGroup',
+ 'SetAdminPassword',
+ 'GetAdminPassword',
+ 'FindAdminByIdentity',
+ 'RemoveAdmin',
+ 'FlagBitsToBitArray',
+ 'FlagBitArrayToBits',
+ 'FlagArrayToBits',
+ 'FlagBitsToArray',
+ 'FindFlagByName',
+ 'FindFlagByChar',
+ 'FindFlagChar',
+ 'ReadFlagString',
+ 'CanAdminTarget',
+ 'CreateAuthMethod',
+ 'SetAdmGroupImmunityLevel',
+ 'GetAdmGroupImmunityLevel',
+ 'SetAdminImmunityLevel',
+ 'GetAdminImmunityLevel',
+ 'FlagToBit',
+ 'BitToFlag',
+ 'ServerCommand',
+ 'ServerCommandEx',
+ 'InsertServerCommand',
+ 'ServerExecute',
+ 'ClientCommand',
+ 'FakeClientCommand',
+ 'FakeClientCommandEx',
+ 'PrintToServer',
+ 'PrintToConsole',
+ 'ReplyToCommand',
+ 'GetCmdReplySource',
+ 'SetCmdReplySource',
+ 'IsChatTrigger',
+ 'ShowActivity2',
+ 'ShowActivity',
+ 'ShowActivityEx',
+ 'FormatActivitySource',
+ 'SrvCmd',
+ 'RegServerCmd',
+ 'ConCmd',
+ 'RegConsoleCmd',
+ 'RegAdminCmd',
+ 'GetCmdArgs',
+ 'GetCmdArg',
+ 'GetCmdArgString',
+ 'CreateConVar',
+ 'FindConVar',
+ 'ConVarChanged',
+ 'HookConVarChange',
+ 'UnhookConVarChange',
+ 'GetConVarBool',
+ 'SetConVarBool',
+ 'GetConVarInt',
+ 'SetConVarInt',
+ 'GetConVarFloat',
+ 'SetConVarFloat',
+ 'GetConVarString',
+ 'SetConVarString',
+ 'ResetConVar',
+ 'GetConVarDefault',
+ 'GetConVarFlags',
+ 'SetConVarFlags',
+ 'GetConVarBounds',
+ 'SetConVarBounds',
+ 'GetConVarName',
+ 'QueryClientConVar',
+ 'GetCommandIterator',
+ 'ReadCommandIterator',
+ 'CheckCommandAccess',
+ 'CheckAccess',
+ 'IsValidConVarChar',
+ 'GetCommandFlags',
+ 'SetCommandFlags',
+ 'FindFirstConCommand',
+ 'FindNextConCommand',
+ 'SendConVarValue',
+ 'AddServerTag',
+ 'RemoveServerTag',
+ 'CommandListener',
+ 'AddCommandListener',
+ 'RemoveCommandListener',
+ 'CommandExists',
+ 'OnClientSayCommand',
+ 'OnClientSayCommand_Post',
+ 'TF2_IgnitePlayer',
+ 'TF2_RespawnPlayer',
+ 'TF2_RegeneratePlayer',
+ 'TF2_AddCondition',
+ 'TF2_RemoveCondition',
+ 'TF2_SetPlayerPowerPlay',
+ 'TF2_DisguisePlayer',
+ 'TF2_RemovePlayerDisguise',
+ 'TF2_StunPlayer',
+ 'TF2_MakeBleed',
+ 'TF2_GetClass',
+ 'TF2_CalcIsAttackCritical',
+ 'TF2_OnIsHolidayActive',
+ 'TF2_IsHolidayActive',
+ 'TF2_IsPlayerInDuel',
+ 'TF2_RemoveWearable',
+ 'TF2_OnConditionAdded',
+ 'TF2_OnConditionRemoved',
+ 'TF2_OnWaitingForPlayersStart',
+ 'TF2_OnWaitingForPlayersEnd',
+ 'TF2_OnPlayerTeleport',
+ 'SQL_Connect',
+ 'SQL_DefConnect',
+ 'SQL_ConnectCustom',
+ 'SQLite_UseDatabase',
+ 'SQL_CheckConfig',
+ 'SQL_GetDriver',
+ 'SQL_ReadDriver',
+ 'SQL_GetDriverIdent',
+ 'SQL_GetDriverProduct',
+ 'SQL_SetCharset',
+ 'SQL_GetAffectedRows',
+ 'SQL_GetInsertId',
+ 'SQL_GetError',
+ 'SQL_EscapeString',
+ 'SQL_QuoteString',
+ 'SQL_FastQuery',
+ 'SQL_Query',
+ 'SQL_PrepareQuery',
+ 'SQL_FetchMoreResults',
+ 'SQL_HasResultSet',
+ 'SQL_GetRowCount',
+ 'SQL_GetFieldCount',
+ 'SQL_FieldNumToName',
+ 'SQL_FieldNameToNum',
+ 'SQL_FetchRow',
+ 'SQL_MoreRows',
+ 'SQL_Rewind',
+ 'SQL_FetchString',
+ 'SQL_FetchFloat',
+ 'SQL_FetchInt',
+ 'SQL_IsFieldNull',
+ 'SQL_FetchSize',
+ 'SQL_BindParamInt',
+ 'SQL_BindParamFloat',
+ 'SQL_BindParamString',
+ 'SQL_Execute',
+ 'SQL_LockDatabase',
+ 'SQL_UnlockDatabase',
+ 'SQLTCallback',
+ 'SQL_IsSameConnection',
+ 'SQL_TConnect',
+ 'SQL_TQuery',
+ 'SQL_CreateTransaction',
+ 'SQL_AddQuery',
+ 'SQLTxnSuccess',
+ 'SQLTxnFailure',
+ 'SQL_ExecuteTransaction',
+ 'CloseHandle',
+ 'CloneHandle',
+ 'MenuHandler',
+ 'CreateMenu',
+ 'DisplayMenu',
+ 'DisplayMenuAtItem',
+ 'AddMenuItem',
+ 'InsertMenuItem',
+ 'RemoveMenuItem',
+ 'RemoveAllMenuItems',
+ 'GetMenuItem',
+ 'GetMenuSelectionPosition',
+ 'GetMenuItemCount',
+ 'SetMenuPagination',
+ 'GetMenuPagination',
+ 'GetMenuStyle',
+ 'SetMenuTitle',
+ 'GetMenuTitle',
+ 'CreatePanelFromMenu',
+ 'GetMenuExitButton',
+ 'SetMenuExitButton',
+ 'GetMenuExitBackButton',
+ 'SetMenuExitBackButton',
+ 'SetMenuNoVoteButton',
+ 'CancelMenu',
+ 'GetMenuOptionFlags',
+ 'SetMenuOptionFlags',
+ 'IsVoteInProgress',
+ 'CancelVote',
+ 'VoteMenu',
+ 'VoteMenuToAll',
+ 'VoteHandler',
+ 'SetVoteResultCallback',
+ 'CheckVoteDelay',
+ 'IsClientInVotePool',
+ 'RedrawClientVoteMenu',
+ 'GetMenuStyleHandle',
+ 'CreatePanel',
+ 'CreateMenuEx',
+ 'GetClientMenu',
+ 'CancelClientMenu',
+ 'GetMaxPageItems',
+ 'GetPanelStyle',
+ 'SetPanelTitle',
+ 'DrawPanelItem',
+ 'DrawPanelText',
+ 'CanPanelDrawFlags',
+ 'SetPanelKeys',
+ 'SendPanelToClient',
+ 'GetPanelTextRemaining',
+ 'GetPanelCurrentKey',
+ 'SetPanelCurrentKey',
+ 'RedrawMenuItem',
+ 'InternalShowMenu',
+ 'GetMenuVoteInfo',
+ 'IsNewVoteAllowed',
+ 'PrefetchSound',
+ 'EmitAmbientSound',
+ 'FadeClientVolume',
+ 'StopSound',
+ 'EmitSound',
+ 'EmitSentence',
+ 'GetDistGainFromSoundLevel',
+ 'AmbientSHook',
+ 'NormalSHook',
+ 'AddAmbientSoundHook',
+ 'AddNormalSoundHook',
+ 'RemoveAmbientSoundHook',
+ 'RemoveNormalSoundHook',
+ 'EmitSoundToClient',
+ 'EmitSoundToAll',
+ 'ATTN_TO_SNDLEVEL',
+ 'GetGameSoundParams',
+ 'EmitGameSound',
+ 'EmitAmbientGameSound',
+ 'EmitGameSoundToClient',
+ 'EmitGameSoundToAll',
+ 'PrecacheScriptSound',
+ 'strlen',
+ 'StrContains',
+ 'strcmp',
+ 'strncmp',
+ 'StrEqual',
+ 'strcopy',
+ 'Format',
+ 'FormatEx',
+ 'VFormat',
+ 'StringToInt',
+ 'StringToIntEx',
+ 'IntToString',
+ 'StringToFloat',
+ 'StringToFloatEx',
+ 'FloatToString',
+ 'BreakString',
+ 'TrimString',
+ 'SplitString',
+ 'ReplaceString',
+ 'ReplaceStringEx',
+ 'GetCharBytes',
+ 'IsCharAlpha',
+ 'IsCharNumeric',
+ 'IsCharSpace',
+ 'IsCharMB',
+ 'IsCharUpper',
+ 'IsCharLower',
+ 'StripQuotes',
+ 'CharToUpper',
+ 'CharToLower',
+ 'FindCharInString',
+ 'StrCat',
+ 'ExplodeString',
+ 'ImplodeStrings',
+ 'GetVectorLength',
+ 'GetVectorDistance',
+ 'GetVectorDotProduct',
+ 'GetVectorCrossProduct',
+ 'NormalizeVector',
+ 'GetAngleVectors',
+ 'GetVectorAngles',
+ 'GetVectorVectors',
+ 'AddVectors',
+ 'SubtractVectors',
+ 'ScaleVector',
+ 'NegateVector',
+ 'MakeVectorFromPoints',
+ 'BaseComm_IsClientGagged',
+ 'BaseComm_IsClientMuted',
+ 'BaseComm_SetClientGag',
+ 'BaseComm_SetClientMute',
+ 'FormatUserLogText',
+ 'FindPluginByFile',
+ 'FindTarget',
+ 'AcceptEntityInput',
+ 'SetVariantBool',
+ 'SetVariantString',
+ 'SetVariantInt',
+ 'SetVariantFloat',
+ 'SetVariantVector3D',
+ 'SetVariantPosVector3D',
+ 'SetVariantColor',
+ 'SetVariantEntity',
+ 'GameRules_GetProp',
+ 'GameRules_SetProp',
+ 'GameRules_GetPropFloat',
+ 'GameRules_SetPropFloat',
+ 'GameRules_GetPropEnt',
+ 'GameRules_SetPropEnt',
+ 'GameRules_GetPropVector',
+ 'GameRules_SetPropVector',
+ 'GameRules_GetPropString',
+ 'GameRules_SetPropString',
+ 'GameRules_GetRoundState',
+ 'OnClientConnect',
+ 'OnClientConnected',
+ 'OnClientPutInServer',
+ 'OnClientDisconnect',
+ 'OnClientDisconnect_Post',
+ 'OnClientCommand',
+ 'OnClientSettingsChanged',
+ 'OnClientAuthorized',
+ 'OnClientPreAdminCheck',
+ 'OnClientPostAdminFilter',
+ 'OnClientPostAdminCheck',
+ 'GetMaxClients',
+ 'GetMaxHumanPlayers',
+ 'GetClientCount',
+ 'GetClientName',
+ 'GetClientIP',
+ 'GetClientAuthString',
+ 'GetClientAuthId',
+ 'GetSteamAccountID',
+ 'GetClientUserId',
+ 'IsClientConnected',
+ 'IsClientInGame',
+ 'IsClientInKickQueue',
+ 'IsClientAuthorized',
+ 'IsFakeClient',
+ 'IsClientSourceTV',
+ 'IsClientReplay',
+ 'IsClientObserver',
+ 'IsPlayerAlive',
+ 'GetClientInfo',
+ 'GetClientTeam',
+ 'SetUserAdmin',
+ 'GetUserAdmin',
+ 'AddUserFlags',
+ 'RemoveUserFlags',
+ 'SetUserFlagBits',
+ 'GetUserFlagBits',
+ 'CanUserTarget',
+ 'RunAdminCacheChecks',
+ 'NotifyPostAdminCheck',
+ 'CreateFakeClient',
+ 'SetFakeClientConVar',
+ 'GetClientHealth',
+ 'GetClientModel',
+ 'GetClientWeapon',
+ 'GetClientMaxs',
+ 'GetClientMins',
+ 'GetClientAbsAngles',
+ 'GetClientAbsOrigin',
+ 'GetClientArmor',
+ 'GetClientDeaths',
+ 'GetClientFrags',
+ 'GetClientDataRate',
+ 'IsClientTimingOut',
+ 'GetClientTime',
+ 'GetClientLatency',
+ 'GetClientAvgLatency',
+ 'GetClientAvgLoss',
+ 'GetClientAvgChoke',
+ 'GetClientAvgData',
+ 'GetClientAvgPackets',
+ 'GetClientOfUserId',
+ 'KickClient',
+ 'KickClientEx',
+ 'ChangeClientTeam',
+ 'GetClientSerial',
+ 'GetClientFromSerial',
+ 'FindStringTable',
+ 'GetNumStringTables',
+ 'GetStringTableNumStrings',
+ 'GetStringTableMaxStrings',
+ 'GetStringTableName',
+ 'FindStringIndex',
+ 'ReadStringTable',
+ 'GetStringTableDataLength',
+ 'GetStringTableData',
+ 'SetStringTableData',
+ 'AddToStringTable',
+ 'LockStringTables',
+ 'AddFileToDownloadsTable',
+ 'GetEntityFlags',
+ 'SetEntityFlags',
+ 'GetEntityMoveType',
+ 'SetEntityMoveType',
+ 'GetEntityRenderMode',
+ 'SetEntityRenderMode',
+ 'GetEntityRenderFx',
+ 'SetEntityRenderFx',
+ 'SetEntityRenderColor',
+ 'GetEntityGravity',
+ 'SetEntityGravity',
+ 'SetEntityHealth',
+ 'GetClientButtons',
+ 'EntityOutput',
+ 'HookEntityOutput',
+ 'UnhookEntityOutput',
+ 'HookSingleEntityOutput',
+ 'UnhookSingleEntityOutput',
+ 'SMC_CreateParser',
+ 'SMC_ParseFile',
+ 'SMC_GetErrorString',
+ 'SMC_ParseStart',
+ 'SMC_SetParseStart',
+ 'SMC_ParseEnd',
+ 'SMC_SetParseEnd',
+ 'SMC_NewSection',
+ 'SMC_KeyValue',
+ 'SMC_EndSection',
+ 'SMC_SetReaders',
+ 'SMC_RawLine',
+ 'SMC_SetRawLine',
+ 'BfWriteBool',
+ 'BfWriteByte',
+ 'BfWriteChar',
+ 'BfWriteShort',
+ 'BfWriteWord',
+ 'BfWriteNum',
+ 'BfWriteFloat',
+ 'BfWriteString',
+ 'BfWriteEntity',
+ 'BfWriteAngle',
+ 'BfWriteCoord',
+ 'BfWriteVecCoord',
+ 'BfWriteVecNormal',
+ 'BfWriteAngles',
+ 'BfReadBool',
+ 'BfReadByte',
+ 'BfReadChar',
+ 'BfReadShort',
+ 'BfReadWord',
+ 'BfReadNum',
+ 'BfReadFloat',
+ 'BfReadString',
+ 'BfReadEntity',
+ 'BfReadAngle',
+ 'BfReadCoord',
+ 'BfReadVecCoord',
+ 'BfReadVecNormal',
+ 'BfReadAngles',
+ 'BfGetNumBytesLeft',
+ 'CreateProfiler',
+ 'StartProfiling',
+ 'StopProfiling',
+ 'GetProfilerTime',
+ 'OnPluginStart',
+ 'AskPluginLoad2',
+ 'OnPluginEnd',
+ 'OnPluginPauseChange',
+ 'OnGameFrame',
+ 'OnMapStart',
+ 'OnMapEnd',
+ 'OnConfigsExecuted',
+ 'OnAutoConfigsBuffered',
+ 'OnAllPluginsLoaded',
+ 'GetMyHandle',
+ 'GetPluginIterator',
+ 'MorePlugins',
+ 'ReadPlugin',
+ 'GetPluginStatus',
+ 'GetPluginFilename',
+ 'IsPluginDebugging',
+ 'GetPluginInfo',
+ 'FindPluginByNumber',
+ 'SetFailState',
+ 'ThrowError',
+ 'GetTime',
+ 'FormatTime',
+ 'LoadGameConfigFile',
+ 'GameConfGetOffset',
+ 'GameConfGetKeyValue',
+ 'GameConfGetAddress',
+ 'GetSysTickCount',
+ 'AutoExecConfig',
+ 'RegPluginLibrary',
+ 'LibraryExists',
+ 'GetExtensionFileStatus',
+ 'OnLibraryAdded',
+ 'OnLibraryRemoved',
+ 'ReadMapList',
+ 'SetMapListCompatBind',
+ 'OnClientFloodCheck',
+ 'OnClientFloodResult',
+ 'CanTestFeatures',
+ 'GetFeatureStatus',
+ 'RequireFeature',
+ 'LoadFromAddress',
+ 'StoreToAddress',
+ 'CreateStack',
+ 'PushStackCell',
+ 'PushStackString',
+ 'PushStackArray',
+ 'PopStackCell',
+ 'PopStackString',
+ 'PopStackArray',
+ 'IsStackEmpty',
+ 'PopStack',
+ 'OnPlayerRunCmd',
+ 'BuildPath',
+ 'OpenDirectory',
+ 'ReadDirEntry',
+ 'OpenFile',
+ 'DeleteFile',
+ 'ReadFileLine',
+ 'ReadFile',
+ 'ReadFileString',
+ 'WriteFile',
+ 'WriteFileString',
+ 'WriteFileLine',
+ 'ReadFileCell',
+ 'WriteFileCell',
+ 'IsEndOfFile',
+ 'FileSeek',
+ 'FilePosition',
+ 'FileExists',
+ 'RenameFile',
+ 'DirExists',
+ 'FileSize',
+ 'FlushFile',
+ 'RemoveDir',
+ 'CreateDirectory',
+ 'GetFileTime',
+ 'LogToOpenFile',
+ 'LogToOpenFileEx',
+ 'PbReadInt',
+ 'PbReadFloat',
+ 'PbReadBool',
+ 'PbReadString',
+ 'PbReadColor',
+ 'PbReadAngle',
+ 'PbReadVector',
+ 'PbReadVector2D',
+ 'PbGetRepeatedFieldCount',
+ 'PbSetInt',
+ 'PbSetFloat',
+ 'PbSetBool',
+ 'PbSetString',
+ 'PbSetColor',
+ 'PbSetAngle',
+ 'PbSetVector',
+ 'PbSetVector2D',
+ 'PbAddInt',
+ 'PbAddFloat',
+ 'PbAddBool',
+ 'PbAddString',
+ 'PbAddColor',
+ 'PbAddAngle',
+ 'PbAddVector',
+ 'PbAddVector2D',
+ 'PbRemoveRepeatedFieldValue',
+ 'PbReadMessage',
+ 'PbReadRepeatedMessage',
+ 'PbAddMessage',
+ 'SetNextMap',
+ 'GetNextMap',
+ 'ForceChangeLevel',
+ 'GetMapHistorySize',
+ 'GetMapHistory',
+ 'GeoipCode2',
+ 'GeoipCode3',
+ 'GeoipCountry',
+ 'MarkNativeAsOptional',
+ 'RegClientCookie',
+ 'FindClientCookie',
+ 'SetClientCookie',
+ 'GetClientCookie',
+ 'SetAuthIdCookie',
+ 'AreClientCookiesCached',
+ 'OnClientCookiesCached',
+ 'CookieMenuHandler',
+ 'SetCookiePrefabMenu',
+ 'SetCookieMenuItem',
+ 'ShowCookieMenu',
+ 'GetCookieIterator',
+ 'ReadCookieIterator',
+ 'GetCookieAccess',
+ 'GetClientCookieTime',
+ 'LoadTranslations',
+ 'SetGlobalTransTarget',
+ 'GetClientLanguage',
+ 'GetServerLanguage',
+ 'GetLanguageCount',
+ 'GetLanguageInfo',
+ 'SetClientLanguage',
+ 'GetLanguageByCode',
+ 'GetLanguageByName',
+ 'CS_OnBuyCommand',
+ 'CS_OnCSWeaponDrop',
+ 'CS_OnGetWeaponPrice',
+ 'CS_OnTerminateRound',
+ 'CS_RespawnPlayer',
+ 'CS_SwitchTeam',
+ 'CS_DropWeapon',
+ 'CS_TerminateRound',
+ 'CS_GetTranslatedWeaponAlias',
+ 'CS_GetWeaponPrice',
+ 'CS_GetClientClanTag',
+ 'CS_SetClientClanTag',
+ 'CS_GetTeamScore',
+ 'CS_SetTeamScore',
+ 'CS_GetMVPCount',
+ 'CS_SetMVPCount',
+ 'CS_GetClientContributionScore',
+ 'CS_SetClientContributionScore',
+ 'CS_GetClientAssists',
+ 'CS_SetClientAssists',
+ 'CS_AliasToWeaponID',
+ 'CS_WeaponIDToAlias',
+ 'CS_IsValidWeaponID',
+ 'CS_UpdateClientModel',
+ 'LogToGame',
+ 'SetRandomSeed',
+ 'GetRandomFloat',
+ 'GetRandomInt',
+ 'IsMapValid',
+ 'IsDedicatedServer',
+ 'GetEngineTime',
+ 'GetGameTime',
+ 'GetGameTickCount',
+ 'GetGameDescription',
+ 'GetGameFolderName',
+ 'GetCurrentMap',
+ 'PrecacheModel',
+ 'PrecacheSentenceFile',
+ 'PrecacheDecal',
+ 'PrecacheGeneric',
+ 'IsModelPrecached',
+ 'IsDecalPrecached',
+ 'IsGenericPrecached',
+ 'PrecacheSound',
+ 'IsSoundPrecached',
+ 'CreateDialog',
+ 'GetEngineVersion',
+ 'PrintToChat',
+ 'PrintToChatAll',
+ 'PrintCenterText',
+ 'PrintCenterTextAll',
+ 'PrintHintText',
+ 'PrintHintTextToAll',
+ 'ShowVGUIPanel',
+ 'CreateHudSynchronizer',
+ 'SetHudTextParams',
+ 'SetHudTextParamsEx',
+ 'ShowSyncHudText',
+ 'ClearSyncHud',
+ 'ShowHudText',
+ 'ShowMOTDPanel',
+ 'DisplayAskConnectBox',
+ 'EntIndexToEntRef',
+ 'EntRefToEntIndex',
+ 'MakeCompatEntRef',
+ 'SetClientViewEntity',
+ 'SetLightStyle',
+ 'GetClientEyePosition',
+ 'CreateDataPack',
+ 'WritePackCell',
+ 'WritePackFloat',
+ 'WritePackString',
+ 'ReadPackCell',
+ 'ReadPackFloat',
+ 'ReadPackString',
+ 'ResetPack',
+ 'GetPackPosition',
+ 'SetPackPosition',
+ 'IsPackReadable',
+ 'LogMessage',
+ 'LogToFile',
+ 'LogToFileEx',
+ 'LogAction',
+ 'LogError',
+ 'OnLogAction',
+ 'GameLogHook',
+ 'AddGameLogHook',
+ 'RemoveGameLogHook',
+ 'FindTeamByName',
+ 'StartPrepSDKCall',
+ 'PrepSDKCall_SetVirtual',
+ 'PrepSDKCall_SetSignature',
+ 'PrepSDKCall_SetAddress',
+ 'PrepSDKCall_SetFromConf',
+ 'PrepSDKCall_SetReturnInfo',
+ 'PrepSDKCall_AddParameter',
+ 'EndPrepSDKCall',
+ 'SDKCall',
+ 'GetPlayerResourceEntity',
+)
+
+
+if __name__ == '__main__': # pragma: no cover
+ import re
+ import sys
+ try:
+ from urllib import FancyURLopener
+ except ImportError:
+ from urllib.request import FancyURLopener
+
+ from pygments.util import format_lines
+
+ # urllib ends up wanting to import a module called 'math' -- if
+ # pygments/lexers is in the path, this ends badly.
+ for i in range(len(sys.path)-1, -1, -1):
+ if sys.path[i].endswith('/lexers'):
+ del sys.path[i]
+
+ class Opener(FancyURLopener):
+ version = 'Mozilla/5.0 (Pygments Sourcemod Builtins Update)'
+
+ opener = Opener()
+
+ def get_version():
+ f = opener.open('http://docs.sourcemod.net/api/index.php')
+ r = re.compile(r'SourceMod v\.<b>([\d\.]+(?:-\w+)?)</td>')
+ for line in f:
+ m = r.search(line)
+ if m is not None:
+ return m.groups()[0]
+ raise ValueError('No version in api docs')
+
+ def get_sm_functions():
+ f = opener.open('http://docs.sourcemod.net/api/SMfuncs.js')
+ r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
+ functions = []
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ functions.append(m.groups()[0])
+ return functions
+
+ def regenerate(filename, natives):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('FUNCTIONS = (')]
+ footer = content[content.find("if __name__ == '__main__':")-1:]
+
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write(format_lines('FUNCTIONS', natives))
+ fp.write(footer)
+
+ def run():
+ version = get_version()
+ print('> Downloading function index for SourceMod %s' % version)
+ functions = get_sm_functions()
+ print('> %d functions found:' % len(functions))
+
+ functionlist = []
+ for full_function_name in functions:
+ print('>> %s' % full_function_name)
+ functionlist.append(full_function_name)
+
+ regenerate(__file__, functionlist)
+
+
+ run()
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
index f15167053a..4c7e21e1e4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
@@ -1,557 +1,557 @@
-"""
- pygments.lexers._stan_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of functions for Stan used by
+"""
+ pygments.lexers._stan_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer. This is for Stan language version 2.17.0.
-
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-KEYWORDS = (
+ :license: BSD, see LICENSE for details.
+"""
+
+KEYWORDS = (
'break',
'continue',
- 'else',
- 'for',
- 'if',
- 'in',
- 'print',
- 'reject',
- 'return',
+ 'else',
+ 'for',
+ 'if',
+ 'in',
+ 'print',
+ 'reject',
+ 'return',
'while',
-)
-
-TYPES = (
- 'cholesky_factor_corr',
- 'cholesky_factor_cov',
- 'corr_matrix',
- 'cov_matrix',
- 'int',
- 'matrix',
- 'ordered',
- 'positive_ordered',
- 'real',
- 'row_vector',
- 'simplex',
- 'unit_vector',
- 'vector',
+)
+
+TYPES = (
+ 'cholesky_factor_corr',
+ 'cholesky_factor_cov',
+ 'corr_matrix',
+ 'cov_matrix',
+ 'int',
+ 'matrix',
+ 'ordered',
+ 'positive_ordered',
+ 'real',
+ 'row_vector',
+ 'simplex',
+ 'unit_vector',
+ 'vector',
'void',
)
-
-FUNCTIONS = (
- 'abs',
- 'acos',
- 'acosh',
+
+FUNCTIONS = (
+ 'abs',
+ 'acos',
+ 'acosh',
'algebra_solver',
'append_array',
- 'append_col',
- 'append_row',
- 'asin',
- 'asinh',
- 'atan',
- 'atan2',
- 'atanh',
- 'bernoulli_cdf',
+ 'append_col',
+ 'append_row',
+ 'asin',
+ 'asinh',
+ 'atan',
+ 'atan2',
+ 'atanh',
+ 'bernoulli_cdf',
'bernoulli_lccdf',
'bernoulli_lcdf',
'bernoulli_logit_lpmf',
'bernoulli_logit_rng',
'bernoulli_lpmf',
- 'bernoulli_rng',
- 'bessel_first_kind',
- 'bessel_second_kind',
- 'beta_binomial_cdf',
+ 'bernoulli_rng',
+ 'bessel_first_kind',
+ 'bessel_second_kind',
+ 'beta_binomial_cdf',
'beta_binomial_lccdf',
'beta_binomial_lcdf',
'beta_binomial_lpmf',
- 'beta_binomial_rng',
- 'beta_cdf',
+ 'beta_binomial_rng',
+ 'beta_cdf',
'beta_lccdf',
'beta_lcdf',
'beta_lpdf',
- 'beta_rng',
- 'binary_log_loss',
- 'binomial_cdf',
- 'binomial_coefficient_log',
+ 'beta_rng',
+ 'binary_log_loss',
+ 'binomial_cdf',
+ 'binomial_coefficient_log',
'binomial_lccdf',
'binomial_lcdf',
'binomial_logit_lpmf',
'binomial_lpmf',
- 'binomial_rng',
- 'block',
+ 'binomial_rng',
+ 'block',
'categorical_logit_lpmf',
'categorical_logit_rng',
'categorical_lpmf',
- 'categorical_rng',
- 'cauchy_cdf',
+ 'categorical_rng',
+ 'cauchy_cdf',
'cauchy_lccdf',
'cauchy_lcdf',
'cauchy_lpdf',
- 'cauchy_rng',
- 'cbrt',
- 'ceil',
- 'chi_square_cdf',
+ 'cauchy_rng',
+ 'cbrt',
+ 'ceil',
+ 'chi_square_cdf',
'chi_square_lccdf',
'chi_square_lcdf',
'chi_square_lpdf',
- 'chi_square_rng',
- 'cholesky_decompose',
+ 'chi_square_rng',
+ 'cholesky_decompose',
'choose',
- 'col',
- 'cols',
- 'columns_dot_product',
- 'columns_dot_self',
- 'cos',
- 'cosh',
+ 'col',
+ 'cols',
+ 'columns_dot_product',
+ 'columns_dot_self',
+ 'cos',
+ 'cosh',
'cov_exp_quad',
- 'crossprod',
- 'csr_extract_u',
- 'csr_extract_v',
- 'csr_extract_w',
- 'csr_matrix_times_vector',
- 'csr_to_dense_matrix',
- 'cumulative_sum',
- 'determinant',
- 'diag_matrix',
- 'diag_post_multiply',
- 'diag_pre_multiply',
- 'diagonal',
- 'digamma',
- 'dims',
+ 'crossprod',
+ 'csr_extract_u',
+ 'csr_extract_v',
+ 'csr_extract_w',
+ 'csr_matrix_times_vector',
+ 'csr_to_dense_matrix',
+ 'cumulative_sum',
+ 'determinant',
+ 'diag_matrix',
+ 'diag_post_multiply',
+ 'diag_pre_multiply',
+ 'diagonal',
+ 'digamma',
+ 'dims',
'dirichlet_lpdf',
- 'dirichlet_rng',
- 'distance',
- 'dot_product',
- 'dot_self',
- 'double_exponential_cdf',
+ 'dirichlet_rng',
+ 'distance',
+ 'dot_product',
+ 'dot_self',
+ 'double_exponential_cdf',
'double_exponential_lccdf',
'double_exponential_lcdf',
'double_exponential_lpdf',
- 'double_exponential_rng',
- 'e',
- 'eigenvalues_sym',
- 'eigenvectors_sym',
- 'erf',
- 'erfc',
- 'exp',
- 'exp2',
- 'exp_mod_normal_cdf',
+ 'double_exponential_rng',
+ 'e',
+ 'eigenvalues_sym',
+ 'eigenvectors_sym',
+ 'erf',
+ 'erfc',
+ 'exp',
+ 'exp2',
+ 'exp_mod_normal_cdf',
'exp_mod_normal_lccdf',
'exp_mod_normal_lcdf',
'exp_mod_normal_lpdf',
- 'exp_mod_normal_rng',
- 'expm1',
- 'exponential_cdf',
+ 'exp_mod_normal_rng',
+ 'expm1',
+ 'exponential_cdf',
'exponential_lccdf',
'exponential_lcdf',
'exponential_lpdf',
- 'exponential_rng',
- 'fabs',
- 'falling_factorial',
- 'fdim',
- 'floor',
- 'fma',
- 'fmax',
- 'fmin',
- 'fmod',
- 'frechet_cdf',
+ 'exponential_rng',
+ 'fabs',
+ 'falling_factorial',
+ 'fdim',
+ 'floor',
+ 'fma',
+ 'fmax',
+ 'fmin',
+ 'fmod',
+ 'frechet_cdf',
'frechet_lccdf',
'frechet_lcdf',
'frechet_lpdf',
- 'frechet_rng',
- 'gamma_cdf',
+ 'frechet_rng',
+ 'gamma_cdf',
'gamma_lccdf',
'gamma_lcdf',
'gamma_lpdf',
- 'gamma_p',
- 'gamma_q',
- 'gamma_rng',
+ 'gamma_p',
+ 'gamma_q',
+ 'gamma_rng',
'gaussian_dlm_obs_lpdf',
- 'get_lp',
- 'gumbel_cdf',
+ 'get_lp',
+ 'gumbel_cdf',
'gumbel_lccdf',
'gumbel_lcdf',
'gumbel_lpdf',
- 'gumbel_rng',
- 'head',
+ 'gumbel_rng',
+ 'head',
'hypergeometric_lpmf',
- 'hypergeometric_rng',
- 'hypot',
+ 'hypergeometric_rng',
+ 'hypot',
'inc_beta',
- 'int_step',
+ 'int_step',
'integrate_ode',
'integrate_ode_bdf',
'integrate_ode_rk45',
- 'inv',
- 'inv_chi_square_cdf',
+ 'inv',
+ 'inv_chi_square_cdf',
'inv_chi_square_lccdf',
'inv_chi_square_lcdf',
'inv_chi_square_lpdf',
- 'inv_chi_square_rng',
- 'inv_cloglog',
- 'inv_gamma_cdf',
+ 'inv_chi_square_rng',
+ 'inv_cloglog',
+ 'inv_gamma_cdf',
'inv_gamma_lccdf',
'inv_gamma_lcdf',
'inv_gamma_lpdf',
- 'inv_gamma_rng',
- 'inv_logit',
+ 'inv_gamma_rng',
+ 'inv_logit',
'inv_Phi',
- 'inv_sqrt',
- 'inv_square',
+ 'inv_sqrt',
+ 'inv_square',
'inv_wishart_lpdf',
- 'inv_wishart_rng',
- 'inverse',
- 'inverse_spd',
- 'is_inf',
- 'is_nan',
- 'lbeta',
+ 'inv_wishart_rng',
+ 'inverse',
+ 'inverse_spd',
+ 'is_inf',
+ 'is_nan',
+ 'lbeta',
'lchoose',
- 'lgamma',
+ 'lgamma',
'lkj_corr_cholesky_lpdf',
- 'lkj_corr_cholesky_rng',
+ 'lkj_corr_cholesky_rng',
'lkj_corr_lpdf',
- 'lkj_corr_rng',
- 'lmgamma',
+ 'lkj_corr_rng',
+ 'lmgamma',
'lmultiply',
- 'log',
- 'log10',
- 'log1m',
- 'log1m_exp',
- 'log1m_inv_logit',
- 'log1p',
- 'log1p_exp',
- 'log2',
- 'log_determinant',
- 'log_diff_exp',
- 'log_falling_factorial',
- 'log_inv_logit',
- 'log_mix',
- 'log_rising_factorial',
- 'log_softmax',
- 'log_sum_exp',
- 'logistic_cdf',
+ 'log',
+ 'log10',
+ 'log1m',
+ 'log1m_exp',
+ 'log1m_inv_logit',
+ 'log1p',
+ 'log1p_exp',
+ 'log2',
+ 'log_determinant',
+ 'log_diff_exp',
+ 'log_falling_factorial',
+ 'log_inv_logit',
+ 'log_mix',
+ 'log_rising_factorial',
+ 'log_softmax',
+ 'log_sum_exp',
+ 'logistic_cdf',
'logistic_lccdf',
'logistic_lcdf',
'logistic_lpdf',
- 'logistic_rng',
- 'logit',
- 'lognormal_cdf',
+ 'logistic_rng',
+ 'logit',
+ 'lognormal_cdf',
'lognormal_lccdf',
'lognormal_lcdf',
'lognormal_lpdf',
- 'lognormal_rng',
- 'machine_precision',
+ 'lognormal_rng',
+ 'machine_precision',
'matrix_exp',
- 'max',
+ 'max',
'mdivide_left_spd',
- 'mdivide_left_tri_low',
+ 'mdivide_left_tri_low',
'mdivide_right_spd',
- 'mdivide_right_tri_low',
- 'mean',
- 'min',
- 'modified_bessel_first_kind',
- 'modified_bessel_second_kind',
+ 'mdivide_right_tri_low',
+ 'mean',
+ 'min',
+ 'modified_bessel_first_kind',
+ 'modified_bessel_second_kind',
'multi_gp_cholesky_lpdf',
'multi_gp_lpdf',
'multi_normal_cholesky_lpdf',
- 'multi_normal_cholesky_rng',
+ 'multi_normal_cholesky_rng',
'multi_normal_lpdf',
'multi_normal_prec_lpdf',
- 'multi_normal_rng',
+ 'multi_normal_rng',
'multi_student_t_lpdf',
- 'multi_student_t_rng',
+ 'multi_student_t_rng',
'multinomial_lpmf',
- 'multinomial_rng',
- 'multiply_log',
- 'multiply_lower_tri_self_transpose',
- 'neg_binomial_2_cdf',
+ 'multinomial_rng',
+ 'multiply_log',
+ 'multiply_lower_tri_self_transpose',
+ 'neg_binomial_2_cdf',
'neg_binomial_2_lccdf',
'neg_binomial_2_lcdf',
'neg_binomial_2_log_lpmf',
- 'neg_binomial_2_log_rng',
+ 'neg_binomial_2_log_rng',
'neg_binomial_2_lpmf',
- 'neg_binomial_2_rng',
- 'neg_binomial_cdf',
+ 'neg_binomial_2_rng',
+ 'neg_binomial_cdf',
'neg_binomial_lccdf',
'neg_binomial_lcdf',
'neg_binomial_lpmf',
- 'neg_binomial_rng',
- 'negative_infinity',
- 'normal_cdf',
+ 'neg_binomial_rng',
+ 'negative_infinity',
+ 'normal_cdf',
'normal_lccdf',
'normal_lcdf',
'normal_lpdf',
- 'normal_rng',
- 'not_a_number',
- 'num_elements',
+ 'normal_rng',
+ 'not_a_number',
+ 'num_elements',
'ordered_logistic_lpmf',
- 'ordered_logistic_rng',
- 'owens_t',
- 'pareto_cdf',
+ 'ordered_logistic_rng',
+ 'owens_t',
+ 'pareto_cdf',
'pareto_lccdf',
'pareto_lcdf',
'pareto_lpdf',
- 'pareto_rng',
- 'pareto_type_2_cdf',
+ 'pareto_rng',
+ 'pareto_type_2_cdf',
'pareto_type_2_lccdf',
'pareto_type_2_lcdf',
'pareto_type_2_lpdf',
- 'pareto_type_2_rng',
+ 'pareto_type_2_rng',
'Phi',
'Phi_approx',
- 'pi',
- 'poisson_cdf',
+ 'pi',
+ 'poisson_cdf',
'poisson_lccdf',
'poisson_lcdf',
'poisson_log_lpmf',
- 'poisson_log_rng',
+ 'poisson_log_rng',
'poisson_lpmf',
- 'poisson_rng',
- 'positive_infinity',
- 'pow',
+ 'poisson_rng',
+ 'positive_infinity',
+ 'pow',
'print',
- 'prod',
- 'qr_Q',
- 'qr_R',
- 'quad_form',
- 'quad_form_diag',
- 'quad_form_sym',
- 'rank',
- 'rayleigh_cdf',
+ 'prod',
+ 'qr_Q',
+ 'qr_R',
+ 'quad_form',
+ 'quad_form_diag',
+ 'quad_form_sym',
+ 'rank',
+ 'rayleigh_cdf',
'rayleigh_lccdf',
'rayleigh_lcdf',
'rayleigh_lpdf',
- 'rayleigh_rng',
+ 'rayleigh_rng',
'reject',
- 'rep_array',
- 'rep_matrix',
- 'rep_row_vector',
- 'rep_vector',
- 'rising_factorial',
- 'round',
- 'row',
- 'rows',
- 'rows_dot_product',
- 'rows_dot_self',
- 'scaled_inv_chi_square_cdf',
+ 'rep_array',
+ 'rep_matrix',
+ 'rep_row_vector',
+ 'rep_vector',
+ 'rising_factorial',
+ 'round',
+ 'row',
+ 'rows',
+ 'rows_dot_product',
+ 'rows_dot_self',
+ 'scaled_inv_chi_square_cdf',
'scaled_inv_chi_square_lccdf',
'scaled_inv_chi_square_lcdf',
'scaled_inv_chi_square_lpdf',
- 'scaled_inv_chi_square_rng',
- 'sd',
- 'segment',
- 'sin',
- 'singular_values',
- 'sinh',
- 'size',
- 'skew_normal_cdf',
+ 'scaled_inv_chi_square_rng',
+ 'sd',
+ 'segment',
+ 'sin',
+ 'singular_values',
+ 'sinh',
+ 'size',
+ 'skew_normal_cdf',
'skew_normal_lccdf',
'skew_normal_lcdf',
'skew_normal_lpdf',
- 'skew_normal_rng',
- 'softmax',
- 'sort_asc',
- 'sort_desc',
- 'sort_indices_asc',
- 'sort_indices_desc',
- 'sqrt',
- 'sqrt2',
- 'square',
- 'squared_distance',
- 'step',
- 'student_t_cdf',
+ 'skew_normal_rng',
+ 'softmax',
+ 'sort_asc',
+ 'sort_desc',
+ 'sort_indices_asc',
+ 'sort_indices_desc',
+ 'sqrt',
+ 'sqrt2',
+ 'square',
+ 'squared_distance',
+ 'step',
+ 'student_t_cdf',
'student_t_lccdf',
'student_t_lcdf',
'student_t_lpdf',
- 'student_t_rng',
- 'sub_col',
- 'sub_row',
- 'sum',
- 'tail',
- 'tan',
- 'tanh',
+ 'student_t_rng',
+ 'sub_col',
+ 'sub_row',
+ 'sum',
+ 'tail',
+ 'tan',
+ 'tanh',
'target',
- 'tcrossprod',
- 'tgamma',
- 'to_array_1d',
- 'to_array_2d',
- 'to_matrix',
- 'to_row_vector',
- 'to_vector',
- 'trace',
- 'trace_gen_quad_form',
- 'trace_quad_form',
- 'trigamma',
- 'trunc',
- 'uniform_cdf',
+ 'tcrossprod',
+ 'tgamma',
+ 'to_array_1d',
+ 'to_array_2d',
+ 'to_matrix',
+ 'to_row_vector',
+ 'to_vector',
+ 'trace',
+ 'trace_gen_quad_form',
+ 'trace_quad_form',
+ 'trigamma',
+ 'trunc',
+ 'uniform_cdf',
'uniform_lccdf',
'uniform_lcdf',
'uniform_lpdf',
- 'uniform_rng',
- 'variance',
+ 'uniform_rng',
+ 'variance',
'von_mises_lpdf',
- 'von_mises_rng',
- 'weibull_cdf',
+ 'von_mises_rng',
+ 'weibull_cdf',
'weibull_lccdf',
'weibull_lcdf',
'weibull_lpdf',
- 'weibull_rng',
+ 'weibull_rng',
'wiener_lpdf',
'wishart_lpdf',
'wishart_rng',
-)
-
-DISTRIBUTIONS = (
- 'bernoulli',
- 'bernoulli_logit',
- 'beta',
- 'beta_binomial',
- 'binomial',
- 'binomial_logit',
- 'categorical',
- 'categorical_logit',
- 'cauchy',
- 'chi_square',
- 'dirichlet',
- 'double_exponential',
- 'exp_mod_normal',
- 'exponential',
- 'frechet',
- 'gamma',
- 'gaussian_dlm_obs',
- 'gumbel',
- 'hypergeometric',
- 'inv_chi_square',
- 'inv_gamma',
- 'inv_wishart',
- 'lkj_corr',
- 'lkj_corr_cholesky',
- 'logistic',
- 'lognormal',
- 'multi_gp',
- 'multi_gp_cholesky',
- 'multi_normal',
- 'multi_normal_cholesky',
- 'multi_normal_prec',
- 'multi_student_t',
- 'multinomial',
- 'neg_binomial',
- 'neg_binomial_2',
- 'neg_binomial_2_log',
- 'normal',
- 'ordered_logistic',
- 'pareto',
- 'pareto_type_2',
- 'poisson',
- 'poisson_log',
- 'rayleigh',
- 'scaled_inv_chi_square',
- 'skew_normal',
- 'student_t',
- 'uniform',
- 'von_mises',
- 'weibull',
- 'wiener',
+)
+
+DISTRIBUTIONS = (
+ 'bernoulli',
+ 'bernoulli_logit',
+ 'beta',
+ 'beta_binomial',
+ 'binomial',
+ 'binomial_logit',
+ 'categorical',
+ 'categorical_logit',
+ 'cauchy',
+ 'chi_square',
+ 'dirichlet',
+ 'double_exponential',
+ 'exp_mod_normal',
+ 'exponential',
+ 'frechet',
+ 'gamma',
+ 'gaussian_dlm_obs',
+ 'gumbel',
+ 'hypergeometric',
+ 'inv_chi_square',
+ 'inv_gamma',
+ 'inv_wishart',
+ 'lkj_corr',
+ 'lkj_corr_cholesky',
+ 'logistic',
+ 'lognormal',
+ 'multi_gp',
+ 'multi_gp_cholesky',
+ 'multi_normal',
+ 'multi_normal_cholesky',
+ 'multi_normal_prec',
+ 'multi_student_t',
+ 'multinomial',
+ 'neg_binomial',
+ 'neg_binomial_2',
+ 'neg_binomial_2_log',
+ 'normal',
+ 'ordered_logistic',
+ 'pareto',
+ 'pareto_type_2',
+ 'poisson',
+ 'poisson_log',
+ 'rayleigh',
+ 'scaled_inv_chi_square',
+ 'skew_normal',
+ 'student_t',
+ 'uniform',
+ 'von_mises',
+ 'weibull',
+ 'wiener',
'wishart',
-)
-
-RESERVED = (
- 'alignas',
- 'alignof',
- 'and',
- 'and_eq',
- 'asm',
- 'auto',
- 'bitand',
- 'bitor',
- 'bool',
- 'break',
- 'case',
- 'catch',
- 'char',
- 'char16_t',
- 'char32_t',
- 'class',
- 'compl',
- 'const',
- 'const_cast',
- 'constexpr',
- 'continue',
- 'decltype',
- 'default',
- 'delete',
- 'do',
- 'double',
- 'dynamic_cast',
+)
+
+RESERVED = (
+ 'alignas',
+ 'alignof',
+ 'and',
+ 'and_eq',
+ 'asm',
+ 'auto',
+ 'bitand',
+ 'bitor',
+ 'bool',
+ 'break',
+ 'case',
+ 'catch',
+ 'char',
+ 'char16_t',
+ 'char32_t',
+ 'class',
+ 'compl',
+ 'const',
+ 'const_cast',
+ 'constexpr',
+ 'continue',
+ 'decltype',
+ 'default',
+ 'delete',
+ 'do',
+ 'double',
+ 'dynamic_cast',
'else',
- 'enum',
- 'explicit',
- 'export',
- 'extern',
- 'false',
- 'float',
+ 'enum',
+ 'explicit',
+ 'export',
+ 'extern',
+ 'false',
+ 'float',
'for',
- 'friend',
- 'fvar',
- 'goto',
+ 'friend',
+ 'fvar',
+ 'goto',
'if',
'in',
- 'inline',
- 'int',
- 'long',
+ 'inline',
+ 'int',
+ 'long',
'lp__',
- 'mutable',
- 'namespace',
- 'new',
- 'noexcept',
- 'not',
- 'not_eq',
- 'nullptr',
- 'operator',
- 'or',
- 'or_eq',
- 'private',
- 'protected',
- 'public',
- 'register',
- 'reinterpret_cast',
- 'repeat',
+ 'mutable',
+ 'namespace',
+ 'new',
+ 'noexcept',
+ 'not',
+ 'not_eq',
+ 'nullptr',
+ 'operator',
+ 'or',
+ 'or_eq',
+ 'private',
+ 'protected',
+ 'public',
+ 'register',
+ 'reinterpret_cast',
+ 'repeat',
'return',
- 'short',
- 'signed',
- 'sizeof',
+ 'short',
+ 'signed',
+ 'sizeof',
'STAN_MAJOR',
'STAN_MATH_MAJOR',
'STAN_MATH_MINOR',
'STAN_MATH_PATCH',
'STAN_MINOR',
'STAN_PATCH',
- 'static',
- 'static_assert',
- 'static_cast',
- 'struct',
- 'switch',
- 'template',
- 'then',
- 'this',
- 'thread_local',
- 'throw',
- 'true',
- 'try',
- 'typedef',
- 'typeid',
- 'typename',
- 'union',
- 'unsigned',
- 'until',
- 'using',
- 'var',
- 'virtual',
- 'void',
- 'volatile',
- 'wchar_t',
+ 'static',
+ 'static_assert',
+ 'static_cast',
+ 'struct',
+ 'switch',
+ 'template',
+ 'then',
+ 'this',
+ 'thread_local',
+ 'throw',
+ 'true',
+ 'try',
+ 'typedef',
+ 'typeid',
+ 'typename',
+ 'union',
+ 'unsigned',
+ 'until',
+ 'using',
+ 'var',
+ 'virtual',
+ 'void',
+ 'volatile',
+ 'wchar_t',
'while',
- 'xor',
+ 'xor',
'xor_eq',
-)
+)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
index 9690511304..eebb7d84bb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
@@ -1,1938 +1,1938 @@
-"""
- pygments.lexers._vim_builtins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file is autogenerated by scripts/get_vimkw.py
-
+"""
+ pygments.lexers._vim_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file is autogenerated by scripts/get_vimkw.py
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Split up in multiple functions so it's importable by jython, which has a
-# per-method size limit.
-
-def _getauto():
- var = (
- ('BufAdd','BufAdd'),
- ('BufCreate','BufCreate'),
- ('BufDelete','BufDelete'),
- ('BufEnter','BufEnter'),
- ('BufFilePost','BufFilePost'),
- ('BufFilePre','BufFilePre'),
- ('BufHidden','BufHidden'),
- ('BufLeave','BufLeave'),
- ('BufNew','BufNew'),
- ('BufNewFile','BufNewFile'),
- ('BufRead','BufRead'),
- ('BufReadCmd','BufReadCmd'),
- ('BufReadPost','BufReadPost'),
- ('BufReadPre','BufReadPre'),
- ('BufUnload','BufUnload'),
- ('BufWinEnter','BufWinEnter'),
- ('BufWinLeave','BufWinLeave'),
- ('BufWipeout','BufWipeout'),
- ('BufWrite','BufWrite'),
- ('BufWriteCmd','BufWriteCmd'),
- ('BufWritePost','BufWritePost'),
- ('BufWritePre','BufWritePre'),
- ('Cmd','Cmd'),
- ('CmdwinEnter','CmdwinEnter'),
- ('CmdwinLeave','CmdwinLeave'),
- ('ColorScheme','ColorScheme'),
- ('CompleteDone','CompleteDone'),
- ('CursorHold','CursorHold'),
- ('CursorHoldI','CursorHoldI'),
- ('CursorMoved','CursorMoved'),
- ('CursorMovedI','CursorMovedI'),
- ('EncodingChanged','EncodingChanged'),
- ('FileAppendCmd','FileAppendCmd'),
- ('FileAppendPost','FileAppendPost'),
- ('FileAppendPre','FileAppendPre'),
- ('FileChangedRO','FileChangedRO'),
- ('FileChangedShell','FileChangedShell'),
- ('FileChangedShellPost','FileChangedShellPost'),
- ('FileEncoding','FileEncoding'),
- ('FileReadCmd','FileReadCmd'),
- ('FileReadPost','FileReadPost'),
- ('FileReadPre','FileReadPre'),
- ('FileType','FileType'),
- ('FileWriteCmd','FileWriteCmd'),
- ('FileWritePost','FileWritePost'),
- ('FileWritePre','FileWritePre'),
- ('FilterReadPost','FilterReadPost'),
- ('FilterReadPre','FilterReadPre'),
- ('FilterWritePost','FilterWritePost'),
- ('FilterWritePre','FilterWritePre'),
- ('FocusGained','FocusGained'),
- ('FocusLost','FocusLost'),
- ('FuncUndefined','FuncUndefined'),
- ('GUIEnter','GUIEnter'),
- ('GUIFailed','GUIFailed'),
- ('InsertChange','InsertChange'),
- ('InsertCharPre','InsertCharPre'),
- ('InsertEnter','InsertEnter'),
- ('InsertLeave','InsertLeave'),
- ('MenuPopup','MenuPopup'),
- ('QuickFixCmdPost','QuickFixCmdPost'),
- ('QuickFixCmdPre','QuickFixCmdPre'),
- ('QuitPre','QuitPre'),
- ('RemoteReply','RemoteReply'),
- ('SessionLoadPost','SessionLoadPost'),
- ('ShellCmdPost','ShellCmdPost'),
- ('ShellFilterPost','ShellFilterPost'),
- ('SourceCmd','SourceCmd'),
- ('SourcePre','SourcePre'),
- ('SpellFileMissing','SpellFileMissing'),
- ('StdinReadPost','StdinReadPost'),
- ('StdinReadPre','StdinReadPre'),
- ('SwapExists','SwapExists'),
- ('Syntax','Syntax'),
- ('TabEnter','TabEnter'),
- ('TabLeave','TabLeave'),
- ('TermChanged','TermChanged'),
- ('TermResponse','TermResponse'),
- ('TextChanged','TextChanged'),
- ('TextChangedI','TextChangedI'),
- ('User','User'),
- ('UserGettingBored','UserGettingBored'),
- ('VimEnter','VimEnter'),
- ('VimLeave','VimLeave'),
- ('VimLeavePre','VimLeavePre'),
- ('VimResized','VimResized'),
- ('WinEnter','WinEnter'),
- ('WinLeave','WinLeave'),
- ('event','event'),
- )
- return var
-auto = _getauto()
-
-def _getcommand():
- var = (
- ('a','a'),
- ('ab','ab'),
- ('abc','abclear'),
- ('abo','aboveleft'),
- ('al','all'),
- ('ar','ar'),
- ('ar','args'),
- ('arga','argadd'),
- ('argd','argdelete'),
- ('argdo','argdo'),
- ('arge','argedit'),
- ('argg','argglobal'),
- ('argl','arglocal'),
- ('argu','argument'),
- ('as','ascii'),
- ('au','au'),
- ('b','buffer'),
- ('bN','bNext'),
- ('ba','ball'),
- ('bad','badd'),
- ('bd','bdelete'),
- ('bel','belowright'),
- ('bf','bfirst'),
- ('bl','blast'),
- ('bm','bmodified'),
- ('bn','bnext'),
- ('bo','botright'),
- ('bp','bprevious'),
- ('br','br'),
- ('br','brewind'),
- ('brea','break'),
- ('breaka','breakadd'),
- ('breakd','breakdel'),
- ('breakl','breaklist'),
- ('bro','browse'),
- ('bu','bu'),
- ('buf','buf'),
- ('bufdo','bufdo'),
- ('buffers','buffers'),
- ('bun','bunload'),
- ('bw','bwipeout'),
- ('c','c'),
- ('c','change'),
- ('cN','cN'),
- ('cN','cNext'),
- ('cNf','cNf'),
- ('cNf','cNfile'),
- ('cabc','cabclear'),
- ('cad','cad'),
- ('cad','caddexpr'),
- ('caddb','caddbuffer'),
- ('caddf','caddfile'),
- ('cal','call'),
- ('cat','catch'),
- ('cb','cbuffer'),
- ('cc','cc'),
- ('ccl','cclose'),
- ('cd','cd'),
- ('ce','center'),
- ('cex','cexpr'),
- ('cf','cfile'),
- ('cfir','cfirst'),
- ('cg','cgetfile'),
- ('cgetb','cgetbuffer'),
- ('cgete','cgetexpr'),
- ('changes','changes'),
- ('chd','chdir'),
- ('che','checkpath'),
- ('checkt','checktime'),
- ('cl','cl'),
- ('cl','clist'),
- ('cla','clast'),
- ('clo','close'),
- ('cmapc','cmapclear'),
- ('cn','cn'),
- ('cn','cnext'),
- ('cnew','cnewer'),
- ('cnf','cnf'),
- ('cnf','cnfile'),
- ('co','copy'),
- ('col','colder'),
- ('colo','colorscheme'),
- ('com','com'),
- ('comc','comclear'),
- ('comp','compiler'),
- ('con','con'),
- ('con','continue'),
- ('conf','confirm'),
- ('cope','copen'),
- ('cp','cprevious'),
- ('cpf','cpfile'),
- ('cq','cquit'),
- ('cr','crewind'),
- ('cs','cs'),
- ('cscope','cscope'),
- ('cstag','cstag'),
- ('cuna','cunabbrev'),
- ('cw','cwindow'),
- ('d','d'),
- ('d','delete'),
- ('de','de'),
- ('debug','debug'),
- ('debugg','debuggreedy'),
- ('del','del'),
- ('delc','delcommand'),
- ('delel','delel'),
- ('delep','delep'),
- ('deletel','deletel'),
- ('deletep','deletep'),
- ('deletl','deletl'),
- ('deletp','deletp'),
- ('delf','delf'),
- ('delf','delfunction'),
- ('dell','dell'),
- ('delm','delmarks'),
- ('delp','delp'),
- ('dep','dep'),
- ('di','di'),
- ('di','display'),
- ('diffg','diffget'),
- ('diffo','diffoff'),
- ('diffp','diffpatch'),
- ('diffpu','diffput'),
- ('diffs','diffsplit'),
- ('difft','diffthis'),
- ('diffu','diffupdate'),
- ('dig','dig'),
- ('dig','digraphs'),
- ('dir','dir'),
- ('dj','djump'),
- ('dl','dl'),
- ('dli','dlist'),
- ('do','do'),
- ('doau','doau'),
- ('dp','dp'),
- ('dr','drop'),
- ('ds','dsearch'),
- ('dsp','dsplit'),
- ('e','e'),
- ('e','edit'),
- ('ea','ea'),
- ('earlier','earlier'),
- ('ec','ec'),
- ('echoe','echoerr'),
- ('echom','echomsg'),
- ('echon','echon'),
- ('el','else'),
- ('elsei','elseif'),
- ('em','emenu'),
- ('en','en'),
- ('en','endif'),
- ('endf','endf'),
- ('endf','endfunction'),
- ('endfo','endfor'),
- ('endfun','endfun'),
- ('endt','endtry'),
- ('endw','endwhile'),
- ('ene','enew'),
- ('ex','ex'),
- ('exi','exit'),
- ('exu','exusage'),
- ('f','f'),
- ('f','file'),
- ('files','files'),
- ('filet','filet'),
- ('filetype','filetype'),
- ('fin','fin'),
- ('fin','find'),
- ('fina','finally'),
- ('fini','finish'),
- ('fir','first'),
- ('fix','fixdel'),
- ('fo','fold'),
- ('foldc','foldclose'),
- ('foldd','folddoopen'),
- ('folddoc','folddoclosed'),
- ('foldo','foldopen'),
- ('for','for'),
- ('fu','fu'),
- ('fu','function'),
- ('fun','fun'),
- ('g','g'),
- ('go','goto'),
- ('gr','grep'),
- ('grepa','grepadd'),
- ('gui','gui'),
- ('gvim','gvim'),
- ('h','h'),
- ('h','help'),
- ('ha','hardcopy'),
- ('helpf','helpfind'),
- ('helpg','helpgrep'),
- ('helpt','helptags'),
- ('hi','hi'),
- ('hid','hide'),
- ('his','history'),
- ('i','i'),
- ('ia','ia'),
- ('iabc','iabclear'),
- ('if','if'),
- ('ij','ijump'),
- ('il','ilist'),
- ('imapc','imapclear'),
- ('in','in'),
- ('intro','intro'),
- ('is','isearch'),
- ('isp','isplit'),
- ('iuna','iunabbrev'),
- ('j','join'),
- ('ju','jumps'),
- ('k','k'),
- ('kee','keepmarks'),
- ('keepa','keepa'),
- ('keepalt','keepalt'),
- ('keepj','keepjumps'),
- ('keepp','keeppatterns'),
- ('l','l'),
- ('l','list'),
- ('lN','lN'),
- ('lN','lNext'),
- ('lNf','lNf'),
- ('lNf','lNfile'),
- ('la','la'),
- ('la','last'),
- ('lad','lad'),
- ('lad','laddexpr'),
- ('laddb','laddbuffer'),
- ('laddf','laddfile'),
- ('lan','lan'),
- ('lan','language'),
- ('lat','lat'),
- ('later','later'),
- ('lb','lbuffer'),
- ('lc','lcd'),
- ('lch','lchdir'),
- ('lcl','lclose'),
- ('lcs','lcs'),
- ('lcscope','lcscope'),
- ('le','left'),
- ('lefta','leftabove'),
- ('lex','lexpr'),
- ('lf','lfile'),
- ('lfir','lfirst'),
- ('lg','lgetfile'),
- ('lgetb','lgetbuffer'),
- ('lgete','lgetexpr'),
- ('lgr','lgrep'),
- ('lgrepa','lgrepadd'),
- ('lh','lhelpgrep'),
- ('ll','ll'),
- ('lla','llast'),
- ('lli','llist'),
- ('lmak','lmake'),
- ('lmapc','lmapclear'),
- ('lne','lne'),
- ('lne','lnext'),
- ('lnew','lnewer'),
- ('lnf','lnf'),
- ('lnf','lnfile'),
- ('lo','lo'),
- ('lo','loadview'),
- ('loadk','loadk'),
- ('loadkeymap','loadkeymap'),
- ('loc','lockmarks'),
- ('lockv','lockvar'),
- ('lol','lolder'),
- ('lop','lopen'),
- ('lp','lprevious'),
- ('lpf','lpfile'),
- ('lr','lrewind'),
- ('ls','ls'),
- ('lt','ltag'),
- ('lua','lua'),
- ('luado','luado'),
- ('luafile','luafile'),
- ('lv','lvimgrep'),
- ('lvimgrepa','lvimgrepadd'),
- ('lw','lwindow'),
- ('m','move'),
- ('ma','ma'),
- ('ma','mark'),
- ('mak','make'),
- ('marks','marks'),
- ('mat','match'),
- ('menut','menut'),
- ('menut','menutranslate'),
- ('mes','mes'),
- ('messages','messages'),
- ('mk','mk'),
- ('mk','mkexrc'),
- ('mks','mksession'),
- ('mksp','mkspell'),
- ('mkv','mkv'),
- ('mkv','mkvimrc'),
- ('mkvie','mkview'),
- ('mo','mo'),
- ('mod','mode'),
- ('mz','mz'),
- ('mz','mzscheme'),
- ('mzf','mzfile'),
- ('n','n'),
- ('n','next'),
- ('nb','nbkey'),
- ('nbc','nbclose'),
- ('nbs','nbstart'),
- ('ne','ne'),
- ('new','new'),
- ('nmapc','nmapclear'),
- ('noa','noa'),
- ('noautocmd','noautocmd'),
- ('noh','nohlsearch'),
- ('nu','number'),
- ('o','o'),
- ('o','open'),
- ('ol','oldfiles'),
- ('omapc','omapclear'),
- ('on','only'),
- ('opt','options'),
- ('ownsyntax','ownsyntax'),
- ('p','p'),
- ('p','print'),
- ('pc','pclose'),
- ('pe','pe'),
- ('pe','perl'),
- ('ped','pedit'),
- ('perld','perldo'),
- ('po','pop'),
- ('popu','popu'),
- ('popu','popup'),
- ('pp','ppop'),
- ('pr','pr'),
- ('pre','preserve'),
- ('prev','previous'),
- ('pro','pro'),
- ('prof','profile'),
- ('profd','profdel'),
- ('promptf','promptfind'),
- ('promptr','promptrepl'),
- ('ps','psearch'),
- ('ptN','ptN'),
- ('ptN','ptNext'),
- ('pta','ptag'),
- ('ptf','ptfirst'),
- ('ptj','ptjump'),
- ('ptl','ptlast'),
- ('ptn','ptn'),
- ('ptn','ptnext'),
- ('ptp','ptprevious'),
- ('ptr','ptrewind'),
- ('pts','ptselect'),
- ('pu','put'),
- ('pw','pwd'),
- ('py','py'),
- ('py','python'),
- ('py3','py3'),
- ('py3','py3'),
- ('py3do','py3do'),
- ('pydo','pydo'),
- ('pyf','pyfile'),
- ('python3','python3'),
- ('q','q'),
- ('q','quit'),
- ('qa','qall'),
- ('quita','quitall'),
- ('r','r'),
- ('r','read'),
- ('re','re'),
- ('rec','recover'),
- ('red','red'),
- ('red','redo'),
- ('redi','redir'),
- ('redr','redraw'),
- ('redraws','redrawstatus'),
- ('reg','registers'),
- ('res','resize'),
- ('ret','retab'),
- ('retu','return'),
- ('rew','rewind'),
- ('ri','right'),
- ('rightb','rightbelow'),
- ('ru','ru'),
- ('ru','runtime'),
- ('rub','ruby'),
- ('rubyd','rubydo'),
- ('rubyf','rubyfile'),
- ('rundo','rundo'),
- ('rv','rviminfo'),
- ('sN','sNext'),
- ('sa','sargument'),
- ('sal','sall'),
- ('san','sandbox'),
- ('sav','saveas'),
- ('sb','sbuffer'),
- ('sbN','sbNext'),
- ('sba','sball'),
- ('sbf','sbfirst'),
- ('sbl','sblast'),
- ('sbm','sbmodified'),
- ('sbn','sbnext'),
- ('sbp','sbprevious'),
- ('sbr','sbrewind'),
- ('scrip','scrip'),
- ('scrip','scriptnames'),
- ('scripte','scriptencoding'),
- ('scs','scs'),
- ('scscope','scscope'),
- ('se','set'),
- ('setf','setfiletype'),
- ('setg','setglobal'),
- ('setl','setlocal'),
- ('sf','sfind'),
- ('sfir','sfirst'),
- ('sh','shell'),
- ('si','si'),
- ('sig','sig'),
- ('sign','sign'),
- ('sil','silent'),
- ('sim','simalt'),
- ('sl','sl'),
- ('sl','sleep'),
- ('sla','slast'),
- ('sm','smagic'),
- ('sm','smap'),
- ('sme','sme'),
- ('smenu','smenu'),
- ('sn','snext'),
- ('sni','sniff'),
- ('sno','snomagic'),
- ('snoreme','snoreme'),
- ('snoremenu','snoremenu'),
- ('so','so'),
- ('so','source'),
- ('sor','sort'),
- ('sp','split'),
- ('spe','spe'),
- ('spe','spellgood'),
- ('spelld','spelldump'),
- ('spelli','spellinfo'),
- ('spellr','spellrepall'),
- ('spellu','spellundo'),
- ('spellw','spellwrong'),
- ('spr','sprevious'),
- ('sre','srewind'),
- ('st','st'),
- ('st','stop'),
- ('sta','stag'),
- ('star','star'),
- ('star','startinsert'),
- ('start','start'),
- ('startg','startgreplace'),
- ('startr','startreplace'),
- ('stj','stjump'),
- ('stopi','stopinsert'),
- ('sts','stselect'),
- ('sun','sunhide'),
- ('sunme','sunme'),
- ('sunmenu','sunmenu'),
- ('sus','suspend'),
- ('sv','sview'),
- ('sw','swapname'),
- ('sy','sy'),
- ('syn','syn'),
- ('sync','sync'),
- ('syncbind','syncbind'),
- ('syntime','syntime'),
- ('t','t'),
- ('tN','tN'),
- ('tN','tNext'),
- ('ta','ta'),
- ('ta','tag'),
- ('tab','tab'),
- ('tabN','tabN'),
- ('tabN','tabNext'),
- ('tabc','tabclose'),
- ('tabd','tabdo'),
- ('tabe','tabedit'),
- ('tabf','tabfind'),
- ('tabfir','tabfirst'),
- ('tabl','tablast'),
- ('tabm','tabmove'),
- ('tabn','tabnext'),
- ('tabnew','tabnew'),
- ('tabo','tabonly'),
- ('tabp','tabprevious'),
- ('tabr','tabrewind'),
- ('tabs','tabs'),
- ('tags','tags'),
- ('tc','tcl'),
- ('tcld','tcldo'),
- ('tclf','tclfile'),
- ('te','tearoff'),
- ('tf','tfirst'),
- ('th','throw'),
- ('tj','tjump'),
- ('tl','tlast'),
- ('tm','tm'),
- ('tm','tmenu'),
- ('tn','tn'),
- ('tn','tnext'),
- ('to','topleft'),
- ('tp','tprevious'),
- ('tr','tr'),
- ('tr','trewind'),
- ('try','try'),
- ('ts','tselect'),
- ('tu','tu'),
- ('tu','tunmenu'),
- ('u','u'),
- ('u','undo'),
- ('un','un'),
- ('una','unabbreviate'),
- ('undoj','undojoin'),
- ('undol','undolist'),
- ('unh','unhide'),
- ('unl','unl'),
- ('unlo','unlockvar'),
- ('uns','unsilent'),
- ('up','update'),
- ('v','v'),
- ('ve','ve'),
- ('ve','version'),
- ('verb','verbose'),
- ('vert','vertical'),
- ('vi','vi'),
- ('vi','visual'),
- ('vie','view'),
- ('vim','vimgrep'),
- ('vimgrepa','vimgrepadd'),
- ('viu','viusage'),
- ('vmapc','vmapclear'),
- ('vne','vnew'),
- ('vs','vsplit'),
- ('w','w'),
- ('w','write'),
- ('wN','wNext'),
- ('wa','wall'),
- ('wh','while'),
- ('win','win'),
- ('win','winsize'),
- ('winc','wincmd'),
- ('windo','windo'),
- ('winp','winpos'),
- ('wn','wnext'),
- ('wp','wprevious'),
- ('wq','wq'),
- ('wqa','wqall'),
- ('ws','wsverb'),
- ('wundo','wundo'),
- ('wv','wviminfo'),
- ('x','x'),
- ('x','xit'),
- ('xa','xall'),
- ('xmapc','xmapclear'),
- ('xme','xme'),
- ('xmenu','xmenu'),
- ('xnoreme','xnoreme'),
- ('xnoremenu','xnoremenu'),
- ('xunme','xunme'),
- ('xunmenu','xunmenu'),
- ('xwininfo','xwininfo'),
- ('y','yank'),
- )
- return var
-command = _getcommand()
-
-def _getoption():
- var = (
- ('acd','acd'),
- ('ai','ai'),
- ('akm','akm'),
- ('al','al'),
- ('aleph','aleph'),
- ('allowrevins','allowrevins'),
- ('altkeymap','altkeymap'),
- ('ambiwidth','ambiwidth'),
- ('ambw','ambw'),
- ('anti','anti'),
- ('antialias','antialias'),
- ('ar','ar'),
- ('arab','arab'),
- ('arabic','arabic'),
- ('arabicshape','arabicshape'),
- ('ari','ari'),
- ('arshape','arshape'),
- ('autochdir','autochdir'),
- ('autoindent','autoindent'),
- ('autoread','autoread'),
- ('autowrite','autowrite'),
- ('autowriteall','autowriteall'),
- ('aw','aw'),
- ('awa','awa'),
- ('background','background'),
- ('backspace','backspace'),
- ('backup','backup'),
- ('backupcopy','backupcopy'),
- ('backupdir','backupdir'),
- ('backupext','backupext'),
- ('backupskip','backupskip'),
- ('balloondelay','balloondelay'),
- ('ballooneval','ballooneval'),
- ('balloonexpr','balloonexpr'),
- ('bdir','bdir'),
- ('bdlay','bdlay'),
- ('beval','beval'),
- ('bex','bex'),
- ('bexpr','bexpr'),
- ('bg','bg'),
- ('bh','bh'),
- ('bin','bin'),
- ('binary','binary'),
- ('biosk','biosk'),
- ('bioskey','bioskey'),
- ('bk','bk'),
- ('bkc','bkc'),
- ('bl','bl'),
- ('bomb','bomb'),
- ('breakat','breakat'),
- ('brk','brk'),
- ('browsedir','browsedir'),
- ('bs','bs'),
- ('bsdir','bsdir'),
- ('bsk','bsk'),
- ('bt','bt'),
- ('bufhidden','bufhidden'),
- ('buflisted','buflisted'),
- ('buftype','buftype'),
- ('casemap','casemap'),
- ('cb','cb'),
- ('cc','cc'),
- ('ccv','ccv'),
- ('cd','cd'),
- ('cdpath','cdpath'),
- ('cedit','cedit'),
- ('cf','cf'),
- ('cfu','cfu'),
- ('ch','ch'),
- ('charconvert','charconvert'),
- ('ci','ci'),
- ('cin','cin'),
- ('cindent','cindent'),
- ('cink','cink'),
- ('cinkeys','cinkeys'),
- ('cino','cino'),
- ('cinoptions','cinoptions'),
- ('cinw','cinw'),
- ('cinwords','cinwords'),
- ('clipboard','clipboard'),
- ('cmdheight','cmdheight'),
- ('cmdwinheight','cmdwinheight'),
- ('cmp','cmp'),
- ('cms','cms'),
- ('co','co'),
- ('cocu','cocu'),
- ('cole','cole'),
- ('colorcolumn','colorcolumn'),
- ('columns','columns'),
- ('com','com'),
- ('comments','comments'),
- ('commentstring','commentstring'),
- ('compatible','compatible'),
- ('complete','complete'),
- ('completefunc','completefunc'),
- ('completeopt','completeopt'),
- ('concealcursor','concealcursor'),
- ('conceallevel','conceallevel'),
- ('confirm','confirm'),
- ('consk','consk'),
- ('conskey','conskey'),
- ('copyindent','copyindent'),
- ('cot','cot'),
- ('cp','cp'),
- ('cpo','cpo'),
- ('cpoptions','cpoptions'),
- ('cpt','cpt'),
- ('crb','crb'),
- ('cryptmethod','cryptmethod'),
- ('cscopepathcomp','cscopepathcomp'),
- ('cscopeprg','cscopeprg'),
- ('cscopequickfix','cscopequickfix'),
- ('cscoperelative','cscoperelative'),
- ('cscopetag','cscopetag'),
- ('cscopetagorder','cscopetagorder'),
- ('cscopeverbose','cscopeverbose'),
- ('cspc','cspc'),
- ('csprg','csprg'),
- ('csqf','csqf'),
- ('csre','csre'),
- ('cst','cst'),
- ('csto','csto'),
- ('csverb','csverb'),
- ('cuc','cuc'),
- ('cul','cul'),
- ('cursorbind','cursorbind'),
- ('cursorcolumn','cursorcolumn'),
- ('cursorline','cursorline'),
- ('cwh','cwh'),
- ('debug','debug'),
- ('deco','deco'),
- ('def','def'),
- ('define','define'),
- ('delcombine','delcombine'),
- ('dex','dex'),
- ('dg','dg'),
- ('dict','dict'),
- ('dictionary','dictionary'),
- ('diff','diff'),
- ('diffexpr','diffexpr'),
- ('diffopt','diffopt'),
- ('digraph','digraph'),
- ('dip','dip'),
- ('dir','dir'),
- ('directory','directory'),
- ('display','display'),
- ('dy','dy'),
- ('ea','ea'),
- ('ead','ead'),
- ('eadirection','eadirection'),
- ('eb','eb'),
- ('ed','ed'),
- ('edcompatible','edcompatible'),
- ('ef','ef'),
- ('efm','efm'),
- ('ei','ei'),
- ('ek','ek'),
- ('enc','enc'),
- ('encoding','encoding'),
- ('endofline','endofline'),
- ('eol','eol'),
- ('ep','ep'),
- ('equalalways','equalalways'),
- ('equalprg','equalprg'),
- ('errorbells','errorbells'),
- ('errorfile','errorfile'),
- ('errorformat','errorformat'),
- ('esckeys','esckeys'),
- ('et','et'),
- ('eventignore','eventignore'),
- ('ex','ex'),
- ('expandtab','expandtab'),
- ('exrc','exrc'),
- ('fcl','fcl'),
- ('fcs','fcs'),
- ('fdc','fdc'),
- ('fde','fde'),
- ('fdi','fdi'),
- ('fdl','fdl'),
- ('fdls','fdls'),
- ('fdm','fdm'),
- ('fdn','fdn'),
- ('fdo','fdo'),
- ('fdt','fdt'),
- ('fen','fen'),
- ('fenc','fenc'),
- ('fencs','fencs'),
- ('fex','fex'),
- ('ff','ff'),
- ('ffs','ffs'),
- ('fic','fic'),
- ('fileencoding','fileencoding'),
- ('fileencodings','fileencodings'),
- ('fileformat','fileformat'),
- ('fileformats','fileformats'),
- ('fileignorecase','fileignorecase'),
- ('filetype','filetype'),
- ('fillchars','fillchars'),
- ('fk','fk'),
- ('fkmap','fkmap'),
- ('flp','flp'),
- ('fml','fml'),
- ('fmr','fmr'),
- ('fo','fo'),
- ('foldclose','foldclose'),
- ('foldcolumn','foldcolumn'),
- ('foldenable','foldenable'),
- ('foldexpr','foldexpr'),
- ('foldignore','foldignore'),
- ('foldlevel','foldlevel'),
- ('foldlevelstart','foldlevelstart'),
- ('foldmarker','foldmarker'),
- ('foldmethod','foldmethod'),
- ('foldminlines','foldminlines'),
- ('foldnestmax','foldnestmax'),
- ('foldopen','foldopen'),
- ('foldtext','foldtext'),
- ('formatexpr','formatexpr'),
- ('formatlistpat','formatlistpat'),
- ('formatoptions','formatoptions'),
- ('formatprg','formatprg'),
- ('fp','fp'),
- ('fs','fs'),
- ('fsync','fsync'),
- ('ft','ft'),
- ('gcr','gcr'),
- ('gd','gd'),
- ('gdefault','gdefault'),
- ('gfm','gfm'),
- ('gfn','gfn'),
- ('gfs','gfs'),
- ('gfw','gfw'),
- ('ghr','ghr'),
- ('go','go'),
- ('gp','gp'),
- ('grepformat','grepformat'),
- ('grepprg','grepprg'),
- ('gtl','gtl'),
- ('gtt','gtt'),
- ('guicursor','guicursor'),
- ('guifont','guifont'),
- ('guifontset','guifontset'),
- ('guifontwide','guifontwide'),
- ('guiheadroom','guiheadroom'),
- ('guioptions','guioptions'),
- ('guipty','guipty'),
- ('guitablabel','guitablabel'),
- ('guitabtooltip','guitabtooltip'),
- ('helpfile','helpfile'),
- ('helpheight','helpheight'),
- ('helplang','helplang'),
- ('hf','hf'),
- ('hh','hh'),
- ('hi','hi'),
- ('hid','hid'),
- ('hidden','hidden'),
- ('highlight','highlight'),
- ('history','history'),
- ('hk','hk'),
- ('hkmap','hkmap'),
- ('hkmapp','hkmapp'),
- ('hkp','hkp'),
- ('hl','hl'),
- ('hlg','hlg'),
- ('hls','hls'),
- ('hlsearch','hlsearch'),
- ('ic','ic'),
- ('icon','icon'),
- ('iconstring','iconstring'),
- ('ignorecase','ignorecase'),
- ('im','im'),
- ('imactivatefunc','imactivatefunc'),
- ('imactivatekey','imactivatekey'),
- ('imaf','imaf'),
- ('imak','imak'),
- ('imc','imc'),
- ('imcmdline','imcmdline'),
- ('imd','imd'),
- ('imdisable','imdisable'),
- ('imi','imi'),
- ('iminsert','iminsert'),
- ('ims','ims'),
- ('imsearch','imsearch'),
- ('imsf','imsf'),
- ('imstatusfunc','imstatusfunc'),
- ('inc','inc'),
- ('include','include'),
- ('includeexpr','includeexpr'),
- ('incsearch','incsearch'),
- ('inde','inde'),
- ('indentexpr','indentexpr'),
- ('indentkeys','indentkeys'),
- ('indk','indk'),
- ('inex','inex'),
- ('inf','inf'),
- ('infercase','infercase'),
- ('inoremap','inoremap'),
- ('insertmode','insertmode'),
- ('invacd','invacd'),
- ('invai','invai'),
- ('invakm','invakm'),
- ('invallowrevins','invallowrevins'),
- ('invaltkeymap','invaltkeymap'),
- ('invanti','invanti'),
- ('invantialias','invantialias'),
- ('invar','invar'),
- ('invarab','invarab'),
- ('invarabic','invarabic'),
- ('invarabicshape','invarabicshape'),
- ('invari','invari'),
- ('invarshape','invarshape'),
- ('invautochdir','invautochdir'),
- ('invautoindent','invautoindent'),
- ('invautoread','invautoread'),
- ('invautowrite','invautowrite'),
- ('invautowriteall','invautowriteall'),
- ('invaw','invaw'),
- ('invawa','invawa'),
- ('invbackup','invbackup'),
- ('invballooneval','invballooneval'),
- ('invbeval','invbeval'),
- ('invbin','invbin'),
- ('invbinary','invbinary'),
- ('invbiosk','invbiosk'),
- ('invbioskey','invbioskey'),
- ('invbk','invbk'),
- ('invbl','invbl'),
- ('invbomb','invbomb'),
- ('invbuflisted','invbuflisted'),
- ('invcf','invcf'),
- ('invci','invci'),
- ('invcin','invcin'),
- ('invcindent','invcindent'),
- ('invcompatible','invcompatible'),
- ('invconfirm','invconfirm'),
- ('invconsk','invconsk'),
- ('invconskey','invconskey'),
- ('invcopyindent','invcopyindent'),
- ('invcp','invcp'),
- ('invcrb','invcrb'),
- ('invcscoperelative','invcscoperelative'),
- ('invcscopetag','invcscopetag'),
- ('invcscopeverbose','invcscopeverbose'),
- ('invcsre','invcsre'),
- ('invcst','invcst'),
- ('invcsverb','invcsverb'),
- ('invcuc','invcuc'),
- ('invcul','invcul'),
- ('invcursorbind','invcursorbind'),
- ('invcursorcolumn','invcursorcolumn'),
- ('invcursorline','invcursorline'),
- ('invdeco','invdeco'),
- ('invdelcombine','invdelcombine'),
- ('invdg','invdg'),
- ('invdiff','invdiff'),
- ('invdigraph','invdigraph'),
- ('invea','invea'),
- ('inveb','inveb'),
- ('inved','inved'),
- ('invedcompatible','invedcompatible'),
- ('invek','invek'),
- ('invendofline','invendofline'),
- ('inveol','inveol'),
- ('invequalalways','invequalalways'),
- ('inverrorbells','inverrorbells'),
- ('invesckeys','invesckeys'),
- ('invet','invet'),
- ('invex','invex'),
- ('invexpandtab','invexpandtab'),
- ('invexrc','invexrc'),
- ('invfen','invfen'),
- ('invfic','invfic'),
- ('invfileignorecase','invfileignorecase'),
- ('invfk','invfk'),
- ('invfkmap','invfkmap'),
- ('invfoldenable','invfoldenable'),
- ('invgd','invgd'),
- ('invgdefault','invgdefault'),
- ('invguipty','invguipty'),
- ('invhid','invhid'),
- ('invhidden','invhidden'),
- ('invhk','invhk'),
- ('invhkmap','invhkmap'),
- ('invhkmapp','invhkmapp'),
- ('invhkp','invhkp'),
- ('invhls','invhls'),
- ('invhlsearch','invhlsearch'),
- ('invic','invic'),
- ('invicon','invicon'),
- ('invignorecase','invignorecase'),
- ('invim','invim'),
- ('invimc','invimc'),
- ('invimcmdline','invimcmdline'),
- ('invimd','invimd'),
- ('invimdisable','invimdisable'),
- ('invincsearch','invincsearch'),
- ('invinf','invinf'),
- ('invinfercase','invinfercase'),
- ('invinsertmode','invinsertmode'),
- ('invis','invis'),
- ('invjoinspaces','invjoinspaces'),
- ('invjs','invjs'),
- ('invlazyredraw','invlazyredraw'),
- ('invlbr','invlbr'),
- ('invlinebreak','invlinebreak'),
- ('invlisp','invlisp'),
- ('invlist','invlist'),
- ('invloadplugins','invloadplugins'),
- ('invlpl','invlpl'),
- ('invlz','invlz'),
- ('invma','invma'),
- ('invmacatsui','invmacatsui'),
- ('invmagic','invmagic'),
- ('invmh','invmh'),
- ('invml','invml'),
- ('invmod','invmod'),
- ('invmodeline','invmodeline'),
- ('invmodifiable','invmodifiable'),
- ('invmodified','invmodified'),
- ('invmore','invmore'),
- ('invmousef','invmousef'),
- ('invmousefocus','invmousefocus'),
- ('invmousehide','invmousehide'),
- ('invnu','invnu'),
- ('invnumber','invnumber'),
- ('invodev','invodev'),
- ('invopendevice','invopendevice'),
- ('invpaste','invpaste'),
- ('invpi','invpi'),
- ('invpreserveindent','invpreserveindent'),
- ('invpreviewwindow','invpreviewwindow'),
- ('invprompt','invprompt'),
- ('invpvw','invpvw'),
- ('invreadonly','invreadonly'),
- ('invrelativenumber','invrelativenumber'),
- ('invremap','invremap'),
- ('invrestorescreen','invrestorescreen'),
- ('invrevins','invrevins'),
- ('invri','invri'),
- ('invrightleft','invrightleft'),
- ('invrl','invrl'),
- ('invrnu','invrnu'),
- ('invro','invro'),
- ('invrs','invrs'),
- ('invru','invru'),
- ('invruler','invruler'),
- ('invsb','invsb'),
- ('invsc','invsc'),
- ('invscb','invscb'),
- ('invscrollbind','invscrollbind'),
- ('invscs','invscs'),
- ('invsecure','invsecure'),
- ('invsft','invsft'),
- ('invshellslash','invshellslash'),
- ('invshelltemp','invshelltemp'),
- ('invshiftround','invshiftround'),
- ('invshortname','invshortname'),
- ('invshowcmd','invshowcmd'),
- ('invshowfulltag','invshowfulltag'),
- ('invshowmatch','invshowmatch'),
- ('invshowmode','invshowmode'),
- ('invsi','invsi'),
- ('invsm','invsm'),
- ('invsmartcase','invsmartcase'),
- ('invsmartindent','invsmartindent'),
- ('invsmarttab','invsmarttab'),
- ('invsmd','invsmd'),
- ('invsn','invsn'),
- ('invsol','invsol'),
- ('invspell','invspell'),
- ('invsplitbelow','invsplitbelow'),
- ('invsplitright','invsplitright'),
- ('invspr','invspr'),
- ('invsr','invsr'),
- ('invssl','invssl'),
- ('invsta','invsta'),
- ('invstartofline','invstartofline'),
- ('invstmp','invstmp'),
- ('invswapfile','invswapfile'),
- ('invswf','invswf'),
- ('invta','invta'),
- ('invtagbsearch','invtagbsearch'),
- ('invtagrelative','invtagrelative'),
- ('invtagstack','invtagstack'),
- ('invtbi','invtbi'),
- ('invtbidi','invtbidi'),
- ('invtbs','invtbs'),
- ('invtermbidi','invtermbidi'),
- ('invterse','invterse'),
- ('invtextauto','invtextauto'),
- ('invtextmode','invtextmode'),
- ('invtf','invtf'),
- ('invtgst','invtgst'),
- ('invtildeop','invtildeop'),
- ('invtimeout','invtimeout'),
- ('invtitle','invtitle'),
- ('invto','invto'),
- ('invtop','invtop'),
- ('invtr','invtr'),
- ('invttimeout','invttimeout'),
- ('invttybuiltin','invttybuiltin'),
- ('invttyfast','invttyfast'),
- ('invtx','invtx'),
- ('invudf','invudf'),
- ('invundofile','invundofile'),
- ('invvb','invvb'),
- ('invvisualbell','invvisualbell'),
- ('invwa','invwa'),
- ('invwarn','invwarn'),
- ('invwb','invwb'),
- ('invweirdinvert','invweirdinvert'),
- ('invwfh','invwfh'),
- ('invwfw','invwfw'),
- ('invwic','invwic'),
- ('invwildignorecase','invwildignorecase'),
- ('invwildmenu','invwildmenu'),
- ('invwinfixheight','invwinfixheight'),
- ('invwinfixwidth','invwinfixwidth'),
- ('invwiv','invwiv'),
- ('invwmnu','invwmnu'),
- ('invwrap','invwrap'),
- ('invwrapscan','invwrapscan'),
- ('invwrite','invwrite'),
- ('invwriteany','invwriteany'),
- ('invwritebackup','invwritebackup'),
- ('invws','invws'),
- ('is','is'),
- ('isf','isf'),
- ('isfname','isfname'),
- ('isi','isi'),
- ('isident','isident'),
- ('isk','isk'),
- ('iskeyword','iskeyword'),
- ('isp','isp'),
- ('isprint','isprint'),
- ('joinspaces','joinspaces'),
- ('js','js'),
- ('key','key'),
- ('keymap','keymap'),
- ('keymodel','keymodel'),
- ('keywordprg','keywordprg'),
- ('km','km'),
- ('kmp','kmp'),
- ('kp','kp'),
- ('langmap','langmap'),
- ('langmenu','langmenu'),
- ('laststatus','laststatus'),
- ('lazyredraw','lazyredraw'),
- ('lbr','lbr'),
- ('lcs','lcs'),
- ('linebreak','linebreak'),
- ('lines','lines'),
- ('linespace','linespace'),
- ('lisp','lisp'),
- ('lispwords','lispwords'),
- ('list','list'),
- ('listchars','listchars'),
- ('lm','lm'),
- ('lmap','lmap'),
- ('loadplugins','loadplugins'),
- ('lpl','lpl'),
- ('ls','ls'),
- ('lsp','lsp'),
- ('lw','lw'),
- ('lz','lz'),
- ('ma','ma'),
- ('macatsui','macatsui'),
- ('magic','magic'),
- ('makeef','makeef'),
- ('makeprg','makeprg'),
- ('mat','mat'),
- ('matchpairs','matchpairs'),
- ('matchtime','matchtime'),
- ('maxcombine','maxcombine'),
- ('maxfuncdepth','maxfuncdepth'),
- ('maxmapdepth','maxmapdepth'),
- ('maxmem','maxmem'),
- ('maxmempattern','maxmempattern'),
- ('maxmemtot','maxmemtot'),
- ('mco','mco'),
- ('mef','mef'),
- ('menuitems','menuitems'),
- ('mfd','mfd'),
- ('mh','mh'),
- ('mis','mis'),
- ('mkspellmem','mkspellmem'),
- ('ml','ml'),
- ('mls','mls'),
- ('mm','mm'),
- ('mmd','mmd'),
- ('mmp','mmp'),
- ('mmt','mmt'),
- ('mod','mod'),
- ('modeline','modeline'),
- ('modelines','modelines'),
- ('modifiable','modifiable'),
- ('modified','modified'),
- ('more','more'),
- ('mouse','mouse'),
- ('mousef','mousef'),
- ('mousefocus','mousefocus'),
- ('mousehide','mousehide'),
- ('mousem','mousem'),
- ('mousemodel','mousemodel'),
- ('mouses','mouses'),
- ('mouseshape','mouseshape'),
- ('mouset','mouset'),
- ('mousetime','mousetime'),
- ('mp','mp'),
- ('mps','mps'),
- ('msm','msm'),
- ('mzq','mzq'),
- ('mzquantum','mzquantum'),
- ('nf','nf'),
- ('nnoremap','nnoremap'),
- ('noacd','noacd'),
- ('noai','noai'),
- ('noakm','noakm'),
- ('noallowrevins','noallowrevins'),
- ('noaltkeymap','noaltkeymap'),
- ('noanti','noanti'),
- ('noantialias','noantialias'),
- ('noar','noar'),
- ('noarab','noarab'),
- ('noarabic','noarabic'),
- ('noarabicshape','noarabicshape'),
- ('noari','noari'),
- ('noarshape','noarshape'),
- ('noautochdir','noautochdir'),
- ('noautoindent','noautoindent'),
- ('noautoread','noautoread'),
- ('noautowrite','noautowrite'),
- ('noautowriteall','noautowriteall'),
- ('noaw','noaw'),
- ('noawa','noawa'),
- ('nobackup','nobackup'),
- ('noballooneval','noballooneval'),
- ('nobeval','nobeval'),
- ('nobin','nobin'),
- ('nobinary','nobinary'),
- ('nobiosk','nobiosk'),
- ('nobioskey','nobioskey'),
- ('nobk','nobk'),
- ('nobl','nobl'),
- ('nobomb','nobomb'),
- ('nobuflisted','nobuflisted'),
- ('nocf','nocf'),
- ('noci','noci'),
- ('nocin','nocin'),
- ('nocindent','nocindent'),
- ('nocompatible','nocompatible'),
- ('noconfirm','noconfirm'),
- ('noconsk','noconsk'),
- ('noconskey','noconskey'),
- ('nocopyindent','nocopyindent'),
- ('nocp','nocp'),
- ('nocrb','nocrb'),
- ('nocscoperelative','nocscoperelative'),
- ('nocscopetag','nocscopetag'),
- ('nocscopeverbose','nocscopeverbose'),
- ('nocsre','nocsre'),
- ('nocst','nocst'),
- ('nocsverb','nocsverb'),
- ('nocuc','nocuc'),
- ('nocul','nocul'),
- ('nocursorbind','nocursorbind'),
- ('nocursorcolumn','nocursorcolumn'),
- ('nocursorline','nocursorline'),
- ('nodeco','nodeco'),
- ('nodelcombine','nodelcombine'),
- ('nodg','nodg'),
- ('nodiff','nodiff'),
- ('nodigraph','nodigraph'),
- ('noea','noea'),
- ('noeb','noeb'),
- ('noed','noed'),
- ('noedcompatible','noedcompatible'),
- ('noek','noek'),
- ('noendofline','noendofline'),
- ('noeol','noeol'),
- ('noequalalways','noequalalways'),
- ('noerrorbells','noerrorbells'),
- ('noesckeys','noesckeys'),
- ('noet','noet'),
- ('noex','noex'),
- ('noexpandtab','noexpandtab'),
- ('noexrc','noexrc'),
- ('nofen','nofen'),
- ('nofic','nofic'),
- ('nofileignorecase','nofileignorecase'),
- ('nofk','nofk'),
- ('nofkmap','nofkmap'),
- ('nofoldenable','nofoldenable'),
- ('nogd','nogd'),
- ('nogdefault','nogdefault'),
- ('noguipty','noguipty'),
- ('nohid','nohid'),
- ('nohidden','nohidden'),
- ('nohk','nohk'),
- ('nohkmap','nohkmap'),
- ('nohkmapp','nohkmapp'),
- ('nohkp','nohkp'),
- ('nohls','nohls'),
- ('nohlsearch','nohlsearch'),
- ('noic','noic'),
- ('noicon','noicon'),
- ('noignorecase','noignorecase'),
- ('noim','noim'),
- ('noimc','noimc'),
- ('noimcmdline','noimcmdline'),
- ('noimd','noimd'),
- ('noimdisable','noimdisable'),
- ('noincsearch','noincsearch'),
- ('noinf','noinf'),
- ('noinfercase','noinfercase'),
- ('noinsertmode','noinsertmode'),
- ('nois','nois'),
- ('nojoinspaces','nojoinspaces'),
- ('nojs','nojs'),
- ('nolazyredraw','nolazyredraw'),
- ('nolbr','nolbr'),
- ('nolinebreak','nolinebreak'),
- ('nolisp','nolisp'),
- ('nolist','nolist'),
- ('noloadplugins','noloadplugins'),
- ('nolpl','nolpl'),
- ('nolz','nolz'),
- ('noma','noma'),
- ('nomacatsui','nomacatsui'),
- ('nomagic','nomagic'),
- ('nomh','nomh'),
- ('noml','noml'),
- ('nomod','nomod'),
- ('nomodeline','nomodeline'),
- ('nomodifiable','nomodifiable'),
- ('nomodified','nomodified'),
- ('nomore','nomore'),
- ('nomousef','nomousef'),
- ('nomousefocus','nomousefocus'),
- ('nomousehide','nomousehide'),
- ('nonu','nonu'),
- ('nonumber','nonumber'),
- ('noodev','noodev'),
- ('noopendevice','noopendevice'),
- ('nopaste','nopaste'),
- ('nopi','nopi'),
- ('nopreserveindent','nopreserveindent'),
- ('nopreviewwindow','nopreviewwindow'),
- ('noprompt','noprompt'),
- ('nopvw','nopvw'),
- ('noreadonly','noreadonly'),
- ('norelativenumber','norelativenumber'),
- ('noremap','noremap'),
- ('norestorescreen','norestorescreen'),
- ('norevins','norevins'),
- ('nori','nori'),
- ('norightleft','norightleft'),
- ('norl','norl'),
- ('nornu','nornu'),
- ('noro','noro'),
- ('nors','nors'),
- ('noru','noru'),
- ('noruler','noruler'),
- ('nosb','nosb'),
- ('nosc','nosc'),
- ('noscb','noscb'),
- ('noscrollbind','noscrollbind'),
- ('noscs','noscs'),
- ('nosecure','nosecure'),
- ('nosft','nosft'),
- ('noshellslash','noshellslash'),
- ('noshelltemp','noshelltemp'),
- ('noshiftround','noshiftround'),
- ('noshortname','noshortname'),
- ('noshowcmd','noshowcmd'),
- ('noshowfulltag','noshowfulltag'),
- ('noshowmatch','noshowmatch'),
- ('noshowmode','noshowmode'),
- ('nosi','nosi'),
- ('nosm','nosm'),
- ('nosmartcase','nosmartcase'),
- ('nosmartindent','nosmartindent'),
- ('nosmarttab','nosmarttab'),
- ('nosmd','nosmd'),
- ('nosn','nosn'),
- ('nosol','nosol'),
- ('nospell','nospell'),
- ('nosplitbelow','nosplitbelow'),
- ('nosplitright','nosplitright'),
- ('nospr','nospr'),
- ('nosr','nosr'),
- ('nossl','nossl'),
- ('nosta','nosta'),
- ('nostartofline','nostartofline'),
- ('nostmp','nostmp'),
- ('noswapfile','noswapfile'),
- ('noswf','noswf'),
- ('nota','nota'),
- ('notagbsearch','notagbsearch'),
- ('notagrelative','notagrelative'),
- ('notagstack','notagstack'),
- ('notbi','notbi'),
- ('notbidi','notbidi'),
- ('notbs','notbs'),
- ('notermbidi','notermbidi'),
- ('noterse','noterse'),
- ('notextauto','notextauto'),
- ('notextmode','notextmode'),
- ('notf','notf'),
- ('notgst','notgst'),
- ('notildeop','notildeop'),
- ('notimeout','notimeout'),
- ('notitle','notitle'),
- ('noto','noto'),
- ('notop','notop'),
- ('notr','notr'),
- ('nottimeout','nottimeout'),
- ('nottybuiltin','nottybuiltin'),
- ('nottyfast','nottyfast'),
- ('notx','notx'),
- ('noudf','noudf'),
- ('noundofile','noundofile'),
- ('novb','novb'),
- ('novisualbell','novisualbell'),
- ('nowa','nowa'),
- ('nowarn','nowarn'),
- ('nowb','nowb'),
- ('noweirdinvert','noweirdinvert'),
- ('nowfh','nowfh'),
- ('nowfw','nowfw'),
- ('nowic','nowic'),
- ('nowildignorecase','nowildignorecase'),
- ('nowildmenu','nowildmenu'),
- ('nowinfixheight','nowinfixheight'),
- ('nowinfixwidth','nowinfixwidth'),
- ('nowiv','nowiv'),
- ('nowmnu','nowmnu'),
- ('nowrap','nowrap'),
- ('nowrapscan','nowrapscan'),
- ('nowrite','nowrite'),
- ('nowriteany','nowriteany'),
- ('nowritebackup','nowritebackup'),
- ('nows','nows'),
- ('nrformats','nrformats'),
- ('nu','nu'),
- ('number','number'),
- ('numberwidth','numberwidth'),
- ('nuw','nuw'),
- ('odev','odev'),
- ('oft','oft'),
- ('ofu','ofu'),
- ('omnifunc','omnifunc'),
- ('opendevice','opendevice'),
- ('operatorfunc','operatorfunc'),
- ('opfunc','opfunc'),
- ('osfiletype','osfiletype'),
- ('pa','pa'),
- ('para','para'),
- ('paragraphs','paragraphs'),
- ('paste','paste'),
- ('pastetoggle','pastetoggle'),
- ('patchexpr','patchexpr'),
- ('patchmode','patchmode'),
- ('path','path'),
- ('pdev','pdev'),
- ('penc','penc'),
- ('pex','pex'),
- ('pexpr','pexpr'),
- ('pfn','pfn'),
- ('ph','ph'),
- ('pheader','pheader'),
- ('pi','pi'),
- ('pm','pm'),
- ('pmbcs','pmbcs'),
- ('pmbfn','pmbfn'),
- ('popt','popt'),
- ('preserveindent','preserveindent'),
- ('previewheight','previewheight'),
- ('previewwindow','previewwindow'),
- ('printdevice','printdevice'),
- ('printencoding','printencoding'),
- ('printexpr','printexpr'),
- ('printfont','printfont'),
- ('printheader','printheader'),
- ('printmbcharset','printmbcharset'),
- ('printmbfont','printmbfont'),
- ('printoptions','printoptions'),
- ('prompt','prompt'),
- ('pt','pt'),
- ('pumheight','pumheight'),
- ('pvh','pvh'),
- ('pvw','pvw'),
- ('qe','qe'),
- ('quoteescape','quoteescape'),
- ('rdt','rdt'),
- ('re','re'),
- ('readonly','readonly'),
- ('redrawtime','redrawtime'),
- ('regexpengine','regexpengine'),
- ('relativenumber','relativenumber'),
- ('remap','remap'),
- ('report','report'),
- ('restorescreen','restorescreen'),
- ('revins','revins'),
- ('ri','ri'),
- ('rightleft','rightleft'),
- ('rightleftcmd','rightleftcmd'),
- ('rl','rl'),
- ('rlc','rlc'),
- ('rnu','rnu'),
- ('ro','ro'),
- ('rs','rs'),
- ('rtp','rtp'),
- ('ru','ru'),
- ('ruf','ruf'),
- ('ruler','ruler'),
- ('rulerformat','rulerformat'),
- ('runtimepath','runtimepath'),
- ('sb','sb'),
- ('sbo','sbo'),
- ('sbr','sbr'),
- ('sc','sc'),
- ('scb','scb'),
- ('scr','scr'),
- ('scroll','scroll'),
- ('scrollbind','scrollbind'),
- ('scrolljump','scrolljump'),
- ('scrolloff','scrolloff'),
- ('scrollopt','scrollopt'),
- ('scs','scs'),
- ('sect','sect'),
- ('sections','sections'),
- ('secure','secure'),
- ('sel','sel'),
- ('selection','selection'),
- ('selectmode','selectmode'),
- ('sessionoptions','sessionoptions'),
- ('sft','sft'),
- ('sh','sh'),
- ('shcf','shcf'),
- ('shell','shell'),
- ('shellcmdflag','shellcmdflag'),
- ('shellpipe','shellpipe'),
- ('shellquote','shellquote'),
- ('shellredir','shellredir'),
- ('shellslash','shellslash'),
- ('shelltemp','shelltemp'),
- ('shelltype','shelltype'),
- ('shellxescape','shellxescape'),
- ('shellxquote','shellxquote'),
- ('shiftround','shiftround'),
- ('shiftwidth','shiftwidth'),
- ('shm','shm'),
- ('shortmess','shortmess'),
- ('shortname','shortname'),
- ('showbreak','showbreak'),
- ('showcmd','showcmd'),
- ('showfulltag','showfulltag'),
- ('showmatch','showmatch'),
- ('showmode','showmode'),
- ('showtabline','showtabline'),
- ('shq','shq'),
- ('si','si'),
- ('sidescroll','sidescroll'),
- ('sidescrolloff','sidescrolloff'),
- ('siso','siso'),
- ('sj','sj'),
- ('slm','slm'),
- ('sm','sm'),
- ('smartcase','smartcase'),
- ('smartindent','smartindent'),
- ('smarttab','smarttab'),
- ('smc','smc'),
- ('smd','smd'),
- ('sn','sn'),
- ('so','so'),
- ('softtabstop','softtabstop'),
- ('sol','sol'),
- ('sp','sp'),
- ('spc','spc'),
- ('spell','spell'),
- ('spellcapcheck','spellcapcheck'),
- ('spellfile','spellfile'),
- ('spelllang','spelllang'),
- ('spellsuggest','spellsuggest'),
- ('spf','spf'),
- ('spl','spl'),
- ('splitbelow','splitbelow'),
- ('splitright','splitright'),
- ('spr','spr'),
- ('sps','sps'),
- ('sr','sr'),
- ('srr','srr'),
- ('ss','ss'),
- ('ssl','ssl'),
- ('ssop','ssop'),
- ('st','st'),
- ('sta','sta'),
- ('stal','stal'),
- ('startofline','startofline'),
- ('statusline','statusline'),
- ('stl','stl'),
- ('stmp','stmp'),
- ('sts','sts'),
- ('su','su'),
- ('sua','sua'),
- ('suffixes','suffixes'),
- ('suffixesadd','suffixesadd'),
- ('sw','sw'),
- ('swapfile','swapfile'),
- ('swapsync','swapsync'),
- ('swb','swb'),
- ('swf','swf'),
- ('switchbuf','switchbuf'),
- ('sws','sws'),
- ('sxe','sxe'),
- ('sxq','sxq'),
- ('syn','syn'),
- ('synmaxcol','synmaxcol'),
- ('syntax','syntax'),
- ('t_AB','t_AB'),
- ('t_AF','t_AF'),
- ('t_AL','t_AL'),
- ('t_CS','t_CS'),
- ('t_CV','t_CV'),
- ('t_Ce','t_Ce'),
- ('t_Co','t_Co'),
- ('t_Cs','t_Cs'),
- ('t_DL','t_DL'),
- ('t_EI','t_EI'),
- ('t_F1','t_F1'),
- ('t_F2','t_F2'),
- ('t_F3','t_F3'),
- ('t_F4','t_F4'),
- ('t_F5','t_F5'),
- ('t_F6','t_F6'),
- ('t_F7','t_F7'),
- ('t_F8','t_F8'),
- ('t_F9','t_F9'),
- ('t_IE','t_IE'),
- ('t_IS','t_IS'),
- ('t_K1','t_K1'),
- ('t_K3','t_K3'),
- ('t_K4','t_K4'),
- ('t_K5','t_K5'),
- ('t_K6','t_K6'),
- ('t_K7','t_K7'),
- ('t_K8','t_K8'),
- ('t_K9','t_K9'),
- ('t_KA','t_KA'),
- ('t_KB','t_KB'),
- ('t_KC','t_KC'),
- ('t_KD','t_KD'),
- ('t_KE','t_KE'),
- ('t_KF','t_KF'),
- ('t_KG','t_KG'),
- ('t_KH','t_KH'),
- ('t_KI','t_KI'),
- ('t_KJ','t_KJ'),
- ('t_KK','t_KK'),
- ('t_KL','t_KL'),
- ('t_RI','t_RI'),
- ('t_RV','t_RV'),
- ('t_SI','t_SI'),
- ('t_Sb','t_Sb'),
- ('t_Sf','t_Sf'),
- ('t_WP','t_WP'),
- ('t_WS','t_WS'),
- ('t_ZH','t_ZH'),
- ('t_ZR','t_ZR'),
- ('t_al','t_al'),
- ('t_bc','t_bc'),
- ('t_cd','t_cd'),
- ('t_ce','t_ce'),
- ('t_cl','t_cl'),
- ('t_cm','t_cm'),
- ('t_cs','t_cs'),
- ('t_da','t_da'),
- ('t_db','t_db'),
- ('t_dl','t_dl'),
- ('t_fs','t_fs'),
- ('t_k1','t_k1'),
- ('t_k2','t_k2'),
- ('t_k3','t_k3'),
- ('t_k4','t_k4'),
- ('t_k5','t_k5'),
- ('t_k6','t_k6'),
- ('t_k7','t_k7'),
- ('t_k8','t_k8'),
- ('t_k9','t_k9'),
- ('t_kB','t_kB'),
- ('t_kD','t_kD'),
- ('t_kI','t_kI'),
- ('t_kN','t_kN'),
- ('t_kP','t_kP'),
- ('t_kb','t_kb'),
- ('t_kd','t_kd'),
- ('t_ke','t_ke'),
- ('t_kh','t_kh'),
- ('t_kl','t_kl'),
- ('t_kr','t_kr'),
- ('t_ks','t_ks'),
- ('t_ku','t_ku'),
- ('t_le','t_le'),
- ('t_mb','t_mb'),
- ('t_md','t_md'),
- ('t_me','t_me'),
- ('t_mr','t_mr'),
- ('t_ms','t_ms'),
- ('t_nd','t_nd'),
- ('t_op','t_op'),
- ('t_se','t_se'),
- ('t_so','t_so'),
- ('t_sr','t_sr'),
- ('t_te','t_te'),
- ('t_ti','t_ti'),
- ('t_ts','t_ts'),
- ('t_u7','t_u7'),
- ('t_ue','t_ue'),
- ('t_us','t_us'),
- ('t_ut','t_ut'),
- ('t_vb','t_vb'),
- ('t_ve','t_ve'),
- ('t_vi','t_vi'),
- ('t_vs','t_vs'),
- ('t_xs','t_xs'),
- ('ta','ta'),
- ('tabline','tabline'),
- ('tabpagemax','tabpagemax'),
- ('tabstop','tabstop'),
- ('tag','tag'),
- ('tagbsearch','tagbsearch'),
- ('taglength','taglength'),
- ('tagrelative','tagrelative'),
- ('tags','tags'),
- ('tagstack','tagstack'),
- ('tal','tal'),
- ('tb','tb'),
- ('tbi','tbi'),
- ('tbidi','tbidi'),
- ('tbis','tbis'),
- ('tbs','tbs'),
- ('tenc','tenc'),
- ('term','term'),
- ('termbidi','termbidi'),
- ('termencoding','termencoding'),
- ('terse','terse'),
- ('textauto','textauto'),
- ('textmode','textmode'),
- ('textwidth','textwidth'),
- ('tf','tf'),
- ('tgst','tgst'),
- ('thesaurus','thesaurus'),
- ('tildeop','tildeop'),
- ('timeout','timeout'),
- ('timeoutlen','timeoutlen'),
- ('title','title'),
- ('titlelen','titlelen'),
- ('titleold','titleold'),
- ('titlestring','titlestring'),
- ('tl','tl'),
- ('tm','tm'),
- ('to','to'),
- ('toolbar','toolbar'),
- ('toolbariconsize','toolbariconsize'),
- ('top','top'),
- ('tpm','tpm'),
- ('tr','tr'),
- ('ts','ts'),
- ('tsl','tsl'),
- ('tsr','tsr'),
- ('ttimeout','ttimeout'),
- ('ttimeoutlen','ttimeoutlen'),
- ('ttm','ttm'),
- ('tty','tty'),
- ('ttybuiltin','ttybuiltin'),
- ('ttyfast','ttyfast'),
- ('ttym','ttym'),
- ('ttymouse','ttymouse'),
- ('ttyscroll','ttyscroll'),
- ('ttytype','ttytype'),
- ('tw','tw'),
- ('tx','tx'),
- ('uc','uc'),
- ('udf','udf'),
- ('udir','udir'),
- ('ul','ul'),
- ('undodir','undodir'),
- ('undofile','undofile'),
- ('undolevels','undolevels'),
- ('undoreload','undoreload'),
- ('updatecount','updatecount'),
- ('updatetime','updatetime'),
- ('ur','ur'),
- ('ut','ut'),
- ('vb','vb'),
- ('vbs','vbs'),
- ('vdir','vdir'),
- ('ve','ve'),
- ('verbose','verbose'),
- ('verbosefile','verbosefile'),
- ('vfile','vfile'),
- ('vi','vi'),
- ('viewdir','viewdir'),
- ('viewoptions','viewoptions'),
- ('viminfo','viminfo'),
- ('virtualedit','virtualedit'),
- ('visualbell','visualbell'),
- ('vnoremap','vnoremap'),
- ('vop','vop'),
- ('wa','wa'),
- ('wak','wak'),
- ('warn','warn'),
- ('wb','wb'),
- ('wc','wc'),
- ('wcm','wcm'),
- ('wd','wd'),
- ('weirdinvert','weirdinvert'),
- ('wfh','wfh'),
- ('wfw','wfw'),
- ('wh','wh'),
- ('whichwrap','whichwrap'),
- ('wi','wi'),
- ('wic','wic'),
- ('wig','wig'),
- ('wildchar','wildchar'),
- ('wildcharm','wildcharm'),
- ('wildignore','wildignore'),
- ('wildignorecase','wildignorecase'),
- ('wildmenu','wildmenu'),
- ('wildmode','wildmode'),
- ('wildoptions','wildoptions'),
- ('wim','wim'),
- ('winaltkeys','winaltkeys'),
- ('window','window'),
- ('winfixheight','winfixheight'),
- ('winfixwidth','winfixwidth'),
- ('winheight','winheight'),
- ('winminheight','winminheight'),
- ('winminwidth','winminwidth'),
- ('winwidth','winwidth'),
- ('wiv','wiv'),
- ('wiw','wiw'),
- ('wm','wm'),
- ('wmh','wmh'),
- ('wmnu','wmnu'),
- ('wmw','wmw'),
- ('wop','wop'),
- ('wrap','wrap'),
- ('wrapmargin','wrapmargin'),
- ('wrapscan','wrapscan'),
- ('write','write'),
- ('writeany','writeany'),
- ('writebackup','writebackup'),
- ('writedelay','writedelay'),
- ('ws','ws'),
- ('ww','ww'),
- )
- return var
-option = _getoption()
-
+ :license: BSD, see LICENSE for details.
+"""
+
+# Split up in multiple functions so it's importable by jython, which has a
+# per-method size limit.
+
+def _getauto():
+ var = (
+ ('BufAdd','BufAdd'),
+ ('BufCreate','BufCreate'),
+ ('BufDelete','BufDelete'),
+ ('BufEnter','BufEnter'),
+ ('BufFilePost','BufFilePost'),
+ ('BufFilePre','BufFilePre'),
+ ('BufHidden','BufHidden'),
+ ('BufLeave','BufLeave'),
+ ('BufNew','BufNew'),
+ ('BufNewFile','BufNewFile'),
+ ('BufRead','BufRead'),
+ ('BufReadCmd','BufReadCmd'),
+ ('BufReadPost','BufReadPost'),
+ ('BufReadPre','BufReadPre'),
+ ('BufUnload','BufUnload'),
+ ('BufWinEnter','BufWinEnter'),
+ ('BufWinLeave','BufWinLeave'),
+ ('BufWipeout','BufWipeout'),
+ ('BufWrite','BufWrite'),
+ ('BufWriteCmd','BufWriteCmd'),
+ ('BufWritePost','BufWritePost'),
+ ('BufWritePre','BufWritePre'),
+ ('Cmd','Cmd'),
+ ('CmdwinEnter','CmdwinEnter'),
+ ('CmdwinLeave','CmdwinLeave'),
+ ('ColorScheme','ColorScheme'),
+ ('CompleteDone','CompleteDone'),
+ ('CursorHold','CursorHold'),
+ ('CursorHoldI','CursorHoldI'),
+ ('CursorMoved','CursorMoved'),
+ ('CursorMovedI','CursorMovedI'),
+ ('EncodingChanged','EncodingChanged'),
+ ('FileAppendCmd','FileAppendCmd'),
+ ('FileAppendPost','FileAppendPost'),
+ ('FileAppendPre','FileAppendPre'),
+ ('FileChangedRO','FileChangedRO'),
+ ('FileChangedShell','FileChangedShell'),
+ ('FileChangedShellPost','FileChangedShellPost'),
+ ('FileEncoding','FileEncoding'),
+ ('FileReadCmd','FileReadCmd'),
+ ('FileReadPost','FileReadPost'),
+ ('FileReadPre','FileReadPre'),
+ ('FileType','FileType'),
+ ('FileWriteCmd','FileWriteCmd'),
+ ('FileWritePost','FileWritePost'),
+ ('FileWritePre','FileWritePre'),
+ ('FilterReadPost','FilterReadPost'),
+ ('FilterReadPre','FilterReadPre'),
+ ('FilterWritePost','FilterWritePost'),
+ ('FilterWritePre','FilterWritePre'),
+ ('FocusGained','FocusGained'),
+ ('FocusLost','FocusLost'),
+ ('FuncUndefined','FuncUndefined'),
+ ('GUIEnter','GUIEnter'),
+ ('GUIFailed','GUIFailed'),
+ ('InsertChange','InsertChange'),
+ ('InsertCharPre','InsertCharPre'),
+ ('InsertEnter','InsertEnter'),
+ ('InsertLeave','InsertLeave'),
+ ('MenuPopup','MenuPopup'),
+ ('QuickFixCmdPost','QuickFixCmdPost'),
+ ('QuickFixCmdPre','QuickFixCmdPre'),
+ ('QuitPre','QuitPre'),
+ ('RemoteReply','RemoteReply'),
+ ('SessionLoadPost','SessionLoadPost'),
+ ('ShellCmdPost','ShellCmdPost'),
+ ('ShellFilterPost','ShellFilterPost'),
+ ('SourceCmd','SourceCmd'),
+ ('SourcePre','SourcePre'),
+ ('SpellFileMissing','SpellFileMissing'),
+ ('StdinReadPost','StdinReadPost'),
+ ('StdinReadPre','StdinReadPre'),
+ ('SwapExists','SwapExists'),
+ ('Syntax','Syntax'),
+ ('TabEnter','TabEnter'),
+ ('TabLeave','TabLeave'),
+ ('TermChanged','TermChanged'),
+ ('TermResponse','TermResponse'),
+ ('TextChanged','TextChanged'),
+ ('TextChangedI','TextChangedI'),
+ ('User','User'),
+ ('UserGettingBored','UserGettingBored'),
+ ('VimEnter','VimEnter'),
+ ('VimLeave','VimLeave'),
+ ('VimLeavePre','VimLeavePre'),
+ ('VimResized','VimResized'),
+ ('WinEnter','WinEnter'),
+ ('WinLeave','WinLeave'),
+ ('event','event'),
+ )
+ return var
+auto = _getauto()
+
+def _getcommand():
+ var = (
+ ('a','a'),
+ ('ab','ab'),
+ ('abc','abclear'),
+ ('abo','aboveleft'),
+ ('al','all'),
+ ('ar','ar'),
+ ('ar','args'),
+ ('arga','argadd'),
+ ('argd','argdelete'),
+ ('argdo','argdo'),
+ ('arge','argedit'),
+ ('argg','argglobal'),
+ ('argl','arglocal'),
+ ('argu','argument'),
+ ('as','ascii'),
+ ('au','au'),
+ ('b','buffer'),
+ ('bN','bNext'),
+ ('ba','ball'),
+ ('bad','badd'),
+ ('bd','bdelete'),
+ ('bel','belowright'),
+ ('bf','bfirst'),
+ ('bl','blast'),
+ ('bm','bmodified'),
+ ('bn','bnext'),
+ ('bo','botright'),
+ ('bp','bprevious'),
+ ('br','br'),
+ ('br','brewind'),
+ ('brea','break'),
+ ('breaka','breakadd'),
+ ('breakd','breakdel'),
+ ('breakl','breaklist'),
+ ('bro','browse'),
+ ('bu','bu'),
+ ('buf','buf'),
+ ('bufdo','bufdo'),
+ ('buffers','buffers'),
+ ('bun','bunload'),
+ ('bw','bwipeout'),
+ ('c','c'),
+ ('c','change'),
+ ('cN','cN'),
+ ('cN','cNext'),
+ ('cNf','cNf'),
+ ('cNf','cNfile'),
+ ('cabc','cabclear'),
+ ('cad','cad'),
+ ('cad','caddexpr'),
+ ('caddb','caddbuffer'),
+ ('caddf','caddfile'),
+ ('cal','call'),
+ ('cat','catch'),
+ ('cb','cbuffer'),
+ ('cc','cc'),
+ ('ccl','cclose'),
+ ('cd','cd'),
+ ('ce','center'),
+ ('cex','cexpr'),
+ ('cf','cfile'),
+ ('cfir','cfirst'),
+ ('cg','cgetfile'),
+ ('cgetb','cgetbuffer'),
+ ('cgete','cgetexpr'),
+ ('changes','changes'),
+ ('chd','chdir'),
+ ('che','checkpath'),
+ ('checkt','checktime'),
+ ('cl','cl'),
+ ('cl','clist'),
+ ('cla','clast'),
+ ('clo','close'),
+ ('cmapc','cmapclear'),
+ ('cn','cn'),
+ ('cn','cnext'),
+ ('cnew','cnewer'),
+ ('cnf','cnf'),
+ ('cnf','cnfile'),
+ ('co','copy'),
+ ('col','colder'),
+ ('colo','colorscheme'),
+ ('com','com'),
+ ('comc','comclear'),
+ ('comp','compiler'),
+ ('con','con'),
+ ('con','continue'),
+ ('conf','confirm'),
+ ('cope','copen'),
+ ('cp','cprevious'),
+ ('cpf','cpfile'),
+ ('cq','cquit'),
+ ('cr','crewind'),
+ ('cs','cs'),
+ ('cscope','cscope'),
+ ('cstag','cstag'),
+ ('cuna','cunabbrev'),
+ ('cw','cwindow'),
+ ('d','d'),
+ ('d','delete'),
+ ('de','de'),
+ ('debug','debug'),
+ ('debugg','debuggreedy'),
+ ('del','del'),
+ ('delc','delcommand'),
+ ('delel','delel'),
+ ('delep','delep'),
+ ('deletel','deletel'),
+ ('deletep','deletep'),
+ ('deletl','deletl'),
+ ('deletp','deletp'),
+ ('delf','delf'),
+ ('delf','delfunction'),
+ ('dell','dell'),
+ ('delm','delmarks'),
+ ('delp','delp'),
+ ('dep','dep'),
+ ('di','di'),
+ ('di','display'),
+ ('diffg','diffget'),
+ ('diffo','diffoff'),
+ ('diffp','diffpatch'),
+ ('diffpu','diffput'),
+ ('diffs','diffsplit'),
+ ('difft','diffthis'),
+ ('diffu','diffupdate'),
+ ('dig','dig'),
+ ('dig','digraphs'),
+ ('dir','dir'),
+ ('dj','djump'),
+ ('dl','dl'),
+ ('dli','dlist'),
+ ('do','do'),
+ ('doau','doau'),
+ ('dp','dp'),
+ ('dr','drop'),
+ ('ds','dsearch'),
+ ('dsp','dsplit'),
+ ('e','e'),
+ ('e','edit'),
+ ('ea','ea'),
+ ('earlier','earlier'),
+ ('ec','ec'),
+ ('echoe','echoerr'),
+ ('echom','echomsg'),
+ ('echon','echon'),
+ ('el','else'),
+ ('elsei','elseif'),
+ ('em','emenu'),
+ ('en','en'),
+ ('en','endif'),
+ ('endf','endf'),
+ ('endf','endfunction'),
+ ('endfo','endfor'),
+ ('endfun','endfun'),
+ ('endt','endtry'),
+ ('endw','endwhile'),
+ ('ene','enew'),
+ ('ex','ex'),
+ ('exi','exit'),
+ ('exu','exusage'),
+ ('f','f'),
+ ('f','file'),
+ ('files','files'),
+ ('filet','filet'),
+ ('filetype','filetype'),
+ ('fin','fin'),
+ ('fin','find'),
+ ('fina','finally'),
+ ('fini','finish'),
+ ('fir','first'),
+ ('fix','fixdel'),
+ ('fo','fold'),
+ ('foldc','foldclose'),
+ ('foldd','folddoopen'),
+ ('folddoc','folddoclosed'),
+ ('foldo','foldopen'),
+ ('for','for'),
+ ('fu','fu'),
+ ('fu','function'),
+ ('fun','fun'),
+ ('g','g'),
+ ('go','goto'),
+ ('gr','grep'),
+ ('grepa','grepadd'),
+ ('gui','gui'),
+ ('gvim','gvim'),
+ ('h','h'),
+ ('h','help'),
+ ('ha','hardcopy'),
+ ('helpf','helpfind'),
+ ('helpg','helpgrep'),
+ ('helpt','helptags'),
+ ('hi','hi'),
+ ('hid','hide'),
+ ('his','history'),
+ ('i','i'),
+ ('ia','ia'),
+ ('iabc','iabclear'),
+ ('if','if'),
+ ('ij','ijump'),
+ ('il','ilist'),
+ ('imapc','imapclear'),
+ ('in','in'),
+ ('intro','intro'),
+ ('is','isearch'),
+ ('isp','isplit'),
+ ('iuna','iunabbrev'),
+ ('j','join'),
+ ('ju','jumps'),
+ ('k','k'),
+ ('kee','keepmarks'),
+ ('keepa','keepa'),
+ ('keepalt','keepalt'),
+ ('keepj','keepjumps'),
+ ('keepp','keeppatterns'),
+ ('l','l'),
+ ('l','list'),
+ ('lN','lN'),
+ ('lN','lNext'),
+ ('lNf','lNf'),
+ ('lNf','lNfile'),
+ ('la','la'),
+ ('la','last'),
+ ('lad','lad'),
+ ('lad','laddexpr'),
+ ('laddb','laddbuffer'),
+ ('laddf','laddfile'),
+ ('lan','lan'),
+ ('lan','language'),
+ ('lat','lat'),
+ ('later','later'),
+ ('lb','lbuffer'),
+ ('lc','lcd'),
+ ('lch','lchdir'),
+ ('lcl','lclose'),
+ ('lcs','lcs'),
+ ('lcscope','lcscope'),
+ ('le','left'),
+ ('lefta','leftabove'),
+ ('lex','lexpr'),
+ ('lf','lfile'),
+ ('lfir','lfirst'),
+ ('lg','lgetfile'),
+ ('lgetb','lgetbuffer'),
+ ('lgete','lgetexpr'),
+ ('lgr','lgrep'),
+ ('lgrepa','lgrepadd'),
+ ('lh','lhelpgrep'),
+ ('ll','ll'),
+ ('lla','llast'),
+ ('lli','llist'),
+ ('lmak','lmake'),
+ ('lmapc','lmapclear'),
+ ('lne','lne'),
+ ('lne','lnext'),
+ ('lnew','lnewer'),
+ ('lnf','lnf'),
+ ('lnf','lnfile'),
+ ('lo','lo'),
+ ('lo','loadview'),
+ ('loadk','loadk'),
+ ('loadkeymap','loadkeymap'),
+ ('loc','lockmarks'),
+ ('lockv','lockvar'),
+ ('lol','lolder'),
+ ('lop','lopen'),
+ ('lp','lprevious'),
+ ('lpf','lpfile'),
+ ('lr','lrewind'),
+ ('ls','ls'),
+ ('lt','ltag'),
+ ('lua','lua'),
+ ('luado','luado'),
+ ('luafile','luafile'),
+ ('lv','lvimgrep'),
+ ('lvimgrepa','lvimgrepadd'),
+ ('lw','lwindow'),
+ ('m','move'),
+ ('ma','ma'),
+ ('ma','mark'),
+ ('mak','make'),
+ ('marks','marks'),
+ ('mat','match'),
+ ('menut','menut'),
+ ('menut','menutranslate'),
+ ('mes','mes'),
+ ('messages','messages'),
+ ('mk','mk'),
+ ('mk','mkexrc'),
+ ('mks','mksession'),
+ ('mksp','mkspell'),
+ ('mkv','mkv'),
+ ('mkv','mkvimrc'),
+ ('mkvie','mkview'),
+ ('mo','mo'),
+ ('mod','mode'),
+ ('mz','mz'),
+ ('mz','mzscheme'),
+ ('mzf','mzfile'),
+ ('n','n'),
+ ('n','next'),
+ ('nb','nbkey'),
+ ('nbc','nbclose'),
+ ('nbs','nbstart'),
+ ('ne','ne'),
+ ('new','new'),
+ ('nmapc','nmapclear'),
+ ('noa','noa'),
+ ('noautocmd','noautocmd'),
+ ('noh','nohlsearch'),
+ ('nu','number'),
+ ('o','o'),
+ ('o','open'),
+ ('ol','oldfiles'),
+ ('omapc','omapclear'),
+ ('on','only'),
+ ('opt','options'),
+ ('ownsyntax','ownsyntax'),
+ ('p','p'),
+ ('p','print'),
+ ('pc','pclose'),
+ ('pe','pe'),
+ ('pe','perl'),
+ ('ped','pedit'),
+ ('perld','perldo'),
+ ('po','pop'),
+ ('popu','popu'),
+ ('popu','popup'),
+ ('pp','ppop'),
+ ('pr','pr'),
+ ('pre','preserve'),
+ ('prev','previous'),
+ ('pro','pro'),
+ ('prof','profile'),
+ ('profd','profdel'),
+ ('promptf','promptfind'),
+ ('promptr','promptrepl'),
+ ('ps','psearch'),
+ ('ptN','ptN'),
+ ('ptN','ptNext'),
+ ('pta','ptag'),
+ ('ptf','ptfirst'),
+ ('ptj','ptjump'),
+ ('ptl','ptlast'),
+ ('ptn','ptn'),
+ ('ptn','ptnext'),
+ ('ptp','ptprevious'),
+ ('ptr','ptrewind'),
+ ('pts','ptselect'),
+ ('pu','put'),
+ ('pw','pwd'),
+ ('py','py'),
+ ('py','python'),
+ ('py3','py3'),
+ ('py3','py3'),
+ ('py3do','py3do'),
+ ('pydo','pydo'),
+ ('pyf','pyfile'),
+ ('python3','python3'),
+ ('q','q'),
+ ('q','quit'),
+ ('qa','qall'),
+ ('quita','quitall'),
+ ('r','r'),
+ ('r','read'),
+ ('re','re'),
+ ('rec','recover'),
+ ('red','red'),
+ ('red','redo'),
+ ('redi','redir'),
+ ('redr','redraw'),
+ ('redraws','redrawstatus'),
+ ('reg','registers'),
+ ('res','resize'),
+ ('ret','retab'),
+ ('retu','return'),
+ ('rew','rewind'),
+ ('ri','right'),
+ ('rightb','rightbelow'),
+ ('ru','ru'),
+ ('ru','runtime'),
+ ('rub','ruby'),
+ ('rubyd','rubydo'),
+ ('rubyf','rubyfile'),
+ ('rundo','rundo'),
+ ('rv','rviminfo'),
+ ('sN','sNext'),
+ ('sa','sargument'),
+ ('sal','sall'),
+ ('san','sandbox'),
+ ('sav','saveas'),
+ ('sb','sbuffer'),
+ ('sbN','sbNext'),
+ ('sba','sball'),
+ ('sbf','sbfirst'),
+ ('sbl','sblast'),
+ ('sbm','sbmodified'),
+ ('sbn','sbnext'),
+ ('sbp','sbprevious'),
+ ('sbr','sbrewind'),
+ ('scrip','scrip'),
+ ('scrip','scriptnames'),
+ ('scripte','scriptencoding'),
+ ('scs','scs'),
+ ('scscope','scscope'),
+ ('se','set'),
+ ('setf','setfiletype'),
+ ('setg','setglobal'),
+ ('setl','setlocal'),
+ ('sf','sfind'),
+ ('sfir','sfirst'),
+ ('sh','shell'),
+ ('si','si'),
+ ('sig','sig'),
+ ('sign','sign'),
+ ('sil','silent'),
+ ('sim','simalt'),
+ ('sl','sl'),
+ ('sl','sleep'),
+ ('sla','slast'),
+ ('sm','smagic'),
+ ('sm','smap'),
+ ('sme','sme'),
+ ('smenu','smenu'),
+ ('sn','snext'),
+ ('sni','sniff'),
+ ('sno','snomagic'),
+ ('snoreme','snoreme'),
+ ('snoremenu','snoremenu'),
+ ('so','so'),
+ ('so','source'),
+ ('sor','sort'),
+ ('sp','split'),
+ ('spe','spe'),
+ ('spe','spellgood'),
+ ('spelld','spelldump'),
+ ('spelli','spellinfo'),
+ ('spellr','spellrepall'),
+ ('spellu','spellundo'),
+ ('spellw','spellwrong'),
+ ('spr','sprevious'),
+ ('sre','srewind'),
+ ('st','st'),
+ ('st','stop'),
+ ('sta','stag'),
+ ('star','star'),
+ ('star','startinsert'),
+ ('start','start'),
+ ('startg','startgreplace'),
+ ('startr','startreplace'),
+ ('stj','stjump'),
+ ('stopi','stopinsert'),
+ ('sts','stselect'),
+ ('sun','sunhide'),
+ ('sunme','sunme'),
+ ('sunmenu','sunmenu'),
+ ('sus','suspend'),
+ ('sv','sview'),
+ ('sw','swapname'),
+ ('sy','sy'),
+ ('syn','syn'),
+ ('sync','sync'),
+ ('syncbind','syncbind'),
+ ('syntime','syntime'),
+ ('t','t'),
+ ('tN','tN'),
+ ('tN','tNext'),
+ ('ta','ta'),
+ ('ta','tag'),
+ ('tab','tab'),
+ ('tabN','tabN'),
+ ('tabN','tabNext'),
+ ('tabc','tabclose'),
+ ('tabd','tabdo'),
+ ('tabe','tabedit'),
+ ('tabf','tabfind'),
+ ('tabfir','tabfirst'),
+ ('tabl','tablast'),
+ ('tabm','tabmove'),
+ ('tabn','tabnext'),
+ ('tabnew','tabnew'),
+ ('tabo','tabonly'),
+ ('tabp','tabprevious'),
+ ('tabr','tabrewind'),
+ ('tabs','tabs'),
+ ('tags','tags'),
+ ('tc','tcl'),
+ ('tcld','tcldo'),
+ ('tclf','tclfile'),
+ ('te','tearoff'),
+ ('tf','tfirst'),
+ ('th','throw'),
+ ('tj','tjump'),
+ ('tl','tlast'),
+ ('tm','tm'),
+ ('tm','tmenu'),
+ ('tn','tn'),
+ ('tn','tnext'),
+ ('to','topleft'),
+ ('tp','tprevious'),
+ ('tr','tr'),
+ ('tr','trewind'),
+ ('try','try'),
+ ('ts','tselect'),
+ ('tu','tu'),
+ ('tu','tunmenu'),
+ ('u','u'),
+ ('u','undo'),
+ ('un','un'),
+ ('una','unabbreviate'),
+ ('undoj','undojoin'),
+ ('undol','undolist'),
+ ('unh','unhide'),
+ ('unl','unl'),
+ ('unlo','unlockvar'),
+ ('uns','unsilent'),
+ ('up','update'),
+ ('v','v'),
+ ('ve','ve'),
+ ('ve','version'),
+ ('verb','verbose'),
+ ('vert','vertical'),
+ ('vi','vi'),
+ ('vi','visual'),
+ ('vie','view'),
+ ('vim','vimgrep'),
+ ('vimgrepa','vimgrepadd'),
+ ('viu','viusage'),
+ ('vmapc','vmapclear'),
+ ('vne','vnew'),
+ ('vs','vsplit'),
+ ('w','w'),
+ ('w','write'),
+ ('wN','wNext'),
+ ('wa','wall'),
+ ('wh','while'),
+ ('win','win'),
+ ('win','winsize'),
+ ('winc','wincmd'),
+ ('windo','windo'),
+ ('winp','winpos'),
+ ('wn','wnext'),
+ ('wp','wprevious'),
+ ('wq','wq'),
+ ('wqa','wqall'),
+ ('ws','wsverb'),
+ ('wundo','wundo'),
+ ('wv','wviminfo'),
+ ('x','x'),
+ ('x','xit'),
+ ('xa','xall'),
+ ('xmapc','xmapclear'),
+ ('xme','xme'),
+ ('xmenu','xmenu'),
+ ('xnoreme','xnoreme'),
+ ('xnoremenu','xnoremenu'),
+ ('xunme','xunme'),
+ ('xunmenu','xunmenu'),
+ ('xwininfo','xwininfo'),
+ ('y','yank'),
+ )
+ return var
+command = _getcommand()
+
+def _getoption():
+ var = (
+ ('acd','acd'),
+ ('ai','ai'),
+ ('akm','akm'),
+ ('al','al'),
+ ('aleph','aleph'),
+ ('allowrevins','allowrevins'),
+ ('altkeymap','altkeymap'),
+ ('ambiwidth','ambiwidth'),
+ ('ambw','ambw'),
+ ('anti','anti'),
+ ('antialias','antialias'),
+ ('ar','ar'),
+ ('arab','arab'),
+ ('arabic','arabic'),
+ ('arabicshape','arabicshape'),
+ ('ari','ari'),
+ ('arshape','arshape'),
+ ('autochdir','autochdir'),
+ ('autoindent','autoindent'),
+ ('autoread','autoread'),
+ ('autowrite','autowrite'),
+ ('autowriteall','autowriteall'),
+ ('aw','aw'),
+ ('awa','awa'),
+ ('background','background'),
+ ('backspace','backspace'),
+ ('backup','backup'),
+ ('backupcopy','backupcopy'),
+ ('backupdir','backupdir'),
+ ('backupext','backupext'),
+ ('backupskip','backupskip'),
+ ('balloondelay','balloondelay'),
+ ('ballooneval','ballooneval'),
+ ('balloonexpr','balloonexpr'),
+ ('bdir','bdir'),
+ ('bdlay','bdlay'),
+ ('beval','beval'),
+ ('bex','bex'),
+ ('bexpr','bexpr'),
+ ('bg','bg'),
+ ('bh','bh'),
+ ('bin','bin'),
+ ('binary','binary'),
+ ('biosk','biosk'),
+ ('bioskey','bioskey'),
+ ('bk','bk'),
+ ('bkc','bkc'),
+ ('bl','bl'),
+ ('bomb','bomb'),
+ ('breakat','breakat'),
+ ('brk','brk'),
+ ('browsedir','browsedir'),
+ ('bs','bs'),
+ ('bsdir','bsdir'),
+ ('bsk','bsk'),
+ ('bt','bt'),
+ ('bufhidden','bufhidden'),
+ ('buflisted','buflisted'),
+ ('buftype','buftype'),
+ ('casemap','casemap'),
+ ('cb','cb'),
+ ('cc','cc'),
+ ('ccv','ccv'),
+ ('cd','cd'),
+ ('cdpath','cdpath'),
+ ('cedit','cedit'),
+ ('cf','cf'),
+ ('cfu','cfu'),
+ ('ch','ch'),
+ ('charconvert','charconvert'),
+ ('ci','ci'),
+ ('cin','cin'),
+ ('cindent','cindent'),
+ ('cink','cink'),
+ ('cinkeys','cinkeys'),
+ ('cino','cino'),
+ ('cinoptions','cinoptions'),
+ ('cinw','cinw'),
+ ('cinwords','cinwords'),
+ ('clipboard','clipboard'),
+ ('cmdheight','cmdheight'),
+ ('cmdwinheight','cmdwinheight'),
+ ('cmp','cmp'),
+ ('cms','cms'),
+ ('co','co'),
+ ('cocu','cocu'),
+ ('cole','cole'),
+ ('colorcolumn','colorcolumn'),
+ ('columns','columns'),
+ ('com','com'),
+ ('comments','comments'),
+ ('commentstring','commentstring'),
+ ('compatible','compatible'),
+ ('complete','complete'),
+ ('completefunc','completefunc'),
+ ('completeopt','completeopt'),
+ ('concealcursor','concealcursor'),
+ ('conceallevel','conceallevel'),
+ ('confirm','confirm'),
+ ('consk','consk'),
+ ('conskey','conskey'),
+ ('copyindent','copyindent'),
+ ('cot','cot'),
+ ('cp','cp'),
+ ('cpo','cpo'),
+ ('cpoptions','cpoptions'),
+ ('cpt','cpt'),
+ ('crb','crb'),
+ ('cryptmethod','cryptmethod'),
+ ('cscopepathcomp','cscopepathcomp'),
+ ('cscopeprg','cscopeprg'),
+ ('cscopequickfix','cscopequickfix'),
+ ('cscoperelative','cscoperelative'),
+ ('cscopetag','cscopetag'),
+ ('cscopetagorder','cscopetagorder'),
+ ('cscopeverbose','cscopeverbose'),
+ ('cspc','cspc'),
+ ('csprg','csprg'),
+ ('csqf','csqf'),
+ ('csre','csre'),
+ ('cst','cst'),
+ ('csto','csto'),
+ ('csverb','csverb'),
+ ('cuc','cuc'),
+ ('cul','cul'),
+ ('cursorbind','cursorbind'),
+ ('cursorcolumn','cursorcolumn'),
+ ('cursorline','cursorline'),
+ ('cwh','cwh'),
+ ('debug','debug'),
+ ('deco','deco'),
+ ('def','def'),
+ ('define','define'),
+ ('delcombine','delcombine'),
+ ('dex','dex'),
+ ('dg','dg'),
+ ('dict','dict'),
+ ('dictionary','dictionary'),
+ ('diff','diff'),
+ ('diffexpr','diffexpr'),
+ ('diffopt','diffopt'),
+ ('digraph','digraph'),
+ ('dip','dip'),
+ ('dir','dir'),
+ ('directory','directory'),
+ ('display','display'),
+ ('dy','dy'),
+ ('ea','ea'),
+ ('ead','ead'),
+ ('eadirection','eadirection'),
+ ('eb','eb'),
+ ('ed','ed'),
+ ('edcompatible','edcompatible'),
+ ('ef','ef'),
+ ('efm','efm'),
+ ('ei','ei'),
+ ('ek','ek'),
+ ('enc','enc'),
+ ('encoding','encoding'),
+ ('endofline','endofline'),
+ ('eol','eol'),
+ ('ep','ep'),
+ ('equalalways','equalalways'),
+ ('equalprg','equalprg'),
+ ('errorbells','errorbells'),
+ ('errorfile','errorfile'),
+ ('errorformat','errorformat'),
+ ('esckeys','esckeys'),
+ ('et','et'),
+ ('eventignore','eventignore'),
+ ('ex','ex'),
+ ('expandtab','expandtab'),
+ ('exrc','exrc'),
+ ('fcl','fcl'),
+ ('fcs','fcs'),
+ ('fdc','fdc'),
+ ('fde','fde'),
+ ('fdi','fdi'),
+ ('fdl','fdl'),
+ ('fdls','fdls'),
+ ('fdm','fdm'),
+ ('fdn','fdn'),
+ ('fdo','fdo'),
+ ('fdt','fdt'),
+ ('fen','fen'),
+ ('fenc','fenc'),
+ ('fencs','fencs'),
+ ('fex','fex'),
+ ('ff','ff'),
+ ('ffs','ffs'),
+ ('fic','fic'),
+ ('fileencoding','fileencoding'),
+ ('fileencodings','fileencodings'),
+ ('fileformat','fileformat'),
+ ('fileformats','fileformats'),
+ ('fileignorecase','fileignorecase'),
+ ('filetype','filetype'),
+ ('fillchars','fillchars'),
+ ('fk','fk'),
+ ('fkmap','fkmap'),
+ ('flp','flp'),
+ ('fml','fml'),
+ ('fmr','fmr'),
+ ('fo','fo'),
+ ('foldclose','foldclose'),
+ ('foldcolumn','foldcolumn'),
+ ('foldenable','foldenable'),
+ ('foldexpr','foldexpr'),
+ ('foldignore','foldignore'),
+ ('foldlevel','foldlevel'),
+ ('foldlevelstart','foldlevelstart'),
+ ('foldmarker','foldmarker'),
+ ('foldmethod','foldmethod'),
+ ('foldminlines','foldminlines'),
+ ('foldnestmax','foldnestmax'),
+ ('foldopen','foldopen'),
+ ('foldtext','foldtext'),
+ ('formatexpr','formatexpr'),
+ ('formatlistpat','formatlistpat'),
+ ('formatoptions','formatoptions'),
+ ('formatprg','formatprg'),
+ ('fp','fp'),
+ ('fs','fs'),
+ ('fsync','fsync'),
+ ('ft','ft'),
+ ('gcr','gcr'),
+ ('gd','gd'),
+ ('gdefault','gdefault'),
+ ('gfm','gfm'),
+ ('gfn','gfn'),
+ ('gfs','gfs'),
+ ('gfw','gfw'),
+ ('ghr','ghr'),
+ ('go','go'),
+ ('gp','gp'),
+ ('grepformat','grepformat'),
+ ('grepprg','grepprg'),
+ ('gtl','gtl'),
+ ('gtt','gtt'),
+ ('guicursor','guicursor'),
+ ('guifont','guifont'),
+ ('guifontset','guifontset'),
+ ('guifontwide','guifontwide'),
+ ('guiheadroom','guiheadroom'),
+ ('guioptions','guioptions'),
+ ('guipty','guipty'),
+ ('guitablabel','guitablabel'),
+ ('guitabtooltip','guitabtooltip'),
+ ('helpfile','helpfile'),
+ ('helpheight','helpheight'),
+ ('helplang','helplang'),
+ ('hf','hf'),
+ ('hh','hh'),
+ ('hi','hi'),
+ ('hid','hid'),
+ ('hidden','hidden'),
+ ('highlight','highlight'),
+ ('history','history'),
+ ('hk','hk'),
+ ('hkmap','hkmap'),
+ ('hkmapp','hkmapp'),
+ ('hkp','hkp'),
+ ('hl','hl'),
+ ('hlg','hlg'),
+ ('hls','hls'),
+ ('hlsearch','hlsearch'),
+ ('ic','ic'),
+ ('icon','icon'),
+ ('iconstring','iconstring'),
+ ('ignorecase','ignorecase'),
+ ('im','im'),
+ ('imactivatefunc','imactivatefunc'),
+ ('imactivatekey','imactivatekey'),
+ ('imaf','imaf'),
+ ('imak','imak'),
+ ('imc','imc'),
+ ('imcmdline','imcmdline'),
+ ('imd','imd'),
+ ('imdisable','imdisable'),
+ ('imi','imi'),
+ ('iminsert','iminsert'),
+ ('ims','ims'),
+ ('imsearch','imsearch'),
+ ('imsf','imsf'),
+ ('imstatusfunc','imstatusfunc'),
+ ('inc','inc'),
+ ('include','include'),
+ ('includeexpr','includeexpr'),
+ ('incsearch','incsearch'),
+ ('inde','inde'),
+ ('indentexpr','indentexpr'),
+ ('indentkeys','indentkeys'),
+ ('indk','indk'),
+ ('inex','inex'),
+ ('inf','inf'),
+ ('infercase','infercase'),
+ ('inoremap','inoremap'),
+ ('insertmode','insertmode'),
+ ('invacd','invacd'),
+ ('invai','invai'),
+ ('invakm','invakm'),
+ ('invallowrevins','invallowrevins'),
+ ('invaltkeymap','invaltkeymap'),
+ ('invanti','invanti'),
+ ('invantialias','invantialias'),
+ ('invar','invar'),
+ ('invarab','invarab'),
+ ('invarabic','invarabic'),
+ ('invarabicshape','invarabicshape'),
+ ('invari','invari'),
+ ('invarshape','invarshape'),
+ ('invautochdir','invautochdir'),
+ ('invautoindent','invautoindent'),
+ ('invautoread','invautoread'),
+ ('invautowrite','invautowrite'),
+ ('invautowriteall','invautowriteall'),
+ ('invaw','invaw'),
+ ('invawa','invawa'),
+ ('invbackup','invbackup'),
+ ('invballooneval','invballooneval'),
+ ('invbeval','invbeval'),
+ ('invbin','invbin'),
+ ('invbinary','invbinary'),
+ ('invbiosk','invbiosk'),
+ ('invbioskey','invbioskey'),
+ ('invbk','invbk'),
+ ('invbl','invbl'),
+ ('invbomb','invbomb'),
+ ('invbuflisted','invbuflisted'),
+ ('invcf','invcf'),
+ ('invci','invci'),
+ ('invcin','invcin'),
+ ('invcindent','invcindent'),
+ ('invcompatible','invcompatible'),
+ ('invconfirm','invconfirm'),
+ ('invconsk','invconsk'),
+ ('invconskey','invconskey'),
+ ('invcopyindent','invcopyindent'),
+ ('invcp','invcp'),
+ ('invcrb','invcrb'),
+ ('invcscoperelative','invcscoperelative'),
+ ('invcscopetag','invcscopetag'),
+ ('invcscopeverbose','invcscopeverbose'),
+ ('invcsre','invcsre'),
+ ('invcst','invcst'),
+ ('invcsverb','invcsverb'),
+ ('invcuc','invcuc'),
+ ('invcul','invcul'),
+ ('invcursorbind','invcursorbind'),
+ ('invcursorcolumn','invcursorcolumn'),
+ ('invcursorline','invcursorline'),
+ ('invdeco','invdeco'),
+ ('invdelcombine','invdelcombine'),
+ ('invdg','invdg'),
+ ('invdiff','invdiff'),
+ ('invdigraph','invdigraph'),
+ ('invea','invea'),
+ ('inveb','inveb'),
+ ('inved','inved'),
+ ('invedcompatible','invedcompatible'),
+ ('invek','invek'),
+ ('invendofline','invendofline'),
+ ('inveol','inveol'),
+ ('invequalalways','invequalalways'),
+ ('inverrorbells','inverrorbells'),
+ ('invesckeys','invesckeys'),
+ ('invet','invet'),
+ ('invex','invex'),
+ ('invexpandtab','invexpandtab'),
+ ('invexrc','invexrc'),
+ ('invfen','invfen'),
+ ('invfic','invfic'),
+ ('invfileignorecase','invfileignorecase'),
+ ('invfk','invfk'),
+ ('invfkmap','invfkmap'),
+ ('invfoldenable','invfoldenable'),
+ ('invgd','invgd'),
+ ('invgdefault','invgdefault'),
+ ('invguipty','invguipty'),
+ ('invhid','invhid'),
+ ('invhidden','invhidden'),
+ ('invhk','invhk'),
+ ('invhkmap','invhkmap'),
+ ('invhkmapp','invhkmapp'),
+ ('invhkp','invhkp'),
+ ('invhls','invhls'),
+ ('invhlsearch','invhlsearch'),
+ ('invic','invic'),
+ ('invicon','invicon'),
+ ('invignorecase','invignorecase'),
+ ('invim','invim'),
+ ('invimc','invimc'),
+ ('invimcmdline','invimcmdline'),
+ ('invimd','invimd'),
+ ('invimdisable','invimdisable'),
+ ('invincsearch','invincsearch'),
+ ('invinf','invinf'),
+ ('invinfercase','invinfercase'),
+ ('invinsertmode','invinsertmode'),
+ ('invis','invis'),
+ ('invjoinspaces','invjoinspaces'),
+ ('invjs','invjs'),
+ ('invlazyredraw','invlazyredraw'),
+ ('invlbr','invlbr'),
+ ('invlinebreak','invlinebreak'),
+ ('invlisp','invlisp'),
+ ('invlist','invlist'),
+ ('invloadplugins','invloadplugins'),
+ ('invlpl','invlpl'),
+ ('invlz','invlz'),
+ ('invma','invma'),
+ ('invmacatsui','invmacatsui'),
+ ('invmagic','invmagic'),
+ ('invmh','invmh'),
+ ('invml','invml'),
+ ('invmod','invmod'),
+ ('invmodeline','invmodeline'),
+ ('invmodifiable','invmodifiable'),
+ ('invmodified','invmodified'),
+ ('invmore','invmore'),
+ ('invmousef','invmousef'),
+ ('invmousefocus','invmousefocus'),
+ ('invmousehide','invmousehide'),
+ ('invnu','invnu'),
+ ('invnumber','invnumber'),
+ ('invodev','invodev'),
+ ('invopendevice','invopendevice'),
+ ('invpaste','invpaste'),
+ ('invpi','invpi'),
+ ('invpreserveindent','invpreserveindent'),
+ ('invpreviewwindow','invpreviewwindow'),
+ ('invprompt','invprompt'),
+ ('invpvw','invpvw'),
+ ('invreadonly','invreadonly'),
+ ('invrelativenumber','invrelativenumber'),
+ ('invremap','invremap'),
+ ('invrestorescreen','invrestorescreen'),
+ ('invrevins','invrevins'),
+ ('invri','invri'),
+ ('invrightleft','invrightleft'),
+ ('invrl','invrl'),
+ ('invrnu','invrnu'),
+ ('invro','invro'),
+ ('invrs','invrs'),
+ ('invru','invru'),
+ ('invruler','invruler'),
+ ('invsb','invsb'),
+ ('invsc','invsc'),
+ ('invscb','invscb'),
+ ('invscrollbind','invscrollbind'),
+ ('invscs','invscs'),
+ ('invsecure','invsecure'),
+ ('invsft','invsft'),
+ ('invshellslash','invshellslash'),
+ ('invshelltemp','invshelltemp'),
+ ('invshiftround','invshiftround'),
+ ('invshortname','invshortname'),
+ ('invshowcmd','invshowcmd'),
+ ('invshowfulltag','invshowfulltag'),
+ ('invshowmatch','invshowmatch'),
+ ('invshowmode','invshowmode'),
+ ('invsi','invsi'),
+ ('invsm','invsm'),
+ ('invsmartcase','invsmartcase'),
+ ('invsmartindent','invsmartindent'),
+ ('invsmarttab','invsmarttab'),
+ ('invsmd','invsmd'),
+ ('invsn','invsn'),
+ ('invsol','invsol'),
+ ('invspell','invspell'),
+ ('invsplitbelow','invsplitbelow'),
+ ('invsplitright','invsplitright'),
+ ('invspr','invspr'),
+ ('invsr','invsr'),
+ ('invssl','invssl'),
+ ('invsta','invsta'),
+ ('invstartofline','invstartofline'),
+ ('invstmp','invstmp'),
+ ('invswapfile','invswapfile'),
+ ('invswf','invswf'),
+ ('invta','invta'),
+ ('invtagbsearch','invtagbsearch'),
+ ('invtagrelative','invtagrelative'),
+ ('invtagstack','invtagstack'),
+ ('invtbi','invtbi'),
+ ('invtbidi','invtbidi'),
+ ('invtbs','invtbs'),
+ ('invtermbidi','invtermbidi'),
+ ('invterse','invterse'),
+ ('invtextauto','invtextauto'),
+ ('invtextmode','invtextmode'),
+ ('invtf','invtf'),
+ ('invtgst','invtgst'),
+ ('invtildeop','invtildeop'),
+ ('invtimeout','invtimeout'),
+ ('invtitle','invtitle'),
+ ('invto','invto'),
+ ('invtop','invtop'),
+ ('invtr','invtr'),
+ ('invttimeout','invttimeout'),
+ ('invttybuiltin','invttybuiltin'),
+ ('invttyfast','invttyfast'),
+ ('invtx','invtx'),
+ ('invudf','invudf'),
+ ('invundofile','invundofile'),
+ ('invvb','invvb'),
+ ('invvisualbell','invvisualbell'),
+ ('invwa','invwa'),
+ ('invwarn','invwarn'),
+ ('invwb','invwb'),
+ ('invweirdinvert','invweirdinvert'),
+ ('invwfh','invwfh'),
+ ('invwfw','invwfw'),
+ ('invwic','invwic'),
+ ('invwildignorecase','invwildignorecase'),
+ ('invwildmenu','invwildmenu'),
+ ('invwinfixheight','invwinfixheight'),
+ ('invwinfixwidth','invwinfixwidth'),
+ ('invwiv','invwiv'),
+ ('invwmnu','invwmnu'),
+ ('invwrap','invwrap'),
+ ('invwrapscan','invwrapscan'),
+ ('invwrite','invwrite'),
+ ('invwriteany','invwriteany'),
+ ('invwritebackup','invwritebackup'),
+ ('invws','invws'),
+ ('is','is'),
+ ('isf','isf'),
+ ('isfname','isfname'),
+ ('isi','isi'),
+ ('isident','isident'),
+ ('isk','isk'),
+ ('iskeyword','iskeyword'),
+ ('isp','isp'),
+ ('isprint','isprint'),
+ ('joinspaces','joinspaces'),
+ ('js','js'),
+ ('key','key'),
+ ('keymap','keymap'),
+ ('keymodel','keymodel'),
+ ('keywordprg','keywordprg'),
+ ('km','km'),
+ ('kmp','kmp'),
+ ('kp','kp'),
+ ('langmap','langmap'),
+ ('langmenu','langmenu'),
+ ('laststatus','laststatus'),
+ ('lazyredraw','lazyredraw'),
+ ('lbr','lbr'),
+ ('lcs','lcs'),
+ ('linebreak','linebreak'),
+ ('lines','lines'),
+ ('linespace','linespace'),
+ ('lisp','lisp'),
+ ('lispwords','lispwords'),
+ ('list','list'),
+ ('listchars','listchars'),
+ ('lm','lm'),
+ ('lmap','lmap'),
+ ('loadplugins','loadplugins'),
+ ('lpl','lpl'),
+ ('ls','ls'),
+ ('lsp','lsp'),
+ ('lw','lw'),
+ ('lz','lz'),
+ ('ma','ma'),
+ ('macatsui','macatsui'),
+ ('magic','magic'),
+ ('makeef','makeef'),
+ ('makeprg','makeprg'),
+ ('mat','mat'),
+ ('matchpairs','matchpairs'),
+ ('matchtime','matchtime'),
+ ('maxcombine','maxcombine'),
+ ('maxfuncdepth','maxfuncdepth'),
+ ('maxmapdepth','maxmapdepth'),
+ ('maxmem','maxmem'),
+ ('maxmempattern','maxmempattern'),
+ ('maxmemtot','maxmemtot'),
+ ('mco','mco'),
+ ('mef','mef'),
+ ('menuitems','menuitems'),
+ ('mfd','mfd'),
+ ('mh','mh'),
+ ('mis','mis'),
+ ('mkspellmem','mkspellmem'),
+ ('ml','ml'),
+ ('mls','mls'),
+ ('mm','mm'),
+ ('mmd','mmd'),
+ ('mmp','mmp'),
+ ('mmt','mmt'),
+ ('mod','mod'),
+ ('modeline','modeline'),
+ ('modelines','modelines'),
+ ('modifiable','modifiable'),
+ ('modified','modified'),
+ ('more','more'),
+ ('mouse','mouse'),
+ ('mousef','mousef'),
+ ('mousefocus','mousefocus'),
+ ('mousehide','mousehide'),
+ ('mousem','mousem'),
+ ('mousemodel','mousemodel'),
+ ('mouses','mouses'),
+ ('mouseshape','mouseshape'),
+ ('mouset','mouset'),
+ ('mousetime','mousetime'),
+ ('mp','mp'),
+ ('mps','mps'),
+ ('msm','msm'),
+ ('mzq','mzq'),
+ ('mzquantum','mzquantum'),
+ ('nf','nf'),
+ ('nnoremap','nnoremap'),
+ ('noacd','noacd'),
+ ('noai','noai'),
+ ('noakm','noakm'),
+ ('noallowrevins','noallowrevins'),
+ ('noaltkeymap','noaltkeymap'),
+ ('noanti','noanti'),
+ ('noantialias','noantialias'),
+ ('noar','noar'),
+ ('noarab','noarab'),
+ ('noarabic','noarabic'),
+ ('noarabicshape','noarabicshape'),
+ ('noari','noari'),
+ ('noarshape','noarshape'),
+ ('noautochdir','noautochdir'),
+ ('noautoindent','noautoindent'),
+ ('noautoread','noautoread'),
+ ('noautowrite','noautowrite'),
+ ('noautowriteall','noautowriteall'),
+ ('noaw','noaw'),
+ ('noawa','noawa'),
+ ('nobackup','nobackup'),
+ ('noballooneval','noballooneval'),
+ ('nobeval','nobeval'),
+ ('nobin','nobin'),
+ ('nobinary','nobinary'),
+ ('nobiosk','nobiosk'),
+ ('nobioskey','nobioskey'),
+ ('nobk','nobk'),
+ ('nobl','nobl'),
+ ('nobomb','nobomb'),
+ ('nobuflisted','nobuflisted'),
+ ('nocf','nocf'),
+ ('noci','noci'),
+ ('nocin','nocin'),
+ ('nocindent','nocindent'),
+ ('nocompatible','nocompatible'),
+ ('noconfirm','noconfirm'),
+ ('noconsk','noconsk'),
+ ('noconskey','noconskey'),
+ ('nocopyindent','nocopyindent'),
+ ('nocp','nocp'),
+ ('nocrb','nocrb'),
+ ('nocscoperelative','nocscoperelative'),
+ ('nocscopetag','nocscopetag'),
+ ('nocscopeverbose','nocscopeverbose'),
+ ('nocsre','nocsre'),
+ ('nocst','nocst'),
+ ('nocsverb','nocsverb'),
+ ('nocuc','nocuc'),
+ ('nocul','nocul'),
+ ('nocursorbind','nocursorbind'),
+ ('nocursorcolumn','nocursorcolumn'),
+ ('nocursorline','nocursorline'),
+ ('nodeco','nodeco'),
+ ('nodelcombine','nodelcombine'),
+ ('nodg','nodg'),
+ ('nodiff','nodiff'),
+ ('nodigraph','nodigraph'),
+ ('noea','noea'),
+ ('noeb','noeb'),
+ ('noed','noed'),
+ ('noedcompatible','noedcompatible'),
+ ('noek','noek'),
+ ('noendofline','noendofline'),
+ ('noeol','noeol'),
+ ('noequalalways','noequalalways'),
+ ('noerrorbells','noerrorbells'),
+ ('noesckeys','noesckeys'),
+ ('noet','noet'),
+ ('noex','noex'),
+ ('noexpandtab','noexpandtab'),
+ ('noexrc','noexrc'),
+ ('nofen','nofen'),
+ ('nofic','nofic'),
+ ('nofileignorecase','nofileignorecase'),
+ ('nofk','nofk'),
+ ('nofkmap','nofkmap'),
+ ('nofoldenable','nofoldenable'),
+ ('nogd','nogd'),
+ ('nogdefault','nogdefault'),
+ ('noguipty','noguipty'),
+ ('nohid','nohid'),
+ ('nohidden','nohidden'),
+ ('nohk','nohk'),
+ ('nohkmap','nohkmap'),
+ ('nohkmapp','nohkmapp'),
+ ('nohkp','nohkp'),
+ ('nohls','nohls'),
+ ('nohlsearch','nohlsearch'),
+ ('noic','noic'),
+ ('noicon','noicon'),
+ ('noignorecase','noignorecase'),
+ ('noim','noim'),
+ ('noimc','noimc'),
+ ('noimcmdline','noimcmdline'),
+ ('noimd','noimd'),
+ ('noimdisable','noimdisable'),
+ ('noincsearch','noincsearch'),
+ ('noinf','noinf'),
+ ('noinfercase','noinfercase'),
+ ('noinsertmode','noinsertmode'),
+ ('nois','nois'),
+ ('nojoinspaces','nojoinspaces'),
+ ('nojs','nojs'),
+ ('nolazyredraw','nolazyredraw'),
+ ('nolbr','nolbr'),
+ ('nolinebreak','nolinebreak'),
+ ('nolisp','nolisp'),
+ ('nolist','nolist'),
+ ('noloadplugins','noloadplugins'),
+ ('nolpl','nolpl'),
+ ('nolz','nolz'),
+ ('noma','noma'),
+ ('nomacatsui','nomacatsui'),
+ ('nomagic','nomagic'),
+ ('nomh','nomh'),
+ ('noml','noml'),
+ ('nomod','nomod'),
+ ('nomodeline','nomodeline'),
+ ('nomodifiable','nomodifiable'),
+ ('nomodified','nomodified'),
+ ('nomore','nomore'),
+ ('nomousef','nomousef'),
+ ('nomousefocus','nomousefocus'),
+ ('nomousehide','nomousehide'),
+ ('nonu','nonu'),
+ ('nonumber','nonumber'),
+ ('noodev','noodev'),
+ ('noopendevice','noopendevice'),
+ ('nopaste','nopaste'),
+ ('nopi','nopi'),
+ ('nopreserveindent','nopreserveindent'),
+ ('nopreviewwindow','nopreviewwindow'),
+ ('noprompt','noprompt'),
+ ('nopvw','nopvw'),
+ ('noreadonly','noreadonly'),
+ ('norelativenumber','norelativenumber'),
+ ('noremap','noremap'),
+ ('norestorescreen','norestorescreen'),
+ ('norevins','norevins'),
+ ('nori','nori'),
+ ('norightleft','norightleft'),
+ ('norl','norl'),
+ ('nornu','nornu'),
+ ('noro','noro'),
+ ('nors','nors'),
+ ('noru','noru'),
+ ('noruler','noruler'),
+ ('nosb','nosb'),
+ ('nosc','nosc'),
+ ('noscb','noscb'),
+ ('noscrollbind','noscrollbind'),
+ ('noscs','noscs'),
+ ('nosecure','nosecure'),
+ ('nosft','nosft'),
+ ('noshellslash','noshellslash'),
+ ('noshelltemp','noshelltemp'),
+ ('noshiftround','noshiftround'),
+ ('noshortname','noshortname'),
+ ('noshowcmd','noshowcmd'),
+ ('noshowfulltag','noshowfulltag'),
+ ('noshowmatch','noshowmatch'),
+ ('noshowmode','noshowmode'),
+ ('nosi','nosi'),
+ ('nosm','nosm'),
+ ('nosmartcase','nosmartcase'),
+ ('nosmartindent','nosmartindent'),
+ ('nosmarttab','nosmarttab'),
+ ('nosmd','nosmd'),
+ ('nosn','nosn'),
+ ('nosol','nosol'),
+ ('nospell','nospell'),
+ ('nosplitbelow','nosplitbelow'),
+ ('nosplitright','nosplitright'),
+ ('nospr','nospr'),
+ ('nosr','nosr'),
+ ('nossl','nossl'),
+ ('nosta','nosta'),
+ ('nostartofline','nostartofline'),
+ ('nostmp','nostmp'),
+ ('noswapfile','noswapfile'),
+ ('noswf','noswf'),
+ ('nota','nota'),
+ ('notagbsearch','notagbsearch'),
+ ('notagrelative','notagrelative'),
+ ('notagstack','notagstack'),
+ ('notbi','notbi'),
+ ('notbidi','notbidi'),
+ ('notbs','notbs'),
+ ('notermbidi','notermbidi'),
+ ('noterse','noterse'),
+ ('notextauto','notextauto'),
+ ('notextmode','notextmode'),
+ ('notf','notf'),
+ ('notgst','notgst'),
+ ('notildeop','notildeop'),
+ ('notimeout','notimeout'),
+ ('notitle','notitle'),
+ ('noto','noto'),
+ ('notop','notop'),
+ ('notr','notr'),
+ ('nottimeout','nottimeout'),
+ ('nottybuiltin','nottybuiltin'),
+ ('nottyfast','nottyfast'),
+ ('notx','notx'),
+ ('noudf','noudf'),
+ ('noundofile','noundofile'),
+ ('novb','novb'),
+ ('novisualbell','novisualbell'),
+ ('nowa','nowa'),
+ ('nowarn','nowarn'),
+ ('nowb','nowb'),
+ ('noweirdinvert','noweirdinvert'),
+ ('nowfh','nowfh'),
+ ('nowfw','nowfw'),
+ ('nowic','nowic'),
+ ('nowildignorecase','nowildignorecase'),
+ ('nowildmenu','nowildmenu'),
+ ('nowinfixheight','nowinfixheight'),
+ ('nowinfixwidth','nowinfixwidth'),
+ ('nowiv','nowiv'),
+ ('nowmnu','nowmnu'),
+ ('nowrap','nowrap'),
+ ('nowrapscan','nowrapscan'),
+ ('nowrite','nowrite'),
+ ('nowriteany','nowriteany'),
+ ('nowritebackup','nowritebackup'),
+ ('nows','nows'),
+ ('nrformats','nrformats'),
+ ('nu','nu'),
+ ('number','number'),
+ ('numberwidth','numberwidth'),
+ ('nuw','nuw'),
+ ('odev','odev'),
+ ('oft','oft'),
+ ('ofu','ofu'),
+ ('omnifunc','omnifunc'),
+ ('opendevice','opendevice'),
+ ('operatorfunc','operatorfunc'),
+ ('opfunc','opfunc'),
+ ('osfiletype','osfiletype'),
+ ('pa','pa'),
+ ('para','para'),
+ ('paragraphs','paragraphs'),
+ ('paste','paste'),
+ ('pastetoggle','pastetoggle'),
+ ('patchexpr','patchexpr'),
+ ('patchmode','patchmode'),
+ ('path','path'),
+ ('pdev','pdev'),
+ ('penc','penc'),
+ ('pex','pex'),
+ ('pexpr','pexpr'),
+ ('pfn','pfn'),
+ ('ph','ph'),
+ ('pheader','pheader'),
+ ('pi','pi'),
+ ('pm','pm'),
+ ('pmbcs','pmbcs'),
+ ('pmbfn','pmbfn'),
+ ('popt','popt'),
+ ('preserveindent','preserveindent'),
+ ('previewheight','previewheight'),
+ ('previewwindow','previewwindow'),
+ ('printdevice','printdevice'),
+ ('printencoding','printencoding'),
+ ('printexpr','printexpr'),
+ ('printfont','printfont'),
+ ('printheader','printheader'),
+ ('printmbcharset','printmbcharset'),
+ ('printmbfont','printmbfont'),
+ ('printoptions','printoptions'),
+ ('prompt','prompt'),
+ ('pt','pt'),
+ ('pumheight','pumheight'),
+ ('pvh','pvh'),
+ ('pvw','pvw'),
+ ('qe','qe'),
+ ('quoteescape','quoteescape'),
+ ('rdt','rdt'),
+ ('re','re'),
+ ('readonly','readonly'),
+ ('redrawtime','redrawtime'),
+ ('regexpengine','regexpengine'),
+ ('relativenumber','relativenumber'),
+ ('remap','remap'),
+ ('report','report'),
+ ('restorescreen','restorescreen'),
+ ('revins','revins'),
+ ('ri','ri'),
+ ('rightleft','rightleft'),
+ ('rightleftcmd','rightleftcmd'),
+ ('rl','rl'),
+ ('rlc','rlc'),
+ ('rnu','rnu'),
+ ('ro','ro'),
+ ('rs','rs'),
+ ('rtp','rtp'),
+ ('ru','ru'),
+ ('ruf','ruf'),
+ ('ruler','ruler'),
+ ('rulerformat','rulerformat'),
+ ('runtimepath','runtimepath'),
+ ('sb','sb'),
+ ('sbo','sbo'),
+ ('sbr','sbr'),
+ ('sc','sc'),
+ ('scb','scb'),
+ ('scr','scr'),
+ ('scroll','scroll'),
+ ('scrollbind','scrollbind'),
+ ('scrolljump','scrolljump'),
+ ('scrolloff','scrolloff'),
+ ('scrollopt','scrollopt'),
+ ('scs','scs'),
+ ('sect','sect'),
+ ('sections','sections'),
+ ('secure','secure'),
+ ('sel','sel'),
+ ('selection','selection'),
+ ('selectmode','selectmode'),
+ ('sessionoptions','sessionoptions'),
+ ('sft','sft'),
+ ('sh','sh'),
+ ('shcf','shcf'),
+ ('shell','shell'),
+ ('shellcmdflag','shellcmdflag'),
+ ('shellpipe','shellpipe'),
+ ('shellquote','shellquote'),
+ ('shellredir','shellredir'),
+ ('shellslash','shellslash'),
+ ('shelltemp','shelltemp'),
+ ('shelltype','shelltype'),
+ ('shellxescape','shellxescape'),
+ ('shellxquote','shellxquote'),
+ ('shiftround','shiftround'),
+ ('shiftwidth','shiftwidth'),
+ ('shm','shm'),
+ ('shortmess','shortmess'),
+ ('shortname','shortname'),
+ ('showbreak','showbreak'),
+ ('showcmd','showcmd'),
+ ('showfulltag','showfulltag'),
+ ('showmatch','showmatch'),
+ ('showmode','showmode'),
+ ('showtabline','showtabline'),
+ ('shq','shq'),
+ ('si','si'),
+ ('sidescroll','sidescroll'),
+ ('sidescrolloff','sidescrolloff'),
+ ('siso','siso'),
+ ('sj','sj'),
+ ('slm','slm'),
+ ('sm','sm'),
+ ('smartcase','smartcase'),
+ ('smartindent','smartindent'),
+ ('smarttab','smarttab'),
+ ('smc','smc'),
+ ('smd','smd'),
+ ('sn','sn'),
+ ('so','so'),
+ ('softtabstop','softtabstop'),
+ ('sol','sol'),
+ ('sp','sp'),
+ ('spc','spc'),
+ ('spell','spell'),
+ ('spellcapcheck','spellcapcheck'),
+ ('spellfile','spellfile'),
+ ('spelllang','spelllang'),
+ ('spellsuggest','spellsuggest'),
+ ('spf','spf'),
+ ('spl','spl'),
+ ('splitbelow','splitbelow'),
+ ('splitright','splitright'),
+ ('spr','spr'),
+ ('sps','sps'),
+ ('sr','sr'),
+ ('srr','srr'),
+ ('ss','ss'),
+ ('ssl','ssl'),
+ ('ssop','ssop'),
+ ('st','st'),
+ ('sta','sta'),
+ ('stal','stal'),
+ ('startofline','startofline'),
+ ('statusline','statusline'),
+ ('stl','stl'),
+ ('stmp','stmp'),
+ ('sts','sts'),
+ ('su','su'),
+ ('sua','sua'),
+ ('suffixes','suffixes'),
+ ('suffixesadd','suffixesadd'),
+ ('sw','sw'),
+ ('swapfile','swapfile'),
+ ('swapsync','swapsync'),
+ ('swb','swb'),
+ ('swf','swf'),
+ ('switchbuf','switchbuf'),
+ ('sws','sws'),
+ ('sxe','sxe'),
+ ('sxq','sxq'),
+ ('syn','syn'),
+ ('synmaxcol','synmaxcol'),
+ ('syntax','syntax'),
+ ('t_AB','t_AB'),
+ ('t_AF','t_AF'),
+ ('t_AL','t_AL'),
+ ('t_CS','t_CS'),
+ ('t_CV','t_CV'),
+ ('t_Ce','t_Ce'),
+ ('t_Co','t_Co'),
+ ('t_Cs','t_Cs'),
+ ('t_DL','t_DL'),
+ ('t_EI','t_EI'),
+ ('t_F1','t_F1'),
+ ('t_F2','t_F2'),
+ ('t_F3','t_F3'),
+ ('t_F4','t_F4'),
+ ('t_F5','t_F5'),
+ ('t_F6','t_F6'),
+ ('t_F7','t_F7'),
+ ('t_F8','t_F8'),
+ ('t_F9','t_F9'),
+ ('t_IE','t_IE'),
+ ('t_IS','t_IS'),
+ ('t_K1','t_K1'),
+ ('t_K3','t_K3'),
+ ('t_K4','t_K4'),
+ ('t_K5','t_K5'),
+ ('t_K6','t_K6'),
+ ('t_K7','t_K7'),
+ ('t_K8','t_K8'),
+ ('t_K9','t_K9'),
+ ('t_KA','t_KA'),
+ ('t_KB','t_KB'),
+ ('t_KC','t_KC'),
+ ('t_KD','t_KD'),
+ ('t_KE','t_KE'),
+ ('t_KF','t_KF'),
+ ('t_KG','t_KG'),
+ ('t_KH','t_KH'),
+ ('t_KI','t_KI'),
+ ('t_KJ','t_KJ'),
+ ('t_KK','t_KK'),
+ ('t_KL','t_KL'),
+ ('t_RI','t_RI'),
+ ('t_RV','t_RV'),
+ ('t_SI','t_SI'),
+ ('t_Sb','t_Sb'),
+ ('t_Sf','t_Sf'),
+ ('t_WP','t_WP'),
+ ('t_WS','t_WS'),
+ ('t_ZH','t_ZH'),
+ ('t_ZR','t_ZR'),
+ ('t_al','t_al'),
+ ('t_bc','t_bc'),
+ ('t_cd','t_cd'),
+ ('t_ce','t_ce'),
+ ('t_cl','t_cl'),
+ ('t_cm','t_cm'),
+ ('t_cs','t_cs'),
+ ('t_da','t_da'),
+ ('t_db','t_db'),
+ ('t_dl','t_dl'),
+ ('t_fs','t_fs'),
+ ('t_k1','t_k1'),
+ ('t_k2','t_k2'),
+ ('t_k3','t_k3'),
+ ('t_k4','t_k4'),
+ ('t_k5','t_k5'),
+ ('t_k6','t_k6'),
+ ('t_k7','t_k7'),
+ ('t_k8','t_k8'),
+ ('t_k9','t_k9'),
+ ('t_kB','t_kB'),
+ ('t_kD','t_kD'),
+ ('t_kI','t_kI'),
+ ('t_kN','t_kN'),
+ ('t_kP','t_kP'),
+ ('t_kb','t_kb'),
+ ('t_kd','t_kd'),
+ ('t_ke','t_ke'),
+ ('t_kh','t_kh'),
+ ('t_kl','t_kl'),
+ ('t_kr','t_kr'),
+ ('t_ks','t_ks'),
+ ('t_ku','t_ku'),
+ ('t_le','t_le'),
+ ('t_mb','t_mb'),
+ ('t_md','t_md'),
+ ('t_me','t_me'),
+ ('t_mr','t_mr'),
+ ('t_ms','t_ms'),
+ ('t_nd','t_nd'),
+ ('t_op','t_op'),
+ ('t_se','t_se'),
+ ('t_so','t_so'),
+ ('t_sr','t_sr'),
+ ('t_te','t_te'),
+ ('t_ti','t_ti'),
+ ('t_ts','t_ts'),
+ ('t_u7','t_u7'),
+ ('t_ue','t_ue'),
+ ('t_us','t_us'),
+ ('t_ut','t_ut'),
+ ('t_vb','t_vb'),
+ ('t_ve','t_ve'),
+ ('t_vi','t_vi'),
+ ('t_vs','t_vs'),
+ ('t_xs','t_xs'),
+ ('ta','ta'),
+ ('tabline','tabline'),
+ ('tabpagemax','tabpagemax'),
+ ('tabstop','tabstop'),
+ ('tag','tag'),
+ ('tagbsearch','tagbsearch'),
+ ('taglength','taglength'),
+ ('tagrelative','tagrelative'),
+ ('tags','tags'),
+ ('tagstack','tagstack'),
+ ('tal','tal'),
+ ('tb','tb'),
+ ('tbi','tbi'),
+ ('tbidi','tbidi'),
+ ('tbis','tbis'),
+ ('tbs','tbs'),
+ ('tenc','tenc'),
+ ('term','term'),
+ ('termbidi','termbidi'),
+ ('termencoding','termencoding'),
+ ('terse','terse'),
+ ('textauto','textauto'),
+ ('textmode','textmode'),
+ ('textwidth','textwidth'),
+ ('tf','tf'),
+ ('tgst','tgst'),
+ ('thesaurus','thesaurus'),
+ ('tildeop','tildeop'),
+ ('timeout','timeout'),
+ ('timeoutlen','timeoutlen'),
+ ('title','title'),
+ ('titlelen','titlelen'),
+ ('titleold','titleold'),
+ ('titlestring','titlestring'),
+ ('tl','tl'),
+ ('tm','tm'),
+ ('to','to'),
+ ('toolbar','toolbar'),
+ ('toolbariconsize','toolbariconsize'),
+ ('top','top'),
+ ('tpm','tpm'),
+ ('tr','tr'),
+ ('ts','ts'),
+ ('tsl','tsl'),
+ ('tsr','tsr'),
+ ('ttimeout','ttimeout'),
+ ('ttimeoutlen','ttimeoutlen'),
+ ('ttm','ttm'),
+ ('tty','tty'),
+ ('ttybuiltin','ttybuiltin'),
+ ('ttyfast','ttyfast'),
+ ('ttym','ttym'),
+ ('ttymouse','ttymouse'),
+ ('ttyscroll','ttyscroll'),
+ ('ttytype','ttytype'),
+ ('tw','tw'),
+ ('tx','tx'),
+ ('uc','uc'),
+ ('udf','udf'),
+ ('udir','udir'),
+ ('ul','ul'),
+ ('undodir','undodir'),
+ ('undofile','undofile'),
+ ('undolevels','undolevels'),
+ ('undoreload','undoreload'),
+ ('updatecount','updatecount'),
+ ('updatetime','updatetime'),
+ ('ur','ur'),
+ ('ut','ut'),
+ ('vb','vb'),
+ ('vbs','vbs'),
+ ('vdir','vdir'),
+ ('ve','ve'),
+ ('verbose','verbose'),
+ ('verbosefile','verbosefile'),
+ ('vfile','vfile'),
+ ('vi','vi'),
+ ('viewdir','viewdir'),
+ ('viewoptions','viewoptions'),
+ ('viminfo','viminfo'),
+ ('virtualedit','virtualedit'),
+ ('visualbell','visualbell'),
+ ('vnoremap','vnoremap'),
+ ('vop','vop'),
+ ('wa','wa'),
+ ('wak','wak'),
+ ('warn','warn'),
+ ('wb','wb'),
+ ('wc','wc'),
+ ('wcm','wcm'),
+ ('wd','wd'),
+ ('weirdinvert','weirdinvert'),
+ ('wfh','wfh'),
+ ('wfw','wfw'),
+ ('wh','wh'),
+ ('whichwrap','whichwrap'),
+ ('wi','wi'),
+ ('wic','wic'),
+ ('wig','wig'),
+ ('wildchar','wildchar'),
+ ('wildcharm','wildcharm'),
+ ('wildignore','wildignore'),
+ ('wildignorecase','wildignorecase'),
+ ('wildmenu','wildmenu'),
+ ('wildmode','wildmode'),
+ ('wildoptions','wildoptions'),
+ ('wim','wim'),
+ ('winaltkeys','winaltkeys'),
+ ('window','window'),
+ ('winfixheight','winfixheight'),
+ ('winfixwidth','winfixwidth'),
+ ('winheight','winheight'),
+ ('winminheight','winminheight'),
+ ('winminwidth','winminwidth'),
+ ('winwidth','winwidth'),
+ ('wiv','wiv'),
+ ('wiw','wiw'),
+ ('wm','wm'),
+ ('wmh','wmh'),
+ ('wmnu','wmnu'),
+ ('wmw','wmw'),
+ ('wop','wop'),
+ ('wrap','wrap'),
+ ('wrapmargin','wrapmargin'),
+ ('wrapscan','wrapscan'),
+ ('write','write'),
+ ('writeany','writeany'),
+ ('writebackup','writebackup'),
+ ('writedelay','writedelay'),
+ ('ws','ws'),
+ ('ww','ww'),
+ )
+ return var
+option = _getoption()
+
diff --git a/contrib/python/Pygments/py3/pygments/lexers/actionscript.py b/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
index 28625586bd..423150004c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
@@ -1,244 +1,244 @@
-"""
- pygments.lexers.actionscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for ActionScript and MXML.
-
+"""
+ pygments.lexers.actionscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for ActionScript and MXML.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, using, this, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, using, this, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
-
-
-class ActionScriptLexer(RegexLexer):
- """
- For ActionScript source code.
-
- .. versionadded:: 0.9
- """
-
- name = 'ActionScript'
+
+__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
+
+
+class ActionScriptLexer(RegexLexer):
+ """
+ For ActionScript source code.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'ActionScript'
aliases = ['actionscript', 'as']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript', 'text/x-actionscript',
- 'text/actionscript']
-
- flags = re.DOTALL
- tokens = {
- 'root': [
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript', 'text/x-actionscript',
+ 'text/actionscript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex),
- (r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
- (r'[{}\[\]();.]+', Punctuation),
- (words((
- 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
- 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
- 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
- 'switch'), suffix=r'\b'),
- Keyword),
- (words((
- 'class', 'public', 'final', 'internal', 'native', 'override', 'private',
- 'protected', 'static', 'import', 'extends', 'implements', 'interface',
- 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
- 'namespace', 'package', 'set'), suffix=r'\b'),
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
- Keyword.Constant),
- (words((
- 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
- 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
- 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
- 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
- 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
- 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
- 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
- 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
- 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject',
- 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
- 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
- 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
- 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
- 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
- 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
- 'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput'
- 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
- 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
- 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
- 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
- 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
- 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
- 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
- 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
- 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
- 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
- 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
- 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
- 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
- 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
- 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
- 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
- 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
- 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
- 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
- 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
- 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError',
- 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
- 'XMLUI'), suffix=r'\b'),
- Name.Builtin),
- (words((
- 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
- 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
- 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
- 'unescape'), suffix=r'\b'),
- Name.Function),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+ (r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
+ (r'[{}\[\]();.]+', Punctuation),
+ (words((
+ 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
+ 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
+ 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
+ 'switch'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'class', 'public', 'final', 'internal', 'native', 'override', 'private',
+ 'protected', 'static', 'import', 'extends', 'implements', 'interface',
+ 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
+ 'namespace', 'package', 'set'), suffix=r'\b'),
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
+ Keyword.Constant),
+ (words((
+ 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
+ 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
+ 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
+ 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
+ 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
+ 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
+ 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
+ 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
+ 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject',
+ 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
+ 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
+ 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
+ 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
+ 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
+ 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
+ 'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput'
+ 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
+ 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
+ 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
+ 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
+ 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
+ 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
+ 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
+ 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
+ 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
+ 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
+ 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
+ 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
+ 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
+ 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
+ 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
+ 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
+ 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
+ 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
+ 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
+ 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
+ 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError',
+ 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
+ 'XMLUI'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
+ 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
+ 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
+ 'unescape'), suffix=r'\b'),
+ Name.Function),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
+ ]
+ }
+
def analyse_text(text):
"""This is only used to disambiguate between ActionScript and
ActionScript3. We return 0 here; the ActionScript3 lexer will match
AS3 variable definitions and that will hopefully suffice."""
return 0
-
-class ActionScript3Lexer(RegexLexer):
- """
- For ActionScript 3 source code.
-
- .. versionadded:: 0.11
- """
-
- name = 'ActionScript 3'
+
+class ActionScript3Lexer(RegexLexer):
+ """
+ For ActionScript 3 source code.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'ActionScript 3'
aliases = ['actionscript3', 'as3']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
- 'text/actionscript3']
-
- identifier = r'[$a-zA-Z_]\w*'
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
+ 'text/actionscript3']
+
+ identifier = r'[$a-zA-Z_]\w*'
typeidentifier = identifier + r'(?:\.<\w+>)?'
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
+
+ flags = re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'(function\s+)(' + identifier + r')(\s*)(\()',
- bygroups(Keyword.Declaration, Name.Function, Text, Operator),
- 'funcparams'),
- (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r')',
+ (r'(function\s+)(' + identifier + r')(\s*)(\()',
+ bygroups(Keyword.Declaration, Name.Function, Text, Operator),
+ 'funcparams'),
+ (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r')',
bygroups(Keyword.Declaration, Whitespace, Name, Whitespace, Punctuation, Whitespace,
- Keyword.Type)),
- (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
+ Keyword.Type)),
+ (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
bygroups(Keyword, Whitespace, Name.Namespace, Whitespace)),
- (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
+ (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
bygroups(Keyword, Whitespace, Keyword.Type, Whitespace, Operator)),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\[^\\]|[^\\\n])*/[gisx]*', String.Regex),
- (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
- (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
- r'switch|import|include|as|is)\b',
- Keyword),
- (r'(class|public|final|internal|native|override|private|protected|'
- r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b',
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
- r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
- r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
- r'unescape)\b', Name.Function),
- (identifier, Name),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+ (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
+ (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
+ r'switch|import|include|as|is)\b',
+ Keyword),
+ (r'(class|public|final|internal|native|override|private|protected|'
+ r'static|import|extends|implements|interface|intrinsic|return|super|'
+ r'dynamic|function|const|get|namespace|package|set)\b',
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
+ r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
+ r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
+ r'unescape)\b', Name.Function),
+ (identifier, Name),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
- ],
- 'funcparams': [
+ (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
+ ],
+ 'funcparams': [
(r'\s+', Whitespace),
- (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r'|\*)(\s*)',
+ (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r'|\*)(\s*)',
bygroups(Whitespace, Punctuation, Name, Whitespace, Operator, Whitespace,
Keyword.Type, Whitespace), 'defval'),
- (r'\)', Operator, 'type')
- ],
- 'type': [
- (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
+ (r'\)', Operator, 'type')
+ ],
+ 'type': [
+ (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
bygroups(Whitespace, Operator, Whitespace, Keyword.Type), '#pop:2'),
- (r'\s+', Text, '#pop:2'),
- default('#pop:2')
- ],
- 'defval': [
- (r'(=)(\s*)([^(),]+)(\s*)(,?)',
+ (r'\s+', Text, '#pop:2'),
+ default('#pop:2')
+ ],
+ 'defval': [
+ (r'(=)(\s*)([^(),]+)(\s*)(,?)',
bygroups(Operator, Whitespace, using(this), Whitespace, Operator), '#pop'),
- (r',', Operator, '#pop'),
- default('#pop')
- ]
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
- return 0
-
-
-class MxmlLexer(RegexLexer):
- """
- For MXML markup.
- Nested AS3 in <script> tags is highlighted by the appropriate lexer.
-
- .. versionadded:: 1.1
- """
- flags = re.MULTILINE | re.DOTALL
- name = 'MXML'
- aliases = ['mxml']
- filenames = ['*.mxml']
- mimetimes = ['text/xml', 'application/xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
- bygroups(String, using(ActionScript3Lexer), String)),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
+ (r',', Operator, '#pop'),
+ default('#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+ return 0
+
+
+class MxmlLexer(RegexLexer):
+ """
+ For MXML markup.
+ Nested AS3 in <script> tags is highlighted by the appropriate lexer.
+
+ .. versionadded:: 1.1
+ """
+ flags = re.MULTILINE | re.DOTALL
+ name = 'MXML'
+ aliases = ['mxml']
+ filenames = ['*.mxml']
+ mimetimes = ['text/xml', 'application/xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
+ bygroups(String, using(ActionScript3Lexer), String)),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
(r'\s+', Whitespace),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
(r'\s+', Whitespace),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/agile.py b/contrib/python/Pygments/py3/pygments/lexers/agile.py
index e309624fa9..58711bb30c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/agile.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/agile.py
@@ -1,23 +1,23 @@
-"""
- pygments.lexers.agile
- ~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+"""
+ pygments.lexers.agile
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.lisp import SchemeLexer
-from pygments.lexers.jvm import IokeLexer, ClojureLexer
-from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
- PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
-from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
-from pygments.lexers.perl import PerlLexer, Perl6Lexer
-from pygments.lexers.d import CrocLexer, MiniDLexer
-from pygments.lexers.iolang import IoLexer
-from pygments.lexers.tcl import TclLexer
-from pygments.lexers.factor import FactorLexer
-from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.lisp import SchemeLexer
+from pygments.lexers.jvm import IokeLexer, ClojureLexer
+from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
+ PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
+from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
+from pygments.lexers.perl import PerlLexer, Perl6Lexer
+from pygments.lexers.d import CrocLexer, MiniDLexer
+from pygments.lexers.iolang import IoLexer
+from pygments.lexers.tcl import TclLexer
+from pygments.lexers.factor import FactorLexer
+from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
+
+__all__ = []
diff --git a/contrib/python/Pygments/py3/pygments/lexers/algebra.py b/contrib/python/Pygments/py3/pygments/lexers/algebra.py
index 3e5c47b8dd..f5ac9857ca 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/algebra.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/algebra.py
@@ -1,75 +1,75 @@
-"""
- pygments.lexers.algebra
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for computer algebra systems.
-
+"""
+ pygments.lexers.algebra
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer algebra systems.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer', 'BCLexer']
-
-
-class GAPLexer(RegexLexer):
- """
- For `GAP <http://www.gap-system.org>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'GAP'
- aliases = ['gap']
- filenames = ['*.g', '*.gd', '*.gi', '*.gap']
-
- tokens = {
- 'root': [
- (r'#.*$', Comment.Single),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- if|then|elif|else|fi|
- for|while|do|od|
- repeat|until|
- break|continue|
- function|local|return|end|
- rec|
- quit|QUIT|
- IsBound|Unbind|
- TryNextMethod|
- Info|Assert
- )\b''', Keyword),
- (r'''(?x)\b(?:
- true|false|fail|infinity
- )\b''',
- Name.Constant),
- (r'''(?x)\b(?:
- (Declare|Install)([A-Z][A-Za-z]+)|
- BindGlobal|BIND_GLOBAL
- )\b''',
- Name.Builtin),
- (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
- (r'''(?x)\b(?:
- and|or|not|mod|in
- )\b''',
- Operator.Word),
- (r'''(?x)
- (?:\w+|`[^`]*`)
- (?:::\w+|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'.', Text)
- ],
- }
-
+
+__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer', 'BCLexer']
+
+
+class GAPLexer(RegexLexer):
+ """
+ For `GAP <http://www.gap-system.org>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'GAP'
+ aliases = ['gap']
+ filenames = ['*.g', '*.gd', '*.gi', '*.gap']
+
+ tokens = {
+ 'root': [
+ (r'#.*$', Comment.Single),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ if|then|elif|else|fi|
+ for|while|do|od|
+ repeat|until|
+ break|continue|
+ function|local|return|end|
+ rec|
+ quit|QUIT|
+ IsBound|Unbind|
+ TryNextMethod|
+ Info|Assert
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ true|false|fail|infinity
+ )\b''',
+ Name.Constant),
+ (r'''(?x)\b(?:
+ (Declare|Install)([A-Z][A-Za-z]+)|
+ BindGlobal|BIND_GLOBAL
+ )\b''',
+ Name.Builtin),
+ (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|mod|in
+ )\b''',
+ Operator.Word),
+ (r'''(?x)
+ (?:\w+|`[^`]*`)
+ (?:::\w+|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
+ (r'.', Text)
+ ],
+ }
+
def analyse_text(text):
score = 0.0
-
+
# Declaration part
if re.search(
r"(InstallTrueMethod|Declare(Attribute|Category|Filter|Operation" +
@@ -87,154 +87,154 @@ class GAPLexer(RegexLexer):
return min(score, 1.0)
-class MathematicaLexer(RegexLexer):
- """
- Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Mathematica'
- aliases = ['mathematica', 'mma', 'nb']
- filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
- mimetypes = ['application/mathematica',
- 'application/vnd.wolfram.mathematica',
- 'application/vnd.wolfram.mathematica.package',
- 'application/vnd.wolfram.cdf']
-
- # http://reference.wolfram.com/mathematica/guide/Syntax.html
- operators = (
-        ";;", "=", "=.", "!=", "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
- "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
- "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
- )
-
- punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
-
- def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
-
- tokens = {
- 'root': [
- (r'(?s)\(\*.*?\*\)', Comment),
-
- (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
- (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
- (r'#\d*', Name.Variable),
- (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
-
+class MathematicaLexer(RegexLexer):
+ """
+ Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mathematica'
+ aliases = ['mathematica', 'mma', 'nb']
+ filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
+ mimetypes = ['application/mathematica',
+ 'application/vnd.wolfram.mathematica',
+ 'application/vnd.wolfram.mathematica.package',
+ 'application/vnd.wolfram.cdf']
+
+ # http://reference.wolfram.com/mathematica/guide/Syntax.html
+ operators = (
+        ";;", "=", "=.", "!=", "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
+ "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
+ "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
+ )
+
+ punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
+
+ def _multi_escape(entries):
+ return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
+
+ tokens = {
+ 'root': [
+ (r'(?s)\(\*.*?\*\)', Comment),
+
+ (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
+ (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
+ (r'#\d*', Name.Variable),
+ (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
+
(r'-?\d+\.\d*', Number.Float),
(r'-?\d*\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
-
- (words(operators), Operator),
- (words(punctuation), Punctuation),
- (r'".*?"', String),
- (r'\s+', Text.Whitespace),
- ],
- }
-
-
-class MuPADLexer(RegexLexer):
- """
- A `MuPAD <http://www.mupad.com>`_ lexer.
- Contributed by Christopher Creutzig <christopher@creutzig.de>.
-
- .. versionadded:: 0.8
- """
- name = 'MuPAD'
- aliases = ['mupad']
- filenames = ['*.mu']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- next|break|end|
- axiom|end_axiom|category|end_category|domain|end_domain|inherits|
- if|%if|then|elif|else|end_if|
- case|of|do|otherwise|end_case|
- while|end_while|
- repeat|until|end_repeat|
- for|from|to|downto|step|end_for|
- proc|local|option|save|begin|end_proc|
- delete|frame
- )\b''', Keyword),
- (r'''(?x)\b(?:
- DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
- DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
- DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
- DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
- )\b''', Name.Class),
- (r'''(?x)\b(?:
- PI|EULER|E|CATALAN|
- NIL|FAIL|undefined|infinity|
- TRUE|FALSE|UNKNOWN
- )\b''',
- Name.Constant),
- (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
- (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
- (r'''(?x)\b(?:
- and|or|not|xor|
- assuming|
- div|mod|
- union|minus|intersect|in|subset
- )\b''',
- Operator.Word),
- (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
- # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
- (r'''(?x)
- ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
- (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
- bygroups(Name.Function, Text, Punctuation)),
- (r'''(?x)
- (?:[a-zA-Z_#][\w#]*|`[^`]*`)
- (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
+
+ (words(operators), Operator),
+ (words(punctuation), Punctuation),
+ (r'".*?"', String),
+ (r'\s+', Text.Whitespace),
+ ],
+ }
+
+
+class MuPADLexer(RegexLexer):
+ """
+ A `MuPAD <http://www.mupad.com>`_ lexer.
+ Contributed by Christopher Creutzig <christopher@creutzig.de>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'MuPAD'
+ aliases = ['mupad']
+ filenames = ['*.mu']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ next|break|end|
+ axiom|end_axiom|category|end_category|domain|end_domain|inherits|
+ if|%if|then|elif|else|end_if|
+ case|of|do|otherwise|end_case|
+ while|end_while|
+ repeat|until|end_repeat|
+ for|from|to|downto|step|end_for|
+ proc|local|option|save|begin|end_proc|
+ delete|frame
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
+ DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
+ DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
+ DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
+ )\b''', Name.Class),
+ (r'''(?x)\b(?:
+ PI|EULER|E|CATALAN|
+ NIL|FAIL|undefined|infinity|
+ TRUE|FALSE|UNKNOWN
+ )\b''',
+ Name.Constant),
+ (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
+ (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|xor|
+ assuming|
+ div|mod|
+ union|minus|intersect|in|subset
+ )\b''',
+ Operator.Word),
+ (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
+ # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
+ (r'''(?x)
+ ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
+ (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'''(?x)
+ (?:[a-zA-Z_#][\w#]*|`[^`]*`)
+ (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
(r'\s+', Whitespace),
- (r'.', Text)
- ],
- 'comment': [
+ (r'.', Text)
+ ],
+ 'comment': [
(r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
-
-
-class BCLexer(RegexLexer):
- """
- A `BC <https://www.gnu.org/software/bc/>`_ lexer.
-
- .. versionadded:: 2.1
- """
- name = 'BC'
- aliases = ['bc']
- filenames = ['*.bc']
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'[{}();,]', Punctuation),
- (words(('if', 'else', 'while', 'for', 'break', 'continue',
- 'halt', 'return', 'define', 'auto', 'print', 'read',
- 'length', 'scale', 'sqrt', 'limits', 'quit',
- 'warranty'), suffix=r'\b'), Keyword),
- (r'\+\+|--|\|\||&&|'
- r'([-<>+*%\^/!=])=?', Operator),
- # bc doesn't support exponential
- (r'[0-9]+(\.[0-9]*)?', Number),
- (r'\.[0-9]+', Number),
- (r'.', Text)
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ }
+
+
+class BCLexer(RegexLexer):
+ """
+ A `BC <https://www.gnu.org/software/bc/>`_ lexer.
+
+ .. versionadded:: 2.1
+ """
+ name = 'BC'
+ aliases = ['bc']
+ filenames = ['*.bc']
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'[{}();,]', Punctuation),
+ (words(('if', 'else', 'while', 'for', 'break', 'continue',
+ 'halt', 'return', 'define', 'auto', 'print', 'read',
+ 'length', 'scale', 'sqrt', 'limits', 'quit',
+ 'warranty'), suffix=r'\b'), Keyword),
+ (r'\+\+|--|\|\||&&|'
+ r'([-<>+*%\^/!=])=?', Operator),
+ # bc doesn't support exponential
+ (r'[0-9]+(\.[0-9]*)?', Number),
+ (r'\.[0-9]+', Number),
+ (r'.', Text)
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ }
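The algebra.py lexers above expose the usual RegexLexer interface, so their token streams can be inspected directly. A minimal sketch, assuming the bundled Pygments is importable; the BC source string is made up for illustration:

from pygments.lexers.algebra import BCLexer

bc_source = 'define f(x) { return x*x }\nf(4)\n'  # hypothetical bc program
# get_tokens() runs the 'root'/'comment' state machine defined above and
# yields (token_type, text) pairs.
for token_type, text in BCLexer().get_tokens(bc_source):
    print(token_type, repr(text))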
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ambient.py b/contrib/python/Pygments/py3/pygments/lexers/ambient.py
index 5f82804a03..38423a77d7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ambient.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ambient.py
@@ -1,75 +1,75 @@
-"""
- pygments.lexers.ambient
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for AmbientTalk language.
-
+"""
+ pygments.lexers.ambient
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for AmbientTalk language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['AmbientTalkLexer']
-
-
-class AmbientTalkLexer(RegexLexer):
- """
- Lexer for `AmbientTalk <https://code.google.com/p/ambienttalk>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'AmbientTalk'
- filenames = ['*.at']
+
+__all__ = ['AmbientTalkLexer']
+
+
+class AmbientTalkLexer(RegexLexer):
+ """
+ Lexer for `AmbientTalk <https://code.google.com/p/ambienttalk>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'AmbientTalk'
+ filenames = ['*.at']
aliases = ['ambienttalk', 'ambienttalk/2', 'at']
- mimetypes = ['text/x-ambienttalk']
-
- flags = re.MULTILINE | re.DOTALL
-
- builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
- 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
- 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
- 'mirroredBy:', 'is:'))
- tokens = {
- 'root': [
+ mimetypes = ['text/x-ambienttalk']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
+ 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
+ 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
+ 'mirroredBy:', 'is:'))
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(def|deftype|import|alias|exclude)\b', Keyword),
- (builtin, Name.Builtin),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(def|deftype|import|alias|exclude)\b', Keyword),
+ (builtin, Name.Builtin),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'\|', Punctuation, 'arglist'),
- (r'<:|[*^!%&<>+=,./?-]|:=', Operator),
- (r"`[a-zA-Z_]\w*", String.Symbol),
- (r"[a-zA-Z_]\w*:", Name.Function),
- (r"[{}()\[\];`]", Punctuation),
- (r'(self|super)\b', Name.Variable.Instance),
- (r"[a-zA-Z_]\w*", Name.Variable),
- (r"@[a-zA-Z_]\w*", Name.Class),
- (r"@\[", Name.Class, 'annotations'),
- include('numbers'),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'namespace': [
- (r'[a-zA-Z_]\w*\.', Name.Namespace),
- (r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
- (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
- ],
- 'annotations': [
- (r"(.*?)\]", Name.Class, '#pop')
- ],
- 'arglist': [
- (r'\|', Punctuation, '#pop'),
+ (r'\|', Punctuation, 'arglist'),
+ (r'<:|[*^!%&<>+=,./?-]|:=', Operator),
+ (r"`[a-zA-Z_]\w*", String.Symbol),
+ (r"[a-zA-Z_]\w*:", Name.Function),
+ (r"[{}()\[\];`]", Punctuation),
+ (r'(self|super)\b', Name.Variable.Instance),
+ (r"[a-zA-Z_]\w*", Name.Variable),
+ (r"@[a-zA-Z_]\w*", Name.Class),
+ (r"@\[", Name.Class, 'annotations'),
+ include('numbers'),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_]\w*\.', Name.Namespace),
+ (r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
+ (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
+ ],
+ 'annotations': [
+ (r"(.*?)\]", Name.Class, '#pop')
+ ],
+ 'arglist': [
+ (r'\|', Punctuation, '#pop'),
(r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
- (r'[a-zA-Z_]\w*', Name.Variable),
- ],
- }
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ ],
+ }
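A minimal usage sketch for the AmbientTalk lexer above; the source fragment and the formatter choice are assumptions, not part of the commit:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.ambient import AmbientTalkLexer

# Made-up AmbientTalk snippet; the leading '|' of the block parameters
# pushes the lexer into its 'arglist' state and the closing '|' pops it.
code = 'def add := { |a, b| a + b };\n'
print(highlight(code, AmbientTalkLexer(), HtmlFormatter()))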
diff --git a/contrib/python/Pygments/py3/pygments/lexers/apl.py b/contrib/python/Pygments/py3/pygments/lexers/apl.py
index ab6bbe51df..5024c0c7e2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/apl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/apl.py
@@ -1,103 +1,103 @@
-"""
- pygments.lexers.apl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for APL.
-
+"""
+ pygments.lexers.apl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for APL.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['APLLexer']
-
-
-class APLLexer(RegexLexer):
- """
+
+__all__ = ['APLLexer']
+
+
+class APLLexer(RegexLexer):
+ """
A simple `APL <https://en.m.wikipedia.org/wiki/APL_(programming_language)>`_ lexer.
-
- .. versionadded:: 2.0
- """
- name = 'APL'
- aliases = ['apl']
+
+ .. versionadded:: 2.0
+ """
+ name = 'APL'
+ aliases = ['apl']
filenames = [
'*.apl', '*.aplf', '*.aplo', '*.apln',
'*.aplc', '*.apli', '*.dyalog',
]
-
- tokens = {
- 'root': [
- # Whitespace
- # ==========
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ # ==========
(r'\s+', Whitespace),
- #
- # Comment
- # =======
- # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
+ #
+ # Comment
+ # =======
+ # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
(r'[⍝#].*$', Comment.Single),
- #
- # Strings
- # =======
- (r'\'((\'\')|[^\'])*\'', String.Single),
- (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
- #
- # Punctuation
- # ===========
- # This token type is used for diamond and parenthesis
- # but not for bracket and ; (see below)
+ #
+ # Strings
+ # =======
+ (r'\'((\'\')|[^\'])*\'', String.Single),
+ (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
+ #
+ # Punctuation
+ # ===========
+ # This token type is used for diamond and parenthesis
+ # but not for bracket and ; (see below)
(r'[⋄◇()]', Punctuation),
- #
- # Array indexing
- # ==============
- # Since this token type is very important in APL, it is not included in
- # the punctuation token type but rather in the following one
- (r'[\[\];]', String.Regex),
- #
- # Distinguished names
- # ===================
- # following IBM APL2 standard
+ #
+ # Array indexing
+ # ==============
+ # Since this token type is very important in APL, it is not included in
+ # the punctuation token type but rather in the following one
+ (r'[\[\];]', String.Regex),
+ #
+ # Distinguished names
+ # ===================
+ # following IBM APL2 standard
(r'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
- #
- # Labels
- # ======
- # following IBM APL2 standard
+ #
+ # Labels
+ # ======
+ # following IBM APL2 standard
# (r'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
- #
- # Variables
- # =========
+ #
+ # Variables
+ # =========
# following IBM APL2 standard (with a leading _ ok for GNU APL and Dyalog)
(r'[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
- #
- # Numbers
- # =======
+ #
+ # Numbers
+ # =======
(r'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
r'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
- Number),
- #
- # Operators
- # ==========
+ Number),
+ #
+ # Operators
+ # ==========
(r'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘⌸&⌶@⌺⍥⍛⍢]', Name.Attribute), # closest token type
(r'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⊇⍸√⌾…⍮]',
- Operator),
- #
- # Constant
- # ========
+ Operator),
+ #
+ # Constant
+ # ========
(r'⍬', Name.Constant),
- #
- # Quad symbol
- # ===========
+ #
+ # Quad symbol
+ # ===========
(r'[⎕⍞]', Name.Variable.Global),
- #
- # Arrows left/right
- # =================
+ #
+ # Arrows left/right
+ # =================
(r'[←→]', Keyword.Declaration),
- #
- # D-Fn
- # ====
+ #
+ # D-Fn
+ # ====
(r'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
- (r'[{}]', Keyword.Type),
- ],
- }
+ (r'[{}]', Keyword.Type),
+ ],
+ }
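Because the APL lexer above registers the 'apl' alias, it can also be resolved by name through the regular Pygments lookup machinery. An illustrative sketch; the APL one-liner is made up:

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('apl')  # resolves to APLLexer via the alias above
# Hypothetical line: assignment arrow, iota, and a trailing '⍝' comment.
for token_type, text in lexer.get_tokens('x ← ⍳10 ⍝ first ten integers\n'):
    print(token_type, repr(text))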
diff --git a/contrib/python/Pygments/py3/pygments/lexers/archetype.py b/contrib/python/Pygments/py3/pygments/lexers/archetype.py
index 4dfd6672fa..7e525a2e4d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/archetype.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/archetype.py
@@ -1,317 +1,317 @@
-"""
- pygments.lexers.archetype
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Archetype-related syntaxes, including:
-
- - ODIN syntax <https://github.com/openEHR/odin>
- - ADL syntax <http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf>
- - cADL sub-syntax of ADL
-
- For uses of this syntax, see the openEHR archetypes <http://www.openEHR.org/ckm>
-
- Contributed by Thomas Beale <https://github.com/wolandscat>,
- <https://bitbucket.org/thomas_beale>.
-
+"""
+ pygments.lexers.archetype
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Archetype-related syntaxes, including:
+
+ - ODIN syntax <https://github.com/openEHR/odin>
+ - ADL syntax <http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf>
+ - cADL sub-syntax of ADL
+
+ For uses of this syntax, see the openEHR archetypes <http://www.openEHR.org/ckm>
+
+ Contributed by Thomas Beale <https://github.com/wolandscat>,
+ <https://bitbucket.org/thomas_beale>.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, using, default
-from pygments.token import Text, Comment, Name, Literal, Number, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, using, default
+from pygments.token import Text, Comment, Name, Literal, Number, String, \
Punctuation, Keyword, Operator, Generic, Whitespace
-
-__all__ = ['OdinLexer', 'CadlLexer', 'AdlLexer']
-
-
-class AtomsLexer(RegexLexer):
- """
- Lexer for Values used in ADL and ODIN.
-
- .. versionadded:: 2.1
- """
-
- tokens = {
- # ----- pseudo-states for inclusion -----
- 'whitespace': [
+
+__all__ = ['OdinLexer', 'CadlLexer', 'AdlLexer']
+
+
+class AtomsLexer(RegexLexer):
+ """
+ Lexer for Values used in ADL and ODIN.
+
+ .. versionadded:: 2.1
+ """
+
+ tokens = {
+ # ----- pseudo-states for inclusion -----
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'([ \t]*)(--.*)$', bygroups(Whitespace, Comment)),
- ],
- 'archetype_id': [
+ ],
+ 'archetype_id': [
(r'([ \t]*)(([a-zA-Z]\w+(\.[a-zA-Z]\w+)*::)?[a-zA-Z]\w+(-[a-zA-Z]\w+){2}'
r'\.\w+[\w-]*\.v\d+(\.\d+){,2}((-[a-z]+)(\.\d+)?)?)', bygroups(Whitespace, Name.Decorator)),
- ],
- 'date_constraints': [
- # ISO 8601-based date/time constraints
- (r'[Xx?YyMmDdHhSs\d]{2,4}([:-][Xx?YyMmDdHhSs\d]{2}){2}', Literal.Date),
- # ISO 8601-based duration constraints + optional trailing slash
- (r'(P[YyMmWwDd]+(T[HhMmSs]+)?|PT[HhMmSs]+)/?', Literal.Date),
- ],
- 'ordered_values': [
- # ISO 8601 date with optional 'T' ligature
- (r'\d{4}-\d{2}-\d{2}T?', Literal.Date),
- # ISO 8601 time
- (r'\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{4}|Z)?', Literal.Date),
- # ISO 8601 duration
- (r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
- r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
- (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ ],
+ 'date_constraints': [
+ # ISO 8601-based date/time constraints
+ (r'[Xx?YyMmDdHhSs\d]{2,4}([:-][Xx?YyMmDdHhSs\d]{2}){2}', Literal.Date),
+ # ISO 8601-based duration constraints + optional trailing slash
+ (r'(P[YyMmWwDd]+(T[HhMmSs]+)?|PT[HhMmSs]+)/?', Literal.Date),
+ ],
+ 'ordered_values': [
+ # ISO 8601 date with optional 'T' ligature
+ (r'\d{4}-\d{2}-\d{2}T?', Literal.Date),
+ # ISO 8601 time
+ (r'\d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{4}|Z)?', Literal.Date),
+ # ISO 8601 duration
+ (r'P((\d*(\.\d+)?[YyMmWwDd]){1,3}(T(\d*(\.\d+)?[HhMmSs]){,3})?|'
+ r'T(\d*(\.\d+)?[HhMmSs]){,3})', Literal.Date),
+ (r'[+-]?(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
(r'[+-]?\d*\.\d+%?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[+-]?\d+%?', Number.Integer),
- ],
- 'values': [
- include('ordered_values'),
- (r'([Tt]rue|[Ff]alse)', Literal),
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'[a-z][a-z0-9+.-]*:', Literal, 'uri'),
- # term code
- (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator,
- Punctuation)),
- (r'\|', Punctuation, 'interval'),
- # list continuation
- (r'\.\.\.', Punctuation),
- ],
- 'constraint_values': [
- (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)',
- bygroups(Punctuation, Name.Decorator, Punctuation), 'adl14_code_constraint'),
- # ADL 1.4 ordinal constraint
- (r'(\d*)(\|)(\[\w[\w-]*::\w[\w-]*\])((?:[,;])?)',
- bygroups(Number, Punctuation, Name.Decorator, Punctuation)),
- include('date_constraints'),
- include('values'),
- ],
-
- # ----- real states -----
- 'string': [
- ('"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- # all other characters
- (r'[^\\"]+', String),
- # stray backslash
- (r'\\', String),
- ],
- 'uri': [
- # effective URI terminators
- (r'[,>\s]', Punctuation, '#pop'),
- (r'[^>\s,]+', Literal),
- ],
- 'interval': [
- (r'\|', Punctuation, '#pop'),
- include('ordered_values'),
- (r'\.\.', Punctuation),
- (r'[<>=] *', Punctuation),
- # handle +/-
- (r'\+/-', Punctuation),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[+-]?\d+%?', Number.Integer),
+ ],
+ 'values': [
+ include('ordered_values'),
+ (r'([Tt]rue|[Ff]alse)', Literal),
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'[a-z][a-z0-9+.-]*:', Literal, 'uri'),
+ # term code
+ (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)(\w[\w-]*)(\])',
+ bygroups(Punctuation, Name.Decorator, Punctuation, Name.Decorator,
+ Punctuation)),
+ (r'\|', Punctuation, 'interval'),
+ # list continuation
+ (r'\.\.\.', Punctuation),
+ ],
+ 'constraint_values': [
+ (r'(\[)(\w[\w-]*(?:\([^)\n]+\))?)(::)',
+ bygroups(Punctuation, Name.Decorator, Punctuation), 'adl14_code_constraint'),
+ # ADL 1.4 ordinal constraint
+ (r'(\d*)(\|)(\[\w[\w-]*::\w[\w-]*\])((?:[,;])?)',
+ bygroups(Number, Punctuation, Name.Decorator, Punctuation)),
+ include('date_constraints'),
+ include('values'),
+ ],
+
+ # ----- real states -----
+ 'string': [
+ ('"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ # all other characters
+ (r'[^\\"]+', String),
+ # stray backslash
+ (r'\\', String),
+ ],
+ 'uri': [
+ # effective URI terminators
+ (r'[,>\s]', Punctuation, '#pop'),
+ (r'[^>\s,]+', Literal),
+ ],
+ 'interval': [
+ (r'\|', Punctuation, '#pop'),
+ include('ordered_values'),
+ (r'\.\.', Punctuation),
+ (r'[<>=] *', Punctuation),
+ # handle +/-
+ (r'\+/-', Punctuation),
(r'\s+', Whitespace),
- ],
- 'any_code': [
- include('archetype_id'),
- # if it is a code
- (r'[a-z_]\w*[0-9.]+(@[^\]]+)?', Name.Decorator),
- # if it is tuple with attribute names
- (r'[a-z_]\w*', Name.Class),
- # if it is an integer, i.e. Xpath child index
- (r'[0-9]+', Text),
- (r'\|', Punctuation, 'code_rubric'),
- (r'\]', Punctuation, '#pop'),
- # handle use_archetype statement
+ ],
+ 'any_code': [
+ include('archetype_id'),
+ # if it is a code
+ (r'[a-z_]\w*[0-9.]+(@[^\]]+)?', Name.Decorator),
+ # if it is tuple with attribute names
+ (r'[a-z_]\w*', Name.Class),
+ # if it is an integer, i.e. Xpath child index
+ (r'[0-9]+', Text),
+ (r'\|', Punctuation, 'code_rubric'),
+ (r'\]', Punctuation, '#pop'),
+ # handle use_archetype statement
(r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace)),
- ],
- 'code_rubric': [
- (r'\|', Punctuation, '#pop'),
- (r'[^|]+', String),
- ],
- 'adl14_code_constraint': [
- (r'\]', Punctuation, '#pop'),
- (r'\|', Punctuation, 'code_rubric'),
- (r'(\w[\w-]*)([;,]?)', bygroups(Name.Decorator, Punctuation)),
- include('whitespace'),
- ],
- }
-
-
-class OdinLexer(AtomsLexer):
- """
- Lexer for ODIN syntax.
-
- .. versionadded:: 2.1
- """
- name = 'ODIN'
- aliases = ['odin']
- filenames = ['*.odin']
- mimetypes = ['text/odin']
-
- tokens = {
- 'path': [
- (r'>', Punctuation, '#pop'),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'/', Punctuation),
- (r'\[', Punctuation, 'key'),
+ ],
+ 'code_rubric': [
+ (r'\|', Punctuation, '#pop'),
+ (r'[^|]+', String),
+ ],
+ 'adl14_code_constraint': [
+ (r'\]', Punctuation, '#pop'),
+ (r'\|', Punctuation, 'code_rubric'),
+ (r'(\w[\w-]*)([;,]?)', bygroups(Name.Decorator, Punctuation)),
+ include('whitespace'),
+ ],
+ }
+
+
+class OdinLexer(AtomsLexer):
+ """
+ Lexer for ODIN syntax.
+
+ .. versionadded:: 2.1
+ """
+ name = 'ODIN'
+ aliases = ['odin']
+ filenames = ['*.odin']
+ mimetypes = ['text/odin']
+
+ tokens = {
+ 'path': [
+ (r'>', Punctuation, '#pop'),
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'/', Punctuation),
+ (r'\[', Punctuation, 'key'),
(r'(\s*)(,)(\s*)', bygroups(Whitespace, Punctuation, Whitespace), '#pop'),
(r'\s+', Whitespace, '#pop'),
- ],
- 'key': [
- include('values'),
- (r'\]', Punctuation, '#pop'),
- ],
- 'type_cast': [
- (r'\)', Punctuation, '#pop'),
- (r'[^)]+', Name.Class),
- ],
- 'root': [
- include('whitespace'),
- (r'([Tt]rue|[Ff]alse)', Literal),
- include('values'),
- # x-ref path
- (r'/', Punctuation, 'path'),
- # x-ref path starting with key
- (r'\[', Punctuation, 'key'),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'=', Operator),
- (r'\(', Punctuation, 'type_cast'),
- (r',', Punctuation),
- (r'<', Punctuation),
- (r'>', Punctuation),
- (r';', Punctuation),
- ],
- }
-
-
-class CadlLexer(AtomsLexer):
- """
- Lexer for cADL syntax.
-
- .. versionadded:: 2.1
- """
- name = 'cADL'
- aliases = ['cadl']
- filenames = ['*.cadl']
-
- tokens = {
- 'path': [
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'/', Punctuation),
- (r'\[', Punctuation, 'any_code'),
- (r'\s+', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- (r'(cardinality|existence|occurrences|group|include|exclude|'
- r'allow_archetype|use_archetype|use_node)\W', Keyword.Type),
- (r'(and|or|not|there_exists|xor|implies|for_all)\W', Keyword.Type),
- (r'(after|before|closed)\W', Keyword.Type),
- (r'(not)\W', Operator),
- (r'(matches|is_in)\W', Operator),
- # is_in / not is_in char
+ ],
+ 'key': [
+ include('values'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+ 'type_cast': [
+ (r'\)', Punctuation, '#pop'),
+ (r'[^)]+', Name.Class),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'([Tt]rue|[Ff]alse)', Literal),
+ include('values'),
+ # x-ref path
+ (r'/', Punctuation, 'path'),
+ # x-ref path starting with key
+ (r'\[', Punctuation, 'key'),
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'=', Operator),
+ (r'\(', Punctuation, 'type_cast'),
+ (r',', Punctuation),
+ (r'<', Punctuation),
+ (r'>', Punctuation),
+ (r';', Punctuation),
+ ],
+ }
+
+
+class CadlLexer(AtomsLexer):
+ """
+ Lexer for cADL syntax.
+
+ .. versionadded:: 2.1
+ """
+ name = 'cADL'
+ aliases = ['cadl']
+ filenames = ['*.cadl']
+
+ tokens = {
+ 'path': [
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'/', Punctuation),
+ (r'\[', Punctuation, 'any_code'),
+ (r'\s+', Punctuation, '#pop'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'(cardinality|existence|occurrences|group|include|exclude|'
+ r'allow_archetype|use_archetype|use_node)\W', Keyword.Type),
+ (r'(and|or|not|there_exists|xor|implies|for_all)\W', Keyword.Type),
+ (r'(after|before|closed)\W', Keyword.Type),
+ (r'(not)\W', Operator),
+ (r'(matches|is_in)\W', Operator),
+ # is_in / not is_in char
('(\u2208|\u2209)', Operator),
- # there_exists / not there_exists / for_all / and / or
+ # there_exists / not there_exists / for_all / and / or
('(\u2203|\u2204|\u2200|\u2227|\u2228|\u22BB|\223C)',
- Operator),
- # regex in slot or as string constraint
+ Operator),
+ # regex in slot or as string constraint
(r'(\{)(\s*)(/[^}]+/)(\s*)(\})',
bygroups(Punctuation, Whitespace, String.Regex, Whitespace, Punctuation)),
- # regex in slot or as string constraint
+ # regex in slot or as string constraint
(r'(\{)(\s*)(\^[^}]+\^)(\s*)(\})',
bygroups(Punctuation, Whitespace, String.Regex, Whitespace, Punctuation)),
- (r'/', Punctuation, 'path'),
- # for cardinality etc
- (r'(\{)((?:\d+\.\.)?(?:\d+|\*))'
- r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})',
- bygroups(Punctuation, Number, Number, Punctuation)),
- # [{ is start of a tuple value
- (r'\[\{', Punctuation),
- (r'\}\]', Punctuation),
- (r'\{', Punctuation),
- (r'\}', Punctuation),
- include('constraint_values'),
- # type name
- (r'[A-Z]\w+(<[A-Z]\w+([A-Za-z_<>]*)>)?', Name.Class),
- # attribute name
- (r'[a-z_]\w*', Name.Class),
- (r'\[', Punctuation, 'any_code'),
- (r'(~|//|\\\\|\+|-|/|\*|\^|!=|=|<=|>=|<|>]?)', Operator),
- (r'\(', Punctuation),
- (r'\)', Punctuation),
- # for lists of values
- (r',', Punctuation),
- (r'"', String, 'string'),
- # for assumed value
- (r';', Punctuation),
- ],
- }
-
-
-class AdlLexer(AtomsLexer):
- """
- Lexer for ADL syntax.
-
- .. versionadded:: 2.1
- """
-
- name = 'ADL'
- aliases = ['adl']
- filenames = ['*.adl', '*.adls', '*.adlf', '*.adlx']
-
- tokens = {
- 'whitespace': [
- # blank line ends
+ (r'/', Punctuation, 'path'),
+ # for cardinality etc
+ (r'(\{)((?:\d+\.\.)?(?:\d+|\*))'
+ r'((?:\s*;\s*(?:ordered|unordered|unique)){,2})(\})',
+ bygroups(Punctuation, Number, Number, Punctuation)),
+ # [{ is start of a tuple value
+ (r'\[\{', Punctuation),
+ (r'\}\]', Punctuation),
+ (r'\{', Punctuation),
+ (r'\}', Punctuation),
+ include('constraint_values'),
+ # type name
+ (r'[A-Z]\w+(<[A-Z]\w+([A-Za-z_<>]*)>)?', Name.Class),
+ # attribute name
+ (r'[a-z_]\w*', Name.Class),
+ (r'\[', Punctuation, 'any_code'),
+ (r'(~|//|\\\\|\+|-|/|\*|\^|!=|=|<=|>=|<|>]?)', Operator),
+ (r'\(', Punctuation),
+ (r'\)', Punctuation),
+ # for lists of values
+ (r',', Punctuation),
+ (r'"', String, 'string'),
+ # for assumed value
+ (r';', Punctuation),
+ ],
+ }
+
+
+class AdlLexer(AtomsLexer):
+ """
+ Lexer for ADL syntax.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'ADL'
+ aliases = ['adl']
+ filenames = ['*.adl', '*.adls', '*.adlf', '*.adlx']
+
+ tokens = {
+ 'whitespace': [
+ # blank line ends
(r'\s*\n', Whitespace),
- # comment-only line
+ # comment-only line
(r'^([ \t]*)(--.*)$', bygroups(Whitespace, Comment)),
- ],
- 'odin_section': [
- # repeating the following two rules from the root state enable multi-line
- # strings that start in the first column to be dealt with
- (r'^(language|description|ontology|terminology|annotations|'
+ ],
+ 'odin_section': [
+ # repeating the following two rules from the root state enable multi-line
+ # strings that start in the first column to be dealt with
+ (r'^(language|description|ontology|terminology|annotations|'
r'component_terminologies|revision_history)([ \t]*\n)', bygroups(Generic.Heading, Whitespace)),
(r'^(definition)([ \t]*\n)', bygroups(Generic.Heading, Whitespace), 'cadl_section'),
- (r'^([ \t]*|[ \t]+.*)\n', using(OdinLexer)),
- (r'^([^"]*")(>[ \t]*\n)', bygroups(String, Punctuation)),
- # template overlay delimiter
- (r'^----------*\n', Text, '#pop'),
- (r'^.*\n', String),
- default('#pop'),
- ],
- 'cadl_section': [
- (r'^([ \t]*|[ \t]+.*)\n', using(CadlLexer)),
- default('#pop'),
- ],
- 'rules_section': [
- (r'^[ \t]+.*\n', using(CadlLexer)),
- default('#pop'),
- ],
- 'metadata': [
- (r'\)', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'([Tt]rue|[Ff]alse)', Literal),
- # numbers and version ids
- (r'\d+(\.\d+)*', Literal),
- # Guids
- (r'(\d|[a-fA-F])+(-(\d|[a-fA-F])+){3,}', Literal),
- (r'\w+', Name.Class),
- (r'"', String, 'string'),
- (r'=', Operator),
+ (r'^([ \t]*|[ \t]+.*)\n', using(OdinLexer)),
+ (r'^([^"]*")(>[ \t]*\n)', bygroups(String, Punctuation)),
+ # template overlay delimiter
+ (r'^----------*\n', Text, '#pop'),
+ (r'^.*\n', String),
+ default('#pop'),
+ ],
+ 'cadl_section': [
+ (r'^([ \t]*|[ \t]+.*)\n', using(CadlLexer)),
+ default('#pop'),
+ ],
+ 'rules_section': [
+ (r'^[ \t]+.*\n', using(CadlLexer)),
+ default('#pop'),
+ ],
+ 'metadata': [
+ (r'\)', Punctuation, '#pop'),
+ (r';', Punctuation),
+ (r'([Tt]rue|[Ff]alse)', Literal),
+ # numbers and version ids
+ (r'\d+(\.\d+)*', Literal),
+ # Guids
+ (r'(\d|[a-fA-F])+(-(\d|[a-fA-F])+){3,}', Literal),
+ (r'\w+', Name.Class),
+ (r'"', String, 'string'),
+ (r'=', Operator),
(r'[ \t]+', Whitespace),
- default('#pop'),
- ],
- 'root': [
- (r'^(archetype|template_overlay|operational_template|template|'
- r'speciali[sz]e)', Generic.Heading),
- (r'^(language|description|ontology|terminology|annotations|'
- r'component_terminologies|revision_history)[ \t]*\n',
- Generic.Heading, 'odin_section'),
- (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
- (r'^(rules)[ \t]*\n', Generic.Heading, 'rules_section'),
- include('archetype_id'),
+ default('#pop'),
+ ],
+ 'root': [
+ (r'^(archetype|template_overlay|operational_template|template|'
+ r'speciali[sz]e)', Generic.Heading),
+ (r'^(language|description|ontology|terminology|annotations|'
+ r'component_terminologies|revision_history)[ \t]*\n',
+ Generic.Heading, 'odin_section'),
+ (r'^(definition)[ \t]*\n', Generic.Heading, 'cadl_section'),
+ (r'^(rules)[ \t]*\n', Generic.Heading, 'rules_section'),
+ include('archetype_id'),
(r'([ \t]*)(\()', bygroups(Whitespace, Punctuation), 'metadata'),
- include('whitespace'),
- ],
- }
+ include('whitespace'),
+ ],
+ }
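The archetype.py lexers above (ODIN, cADL, ADL) can be exercised the same way. A small sketch with a made-up ODIN fragment; nothing here is part of the commit itself:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.archetype import OdinLexer

# Hypothetical ODIN snippet: attribute names, '<'/'>' punctuation, a string value.
odin_text = 'term = <\n    text = <"Blood pressure">\n>\n'
print(highlight(odin_text, OdinLexer(), TerminalFormatter()))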
diff --git a/contrib/python/Pygments/py3/pygments/lexers/asm.py b/contrib/python/Pygments/py3/pygments/lexers/asm.py
index e5f795f4f3..9f4ab89dc2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/asm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/asm.py
@@ -1,215 +1,215 @@
-"""
- pygments.lexers.asm
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for assembly languages.
-
+"""
+ pygments.lexers.asm
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for assembly languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, include, bygroups, using, words, \
DelegatingLexer, default
-from pygments.lexers.c_cpp import CppLexer, CLexer
-from pygments.lexers.d import DLexer
-from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
+from pygments.lexers.c_cpp import CppLexer, CLexer
+from pygments.lexers.d import DLexer
+from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
Other, Keyword, Operator, Whitespace
-
-__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
+
+__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
'CObjdumpLexer', 'HsailLexer', 'LlvmLexer', 'LlvmMirBodyLexer',
'LlvmMirLexer', 'NasmLexer', 'NasmObjdumpLexer', 'TasmLexer',
'Ca65Lexer', 'Dasm16Lexer']
-
-
-class GasLexer(RegexLexer):
- """
- For Gas (AT&T) assembly code.
- """
- name = 'GAS'
- aliases = ['gas', 'asm']
- filenames = ['*.s', '*.S']
- mimetypes = ['text/x-gas']
-
- #: optional Comment or Whitespace
- string = r'"(\\"|[^"])*"'
- char = r'[\w$.@-]'
+
+
+class GasLexer(RegexLexer):
+ """
+ For Gas (AT&T) assembly code.
+ """
+ name = 'GAS'
+ aliases = ['gas', 'asm']
+ filenames = ['*.s', '*.S']
+ mimetypes = ['text/x-gas']
+
+ #: optional Comment or Whitespace
+ string = r'"(\\"|[^"])*"'
+ char = r'[\w$.@-]'
identifier = r'(?:[a-zA-Z$_]' + char + r'*|\.' + char + '+)'
number = r'(?:0[xX][a-fA-F0-9]+|#?-?\d+)'
register = '%' + identifier + r'\b'
-
- tokens = {
- 'root': [
- include('whitespace'),
- (identifier + ':', Name.Label),
- (r'\.' + identifier, Name.Attribute, 'directive-args'),
- (r'lock|rep(n?z)?|data\d+', Name.Attribute),
- (identifier, Name.Function, 'instruction-args'),
- (r'[\r\n]+', Text)
- ],
- 'directive-args': [
- (identifier, Name.Constant),
- (string, String),
- ('@' + identifier, Name.Attribute),
- (number, Number.Integer),
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (identifier + ':', Name.Label),
+ (r'\.' + identifier, Name.Attribute, 'directive-args'),
+ (r'lock|rep(n?z)?|data\d+', Name.Attribute),
+ (identifier, Name.Function, 'instruction-args'),
+ (r'[\r\n]+', Text)
+ ],
+ 'directive-args': [
+ (identifier, Name.Constant),
+ (string, String),
+ ('@' + identifier, Name.Attribute),
+ (number, Number.Integer),
(register, Name.Variable),
(r'[\r\n]+', Whitespace, '#pop'),
(r'([;#]|//).*?\n', Comment.Single, '#pop'),
(r'/[*].*?[*]/', Comment.Multiline),
(r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
-
- include('punctuation'),
- include('whitespace')
- ],
- 'instruction-args': [
- # For objdump-disassembled code, shouldn't occur in
- # actual assembler input
- ('([a-z0-9]+)( )(<)('+identifier+')(>)',
- bygroups(Number.Hex, Text, Punctuation, Name.Constant,
- Punctuation)),
- ('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
- bygroups(Number.Hex, Text, Punctuation, Name.Constant,
- Punctuation, Number.Integer, Punctuation)),
-
- # Address constants
- (identifier, Name.Constant),
- (number, Number.Integer),
- # Registers
+
+ include('punctuation'),
+ include('whitespace')
+ ],
+ 'instruction-args': [
+ # For objdump-disassembled code, shouldn't occur in
+ # actual assembler input
+ ('([a-z0-9]+)( )(<)('+identifier+')(>)',
+ bygroups(Number.Hex, Text, Punctuation, Name.Constant,
+ Punctuation)),
+ ('([a-z0-9]+)( )(<)('+identifier+')([-+])('+number+')(>)',
+ bygroups(Number.Hex, Text, Punctuation, Name.Constant,
+ Punctuation, Number.Integer, Punctuation)),
+
+ # Address constants
+ (identifier, Name.Constant),
+ (number, Number.Integer),
+ # Registers
(register, Name.Variable),
- # Numeric constants
- ('$'+number, Number.Integer),
- (r"$'(.|\\')'", String.Char),
+ # Numeric constants
+ ('$'+number, Number.Integer),
+ (r"$'(.|\\')'", String.Char),
(r'[\r\n]+', Whitespace, '#pop'),
(r'([;#]|//).*?\n', Comment.Single, '#pop'),
(r'/[*].*?[*]/', Comment.Multiline),
(r'/[*].*?\n[\w\W]*?[*]/', Comment.Multiline, '#pop'),
- include('punctuation'),
- include('whitespace')
- ],
- 'whitespace': [
+ include('punctuation'),
+ include('whitespace')
+ ],
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'([;#]|//).*?\n', Comment.Single),
(r'/[*][\w\W]*?[*]/', Comment.Multiline)
- ],
- 'punctuation': [
- (r'[-*,.()\[\]!:]+', Punctuation)
- ]
- }
-
- def analyse_text(text):
+ ],
+ 'punctuation': [
+ (r'[-*,.()\[\]!:]+', Punctuation)
+ ]
+ }
+
+ def analyse_text(text):
if re.search(r'^\.(text|data|section)', text, re.M):
- return True
+ return True
elif re.search(r'^\.\w+', text, re.M):
- return 0.1
-
-
-def _objdump_lexer_tokens(asm_lexer):
- """
- Common objdump lexer tokens to wrap an ASM lexer.
- """
- hex_re = r'[0-9A-Za-z]'
- return {
- 'root': [
- # File name & format:
- ('(.*?)(:)( +file format )(.*?)$',
- bygroups(Name.Label, Punctuation, Text, String)),
- # Section header
- ('(Disassembly of section )(.*?)(:)$',
- bygroups(Text, Name.Label, Punctuation)),
- # Function labels
- # (With offset)
- ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
+ return 0.1
+
+
+def _objdump_lexer_tokens(asm_lexer):
+ """
+ Common objdump lexer tokens to wrap an ASM lexer.
+ """
+ hex_re = r'[0-9A-Za-z]'
+ return {
+ 'root': [
+ # File name & format:
+ ('(.*?)(:)( +file format )(.*?)$',
+ bygroups(Name.Label, Punctuation, Text, String)),
+ # Section header
+ ('(Disassembly of section )(.*?)(:)$',
+ bygroups(Text, Name.Label, Punctuation)),
+ # Function labels
+ # (With offset)
+ ('('+hex_re+'+)( )(<)(.*?)([-+])(0[xX][A-Za-z0-9]+)(>:)$',
bygroups(Number.Hex, Whitespace, Punctuation, Name.Function,
- Punctuation, Number.Hex, Punctuation)),
- # (Without offset)
- ('('+hex_re+'+)( )(<)(.*?)(>:)$',
+ Punctuation, Number.Hex, Punctuation)),
+ # (Without offset)
+ ('('+hex_re+'+)( )(<)(.*?)(>:)$',
bygroups(Number.Hex, Whitespace, Punctuation, Name.Function,
- Punctuation)),
- # Code line with disassembled instructions
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
+ Punctuation)),
+ # Code line with disassembled instructions
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *\t)([a-zA-Z].*?)$',
bygroups(Whitespace, Name.Label, Whitespace, Number.Hex, Whitespace,
- using(asm_lexer))),
+ using(asm_lexer))),
# Code line without raw instructions (objdump --no-show-raw-insn)
('( *)('+hex_re+r'+:)( *\t)([a-zA-Z].*?)$',
bygroups(Whitespace, Name.Label, Whitespace,
using(asm_lexer))),
- # Code line with ascii
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
+ # Code line with ascii
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)( *)(.*?)$',
bygroups(Whitespace, Name.Label, Whitespace, Number.Hex, Whitespace, String)),
- # Continued code line, only raw opcodes without disassembled
- # instruction
- ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
+ # Continued code line, only raw opcodes without disassembled
+ # instruction
+ ('( *)('+hex_re+r'+:)(\t)((?:'+hex_re+hex_re+' )+)$',
bygroups(Whitespace, Name.Label, Whitespace, Number.Hex)),
- # Skipped a few bytes
- (r'\t\.\.\.$', Text),
- # Relocation line
- # (With offset)
- (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x'+hex_re+'+)$',
+ # Skipped a few bytes
+ (r'\t\.\.\.$', Text),
+ # Relocation line
+ # (With offset)
+ (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)([-+])(0x'+hex_re+'+)$',
bygroups(Whitespace, Name.Label, Whitespace, Name.Property, Whitespace,
- Name.Constant, Punctuation, Number.Hex)),
- # (Without offset)
- (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)$',
+ Name.Constant, Punctuation, Number.Hex)),
+ # (Without offset)
+ (r'(\t\t\t)('+hex_re+r'+:)( )([^\t]+)(\t)(.*?)$',
bygroups(Whitespace, Name.Label, Whitespace, Name.Property, Whitespace,
- Name.Constant)),
- (r'[^\n]+\n', Other)
- ]
- }
-
-
-class ObjdumpLexer(RegexLexer):
- """
+ Name.Constant)),
+ (r'[^\n]+\n', Other)
+ ]
+ }
+
+
+class ObjdumpLexer(RegexLexer):
+ """
For the output of ``objdump -dr``.
- """
- name = 'objdump'
- aliases = ['objdump']
- filenames = ['*.objdump']
- mimetypes = ['text/x-objdump']
-
- tokens = _objdump_lexer_tokens(GasLexer)
-
-
-class DObjdumpLexer(DelegatingLexer):
- """
+ """
+ name = 'objdump'
+ aliases = ['objdump']
+ filenames = ['*.objdump']
+ mimetypes = ['text/x-objdump']
+
+ tokens = _objdump_lexer_tokens(GasLexer)
+
+
+class DObjdumpLexer(DelegatingLexer):
+ """
For the output of ``objdump -Sr`` on compiled D files.
- """
- name = 'd-objdump'
- aliases = ['d-objdump']
- filenames = ['*.d-objdump']
- mimetypes = ['text/x-d-objdump']
-
- def __init__(self, **options):
+ """
+ name = 'd-objdump'
+ aliases = ['d-objdump']
+ filenames = ['*.d-objdump']
+ mimetypes = ['text/x-d-objdump']
+
+ def __init__(self, **options):
super().__init__(DLexer, ObjdumpLexer, **options)
-
-
-class CppObjdumpLexer(DelegatingLexer):
- """
+
+
+class CppObjdumpLexer(DelegatingLexer):
+ """
For the output of ``objdump -Sr`` on compiled C++ files.
- """
- name = 'cpp-objdump'
- aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
- filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
- mimetypes = ['text/x-cpp-objdump']
-
- def __init__(self, **options):
+ """
+ name = 'cpp-objdump'
+ aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
+ filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
+ mimetypes = ['text/x-cpp-objdump']
+
+ def __init__(self, **options):
super().__init__(CppLexer, ObjdumpLexer, **options)
-
-
-class CObjdumpLexer(DelegatingLexer):
- """
+
+
+class CObjdumpLexer(DelegatingLexer):
+ """
For the output of ``objdump -Sr`` on compiled C files.
- """
- name = 'c-objdump'
- aliases = ['c-objdump']
- filenames = ['*.c-objdump']
- mimetypes = ['text/x-c-objdump']
-
- def __init__(self, **options):
+ """
+ name = 'c-objdump'
+ aliases = ['c-objdump']
+ filenames = ['*.c-objdump']
+ mimetypes = ['text/x-c-objdump']
+
+ def __init__(self, **options):
super().__init__(CLexer, ObjdumpLexer, **options)
-
-
+
+
class HsailLexer(RegexLexer):
"""
For HSAIL assembly code.
@@ -345,49 +345,49 @@ class HsailLexer(RegexLexer):
}
-class LlvmLexer(RegexLexer):
- """
- For LLVM assembly code.
- """
- name = 'LLVM'
- aliases = ['llvm']
- filenames = ['*.ll']
- mimetypes = ['text/x-llvm']
-
- #: optional Comment or Whitespace
- string = r'"[^"]*?"'
- identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
+class LlvmLexer(RegexLexer):
+ """
+ For LLVM assembly code.
+ """
+ name = 'LLVM'
+ aliases = ['llvm']
+ filenames = ['*.ll']
+ mimetypes = ['text/x-llvm']
+
+ #: optional Comment or Whitespace
+ string = r'"[^"]*?"'
+ identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
block_label = r'(' + identifier + r'|(\d+))'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Before keywords, because keywords are valid label names :(...
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # Before keywords, because keywords are valid label names :(...
(block_label + r'\s*:', Name.Label),
-
- include('keyword'),
-
- (r'%' + identifier, Name.Variable),
- (r'@' + identifier, Name.Variable.Global),
- (r'%\d+', Name.Variable.Anonymous),
- (r'@\d+', Name.Variable.Global),
- (r'#\d+', Name.Variable.Global),
- (r'!' + identifier, Name.Variable),
- (r'!\d+', Name.Variable.Anonymous),
- (r'c?' + string, String),
-
- (r'0[xX][a-fA-F0-9]+', Number),
- (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
-
- (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
- ],
- 'whitespace': [
+
+ include('keyword'),
+
+ (r'%' + identifier, Name.Variable),
+ (r'@' + identifier, Name.Variable.Global),
+ (r'%\d+', Name.Variable.Anonymous),
+ (r'@\d+', Name.Variable.Global),
+ (r'#\d+', Name.Variable.Global),
+ (r'!' + identifier, Name.Variable),
+ (r'!\d+', Name.Variable.Anonymous),
+ (r'c?' + string, String),
+
+ (r'0[xX][a-fA-F0-9]+', Number),
+ (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
+
+ (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
+ ],
+ 'whitespace': [
(r'(\n|\s+)+', Whitespace),
- (r';.*?\n', Comment)
- ],
- 'keyword': [
- # Regular keywords
+ (r';.*?\n', Comment)
+ ],
+ 'keyword': [
+ # Regular keywords
(words((
'aarch64_sve_vector_pcs', 'aarch64_vector_pcs', 'acq_rel',
'acquire', 'add', 'addrspace', 'addrspacecast', 'afn', 'alias',
@@ -473,19 +473,19 @@ class LlvmLexer(RegexLexer):
'x86_vectorcallcc', 'xchg', 'xor', 'zeroext',
'zeroinitializer', 'zext', 'immarg', 'willreturn'),
suffix=r'\b'), Keyword),
-
- # Types
+
+ # Types
(words(('void', 'half', 'bfloat', 'float', 'double', 'fp128',
'x86_fp80', 'ppc_fp128', 'label', 'metadata', 'x86_mmx',
'x86_amx', 'token')),
Keyword.Type),
-
- # Integer types
+
+ # Integer types
(r'i[1-9]\d*', Keyword.Type)
- ]
- }
-
-
+ ]
+ }
+
+
class LlvmMirBodyLexer(RegexLexer):
"""
For LLVM MIR examples without the YAML wrapper.
@@ -708,105 +708,105 @@ class LlvmMirLexer(RegexLexer):
}
-class NasmLexer(RegexLexer):
- """
- For Nasm (Intel) assembly code.
- """
- name = 'NASM'
- aliases = ['nasm']
- filenames = ['*.asm', '*.ASM']
- mimetypes = ['text/x-nasm']
-
+class NasmLexer(RegexLexer):
+ """
+ For Nasm (Intel) assembly code.
+ """
+ name = 'NASM'
+ aliases = ['nasm']
+ filenames = ['*.asm', '*.ASM']
+ mimetypes = ['text/x-nasm']
+
# Tasm uses the same file endings, but TASM is not as common as NASM, so
# we prioritize NASM higher by default
priority = 1.0
- identifier = r'[a-z$._?][\w$.?#@~]*'
- hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
- octn = r'[0-7]+q'
- binn = r'[01]+b'
- decn = r'[0-9]+'
- floatn = decn + r'\.e?' + decn
- string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
- declkw = r'(?:res|d)[bwdqt]|times'
+ identifier = r'[a-z$._?][\w$.?#@~]*'
+ hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
+ octn = r'[0-7]+q'
+ binn = r'[01]+b'
+ decn = r'[0-9]+'
+ floatn = decn + r'\.e?' + decn
+ string = r'"(\\"|[^"\n])*"|' + r"'(\\'|[^'\n])*'|" + r"`(\\`|[^`\n])*`"
+ declkw = r'(?:res|d)[bwdqt]|times'
register = (r'(r[0-9][0-5]?[bwd]?|'
- r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
+ r'[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|'
r'mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7])\b')
- wordop = r'seg|wrt|strict'
- type = r'byte|[dq]?word'
+ wordop = r'seg|wrt|strict'
+ type = r'byte|[dq]?word'
# Directives must be followed by whitespace, otherwise CPU will match
# cpuid for instance.
directives = (r'(?:BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|'
- r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
+ r'ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|'
r'EXPORT|LIBRARY|MODULE)(?=\s)')
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
- (r'^\s*%', Comment.Preproc, 'preproc'),
- include('whitespace'),
- (identifier + ':', Name.Label),
- (r'(%s)(\s+)(equ)' % identifier,
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'^\s*%', Comment.Preproc, 'preproc'),
+ include('whitespace'),
+ (identifier + ':', Name.Label),
+ (r'(%s)(\s+)(equ)' % identifier,
bygroups(Name.Constant, Whitespace, Keyword.Declaration),
- 'instruction-args'),
- (directives, Keyword, 'instruction-args'),
- (declkw, Keyword.Declaration, 'instruction-args'),
- (identifier, Name.Function, 'instruction-args'),
+ 'instruction-args'),
+ (directives, Keyword, 'instruction-args'),
+ (declkw, Keyword.Declaration, 'instruction-args'),
+ (identifier, Name.Function, 'instruction-args'),
(r'[\r\n]+', Whitespace)
- ],
- 'instruction-args': [
- (string, String),
- (hexn, Number.Hex),
- (octn, Number.Oct),
- (binn, Number.Bin),
- (floatn, Number.Float),
- (decn, Number.Integer),
- include('punctuation'),
- (register, Name.Builtin),
- (identifier, Name.Variable),
+ ],
+ 'instruction-args': [
+ (string, String),
+ (hexn, Number.Hex),
+ (octn, Number.Oct),
+ (binn, Number.Bin),
+ (floatn, Number.Float),
+ (decn, Number.Integer),
+ include('punctuation'),
+ (register, Name.Builtin),
+ (identifier, Name.Variable),
(r'[\r\n]+', Whitespace, '#pop'),
- include('whitespace')
- ],
- 'preproc': [
- (r'[^;\n]+', Comment.Preproc),
- (r';.*?\n', Comment.Single, '#pop'),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'whitespace': [
+ include('whitespace')
+ ],
+ 'preproc': [
+ (r'[^;\n]+', Comment.Preproc),
+ (r';.*?\n', Comment.Single, '#pop'),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'whitespace': [
(r'\n', Whitespace),
(r'[ \t]+', Whitespace),
(r';.*', Comment.Single),
(r'#.*', Comment.Single)
- ],
- 'punctuation': [
- (r'[,():\[\]]+', Punctuation),
- (r'[&|^<>+*/%~-]+', Operator),
- (r'[$]+', Keyword.Constant),
- (wordop, Operator.Word),
- (type, Keyword.Type)
- ],
- }
-
+ ],
+ 'punctuation': [
+ (r'[,():\[\]]+', Punctuation),
+ (r'[&|^<>+*/%~-]+', Operator),
+ (r'[$]+', Keyword.Constant),
+ (wordop, Operator.Word),
+ (type, Keyword.Type)
+ ],
+ }
+
def analyse_text(text):
# Probably TASM
if re.match(r'PROC', text, re.IGNORECASE):
return False
+
-
-class NasmObjdumpLexer(ObjdumpLexer):
- """
+class NasmObjdumpLexer(ObjdumpLexer):
+ """
For the output of ``objdump -d -M intel``.
-
- .. versionadded:: 2.0
- """
- name = 'objdump-nasm'
- aliases = ['objdump-nasm']
- filenames = ['*.objdump-intel']
- mimetypes = ['text/x-nasm-objdump']
-
- tokens = _objdump_lexer_tokens(NasmLexer)
-
-
+
+ .. versionadded:: 2.0
+ """
+ name = 'objdump-nasm'
+ aliases = ['objdump-nasm']
+ filenames = ['*.objdump-intel']
+ mimetypes = ['text/x-nasm-objdump']
+
+ tokens = _objdump_lexer_tokens(NasmLexer)
+
+
class TasmLexer(RegexLexer):
"""
For Tasm (Turbo Assembler) assembly code.
@@ -892,43 +892,43 @@ class TasmLexer(RegexLexer):
return True
-class Ca65Lexer(RegexLexer):
- """
- For ca65 assembler sources.
-
- .. versionadded:: 1.6
- """
- name = 'ca65 assembler'
- aliases = ['ca65']
- filenames = ['*.s']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r';.*', Comment.Single),
+class Ca65Lexer(RegexLexer):
+ """
+ For ca65 assembler sources.
+
+ .. versionadded:: 1.6
+ """
+ name = 'ca65 assembler'
+ aliases = ['ca65']
+ filenames = ['*.s']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r';.*', Comment.Single),
(r'\s+', Whitespace),
- (r'[a-z_.@$][\w.@$]*:', Name.Label),
- (r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
- r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
- r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
- r'|bit)\b', Keyword),
- (r'\.\w+', Keyword.Pseudo),
- (r'[-+~*/^&|!<>=]', Operator),
- (r'"[^"\n]*.', String),
- (r"'[^'\n]*.", String.Char),
- (r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
- (r'\d+', Number.Integer),
- (r'%[01]+', Number.Bin),
- (r'[#,.:()=\[\]]', Punctuation),
- (r'[a-z_.@$][\w.@$]*', Name),
- ]
- }
-
- def analyse_text(self, text):
-        # comments in GAS start with "#"; a leading ";" comment suggests ca65 rather than GAS
+ (r'[a-z_.@$][\w.@$]*:', Name.Label),
+ (r'((ld|st)[axy]|(in|de)[cxy]|asl|lsr|ro[lr]|adc|sbc|cmp|cp[xy]'
+ r'|cl[cvdi]|se[cdi]|jmp|jsr|bne|beq|bpl|bmi|bvc|bvs|bcc|bcs'
+ r'|p[lh][ap]|rt[is]|brk|nop|ta[xy]|t[xy]a|txs|tsx|and|ora|eor'
+ r'|bit)\b', Keyword),
+ (r'\.\w+', Keyword.Pseudo),
+ (r'[-+~*/^&|!<>=]', Operator),
+ (r'"[^"\n]*.', String),
+ (r"'[^'\n]*.", String.Char),
+ (r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'%[01]+', Number.Bin),
+ (r'[#,.:()=\[\]]', Punctuation),
+ (r'[a-z_.@$][\w.@$]*', Name),
+ ]
+ }
+
+ def analyse_text(self, text):
+        # comments in GAS start with "#"; a leading ";" comment suggests ca65 rather than GAS
if re.search(r'^\s*;', text, re.MULTILINE):
- return 0.9
+ return 0.9
class Dasm16Lexer(RegexLexer):
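
The assembler lexers above (NasmLexer, NasmObjdumpLexer, TasmLexer, Ca65Lexer) are ordinary RegexLexer subclasses, and their analyse_text() scores are what pygments.lexers.guess_lexer() weighs when no filename is available. A minimal usage sketch, assuming Pygments is installed; the sample source strings are illustrative placeholders, not taken from this diff:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import guess_lexer
    from pygments.lexers.asm import Ca65Lexer, NasmLexer

    # Illustrative snippets; any NASM or ca65 source would do.
    nasm_src = "SECTION .text\nGLOBAL _start\n_start:\n    mov eax, 1\n"
    ca65_src = "; ca65 example\nstart:  lda #$01\n        rts\n"

    # An explicit lexer choice applies the token rules defined above.
    print(highlight(nasm_src, NasmLexer(), TerminalFormatter()))
    print(highlight(ca65_src, Ca65Lexer(), TerminalFormatter()))

    # guess_lexer() consults each lexer's analyse_text() score
    # (e.g. Ca65Lexer returns 0.9 when it sees ';' comments).
    print(guess_lexer(ca65_src).name)
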
diff --git a/contrib/python/Pygments/py3/pygments/lexers/automation.py b/contrib/python/Pygments/py3/pygments/lexers/automation.py
index 7b03e39a06..3b0f1ac364 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/automation.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/automation.py
@@ -1,379 +1,379 @@
-"""
- pygments.lexers.automation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for automation scripting languages.
-
+"""
+ pygments.lexers.automation
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for automation scripting languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, combined
-from pygments.token import Text, Comment, Operator, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['AutohotkeyLexer', 'AutoItLexer']
-
-
-class AutohotkeyLexer(RegexLexer):
- """
- For `autohotkey <http://www.autohotkey.com/>`_ source code.
-
- .. versionadded:: 1.4
- """
- name = 'autohotkey'
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, combined
+from pygments.token import Text, Comment, Operator, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['AutohotkeyLexer', 'AutoItLexer']
+
+
+class AutohotkeyLexer(RegexLexer):
+ """
+ For `autohotkey <http://www.autohotkey.com/>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'autohotkey'
aliases = ['autohotkey', 'ahk']
- filenames = ['*.ahk', '*.ahkl']
- mimetypes = ['text/x-autohotkey']
-
- tokens = {
- 'root': [
- (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
- (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
+ filenames = ['*.ahk', '*.ahkl']
+ mimetypes = ['text/x-autohotkey']
+
+ tokens = {
+ 'root': [
+ (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
+ (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
(r'\s+;.*?$', Comment.Single),
(r'^;.*?$', Comment.Single),
- (r'[]{}(),;[]', Punctuation),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInVariables'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([,%`abfnrtv\-+;])', String.Escape),
- include('garbage'),
- ],
- 'incomment': [
- (r'^\s*\*/', Comment.Multiline, '#pop'),
- (r'[^*/]', Comment.Multiline),
- (r'[*/]', Comment.Multiline)
- ],
- 'incontinuation': [
- (r'^\s*\)', Generic, '#pop'),
- (r'[^)]', Generic),
- (r'[)]', Generic),
- ],
- 'commands': [
- (r'(?i)^(\s*)(global|local|static|'
- r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
- r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
- r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
- r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
- r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
- r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
- r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
- r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
- r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
- r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
- r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
- r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
- r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
- r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
- r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
- r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
- r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
- r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
- r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
- r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
- r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
- r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
- r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
- r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
- r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
- r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
- r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
- r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
- r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
- r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
- r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
- r'SetBatchLines|SetCapslockState|SetControlDelay|'
- r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
- r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
- r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
- r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
- r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
- r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
- r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
- r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
- r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
- r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
- r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
- r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
- r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
- r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
- r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
- r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
- r'WinWait)\b', bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
- r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
- r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
- r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
- r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
- r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
- r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
- r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
- r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
- r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
- r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
- r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
- r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
- r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
- r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
- r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
- Name.Function),
- ],
- 'builtInVariables': [
- (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
- r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
- r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
- r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
- r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
- r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
- r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
- r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
- r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
- r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
- r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
- r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
- r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
- r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
- r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
- r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
- r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
- r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
- r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
- r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
- r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
- r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
- r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
- r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
- r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
- r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
- r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
- r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
- r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
- r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
- Name.Variable),
- ],
- 'labels': [
- # hotkeys and labels
- # technically, hotkey names are limited to named keys and buttons
- (r'(^\s*)([^:\s("]+?:{1,2})', bygroups(Text, Name.Label)),
- (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([,%`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- # (r'.', Text), # no cheating
- ],
- }
-
-
-class AutoItLexer(RegexLexer):
- """
- For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
-
-    AutoIt is a freeware BASIC-like scripting language
-    designed for automating the Windows GUI and general scripting.
-
- .. versionadded:: 1.6
- """
- name = 'AutoIt'
- aliases = ['autoit']
- filenames = ['*.au3']
- mimetypes = ['text/x-autoit']
-
- # Keywords, functions, macros from au3.keywords.properties
- # which can be found in AutoIt installed directory, e.g.
- # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
-
- keywords = """\
- #include-once #include #endregion #forcedef #forceref #region
- and byref case continueloop dim do else elseif endfunc endif
- endselect exit exitloop for func global
- if local next not or return select step
- then to until wend while exit""".split()
-
- functions = """\
- abs acos adlibregister adlibunregister asc ascw asin assign atan
- autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
- binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
- blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
- consolewrite consolewriteerror controlclick controlcommand controldisable
- controlenable controlfocus controlgetfocus controlgethandle controlgetpos
- controlgettext controlhide controllistview controlmove controlsend
- controlsettext controlshow controltreeview cos dec dircopy dircreate
- dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
- dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
- dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
- drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
- drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
- drivespacetotal drivestatus envget envset envupdate eval execute exp
- filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
- filedelete fileexists filefindfirstfile filefindnextfile fileflush
- filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
- filegetshortname filegetsize filegettime filegetversion fileinstall filemove
- fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
- filesavedialog fileselectfolder filesetattrib filesetpos filesettime
- filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
- guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
- guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
- guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
- guictrlcreateinput guictrlcreatelabel guictrlcreatelist
- guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
- guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
- guictrlcreatepic guictrlcreateprogress guictrlcreateradio
- guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
- guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
- guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
- guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
- guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
- guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
- guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
- guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
- guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
- guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
- guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
- httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
- inetread inidelete iniread inireadsection inireadsectionnames
- inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
- isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
- isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
- mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
- number objcreate objcreateinterface objevent objevent objget objname
- onautoitexitregister onautoitexitunregister opt ping pixelchecksum
- pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
- processgetstats processlist processsetpriority processwait processwaitclose
- progressoff progresson progressset ptr random regdelete regenumkey
- regenumval regread regwrite round run runas runaswait runwait send
- sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
- sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
- sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
- string stringaddcr stringcompare stringformat stringfromasciiarray
- stringinstr stringisalnum stringisalpha stringisascii stringisdigit
- stringisfloat stringisint stringislower stringisspace stringisupper
- stringisxdigit stringleft stringlen stringlower stringmid stringregexp
- stringregexpreplace stringreplace stringright stringsplit stringstripcr
- stringstripws stringtoasciiarray stringtobinary stringtrimleft
- stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
- tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
- timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
- trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
- trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
- traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
- udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
- winactivate winactive winclose winexists winflash wingetcaretpos
- wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
- wingetstate wingettext wingettitle winkill winlist winmenuselectitem
- winminimizeall winminimizeallundo winmove winsetontop winsetstate
- winsettitle winsettrans winwait winwaitactive winwaitclose
- winwaitnotactive""".split()
-
- macros = """\
- @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
- @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
- @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
- @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
- @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
- @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
- @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
- @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
- @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
- @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
- @osversion @programfilesdir @programscommondir @programsdir @scriptdir
- @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
- @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
- @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
- @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
- @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
- @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
- @windowsdir @workingdir @yday @year""".split()
-
- tokens = {
- 'root': [
- (r';.*\n', Comment.Single),
- (r'(#comments-start|#cs)(.|\n)*?(#comments-end|#ce)',
- Comment.Multiline),
- (r'[\[\]{}(),;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
- (r'[$|@][a-zA-Z_]\w*', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInMarcros'),
- (r'"', String, combined('stringescape', 'dqs')),
+ (r'[]{}(),;[]', Punctuation),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+ include('builtInVariables'),
+ (r'"', String, combined('stringescape', 'dqs')),
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([,%`abfnrtv\-+;])', String.Escape),
+ include('garbage'),
+ ],
+ 'incomment': [
+ (r'^\s*\*/', Comment.Multiline, '#pop'),
+ (r'[^*/]', Comment.Multiline),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'incontinuation': [
+ (r'^\s*\)', Generic, '#pop'),
+ (r'[^)]', Generic),
+ (r'[)]', Generic),
+ ],
+ 'commands': [
+ (r'(?i)^(\s*)(global|local|static|'
+ r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
+ r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
+ r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
+ r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
+ r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
+ r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
+ r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
+ r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
+ r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
+ r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
+ r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
+ r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
+ r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
+ r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
+ r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
+ r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
+ r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
+ r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
+ r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
+ r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
+ r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
+ r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
+ r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
+ r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
+ r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
+ r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
+ r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
+ r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
+ r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
+ r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
+ r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
+ r'SetBatchLines|SetCapslockState|SetControlDelay|'
+ r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
+ r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
+ r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
+ r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
+ r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
+ r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
+ r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
+ r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
+ r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
+ r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
+ r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
+ r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
+ r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
+ r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
+ r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
+ r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
+ r'WinWait)\b', bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
+ r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
+ r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
+ r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
+ r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
+ r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
+ r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
+ r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
+ r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
+ r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
+ r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
+ r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
+ r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
+ r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
+ r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
+ r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
+ Name.Function),
+ ],
+ 'builtInVariables': [
+ (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
+ r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
+ r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
+ r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
+ r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
+ r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
+ r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
+ r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
+ r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
+ r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
+ r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
+ r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
+ r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
+ r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
+ r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
+ r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
+ r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
+ r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
+ r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
+ r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
+ r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
+ r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
+ r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
+ r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
+ r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
+ r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
+ r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
+ r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
+ r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
+ r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
+ Name.Variable),
+ ],
+ 'labels': [
+ # hotkeys and labels
+ # technically, hotkey names are limited to named keys and buttons
+ (r'(^\s*)([^:\s("]+?:{1,2})', bygroups(Text, Name.Label)),
+ (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([,%`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ # (r'.', Text), # no cheating
+ ],
+ }
+
+
+class AutoItLexer(RegexLexer):
+ """
+ For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
+
+    AutoIt is a freeware BASIC-like scripting language
+    designed for automating the Windows GUI and general scripting.
+
+ .. versionadded:: 1.6
+ """
+ name = 'AutoIt'
+ aliases = ['autoit']
+ filenames = ['*.au3']
+ mimetypes = ['text/x-autoit']
+
+ # Keywords, functions, macros from au3.keywords.properties
+ # which can be found in AutoIt installed directory, e.g.
+ # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
+
+ keywords = """\
+ #include-once #include #endregion #forcedef #forceref #region
+ and byref case continueloop dim do else elseif endfunc endif
+ endselect exit exitloop for func global
+ if local next not or return select step
+ then to until wend while exit""".split()
+
+ functions = """\
+ abs acos adlibregister adlibunregister asc ascw asin assign atan
+ autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
+ binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
+ blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
+ consolewrite consolewriteerror controlclick controlcommand controldisable
+ controlenable controlfocus controlgetfocus controlgethandle controlgetpos
+ controlgettext controlhide controllistview controlmove controlsend
+ controlsettext controlshow controltreeview cos dec dircopy dircreate
+ dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
+ dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
+ dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
+ drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
+ drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
+ drivespacetotal drivestatus envget envset envupdate eval execute exp
+ filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
+ filedelete fileexists filefindfirstfile filefindnextfile fileflush
+ filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
+ filegetshortname filegetsize filegettime filegetversion fileinstall filemove
+ fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
+ filesavedialog fileselectfolder filesetattrib filesetpos filesettime
+ filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
+ guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
+ guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
+ guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
+ guictrlcreateinput guictrlcreatelabel guictrlcreatelist
+ guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
+ guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
+ guictrlcreatepic guictrlcreateprogress guictrlcreateradio
+ guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
+ guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
+ guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
+ guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
+ guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
+ guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
+ guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
+ guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
+ guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
+ guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
+ guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
+ httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
+ inetread inidelete iniread inireadsection inireadsectionnames
+ inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
+ isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
+ isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
+ mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
+ number objcreate objcreateinterface objevent objevent objget objname
+ onautoitexitregister onautoitexitunregister opt ping pixelchecksum
+ pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
+ processgetstats processlist processsetpriority processwait processwaitclose
+ progressoff progresson progressset ptr random regdelete regenumkey
+ regenumval regread regwrite round run runas runaswait runwait send
+ sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
+ sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
+ sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
+ string stringaddcr stringcompare stringformat stringfromasciiarray
+ stringinstr stringisalnum stringisalpha stringisascii stringisdigit
+ stringisfloat stringisint stringislower stringisspace stringisupper
+ stringisxdigit stringleft stringlen stringlower stringmid stringregexp
+ stringregexpreplace stringreplace stringright stringsplit stringstripcr
+ stringstripws stringtoasciiarray stringtobinary stringtrimleft
+ stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
+ tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
+ timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
+ trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
+ trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
+ traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
+ udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
+ winactivate winactive winclose winexists winflash wingetcaretpos
+ wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
+ wingetstate wingettext wingettitle winkill winlist winmenuselectitem
+ winminimizeall winminimizeallundo winmove winsetontop winsetstate
+ winsettitle winsettrans winwait winwaitactive winwaitclose
+ winwaitnotactive""".split()
+
+ macros = """\
+ @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
+ @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
+ @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
+ @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
+ @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
+ @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
+ @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
+ @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
+ @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
+ @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
+ @osversion @programfilesdir @programscommondir @programsdir @scriptdir
+ @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
+ @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
+ @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
+ @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
+ @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
+ @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
+ @windowsdir @workingdir @yday @year""".split()
+
+ tokens = {
+ 'root': [
+ (r';.*\n', Comment.Single),
+ (r'(#comments-start|#cs)(.|\n)*?(#comments-end|#ce)',
+ Comment.Multiline),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'[$|@][a-zA-Z_]\w*', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+ include('builtInMarcros'),
+ (r'"', String, combined('stringescape', 'dqs')),
(r"'", String, 'sqs'),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([,%`abfnrtv\-+;])', String.Escape),
- (r'_\n', Text), # Line continuation
- include('garbage'),
- ],
- 'commands': [
- (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
- bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(%s)\b' % '|'.join(functions),
- Name.Function),
- ],
- 'builtInMarcros': [
- (r'(?i)(%s)\b' % '|'.join(macros),
- Name.Variable.Global),
- ],
- 'labels': [
- # sendkeys
- (r'(^\s*)(\{\S+?\})', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([,%`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([,%`abfnrtv\-+;])', String.Escape),
+ (r'_\n', Text), # Line continuation
+ include('garbage'),
+ ],
+ 'commands': [
+ (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
+ bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(%s)\b' % '|'.join(functions),
+ Name.Function),
+ ],
+ 'builtInMarcros': [
+ (r'(?i)(%s)\b' % '|'.join(macros),
+ Name.Variable.Global),
+ ],
+ 'labels': [
+ # sendkeys
+ (r'(^\s*)(\{\S+?\})', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([,%`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
'sqs': [
(r'\'\'|\`([,%`abfnrtv])', String.Escape),
(r"'", String, '#pop'),
(r"[^'\n]+", String)
],
- 'garbage': [
- (r'[^\S\n]', Text),
- ],
- }
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ ],
+ }
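
Likewise, AutohotkeyLexer and AutoItLexer can be used directly once this module is importable. A small sketch, assuming Pygments is installed; the one-line scripts are made-up placeholders:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer

    ahk_src = '#NoEnv\nMsgBox, Hello from AutoHotkey\n'
    au3_src = 'MsgBox(0, "Example", "Hello from AutoIt")\n'

    # Each call returns an HTML fragment built from the token rules above.
    print(highlight(ahk_src, AutohotkeyLexer(), HtmlFormatter()))
    print(highlight(au3_src, AutoItLexer(), HtmlFormatter()))
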
diff --git a/contrib/python/Pygments/py3/pygments/lexers/basic.py b/contrib/python/Pygments/py3/pygments/lexers/basic.py
index 3ccadf1e07..761dfb96d9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/basic.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/basic.py
@@ -1,74 +1,74 @@
-"""
- pygments.lexers.basic
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for BASIC like languages (other than VB.net).
-
+"""
+ pygments.lexers.basic
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for BASIC like languages (other than VB.net).
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default, words, include
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words, include
from pygments.token import Comment, Error, Keyword, Name, Number, \
Punctuation, Operator, String, Text, Whitespace
from pygments.lexers import _vbscript_builtins
+
-
-__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
+__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
'QBasicLexer', 'VBScriptLexer', 'BBCBasicLexer']
-
-
-class BlitzMaxLexer(RegexLexer):
- """
- For `BlitzMax <http://blitzbasic.com>`_ source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'BlitzMax'
- aliases = ['blitzmax', 'bmax']
- filenames = ['*.bmx']
- mimetypes = ['text/x-bmx']
-
- bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
- bmax_sktypes = r'@{1,2}|[!#$%]'
- bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
- bmax_name = r'[a-z_]\w*'
- bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
- r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
- (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
- bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
+
+
+class BlitzMaxLexer(RegexLexer):
+ """
+ For `BlitzMax <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'BlitzMax'
+ aliases = ['blitzmax', 'bmax']
+ filenames = ['*.bmx']
+ mimetypes = ['text/x-bmx']
+
+ bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
+ bmax_sktypes = r'@{1,2}|[!#$%]'
+ bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
+ bmax_name = r'[a-z_]\w*'
+ bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
+ r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
+ (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
+ bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
(r'\s+', Whitespace),
(r'(\.\.)(\n)', bygroups(Text, Whitespace)), # Line continuation
- # Comments
- (r"'.*?\n", Comment.Single),
- (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]*(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
- (bmax_vopwords), Operator),
- (r'[(),.:\[\]]', Punctuation),
- (r'(?:#[\w \t]*)', Name.Label),
- (r'(?:\?[\w \t]*)', Comment.Preproc),
- # Identifiers
- (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
+ # Comments
+ (r"'.*?\n", Comment.Single),
+ (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]*(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
+ (bmax_vopwords), Operator),
+ (r'[(),.:\[\]]', Punctuation),
+ (r'(?:#[\w \t]*)', Name.Label),
+ (r'(?:\?[\w \t]*)', Comment.Preproc),
+ # Identifiers
+ (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
bygroups(Keyword.Reserved, Whitespace, Punctuation, Name.Class)),
- (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
- (bmax_name, bmax_name),
+ (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
+ (bmax_name, bmax_name),
bygroups(Keyword.Reserved, Whitespace, Keyword.Namespace)),
(bmax_func, bygroups(Name.Function, Whitespace, Keyword.Type,
Operator, Whitespace, Punctuation, Whitespace,
@@ -77,428 +77,428 @@ class BlitzMaxLexer(RegexLexer):
(bmax_var, bygroups(Name.Variable, Whitespace, Keyword.Type, Operator,
Whitespace, Punctuation, Whitespace, Keyword.Type,
Name.Class, Whitespace, Keyword.Type)),
- (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
+ (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
bygroups(Keyword.Reserved, Whitespace, Name.Class)),
- # Keywords
- (r'\b(Ptr)\b', Keyword.Type),
- (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
- (words((
- 'TNullMethodException', 'TNullFunctionException',
- 'TNullObjectException', 'TArrayBoundsException',
- 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception),
- (words((
- 'Strict', 'SuperStrict', 'Module', 'ModuleInfo',
- 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private',
- 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max',
- 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen',
- 'Framework', 'Include', 'Import', 'Extern', 'EndExtern',
- 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod',
- 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
- 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile',
- 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect',
- 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData',
- 'RestoreData'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- # Final resolve (for variable names and such)
- (r'(%s)' % (bmax_name), Name.Variable),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class BlitzBasicLexer(RegexLexer):
- """
- For `BlitzBasic <http://blitzbasic.com>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'BlitzBasic'
- aliases = ['blitzbasic', 'b3d', 'bplus']
- filenames = ['*.bb', '*.decls']
- mimetypes = ['text/x-bb']
-
- bb_sktypes = r'@{1,2}|[#$%]'
- bb_name = r'[a-z]\w*'
- bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
- (bb_name, bb_sktypes, bb_name)
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
+ # Keywords
+ (r'\b(Ptr)\b', Keyword.Type),
+ (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
+ (words((
+ 'TNullMethodException', 'TNullFunctionException',
+ 'TNullObjectException', 'TArrayBoundsException',
+ 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception),
+ (words((
+ 'Strict', 'SuperStrict', 'Module', 'ModuleInfo',
+ 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private',
+ 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max',
+ 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen',
+ 'Framework', 'Include', 'Import', 'Extern', 'EndExtern',
+ 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod',
+ 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect',
+ 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData',
+ 'RestoreData'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ (r'(%s)' % (bmax_name), Name.Variable),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
+ (r'[^"]+', String.Double),
+ ],
+ }
+
+
+class BlitzBasicLexer(RegexLexer):
+ """
+ For `BlitzBasic <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'BlitzBasic'
+ aliases = ['blitzbasic', 'b3d', 'bplus']
+ filenames = ['*.bb', '*.decls']
+ mimetypes = ['text/x-bb']
+
+ bb_sktypes = r'@{1,2}|[#$%]'
+ bb_name = r'[a-z]\w*'
+ bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
+ (bb_name, bb_sktypes, bb_name)
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
(r'\s+', Whitespace),
- # Comments
- (r";.*?\n", Comment.Single),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not',
- 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str',
- 'First', 'Last', 'Before', 'After'),
- prefix=r'\b', suffix=r'\b'),
- Operator),
- (r'([+\-*/~=<>^])', Operator),
- (r'[(),:\[\]\\]', Punctuation),
- (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
- # Identifiers
- (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
+ # Comments
+ (r";.*?\n", Comment.Single),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not',
+ 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str',
+ 'First', 'Last', 'Before', 'After'),
+ prefix=r'\b', suffix=r'\b'),
+ Operator),
+ (r'([+\-*/~=<>^])', Operator),
+ (r'[(),:\[\]\\]', Punctuation),
+ (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
+ # Identifiers
+ (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
bygroups(Keyword.Reserved, Whitespace, Name.Class)),
- (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
+ (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
bygroups(Keyword.Reserved, Whitespace, Name.Label)),
- (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
+ (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
bygroups(Operator, Whitespace, Punctuation, Whitespace, Name.Class)),
- (r'\b%s\b([ \t]*)(\()' % bb_var,
+ (r'\b%s\b([ \t]*)(\()' % bb_var,
bygroups(Name.Function, Whitespace, Keyword.Type, Whitespace, Punctuation,
Whitespace, Name.Class, Whitespace, Punctuation)),
- (r'\b(Function)\b([ \t]+)%s' % bb_var,
+ (r'\b(Function)\b([ \t]+)%s' % bb_var,
bygroups(Keyword.Reserved, Whitespace, Name.Function, Whitespace, Keyword.Type,
Whitespace, Punctuation, Whitespace, Name.Class)),
- (r'\b(Type)([ \t]+)(%s)' % (bb_name),
+ (r'\b(Type)([ \t]+)(%s)' % (bb_name),
bygroups(Keyword.Reserved, Whitespace, Name.Class)),
- # Keywords
- (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
- (words((
- 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert',
- 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
- 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend',
- 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default',
- 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- # Final resolve (for variable names and such)
- # (r'(%s)' % (bb_name), Name.Variable),
+ # Keywords
+ (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
+ (words((
+ 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert',
+ 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default',
+ 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ # (r'(%s)' % (bb_name), Name.Variable),
(bb_var, bygroups(Name.Variable, Whitespace, Keyword.Type,
Whitespace, Punctuation, Whitespace, Name.Class)),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
(r'[^"\n]+', String.Double),
- ],
- }
-
-
-class MonkeyLexer(RegexLexer):
- """
- For
- `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
- source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Monkey'
- aliases = ['monkey']
- filenames = ['*.monkey']
- mimetypes = ['text/x-monkey']
-
- name_variable = r'[a-z_]\w*'
- name_function = r'[A-Z]\w*'
- name_constant = r'[A-Z_][A-Z0-9_]*'
- name_class = r'[A-Z]\w*'
- name_module = r'[a-z0-9_]*'
-
- keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
- # ? == Bool // % == Int // # == Float // $ == String
- keyword_type_special = r'[?%#$]'
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- # Text
+ ],
+ }
+
+
+class MonkeyLexer(RegexLexer):
+ """
+ For
+ `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Monkey'
+ aliases = ['monkey']
+ filenames = ['*.monkey']
+ mimetypes = ['text/x-monkey']
+
+ name_variable = r'[a-z_]\w*'
+ name_function = r'[A-Z]\w*'
+ name_constant = r'[A-Z_][A-Z0-9_]*'
+ name_class = r'[A-Z]\w*'
+ name_module = r'[a-z0-9_]*'
+
+ keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
+ # ? == Bool // % == Int // # == Float // $ == String
+ keyword_type_special = r'[?%#$]'
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ # Text
(r'\s+', Whitespace),
- # Comments
- (r"'.*", Comment),
- (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
- # preprocessor directives
- (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
- # preprocessor variable (any line starting with '#' that is not a directive)
- (r'^#', Comment.Preproc, 'variables'),
- # String
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-fA-Z]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Native data types
- (r'\b%s\b' % keyword_type, Keyword.Type),
- # Exception handling
- (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
- (r'Throwable', Name.Exception),
- # Builtins
- (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
- (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
- (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
- # Keywords
- (r'(?i)^(Import)(\s+)(.*)(\n)',
+ # Comments
+ (r"'.*", Comment),
+ (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
+ # preprocessor directives
+ (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
+ # preprocessor variable (any line starting with '#' that is not a directive)
+ (r'^#', Comment.Preproc, 'variables'),
+ # String
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-fA-Z]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Native data types
+ (r'\b%s\b' % keyword_type, Keyword.Type),
+ # Exception handling
+ (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
+ (r'Throwable', Name.Exception),
+ # Builtins
+ (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
+ (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
+ (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
+ # Keywords
+ (r'(?i)^(Import)(\s+)(.*)(\n)',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace, Whitespace)),
- (r'(?i)^Strict\b.*\n', Keyword.Reserved),
- (r'(?i)(Const|Local|Global|Field)(\s+)',
+ (r'(?i)^Strict\b.*\n', Keyword.Reserved),
+ (r'(?i)(Const|Local|Global|Field)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'variables'),
- (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
+ (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
bygroups(Keyword.Reserved, Whitespace), 'classname'),
- (r'(?i)(Function|Method)(\s+)',
+ (r'(?i)(Function|Method)(\s+)',
bygroups(Keyword.Reserved, Whitespace), 'funcname'),
- (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
- r'Final|Abstract)\b', Keyword.Reserved),
- # Flow Control stuff
- (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
- r'Select|Case|Default|'
- r'While|Wend|'
- r'Repeat|Until|Forever|'
- r'For|To|Until|Step|EachIn|Next|'
+ (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
+ r'Final|Abstract)\b', Keyword.Reserved),
+ # Flow Control stuff
+ (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
+ r'Select|Case|Default|'
+ r'While|Wend|'
+ r'Repeat|Until|Forever|'
+ r'For|To|Until|Step|EachIn|Next|'
r'Exit|Continue)(?=\s)', Keyword.Reserved),
- # not used yet
- (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
- # Array
- (r'[\[\]]', Punctuation),
- # Other
- (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
- (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
- (r'[(){}!#,.:]', Punctuation),
- # catch the rest
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_function, Name.Function),
- (r'%s\b' % name_variable, Name.Variable),
- ],
- 'funcname': [
- (r'(?i)%s\b' % name_function, Name.Function),
- (r':', Punctuation, 'classname'),
+ # not used yet
+ (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
+ # Array
+ (r'[\[\]]', Punctuation),
+ # Other
+ (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
+ (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
+ (r'[(){}!#,.:]', Punctuation),
+ # catch the rest
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_function, Name.Function),
+ (r'%s\b' % name_variable, Name.Variable),
+ ],
+ 'funcname': [
+ (r'(?i)%s\b' % name_function, Name.Function),
+ (r':', Punctuation, 'classname'),
(r'\s+', Whitespace),
- (r'\(', Punctuation, 'variables'),
- (r'\)', Punctuation, '#pop')
- ],
- 'classname': [
- (r'%s\.' % name_module, Name.Namespace),
- (r'%s\b' % keyword_type, Keyword.Type),
- (r'%s\b' % name_class, Name.Class),
- # array (of given size)
- (r'(\[)(\s*)(\d*)(\s*)(\])',
+ (r'\(', Punctuation, 'variables'),
+ (r'\)', Punctuation, '#pop')
+ ],
+ 'classname': [
+ (r'%s\.' % name_module, Name.Namespace),
+ (r'%s\b' % keyword_type, Keyword.Type),
+ (r'%s\b' % name_class, Name.Class),
+ # array (of given size)
+ (r'(\[)(\s*)(\d*)(\s*)(\])',
bygroups(Punctuation, Whitespace, Number.Integer, Whitespace, Punctuation)),
- # generics
+ # generics
(r'\s+(?!<)', Whitespace, '#pop'),
- (r'<', Punctuation, '#push'),
- (r'>', Punctuation, '#pop'),
+ (r'<', Punctuation, '#push'),
+ (r'>', Punctuation, '#pop'),
(r'\n', Whitespace, '#pop'),
- default('#pop')
- ],
- 'variables': [
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_variable, Name.Variable),
- (r'%s' % keyword_type_special, Keyword.Type),
+ default('#pop')
+ ],
+ 'variables': [
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_variable, Name.Variable),
+ (r'%s' % keyword_type_special, Keyword.Type),
(r'\s+', Whitespace),
- (r':', Punctuation, 'classname'),
- (r',', Punctuation, '#push'),
- default('#pop')
- ],
- 'string': [
- (r'[^"~]+', String.Double),
- (r'~q|~n|~r|~t|~z|~~', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'comment': [
- (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
- (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
- (r'\n', Comment.Multiline),
- (r'.+', Comment.Multiline),
- ],
- }
-
-
-class CbmBasicV2Lexer(RegexLexer):
- """
- For CBM BASIC V2 sources.
-
- .. versionadded:: 1.6
- """
- name = 'CBM BASIC V2'
- aliases = ['cbmbas']
- filenames = ['*.bas']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'rem.*\n', Comment.Single),
+ (r':', Punctuation, 'classname'),
+ (r',', Punctuation, '#push'),
+ default('#pop')
+ ],
+ 'string': [
+ (r'[^"~]+', String.Double),
+ (r'~q|~n|~r|~t|~z|~~', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'comment': [
+ (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
+ (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
+ (r'\n', Comment.Multiline),
+ (r'.+', Comment.Multiline),
+ ],
+ }
+
+
+class CbmBasicV2Lexer(RegexLexer):
+ """
+ For CBM BASIC V2 sources.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CBM BASIC V2'
+ aliases = ['cbmbas']
+ filenames = ['*.bas']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'rem.*\n', Comment.Single),
(r'\s+', Whitespace),
- (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
- r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
- r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
- (r'data|restore|dim|let|def|fn', Keyword.Declaration),
- (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
- r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
- (r'[-+*/^<>=]', Operator),
- (r'not|and|or', Operator.Word),
- (r'"[^"\n]*.', String),
- (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
- (r'[(),:;]', Punctuation),
- (r'\w+[$%]?', Name),
- ]
- }
-
+ (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
+ r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
+ r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
+ (r'data|restore|dim|let|def|fn', Keyword.Declaration),
+ (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
+ r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
+ (r'[-+*/^<>=]', Operator),
+ (r'not|and|or', Operator.Word),
+ (r'"[^"\n]*.', String),
+ (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
+ (r'[(),:;]', Punctuation),
+ (r'\w+[$%]?', Name),
+ ]
+ }
+
def analyse_text(text):
- # if it starts with a line number, it shouldn't be a "modern" Basic
- # like VB.net
+ # if it starts with a line number, it shouldn't be a "modern" Basic
+ # like VB.net
if re.match(r'^\d+', text):
- return 0.2
-
-
-class QBasicLexer(RegexLexer):
- """
- For
- `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
- source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'QBasic'
- aliases = ['qbasic', 'basic']
- filenames = ['*.BAS', '*.bas']
- mimetypes = ['text/basic']
-
- declarations = ('DATA', 'LET')
-
- functions = (
- 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
- 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
- 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
- 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
- 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
- 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
- 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
- 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
- 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
- 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
- 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
- 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
- 'VARPTR$', 'VARSEG'
- )
-
- metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC')
-
- operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR')
-
- statements = (
- 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
- 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
- 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
- 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
- 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
- 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
- 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
- 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
- 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
- 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
- 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
- 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
- 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
- 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
- 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
- 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
- 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
- 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
- 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
- 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
- )
-
- keywords = (
- 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
- 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
- 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
- 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
- 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
- )
-
- tokens = {
- 'root': [
- (r'\n+', Text),
- (r'\s+', Text.Whitespace),
- (r'^(\s*)(\d*)(\s*)(REM .*)$',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
- Comment.Single)),
- (r'^(\s*)(\d+)(\s*)',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
- (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
- (r'(?=[^"]*)\'.*$', Comment.Single),
- (r'"[^\n"]*"', String.Double),
- (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
- bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
- (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name)),
- (r'(DIM)(\s+)(SHARED)(\s+)([^\s(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name.Variable.Global)),
- (r'(DIM)(\s+)([^\s(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
- (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
- bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
- Operator)),
- (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- (r'(SUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- include('declarations'),
- include('functions'),
- include('metacommands'),
- include('operators'),
- include('statements'),
- include('keywords'),
- (r'[a-zA-Z_]\w*[$@#&!]', Name.Variable.Global),
- (r'[a-zA-Z_]\w*\:', Name.Label),
- (r'\-?\d*\.\d+[@|#]?', Number.Float),
- (r'\-?\d+[@|#]', Number.Float),
- (r'\-?\d+#?', Number.Integer.Long),
- (r'\-?\d+#?', Number.Integer),
- (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
- (r'[\[\]{}(),;]', Punctuation),
- (r'[\w]+', Name.Variable.Global),
- ],
- # can't use regular \b because of X$()
- # XXX: use words() here
- 'declarations': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
- Keyword.Declaration),
- ],
- 'functions': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
- Keyword.Reserved),
- ],
- 'metacommands': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
- Keyword.Constant),
- ],
- 'operators': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
- ],
- 'statements': [
- (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
- Keyword.Reserved),
- ],
- 'keywords': [
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- ],
- }
-
- def analyse_text(text):
- if '$DYNAMIC' in text or '$STATIC' in text:
- return 0.9
+ return 0.2
+
+
+class QBasicLexer(RegexLexer):
+ """
+ For
+ `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
+ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'QBasic'
+ aliases = ['qbasic', 'basic']
+ filenames = ['*.BAS', '*.bas']
+ mimetypes = ['text/basic']
+
+ declarations = ('DATA', 'LET')
+
+ functions = (
+ 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
+ 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
+ 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
+ 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
+ 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
+ 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
+ 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
+ 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
+ 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
+ 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
+ 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
+ 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
+ 'VARPTR$', 'VARSEG'
+ )
+
+ metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC')
+
+ operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR')
+
+ statements = (
+ 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
+ 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
+ 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
+ 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
+ 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
+ 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
+ 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
+ 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
+ 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
+ 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
+ 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
+ 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
+ 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
+ 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
+ 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
+ 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
+ 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
+ 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
+ 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
+ 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
+ )
+
+ keywords = (
+ 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
+ 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
+ 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
+ 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
+ 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\n+', Text),
+ (r'\s+', Text.Whitespace),
+ (r'^(\s*)(\d*)(\s*)(REM .*)$',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
+ Comment.Single)),
+ (r'^(\s*)(\d+)(\s*)',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
+ (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
+ (r'(?=[^"]*)\'.*$', Comment.Single),
+ (r'"[^\n"]*"', String.Double),
+ (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
+ (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name)),
+ (r'(DIM)(\s+)(SHARED)(\s+)([^\s(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name.Variable.Global)),
+ (r'(DIM)(\s+)([^\s(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
+ (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
+ bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
+ Operator)),
+ (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ (r'(SUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ include('declarations'),
+ include('functions'),
+ include('metacommands'),
+ include('operators'),
+ include('statements'),
+ include('keywords'),
+ (r'[a-zA-Z_]\w*[$@#&!]', Name.Variable.Global),
+ (r'[a-zA-Z_]\w*\:', Name.Label),
+ (r'\-?\d*\.\d+[@|#]?', Number.Float),
+ (r'\-?\d+[@|#]', Number.Float),
+ (r'\-?\d+#?', Number.Integer.Long),
+ (r'\-?\d+#?', Number.Integer),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'[\w]+', Name.Variable.Global),
+ ],
+ # can't use regular \b because of X$()
+ # XXX: use words() here
+ 'declarations': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
+ Keyword.Declaration),
+ ],
+ 'functions': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
+ Keyword.Reserved),
+ ],
+ 'metacommands': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
+ Keyword.Constant),
+ ],
+ 'operators': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
+ ],
+ 'statements': [
+ (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
+ Keyword.Reserved),
+ ],
+ 'keywords': [
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ ],
+ }
+
+ def analyse_text(text):
+ if '$DYNAMIC' in text or '$STATIC' in text:
+ return 0.9
class VBScriptLexer(RegexLexer):
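The BASIC-family lexers restored above (e.g. CbmBasicV2Lexer, QBasicLexer) are ordinary Pygments RegexLexer subclasses, so they plug straight into the standard highlight() pipeline. A minimal usage sketch, assuming only a stock Pygments installation where the 'qbasic' alias declared in this file is registered; the sample source string is purely illustrative:

# Hypothetical driver, not part of this patch: renders a QBasic snippet
# through the lexer defined above using Pygments' standard entry points.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_by_name

source = '10 REM greet the user\n20 PRINT "HELLO"\n'
print(highlight(source, get_lexer_by_name('qbasic'), TerminalFormatter()))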
diff --git a/contrib/python/Pygments/py3/pygments/lexers/business.py b/contrib/python/Pygments/py3/pygments/lexers/business.py
index 47713198ed..6215db5624 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/business.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/business.py
@@ -1,146 +1,146 @@
-"""
- pygments.lexers.business
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for "business-oriented" languages.
-
+"""
+ pygments.lexers.business
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for "business-oriented" languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Whitespace
-
-from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
-
-__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
- 'GoodDataCLLexer', 'MaqlLexer']
-
-
-class CobolLexer(RegexLexer):
- """
- Lexer for OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOL'
- aliases = ['cobol']
- filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
- mimetypes = ['text/x-cobol']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: by PICTURE and USAGE
- # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
- # Logical (?): NOT, AND, OR
-
- # Reserved words:
- # http://opencobol.add1tocobol.com/#reserved-words
- # Intrinsics:
- # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
-
- tokens = {
- 'root': [
- include('comment'),
- include('strings'),
- include('core'),
- include('nums'),
- (r'[a-z0-9]([\w\-]*[a-z0-9]+)?', Name.Variable),
- # (r'[\s]+', Text),
+
+from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
+
+__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
+ 'GoodDataCLLexer', 'MaqlLexer']
+
+
+class CobolLexer(RegexLexer):
+ """
+ Lexer for OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOL'
+ aliases = ['cobol']
+ filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
+ mimetypes = ['text/x-cobol']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # Data Types: by PICTURE and USAGE
+ # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
+ # Logical (?): NOT, AND, OR
+
+ # Reserved words:
+ # http://opencobol.add1tocobol.com/#reserved-words
+ # Intrinsics:
+ # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('strings'),
+ include('core'),
+ include('nums'),
+ (r'[a-z0-9]([\w\-]*[a-z0-9]+)?', Name.Variable),
+ # (r'[\s]+', Text),
(r'[ \t]+', Whitespace),
- ],
- 'comment': [
- (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
- ],
- 'core': [
- # Figurative constants
+ ],
+ 'comment': [
+ (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
+ ],
+ 'core': [
+ # Figurative constants
(r'(^|(?<=[^\w\-]))(ALL\s+)?'
- r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
+ r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
r'\s*($|(?=[^\w\-]))',
- Name.Constant),
-
- # Reserved words STATEMENTS and other bolds
- (words((
- 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
- 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
- 'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
- 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
- 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
- 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
- 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
- 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
- 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
- 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
- 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
- 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
- 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
- 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
- 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
+ Name.Constant),
+
+ # Reserved words STATEMENTS and other bolds
+ (words((
+ 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
+ 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
+ 'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
+ 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
+ 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
+ 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
+ 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
+ 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
+ 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
+ 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
+ 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
+ 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
+ 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
+ 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
+ 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^\w\-]))',
suffix=r'\s*($|(?=[^\w\-]))'),
- Keyword.Reserved),
-
- # Reserved words
- (words((
- 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
- 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
-                'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
- 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
+ Keyword.Reserved),
+
+ # Reserved words
+ (words((
+ 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
+ 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
+                'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
+ 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC',
'AUTOTERMINATE', 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
- 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
+ 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL',
'COLLATING', 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE',
'COMMIT', 'COMMON', 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
- 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
+ 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE',
'DEBUGGING', 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
- 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
- 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
- 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
- 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
+ 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
+ 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
+ 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
+ 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
'EXCLUSIVE', 'EXTEND', 'EXTERNAL', 'FILE-ID', 'FILLER', 'FINAL',
'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL',
'FUNCTION', 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
- 'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
- 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
+ 'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
+ 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
'INITIAL', 'INITIALIZED', 'INPUT', 'INTO', 'INTRINSIC', 'INVALID',
'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
- 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
- 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
+ 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
+ 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE', 'MULTIPLE',
'NATIONAL', 'NATIONAL-EDITED', 'NATIVE', 'NEGATIVE', 'NEXT', 'NO',
'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC', 'NUMERIC-EDITED',
'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
- 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
- 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
- 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
- 'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
- 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
- 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
- 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
- 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
- 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
- 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
- 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
- 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
- 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
- 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
- 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUM',
- 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
- 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
- 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
- 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
- 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
- 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
+ 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
+ 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
+ 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
+ 'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
+ 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
+ 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
+ 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
+ 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
+ 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
+ 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
+ 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
+ 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
+ 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
+ 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
+ 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUM',
+ 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
+ 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
+ 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
+ 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
+ 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
+ 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
- Keyword.Pseudo),
-
- # inactive reserved words
- (words((
+ Keyword.Pseudo),
+
+ # inactive reserved words
+ (words((
'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE',
'B-AND', 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER',
'CF', 'CH', 'CHAIN', 'CLASS-ID', 'CLASSIFICATION', 'COMMUNICATION',
@@ -151,7 +151,7 @@ class CobolLexer(RegexLexer):
'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT', 'FUNCTION-POINTER',
'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY', 'INHERITS',
'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
- 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
+ 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE',
'NORMAL', 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE',
'PAGE-COUNTER', 'PF', 'PH', 'PROPERTY', 'PROTOTYPE', 'PURGE',
@@ -163,218 +163,218 @@ class CobolLexer(RegexLexer):
'TYPEDEF', 'UCS-4', 'UNIVERSAL', 'USER-DEFAULT', 'UTF-16', 'UTF-8',
'VAL-STATUS', 'VALID', 'VALIDATE', 'VALIDATE-STATUS'),
prefix=r'(^|(?<=[^\w\-]))', suffix=r'\s*($|(?=[^\w\-]))'),
- Error),
-
- # Data Types
+ Error),
+
+ # Data Types
(r'(^|(?<=[^\w\-]))'
- r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
- r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
- r'BINARY-C-LONG|'
- r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
+ r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
+ r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
+ r'BINARY-C-LONG|'
+ r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
r'BINARY)\s*($|(?=[^\w\-]))', Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
-
- # (r'(::)', Keyword.Declaration),
-
- (r'([(),;:&%.])', Punctuation),
-
- # Intrinsics
+
+ # Operators
+ (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
+
+ # (r'(::)', Keyword.Declaration),
+
+ (r'([(),;:&%.])', Punctuation),
+
+ # Intrinsics
(r'(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
- r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
- r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
- r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
- r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
- r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
- r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
- r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
- r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
- r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
- r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
- r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
+ r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
+ r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
+ r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
+ r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
+ r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
+ r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
+ r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
+ r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
+ r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
+ r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
+ r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
r'($|(?=[^\w\-]))', Name.Function),
-
- # Booleans
+
+ # Booleans
(r'(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))', Name.Builtin),
- # Comparing Operators
+ # Comparing Operators
(r'(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|'
r'greater|less|than|not|and|or)\s*($|(?=[^\w\-]))', Operator.Word),
- ],
-
- # \"[^\"\n]*\"|\'[^\'\n]*\'
- 'strings': [
- # apparently strings can be delimited by EOL if they are continued
- # in the next line
- (r'"[^"\n]*("|\n)', String.Double),
- (r"'[^'\n]*('|\n)", String.Single),
- ],
-
- 'nums': [
- (r'\d+(\s*|\.$|$)', Number.Integer),
- (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
- ],
- }
-
-
-class CobolFreeformatLexer(CobolLexer):
- """
- Lexer for Free format OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOLFree'
- aliases = ['cobolfree']
- filenames = ['*.cbl', '*.CBL']
- mimetypes = []
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'comment': [
- (r'(\*>.*\n|^\w*\*.*$)', Comment),
- ],
- }
-
-
-class ABAPLexer(RegexLexer):
- """
- Lexer for ABAP, SAP's integrated language.
-
- .. versionadded:: 1.1
- """
- name = 'ABAP'
- aliases = ['abap']
- filenames = ['*.abap', '*.ABAP']
- mimetypes = ['text/x-abap']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'common': [
+ ],
+
+ # \"[^\"\n]*\"|\'[^\'\n]*\'
+ 'strings': [
+ # apparently strings can be delimited by EOL if they are continued
+ # in the next line
+ (r'"[^"\n]*("|\n)', String.Double),
+ (r"'[^'\n]*('|\n)", String.Single),
+ ],
+
+ 'nums': [
+ (r'\d+(\s*|\.$|$)', Number.Integer),
+ (r'[+-]?\d*\.\d+(E[-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*(E[-+]?\d+)?', Number.Float),
+ ],
+ }
+
+
+class CobolFreeformatLexer(CobolLexer):
+ """
+ Lexer for Free format OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOLFree'
+ aliases = ['cobolfree']
+ filenames = ['*.cbl', '*.CBL']
+ mimetypes = []
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'comment': [
+ (r'(\*>.*\n|^\w*\*.*$)', Comment),
+ ],
+ }
+
+
+class ABAPLexer(RegexLexer):
+ """
+ Lexer for ABAP, SAP's integrated language.
+
+ .. versionadded:: 1.1
+ """
+ name = 'ABAP'
+ aliases = ['abap']
+ filenames = ['*.abap', '*.ABAP']
+ mimetypes = ['text/x-abap']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'common': [
(r'\s+', Whitespace),
- (r'^\*.*$', Comment.Single),
- (r'\".*?\n', Comment.Single),
+ (r'^\*.*$', Comment.Single),
+ (r'\".*?\n', Comment.Single),
(r'##\w+', Comment.Special),
- ],
- 'variable-names': [
- (r'<\S+>', Name.Variable),
- (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
- ],
- 'root': [
- include('common'),
- # function calls
+ ],
+ 'variable-names': [
+ (r'<\S+>', Name.Variable),
+ (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
+ ],
+ 'root': [
+ include('common'),
+ # function calls
(r'CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)',
Keyword),
- (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
- r'TRANSACTION|TRANSFORMATION))\b',
- Keyword),
- (r'(FORM|PERFORM)(\s+)(\w+)',
+ (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
+ r'TRANSACTION|TRANSFORMATION))\b',
+ Keyword),
+ (r'(FORM|PERFORM)(\s+)(\w+)',
bygroups(Keyword, Whitespace, Name.Function)),
- (r'(PERFORM)(\s+)(\()(\w+)(\))',
+ (r'(PERFORM)(\s+)(\()(\w+)(\))',
bygroups(Keyword, Whitespace, Punctuation, Name.Variable, Punctuation)),
- (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
+ (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace, Keyword)),
-
- # method implementation
- (r'(METHOD)(\s+)([\w~]+)',
+
+ # method implementation
+ (r'(METHOD)(\s+)([\w~]+)',
bygroups(Keyword, Whitespace, Name.Function)),
- # method calls
- (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
+ # method calls
+ (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
bygroups(Whitespace, Name.Variable, Operator, Name.Function)),
-            # names of methods called via -> or => (returning style)
- (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
-
+            # names of methods called via -> or => (returning style)
+ (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
+
# text elements
(r'(TEXT)(-)(\d{3})',
bygroups(Keyword, Punctuation, Number.Integer)),
(r'(TEXT)(-)(\w{3})',
bygroups(Keyword, Punctuation, Name.Variable)),
- # keywords with dashes in them.
- # these need to be first, because for instance the -ID part
- # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
- # first in the list of keywords.
- (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
- r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
- r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
- r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
- r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
- r'INTERFACE-POOL|INVERTED-DATE|'
- r'LOAD-OF-PROGRAM|LOG-POINT|'
- r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
- r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
- r'OUTPUT-LENGTH|PRINT-CONTROL|'
- r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
- r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
+ # keywords with dashes in them.
+ # these need to be first, because for instance the -ID part
+ # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
+ # first in the list of keywords.
+ (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
+ r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
+ r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
+ r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
+ r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
+ r'INTERFACE-POOL|INVERTED-DATE|'
+ r'LOAD-OF-PROGRAM|LOG-POINT|'
+ r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
+ r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
+ r'OUTPUT-LENGTH|PRINT-CONTROL|'
+ r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
+ r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
r'TYPE-POOL|TYPE-POOLS|NO-DISPLAY'
- r')\b', Keyword),
-
-            # keyword combinations
+ r')\b', Keyword),
+
+            # keyword combinations
(r'(?<![-\>])(CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
r'(PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
r'(TYPE|LIKE)\s+((LINE\s+OF|REF\s+TO|'
- r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
- r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
- r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
- r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
- r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
- r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
- r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
- r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
- r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
- r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
-             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
- r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
- r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
- r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
- r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
- r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
- r'FREE\s(MEMORY|OBJECT)?|'
- r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
- r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
- r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
- r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
- r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
- r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
- r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
- r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
- r'SKIP|ULINE)|'
- r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
- r'TO LIST-PROCESSING|TO TRANSACTION)'
- r'(ENDING|STARTING)\s+AT|'
- r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
- r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
- r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
- r'(BEGIN|END)\s+OF|'
- r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
- r'COMPARING(\s+ALL\s+FIELDS)?|'
+ r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
+ r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
+ r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
+ r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
+ r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
+ r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
+ r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
+ r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
+ r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
+ r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
+             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
+ r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
+ r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
+ r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
+ r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
+ r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
+ r'FREE\s(MEMORY|OBJECT)?|'
+ r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
+ r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
+ r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
+ r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
+ r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
+ r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
+ r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
+ r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
+ r'SKIP|ULINE)|'
+ r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
+ r'TO LIST-PROCESSING|TO TRANSACTION)'
+ r'(ENDING|STARTING)\s+AT|'
+ r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
+ r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
+ r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
+ r'(BEGIN|END)\s+OF|'
+ r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
+ r'COMPARING(\s+ALL\s+FIELDS)?|'
r'(INSERT|APPEND)(\s+INITIAL\s+LINE\s+(IN)?TO|\s+LINES\s+OF)?|'
- r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
- r'END-OF-(DEFINITION|PAGE|SELECTION)|'
- r'WITH\s+FRAME(\s+TITLE)|'
+ r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
+ r'END-OF-(DEFINITION|PAGE|SELECTION)|'
+ r'WITH\s+FRAME(\s+TITLE)|'
r'(REPLACE|FIND)\s+((FIRST|ALL)\s+OCCURRENCES?\s+OF\s+)?(SUBSTRING|REGEX)?|'
r'MATCH\s+(LENGTH|COUNT|LINE|OFFSET)|'
r'(RESPECTING|IGNORING)\s+CASE|'
r'IN\s+UPDATE\s+TASK|'
r'(SOURCE|RESULT)\s+(XML)?|'
r'REFERENCE\s+INTO|'
-
-             # simple combinations
- r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
- r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
- r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
- r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
- r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
- r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
+
+             # simple combinations
+ r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
+ r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
+ r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
+ r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
+ r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
+ r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE|COMMON\s+PART)\b', Keyword),
-
- # single word keywords.
+
+ # single word keywords.
(r'(^|(?<=(\s|\.)))(ABBREVIATED|ABSTRACT|ADD|ALIASES|ALIGN|ALPHA|'
r'ASSERT|AS|ASSIGN(ING)?|AT(\s+FIRST)?|'
- r'BACK|BLOCK|BREAK-POINT|'
- r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
- r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
+ r'BACK|BLOCK|BREAK-POINT|'
+ r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
+ r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|COUNTRY|CURRENCY|'
r'DATA|DATE|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
r'DETAIL|DIRECTORY|DIVIDE|DO|DUMMY|'
@@ -382,12 +382,12 @@ class ABAPLexer(RegexLexer):
r'ENDIF|ENDINTERFACE|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|ENDWHILE|'
r'ENHANCEMENT|EVENTS|EXACT|EXCEPTIONS?|EXIT|EXPONENT|EXPORT|EXPORTING|EXTRACT|'
r'FETCH|FIELDS?|FOR|FORM|FORMAT|FREE|FROM|FUNCTION|'
- r'HIDE|'
- r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
- r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
+ r'HIDE|'
+ r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
+ r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
r'LANGUAGE|LEAVE|LENGTH|LINES|LOAD|LOCAL|'
- r'JOIN|'
- r'KEY|'
+ r'JOIN|'
+ r'KEY|'
r'NEXT|'
r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFIER|MODIFY|MOVE|MULTIPLY|'
r'NODES|NUMBER|'
@@ -399,110 +399,110 @@ class ABAPLexer(RegexLexer):
r'STYLE|SUBMATCHES|SUBMIT|SUBTRACT|SUM(?!\()|SUMMARY|SUMMING|SUPPLY|'
r'TABLE|TABLES|TIMESTAMP|TIMES?|TIMEZONE|TITLE|\??TO|'
r'TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
- r'ULINE|UNDER|UNPACK|UPDATE|USING|'
+ r'ULINE|UNDER|UNPACK|UPDATE|USING|'
r'VALUE|VALUES|VIA|VARYING|VARY|'
r'WAIT|WHEN|WHERE|WIDTH|WHILE|WITH|WINDOW|WRITE|XSD|ZERO)\b', Keyword),
-
- # builtins
- (r'(abs|acos|asin|atan|'
- r'boolc|boolx|bit_set|'
- r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
- r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
- r'count|count_any_of|count_any_not_of|'
- r'dbmaxlen|distance|'
- r'escape|exp|'
- r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
- r'insert|'
- r'lines|log|log10|'
- r'match|matches|'
- r'nmax|nmin|numofchar|'
- r'repeat|replace|rescale|reverse|round|'
- r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
- r'substring|substring_after|substring_from|substring_before|substring_to|'
- r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
- r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
-
- (r'&[0-9]', Name),
- (r'[0-9]+', Number.Integer),
-
- # operators which look like variable names before
- # parsing variable names.
+
+ # builtins
+ (r'(abs|acos|asin|atan|'
+ r'boolc|boolx|bit_set|'
+ r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
+ r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
+ r'count|count_any_of|count_any_not_of|'
+ r'dbmaxlen|distance|'
+ r'escape|exp|'
+ r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
+ r'insert|'
+ r'lines|log|log10|'
+ r'match|matches|'
+ r'nmax|nmin|numofchar|'
+ r'repeat|replace|rescale|reverse|round|'
+ r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
+ r'substring|substring_after|substring_from|substring_before|substring_to|'
+ r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
+ r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
+
+ (r'&[0-9]', Name),
+ (r'[0-9]+', Number.Integer),
+
+ # operators which look like variable names before
+ # parsing variable names.
(r'(?<=(\s|.))(AND|OR|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
- r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
+ r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator.Word),
-
- include('variable-names'),
-
+
+ include('variable-names'),
+
# standard operators after variable names,
- # because < and > are part of field symbols.
+ # because < and > are part of field symbols.
(r'[?*<>=\-+&]', Operator),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Single),
+ (r"'(''|[^'])*'", String.Single),
+ (r"`([^`])*`", String.Single),
(r"([|}])([^{}|]*?)([|{])",
bygroups(Punctuation, String.Single, Punctuation)),
(r'[/;:()\[\],.]', Punctuation),
(r'(!)(\w+)', bygroups(Operator, Name)),
- ],
- }
-
-
-class OpenEdgeLexer(RegexLexer):
- """
- Lexer for `OpenEdge ABL (formerly Progress)
- <http://web.progress.com/en/openedge/abl.html>`_ source code.
-
- .. versionadded:: 1.5
- """
- name = 'OpenEdge ABL'
- aliases = ['openedge', 'abl', 'progress']
- filenames = ['*.p', '*.cls']
- mimetypes = ['text/x-openedge', 'application/x-openedge']
-
+ ],
+ }
+
+
+class OpenEdgeLexer(RegexLexer):
+ """
+ Lexer for `OpenEdge ABL (formerly Progress)
+ <http://web.progress.com/en/openedge/abl.html>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'OpenEdge ABL'
+ aliases = ['openedge', 'abl', 'progress']
+ filenames = ['*.p', '*.cls']
+ mimetypes = ['text/x-openedge', 'application/x-openedge']
+
types = (r'(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
- r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
- r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
- r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
+ r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
+ r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
+ r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))')
-
- keywords = words(OPENEDGEKEYWORDS,
+
+ keywords = words(OPENEDGEKEYWORDS,
prefix=r'(?i)(^|(?<=[^\w\-]))',
suffix=r'\s*($|(?=[^\w\-]))')
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'\{', Comment.Preproc, 'preprocessor'),
- (r'\s*&.*', Comment.Preproc),
- (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
- (types, Keyword.Type),
- (keywords, Name.Builtin),
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'\{', Comment.Preproc, 'preprocessor'),
+ (r'\s*&.*', Comment.Preproc),
+ (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
+ (types, Keyword.Type),
+ (keywords, Name.Builtin),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
(r'\s+', Whitespace),
- (r'[+*/=-]', Operator),
- (r'[.:()]', Punctuation),
- (r'.', Name.Variable), # Lazy catch-all
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'preprocessor': [
- (r'[^{}]', Comment.Preproc),
- (r'\{', Comment.Preproc, '#push'),
- (r'\}', Comment.Preproc, '#pop'),
- ],
- }
-
+ (r'[+*/=-]', Operator),
+ (r'[.:()]', Punctuation),
+ (r'.', Name.Variable), # Lazy catch-all
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'preprocessor': [
+ (r'[^{}]', Comment.Preproc),
+ (r'\{', Comment.Preproc, '#push'),
+ (r'\}', Comment.Preproc, '#pop'),
+ ],
+ }
+
def analyse_text(text):
"""Try to identify OpenEdge ABL based on a few common constructs."""
result = 0
-
+
if 'END.' in text:
result += 0.05
@@ -515,112 +515,112 @@ class OpenEdgeLexer(RegexLexer):
return result
-class GoodDataCLLexer(RegexLexer):
- """
- Lexer for `GoodData-CL
+class GoodDataCLLexer(RegexLexer):
+ """
+ Lexer for `GoodData-CL
<https://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
-com/gooddata/processor/COMMANDS.txt>`_
- script files.
-
- .. versionadded:: 1.4
- """
-
- name = 'GoodData-CL'
- aliases = ['gooddata-cl']
- filenames = ['*.gdc']
- mimetypes = ['text/x-gooddata-cl']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # Comments
- (r'#.*', Comment.Single),
- # Function call
- (r'[a-z]\w*', Name.Function),
- # Argument list
- (r'\(', Punctuation, 'args-list'),
- # Punctuation
- (r';', Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'args-list': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'[a-z]\w*', Name.Variable),
- (r'=', Operator),
- (r'"', String, 'string-literal'),
- (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
- # Space is not significant
+com/gooddata/processor/COMMANDS.txt>`_
+ script files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'GoodData-CL'
+ aliases = ['gooddata-cl']
+ filenames = ['*.gdc']
+ mimetypes = ['text/x-gooddata-cl']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Comments
+ (r'#.*', Comment.Single),
+ # Function call
+ (r'[a-z]\w*', Name.Function),
+ # Argument list
+ (r'\(', Punctuation, 'args-list'),
+ # Punctuation
+ (r';', Punctuation),
+ # Space is not significant
+ (r'\s+', Text)
+ ],
+ 'args-list': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'[a-z]\w*', Name.Variable),
+ (r'=', Operator),
+ (r'"', String, 'string-literal'),
+ (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
+ # Space is not significant
(r'\s', Whitespace)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', String, '#pop'),
- (r'[^\\"]+', String)
- ]
- }
-
-
-class MaqlLexer(RegexLexer):
- """
- Lexer for `GoodData MAQL
- <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
- scripts.
-
- .. versionadded:: 1.4
- """
-
- name = 'MAQL'
- aliases = ['maql']
- filenames = ['*.maql']
- mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # IDENTITY
- (r'IDENTIFIER\b', Name.Builtin),
- # IDENTIFIER
- (r'\{[^}]+\}', Name.Variable),
- # NUMBER
- (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
- # STRING
- (r'"', String, 'string-literal'),
- # RELATION
- (r'\<\>|\!\=', Operator),
- (r'\=|\>\=|\>|\<\=|\<', Operator),
- # :=
- (r'\:\=', Operator),
- # OBJECT
- (r'\[[^]]+\]', Name.Variable.Class),
- # keywords
- (words((
- 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
- 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
- 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
- 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
- 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
- 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'WHEN', 'NOT', 'ON',
- 'KEYS', 'KEY', 'FULLSET', 'PRIMARY', 'LABELS', 'LABEL',
- 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', 'ALTER', 'DROP',
- 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', 'DOUBLE', 'DATE',
- 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', 'DEFAULT', 'ORDER',
- 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', 'TEMPLATE', 'MODIFY'),
- suffix=r'\b'),
- Keyword),
- # FUNCNAME
- (r'[a-z]\w*\b', Name.Function),
- # Comments
- (r'#.*', Comment.Single),
- # Punctuation
- (r'[,;()]', Punctuation),
- # Space is not significant
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ]
+ }
+
+
+class MaqlLexer(RegexLexer):
+ """
+ Lexer for `GoodData MAQL
+ <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
+ scripts.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'MAQL'
+ aliases = ['maql']
+ filenames = ['*.maql']
+ mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # IDENTITY
+ (r'IDENTIFIER\b', Name.Builtin),
+ # IDENTIFIER
+ (r'\{[^}]+\}', Name.Variable),
+ # NUMBER
+ (r'[0-9]+(?:\.[0-9]+)?(?:e[+-]?[0-9]{1,3})?', Number),
+ # STRING
+ (r'"', String, 'string-literal'),
+ # RELATION
+ (r'\<\>|\!\=', Operator),
+ (r'\=|\>\=|\>|\<\=|\<', Operator),
+ # :=
+ (r'\:\=', Operator),
+ # OBJECT
+ (r'\[[^]]+\]', Name.Variable.Class),
+ # keywords
+ (words((
+ 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
+ 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
+ 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
+ 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
+ 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
+ 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'WHEN', 'NOT', 'ON',
+ 'KEYS', 'KEY', 'FULLSET', 'PRIMARY', 'LABELS', 'LABEL',
+ 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', 'ALTER', 'DROP',
+ 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', 'DOUBLE', 'DATE',
+ 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', 'DEFAULT', 'ORDER',
+ 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', 'TEMPLATE', 'MODIFY'),
+ suffix=r'\b'),
+ Keyword),
+ # FUNCNAME
+ (r'[a-z]\w*\b', Name.Function),
+ # Comments
+ (r'#.*', Comment.Single),
+ # Punctuation
+ (r'[,;()]', Punctuation),
+ # Space is not significant
(r'\s+', Whitespace)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', String, '#pop'),
- (r'[^\\"]+', String)
- ],
- }
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ],
+ }
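The analyse_text() hooks in this file and in basic.py above return small confidence scores, e.g. 0.9 when QBasic metacommands such as $DYNAMIC are present, or 0.05 per OpenEdge construct, which Pygments' guess_lexer() compares when no lexer is named explicitly. A minimal sketch of that content-based detection, assuming a stock Pygments installation; the snippet string is purely illustrative:

# Hypothetical example, not part of this patch: lets Pygments pick a lexer
# purely from the text, driven by the analyse_text() scores seen above.
from pygments.lexers import guess_lexer

snippet = 'DEFINE VARIABLE i AS INTEGER NO-UNDO.\nEND.\n'
print(guess_lexer(snippet).name)  # expected to favour the OpenEdge ABL lexer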
diff --git a/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py b/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
index c9d1ed38ea..97b957e5d9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
@@ -1,34 +1,34 @@
-"""
- pygments.lexers.c_cpp
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for C/C++ languages.
-
+"""
+ pygments.lexers.c_cpp
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for C/C++ languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, inherit, default, words
-from pygments.util import get_bool_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, inherit, default, words
+from pygments.util import get_bool_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['CLexer', 'CppLexer']
-
-
-class CFamilyLexer(RegexLexer):
- """
- For C family source code. This is used as a base class to avoid repetitious
- definitions.
- """
-
- # The trailing ?, rather than *, avoids a geometric performance drop here.
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
-
+
+__all__ = ['CLexer', 'CppLexer']
+
+
+class CFamilyLexer(RegexLexer):
+ """
+ For C family source code. This is used as a base class to avoid repetitious
+ definitions.
+ """
+
+ # The trailing ?, rather than *, avoids a geometric performance drop here.
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)?'
+
     # Hexadecimal part in a hexadecimal integer/floating-point literal.
     # This also matches digit separators within the literal.
_hexpart = r'[0-9a-fA-F](\'?[0-9a-fA-F])*'
@@ -42,27 +42,27 @@ class CFamilyLexer(RegexLexer):
_ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+'
_namespaced_ident = r'(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+'
- tokens = {
- 'whitespace': [
- # preprocessor directives: without whitespace
+ tokens = {
+ 'whitespace': [
+ # preprocessor directives: without whitespace
(r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^(' + _ws1 + r')(#if\s+0)',
- bygroups(using(this), Comment.Preproc), 'if0'),
- ('^(' + _ws1 + ')(#)',
- bygroups(using(this), Comment.Preproc), 'macro'),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^(' + _ws1 + r')(#if\s+0)',
+ bygroups(using(this), Comment.Preproc), 'if0'),
+ ('^(' + _ws1 + ')(#)',
+ bygroups(using(this), Comment.Preproc), 'macro'),
(r'(^[ \t]*)(?!(?:public|private|protected|default)\b)(case\b\s+)?(' + _ident + r')(\s*)(:)(?!:)',
bygroups(Whitespace, using(this), Name.Label, Whitespace, Punctuation)),
(r'\n', Whitespace),
(r'[^\S\n]+', Whitespace),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
(r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
             # Open until EOF, so no ending delimiter
(r'/(\\\n)?[*][\w\W]*', Comment.Multiline),
- ],
- 'statements': [
+ ],
+ 'statements': [
include('keywords'),
include('types'),
(r'([LuU]|u8)?(")', bygroups(String.Affix, String), 'string'),
@@ -78,8 +78,8 @@ class CFamilyLexer(RegexLexer):
(r'(-)?0[bB][01](\'?[01])*' + _intsuffix, Number.Bin),
(r'(-)?0(\'?[0-7])+' + _intsuffix, Number.Oct),
(r'(-)?' + _decpart + _intsuffix, Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
(r'(true|false|NULL)\b', Name.Builtin),
(_ident, Name)
],
@@ -96,101 +96,101 @@ class CFamilyLexer(RegexLexer):
'if', 'register', 'restricted', 'return', 'sizeof', 'struct',
'static', 'switch', 'typedef', 'volatile', 'while', 'union',
'thread_local', 'alignas', 'alignof', 'static_assert', '_Pragma'),
- suffix=r'\b'), Keyword),
- (words(('inline', '_inline', '__inline', 'naked', 'restrict',
+ suffix=r'\b'), Keyword),
+ (words(('inline', '_inline', '__inline', 'naked', 'restrict',
'thread'), suffix=r'\b'), Keyword.Reserved),
- # Vector intrinsics
- (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
- # Microsoft-isms
- (words((
+ # Vector intrinsics
+ (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
+ # Microsoft-isms
+ (words((
'asm', 'based', 'except', 'stdcall', 'cdecl',
'fastcall', 'declspec', 'finally', 'try',
'leave', 'w64', 'unaligned', 'raise', 'noop',
- 'identifier', 'forceinline', 'assume'),
+ 'identifier', 'forceinline', 'assume'),
prefix=r'__', suffix=r'\b'), Keyword.Reserved)
- ],
- 'root': [
- include('whitespace'),
+ ],
+ 'root': [
+ include('whitespace'),
include('keywords'),
- # functions
+ # functions
(r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
r'(' + _namespaced_ident + r')' # method name
r'(\s*\([^;]*?\))' # signature
- r'([^;{]*)(\{)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
+ r'([^;{]*)(\{)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
(r'(' + _namespaced_ident + r'(?:[&*\s])+)' # return arguments
r'(' + _namespaced_ident + r')' # method name
r'(\s*\([^;]*?\))' # signature
- r'([^;]*)(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
+ r'([^;]*)(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
include('types'),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
(r'\}', Punctuation),
(r'[{;]', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
(r'('+_ws1+r')(include)('+_ws1+r')("[^"]+")([^\n]*)',
bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
(r'('+_ws1+r')(include)('+_ws1+r')(<[^>]+>)([^\n]*)',
bygroups(using(this), Comment.Preproc, using(this), Comment.PreprocFile, Comment.Single)),
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
],
'classname': [
(_ident, Name.Class, '#pop'),
# template specification
(r'\s*(?=>)', Text, '#pop'),
default('#pop')
- ]
- }
-
+ ]
+ }
+
stdlib_types = {
- 'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
- 'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
+ 'size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t', 'sig_atomic_t', 'fpos_t',
+ 'clock_t', 'time_t', 'va_list', 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t',
'mbstate_t', 'wctrans_t', 'wint_t', 'wctype_t'}
c99_types = {
'int8_t', 'int16_t', 'int32_t', 'int64_t', 'uint8_t',
- 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
- 'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
- 'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
- 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
+ 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t', 'int_least16_t',
+ 'int_least32_t', 'int_least64_t', 'uint_least8_t', 'uint_least16_t',
+ 'uint_least32_t', 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
+ 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t', 'uint_fast64_t',
'intptr_t', 'uintptr_t', 'intmax_t', 'uintmax_t'}
linux_types = {
- 'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
- 'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
+ 'clockid_t', 'cpu_set_t', 'cpumask_t', 'dev_t', 'gid_t', 'id_t', 'ino_t', 'key_t',
+ 'mode_t', 'nfds_t', 'pid_t', 'rlim_t', 'sig_t', 'sighandler_t', 'siginfo_t',
'sigset_t', 'sigval_t', 'socklen_t', 'timer_t', 'uid_t'}
c11_atomic_types = {
'atomic_bool', 'atomic_char', 'atomic_schar', 'atomic_uchar', 'atomic_short',
@@ -203,32 +203,32 @@ class CFamilyLexer(RegexLexer):
'atomic_int_fast32_t', 'atomic_uint_fast32_t', 'atomic_int_fast64_t',
'atomic_uint_fast64_t', 'atomic_intptr_t', 'atomic_uintptr_t', 'atomic_size_t',
'atomic_ptrdiff_t', 'atomic_intmax_t', 'atomic_uintmax_t'}
-
- def __init__(self, **options):
- self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
- self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
+
+ def __init__(self, **options):
+ self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
+ self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
self.c11highlighting = get_bool_opt(options, 'c11highlighting', True)
- self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.stdlibhighlighting and value in self.stdlib_types:
- token = Keyword.Type
- elif self.c99highlighting and value in self.c99_types:
- token = Keyword.Type
+ self.platformhighlighting = get_bool_opt(options, 'platformhighlighting', True)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.stdlibhighlighting and value in self.stdlib_types:
+ token = Keyword.Type
+ elif self.c99highlighting and value in self.c99_types:
+ token = Keyword.Type
elif self.c11highlighting and value in self.c11_atomic_types:
token = Keyword.Type
- elif self.platformhighlighting and value in self.linux_types:
- token = Keyword.Type
- yield index, token, value
-
-
-class CLexer(CFamilyLexer):
- """
- For C source code with preprocessor directives.
+ elif self.platformhighlighting and value in self.linux_types:
+ token = Keyword.Type
+ yield index, token, value
+
+
+class CLexer(CFamilyLexer):
+ """
+ For C source code with preprocessor directives.
Additional options accepted:
@@ -248,13 +248,13 @@ class CLexer(CFamilyLexer):
`platformhighlighting`
Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
(default: ``True``).
- """
- name = 'C'
- aliases = ['c']
+ """
+ name = 'C'
+ aliases = ['c']
filenames = ['*.c', '*.h', '*.idc', '*.x[bp]m']
mimetypes = ['text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap']
- priority = 0.1
-
+ priority = 0.1
+
tokens = {
'keywords': [
(words((
@@ -269,16 +269,16 @@ class CLexer(CFamilyLexer):
]
}
- def analyse_text(text):
+ def analyse_text(text):
if re.search(r'^\s*#include [<"]', text, re.MULTILINE):
- return 0.1
+ return 0.1
if re.search(r'^\s*#ifn?def ', text, re.MULTILINE):
- return 0.1
-
-
-class CppLexer(CFamilyLexer):
- """
- For C++ source code with preprocessor directives.
+ return 0.1
+
+
+class CppLexer(CFamilyLexer):
+ """
+ For C++ source code with preprocessor directives.
Additional options accepted:
@@ -298,39 +298,39 @@ class CppLexer(CFamilyLexer):
`platformhighlighting`
Highlight common types found in the platform SDK headers (e.g. `clockid_t` on Linux).
(default: ``True``).
- """
- name = 'C++'
- aliases = ['cpp', 'c++']
- filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
- '*.cc', '*.hh', '*.cxx', '*.hxx',
- '*.C', '*.H', '*.cp', '*.CPP']
- mimetypes = ['text/x-c++hdr', 'text/x-c++src']
- priority = 0.1
-
- tokens = {
- 'statements': [
+ """
+ name = 'C++'
+ aliases = ['cpp', 'c++']
+ filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
+ '*.cc', '*.hh', '*.cxx', '*.hxx',
+ '*.C', '*.H', '*.cp', '*.CPP']
+ mimetypes = ['text/x-c++hdr', 'text/x-c++src']
+ priority = 0.1
+
+ tokens = {
+ 'statements': [
# C++11 raw strings
(r'((?:[LuU]|u8)?R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")',
bygroups(String.Affix, String, String.Delimiter, String.Delimiter,
String, String.Delimiter, String)),
- inherit,
- ],
- 'root': [
- inherit,
- # C++ Microsoft-isms
- (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
- 'multiple_inheritance', 'interface', 'event'),
- prefix=r'__', suffix=r'\b'), Keyword.Reserved),
- # Offload C++ extensions, http://offload.codeplay.com/
- (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
- ],
+ inherit,
+ ],
+ 'root': [
+ inherit,
+ # C++ Microsoft-isms
+ (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
+ 'multiple_inheritance', 'interface', 'event'),
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved),
+ # Offload C++ extensions, http://offload.codeplay.com/
+ (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
+ ],
'enumname': [
include('whitespace'),
# 'enum class' and 'enum struct' C++11 support
(words(('class', 'struct'), suffix=r'\b'), Keyword),
(CFamilyLexer._ident, Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
default('#pop')
],
'keywords': [
@@ -359,10 +359,10 @@ class CppLexer(CFamilyLexer):
(CFamilyLexer._ident, Name.Namespace),
include('statement')
]
- }
-
- def analyse_text(text):
+ }
+
+ def analyse_text(text):
if re.search('#include <[a-z_]+>', text):
- return 0.2
- if re.search('using namespace ', text):
- return 0.4
+ return 0.2
+ if re.search('using namespace ', text):
+ return 0.4
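As a reference for the CLexer/CppLexer hunks above, here is a minimal usage sketch (illustration only, not part of the patch; the sample string is made up) showing how these lexers are normally driven through the public Pygments API:

    from pygments import highlight
    from pygments.lexers.c_cpp import CppLexer
    from pygments.formatters import TerminalFormatter

    # Tiny C++ sample; the analyse_text() heuristics above would also
    # recognise it via '#include <...>' and 'using namespace '.
    code = '#include <vector>\nusing namespace std;\nint main() { return 0; }\n'

    # Tokens flow through the CFamilyLexer rules, then
    # get_tokens_unprocessed() retags stdlib/C99/platform type names.
    print(highlight(code, CppLexer(), TerminalFormatter()))
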
diff --git a/contrib/python/Pygments/py3/pygments/lexers/c_like.py b/contrib/python/Pygments/py3/pygments/lexers/c_like.py
index ed3864832d..69d0a58520 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/c_like.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/c_like.py
@@ -1,291 +1,291 @@
-"""
- pygments.lexers.c_like
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for other C-like languages.
-
+"""
+ pygments.lexers.c_like
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for other C-like languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
- default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, inherit, words, \
+ default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers import _mql_builtins
-
-__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers import _mql_builtins
+
+__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer',
'OmgIdlLexer']
-
-
-class PikeLexer(CppLexer):
- """
- For `Pike <http://pike.lysator.liu.se/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Pike'
- aliases = ['pike']
- filenames = ['*.pike', '*.pmod']
- mimetypes = ['text/x-pike']
-
- tokens = {
- 'statements': [
- (words((
- 'catch', 'new', 'private', 'protected', 'public', 'gauge',
- 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
- 'this', 'super', 'constant', 'final', 'static', 'import', 'use', 'extern',
- 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for',
- 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null',
- '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__',
- '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__',
- '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__',
- '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'),
- Keyword),
- (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
- r'array|multiset|program|function|lambda|mixed|'
- r'[a-z_][a-z0-9_]*_t)\b',
- Keyword.Type),
+
+
+class PikeLexer(CppLexer):
+ """
+ For `Pike <http://pike.lysator.liu.se/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Pike'
+ aliases = ['pike']
+ filenames = ['*.pike', '*.pmod']
+ mimetypes = ['text/x-pike']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'catch', 'new', 'private', 'protected', 'public', 'gauge',
+ 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'constant', 'final', 'static', 'import', 'use', 'extern',
+ 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for',
+ 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null',
+ '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__',
+ '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__',
+ '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__',
+ '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'),
+ Keyword),
+ (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
+ r'array|multiset|program|function|lambda|mixed|'
+ r'[a-z_][a-z0-9_]*_t)\b',
+ Keyword.Type),
(r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (r'[~!%^&*+=|?:<>/@-]', Operator),
- inherit,
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
+ (r'[~!%^&*+=|?:<>/@-]', Operator),
+ inherit,
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
(r'\s*(?=>)', Whitespace, '#pop'),
- ],
- }
-
-
-class NesCLexer(CLexer):
- """
- For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
- directives.
-
- .. versionadded:: 2.0
- """
- name = 'nesC'
- aliases = ['nesc']
- filenames = ['*.nc']
- mimetypes = ['text/x-nescsrc']
-
- tokens = {
- 'statements': [
- (words((
- 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component',
- 'components', 'configuration', 'event', 'extends', 'generic',
- 'implementation', 'includes', 'interface', 'module', 'new', 'norace',
- 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'),
- Keyword),
- (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t',
- 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t',
- 'nx_uint64_t'), suffix=r'\b'),
- Keyword.Type),
- inherit,
- ],
- }
-
-
-class ClayLexer(RegexLexer):
- """
- For `Clay <http://claylabs.com/clay/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Clay'
- filenames = ['*.clay']
- aliases = ['clay']
- mimetypes = ['text/x-clay']
- tokens = {
- 'root': [
+ ],
+ }
+
+
+class NesCLexer(CLexer):
+ """
+ For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
+ directives.
+
+ .. versionadded:: 2.0
+ """
+ name = 'nesC'
+ aliases = ['nesc']
+ filenames = ['*.nc']
+ mimetypes = ['text/x-nescsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component',
+ 'components', 'configuration', 'event', 'extends', 'generic',
+ 'implementation', 'includes', 'interface', 'module', 'new', 'norace',
+ 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'),
+ Keyword),
+ (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t',
+ 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t',
+ 'nx_uint64_t'), suffix=r'\b'),
+ Keyword.Type),
+ inherit,
+ ],
+ }
+
+
+class ClayLexer(RegexLexer):
+ """
+ For `Clay <http://claylabs.com/clay/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Clay'
+ filenames = ['*.clay']
+ aliases = ['clay']
+ mimetypes = ['text/x-clay']
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
(r'//.*?$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\b(public|private|import|as|record|variant|instance'
- r'|define|overload|default|external|alias'
- r'|rvalue|ref|forward|inline|noinline|forceinline'
- r'|enum|var|and|or|not|if|else|goto|return|while'
- r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
- r'|finally|onerror|staticassert|eval|when|newtype'
- r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
- r')\b', Keyword),
- (r'[~!%^&*+=|:<>/-]', Operator),
- (r'[#(){}\[\],;.]', Punctuation),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\b(true|false)\b', Name.Builtin),
- (r'(?i)[a-z_?][\w?]*', Name),
- (r'"""', String, 'tdqs'),
- (r'"', String, 'dqs'),
- ],
- 'strings': [
- (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
- (r'.', String),
- ],
- 'nl': [
- (r'\n', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings'),
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl'),
- ],
- }
-
-
-class ECLexer(CLexer):
- """
- For eC source code with preprocessor directives.
-
- .. versionadded:: 1.5
- """
- name = 'eC'
- aliases = ['ec']
- filenames = ['*.ec', '*.eh']
- mimetypes = ['text/x-echdr', 'text/x-ecsrc']
-
- tokens = {
- 'statements': [
- (words((
- 'virtual', 'class', 'private', 'public', 'property', 'import',
- 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get',
- 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass',
- '__on_register_module', 'namespace', 'using', 'typed_object',
- 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers',
- 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset',
- 'class_default_property', 'property_category', 'class_data',
- 'class_property', 'thisclass', 'dbtable', 'dbindex',
- 'database_open', 'dbfield'), suffix=r'\b'), Keyword),
- (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte',
- 'unichar', 'int64'), suffix=r'\b'),
- Keyword.Type),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\b(public|private|import|as|record|variant|instance'
+ r'|define|overload|default|external|alias'
+ r'|rvalue|ref|forward|inline|noinline|forceinline'
+ r'|enum|var|and|or|not|if|else|goto|return|while'
+ r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
+ r'|finally|onerror|staticassert|eval|when|newtype'
+ r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
+ r')\b', Keyword),
+ (r'[~!%^&*+=|:<>/-]', Operator),
+ (r'[#(){}\[\],;.]', Punctuation),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\b(true|false)\b', Name.Builtin),
+ (r'(?i)[a-z_?][\w?]*', Name),
+ (r'"""', String, 'tdqs'),
+ (r'"', String, 'dqs'),
+ ],
+ 'strings': [
+ (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
+ (r'.', String),
+ ],
+ 'nl': [
+ (r'\n', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings'),
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl'),
+ ],
+ }
+
+
+class ECLexer(CLexer):
+ """
+ For eC source code with preprocessor directives.
+
+ .. versionadded:: 1.5
+ """
+ name = 'eC'
+ aliases = ['ec']
+ filenames = ['*.ec', '*.eh']
+ mimetypes = ['text/x-echdr', 'text/x-ecsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'virtual', 'class', 'private', 'public', 'property', 'import',
+ 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get',
+ 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass',
+ '__on_register_module', 'namespace', 'using', 'typed_object',
+ 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers',
+ 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset',
+ 'class_default_property', 'property_category', 'class_data',
+ 'class_property', 'thisclass', 'dbtable', 'dbindex',
+ 'database_open', 'dbfield'), suffix=r'\b'), Keyword),
+ (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte',
+ 'unichar', 'int64'), suffix=r'\b'),
+ Keyword.Type),
(r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
- (r'(null|value|this)\b', Name.Builtin),
- inherit,
+ (r'(null|value|this)\b', Name.Builtin),
+ inherit,
]
- }
-
-
-class ValaLexer(RegexLexer):
- """
- For Vala source code with preprocessor directives.
-
- .. versionadded:: 1.1
- """
- name = 'Vala'
- aliases = ['vala', 'vapi']
- filenames = ['*.vala', '*.vapi']
- mimetypes = ['text/x-vala']
-
- tokens = {
- 'whitespace': [
- (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
+ }
+
+
+class ValaLexer(RegexLexer):
+ """
+ For Vala source code with preprocessor directives.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Vala'
+ aliases = ['vala', 'vapi']
+ filenames = ['*.vala', '*.vapi']
+ mimetypes = ['text/x-vala']
+
+ tokens = {
+ 'whitespace': [
+ (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
(r'\n', Whitespace),
(r'\s+', Whitespace),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- (r'[L@]?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'(?s)""".*?"""', String), # verbatim strings
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation)),
- # TODO: "correctly" parse complex code attributes
- (r'(\[)(CCode|(?:Integer|Floating)Type)',
- bygroups(Punctuation, Name.Decorator)),
- (r'[()\[\],.]', Punctuation),
- (words((
- 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue',
- 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for',
- 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params',
- 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try',
- 'typeof', 'while', 'yield'), suffix=r'\b'),
- Keyword),
- (words((
- 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern',
- 'inline', 'internal', 'override', 'owned', 'private', 'protected',
- 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned',
- 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'),
- Keyword.Declaration),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ (r'[L@]?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'(?s)""".*?"""', String), # verbatim strings
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
+ bygroups(Punctuation, Name.Decorator, Punctuation)),
+ # TODO: "correctly" parse complex code attributes
+ (r'(\[)(CCode|(?:Integer|Floating)Type)',
+ bygroups(Punctuation, Name.Decorator)),
+ (r'[()\[\],.]', Punctuation),
+ (words((
+ 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue',
+ 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for',
+ 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params',
+ 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try',
+ 'typeof', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern',
+ 'inline', 'internal', 'override', 'owned', 'private', 'protected',
+ 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned',
+ 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'),
+ Keyword.Declaration),
(r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Whitespace),
- 'namespace'),
- (r'(class|errordomain|interface|struct)(\s+)',
+ 'namespace'),
+ (r'(class|errordomain|interface|struct)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'class'),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- # void is an actual keyword, others are in glib-2.0.vapi
- (words((
- 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16',
- 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string',
- 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'ulong', 'unichar', 'ushort'), suffix=r'\b'),
- Keyword.Type),
- (r'(true|false|null)\b', Name.Builtin),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ # void is an actual keyword, others are in glib-2.0.vapi
+ (words((
+ 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16',
+ 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string',
+ 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'ulong', 'unichar', 'ushort'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false|null)\b', Name.Builtin),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'namespace': [
- (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
- ],
- }
-
-
-class CudaLexer(CLexer):
- """
- For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
- source.
-
- .. versionadded:: 1.6
- """
- name = 'CUDA'
- filenames = ['*.cu', '*.cuh']
- aliases = ['cuda', 'cu']
- mimetypes = ['text/x-cuda']
-
+ ],
+ 'root': [
+ include('whitespace'),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class CudaLexer(CLexer):
+ """
+ For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
+ source.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CUDA'
+ filenames = ['*.cu', '*.cuh']
+ aliases = ['cuda', 'cu']
+ mimetypes = ['text/x-cuda']
+
function_qualifiers = {'__device__', '__global__', '__host__',
'__noinline__', '__forceinline__'}
variable_qualifiers = {'__device__', '__constant__', '__shared__',
@@ -303,137 +303,137 @@ class CudaLexer(CLexer):
'__syncthreads', '__syncthreads_count', '__syncthreads_and',
'__syncthreads_or'}
execution_confs = {'<<<', '>>>'}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in CLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.variable_qualifiers:
- token = Keyword.Type
- elif value in self.vector_types:
- token = Keyword.Type
- elif value in self.variables:
- token = Name.Builtin
- elif value in self.execution_confs:
- token = Keyword.Pseudo
- elif value in self.function_qualifiers:
- token = Keyword.Reserved
- elif value in self.functions:
- token = Name.Function
- yield index, token, value
-
-
-class SwigLexer(CppLexer):
- """
- For `SWIG <http://www.swig.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'SWIG'
- aliases = ['swig']
- filenames = ['*.swg', '*.i']
- mimetypes = ['text/swig']
- priority = 0.04 # Lower than C/C++ and Objective C/C++
-
- tokens = {
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in CLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.variable_qualifiers:
+ token = Keyword.Type
+ elif value in self.vector_types:
+ token = Keyword.Type
+ elif value in self.variables:
+ token = Name.Builtin
+ elif value in self.execution_confs:
+ token = Keyword.Pseudo
+ elif value in self.function_qualifiers:
+ token = Keyword.Reserved
+ elif value in self.functions:
+ token = Name.Function
+ yield index, token, value
+
+
+class SwigLexer(CppLexer):
+ """
+ For `SWIG <http://www.swig.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'SWIG'
+ aliases = ['swig']
+ filenames = ['*.swg', '*.i']
+ mimetypes = ['text/swig']
+ priority = 0.04 # Lower than C/C++ and Objective C/C++
+
+ tokens = {
'root': [
# Match it here so it won't be matched as a function in the rest of root
(r'\$\**\&?\w+', Name),
inherit
],
- 'statements': [
- # SWIG directives
- (r'(%[a-z_][a-z0-9_]*)', Name.Function),
- # Special variables
+ 'statements': [
+ # SWIG directives
+ (r'(%[a-z_][a-z0-9_]*)', Name.Function),
+ # Special variables
(r'\$\**\&?\w+', Name),
- # Stringification / additional preprocessor directives
- (r'##*[a-zA-Z_]\w*', Comment.Preproc),
- inherit,
- ],
- }
-
- # This is a far from complete set of SWIG directives
+ # Stringification / additional preprocessor directives
+ (r'##*[a-zA-Z_]\w*', Comment.Preproc),
+ inherit,
+ ],
+ }
+
+ # This is a far from complete set of SWIG directives
swig_directives = {
- # Most common directives
- '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
- '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
- '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
- '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
- # Less common directives
- '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
- '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
- '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
- '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
- '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
- '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
- '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
- '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
- '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
- '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
- '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
- '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
+ # Most common directives
+ '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
+ '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
+ '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
+ '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
+ # Less common directives
+ '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
+ '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
+ '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
+ '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
+ '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
+ '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
+ '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
+ '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
+ '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
+ '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
+ '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
+ '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
'%warnfilter'}
-
- def analyse_text(text):
- rv = 0
- # Search for SWIG directives, which are conventionally at the beginning of
- # a line. The probability of them being within a line is low, so let another
- # lexer win in this case.
- matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
- for m in matches:
- if m in SwigLexer.swig_directives:
- rv = 0.98
- break
- else:
- rv = 0.91 # Fraction higher than MatlabLexer
- return rv
-
-
-class MqlLexer(CppLexer):
- """
- For `MQL4 <http://docs.mql4.com/>`_ and
- `MQL5 <http://www.mql5.com/en/docs>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'MQL'
- aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
- filenames = ['*.mq4', '*.mq5', '*.mqh']
- mimetypes = ['text/x-mql']
-
- tokens = {
- 'statements': [
- (words(_mql_builtins.keywords, suffix=r'\b'), Keyword),
- (words(_mql_builtins.c_types, suffix=r'\b'), Keyword.Type),
- (words(_mql_builtins.types, suffix=r'\b'), Name.Function),
- (words(_mql_builtins.constants, suffix=r'\b'), Name.Constant),
- (words(_mql_builtins.colors, prefix='(clr)?', suffix=r'\b'),
- Name.Constant),
- inherit,
- ],
- }
-
-
-class ArduinoLexer(CppLexer):
- """
- For `Arduino(tm) <https://arduino.cc/>`_ source.
-
- This is an extension of the CppLexer, as the Arduino® Language is a superset
- of C++
-
- .. versionadded:: 2.1
- """
-
- name = 'Arduino'
- aliases = ['arduino']
- filenames = ['*.ino']
- mimetypes = ['text/x-arduino']
-
- # Language sketch main structure functions
+
+ def analyse_text(text):
+ rv = 0
+ # Search for SWIG directives, which are conventionally at the beginning of
+ # a line. The probability of them being within a line is low, so let another
+ # lexer win in this case.
+ matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
+ for m in matches:
+ if m in SwigLexer.swig_directives:
+ rv = 0.98
+ break
+ else:
+ rv = 0.91 # Fraction higher than MatlabLexer
+ return rv
+
+
+class MqlLexer(CppLexer):
+ """
+ For `MQL4 <http://docs.mql4.com/>`_ and
+ `MQL5 <http://www.mql5.com/en/docs>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'MQL'
+ aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
+ filenames = ['*.mq4', '*.mq5', '*.mqh']
+ mimetypes = ['text/x-mql']
+
+ tokens = {
+ 'statements': [
+ (words(_mql_builtins.keywords, suffix=r'\b'), Keyword),
+ (words(_mql_builtins.c_types, suffix=r'\b'), Keyword.Type),
+ (words(_mql_builtins.types, suffix=r'\b'), Name.Function),
+ (words(_mql_builtins.constants, suffix=r'\b'), Name.Constant),
+ (words(_mql_builtins.colors, prefix='(clr)?', suffix=r'\b'),
+ Name.Constant),
+ inherit,
+ ],
+ }
+
+
+class ArduinoLexer(CppLexer):
+ """
+ For `Arduino(tm) <https://arduino.cc/>`_ source.
+
+ This is an extension of the CppLexer, as the Arduino® Language is a superset
+ of C++
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Arduino'
+ aliases = ['arduino']
+ filenames = ['*.ino']
+ mimetypes = ['text/x-arduino']
+
+ # Language sketch main structure functions
structure = {'setup', 'loop'}
-
+
# Language operators
operators = {'not', 'or', 'and', 'xor'}
-
+
# Language 'variables'
variables = {
'DIGITAL_MESSAGE', 'FIRMATA_STRING', 'ANALOG_MESSAGE', 'REPORT_DIGITAL',
@@ -454,7 +454,7 @@ class ArduinoLexer(CppLexer):
'atomic_ushort', 'atomic_int', 'atomic_uint', 'atomic_long', 'atomic_ulong',
'atomic_llong', 'atomic_ullong', 'PROGMEM'}
- # Language shipped functions and class ( )
+ # Language shipped functions and class ( )
functions = {
'KeyboardController', 'MouseController', 'SoftwareSerial', 'EthernetServer',
'EthernetClient', 'LiquidCrystal', 'RobotControl', 'GSMVoiceCall',
@@ -518,15 +518,15 @@ class ArduinoLexer(CppLexer):
'isAlphaNumeric', 'isAlpha', 'isAscii', 'isWhitespace', 'isControl', 'isDigit',
'isGraph', 'isLowerCase', 'isPrintable', 'isPunct', 'isSpace', 'isUpperCase',
'isHexadecimalDigit'}
-
+
# do not highlight
suppress_highlight = {
'namespace', 'template', 'mutable', 'using', 'asm', 'typeid',
'typename', 'this', 'alignof', 'constexpr', 'decltype', 'noexcept',
'static_assert', 'thread_local', 'restrict'}
- def get_tokens_unprocessed(self, text):
- for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
if value in self.structure:
yield index, Name.Builtin, value
elif value in self.operators:
@@ -537,8 +537,8 @@ class ArduinoLexer(CppLexer):
yield index, Name, value
elif value in self.functions:
yield index, Name.Function, value
- else:
- yield index, token, value
+ else:
+ yield index, token, value
class CharmciLexer(CppLexer):
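The CudaLexer and ArduinoLexer hunks above follow the same pattern: subclass an existing lexer and retag selected Name tokens inside get_tokens_unprocessed(). A minimal sketch of that pattern (the class name and word set are invented for illustration, not part of the patch):

    from pygments.lexers.c_cpp import CppLexer
    from pygments.token import Keyword, Name

    class MyDialectLexer(CppLexer):
        """Hypothetical C++ dialect with a couple of extra reserved words."""
        name = 'MyDialect'
        aliases = ['mydialect']
        extra_keywords = {'fiber', 'channel'}

        def get_tokens_unprocessed(self, text):
            # Reuse the full C++ tokenizer, then retag plain identifiers.
            for index, token, value in CppLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in self.extra_keywords:
                    yield index, Keyword.Reserved, value
                else:
                    yield index, token, value
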
diff --git a/contrib/python/Pygments/py3/pygments/lexers/chapel.py b/contrib/python/Pygments/py3/pygments/lexers/chapel.py
index ad25981d8c..0216bfea2b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/chapel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/chapel.py
@@ -1,31 +1,31 @@
-"""
- pygments.lexers.chapel
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Chapel language.
-
+"""
+ pygments.lexers.chapel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Chapel language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['ChapelLexer']
-
-
-class ChapelLexer(RegexLexer):
- """
+
+__all__ = ['ChapelLexer']
+
+
+class ChapelLexer(RegexLexer):
+ """
For `Chapel <https://chapel-lang.org/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Chapel'
- filenames = ['*.chpl']
- aliases = ['chapel', 'chpl']
- # mimetypes = ['text/x-chapel']
-
+
+ .. versionadded:: 2.0
+ """
+ name = 'Chapel'
+ filenames = ['*.chpl']
+ aliases = ['chapel', 'chpl']
+ # mimetypes = ['text/x-chapel']
+
known_types = ('bool', 'bytes', 'complex', 'imag', 'int', 'locale',
'nothing', 'opaque', 'range', 'real', 'string', 'uint',
'void')
@@ -58,15 +58,15 @@ class ChapelLexer(RegexLexer):
'yield',
'zip')
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
- (r'\\\n', Text),
-
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-
+ (r'\\\n', Text),
+
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+
(words(declarations, suffix=r'\b'), Keyword.Declaration),
(words(constants, suffix=r'\b'), Keyword.Constant),
(words(known_types, suffix=r'\b'), Keyword.Type),
@@ -76,48 +76,48 @@ class ChapelLexer(RegexLexer):
(r'(proc)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
(r'(operator)(\s+)', bygroups(Keyword, Whitespace), 'procname'),
(r'(class|interface|module|record|union)(\s+)', bygroups(Keyword, Whitespace),
- 'classname'),
-
- # imaginary integers
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
-
- # reals cannot end with a period due to lexical ambiguity with
- # .. operator. See reference for rationale.
- (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
-
- # integer literals
- # -- binary
- (r'0[bB][01]+', Number.Bin),
- # -- hex
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- octal
- (r'0[oO][0-7]+', Number.Oct),
- # -- decimal
- (r'[0-9]+', Number.Integer),
-
- # strings
+ 'classname'),
+
+ # imaginary integers
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+
+ # reals cannot end with a period due to lexical ambiguity with
+ # .. operator. See reference for rationale.
+ (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
+
+ # integer literals
+ # -- binary
+ (r'0[bB][01]+', Number.Bin),
+ # -- hex
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- octal
+ (r'0[oO][0-7]+', Number.Oct),
+ # -- decimal
+ (r'[0-9]+', Number.Integer),
+
+ # strings
(r'"(\\\\|\\"|[^"])*"', String),
(r"'(\\\\|\\'|[^'])*'", String),
-
- # tokens
- (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
- r'<=>|<~>|\.\.|by|#|\.\.\.|'
- r'&&|\|\||!|&|\||\^|~|<<|>>|'
- r'==|!=|<=|>=|<|>|'
- r'[+\-*/%]|\*\*)', Operator),
- (r'[:;,.?()\[\]{}]', Punctuation),
-
- # identifiers
- (r'[a-zA-Z_][\w$]*', Name.Other),
- ],
- 'classname': [
- (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
- ],
- 'procname': [
+
+ # tokens
+ (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
+ r'<=>|<~>|\.\.|by|#|\.\.\.|'
+ r'&&|\|\||!|&|\||\^|~|<<|>>|'
+ r'==|!=|<=|>=|<|>|'
+ r'[+\-*/%]|\*\*)', Operator),
+ (r'[:;,.?()\[\]{}]', Punctuation),
+
+ # identifiers
+ (r'[a-zA-Z_][\w$]*', Name.Other),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
+ ],
+ 'procname': [
(r'([a-zA-Z_][.\w$]*|' # regular function name, including secondary
r'\~[a-zA-Z_][.\w$]*|' # support for legacy destructors
r'[+*/!~%<>=&^|\-:]{1,2})', # operators
@@ -126,10 +126,10 @@ class ChapelLexer(RegexLexer):
# allow `proc (atomic T).foo`
(r'\(', Punctuation, "receivertype"),
(r'\)+\.', Punctuation),
- ],
+ ],
'receivertype': [
(words(type_modifiers, suffix=r'\b'), Keyword),
(words(known_types, suffix=r'\b'), Keyword.Type),
(r'[^()]*', Name.Other, '#pop'),
],
- }
+ }
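The name/aliases/filenames/analyse_text attributes edited throughout these hunks are what Pygments' lexer lookup consumes. A small, hedged example (file name and source string are made up):

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename, guess_lexer

    lexer = get_lexer_by_name('chapel')            # matches ChapelLexer.aliases
    lexer = get_lexer_for_filename('hello.chpl')   # matches ChapelLexer.filenames

    # guess_lexer() asks every lexer's analyse_text() for a confidence score;
    # the 'priority' attribute seen in CLexer/CppLexer/SwigLexer helps break
    # ties when several lexers claim the same content.
    lexer = guess_lexer('#include <vector>\nusing namespace std;\n')
    print(lexer.name)
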
diff --git a/contrib/python/Pygments/py3/pygments/lexers/compiled.py b/contrib/python/Pygments/py3/pygments/lexers/compiled.py
index 13aa39ce2d..23e8ac07a0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/compiled.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/compiled.py
@@ -1,33 +1,33 @@
-"""
- pygments.lexers.compiled
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+"""
+ pygments.lexers.compiled
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.jvm import JavaLexer, ScalaLexer
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers.d import DLexer
-from pygments.lexers.objective import ObjectiveCLexer, \
- ObjectiveCppLexer, LogosLexer
-from pygments.lexers.go import GoLexer
-from pygments.lexers.rust import RustLexer
-from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
-from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
-from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
-from pygments.lexers.fortran import FortranLexer
-from pygments.lexers.prolog import PrologLexer
-from pygments.lexers.python import CythonLexer
-from pygments.lexers.graphics import GLShaderLexer
-from pygments.lexers.ml import OcamlLexer
-from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
-from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
-from pygments.lexers.ooc import OocLexer
-from pygments.lexers.felix import FelixLexer
-from pygments.lexers.nimrod import NimrodLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.jvm import JavaLexer, ScalaLexer
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.d import DLexer
+from pygments.lexers.objective import ObjectiveCLexer, \
+ ObjectiveCppLexer, LogosLexer
+from pygments.lexers.go import GoLexer
+from pygments.lexers.rust import RustLexer
+from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
+from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
+from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
+from pygments.lexers.fortran import FortranLexer
+from pygments.lexers.prolog import PrologLexer
+from pygments.lexers.python import CythonLexer
+from pygments.lexers.graphics import GLShaderLexer
+from pygments.lexers.ml import OcamlLexer
+from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
+from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
+from pygments.lexers.ooc import OocLexer
+from pygments.lexers.felix import FelixLexer
+from pygments.lexers.nimrod import NimrodLexer
from pygments.lexers.crystal import CrystalLexer
-
-__all__ = []
+
+__all__ = []
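compiled.py above is purely a re-export shim, so legacy import paths keep resolving to the relocated lexers; a one-line sanity check (illustrative only):

    # The old module path still yields the lexer now defined in c_cpp.py.
    from pygments.lexers.compiled import CLexer
    assert CLexer.__module__ == 'pygments.lexers.c_cpp'
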
diff --git a/contrib/python/Pygments/py3/pygments/lexers/configs.py b/contrib/python/Pygments/py3/pygments/lexers/configs.py
index 99fab14860..6af01e773d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/configs.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/configs.py
@@ -1,37 +1,37 @@
-"""
- pygments.lexers.configs
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for configuration file formats.
-
+"""
+ pygments.lexers.configs
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for configuration file formats.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import ExtendedRegexLexer, RegexLexer, default, words, \
bygroups, include, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace, Literal, Error, Generic
-from pygments.lexers.shell import BashLexer
+from pygments.lexers.shell import BashLexer
from pygments.lexers.data import JsonLexer
-
-__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
- 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
- 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
- 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
+
+__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
+ 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
+ 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer',
+ 'TerraformLexer', 'TermcapLexer', 'TerminfoLexer',
'PkgConfigLexer', 'PacmanConfLexer', 'AugeasLexer', 'TOMLLexer',
'NestedTextLexer', 'SingularityLexer']
-
-
-class IniLexer(RegexLexer):
- """
- Lexer for configuration files in INI style.
- """
-
- name = 'INI'
- aliases = ['ini', 'cfg', 'dosini']
+
+
+class IniLexer(RegexLexer):
+ """
+ Lexer for configuration files in INI style.
+ """
+
+ name = 'INI'
+ aliases = ['ini', 'cfg', 'dosini']
filenames = [
'*.ini', '*.cfg', '*.inf', '.editorconfig',
# systemd unit files
@@ -39,273 +39,273 @@ class IniLexer(RegexLexer):
'*.service', '*.socket', '*.device', '*.mount', '*.automount',
'*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope',
]
- mimetypes = ['text/x-ini', 'text/inf']
-
- tokens = {
- 'root': [
+ mimetypes = ['text/x-ini', 'text/inf']
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'[;#].*', Comment.Single),
- (r'\[.*?\]$', Keyword),
+ (r'[;#].*', Comment.Single),
+ (r'\[.*?\]$', Keyword),
(r'(.*?)([ \t]*)(=)([ \t]*)([^\t\n]*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
# standalone option, supported by some INI parsers
(r'(.+?)$', Name.Attribute),
],
- }
-
- def analyse_text(text):
- npos = text.find('\n')
- if npos < 3:
- return False
- return text[0] == '[' and text[npos-1] == ']'
-
-
-class RegeditLexer(RegexLexer):
- """
- Lexer for `Windows Registry
- <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
- by regedit.
-
- .. versionadded:: 1.6
- """
-
- name = 'reg'
- aliases = ['registry']
- filenames = ['*.reg']
- mimetypes = ['text/x-windows-registry']
-
- tokens = {
- 'root': [
- (r'Windows Registry Editor.*', Text),
+ }
+
+ def analyse_text(text):
+ npos = text.find('\n')
+ if npos < 3:
+ return False
+ return text[0] == '[' and text[npos-1] == ']'
+
+
+class RegeditLexer(RegexLexer):
+ """
+ Lexer for `Windows Registry
+ <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
+ by regedit.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'reg'
+ aliases = ['registry']
+ filenames = ['*.reg']
+ mimetypes = ['text/x-windows-registry']
+
+ tokens = {
+ 'root': [
+ (r'Windows Registry Editor.*', Text),
(r'\s+', Whitespace),
- (r'[;#].*', Comment.Single),
- (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
- bygroups(Keyword, Operator, Name.Builtin, Keyword)),
- # String keys, which obey somewhat normal escaping
- (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
+ (r'[;#].*', Comment.Single),
+ (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
+ bygroups(Keyword, Operator, Name.Builtin, Keyword)),
+ # String keys, which obey somewhat normal escaping
+ (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace),
- 'value'),
- # Bare keys (includes @)
- (r'(.*?)([ \t]*)(=)([ \t]*)',
+ 'value'),
+ # Bare keys (includes @)
+ (r'(.*?)([ \t]*)(=)([ \t]*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace),
- 'value'),
- ],
- 'value': [
- (r'-', Operator, '#pop'), # delete value
- (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
- bygroups(Name.Variable, Punctuation, Number), '#pop'),
- # As far as I know, .reg files do not support line continuation.
- (r'.+', String, '#pop'),
- default('#pop'),
- ]
- }
-
- def analyse_text(text):
- return text.startswith('Windows Registry Editor')
-
-
-class PropertiesLexer(RegexLexer):
- """
- Lexer for configuration files in Java's properties format.
-
+ 'value'),
+ ],
+ 'value': [
+ (r'-', Operator, '#pop'), # delete value
+ (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
+ bygroups(Name.Variable, Punctuation, Number), '#pop'),
+ # As far as I know, .reg files do not support line continuation.
+ (r'.+', String, '#pop'),
+ default('#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith('Windows Registry Editor')
+
+
+class PropertiesLexer(RegexLexer):
+ """
+ Lexer for configuration files in Java's properties format.
+
Note: trailing whitespace counts as part of the value as per spec
- .. versionadded:: 1.4
- """
-
- name = 'Properties'
- aliases = ['properties', 'jproperties']
- filenames = ['*.properties']
- mimetypes = ['text/x-java-properties']
-
- tokens = {
- 'root': [
+ .. versionadded:: 1.4
+ """
+
+ name = 'Properties'
+ aliases = ['properties', 'jproperties']
+ filenames = ['*.properties']
+ mimetypes = ['text/x-java-properties']
+
+ tokens = {
+ 'root': [
(r'^(\w+)([ \t])(\w+\s*)$', bygroups(Name.Attribute, Whitespace, String)),
(r'^\w+(\\[ \t]\w*)*$', Name.Attribute),
(r'(^ *)([#!].*)', bygroups(Whitespace, Comment)),
# More controversial comments
(r'(^ *)((?:;|//).*)', bygroups(Whitespace, Comment)),
- (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
+ (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
bygroups(Name.Attribute, Whitespace, Operator, Whitespace, String)),
(r'\s', Whitespace),
- ],
- }
-
-
-def _rx_indent(level):
- # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
- # Edit this if you are in an environment where KconfigLexer gets expanded
- # input (tabs expanded to spaces) and the expansion tab width is != 8,
- # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
- # Value range here is 2 <= {tab_width} <= 8.
- tab_width = 8
- # Regex matching a given indentation {level}, assuming that indentation is
- # a multiple of {tab_width}. In other cases there might be problems.
- if tab_width == 2:
- space_repeat = '+'
- else:
- space_repeat = '{1,%d}' % (tab_width - 1)
- if level == 1:
- level_repeat = ''
- else:
- level_repeat = '{%s}' % level
- return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
-
-
-class KconfigLexer(RegexLexer):
- """
- For Linux-style Kconfig files.
-
- .. versionadded:: 1.6
- """
-
- name = 'Kconfig'
- aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
- # Adjust this if new kconfig file names appear in your environment
+ ],
+ }
+
+
+def _rx_indent(level):
+ # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
+ # Edit this if you are in an environment where KconfigLexer gets expanded
+ # input (tabs expanded to spaces) and the expansion tab width is != 8,
+ # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
+ # Value range here is 2 <= {tab_width} <= 8.
+ tab_width = 8
+ # Regex matching a given indentation {level}, assuming that indentation is
+ # a multiple of {tab_width}. In other cases there might be problems.
+ if tab_width == 2:
+ space_repeat = '+'
+ else:
+ space_repeat = '{1,%d}' % (tab_width - 1)
+ if level == 1:
+ level_repeat = ''
+ else:
+ level_repeat = '{%s}' % level
+ return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
+
+
+class KconfigLexer(RegexLexer):
+ """
+ For Linux-style Kconfig files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Kconfig'
+ aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
+ # Adjust this if new kconfig file names appear in your environment
filenames = ['Kconfig*', '*Config.in*', 'external.in*',
- 'standard-modules.in']
- mimetypes = ['text/x-kconfig']
- # No re.MULTILINE, indentation-aware help text needs line-by-line handling
- flags = 0
-
- def call_indent(level):
- # If indentation >= {level} is detected, enter state 'indent{level}'
- return (_rx_indent(level), String.Doc, 'indent%s' % level)
-
- def do_indent(level):
- # Print paragraphs of indentation level >= {level} as String.Doc,
- # ignoring blank lines. Then return to 'root' state.
- return [
- (_rx_indent(level), String.Doc),
- (r'\s*\n', Text),
- default('#pop:2')
- ]
-
- tokens = {
- 'root': [
+ 'standard-modules.in']
+ mimetypes = ['text/x-kconfig']
+ # No re.MULTILINE, indentation-aware help text needs line-by-line handling
+ flags = 0
+
+ def call_indent(level):
+ # If indentation >= {level} is detected, enter state 'indent{level}'
+ return (_rx_indent(level), String.Doc, 'indent%s' % level)
+
+ def do_indent(level):
+ # Print paragraphs of indentation level >= {level} as String.Doc,
+ # ignoring blank lines. Then return to 'root' state.
+ return [
+ (_rx_indent(level), String.Doc),
+ (r'\s*\n', Text),
+ default('#pop:2')
+ ]
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'#.*?\n', Comment.Single),
- (words((
- 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
- 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
- 'source', 'prompt', 'select', 'depends on', 'default',
- 'range', 'option'), suffix=r'\b'),
- Keyword),
- (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
- (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
- Name.Builtin),
- (r'[!=&|]', Operator),
- (r'[()]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r'\S+', Text),
- ],
- # Help text is indented, multi-line and ends when a lower indentation
- # level is detected.
- 'help': [
- # Skip blank lines after help token, if any
- (r'\s*\n', Text),
- # Determine the first help line's indentation level heuristically(!).
- # Attention: this is not perfect, but works for 99% of "normal"
- # indentation schemes up to a max. indentation level of 7.
- call_indent(7),
- call_indent(6),
- call_indent(5),
- call_indent(4),
- call_indent(3),
- call_indent(2),
- call_indent(1),
- default('#pop'), # for incomplete help sections without text
- ],
- # Handle text for indentation levels 7 to 1
- 'indent7': do_indent(7),
- 'indent6': do_indent(6),
- 'indent5': do_indent(5),
- 'indent4': do_indent(4),
- 'indent3': do_indent(3),
- 'indent2': do_indent(2),
- 'indent1': do_indent(1),
- }
-
-
-class Cfengine3Lexer(RegexLexer):
- """
- Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
-
- .. versionadded:: 1.5
- """
-
- name = 'CFEngine3'
- aliases = ['cfengine3', 'cf3']
- filenames = ['*.cf']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'#.*?\n', Comment),
- (r'(body)(\s+)(\S+)(\s+)(control)',
+ (r'#.*?\n', Comment.Single),
+ (words((
+ 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
+ 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
+ 'source', 'prompt', 'select', 'depends on', 'default',
+ 'range', 'option'), suffix=r'\b'),
+ Keyword),
+ (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
+ (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
+ Name.Builtin),
+ (r'[!=&|]', Operator),
+ (r'[()]', Punctuation),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Double),
+ (r'\S+', Text),
+ ],
+ # Help text is indented, multi-line and ends when a lower indentation
+ # level is detected.
+ 'help': [
+ # Skip blank lines after help token, if any
+ (r'\s*\n', Text),
+ # Determine the first help line's indentation level heuristically(!).
+ # Attention: this is not perfect, but works for 99% of "normal"
+ # indentation schemes up to a max. indentation level of 7.
+ call_indent(7),
+ call_indent(6),
+ call_indent(5),
+ call_indent(4),
+ call_indent(3),
+ call_indent(2),
+ call_indent(1),
+ default('#pop'), # for incomplete help sections without text
+ ],
+ # Handle text for indentation levels 7 to 1
+ 'indent7': do_indent(7),
+ 'indent6': do_indent(6),
+ 'indent5': do_indent(5),
+ 'indent4': do_indent(4),
+ 'indent3': do_indent(3),
+ 'indent2': do_indent(2),
+ 'indent1': do_indent(1),
+ }
+
+
+class Cfengine3Lexer(RegexLexer):
+ """
+ Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'CFEngine3'
+ aliases = ['cfengine3', 'cf3']
+ filenames = ['*.cf']
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'#.*?\n', Comment),
+ (r'(body)(\s+)(\S+)(\s+)(control)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function, Punctuation),
- 'arglist'),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
+ 'arglist'),
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
- (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
- bygroups(Punctuation, Name.Variable, Punctuation,
+ (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
+ bygroups(Punctuation, Name.Variable, Punctuation,
Whitespace, Keyword.Type, Whitespace, Operator, Whitespace)),
- (r'(\S+)(\s*)(=>)(\s*)',
+ (r'(\S+)(\s*)(=>)(\s*)',
bygroups(Keyword.Reserved, Whitespace, Operator, Text)),
- (r'"', String, 'string'),
- (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
- (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
- (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
- (r'@[{(][^)}]+[})]', Name.Variable),
- (r'[(){},;]', Punctuation),
- (r'=>', Operator),
- (r'->', Operator),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\w+', Name.Function),
+ (r'"', String, 'string'),
+ (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
+ (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
+ (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
+ (r'@[{(][^)}]+[})]', Name.Variable),
+ (r'[(){},;]', Punctuation),
+ (r'=>', Operator),
+ (r'->', Operator),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\w+', Name.Function),
(r'\s+', Whitespace),
- ],
- 'string': [
- (r'\$[{(]', String.Interpol, 'interpol'),
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'\n', String),
- (r'.', String),
- ],
- 'interpol': [
- (r'\$[{(]', String.Interpol, '#push'),
- (r'[})]', String.Interpol, '#pop'),
- (r'[^${()}]+', String.Interpol),
- ],
- 'arglist': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'\w+', Name.Variable),
+ ],
+ 'string': [
+ (r'\$[{(]', String.Interpol, 'interpol'),
+ (r'\\.', String.Escape),
+ (r'"', String, '#pop'),
+ (r'\n', String),
+ (r'.', String),
+ ],
+ 'interpol': [
+ (r'\$[{(]', String.Interpol, '#push'),
+ (r'[})]', String.Interpol, '#pop'),
+ (r'[^${()}]+', String.Interpol),
+ ],
+ 'arglist': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'\w+', Name.Variable),
(r'\s+', Whitespace),
- ],
- }
-
-
-class ApacheConfLexer(RegexLexer):
- """
- Lexer for configuration files following the Apache config file
- format.
-
- .. versionadded:: 0.6
- """
-
- name = 'ApacheConf'
- aliases = ['apacheconf', 'aconf', 'apache']
- filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
- mimetypes = ['text/x-apacheconf']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
+ ],
+ }
+
+
+class ApacheConfLexer(RegexLexer):
+ """
+ Lexer for configuration files following the Apache config file
+ format.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'ApacheConf'
+ aliases = ['apacheconf', 'aconf', 'apache']
+ filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
+ mimetypes = ['text/x-apacheconf']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
(r'#(.*\\\n)+.*$|(#.*?)$', Comment),
(r'(<[^\s>/][^\s>]*)(?:(\s+)(.*))?(>)',
@@ -313,249 +313,249 @@ class ApacheConfLexer(RegexLexer):
(r'(</[^\s>]+)(>)',
bygroups(Name.Tag, Name.Tag)),
(r'[a-z]\w*', Name.Builtin, 'value'),
- (r'\.+', Text),
- ],
- 'value': [
- (r'\\\n', Text),
+ (r'\.+', Text),
+ ],
+ 'value': [
+ (r'\\\n', Text),
(r'\n+', Whitespace, '#pop'),
- (r'\\', Text),
+ (r'\\', Text),
(r'[^\S\n]+', Whitespace),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'\d+', Number),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'\d+', Number),
(r'/([*a-z0-9][*\w./-]+)', String.Other),
- (r'(on|off|none|any|all|double|email|dns|min|minimal|'
- r'os|productonly|full|emerg|alert|crit|error|warn|'
- r'notice|info|debug|registry|script|inetd|standalone|'
- r'user|group)\b', Keyword),
+ (r'(on|off|none|any|all|double|email|dns|min|minimal|'
+ r'os|productonly|full|emerg|alert|crit|error|warn|'
+ r'notice|info|debug|registry|script|inetd|standalone|'
+ r'user|group)\b', Keyword),
(r'"([^"\\]*(?:\\(.|\n)[^"\\]*)*)"', String.Double),
- (r'[^\s"\\]+', Text)
- ],
- }
-
-
-class SquidConfLexer(RegexLexer):
- """
- Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
-
- .. versionadded:: 0.9
- """
-
- name = 'SquidConf'
- aliases = ['squidconf', 'squid.conf', 'squid']
- filenames = ['squid.conf']
- mimetypes = ['text/x-squidconf']
- flags = re.IGNORECASE
-
- keywords = (
- "access_log", "acl", "always_direct", "announce_host",
- "announce_period", "announce_port", "announce_to", "anonymize_headers",
- "append_domain", "as_whois_server", "auth_param_basic",
- "authenticate_children", "authenticate_program", "authenticate_ttl",
- "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
- "cache_dir", "cache_dns_program", "cache_effective_group",
- "cache_effective_user", "cache_host", "cache_host_acl",
- "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
- "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
+ (r'[^\s"\\]+', Text)
+ ],
+ }
+
+
+class SquidConfLexer(RegexLexer):
+ """
+ Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'SquidConf'
+ aliases = ['squidconf', 'squid.conf', 'squid']
+ filenames = ['squid.conf']
+ mimetypes = ['text/x-squidconf']
+ flags = re.IGNORECASE
+
+ keywords = (
+ "access_log", "acl", "always_direct", "announce_host",
+ "announce_period", "announce_port", "announce_to", "anonymize_headers",
+ "append_domain", "as_whois_server", "auth_param_basic",
+ "authenticate_children", "authenticate_program", "authenticate_ttl",
+ "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
+ "cache_dir", "cache_dns_program", "cache_effective_group",
+ "cache_effective_user", "cache_host", "cache_host_acl",
+ "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
+ "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
"cache_peer_access", "cache_replacement_policy", "cache_stoplist",
- "cache_stoplist_pattern", "cache_store_log", "cache_swap",
- "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
- "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
- "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
- "delay_initial_bucket_level", "delay_parameters", "delay_pools",
- "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
- "dns_testnames", "emulate_httpd_log", "err_html_text",
- "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
- "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
- "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
- "header_replace", "hierarchy_stoplist", "high_response_time_warning",
- "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
- "http_anonymizer", "httpd_accel", "httpd_accel_host",
- "httpd_accel_port", "httpd_accel_uses_host_header",
- "httpd_accel_with_proxy", "http_port", "http_reply_access",
- "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
- "ident_lookup", "ident_lookup_access", "ident_timeout",
- "incoming_http_average", "incoming_icp_average", "inside_firewall",
- "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
- "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
- "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
- "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
- "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
- "memory_pools_limit", "memory_replacement_policy", "mime_table",
- "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
- "minimum_object_size", "minimum_retry_timeout", "miss_access",
- "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
- "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
- "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
- "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
- "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
- "quick_abort", "quick_abort_max", "quick_abort_min",
- "quick_abort_pct", "range_offset_limit", "read_timeout",
- "redirect_children", "redirect_program",
- "redirect_rewrites_host_header", "reference_age",
- "refresh_pattern", "reload_into_ims", "request_body_max_size",
- "request_size", "request_timeout", "shutdown_lifetime",
- "single_parent_bypass", "siteselect_timeout", "snmp_access",
- "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
- "store_avg_object_size", "store_objects_per_bucket",
- "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
- "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
- "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
- "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
- "unlinkd_program", "uri_whitespace", "useragent_log",
- "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
- )
-
- opts = (
- "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
- "multicast-responder", "on", "off", "all", "deny", "allow", "via",
- "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
- "credentialsttl", "none", "disable", "offline_toggle", "diskd",
- )
-
- actions = (
- "shutdown", "info", "parameter", "server_list", "client_list",
- r'squid.conf',
- )
-
- actions_stats = (
- "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
- "redirector", "io", "reply_headers", "filedescriptors", "netdb",
- )
-
- actions_log = ("status", "enable", "disable", "clear")
-
- acls = (
- "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
- "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
- "dst", "time", "dstdomain", "ident", "snmp_community",
- )
-
- ip_re = (
- r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
- r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
- r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
- r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
- r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
- r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
- r'[1-9]?\d)){3}))'
- )
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#', Comment, 'comment'),
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
- # Actions
- (words(actions, prefix=r'\b', suffix=r'\b'), String),
- (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
- (words(actions_log, prefix=r'log/', suffix=r'='), String),
- (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
- (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
- (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
- (r'\S+', Text),
- ],
- 'comment': [
- (r'\s*TAG:.*', String.Escape, '#pop'),
- (r'.+', Comment, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class NginxConfLexer(RegexLexer):
- """
- Lexer for `Nginx <http://nginx.net/>`_ configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Nginx configuration file'
- aliases = ['nginx']
+ "cache_stoplist_pattern", "cache_store_log", "cache_swap",
+ "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
+ "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
+ "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
+ "delay_initial_bucket_level", "delay_parameters", "delay_pools",
+ "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
+ "dns_testnames", "emulate_httpd_log", "err_html_text",
+ "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
+ "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
+ "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
+ "header_replace", "hierarchy_stoplist", "high_response_time_warning",
+ "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
+ "http_anonymizer", "httpd_accel", "httpd_accel_host",
+ "httpd_accel_port", "httpd_accel_uses_host_header",
+ "httpd_accel_with_proxy", "http_port", "http_reply_access",
+ "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
+ "ident_lookup", "ident_lookup_access", "ident_timeout",
+ "incoming_http_average", "incoming_icp_average", "inside_firewall",
+ "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
+ "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
+ "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
+ "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
+ "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
+ "memory_pools_limit", "memory_replacement_policy", "mime_table",
+ "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
+ "minimum_object_size", "minimum_retry_timeout", "miss_access",
+ "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
+ "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
+ "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
+ "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
+ "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
+ "quick_abort", "quick_abort_max", "quick_abort_min",
+ "quick_abort_pct", "range_offset_limit", "read_timeout",
+ "redirect_children", "redirect_program",
+ "redirect_rewrites_host_header", "reference_age",
+ "refresh_pattern", "reload_into_ims", "request_body_max_size",
+ "request_size", "request_timeout", "shutdown_lifetime",
+ "single_parent_bypass", "siteselect_timeout", "snmp_access",
+ "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
+ "store_avg_object_size", "store_objects_per_bucket",
+ "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
+ "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
+ "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
+ "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
+ "unlinkd_program", "uri_whitespace", "useragent_log",
+ "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
+ )
+
+ opts = (
+ "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
+ "multicast-responder", "on", "off", "all", "deny", "allow", "via",
+ "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
+ "credentialsttl", "none", "disable", "offline_toggle", "diskd",
+ )
+
+ actions = (
+ "shutdown", "info", "parameter", "server_list", "client_list",
+ r'squid.conf',
+ )
+
+ actions_stats = (
+ "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
+ "redirector", "io", "reply_headers", "filedescriptors", "netdb",
+ )
+
+ actions_log = ("status", "enable", "disable", "clear")
+
+ acls = (
+ "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
+ "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
+ "dst", "time", "dstdomain", "ident", "snmp_community",
+ )
+
+ ip_re = (
+ r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
+ r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
+ r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
+ r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
+ r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
+ r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
+ r'[1-9]?\d)){3}))'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'#', Comment, 'comment'),
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
+ # Actions
+ (words(actions, prefix=r'\b', suffix=r'\b'), String),
+ (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
+ (words(actions_log, prefix=r'log/', suffix=r'='), String),
+ (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
+ (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
+ (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
+ (r'\S+', Text),
+ ],
+ 'comment': [
+ (r'\s*TAG:.*', String.Escape, '#pop'),
+ (r'.+', Comment, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class NginxConfLexer(RegexLexer):
+ """
+ Lexer for `Nginx <http://nginx.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Nginx configuration file'
+ aliases = ['nginx']
filenames = ['nginx.conf']
- mimetypes = ['text/x-nginx-conf']
-
- tokens = {
- 'root': [
+ mimetypes = ['text/x-nginx-conf']
+
+ tokens = {
+ 'root': [
(r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Whitespace, Name)),
- (r'[^\s;#]+', Keyword, 'stmt'),
- include('base'),
- ],
- 'block': [
- (r'\}', Punctuation, '#pop:2'),
- (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
- include('base'),
- ],
- 'stmt': [
- (r'\{', Punctuation, 'block'),
- (r';', Punctuation, '#pop'),
- include('base'),
- ],
- 'base': [
- (r'#.*\n', Comment.Single),
- (r'on|off', Name.Constant),
- (r'\$[^\s;#()]+', Name.Variable),
- (r'([a-z0-9.-]+)(:)([0-9]+)',
- bygroups(Name, Punctuation, Number.Integer)),
- (r'[a-z-]+/[a-z-+]+', String), # mimetype
- # (r'[a-zA-Z._-]+', Keyword),
- (r'[0-9]+[km]?\b', Number.Integer),
+ (r'[^\s;#]+', Keyword, 'stmt'),
+ include('base'),
+ ],
+ 'block': [
+ (r'\}', Punctuation, '#pop:2'),
+ (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
+ include('base'),
+ ],
+ 'stmt': [
+ (r'\{', Punctuation, 'block'),
+ (r';', Punctuation, '#pop'),
+ include('base'),
+ ],
+ 'base': [
+ (r'#.*\n', Comment.Single),
+ (r'on|off', Name.Constant),
+ (r'\$[^\s;#()]+', Name.Variable),
+ (r'([a-z0-9.-]+)(:)([0-9]+)',
+ bygroups(Name, Punctuation, Number.Integer)),
+ (r'[a-z-]+/[a-z-+]+', String), # mimetype
+ # (r'[a-zA-Z._-]+', Keyword),
+ (r'[0-9]+[km]?\b', Number.Integer),
(r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Whitespace, String.Regex)),
- (r'[:=~]', Punctuation),
- (r'[^\s;#{}$]+', String), # catch all
- (r'/[^\s;#]*', Name), # pathname
+ (r'[:=~]', Punctuation),
+ (r'[^\s;#{}$]+', String), # catch all
+ (r'/[^\s;#]*', Name), # pathname
(r'\s+', Whitespace),
- (r'[$;]', Text), # leftover characters
- ],
- }
-
-
-class LighttpdConfLexer(RegexLexer):
- """
- Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Lighttpd configuration file'
+ (r'[$;]', Text), # leftover characters
+ ],
+ }
+
+
+class LighttpdConfLexer(RegexLexer):
+ """
+ Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Lighttpd configuration file'
aliases = ['lighttpd', 'lighty']
filenames = ['lighttpd.conf']
- mimetypes = ['text/x-lighttpd-conf']
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment.Single),
- (r'/\S*', Name), # pathname
- (r'[a-zA-Z._-]+', Keyword),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'[0-9]+', Number),
- (r'=>|=~|\+=|==|=|\+', Operator),
- (r'\$[A-Z]+', Name.Builtin),
- (r'[(){}\[\],]', Punctuation),
- (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+ mimetypes = ['text/x-lighttpd-conf']
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment.Single),
+ (r'/\S*', Name), # pathname
+ (r'[a-zA-Z._-]+', Keyword),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'[0-9]+', Number),
+ (r'=>|=~|\+=|==|=|\+', Operator),
+ (r'\$[A-Z]+', Name.Builtin),
+ (r'[(){}\[\],]', Punctuation),
+ (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
(r'\s+', Whitespace),
- ],
-
- }
-
-
-class DockerLexer(RegexLexer):
- """
- Lexer for `Docker <http://docker.io>`_ configuration files.
-
- .. versionadded:: 2.0
- """
- name = 'Docker'
- aliases = ['docker', 'dockerfile']
- filenames = ['Dockerfile', '*.docker']
- mimetypes = ['text/x-dockerfile-config']
-
+ ],
+
+ }
+
+
+class DockerLexer(RegexLexer):
+ """
+ Lexer for `Docker <http://docker.io>`_ configuration files.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Docker'
+ aliases = ['docker', 'dockerfile']
+ filenames = ['Dockerfile', '*.docker']
+ mimetypes = ['text/x-dockerfile-config']
+
_keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
_bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
_lb = r'(?:\s*\\?\s*)' # dockerfile line break regex
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'#.*', Comment),
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'#.*', Comment),
(r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
bygroups(Keyword, Whitespace, String, Whitespace, Keyword, Whitespace, String)),
(r'(ONBUILD)(\s+)(%s)' % (_lb,), bygroups(Keyword, Whitespace, using(BashLexer))),
@@ -567,23 +567,23 @@ class DockerLexer(RegexLexer):
bygroups(Keyword, Whitespace, using(BashLexer))),
(r'(%s|VOLUME)\b(\s+)(.*)' % (_keywords), bygroups(Keyword, Whitespace, String)),
(r'(%s)(\s+)' % (_bash_keywords,), bygroups(Keyword, Whitespace)),
- (r'(.*\\\n)*.+', using(BashLexer)),
+ (r'(.*\\\n)*.+', using(BashLexer)),
]
- }
-
-
+ }
+
+
class TerraformLexer(ExtendedRegexLexer):
- """
-    Lexer for `terraform .tf files <https://www.terraform.io/>`_.
-
- .. versionadded:: 2.1
- """
-
- name = 'Terraform'
- aliases = ['terraform', 'tf']
- filenames = ['*.tf']
- mimetypes = ['application/x-tf', 'application/x-terraform']
-
+ """
+    Lexer for `terraform .tf files <https://www.terraform.io/>`_.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Terraform'
+ aliases = ['terraform', 'tf']
+ filenames = ['*.tf']
+ mimetypes = ['application/x-tf', 'application/x-terraform']
+
classes = ('backend', 'data', 'module', 'output', 'provider',
'provisioner', 'resource', 'variable')
classes_re = "({})".format(('|').join(classes))
@@ -670,8 +670,8 @@ class TerraformLexer(ExtendedRegexLexer):
yield amatch.start(), Error, amatch.group()
ctx.end = len(ctx.text)
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
include('basic'),
include('whitespace'),
@@ -687,8 +687,8 @@ class TerraformLexer(ExtendedRegexLexer):
include('identifier'),
include('punctuation'),
(r'[0-9]+', Number),
- ],
- 'basic': [
+ ],
+ 'basic': [
(r'\s*/\*', Comment.Multiline, 'comment'),
(r'\s*#.*\n', Comment.Single),
include('whitespace'),
@@ -722,242 +722,242 @@ class TerraformLexer(ExtendedRegexLexer):
r'(<<-?)\s*([a-zA-Z_]\w*)(.*?\n)',
heredoc_callback,
)
- ],
+ ],
'blockname': [
# e.g. resource "aws_security_group" "allow_tls" {
# e.g. backend "consul" {
(r'(\s*)("[0-9a-zA-Z-_]+")?(\s*)("[0-9a-zA-Z-_]+")(\s+)(\{)',
bygroups(Whitespace, Name.Class, Whitespace, Name.Variable, Whitespace, Punctuation)),
- ],
+ ],
'identifier': [
(r'\b(var\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
(r'\b([0-9a-zA-Z-_\[\]]+\.[0-9a-zA-Z-_\.\[\]]+)\b', bygroups(Name.Variable)),
- ],
- 'punctuation': [
+ ],
+ 'punctuation': [
(r'[\[\]()\{\},.?:!=]', Punctuation),
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'whitespace': [
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)),
- ],
- }
-
-
-class TermcapLexer(RegexLexer):
- """
- Lexer for termcap database source.
-
- This is very simple and minimal.
-
- .. versionadded:: 2.1
- """
- name = 'Termcap'
+ ],
+ }
+
+
+class TermcapLexer(RegexLexer):
+ """
+ Lexer for termcap database source.
+
+ This is very simple and minimal.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Termcap'
aliases = ['termcap']
filenames = ['termcap', 'termcap.src']
- mimetypes = []
-
- # NOTE:
- # * multiline with trailing backslash
- # * separator is ':'
- # * to embed colon as data, we must use \072
-    # * space after separator is not allowed (maybe)
- tokens = {
- 'root': [
+ mimetypes = []
+
+ # NOTE:
+ # * multiline with trailing backslash
+ # * separator is ':'
+ # * to embed colon as data, we must use \072
+    # * space after separator is not allowed (maybe)
+ tokens = {
+ 'root': [
(r'^#.*', Comment),
(r'^[^\s#:|]+', Name.Tag, 'names'),
(r'\s+', Whitespace),
- ],
- 'names': [
+ ],
+ 'names': [
(r'\n', Whitespace, '#pop'),
- (r':', Punctuation, 'defs'),
- (r'\|', Punctuation),
+ (r':', Punctuation, 'defs'),
+ (r'\|', Punctuation),
(r'[^:|]+', Name.Attribute),
- ],
- 'defs': [
+ ],
+ 'defs': [
(r'(\\)(\n[ \t]*)', bygroups(Text, Whitespace)),
(r'\n[ \t]*', Whitespace, '#pop:2'),
- (r'(#)([0-9]+)', bygroups(Operator, Number)),
- (r'=', Operator, 'data'),
- (r':', Punctuation),
- (r'[^\s:=#]+', Name.Class),
- ],
- 'data': [
- (r'\\072', Literal),
- (r':', Punctuation, '#pop'),
- (r'[^:\\]+', Literal), # for performance
- (r'.', Literal),
- ],
- }
-
-
-class TerminfoLexer(RegexLexer):
- """
- Lexer for terminfo database source.
-
- This is very simple and minimal.
-
- .. versionadded:: 2.1
- """
- name = 'Terminfo'
+ (r'(#)([0-9]+)', bygroups(Operator, Number)),
+ (r'=', Operator, 'data'),
+ (r':', Punctuation),
+ (r'[^\s:=#]+', Name.Class),
+ ],
+ 'data': [
+ (r'\\072', Literal),
+ (r':', Punctuation, '#pop'),
+ (r'[^:\\]+', Literal), # for performance
+ (r'.', Literal),
+ ],
+ }
+
+
+class TerminfoLexer(RegexLexer):
+ """
+ Lexer for terminfo database source.
+
+ This is very simple and minimal.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Terminfo'
aliases = ['terminfo']
filenames = ['terminfo', 'terminfo.src']
- mimetypes = []
-
- # NOTE:
- # * multiline with leading whitespace
- # * separator is ','
- # * to embed comma as data, we can use \,
- # * space after separator is allowed
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
+ mimetypes = []
+
+ # NOTE:
+ # * multiline with leading whitespace
+ # * separator is ','
+ # * to embed comma as data, we can use \,
+ # * space after separator is allowed
+ tokens = {
+ 'root': [
+ (r'^#.*$', Comment),
(r'^[^\s#,|]+', Name.Tag, 'names'),
(r'\s+', Whitespace),
- ],
- 'names': [
+ ],
+ 'names': [
(r'\n', Whitespace, '#pop'),
(r'(,)([ \t]*)', bygroups(Punctuation, Whitespace), 'defs'),
- (r'\|', Punctuation),
+ (r'\|', Punctuation),
(r'[^,|]+', Name.Attribute),
- ],
- 'defs': [
+ ],
+ 'defs': [
(r'\n[ \t]+', Whitespace),
(r'\n', Whitespace, '#pop:2'),
- (r'(#)([0-9]+)', bygroups(Operator, Number)),
- (r'=', Operator, 'data'),
+ (r'(#)([0-9]+)', bygroups(Operator, Number)),
+ (r'=', Operator, 'data'),
(r'(,)([ \t]*)', bygroups(Punctuation, Whitespace)),
- (r'[^\s,=#]+', Name.Class),
- ],
- 'data': [
- (r'\\[,\\]', Literal),
+ (r'[^\s,=#]+', Name.Class),
+ ],
+ 'data': [
+ (r'\\[,\\]', Literal),
(r'(,)([ \t]*)', bygroups(Punctuation, Whitespace), '#pop'),
- (r'[^\\,]+', Literal), # for performance
- (r'.', Literal),
- ],
- }
-
-
-class PkgConfigLexer(RegexLexer):
- """
- Lexer for `pkg-config
- <http://www.freedesktop.org/wiki/Software/pkg-config/>`_
- (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
-
- .. versionadded:: 2.1
- """
-
- name = 'PkgConfig'
+ (r'[^\\,]+', Literal), # for performance
+ (r'.', Literal),
+ ],
+ }
+
+
+class PkgConfigLexer(RegexLexer):
+ """
+ Lexer for `pkg-config
+ <http://www.freedesktop.org/wiki/Software/pkg-config/>`_
+ (see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'PkgConfig'
aliases = ['pkgconfig']
filenames = ['*.pc']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'#.*$', Comment.Single),
-
- # variable definitions
- (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),
-
- # keyword lines
- (r'^([\w.]+)(:)',
- bygroups(Name.Tag, Punctuation), 'spvalue'),
-
- # variable references
- include('interp'),
-
- # fallback
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'#.*$', Comment.Single),
+
+ # variable definitions
+ (r'^(\w+)(=)', bygroups(Name.Attribute, Operator)),
+
+ # keyword lines
+ (r'^([\w.]+)(:)',
+ bygroups(Name.Tag, Punctuation), 'spvalue'),
+
+ # variable references
+ include('interp'),
+
+ # fallback
(r'\s+', Whitespace),
- (r'[^${}#=:\n.]+', Text),
- (r'.', Text),
- ],
- 'interp': [
- # you can escape literal "$" as "$$"
- (r'\$\$', Text),
-
- # variable references
- (r'\$\{', String.Interpol, 'curly'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r'\w+', Name.Attribute),
- ],
- 'spvalue': [
- include('interp'),
-
- (r'#.*$', Comment.Single, '#pop'),
+ (r'[^${}#=:\n.]+', Text),
+ (r'.', Text),
+ ],
+ 'interp': [
+ # you can escape literal "$" as "$$"
+ (r'\$\$', Text),
+
+ # variable references
+ (r'\$\{', String.Interpol, 'curly'),
+ ],
+ 'curly': [
+ (r'\}', String.Interpol, '#pop'),
+ (r'\w+', Name.Attribute),
+ ],
+ 'spvalue': [
+ include('interp'),
+
+ (r'#.*$', Comment.Single, '#pop'),
(r'\n', Whitespace, '#pop'),
-
- # fallback
+
+ # fallback
(r'\s+', Whitespace),
(r'[^${}#\n\s]+', Text),
- (r'.', Text),
- ],
- }
-
-
-class PacmanConfLexer(RegexLexer):
- """
- Lexer for `pacman.conf
- <https://www.archlinux.org/pacman/pacman.conf.5.html>`_.
-
- Actually, IniLexer works almost fine for this format,
-    but it yields error tokens, because pacman.conf has
-    entries without an assignment, like:
-
- UseSyslog
- Color
- TotalDownload
- CheckSpace
- VerbosePkgLists
-
- These are flags to switch on.
-
- .. versionadded:: 2.1
- """
-
- name = 'PacmanConf'
+ (r'.', Text),
+ ],
+ }
+
+
+class PacmanConfLexer(RegexLexer):
+ """
+ Lexer for `pacman.conf
+ <https://www.archlinux.org/pacman/pacman.conf.5.html>`_.
+
+ Actually, IniLexer works almost fine for this format,
+    but it yields error tokens, because pacman.conf has
+    entries without an assignment, like:
+
+ UseSyslog
+ Color
+ TotalDownload
+ CheckSpace
+ VerbosePkgLists
+
+ These are flags to switch on.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'PacmanConf'
aliases = ['pacmanconf']
filenames = ['pacman.conf']
- mimetypes = []
-
- tokens = {
- 'root': [
- # comment
- (r'#.*$', Comment.Single),
-
- # section header
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ # comment
+ (r'#.*$', Comment.Single),
+
+ # section header
(r'^(\s*)(\[.*?\])(\s*)$', bygroups(Whitespace, Keyword, Whitespace)),
-
- # variable definitions
- # (Leading space is allowed...)
- (r'(\w+)(\s*)(=)',
+
+ # variable definitions
+ # (Leading space is allowed...)
+ (r'(\w+)(\s*)(=)',
bygroups(Name.Attribute, Whitespace, Operator)),
-
- # flags to on
- (r'^(\s*)(\w+)(\s*)$',
+
+ # flags to on
+ (r'^(\s*)(\w+)(\s*)$',
bygroups(Whitespace, Name.Attribute, Whitespace)),
-
- # built-in special values
- (words((
- '$repo', # repository
- '$arch', # architecture
- '%o', # outfile
- '%u', # url
- ), suffix=r'\b'),
- Name.Variable),
-
- # fallback
+
+ # built-in special values
+ (words((
+ '$repo', # repository
+ '$arch', # architecture
+ '%o', # outfile
+ '%u', # url
+ ), suffix=r'\b'),
+ Name.Variable),
+
+ # fallback
(r'\s+', Whitespace),
- (r'.', Text),
- ],
- }
+ (r'.', Text),
+ ],
+ }
class AugeasLexer(RegexLexer):
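For orientation, the configuration-file lexers restored in this hunk (ApacheConf, SquidConf, Nginx, Lighttpd, Docker, Terraform, Termcap, Terminfo, PkgConfig, PacmanConf) are all driven through the same public Pygments entry points. A minimal usage sketch, assuming a stock Pygments install; the nginx snippet and host name below are invented purely for illustration:

    # Sketch only: highlight a config snippet with one of the lexers above.
    from pygments import highlight
    from pygments.lexers import NginxConfLexer
    from pygments.formatters import TerminalFormatter

    sample = """
    server {
        listen 80;
        server_name example.org;   # hypothetical host
        location / {
            proxy_pass http://127.0.0.1:8080;
        }
    }
    """

    print(highlight(sample, NginxConfLexer(), TerminalFormatter()))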
diff --git a/contrib/python/Pygments/py3/pygments/lexers/console.py b/contrib/python/Pygments/py3/pygments/lexers/console.py
index 5d7fea78f5..28cfe7e8c1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/console.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/console.py
@@ -1,114 +1,114 @@
-"""
- pygments.lexers.console
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for misc console output.
-
+"""
+ pygments.lexers.console
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc console output.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Generic, Comment, String, Text, Keyword, Name, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Generic, Comment, String, Text, Keyword, Name, \
Punctuation, Number, Whitespace
-
-__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer']
-
-
-class VCTreeStatusLexer(RegexLexer):
- """
- For colorizing output of version control status commands, like "hg
- status" or "svn status".
-
- .. versionadded:: 2.0
- """
- name = 'VCTreeStatus'
- aliases = ['vctreestatus']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^A \+ C\s+', Generic.Error),
- (r'^A\s+\+?\s+', String),
- (r'^M\s+', Generic.Inserted),
- (r'^C\s+', Generic.Error),
- (r'^D\s+', Generic.Deleted),
- (r'^[?!]\s+', Comment.Preproc),
- (r' >\s+.*\n', Comment.Preproc),
+
+__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer']
+
+
+class VCTreeStatusLexer(RegexLexer):
+ """
+ For colorizing output of version control status commands, like "hg
+ status" or "svn status".
+
+ .. versionadded:: 2.0
+ """
+ name = 'VCTreeStatus'
+ aliases = ['vctreestatus']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^A \+ C\s+', Generic.Error),
+ (r'^A\s+\+?\s+', String),
+ (r'^M\s+', Generic.Inserted),
+ (r'^C\s+', Generic.Error),
+ (r'^D\s+', Generic.Deleted),
+ (r'^[?!]\s+', Comment.Preproc),
+ (r' >\s+.*\n', Comment.Preproc),
(r'\S+', Text),
(r'\s+', Whitespace),
- ]
- }
-
-
-class PyPyLogLexer(RegexLexer):
- """
- Lexer for PyPy log files.
-
- .. versionadded:: 1.5
- """
- name = "PyPy Log"
- aliases = ["pypylog", "pypy"]
- filenames = ["*.pypylog"]
- mimetypes = ['application/x-pypylog']
-
- tokens = {
- "root": [
- (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"),
- (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"),
- include("extra-stuff"),
- ],
- "jit-log": [
- (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
- (r"^\+\d+: ", Comment),
- (r"--end of the loop--", Comment),
- (r"[ifp]\d+", Name),
- (r"ptr\d+", Name),
- (r"(\()(\w+(?:\.\w+)?)(\))",
- bygroups(Punctuation, Name.Builtin, Punctuation)),
- (r"[\[\]=,()]", Punctuation),
- (r"(\d+\.\d+|inf|-inf)", Number.Float),
- (r"-?\d+", Number.Integer),
- (r"'.*'", String),
- (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
- (r"<.*?>+", Name.Builtin),
- (r"(label|debug_merge_point|jump|finish)", Name.Class),
- (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
- r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
- r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
- r"int_is_true|"
- r"uint_floordiv|uint_ge|uint_lt|"
- r"float_add|float_sub|float_mul|float_truediv|float_neg|"
- r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
- r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
- r"cast_int_to_float|cast_float_to_int|"
- r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
- r"virtual_ref|mark_opaque_ptr|"
- r"call_may_force|call_assembler|call_loopinvariant|"
- r"call_release_gil|call_pure|call|"
- r"new_with_vtable|new_array|newstr|newunicode|new|"
- r"arraylen_gc|"
- r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
- r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
- r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
- r"getfield_raw|setfield_gc|setfield_raw|"
- r"strgetitem|strsetitem|strlen|copystrcontent|"
- r"unicodegetitem|unicodesetitem|unicodelen|"
- r"guard_true|guard_false|guard_value|guard_isnull|"
- r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
- r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
- Name.Builtin),
- include("extra-stuff"),
- ],
- "jit-backend-counts": [
- (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
- (r":", Punctuation),
- (r"\d+", Number),
- include("extra-stuff"),
- ],
- "extra-stuff": [
+ ]
+ }
+
+
+class PyPyLogLexer(RegexLexer):
+ """
+ Lexer for PyPy log files.
+
+ .. versionadded:: 1.5
+ """
+ name = "PyPy Log"
+ aliases = ["pypylog", "pypy"]
+ filenames = ["*.pypylog"]
+ mimetypes = ['application/x-pypylog']
+
+ tokens = {
+ "root": [
+ (r"\[\w+\] \{jit-log-.*?$", Keyword, "jit-log"),
+ (r"\[\w+\] \{jit-backend-counts$", Keyword, "jit-backend-counts"),
+ include("extra-stuff"),
+ ],
+ "jit-log": [
+ (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
+ (r"^\+\d+: ", Comment),
+ (r"--end of the loop--", Comment),
+ (r"[ifp]\d+", Name),
+ (r"ptr\d+", Name),
+ (r"(\()(\w+(?:\.\w+)?)(\))",
+ bygroups(Punctuation, Name.Builtin, Punctuation)),
+ (r"[\[\]=,()]", Punctuation),
+ (r"(\d+\.\d+|inf|-inf)", Number.Float),
+ (r"-?\d+", Number.Integer),
+ (r"'.*'", String),
+ (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
+ (r"<.*?>+", Name.Builtin),
+ (r"(label|debug_merge_point|jump|finish)", Name.Class),
+ (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
+ r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
+ r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
+ r"int_is_true|"
+ r"uint_floordiv|uint_ge|uint_lt|"
+ r"float_add|float_sub|float_mul|float_truediv|float_neg|"
+ r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
+ r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
+ r"cast_int_to_float|cast_float_to_int|"
+ r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
+ r"virtual_ref|mark_opaque_ptr|"
+ r"call_may_force|call_assembler|call_loopinvariant|"
+ r"call_release_gil|call_pure|call|"
+ r"new_with_vtable|new_array|newstr|newunicode|new|"
+ r"arraylen_gc|"
+ r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
+ r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
+ r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
+ r"getfield_raw|setfield_gc|setfield_raw|"
+ r"strgetitem|strsetitem|strlen|copystrcontent|"
+ r"unicodegetitem|unicodesetitem|unicodelen|"
+ r"guard_true|guard_false|guard_value|guard_isnull|"
+ r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
+ r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
+ Name.Builtin),
+ include("extra-stuff"),
+ ],
+ "jit-backend-counts": [
+ (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
+ (r":", Punctuation),
+ (r"\d+", Number),
+ include("extra-stuff"),
+ ],
+ "extra-stuff": [
(r"\s+", Whitespace),
- (r"#.*?$", Comment),
- ],
- }
+ (r"#.*?$", Comment),
+ ],
+ }
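The two console lexers above (VCTreeStatusLexer, PyPyLogLexer) follow the standard RegexLexer pattern: a `tokens` dict of states, each a list of (regex, token) or (regex, bygroups(...)) rules, optionally with a state transition. A small sketch of that pattern, assuming stock Pygments; the lexer, its rules and the sample input are hypothetical and not part of this patch:

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Comment, Keyword, Name, Text, Whitespace

    class TinyStatusLexer(RegexLexer):
        """Hypothetical lexer for 'FLAG  path' status lines."""
        name = 'TinyStatus'
        aliases = ['tinystatus']

        tokens = {
            'root': [
                (r'#.*?$', Comment.Single),              # comment lines
                (r'^([AMD])(\s+)(\S+)',                  # flag, spaces, path
                 bygroups(Keyword, Whitespace, Name)),
                (r'\s+', Whitespace),
                (r'\S+', Text),                          # fallback
            ],
        }

    sample = "A  src/new_file.py\nM  README\n# untracked below\n?  scratch.txt\n"
    for pos, token, value in TinyStatusLexer().get_tokens_unprocessed(sample):
        print(pos, token, repr(value))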
diff --git a/contrib/python/Pygments/py3/pygments/lexers/csound.py b/contrib/python/Pygments/py3/pygments/lexers/csound.py
index a871de8d72..67995baad5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/csound.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/csound.py
@@ -1,72 +1,72 @@
-"""
- pygments.lexers.csound
- ~~~~~~~~~~~~~~~~~~~~~~
-
+"""
+ pygments.lexers.csound
+ ~~~~~~~~~~~~~~~~~~~~~~
+
Lexers for Csound languages.
-
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import re
-
-from pygments.lexer import RegexLexer, bygroups, default, include, using, words
+
+from pygments.lexer import RegexLexer, bygroups, default, include, using, words
from pygments.token import Comment, Error, Keyword, Name, Number, Operator, Punctuation, \
String, Text, Whitespace
from pygments.lexers._csound_builtins import OPCODES, DEPRECATED_OPCODES, REMOVED_OPCODES
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.scripting import LuaLexer
-
-__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer', 'CsoundDocumentLexer']
-
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.scripting import LuaLexer
+
+__all__ = ['CsoundScoreLexer', 'CsoundOrchestraLexer', 'CsoundDocumentLexer']
+
newline = (r'((?:(?:;|//).*)*)(\n)', bygroups(Comment.Single, Text))
-
-
-class CsoundLexer(RegexLexer):
- tokens = {
- 'whitespace': [
+
+
+class CsoundLexer(RegexLexer):
+ tokens = {
+ 'whitespace': [
(r'[ \t]+', Whitespace),
(r'/[*](?:.|\n)*?[*]/', Comment.Multiline),
(r'(?:;|//).*$', Comment.Single),
(r'(\\)(\n)', bygroups(Text, Whitespace))
- ],
-
+ ],
+
'preprocessor directives': [
(r'#(?:e(?:nd(?:if)?|lse)\b|##)|@@?[ \t]*\d+', Comment.Preproc),
(r'#includestr', Comment.Preproc, 'includestr directive'),
(r'#include', Comment.Preproc, 'include directive'),
(r'#[ \t]*define', Comment.Preproc, 'define directive'),
(r'#(?:ifn?def|undef)\b', Comment.Preproc, 'macro directive')
- ],
-
+ ],
+
'include directive': [
- include('whitespace'),
+ include('whitespace'),
(r'([^ \t]).*?\1', String, '#pop')
- ],
+ ],
'includestr directive': [
include('whitespace'),
(r'"', String, ('#pop', 'quoted string'))
],
-
+
'define directive': [
(r'\n', Whitespace),
include('whitespace'),
(r'([A-Z_a-z]\w*)(\()', bygroups(Comment.Preproc, Punctuation),
('#pop', 'macro parameter name list')),
(r'[A-Z_a-z]\w*', Comment.Preproc, ('#pop', 'before macro body'))
- ],
+ ],
'macro parameter name list': [
- include('whitespace'),
+ include('whitespace'),
(r'[A-Z_a-z]\w*', Comment.Preproc),
(r"['#]", Punctuation),
(r'\)', Punctuation, ('#pop', 'before macro body'))
- ],
+ ],
'before macro body': [
(r'\n', Whitespace),
- include('whitespace'),
+ include('whitespace'),
(r'#', Punctuation, ('#pop', 'macro body'))
- ],
+ ],
'macro body': [
(r'(?:\\(?!#)|[^#\\]|\n)+', Comment.Preproc),
(r'\\#', Comment.Preproc),
@@ -74,15 +74,15 @@ class CsoundLexer(RegexLexer):
],
'macro directive': [
- include('whitespace'),
+ include('whitespace'),
(r'[A-Z_a-z]\w*', Comment.Preproc, '#pop')
- ],
+ ],
'macro uses': [
(r'(\$[A-Z_a-z]\w*\.?)(\()', bygroups(Comment.Preproc, Punctuation),
'macro parameter value list'),
(r'\$[A-Z_a-z]\w*(?:\.|\b)', Comment.Preproc)
- ],
+ ],
'macro parameter value list': [
(r'(?:[^\'#"{()]|\{(?!\{))+', Comment.Preproc),
(r"['#]", Punctuation),
@@ -90,12 +90,12 @@ class CsoundLexer(RegexLexer):
(r'\{\{', String, 'macro parameter value braced string'),
(r'\(', Comment.Preproc, 'macro parameter value parenthetical'),
(r'\)', Punctuation, '#pop')
- ],
+ ],
'macro parameter value quoted string': [
(r"\\[#'()]", Comment.Preproc),
(r"[#'()]", Error),
include('quoted string')
- ],
+ ],
'macro parameter value braced string': [
(r"\\[#'()]", Comment.Preproc),
(r"[#'()]", Error),
@@ -127,26 +127,26 @@ class CsoundLexer(RegexLexer):
'braced string': [
# Do nothing. This must be defined in subclasses.
- ]
- }
-
-
-class CsoundScoreLexer(CsoundLexer):
- """
+ ]
+ }
+
+
+class CsoundScoreLexer(CsoundLexer):
+ """
For `Csound <https://csound.com>`_ scores.
-
- .. versionadded:: 2.1
- """
-
- name = 'Csound Score'
- aliases = ['csound-score', 'csound-sco']
- filenames = ['*.sco']
-
- tokens = {
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Csound Score'
+ aliases = ['csound-score', 'csound-sco']
+ filenames = ['*.sco']
+
+ tokens = {
'root': [
(r'\n', Whitespace),
include('whitespace and macro uses'),
- include('preprocessor directives'),
+ include('preprocessor directives'),
(r'[aBbCdefiqstvxy]', Keyword),
# There is also a w statement that is generated internally and should not be
@@ -167,14 +167,14 @@ class CsoundScoreLexer(CsoundLexer):
(r'[()\[\]]', Punctuation),
(r'"', String, 'quoted string'),
(r'\{', Comment.Preproc, 'loop after left brace'),
- ],
-
+ ],
+
'mark statement': [
include('whitespace and macro uses'),
(r'[A-Z_a-z]\w*', Name.Label),
(r'\n', Whitespace, '#pop')
- ],
-
+ ],
+
'loop after left brace': [
include('whitespace and macro uses'),
(r'\d+', Number.Integer, ('#pop', 'loop after repeat count')),
@@ -193,50 +193,50 @@ class CsoundScoreLexer(CsoundLexer):
'braced string': [
(r'\}\}', String, '#pop'),
(r'[^}]|\}(?!\})', String)
- ]
- }
-
-
-class CsoundOrchestraLexer(CsoundLexer):
- """
+ ]
+ }
+
+
+class CsoundOrchestraLexer(CsoundLexer):
+ """
For `Csound <https://csound.com>`_ orchestras.
-
- .. versionadded:: 2.1
- """
-
- name = 'Csound Orchestra'
- aliases = ['csound', 'csound-orc']
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Csound Orchestra'
+ aliases = ['csound', 'csound-orc']
filenames = ['*.orc', '*.udo']
-
- user_defined_opcodes = set()
-
- def opcode_name_callback(lexer, match):
- opcode = match.group(0)
- lexer.user_defined_opcodes.add(opcode)
- yield match.start(), Name.Function, opcode
-
- def name_callback(lexer, match):
+
+ user_defined_opcodes = set()
+
+ def opcode_name_callback(lexer, match):
+ opcode = match.group(0)
+ lexer.user_defined_opcodes.add(opcode)
+ yield match.start(), Name.Function, opcode
+
+ def name_callback(lexer, match):
type_annotation_token = Keyword.Type
name = match.group(1)
if name in OPCODES or name in DEPRECATED_OPCODES or name in REMOVED_OPCODES:
- yield match.start(), Name.Builtin, name
- elif name in lexer.user_defined_opcodes:
- yield match.start(), Name.Function, name
- else:
+ yield match.start(), Name.Builtin, name
+ elif name in lexer.user_defined_opcodes:
+ yield match.start(), Name.Function, name
+ else:
type_annotation_token = Name
name_match = re.search(r'^(g?[afikSw])(\w+)', name)
if name_match:
yield name_match.start(1), Keyword.Type, name_match.group(1)
yield name_match.start(2), Name, name_match.group(2)
- else:
- yield match.start(), Name, name
-
+ else:
+ yield match.start(), Name, name
+
if match.group(2):
yield match.start(2), Punctuation, match.group(2)
yield match.start(3), type_annotation_token, match.group(3)
- tokens = {
+ tokens = {
'root': [
(r'\n', Whitespace),
@@ -250,8 +250,8 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'\b(?:end(?:in|op))\b', Keyword.Declaration),
include('partial statements')
- ],
-
+ ],
+
'partial statements': [
(r'\b(?:0dbfs|A4|k(?:r|smps)|nchnls(?:_i)?|sr)\b', Name.Variable.Global),
@@ -263,10 +263,10 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'"', String, 'quoted string'),
(r'\{\{', String, 'braced string'),
- (words((
- 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen',
- 'od', 'then', 'until', 'while',
- ), prefix=r'\b', suffix=r'\b'), Keyword),
+ (words((
+ 'do', 'else', 'elseif', 'endif', 'enduntil', 'fi', 'if', 'ithen', 'kthen',
+ 'od', 'then', 'until', 'while',
+ ), prefix=r'\b', suffix=r'\b'), Keyword),
(words(('return', 'rireturn'), prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
(r'\b[ik]?goto\b', Keyword, 'goto label'),
@@ -285,30 +285,30 @@ class CsoundOrchestraLexer(CsoundLexer):
(r'\blua_(?:exec|opdef)\b', Name.Builtin, 'Lua opcode'),
(r'\bp\d+\b', Name.Variable.Instance),
(r'\b([A-Z_a-z]\w*)(?:(:)([A-Za-z]))?\b', name_callback)
- ],
-
+ ],
+
'instrument numbers and identifiers': [
include('whitespace and macro uses'),
(r'\d+|[A-Z_a-z]\w*', Name.Function),
(r'[+,]', Punctuation),
(r'\n', Whitespace, '#pop')
- ],
-
+ ],
+
'after opcode keyword': [
include('whitespace and macro uses'),
(r'[A-Z_a-z]\w*', opcode_name_callback, ('#pop', 'opcode type signatures')),
(r'\n', Whitespace, '#pop')
- ],
+ ],
'opcode type signatures': [
include('whitespace and macro uses'),
-
+
# https://github.com/csound/csound/search?q=XIDENT+path%3AEngine+filename%3Acsound_orc.lex
(r'0|[afijkKoOpPStV\[\]]+', Keyword.Type),
- (r',', Punctuation),
+ (r',', Punctuation),
(r'\n', Whitespace, '#pop')
- ],
-
+ ],
+
'quoted string': [
(r'"', String, '#pop'),
(r'[^\\"$%)]+', String),
@@ -316,18 +316,18 @@ class CsoundOrchestraLexer(CsoundLexer):
include('escape sequences'),
include('format specifiers'),
(r'[\\$%)]', String)
- ],
+ ],
'braced string': [
(r'\}\}', String, '#pop'),
(r'(?:[^\\%)}]|\}(?!\}))+', String),
include('escape sequences'),
include('format specifiers'),
(r'[\\%)]', String)
- ],
+ ],
'escape sequences': [
# https://github.com/csound/csound/search?q=unquote_string+path%3AEngine+filename%3Acsound_orc_compile.c
(r'\\(?:[\\abnrt"]|[0-7]{1,3})', String.Escape)
- ],
+ ],
# Format specifiers are highlighted in all strings, even though only
# fprintks https://csound.com/docs/manual/fprintks.html
# fprints https://csound.com/docs/manual/fprints.html
@@ -348,119 +348,119 @@ class CsoundOrchestraLexer(CsoundLexer):
'format specifiers': [
(r'%[#0\- +]*\d*(?:\.\d+)?[AE-GXac-giosux]', String.Interpol),
(r'%%', String.Escape)
- ],
-
+ ],
+
'goto argument': [
include('whitespace and macro uses'),
(r',', Punctuation, '#pop'),
include('partial statements')
],
- 'goto label': [
+ 'goto label': [
include('whitespace and macro uses'),
- (r'\w+', Name.Label, '#pop'),
- default('#pop')
- ],
-
+ (r'\w+', Name.Label, '#pop'),
+ default('#pop')
+ ],
+
'prints opcode': [
include('whitespace and macro uses'),
(r'"', String, 'prints quoted string'),
default('#pop')
- ],
+ ],
'prints quoted string': [
(r'\\\\[aAbBnNrRtT]', String.Escape),
(r'%[!nNrRtT]|[~^]{1,2}', String.Escape),
include('quoted string')
- ],
-
+ ],
+
'Csound score opcode': [
include('whitespace and macro uses'),
(r'"', String, 'quoted string'),
(r'\{\{', String, 'Csound score'),
(r'\n', Whitespace, '#pop')
- ],
+ ],
'Csound score': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(CsoundScoreLexer))
- ],
-
+ ],
+
'Python opcode': [
include('whitespace and macro uses'),
(r'"', String, 'quoted string'),
(r'\{\{', String, 'Python'),
(r'\n', Whitespace, '#pop')
- ],
+ ],
'Python': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(PythonLexer))
- ],
-
+ ],
+
'Lua opcode': [
include('whitespace and macro uses'),
(r'"', String, 'quoted string'),
(r'\{\{', String, 'Lua'),
(r'\n', Whitespace, '#pop')
- ],
+ ],
'Lua': [
(r'\}\}', String, '#pop'),
(r'([^}]+)|\}(?!\})', using(LuaLexer))
- ]
- }
-
-
-class CsoundDocumentLexer(RegexLexer):
- """
+ ]
+ }
+
+
+class CsoundDocumentLexer(RegexLexer):
+ """
For `Csound <https://csound.com>`_ documents.
-
+
.. versionadded:: 2.1
- """
-
- name = 'Csound Document'
- aliases = ['csound-document', 'csound-csd']
- filenames = ['*.csd']
-
- # These tokens are based on those in XmlLexer in pygments/lexers/html.py. Making
- # CsoundDocumentLexer a subclass of XmlLexer rather than RegexLexer may seem like a
- # better idea, since Csound Document files look like XML files. However, Csound
- # Documents can contain Csound comments (preceded by //, for example) before and
- # after the root element, unescaped bitwise AND & and less than < operators, etc. In
- # other words, while Csound Document files look like XML files, they may not actually
- # be XML files.
- tokens = {
- 'root': [
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ """
+
+ name = 'Csound Document'
+ aliases = ['csound-document', 'csound-csd']
+ filenames = ['*.csd']
+
+ # These tokens are based on those in XmlLexer in pygments/lexers/html.py. Making
+ # CsoundDocumentLexer a subclass of XmlLexer rather than RegexLexer may seem like a
+ # better idea, since Csound Document files look like XML files. However, Csound
+ # Documents can contain Csound comments (preceded by //, for example) before and
+ # after the root element, unescaped bitwise AND & and less than < operators, etc. In
+ # other words, while Csound Document files look like XML files, they may not actually
+ # be XML files.
+ tokens = {
+ 'root': [
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'(?:;|//).*$', Comment.Single),
(r'[^/;<]+|/(?!/)', Text),
- (r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')),
- (r'<\s*CsScore', Name.Tag, ('score', 'tag')),
+ (r'<\s*CsInstruments', Name.Tag, ('orchestra', 'tag')),
+ (r'<\s*CsScore', Name.Tag, ('score', 'tag')),
(r'<\s*[Hh][Tt][Mm][Ll]', Name.Tag, ('HTML', 'tag')),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag)
- ],
-
- 'orchestra': [
- (r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer))
- ],
- 'score': [
- (r'<\s*/\s*CsScore\s*>', Name.Tag, '#pop'),
- (r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer))
- ],
- 'HTML': [
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag)
+ ],
+
+ 'orchestra': [
+ (r'<\s*/\s*CsInstruments\s*>', Name.Tag, '#pop'),
+ (r'(.|\n)+?(?=<\s*/\s*CsInstruments\s*>)', using(CsoundOrchestraLexer))
+ ],
+ 'score': [
+ (r'<\s*/\s*CsScore\s*>', Name.Tag, '#pop'),
+ (r'(.|\n)+?(?=<\s*/\s*CsScore\s*>)', using(CsoundScoreLexer))
+ ],
+ 'HTML': [
(r'<\s*/\s*[Hh][Tt][Mm][Ll]\s*>', Name.Tag, '#pop'),
(r'(.|\n)+?(?=<\s*/\s*[Hh][Tt][Mm][Ll]\s*>)', using(HtmlLexer))
- ],
+ ],
- 'tag': [
+ 'tag': [
(r'\s+', Whitespace),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop')
- ],
- 'attr': [
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop')
+ ],
+ 'attr': [
(r'\s+', Whitespace),
- (r'".*?"', String, '#pop'),
- (r"'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop')
- ]
- }
+ (r'".*?"', String, '#pop'),
+ (r"'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop')
+ ]
+ }
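The three Csound lexers restored above each register aliases and filename globs ('csound', 'csound-score', '*.csd', ...), so they are normally obtained through the generic lookup helpers rather than instantiated directly. A short sketch, assuming a regular Pygments installation; the filename is just an example:

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    orc = get_lexer_by_name('csound')           # CsoundOrchestraLexer via its alias
    sco = get_lexer_by_name('csound-score')     # CsoundScoreLexer
    csd = get_lexer_for_filename('piece.csd')   # CsoundDocumentLexer via the '*.csd' glob

    print(orc.name, '|', sco.name, '|', csd.name)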
diff --git a/contrib/python/Pygments/py3/pygments/lexers/css.py b/contrib/python/Pygments/py3/pygments/lexers/css.py
index 4b284cc8fa..2372b16368 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/css.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/css.py
@@ -1,24 +1,24 @@
-"""
- pygments.lexers.css
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for CSS and related stylesheet formats.
-
+"""
+ pygments.lexers.css
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for CSS and related stylesheet formats.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-import copy
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
- default, words, inherit
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import copy
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default, words, inherit
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer']
-
-
+
+__all__ = ['CssLexer', 'SassLexer', 'ScssLexer', 'LessCssLexer']
+
+
# List of vendor prefixes obtained from:
# https://www.w3.org/TR/CSS21/syndata.html#vendor-keyword-history
_vendor_prefixes = (
@@ -264,47 +264,47 @@ _all_units = _angle_units + _frequency_units + _length_units + \
_resolution_units + _time_units
-class CssLexer(RegexLexer):
- """
- For CSS (Cascading Style Sheets).
- """
-
- name = 'CSS'
- aliases = ['css']
- filenames = ['*.css']
- mimetypes = ['text/css']
-
- tokens = {
- 'root': [
- include('basics'),
- ],
- 'basics': [
+class CssLexer(RegexLexer):
+ """
+ For CSS (Cascading Style Sheets).
+ """
+
+ name = 'CSS'
+ aliases = ['css']
+ filenames = ['*.css']
+ mimetypes = ['text/css']
+
+ tokens = {
+ 'root': [
+ include('basics'),
+ ],
+ 'basics': [
(r'\s+', Whitespace),
- (r'/\*(?:.|\n)*?\*/', Comment),
- (r'\{', Punctuation, 'content'),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\{', Punctuation, 'content'),
(r'(\:{1,2})([\w-]+)', bygroups(Punctuation, Name.Decorator)),
(r'(\.)([\w-]+)', bygroups(Punctuation, Name.Class)),
(r'(\#)([\w-]+)', bygroups(Punctuation, Name.Namespace)),
(r'(@)([\w-]+)', bygroups(Punctuation, Keyword), 'atrule'),
- (r'[\w-]+', Name.Tag),
- (r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
+ (r'[\w-]+', Name.Tag),
+ (r'[~^*!%&$\[\]()<>|+=@:;,./?-]', Operator),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ],
- 'atrule': [
- (r'\{', Punctuation, 'atcontent'),
- (r';', Punctuation, '#pop'),
- include('basics'),
- ],
- 'atcontent': [
- include('basics'),
- (r'\}', Punctuation, '#pop:2'),
- ],
- 'content': [
+ ],
+ 'atrule': [
+ (r'\{', Punctuation, 'atcontent'),
+ (r';', Punctuation, '#pop'),
+ include('basics'),
+ ],
+ 'atcontent': [
+ include('basics'),
+ (r'\}', Punctuation, '#pop:2'),
+ ],
+ 'content': [
(r'\s+', Whitespace),
- (r'\}', Punctuation, '#pop'),
+ (r'\}', Punctuation, '#pop'),
(r';', Punctuation),
- (r'^@.*?$', Comment.Preproc),
+ (r'^@.*?$', Comment.Preproc),
(words(_vendor_prefixes,), Keyword.Pseudo),
(r'('+r'|'.join(_css_properties)+r')(\s*)(\:)',
@@ -329,8 +329,8 @@ class CssLexer(RegexLexer):
(words(_color_keywords, suffix=r'\b'), Keyword.Constant),
# for transition-property etc.
(words(_css_properties, suffix=r'\b'), Keyword),
- (r'\!important', Comment.Preproc),
- (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\!important', Comment.Preproc),
+ (r'/\*(?:.|\n)*?\*/', Comment),
include('numeric-values'),
@@ -384,311 +384,311 @@ class CssLexer(RegexLexer):
(r'%', Keyword.Type),
default('#pop'),
],
- }
-
-
-common_sass_tokens = {
- 'value': [
+ }
+
+
+common_sass_tokens = {
+ 'value': [
(r'[ \t]+', Whitespace),
- (r'[!$][\w-]+', Name.Variable),
- (r'url\(', String.Other, 'string-url'),
- (r'[a-z_-][\w-]*(?=\()', Name.Function),
+ (r'[!$][\w-]+', Name.Variable),
+ (r'url\(', String.Other, 'string-url'),
+ (r'[a-z_-][\w-]*(?=\()', Name.Function),
(words(_css_properties + (
- 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
- 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
- 'capitalize', 'center-left', 'center-right', 'center', 'circle',
- 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
+ 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
+ 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
+ 'capitalize', 'center-left', 'center-right', 'center', 'circle',
+ 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
- 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
- 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
- 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
- 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
- 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
- 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
- 'left-side', 'leftwards', 'level', 'lighter', 'line-through', 'list-item',
- 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
- 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
- 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
- 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
- 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
- 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
- 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
- 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
- 'slow', 'slower', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
- 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
- 'table-caption', 'table-cell', 'table-column', 'table-column-group',
- 'table-footer-group', 'table-header-group', 'table-row',
- 'table-row-group', 'text', 'text-bottom', 'text-top', 'thick', 'thin',
- 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
- 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
- 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
- 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
- Name.Constant),
+ 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
+ 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
+ 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
+ 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
+ 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
+ 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
+ 'left-side', 'leftwards', 'level', 'lighter', 'line-through', 'list-item',
+ 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
+ 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
+ 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
+ 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
+ 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
+ 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
+ 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
+ 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
+ 'slow', 'slower', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
+ 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
+ 'table-caption', 'table-cell', 'table-column', 'table-column-group',
+ 'table-footer-group', 'table-header-group', 'table-row',
+ 'table-row-group', 'text', 'text-bottom', 'text-top', 'thick', 'thin',
+ 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
+ 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
+ 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
+ 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
+ Name.Constant),
(words(_color_keywords, suffix=r'\b'), Name.Entity),
- (words((
- 'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
- 'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
- Name.Builtin),
- (r'\!(important|default)', Name.Exception),
- (r'(true|false)', Name.Pseudo),
- (r'(and|or|not)', Operator.Word),
- (r'/\*', Comment.Multiline, 'inline-comment'),
- (r'//[^\n]*', Comment.Single),
- (r'\#[a-z0-9]{1,6}', Number.Hex),
- (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
- (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[~^*!&%<>|+=@:,./?-]+', Operator),
- (r'[\[\]()]+', Punctuation),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- (r'[a-z_-][\w-]*', Name),
- ],
-
- 'interpolation': [
- (r'\}', String.Interpol, '#pop'),
- include('value'),
- ],
-
- 'selector': [
+ (words((
+ 'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
+ 'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
+ Name.Builtin),
+ (r'\!(important|default)', Name.Exception),
+ (r'(true|false)', Name.Pseudo),
+ (r'(and|or|not)', Operator.Word),
+ (r'/\*', Comment.Multiline, 'inline-comment'),
+ (r'//[^\n]*', Comment.Single),
+ (r'\#[a-z0-9]{1,6}', Number.Hex),
+ (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
+ (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[~^*!&%<>|+=@:,./?-]+', Operator),
+ (r'[\[\]()]+', Punctuation),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ (r'[a-z_-][\w-]*', Name),
+ ],
+
+ 'interpolation': [
+ (r'\}', String.Interpol, '#pop'),
+ include('value'),
+ ],
+
+ 'selector': [
(r'[ \t]+', Whitespace),
- (r'\:', Name.Decorator, 'pseudo-class'),
- (r'\.', Name.Class, 'class'),
- (r'\#', Name.Namespace, 'id'),
- (r'[\w-]+', Name.Tag),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'&', Keyword),
- (r'[~^*!&\[\]()<>|+=@:;,./?-]', Operator),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- ],
-
- 'string-double': [
- (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'"', String.Double, '#pop'),
- ],
-
- 'string-single': [
+ (r'\:', Name.Decorator, 'pseudo-class'),
+ (r'\.', Name.Class, 'class'),
+ (r'\#', Name.Namespace, 'id'),
+ (r'[\w-]+', Name.Tag),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'&', Keyword),
+ (r'[~^*!&\[\]()<>|+=@:;,./?-]', Operator),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ ],
+
+ 'string-double': [
+ (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'"', String.Double, '#pop'),
+ ],
+
+ 'string-single': [
(r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Single),
- (r'#\{', String.Interpol, 'interpolation'),
+ (r'#\{', String.Interpol, 'interpolation'),
(r"'", String.Single, '#pop'),
- ],
-
- 'string-url': [
- (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'\)', String.Other, '#pop'),
- ],
-
- 'pseudo-class': [
- (r'[\w-]+', Name.Decorator),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'class': [
- (r'[\w-]+', Name.Class),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'id': [
- (r'[\w-]+', Name.Namespace),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'for': [
- (r'(from|to|through)', Operator.Word),
- include('value'),
- ],
-}
-
-
-def _indentation(lexer, match, ctx):
- indentation = match.group(0)
+ ],
+
+ 'string-url': [
+ (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'\)', String.Other, '#pop'),
+ ],
+
+ 'pseudo-class': [
+ (r'[\w-]+', Name.Decorator),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'class': [
+ (r'[\w-]+', Name.Class),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'id': [
+ (r'[\w-]+', Name.Namespace),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'for': [
+ (r'(from|to|through)', Operator.Word),
+ include('value'),
+ ],
+}
+
+
+def _indentation(lexer, match, ctx):
+ indentation = match.group(0)
yield match.start(), Whitespace, indentation
- ctx.last_indentation = indentation
- ctx.pos = match.end()
-
- if hasattr(ctx, 'block_state') and ctx.block_state and \
- indentation.startswith(ctx.block_indentation) and \
- indentation != ctx.block_indentation:
- ctx.stack.append(ctx.block_state)
- else:
- ctx.block_state = None
- ctx.block_indentation = None
- ctx.stack.append('content')
-
-
-def _starts_block(token, state):
- def callback(lexer, match, ctx):
- yield match.start(), token, match.group(0)
-
- if hasattr(ctx, 'last_indentation'):
- ctx.block_indentation = ctx.last_indentation
- else:
- ctx.block_indentation = ''
-
- ctx.block_state = state
- ctx.pos = match.end()
-
- return callback
-
-
-class SassLexer(ExtendedRegexLexer):
- """
- For Sass stylesheets.
-
- .. versionadded:: 1.3
- """
-
- name = 'Sass'
- aliases = ['sass']
- filenames = ['*.sass']
- mimetypes = ['text/x-sass']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
+ ctx.last_indentation = indentation
+ ctx.pos = match.end()
+
+ if hasattr(ctx, 'block_state') and ctx.block_state and \
+ indentation.startswith(ctx.block_indentation) and \
+ indentation != ctx.block_indentation:
+ ctx.stack.append(ctx.block_state)
+ else:
+ ctx.block_state = None
+ ctx.block_indentation = None
+ ctx.stack.append('content')
+
+
+def _starts_block(token, state):
+ def callback(lexer, match, ctx):
+ yield match.start(), token, match.group(0)
+
+ if hasattr(ctx, 'last_indentation'):
+ ctx.block_indentation = ctx.last_indentation
+ else:
+ ctx.block_indentation = ''
+
+ ctx.block_state = state
+ ctx.pos = match.end()
+
+ return callback
+
+
+class SassLexer(ExtendedRegexLexer):
+ """
+ For Sass stylesheets.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Sass'
+ aliases = ['sass']
+ filenames = ['*.sass']
+ mimetypes = ['text/x-sass']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
(r'[ \t]*\n', Whitespace),
- (r'[ \t]*', _indentation),
- ],
-
- 'content': [
- (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
- 'root'),
- (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
- 'root'),
- (r'@import', Keyword, 'import'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'content': [
+ (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
+ 'root'),
+ (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
+ 'root'),
+ (r'@import', Keyword, 'import'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
(r'(@mixin)( )([\w-]+)', bygroups(Keyword, Whitespace, Name.Function), 'value'),
(r'(@include)( )([\w-]+)', bygroups(Keyword, Whitespace, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'=[\w-]+', Name.Function, 'value'),
- (r'\+[\w-]+', Name.Decorator, 'value'),
- (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
- bygroups(Name.Variable, Operator), 'value'),
- (r':', Name.Attribute, 'old-style-attr'),
- (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
- default('selector'),
- ],
-
- 'single-comment': [
- (r'.+', Comment.Single),
+ (r'@extend', Keyword, 'selector'),
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'=[\w-]+', Name.Function, 'value'),
+ (r'\+[\w-]+', Name.Decorator, 'value'),
+ (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
+ bygroups(Name.Variable, Operator), 'value'),
+ (r':', Name.Attribute, 'old-style-attr'),
+ (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
+ default('selector'),
+ ],
+
+ 'single-comment': [
+ (r'.+', Comment.Single),
(r'\n', Whitespace, 'root'),
- ],
-
- 'multi-comment': [
- (r'.+', Comment.Multiline),
+ ],
+
+ 'multi-comment': [
+ (r'.+', Comment.Multiline),
(r'\n', Whitespace, 'root'),
- ],
-
- 'import': [
+ ],
+
+ 'import': [
(r'[ \t]+', Whitespace),
- (r'\S+', String),
+ (r'\S+', String),
(r'\n', Whitespace, 'root'),
- ],
-
- 'old-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
+ ],
+
+ 'old-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
(r'([ \t]*)(=)', bygroups(Whitespace, Operator), 'value'),
- default('value'),
- ],
-
- 'new-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
+ default('value'),
+ ],
+
+ 'new-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
(r'([ \t]*)([=:])', bygroups(Whitespace, Operator), 'value'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
for group, common in common_sass_tokens.items():
- tokens[group] = copy.copy(common)
+ tokens[group] = copy.copy(common)
tokens['value'].append((r'\n', Whitespace, 'root'))
tokens['selector'].append((r'\n', Whitespace, 'root'))
-
-
-class ScssLexer(RegexLexer):
- """
- For SCSS stylesheets.
- """
-
- name = 'SCSS'
- aliases = ['scss']
- filenames = ['*.scss']
- mimetypes = ['text/x-scss']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
+
+
+class ScssLexer(RegexLexer):
+ """
+ For SCSS stylesheets.
+ """
+
+ name = 'SCSS'
+ aliases = ['scss']
+ filenames = ['*.scss']
+ mimetypes = ['text/x-scss']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@import', Keyword, 'value'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@import', Keyword, 'value'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+ (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+ (r'@extend', Keyword, 'selector'),
(r'(@media)(\s+)', bygroups(Keyword, Whitespace), 'value'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
- # TODO: broken, and prone to infinite loops.
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
+ # TODO: broken, and prone to infinite loops.
# (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
# (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
- default('selector'),
- ],
-
- 'attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'[ \t]*:', Operator, 'value'),
- default('#pop'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
+ default('selector'),
+ ],
+
+ 'attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'[ \t]*:', Operator, 'value'),
+ default('#pop'),
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
for group, common in common_sass_tokens.items():
- tokens[group] = copy.copy(common)
+ tokens[group] = copy.copy(common)
tokens['value'].extend([(r'\n', Whitespace), (r'[;{}]', Punctuation, '#pop')])
tokens['selector'].extend([(r'\n', Whitespace), (r'[;{}]', Punctuation, '#pop')])
-
-
-class LessCssLexer(CssLexer):
- """
- For `LESS <http://lesscss.org/>`_ stylesheets.
-
- .. versionadded:: 2.1
- """
-
- name = 'LessCss'
- aliases = ['less']
- filenames = ['*.less']
- mimetypes = ['text/x-less-css']
-
- tokens = {
- 'root': [
- (r'@\w+', Name.Variable),
- inherit,
- ],
- 'content': [
+
+
+class LessCssLexer(CssLexer):
+ """
+ For `LESS <http://lesscss.org/>`_ stylesheets.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'LessCss'
+ aliases = ['less']
+ filenames = ['*.less']
+ mimetypes = ['text/x-less-css']
+
+ tokens = {
+ 'root': [
+ (r'@\w+', Name.Variable),
+ inherit,
+ ],
+ 'content': [
(r'\{', Punctuation, '#push'),
(r'//.*\n', Comment.Single),
- inherit,
- ],
- }
+ inherit,
+ ],
+ }
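
The Sass and SCSS lexers above share the common_sass_tokens table and differ mainly in the statement terminators appended to the shared 'value' and 'selector' states: a newline for Sass, ';' and braces for SCSS. A minimal usage sketch, assuming only the standard pygments.lexers entry point (the snippet and driver below are illustrative and not part of this diff):

    # Hypothetical driver: tokenize a short SCSS snippet with the ScssLexer defined above.
    from pygments.lexers import ScssLexer

    snippet = "$accent: #336699;\n.button { color: $accent; }\n"
    for token_type, value in ScssLexer().get_tokens(snippet):
        print(token_type, repr(value))

Each get_tokens() item is a (token type, text) pair, the same stream a Pygments formatter would consume.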
diff --git a/contrib/python/Pygments/py3/pygments/lexers/d.py b/contrib/python/Pygments/py3/pygments/lexers/d.py
index 7844de550f..bfa4d4aaab 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/d.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/d.py
@@ -1,256 +1,256 @@
-"""
- pygments.lexers.d
- ~~~~~~~~~~~~~~~~~
-
- Lexers for D languages.
-
+"""
+ pygments.lexers.d
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for D languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Text, Comment, Keyword, Name, String, \
+from pygments.token import Text, Comment, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
-
-
-class DLexer(RegexLexer):
- """
- For D source.
-
- .. versionadded:: 1.2
- """
- name = 'D'
- filenames = ['*.d', '*.di']
- aliases = ['d']
- mimetypes = ['text/x-dsrc']
-
- tokens = {
- 'root': [
+
+__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
+
+
+class DLexer(RegexLexer):
+ """
+ For D source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'D'
+ filenames = ['*.d', '*.di']
+ aliases = ['d']
+ mimetypes = ['text/x-dsrc']
+
+ tokens = {
+ 'root': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
- # (r'\\\n', Text), # line continuations
- # Comments
+ # (r'\\\n', Text), # line continuations
+ # Comments
(r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nested_comment'),
- # Keywords
- (words((
- 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body',
- 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue',
- 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else',
- 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse',
- 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import',
- 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin',
- 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
- 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
- 'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nested_comment'),
+ # Keywords
+ (words((
+ 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body',
+ 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue',
+ 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else',
+ 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse',
+ 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import',
+ 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin',
+ 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
+ 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
+ 'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
'template', 'this', 'throw', 'try', 'typeid', 'typeof',
- 'union', 'unittest', 'version', 'volatile', 'while', 'with',
- '__gshared', '__traits', '__vector', '__parameters'),
- suffix=r'\b'),
- Keyword),
- (words((
+ 'union', 'unittest', 'version', 'volatile', 'while', 'with',
+ '__gshared', '__traits', '__vector', '__parameters'),
+ suffix=r'\b'),
+ Keyword),
+ (words((
# Removed in 2.072
'typedef', ),
suffix=r'\b'),
Keyword.Removed),
(words((
- 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
- 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
- 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
- 'ushort', 'void', 'wchar'), suffix=r'\b'),
- Keyword.Type),
- (r'(false|true|null)\b', Keyword.Constant),
- (words((
+ 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
+ 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
+ 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
+ 'ushort', 'void', 'wchar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(false|true|null)\b', Keyword.Constant),
+ (words((
'__FILE__', '__FILE_FULL_PATH__', '__MODULE__', '__LINE__', '__FUNCTION__',
'__PRETTY_FUNCTION__', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__',
'__VENDOR__', '__VERSION__'), suffix=r'\b'),
- Keyword.Pseudo),
- (r'macro\b', Keyword.Reserved),
- (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
- # FloatLiteral
- # -- HexFloat
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+', Number.Bin),
- # -- Octal
- (r'0[0-7_]+', Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
- String.Char),
- # StringLiteral
- # -- WysiwygString
- (r'r"[^"]*"[cwd]?', String),
- # -- AlternateWysiwygString
- (r'`[^`]*`[cwd]?', String),
- # -- DoubleQuotedString
+ Keyword.Pseudo),
+ (r'macro\b', Keyword.Reserved),
+ (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
+ # FloatLiteral
+ # -- HexFloat
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+', Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+', Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'r"[^"]*"[cwd]?', String),
+ # -- AlternateWysiwygString
+ (r'`[^`]*`[cwd]?', String),
+ # -- DoubleQuotedString
(r'"(\\\\|\\[^\\]|[^"\\])*"[cwd]?', String),
- # -- EscapeSequence
- (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
- r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
- String),
- # -- HexString
- (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
- # -- DelimitedString
- (r'q"\[', String, 'delimited_bracket'),
- (r'q"\(', String, 'delimited_parenthesis'),
- (r'q"<', String, 'delimited_angle'),
- (r'q"\{', String, 'delimited_curly'),
- (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
- (r'q"(.).*?\1"', String),
- # -- TokenString
- (r'q\{', String, 'token_string'),
- # Attributes
- (r'@([a-zA-Z_]\w*)?', Name.Decorator),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
- r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
- r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- # Line
+ # -- EscapeSequence
+ (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
+ r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
+ String),
+ # -- HexString
+ (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
+ # -- DelimitedString
+ (r'q"\[', String, 'delimited_bracket'),
+ (r'q"\(', String, 'delimited_parenthesis'),
+ (r'q"<', String, 'delimited_angle'),
+ (r'q"\{', String, 'delimited_curly'),
+ (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
+ (r'q"(.).*?\1"', String),
+ # -- TokenString
+ (r'q\{', String, 'token_string'),
+ # Attributes
+ (r'@([a-zA-Z_]\w*)?', Name.Decorator),
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
+ r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
+ r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ # Line
(r'(#line)(\s)(.*)(\n)', bygroups(Comment.Special, Whitespace,
Comment.Special, Whitespace)),
- ],
- 'nested_comment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ],
- 'token_string': [
- (r'\{', Punctuation, 'token_string_nest'),
- (r'\}', String, '#pop'),
- include('root'),
- ],
- 'token_string_nest': [
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'delimited_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, 'delimited_inside_bracket'),
- (r'\]"', String, '#pop'),
- ],
- 'delimited_inside_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, '#push'),
- (r'\]', String, '#pop'),
- ],
- 'delimited_parenthesis': [
- (r'[^()]+', String),
- (r'\(', String, 'delimited_inside_parenthesis'),
- (r'\)"', String, '#pop'),
- ],
- 'delimited_inside_parenthesis': [
- (r'[^()]+', String),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
- 'delimited_angle': [
- (r'[^<>]+', String),
- (r'<', String, 'delimited_inside_angle'),
- (r'>"', String, '#pop'),
- ],
- 'delimited_inside_angle': [
- (r'[^<>]+', String),
- (r'<', String, '#push'),
- (r'>', String, '#pop'),
- ],
- 'delimited_curly': [
- (r'[^{}]+', String),
- (r'\{', String, 'delimited_inside_curly'),
- (r'\}"', String, '#pop'),
- ],
- 'delimited_inside_curly': [
- (r'[^{}]+', String),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- }
-
-
-class CrocLexer(RegexLexer):
- """
- For `Croc <http://jfbillingsley.com/croc>`_ source.
- """
- name = 'Croc'
- filenames = ['*.croc']
- aliases = ['croc']
- mimetypes = ['text/x-crocsrc']
-
- tokens = {
- 'root': [
+ ],
+ 'nested_comment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ],
+ 'token_string': [
+ (r'\{', Punctuation, 'token_string_nest'),
+ (r'\}', String, '#pop'),
+ include('root'),
+ ],
+ 'token_string_nest': [
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'delimited_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, 'delimited_inside_bracket'),
+ (r'\]"', String, '#pop'),
+ ],
+ 'delimited_inside_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, '#push'),
+ (r'\]', String, '#pop'),
+ ],
+ 'delimited_parenthesis': [
+ (r'[^()]+', String),
+ (r'\(', String, 'delimited_inside_parenthesis'),
+ (r'\)"', String, '#pop'),
+ ],
+ 'delimited_inside_parenthesis': [
+ (r'[^()]+', String),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+ 'delimited_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, 'delimited_inside_angle'),
+ (r'>"', String, '#pop'),
+ ],
+ 'delimited_inside_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, '#push'),
+ (r'>', String, '#pop'),
+ ],
+ 'delimited_curly': [
+ (r'[^{}]+', String),
+ (r'\{', String, 'delimited_inside_curly'),
+ (r'\}"', String, '#pop'),
+ ],
+ 'delimited_inside_curly': [
+ (r'[^{}]+', String),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ }
+
+
+class CrocLexer(RegexLexer):
+ """
+ For `Croc <http://jfbillingsley.com/croc>`_ source.
+ """
+ name = 'Croc'
+ filenames = ['*.croc']
+ aliases = ['croc']
+ mimetypes = ['text/x-crocsrc']
+
+ tokens = {
+ 'root': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
- # Comments
+ # Comments
(r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'/\*', Comment.Multiline, 'nestedcomment'),
- # Keywords
- (words((
- 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue',
- 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function',
- 'global', 'namespace', 'if', 'import', 'in', 'is', 'local',
- 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw',
- 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'),
- Keyword),
- (r'(false|true|null)\b', Keyword.Constant),
- # FloatLiteral
- (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
- Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[bB][01][01_]*', Number.Bin),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
- # -- Decimal
- (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
- String.Char),
- # StringLiteral
- # -- WysiwygString
- (r'@"(""|[^"])*"', String),
- (r'@`(``|[^`])*`', String),
- (r"@'(''|[^'])*'", String),
- # -- DoubleQuotedString
+ (r'/\*', Comment.Multiline, 'nestedcomment'),
+ # Keywords
+ (words((
+ 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue',
+ 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function',
+ 'global', 'namespace', 'if', 'import', 'in', 'is', 'local',
+ 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw',
+ 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'),
+ Keyword),
+ (r'(false|true|null)\b', Keyword.Constant),
+ # FloatLiteral
+ (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
+ Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[bB][01][01_]*', Number.Bin),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
+ # -- Decimal
+ (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'@"(""|[^"])*"', String),
+ (r'@`(``|[^`])*`', String),
+ (r"@'(''|[^'])*'", String),
+ # -- DoubleQuotedString
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
- r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
- r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'nestedcomment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
-
-class MiniDLexer(CrocLexer):
- """
- For MiniD source. MiniD is now known as Croc.
- """
- name = 'MiniD'
- filenames = [] # don't lex .md as MiniD, reserve for Markdown
- aliases = ['minid']
- mimetypes = ['text/x-minidsrc']
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
+ r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
+ r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'nestedcomment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+
+class MiniDLexer(CrocLexer):
+ """
+ For MiniD source. MiniD is now known as Croc.
+ """
+ name = 'MiniD'
+ filenames = [] # don't lex .md as MiniD, reserve for Markdown
+ aliases = ['minid']
+ mimetypes = ['text/x-minidsrc']
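
The name, aliases, filenames and mimetypes attributes on these classes are what the Pygments registry keys on; MiniDLexer deliberately leaves filenames empty so that '*.md' stays reserved for Markdown. A small lookup sketch using the standard helpers (illustrative only, not part of this diff):

    from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

    print(get_lexer_by_name('minid'))       # resolved through MiniDLexer.aliases
    print(get_lexer_for_filename('app.d'))  # resolved through DLexer.filenames ('*.d')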
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dalvik.py b/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
index 6d9fb5464e..cdc37c1d01 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
@@ -1,124 +1,124 @@
-"""
- pygments.lexers.dalvik
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for Dalvik VM-related languages.
-
+"""
+ pygments.lexers.dalvik
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexers for Dalvik VM-related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Keyword, Text, Comment, Name, String, Number, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Keyword, Text, Comment, Name, String, Number, \
Punctuation, Whitespace
-
-__all__ = ['SmaliLexer']
-
-
-class SmaliLexer(RegexLexer):
- """
- For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
- code.
-
- .. versionadded:: 1.6
- """
- name = 'Smali'
- aliases = ['smali']
- filenames = ['*.smali']
- mimetypes = ['text/smali']
-
- tokens = {
- 'root': [
- include('comment'),
- include('label'),
- include('field'),
- include('method'),
- include('class'),
- include('directive'),
- include('access-modifier'),
- include('instruction'),
- include('literal'),
- include('punctuation'),
- include('type'),
- include('whitespace')
- ],
- 'directive': [
+
+__all__ = ['SmaliLexer']
+
+
+class SmaliLexer(RegexLexer):
+ """
+ For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
+ code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Smali'
+ aliases = ['smali']
+ filenames = ['*.smali']
+ mimetypes = ['text/smali']
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('label'),
+ include('field'),
+ include('method'),
+ include('class'),
+ include('directive'),
+ include('access-modifier'),
+ include('instruction'),
+ include('literal'),
+ include('punctuation'),
+ include('type'),
+ include('whitespace')
+ ],
+ 'directive': [
(r'^([ \t]*)(\.(?:class|super|implements|field|subannotation|annotation|'
- r'enum|method|registers|locals|array-data|packed-switch|'
- r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
+ r'enum|method|registers|locals|array-data|packed-switch|'
+ r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
r'epilogue|source))', bygroups(Whitespace, Keyword)),
(r'^([ \t]*)(\.end)( )(field|subannotation|annotation|method|array-data|'
'packed-switch|sparse-switch|parameter|local)', bygroups(Whitespace, Keyword, Whitespace, Keyword)),
(r'^([ \t]*)(\.restart)( )(local)', bygroups(Whitespace, Keyword, Whitespace, Keyword)),
- ],
- 'access-modifier': [
- (r'(public|private|protected|static|final|synchronized|bridge|'
- r'varargs|native|abstract|strictfp|synthetic|constructor|'
- r'declared-synchronized|interface|enum|annotation|volatile|'
- r'transient)', Keyword),
- ],
- 'whitespace': [
+ ],
+ 'access-modifier': [
+ (r'(public|private|protected|static|final|synchronized|bridge|'
+ r'varargs|native|abstract|strictfp|synthetic|constructor|'
+ r'declared-synchronized|interface|enum|annotation|volatile|'
+ r'transient)', Keyword),
+ ],
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
- ],
- 'instruction': [
- (r'\b[vp]\d+\b', Name.Builtin), # registers
+ ],
+ 'instruction': [
+ (r'\b[vp]\d+\b', Name.Builtin), # registers
(r'(\b[a-z][A-Za-z0-9/-]+)(\s+)', bygroups(Text, Whitespace)), # instructions
- ],
- 'literal': [
- (r'".*"', String),
- (r'0x[0-9A-Fa-f]+t?', Number.Hex),
- (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+L?', Number.Integer),
- ],
- 'field': [
- (r'(\$?\b)([\w$]*)(:)',
- bygroups(Punctuation, Name.Variable, Punctuation)),
- ],
- 'method': [
- (r'<(?:cl)?init>', Name.Function), # constructor
- (r'(\$?\b)([\w$]*)(\()',
- bygroups(Punctuation, Name.Function, Punctuation)),
- ],
- 'label': [
- (r':\w+', Name.Label),
- ],
- 'class': [
- # class names in the form Lcom/namespace/ClassName;
- # I only want to color the ClassName part, so the namespace part is
- # treated as 'Text'
- (r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
- bygroups(Keyword.Type, Text, Name.Class, Text)),
- ],
- 'punctuation': [
- (r'->', Punctuation),
- (r'[{},():=.-]', Punctuation),
- ],
- 'type': [
- (r'[ZBSCIJFDV\[]+', Keyword.Type),
- ],
- 'comment': [
- (r'#.*?\n', Comment),
- ],
- }
-
- def analyse_text(text):
- score = 0
- if re.search(r'^\s*\.class\s', text, re.MULTILINE):
- score += 0.5
- if re.search(r'\b((check-cast|instance-of|throw-verification-error'
- r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
- r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
- r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
- score += 0.3
- if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
- r'\b(array-data|class-change-error|declared-synchronized|'
- r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
- r'illegal-class-access|illegal-field-access|'
- r'illegal-method-access|instantiation-error|no-error|'
- r'no-such-class|no-such-field|no-such-method|'
- r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
- score += 0.6
- return score
+ ],
+ 'literal': [
+ (r'".*"', String),
+ (r'0x[0-9A-Fa-f]+t?', Number.Hex),
+ (r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+L?', Number.Integer),
+ ],
+ 'field': [
+ (r'(\$?\b)([\w$]*)(:)',
+ bygroups(Punctuation, Name.Variable, Punctuation)),
+ ],
+ 'method': [
+ (r'<(?:cl)?init>', Name.Function), # constructor
+ (r'(\$?\b)([\w$]*)(\()',
+ bygroups(Punctuation, Name.Function, Punctuation)),
+ ],
+ 'label': [
+ (r':\w+', Name.Label),
+ ],
+ 'class': [
+ # class names in the form Lcom/namespace/ClassName;
+ # I only want to color the ClassName part, so the namespace part is
+ # treated as 'Text'
+ (r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
+ bygroups(Keyword.Type, Text, Name.Class, Text)),
+ ],
+ 'punctuation': [
+ (r'->', Punctuation),
+ (r'[{},():=.-]', Punctuation),
+ ],
+ 'type': [
+ (r'[ZBSCIJFDV\[]+', Keyword.Type),
+ ],
+ 'comment': [
+ (r'#.*?\n', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ score = 0
+ if re.search(r'^\s*\.class\s', text, re.MULTILINE):
+ score += 0.5
+ if re.search(r'\b((check-cast|instance-of|throw-verification-error'
+ r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
+ r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
+ r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
+ score += 0.3
+ if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
+ r'\b(array-data|class-change-error|declared-synchronized|'
+ r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
+ r'illegal-class-access|illegal-field-access|'
+ r'illegal-method-access|instantiation-error|no-error|'
+ r'no-such-class|no-such-field|no-such-method|'
+ r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
+ score += 0.6
+ return score
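
analyse_text returns a heuristic score that pygments.lexers.guess_lexer compares across all registered lexers when only raw text is available. A hedged sketch of how the scoring above behaves (standard API; which lexer ultimately wins depends on every other lexer's score, so the guess_lexer result is not guaranteed):

    from pygments.lexers import guess_lexer
    from pygments.lexers.dalvik import SmaliLexer

    smali = ".class public Lcom/example/Foo;\n.super Ljava/lang/Object;\n"
    print(SmaliLexer.analyse_text(smali))   # 0.5 for this snippet: only the '.class' check fires
    print(guess_lexer(smali))               # picks whichever registered lexer scored highest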
diff --git a/contrib/python/Pygments/py3/pygments/lexers/data.py b/contrib/python/Pygments/py3/pygments/lexers/data.py
index c702d42093..72d8778e57 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/data.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/data.py
@@ -1,450 +1,450 @@
-"""
- pygments.lexers.data
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for data file formats.
-
+"""
+ pygments.lexers.data
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for data file formats.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from pygments.lexer import Lexer, ExtendedRegexLexer, LexerContext, \
include, bygroups
-from pygments.token import Text, Comment, Keyword, Name, String, Number, \
+from pygments.token import Text, Comment, Keyword, Name, String, Number, \
Punctuation, Literal, Error, Whitespace
-
+
__all__ = ['YamlLexer', 'JsonLexer', 'JsonBareObjectLexer', 'JsonLdLexer']
-
-
-class YamlLexerContext(LexerContext):
- """Indentation context for the YAML lexer."""
-
- def __init__(self, *args, **kwds):
+
+
+class YamlLexerContext(LexerContext):
+ """Indentation context for the YAML lexer."""
+
+ def __init__(self, *args, **kwds):
super().__init__(*args, **kwds)
- self.indent_stack = []
- self.indent = -1
- self.next_indent = 0
- self.block_scalar_indent = None
-
-
-class YamlLexer(ExtendedRegexLexer):
- """
- Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
- language.
-
- .. versionadded:: 0.11
- """
-
- name = 'YAML'
- aliases = ['yaml']
- filenames = ['*.yaml', '*.yml']
- mimetypes = ['text/x-yaml']
-
- def something(token_class):
- """Do not produce empty tokens."""
- def callback(lexer, match, context):
- text = match.group()
- if not text:
- return
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def reset_indent(token_class):
- """Reset the indentation levels."""
- def callback(lexer, match, context):
- text = match.group()
- context.indent_stack = []
- context.indent = -1
- context.next_indent = 0
- context.block_scalar_indent = None
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def save_indent(token_class, start=False):
- """Save a possible indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- extra = ''
- if start:
- context.next_indent = len(text)
- if context.next_indent < context.indent:
- while context.next_indent < context.indent:
- context.indent = context.indent_stack.pop()
- if context.next_indent > context.indent:
- extra = text[context.indent:]
- text = text[:context.indent]
- else:
- context.next_indent += len(text)
- if text:
- yield match.start(), token_class, text
- if extra:
- yield match.start()+len(text), token_class.Error, extra
- context.pos = match.end()
- return callback
-
- def set_indent(token_class, implicit=False):
- """Set the previously saved indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- if context.indent < context.next_indent:
- context.indent_stack.append(context.indent)
- context.indent = context.next_indent
- if not implicit:
- context.next_indent += len(text)
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def set_block_scalar_indent(token_class):
- """Set an explicit indentation level for a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- context.block_scalar_indent = None
- if not text:
- return
- increment = match.group(1)
- if increment:
- current_indent = max(context.indent, 0)
- increment = int(increment)
- context.block_scalar_indent = current_indent + increment
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_empty_line(indent_token_class, content_token_class):
- """Process an empty line in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if (context.block_scalar_indent is None or
- len(text) <= context.block_scalar_indent):
- if text:
- yield match.start(), indent_token_class, text
- else:
- indentation = text[:context.block_scalar_indent]
- content = text[context.block_scalar_indent:]
- yield match.start(), indent_token_class, indentation
- yield (match.start()+context.block_scalar_indent,
- content_token_class, content)
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_indent(token_class):
- """Process indentation spaces in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if context.block_scalar_indent is None:
- if len(text) <= max(context.indent, 0):
- context.stack.pop()
- context.stack.pop()
- return
- context.block_scalar_indent = len(text)
- else:
- if len(text) < context.block_scalar_indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_plain_scalar_indent(token_class):
- """Process indentation spaces in a plain scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if len(text) <= context.indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- tokens = {
- # the root rules
- 'root': [
- # ignored whitespaces
+ self.indent_stack = []
+ self.indent = -1
+ self.next_indent = 0
+ self.block_scalar_indent = None
+
+
+class YamlLexer(ExtendedRegexLexer):
+ """
+ Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
+ language.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'YAML'
+ aliases = ['yaml']
+ filenames = ['*.yaml', '*.yml']
+ mimetypes = ['text/x-yaml']
+
+ def something(token_class):
+ """Do not produce empty tokens."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if not text:
+ return
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def reset_indent(token_class):
+ """Reset the indentation levels."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.indent_stack = []
+ context.indent = -1
+ context.next_indent = 0
+ context.block_scalar_indent = None
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def save_indent(token_class, start=False):
+ """Save a possible indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ extra = ''
+ if start:
+ context.next_indent = len(text)
+ if context.next_indent < context.indent:
+ while context.next_indent < context.indent:
+ context.indent = context.indent_stack.pop()
+ if context.next_indent > context.indent:
+ extra = text[context.indent:]
+ text = text[:context.indent]
+ else:
+ context.next_indent += len(text)
+ if text:
+ yield match.start(), token_class, text
+ if extra:
+ yield match.start()+len(text), token_class.Error, extra
+ context.pos = match.end()
+ return callback
+
+ def set_indent(token_class, implicit=False):
+ """Set the previously saved indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.indent < context.next_indent:
+ context.indent_stack.append(context.indent)
+ context.indent = context.next_indent
+ if not implicit:
+ context.next_indent += len(text)
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def set_block_scalar_indent(token_class):
+ """Set an explicit indentation level for a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.block_scalar_indent = None
+ if not text:
+ return
+ increment = match.group(1)
+ if increment:
+ current_indent = max(context.indent, 0)
+ increment = int(increment)
+ context.block_scalar_indent = current_indent + increment
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_empty_line(indent_token_class, content_token_class):
+ """Process an empty line in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if (context.block_scalar_indent is None or
+ len(text) <= context.block_scalar_indent):
+ if text:
+ yield match.start(), indent_token_class, text
+ else:
+ indentation = text[:context.block_scalar_indent]
+ content = text[context.block_scalar_indent:]
+ yield match.start(), indent_token_class, indentation
+ yield (match.start()+context.block_scalar_indent,
+ content_token_class, content)
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_indent(token_class):
+ """Process indentation spaces in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.block_scalar_indent is None:
+ if len(text) <= max(context.indent, 0):
+ context.stack.pop()
+ context.stack.pop()
+ return
+ context.block_scalar_indent = len(text)
+ else:
+ if len(text) < context.block_scalar_indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_plain_scalar_indent(token_class):
+ """Process indentation spaces in a plain scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if len(text) <= context.indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ tokens = {
+ # the root rules
+ 'root': [
+ # ignored whitespaces
(r'[ ]+(?=#|$)', Whitespace),
- # line breaks
+ # line breaks
(r'\n+', Whitespace),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # the '%YAML' directive
- (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
- # the %TAG directive
- (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
- # document start and document end indicators
- (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
- 'block-line'),
- # indentation spaces
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # the '%YAML' directive
+ (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
+ # the %TAG directive
+ (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
+ # document start and document end indicators
+ (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
+ 'block-line'),
+ # indentation spaces
(r'[ ]*(?!\s|$)', save_indent(Whitespace, start=True),
- ('block-line', 'indentation')),
- ],
-
- # trailing whitespaces after directives or a block scalar indicator
- 'ignored-line': [
- # ignored whitespaces
+ ('block-line', 'indentation')),
+ ],
+
+ # trailing whitespaces after directives or a block scalar indicator
+ 'ignored-line': [
+ # ignored whitespaces
(r'[ ]+(?=#|$)', Whitespace),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # line break
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # line break
(r'\n', Whitespace, '#pop:2'),
- ],
-
- # the %YAML directive
- 'yaml-directive': [
- # the version number
- (r'([ ]+)([0-9]+\.[0-9]+)',
+ ],
+
+ # the %YAML directive
+ 'yaml-directive': [
+ # the version number
+ (r'([ ]+)([0-9]+\.[0-9]+)',
bygroups(Whitespace, Number), 'ignored-line'),
- ],
-
+ ],
+
# the %TAG directive
- 'tag-directive': [
- # a tag handle and the corresponding prefix
- (r'([ ]+)(!|![\w-]*!)'
- r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
+ 'tag-directive': [
+ # a tag handle and the corresponding prefix
+ (r'([ ]+)(!|![\w-]*!)'
+ r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
bygroups(Whitespace, Keyword.Type, Whitespace, Keyword.Type),
- 'ignored-line'),
- ],
-
- # block scalar indicators and indentation spaces
- 'indentation': [
- # trailing whitespaces are ignored
+ 'ignored-line'),
+ ],
+
+ # block scalar indicators and indentation spaces
+ 'indentation': [
+ # trailing whitespaces are ignored
(r'[ ]*$', something(Whitespace), '#pop:2'),
# whitespaces preceding block collection indicators
(r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Whitespace)),
- # block collection indicators
- (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
- # the beginning of a block line
+ # block collection indicators
+ (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
+ # the beginning of a block line
(r'[ ]*', save_indent(Whitespace), '#pop'),
- ],
-
- # an indented line in the block context
- 'block-line': [
- # the line end
+ ],
+
+ # an indented line in the block context
+ 'block-line': [
+ # the line end
(r'[ ]*(?=#|$)', something(Whitespace), '#pop'),
- # whitespaces separating tokens
+ # whitespaces separating tokens
(r'[ ]+', Whitespace),
# key with colon
(r'''([^#,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
bygroups(Name.Tag, set_indent(Punctuation, implicit=True))),
- # tags, anchors and aliases,
- include('descriptors'),
- # block collections and scalars
- include('block-nodes'),
- # flow collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
- something(Name.Variable),
- 'plain-scalar-in-block-context'),
- ],
-
- # tags, anchors, aliases
- 'descriptors': [
- # a full-form tag
+ # tags, anchors and aliases,
+ include('descriptors'),
+ # block collections and scalars
+ include('block-nodes'),
+ # flow collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
+ something(Name.Variable),
+ 'plain-scalar-in-block-context'),
+ ],
+
+ # tags, anchors, aliases
+ 'descriptors': [
+ # a full-form tag
(r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
- # a tag in the form '!', '!suffix' or '!handle!suffix'
+ # a tag in the form '!', '!suffix' or '!handle!suffix'
(r'!(?:[\w-]+!)?'
r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]*', Keyword.Type),
- # an anchor
- (r'&[\w-]+', Name.Label),
- # an alias
- (r'\*[\w-]+', Name.Variable),
- ],
-
- # block collections and scalars
- 'block-nodes': [
- # implicit key
- (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
- # literal and folded scalars
- (r'[|>]', Punctuation.Indicator,
- ('block-scalar-content', 'block-scalar-header')),
- ],
-
- # flow collections and quoted scalars
- 'flow-nodes': [
- # a flow sequence
- (r'\[', Punctuation.Indicator, 'flow-sequence'),
- # a flow mapping
- (r'\{', Punctuation.Indicator, 'flow-mapping'),
- # a single-quoted scalar
- (r'\'', String, 'single-quoted-scalar'),
- # a double-quoted scalar
- (r'\"', String, 'double-quoted-scalar'),
- ],
-
- # the content of a flow collection
- 'flow-collection': [
- # whitespaces
+ # an anchor
+ (r'&[\w-]+', Name.Label),
+ # an alias
+ (r'\*[\w-]+', Name.Variable),
+ ],
+
+ # block collections and scalars
+ 'block-nodes': [
+ # implicit key
+ (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
+ # literal and folded scalars
+ (r'[|>]', Punctuation.Indicator,
+ ('block-scalar-content', 'block-scalar-header')),
+ ],
+
+ # flow collections and quoted scalars
+ 'flow-nodes': [
+ # a flow sequence
+ (r'\[', Punctuation.Indicator, 'flow-sequence'),
+ # a flow mapping
+ (r'\{', Punctuation.Indicator, 'flow-mapping'),
+ # a single-quoted scalar
+ (r'\'', String, 'single-quoted-scalar'),
+ # a double-quoted scalar
+ (r'\"', String, 'double-quoted-scalar'),
+ ],
+
+ # the content of a flow collection
+ 'flow-collection': [
+ # whitespaces
(r'[ ]+', Whitespace),
- # line breaks
+ # line breaks
(r'\n+', Whitespace),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # simple indicators
- (r'[?:,]', Punctuation.Indicator),
- # tags, anchors and aliases
- include('descriptors'),
- # nested collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
- something(Name.Variable),
- 'plain-scalar-in-flow-context'),
- ],
-
- # a flow sequence indicated by '[' and ']'
- 'flow-sequence': [
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\]', Punctuation.Indicator, '#pop'),
- ],
-
- # a flow mapping indicated by '{' and '}'
- 'flow-mapping': [
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # simple indicators
+ (r'[?:,]', Punctuation.Indicator),
+ # tags, anchors and aliases
+ include('descriptors'),
+ # nested collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
+ something(Name.Variable),
+ 'plain-scalar-in-flow-context'),
+ ],
+
+ # a flow sequence indicated by '[' and ']'
+ 'flow-sequence': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\]', Punctuation.Indicator, '#pop'),
+ ],
+
+ # a flow mapping indicated by '{' and '}'
+ 'flow-mapping': [
# key with colon
(r'''([^,:?\[\]{}"'\n]+)(:)(?=[ ]|$)''',
bygroups(Name.Tag, Punctuation)),
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\}', Punctuation.Indicator, '#pop'),
- ],
-
- # block scalar lines
- 'block-scalar-content': [
- # line break
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\}', Punctuation.Indicator, '#pop'),
+ ],
+
+ # block scalar lines
+ 'block-scalar-content': [
+ # line break
(r'\n', Whitespace),
- # empty line
- (r'^[ ]+$',
+ # empty line
+ (r'^[ ]+$',
parse_block_scalar_empty_line(Whitespace, Name.Constant)),
- # indentation spaces (we may leave the state here)
+ # indentation spaces (we may leave the state here)
(r'^[ ]*', parse_block_scalar_indent(Whitespace)),
- # line content
- (r'[\S\t ]+', Name.Constant),
- ],
-
- # the content of a literal or folded scalar
- 'block-scalar-header': [
- # indentation indicator followed by chomping flag
- (r'([1-9])?[+-]?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- # chomping flag followed by indentation indicator
- (r'[+-]?([1-9])?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- ],
-
- # ignored and regular whitespaces in quoted scalars
- 'quoted-scalar-whitespaces': [
- # leading and trailing whitespaces are ignored
+ # line content
+ (r'[\S\t ]+', Name.Constant),
+ ],
+
+ # the content of a literal or folded scalar
+ 'block-scalar-header': [
+ # indentation indicator followed by chomping flag
+ (r'([1-9])?[+-]?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ # chomping flag followed by indentation indicator
+ (r'[+-]?([1-9])?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ ],
+
+ # ignored and regular whitespaces in quoted scalars
+ 'quoted-scalar-whitespaces': [
+ # leading and trailing whitespaces are ignored
(r'^[ ]+', Whitespace),
(r'[ ]+$', Whitespace),
- # line breaks are ignored
+ # line breaks are ignored
(r'\n+', Whitespace),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- ],
-
- # single-quoted scalars
- 'single-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of the quote character
- (r'\'\'', String.Escape),
- # regular non-whitespace characters
- (r'[^\s\']+', String),
- # the closing quote
- (r'\'', String, '#pop'),
- ],
-
- # double-quoted scalars
- 'double-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of special characters
- (r'\\[0abt\tn\nvfre "\\N_LP]', String),
- # escape codes
- (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
- String.Escape),
- # regular non-whitespace characters
- (r'[^\s"\\]+', String),
- # the closing quote
- (r'"', String, '#pop'),
- ],
-
- # the beginning of a new line while scanning a plain scalar
- 'plain-scalar-in-block-context-new-line': [
- # empty lines
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ ],
+
+ # single-quoted scalars
+ 'single-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of the quote character
+ (r'\'\'', String.Escape),
+ # regular non-whitespace characters
+ (r'[^\s\']+', String),
+ # the closing quote
+ (r'\'', String, '#pop'),
+ ],
+
+ # double-quoted scalars
+ 'double-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of special characters
+ (r'\\[0abt\tn\nvfre "\\N_LP]', String),
+ # escape codes
+ (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
+ String.Escape),
+ # regular non-whitespace characters
+ (r'[^\s"\\]+', String),
+ # the closing quote
+ (r'"', String, '#pop'),
+ ],
+
+ # the beginning of a new line while scanning a plain scalar
+ 'plain-scalar-in-block-context-new-line': [
+ # empty lines
(r'^[ ]+$', Whitespace),
- # line breaks
+ # line breaks
(r'\n+', Whitespace),
- # document start and document end indicators
- (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
- # indentation spaces (we may leave the block line state here)
+ # document start and document end indicators
+ (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
+ # indentation spaces (we may leave the block line state here)
(r'^[ ]*', parse_plain_scalar_indent(Whitespace), '#pop'),
- ],
-
- # a plain scalar in the block context
- 'plain-scalar-in-block-context': [
- # the scalar ends with the ':' indicator
+ ],
+
+ # a plain scalar in the block context
+ 'plain-scalar-in-block-context': [
+ # the scalar ends with the ':' indicator
(r'[ ]*(?=:[ ]|:$)', something(Whitespace), '#pop'),
- # the scalar ends with whitespaces followed by a comment
+ # the scalar ends with whitespaces followed by a comment
(r'[ ]+(?=#)', Whitespace, '#pop'),
- # trailing whitespaces are ignored
+ # trailing whitespaces are ignored
(r'[ ]+$', Whitespace),
- # line breaks are ignored
+ # line breaks are ignored
(r'\n+', Whitespace, 'plain-scalar-in-block-context-new-line'),
- # other whitespaces are a part of the value
- (r'[ ]+', Literal.Scalar.Plain),
- # regular non-whitespace characters
- (r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
- ],
-
- # a plain scalar is the flow context
- 'plain-scalar-in-flow-context': [
- # the scalar ends with an indicator character
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
+ ],
+
+ # a plain scalar is the flow context
+ 'plain-scalar-in-flow-context': [
+ # the scalar ends with an indicator character
(r'[ ]*(?=[,:?\[\]{}])', something(Whitespace), '#pop'),
- # the scalar ends with a comment
+ # the scalar ends with a comment
(r'[ ]+(?=#)', Whitespace, '#pop'),
- # leading and trailing whitespaces are ignored
+ # leading and trailing whitespaces are ignored
(r'^[ ]+', Whitespace),
(r'[ ]+$', Whitespace),
- # line breaks are ignored
+ # line breaks are ignored
(r'\n+', Whitespace),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- # regular non-whitespace characters
- (r'[^\s,:?\[\]{}]+', Name.Variable),
- ],
-
- }
-
- def get_tokens_unprocessed(self, text=None, context=None):
- if context is None:
- context = YamlLexerContext(text, 0)
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ # regular non-whitespace characters
+ (r'[^\s,:?\[\]{}]+', Name.Variable),
+ ],
+
+ }
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ if context is None:
+ context = YamlLexerContext(text, 0)
return super().get_tokens_unprocessed(text, context)
-
-
+
+
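A minimal usage sketch for the YamlLexer defined above (assuming the standard pygments.lexers exports): it is an ExtendedRegexLexer that threads block-scalar indentation through YamlLexerContext, so ordinary get_tokens() is all a caller needs.

    from pygments.lexers import YamlLexer

    yaml_text = "key: value\nitems:\n  - one\n  - two\n"
    # get_tokens() builds the custom context internally via get_tokens_unprocessed().
    for token_type, value in YamlLexer().get_tokens(yaml_text):
        if value.strip():
            print(token_type, repr(value))
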
class JsonLexer(Lexer):
- """
- For JSON data structures.
-
- .. versionadded:: 1.5
- """
-
- name = 'JSON'
+ """
+ For JSON data structures.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'JSON'
aliases = ['json', 'json-object']
filenames = ['*.json', 'Pipfile.lock']
mimetypes = ['application/json', 'application/json-object']
-
+
# No validation of integers, floats, or constants is done.
# As long as the characters are members of the following
# sets, the token will be considered valid. For example,
@@ -459,10 +459,10 @@ class JsonLexer(Lexer):
hexadecimals = set('0123456789abcdefABCDEF')
punctuations = set('{}[],')
whitespaces = {'\u0020', '\u000a', '\u000d', '\u0009'}
-
+
def get_tokens_unprocessed(self, text):
"""Parse JSON data."""
-
+
in_string = False
in_escape = False
in_unicode_escape = 0
@@ -471,9 +471,9 @@ class JsonLexer(Lexer):
in_number = False
in_float = False
in_punctuation = False
-
+
start = 0
-
+
# The queue is used to store data that may need to be tokenized
# differently based on what follows. In particular, JSON object
# keys are tokenized differently than string values, but cannot
@@ -493,7 +493,7 @@ class JsonLexer(Lexer):
# is encountered after the string closes.
#
queue = []
-
+
for stop, character in enumerate(text):
if in_string:
if in_unicode_escape:
@@ -504,35 +504,35 @@ class JsonLexer(Lexer):
else:
in_unicode_escape = 0
in_escape = False
-
+
elif in_escape:
if character == 'u':
in_unicode_escape = 4
else:
in_escape = False
-
+
elif character == '\\':
in_escape = True
-
+
elif character == '"':
queue.append((start, String.Double, text[start:stop + 1]))
in_string = False
in_escape = False
in_unicode_escape = 0
-
+
continue
-
+
elif in_whitespace:
if character in self.whitespaces:
continue
-
+
if queue:
queue.append((start, Whitespace, text[start:stop]))
else:
yield start, Whitespace, text[start:stop]
in_whitespace = False
# Fall through so the new character can be evaluated.
-
+
elif in_constant:
if character in self.constants:
continue
@@ -652,18 +652,18 @@ class JsonBareObjectLexer(JsonLexer):
mimetypes = []
-class JsonLdLexer(JsonLexer):
- """
+class JsonLdLexer(JsonLexer):
+ """
For `JSON-LD <https://json-ld.org/>`_ linked data.
-
- .. versionadded:: 2.0
- """
-
- name = 'JSON-LD'
- aliases = ['jsonld', 'json-ld']
- filenames = ['*.jsonld']
- mimetypes = ['application/ld+json']
-
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'JSON-LD'
+ aliases = ['jsonld', 'json-ld']
+ filenames = ['*.jsonld']
+ mimetypes = ['application/ld+json']
+
json_ld_keywords = {
'"@%s"' % keyword
for keyword in (
@@ -691,7 +691,7 @@ class JsonLdLexer(JsonLexer):
'version',
'vocab',
)
- }
+ }
def get_tokens_unprocessed(self, text):
for start, token, value in super().get_tokens_unprocessed(text):
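The JsonLexer above scans characters by hand and uses the queue to hold a closed string back until it can see whether a ':' follows, which is how object keys end up tagged differently from plain string values; JsonLdLexer then only remaps the recognised '@'-keywords. A small sketch of the observable effect (token expectations are hedged, not verified against this exact vendored revision):

    from pygments.lexers import JsonLexer
    from pygments.token import Name, String

    sample = '{"name": "value", "n": 1}'
    for _, token_type, value in JsonLexer().get_tokens_unprocessed(sample):
        if token_type in Name or token_type in String:
            # keys should surface as Name.Tag, string values as String.Double
            print(token_type, value)
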
diff --git a/contrib/python/Pygments/py3/pygments/lexers/diff.py b/contrib/python/Pygments/py3/pygments/lexers/diff.py
index a694bd68e6..18b94cfe0d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/diff.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/diff.py
@@ -1,34 +1,34 @@
-"""
- pygments.lexers.diff
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for diff/patch formats.
-
+"""
+ pygments.lexers.diff
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for diff/patch formats.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import re
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
Literal, Whitespace
-
+
__all__ = ['DiffLexer', 'DarcsPatchLexer', 'WDiffLexer']
-
-
-class DiffLexer(RegexLexer):
- """
- Lexer for unified or context-style diffs or patches.
- """
-
- name = 'Diff'
- aliases = ['diff', 'udiff']
- filenames = ['*.diff', '*.patch']
- mimetypes = ['text/x-diff', 'text/x-patch']
-
- tokens = {
- 'root': [
+
+
+class DiffLexer(RegexLexer):
+ """
+ Lexer for unified or context-style diffs or patches.
+ """
+
+ name = 'Diff'
+ aliases = ['diff', 'udiff']
+ filenames = ['*.diff', '*.patch']
+ mimetypes = ['text/x-diff', 'text/x-patch']
+
+ tokens = {
+ 'root': [
(r'( )(.*)(\n)', bygroups(Whitespace, Text, Whitespace)),
(r'(\+.*)(\n)', bygroups(Generic.Inserted, Whitespace)),
(r'(-.*)(\n)', bygroups(Generic.Deleted, Whitespace)),
@@ -37,74 +37,74 @@ class DiffLexer(RegexLexer):
(r'((?:[Ii]ndex|diff).*)(\n)', bygroups(Generic.Heading, Whitespace)),
(r'(=.*)(\n)', bygroups(Generic.Heading, Whitespace)),
(r'(.*)(\n)', Whitespace),
- ]
- }
-
- def analyse_text(text):
- if text[:7] == 'Index: ':
- return True
- if text[:5] == 'diff ':
- return True
- if text[:4] == '--- ':
- return 0.9
-
-
-class DarcsPatchLexer(RegexLexer):
- """
- DarcsPatchLexer is a lexer for the various versions of the darcs patch
- format. Examples of this format are derived by commands such as
- ``darcs annotate --patch`` and ``darcs send``.
-
- .. versionadded:: 0.10
- """
-
- name = 'Darcs Patch'
- aliases = ['dpatch']
- filenames = ['*.dpatch', '*.darcspatch']
-
- DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
- 'replace')
-
- tokens = {
- 'root': [
- (r'<', Operator),
- (r'>', Operator),
- (r'\{', Operator),
- (r'\}', Operator),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
+ ]
+ }
+
+ def analyse_text(text):
+ if text[:7] == 'Index: ':
+ return True
+ if text[:5] == 'diff ':
+ return True
+ if text[:4] == '--- ':
+ return 0.9
+
+
+class DarcsPatchLexer(RegexLexer):
+ """
+ DarcsPatchLexer is a lexer for the various versions of the darcs patch
+ format. Examples of this format are derived by commands such as
+ ``darcs annotate --patch`` and ``darcs send``.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Darcs Patch'
+ aliases = ['dpatch']
+ filenames = ['*.dpatch', '*.darcspatch']
+
+ DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
+ 'replace')
+
+ tokens = {
+ 'root': [
+ (r'<', Operator),
+ (r'>', Operator),
+ (r'\{', Operator),
+ (r'\}', Operator),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
bygroups(Operator, Keyword, Name, Whitespace, Name, Operator,
Literal.Date, Whitespace, Operator)),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
bygroups(Operator, Keyword, Name, Whitespace, Name, Operator,
Literal.Date, Whitespace), 'comment'),
- (r'New patches:', Generic.Heading),
- (r'Context:', Generic.Heading),
- (r'Patch bundle hash:', Generic.Heading),
+ (r'New patches:', Generic.Heading),
+ (r'Context:', Generic.Heading),
+ (r'Patch bundle hash:', Generic.Heading),
(r'(\s*)(%s)(.*)(\n)' % '|'.join(DPATCH_KEYWORDS),
bygroups(Whitespace, Keyword, Text, Whitespace)),
- (r'\+', Generic.Inserted, "insert"),
- (r'-', Generic.Deleted, "delete"),
+ (r'\+', Generic.Inserted, "insert"),
+ (r'-', Generic.Deleted, "delete"),
(r'(.*)(\n)', bygroups(Text, Whitespace)),
- ],
- 'comment': [
- (r'[^\]].*\n', Comment),
- (r'\]', Operator, "#pop"),
- ],
- 'specialText': [ # darcs add [_CODE_] special operators for clarity
+ ],
+ 'comment': [
+ (r'[^\]].*\n', Comment),
+ (r'\]', Operator, "#pop"),
+ ],
+ 'specialText': [ # darcs add [_CODE_] special operators for clarity
(r'\n', Whitespace, "#pop"), # line-based
- (r'\[_[^_]*_]', Operator),
- ],
- 'insert': [
- include('specialText'),
- (r'\[', Generic.Inserted),
- (r'[^\n\[]+', Generic.Inserted),
- ],
- 'delete': [
- include('specialText'),
- (r'\[', Generic.Deleted),
- (r'[^\n\[]+', Generic.Deleted),
- ],
- }
+ (r'\[_[^_]*_]', Operator),
+ ],
+ 'insert': [
+ include('specialText'),
+ (r'\[', Generic.Inserted),
+ (r'[^\n\[]+', Generic.Inserted),
+ ],
+ 'delete': [
+ include('specialText'),
+ (r'\[', Generic.Deleted),
+ (r'[^\n\[]+', Generic.Deleted),
+ ],
+ }
class WDiffLexer(RegexLexer):
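DiffLexer.analyse_text above returns a bare confidence value (True, 0.9, or nothing), which guess_lexer compares across all registered lexers; DarcsPatchLexer relies on its dedicated filename patterns instead. A short sketch, assuming the usual pygments.lexers helpers:

    from pygments.lexers import DiffLexer, guess_lexer

    patch = "--- a/file.txt\n+++ b/file.txt\n@@ -1 +1 @@\n-old\n+new\n"
    print(DiffLexer.analyse_text(patch))   # 0.9 for a leading '--- ' header
    print(guess_lexer(patch).name)         # most likely 'Diff'
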
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dotnet.py b/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
index c04d2a0a92..24595f3555 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
@@ -1,416 +1,416 @@
-"""
- pygments.lexers.dotnet
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for .net languages.
-
+"""
+ pygments.lexers.dotnet
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for .net languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
+ :license: BSD, see LICENSE for details.
+"""
+import re
+
+from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
using, this, default, words
from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
Name, String, Number, Literal, Other, Whitespace
from pygments.util import get_choice_opt
-from pygments import unistring as uni
-
-from pygments.lexers.html import XmlLexer
-
-__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
- 'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
-
-
-class CSharpLexer(RegexLexer):
- """
- For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
- source code.
-
- Additional options accepted:
-
- `unicodelevel`
- Determines which Unicode characters this lexer allows for identifiers.
- The possible values are:
-
- * ``none`` -- only the ASCII letters and numbers are allowed. This
- is the fastest selection.
- * ``basic`` -- all Unicode characters from the specification except
- category ``Lo`` are allowed.
- * ``full`` -- all Unicode characters as specified in the C# specs
- are allowed. Note that this means a considerable slowdown since the
- ``Lo`` category has more than 40,000 characters in it!
-
- The default value is ``basic``.
-
- .. versionadded:: 0.8
- """
-
- name = 'C#'
+from pygments import unistring as uni
+
+from pygments.lexers.html import XmlLexer
+
+__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
+ 'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
+
+
+class CSharpLexer(RegexLexer):
+ """
+ For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
+ source code.
+
+ Additional options accepted:
+
+ `unicodelevel`
+ Determines which Unicode characters this lexer allows for identifiers.
+ The possible values are:
+
+ * ``none`` -- only the ASCII letters and numbers are allowed. This
+ is the fastest selection.
+ * ``basic`` -- all Unicode characters from the specification except
+ category ``Lo`` are allowed.
+ * ``full`` -- all Unicode characters as specified in the C# specs
+ are allowed. Note that this means a considerable slowdown since the
+ ``Lo`` category has more than 40,000 characters in it!
+
+ The default value is ``basic``.
+
+ .. versionadded:: 0.8
+ """
+
+ name = 'C#'
aliases = ['csharp', 'c#', 'cs']
- filenames = ['*.cs']
- mimetypes = ['text/x-csharp'] # inferred
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- # for the range of allowed unicode characters in identifiers, see
- # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
+ filenames = ['*.cs']
+ mimetypes = ['text/x-csharp'] # inferred
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ # for the range of allowed unicode characters in identifiers, see
+ # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
+
+ levels = {
'none': r'@?[_a-zA-Z]\w*',
- 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'),
- 'full': ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
- + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- }
-
- tokens = {}
- token_variants = True
-
+ 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
+ 'Cf', 'Mn', 'Mc') + ']*'),
+ 'full': ('@?(?:_|[^' +
+ uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
+ }
+
+ tokens = {}
+ token_variants = True
+
for levelname, cs_ident in levels.items():
- tokens[levelname] = {
- 'root': [
- # method names
+ tokens[levelname] = {
+ 'root': [
+ # method names
(r'^([ \t]*)((?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
+ r'(' + cs_ident + ')' # method name
+ r'(\s*)(\()', # signature start
bygroups(Whitespace, using(this), Name.Function, Whitespace,
Punctuation)),
(r'^(\s*)(\[.*?\])', bygroups(Whitespace, Name.Attribute)),
(r'[^\S\n]+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*].*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single),
+ (r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Whitespace),
- (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
+ (r'[{}]', Punctuation),
+ (r'@"(""|[^"])*"', String),
(r'\$?"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
- r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r"'\\.'|'[^\\]'", String.Char),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
+ r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'(#)([ \t]*)(if|endif|else|elif|define|undef|'
r'line|error|warning|region|endregion|pragma)\b(.*?)(\n)',
bygroups(Comment.Preproc, Whitespace, Comment.Preproc,
Comment.Preproc, Whitespace)),
(r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Whitespace,
- Keyword)),
+ Keyword)),
(r'(abstract|as|async|await|base|break|by|case|catch|'
- r'checked|const|continue|default|delegate|'
- r'do|else|enum|event|explicit|extern|false|finally|'
- r'fixed|for|foreach|goto|if|implicit|in|interface|'
+ r'checked|const|continue|default|delegate|'
+ r'do|else|enum|event|explicit|extern|false|finally|'
+ r'fixed|for|foreach|goto|if|implicit|in|interface|'
r'internal|is|let|lock|new|null|on|operator|'
- r'out|override|params|private|protected|public|readonly|'
- r'ref|return|sealed|sizeof|stackalloc|static|'
- r'switch|this|throw|true|try|typeof|'
- r'unchecked|unsafe|virtual|void|while|'
- r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
+ r'out|override|params|private|protected|public|readonly|'
+ r'ref|return|sealed|sizeof|stackalloc|static|'
+ r'switch|this|throw|true|try|typeof|'
+ r'unchecked|unsafe|virtual|void|while|'
+ r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
r'descending|from|group|into|orderby|select|thenby|where|'
- r'join|equals)\b', Keyword),
- (r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
- r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
+ r'join|equals)\b', Keyword),
+ (r'(global)(::)', bygroups(Keyword, Punctuation)),
+ (r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
+ r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
(r'(class|struct)(\s+)', bygroups(Keyword, Whitespace), 'class'),
(r'(namespace|using)(\s+)', bygroups(Keyword, Whitespace), 'namespace'),
- (cs_ident, Name),
- ],
- 'class': [
- (cs_ident, Name.Class, '#pop'),
- default('#pop'),
- ],
- 'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop'),
- ]
- }
-
- def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', list(self.tokens), 'basic')
- if level not in self._all_tokens:
- # compile the regexes now
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
-
- RegexLexer.__init__(self, **options)
-
-
-class NemerleLexer(RegexLexer):
- """
- For `Nemerle <http://nemerle.org>`_ source code.
-
- Additional options accepted:
-
- `unicodelevel`
- Determines which Unicode characters this lexer allows for identifiers.
- The possible values are:
-
- * ``none`` -- only the ASCII letters and numbers are allowed. This
- is the fastest selection.
- * ``basic`` -- all Unicode characters from the specification except
- category ``Lo`` are allowed.
- * ``full`` -- all Unicode characters as specified in the C# specs
- are allowed. Note that this means a considerable slowdown since the
- ``Lo`` category has more than 40,000 characters in it!
-
- The default value is ``basic``.
-
- .. versionadded:: 1.5
- """
-
- name = 'Nemerle'
- aliases = ['nemerle']
- filenames = ['*.n']
- mimetypes = ['text/x-nemerle'] # inferred
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- # for the range of allowed unicode characters in identifiers, see
- # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
-
- levels = {
+ (cs_ident, Name),
+ ],
+ 'class': [
+ (cs_ident, Name.Class, '#pop'),
+ default('#pop'),
+ ],
+ 'namespace': [
+ (r'(?=\()', Text, '#pop'), # using (resource)
+ ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop'),
+ ]
+ }
+
+ def __init__(self, **options):
+ level = get_choice_opt(options, 'unicodelevel', list(self.tokens), 'basic')
+ if level not in self._all_tokens:
+ # compile the regexes now
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+
+ RegexLexer.__init__(self, **options)
+
+
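The unicodelevel option documented above selects one of the per-level token tables built in the loop over levels.items(); a brief sketch of opting into the slower 'full' table (the identifier content is illustrative only):

    from pygments.lexers import CSharpLexer

    code = 'class Größe { int Δx; }'
    lexer = CSharpLexer(unicodelevel='full')   # 'none' | 'basic' (default) | 'full'
    print([(str(t), v) for t, v in lexer.get_tokens(code) if v.strip()])
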
+class NemerleLexer(RegexLexer):
+ """
+ For `Nemerle <http://nemerle.org>`_ source code.
+
+ Additional options accepted:
+
+ `unicodelevel`
+ Determines which Unicode characters this lexer allows for identifiers.
+ The possible values are:
+
+ * ``none`` -- only the ASCII letters and numbers are allowed. This
+ is the fastest selection.
+ * ``basic`` -- all Unicode characters from the specification except
+ category ``Lo`` are allowed.
+ * ``full`` -- all Unicode characters as specified in the C# specs
+ are allowed. Note that this means a considerable slowdown since the
+ ``Lo`` category has more than 40,000 characters in it!
+
+ The default value is ``basic``.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Nemerle'
+ aliases = ['nemerle']
+ filenames = ['*.n']
+ mimetypes = ['text/x-nemerle'] # inferred
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ # for the range of allowed unicode characters in identifiers, see
+ # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
+
+ levels = {
'none': r'@?[_a-zA-Z]\w*',
- 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'),
- 'full': ('@?(?:_|[^' +
- uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
- + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
- }
-
- tokens = {}
- token_variants = True
-
+ 'basic': ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
+ 'Cf', 'Mn', 'Mc') + ']*'),
+ 'full': ('@?(?:_|[^' +
+ uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
+ }
+
+ tokens = {}
+ token_variants = True
+
for levelname, cs_ident in levels.items():
- tokens[levelname] = {
- 'root': [
- # method names
+ tokens[levelname] = {
+ 'root': [
+ # method names
(r'^([ \t]*)((?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
- r'(\s*)(\()', # signature start
+ r'(' + cs_ident + ')' # method name
+ r'(\s*)(\()', # signature start
bygroups(Whitespace, using(this), Name.Function, Whitespace, \
Punctuation)),
(r'^(\s*)(\[.*?\])', bygroups(Whitespace, Name.Attribute)),
(r'[^\S\n]+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation
- (r'//.*?\n', Comment.Single),
- (r'/[*].*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single),
+ (r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Whitespace),
(r'(\$)(\s*)(")', bygroups(String, Whitespace, String),
'splice-string'),
(r'(\$)(\s*)(<#)', bygroups(String, Whitespace, String),
'splice-string2'),
- (r'<#', String, 'recursive-string'),
-
+ (r'<#', String, 'recursive-string'),
+
(r'(<\[)(\s*)(' + cs_ident + ':)?', bygroups(Keyword,
Whitespace, Keyword)),
- (r'\]\>', Keyword),
-
- # quasiquotation only
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
-
- (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
- (r'[{}]', Punctuation),
- (r'@"(""|[^"])*"', String),
+ (r'\]\>', Keyword),
+
+ # quasiquotation only
+ (r'\$' + cs_ident, Name),
+ (r'(\$)(\()', bygroups(Name, Punctuation),
+ 'splice-string-content'),
+
+ (r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
+ (r'[{}]', Punctuation),
+ (r'@"(""|[^"])*"', String),
(r'"(\\\\|\\[^\\]|[^"\\\n])*["\n]', String),
- (r"'\\.'|'[^\\]'", String.Char),
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
+ (r"'\\.'|'[^\\]'", String.Char),
+ (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
(r'(#)([ \t]*)(if|endif|else|elif|define|undef|'
r'line|error|warning|region|endregion|pragma)\b',
bygroups(Comment.Preproc, Whitespace, Comment.Preproc), 'preproc'),
(r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Whitespace, Keyword)),
- (r'(abstract|and|as|base|catch|def|delegate|'
- r'enum|event|extern|false|finally|'
- r'fun|implements|interface|internal|'
- r'is|macro|match|matches|module|mutable|new|'
- r'null|out|override|params|partial|private|'
- r'protected|public|ref|sealed|static|'
- r'syntax|this|throw|true|try|type|typeof|'
- r'virtual|volatile|when|where|with|'
- r'assert|assert2|async|break|checked|continue|do|else|'
- r'ensures|for|foreach|if|late|lock|new|nolate|'
- r'otherwise|regexp|repeat|requires|return|surroundwith|'
- r'unchecked|unless|using|while|yield)\b', Keyword),
- (r'(global)(::)', bygroups(Keyword, Punctuation)),
- (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
- r'short|string|uint|ulong|ushort|void|array|list)\b\??',
- Keyword.Type),
+ (r'(abstract|and|as|base|catch|def|delegate|'
+ r'enum|event|extern|false|finally|'
+ r'fun|implements|interface|internal|'
+ r'is|macro|match|matches|module|mutable|new|'
+ r'null|out|override|params|partial|private|'
+ r'protected|public|ref|sealed|static|'
+ r'syntax|this|throw|true|try|type|typeof|'
+ r'virtual|volatile|when|where|with|'
+ r'assert|assert2|async|break|checked|continue|do|else|'
+ r'ensures|for|foreach|if|late|lock|new|nolate|'
+ r'otherwise|regexp|repeat|requires|return|surroundwith|'
+ r'unchecked|unless|using|while|yield)\b', Keyword),
+ (r'(global)(::)', bygroups(Keyword, Punctuation)),
+ (r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
+ r'short|string|uint|ulong|ushort|void|array|list)\b\??',
+ Keyword.Type),
(r'(:>?)(\s*)(' + cs_ident + r'\??)',
bygroups(Punctuation, Whitespace, Keyword.Type)),
- (r'(class|struct|variant|module)(\s+)',
+ (r'(class|struct|variant|module)(\s+)',
bygroups(Keyword, Whitespace), 'class'),
(r'(namespace|using)(\s+)', bygroups(Keyword, Whitespace),
- 'namespace'),
- (cs_ident, Name),
- ],
- 'class': [
- (cs_ident, Name.Class, '#pop')
- ],
+ 'namespace'),
+ (cs_ident, Name),
+ ],
+ 'class': [
+ (cs_ident, Name.Class, '#pop')
+ ],
'preproc': [
(r'\w+', Comment.Preproc),
(r'[ \t]+', Whitespace),
(r'\n', Whitespace, '#pop')
],
- 'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
- ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
- ],
- 'splice-string': [
- (r'[^"$]', String),
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
- (r'\\"', String),
- (r'"', String, '#pop')
- ],
- 'splice-string2': [
- (r'[^#<>$]', String),
- (r'\$' + cs_ident, Name),
- (r'(\$)(\()', bygroups(Name, Punctuation),
- 'splice-string-content'),
- (r'<#', String, '#push'),
- (r'#>', String, '#pop')
- ],
- 'recursive-string': [
- (r'[^#<>]', String),
- (r'<#', String, '#push'),
- (r'#>', String, '#pop')
- ],
- 'splice-string-content': [
- (r'if|match', Keyword),
- (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
- (cs_ident, Name),
- (r'\d+', Number),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop')
- ]
- }
-
- def __init__(self, **options):
- level = get_choice_opt(options, 'unicodelevel', list(self.tokens),
- 'basic')
- if level not in self._all_tokens:
- # compile the regexes now
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
-
- RegexLexer.__init__(self, **options)
-
+ 'namespace': [
+ (r'(?=\()', Text, '#pop'), # using (resource)
+ ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
+ ],
+ 'splice-string': [
+ (r'[^"$]', String),
+ (r'\$' + cs_ident, Name),
+ (r'(\$)(\()', bygroups(Name, Punctuation),
+ 'splice-string-content'),
+ (r'\\"', String),
+ (r'"', String, '#pop')
+ ],
+ 'splice-string2': [
+ (r'[^#<>$]', String),
+ (r'\$' + cs_ident, Name),
+ (r'(\$)(\()', bygroups(Name, Punctuation),
+ 'splice-string-content'),
+ (r'<#', String, '#push'),
+ (r'#>', String, '#pop')
+ ],
+ 'recursive-string': [
+ (r'[^#<>]', String),
+ (r'<#', String, '#push'),
+ (r'#>', String, '#pop')
+ ],
+ 'splice-string-content': [
+ (r'if|match', Keyword),
+ (r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
+ (cs_ident, Name),
+ (r'\d+', Number),
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop')
+ ]
+ }
+
+ def __init__(self, **options):
+ level = get_choice_opt(options, 'unicodelevel', list(self.tokens),
+ 'basic')
+ if level not in self._all_tokens:
+ # compile the regexes now
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+
+ RegexLexer.__init__(self, **options)
+
def analyse_text(text):
"""Nemerle is quite similar to Python, but @if is relatively uncommon
elsewhere."""
result = 0
-
+
if '@if' in text:
result += 0.1
return result
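The splice-string states above use '#push'/'#pop' so Nemerle's recursive <# ... #> strings can nest; a small sketch of the expected tokenization (output is hedged, not checked against a specific Pygments release):

    from pygments.lexers import NemerleLexer
    from pygments.token import String

    text = 'def s = <# outer <# inner #> tail #>;\n'
    strings = [(t, v) for t, v in NemerleLexer().get_tokens(text) if t in String]
    print(strings)   # the nested part should stay String thanks to '#push'
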
-class BooLexer(RegexLexer):
- """
- For `Boo <http://boo.codehaus.org/>`_ source code.
- """
-
- name = 'Boo'
- aliases = ['boo']
- filenames = ['*.boo']
- mimetypes = ['text/x-boo']
-
- tokens = {
- 'root': [
+class BooLexer(RegexLexer):
+ """
+ For `Boo <http://boo.codehaus.org/>`_ source code.
+ """
+
+ name = 'Boo'
+ aliases = ['boo']
+ filenames = ['*.boo']
+ mimetypes = ['text/x-boo']
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'(#|//).*$', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment'),
- (r'[]{}:(),.;[]', Punctuation),
+ (r'(#|//).*$', Comment.Single),
+ (r'/[*]', Comment.Multiline, 'comment'),
+ (r'[]{}:(),.;[]', Punctuation),
(r'(\\)(\n)', bygroups(Text, Whitespace)),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
(r'/(\\\\|\\[^\\]|[^/\\\s])/', String.Regex),
(r'@/(\\\\|\\[^\\]|[^/\\])*/', String.Regex),
- (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
- (r'(as|abstract|callable|constructor|destructor|do|import|'
- r'enum|event|final|get|interface|internal|of|override|'
- r'partial|private|protected|public|return|set|static|'
- r'struct|transient|virtual|yield|super|and|break|cast|'
- r'continue|elif|else|ensure|except|for|given|goto|if|in|'
- r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
- r'while|from|as)\b', Keyword),
- (r'def(?=\s+\(.*?\))', Keyword),
+ (r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
+ (r'(as|abstract|callable|constructor|destructor|do|import|'
+ r'enum|event|final|get|interface|internal|of|override|'
+ r'partial|private|protected|public|return|set|static|'
+ r'struct|transient|virtual|yield|super|and|break|cast|'
+ r'continue|elif|else|ensure|except|for|given|goto|if|in|'
+ r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
+ r'while|from|as)\b', Keyword),
+ (r'def(?=\s+\(.*?\))', Keyword),
(r'(def)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
(r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
(r'(namespace)(\s+)', bygroups(Keyword, Whitespace), 'namespace'),
- (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
- r'assert|checked|enumerate|filter|getter|len|lock|map|'
- r'matrix|max|min|normalArrayIndexing|print|property|range|'
- r'rawArrayIndexing|required|typeof|unchecked|using|'
- r'yieldAll|zip)\b', Name.Builtin),
- (r'"""(\\\\|\\"|.*?)"""', String.Double),
+ (r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
+ r'assert|checked|enumerate|filter|getter|len|lock|map|'
+ r'matrix|max|min|normalArrayIndexing|print|property|range|'
+ r'rawArrayIndexing|required|typeof|unchecked|using|'
+ r'yieldAll|zip)\b', Name.Builtin),
+ (r'"""(\\\\|\\"|.*?)"""', String.Double),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*', Name),
- (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
- (r'[0-9][0-9.]*(ms?|d|h|s)', Number),
- (r'0\d+', Number.Oct),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer),
- ],
- 'comment': [
- ('/[*]', Comment.Multiline, '#push'),
- ('[*]/', Comment.Multiline, '#pop'),
- ('[^/*]', Comment.Multiline),
- ('[*/]', Comment.Multiline)
- ],
- 'funcname': [
+ (r'[a-zA-Z_]\w*', Name),
+ (r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
+ (r'[0-9][0-9.]*(ms?|d|h|s)', Number),
+ (r'0\d+', Number.Oct),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer),
+ ],
+ 'comment': [
+ ('/[*]', Comment.Multiline, '#push'),
+ ('[*]/', Comment.Multiline, '#pop'),
+ ('[^/*]', Comment.Multiline),
+ ('[*/]', Comment.Multiline)
+ ],
+ 'funcname': [
(r'[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'classname': [
+ ],
+ 'classname': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'namespace': [
+ ],
+ 'namespace': [
(r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
- ]
- }
-
-
-class VbNetLexer(RegexLexer):
- """
- For
- `Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
- source code.
- """
-
- name = 'VB.net'
- aliases = ['vb.net', 'vbnet']
- filenames = ['*.vb', '*.bas']
- mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
-
+ ]
+ }
+
+
+class VbNetLexer(RegexLexer):
+ """
+ For
+ `Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
+ source code.
+ """
+
+ name = 'VB.net'
+ aliases = ['vb.net', 'vbnet']
+ filenames = ['*.vb', '*.bas']
+ mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
+
uni_name = '[_' + uni.combine('Ll', 'Lt', 'Lm', 'Nl') + ']' + \
'[' + uni.combine('Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
- 'Cf', 'Mn', 'Mc') + ']*'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- (r'^\s*<.*?>', Name.Attribute),
+ 'Cf', 'Mn', 'Mc') + ']*'
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'^\s*<.*?>', Name.Attribute),
(r'\s+', Whitespace),
(r'\n', Whitespace),
(r'(rem\b.*?)(\n)', bygroups(Comment, Whitespace)),
(r"('.*?)(\n)", bygroups(Comment, Whitespace)),
- (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|'
- r'#ExternalSource.*?\n|#End\s+ExternalSource|'
- r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
- Comment.Preproc),
- (r'[(){}!#,.:]', Punctuation),
+ (r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|'
+ r'#ExternalSource.*?\n|#End\s+ExternalSource|'
+ r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
+ Comment.Preproc),
+ (r'[(){}!#,.:]', Punctuation),
(r'(Option)(\s+)(Strict|Explicit|Compare)(\s+)'
r'(On|Off|Binary|Text)', bygroups(Keyword.Declaration, Whitespace,
Keyword.Declaration, Whitespace, Keyword.Declaration)),
@@ -434,277 +434,277 @@ class VbNetLexer(RegexLexer):
'True', 'Try', 'TryCast', 'Wend', 'Using', 'When', 'While',
'Widening', 'With', 'WithEvents', 'WriteOnly'),
prefix=r'(?<!\.)', suffix=r'\b'), Keyword),
- (r'(?<!\.)End\b', Keyword, 'end'),
- (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
- (r'(?<!\.)(Function|Sub|Property)(\s+)',
+ (r'(?<!\.)End\b', Keyword, 'end'),
+ (r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
+ (r'(?<!\.)(Function|Sub|Property)(\s+)',
bygroups(Keyword, Whitespace), 'funcname'),
- (r'(?<!\.)(Class|Structure|Enum)(\s+)',
+ (r'(?<!\.)(Class|Structure|Enum)(\s+)',
bygroups(Keyword, Whitespace), 'classname'),
- (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
+ (r'(?<!\.)(Module|Namespace|Imports)(\s+)',
bygroups(Keyword, Whitespace), 'namespace'),
- (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
- r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
- r'UShort)\b', Keyword.Type),
- (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
- r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
- (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
- r'<=|>=|<>|[-&*/\\^+=<>\[\]]',
- Operator),
- ('"', String, 'string'),
+ (r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
+ r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
+ r'UShort)\b', Keyword.Type),
+ (r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
+ r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
+ (r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
+ r'<=|>=|<>|[-&*/\\^+=<>\[\]]',
+ Operator),
+ ('"', String, 'string'),
(r'(_)(\n)', bygroups(Text, Whitespace)), # Line continuation (must be before Name)
- (uni_name + '[%&@!#$]?', Name),
- ('#.*?#', Literal.Date),
- (r'(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?', Number.Float),
- (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
- ],
- 'string': [
- (r'""', String),
- (r'"C?', String, '#pop'),
- (r'[^"]+', String),
- ],
- 'dim': [
- (uni_name, Name.Variable, '#pop'),
- default('#pop'), # any other syntax
- ],
- 'funcname': [
- (uni_name, Name.Function, '#pop'),
- ],
- 'classname': [
- (uni_name, Name.Class, '#pop'),
- ],
- 'namespace': [
- (uni_name, Name.Namespace),
- (r'\.', Name.Namespace),
- default('#pop'),
- ],
- 'end': [
+ (uni_name + '[%&@!#$]?', Name),
+ ('#.*?#', Literal.Date),
+ (r'(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?', Number.Float),
+ (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
+ (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
+ (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
+ ],
+ 'string': [
+ (r'""', String),
+ (r'"C?', String, '#pop'),
+ (r'[^"]+', String),
+ ],
+ 'dim': [
+ (uni_name, Name.Variable, '#pop'),
+ default('#pop'), # any other syntax
+ ],
+ 'funcname': [
+ (uni_name, Name.Function, '#pop'),
+ ],
+ 'classname': [
+ (uni_name, Name.Class, '#pop'),
+ ],
+ 'namespace': [
+ (uni_name, Name.Namespace),
+ (r'\.', Name.Namespace),
+ default('#pop'),
+ ],
+ 'end': [
(r'\s+', Whitespace),
- (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
- Keyword, '#pop'),
- default('#pop'),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*(#If|Module|Namespace)', text, re.MULTILINE):
- return 0.5
-
-
-class GenericAspxLexer(RegexLexer):
- """
- Lexer for ASP.NET pages.
- """
-
- name = 'aspx-gen'
- filenames = []
- mimetypes = []
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
- Other,
- using(XmlLexer))),
- (r'(.+?)(?=<)', using(XmlLexer)),
- (r'.+', using(XmlLexer)),
- ],
- }
-
-
-# TODO support multiple languages within the same source file
-class CSharpAspxLexer(DelegatingLexer):
- """
- Lexer for highlighting C# within ASP.NET pages.
- """
-
- name = 'aspx-cs'
- aliases = ['aspx-cs']
- filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
- mimetypes = []
-
- def __init__(self, **options):
+ (r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
+ Keyword, '#pop'),
+ default('#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*(#If|Module|Namespace)', text, re.MULTILINE):
+ return 0.5
+
+
+class GenericAspxLexer(RegexLexer):
+ """
+ Lexer for ASP.NET pages.
+ """
+
+ name = 'aspx-gen'
+ filenames = []
+ mimetypes = []
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
+ (r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
+ Other,
+ using(XmlLexer))),
+ (r'(.+?)(?=<)', using(XmlLexer)),
+ (r'.+', using(XmlLexer)),
+ ],
+ }
+
+
+# TODO support multiple languages within the same source file
+class CSharpAspxLexer(DelegatingLexer):
+ """
+ Lexer for highlighting C# within ASP.NET pages.
+ """
+
+ name = 'aspx-cs'
+ aliases = ['aspx-cs']
+ filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
+ mimetypes = []
+
+ def __init__(self, **options):
super().__init__(CSharpLexer, GenericAspxLexer, **options)
-
- def analyse_text(text):
- if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
- return 0.2
- elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
- return 0.15
-
-
-class VbNetAspxLexer(DelegatingLexer):
- """
- Lexer for highlighting Visual Basic.net within ASP.NET pages.
- """
-
- name = 'aspx-vb'
- aliases = ['aspx-vb']
- filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
- mimetypes = []
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
+ return 0.2
+ elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
+ return 0.15
+
+
+class VbNetAspxLexer(DelegatingLexer):
+ """
+ Lexer for highlighting Visual Basic.net within ASP.NET pages.
+ """
+
+ name = 'aspx-vb'
+ aliases = ['aspx-vb']
+ filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
+ mimetypes = []
+
+ def __init__(self, **options):
super().__init__(VbNetLexer, GenericAspxLexer, **options)
-
- def analyse_text(text):
- if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
- return 0.2
- elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
- return 0.15
-
-
-# Very close to functional.OcamlLexer
-class FSharpLexer(RegexLexer):
- """
+
+ def analyse_text(text):
+ if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
+ return 0.2
+ elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
+ return 0.15
+
+
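CSharpAspxLexer and VbNetAspxLexer above are DelegatingLexers: GenericAspxLexer lexes the page and leaves embedded server-side code as Other, which the C# or VB.NET lexer then re-lexes. A hedged usage sketch, assuming standard exports:

    from pygments.lexers import CSharpAspxLexer

    page = '<%@ Page Language="C#" %>\n<script runat="server">int x = 1;</script>\n'
    for token_type, value in CSharpAspxLexer().get_tokens(page):
        if value.strip():
            print(token_type, repr(value))
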
+# Very close to functional.OcamlLexer
+class FSharpLexer(RegexLexer):
+ """
For the `F# language <https://fsharp.org/>`_ (version 3.0).
-
- .. versionadded:: 1.5
- """
-
+
+ .. versionadded:: 1.5
+ """
+
name = 'F#'
aliases = ['fsharp', 'f#']
- filenames = ['*.fs', '*.fsi']
- mimetypes = ['text/x-fsharp']
-
- keywords = [
- 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
- 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
- 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
- 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
- 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
- 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
- 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
- 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
- 'while', 'with', 'yield!', 'yield',
- ]
- # Reserved words; cannot hurt to color them as keywords too.
- keywords += [
- 'atomic', 'break', 'checked', 'component', 'const', 'constraint',
- 'constructor', 'continue', 'eager', 'event', 'external', 'fixed',
- 'functor', 'include', 'method', 'mixin', 'object', 'parallel',
- 'process', 'protected', 'pure', 'sealed', 'tailcall', 'trait',
- 'virtual', 'volatile',
- ]
- keyopts = [
+ filenames = ['*.fs', '*.fsi']
+ mimetypes = ['text/x-fsharp']
+
+ keywords = [
+ 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
+ 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
+ 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
+ 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
+ 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
+ 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
+ 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
+ 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
+ 'while', 'with', 'yield!', 'yield',
+ ]
+ # Reserved words; cannot hurt to color them as keywords too.
+ keywords += [
+ 'atomic', 'break', 'checked', 'component', 'const', 'constraint',
+ 'constructor', 'continue', 'eager', 'event', 'external', 'fixed',
+ 'functor', 'include', 'method', 'mixin', 'object', 'parallel',
+ 'process', 'protected', 'pure', 'sealed', 'tailcall', 'trait',
+ 'virtual', 'volatile',
+ ]
+ keyopts = [
'!=', '#', '&&', '&', r'\(', r'\)', r'\*', r'\+', ',', r'-\.',
'->', '-', r'\.\.', r'\.', '::', ':=', ':>', ':', ';;', ';', '<-',
r'<\]', '<', r'>\]', '>', r'\?\?', r'\?', r'\[<', r'\[\|', r'\[', r'\]',
'_', '`', r'\{', r'\|\]', r'\|', r'\}', '~', '<@@', '<@', '=', '@>', '@@>',
- ]
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ['and', 'or', 'not']
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = [
- 'sbyte', 'byte', 'char', 'nativeint', 'unativeint', 'float32', 'single',
- 'float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32',
- 'uint32', 'int64', 'uint64', 'decimal', 'unit', 'bool', 'string',
- 'list', 'exn', 'obj', 'enum',
- ]
-
- # See http://msdn.microsoft.com/en-us/library/dd233181.aspx and/or
- # http://fsharp.org/about/files/spec.pdf for reference. Good luck.
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbrafv]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\U[0-9a-fA-F]{8}', String.Escape),
- ],
- 'root': [
+ ]
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ['and', 'or', 'not']
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = [
+ 'sbyte', 'byte', 'char', 'nativeint', 'unativeint', 'float32', 'single',
+ 'float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32',
+ 'uint32', 'int64', 'uint64', 'decimal', 'unit', 'bool', 'string',
+ 'list', 'exn', 'obj', 'enum',
+ ]
+
+ # See http://msdn.microsoft.com/en-us/library/dd233181.aspx and/or
+ # http://fsharp.org/about/files/spec.pdf for reference. Good luck.
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\"\'ntbrafv]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\U[0-9a-fA-F]{8}', String.Escape),
+ ],
+ 'root': [
(r'\s+', Whitespace),
- (r'\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b(?<!\.)([A-Z][\w\']*)(?=\s*\.)',
- Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name),
+ (r'\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b(?<!\.)([A-Z][\w\']*)(?=\s*\.)',
+ Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name),
(r'(///.*?)(\n)', bygroups(String.Doc, Whitespace)),
(r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
- (r'\(\*(?!\))', Comment, 'comment'),
-
- (r'@"', String, 'lstring'),
- (r'"""', String, 'tqs'),
- (r'"', String, 'string'),
-
- (r'\b(open|module)(\s+)([\w.]+)',
+ (r'\(\*(?!\))', Comment, 'comment'),
+
+ (r'@"', String, 'lstring'),
+ (r'"""', String, 'tqs'),
+ (r'"', String, 'string'),
+
+ (r'\b(open|module)(\s+)([\w.]+)',
bygroups(Keyword, Whitespace, Name.Namespace)),
- (r'\b(let!?)(\s+)(\w+)',
+ (r'\b(let!?)(\s+)(\w+)',
bygroups(Keyword, Whitespace, Name.Variable)),
- (r'\b(type)(\s+)(\w+)',
+ (r'\b(type)(\s+)(\w+)',
bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(member|override)(\s+)(\w+)(\.)(\w+)',
+ (r'\b(member|override)(\s+)(\w+)(\.)(\w+)',
bygroups(Keyword, Whitespace, Name, Punctuation, Name.Function)),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'``([^`\n\r\t]|`[^`\n\r\t])+``', Name),
- (r'(%s)' % '|'.join(keyopts), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'``([^`\n\r\t]|`[^`\n\r\t])+``', Name),
+ (r'(%s)' % '|'.join(keyopts), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
(r'(#)([ \t]*)(if|endif|else|line|nowarn|light|\d+)\b(.*?)(\n)',
bygroups(Comment.Preproc, Whitespace, Comment.Preproc,
Comment.Preproc, Whitespace)),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
- (r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
- (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Bin),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
- Number.Float),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'@?"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'dotted': [
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
+ (r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
+ (r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Bin),
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
+ Number.Float),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'@?"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'dotted': [
(r'\s+', Whitespace),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- # e.g. dictionary index access
- default('#pop'),
- ],
- 'comment': [
- (r'[^(*)@"]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- # comments cannot be closed within strings in comments
- (r'@"', String, 'lstring'),
- (r'"""', String, 'tqs'),
- (r'"', String, 'string'),
- (r'[(*)@]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String),
- include('escape-sequence'),
- (r'\\\n', String),
- (r'\n', String), # newlines are allowed in any string
- (r'"B?', String, '#pop'),
- ],
- 'lstring': [
- (r'[^"]+', String),
- (r'\n', String),
- (r'""', String),
- (r'"B?', String, '#pop'),
- ],
- 'tqs': [
- (r'[^"]+', String),
- (r'\n', String),
- (r'"""B?', String, '#pop'),
- (r'"', String),
- ],
- }
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ # e.g. dictionary index access
+ default('#pop'),
+ ],
+ 'comment': [
+ (r'[^(*)@"]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ # comments cannot be closed within strings in comments
+ (r'@"', String, 'lstring'),
+ (r'"""', String, 'tqs'),
+ (r'"', String, 'string'),
+ (r'[(*)@]', Comment),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ include('escape-sequence'),
+ (r'\\\n', String),
+ (r'\n', String), # newlines are allowed in any string
+ (r'"B?', String, '#pop'),
+ ],
+ 'lstring': [
+ (r'[^"]+', String),
+ (r'\n', String),
+ (r'""', String),
+ (r'"B?', String, '#pop'),
+ ],
+ 'tqs': [
+ (r'[^"]+', String),
+ (r'\n', String),
+ (r'"""B?', String, '#pop'),
+ (r'"', String),
+ ],
+ }
def analyse_text(text):
"""F# doesn't have that many unique features -- |> and <| are weak
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dsls.py b/contrib/python/Pygments/py3/pygments/lexers/dsls.py
index b6847d0447..98c9a2c056 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dsls.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dsls.py
@@ -1,208 +1,208 @@
-"""
- pygments.lexers.dsls
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various domain-specific languages.
-
+"""
+ pygments.lexers.dsls
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various domain-specific languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import ExtendedRegexLexer, RegexLexer, bygroups, words, \
include, default, this, using, combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
+
__all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
- 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
+ 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer',
'CrmshLexer', 'ThriftLexer', 'FlatlineLexer', 'SnowballLexer']
-
-
-class ProtoBufLexer(RegexLexer):
- """
- Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
- definition files.
-
- .. versionadded:: 1.4
- """
-
- name = 'Protocol Buffer'
- aliases = ['protobuf', 'proto']
- filenames = ['*.proto']
-
- tokens = {
- 'root': [
+
+
+class ProtoBufLexer(RegexLexer):
+ """
+ Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
+ definition files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Protocol Buffer'
+ aliases = ['protobuf', 'proto']
+ filenames = ['*.proto']
+
+ tokens = {
+ 'root': [
(r'[ \t]+', Whitespace),
(r'[,;{}\[\]()<>]', Punctuation),
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (words((
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (words((
'import', 'option', 'optional', 'required', 'repeated',
'reserved', 'default', 'packed', 'ctype', 'extensions', 'to',
'max', 'rpc', 'returns', 'oneof', 'syntax'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
- 'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
- 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
- Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
+ Keyword),
+ (words((
+ 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
+ 'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
+ 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace), 'package'),
- (r'(message|extend)(\s+)',
+ (r'(message|extend)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'message'),
- (r'(enum|group|service)(\s+)',
+ (r'(enum|group|service)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'type'),
- (r'\".*?\"', String),
- (r'\'.*?\'', String),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'(\-?(inf|nan))\b', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[+-=]', Operator),
- (r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
+ (r'\".*?\"', String),
+ (r'\'.*?\'', String),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'(\-?(inf|nan))\b', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'[+-=]', Operator),
+ (r'([a-zA-Z_][\w.]*)([ \t]*)(=)',
bygroups(Name.Attribute, Whitespace, Operator)),
(r'[a-zA-Z_][\w.]*', Name),
- ],
- 'package': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'message': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'type': [
- (r'[a-zA-Z_]\w*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class ThriftLexer(RegexLexer):
- """
- For `Thrift <https://thrift.apache.org/>`__ interface definitions.
-
- .. versionadded:: 2.1
- """
- name = 'Thrift'
- aliases = ['thrift']
- filenames = ['*.thrift']
- mimetypes = ['application/x-thrift']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- (r'"', String.Double, combined('stringescape', 'dqs')),
- (r'\'', String.Single, combined('stringescape', 'sqs')),
- (r'(namespace)(\s+)',
+ ],
+ 'package': [
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'message': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ default('#pop'),
+ ],
+ 'type': [
+ (r'[a-zA-Z_]\w*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
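ProtoBufLexer above uses one-shot states ('package', 'message', 'type') that classify the following identifier and immediately fall back to 'root' via default('#pop'). A short usage sketch, assuming the standard pygments.lexers exports:

    from pygments.lexers import ProtoBufLexer
    from pygments.token import Name

    proto = 'message Person {\n  required string name = 1;\n}\n'
    names = [(t, v) for t, v in ProtoBufLexer().get_tokens(proto) if t in Name]
    print(names)   # 'Person' should come back as Name.Class via the 'message' state
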
+class ThriftLexer(RegexLexer):
+ """
+ For `Thrift <https://thrift.apache.org/>`__ interface definitions.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Thrift'
+ aliases = ['thrift']
+ filenames = ['*.thrift']
+ mimetypes = ['application/x-thrift']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+ (r'"', String.Double, combined('stringescape', 'dqs')),
+ (r'\'', String.Single, combined('stringescape', 'sqs')),
+ (r'(namespace)(\s+)',
bygroups(Keyword.Namespace, Whitespace), 'namespace'),
- (r'(enum|union|struct|service|exception)(\s+)',
+ (r'(enum|union|struct|service|exception)(\s+)',
bygroups(Keyword.Declaration, Whitespace), 'class'),
- (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
- r'((?:[^\W\d]|\$)[\w$]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
+ r'((?:[^\W\d]|\$)[\w$]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Whitespace, Operator)),
- include('keywords'),
- include('numbers'),
- (r'[&=]', Operator),
+ include('keywords'),
+ include('numbers'),
+ (r'[&=]', Operator),
(r'[:;,{}()<>\[\]]', Punctuation),
(r'[a-zA-Z_](\.\w|\w)*', Name),
- ],
- 'whitespace': [
+ ],
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
- ],
- 'comments': [
- (r'#.*$', Comment),
- (r'//.*?\n', Comment),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- ],
- 'stringescape': [
- (r'\\([\\nrt"\'])', String.Escape),
- ],
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'[^\\"\n]+', String.Double),
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r'[^\\\'\n]+', String.Single),
- ],
- 'namespace': [
+ ],
+ 'comments': [
+ (r'#.*$', Comment),
+ (r'//.*?\n', Comment),
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ ],
+ 'stringescape': [
+ (r'\\([\\nrt"\'])', String.Escape),
+ ],
+ 'dqs': [
+ (r'"', String.Double, '#pop'),
+ (r'[^\\"\n]+', String.Double),
+ ],
+ 'sqs': [
+ (r"'", String.Single, '#pop'),
+ (r'[^\\\'\n]+', String.Single),
+ ],
+ 'namespace': [
(r'[a-z*](\.\w|\w)*', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- default('#pop'),
- ],
- 'keywords': [
- (r'(async|oneway|extends|throws|required|optional)\b', Keyword),
- (r'(true|false)\b', Keyword.Constant),
- (r'(const|typedef)\b', Keyword.Declaration),
- (words((
- 'cpp_namespace', 'cpp_include', 'cpp_type', 'java_package',
- 'cocoa_prefix', 'csharp_namespace', 'delphi_namespace',
- 'php_namespace', 'py_module', 'perl_package',
- 'ruby_namespace', 'smalltalk_category', 'smalltalk_prefix',
- 'xsd_all', 'xsd_optional', 'xsd_nillable', 'xsd_namespace',
- 'xsd_attrs', 'include'), suffix=r'\b'),
- Keyword.Namespace),
- (words((
- 'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
+ default('#pop'),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ default('#pop'),
+ ],
+ 'keywords': [
+ (r'(async|oneway|extends|throws|required|optional)\b', Keyword),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(const|typedef)\b', Keyword.Declaration),
+ (words((
+ 'cpp_namespace', 'cpp_include', 'cpp_type', 'java_package',
+ 'cocoa_prefix', 'csharp_namespace', 'delphi_namespace',
+ 'php_namespace', 'py_module', 'perl_package',
+ 'ruby_namespace', 'smalltalk_category', 'smalltalk_prefix',
+ 'xsd_all', 'xsd_optional', 'xsd_nillable', 'xsd_namespace',
+ 'xsd_attrs', 'include'), suffix=r'\b'),
+ Keyword.Namespace),
+ (words((
+ 'void', 'bool', 'byte', 'i16', 'i32', 'i64', 'double',
'string', 'binary', 'map', 'list', 'set', 'slist',
- 'senum'), suffix=r'\b'),
- Keyword.Type),
- (words((
- 'BEGIN', 'END', '__CLASS__', '__DIR__', '__FILE__',
- '__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__',
- 'abstract', 'alias', 'and', 'args', 'as', 'assert', 'begin',
- 'break', 'case', 'catch', 'class', 'clone', 'continue',
- 'declare', 'def', 'default', 'del', 'delete', 'do', 'dynamic',
- 'elif', 'else', 'elseif', 'elsif', 'end', 'enddeclare',
- 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile',
- 'ensure', 'except', 'exec', 'finally', 'float', 'for',
- 'foreach', 'function', 'global', 'goto', 'if', 'implements',
- 'import', 'in', 'inline', 'instanceof', 'interface', 'is',
- 'lambda', 'module', 'native', 'new', 'next', 'nil', 'not',
- 'or', 'pass', 'public', 'print', 'private', 'protected',
- 'raise', 'redo', 'rescue', 'retry', 'register', 'return',
- 'self', 'sizeof', 'static', 'super', 'switch', 'synchronized',
- 'then', 'this', 'throw', 'transient', 'try', 'undef',
- 'unless', 'unsigned', 'until', 'use', 'var', 'virtual',
- 'volatile', 'when', 'while', 'with', 'xor', 'yield'),
- prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- ],
- 'numbers': [
- (r'[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)', Number.Float),
- (r'[+-]?0x[0-9A-Fa-f]+', Number.Hex),
- (r'[+-]?[0-9]+', Number.Integer),
- ],
- }
-
-
+ 'senum'), suffix=r'\b'),
+ Keyword.Type),
+ (words((
+ 'BEGIN', 'END', '__CLASS__', '__DIR__', '__FILE__',
+ '__FUNCTION__', '__LINE__', '__METHOD__', '__NAMESPACE__',
+ 'abstract', 'alias', 'and', 'args', 'as', 'assert', 'begin',
+ 'break', 'case', 'catch', 'class', 'clone', 'continue',
+ 'declare', 'def', 'default', 'del', 'delete', 'do', 'dynamic',
+ 'elif', 'else', 'elseif', 'elsif', 'end', 'enddeclare',
+ 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile',
+ 'ensure', 'except', 'exec', 'finally', 'float', 'for',
+ 'foreach', 'function', 'global', 'goto', 'if', 'implements',
+ 'import', 'in', 'inline', 'instanceof', 'interface', 'is',
+ 'lambda', 'module', 'native', 'new', 'next', 'nil', 'not',
+ 'or', 'pass', 'public', 'print', 'private', 'protected',
+ 'raise', 'redo', 'rescue', 'retry', 'register', 'return',
+ 'self', 'sizeof', 'static', 'super', 'switch', 'synchronized',
+ 'then', 'this', 'throw', 'transient', 'try', 'undef',
+ 'unless', 'unsigned', 'until', 'use', 'var', 'virtual',
+ 'volatile', 'when', 'while', 'with', 'xor', 'yield'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ ],
+ 'numbers': [
+ (r'[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)', Number.Float),
+ (r'[+-]?0x[0-9A-Fa-f]+', Number.Hex),
+ (r'[+-]?[0-9]+', Number.Integer),
+ ],
+ }
+
+
class ZeekLexer(RegexLexer):
- """
+ """
For `Zeek <https://www.zeek.org/>`_ scripts.
-
+
.. versionadded:: 2.5
- """
+ """
name = 'Zeek'
aliases = ['zeek', 'bro']
filenames = ['*.zeek', '*.bro']
-
+
_hex = r'[0-9a-fA-F]'
- _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
- _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
-
- tokens = {
- 'root': [
+ _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
+ _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
+
+ tokens = {
+ 'root': [
include('whitespace'),
include('comments'),
include('directives'),
@@ -288,7 +288,7 @@ class ZeekLexer(RegexLexer):
# operator.
(r'/(?=.*/)', String.Regex, 'regex'),
- (r'(T|F)\b', Keyword.Constant),
+ (r'(T|F)\b', Keyword.Constant),
# Port
(r'\d{1,5}/(udp|tcp|icmp|unknown)\b', Number),
@@ -305,13 +305,13 @@ class ZeekLexer(RegexLexer):
(_float + r'\b', Number.Float),
(r'(\d+)\b', Number.Integer),
- # Hostnames
- (_h + r'(\.' + _h + r')+', String),
+ # Hostnames
+ (_h + r'(\.' + _h + r')+', String),
],
'operators': [
(r'[!%*/+<=>~|&^-]', Operator),
- (r'([-+=&|]{2}|[+=!><-]=)', Operator),
+ (r'([-+=&|]{2}|[+=!><-]=)', Operator),
(r'(in|as|is|of)\b', Operator.Word),
(r'\??\$', Operator),
],
@@ -327,456 +327,456 @@ class ZeekLexer(RegexLexer):
'identifiers': [
(r'([a-zA-Z_]\w*)(::)', bygroups(Name, Punctuation)),
- (r'[a-zA-Z_]\w*', Name)
- ],
+ (r'[a-zA-Z_]\w*', Name)
+ ],
- 'string': [
+ 'string': [
(r'\\.', String.Escape),
(r'%-?[0-9]*(\.[0-9]+)?[DTd-gsx]', String.Escape),
- (r'"', String, '#pop'),
+ (r'"', String, '#pop'),
(r'.', String),
- ],
+ ],
- 'regex': [
+ 'regex': [
(r'\\.', String.Escape),
- (r'/', String.Regex, '#pop'),
+ (r'/', String.Regex, '#pop'),
(r'.', String.Regex),
],
- }
-
-
+ }
+
+
BroLexer = ZeekLexer
-class PuppetLexer(RegexLexer):
- """
- For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
-
- .. versionadded:: 1.6
- """
- name = 'Puppet'
- aliases = ['puppet']
- filenames = ['*.pp']
-
- tokens = {
- 'root': [
- include('comments'),
- include('keywords'),
- include('names'),
- include('numbers'),
- include('operators'),
- include('strings'),
-
- (r'[]{}:(),;[]', Punctuation),
+class PuppetLexer(RegexLexer):
+ """
+ For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Puppet'
+ aliases = ['puppet']
+ filenames = ['*.pp']
+
+ tokens = {
+ 'root': [
+ include('comments'),
+ include('keywords'),
+ include('names'),
+ include('numbers'),
+ include('operators'),
+ include('strings'),
+
+ (r'[]{}:(),;[]', Punctuation),
(r'\s+', Whitespace),
- ],
-
- 'comments': [
+ ],
+
+ 'comments': [
(r'(\s*)(#.*)$', bygroups(Whitespace, Comment)),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
-
- 'operators': [
- (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
- (r'(in|and|or|not)\b', Operator.Word),
- ],
-
- 'names': [
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+
+ 'operators': [
+ (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
+ (r'(in|and|or|not)\b', Operator.Word),
+ ],
+
+ 'names': [
(r'[a-zA-Z_]\w*', Name.Attribute),
- (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
- String, Punctuation)),
- (r'\$\S+', Name.Variable),
- ],
-
- 'numbers': [
- # Copypasta from the Python lexer
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
-
- 'keywords': [
- # Left out 'group' and 'require'
- # Since they're often used as attributes
- (words((
- 'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
- 'check', 'class', 'computer', 'configured', 'contained',
- 'create_resources', 'crit', 'cron', 'debug', 'default',
- 'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
- 'err', 'exec', 'extlookup', 'fail', 'false', 'file',
- 'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
- 'include', 'info', 'inherits', 'inline_template', 'installed',
- 'interface', 'k5login', 'latest', 'link', 'loglevel',
- 'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
- 'mount', 'mounted', 'nagios_command', 'nagios_contact',
- 'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
- 'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
- 'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
- 'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
- 'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
- 'realize', 'regsubst', 'resources', 'role', 'router', 'running',
- 'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
- 'service', 'sha1', 'shellquote', 'split', 'sprintf',
- 'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
- 'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
- 'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
- 'zpool'), prefix='(?i)', suffix=r'\b'),
- Keyword),
- ],
-
- 'strings': [
- (r'"([^"])*"', String),
- (r"'(\\'|[^'])*'", String),
- ],
-
- }
-
-
-class RslLexer(RegexLexer):
- """
- `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
- language used in RAISE (Rigorous Approach to Industrial Software Engineering)
- method.
-
- .. versionadded:: 2.0
- """
- name = 'RSL'
- aliases = ['rsl']
- filenames = ['*.rsl']
- mimetypes = ['text/rsl']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (words((
- 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
- 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
- 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
- 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
- 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
- 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
- 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
- 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
- 'type', 'union', 'until', 'use', 'value', 'variable', 'while',
- 'with', 'write', '~isin', '-inflist', '-infset', '-list',
- '-set'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'(variable|value)\b', Keyword.Declaration),
- (r'--.*?\n', Comment),
- (r'<:.*?:>', Comment),
- (r'\{!.*?!\}', Comment),
- (r'/\*.*?\*/', Comment),
+ (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
+ String, Punctuation)),
+ (r'\$\S+', Name.Variable),
+ ],
+
+ 'numbers': [
+ # Copypasta from the Python lexer
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+
+ 'keywords': [
+ # Left out 'group' and 'require'
+ # Since they're often used as attributes
+ (words((
+ 'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
+ 'check', 'class', 'computer', 'configured', 'contained',
+ 'create_resources', 'crit', 'cron', 'debug', 'default',
+ 'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
+ 'err', 'exec', 'extlookup', 'fail', 'false', 'file',
+ 'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
+ 'include', 'info', 'inherits', 'inline_template', 'installed',
+ 'interface', 'k5login', 'latest', 'link', 'loglevel',
+ 'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
+ 'mount', 'mounted', 'nagios_command', 'nagios_contact',
+ 'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
+ 'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
+ 'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
+ 'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
+ 'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
+ 'realize', 'regsubst', 'resources', 'role', 'router', 'running',
+ 'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
+ 'service', 'sha1', 'shellquote', 'split', 'sprintf',
+ 'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
+ 'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
+ 'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
+ 'zpool'), prefix='(?i)', suffix=r'\b'),
+ Keyword),
+ ],
+
+ 'strings': [
+ (r'"([^"])*"', String),
+ (r"'(\\'|[^'])*'", String),
+ ],
+
+ }
+
+
+class RslLexer(RegexLexer):
+ """
+ `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
+ language used in RAISE (Rigorous Approach to Industrial Software Engineering)
+ method.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RSL'
+ aliases = ['rsl']
+ filenames = ['*.rsl']
+ mimetypes = ['text/rsl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ (words((
+ 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
+ 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
+ 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
+ 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
+ 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
+ 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
+ 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
+ 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
+ 'type', 'union', 'until', 'use', 'value', 'variable', 'while',
+ 'with', 'write', '~isin', '-inflist', '-infset', '-list',
+ '-set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'(variable|value)\b', Keyword.Declaration),
+ (r'--.*?\n', Comment),
+ (r'<:.*?:>', Comment),
+ (r'\{!.*?!\}', Comment),
+ (r'/\*.*?\*/', Comment),
(r'^([ \t]*)([\w]+)([ \t]*)(:[^:])', bygroups(Whitespace,
Name.Function, Whitespace, Name.Function)),
(r'(^[ \t]*)([\w]+)([ \t]*)(\([\w\s,]*\))([ \t]*)(is|as)',
bygroups(Whitespace, Name.Function, Whitespace, Text,
Whitespace, Keyword)),
- (r'\b[A-Z]\w*\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'".*"', String),
- (r'\'.\'', String.Char),
- (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
- r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
- Operator),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+ (r'\b[A-Z]\w*\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'".*"', String),
+ (r'\'.\'', String.Char),
+ (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
+ r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
+ Operator),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
(r'\s+', Whitespace),
- (r'.', Text),
- ],
- }
-
- def analyse_text(text):
- """
- Check for the most common text in the beginning of a RSL file.
- """
- if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
- return 1.0
-
-
-class MscgenLexer(RegexLexer):
- """
- For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
-
- .. versionadded:: 1.6
- """
- name = 'Mscgen'
- aliases = ['mscgen', 'msc']
- filenames = ['*.msc']
-
- _var = r'(\w+|"(?:\\"|[^"])*")'
-
- tokens = {
- 'root': [
- (r'msc\b', Keyword.Type),
- # Options
- (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
- r'|arcgradient|ARCGRADIENT)\b', Name.Property),
- # Operators
- (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
- (r'(\.|-|\|){3}', Keyword),
- (r'(?:-|=|\.|:){2}'
- r'|<<=>>|<->|<=>|<<>>|<:>'
- r'|->|=>>|>>|=>|:>|-x|-X'
- r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
- # Names
- (r'\*', Name.Builtin),
- (_var, Name.Variable),
- # Other
- (r'\[', Punctuation, 'attrs'),
- (r'\{|\}|,|;', Punctuation),
- include('comments')
- ],
- 'attrs': [
- (r'\]', Punctuation, '#pop'),
- (_var + r'(\s*)(=)(\s*)' + _var,
+ (r'.', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check for the most common text in the beginning of a RSL file.
+ """
+ if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
+ return 1.0
+
+
+class MscgenLexer(RegexLexer):
+ """
+ For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Mscgen'
+ aliases = ['mscgen', 'msc']
+ filenames = ['*.msc']
+
+ _var = r'(\w+|"(?:\\"|[^"])*")'
+
+ tokens = {
+ 'root': [
+ (r'msc\b', Keyword.Type),
+ # Options
+ (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
+ r'|arcgradient|ARCGRADIENT)\b', Name.Property),
+ # Operators
+ (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
+ (r'(\.|-|\|){3}', Keyword),
+ (r'(?:-|=|\.|:){2}'
+ r'|<<=>>|<->|<=>|<<>>|<:>'
+ r'|->|=>>|>>|=>|:>|-x|-X'
+ r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
+ # Names
+ (r'\*', Name.Builtin),
+ (_var, Name.Variable),
+ # Other
+ (r'\[', Punctuation, 'attrs'),
+ (r'\{|\}|,|;', Punctuation),
+ include('comments')
+ ],
+ 'attrs': [
+ (r'\]', Punctuation, '#pop'),
+ (_var + r'(\s*)(=)(\s*)' + _var,
bygroups(Name.Attribute, Whitespace, Operator, Whitespace,
- String)),
- (r',', Punctuation),
- include('comments')
- ],
- 'comments': [
- (r'(?://|#).*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
+ String)),
+ (r',', Punctuation),
+ include('comments')
+ ],
+ 'comments': [
+ (r'(?://|#).*?\n', Comment.Single),
+ (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
(r'[ \t\r\n]+', Whitespace)
- ]
- }
-
-
-class VGLLexer(RegexLexer):
- """
- For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
- source code.
-
- .. versionadded:: 1.6
- """
- name = 'VGL'
- aliases = ['vgl']
- filenames = ['*.rpf']
-
- flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\{[^}]*\}', Comment.Multiline),
- (r'declare', Keyword.Constant),
- (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
- r'|create|on|line|with|global|routine|value|endroutine|constant'
- r'|global|set|join|library|compile_option|file|exists|create|copy'
- r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
- Keyword),
- (r'(true|false|null|empty|error|locked)', Keyword.Constant),
- (r'[~^*#!%&\[\]()<>|+=:;,./?-]', Operator),
- (r'"[^"]*"', String),
- (r'(\.)([a-z_$][\w$]*)', bygroups(Operator, Name.Attribute)),
- (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
- (r'[a-z_$][\w$]*', Name),
+ ]
+ }
+
+
+class VGLLexer(RegexLexer):
+ """
+ For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'VGL'
+ aliases = ['vgl']
+ filenames = ['*.rpf']
+
+ flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\{[^}]*\}', Comment.Multiline),
+ (r'declare', Keyword.Constant),
+ (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
+ r'|create|on|line|with|global|routine|value|endroutine|constant'
+ r'|global|set|join|library|compile_option|file|exists|create|copy'
+ r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
+ Keyword),
+ (r'(true|false|null|empty|error|locked)', Keyword.Constant),
+ (r'[~^*#!%&\[\]()<>|+=:;,./?-]', Operator),
+ (r'"[^"]*"', String),
+ (r'(\.)([a-z_$][\w$]*)', bygroups(Operator, Name.Attribute)),
+ (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
+ (r'[a-z_$][\w$]*', Name),
(r'[\r\n]+', Whitespace),
(r'\s+', Whitespace)
- ]
- }
-
-
-class AlloyLexer(RegexLexer):
- """
- For `Alloy <http://alloy.mit.edu>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Alloy'
- aliases = ['alloy']
- filenames = ['*.als']
- mimetypes = ['text/x-alloy']
-
- flags = re.MULTILINE | re.DOTALL
-
- iden_rex = r'[a-zA-Z_][\w\']*'
+ ]
+ }
+
+
+class AlloyLexer(RegexLexer):
+ """
+ For `Alloy <http://alloy.mit.edu>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Alloy'
+ aliases = ['alloy']
+ filenames = ['*.als']
+ mimetypes = ['text/x-alloy']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ iden_rex = r'[a-zA-Z_][\w\']*'
text_tuple = (r'[^\S\n]+', Whitespace)
-
- tokens = {
- 'sig': [
- (r'(extends)\b', Keyword, '#pop'),
- (iden_rex, Name),
- text_tuple,
- (r',', Punctuation),
- (r'\{', Operator, '#pop'),
- ],
- 'module': [
- text_tuple,
- (iden_rex, Name, '#pop'),
- ],
- 'fun': [
- text_tuple,
- (r'\{', Operator, '#pop'),
- (iden_rex, Name, '#pop'),
- ],
- 'root': [
- (r'--.*?$', Comment.Single),
- (r'//.*?$', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- text_tuple,
+
+ tokens = {
+ 'sig': [
+ (r'(extends)\b', Keyword, '#pop'),
+ (iden_rex, Name),
+ text_tuple,
+ (r',', Punctuation),
+ (r'\{', Operator, '#pop'),
+ ],
+ 'module': [
+ text_tuple,
+ (iden_rex, Name, '#pop'),
+ ],
+ 'fun': [
+ text_tuple,
+ (r'\{', Operator, '#pop'),
+ (iden_rex, Name, '#pop'),
+ ],
+ 'root': [
+ (r'--.*?$', Comment.Single),
+ (r'//.*?$', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ text_tuple,
(r'(module|open)(\s+)', bygroups(Keyword.Namespace, Whitespace),
- 'module'),
+ 'module'),
(r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Whitespace), 'sig'),
- (r'(iden|univ|none)\b', Keyword.Constant),
- (r'(int|Int)\b', Keyword.Type),
- (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
- (r'(all|some|no|sum|disj|when|else)\b', Keyword),
- (r'(run|check|for|but|exactly|expect|as)\b', Keyword),
- (r'(and|or|implies|iff|in)\b', Operator.Word),
+ (r'(iden|univ|none)\b', Keyword.Constant),
+ (r'(int|Int)\b', Keyword.Type),
+ (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
+ (r'(all|some|no|sum|disj|when|else)\b', Keyword),
+ (r'(run|check|for|but|exactly|expect|as)\b', Keyword),
+ (r'(and|or|implies|iff|in)\b', Operator.Word),
(r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Whitespace), 'fun'),
- (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator),
- (r'[-+/*%=<>&!^|~{}\[\]().]', Operator),
- (iden_rex, Name),
- (r'[:,]', Punctuation),
- (r'[0-9]+', Number.Integer),
+ (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator),
+ (r'[-+/*%=<>&!^|~{}\[\]().]', Operator),
+ (iden_rex, Name),
+ (r'[:,]', Punctuation),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r'\n', Whitespace),
- ]
- }
-
-
-class PanLexer(RegexLexer):
- """
+ ]
+ }
+
+
+class PanLexer(RegexLexer):
+ """
Lexer for `pan <https://github.com/quattor/pan/>`_ source files.
-
- Based on tcsh lexer.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pan'
- aliases = ['pan']
- filenames = ['*.pan']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\(', Keyword, 'paren'),
- (r'\{', Keyword, 'curly'),
- include('data'),
- ],
- 'basic': [
- (words((
- 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
- 'prefix', 'unique', 'object', 'foreach', 'include', 'template',
- 'function', 'variable', 'structure', 'extensible', 'declaration'),
+
+ Based on tcsh lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pan'
+ aliases = ['pan']
+ filenames = ['*.pan']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\(', Keyword, 'paren'),
+ (r'\{', Keyword, 'curly'),
+ include('data'),
+ ],
+ 'basic': [
+ (words((
+ 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final',
+ 'prefix', 'unique', 'object', 'foreach', 'include', 'template',
+ 'function', 'variable', 'structure', 'extensible', 'declaration'),
prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'file_contents', 'format', 'index', 'length', 'match', 'matches',
- 'replace', 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase',
- 'debug', 'error', 'traceback', 'deprecated', 'base64_decode',
- 'base64_encode', 'digest', 'escape', 'unescape', 'append', 'create',
- 'first', 'nlist', 'key', 'list', 'merge', 'next', 'prepend', 'is_boolean',
- 'is_defined', 'is_double', 'is_list', 'is_long', 'is_nlist', 'is_null',
- 'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
- 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
- 'path_exists', 'if_exists', 'return', 'value'),
+ Keyword),
+ (words((
+ 'file_contents', 'format', 'index', 'length', 'match', 'matches',
+ 'replace', 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase',
+ 'debug', 'error', 'traceback', 'deprecated', 'base64_decode',
+ 'base64_encode', 'digest', 'escape', 'unescape', 'append', 'create',
+ 'first', 'nlist', 'key', 'list', 'merge', 'next', 'prepend', 'is_boolean',
+ 'is_defined', 'is_double', 'is_list', 'is_long', 'is_nlist', 'is_null',
+ 'is_number', 'is_property', 'is_resource', 'is_string', 'to_boolean',
+ 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
+ 'path_exists', 'if_exists', 'return', 'value'),
prefix=r'\b', suffix=r'\b'),
- Name.Builtin),
- (r'#.*', Comment),
- (r'\\[\w\W]', String.Escape),
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r';', Punctuation),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
(r'\s+', Whitespace),
- (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
- (r'\d+(?= |\Z)', Number),
- ],
- 'curly': [
- (r'\}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- }
-
-
-class CrmshLexer(RegexLexer):
- """
- Lexer for `crmsh <http://crmsh.github.io/>`_ configuration files
- for Pacemaker clusters.
-
- .. versionadded:: 2.1
- """
- name = 'Crmsh'
- aliases = ['crmsh', 'pcmk']
- filenames = ['*.crmsh', '*.pcmk']
- mimetypes = []
-
- elem = words((
- 'node', 'primitive', 'group', 'clone', 'ms', 'location',
- 'colocation', 'order', 'fencing_topology', 'rsc_ticket',
- 'rsc_template', 'property', 'rsc_defaults',
- 'op_defaults', 'acl_target', 'acl_group', 'user', 'role',
- 'tag'), suffix=r'(?![\w#$-])')
- sub = words((
- 'params', 'meta', 'operations', 'op', 'rule',
- 'attributes', 'utilization'), suffix=r'(?![\w#$-])')
- acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])')
- bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])')
- un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])')
- date_exp = words(('in_range', 'date', 'spec', 'in'), suffix=r'(?![\w#$-])')
- acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)')
- bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)')
- val_qual = (r'(?:string|version|number)')
- rsc_role_action = (r'(?:Master|Started|Slave|Stopped|'
- r'start|promote|demote|stop)')
-
- tokens = {
- 'root': [
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ ],
+ 'curly': [
+ (r'\}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ }
+
+
+class CrmshLexer(RegexLexer):
+ """
+ Lexer for `crmsh <http://crmsh.github.io/>`_ configuration files
+ for Pacemaker clusters.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Crmsh'
+ aliases = ['crmsh', 'pcmk']
+ filenames = ['*.crmsh', '*.pcmk']
+ mimetypes = []
+
+ elem = words((
+ 'node', 'primitive', 'group', 'clone', 'ms', 'location',
+ 'colocation', 'order', 'fencing_topology', 'rsc_ticket',
+ 'rsc_template', 'property', 'rsc_defaults',
+ 'op_defaults', 'acl_target', 'acl_group', 'user', 'role',
+ 'tag'), suffix=r'(?![\w#$-])')
+ sub = words((
+ 'params', 'meta', 'operations', 'op', 'rule',
+ 'attributes', 'utilization'), suffix=r'(?![\w#$-])')
+ acl = words(('read', 'write', 'deny'), suffix=r'(?![\w#$-])')
+ bin_rel = words(('and', 'or'), suffix=r'(?![\w#$-])')
+ un_ops = words(('defined', 'not_defined'), suffix=r'(?![\w#$-])')
+ date_exp = words(('in_range', 'date', 'spec', 'in'), suffix=r'(?![\w#$-])')
+ acl_mod = (r'(?:tag|ref|reference|attribute|type|xpath)')
+ bin_ops = (r'(?:lt|gt|lte|gte|eq|ne)')
+ val_qual = (r'(?:string|version|number)')
+ rsc_role_action = (r'(?:Master|Started|Slave|Stopped|'
+ r'start|promote|demote|stop)')
+
+ tokens = {
+ 'root': [
(r'^(#.*)(\n)?', bygroups(Comment, Whitespace)),
- # attr=value (nvpair)
- (r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)',
- bygroups(Name.Attribute, Punctuation, String)),
- # need this construct, otherwise numeric node ids
- # are matched as scores
- # elem id:
- (r'(node)(\s+)([\w#$-]+)(:)',
- bygroups(Keyword, Whitespace, Name, Punctuation)),
- # scores
- (r'([+-]?([0-9]+|inf)):', Number),
- # keywords (elements and other)
- (elem, Keyword),
- (sub, Keyword),
- (acl, Keyword),
- # binary operators
- (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word),
- # other operators
- (bin_rel, Operator.Word),
- (un_ops, Operator.Word),
- (date_exp, Operator.Word),
- # builtin attributes (e.g. #uname)
- (r'#[a-z]+(?![\w#$-])', Name.Builtin),
- # acl_mod:blah
- (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod,
- bygroups(Keyword, Punctuation, Name)),
- # rsc_id[:(role|action)]
- # NB: this matches all other identifiers
- (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
- bygroups(Name, Punctuation, Operator.Word)),
- # punctuation
+ # attr=value (nvpair)
+ (r'([\w#$-]+)(=)("(?:""|[^"])*"|\S+)',
+ bygroups(Name.Attribute, Punctuation, String)),
+ # need this construct, otherwise numeric node ids
+ # are matched as scores
+ # elem id:
+ (r'(node)(\s+)([\w#$-]+)(:)',
+ bygroups(Keyword, Whitespace, Name, Punctuation)),
+ # scores
+ (r'([+-]?([0-9]+|inf)):', Number),
+ # keywords (elements and other)
+ (elem, Keyword),
+ (sub, Keyword),
+ (acl, Keyword),
+ # binary operators
+ (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word),
+ # other operators
+ (bin_rel, Operator.Word),
+ (un_ops, Operator.Word),
+ (date_exp, Operator.Word),
+ # builtin attributes (e.g. #uname)
+ (r'#[a-z]+(?![\w#$-])', Name.Builtin),
+ # acl_mod:blah
+ (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod,
+ bygroups(Keyword, Punctuation, Name)),
+ # rsc_id[:(role|action)]
+ # NB: this matches all other identifiers
+ (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
+ bygroups(Name, Punctuation, Operator.Word)),
+ # punctuation
(r'(\\(?=\n)|[\[\](){}/:@])', Punctuation),
- (r'\s+|\n', Whitespace),
- ],
- }
+ (r'\s+|\n', Whitespace),
+ ],
+ }
class FlatlineLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dylan.py b/contrib/python/Pygments/py3/pygments/lexers/dylan.py
index 74f81191dc..c745c14dc6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dylan.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dylan.py
@@ -1,286 +1,286 @@
-"""
- pygments.lexers.dylan
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Dylan language.
-
+"""
+ pygments.lexers.dylan
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Dylan language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Literal, Whitespace
-
-__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
-
-
-class DylanLexer(RegexLexer):
- """
- For the `Dylan <http://www.opendylan.org/>`_ language.
-
- .. versionadded:: 0.7
- """
-
- name = 'Dylan'
- aliases = ['dylan']
- filenames = ['*.dylan', '*.dyl', '*.intr']
- mimetypes = ['text/x-dylan']
-
- flags = re.IGNORECASE
-
+
+__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
+
+
+class DylanLexer(RegexLexer):
+ """
+ For the `Dylan <http://www.opendylan.org/>`_ language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Dylan'
+ aliases = ['dylan']
+ filenames = ['*.dylan', '*.dyl', '*.intr']
+ mimetypes = ['text/x-dylan']
+
+ flags = re.IGNORECASE
+
builtins = {
- 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
- 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
- 'each-subclass', 'exception', 'exclude', 'function', 'generic',
- 'handler', 'inherited', 'inline', 'inline-only', 'instance',
- 'interface', 'import', 'keyword', 'library', 'macro', 'method',
- 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
+ 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
+ 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
+ 'each-subclass', 'exception', 'exclude', 'function', 'generic',
+ 'handler', 'inherited', 'inline', 'inline-only', 'instance',
+ 'interface', 'import', 'keyword', 'library', 'macro', 'method',
+ 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
'singleton', 'slot', 'thread', 'variable', 'virtual'}
-
+
keywords = {
- 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
- 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
- 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
- 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
+ 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
+ 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
+ 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
+ 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
'while'}
-
+
operators = {
- '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
+ '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
'>', '>=', '&', '|'}
-
+
functions = {
- 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
- 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
- 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
- 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
- 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
- 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
- 'condition-format-arguments', 'condition-format-string', 'conjoin',
- 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
- 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
- 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
- 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
- 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
- 'function-arguments', 'function-return-values',
- 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
- 'generic-function-methods', 'head', 'head-setter', 'identity',
- 'initialize', 'instance?', 'integral?', 'intersection',
- 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
- 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
- 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
- 'min', 'modulo', 'negative', 'negative?', 'next-method',
- 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
- 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
- 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
- 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
- 'remove-method', 'replace-elements!', 'replace-subsequence!',
- 'restart-query', 'return-allowed?', 'return-description',
- 'return-query', 'reverse', 'reverse!', 'round', 'round/',
- 'row-major-index', 'second', 'second-setter', 'shallow-copy',
- 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
- 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
- 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
- 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
- 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
+ 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
+ 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
+ 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
+ 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
+ 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
+ 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
+ 'condition-format-arguments', 'condition-format-string', 'conjoin',
+ 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
+ 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
+ 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
+ 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
+ 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
+ 'function-arguments', 'function-return-values',
+ 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
+ 'generic-function-methods', 'head', 'head-setter', 'identity',
+ 'initialize', 'instance?', 'integral?', 'intersection',
+ 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
+ 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
+ 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
+ 'min', 'modulo', 'negative', 'negative?', 'next-method',
+ 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
+ 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
+ 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
+ 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
+ 'remove-method', 'replace-elements!', 'replace-subsequence!',
+ 'restart-query', 'return-allowed?', 'return-description',
+ 'return-query', 'reverse', 'reverse!', 'round', 'round/',
+ 'row-major-index', 'second', 'second-setter', 'shallow-copy',
+ 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
+ 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
+ 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
+ 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
+ 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
'vector', 'zero?'}
-
- valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- lowercase_value = value.lower()
- if lowercase_value in self.builtins:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.keywords:
- yield index, Keyword, value
- continue
- if lowercase_value in self.functions:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.operators:
- yield index, Operator, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- # Whitespace
+
+ valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ lowercase_value = value.lower()
+ if lowercase_value in self.builtins:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.keywords:
+ yield index, Keyword, value
+ continue
+ if lowercase_value in self.functions:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.operators:
+ yield index, Operator, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ # Whitespace
(r'\s+', Whitespace),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # lid header
+ (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
bygroups(Name.Attribute, Operator, Whitespace, String)),
-
- default('code') # no header match, switch to code
- ],
- 'code': [
- # Whitespace
+
+ default('code') # no header match, switch to code
+ ],
+ 'code': [
+ # Whitespace
(r'\s+', Whitespace),
-
- # single line comment
+
+ # single line comment
(r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
-
- # multi-line comment
- (r'/\*', Comment.Multiline, 'comment'),
-
- # strings and characters
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
-
- # binary integer
- (r'#b[01]+', Number.Bin),
-
- # octal integer
- (r'#o[0-7]+', Number.Oct),
-
- # floating point
- (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
-
- # decimal integer
- (r'[-+]?\d+', Number.Integer),
-
- # hex integer
- (r'#x[0-9a-f]+', Number.Hex),
-
- # Macro parameters
- (r'(\?' + valid_name + ')(:)'
- r'(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'\?' + valid_name, Name.Tag),
-
- # Punctuation
- (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
-
- # Most operators are picked up as names and then re-flagged.
- # This one isn't valid in a name though, so we pick it up now.
- (r':=', Operator),
-
- # Pick up #t / #f before we match other stuff with #.
- (r'#[tf]', Literal),
-
- # #"foo" style keywords
- (r'#"', String.Symbol, 'keyword'),
-
- # #rest, #key, #all-keys, etc.
- (r'#[a-z0-9-]+', Keyword),
-
- # required-init-keyword: style keywords.
- (valid_name + ':', Keyword),
-
- # class names
+
+ # multi-line comment
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # strings and characters
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
+
+ # binary integer
+ (r'#b[01]+', Number.Bin),
+
+ # octal integer
+ (r'#o[0-7]+', Number.Oct),
+
+ # floating point
+ (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
+
+ # decimal integer
+ (r'[-+]?\d+', Number.Integer),
+
+ # hex integer
+ (r'#x[0-9a-f]+', Number.Hex),
+
+ # Macro parameters
+ (r'(\?' + valid_name + ')(:)'
+ r'(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'\?' + valid_name, Name.Tag),
+
+ # Punctuation
+ (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
+
+ # Most operators are picked up as names and then re-flagged.
+ # This one isn't valid in a name though, so we pick it up now.
+ (r':=', Operator),
+
+ # Pick up #t / #f before we match other stuff with #.
+ (r'#[tf]', Literal),
+
+ # #"foo" style keywords
+ (r'#"', String.Symbol, 'keyword'),
+
+ # #rest, #key, #all-keys, etc.
+ (r'#[a-z0-9-]+', Keyword),
+
+ # required-init-keyword: style keywords.
+ (valid_name + ':', Keyword),
+
+ # class names
('<' + valid_name + '>', Name.Class),
-
- # define variable forms.
+
+ # define variable forms.
(r'\*' + valid_name + r'\*', Name.Variable.Global),
-
- # define constant forms.
- (r'\$' + valid_name, Name.Constant),
-
- # everything else. We re-flag some of these in the method above.
- (valid_name, Name),
- ],
- 'comment': [
+
+ # define constant forms.
+ (r'\$' + valid_name, Name.Constant),
+
+ # everything else. We re-flag some of these in the method above.
+ (valid_name, Name),
+ ],
+ 'comment': [
(r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'keyword': [
- (r'"', String.Symbol, '#pop'),
- (r'[^\\"]+', String.Symbol), # all other characters
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ]
- }
-
-
-class DylanLidLexer(RegexLexer):
- """
- For Dylan LID (Library Interchange Definition) files.
-
- .. versionadded:: 1.6
- """
-
- name = 'DylanLID'
- aliases = ['dylan-lid', 'lid']
- filenames = ['*.lid', '*.hdp']
- mimetypes = ['text/x-dylan-lid']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- # Whitespace
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'keyword': [
+ (r'"', String.Symbol, '#pop'),
+ (r'[^\\"]+', String.Symbol), # all other characters
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ]
+ }
+
+
+class DylanLidLexer(RegexLexer):
+ """
+ For Dylan LID (Library Interchange Definition) files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'DylanLID'
+ aliases = ['dylan-lid', 'lid']
+ filenames = ['*.lid', '*.hdp']
+ mimetypes = ['text/x-dylan-lid']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ # Whitespace
(r'\s+', Whitespace),
-
- # single line comment
+
+ # single line comment
(r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
-
- # lid header
- (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+
+ # lid header
+ (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
bygroups(Name.Attribute, Operator, Whitespace, String)),
- ]
- }
-
-
-class DylanConsoleLexer(Lexer):
- """
- For Dylan interactive console output like:
-
- .. sourcecode:: dylan-console
-
- ? let a = 1;
- => 1
- ? a
- => 1
-
- This is based on a copy of the RubyConsoleLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Dylan session'
- aliases = ['dylan-console', 'dylan-repl']
- filenames = ['*.dylan-console']
- mimetypes = ['text/x-dylan-console']
-
- _line_re = re.compile('.*?\n')
+ ]
+ }
+
+
+class DylanConsoleLexer(Lexer):
+ """
+ For Dylan interactive console output like:
+
+ .. sourcecode:: dylan-console
+
+ ? let a = 1;
+ => 1
+ ? a
+ => 1
+
+ This is based on a copy of the RubyConsoleLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Dylan session'
+ aliases = ['dylan-console', 'dylan-repl']
+ filenames = ['*.dylan-console']
+ mimetypes = ['text/x-dylan-console']
+
+ _line_re = re.compile('.*?\n')
_prompt_re = re.compile(r'\?| ')
-
- def get_tokens_unprocessed(self, text):
- dylexer = DylanLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in self._line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
+
+ def get_tokens_unprocessed(self, text):
+ dylexer = DylanLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in self._line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
yield from do_insertions(insertions,
dylexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
yield from do_insertions(insertions,
dylexer.get_tokens_unprocessed(curcode))
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ecl.py b/contrib/python/Pygments/py3/pygments/lexers/ecl.py
index 47cad2220c..e7484761c6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ecl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ecl.py
@@ -1,126 +1,126 @@
-"""
- pygments.lexers.ecl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the ECL language.
-
+"""
+ pygments.lexers.ecl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the ECL language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['ECLLexer']
-
-
-class ECLLexer(RegexLexer):
- """
- Lexer for the declarative big-data `ECL
+
+__all__ = ['ECLLexer']
+
+
+class ECLLexer(RegexLexer):
+ """
+ Lexer for the declarative big-data `ECL
<https://hpccsystems.com/training/documentation/ecl-language-reference/html>`_
- language.
-
- .. versionadded:: 1.5
- """
-
- name = 'ECL'
- aliases = ['ecl']
- filenames = ['*.ecl']
- mimetypes = ['application/x-ecl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('statements'),
- ],
- 'whitespace': [
+ language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'ECL'
+ aliases = ['ecl']
+ filenames = ['*.ecl']
+ mimetypes = ['application/x-ecl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('statements'),
+ ],
+ 'whitespace': [
(r'\s+', Whitespace),
- (r'\/\/.*', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- include('types'),
- include('keywords'),
- include('functions'),
- include('hash'),
- (r'"', String, 'string'),
- (r'\'', String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float),
- (r'0x[0-9a-f]+[lu]*', Number.Hex),
- (r'0[0-7]+[lu]*', Number.Oct),
- (r'\d+[lu]*', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]+', Operator),
- (r'[{}()\[\],.;]', Punctuation),
- (r'[a-z_]\w*', Name),
- ],
- 'hash': [
- (r'^#.*$', Comment.Preproc),
- ],
- 'types': [
- (r'(RECORD|END)\D', Keyword.Declaration),
- (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
- r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
- r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
+ (r'\/\/.*', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ include('types'),
+ include('keywords'),
+ include('functions'),
+ include('hash'),
+ (r'"', String, 'string'),
+ (r'\'', String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float),
+ (r'0x[0-9a-f]+[lu]*', Number.Hex),
+ (r'0[0-7]+[lu]*', Number.Oct),
+ (r'\d+[lu]*', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]+', Operator),
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[a-z_]\w*', Name),
+ ],
+ 'hash': [
+ (r'^#.*$', Comment.Preproc),
+ ],
+ 'types': [
+ (r'(RECORD|END)\D', Keyword.Declaration),
+ (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
+ r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
+ r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
bygroups(Keyword.Type, Whitespace)),
- ],
- 'keywords': [
- (words((
- 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
- 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
- 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
- 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
- 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
- 'WAIT', 'WHEN'), suffix=r'\b'),
- Keyword.Reserved),
- # These are classed differently, check later
- (words((
- 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE',
- 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT',
- 'EXCLUSIVE', 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL',
- 'FUNCTION', 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED',
- 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO',
- 'MANY', 'MAXCOUNT', 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE',
- 'NOROOT', 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE',
- 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP',
- 'REPEAT', 'RETURN', 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW',
- 'SKIP', 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM',
- 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD',
- 'WITHIN', 'XML', 'XPATH', '__COMPRESSED__'), suffix=r'\b'),
- Keyword.Reserved),
- ],
- 'functions': [
- (words((
- 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE',
- 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS',
- 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
- 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE',
- 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE',
- 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
- 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID',
- 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP',
- 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE',
- 'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE',
- 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED',
- 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED',
- 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF',
- 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH',
- 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
- 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE',
- 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
- Name.Function),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\'', String, '#pop'),
- (r'[^"\']+', String),
- ],
- }
+ ],
+ 'keywords': [
+ (words((
+ 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
+ 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
+ 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
+ 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
+ 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
+ 'WAIT', 'WHEN'), suffix=r'\b'),
+ Keyword.Reserved),
+ # These are classed differently, check later
+ (words((
+ 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE',
+ 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT',
+ 'EXCLUSIVE', 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL',
+ 'FUNCTION', 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED',
+ 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO',
+ 'MANY', 'MAXCOUNT', 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE',
+ 'NOROOT', 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE',
+ 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP',
+ 'REPEAT', 'RETURN', 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW',
+ 'SKIP', 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM',
+ 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD',
+ 'WITHIN', 'XML', 'XPATH', '__COMPRESSED__'), suffix=r'\b'),
+ Keyword.Reserved),
+ ],
+ 'functions': [
+ (words((
+ 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE',
+ 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS',
+ 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
+ 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE',
+ 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE',
+ 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
+ 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID',
+ 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP',
+ 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE',
+ 'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE',
+ 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED',
+ 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED',
+ 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF',
+ 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH',
+ 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
+ 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE',
+ 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
+ Name.Function),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\'', String, '#pop'),
+ (r'[^"\']+', String),
+ ],
+ }
def analyse_text(text):
"""This is very difficult to guess relative to other business languages.
diff --git a/contrib/python/Pygments/py3/pygments/lexers/eiffel.py b/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
index 599b446306..e5ab038bf7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
@@ -1,66 +1,66 @@
-"""
- pygments.lexers.eiffel
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Eiffel language.
-
+"""
+ pygments.lexers.eiffel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Eiffel language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['EiffelLexer']
-
-
-class EiffelLexer(RegexLexer):
- """
- For `Eiffel <http://www.eiffel.com>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Eiffel'
- aliases = ['eiffel']
- filenames = ['*.e']
- mimetypes = ['text/x-eiffel']
-
- tokens = {
- 'root': [
+
+__all__ = ['EiffelLexer']
+
+
+class EiffelLexer(RegexLexer):
+ """
+ For `Eiffel <http://www.eiffel.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Eiffel'
+ aliases = ['eiffel']
+ filenames = ['*.e']
+ mimetypes = ['text/x-eiffel']
+
+ tokens = {
+ 'root': [
(r'[^\S\n]+', Whitespace),
(r'--.*?$', Comment.Single),
(r'[^\S\n]+', Whitespace),
- # Please note that keyword and operator are case insensitive.
- (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
+ # Please note that keyword and operator are case insensitive.
+ (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
(r'(?i)(not|xor|implies|or)\b', Operator.Word),
(r'(?i)(and)(?:(\s+)(then))?\b', bygroups(Operator.Word, Whitespace, Operator.Word)),
(r'(?i)(or)(?:(\s+)(else))?\b', bygroups(Operator.Word, Whitespace, Operator.Word)),
- (words((
- 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached',
- 'attribute', 'check', 'class', 'convert', 'create', 'debug',
- 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure',
- 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if',
- 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none',
- 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename',
- 'require', 'rescue', 'retry', 'select', 'separate', 'then',
- 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
- Keyword.Reserved),
+ (words((
+ 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached',
+ 'attribute', 'check', 'class', 'convert', 'create', 'debug',
+ 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure',
+ 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if',
+ 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none',
+ 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename',
+ 'require', 'rescue', 'retry', 'select', 'separate', 'then',
+ 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
+ Keyword.Reserved),
(r'"\[([^\]%]|%(.|\n)|\][^"])*?\]"', String),
- (r'"([^"%\n]|%.)*?"', String),
- include('numbers'),
- (r"'([^'%]|%'|%%)'", String.Char),
- (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\?!#%&@|+/\-=>*$<^\[\]])", Operator),
- (r"([{}():;,.])", Punctuation),
- (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
- (r'([A-Z][A-Z0-9_]*)', Name.Class),
+ (r'"([^"%\n]|%.)*?"', String),
+ include('numbers'),
+ (r"'([^'%]|%'|%%)'", String.Char),
+ (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\?!#%&@|+/\-=>*$<^\[\]])", Operator),
+ (r"([{}():;,.])", Punctuation),
+ (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
+ (r'([A-Z][A-Z0-9_]*)', Name.Class),
(r'\n+', Whitespace),
- ],
- 'numbers': [
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'0[bB][01]+', Number.Bin),
- (r'0[cC][0-7]+', Number.Oct),
- (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
- (r'[0-9]+', Number.Integer),
- ],
- }
+ ],
+ 'numbers': [
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[cC][0-7]+', Number.Oct),
+ (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ ],
+ }
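(Not part of the diff: a minimal usage sketch for the EiffelLexer defined above, using the standard Pygments get_tokens API; the Eiffel fragment is illustrative only.)

    from pygments.lexers.eiffel import EiffelLexer

    # List the (token type, text) pairs the 'root' rules above produce;
    # keywords such as 'feature' are matched case-insensitively.
    sample = "feature -- access\n   count: INTEGER\n"
    for token_type, text in EiffelLexer().get_tokens(sample):
        print(token_type, repr(text))
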
diff --git a/contrib/python/Pygments/py3/pygments/lexers/elm.py b/contrib/python/Pygments/py3/pygments/lexers/elm.py
index 298dbf5986..b387919f0a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/elm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/elm.py
@@ -1,123 +1,123 @@
-"""
- pygments.lexers.elm
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Elm programming language.
-
+"""
+ pygments.lexers.elm
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Elm programming language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from pygments.lexer import RegexLexer, words, include, bygroups
from pygments.token import Comment, Keyword, Name, Number, Punctuation, String, \
Text, Whitespace
-
-__all__ = ['ElmLexer']
-
-
-class ElmLexer(RegexLexer):
- """
- For `Elm <http://elm-lang.org/>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'Elm'
- aliases = ['elm']
- filenames = ['*.elm']
- mimetypes = ['text/x-elm']
-
+
+__all__ = ['ElmLexer']
+
+
+class ElmLexer(RegexLexer):
+ """
+ For `Elm <http://elm-lang.org/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Elm'
+ aliases = ['elm']
+ filenames = ['*.elm']
+ mimetypes = ['text/x-elm']
+
validName = r'[a-z_][a-zA-Z0-9_\']*'
-
- specialName = r'^main '
-
- builtinOps = (
- '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
- '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
- '..', '.', '->', '-', '++', '+', '*', '&&', '%',
- )
-
- reservedWords = words((
- 'alias', 'as', 'case', 'else', 'if', 'import', 'in',
- 'let', 'module', 'of', 'port', 'then', 'type', 'where',
+
+ specialName = r'^main '
+
+ builtinOps = (
+ '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
+ '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
+ '..', '.', '->', '-', '++', '+', '*', '&&', '%',
+ )
+
+ reservedWords = words((
+ 'alias', 'as', 'case', 'else', 'if', 'import', 'in',
+ 'let', 'module', 'of', 'port', 'then', 'type', 'where',
), suffix=r'\b')
-
- tokens = {
- 'root': [
-
- # Comments
+
+ tokens = {
+ 'root': [
+
+ # Comments
(r'\{-', Comment.Multiline, 'comment'),
- (r'--.*', Comment.Single),
-
- # Whitespace
+ (r'--.*', Comment.Single),
+
+ # Whitespace
(r'\s+', Whitespace),
-
- # Strings
- (r'"', String, 'doublequote'),
-
- # Modules
+
+ # Strings
+ (r'"', String, 'doublequote'),
+
+ # Modules
(r'^(\s*)(module)(\s*)', bygroups(Whitespace, Keyword.Namespace,
Whitespace), 'imports'),
-
- # Imports
+
+ # Imports
(r'^(\s*)(import)(\s*)', bygroups(Whitespace, Keyword.Namespace,
Whitespace), 'imports'),
-
- # Shaders
- (r'\[glsl\|.*', Name.Entity, 'shader'),
-
- # Keywords
- (reservedWords, Keyword.Reserved),
-
- # Types
+
+ # Shaders
+ (r'\[glsl\|.*', Name.Entity, 'shader'),
+
+ # Keywords
+ (reservedWords, Keyword.Reserved),
+
+ # Types
(r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
-
- # Main
- (specialName, Keyword.Reserved),
-
- # Prefix Operators
- (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
-
- # Infix Operators
+
+ # Main
+ (specialName, Keyword.Reserved),
+
+ # Prefix Operators
+ (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),
+
+ # Infix Operators
(words(builtinOps), Name.Function),
-
- # Numbers
- include('numbers'),
-
- # Variable Names
- (validName, Name.Variable),
-
- # Parens
+
+ # Numbers
+ include('numbers'),
+
+ # Variable Names
+ (validName, Name.Variable),
+
+ # Parens
(r'[,()\[\]{}]', Punctuation),
-
- ],
-
- 'comment': [
+
+ ],
+
+ 'comment': [
(r'-(?!\})', Comment.Multiline),
(r'\{-', Comment.Multiline, 'comment'),
- (r'[^-}]', Comment.Multiline),
+ (r'[^-}]', Comment.Multiline),
(r'-\}', Comment.Multiline, '#pop'),
- ],
-
- 'doublequote': [
+ ],
+
+ 'doublequote': [
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\[nrfvb\\"]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- 'imports': [
- (r'\w+(\.\w+)*', Name.Class, '#pop'),
- ],
-
- 'numbers': [
- (r'_?\d+\.(?=\d+)', Number.Float),
- (r'_?\d+', Number.Integer),
- ],
-
- 'shader': [
- (r'\|(?!\])', Name.Entity),
- (r'\|\]', Name.Entity, '#pop'),
+ (r'[^"]', String),
+ (r'"', String, '#pop'),
+ ],
+
+ 'imports': [
+ (r'\w+(\.\w+)*', Name.Class, '#pop'),
+ ],
+
+ 'numbers': [
+ (r'_?\d+\.(?=\d+)', Number.Float),
+ (r'_?\d+', Number.Integer),
+ ],
+
+ 'shader': [
+ (r'\|(?!\])', Name.Entity),
+ (r'\|\]', Name.Entity, '#pop'),
(r'(.*)(\n)', bygroups(Name.Entity, Whitespace)),
- ],
- }
+ ],
+ }
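(Not part of the diff: a sketch of the ElmLexer above in use; the Elm line is made up. Per the 'root' rules, a '"' pushes the 'doublequote' state and '[glsl|' pushes 'shader', while operators from builtinOps tokenize as Name.Function.)

    from pygments.lexers.elm import ElmLexer

    # Dump the token stream for a one-line Elm definition.
    sample = 'greet name = "Hello, " ++ name\n'
    for tok, text in ElmLexer().get_tokens(sample):
        print(tok, repr(text))
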
diff --git a/contrib/python/Pygments/py3/pygments/lexers/erlang.py b/contrib/python/Pygments/py3/pygments/lexers/erlang.py
index 2563ffc263..537b384322 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/erlang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/erlang.py
@@ -1,120 +1,120 @@
-"""
- pygments.lexers.erlang
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Erlang.
-
+"""
+ pygments.lexers.erlang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Erlang.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
- include, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
+ include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
-
-__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
- 'ElixirLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class ErlangLexer(RegexLexer):
- """
- For the Erlang functional programming language.
-
- Blame Jeremy Thurgood (http://jerith.za.net/).
-
- .. versionadded:: 0.9
- """
-
- name = 'Erlang'
- aliases = ['erlang']
- filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
- mimetypes = ['text/x-erlang']
-
- keywords = (
- 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
- 'let', 'of', 'query', 'receive', 'try', 'when',
- )
-
- builtins = ( # See erlang(3) man page
- 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
- 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
- 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
- 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
- 'float', 'float_to_list', 'fun_info', 'fun_to_list',
- 'function_exported', 'garbage_collect', 'get', 'get_keys',
- 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
- 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
- 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
- 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
- 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
- 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
- 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
- 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
- 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
- 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
- 'pid_to_list', 'port_close', 'port_command', 'port_connect',
- 'port_control', 'port_call', 'port_info', 'port_to_list',
- 'process_display', 'process_flag', 'process_info', 'purge_module',
- 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
- 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
- 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
- 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
- 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
- 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
- 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
- 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
- )
-
- operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
- word_operators = (
- 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
- 'div', 'not', 'or', 'orelse', 'rem', 'xor'
- )
-
- atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
-
- variable_re = r'(?:[A-Z_]\w*)'
-
+
+__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
+ 'ElixirLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class ErlangLexer(RegexLexer):
+ """
+ For the Erlang functional programming language.
+
+ Blame Jeremy Thurgood (http://jerith.za.net/).
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'Erlang'
+ aliases = ['erlang']
+ filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
+ mimetypes = ['text/x-erlang']
+
+ keywords = (
+ 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
+ 'let', 'of', 'query', 'receive', 'try', 'when',
+ )
+
+ builtins = ( # See erlang(3) man page
+ 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
+ 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
+ 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
+ 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
+ 'float', 'float_to_list', 'fun_info', 'fun_to_list',
+ 'function_exported', 'garbage_collect', 'get', 'get_keys',
+ 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
+ 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
+ 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
+ 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
+ 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
+ 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
+ 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
+ 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
+ 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
+ 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
+ 'pid_to_list', 'port_close', 'port_command', 'port_connect',
+ 'port_control', 'port_call', 'port_info', 'port_to_list',
+ 'process_display', 'process_flag', 'process_info', 'purge_module',
+ 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
+ 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
+ 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
+ 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
+ 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
+ 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
+ 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
+ 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
+ )
+
+ operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
+ word_operators = (
+ 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
+ 'div', 'not', 'or', 'orelse', 'rem', 'xor'
+ )
+
+ atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
+
+ variable_re = r'(?:[A-Z_]\w*)'
+
esc_char_re = r'[bdefnrstv\'"\\]'
esc_octal_re = r'[0-7][0-7]?[0-7]?'
esc_hex_re = r'(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})'
esc_ctrl_re = r'\^[a-zA-Z]'
escape_re = r'(?:\\(?:'+esc_char_re+r'|'+esc_octal_re+r'|'+esc_hex_re+r'|'+esc_ctrl_re+r'))'
-
- macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
-
- base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
-
- tokens = {
- 'root': [
+
+ macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
+
+ base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
(r'(%.*)(\n)', bygroups(Comment, Whitespace)),
- (words(keywords, suffix=r'\b'), Keyword),
- (words(builtins, suffix=r'\b'), Name.Builtin),
- (words(word_operators, suffix=r'\b'), Operator.Word),
- (r'^-', Punctuation, 'directive'),
- (operators, Operator),
- (r'"', String, 'string'),
- (r'<<', Name.Label),
- (r'>>', Name.Label),
- ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
- ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(builtins, suffix=r'\b'), Name.Builtin),
+ (words(word_operators, suffix=r'\b'), Operator.Word),
+ (r'^-', Punctuation, 'directive'),
+ (operators, Operator),
+ (r'"', String, 'string'),
+ (r'<<', Name.Label),
+ (r'>>', Name.Label),
+ ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
+ ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
bygroups(Name.Function, Whitespace, Punctuation)),
- (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
- (r'[+-]?\d+', Number.Integer),
- (r'[+-]?\d+.\d+', Number.Float),
- (r'[]\[:_@\".{}()|;,]', Punctuation),
- (variable_re, Name.Variable),
- (atom_re, Name),
- (r'\?'+macro_re, Name.Constant),
- (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
- (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
+ (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
+ (r'[+-]?\d+', Number.Integer),
+ (r'[+-]?\d+.\d+', Number.Float),
+ (r'[]\[:_@\".{}()|;,]', Punctuation),
+ (variable_re, Name.Variable),
+ (atom_re, Name),
+ (r'\?'+macro_re, Name.Constant),
+ (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
+ (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
# Erlang script shebang
(r'\A#!.+\n', Comment.Hashbang),
@@ -122,21 +122,21 @@ class ErlangLexer(RegexLexer):
# EEP 43: Maps
# http://www.erlang.org/eeps/eep-0043.html
(r'#\{', Punctuation, 'map_key'),
- ],
- 'string': [
- (escape_re, String.Escape),
- (r'"', String, '#pop'),
+ ],
+ 'string': [
+ (escape_re, String.Escape),
+ (r'"', String, '#pop'),
(r'~[0-9.*]*[~#+BPWXb-ginpswx]', String.Interpol),
- (r'[^"\\~]+', String),
- (r'~', String),
- ],
- 'directive': [
- (r'(define)(\s*)(\()('+macro_re+r')',
+ (r'[^"\\~]+', String),
+ (r'~', String),
+ ],
+ 'directive': [
+ (r'(define)(\s*)(\()('+macro_re+r')',
bygroups(Name.Entity, Whitespace, Punctuation, Name.Constant), '#pop'),
- (r'(record)(\s*)(\()('+macro_re+r')',
+ (r'(record)(\s*)(\()('+macro_re+r')',
bygroups(Name.Entity, Whitespace, Punctuation, Name.Label), '#pop'),
- (atom_re, Name.Entity, '#pop'),
- ],
+ (atom_re, Name.Entity, '#pop'),
+ ],
'map_key': [
include('root'),
(r'=>', Punctuation, 'map_val'),
@@ -148,146 +148,146 @@ class ErlangLexer(RegexLexer):
(r',', Punctuation, '#pop'),
(r'(?=\})', Punctuation, '#pop'),
],
- }
-
-
-class ErlangShellLexer(Lexer):
- """
- Shell sessions in erl (for Erlang code).
-
- .. versionadded:: 1.1
- """
- name = 'Erlang erl session'
- aliases = ['erl']
- filenames = ['*.erl-sh']
- mimetypes = ['text/x-erl-shellsession']
-
+ }
+
+
+class ErlangShellLexer(Lexer):
+ """
+ Shell sessions in erl (for Erlang code).
+
+ .. versionadded:: 1.1
+ """
+ name = 'Erlang erl session'
+ aliases = ['erl']
+ filenames = ['*.erl-sh']
+ mimetypes = ['text/x-erl-shellsession']
+
_prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
-
- def get_tokens_unprocessed(self, text):
- erlexer = ErlangLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
+
+ def get_tokens_unprocessed(self, text):
+ erlexer = ErlangLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
yield from do_insertions(insertions,
erlexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- if line.startswith('*'):
- yield match.start(), Generic.Traceback, line
- else:
- yield match.start(), Generic.Output, line
- if curcode:
+ curcode = ''
+ insertions = []
+ if line.startswith('*'):
+ yield match.start(), Generic.Traceback, line
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
yield from do_insertions(insertions,
erlexer.get_tokens_unprocessed(curcode))
-
-
-def gen_elixir_string_rules(name, symbol, token):
- states = {}
- states['string_' + name] = [
- (r'[^#%s\\]+' % (symbol,), token),
- include('escapes'),
- (r'\\.', token),
- (r'(%s)' % (symbol,), bygroups(token), "#pop"),
- include('interpol')
- ]
- return states
-
-
+
+
+def gen_elixir_string_rules(name, symbol, token):
+ states = {}
+ states['string_' + name] = [
+ (r'[^#%s\\]+' % (symbol,), token),
+ include('escapes'),
+ (r'\\.', token),
+ (r'(%s)' % (symbol,), bygroups(token), "#pop"),
+ include('interpol')
+ ]
+ return states
+
+
def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
- if interpol:
- return [
+ if interpol:
+ return [
(r'[^#%s\\]+' % (term_class,), token),
- include('escapes'),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- include('interpol')
- ]
- else:
- return [
+ include('escapes'),
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ include('interpol')
+ ]
+ else:
+ return [
(r'[^%s\\]+' % (term_class,), token),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- ]
-
-
-class ElixirLexer(RegexLexer):
- """
- For the `Elixir language <http://elixir-lang.org>`_.
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir'
- aliases = ['elixir', 'ex', 'exs']
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ ]
+
+
+class ElixirLexer(RegexLexer):
+ """
+ For the `Elixir language <http://elixir-lang.org>`_.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir'
+ aliases = ['elixir', 'ex', 'exs']
filenames = ['*.ex', '*.eex', '*.exs', '*.leex']
- mimetypes = ['text/x-elixir']
-
- KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
- KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
- BUILTIN = (
- 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
+ mimetypes = ['text/x-elixir']
+
+ KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
+ KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
+ BUILTIN = (
+ 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
'quote', 'unquote', 'unquote_splicing', 'throw', 'super',
- )
- BUILTIN_DECLARATION = (
- 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
+ )
+ BUILTIN_DECLARATION = (
+ 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback',
- )
-
- BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
- CONSTANT = ('nil', 'true', 'false')
-
- PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
-
- OPERATORS3 = (
- '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
- '~>>', '<~>', '|~>', '<|>',
- )
- OPERATORS2 = (
- '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
- '->', '<-', '|', '.', '=', '~>', '<~',
- )
- OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
-
- PUNCTUATION = (
+ )
+
+ BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
+ CONSTANT = ('nil', 'true', 'false')
+
+ PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
+
+ OPERATORS3 = (
+ '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
+ '~>>', '<~>', '|~>', '<|>',
+ )
+ OPERATORS2 = (
+ '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
+ '->', '<-', '|', '.', '=', '~>', '<~',
+ )
+ OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
+
+ PUNCTUATION = (
'\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']',
- )
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.KEYWORD:
- yield index, Keyword, value
- elif value in self.KEYWORD_OPERATOR:
- yield index, Operator.Word, value
- elif value in self.BUILTIN:
- yield index, Keyword, value
- elif value in self.BUILTIN_DECLARATION:
- yield index, Keyword.Declaration, value
- elif value in self.BUILTIN_NAMESPACE:
- yield index, Keyword.Namespace, value
- elif value in self.CONSTANT:
- yield index, Name.Constant, value
- elif value in self.PSEUDO_VAR:
- yield index, Name.Builtin.Pseudo, value
- else:
- yield index, token, value
- else:
- yield index, token, value
-
- def gen_elixir_sigil_rules():
- # all valid sigil terminators (excluding heredocs)
- terminators = [
+ )
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.KEYWORD:
+ yield index, Keyword, value
+ elif value in self.KEYWORD_OPERATOR:
+ yield index, Operator.Word, value
+ elif value in self.BUILTIN:
+ yield index, Keyword, value
+ elif value in self.BUILTIN_DECLARATION:
+ yield index, Keyword.Declaration, value
+ elif value in self.BUILTIN_NAMESPACE:
+ yield index, Keyword.Namespace, value
+ elif value in self.CONSTANT:
+ yield index, Name.Constant, value
+ elif value in self.PSEUDO_VAR:
+ yield index, Name.Builtin.Pseudo, value
+ else:
+ yield index, token, value
+ else:
+ yield index, token, value
+
+ def gen_elixir_sigil_rules():
+ # all valid sigil terminators (excluding heredocs)
+ terminators = [
(r'\{', r'\}', '}', 'cb'),
(r'\[', r'\]', r'\]', 'sb'),
(r'\(', r'\)', ')', 'pa'),
@@ -296,236 +296,236 @@ class ElixirLexer(RegexLexer):
(r'\|', r'\|', '|', 'pipe'),
('"', '"', '"', 'quot'),
("'", "'", "'", 'apos'),
- ]
-
- # heredocs have slightly different rules
- triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
-
- token = String.Other
- states = {'sigils': []}
-
- for term, name in triquotes:
- states['sigils'] += [
- (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-intp')),
- (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-no-intp')),
- ]
-
- states[name + '-end'] = [
- (r'[a-zA-Z]+', token, '#pop'),
- default('#pop'),
- ]
- states[name + '-intp'] = [
+ ]
+
+ # heredocs have slightly different rules
+ triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
+
+ token = String.Other
+ states = {'sigils': []}
+
+ for term, name in triquotes:
+ states['sigils'] += [
+ (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-intp')),
+ (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-no-intp')),
+ ]
+
+ states[name + '-end'] = [
+ (r'[a-zA-Z]+', token, '#pop'),
+ default('#pop'),
+ ]
+ states[name + '-intp'] = [
(r'^(\s*)(' + term + ')', bygroups(Whitespace, String.Heredoc), '#pop'),
- include('heredoc_interpol'),
- ]
- states[name + '-no-intp'] = [
+ include('heredoc_interpol'),
+ ]
+ states[name + '-no-intp'] = [
(r'^(\s*)(' + term +')', bygroups(Whitespace, String.Heredoc), '#pop'),
- include('heredoc_no_interpol'),
- ]
-
+ include('heredoc_no_interpol'),
+ ]
+
for lterm, rterm, rterm_class, name in terminators:
- states['sigils'] += [
- (r'~[a-z]' + lterm, token, name + '-intp'),
- (r'~[A-Z]' + lterm, token, name + '-no-intp'),
- ]
+ states['sigils'] += [
+ (r'~[a-z]' + lterm, token, name + '-intp'),
+ (r'~[A-Z]' + lterm, token, name + '-no-intp'),
+ ]
states[name + '-intp'] = \
gen_elixir_sigstr_rules(rterm, rterm_class, token)
- states[name + '-no-intp'] = \
+ states[name + '-no-intp'] = \
gen_elixir_sigstr_rules(rterm, rterm_class, token, interpol=False)
-
- return states
-
- op3_re = "|".join(re.escape(s) for s in OPERATORS3)
- op2_re = "|".join(re.escape(s) for s in OPERATORS2)
- op1_re = "|".join(re.escape(s) for s in OPERATORS1)
- ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
- punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
+
+ return states
+
+ op3_re = "|".join(re.escape(s) for s in OPERATORS3)
+ op2_re = "|".join(re.escape(s) for s in OPERATORS2)
+ op1_re = "|".join(re.escape(s) for s in OPERATORS1)
+ ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
+ punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
alnum = r'\w'
- name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
- modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
- complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
- special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
-
- long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
- hex_char_re = r'(\\x[\da-fA-F]{1,2})'
- escape_char_re = r'(\\[abdefnrstv])'
-
- tokens = {
- 'root': [
+ name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
+ modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
+ complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
+ special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
+
+ long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
+ hex_char_re = r'(\\x[\da-fA-F]{1,2})'
+ escape_char_re = r'(\\[abdefnrstv])'
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'#.*$', Comment.Single),
-
- # Various kinds of characters
- (r'(\?)' + long_hex_char_re,
- bygroups(String.Char,
- String.Escape, Number.Hex, String.Escape)),
- (r'(\?)' + hex_char_re,
- bygroups(String.Char, String.Escape)),
- (r'(\?)' + escape_char_re,
- bygroups(String.Char, String.Escape)),
- (r'\?\\?.', String.Char),
-
- # '::' has to go before atoms
- (r':::', String.Symbol),
- (r'::', Operator),
-
- # atoms
- (r':' + special_atom_re, String.Symbol),
- (r':' + complex_name_re, String.Symbol),
- (r':"', String.Symbol, 'string_double_atom'),
- (r":'", String.Symbol, 'string_single_atom'),
-
- # [keywords: ...]
- (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
- bygroups(String.Symbol, Punctuation)),
-
- # @attributes
- (r'@' + name_re, Name.Attribute),
-
- # identifiers
- (name_re, Name),
- (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
-
- # operators and punctuation
- (op3_re, Operator),
- (op2_re, Operator),
- (punctuation_re, Punctuation),
- (r'&\d', Name.Entity), # anon func arguments
- (op1_re, Operator),
-
- # numbers
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[\da-fA-F]+', Number.Hex),
- (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
- (r'\d(_?\d)*', Number.Integer),
-
- # strings and heredocs
+ (r'#.*$', Comment.Single),
+
+ # Various kinds of characters
+ (r'(\?)' + long_hex_char_re,
+ bygroups(String.Char,
+ String.Escape, Number.Hex, String.Escape)),
+ (r'(\?)' + hex_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'(\?)' + escape_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'\?\\?.', String.Char),
+
+ # '::' has to go before atoms
+ (r':::', String.Symbol),
+ (r'::', Operator),
+
+ # atoms
+ (r':' + special_atom_re, String.Symbol),
+ (r':' + complex_name_re, String.Symbol),
+ (r':"', String.Symbol, 'string_double_atom'),
+ (r":'", String.Symbol, 'string_single_atom'),
+
+ # [keywords: ...]
+ (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
+ bygroups(String.Symbol, Punctuation)),
+
+ # @attributes
+ (r'@' + name_re, Name.Attribute),
+
+ # identifiers
+ (name_re, Name),
+ (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
+
+ # operators and punctuation
+ (op3_re, Operator),
+ (op2_re, Operator),
+ (punctuation_re, Punctuation),
+ (r'&\d', Name.Entity), # anon func arguments
+ (op1_re, Operator),
+
+ # numbers
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[\da-fA-F]+', Number.Hex),
+ (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
+ (r'\d(_?\d)*', Number.Integer),
+
+ # strings and heredocs
(r'(""")(\s*)', bygroups(String.Heredoc, Whitespace),
'heredoc_double'),
(r"(''')(\s*)$", bygroups(String.Heredoc, Whitespace),
'heredoc_single'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single'),
-
- include('sigils'),
-
- (r'%\{', Punctuation, 'map_key'),
- (r'\{', Punctuation, 'tuple'),
- ],
- 'heredoc_double': [
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single'),
+
+ include('sigils'),
+
+ (r'%\{', Punctuation, 'map_key'),
+ (r'\{', Punctuation, 'tuple'),
+ ],
+ 'heredoc_double': [
(r'^(\s*)(""")', bygroups(Whitespace, String.Heredoc), '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_single': [
- (r"^\s*'''", String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_interpol': [
- (r'[^#\\\n]+', String.Heredoc),
- include('escapes'),
- (r'\\.', String.Heredoc),
- (r'\n+', String.Heredoc),
- include('interpol'),
- ],
- 'heredoc_no_interpol': [
- (r'[^\\\n]+', String.Heredoc),
- (r'\\.', String.Heredoc),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_single': [
+ (r"^\s*'''", String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_interpol': [
+ (r'[^#\\\n]+', String.Heredoc),
+ include('escapes'),
+ (r'\\.', String.Heredoc),
+ (r'\n+', String.Heredoc),
+ include('interpol'),
+ ],
+ 'heredoc_no_interpol': [
+ (r'[^\\\n]+', String.Heredoc),
+ (r'\\.', String.Heredoc),
(r'\n+', Whitespace),
- ],
- 'escapes': [
- (long_hex_char_re,
- bygroups(String.Escape, Number.Hex, String.Escape)),
- (hex_char_re, String.Escape),
- (escape_char_re, String.Escape),
- ],
- 'interpol': [
- (r'#\{', String.Interpol, 'interpol_string'),
- ],
- 'interpol_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'map_key': [
- include('root'),
- (r':', Punctuation, 'map_val'),
- (r'=>', Punctuation, 'map_val'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'map_val': [
- include('root'),
- (r',', Punctuation, '#pop'),
- (r'(?=\})', Punctuation, '#pop'),
- ],
- 'tuple': [
- include('root'),
- (r'\}', Punctuation, '#pop'),
- ],
- }
- tokens.update(gen_elixir_string_rules('double', '"', String.Double))
- tokens.update(gen_elixir_string_rules('single', "'", String.Single))
- tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
- tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
- tokens.update(gen_elixir_sigil_rules())
-
-
-class ElixirConsoleLexer(Lexer):
- """
- For Elixir interactive console (iex) output like:
-
- .. sourcecode:: iex
-
- iex> [head | tail] = [1,2,3]
- [1,2,3]
- iex> head
- 1
- iex> tail
- [2,3]
- iex> [head | tail]
- [1,2,3]
- iex> length [head | tail]
- 3
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir iex session'
- aliases = ['iex']
- mimetypes = ['text/x-elixir-shellsession']
-
+ ],
+ 'escapes': [
+ (long_hex_char_re,
+ bygroups(String.Escape, Number.Hex, String.Escape)),
+ (hex_char_re, String.Escape),
+ (escape_char_re, String.Escape),
+ ],
+ 'interpol': [
+ (r'#\{', String.Interpol, 'interpol_string'),
+ ],
+ 'interpol_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'map_key': [
+ include('root'),
+ (r':', Punctuation, 'map_val'),
+ (r'=>', Punctuation, 'map_val'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'map_val': [
+ include('root'),
+ (r',', Punctuation, '#pop'),
+ (r'(?=\})', Punctuation, '#pop'),
+ ],
+ 'tuple': [
+ include('root'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ }
+ tokens.update(gen_elixir_string_rules('double', '"', String.Double))
+ tokens.update(gen_elixir_string_rules('single', "'", String.Single))
+ tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
+ tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
+ tokens.update(gen_elixir_sigil_rules())
+
+
+class ElixirConsoleLexer(Lexer):
+ """
+ For Elixir interactive console (iex) output like:
+
+ .. sourcecode:: iex
+
+ iex> [head | tail] = [1,2,3]
+ [1,2,3]
+ iex> head
+ 1
+ iex> tail
+ [2,3]
+ iex> [head | tail]
+ [1,2,3]
+ iex> length [head | tail]
+ 3
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir iex session'
+ aliases = ['iex']
+ mimetypes = ['text/x-elixir-shellsession']
+
_prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
-
- def get_tokens_unprocessed(self, text):
- exlexer = ElixirLexer(**self.options)
-
- curcode = ''
- in_error = False
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
+
+ def get_tokens_unprocessed(self, text):
+ exlexer = ElixirLexer(**self.options)
+
+ curcode = ''
+ in_error = False
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
if line.startswith('** '):
- in_error = True
- insertions.append((len(curcode),
- [(0, Generic.Error, line[:-1])]))
- curcode += line[-1:]
- else:
- m = self._prompt_re.match(line)
- if m is not None:
- in_error = False
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
+ in_error = True
+ insertions.append((len(curcode),
+ [(0, Generic.Error, line[:-1])]))
+ curcode += line[-1:]
+ else:
+ m = self._prompt_re.match(line)
+ if m is not None:
+ in_error = False
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
yield from do_insertions(
insertions, exlexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- token = Generic.Error if in_error else Generic.Output
- yield match.start(), token, line
- if curcode:
+ curcode = ''
+ insertions = []
+ token = Generic.Error if in_error else Generic.Output
+ yield match.start(), token, line
+ if curcode:
yield from do_insertions(
insertions, exlexer.get_tokens_unprocessed(curcode))
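(Not part of the diff: a hedged sketch of the console lexer defined above. _prompt_re splits each line into a Generic.Prompt prefix, and the code after the prompt is re-lexed by ElixirLexer through do_insertions; the iex transcript is invented.)

    from pygments.lexers.erlang import ElixirConsoleLexer

    # Prompt lines become Generic.Prompt, output lines Generic.Output,
    # and the expression itself is tokenized as Elixir code.
    session = "iex> [head | tail] = [1, 2, 3]\n[1, 2, 3]\n"
    for tok, text in ElixirConsoleLexer().get_tokens(session):
        print(tok, repr(text))
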
diff --git a/contrib/python/Pygments/py3/pygments/lexers/esoteric.py b/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
index a884d4687b..5a72397166 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
@@ -1,58 +1,58 @@
-"""
- pygments.lexers.esoteric
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for esoteric languages.
-
+"""
+ pygments.lexers.esoteric
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for esoteric languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Whitespace
-
+
__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
'CapDLLexer', 'AheuiLexer']
-
-
-class BrainfuckLexer(RegexLexer):
- """
- Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
- language.
- """
-
- name = 'Brainfuck'
- aliases = ['brainfuck', 'bf']
- filenames = ['*.bf', '*.b']
- mimetypes = ['application/x-brainfuck']
-
- tokens = {
- 'common': [
- # use different colors for different instruction types
- (r'[.,]+', Name.Tag),
- (r'[+-]+', Name.Builtin),
- (r'[<>]+', Name.Variable),
- (r'[^.,+\-<>\[\]]+', Comment),
- ],
- 'root': [
- (r'\[', Keyword, 'loop'),
- (r'\]', Error),
- include('common'),
- ],
- 'loop': [
- (r'\[', Keyword, '#push'),
- (r'\]', Keyword, '#pop'),
- include('common'),
- ]
- }
-
+
+
+class BrainfuckLexer(RegexLexer):
+ """
+ Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
+ language.
+ """
+
+ name = 'Brainfuck'
+ aliases = ['brainfuck', 'bf']
+ filenames = ['*.bf', '*.b']
+ mimetypes = ['application/x-brainfuck']
+
+ tokens = {
+ 'common': [
+ # use different colors for different instruction types
+ (r'[.,]+', Name.Tag),
+ (r'[+-]+', Name.Builtin),
+ (r'[<>]+', Name.Variable),
+ (r'[^.,+\-<>\[\]]+', Comment),
+ ],
+ 'root': [
+ (r'\[', Keyword, 'loop'),
+ (r'\]', Error),
+ include('common'),
+ ],
+ 'loop': [
+ (r'\[', Keyword, '#push'),
+ (r'\]', Keyword, '#pop'),
+ include('common'),
+ ]
+ }
+
def analyse_text(text):
"""It's safe to assume that a program which mostly consists of + -
and < > is brainfuck."""
plus_minus_count = 0
greater_less_count = 0
-
+
range_to_check = max(256, len(text))
for c in text[:range_to_check]:
@@ -73,104 +73,104 @@ class BrainfuckLexer(RegexLexer):
return result
-class BefungeLexer(RegexLexer):
- """
- Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
- language.
-
- .. versionadded:: 0.7
- """
- name = 'Befunge'
- aliases = ['befunge']
- filenames = ['*.befunge']
- mimetypes = ['application/x-befunge']
-
- tokens = {
- 'root': [
- (r'[0-9a-f]', Number),
- (r'[+*/%!`-]', Operator), # Traditional math
- (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
- (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
- (r'[|_mw]', Keyword),
- (r'[{}]', Name.Tag), # Befunge-98 stack ops
- (r'".*?"', String.Double), # Strings don't appear to allow escapes
- (r'\'.', String.Single), # Single character
- (r'[#;]', Comment), # Trampoline... depends on direction hit
- (r'[pg&~=@iotsy]', Keyword), # Misc
- (r'[()A-Z]', Comment), # Fingerprints
+class BefungeLexer(RegexLexer):
+ """
+ Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
+ language.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Befunge'
+ aliases = ['befunge']
+ filenames = ['*.befunge']
+ mimetypes = ['application/x-befunge']
+
+ tokens = {
+ 'root': [
+ (r'[0-9a-f]', Number),
+ (r'[+*/%!`-]', Operator), # Traditional math
+ (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
+ (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
+ (r'[|_mw]', Keyword),
+ (r'[{}]', Name.Tag), # Befunge-98 stack ops
+ (r'".*?"', String.Double), # Strings don't appear to allow escapes
+ (r'\'.', String.Single), # Single character
+ (r'[#;]', Comment), # Trampoline... depends on direction hit
+ (r'[pg&~=@iotsy]', Keyword), # Misc
+ (r'[()A-Z]', Comment), # Fingerprints
(r'\s+', Whitespace), # Whitespace doesn't matter
- ],
- }
-
-
-class CAmkESLexer(RegexLexer):
- """
- Basic lexer for the input language for the
- `CAmkES <https://sel4.systems/CAmkES/>`_ component platform.
-
- .. versionadded:: 2.1
- """
- name = 'CAmkES'
- aliases = ['camkes', 'idl4']
- filenames = ['*.camkes', '*.idl4']
-
- tokens = {
+ ],
+ }
+
+
+class CAmkESLexer(RegexLexer):
+ """
+ Basic lexer for the input language for the
+ `CAmkES <https://sel4.systems/CAmkES/>`_ component platform.
+
+ .. versionadded:: 2.1
+ """
+ name = 'CAmkES'
+ aliases = ['camkes', 'idl4']
+ filenames = ['*.camkes', '*.idl4']
+
+ tokens = {
'root': [
- # C pre-processor directive
+ # C pre-processor directive
(r'^(\s*)(#.*)(\n)', bygroups(Whitespace, Comment.Preproc,
Whitespace)),
-
- # Whitespace, comments
+
+ # Whitespace, comments
(r'\s+', Whitespace),
- (r'/\*(.|\n)*?\*/', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
(r'//.*$', Comment),
-
+
(r'[\[(){},.;\]]', Punctuation),
(r'[~!%^&*+=|?:<>/-]', Operator),
-
- (words(('assembly', 'attribute', 'component', 'composition',
- 'configuration', 'connection', 'connector', 'consumes',
+
+ (words(('assembly', 'attribute', 'component', 'composition',
+ 'configuration', 'connection', 'connector', 'consumes',
'control', 'dataport', 'Dataport', 'Dataports', 'emits',
'event', 'Event', 'Events', 'export', 'from', 'group',
'hardware', 'has', 'interface', 'Interface', 'maybe',
'procedure', 'Procedure', 'Procedures', 'provides',
'template', 'thread', 'threads', 'to', 'uses', 'with'),
suffix=r'\b'), Keyword),
-
- (words(('bool', 'boolean', 'Buf', 'char', 'character', 'double',
- 'float', 'in', 'inout', 'int', 'int16_6', 'int32_t',
- 'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real',
+
+ (words(('bool', 'boolean', 'Buf', 'char', 'character', 'double',
+ 'float', 'in', 'inout', 'int', 'int16_6', 'int32_t',
+ 'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real',
'refin', 'semaphore', 'signed', 'string', 'struct',
'uint16_t', 'uint32_t', 'uint64_t', 'uint8_t', 'uintptr_t',
'unsigned', 'void'),
suffix=r'\b'), Keyword.Type),
-
- # Recognised attributes
- (r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved),
- (words(('dma_pool', 'from_access', 'to_access'), suffix=r'\b'),
- Keyword.Reserved),
-
- # CAmkES-level include
+
+ # Recognised attributes
+ (r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved),
+ (words(('dma_pool', 'from_access', 'to_access'), suffix=r'\b'),
+ Keyword.Reserved),
+
+ # CAmkES-level include
(r'(import)(\s+)((?:<[^>]*>|"[^"]*");)',
bygroups(Comment.Preproc, Whitespace, Comment.Preproc)),
-
- # C-level include
+
+ # C-level include
(r'(include)(\s+)((?:<[^>]*>|"[^"]*");)',
bygroups(Comment.Preproc, Whitespace, Comment.Preproc)),
-
- # Literals
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'-?[\d]+', Number),
- (r'-?[\d]+\.[\d]+', Number.Float),
- (r'"[^"]*"', String),
+
+ # Literals
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'-?[\d]+', Number),
+ (r'-?[\d]+\.[\d]+', Number.Float),
+ (r'"[^"]*"', String),
(r'[Tt]rue|[Ff]alse', Name.Builtin),
-
- # Identifiers
- (r'[a-zA-Z_]\w*', Name),
- ],
- }
-
-
+
+ # Identifiers
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ }
+
+
class CapDLLexer(RegexLexer):
"""
Basic lexer for
@@ -231,58 +231,58 @@ class CapDLLexer(RegexLexer):
}
-class RedcodeLexer(RegexLexer):
- """
- A simple Redcode lexer based on ICWS'94.
- Contributed by Adam Blinkinsop <blinks@acm.org>.
-
- .. versionadded:: 0.8
- """
- name = 'Redcode'
- aliases = ['redcode']
- filenames = ['*.cw']
-
- opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
- 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
- 'ORG', 'EQU', 'END')
- modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')
-
- tokens = {
- 'root': [
- # Whitespace:
+class RedcodeLexer(RegexLexer):
+ """
+ A simple Redcode lexer based on ICWS'94.
+ Contributed by Adam Blinkinsop <blinks@acm.org>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Redcode'
+ aliases = ['redcode']
+ filenames = ['*.cw']
+
+ opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
+ 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
+ 'ORG', 'EQU', 'END')
+ modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
(r'\s+', Whitespace),
- (r';.*$', Comment.Single),
- # Lexemes:
- # Identifiers
- (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
- (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
- (r'[A-Za-z_]\w+', Name),
- # Operators
- (r'[-+*/%]', Operator),
- (r'[#$@<>]', Operator), # mode
- (r'[.,]', Punctuation), # mode
- # Numbers
- (r'[-+]?\d+', Number.Integer),
- ],
- }
-
-
+ (r';.*$', Comment.Single),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
+ (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
+ (r'[A-Za-z_]\w+', Name),
+ # Operators
+ (r'[-+*/%]', Operator),
+ (r'[#$@<>]', Operator), # mode
+ (r'[.,]', Punctuation), # mode
+ # Numbers
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ }
+
+
class AheuiLexer(RegexLexer):
- """
+ """
Aheui_ Lexer.
-
+
Aheui_ is esoteric language based on Korean alphabets.
.. _Aheui: http://aheui.github.io/
- """
-
+ """
+
name = 'Aheui'
aliases = ['aheui']
filenames = ['*.aheui']
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
('['
'나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
'다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
@@ -301,5 +301,5 @@ class AheuiLexer(RegexLexer):
'하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
']', Operator),
('.', Comment),
- ],
- }
+ ],
+ }
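(Not part of the diff: a small sketch of the BrainfuckLexer above. '[' pushes the 'loop' state and '#push'es on nesting, ']' pops, and a ']' seen directly in 'root' is tokenized as Error; the program string is arbitrary.)

    from pygments.lexers.esoteric import BrainfuckLexer

    # Show how loop brackets, arithmetic and I/O map to distinct token types.
    for tok, text in BrainfuckLexer().get_tokens("++[->+<]."):
        print(tok, repr(text))
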
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ezhil.py b/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
index 6d282c96bb..1713cc5bc9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
@@ -1,40 +1,40 @@
-"""
- pygments.lexers.ezhil
- ~~~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for Ezhil language.
+"""
+ pygments.lexers.ezhil
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexers for Ezhil language.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
from pygments.lexer import RegexLexer, include, words, bygroups
-from pygments.token import Keyword, Text, Comment, Name
+from pygments.token import Keyword, Text, Comment, Name
from pygments.token import String, Number, Punctuation, Operator, Whitespace
+
+__all__ = ['EzhilLexer']
+
-__all__ = ['EzhilLexer']
-
-
-class EzhilLexer(RegexLexer):
- """
- Lexer for `Ezhil, a Tamil script-based programming language <http://ezhillang.org>`_
-
- .. versionadded:: 2.1
- """
- name = 'Ezhil'
- aliases = ['ezhil']
- filenames = ['*.n']
- mimetypes = ['text/x-ezhil']
- flags = re.MULTILINE | re.UNICODE
- # Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
- # This much simpler version is close enough, and includes combining marks.
+class EzhilLexer(RegexLexer):
+ """
+ Lexer for `Ezhil, a Tamil script-based programming language <http://ezhillang.org>`_
+
+ .. versionadded:: 2.1
+ """
+ name = 'Ezhil'
+ aliases = ['ezhil']
+ filenames = ['*.n']
+ mimetypes = ['text/x-ezhil']
+ flags = re.MULTILINE | re.UNICODE
+ # Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
+ # This much simpler version is close enough, and includes combining marks.
_TALETTERS = '[a-zA-Z_]|[\u0b80-\u0bff]'
- tokens = {
- 'root': [
- include('keywords'),
+ tokens = {
+ 'root': [
+ include('keywords'),
(r'#.*$', Comment.Single),
- (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
+ (r'[@+/*,^\-%]|[!<>=]=?|&&?|\|\|?', Operator),
('இல்', Operator.Word),
(words(('assert', 'max', 'min',
'நீளம்', 'சரம்_இடமாற்று', 'சரம்_கண்டுபிடி',
@@ -43,25 +43,25 @@ class EzhilLexer(RegexLexer):
'கோப்பை_திற', 'கோப்பை_எழுது', 'கோப்பை_மூடு',
'pi', 'sin', 'cos', 'tan', 'sqrt', 'hypot', 'pow',
'exp', 'log', 'log10', 'exit',
- ), suffix=r'\b'), Name.Builtin),
- (r'(True|False)\b', Keyword.Constant),
+ ), suffix=r'\b'), Name.Builtin),
+ (r'(True|False)\b', Keyword.Constant),
(r'[^\S\n]+', Whitespace),
- include('identifier'),
- include('literal'),
- (r'[(){}\[\]:;.]', Punctuation),
- ],
- 'keywords': [
+ include('identifier'),
+ include('literal'),
+ (r'[(){}\[\]:;.]', Punctuation),
+ ],
+ 'keywords': [
('பதிப்பி|தேர்ந்தெடு|தேர்வு|ஏதேனில்|ஆனால்|இல்லைஆனால்|இல்லை|ஆக|ஒவ்வொன்றாக|இல்|வரை|செய்|முடியேனில்|பின்கொடு|முடி|நிரல்பாகம்|தொடர்|நிறுத்து|நிரல்பாகம்', Keyword),
- ],
- 'identifier': [
+ ],
+ 'identifier': [
('(?:'+_TALETTERS+')(?:[0-9]|'+_TALETTERS+')*', Name),
- ],
- 'literal': [
- (r'".*?"', String),
- (r'(?u)\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float),
- (r'(?u)\d+', Number.Integer),
- ]
- }
+ ],
+ 'literal': [
+ (r'".*?"', String),
+ (r'(?u)\d+((\.\d*)?[eE][+-]?\d+|\.\d*)', Number.Float),
+ (r'(?u)\d+', Number.Integer),
+ ]
+ }
def analyse_text(text):
"""This language uses Tamil-script. We'll assume that if there's a
@@ -71,6 +71,6 @@ class EzhilLexer(RegexLexer):
if len(re.findall(r'[\u0b80-\u0bff]', text)) > 10:
return 0.25
- def __init__(self, **options):
+ def __init__(self, **options):
super().__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
+ self.encoding = options.get('encoding', 'utf-8')
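(Not part of the diff: an illustrative run of the EzhilLexer above. Identifiers may be built from Tamil code points via _TALETTERS, and words such as 'பதிப்பி' match the 'keywords' state; the sample program is hypothetical.)

    from pygments.lexers.ezhil import EzhilLexer

    # Tokenize a one-line Tamil-script program.
    for tok, text in EzhilLexer().get_tokens('பதிப்பி "வணக்கம்"'):
        print(tok, repr(text))
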
diff --git a/contrib/python/Pygments/py3/pygments/lexers/factor.py b/contrib/python/Pygments/py3/pygments/lexers/factor.py
index 05159835d4..be496b4171 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/factor.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/factor.py
@@ -1,266 +1,266 @@
-"""
- pygments.lexers.factor
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Factor language.
-
+"""
+ pygments.lexers.factor
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Factor language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default, words
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words
from pygments.token import Text, Comment, Keyword, Name, String, Number, \
Whitespace, Punctuation
-
-__all__ = ['FactorLexer']
-
-
-class FactorLexer(RegexLexer):
- """
- Lexer for the `Factor <http://factorcode.org>`_ language.
-
- .. versionadded:: 1.4
- """
- name = 'Factor'
- aliases = ['factor']
- filenames = ['*.factor']
- mimetypes = ['text/x-factor']
-
- flags = re.MULTILINE | re.UNICODE
-
- builtin_kernel = words((
- '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
- '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
- '3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
- '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
- 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
- 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
- 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
- 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
- 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
- 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
- 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
- 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
- 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
+
+__all__ = ['FactorLexer']
+
+
+class FactorLexer(RegexLexer):
+ """
+ Lexer for the `Factor <http://factorcode.org>`_ language.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Factor'
+ aliases = ['factor']
+ filenames = ['*.factor']
+ mimetypes = ['text/x-factor']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ builtin_kernel = words((
+ '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
+ '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
+ '3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
+ '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
+ 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
+ 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
+ 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
+ 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
+ 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
+ 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
+ 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
+ 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
+ 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
'wrapper', 'wrapper?', 'xor'), suffix=r'(\s+)')
-
- builtin_assocs = words((
- '2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
- 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
- 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
- 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
- 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
- 'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
- 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
- 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
- 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
- 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
- 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
+
+ builtin_assocs = words((
+ '2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
+ 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
+ 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
+ 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
+ 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
+ 'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
+ 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
+ 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
+ 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
+ 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
+ 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'(\s+)')
-
- builtin_combinators = words((
- '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
- '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
- 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
- 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
- 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
+
+ builtin_combinators = words((
+ '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
+ '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
+ 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
+ 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
+ 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'(\s+)')
-
- builtin_math = words((
- '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
- '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
- '(each-integer)', '(find-integer)', '*', '+', '?1+',
- 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
- 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
- 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
- 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
- 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
- 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
- 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
- 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
- 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
- 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
- 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
- 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
- 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
- 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
+
+ builtin_math = words((
+ '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
+ '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
+ '(each-integer)', '(find-integer)', '*', '+', '?1+',
+ 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
+ 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
+ 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
+ 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
+ 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
+ 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
+ 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
+ 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
+ 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
+ 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
+ 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
+ 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
+ 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
+ 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
'zero?'), suffix=r'(\s+)')
-
- builtin_sequences = words((
- '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
- '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
- '3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
- '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
- 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
- 'assert-sequence', 'assert-sequence=', 'assert-sequence?',
- 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
- 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
- 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
- 'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
- 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
- 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
- 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
- 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
- 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
- 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
- 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
- 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
- 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
- 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
- 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
- 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
- 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
- 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
- 'non-negative-integer-expected', 'non-negative-integer-expected?',
- 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
- 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
- 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
- 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
- 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
- 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
- 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
- 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
- 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
- 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
- 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
- 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
- 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
- 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
- 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
- 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
+
+ builtin_sequences = words((
+ '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
+ '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
+ '3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
+ '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
+ 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
+ 'assert-sequence', 'assert-sequence=', 'assert-sequence?',
+ 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
+ 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
+ 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
+ 'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
+ 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
+ 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
+ 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
+ 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
+ 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
+ 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
+ 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
+ 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
+ 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
+ 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
+ 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
+ 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
+ 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
+ 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
+ 'non-negative-integer-expected', 'non-negative-integer-expected?',
+ 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
+ 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
+ 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
+ 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
+ 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
+ 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
+ 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
+ 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
+ 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
+ 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
+ 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
+ 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
+ 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
+ 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
+ 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
+ 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
'when-empty'), suffix=r'(\s+)')
-
- builtin_namespaces = words((
- '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
- 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
- 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
- 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
+
+ builtin_namespaces = words((
+ '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
+ 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
+ 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
+ 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
suffix=r'(\s+)')
-
- builtin_arrays = words((
- '1array', '2array', '3array', '4array', '<array>', '>array', 'array',
+
+ builtin_arrays = words((
+ '1array', '2array', '3array', '4array', '<array>', '>array', 'array',
'array?', 'pair', 'pair?', 'resize-array'), suffix=r'(\s+)')
-
- builtin_io = words((
- '(each-stream-block-slice)', '(each-stream-block)',
- '(stream-contents-by-block)', '(stream-contents-by-element)',
- '(stream-contents-by-length-or-block)',
- '(stream-contents-by-length)', '+byte+', '+character+',
- 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
- 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
- 'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
- 'error-stream', 'flush', 'input-stream', 'input-stream?',
- 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
- 'output-stream', 'output-stream?', 'print', 'read', 'read-into',
- 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
- 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
- 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
- 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
- 'stream-copy*', 'stream-element-type', 'stream-flush',
- 'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
- 'stream-read', 'stream-read-into', 'stream-read-partial',
- 'stream-read-partial-into', 'stream-read-partial-unsafe',
- 'stream-read-unsafe', 'stream-read-until', 'stream-read1',
- 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
- 'stream-write', 'stream-write1', 'tell-input', 'tell-output',
- 'with-error-stream', 'with-error-stream*', 'with-error>output',
- 'with-input-output+error-streams',
- 'with-input-output+error-streams*', 'with-input-stream',
- 'with-input-stream*', 'with-output-stream', 'with-output-stream*',
- 'with-output>error', 'with-output+error-stream',
- 'with-output+error-stream*', 'with-streams', 'with-streams*',
+
+ builtin_io = words((
+ '(each-stream-block-slice)', '(each-stream-block)',
+ '(stream-contents-by-block)', '(stream-contents-by-element)',
+ '(stream-contents-by-length-or-block)',
+ '(stream-contents-by-length)', '+byte+', '+character+',
+ 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
+ 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
+ 'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
+ 'error-stream', 'flush', 'input-stream', 'input-stream?',
+ 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
+ 'output-stream', 'output-stream?', 'print', 'read', 'read-into',
+ 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
+ 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
+ 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
+ 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
+ 'stream-copy*', 'stream-element-type', 'stream-flush',
+ 'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
+ 'stream-read', 'stream-read-into', 'stream-read-partial',
+ 'stream-read-partial-into', 'stream-read-partial-unsafe',
+ 'stream-read-unsafe', 'stream-read-until', 'stream-read1',
+ 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
+ 'stream-write', 'stream-write1', 'tell-input', 'tell-output',
+ 'with-error-stream', 'with-error-stream*', 'with-error>output',
+ 'with-input-output+error-streams',
+ 'with-input-output+error-streams*', 'with-input-stream',
+ 'with-input-stream*', 'with-output-stream', 'with-output-stream*',
+ 'with-output>error', 'with-output+error-stream',
+ 'with-output+error-stream*', 'with-streams', 'with-streams*',
'write', 'write1'), suffix=r'(\s+)')
-
- builtin_strings = words((
- '1string', '<string>', '>string', 'resize-string', 'string',
+
+ builtin_strings = words((
+ '1string', '<string>', '>string', 'resize-string', 'string',
'string?'), suffix=r'(\s+)')
-
- builtin_vectors = words((
- '1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
+
+ builtin_vectors = words((
+ '1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
suffix=r'(\s+)')
-
- builtin_continuations = words((
- '<condition>', '<continuation>', '<restart>', 'attempt-all',
- 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
- 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
- 'condition?', 'continuation', 'continuation?', 'continue',
- 'continue-restart', 'continue-with', 'current-continuation',
- 'error', 'error-continuation', 'error-in-thread', 'error-thread',
- 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
- 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
- 'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
+
+ builtin_continuations = words((
+ '<condition>', '<continuation>', '<restart>', 'attempt-all',
+ 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
+ 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
+ 'condition?', 'continuation', 'continuation?', 'continue',
+ 'continue-restart', 'continue-with', 'current-continuation',
+ 'error', 'error-continuation', 'error-in-thread', 'error-thread',
+ 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
+ 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
+ 'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
'throw-restarts', 'with-datastack', 'with-return'), suffix=r'(\s+)')
-
- tokens = {
- 'root': [
- # factor allows a file to start with a shebang
- (r'#!.*$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
+
+ tokens = {
+ 'root': [
+ # factor allows a file to start with a shebang
+ (r'#!.*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
(r'\s+', Whitespace),
-
- # defining words
- (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
+
+ # defining words
+ (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function)),
- (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
+ (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace,
Name.Function)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace,
Name.Class)),
- (r'(GENERIC:)(\s+)(\S+)',
+ (r'(GENERIC:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function)),
- (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
+ (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace,
Name.Function)),
(r'(\()(\s)', bygroups(Name.Function, Whitespace), 'stackeffect'),
(r'(;)(\s)', bygroups(Keyword, Whitespace)),
-
- # imports and namespaces
- (r'(USING:)(\s+)',
+
+ # imports and namespaces
+ (r'(USING:)(\s+)',
bygroups(Keyword.Namespace, Whitespace), 'vocabs'),
- (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
+ (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
- (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
+ (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace,
Whitespace, Name.Namespace)),
- (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
+ (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
bygroups(Keyword.Namespace, Whitespace, Name.Namespace,
Whitespace), 'words'),
(r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+)(=>)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Function, Whitespace,
Name.Namespace, Whitespace, Punctuation, Whitespace,
Name.Function)),
- (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
+ (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Function, Whitespace,
Name.Function)),
- (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
+ (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
bygroups(Keyword.Namespace, Whitespace, Name.Function)),
-
- # tuples and classes
+
+ # tuples and classes
(r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+)(<)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Punctuation,
Whitespace, Name.Class), 'slots'),
- (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
+ (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class), 'slots'),
- (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
+ (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class)),
(r'(PREDICATE:)(\s+)(\S+)(\s+)(<)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace,
Punctuation, Whitespace, Name.Class)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function, Whitespace, Name.Class)),
- (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
+ (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Name.Class)),
(r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Function)),
(r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Whitespace, Name.Class)),
- (r'SINGLETONS:', Keyword, 'classes'),
-
- # other syntax
- (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
+ (r'SINGLETONS:', Keyword, 'classes'),
+
+ # other syntax
+ (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
bygroups(Keyword, Whitespace, Name.Function)),
(r'(SYMBOLS:)(\s+)', bygroups(Keyword, Whitespace), 'words'),
(r'(SYNTAX:)(\s+)', bygroups(Keyword, Whitespace)),
@@ -280,45 +280,45 @@ class FactorLexer(RegexLexer):
Text, Whitespace, Name.Function, Whitespace,
Name.Function, Whitespace,
Punctuation, Whitespace, Text, Punctuation, Whitespace)),
-
- # vocab.private
+
+ # vocab.private
(r'(<PRIVATE|PRIVATE>)(\s)', bygroups(Keyword.Namespace, Whitespace)),
-
- # strings
+
+ # strings
(r'"""\s(?:.|\n)*?\s"""', String),
- (r'"(?:\\\\|\\"|[^"])*"', String),
+ (r'"(?:\\\\|\\"|[^"])*"', String),
(r'(\S+")(\s+)((?:\\\\|\\"|[^"])*")',
bygroups(String, Whitespace, String)),
(r'(CHAR:)(\s+)(\\[\\abfnrstv]|[^\\]\S*)(\s)',
bygroups(String.Char, Whitespace, String.Char, Whitespace)),
-
- # comments
- (r'!\s+.*$', Comment),
- (r'#!\s+.*$', Comment),
+
+ # comments
+ (r'!\s+.*$', Comment),
+ (r'#!\s+.*$', Comment),
(r'/\*\s+(?:.|\n)*?\s\*/', Comment),
-
- # boolean constants
+
+ # boolean constants
(r'[tf]\b', Name.Constant),
-
- # symbols and literals
- (r'[\\$]\s+\S+', Name.Constant),
- (r'M\\\s+\S+\s+\S+', Name.Constant),
-
- # numbers
- (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
- (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
- (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'0b[01]+\s', Number.Bin),
- (r'0o[0-7]+\s', Number.Oct),
- (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
- (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
-
- # keywords
- (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
- Keyword),
-
- # builtins
+
+ # symbols and literals
+ (r'[\\$]\s+\S+', Name.Constant),
+ (r'M\\\s+\S+\s+\S+', Name.Constant),
+
+ # numbers
+ (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'0b[01]+\s', Number.Bin),
+ (r'0o[0-7]+\s', Number.Oct),
+ (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+ (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+
+ # keywords
+ (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
+ Keyword),
+
+ # builtins
(builtin_kernel, bygroups(Name.Builtin, Whitespace)),
(builtin_assocs, bygroups(Name.Builtin, Whitespace)),
(builtin_combinators, bygroups(Name.Builtin, Whitespace)),
@@ -330,38 +330,38 @@ class FactorLexer(RegexLexer):
(builtin_strings, bygroups(Name.Builtin, Whitespace)),
(builtin_vectors, bygroups(Name.Builtin, Whitespace)),
(builtin_continuations, bygroups(Name.Builtin, Whitespace)),
-
- # everything else is text
- (r'\S+', Text),
- ],
- 'stackeffect': [
+
+ # everything else is text
+ (r'\S+', Text),
+ ],
+ 'stackeffect': [
(r'\s+', Whitespace),
(r'(\()(\s+)', bygroups(Name.Function, Whitespace), 'stackeffect'),
(r'(\))(\s+)', bygroups(Name.Function, Whitespace), '#pop'),
(r'(--)(\s+)', bygroups(Name.Function, Whitespace)),
- (r'\S+', Name.Variable),
- ],
- 'slots': [
+ (r'\S+', Name.Variable),
+ ],
+ 'slots': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
(r'(\{)(\s+)(\S+)(\s+)([^}]+)(\s+)(\})(\s+)',
bygroups(Text, Whitespace, Name.Variable, Whitespace,
Text, Whitespace, Text, Whitespace)),
- (r'\S+', Name.Variable),
- ],
- 'vocabs': [
+ (r'\S+', Name.Variable),
+ ],
+ 'vocabs': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
- (r'\S+', Name.Namespace),
- ],
- 'classes': [
+ (r'\S+', Name.Namespace),
+ ],
+ 'classes': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
- (r'\S+', Name.Class),
- ],
- 'words': [
+ (r'\S+', Name.Class),
+ ],
+ 'words': [
(r'\s+', Whitespace),
(r'(;)(\s+)', bygroups(Keyword, Whitespace), '#pop'),
- (r'\S+', Name.Function),
- ],
- }
+ (r'\S+', Name.Function),
+ ],
+ }
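# Editorial sketch, not part of the commit above: a minimal check, assuming only a
# stock Pygments installation, that the restored FactorLexer pairs its words()
# builtin lists with bygroups(Name.Builtin, Whitespace) as shown in the diff, so
# kernel/math words such as 'dup' and '+' come back tagged Name.Builtin.
from pygments.lexers import FactorLexer
from pygments.token import Name

factor_code = ": double ( x -- y ) dup + ;\n"
for token_type, value in FactorLexer().get_tokens(factor_code):
    if token_type in Name.Builtin:
        # expected output: 'dup' and '+' reported as Token.Name.Builtin
        print(repr(value), token_type)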
diff --git a/contrib/python/Pygments/py3/pygments/lexers/fantom.py b/contrib/python/Pygments/py3/pygments/lexers/fantom.py
index 7272e006bd..55ad67bdb9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/fantom.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/fantom.py
@@ -1,251 +1,251 @@
-"""
- pygments.lexers.fantom
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Fantom language.
-
+"""
+ pygments.lexers.fantom
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Fantom language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from string import Template
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from string import Template
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal, Whitespace
-
-__all__ = ['FantomLexer']
-
-
-class FantomLexer(RegexLexer):
- """
- For Fantom source code.
-
- .. versionadded:: 1.5
- """
- name = 'Fantom'
- aliases = ['fan']
- filenames = ['*.fan']
- mimetypes = ['application/x-fantom']
-
- # often used regexes
- def s(str):
- return Template(str).substitute(
- dict(
- pod=r'[\"\w\.]+',
- eos=r'\n|;',
- id=r'[a-zA-Z_]\w*',
- # all chars which can be part of type definition. Starts with
- # either letter, or [ (maps), or | (funcs)
- type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?',
- )
- )
-
- tokens = {
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline
+
+__all__ = ['FantomLexer']
+
+
+class FantomLexer(RegexLexer):
+ """
+ For Fantom source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fantom'
+ aliases = ['fan']
+ filenames = ['*.fan']
+ mimetypes = ['application/x-fantom']
+
+ # often used regexes
+ def s(str):
+ return Template(str).substitute(
+ dict(
+ pod=r'[\"\w\.]+',
+ eos=r'\n|;',
+ id=r'[a-zA-Z_]\w*',
+ # all chars which can be part of type definition. Starts with
+ # either letter, or [ (maps), or | (funcs)
+ type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?',
+ )
+ )
+
+ tokens = {
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline
(r'//.*?$', Comment.Single), # Single line
- # TODO: highlight references in fandocs
+ # TODO: highlight references in fandocs
(r'\*\*.*?$', Comment.Special), # Fandoc
(r'#.*$', Comment.Single) # Shell-style
- ],
- 'literals': [
- (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration
- (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot
- (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal
- (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex
- (r'\b-?[\d_]+', Number.Integer), # Int
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char
- (r'"', Punctuation, 'insideStr'), # Opening quote
- (r'`', Punctuation, 'insideUri'), # Opening accent
- (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null
- (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, String, Punctuation)),
- (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, Name.Function)),
- (r'\[,\]', Literal), # Empty list
- (s(r'($type)(\[,\])'), # Typed empty list
- bygroups(using(this, state='inType'), Literal)),
- (r'\[:\]', Literal), # Empty Map
- (s(r'($type)(\[:\])'),
- bygroups(using(this, state='inType'), Literal)),
- ],
- 'insideStr': [
- (r'\\\\', String.Escape), # Escaped backslash
- (r'\\"', String.Escape), # Escaped "
- (r'\\`', String.Escape), # Escaped `
- (r'\$\w+', String.Interpol), # Subst var
- (r'\$\{.*?\}', String.Interpol), # Subst expr
- (r'"', Punctuation, '#pop'), # Closing quot
- (r'.', String) # String content
- ],
- 'insideUri': [ # TODO: remove copy/paste str/uri
- (r'\\\\', String.Escape), # Escaped backslash
- (r'\\"', String.Escape), # Escaped "
- (r'\\`', String.Escape), # Escaped `
- (r'\$\w+', String.Interpol), # Subst var
- (r'\$\{.*?\}', String.Interpol), # Subst expr
- (r'`', Punctuation, '#pop'), # Closing tick
- (r'.', String.Backtick) # URI content
- ],
- 'protectionKeywords': [
- (r'\b(public|protected|private|internal)\b', Keyword),
- ],
- 'typeKeywords': [
- (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
- ],
- 'methodKeywords': [
- (r'\b(abstract|native|once|override|static|virtual|final)\b',
- Keyword),
- ],
- 'fieldKeywords': [
- (r'\b(abstract|const|final|native|override|static|virtual|'
- r'readonly)\b', Keyword)
- ],
- 'otherKeywords': [
- (words((
- 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while',
- 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue',
- 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
- ],
- 'operators': [
- (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
- ],
- 'inType': [
- (r'[\[\]|\->:?]', Punctuation),
- (s(r'$id'), Name.Class),
- default('#pop'),
-
- ],
- 'root': [
- include('comments'),
- include('protectionKeywords'),
- include('typeKeywords'),
- include('methodKeywords'),
- include('fieldKeywords'),
- include('literals'),
- include('otherKeywords'),
- include('operators'),
- (r'using\b', Keyword.Namespace, 'using'), # Using stmt
- (r'@\w+', Name.Decorator, 'facet'), # Symbol
+ ],
+ 'literals': [
+ (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration
+ (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot
+ (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal
+ (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex
+ (r'\b-?[\d_]+', Number.Integer), # Int
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char
+ (r'"', Punctuation, 'insideStr'), # Opening quote
+ (r'`', Punctuation, 'insideUri'), # Opening accent
+ (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null
+ (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, String, Punctuation)),
+ (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, Name.Function)),
+ (r'\[,\]', Literal), # Empty list
+ (s(r'($type)(\[,\])'), # Typed empty list
+ bygroups(using(this, state='inType'), Literal)),
+ (r'\[:\]', Literal), # Empty Map
+ (s(r'($type)(\[:\])'),
+ bygroups(using(this, state='inType'), Literal)),
+ ],
+ 'insideStr': [
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\$\{.*?\}', String.Interpol), # Subst expr
+ (r'"', Punctuation, '#pop'), # Closing quot
+ (r'.', String) # String content
+ ],
+ 'insideUri': [ # TODO: remove copy/paste str/uri
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\$\{.*?\}', String.Interpol), # Subst expr
+ (r'`', Punctuation, '#pop'), # Closing tick
+ (r'.', String.Backtick) # URI content
+ ],
+ 'protectionKeywords': [
+ (r'\b(public|protected|private|internal)\b', Keyword),
+ ],
+ 'typeKeywords': [
+ (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
+ ],
+ 'methodKeywords': [
+ (r'\b(abstract|native|once|override|static|virtual|final)\b',
+ Keyword),
+ ],
+ 'fieldKeywords': [
+ (r'\b(abstract|const|final|native|override|static|virtual|'
+ r'readonly)\b', Keyword)
+ ],
+ 'otherKeywords': [
+ (words((
+ 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while',
+ 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue',
+ 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
+ ],
+ 'operators': [
+ (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
+ ],
+ 'inType': [
+ (r'[\[\]|\->:?]', Punctuation),
+ (s(r'$id'), Name.Class),
+ default('#pop'),
+
+ ],
+ 'root': [
+ include('comments'),
+ include('protectionKeywords'),
+ include('typeKeywords'),
+ include('methodKeywords'),
+ include('fieldKeywords'),
+ include('literals'),
+ include('otherKeywords'),
+ include('operators'),
+ (r'using\b', Keyword.Namespace, 'using'), # Using stmt
+ (r'@\w+', Name.Decorator, 'facet'), # Symbol
(r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Whitespace, Name.Class),
- 'inheritance'), # Inheritance list
-
- # Type var := val
- (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
+ 'inheritance'), # Inheritance list
+
+ # Type var := val
+ (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
bygroups(using(this, state='inType'), Whitespace,
Name.Variable, Whitespace, Operator)),
-
- # var := val
- (s(r'($id)(\s*)(:=)'),
+
+ # var := val
+ (s(r'($id)(\s*)(:=)'),
bygroups(Name.Variable, Whitespace, Operator)),
-
- # .someId( or ->someId( ###
- (s(r'(\.|(?:\->))($id)(\s*)(\()'),
+
+ # .someId( or ->someId( ###
+ (s(r'(\.|(?:\->))($id)(\s*)(\()'),
bygroups(Operator, Name.Function, Whitespace, Punctuation),
- 'insideParen'),
-
- # .someId or ->someId
- (s(r'(\.|(?:\->))($id)'),
- bygroups(Operator, Name.Function)),
-
- # new makeXXX (
- (r'(new)(\s+)(make\w*)(\s*)(\()',
+ 'insideParen'),
+
+ # .someId or ->someId
+ (s(r'(\.|(?:\->))($id)'),
+ bygroups(Operator, Name.Function)),
+
+ # new makeXXX (
+ (r'(new)(\s+)(make\w*)(\s*)(\()',
bygroups(Keyword, Whitespace, Name.Function, Whitespace, Punctuation),
- 'insideMethodDeclArgs'),
-
- # Type name (
- (s(r'($type)([ \t]+)' # Return type and whitespace
- r'($id)(\s*)(\()'), # method name + open brace
+ 'insideMethodDeclArgs'),
+
+ # Type name (
+ (s(r'($type)([ \t]+)' # Return type and whitespace
+ r'($id)(\s*)(\()'), # method name + open brace
bygroups(using(this, state='inType'), Whitespace,
Name.Function, Whitespace, Punctuation),
- 'insideMethodDeclArgs'),
-
- # ArgType argName,
- (s(r'($type)(\s+)($id)(\s*)(,)'),
+ 'insideMethodDeclArgs'),
+
+ # ArgType argName,
+ (s(r'($type)(\s+)($id)(\s*)(,)'),
bygroups(using(this, state='inType'), Whitespace, Name.Variable,
Whitespace, Punctuation)),
-
- # ArgType argName)
- # Covered in 'insideParen' state
-
- # ArgType argName -> ArgType|
- (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
+
+ # ArgType argName)
+ # Covered in 'insideParen' state
+
+ # ArgType argName -> ArgType|
+ (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
bygroups(using(this, state='inType'), Whitespace, Name.Variable,
Whitespace, Punctuation, Whitespace, using(this, state='inType'),
- Punctuation)),
-
- # ArgType argName|
- (s(r'($type)(\s+)($id)(\s*)(\|)'),
+ Punctuation)),
+
+ # ArgType argName|
+ (s(r'($type)(\s+)($id)(\s*)(\|)'),
bygroups(using(this, state='inType'), Whitespace, Name.Variable,
Whitespace, Punctuation)),
-
- # Type var
- (s(r'($type)([ \t]+)($id)'),
+
+ # Type var
+ (s(r'($type)([ \t]+)($id)'),
bygroups(using(this, state='inType'), Whitespace,
- Name.Variable)),
-
- (r'\(', Punctuation, 'insideParen'),
- (r'\{', Punctuation, 'insideBrace'),
+ Name.Variable)),
+
+ (r'\(', Punctuation, 'insideParen'),
+ (r'\{', Punctuation, 'insideBrace'),
(r'\s+', Whitespace),
- (r'.', Text)
- ],
- 'insideParen': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'insideMethodDeclArgs': [
- (r'\)', Punctuation, '#pop'),
- (s(r'($type)(\s+)($id)(\s*)(\))'),
+ (r'.', Text)
+ ],
+ 'insideParen': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'insideMethodDeclArgs': [
+ (r'\)', Punctuation, '#pop'),
+ (s(r'($type)(\s+)($id)(\s*)(\))'),
bygroups(using(this, state='inType'), Whitespace, Name.Variable,
Whitespace, Punctuation), '#pop'),
- include('root'),
- ],
- 'insideBrace': [
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'inheritance': [
+ include('root'),
+ ],
+ 'insideBrace': [
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'inheritance': [
(r'\s+', Whitespace), # Whitespace
- (r':|,', Punctuation),
- (r'(?:(\w+)(::))?(\w+)',
- bygroups(Name.Namespace, Punctuation, Name.Class)),
- (r'\{', Punctuation, '#pop')
- ],
- 'using': [
+ (r':|,', Punctuation),
+ (r'(?:(\w+)(::))?(\w+)',
+ bygroups(Name.Namespace, Punctuation, Name.Class)),
+ (r'\{', Punctuation, '#pop')
+ ],
+ 'using': [
(r'[ \t]+', Whitespace), # consume whitespaces
- (r'(\[)(\w+)(\])',
- bygroups(Punctuation, Comment.Special, Punctuation)), # ffi
- (r'(\")?([\w.]+)(\")?',
- bygroups(Punctuation, Name.Namespace, Punctuation)), # podname
- (r'::', Punctuation, 'usingClass'),
- default('#pop')
- ],
- 'usingClass': [
+ (r'(\[)(\w+)(\])',
+ bygroups(Punctuation, Comment.Special, Punctuation)), # ffi
+ (r'(\")?([\w.]+)(\")?',
+ bygroups(Punctuation, Name.Namespace, Punctuation)), # podname
+ (r'::', Punctuation, 'usingClass'),
+ default('#pop')
+ ],
+ 'usingClass': [
(r'[ \t]+', Whitespace), # consume whitespaces
- (r'(as)(\s+)(\w+)',
+ (r'(as)(\s+)(\w+)',
bygroups(Keyword.Declaration, Whitespace, Name.Class), '#pop:2'),
- (r'[\w$]+', Name.Class),
- default('#pop:2') # jump out to root state
- ],
- 'facet': [
+ (r'[\w$]+', Name.Class),
+ default('#pop:2') # jump out to root state
+ ],
+ 'facet': [
(r'\s+', Whitespace),
- (r'\{', Punctuation, 'facetFields'),
- default('#pop')
- ],
- 'facetFields': [
- include('comments'),
- include('literals'),
- include('operators'),
+ (r'\{', Punctuation, 'facetFields'),
+ default('#pop')
+ ],
+ 'facetFields': [
+ include('comments'),
+ include('literals'),
+ include('operators'),
(r'\s+', Whitespace),
(r'(\s*)(\w+)(\s*)(=)', bygroups(Whitespace, Name, Whitespace, Operator)),
- (r'\}', Punctuation, '#pop'),
+ (r'\}', Punctuation, '#pop'),
(r'\s+', Whitespace),
- (r'.', Text)
- ],
- }
+ (r'.', Text)
+ ],
+ }
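# Editorial sketch, not part of the commit above: the FantomLexer builds its rules
# through the s() helper, which expands $id/$type/$pod/$eos placeholders with
# string.Template before RegexLexer compiles them. The helper is re-created here
# standalone (substitution values copied from the diff) to show one expansion.
from string import Template

def expand(pattern):
    return Template(pattern).substitute(
        dict(
            pod=r'[\"\w\.]+',
            eos=r'\n|;',
            id=r'[a-zA-Z_]\w*',
            type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]|\->?]*?',
        )
    )

# The "var := val" rule from the root state, after substitution:
print(expand(r'($id)(\s*)(:=)'))   # -> ([a-zA-Z_]\w*)(\s*)(:=)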
diff --git a/contrib/python/Pygments/py3/pygments/lexers/felix.py b/contrib/python/Pygments/py3/pygments/lexers/felix.py
index 55dee25e6d..191c9cf53d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/felix.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/felix.py
@@ -1,275 +1,275 @@
-"""
- pygments.lexers.felix
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Felix language.
-
+"""
+ pygments.lexers.felix
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Felix language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words, \
- combined
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words, \
+ combined
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['FelixLexer']
-
-
-class FelixLexer(RegexLexer):
- """
- For `Felix <http://www.felix-lang.org>`_ source code.
-
- .. versionadded:: 1.2
- """
-
- name = 'Felix'
- aliases = ['felix', 'flx']
- filenames = ['*.flx', '*.flxh']
- mimetypes = ['text/x-felix']
-
- preproc = (
- 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
- )
-
- keywords = (
- '_', '_deref', 'all', 'as',
- 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
- 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
- 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
- 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
- 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
- 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
- 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
- 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
- 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
- 'when', 'whilst', 'with', 'yield',
- )
-
- keyword_directives = (
- '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
- 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
- 'package', 'private', 'pod', 'property', 'public', 'publish',
- 'requires', 'todo', 'virtual', 'use',
- )
-
- keyword_declarations = (
- 'def', 'let', 'ref', 'val', 'var',
- )
-
- keyword_types = (
- 'unit', 'void', 'any', 'bool',
- 'byte', 'offset',
- 'address', 'caddress', 'cvaddress', 'vaddress',
- 'tiny', 'short', 'int', 'long', 'vlong',
- 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
- 'int8', 'int16', 'int32', 'int64',
- 'uint8', 'uint16', 'uint32', 'uint64',
- 'float', 'double', 'ldouble',
- 'complex', 'dcomplex', 'lcomplex',
- 'imaginary', 'dimaginary', 'limaginary',
- 'char', 'wchar', 'uchar',
- 'charp', 'charcp', 'ucharp', 'ucharcp',
- 'string', 'wstring', 'ustring',
- 'cont',
- 'array', 'varray', 'list',
- 'lvalue', 'opt', 'slice',
- )
-
- keyword_constants = (
- 'false', 'true',
- )
-
- operator_words = (
- 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
- )
-
- name_builtins = (
- '_svc', 'while',
- )
-
- name_pseudo = (
- 'root', 'self', 'this',
- )
-
- decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Keywords
- (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
- 'union'), suffix=r'\b'),
- Keyword, 'funcname'),
- (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
- Keyword, 'classname'),
- (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
-
- (words(keywords, suffix=r'\b'), Keyword),
- (words(keyword_directives, suffix=r'\b'), Name.Decorator),
- (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
- (words(keyword_types, suffix=r'\b'), Keyword.Type),
- (words(keyword_constants, suffix=r'\b'), Keyword.Constant),
-
- # Operators
- include('operators'),
-
- # Float Literal
- # -- Hex Float
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
- Number.Float),
-
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
- # -- Octal
- (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
-
- # Strings
- ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
- ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
- ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
- ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
- ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
- ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
- ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
- ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
-
- # Punctuation
- (r'[\[\]{}:(),;?]', Punctuation),
-
- # Labels
- (r'[a-zA-Z_]\w*:>', Name.Label),
-
- # Identifiers
- (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
- (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'whitespace': [
+
+__all__ = ['FelixLexer']
+
+
+class FelixLexer(RegexLexer):
+ """
+ For `Felix <http://www.felix-lang.org>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+
+ name = 'Felix'
+ aliases = ['felix', 'flx']
+ filenames = ['*.flx', '*.flxh']
+ mimetypes = ['text/x-felix']
+
+ preproc = (
+ 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
+ )
+
+ keywords = (
+ '_', '_deref', 'all', 'as',
+ 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
+ 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
+ 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
+ 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
+ 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
+ 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
+ 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
+ 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
+ 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
+ 'when', 'whilst', 'with', 'yield',
+ )
+
+ keyword_directives = (
+ '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
+ 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
+ 'package', 'private', 'pod', 'property', 'public', 'publish',
+ 'requires', 'todo', 'virtual', 'use',
+ )
+
+ keyword_declarations = (
+ 'def', 'let', 'ref', 'val', 'var',
+ )
+
+ keyword_types = (
+ 'unit', 'void', 'any', 'bool',
+ 'byte', 'offset',
+ 'address', 'caddress', 'cvaddress', 'vaddress',
+ 'tiny', 'short', 'int', 'long', 'vlong',
+ 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
+ 'int8', 'int16', 'int32', 'int64',
+ 'uint8', 'uint16', 'uint32', 'uint64',
+ 'float', 'double', 'ldouble',
+ 'complex', 'dcomplex', 'lcomplex',
+ 'imaginary', 'dimaginary', 'limaginary',
+ 'char', 'wchar', 'uchar',
+ 'charp', 'charcp', 'ucharp', 'ucharcp',
+ 'string', 'wstring', 'ustring',
+ 'cont',
+ 'array', 'varray', 'list',
+ 'lvalue', 'opt', 'slice',
+ )
+
+ keyword_constants = (
+ 'false', 'true',
+ )
+
+ operator_words = (
+ 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
+ )
+
+ name_builtins = (
+ '_svc', 'while',
+ )
+
+ name_pseudo = (
+ 'root', 'self', 'this',
+ )
+
+ decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # Keywords
+ (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
+ 'union'), suffix=r'\b'),
+ Keyword, 'funcname'),
+ (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
+ Keyword, 'classname'),
+ (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
+
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(keyword_directives, suffix=r'\b'), Name.Decorator),
+ (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
+ (words(keyword_types, suffix=r'\b'), Keyword.Type),
+ (words(keyword_constants, suffix=r'\b'), Keyword.Constant),
+
+ # Operators
+ include('operators'),
+
+ # Float Literal
+ # -- Hex Float
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
+ Number.Float),
+
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
+
+ # Strings
+ ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
+ ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
+ ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
+ ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
+ ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
+ ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
+
+ # Punctuation
+ (r'[\[\]{}:(),;?]', Punctuation),
+
+ # Labels
+ (r'[a-zA-Z_]\w*:>', Name.Label),
+
+ # Identifiers
+ (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
+ (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'whitespace': [
(r'\s+', Whitespace),
-
- include('comment'),
-
- # Preprocessor
+
+ include('comment'),
+
+ # Preprocessor
(r'(#)(\s*)(if)(\s+)(0)',
bygroups(Comment.Preproc, Whitespace, Comment.Preproc,
Whitespace, Comment.Preproc), 'if0'),
- (r'#', Comment.Preproc, 'macro'),
- ],
- 'operators': [
- (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
- (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
- ],
- 'comment': [
+ (r'#', Comment.Preproc, 'macro'),
+ ],
+ 'operators': [
+ (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
+ (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
+ ],
+ 'comment': [
(r'//(.*?)$', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment2'),
- ],
- 'comment2': [
- (r'[^/*]', Comment.Multiline),
- (r'/[*]', Comment.Multiline, '#push'),
- (r'[*]/', Comment.Multiline, '#pop'),
- (r'[/*]', Comment.Multiline),
- ],
- 'if0': [
+ (r'/[*]', Comment.Multiline, 'comment2'),
+ ],
+ 'comment2': [
+ (r'[^/*]', Comment.Multiline),
+ (r'/[*]', Comment.Multiline, '#push'),
+ (r'[*]/', Comment.Multiline, '#pop'),
+ (r'[/*]', Comment.Multiline),
+ ],
+ 'if0': [
(r'^(\s*)(#if.*?(?<!\\))(\n)',
bygroups(Whitespace, Comment, Whitespace), '#push'),
(r'^(\s*)(#endif.*?(?<!\\))(\n)',
bygroups(Whitespace, Comment, Whitespace), '#pop'),
(r'(.*?)(\n)', bygroups(Comment, Whitespace)),
- ],
- 'macro': [
- include('comment'),
- (r'(import|include)(\s+)(<[^>]*?>)',
+ ],
+ 'macro': [
+ include('comment'),
+ (r'(import|include)(\s+)(<[^>]*?>)',
bygroups(Comment.Preproc, Whitespace, String), '#pop'),
- (r'(import|include)(\s+)("[^"]*?")',
+ (r'(import|include)(\s+)("[^"]*?")',
bygroups(Comment.Preproc, Whitespace, String), '#pop'),
- (r"(import|include)(\s+)('[^']*?')",
+ (r"(import|include)(\s+)('[^']*?')",
bygroups(Comment.Preproc, Whitespace, String), '#pop'),
- (r'[^/\n]+', Comment.Preproc),
- # (r'/[*](.|\n)*?[*]/', Comment),
- # (r'//.*?\n', Comment, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
+ (r'[^/\n]+', Comment.Preproc),
+ # (r'/[*](.|\n)*?[*]/', Comment),
+ # (r'//.*?\n', Comment, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
(r'\n', Whitespace, '#pop'),
- ],
- 'funcname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
- # anonymous functions
- (r'(?=\()', Text, '#pop'),
- ],
- 'classname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # anonymous classes
- (r'(?=\{)', Text, '#pop'),
- ],
- 'modulename': [
- include('whitespace'),
- (r'\[', Punctuation, ('modulename2', 'tvarlist')),
- default('modulename2'),
- ],
- 'modulename2': [
- include('whitespace'),
- (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
- ],
- 'tvarlist': [
- include('whitespace'),
- include('operators'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(with|where)\b', Keyword),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ ],
+ 'funcname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
+ # anonymous functions
+ (r'(?=\()', Text, '#pop'),
+ ],
+ 'classname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # anonymous classes
+ (r'(?=\{)', Text, '#pop'),
+ ],
+ 'modulename': [
+ include('whitespace'),
+ (r'\[', Punctuation, ('modulename2', 'tvarlist')),
+ default('modulename2'),
+ ],
+ 'modulename2': [
+ include('whitespace'),
+ (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
+ ],
+ 'tvarlist': [
+ include('whitespace'),
+ include('operators'),
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(with|where)\b', Keyword),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- # included here again for raw strings
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- # included here again for raw strings
- (r"\\\\|\\'|\\\n", String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ # included here again for raw strings
+ (r'\\\\|\\"|\\\n', String.Escape),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ # included here again for raw strings
+ (r"\\\\|\\'|\\\n", String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
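# Editorial sketch, not part of the commit above: the FelixLexer splices its
# decimal_suffixes fragment into the integer-literal rules with % interpolation.
# Reproducing that here (pattern text copied from the diff) shows the resulting
# hexadecimal rule accepting a suffixed literal.
import re

decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
hex_literal = r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes
print(bool(re.fullmatch(hex_literal, '0xDEAD_BEEFu32')))   # True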
diff --git a/contrib/python/Pygments/py3/pygments/lexers/fortran.py b/contrib/python/Pygments/py3/pygments/lexers/fortran.py
index b5d977eaf0..ea0f072ad8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/fortran.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/fortran.py
@@ -1,212 +1,212 @@
-"""
- pygments.lexers.fortran
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Fortran languages.
-
+"""
+ pygments.lexers.fortran
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Fortran languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, bygroups, include, words, using, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['FortranLexer', 'FortranFixedLexer']
-
-
-class FortranLexer(RegexLexer):
- """
- Lexer for FORTRAN 90 code.
-
- .. versionadded:: 0.10
- """
- name = 'Fortran'
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['FortranLexer', 'FortranFixedLexer']
+
+
+class FortranLexer(RegexLexer):
+ """
+ Lexer for FORTRAN 90 code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Fortran'
aliases = ['fortran', 'f90']
- filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
- mimetypes = ['text/x-fortran']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
- # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
- # Logical (?): NOT, AND, OR, EQV, NEQV
-
- # Builtins:
- # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
-
- tokens = {
- 'root': [
- (r'^#.*\n', Comment.Preproc),
- (r'!.*\n', Comment),
- include('strings'),
- include('core'),
- (r'[a-z][\w$]*', Name),
- include('nums'),
+ filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
+ mimetypes = ['text/x-fortran']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
+ # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
+ # Logical (?): NOT, AND, OR, EQV, NEQV
+
+ # Builtins:
+ # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
+
+ tokens = {
+ 'root': [
+ (r'^#.*\n', Comment.Preproc),
+ (r'!.*\n', Comment),
+ include('strings'),
+ include('core'),
+ (r'[a-z][\w$]*', Name),
+ include('nums'),
(r'[\s]+', Text.Whitespace),
- ],
- 'core': [
- # Statements
+ ],
+ 'core': [
+ # Statements
(r'\b(DO)(\s+)(CONCURRENT)\b', bygroups(Keyword, Text.Whitespace, Keyword)),
(r'\b(GO)(\s*)(TO)\b', bygroups(Keyword, Text.Whitespace, Keyword)),
- (words((
- 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE',
- 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND',
- 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE',
+ (words((
+ 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE',
+ 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND',
+ 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE',
'CODIMENSION', 'COMMON', 'CONTIGUOUS', 'CONTAINS',
- 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
- 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END',
+ 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
+ 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END',
'ENDASSOCIATE', 'ENDBLOCK', 'ENDDO', 'ENDENUM', 'ENDFORALL',
'ENDFUNCTION', 'ENDIF', 'ENDINTERFACE', 'ENDMODULE', 'ENDPROGRAM',
'ENDSELECT', 'ENDSUBMODULE', 'ENDSUBROUTINE', 'ENDTYPE', 'ENDWHERE',
'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'ERROR STOP', 'EXIT',
'EXTENDS', 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
'FUNCTION', 'GENERIC', 'IF', 'IMAGES', 'IMPLICIT',
- 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
- 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
+ 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
+ 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'ONLY', 'OPEN',
'OPTIONAL', 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT',
'PRIVATE', 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
- 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
- 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
- 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
- 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'),
- Keyword),
-
- # Data Types
- (words((
- 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER',
- 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG',
- 'C_SIGNED_CHAR', 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T',
- 'C_INT64_T', 'C_INT_LEAST8_T', 'C_INT_LEAST16_T', 'C_INT_LEAST32_T',
- 'C_INT_LEAST64_T', 'C_INT_FAST8_T', 'C_INT_FAST16_T', 'C_INT_FAST32_T',
- 'C_INT_FAST64_T', 'C_INTMAX_T', 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE',
- 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', 'C_DOUBLE_COMPLEX',
- 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', 'C_FUNPTR'),
- prefix=r'\b', suffix=r'\s*\b'),
- Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
-
- (r'(::)', Keyword.Declaration),
-
- (r'[()\[\],:&%;.]', Punctuation),
- # Intrinsics
- (words((
- 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL',
- 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog',
- 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH',
- 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref',
- 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0',
- 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE',
- 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp',
- 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count',
- 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift',
- 'CSin', 'CSqRt', 'CTime', 'C_Loc', 'C_Associated',
- 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer',
- 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc',
- 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return',
- 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin',
- 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJN', 'DbesY',
- 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF',
- 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DMax',
- 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH',
- 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime',
- 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime',
- 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of',
- 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush',
- 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell',
- 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument',
- 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog',
- 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs',
- 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits',
- 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr',
- 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index',
- 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC',
- 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End',
- 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound',
- 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk',
- 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat',
- 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent',
- 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc',
- 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits',
- 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images',
- 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product',
- 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real',
- 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift',
- 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind',
- 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape',
- 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH',
- 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand',
- 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock',
- 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer',
- 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask',
- 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp',
- 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'),
- Name.Builtin),
-
- # Booleans
- (r'\.(true|false)\.', Name.Builtin),
- # Comparing Operators
- (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
- ],
-
- 'strings': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- ],
-
- 'nums': [
+ 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
+ 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
+ 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
+ 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword),
+
+ # Data Types
+ (words((
+ 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER',
+ 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG',
+ 'C_SIGNED_CHAR', 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T',
+ 'C_INT64_T', 'C_INT_LEAST8_T', 'C_INT_LEAST16_T', 'C_INT_LEAST32_T',
+ 'C_INT_LEAST64_T', 'C_INT_FAST8_T', 'C_INT_FAST16_T', 'C_INT_FAST32_T',
+ 'C_INT_FAST64_T', 'C_INTMAX_T', 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE',
+ 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', 'C_DOUBLE_COMPLEX',
+ 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', 'C_FUNPTR'),
+ prefix=r'\b', suffix=r'\s*\b'),
+ Keyword.Type),
+
+ # Operators
+ (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
+
+ (r'(::)', Keyword.Declaration),
+
+ (r'[()\[\],:&%;.]', Punctuation),
+ # Intrinsics
+ (words((
+ 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL',
+ 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog',
+ 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH',
+ 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref',
+ 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0',
+ 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE',
+ 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp',
+ 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count',
+ 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift',
+ 'CSin', 'CSqRt', 'CTime', 'C_Loc', 'C_Associated',
+ 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer',
+ 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc',
+ 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return',
+ 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin',
+ 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJN', 'DbesY',
+ 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF',
+ 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DMax',
+ 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH',
+ 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime',
+ 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime',
+ 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of',
+ 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush',
+ 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell',
+ 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument',
+ 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog',
+ 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs',
+ 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits',
+ 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr',
+ 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index',
+ 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC',
+ 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End',
+ 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound',
+ 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk',
+ 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat',
+ 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent',
+ 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc',
+ 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits',
+ 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images',
+ 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product',
+ 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real',
+ 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift',
+ 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind',
+ 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape',
+ 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH',
+ 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand',
+ 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock',
+ 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer',
+ 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask',
+ 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp',
+ 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'),
+ Name.Builtin),
+
+ # Booleans
+ (r'\.(true|false)\.', Name.Builtin),
+ # Comparing Operators
+ (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
+ ],
+
+ 'strings': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ ],
+
+ 'nums': [
(r'\d+(?![.e])(_([1-9]|[a-z]\w*))?', Number.Integer),
(r'[+-]?\d*\.\d+([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
(r'[+-]?\d+\.\d*([ed][-+]?\d+)?(_([1-9]|[a-z]\w*))?', Number.Float),
(r'[+-]?\d+(\.\d*)?[ed][-+]?\d+(_([1-9]|[a-z]\w*))?', Number.Float),
- ],
- }
-
-
-class FortranFixedLexer(RegexLexer):
- """
- Lexer for fixed format Fortran.
-
- .. versionadded:: 2.1
- """
- name = 'FortranFixed'
- aliases = ['fortranfixed']
- filenames = ['*.f', '*.F']
-
- flags = re.IGNORECASE
-
- def _lex_fortran(self, match, ctx=None):
-        """Lex a line just like free-form Fortran, dropping line breaks."""
- lexer = FortranLexer()
- text = match.group(0) + "\n"
- for index, token, value in lexer.get_tokens_unprocessed(text):
- value = value.replace('\n', '')
- if value != '':
- yield index, token, value
-
- tokens = {
- 'root': [
- (r'[C*].*\n', Comment),
- (r'#.*\n', Comment.Preproc),
- (r' {0,4}!.*\n', Comment),
- (r'(.{5})', Name.Label, 'cont-char'),
- (r'.*\n', using(FortranLexer)),
- ],
- 'cont-char': [
- (' ', Text, 'code'),
- ('0', Comment, 'code'),
+ ],
+ }
+
+
+class FortranFixedLexer(RegexLexer):
+ """
+ Lexer for fixed format Fortran.
+
+ .. versionadded:: 2.1
+ """
+ name = 'FortranFixed'
+ aliases = ['fortranfixed']
+ filenames = ['*.f', '*.F']
+
+ flags = re.IGNORECASE
+
+ def _lex_fortran(self, match, ctx=None):
+        """Lex a line just like free-form Fortran, dropping line breaks."""
+ lexer = FortranLexer()
+ text = match.group(0) + "\n"
+ for index, token, value in lexer.get_tokens_unprocessed(text):
+ value = value.replace('\n', '')
+ if value != '':
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ (r'[C*].*\n', Comment),
+ (r'#.*\n', Comment.Preproc),
+ (r' {0,4}!.*\n', Comment),
+ (r'(.{5})', Name.Label, 'cont-char'),
+ (r'.*\n', using(FortranLexer)),
+ ],
+ 'cont-char': [
+ (' ', Text, 'code'),
+ ('0', Comment, 'code'),
('.', Generic.Strong, 'code'),
- ],
- 'code': [
- (r'(.{66})(.*)(\n)',
+ ],
+ 'code': [
+ (r'(.{66})(.*)(\n)',
bygroups(_lex_fortran, Comment, Text.Whitespace), 'root'),
(r'(.*)(\n)', bygroups(_lex_fortran, Text.Whitespace), 'root'),
default('root'),
]
- }
+ }
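
The two lexers above split the work between Fortran source forms: FortranLexer handles free-form code directly, while FortranFixedLexer unpacks the fixed-form column layout (columns 1-5 as a label field, column 6 as the continuation marker, columns 7-72 re-lexed as free-form code via _lex_fortran, anything past that treated as comment). A minimal usage sketch follows; the two sample programs are invented for illustration, and only highlight, TerminalFormatter and the two lexer classes shown above are assumed from Pygments' public API.

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import FortranLexer, FortranFixedLexer

# Invented free-form sample: '!' starts a comment anywhere on the line.
free_form = (
    "program demo            ! free-form comment\n"
    "  integer :: i\n"
    "  do concurrent (i = 1:10)\n"
    "  end do\n"
    "end program demo\n"
)

# Invented fixed-form sample: 'C' in column 1 is a comment line, and the
# '1' in column 6 marks a continuation of the previous statement.
fixed_form = (
    "C     fixed-form comment line\n"
    "      PROGRAM DEMO\n"
    "      X = 1 +\n"
    "     1    2\n"
    "      END\n"
)

# Free-form source goes straight through FortranLexer.
print(highlight(free_form, FortranLexer(), TerminalFormatter()))

# Fixed-form source: columns 1-5 become a label token, column 6 is checked
# for a continuation character, and columns 7-72 are re-lexed as free-form
# Fortran by _lex_fortran; anything beyond column 72 is lexed as a comment.
print(highlight(fixed_form, FortranFixedLexer(), TerminalFormatter()))
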
diff --git a/contrib/python/Pygments/py3/pygments/lexers/foxpro.py b/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
index 0b756d4da3..b274e61753 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
@@ -1,427 +1,427 @@
-"""
- pygments.lexers.foxpro
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Simple lexer for Microsoft Visual FoxPro source code.
-
+"""
+ pygments.lexers.foxpro
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Simple lexer for Microsoft Visual FoxPro source code.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer
-from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
- Name, String
-
-__all__ = ['FoxProLexer']
-
-
-class FoxProLexer(RegexLexer):
- """Lexer for Microsoft Visual FoxPro language.
-
-    FoxPro syntax allows all keywords and function names to be shortened
- to 4 characters. Shortened forms are not recognized by this lexer.
-
- .. versionadded:: 1.6
- """
-
- name = 'FoxPro'
- aliases = ['foxpro', 'vfp', 'clipper', 'xbase']
- filenames = ['*.PRG', '*.prg']
- mimetype = []
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer
+from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
+ Name, String
+
+__all__ = ['FoxProLexer']
+
+
+class FoxProLexer(RegexLexer):
+ """Lexer for Microsoft Visual FoxPro language.
+
+    FoxPro syntax allows all keywords and function names to be shortened
+ to 4 characters. Shortened forms are not recognized by this lexer.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'FoxPro'
+ aliases = ['foxpro', 'vfp', 'clipper', 'xbase']
+ filenames = ['*.PRG', '*.prg']
+ mimetype = []
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
(r';\s*\n', Punctuation), # consume newline
- (r'(^|\n)\s*', Text, 'newline'),
-
- # Square brackets may be used for array indices
-            # and for string literals. Look for arrays
- # before matching string literals.
- (r'(?<=\w)\[[0-9, ]+\]', Text),
- (r'\'[^\'\n]*\'|"[^"\n]*"|\[[^]*]\]', String),
- (r'(^\s*\*|&&|&amp;&amp;).*?\n', Comment.Single),
-
- (r'(ABS|ACLASS|ACOPY|ACOS|ADATABASES|ADBOBJECTS|ADDBS|'
- r'ADDPROPERTY|ADEL|ADIR|ADLLS|ADOCKSTATE|AELEMENT|AERROR|'
- r'AEVENTS|AFIELDS|AFONT|AGETCLASS|AGETFILEVERSION|AINS|'
- r'AINSTANCE|ALANGUAGE|ALEN|ALIAS|ALINES|ALLTRIM|'
- r'AMEMBERS|AMOUSEOBJ|ANETRESOURCES|APRINTERS|APROCINFO|'
- r'ASC|ASCAN|ASELOBJ|ASESSIONS|ASIN|ASORT|ASQLHANDLES|'
- r'ASTACKINFO|ASUBSCRIPT|AT|AT_C|ATAGINFO|ATAN|ATC|ATCC|'
- r'ATCLINE|ATLINE|ATN2|AUSED|AVCXCLASSES|BAR|BARCOUNT|'
- r'BARPROMPT|BETWEEN|BINDEVENT|BINTOC|BITAND|BITCLEAR|'
- r'BITLSHIFT|BITNOT|BITOR|BITRSHIFT|BITSET|BITTEST|BITXOR|'
- r'BOF|CANDIDATE|CAPSLOCK|CAST|CDOW|CDX|CEILING|CHR|CHRSAW|'
- r'CHRTRAN|CHRTRANC|CLEARRESULTSET|CMONTH|CNTBAR|CNTPAD|COL|'
- r'COM|Functions|COMARRAY|COMCLASSINFO|COMPOBJ|COMPROP|'
- r'COMRETURNERROR|COS|CPCONVERT|CPCURRENT|CPDBF|CREATEBINARY|'
- r'CREATEOBJECT|CREATEOBJECTEX|CREATEOFFLINE|CTOBIN|CTOD|'
- r'CTOT|CURDIR|CURSORGETPROP|CURSORSETPROP|CURSORTOXML|'
- r'CURVAL|DATE|DATETIME|DAY|DBC|DBF|DBGETPROP|DBSETPROP|'
- r'DBUSED|DDEAbortTrans|DDEAdvise|DDEEnabled|DDEExecute|'
- r'DDEInitiate|DDELastError|DDEPoke|DDERequest|DDESetOption|'
- r'DDESetService|DDESetTopic|DDETerminate|DEFAULTEXT|'
- r'DELETED|DESCENDING|DIFFERENCE|DIRECTORY|DISKSPACE|'
- r'DisplayPath|DMY|DODEFAULT|DOW|DRIVETYPE|DROPOFFLINE|'
- r'DTOC|DTOR|DTOS|DTOT|EDITSOURCE|EMPTY|EOF|ERROR|EVAL(UATE)?|'
- r'EVENTHANDLER|EVL|EXECSCRIPT|EXP|FCHSIZE|FCLOSE|FCOUNT|'
- r'FCREATE|FDATE|FEOF|FERROR|FFLUSH|FGETS|FIELD|FILE|'
- r'FILETOSTR|FILTER|FKLABEL|FKMAX|FLDLIST|FLOCK|FLOOR|'
- r'FONTMETRIC|FOPEN|FOR|FORCEEXT|FORCEPATH|FOUND|FPUTS|'
- r'FREAD|FSEEK|FSIZE|FTIME|FULLPATH|FV|FWRITE|'
- r'GETAUTOINCVALUE|GETBAR|GETCOLOR|GETCP|GETDIR|GETENV|'
- r'GETFILE|GETFLDSTATE|GETFONT|GETINTERFACE|'
- r'GETNEXTMODIFIED|GETOBJECT|GETPAD|GETPEM|GETPICT|'
- r'GETPRINTER|GETRESULTSET|GETWORDCOUNT|GETWORDNUM|'
- r'GETCURSORADAPTER|GOMONTH|HEADER|HOME|HOUR|ICASE|'
- r'IDXCOLLATE|IIF|IMESTATUS|INDBC|INDEXSEEK|INKEY|INLIST|'
- r'INPUTBOX|INSMODE|INT|ISALPHA|ISBLANK|ISCOLOR|ISDIGIT|'
- r'ISEXCLUSIVE|ISFLOCKED|ISLEADBYTE|ISLOWER|ISMEMOFETCHED|'
- r'ISMOUSE|ISNULL|ISPEN|ISREADONLY|ISRLOCKED|'
- r'ISTRANSACTABLE|ISUPPER|JUSTDRIVE|JUSTEXT|JUSTFNAME|'
- r'JUSTPATH|JUSTSTEM|KEY|KEYMATCH|LASTKEY|LEFT|LEFTC|LEN|'
- r'LENC|LIKE|LIKEC|LINENO|LOADPICTURE|LOCFILE|LOCK|LOG|'
- r'LOG10|LOOKUP|LOWER|LTRIM|LUPDATE|MAKETRANSACTABLE|MAX|'
- r'MCOL|MDOWN|MDX|MDY|MEMLINES|MEMORY|MENU|MESSAGE|'
- r'MESSAGEBOX|MIN|MINUTE|MLINE|MOD|MONTH|MRKBAR|MRKPAD|'
- r'MROW|MTON|MWINDOW|NDX|NEWOBJECT|NORMALIZE|NTOM|NUMLOCK|'
- r'NVL|OBJNUM|OBJTOCLIENT|OBJVAR|OCCURS|OEMTOANSI|OLDVAL|'
- r'ON|ORDER|OS|PAD|PADL|PARAMETERS|PAYMENT|PCOL|PCOUNT|'
- r'PEMSTATUS|PI|POPUP|PRIMARY|PRINTSTATUS|PRMBAR|PRMPAD|'
- r'PROGRAM|PROMPT|PROPER|PROW|PRTINFO|PUTFILE|PV|QUARTER|'
- r'RAISEEVENT|RAND|RAT|RATC|RATLINE|RDLEVEL|READKEY|RECCOUNT|'
- r'RECNO|RECSIZE|REFRESH|RELATION|REPLICATE|REQUERY|RGB|'
- r'RGBSCHEME|RIGHT|RIGHTC|RLOCK|ROUND|ROW|RTOD|RTRIM|'
- r'SAVEPICTURE|SCHEME|SCOLS|SEC|SECONDS|SEEK|SELECT|SET|'
- r'SETFLDSTATE|SETRESULTSET|SIGN|SIN|SKPBAR|SKPPAD|SOUNDEX|'
- r'SPACE|SQLCANCEL|SQLCOLUMNS|SQLCOMMIT|SQLCONNECT|'
- r'SQLDISCONNECT|SQLEXEC|SQLGETPROP|SQLIDLEDISCONNECT|'
- r'SQLMORERESULTS|SQLPREPARE|SQLROLLBACK|SQLSETPROP|'
- r'SQLSTRINGCONNECT|SQLTABLES|SQRT|SROWS|STR|STRCONV|'
- r'STREXTRACT|STRTOFILE|STRTRAN|STUFF|STUFFC|SUBSTR|'
- r'SUBSTRC|SYS|SYSMETRIC|TABLEREVERT|TABLEUPDATE|TAG|'
- r'TAGCOUNT|TAGNO|TAN|TARGET|TEXTMERGE|TIME|TRANSFORM|'
- r'TRIM|TTOC|TTOD|TXNLEVEL|TXTWIDTH|TYPE|UNBINDEVENTS|'
- r'UNIQUE|UPDATED|UPPER|USED|VAL|VARREAD|VARTYPE|VERSION|'
- r'WBORDER|WCHILD|WCOLS|WDOCKABLE|WEEK|WEXIST|WFONT|WLAST|'
- r'WLCOL|WLROW|WMAXIMUM|WMINIMUM|WONTOP|WOUTPUT|WPARENT|'
- r'WREAD|WROWS|WTITLE|WVISIBLE|XMLTOCURSOR|XMLUPDATEGRAM|'
- r'YEAR)(?=\s*\()', Name.Function),
-
- (r'_ALIGNMENT|_ASCIICOLS|_ASCIIROWS|_ASSIST|_BEAUTIFY|_BOX|'
- r'_BROWSER|_BUILDER|_CALCMEM|_CALCVALUE|_CLIPTEXT|_CONVERTER|'
- r'_COVERAGE|_CUROBJ|_DBLCLICK|_DIARYDATE|_DOS|_FOXDOC|_FOXREF|'
- r'_GALLERY|_GENGRAPH|_GENHTML|_GENMENU|_GENPD|_GENSCRN|'
- r'_GENXTAB|_GETEXPR|_INCLUDE|_INCSEEK|_INDENT|_LMARGIN|_MAC|'
- r'_MENUDESIGNER|_MLINE|_PADVANCE|_PAGENO|_PAGETOTAL|_PBPAGE|'
- r'_PCOLNO|_PCOPIES|_PDRIVER|_PDSETUP|_PECODE|_PEJECT|_PEPAGE|'
- r'_PLENGTH|_PLINENO|_PLOFFSET|_PPITCH|_PQUALITY|_PRETEXT|'
- r'_PSCODE|_PSPACING|_PWAIT|_RMARGIN|_REPORTBUILDER|'
- r'_REPORTOUTPUT|_REPORTPREVIEW|_SAMPLES|_SCCTEXT|_SCREEN|'
- r'_SHELL|_SPELLCHK|_STARTUP|_TABS|_TALLY|_TASKPANE|_TEXT|'
- r'_THROTTLE|_TOOLBOX|_TOOLTIPTIMEOUT|_TRANSPORT|_TRIGGERLEVEL|'
- r'_UNIX|_VFP|_WINDOWS|_WIZARD|_WRAP', Keyword.Pseudo),
-
- (r'THISFORMSET|THISFORM|THIS', Name.Builtin),
-
- (r'Application|CheckBox|Collection|Column|ComboBox|'
- r'CommandButton|CommandGroup|Container|Control|CursorAdapter|'
- r'Cursor|Custom|DataEnvironment|DataObject|EditBox|'
- r'Empty|Exception|Fields|Files|File|FormSet|Form|FoxCode|'
- r'Grid|Header|Hyperlink|Image|Label|Line|ListBox|Objects|'
- r'OptionButton|OptionGroup|PageFrame|Page|ProjectHook|Projects|'
- r'Project|Relation|ReportListener|Separator|Servers|Server|'
- r'Session|Shape|Spinner|Tables|TextBox|Timer|ToolBar|'
- r'XMLAdapter|XMLField|XMLTable', Name.Class),
-
- (r'm\.[a-z_]\w*', Name.Variable),
- (r'\.(F|T|AND|OR|NOT|NULL)\.|\b(AND|OR|NOT|NULL)\b', Operator.Word),
-
- (r'\.(ActiveColumn|ActiveControl|ActiveForm|ActivePage|'
- r'ActiveProject|ActiveRow|AddLineFeeds|ADOCodePage|Alias|'
- r'Alignment|Align|AllowAddNew|AllowAutoColumnFit|'
- r'AllowCellSelection|AllowDelete|AllowHeaderSizing|'
- r'AllowInsert|AllowModalMessages|AllowOutput|AllowRowSizing|'
- r'AllowSimultaneousFetch|AllowTabs|AllowUpdate|'
- r'AlwaysOnBottom|AlwaysOnTop|Anchor|Application|'
- r'AutoActivate|AutoCenter|AutoCloseTables|AutoComplete|'
- r'AutoCompSource|AutoCompTable|AutoHideScrollBar|'
- r'AutoIncrement|AutoOpenTables|AutoRelease|AutoSize|'
- r'AutoVerbMenu|AutoYield|BackColor|ForeColor|BackStyle|'
- r'BaseClass|BatchUpdateCount|BindControls|BorderColor|'
- r'BorderStyle|BorderWidth|BoundColumn|BoundTo|Bound|'
- r'BreakOnError|BufferModeOverride|BufferMode|'
- r'BuildDateTime|ButtonCount|Buttons|Cancel|Caption|'
- r'Centered|Century|ChildAlias|ChildOrder|ChildTable|'
- r'ClassLibrary|Class|ClipControls|Closable|CLSID|CodePage|'
- r'ColorScheme|ColorSource|ColumnCount|ColumnLines|'
- r'ColumnOrder|Columns|ColumnWidths|CommandClauses|'
- r'Comment|CompareMemo|ConflictCheckCmd|ConflictCheckType|'
- r'ContinuousScroll|ControlBox|ControlCount|Controls|'
- r'ControlSource|ConversionFunc|Count|CurrentControl|'
- r'CurrentDataSession|CurrentPass|CurrentX|CurrentY|'
- r'CursorSchema|CursorSource|CursorStatus|Curvature|'
- r'Database|DataSessionID|DataSession|DataSourceType|'
- r'DataSource|DataType|DateFormat|DateMark|Debug|'
- r'DeclareXMLPrefix|DEClassLibrary|DEClass|DefaultFilePath|'
- r'Default|DefOLELCID|DeleteCmdDataSourceType|DeleteCmdDataSource|'
- r'DeleteCmd|DeleteMark|Description|Desktop|'
- r'Details|DisabledBackColor|DisabledForeColor|'
- r'DisabledItemBackColor|DisabledItemForeColor|'
- r'DisabledPicture|DisableEncode|DisplayCount|'
- r'DisplayValue|Dockable|Docked|DockPosition|'
- r'DocumentFile|DownPicture|DragIcon|DragMode|DrawMode|'
- r'DrawStyle|DrawWidth|DynamicAlignment|DynamicBackColor|'
- r'DynamicForeColor|DynamicCurrentControl|DynamicFontBold|'
- r'DynamicFontItalic|DynamicFontStrikethru|'
- r'DynamicFontUnderline|DynamicFontName|DynamicFontOutline|'
- r'DynamicFontShadow|DynamicFontSize|DynamicInputMask|'
- r'DynamicLineHeight|EditorOptions|Enabled|'
- r'EnableHyperlinks|Encrypted|ErrorNo|Exclude|Exclusive|'
- r'FetchAsNeeded|FetchMemoCmdList|FetchMemoDataSourceType|'
- r'FetchMemoDataSource|FetchMemo|FetchSize|'
- r'FileClassLibrary|FileClass|FillColor|FillStyle|Filter|'
- r'FirstElement|FirstNestedTable|Flags|FontBold|FontItalic|'
- r'FontStrikethru|FontUnderline|FontCharSet|FontCondense|'
- r'FontExtend|FontName|FontOutline|FontShadow|FontSize|'
- r'ForceCloseTag|Format|FormCount|FormattedOutput|Forms|'
- r'FractionDigits|FRXDataSession|FullName|GDIPlusGraphics|'
- r'GridLineColor|GridLines|GridLineWidth|HalfHeightCaption|'
- r'HeaderClassLibrary|HeaderClass|HeaderHeight|Height|'
- r'HelpContextID|HideSelection|HighlightBackColor|'
- r'HighlightForeColor|HighlightStyle|HighlightRowLineWidth|'
- r'HighlightRow|Highlight|HomeDir|Hours|HostName|'
- r'HScrollSmallChange|hWnd|Icon|IncrementalSearch|Increment|'
- r'InitialSelectedAlias|InputMask|InsertCmdDataSourceType|'
- r'InsertCmdDataSource|InsertCmdRefreshCmd|'
- r'InsertCmdRefreshFieldList|InsertCmdRefreshKeyFieldList|'
- r'InsertCmd|Instancing|IntegralHeight|'
- r'Interval|IMEMode|IsAttribute|IsBase64|IsBinary|IsNull|'
- r'IsDiffGram|IsLoaded|ItemBackColor,|ItemData|ItemIDData|'
- r'ItemTips|IXMLDOMElement|KeyboardHighValue|KeyboardLowValue|'
- r'Keyfield|KeyFieldList|KeyPreview|KeySort|LanguageOptions|'
- r'LeftColumn|Left|LineContents|LineNo|LineSlant|LinkMaster|'
- r'ListCount|ListenerType|ListIndex|ListItemID|ListItem|'
- r'List|LockColumnsLeft|LockColumns|LockScreen|MacDesktop|'
- r'MainFile|MapN19_4ToCurrency|MapBinary|MapVarchar|Margin|'
- r'MaxButton|MaxHeight|MaxLeft|MaxLength|MaxRecords|MaxTop|'
- r'MaxWidth|MDIForm|MemberClassLibrary|MemberClass|'
- r'MemoWindow|Message|MinButton|MinHeight|MinWidth|'
- r'MouseIcon|MousePointer|Movable|MoverBars|MultiSelect|'
- r'Name|NestedInto|NewIndex|NewItemID|NextSiblingTable|'
- r'NoCpTrans|NoDataOnLoad|NoData|NullDisplay|'
- r'NumberOfElements|Object|OLEClass|OLEDragMode|'
- r'OLEDragPicture|OLEDropEffects|OLEDropHasData|'
- r'OLEDropMode|OLEDropTextInsertion|OLELCID|'
- r'OLERequestPendingTimeout|OLEServerBusyRaiseError|'
- r'OLEServerBusyTimeout|OLETypeAllowed|OneToMany|'
- r'OpenViews|OpenWindow|Optimize|OrderDirection|Order|'
- r'OutputPageCount|OutputType|PageCount|PageHeight|'
- r'PageNo|PageOrder|Pages|PageTotal|PageWidth|'
- r'PanelLink|Panel|ParentAlias|ParentClass|ParentTable|'
- r'Parent|Partition|PasswordChar|PictureMargin|'
- r'PicturePosition|PictureSpacing|PictureSelectionDisplay|'
- r'PictureVal|Picture|Prepared|'
- r'PolyPoints|PreserveWhiteSpace|PreviewContainer|'
- r'PrintJobName|Procedure|PROCESSID|ProgID|ProjectHookClass|'
- r'ProjectHookLibrary|ProjectHook|QuietMode|'
- r'ReadCycle|ReadLock|ReadMouse|ReadObject|ReadOnly|'
- r'ReadSave|ReadTimeout|RecordMark|RecordSourceType|'
- r'RecordSource|RefreshAlias|'
- r'RefreshCmdDataSourceType|RefreshCmdDataSource|RefreshCmd|'
- r'RefreshIgnoreFieldList|RefreshTimeStamp|RelationalExpr|'
- r'RelativeColumn|RelativeRow|ReleaseType|Resizable|'
- r'RespectCursorCP|RespectNesting|RightToLeft|RotateFlip|'
- r'Rotation|RowColChange|RowHeight|RowSourceType|'
- r'RowSource|ScaleMode|SCCProvider|SCCStatus|ScrollBars|'
- r'Seconds|SelectCmd|SelectedID|'
- r'SelectedItemBackColor|SelectedItemForeColor|Selected|'
- r'SelectionNamespaces|SelectOnEntry|SelLength|SelStart|'
- r'SelText|SendGDIPlusImage|SendUpdates|ServerClassLibrary|'
- r'ServerClass|ServerHelpFile|ServerName|'
- r'ServerProject|ShowTips|ShowInTaskbar|ShowWindow|'
- r'Sizable|SizeBox|SOM|Sorted|Sparse|SpecialEffect|'
- r'SpinnerHighValue|SpinnerLowValue|SplitBar|StackLevel|'
- r'StartMode|StatusBarText|StatusBar|Stretch|StrictDateEntry|'
- r'Style|TabIndex|Tables|TabOrientation|Tabs|TabStop|'
- r'TabStretch|TabStyle|Tag|TerminateRead|Text|Themes|'
- r'ThreadID|TimestampFieldList|TitleBar|ToolTipText|'
- r'TopIndex|TopItemID|Top|TwoPassProcess|TypeLibCLSID|'
- r'TypeLibDesc|TypeLibName|Type|Unicode|UpdatableFieldList|'
- r'UpdateCmdDataSourceType|UpdateCmdDataSource|'
- r'UpdateCmdRefreshCmd|UpdateCmdRefreshFieldList|'
- r'UpdateCmdRefreshKeyFieldList|UpdateCmd|'
- r'UpdateGramSchemaLocation|UpdateGram|UpdateNameList|UpdateType|'
- r'UseCodePage|UseCursorSchema|UseDeDataSource|UseMemoSize|'
- r'UserValue|UseTransactions|UTF8Encoded|Value|VersionComments|'
- r'VersionCompany|VersionCopyright|VersionDescription|'
- r'VersionNumber|VersionProduct|VersionTrademarks|Version|'
- r'VFPXMLProgID|ViewPortHeight|ViewPortLeft|'
- r'ViewPortTop|ViewPortWidth|VScrollSmallChange|View|Visible|'
- r'VisualEffect|WhatsThisButton|WhatsThisHelpID|WhatsThisHelp|'
- r'WhereType|Width|WindowList|WindowState|WindowType|WordWrap|'
- r'WrapCharInCDATA|WrapInCDATA|WrapMemoInCDATA|XMLAdapter|'
- r'XMLConstraints|XMLNameIsXPath|XMLNamespace|XMLName|'
- r'XMLPrefix|XMLSchemaLocation|XMLTable|XMLType|'
- r'XSDfractionDigits|XSDmaxLength|XSDtotalDigits|'
- r'XSDtype|ZoomBox)', Name.Attribute),
-
- (r'\.(ActivateCell|AddColumn|AddItem|AddListItem|AddObject|'
- r'AddProperty|AddTableSchema|AddToSCC|Add|'
- r'ApplyDiffgram|Attach|AutoFit|AutoOpen|Box|Build|'
- r'CancelReport|ChangesToCursor|CheckIn|CheckOut|Circle|'
- r'CleanUp|ClearData|ClearStatus|Clear|CloneObject|CloseTables|'
- r'Close|Cls|CursorAttach|CursorDetach|CursorFill|'
- r'CursorRefresh|DataToClip|DelayedMemoFetch|DeleteColumn|'
- r'Dock|DoMessage|DoScroll|DoStatus|DoVerb|Drag|Draw|Eval|'
- r'GetData|GetDockState|GetFormat|GetKey|GetLatestVersion|'
- r'GetPageHeight|GetPageWidth|Help|Hide|IncludePageInOutput|'
- r'IndexToItemID|ItemIDToIndex|Item|LoadXML|Line|Modify|'
- r'MoveItem|Move|Nest|OLEDrag|OnPreviewClose|OutputPage|'
- r'Point|Print|PSet|Quit|ReadExpression|ReadMethod|'
- r'RecordRefresh|Refresh|ReleaseXML|Release|RemoveFromSCC|'
- r'RemoveItem|RemoveListItem|RemoveObject|Remove|'
- r'Render|Requery|RequestData|ResetToDefault|Reset|Run|'
- r'SaveAsClass|SaveAs|SetAll|SetData|SetFocus|SetFormat|'
- r'SetMain|SetVar|SetViewPort|ShowWhatsThis|Show|'
- r'SupportsListenerType|TextHeight|TextWidth|ToCursor|'
- r'ToXML|UndoCheckOut|Unnest|UpdateStatus|WhatsThisMode|'
- r'WriteExpression|WriteMethod|ZOrder)', Name.Function),
-
- (r'\.(Activate|AdjustObjectSize|AfterBand|AfterBuild|'
- r'AfterCloseTables|AfterCursorAttach|AfterCursorClose|'
- r'AfterCursorDetach|AfterCursorFill|AfterCursorRefresh|'
- r'AfterCursorUpdate|AfterDelete|AfterInsert|'
- r'AfterRecordRefresh|AfterUpdate|AfterDock|AfterReport|'
- r'AfterRowColChange|BeforeBand|BeforeCursorAttach|'
- r'BeforeCursorClose|BeforeCursorDetach|BeforeCursorFill|'
- r'BeforeCursorRefresh|BeforeCursorUpdate|BeforeDelete|'
- r'BeforeInsert|BeforeDock|BeforeOpenTables|'
- r'BeforeRecordRefresh|BeforeReport|BeforeRowColChange|'
- r'BeforeUpdate|Click|dbc_Activate|dbc_AfterAddTable|'
- r'dbc_AfterAppendProc|dbc_AfterCloseTable|dbc_AfterCopyProc|'
- r'dbc_AfterCreateConnection|dbc_AfterCreateOffline|'
- r'dbc_AfterCreateTable|dbc_AfterCreateView|dbc_AfterDBGetProp|'
- r'dbc_AfterDBSetProp|dbc_AfterDeleteConnection|'
- r'dbc_AfterDropOffline|dbc_AfterDropTable|'
- r'dbc_AfterModifyConnection|dbc_AfterModifyProc|'
- r'dbc_AfterModifyTable|dbc_AfterModifyView|dbc_AfterOpenTable|'
- r'dbc_AfterRemoveTable|dbc_AfterRenameConnection|'
- r'dbc_AfterRenameTable|dbc_AfterRenameView|'
- r'dbc_AfterValidateData|dbc_BeforeAddTable|'
- r'dbc_BeforeAppendProc|dbc_BeforeCloseTable|'
- r'dbc_BeforeCopyProc|dbc_BeforeCreateConnection|'
- r'dbc_BeforeCreateOffline|dbc_BeforeCreateTable|'
- r'dbc_BeforeCreateView|dbc_BeforeDBGetProp|'
- r'dbc_BeforeDBSetProp|dbc_BeforeDeleteConnection|'
- r'dbc_BeforeDropOffline|dbc_BeforeDropTable|'
- r'dbc_BeforeModifyConnection|dbc_BeforeModifyProc|'
- r'dbc_BeforeModifyTable|dbc_BeforeModifyView|'
- r'dbc_BeforeOpenTable|dbc_BeforeRemoveTable|'
- r'dbc_BeforeRenameConnection|dbc_BeforeRenameTable|'
- r'dbc_BeforeRenameView|dbc_BeforeValidateData|'
- r'dbc_CloseData|dbc_Deactivate|dbc_ModifyData|dbc_OpenData|'
- r'dbc_PackData|DblClick|Deactivate|Deleted|Destroy|DoCmd|'
- r'DownClick|DragDrop|DragOver|DropDown|ErrorMessage|Error|'
- r'EvaluateContents|GotFocus|Init|InteractiveChange|KeyPress|'
- r'LoadReport|Load|LostFocus|Message|MiddleClick|MouseDown|'
- r'MouseEnter|MouseLeave|MouseMove|MouseUp|MouseWheel|Moved|'
- r'OLECompleteDrag|OLEDragOver|OLEGiveFeedback|OLESetData|'
- r'OLEStartDrag|OnMoveItem|Paint|ProgrammaticChange|'
- r'QueryAddFile|QueryModifyFile|QueryNewFile|QueryRemoveFile|'
- r'QueryRunFile|QueryUnload|RangeHigh|RangeLow|ReadActivate|'
- r'ReadDeactivate|ReadShow|ReadValid|ReadWhen|Resize|'
- r'RightClick|SCCInit|SCCDestroy|Scrolled|Timer|UIEnable|'
- r'UnDock|UnloadReport|Unload|UpClick|Valid|When)', Name.Function),
-
- (r'\s+', Text),
- # everything else is not colored
- (r'.', Text),
- ],
- 'newline': [
- (r'\*.*?$', Comment.Single, '#pop'),
- (r'(ACCEPT|ACTIVATE\s*MENU|ACTIVATE\s*POPUP|ACTIVATE\s*SCREEN|'
- r'ACTIVATE\s*WINDOW|APPEND|APPEND\s*FROM|APPEND\s*FROM\s*ARRAY|'
- r'APPEND\s*GENERAL|APPEND\s*MEMO|ASSIST|AVERAGE|BLANK|BROWSE|'
- r'BUILD\s*APP|BUILD\s*EXE|BUILD\s*PROJECT|CALCULATE|CALL|'
- r'CANCEL|CHANGE|CLEAR|CLOSE|CLOSE\s*MEMO|COMPILE|CONTINUE|'
- r'COPY\s*FILE|COPY\s*INDEXES|COPY\s*MEMO|COPY\s*STRUCTURE|'
- r'COPY\s*STRUCTURE\s*EXTENDED|COPY\s*TAG|COPY\s*TO|'
- r'COPY\s*TO\s*ARRAY|COUNT|CREATE|CREATE\s*COLOR\s*SET|'
- r'CREATE\s*CURSOR|CREATE\s*FROM|CREATE\s*LABEL|CREATE\s*MENU|'
- r'CREATE\s*PROJECT|CREATE\s*QUERY|CREATE\s*REPORT|'
- r'CREATE\s*SCREEN|CREATE\s*TABLE|CREATE\s*VIEW|DDE|'
- r'DEACTIVATE\s*MENU|DEACTIVATE\s*POPUP|DEACTIVATE\s*WINDOW|'
- r'DECLARE|DEFINE\s*BAR|DEFINE\s*BOX|DEFINE\s*MENU|'
- r'DEFINE\s*PAD|DEFINE\s*POPUP|DEFINE\s*WINDOW|DELETE|'
- r'DELETE\s*FILE|DELETE\s*TAG|DIMENSION|DIRECTORY|DISPLAY|'
- r'DISPLAY\s*FILES|DISPLAY\s*MEMORY|DISPLAY\s*STATUS|'
- r'DISPLAY\s*STRUCTURE|DO|EDIT|EJECT|EJECT\s*PAGE|ERASE|'
- r'EXIT|EXPORT|EXTERNAL|FILER|FIND|FLUSH|FUNCTION|GATHER|'
- r'GETEXPR|GO|GOTO|HELP|HIDE\s*MENU|HIDE\s*POPUP|'
- r'HIDE\s*WINDOW|IMPORT|INDEX|INPUT|INSERT|JOIN|KEYBOARD|'
- r'LABEL|LIST|LOAD|LOCATE|LOOP|MENU|MENU\s*TO|MODIFY\s*COMMAND|'
- r'MODIFY\s*FILE|MODIFY\s*GENERAL|MODIFY\s*LABEL|MODIFY\s*MEMO|'
- r'MODIFY\s*MENU|MODIFY\s*PROJECT|MODIFY\s*QUERY|'
- r'MODIFY\s*REPORT|MODIFY\s*SCREEN|MODIFY\s*STRUCTURE|'
- r'MODIFY\s*WINDOW|MOVE\s*POPUP|MOVE\s*WINDOW|NOTE|'
- r'ON\s*APLABOUT|ON\s*BAR|ON\s*ERROR|ON\s*ESCAPE|'
- r'ON\s*EXIT\s*BAR|ON\s*EXIT\s*MENU|ON\s*EXIT\s*PAD|'
- r'ON\s*EXIT\s*POPUP|ON\s*KEY|ON\s*KEY\s*=|ON\s*KEY\s*LABEL|'
- r'ON\s*MACHELP|ON\s*PAD|ON\s*PAGE|ON\s*READERROR|'
- r'ON\s*SELECTION\s*BAR|ON\s*SELECTION\s*MENU|'
- r'ON\s*SELECTION\s*PAD|ON\s*SELECTION\s*POPUP|ON\s*SHUTDOWN|'
- r'PACK|PARAMETERS|PLAY\s*MACRO|POP\s*KEY|POP\s*MENU|'
- r'POP\s*POPUP|PRIVATE|PROCEDURE|PUBLIC|PUSH\s*KEY|'
- r'PUSH\s*MENU|PUSH\s*POPUP|QUIT|READ|READ\s*MENU|RECALL|'
- r'REINDEX|RELEASE|RELEASE\s*MODULE|RENAME|REPLACE|'
- r'REPLACE\s*FROM\s*ARRAY|REPORT|RESTORE\s*FROM|'
- r'RESTORE\s*MACROS|RESTORE\s*SCREEN|RESTORE\s*WINDOW|'
- r'RESUME|RETRY|RETURN|RUN|RUN\s*\/N"|RUNSCRIPT|'
- r'SAVE\s*MACROS|SAVE\s*SCREEN|SAVE\s*TO|SAVE\s*WINDOWS|'
- r'SCATTER|SCROLL|SEEK|SELECT|SET|SET\s*ALTERNATE|'
- r'SET\s*ANSI|SET\s*APLABOUT|SET\s*AUTOSAVE|SET\s*BELL|'
- r'SET\s*BLINK|SET\s*BLOCKSIZE|SET\s*BORDER|SET\s*BRSTATUS|'
- r'SET\s*CARRY|SET\s*CENTURY|SET\s*CLEAR|SET\s*CLOCK|'
- r'SET\s*COLLATE|SET\s*COLOR\s*OF|SET\s*COLOR\s*OF\s*SCHEME|'
- r'SET\s*COLOR\s*SET|SET\s*COLOR\s*TO|SET\s*COMPATIBLE|'
- r'SET\s*CONFIRM|SET\s*CONSOLE|SET\s*CURRENCY|SET\s*CURSOR|'
- r'SET\s*DATE|SET\s*DEBUG|SET\s*DECIMALS|SET\s*DEFAULT|'
- r'SET\s*DELETED|SET\s*DELIMITERS|SET\s*DEVELOPMENT|'
- r'SET\s*DEVICE|SET\s*DISPLAY|SET\s*DOHISTORY|SET\s*ECHO|'
- r'SET\s*ESCAPE|SET\s*EXACT|SET\s*EXCLUSIVE|SET\s*FIELDS|'
- r'SET\s*FILTER|SET\s*FIXED|SET\s*FORMAT|SET\s*FULLPATH|'
- r'SET\s*FUNCTION|SET\s*HEADINGS|SET\s*HELP|SET\s*HELPFILTER|'
- r'SET\s*HOURS|SET\s*INDEX|SET\s*INTENSITY|SET\s*KEY|'
- r'SET\s*KEYCOMP|SET\s*LIBRARY|SET\s*LOCK|SET\s*LOGERRORS|'
- r'SET\s*MACDESKTOP|SET\s*MACHELP|SET\s*MACKEY|SET\s*MARGIN|'
- r'SET\s*MARK\s*OF|SET\s*MARK\s*TO|SET\s*MEMOWIDTH|'
- r'SET\s*MESSAGE|SET\s*MOUSE|SET\s*MULTILOCKS|SET\s*NEAR|'
- r'SET\s*NOCPTRANS|SET\s*NOTIFY|SET\s*ODOMETER|SET\s*OPTIMIZE|'
- r'SET\s*ORDER|SET\s*PALETTE|SET\s*PATH|SET\s*PDSETUP|'
- r'SET\s*POINT|SET\s*PRINTER|SET\s*PROCEDURE|SET\s*READBORDER|'
- r'SET\s*REFRESH|SET\s*RELATION|SET\s*RELATION\s*OFF|'
- r'SET\s*REPROCESS|SET\s*RESOURCE|SET\s*SAFETY|SET\s*SCOREBOARD|'
- r'SET\s*SEPARATOR|SET\s*SHADOWS|SET\s*SKIP|SET\s*SKIP\s*OF|'
- r'SET\s*SPACE|SET\s*STATUS|SET\s*STATUS\s*BAR|SET\s*STEP|'
- r'SET\s*STICKY|SET\s*SYSMENU|SET\s*TALK|SET\s*TEXTMERGE|'
- r'SET\s*TEXTMERGE\s*DELIMITERS|SET\s*TOPIC|SET\s*TRBETWEEN|'
- r'SET\s*TYPEAHEAD|SET\s*UDFPARMS|SET\s*UNIQUE|SET\s*VIEW|'
- r'SET\s*VOLUME|SET\s*WINDOW\s*OF\s*MEMO|SET\s*XCMDFILE|'
- r'SHOW\s*GET|SHOW\s*GETS|SHOW\s*MENU|SHOW\s*OBJECT|'
- r'SHOW\s*POPUP|SHOW\s*WINDOW|SIZE\s*POPUP|SKIP|SORT|'
- r'STORE|SUM|SUSPEND|TOTAL|TYPE|UNLOCK|UPDATE|USE|WAIT|'
- r'ZAP|ZOOM\s*WINDOW|DO\s*CASE|CASE|OTHERWISE|ENDCASE|'
- r'DO\s*WHILE|ENDDO|FOR|ENDFOR|NEXT|IF|ELSE|ENDIF|PRINTJOB|'
- r'ENDPRINTJOB|SCAN|ENDSCAN|TEXT|ENDTEXT|=)',
- Keyword.Reserved, '#pop'),
- (r'#\s*(IF|ELIF|ELSE|ENDIF|DEFINE|IFDEF|IFNDEF|INCLUDE)',
- Comment.Preproc, '#pop'),
- (r'(m\.)?[a-z_]\w*', Name.Variable, '#pop'),
- (r'.', Text, '#pop'),
- ],
- }
+ (r'(^|\n)\s*', Text, 'newline'),
+
+ # Square brackets may be used for array indices
+            # and for string literals. Look for arrays
+ # before matching string literals.
+ (r'(?<=\w)\[[0-9, ]+\]', Text),
+ (r'\'[^\'\n]*\'|"[^"\n]*"|\[[^]*]\]', String),
+ (r'(^\s*\*|&&|&amp;&amp;).*?\n', Comment.Single),
+
+ (r'(ABS|ACLASS|ACOPY|ACOS|ADATABASES|ADBOBJECTS|ADDBS|'
+ r'ADDPROPERTY|ADEL|ADIR|ADLLS|ADOCKSTATE|AELEMENT|AERROR|'
+ r'AEVENTS|AFIELDS|AFONT|AGETCLASS|AGETFILEVERSION|AINS|'
+ r'AINSTANCE|ALANGUAGE|ALEN|ALIAS|ALINES|ALLTRIM|'
+ r'AMEMBERS|AMOUSEOBJ|ANETRESOURCES|APRINTERS|APROCINFO|'
+ r'ASC|ASCAN|ASELOBJ|ASESSIONS|ASIN|ASORT|ASQLHANDLES|'
+ r'ASTACKINFO|ASUBSCRIPT|AT|AT_C|ATAGINFO|ATAN|ATC|ATCC|'
+ r'ATCLINE|ATLINE|ATN2|AUSED|AVCXCLASSES|BAR|BARCOUNT|'
+ r'BARPROMPT|BETWEEN|BINDEVENT|BINTOC|BITAND|BITCLEAR|'
+ r'BITLSHIFT|BITNOT|BITOR|BITRSHIFT|BITSET|BITTEST|BITXOR|'
+ r'BOF|CANDIDATE|CAPSLOCK|CAST|CDOW|CDX|CEILING|CHR|CHRSAW|'
+ r'CHRTRAN|CHRTRANC|CLEARRESULTSET|CMONTH|CNTBAR|CNTPAD|COL|'
+ r'COM|Functions|COMARRAY|COMCLASSINFO|COMPOBJ|COMPROP|'
+ r'COMRETURNERROR|COS|CPCONVERT|CPCURRENT|CPDBF|CREATEBINARY|'
+ r'CREATEOBJECT|CREATEOBJECTEX|CREATEOFFLINE|CTOBIN|CTOD|'
+ r'CTOT|CURDIR|CURSORGETPROP|CURSORSETPROP|CURSORTOXML|'
+ r'CURVAL|DATE|DATETIME|DAY|DBC|DBF|DBGETPROP|DBSETPROP|'
+ r'DBUSED|DDEAbortTrans|DDEAdvise|DDEEnabled|DDEExecute|'
+ r'DDEInitiate|DDELastError|DDEPoke|DDERequest|DDESetOption|'
+ r'DDESetService|DDESetTopic|DDETerminate|DEFAULTEXT|'
+ r'DELETED|DESCENDING|DIFFERENCE|DIRECTORY|DISKSPACE|'
+ r'DisplayPath|DMY|DODEFAULT|DOW|DRIVETYPE|DROPOFFLINE|'
+ r'DTOC|DTOR|DTOS|DTOT|EDITSOURCE|EMPTY|EOF|ERROR|EVAL(UATE)?|'
+ r'EVENTHANDLER|EVL|EXECSCRIPT|EXP|FCHSIZE|FCLOSE|FCOUNT|'
+ r'FCREATE|FDATE|FEOF|FERROR|FFLUSH|FGETS|FIELD|FILE|'
+ r'FILETOSTR|FILTER|FKLABEL|FKMAX|FLDLIST|FLOCK|FLOOR|'
+ r'FONTMETRIC|FOPEN|FOR|FORCEEXT|FORCEPATH|FOUND|FPUTS|'
+ r'FREAD|FSEEK|FSIZE|FTIME|FULLPATH|FV|FWRITE|'
+ r'GETAUTOINCVALUE|GETBAR|GETCOLOR|GETCP|GETDIR|GETENV|'
+ r'GETFILE|GETFLDSTATE|GETFONT|GETINTERFACE|'
+ r'GETNEXTMODIFIED|GETOBJECT|GETPAD|GETPEM|GETPICT|'
+ r'GETPRINTER|GETRESULTSET|GETWORDCOUNT|GETWORDNUM|'
+ r'GETCURSORADAPTER|GOMONTH|HEADER|HOME|HOUR|ICASE|'
+ r'IDXCOLLATE|IIF|IMESTATUS|INDBC|INDEXSEEK|INKEY|INLIST|'
+ r'INPUTBOX|INSMODE|INT|ISALPHA|ISBLANK|ISCOLOR|ISDIGIT|'
+ r'ISEXCLUSIVE|ISFLOCKED|ISLEADBYTE|ISLOWER|ISMEMOFETCHED|'
+ r'ISMOUSE|ISNULL|ISPEN|ISREADONLY|ISRLOCKED|'
+ r'ISTRANSACTABLE|ISUPPER|JUSTDRIVE|JUSTEXT|JUSTFNAME|'
+ r'JUSTPATH|JUSTSTEM|KEY|KEYMATCH|LASTKEY|LEFT|LEFTC|LEN|'
+ r'LENC|LIKE|LIKEC|LINENO|LOADPICTURE|LOCFILE|LOCK|LOG|'
+ r'LOG10|LOOKUP|LOWER|LTRIM|LUPDATE|MAKETRANSACTABLE|MAX|'
+ r'MCOL|MDOWN|MDX|MDY|MEMLINES|MEMORY|MENU|MESSAGE|'
+ r'MESSAGEBOX|MIN|MINUTE|MLINE|MOD|MONTH|MRKBAR|MRKPAD|'
+ r'MROW|MTON|MWINDOW|NDX|NEWOBJECT|NORMALIZE|NTOM|NUMLOCK|'
+ r'NVL|OBJNUM|OBJTOCLIENT|OBJVAR|OCCURS|OEMTOANSI|OLDVAL|'
+ r'ON|ORDER|OS|PAD|PADL|PARAMETERS|PAYMENT|PCOL|PCOUNT|'
+ r'PEMSTATUS|PI|POPUP|PRIMARY|PRINTSTATUS|PRMBAR|PRMPAD|'
+ r'PROGRAM|PROMPT|PROPER|PROW|PRTINFO|PUTFILE|PV|QUARTER|'
+ r'RAISEEVENT|RAND|RAT|RATC|RATLINE|RDLEVEL|READKEY|RECCOUNT|'
+ r'RECNO|RECSIZE|REFRESH|RELATION|REPLICATE|REQUERY|RGB|'
+ r'RGBSCHEME|RIGHT|RIGHTC|RLOCK|ROUND|ROW|RTOD|RTRIM|'
+ r'SAVEPICTURE|SCHEME|SCOLS|SEC|SECONDS|SEEK|SELECT|SET|'
+ r'SETFLDSTATE|SETRESULTSET|SIGN|SIN|SKPBAR|SKPPAD|SOUNDEX|'
+ r'SPACE|SQLCANCEL|SQLCOLUMNS|SQLCOMMIT|SQLCONNECT|'
+ r'SQLDISCONNECT|SQLEXEC|SQLGETPROP|SQLIDLEDISCONNECT|'
+ r'SQLMORERESULTS|SQLPREPARE|SQLROLLBACK|SQLSETPROP|'
+ r'SQLSTRINGCONNECT|SQLTABLES|SQRT|SROWS|STR|STRCONV|'
+ r'STREXTRACT|STRTOFILE|STRTRAN|STUFF|STUFFC|SUBSTR|'
+ r'SUBSTRC|SYS|SYSMETRIC|TABLEREVERT|TABLEUPDATE|TAG|'
+ r'TAGCOUNT|TAGNO|TAN|TARGET|TEXTMERGE|TIME|TRANSFORM|'
+ r'TRIM|TTOC|TTOD|TXNLEVEL|TXTWIDTH|TYPE|UNBINDEVENTS|'
+ r'UNIQUE|UPDATED|UPPER|USED|VAL|VARREAD|VARTYPE|VERSION|'
+ r'WBORDER|WCHILD|WCOLS|WDOCKABLE|WEEK|WEXIST|WFONT|WLAST|'
+ r'WLCOL|WLROW|WMAXIMUM|WMINIMUM|WONTOP|WOUTPUT|WPARENT|'
+ r'WREAD|WROWS|WTITLE|WVISIBLE|XMLTOCURSOR|XMLUPDATEGRAM|'
+ r'YEAR)(?=\s*\()', Name.Function),
+
+ (r'_ALIGNMENT|_ASCIICOLS|_ASCIIROWS|_ASSIST|_BEAUTIFY|_BOX|'
+ r'_BROWSER|_BUILDER|_CALCMEM|_CALCVALUE|_CLIPTEXT|_CONVERTER|'
+ r'_COVERAGE|_CUROBJ|_DBLCLICK|_DIARYDATE|_DOS|_FOXDOC|_FOXREF|'
+ r'_GALLERY|_GENGRAPH|_GENHTML|_GENMENU|_GENPD|_GENSCRN|'
+ r'_GENXTAB|_GETEXPR|_INCLUDE|_INCSEEK|_INDENT|_LMARGIN|_MAC|'
+ r'_MENUDESIGNER|_MLINE|_PADVANCE|_PAGENO|_PAGETOTAL|_PBPAGE|'
+ r'_PCOLNO|_PCOPIES|_PDRIVER|_PDSETUP|_PECODE|_PEJECT|_PEPAGE|'
+ r'_PLENGTH|_PLINENO|_PLOFFSET|_PPITCH|_PQUALITY|_PRETEXT|'
+ r'_PSCODE|_PSPACING|_PWAIT|_RMARGIN|_REPORTBUILDER|'
+ r'_REPORTOUTPUT|_REPORTPREVIEW|_SAMPLES|_SCCTEXT|_SCREEN|'
+ r'_SHELL|_SPELLCHK|_STARTUP|_TABS|_TALLY|_TASKPANE|_TEXT|'
+ r'_THROTTLE|_TOOLBOX|_TOOLTIPTIMEOUT|_TRANSPORT|_TRIGGERLEVEL|'
+ r'_UNIX|_VFP|_WINDOWS|_WIZARD|_WRAP', Keyword.Pseudo),
+
+ (r'THISFORMSET|THISFORM|THIS', Name.Builtin),
+
+ (r'Application|CheckBox|Collection|Column|ComboBox|'
+ r'CommandButton|CommandGroup|Container|Control|CursorAdapter|'
+ r'Cursor|Custom|DataEnvironment|DataObject|EditBox|'
+ r'Empty|Exception|Fields|Files|File|FormSet|Form|FoxCode|'
+ r'Grid|Header|Hyperlink|Image|Label|Line|ListBox|Objects|'
+ r'OptionButton|OptionGroup|PageFrame|Page|ProjectHook|Projects|'
+ r'Project|Relation|ReportListener|Separator|Servers|Server|'
+ r'Session|Shape|Spinner|Tables|TextBox|Timer|ToolBar|'
+ r'XMLAdapter|XMLField|XMLTable', Name.Class),
+
+ (r'm\.[a-z_]\w*', Name.Variable),
+ (r'\.(F|T|AND|OR|NOT|NULL)\.|\b(AND|OR|NOT|NULL)\b', Operator.Word),
+
+ (r'\.(ActiveColumn|ActiveControl|ActiveForm|ActivePage|'
+ r'ActiveProject|ActiveRow|AddLineFeeds|ADOCodePage|Alias|'
+ r'Alignment|Align|AllowAddNew|AllowAutoColumnFit|'
+ r'AllowCellSelection|AllowDelete|AllowHeaderSizing|'
+ r'AllowInsert|AllowModalMessages|AllowOutput|AllowRowSizing|'
+ r'AllowSimultaneousFetch|AllowTabs|AllowUpdate|'
+ r'AlwaysOnBottom|AlwaysOnTop|Anchor|Application|'
+ r'AutoActivate|AutoCenter|AutoCloseTables|AutoComplete|'
+ r'AutoCompSource|AutoCompTable|AutoHideScrollBar|'
+ r'AutoIncrement|AutoOpenTables|AutoRelease|AutoSize|'
+ r'AutoVerbMenu|AutoYield|BackColor|ForeColor|BackStyle|'
+ r'BaseClass|BatchUpdateCount|BindControls|BorderColor|'
+ r'BorderStyle|BorderWidth|BoundColumn|BoundTo|Bound|'
+ r'BreakOnError|BufferModeOverride|BufferMode|'
+ r'BuildDateTime|ButtonCount|Buttons|Cancel|Caption|'
+ r'Centered|Century|ChildAlias|ChildOrder|ChildTable|'
+ r'ClassLibrary|Class|ClipControls|Closable|CLSID|CodePage|'
+ r'ColorScheme|ColorSource|ColumnCount|ColumnLines|'
+ r'ColumnOrder|Columns|ColumnWidths|CommandClauses|'
+ r'Comment|CompareMemo|ConflictCheckCmd|ConflictCheckType|'
+ r'ContinuousScroll|ControlBox|ControlCount|Controls|'
+ r'ControlSource|ConversionFunc|Count|CurrentControl|'
+ r'CurrentDataSession|CurrentPass|CurrentX|CurrentY|'
+ r'CursorSchema|CursorSource|CursorStatus|Curvature|'
+ r'Database|DataSessionID|DataSession|DataSourceType|'
+ r'DataSource|DataType|DateFormat|DateMark|Debug|'
+ r'DeclareXMLPrefix|DEClassLibrary|DEClass|DefaultFilePath|'
+ r'Default|DefOLELCID|DeleteCmdDataSourceType|DeleteCmdDataSource|'
+ r'DeleteCmd|DeleteMark|Description|Desktop|'
+ r'Details|DisabledBackColor|DisabledForeColor|'
+ r'DisabledItemBackColor|DisabledItemForeColor|'
+ r'DisabledPicture|DisableEncode|DisplayCount|'
+ r'DisplayValue|Dockable|Docked|DockPosition|'
+ r'DocumentFile|DownPicture|DragIcon|DragMode|DrawMode|'
+ r'DrawStyle|DrawWidth|DynamicAlignment|DynamicBackColor|'
+ r'DynamicForeColor|DynamicCurrentControl|DynamicFontBold|'
+ r'DynamicFontItalic|DynamicFontStrikethru|'
+ r'DynamicFontUnderline|DynamicFontName|DynamicFontOutline|'
+ r'DynamicFontShadow|DynamicFontSize|DynamicInputMask|'
+ r'DynamicLineHeight|EditorOptions|Enabled|'
+ r'EnableHyperlinks|Encrypted|ErrorNo|Exclude|Exclusive|'
+ r'FetchAsNeeded|FetchMemoCmdList|FetchMemoDataSourceType|'
+ r'FetchMemoDataSource|FetchMemo|FetchSize|'
+ r'FileClassLibrary|FileClass|FillColor|FillStyle|Filter|'
+ r'FirstElement|FirstNestedTable|Flags|FontBold|FontItalic|'
+ r'FontStrikethru|FontUnderline|FontCharSet|FontCondense|'
+ r'FontExtend|FontName|FontOutline|FontShadow|FontSize|'
+ r'ForceCloseTag|Format|FormCount|FormattedOutput|Forms|'
+ r'FractionDigits|FRXDataSession|FullName|GDIPlusGraphics|'
+ r'GridLineColor|GridLines|GridLineWidth|HalfHeightCaption|'
+ r'HeaderClassLibrary|HeaderClass|HeaderHeight|Height|'
+ r'HelpContextID|HideSelection|HighlightBackColor|'
+ r'HighlightForeColor|HighlightStyle|HighlightRowLineWidth|'
+ r'HighlightRow|Highlight|HomeDir|Hours|HostName|'
+ r'HScrollSmallChange|hWnd|Icon|IncrementalSearch|Increment|'
+ r'InitialSelectedAlias|InputMask|InsertCmdDataSourceType|'
+ r'InsertCmdDataSource|InsertCmdRefreshCmd|'
+ r'InsertCmdRefreshFieldList|InsertCmdRefreshKeyFieldList|'
+ r'InsertCmd|Instancing|IntegralHeight|'
+ r'Interval|IMEMode|IsAttribute|IsBase64|IsBinary|IsNull|'
+ r'IsDiffGram|IsLoaded|ItemBackColor,|ItemData|ItemIDData|'
+ r'ItemTips|IXMLDOMElement|KeyboardHighValue|KeyboardLowValue|'
+ r'Keyfield|KeyFieldList|KeyPreview|KeySort|LanguageOptions|'
+ r'LeftColumn|Left|LineContents|LineNo|LineSlant|LinkMaster|'
+ r'ListCount|ListenerType|ListIndex|ListItemID|ListItem|'
+ r'List|LockColumnsLeft|LockColumns|LockScreen|MacDesktop|'
+ r'MainFile|MapN19_4ToCurrency|MapBinary|MapVarchar|Margin|'
+ r'MaxButton|MaxHeight|MaxLeft|MaxLength|MaxRecords|MaxTop|'
+ r'MaxWidth|MDIForm|MemberClassLibrary|MemberClass|'
+ r'MemoWindow|Message|MinButton|MinHeight|MinWidth|'
+ r'MouseIcon|MousePointer|Movable|MoverBars|MultiSelect|'
+ r'Name|NestedInto|NewIndex|NewItemID|NextSiblingTable|'
+ r'NoCpTrans|NoDataOnLoad|NoData|NullDisplay|'
+ r'NumberOfElements|Object|OLEClass|OLEDragMode|'
+ r'OLEDragPicture|OLEDropEffects|OLEDropHasData|'
+ r'OLEDropMode|OLEDropTextInsertion|OLELCID|'
+ r'OLERequestPendingTimeout|OLEServerBusyRaiseError|'
+ r'OLEServerBusyTimeout|OLETypeAllowed|OneToMany|'
+ r'OpenViews|OpenWindow|Optimize|OrderDirection|Order|'
+ r'OutputPageCount|OutputType|PageCount|PageHeight|'
+ r'PageNo|PageOrder|Pages|PageTotal|PageWidth|'
+ r'PanelLink|Panel|ParentAlias|ParentClass|ParentTable|'
+ r'Parent|Partition|PasswordChar|PictureMargin|'
+ r'PicturePosition|PictureSpacing|PictureSelectionDisplay|'
+ r'PictureVal|Picture|Prepared|'
+ r'PolyPoints|PreserveWhiteSpace|PreviewContainer|'
+ r'PrintJobName|Procedure|PROCESSID|ProgID|ProjectHookClass|'
+ r'ProjectHookLibrary|ProjectHook|QuietMode|'
+ r'ReadCycle|ReadLock|ReadMouse|ReadObject|ReadOnly|'
+ r'ReadSave|ReadTimeout|RecordMark|RecordSourceType|'
+ r'RecordSource|RefreshAlias|'
+ r'RefreshCmdDataSourceType|RefreshCmdDataSource|RefreshCmd|'
+ r'RefreshIgnoreFieldList|RefreshTimeStamp|RelationalExpr|'
+ r'RelativeColumn|RelativeRow|ReleaseType|Resizable|'
+ r'RespectCursorCP|RespectNesting|RightToLeft|RotateFlip|'
+ r'Rotation|RowColChange|RowHeight|RowSourceType|'
+ r'RowSource|ScaleMode|SCCProvider|SCCStatus|ScrollBars|'
+ r'Seconds|SelectCmd|SelectedID|'
+ r'SelectedItemBackColor|SelectedItemForeColor|Selected|'
+ r'SelectionNamespaces|SelectOnEntry|SelLength|SelStart|'
+ r'SelText|SendGDIPlusImage|SendUpdates|ServerClassLibrary|'
+ r'ServerClass|ServerHelpFile|ServerName|'
+ r'ServerProject|ShowTips|ShowInTaskbar|ShowWindow|'
+ r'Sizable|SizeBox|SOM|Sorted|Sparse|SpecialEffect|'
+ r'SpinnerHighValue|SpinnerLowValue|SplitBar|StackLevel|'
+ r'StartMode|StatusBarText|StatusBar|Stretch|StrictDateEntry|'
+ r'Style|TabIndex|Tables|TabOrientation|Tabs|TabStop|'
+ r'TabStretch|TabStyle|Tag|TerminateRead|Text|Themes|'
+ r'ThreadID|TimestampFieldList|TitleBar|ToolTipText|'
+ r'TopIndex|TopItemID|Top|TwoPassProcess|TypeLibCLSID|'
+ r'TypeLibDesc|TypeLibName|Type|Unicode|UpdatableFieldList|'
+ r'UpdateCmdDataSourceType|UpdateCmdDataSource|'
+ r'UpdateCmdRefreshCmd|UpdateCmdRefreshFieldList|'
+ r'UpdateCmdRefreshKeyFieldList|UpdateCmd|'
+ r'UpdateGramSchemaLocation|UpdateGram|UpdateNameList|UpdateType|'
+ r'UseCodePage|UseCursorSchema|UseDeDataSource|UseMemoSize|'
+ r'UserValue|UseTransactions|UTF8Encoded|Value|VersionComments|'
+ r'VersionCompany|VersionCopyright|VersionDescription|'
+ r'VersionNumber|VersionProduct|VersionTrademarks|Version|'
+ r'VFPXMLProgID|ViewPortHeight|ViewPortLeft|'
+ r'ViewPortTop|ViewPortWidth|VScrollSmallChange|View|Visible|'
+ r'VisualEffect|WhatsThisButton|WhatsThisHelpID|WhatsThisHelp|'
+ r'WhereType|Width|WindowList|WindowState|WindowType|WordWrap|'
+ r'WrapCharInCDATA|WrapInCDATA|WrapMemoInCDATA|XMLAdapter|'
+ r'XMLConstraints|XMLNameIsXPath|XMLNamespace|XMLName|'
+ r'XMLPrefix|XMLSchemaLocation|XMLTable|XMLType|'
+ r'XSDfractionDigits|XSDmaxLength|XSDtotalDigits|'
+ r'XSDtype|ZoomBox)', Name.Attribute),
+
+ (r'\.(ActivateCell|AddColumn|AddItem|AddListItem|AddObject|'
+ r'AddProperty|AddTableSchema|AddToSCC|Add|'
+ r'ApplyDiffgram|Attach|AutoFit|AutoOpen|Box|Build|'
+ r'CancelReport|ChangesToCursor|CheckIn|CheckOut|Circle|'
+ r'CleanUp|ClearData|ClearStatus|Clear|CloneObject|CloseTables|'
+ r'Close|Cls|CursorAttach|CursorDetach|CursorFill|'
+ r'CursorRefresh|DataToClip|DelayedMemoFetch|DeleteColumn|'
+ r'Dock|DoMessage|DoScroll|DoStatus|DoVerb|Drag|Draw|Eval|'
+ r'GetData|GetDockState|GetFormat|GetKey|GetLatestVersion|'
+ r'GetPageHeight|GetPageWidth|Help|Hide|IncludePageInOutput|'
+ r'IndexToItemID|ItemIDToIndex|Item|LoadXML|Line|Modify|'
+ r'MoveItem|Move|Nest|OLEDrag|OnPreviewClose|OutputPage|'
+ r'Point|Print|PSet|Quit|ReadExpression|ReadMethod|'
+ r'RecordRefresh|Refresh|ReleaseXML|Release|RemoveFromSCC|'
+ r'RemoveItem|RemoveListItem|RemoveObject|Remove|'
+ r'Render|Requery|RequestData|ResetToDefault|Reset|Run|'
+ r'SaveAsClass|SaveAs|SetAll|SetData|SetFocus|SetFormat|'
+ r'SetMain|SetVar|SetViewPort|ShowWhatsThis|Show|'
+ r'SupportsListenerType|TextHeight|TextWidth|ToCursor|'
+ r'ToXML|UndoCheckOut|Unnest|UpdateStatus|WhatsThisMode|'
+ r'WriteExpression|WriteMethod|ZOrder)', Name.Function),
+
+ (r'\.(Activate|AdjustObjectSize|AfterBand|AfterBuild|'
+ r'AfterCloseTables|AfterCursorAttach|AfterCursorClose|'
+ r'AfterCursorDetach|AfterCursorFill|AfterCursorRefresh|'
+ r'AfterCursorUpdate|AfterDelete|AfterInsert|'
+ r'AfterRecordRefresh|AfterUpdate|AfterDock|AfterReport|'
+ r'AfterRowColChange|BeforeBand|BeforeCursorAttach|'
+ r'BeforeCursorClose|BeforeCursorDetach|BeforeCursorFill|'
+ r'BeforeCursorRefresh|BeforeCursorUpdate|BeforeDelete|'
+ r'BeforeInsert|BeforeDock|BeforeOpenTables|'
+ r'BeforeRecordRefresh|BeforeReport|BeforeRowColChange|'
+ r'BeforeUpdate|Click|dbc_Activate|dbc_AfterAddTable|'
+ r'dbc_AfterAppendProc|dbc_AfterCloseTable|dbc_AfterCopyProc|'
+ r'dbc_AfterCreateConnection|dbc_AfterCreateOffline|'
+ r'dbc_AfterCreateTable|dbc_AfterCreateView|dbc_AfterDBGetProp|'
+ r'dbc_AfterDBSetProp|dbc_AfterDeleteConnection|'
+ r'dbc_AfterDropOffline|dbc_AfterDropTable|'
+ r'dbc_AfterModifyConnection|dbc_AfterModifyProc|'
+ r'dbc_AfterModifyTable|dbc_AfterModifyView|dbc_AfterOpenTable|'
+ r'dbc_AfterRemoveTable|dbc_AfterRenameConnection|'
+ r'dbc_AfterRenameTable|dbc_AfterRenameView|'
+ r'dbc_AfterValidateData|dbc_BeforeAddTable|'
+ r'dbc_BeforeAppendProc|dbc_BeforeCloseTable|'
+ r'dbc_BeforeCopyProc|dbc_BeforeCreateConnection|'
+ r'dbc_BeforeCreateOffline|dbc_BeforeCreateTable|'
+ r'dbc_BeforeCreateView|dbc_BeforeDBGetProp|'
+ r'dbc_BeforeDBSetProp|dbc_BeforeDeleteConnection|'
+ r'dbc_BeforeDropOffline|dbc_BeforeDropTable|'
+ r'dbc_BeforeModifyConnection|dbc_BeforeModifyProc|'
+ r'dbc_BeforeModifyTable|dbc_BeforeModifyView|'
+ r'dbc_BeforeOpenTable|dbc_BeforeRemoveTable|'
+ r'dbc_BeforeRenameConnection|dbc_BeforeRenameTable|'
+ r'dbc_BeforeRenameView|dbc_BeforeValidateData|'
+ r'dbc_CloseData|dbc_Deactivate|dbc_ModifyData|dbc_OpenData|'
+ r'dbc_PackData|DblClick|Deactivate|Deleted|Destroy|DoCmd|'
+ r'DownClick|DragDrop|DragOver|DropDown|ErrorMessage|Error|'
+ r'EvaluateContents|GotFocus|Init|InteractiveChange|KeyPress|'
+ r'LoadReport|Load|LostFocus|Message|MiddleClick|MouseDown|'
+ r'MouseEnter|MouseLeave|MouseMove|MouseUp|MouseWheel|Moved|'
+ r'OLECompleteDrag|OLEDragOver|OLEGiveFeedback|OLESetData|'
+ r'OLEStartDrag|OnMoveItem|Paint|ProgrammaticChange|'
+ r'QueryAddFile|QueryModifyFile|QueryNewFile|QueryRemoveFile|'
+ r'QueryRunFile|QueryUnload|RangeHigh|RangeLow|ReadActivate|'
+ r'ReadDeactivate|ReadShow|ReadValid|ReadWhen|Resize|'
+ r'RightClick|SCCInit|SCCDestroy|Scrolled|Timer|UIEnable|'
+ r'UnDock|UnloadReport|Unload|UpClick|Valid|When)', Name.Function),
+
+ (r'\s+', Text),
+ # everything else is not colored
+ (r'.', Text),
+ ],
+ 'newline': [
+ (r'\*.*?$', Comment.Single, '#pop'),
+ (r'(ACCEPT|ACTIVATE\s*MENU|ACTIVATE\s*POPUP|ACTIVATE\s*SCREEN|'
+ r'ACTIVATE\s*WINDOW|APPEND|APPEND\s*FROM|APPEND\s*FROM\s*ARRAY|'
+ r'APPEND\s*GENERAL|APPEND\s*MEMO|ASSIST|AVERAGE|BLANK|BROWSE|'
+ r'BUILD\s*APP|BUILD\s*EXE|BUILD\s*PROJECT|CALCULATE|CALL|'
+ r'CANCEL|CHANGE|CLEAR|CLOSE|CLOSE\s*MEMO|COMPILE|CONTINUE|'
+ r'COPY\s*FILE|COPY\s*INDEXES|COPY\s*MEMO|COPY\s*STRUCTURE|'
+ r'COPY\s*STRUCTURE\s*EXTENDED|COPY\s*TAG|COPY\s*TO|'
+ r'COPY\s*TO\s*ARRAY|COUNT|CREATE|CREATE\s*COLOR\s*SET|'
+ r'CREATE\s*CURSOR|CREATE\s*FROM|CREATE\s*LABEL|CREATE\s*MENU|'
+ r'CREATE\s*PROJECT|CREATE\s*QUERY|CREATE\s*REPORT|'
+ r'CREATE\s*SCREEN|CREATE\s*TABLE|CREATE\s*VIEW|DDE|'
+ r'DEACTIVATE\s*MENU|DEACTIVATE\s*POPUP|DEACTIVATE\s*WINDOW|'
+ r'DECLARE|DEFINE\s*BAR|DEFINE\s*BOX|DEFINE\s*MENU|'
+ r'DEFINE\s*PAD|DEFINE\s*POPUP|DEFINE\s*WINDOW|DELETE|'
+ r'DELETE\s*FILE|DELETE\s*TAG|DIMENSION|DIRECTORY|DISPLAY|'
+ r'DISPLAY\s*FILES|DISPLAY\s*MEMORY|DISPLAY\s*STATUS|'
+ r'DISPLAY\s*STRUCTURE|DO|EDIT|EJECT|EJECT\s*PAGE|ERASE|'
+ r'EXIT|EXPORT|EXTERNAL|FILER|FIND|FLUSH|FUNCTION|GATHER|'
+ r'GETEXPR|GO|GOTO|HELP|HIDE\s*MENU|HIDE\s*POPUP|'
+ r'HIDE\s*WINDOW|IMPORT|INDEX|INPUT|INSERT|JOIN|KEYBOARD|'
+ r'LABEL|LIST|LOAD|LOCATE|LOOP|MENU|MENU\s*TO|MODIFY\s*COMMAND|'
+ r'MODIFY\s*FILE|MODIFY\s*GENERAL|MODIFY\s*LABEL|MODIFY\s*MEMO|'
+ r'MODIFY\s*MENU|MODIFY\s*PROJECT|MODIFY\s*QUERY|'
+ r'MODIFY\s*REPORT|MODIFY\s*SCREEN|MODIFY\s*STRUCTURE|'
+ r'MODIFY\s*WINDOW|MOVE\s*POPUP|MOVE\s*WINDOW|NOTE|'
+ r'ON\s*APLABOUT|ON\s*BAR|ON\s*ERROR|ON\s*ESCAPE|'
+ r'ON\s*EXIT\s*BAR|ON\s*EXIT\s*MENU|ON\s*EXIT\s*PAD|'
+ r'ON\s*EXIT\s*POPUP|ON\s*KEY|ON\s*KEY\s*=|ON\s*KEY\s*LABEL|'
+ r'ON\s*MACHELP|ON\s*PAD|ON\s*PAGE|ON\s*READERROR|'
+ r'ON\s*SELECTION\s*BAR|ON\s*SELECTION\s*MENU|'
+ r'ON\s*SELECTION\s*PAD|ON\s*SELECTION\s*POPUP|ON\s*SHUTDOWN|'
+ r'PACK|PARAMETERS|PLAY\s*MACRO|POP\s*KEY|POP\s*MENU|'
+ r'POP\s*POPUP|PRIVATE|PROCEDURE|PUBLIC|PUSH\s*KEY|'
+ r'PUSH\s*MENU|PUSH\s*POPUP|QUIT|READ|READ\s*MENU|RECALL|'
+ r'REINDEX|RELEASE|RELEASE\s*MODULE|RENAME|REPLACE|'
+ r'REPLACE\s*FROM\s*ARRAY|REPORT|RESTORE\s*FROM|'
+ r'RESTORE\s*MACROS|RESTORE\s*SCREEN|RESTORE\s*WINDOW|'
+ r'RESUME|RETRY|RETURN|RUN|RUN\s*\/N"|RUNSCRIPT|'
+ r'SAVE\s*MACROS|SAVE\s*SCREEN|SAVE\s*TO|SAVE\s*WINDOWS|'
+ r'SCATTER|SCROLL|SEEK|SELECT|SET|SET\s*ALTERNATE|'
+ r'SET\s*ANSI|SET\s*APLABOUT|SET\s*AUTOSAVE|SET\s*BELL|'
+ r'SET\s*BLINK|SET\s*BLOCKSIZE|SET\s*BORDER|SET\s*BRSTATUS|'
+ r'SET\s*CARRY|SET\s*CENTURY|SET\s*CLEAR|SET\s*CLOCK|'
+ r'SET\s*COLLATE|SET\s*COLOR\s*OF|SET\s*COLOR\s*OF\s*SCHEME|'
+ r'SET\s*COLOR\s*SET|SET\s*COLOR\s*TO|SET\s*COMPATIBLE|'
+ r'SET\s*CONFIRM|SET\s*CONSOLE|SET\s*CURRENCY|SET\s*CURSOR|'
+ r'SET\s*DATE|SET\s*DEBUG|SET\s*DECIMALS|SET\s*DEFAULT|'
+ r'SET\s*DELETED|SET\s*DELIMITERS|SET\s*DEVELOPMENT|'
+ r'SET\s*DEVICE|SET\s*DISPLAY|SET\s*DOHISTORY|SET\s*ECHO|'
+ r'SET\s*ESCAPE|SET\s*EXACT|SET\s*EXCLUSIVE|SET\s*FIELDS|'
+ r'SET\s*FILTER|SET\s*FIXED|SET\s*FORMAT|SET\s*FULLPATH|'
+ r'SET\s*FUNCTION|SET\s*HEADINGS|SET\s*HELP|SET\s*HELPFILTER|'
+ r'SET\s*HOURS|SET\s*INDEX|SET\s*INTENSITY|SET\s*KEY|'
+ r'SET\s*KEYCOMP|SET\s*LIBRARY|SET\s*LOCK|SET\s*LOGERRORS|'
+ r'SET\s*MACDESKTOP|SET\s*MACHELP|SET\s*MACKEY|SET\s*MARGIN|'
+ r'SET\s*MARK\s*OF|SET\s*MARK\s*TO|SET\s*MEMOWIDTH|'
+ r'SET\s*MESSAGE|SET\s*MOUSE|SET\s*MULTILOCKS|SET\s*NEAR|'
+ r'SET\s*NOCPTRANS|SET\s*NOTIFY|SET\s*ODOMETER|SET\s*OPTIMIZE|'
+ r'SET\s*ORDER|SET\s*PALETTE|SET\s*PATH|SET\s*PDSETUP|'
+ r'SET\s*POINT|SET\s*PRINTER|SET\s*PROCEDURE|SET\s*READBORDER|'
+ r'SET\s*REFRESH|SET\s*RELATION|SET\s*RELATION\s*OFF|'
+ r'SET\s*REPROCESS|SET\s*RESOURCE|SET\s*SAFETY|SET\s*SCOREBOARD|'
+ r'SET\s*SEPARATOR|SET\s*SHADOWS|SET\s*SKIP|SET\s*SKIP\s*OF|'
+ r'SET\s*SPACE|SET\s*STATUS|SET\s*STATUS\s*BAR|SET\s*STEP|'
+ r'SET\s*STICKY|SET\s*SYSMENU|SET\s*TALK|SET\s*TEXTMERGE|'
+ r'SET\s*TEXTMERGE\s*DELIMITERS|SET\s*TOPIC|SET\s*TRBETWEEN|'
+ r'SET\s*TYPEAHEAD|SET\s*UDFPARMS|SET\s*UNIQUE|SET\s*VIEW|'
+ r'SET\s*VOLUME|SET\s*WINDOW\s*OF\s*MEMO|SET\s*XCMDFILE|'
+ r'SHOW\s*GET|SHOW\s*GETS|SHOW\s*MENU|SHOW\s*OBJECT|'
+ r'SHOW\s*POPUP|SHOW\s*WINDOW|SIZE\s*POPUP|SKIP|SORT|'
+ r'STORE|SUM|SUSPEND|TOTAL|TYPE|UNLOCK|UPDATE|USE|WAIT|'
+ r'ZAP|ZOOM\s*WINDOW|DO\s*CASE|CASE|OTHERWISE|ENDCASE|'
+ r'DO\s*WHILE|ENDDO|FOR|ENDFOR|NEXT|IF|ELSE|ENDIF|PRINTJOB|'
+ r'ENDPRINTJOB|SCAN|ENDSCAN|TEXT|ENDTEXT|=)',
+ Keyword.Reserved, '#pop'),
+ (r'#\s*(IF|ELIF|ELSE|ENDIF|DEFINE|IFDEF|IFNDEF|INCLUDE)',
+ Comment.Preproc, '#pop'),
+ (r'(m\.)?[a-z_]\w*', Name.Variable, '#pop'),
+ (r'.', Text, '#pop'),
+ ],
+ }
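
The FoxPro lexer above relies on a two-state line discipline: every physical line starts in the 'newline' state, where the first word is classified as a comment, a reserved command, a preprocessor directive or a plain name, and only then does control pop back to 'root' for the rest of the line. A small usage sketch follows; the .PRG snippet is made up for illustration, while get_lexer_by_name, highlight and HtmlFormatter are standard Pygments API.

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name

# Invented FoxPro snippet: '*' at the start of a line is a comment, and the
# first word of the other lines (SET, USE, COUNT, RETURN) is a reserved command.
prg = (
    "* count active customers\n"
    "SET TALK OFF\n"
    "USE customers\n"
    "COUNT FOR NOT DELETED() TO m.nTotal\n"
    "? ALLTRIM(STR(m.nTotal))\n"
    "RETURN\n"
)

lexer = get_lexer_by_name('foxpro')  # 'vfp', 'clipper' and 'xbase' work as well
print(highlight(prg, lexer, HtmlFormatter()))
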
diff --git a/contrib/python/Pygments/py3/pygments/lexers/functional.py b/contrib/python/Pygments/py3/pygments/lexers/functional.py
index e33a72e21e..c5aa576668 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/functional.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/functional.py
@@ -1,20 +1,20 @@
-"""
- pygments.lexers.functional
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+"""
+ pygments.lexers.functional
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
- NewLispLexer, ShenLexer
-from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
- KokaLexer
-from pygments.lexers.theorem import CoqLexer
-from pygments.lexers.erlang import ErlangLexer, ErlangShellLexer, \
- ElixirConsoleLexer, ElixirLexer
-from pygments.lexers.ml import SMLLexer, OcamlLexer, OpaLexer
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
+ NewLispLexer, ShenLexer
+from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
+ KokaLexer
+from pygments.lexers.theorem import CoqLexer
+from pygments.lexers.erlang import ErlangLexer, ErlangShellLexer, \
+ ElixirConsoleLexer, ElixirLexer
+from pygments.lexers.ml import SMLLexer, OcamlLexer, OpaLexer
+
+__all__ = []
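
functional.py is only a backward-compatibility shim: it re-imports lexers that moved into dedicated modules and leaves __all__ empty, so wildcard imports pick up nothing while explicit imports through the old path still resolve to the very same classes. A short check, using only names visible in the code above:

# Both paths name one and the same class object.
from pygments.lexers.functional import HaskellLexer as via_old_path
from pygments.lexers.haskell import HaskellLexer as via_new_path

assert via_old_path is via_new_path
print(via_old_path.name)  # -> Haskell
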
diff --git a/contrib/python/Pygments/py3/pygments/lexers/go.py b/contrib/python/Pygments/py3/pygments/lexers/go.py
index 33a57ce813..576dcf9b94 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/go.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/go.py
@@ -1,100 +1,100 @@
-"""
- pygments.lexers.go
- ~~~~~~~~~~~~~~~~~~
-
- Lexers for the Google Go language.
-
+"""
+ pygments.lexers.go
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Google Go language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['GoLexer']
-
-
-class GoLexer(RegexLexer):
- """
- For `Go <http://golang.org>`_ source.
-
- .. versionadded:: 1.2
- """
- name = 'Go'
- filenames = ['*.go']
+
+__all__ = ['GoLexer']
+
+
+class GoLexer(RegexLexer):
+ """
+ For `Go <http://golang.org>`_ source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Go'
+ filenames = ['*.go']
aliases = ['go', 'golang']
- mimetypes = ['text/x-gosrc']
-
- flags = re.MULTILINE | re.UNICODE
-
- tokens = {
- 'root': [
+ mimetypes = ['text/x-gosrc']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuations
(r'//(.*?)$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(import|package)\b', Keyword.Namespace),
- (r'(var|func|struct|map|chan|type|interface|const)\b',
- Keyword.Declaration),
- (words((
- 'break', 'default', 'select', 'case', 'defer', 'go',
- 'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
- 'continue', 'for', 'return'), suffix=r'\b'),
- Keyword),
- (r'(true|false|iota|nil)\b', Keyword.Constant),
- # It seems the builtin types aren't actually keywords, but
- # can be used as functions. So we need two declarations.
- (words((
- 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'int', 'int8', 'int16', 'int32', 'int64',
- 'float', 'float32', 'float64',
- 'complex64', 'complex128', 'byte', 'rune',
- 'string', 'bool', 'error', 'uintptr',
- 'print', 'println', 'panic', 'recover', 'close', 'complex',
- 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
- 'new', 'make'), suffix=r'\b(\()'),
- bygroups(Name.Builtin, Punctuation)),
- (words((
- 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
- 'int', 'int8', 'int16', 'int32', 'int64',
- 'float', 'float32', 'float64',
- 'complex64', 'complex128', 'byte', 'rune',
- 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'),
- Keyword.Type),
- # imaginary_lit
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
- # float_lit
- (r'\d+(\.\d+[eE][+\-]?\d+|'
- r'\.\d*|[eE][+\-]?\d+)', Number.Float),
- (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
- # int_lit
- # -- octal_lit
- (r'0[0-7]+', Number.Oct),
- # -- hex_lit
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- decimal_lit
- (r'(0|[1-9][0-9]*)', Number.Integer),
- # char_lit
- (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
- String.Char),
- # StringLiteral
- # -- raw_string_lit
- (r'`[^`]*`', String),
- # -- interpreted_string_lit
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'(import|package)\b', Keyword.Namespace),
+ (r'(var|func|struct|map|chan|type|interface|const)\b',
+ Keyword.Declaration),
+ (words((
+ 'break', 'default', 'select', 'case', 'defer', 'go',
+ 'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
+ 'continue', 'for', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|iota|nil)\b', Keyword.Constant),
+ # It seems the builtin types aren't actually keywords, but
+ # can be used as functions. So we need two declarations.
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr',
+ 'print', 'println', 'panic', 'recover', 'close', 'complex',
+ 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
+ 'new', 'make'), suffix=r'\b(\()'),
+ bygroups(Name.Builtin, Punctuation)),
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'),
+ Keyword.Type),
+ # imaginary_lit
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+ # float_lit
+ (r'\d+(\.\d+[eE][+\-]?\d+|'
+ r'\.\d*|[eE][+\-]?\d+)', Number.Float),
+ (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
+ # int_lit
+ # -- octal_lit
+ (r'0[0-7]+', Number.Oct),
+ # -- hex_lit
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- decimal_lit
+ (r'(0|[1-9][0-9]*)', Number.Integer),
+ # char_lit
+ (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
+ String.Char),
+ # StringLiteral
+ # -- raw_string_lit
+ (r'`[^`]*`', String),
+ # -- interpreted_string_lit
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Tokens
- (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
- r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
- (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
- # identifier
- (r'[^\W\d]\w*', Name.Other),
- ]
- }
+ # Tokens
+ (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
+ r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
+ (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
+ # identifier
+ (r'[^\W\d]\w*', Name.Other),
+ ]
+ }
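
For reference, the GoLexer defined above can be driven through the usual Pygments entry points. A minimal sketch, assuming a recent Pygments release is installed:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.go import GoLexer

    source = 'package main\n\nfunc main() {\n\tprintln("hi")\n}\n'
    # Render the snippet with ANSI colours derived from the token rules above.
    print(highlight(source, GoLexer(), TerminalFormatter()))
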
diff --git a/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py b/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
index ff57c99917..98190e963e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
@@ -1,135 +1,135 @@
-"""
- pygments.lexers.grammar_notation
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
+"""
+ pygments.lexers.grammar_notation
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Lexers for grammar notations like BNF.
-
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import re
-
+
from pygments.lexer import RegexLexer, bygroups, include, this, using, words
from pygments.token import Comment, Keyword, Literal, Name, Number, \
Operator, Punctuation, String, Text, Whitespace
-
+
__all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer', 'PegLexer']
+
-
-class BnfLexer(RegexLexer):
- """
+class BnfLexer(RegexLexer):
+ """
This lexer is for grammar notations which are similar to
- original BNF.
-
-    In order to maximize the number of targets of this lexer,
-    we made the following design decisions:
-
- * We don't distinguish `Terminal Symbol`.
-
- * We do assume that `NonTerminal Symbol` are always enclosed
- with arrow brackets.
-
- * We do assume that `NonTerminal Symbol` may include
- any printable characters except arrow brackets and ASCII 0x20.
- This assumption is for `RBNF <http://www.rfc-base.org/txt/rfc-5511.txt>`_.
-
- * We do assume that target notation doesn't support comment.
-
- * We don't distinguish any operators and punctuation except
- `::=`.
-
-    Though these decisions might cause overly minimal highlighting
-    and you might be disappointed, they are reasonable for us.
-
- .. versionadded:: 2.1
- """
-
- name = 'BNF'
- aliases = ['bnf']
- filenames = ['*.bnf']
- mimetypes = ['text/x-bnf']
-
- tokens = {
- 'root': [
- (r'(<)([ -;=?-~]+)(>)',
- bygroups(Punctuation, Name.Class, Punctuation)),
-
-            # the only operator
- (r'::=', Operator),
-
- # fallback
- (r'[^<>:]+', Text), # for performance
- (r'.', Text),
- ],
- }
-
-
-class AbnfLexer(RegexLexer):
- """
- Lexer for `IETF 7405 ABNF
- <http://www.ietf.org/rfc/rfc7405.txt>`_
- (Updates `5234 <http://www.ietf.org/rfc/rfc5234.txt>`_)
- grammars.
-
- .. versionadded:: 2.1
- """
-
- name = 'ABNF'
- aliases = ['abnf']
- filenames = ['*.abnf']
- mimetypes = ['text/x-abnf']
-
- _core_rules = (
- 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT',
- 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET',
- 'SP', 'VCHAR', 'WSP')
-
- tokens = {
- 'root': [
- # comment
- (r';.*$', Comment.Single),
-
- # quoted
-            # a double quote itself in this state is written as '%x22'.
- (r'(%[si])?"[^"]*"', Literal),
-
- # binary (but i have never seen...)
- (r'%b[01]+\-[01]+\b', Literal), # range
- (r'%b[01]+(\.[01]+)*\b', Literal), # concat
-
- # decimal
- (r'%d[0-9]+\-[0-9]+\b', Literal), # range
- (r'%d[0-9]+(\.[0-9]+)*\b', Literal), # concat
-
- # hexadecimal
- (r'%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b', Literal), # range
- (r'%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b', Literal), # concat
-
- # repetition (<a>*<b>element) including nRule
- (r'\b[0-9]+\*[0-9]+', Operator),
- (r'\b[0-9]+\*', Operator),
- (r'\b[0-9]+', Operator),
- (r'\*', Operator),
-
-            # Strictly speaking, these are not keywords but
-            # are called `Core Rules'.
- (words(_core_rules, suffix=r'\b'), Keyword),
-
- # nonterminals (ALPHA *(ALPHA / DIGIT / "-"))
+ original BNF.
+
+    In order to maximize the number of targets of this lexer,
+    we made the following design decisions:
+
+ * We don't distinguish `Terminal Symbol`.
+
+ * We do assume that `NonTerminal Symbol` are always enclosed
+ with arrow brackets.
+
+ * We do assume that `NonTerminal Symbol` may include
+ any printable characters except arrow brackets and ASCII 0x20.
+ This assumption is for `RBNF <http://www.rfc-base.org/txt/rfc-5511.txt>`_.
+
+ * We do assume that target notation doesn't support comment.
+
+ * We don't distinguish any operators and punctuation except
+ `::=`.
+
+    Though these decisions might cause overly minimal highlighting
+    and you might be disappointed, they are reasonable for us.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'BNF'
+ aliases = ['bnf']
+ filenames = ['*.bnf']
+ mimetypes = ['text/x-bnf']
+
+ tokens = {
+ 'root': [
+ (r'(<)([ -;=?-~]+)(>)',
+ bygroups(Punctuation, Name.Class, Punctuation)),
+
+            # the only operator
+ (r'::=', Operator),
+
+ # fallback
+ (r'[^<>:]+', Text), # for performance
+ (r'.', Text),
+ ],
+ }
+
+
+class AbnfLexer(RegexLexer):
+ """
+ Lexer for `IETF 7405 ABNF
+ <http://www.ietf.org/rfc/rfc7405.txt>`_
+ (Updates `5234 <http://www.ietf.org/rfc/rfc5234.txt>`_)
+ grammars.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'ABNF'
+ aliases = ['abnf']
+ filenames = ['*.abnf']
+ mimetypes = ['text/x-abnf']
+
+ _core_rules = (
+ 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT',
+ 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET',
+ 'SP', 'VCHAR', 'WSP')
+
+ tokens = {
+ 'root': [
+ # comment
+ (r';.*$', Comment.Single),
+
+ # quoted
+            # a double quote itself in this state is written as '%x22'.
+ (r'(%[si])?"[^"]*"', Literal),
+
+ # binary (but i have never seen...)
+ (r'%b[01]+\-[01]+\b', Literal), # range
+ (r'%b[01]+(\.[01]+)*\b', Literal), # concat
+
+ # decimal
+ (r'%d[0-9]+\-[0-9]+\b', Literal), # range
+ (r'%d[0-9]+(\.[0-9]+)*\b', Literal), # concat
+
+ # hexadecimal
+ (r'%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b', Literal), # range
+ (r'%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b', Literal), # concat
+
+ # repetition (<a>*<b>element) including nRule
+ (r'\b[0-9]+\*[0-9]+', Operator),
+ (r'\b[0-9]+\*', Operator),
+ (r'\b[0-9]+', Operator),
+ (r'\*', Operator),
+
+            # Strictly speaking, these are not keywords but
+            # are called `Core Rules'.
+ (words(_core_rules, suffix=r'\b'), Keyword),
+
+ # nonterminals (ALPHA *(ALPHA / DIGIT / "-"))
(r'[a-zA-Z][a-zA-Z0-9-]*\b', Name.Class),
-
- # operators
- (r'(=/|=|/)', Operator),
-
- # punctuation
- (r'[\[\]()]', Punctuation),
-
- # fallback
+
+ # operators
+ (r'(=/|=|/)', Operator),
+
+ # punctuation
+ (r'[\[\]()]', Punctuation),
+
+ # fallback
(r'\s+', Whitespace),
- (r'.', Text),
- ],
- }
+ (r'.', Text),
+ ],
+ }
class JsgfLexer(RegexLexer):
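
A short sketch of feeding a grammar fragment through the BnfLexer declared above, assuming a recent Pygments release is installed:

    from pygments.lexers.grammar_notation import BnfLexer

    grammar = '<expr> ::= <term> "+" <expr>\n'
    # Nonterminals come out as Name.Class tokens and '::=' as Operator,
    # per the 'root' rules above.
    for token_type, value in BnfLexer().get_tokens(grammar):
        print(token_type, repr(value))
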
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graph.py b/contrib/python/Pygments/py3/pygments/lexers/graph.py
index 2af56af26b..bc1cb70efa 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graph.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graph.py
@@ -1,56 +1,56 @@
-"""
- pygments.lexers.graph
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for graph query languages.
-
+"""
+ pygments.lexers.graph
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for graph query languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, include, bygroups, using, this, words
-from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
- String, Number, Whitespace
-
-
-__all__ = ['CypherLexer']
-
-
-class CypherLexer(RegexLexer):
- """
- For `Cypher Query Language
+from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
+ String, Number, Whitespace
+
+
+__all__ = ['CypherLexer']
+
+
+class CypherLexer(RegexLexer):
+ """
+ For `Cypher Query Language
<https://neo4j.com/docs/developer-manual/3.3/cypher/>`_
-
+
For the Cypher version in Neo4j 3.3
-
- .. versionadded:: 2.0
- """
- name = 'Cypher'
- aliases = ['cypher']
- filenames = ['*.cyp', '*.cypher']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- include('comment'),
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cypher'
+ aliases = ['cypher']
+ filenames = ['*.cyp', '*.cypher']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ include('comment'),
include('clauses'),
- include('keywords'),
- include('relations'),
- include('strings'),
- include('whitespace'),
- include('barewords'),
- ],
- 'comment': [
+ include('keywords'),
+ include('relations'),
+ include('strings'),
+ include('whitespace'),
+ include('barewords'),
+ ],
+ 'comment': [
(r'^.*//.*$', Comment.Single),
- ],
- 'keywords': [
- (r'(create|order|match|limit|set|skip|start|return|with|where|'
+ ],
+ 'keywords': [
+ (r'(create|order|match|limit|set|skip|start|return|with|where|'
r'delete|foreach|not|by|true|false)\b', Keyword),
- ],
- 'clauses': [
+ ],
+ 'clauses': [
# based on https://neo4j.com/docs/cypher-refcard/3.3/
(r'(create)(\s+)(index|unique)\b',
bygroups(Keyword, Whitespace, Keyword)),
@@ -82,24 +82,24 @@ class CypherLexer(RegexLexer):
'foreach', 'in', 'limit', 'match', 'merge', 'none', 'not', 'null',
'remove', 'return', 'set', 'skip', 'single', 'start', 'then', 'union',
'unwind', 'yield', 'where', 'when', 'with'), suffix=r'\b'), Keyword),
- ],
- 'relations': [
- (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
- (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
- (r'(-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
- (r'-->|<--|\[|\]', Operator),
- (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
- (r'[.*{}]', Punctuation),
- ],
- 'strings': [
- (r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
- (r'`(?:``|[^`])+`', Name.Variable),
- ],
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'barewords': [
- (r'[a-z]\w*', Name),
- (r'\d+', Number),
- ],
- }
+ ],
+ 'relations': [
+ (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
+ (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
+ (r'(-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
+ (r'-->|<--|\[|\]', Operator),
+ (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
+ (r'[.*{}]', Punctuation),
+ ],
+ 'strings': [
+ (r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
+ (r'`(?:``|[^`])+`', Name.Variable),
+ ],
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'barewords': [
+ (r'[a-z]\w*', Name),
+ (r'\d+', Number),
+ ],
+ }
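
Similarly, the CypherLexer above can be exercised directly; a minimal sketch, assuming a recent Pygments release is installed:

    from pygments.lexers.graph import CypherLexer

    query = 'MATCH (n:Person)-[:KNOWS]->(m) RETURN n, m LIMIT 10'
    # Clause words such as MATCH and RETURN come out as Keyword tokens,
    # relationship arrows as Operator tokens.
    for token_type, value in CypherLexer().get_tokens(query):
        print(token_type, repr(value))
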
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graphics.py b/contrib/python/Pygments/py3/pygments/lexers/graphics.py
index 9f3e4a4431..be52f8ea61 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graphics.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graphics.py
@@ -1,50 +1,50 @@
-"""
- pygments.lexers.graphics
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for computer graphics and plotting related languages.
-
+"""
+ pygments.lexers.graphics
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer graphics and plotting related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include, bygroups, using, \
- this, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include, bygroups, using, \
+ this, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
Number, Punctuation, String, Whitespace
-
-__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
+
+__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
'PovrayLexer', 'HLSLShaderLexer']
-
-
-class GLShaderLexer(RegexLexer):
- """
- GLSL (OpenGL Shader) lexer.
-
- .. versionadded:: 1.1
- """
- name = 'GLSL'
- aliases = ['glsl']
- filenames = ['*.vert', '*.frag', '*.geo']
- mimetypes = ['text/x-glslsrc']
-
- tokens = {
- 'root': [
+
+
+class GLShaderLexer(RegexLexer):
+ """
+ GLSL (OpenGL Shader) lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'GLSL'
+ aliases = ['glsl']
+ filenames = ['*.vert', '*.frag', '*.geo']
+ mimetypes = ['text/x-glslsrc']
+
+ tokens = {
+ 'root': [
(r'^#.*$', Comment.Preproc),
(r'//.*$', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
- Operator),
- (r'[?:]', Operator), # quick hack for ternary
- (r'\bdefined\b', Operator),
- (r'[;{}(),\[\]]', Punctuation),
- # FIXME when e is present, no decimal point needed
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]*', Number.Hex),
- (r'0[0-7]*', Number.Oct),
- (r'[1-9][0-9]*', Number.Integer),
- (words((
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
+ Operator),
+ (r'[?:]', Operator), # quick hack for ternary
+ (r'\bdefined\b', Operator),
+ (r'[;{}(),\[\]]', Punctuation),
+ # FIXME when e is present, no decimal point needed
+ (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]*', Number.Hex),
+ (r'0[0-7]*', Number.Oct),
+ (r'[1-9][0-9]*', Number.Integer),
+ (words((
# Storage qualifiers
'attribute', 'const', 'uniform', 'varying',
'buffer', 'shared', 'in', 'out',
@@ -68,12 +68,12 @@ class GLShaderLexer(RegexLexer):
'break', 'continue', 'do', 'for', 'while', 'switch',
'case', 'default', 'if', 'else', 'subroutine',
'discard', 'return', 'struct'),
- prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
+ prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
# Boolean values
'true', 'false'),
- prefix=r'\b', suffix=r'\b'),
+ prefix=r'\b', suffix=r'\b'),
Keyword.Constant),
(words((
# Miscellaneous types
@@ -141,13 +141,13 @@ class GLShaderLexer(RegexLexer):
Keyword.Reserved),
# All names beginning with "gl_" are reserved.
(r'gl_\w*', Name.Builtin),
- (r'[a-zA-Z_]\w*', Name),
- (r'\.', Punctuation),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'\.', Punctuation),
(r'\s+', Whitespace),
- ],
- }
-
-
+ ],
+ }
+
+
class HLSLShaderLexer(RegexLexer):
"""
HLSL (Microsoft Direct3D Shader) lexer.
@@ -302,482 +302,482 @@ class HLSLShaderLexer(RegexLexer):
}
-class PostScriptLexer(RegexLexer):
- """
- Lexer for PostScript files.
-
- The PostScript Language Reference published by Adobe at
- <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
- is the authority for this.
-
- .. versionadded:: 1.4
- """
- name = 'PostScript'
- aliases = ['postscript', 'postscr']
- filenames = ['*.ps', '*.eps']
- mimetypes = ['application/postscript']
-
- delimiter = r'()<>\[\]{}/%\s'
- delimiter_end = r'(?=[%s])' % delimiter
-
- valid_name_chars = r'[^%s]' % delimiter
- valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
-
- tokens = {
- 'root': [
- # All comment types
+class PostScriptLexer(RegexLexer):
+ """
+ Lexer for PostScript files.
+
+ The PostScript Language Reference published by Adobe at
+ <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
+ is the authority for this.
+
+ .. versionadded:: 1.4
+ """
+ name = 'PostScript'
+ aliases = ['postscript', 'postscr']
+ filenames = ['*.ps', '*.eps']
+ mimetypes = ['application/postscript']
+
+ delimiter = r'()<>\[\]{}/%\s'
+ delimiter_end = r'(?=[%s])' % delimiter
+
+ valid_name_chars = r'[^%s]' % delimiter
+ valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+
+ tokens = {
+ 'root': [
+ # All comment types
(r'^%!.+$', Comment.Preproc),
(r'%%.*$', Comment.Special),
- (r'(^%.*\n){2,}', Comment.Multiline),
+ (r'(^%.*\n){2,}', Comment.Multiline),
(r'%.*$', Comment.Single),
-
- # String literals are awkward; enter separate state.
- (r'\(', String, 'stringliteral'),
-
- (r'[{}<>\[\]]', Punctuation),
-
- # Numbers
- (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
- # Slight abuse: use Oct to signify any explicit base system
- (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
- r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
- (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
- + delimiter_end, Number.Float),
- (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
-
- # References
- (r'\/%s' % valid_name, Name.Variable),
-
- # Names
- (valid_name, Name.Function), # Anything else is executed
-
- # These keywords taken from
- # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
- # Is there an authoritative list anywhere that doesn't involve
- # trawling documentation?
-
- (r'(false|true)' + delimiter_end, Keyword.Constant),
-
- # Conditionals / flow control
- (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)'
- + delimiter_end, Keyword.Reserved),
-
- (words((
- 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin',
- 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat',
- 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix',
- 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix',
- 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end',
- 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get',
- 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv',
- 'idtransform', 'index', 'invertmatrix', 'itransform', 'length',
- 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto',
- 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print',
- 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat',
- 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run',
- 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray',
- 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix',
- 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt',
- 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror',
- 'transform', 'translate', 'truncate', 'typecheck', 'undefined',
- 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end),
- Name.Builtin),
-
+
+ # String literals are awkward; enter separate state.
+ (r'\(', String, 'stringliteral'),
+
+ (r'[{}<>\[\]]', Punctuation),
+
+ # Numbers
+ (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
+ # Slight abuse: use Oct to signify any explicit base system
+ (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
+ r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
+ (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
+ + delimiter_end, Number.Float),
+ (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
+
+ # References
+ (r'\/%s' % valid_name, Name.Variable),
+
+ # Names
+ (valid_name, Name.Function), # Anything else is executed
+
+ # These keywords taken from
+ # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
+ # Is there an authoritative list anywhere that doesn't involve
+ # trawling documentation?
+
+ (r'(false|true)' + delimiter_end, Keyword.Constant),
+
+ # Conditionals / flow control
+ (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)'
+ + delimiter_end, Keyword.Reserved),
+
+ (words((
+ 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin',
+ 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat',
+ 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix',
+ 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix',
+ 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end',
+ 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get',
+ 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv',
+ 'idtransform', 'index', 'invertmatrix', 'itransform', 'length',
+ 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto',
+ 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print',
+ 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat',
+ 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run',
+ 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray',
+ 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix',
+ 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt',
+ 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror',
+ 'transform', 'translate', 'truncate', 'typecheck', 'undefined',
+ 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end),
+ Name.Builtin),
+
(r'\s+', Whitespace),
- ],
-
- 'stringliteral': [
- (r'[^()\\]+', String),
- (r'\\', String.Escape, 'escape'),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
-
- 'escape': [
- (r'[0-8]{3}|n|r|t|b|f|\\|\(|\)', String.Escape, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class AsymptoteLexer(RegexLexer):
- """
- For `Asymptote <http://asymptote.sf.net/>`_ source code.
-
- .. versionadded:: 1.2
- """
- name = 'Asymptote'
+ ],
+
+ 'stringliteral': [
+ (r'[^()\\]+', String),
+ (r'\\', String.Escape, 'escape'),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+
+ 'escape': [
+ (r'[0-8]{3}|n|r|t|b|f|\\|\(|\)', String.Escape, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class AsymptoteLexer(RegexLexer):
+ """
+ For `Asymptote <http://asymptote.sf.net/>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Asymptote'
aliases = ['asymptote', 'asy']
- filenames = ['*.asy']
- mimetypes = ['text/x-asymptote']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
- tokens = {
- 'whitespace': [
+ filenames = ['*.asy']
+ mimetypes = ['text/x-asymptote']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+
+ tokens = {
+ 'whitespace': [
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(Text, Whitespace)), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
- ],
- 'statements': [
- # simple string (TeX friendly)
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
+ ],
+ 'statements': [
+ # simple string (TeX friendly)
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # C style string (with character escapes)
- (r"'", String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
- (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
- r'return|break|continue|struct|typedef|new|access|import|'
- r'unravel|from|include|quote|static|public|private|restricted|'
- r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
-            # Since an asy type name can also be an asy function name,
-            # the following tests whether the string " [a-zA-Z]" follows
-            # the Keyword.Type.
-            # Of course this is not perfect.
- (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
- r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
- r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
- r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
- r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
- r'path3|pen|picture|point|position|projection|real|revolution|'
- r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
- r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
- r'transformation|tree|triangle|trilinear|triple|vector|'
- r'vertex|void)(?=\s+[a-zA-Z])', Keyword.Type),
- # Now the asy-type-name which are not asy-function-name
- # except yours !
- # Perhaps useless
- (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
- r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
- r'picture|position|real|revolution|slice|splitface|ticksgridT|'
- r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
+ # C style string (with character escapes)
+ (r"'", String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
+ (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
+ (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
+ r'return|break|continue|struct|typedef|new|access|import|'
+ r'unravel|from|include|quote|static|public|private|restricted|'
+ r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
+            # Since an asy type name can also be an asy function name,
+            # the following tests whether the string " [a-zA-Z]" follows
+            # the Keyword.Type.
+            # Of course this is not perfect.
+ (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
+ r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
+ r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
+ r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
+ r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
+ r'path3|pen|picture|point|position|projection|real|revolution|'
+ r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
+ r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
+ r'transformation|tree|triangle|trilinear|triple|vector|'
+ r'vertex|void)(?=\s+[a-zA-Z])', Keyword.Type),
+ # Now the asy-type-name which are not asy-function-name
+ # except yours !
+ # Perhaps useless
+ (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
+ r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
+ r'picture|position|real|revolution|slice|splitface|ticksgridT|'
+ r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(\{)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- default('statement'),
- ],
- 'statement': [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'string': [
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'\n', String),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String),
- (r'\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in ASYFUNCNAME:
- token = Name.Function
- elif token is Name and value in ASYVARNAME:
- token = Name.Variable
- yield index, token, value
-
-
-def _shortened(word):
- dpos = word.find('$')
- return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
- for i in range(len(word), dpos, -1))
-
-
-def _shortened_many(*words):
- return '|'.join(map(_shortened, words))
-
-
-class GnuplotLexer(RegexLexer):
- """
- For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
-
- .. versionadded:: 0.11
- """
-
- name = 'Gnuplot'
- aliases = ['gnuplot']
- filenames = ['*.plot', '*.plt']
- mimetypes = ['text/x-gnuplot']
-
- tokens = {
- 'root': [
- include('whitespace'),
- (_shortened('bi$nd'), Keyword, 'bind'),
- (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
- (_shortened('f$it'), Keyword, 'fit'),
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
- (r'else\b', Keyword),
- (_shortened('pa$use'), Keyword, 'pause'),
- (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
- (_shortened('sa$ve'), Keyword, 'save'),
- (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
- (_shortened_many('sh$ow', 'uns$et'),
- Keyword, ('noargs', 'optionarg')),
- (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
- 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
- 'pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'sy$stem', 'up$date'),
- Keyword, 'genericargs'),
- (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'test$'),
- Keyword, 'noargs'),
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(\{)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'\n', String),
+ (r"[^\\'\n]+", String), # all other characters
+ (r'\\\n', String),
+ (r'\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ }
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in ASYFUNCNAME:
+ token = Name.Function
+ elif token is Name and value in ASYVARNAME:
+ token = Name.Variable
+ yield index, token, value
+
+
+def _shortened(word):
+ dpos = word.find('$')
+ return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
+ for i in range(len(word), dpos, -1))
+
+
+def _shortened_many(*words):
+ return '|'.join(map(_shortened, words))
+
+
+class GnuplotLexer(RegexLexer):
+ """
+ For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'Gnuplot'
+ aliases = ['gnuplot']
+ filenames = ['*.plot', '*.plt']
+ mimetypes = ['text/x-gnuplot']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (_shortened('bi$nd'), Keyword, 'bind'),
+ (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
+ (_shortened('f$it'), Keyword, 'fit'),
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
+ (r'else\b', Keyword),
+ (_shortened('pa$use'), Keyword, 'pause'),
+ (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
+ (_shortened('sa$ve'), Keyword, 'save'),
+ (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
+ (_shortened_many('sh$ow', 'uns$et'),
+ Keyword, ('noargs', 'optionarg')),
+ (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
+ 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
+ 'pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'sy$stem', 'up$date'),
+ Keyword, 'genericargs'),
+ (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'test$'),
+ Keyword, 'noargs'),
(r'([a-zA-Z_]\w*)(\s*)(=)',
bygroups(Name.Variable, Whitespace, Operator), 'genericargs'),
(r'([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)',
bygroups(Name.Function, Whitespace, Operator), 'genericargs'),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
- (r';', Keyword),
- ],
- 'comment': [
- (r'[^\\\n]', Comment),
- (r'\\\n', Comment),
- (r'\\', Comment),
- # don't add the newline to the Comment token
- default('#pop'),
- ],
- 'whitespace': [
- ('#', Comment, 'comment'),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r';', Keyword),
+ ],
+ 'comment': [
+ (r'[^\\\n]', Comment),
+ (r'\\\n', Comment),
+ (r'\\', Comment),
+ # don't add the newline to the Comment token
+ default('#pop'),
+ ],
+ 'whitespace': [
+ ('#', Comment, 'comment'),
(r'[ \t\v\f]+', Whitespace),
- ],
- 'noargs': [
- include('whitespace'),
- # semicolon and newline end the argument list
- (r';', Punctuation, '#pop'),
+ ],
+ 'noargs': [
+ include('whitespace'),
+ # semicolon and newline end the argument list
+ (r';', Punctuation, '#pop'),
(r'\n', Whitespace, '#pop'),
- ],
- 'dqstring': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
+ ],
+ 'dqstring': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
(r'\n', Whitespace, '#pop'), # newline ends the string too
- ],
- 'sqstring': [
- (r"''", String), # escaped single quote
- (r"'", String, '#pop'),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # normal backslash
+ ],
+ 'sqstring': [
+ (r"''", String), # escaped single quote
+ (r"'", String, '#pop'),
+ (r"[^\\'\n]+", String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # normal backslash
(r'\n', Whitespace, '#pop'), # newline ends the string too
- ],
- 'genericargs': [
- include('noargs'),
- (r'"', String, 'dqstring'),
- (r"'", String, 'sqstring'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'-?\d+', Number.Integer),
- ('[,.~!%^&*+=|?:<>/-]', Operator),
+ ],
+ 'genericargs': [
+ include('noargs'),
+ (r'"', String, 'dqstring'),
+ (r"'", String, 'sqstring'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'-?\d+', Number.Integer),
+ ('[,.~!%^&*+=|?:<>/-]', Operator),
(r'[{}()\[\]]', Punctuation),
- (r'(eq|ne)\b', Operator.Word),
- (r'([a-zA-Z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-zA-Z_]\w*', Name),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r'(eq|ne)\b', Operator.Word),
+ (r'([a-zA-Z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
(r'(\\)(\n)', bygroups(Text, Whitespace)),
- ],
- 'optionarg': [
- include('whitespace'),
- (_shortened_many(
- "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der",
- "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta",
- "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign",
- "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid",
- "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle",
- "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale",
- "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin",
- "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot",
- "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics",
- "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics",
- "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput",
- "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot",
- "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze",
- "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs",
- "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le",
- "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta",
- "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel",
- "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs",
- "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs",
- "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs",
- "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs",
- "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs",
- "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs",
- "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs",
- "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange",
- "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange",
- "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis",
- "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'),
- ],
- 'bind': [
- ('!', Keyword, '#pop'),
- (_shortened('all$windows'), Name.Builtin),
- include('genericargs'),
- ],
- 'quit': [
- (r'gnuplot\b', Keyword),
- include('noargs'),
- ],
- 'fit': [
- (r'via\b', Name.Builtin),
- include('plot'),
- ],
- 'if': [
- (r'\)', Punctuation, '#pop'),
- include('genericargs'),
- ],
- 'pause': [
- (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
- (_shortened('key$press'), Name.Builtin),
- include('genericargs'),
- ],
- 'plot': [
- (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
- 'mat$rix', 's$mooth', 'thru$', 't$itle',
- 'not$itle', 'u$sing', 'w$ith'),
- Name.Builtin),
- include('genericargs'),
- ],
- 'save': [
- (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
- Name.Builtin),
- include('genericargs'),
- ],
- }
-
-
-class PovrayLexer(RegexLexer):
- """
- For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
-
- .. versionadded:: 0.11
- """
- name = 'POVRay'
- aliases = ['pov']
- filenames = ['*.pov', '*.inc']
- mimetypes = ['text/x-povray']
-
- tokens = {
- 'root': [
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ ],
+ 'optionarg': [
+ include('whitespace'),
+ (_shortened_many(
+ "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der",
+ "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta",
+ "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign",
+ "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid",
+ "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle",
+ "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale",
+ "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin",
+ "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot",
+ "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics",
+ "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics",
+ "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput",
+ "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot",
+ "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze",
+ "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs",
+ "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le",
+ "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta",
+ "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel",
+ "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs",
+ "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs",
+ "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs",
+ "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs",
+ "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs",
+ "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs",
+ "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs",
+ "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange",
+ "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange",
+ "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis",
+ "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'),
+ ],
+ 'bind': [
+ ('!', Keyword, '#pop'),
+ (_shortened('all$windows'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'quit': [
+ (r'gnuplot\b', Keyword),
+ include('noargs'),
+ ],
+ 'fit': [
+ (r'via\b', Name.Builtin),
+ include('plot'),
+ ],
+ 'if': [
+ (r'\)', Punctuation, '#pop'),
+ include('genericargs'),
+ ],
+ 'pause': [
+ (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
+ (_shortened('key$press'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'plot': [
+ (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
+ 'mat$rix', 's$mooth', 'thru$', 't$itle',
+ 'not$itle', 'u$sing', 'w$ith'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ 'save': [
+ (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ }
+
+
+class PovrayLexer(RegexLexer):
+ """
+ For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'POVRay'
+ aliases = ['pov']
+ filenames = ['*.pov', '*.inc']
+ mimetypes = ['text/x-povray']
+
+ tokens = {
+ 'root': [
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
(r'//.*$', Comment.Single),
- (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
- (words((
- 'break', 'case', 'debug', 'declare', 'default', 'define', 'else',
- 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef',
- 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render',
- 'statistics', 'switch', 'undef', 'version', 'warning', 'while',
- 'write'), prefix=r'#', suffix=r'\b'),
- Comment.Preproc),
- (words((
- 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout',
- 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle',
- 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma',
- 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation',
- 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples',
- 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size',
- 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map',
- 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock',
- 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat',
- 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh',
- 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping',
- 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction',
- 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity',
- 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent',
- 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false',
- 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor',
- 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif',
- 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold',
- 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex',
- 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate',
- 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard',
- 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like',
- 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map',
- 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level',
- 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar',
- 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves',
- 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open',
- 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3',
- 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment',
- 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm',
- 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color',
- 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow',
- 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red',
- 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt',
- 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave',
- 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere',
- 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral',
- 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str',
- 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys',
- 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3',
- 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold',
- 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit',
- 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type',
- 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps',
- 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength',
- 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light',
- 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width',
- 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (words((
- 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference',
- 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe',
- 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism',
- 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid',
- 'text', 'torus', 'triangle', 'union'), suffix=r'\b'),
- Name.Builtin),
- # TODO: <=, etc
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=]', Operator),
- (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- (r'[0-9]+\.[0-9]*', Number.Float),
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+', Number.Integer),
+ (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
+ (words((
+ 'break', 'case', 'debug', 'declare', 'default', 'define', 'else',
+ 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef',
+ 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render',
+ 'statistics', 'switch', 'undef', 'version', 'warning', 'while',
+ 'write'), prefix=r'#', suffix=r'\b'),
+ Comment.Preproc),
+ (words((
+ 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout',
+ 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle',
+ 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma',
+ 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation',
+ 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples',
+ 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size',
+ 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map',
+ 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock',
+ 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat',
+ 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh',
+ 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping',
+ 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction',
+ 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity',
+ 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent',
+ 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false',
+ 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor',
+ 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif',
+ 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold',
+ 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex',
+ 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate',
+ 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard',
+ 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like',
+ 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map',
+ 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level',
+ 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar',
+ 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves',
+ 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open',
+ 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3',
+ 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment',
+ 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm',
+ 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color',
+ 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow',
+ 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red',
+ 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt',
+ 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave',
+ 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere',
+ 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral',
+ 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str',
+ 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys',
+ 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3',
+ 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold',
+ 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit',
+ 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type',
+ 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps',
+ 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength',
+ 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light',
+ 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width',
+ 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference',
+ 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe',
+ 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism',
+ 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid',
+ 'text', 'torus', 'triangle', 'union'), suffix=r'\b'),
+ Name.Builtin),
+ # TODO: <=, etc
+ (r'[\[\](){}<>;,]', Punctuation),
+ (r'[-+*/=]', Operator),
+ (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'[0-9]+\.[0-9]*', Number.Float),
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r'\s+', Whitespace),
- ]
- }
+ ]
+ }
def analyse_text(text):
"""POVRAY is similar to JSON/C, but the combination of camera and
diff --git a/contrib/python/Pygments/py3/pygments/lexers/haskell.py b/contrib/python/Pygments/py3/pygments/lexers/haskell.py
index 6ab0f3340e..4eb4c2371c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/haskell.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/haskell.py
@@ -1,77 +1,77 @@
-"""
- pygments.lexers.haskell
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Haskell and related languages.
-
+"""
+ pygments.lexers.haskell
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haskell and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
default, include, inherit
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
-from pygments import unistring as uni
-
+from pygments import unistring as uni
+
__all__ = ['HaskellLexer', 'HspecLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
- 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
- 'LiterateCryptolLexer', 'KokaLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class HaskellLexer(RegexLexer):
- """
- A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 0.8
- """
- name = 'Haskell'
- aliases = ['haskell', 'hs']
- filenames = ['*.hs']
- mimetypes = ['text/x-haskell']
-
- flags = re.MULTILINE | re.UNICODE
-
- reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
+ 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
+ 'LiterateCryptolLexer', 'KokaLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class HaskellLexer(RegexLexer):
+ """
+ A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Haskell'
+ aliases = ['haskell', 'hs']
+ filenames = ['*.hs']
+ mimetypes = ['text/x-haskell']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
'family', 'if', 'in', 'infix[lr]?', 'instance',
- 'let', 'newtype', 'of', 'then', 'type', 'where', '_')
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- tokens = {
- 'root': [
- # Whitespace:
+ 'let', 'newtype', 'of', 'then', 'type', 'where', '_')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
(r'\s+', Whitespace),
- # (r'--\s*|.*$', Comment.Doc),
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r"'[^\\]'", String.Char), # this has to come before the TH quote
- (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
- (r"'?[_" + uni.Ll + r"][\w']*", Name),
- (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r"'[^\\]'", String.Char), # this has to come before the TH quote
+ (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
+ (r"'?[_" + uni.Ll + r"][\w']*", Name),
+ (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')[" + uni.Lu + r"][\w\']*", Keyword.Type),
(r"(')\[[^\]]*\]", Keyword.Type), # tuples and lists get special treatment in GHC
(r"(')\([^)]*\)", Keyword.Type), # ..
(r"(')[:!#$%&*+.\\/<=>?@^|~-]+", Keyword.Type), # promoted type operators
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*_*[pP][+-]?\d(_*\d)*', Number.Float),
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*\.[\da-fA-F](_*[\da-fA-F])*'
r'(_*[pP][+-]?\d(_*\d)*)?', Number.Float),
@@ -81,83 +81,83 @@ class HaskellLexer(RegexLexer):
(r'0[oO]_*[0-7](_*[0-7])*', Number.Oct),
(r'0[xX]_*[\da-fA-F](_*[\da-fA-F])*', Number.Hex),
(r'\d(_*\d)*', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
(r'\s+', Whitespace),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Name), '#pop'),
- # import X hiding (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
+ # import X hiding (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Punctuation), 'funclist'),
- # import X (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ # import X (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- # import X
- (r'[\w.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
+ # import X
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
(r'\s+', Whitespace),
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
+ (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
(r'\s+', Whitespace),
- (r'[' + uni.Lu + r']\w*', Keyword.Type),
- (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'\{-', Comment.Multiline, '#push'),
- (r'-\}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
+ (r'[' + uni.Lu + r']\w*', Keyword.Type),
+ (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'\{-', Comment.Multiline, '#push'),
+ (r'-\}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][' + uni.Lu + r'@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
(r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'),
- ],
- }
-
-
+ ],
+ }
+
+
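Both sides of the HaskellLexer hunk define the same state machine: `reserved` feeds the Keyword.Reserved rule, and `import`/`module` push dedicated states so module paths come out as Name.Namespace. A minimal sketch of exercising that behaviour (assuming the package in this tree is importable; the expected pairs are read off the rules above, not taken from a test run):

from pygments.lexers.haskell import HaskellLexer
from pygments.token import Keyword, Name

source = "import qualified Data.Map as Map\nmain = case xs of [] -> 0\n"

# 'import' is matched by the Keyword.Reserved rule and pushes the 'import'
# state, where 'Data.Map' should be emitted as Name.Namespace; 'case' comes
# from the reserved word list.
pairs = list(HaskellLexer().get_tokens(source))
print((Keyword.Reserved, 'import') in pairs)   # expected: True
print((Name.Namespace, 'Data.Map') in pairs)   # expected: True
print((Keyword.Reserved, 'case') in pairs)     # expected: True
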
class HspecLexer(HaskellLexer):
"""
A Haskell lexer with support for Hspec constructs.
@@ -180,690 +180,690 @@ class HspecLexer(HaskellLexer):
}
-class IdrisLexer(RegexLexer):
- """
- A lexer for the dependently typed programming language Idris.
-
- Based on the Haskell and Agda Lexer.
-
- .. versionadded:: 2.0
- """
- name = 'Idris'
- aliases = ['idris', 'idr']
- filenames = ['*.idr']
- mimetypes = ['text/x-idris']
-
- reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
- 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
- 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
- 'total', 'partial',
+class IdrisLexer(RegexLexer):
+ """
+ A lexer for the dependently typed programming language Idris.
+
+ Based on the Haskell and Agda Lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Idris'
+ aliases = ['idris', 'idr']
+ filenames = ['*.idr']
+ mimetypes = ['text/x-idris']
+
+ reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
+ 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
+ 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
+ 'total', 'partial',
'interface', 'implementation', 'export', 'covering', 'constructor',
- 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
- 'pattern', 'term', 'syntax', 'prefix',
- 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
- 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
-
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
- 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
-
- tokens = {
- 'root': [
- # Comments
+ 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
+ 'pattern', 'term', 'syntax', 'prefix',
+ 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
+ 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
+
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
+ 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
+
+ tokens = {
+ 'root': [
+ # Comments
(r'^(\s*)(%%(%s))' % '|'.join(directives),
bygroups(Whitespace, Keyword.Reserved)),
(r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Whitespace, Comment.Single)),
(r'(\s*)(\|{3}.*?)$', bygroups(Whitespace, Comment.Single)),
(r'(\s*)(\{-)', bygroups(Whitespace, Comment.Multiline), 'comment'),
- # Declaration
- (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
+ # Declaration
+ (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
bygroups(Whitespace, Name.Function, Whitespace, Operator.Word, Whitespace)),
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Whitespace), 'module'),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- (r'[a-z][\w\']*', Text),
- # Special Symbols
- (r'(<-|::|->|=>|=)', Operator.Word), # specials
- (r'([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s(){}]+', Text),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ (r'[a-z][\w\']*', Text),
+ # Special Symbols
+ (r'(<-|::|->|=>|=)', Operator.Word), # specials
+ (r'([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s(){}]+', Text),
(r'\s+?', Whitespace), # Whitespace
- ],
- 'module': [
+ ],
+ 'module': [
(r'\s+', Whitespace),
- (r'([A-Z][\w.]*)(\s+)(\()',
+ (r'([A-Z][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
+ (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
(r'\s+', Whitespace),
- (r'[A-Z]\w*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- (r'--.*$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'\{-', Comment.Multiline, '#push'),
- (r'-\}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']", String.Char),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
+ (r'[A-Z]\w*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ (r'--.*$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'\{-', Comment.Multiline, '#push'),
+ (r'-\}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']", String.Char),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
(r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop')
- ],
- }
-
-
-class AgdaLexer(RegexLexer):
- """
- For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
- dependently typed functional programming language and proof assistant.
-
- .. versionadded:: 2.0
- """
-
- name = 'Agda'
- aliases = ['agda']
- filenames = ['*.agda']
- mimetypes = ['text/x-agda']
-
- reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
- 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
- 'infixl', 'infixr', 'instance', 'let', 'mutual', 'open',
- 'pattern', 'postulate', 'primitive', 'private',
- 'quote', 'quoteGoal', 'quoteTerm',
- 'record', 'renaming', 'rewrite', 'syntax', 'tactic',
- 'unquote', 'unquoteDecl', 'using', 'where', 'with']
-
- tokens = {
- 'root': [
- # Declaration
- (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
+ ],
+ }
+
+
+class AgdaLexer(RegexLexer):
+ """
+ For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
+ dependently typed functional programming language and proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Agda'
+ aliases = ['agda']
+ filenames = ['*.agda']
+ mimetypes = ['text/x-agda']
+
+ reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
+ 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
+ 'infixl', 'infixr', 'instance', 'let', 'mutual', 'open',
+ 'pattern', 'postulate', 'primitive', 'private',
+ 'quote', 'quoteGoal', 'quoteTerm',
+ 'record', 'renaming', 'rewrite', 'syntax', 'tactic',
+ 'unquote', 'unquoteDecl', 'using', 'where', 'with']
+
+ tokens = {
+ 'root': [
+ # Declaration
+ (r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
bygroups(Whitespace, Name.Function, Whitespace, Operator.Word, Whitespace)),
- # Comments
- (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
- (r'\{-', Comment.Multiline, 'comment'),
- # Holes
- (r'\{!', Comment.Directive, 'hole'),
- # Lexemes:
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ # Comments
+ (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
+ (r'\{-', Comment.Multiline, 'comment'),
+ # Holes
+ (r'\{!', Comment.Directive, 'hole'),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Whitespace), 'module'),
(r'\b(Set|Prop)[\u2080-\u2089]*\b', Keyword.Type),
- # Special Symbols
- (r'(\(|\)|\{|\})', Operator),
+ # Special Symbols
+ (r'(\(|\)|\{|\})', Operator),
(r'(\.{1,3}|\||\u03BB|\u2200|\u2192|:|=|->)', Operator.Word),
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s(){}]+', Text),
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s(){}]+', Text),
(r'\s+?', Whitespace), # Whitespace
- ],
- 'hole': [
- # Holes
- (r'[^!{}]+', Comment.Directive),
- (r'\{!', Comment.Directive, '#push'),
- (r'!\}', Comment.Directive, '#pop'),
- (r'[!{}]', Comment.Directive),
- ],
- 'module': [
- (r'\{-', Comment.Multiline, 'comment'),
- (r'[a-zA-Z][\w.]*', Name, '#pop'),
+ ],
+ 'hole': [
+ # Holes
+ (r'[^!{}]+', Comment.Directive),
+ (r'\{!', Comment.Directive, '#push'),
+ (r'!\}', Comment.Directive, '#pop'),
+ (r'[!{}]', Comment.Directive),
+ ],
+ 'module': [
+ (r'\{-', Comment.Multiline, 'comment'),
+ (r'[a-zA-Z][\w.]*', Name, '#pop'),
(r'[\W0-9_]+', Text)
- ],
- 'comment': HaskellLexer.tokens['comment'],
- 'character': HaskellLexer.tokens['character'],
- 'string': HaskellLexer.tokens['string'],
- 'escape': HaskellLexer.tokens['escape']
- }
-
-
-class CryptolLexer(RegexLexer):
- """
- FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 2.0
- """
- name = 'Cryptol'
- aliases = ['cryptol', 'cry']
- filenames = ['*.cry']
- mimetypes = ['text/x-cryptol']
-
- reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
- 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
- 'max', 'min', 'module', 'newtype', 'pragma', 'property',
- 'then', 'type', 'where', 'width')
- ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
- 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
- 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
- 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
-
- tokens = {
- 'root': [
- # Whitespace:
+ ],
+ 'comment': HaskellLexer.tokens['comment'],
+ 'character': HaskellLexer.tokens['character'],
+ 'string': HaskellLexer.tokens['string'],
+ 'escape': HaskellLexer.tokens['escape']
+ }
+
+
+class CryptolLexer(RegexLexer):
+ """
+ FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cryptol'
+ aliases = ['cryptol', 'cry']
+ filenames = ['*.cry']
+ mimetypes = ['text/x-cryptol']
+
+ reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
+ 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
+ 'max', 'min', 'module', 'newtype', 'pragma', 'property',
+ 'then', 'type', 'where', 'width')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
(r'\s+', Whitespace),
- # (r'--\s*|.*$', Comment.Doc),
- (r'//.*$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'^[_a-z][\w\']*', Name.Function),
- (r"'?[_a-z][\w']*", Name),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'//.*$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'^[_a-z][\w\']*', Name.Function),
+ (r"'?[_a-z][\w']*", Name),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
(r'\s+', Whitespace),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([A-Z][\w.]*)(\s+)(as)(\s+)([A-Z][\w.]*)',
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([A-Z][\w.]*)(\s+)(as)(\s+)([A-Z][\w.]*)',
bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Name), '#pop'),
- # import X hiding (functions)
- (r'([A-Z][\w.]*)(\s+)(hiding)(\s+)(\()',
+ # import X hiding (functions)
+ (r'([A-Z][\w.]*)(\s+)(hiding)(\s+)(\()',
bygroups(Name.Namespace, Whitespace, Keyword, Whitespace, Punctuation), 'funclist'),
- # import X (functions)
- (r'([A-Z][\w.]*)(\s+)(\()',
+ # import X (functions)
+ (r'([A-Z][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- # import X
- (r'[\w.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
+ # import X
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
(r'\s+', Whitespace),
- (r'([A-Z][\w.]*)(\s+)(\()',
+ (r'([A-Z][\w.]*)(\s+)(\()',
bygroups(Name.Namespace, Whitespace, Punctuation), 'funclist'),
- (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
+ (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
(r'\s+', Whitespace),
- (r'[A-Z]\w*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- # TODO: these don't match the comments in docs, remove.
+ (r'[A-Z]\w*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ # TODO: these don't match the comments in docs, remove.
# (r'--(?![!#$%&*+./<=>?@^|_~:\\]).*?$', Comment.Single),
# (r'{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- 'comment': [
- # Multiline Comments
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
(r'(\s+)(\\)', bygroups(Whitespace, String.Escape), '#pop'),
- ],
- }
-
+ ],
+ }
+
EXTRA_KEYWORDS = {'join', 'split', 'reverse', 'transpose', 'width',
'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
'trace'}
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Name.Builtin, value
- else:
- yield index, token, value
-
-
-class LiterateLexer(Lexer):
- """
- Base class for lexers of literate file formats based on LaTeX or Bird-style
- (prefixing each code line with ">").
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
- """
-
- bird_re = re.compile(r'(>[ \t]*)(.*\n)')
-
- def __init__(self, baselexer, **options):
- self.baselexer = baselexer
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- style = self.options.get('litstyle')
- if style is None:
- style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
-
- code = ''
- insertions = []
- if style == 'bird':
- # bird-style
- for match in line_re.finditer(text):
- line = match.group()
- m = self.bird_re.match(line)
- if m:
- insertions.append((len(code),
- [(0, Comment.Special, m.group(1))]))
- code += m.group(2)
- else:
- insertions.append((len(code), [(0, Text, line)]))
- else:
- # latex-style
- from pygments.lexers.markup import TexLexer
- lxlexer = TexLexer(**self.options)
- codelines = 0
- latex = ''
- for match in line_re.finditer(text):
- line = match.group()
- if codelines:
- if line.lstrip().startswith('\\end{code}'):
- codelines = 0
- latex += line
- else:
- code += line
- elif line.lstrip().startswith('\\begin{code}'):
- codelines = 1
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- latex = ''
- else:
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Name.Builtin, value
+ else:
+ yield index, token, value
+
+
+class LiterateLexer(Lexer):
+ """
+ Base class for lexers of literate file formats based on LaTeX or Bird-style
+ (prefixing each code line with ">").
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+ """
+
+ bird_re = re.compile(r'(>[ \t]*)(.*\n)')
+
+ def __init__(self, baselexer, **options):
+ self.baselexer = baselexer
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ style = self.options.get('litstyle')
+ if style is None:
+ style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
+
+ code = ''
+ insertions = []
+ if style == 'bird':
+ # bird-style
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self.bird_re.match(line)
+ if m:
+ insertions.append((len(code),
+ [(0, Comment.Special, m.group(1))]))
+ code += m.group(2)
+ else:
+ insertions.append((len(code), [(0, Text, line)]))
+ else:
+ # latex-style
+ from pygments.lexers.markup import TexLexer
+ lxlexer = TexLexer(**self.options)
+ codelines = 0
+ latex = ''
+ for match in line_re.finditer(text):
+ line = match.group()
+ if codelines:
+ if line.lstrip().startswith('\\end{code}'):
+ codelines = 0
+ latex += line
+ else:
+ code += line
+ elif line.lstrip().startswith('\\begin{code}'):
+ codelines = 1
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
+ latex = ''
+ else:
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
yield from do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code))
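LiterateLexer's docstring above describes the litstyle autodetection: a leading '%' or '\' selects LaTeX, anything else Bird style, where each '>' prefix is emitted as Comment.Special and the rest of the line is handed to the wrapped base lexer. A hedged illustration of the Bird path through LiterateHaskellLexer (defined just below):

from pygments.lexers.haskell import LiterateHaskellLexer
from pygments.token import Comment

lit = (
    "Some commentary in plain text.\n"
    "\n"
    "> answer :: Int\n"
    "> answer = 42\n"
)

# No 'litstyle' option and a first non-blank character that is neither '%'
# nor '\\', so Bird style should be autodetected; the '> ' prefixes are
# yielded as Comment.Special and the remainder is lexed as Haskell.
for token, value in LiterateHaskellLexer().get_tokens(lit):
    if token is Comment.Special:
        print(repr(value))   # expected: '> ' twice
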
-
-
-class LiterateHaskellLexer(LiterateLexer):
- """
- For Literate Haskell (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 0.9
- """
- name = 'Literate Haskell'
+
+
+class LiterateHaskellLexer(LiterateLexer):
+ """
+ For Literate Haskell (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Literate Haskell'
aliases = ['literate-haskell', 'lhaskell', 'lhs']
- filenames = ['*.lhs']
- mimetypes = ['text/x-literate-haskell']
-
- def __init__(self, **options):
- hslexer = HaskellLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateIdrisLexer(LiterateLexer):
- """
- For Literate Idris (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Idris'
+ filenames = ['*.lhs']
+ mimetypes = ['text/x-literate-haskell']
+
+ def __init__(self, **options):
+ hslexer = HaskellLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
+
+class LiterateIdrisLexer(LiterateLexer):
+ """
+ For Literate Idris (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Idris'
aliases = ['literate-idris', 'lidris', 'lidr']
- filenames = ['*.lidr']
- mimetypes = ['text/x-literate-idris']
-
- def __init__(self, **options):
- hslexer = IdrisLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateAgdaLexer(LiterateLexer):
- """
- For Literate Agda source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Agda'
+ filenames = ['*.lidr']
+ mimetypes = ['text/x-literate-idris']
+
+ def __init__(self, **options):
+ hslexer = IdrisLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
+
+class LiterateAgdaLexer(LiterateLexer):
+ """
+ For Literate Agda source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Agda'
aliases = ['literate-agda', 'lagda']
- filenames = ['*.lagda']
- mimetypes = ['text/x-literate-agda']
-
- def __init__(self, **options):
- agdalexer = AgdaLexer(**options)
- LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
-
-
-class LiterateCryptolLexer(LiterateLexer):
- """
- For Literate Cryptol (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Cryptol'
+ filenames = ['*.lagda']
+ mimetypes = ['text/x-literate-agda']
+
+ def __init__(self, **options):
+ agdalexer = AgdaLexer(**options)
+ LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
+
+
+class LiterateCryptolLexer(LiterateLexer):
+ """
+ For Literate Cryptol (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Cryptol'
aliases = ['literate-cryptol', 'lcryptol', 'lcry']
- filenames = ['*.lcry']
- mimetypes = ['text/x-literate-cryptol']
-
- def __init__(self, **options):
- crylexer = CryptolLexer(**options)
- LiterateLexer.__init__(self, crylexer, **options)
-
-
-class KokaLexer(RegexLexer):
- """
- Lexer for the `Koka <http://koka.codeplex.com>`_
- language.
-
- .. versionadded:: 1.6
- """
-
- name = 'Koka'
- aliases = ['koka']
- filenames = ['*.kk', '*.kki']
- mimetypes = ['text/x-koka']
-
- keywords = [
- 'infix', 'infixr', 'infixl',
- 'type', 'cotype', 'rectype', 'alias',
- 'struct', 'con',
- 'fun', 'function', 'val', 'var',
- 'external',
- 'if', 'then', 'else', 'elif', 'return', 'match',
- 'private', 'public', 'private',
- 'module', 'import', 'as',
- 'include', 'inline',
- 'rec',
- 'try', 'yield', 'enum',
- 'interface', 'instance',
- ]
-
- # keywords that are followed by a type
- typeStartKeywords = [
- 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
- ]
-
- # keywords valid in a type
- typekeywords = [
- 'forall', 'exists', 'some', 'with',
- ]
-
- # builtin names and special names
- builtin = [
- 'for', 'while', 'repeat',
- 'foreach', 'foreach-indexed',
- 'error', 'catch', 'finally',
- 'cs', 'js', 'file', 'ref', 'assigned',
- ]
-
- # symbols that can be in an operator
- symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
-
- # symbol boundary: an operator keyword should not be followed by any of these
+ filenames = ['*.lcry']
+ mimetypes = ['text/x-literate-cryptol']
+
+ def __init__(self, **options):
+ crylexer = CryptolLexer(**options)
+ LiterateLexer.__init__(self, crylexer, **options)
+
+
+class KokaLexer(RegexLexer):
+ """
+ Lexer for the `Koka <http://koka.codeplex.com>`_
+ language.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Koka'
+ aliases = ['koka']
+ filenames = ['*.kk', '*.kki']
+ mimetypes = ['text/x-koka']
+
+ keywords = [
+ 'infix', 'infixr', 'infixl',
+ 'type', 'cotype', 'rectype', 'alias',
+ 'struct', 'con',
+ 'fun', 'function', 'val', 'var',
+ 'external',
+ 'if', 'then', 'else', 'elif', 'return', 'match',
+ 'private', 'public', 'private',
+ 'module', 'import', 'as',
+ 'include', 'inline',
+ 'rec',
+ 'try', 'yield', 'enum',
+ 'interface', 'instance',
+ ]
+
+ # keywords that are followed by a type
+ typeStartKeywords = [
+ 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
+ ]
+
+ # keywords valid in a type
+ typekeywords = [
+ 'forall', 'exists', 'some', 'with',
+ ]
+
+ # builtin names and special names
+ builtin = [
+ 'for', 'while', 'repeat',
+ 'foreach', 'foreach-indexed',
+ 'error', 'catch', 'finally',
+ 'cs', 'js', 'file', 'ref', 'assigned',
+ ]
+
+ # symbols that can be in an operator
+ symbols = r'[$%&*+@!/\\^~=.:\-?|<>]+'
+
+ # symbol boundary: an operator keyword should not be followed by any of these
sboundary = '(?!' + symbols + ')'
-
- # name boundary: a keyword should not be followed by any of these
+
+ # name boundary: a keyword should not be followed by any of these
boundary = r'(?![\w/])'
-
- # koka token abstractions
- tokenType = Name.Attribute
- tokenTypeDef = Name.Class
- tokenConstructor = Generic.Emph
-
- # main lexer
- tokens = {
- 'root': [
- include('whitespace'),
-
- # go into type mode
- (r'::?' + sboundary, tokenType, 'type'),
+
+ # koka token abstractions
+ tokenType = Name.Attribute
+ tokenTypeDef = Name.Class
+ tokenConstructor = Generic.Emph
+
+ # main lexer
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # go into type mode
+ (r'::?' + sboundary, tokenType, 'type'),
(r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
- 'alias-type'),
+ 'alias-type'),
(r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
- 'struct-type'),
- ((r'(%s)' % '|'.join(typeStartKeywords)) +
+ 'struct-type'),
+ ((r'(%s)' % '|'.join(typeStartKeywords)) +
r'(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
- 'type'),
-
- # special sequences of tokens (we use ?: for non-capturing group as
- # required by 'bygroups')
+ 'type'),
+
+ # special sequences of tokens (we use ?: for non-capturing group as
+ # required by 'bygroups')
(r'(module)(\s+)(interface(?=\s))?(\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Namespace)),
- (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
+ (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
r'(?:(\s*)(=)(\s*)(qualified)?(\s*)'
- r'((?:[a-z]\w*/)*[a-z]\w*))?',
+ r'((?:[a-z]\w*/)*[a-z]\w*))?',
bygroups(Keyword, Whitespace, Name.Namespace, Whitespace, Keyword, Whitespace,
Keyword, Whitespace, Name.Namespace)),
-
+
(r'^(public|private)?(\s+)?(function|fun|val)'
- r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
(r'^(?:(public|private)(?=\s+external))?((?<!^)\s+)?(external)(\s+)(inline(?=\s))?(\s+)?'
- r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
- (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
- (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
- (r'::?|:=|\->|[=.]' + sboundary, Keyword),
-
- # names
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenConstructor)),
- (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
- (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
- bygroups(Name.Namespace, Name)),
- (r'_\w*', Name.Variable),
-
- # literal string
- (r'@"', String.Double, 'litstring'),
-
- # operators
- (symbols + "|/(?![*/])", Operator),
- (r'`', Operator),
- (r'[{}()\[\];,]', Punctuation),
-
- # literals. No check for literal characters with len > 1
- (r'[0-9]+\.[0-9]+([eE][\-+]?[0-9]+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- (r"'", String.Char, 'char'),
- (r'"', String.Double, 'string'),
- ],
-
- # type started by alias
- 'alias-type': [
- (r'=', Keyword),
- include('type')
- ],
-
- # type started by struct
- 'struct-type': [
- (r'(?=\((?!,*\)))', Punctuation, '#pop'),
- include('type')
- ],
-
- # type started by colon
- 'type': [
- (r'[(\[<]', tokenType, 'type-nested'),
- include('type-content')
- ],
-
- # type nested in brackets: can contain parameters, comma etc.
- 'type-nested': [
- (r'[)\]>]', tokenType, '#pop'),
- (r'[(\[<]', tokenType, 'type-nested'),
- (r',', tokenType),
- (r'([a-z]\w*)(\s*)(:)(?!:)',
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
+ (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
+ (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
+ (r'::?|:=|\->|[=.]' + sboundary, Keyword),
+
+ # names
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenConstructor)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
+ (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
+ bygroups(Name.Namespace, Name)),
+ (r'_\w*', Name.Variable),
+
+ # literal string
+ (r'@"', String.Double, 'litstring'),
+
+ # operators
+ (symbols + "|/(?![*/])", Operator),
+ (r'`', Operator),
+ (r'[{}()\[\];,]', Punctuation),
+
+ # literals. No check for literal characters with len > 1
+ (r'[0-9]+\.[0-9]+([eE][\-+]?[0-9]+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ (r"'", String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ ],
+
+ # type started by alias
+ 'alias-type': [
+ (r'=', Keyword),
+ include('type')
+ ],
+
+ # type started by struct
+ 'struct-type': [
+ (r'(?=\((?!,*\)))', Punctuation, '#pop'),
+ include('type')
+ ],
+
+ # type started by colon
+ 'type': [
+ (r'[(\[<]', tokenType, 'type-nested'),
+ include('type-content')
+ ],
+
+ # type nested in brackets: can contain parameters, comma etc.
+ 'type-nested': [
+ (r'[)\]>]', tokenType, '#pop'),
+ (r'[(\[<]', tokenType, 'type-nested'),
+ (r',', tokenType),
+ (r'([a-z]\w*)(\s*)(:)(?!:)',
bygroups(Name, Whitespace, tokenType)), # parameter name
- include('type-content')
- ],
-
- # shared contents of a type
- 'type-content': [
- include('whitespace'),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
- (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
- Keyword, '#pop'), # need to match because names overlap...
-
- # kinds
- (r'[EPHVX]' + boundary, tokenType),
-
- # type names
- (r'[a-z][0-9]*(?![\w/])', tokenType),
- (r'_\w*', tokenType.Variable), # Generic.Emph
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenType)),
- (r'((?:[a-z]\w*/)*)([a-z]\w+)',
- bygroups(Name.Namespace, tokenType)),
-
- # type keyword operators
- (r'::|->|[.:|]', tokenType),
-
- # catchall
- default('#pop')
- ],
-
- # comments and literals
- 'whitespace': [
+ include('type-content')
+ ],
+
+ # shared contents of a type
+ 'type-content': [
+ include('whitespace'),
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
+ (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
+ Keyword, '#pop'), # need to match because names overlap...
+
+ # kinds
+ (r'[EPHVX]' + boundary, tokenType),
+
+ # type names
+ (r'[a-z][0-9]*(?![\w/])', tokenType),
+ (r'_\w*', tokenType.Variable), # Generic.Emph
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenType)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w+)',
+ bygroups(Name.Namespace, tokenType)),
+
+ # type keyword operators
+ (r'::|->|[.:|]', tokenType),
+
+ # catchall
+ default('#pop')
+ ],
+
+ # comments and literals
+ 'whitespace': [
(r'(\n\s*)(#.*)$', bygroups(Whitespace, Comment.Preproc)),
(r'\s+', Whitespace),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'//.*$', Comment.Single)
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'litstring': [
- (r'[^"]+', String.Double),
- (r'""', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'string': [
- (r'[^\\"\n]+', String.Double),
- include('escape-sequence'),
- (r'["\n]', String.Double, '#pop'),
- ],
- 'char': [
- (r'[^\\\'\n]+', String.Char),
- include('escape-sequence'),
- (r'[\'\n]', String.Char, '#pop'),
- ],
- 'escape-sequence': [
- (r'\\[nrt\\"\']', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- # Yes, \U literals are 6 hex digits.
- (r'\\U[0-9a-fA-F]{6}', String.Escape)
- ]
- }
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'//.*$', Comment.Single)
+ ],
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'litstring': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"\n]+', String.Double),
+ include('escape-sequence'),
+ (r'["\n]', String.Double, '#pop'),
+ ],
+ 'char': [
+ (r'[^\\\'\n]+', String.Char),
+ include('escape-sequence'),
+ (r'[\'\n]', String.Char, '#pop'),
+ ],
+ 'escape-sequence': [
+ (r'\\[nrt\\"\']', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ # Yes, \U literals are 6 hex digits.
+ (r'\\U[0-9a-fA-F]{6}', String.Escape)
+ ]
+ }
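The tail of the haskell.py diff is the KokaLexer: keyword and builtin word lists, a type sub-grammar entered after `::`, and an `@"..."` literate-string state where a doubled quote acts as an escape. Before the diff moves on to haxe.py, a small hedged smoke test of that definition (token expectations inferred from the rules, not verified against a run):

from pygments.lexers.haskell import KokaLexer
from pygments.token import Keyword, String

src = 'fun main() {\n  println(@"raw ""quotes""")\n}\n'

# 'fun' is caught by the function-definition rule as Keyword, and @"..."
# enters the 'litstring' state where '""' is String.Escape and a single
# '"' closes the literal.
for token, value in KokaLexer().get_tokens(src):
    if token in (Keyword, String.Escape):
        print(token, repr(value))
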
diff --git a/contrib/python/Pygments/py3/pygments/lexers/haxe.py b/contrib/python/Pygments/py3/pygments/lexers/haxe.py
index ee587e99b7..a9b2b1db0e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/haxe.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/haxe.py
@@ -1,935 +1,935 @@
-"""
- pygments.lexers.haxe
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Haxe and related stuff.
-
+"""
+ pygments.lexers.haxe
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haxe and related stuff.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
- default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-__all__ = ['HaxeLexer', 'HxmlLexer']
-
-
-class HaxeLexer(ExtendedRegexLexer):
- """
- For Haxe source code (http://haxe.org/).
-
- .. versionadded:: 1.3
- """
-
- name = 'Haxe'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+__all__ = ['HaxeLexer', 'HxmlLexer']
+
+
+class HaxeLexer(ExtendedRegexLexer):
+ """
+ For Haxe source code (http://haxe.org/).
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haxe'
aliases = ['haxe', 'hxsl', 'hx']
- filenames = ['*.hx', '*.hxsl']
- mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
-
- # keywords extracted from lexer.mll in the haxe compiler source
- keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
- r'break|return|continue|extends|implements|import|'
- r'switch|case|default|public|private|try|untyped|'
- r'catch|new|this|throw|extern|enum|in|interface|'
- r'cast|override|dynamic|typedef|package|'
- r'inline|using|null|true|false|abstract)\b')
-
- # idtype in lexer.mll
- typeid = r'_*[A-Z]\w*'
-
- # combined ident and dollar and idtype
+ filenames = ['*.hx', '*.hxsl']
+ mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
+
+ # keywords extracted from lexer.mll in the haxe compiler source
+ keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
+ r'break|return|continue|extends|implements|import|'
+ r'switch|case|default|public|private|try|untyped|'
+ r'catch|new|this|throw|extern|enum|in|interface|'
+ r'cast|override|dynamic|typedef|package|'
+ r'inline|using|null|true|false|abstract)\b')
+
+ # idtype in lexer.mll
+ typeid = r'_*[A-Z]\w*'
+
+ # combined ident and dollar and idtype
ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + r'|_+|\$\w+)'
-
- binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
- r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
- r'/|\-|=>|=)')
-
- # ident except keywords
- ident_no_keyword = r'(?!' + keyword + ')' + ident
-
- flags = re.DOTALL | re.MULTILINE
-
- preproc_stack = []
-
- def preproc_callback(self, match, ctx):
- proc = match.group(2)
-
- if proc == 'if':
- # store the current stack
- self.preproc_stack.append(ctx.stack[:])
- elif proc in ['else', 'elseif']:
- # restore the stack back to right before #if
- if self.preproc_stack:
- ctx.stack = self.preproc_stack[-1][:]
- elif proc == 'end':
- # remove the saved stack of previous #if
- if self.preproc_stack:
- self.preproc_stack.pop()
-
- # #if and #elseif should follow by an expr
- if proc in ['if', 'elseif']:
- ctx.stack.append('preproc-expr')
-
- # #error can be optionally follow by the error msg
- if proc in ['error']:
- ctx.stack.append('preproc-error')
-
+
+ binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
+ r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
+ r'/|\-|=>|=)')
+
+ # ident except keywords
+ ident_no_keyword = r'(?!' + keyword + ')' + ident
+
+ flags = re.DOTALL | re.MULTILINE
+
+ preproc_stack = []
+
+ def preproc_callback(self, match, ctx):
+ proc = match.group(2)
+
+ if proc == 'if':
+ # store the current stack
+ self.preproc_stack.append(ctx.stack[:])
+ elif proc in ['else', 'elseif']:
+ # restore the stack back to right before #if
+ if self.preproc_stack:
+ ctx.stack = self.preproc_stack[-1][:]
+ elif proc == 'end':
+ # remove the saved stack of previous #if
+ if self.preproc_stack:
+ self.preproc_stack.pop()
+
+ # #if and #elseif should follow by an expr
+ if proc in ['if', 'elseif']:
+ ctx.stack.append('preproc-expr')
+
+ # #error can be optionally follow by the error msg
+ if proc in ['error']:
+ ctx.stack.append('preproc-error')
+
yield match.start(), Comment.Preproc, '#' + proc
- ctx.pos = match.end()
-
- tokens = {
- 'root': [
- include('spaces'),
- include('meta'),
- (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
- (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
- (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
- (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
- (r'(?:enum)\b', Keyword.Declaration, 'enum'),
- (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
-
- # top-level expression
- # although it is not supported in haxe, but it is common to write
- # expression in web pages the positive lookahead here is to prevent
- # an infinite loop at the EOF
- (r'(?=.)', Text, 'expr-statement'),
- ],
-
- # space/tab/comment/preproc
- 'spaces': [
+ ctx.pos = match.end()
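preproc_callback, restored above in both copies, keeps a stack of lexer-state snapshots so that #if/#elseif/#else/#end can rewind the context stack, and for '#if'/'#elseif' it pushes 'preproc-expr' so the condition itself is lexed as a preprocessor token. A hedged sketch of how that should look from the outside:

from pygments.lexers.haxe import HaxeLexer
from pygments.token import Comment

src = '#if js\ntrace("ok");\n#end\n'

# The callback emits '#if' / '#end' as Comment.Preproc and, for '#if',
# pushes 'preproc-expr' so the condition ('js') is also Comment.Preproc.
for token, value in HaxeLexer().get_tokens(src):
    if token is Comment.Preproc:
        print(repr(value))   # expected: '#if', 'js', '#end'
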
+
+ tokens = {
+ 'root': [
+ include('spaces'),
+ include('meta'),
+ (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
+ (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
+ (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
+ (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
+ (r'(?:enum)\b', Keyword.Declaration, 'enum'),
+ (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
+
+ # top-level expression
+ # although it is not supported in haxe, but it is common to write
+ # expression in web pages the positive lookahead here is to prevent
+ # an infinite loop at the EOF
+ (r'(?=.)', Text, 'expr-statement'),
+ ],
+
+ # space/tab/comment/preproc
+ 'spaces': [
(r'\s+', Whitespace),
- (r'//[^\n\r]*', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
- ],
-
- 'string-single-interpol': [
- (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
- (r'\$\$', String.Escape),
- (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
- include('string-single'),
- ],
-
- 'string-single': [
- (r"'", String.Single, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Single),
- ],
-
- 'string-double': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Double),
- ],
-
- 'string-interpol-close': [
- (r'\$'+ident, String.Interpol),
- (r'\}', String.Interpol, '#pop'),
- ],
-
- 'package': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'import': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\*', Keyword), # wildcard import
- (r'\.', Punctuation, 'import-ident'),
- (r'in', Keyword.Namespace, 'ident'),
- default('#pop'),
- ],
-
- 'import-ident': [
- include('spaces'),
- (r'\*', Keyword, '#pop'), # wildcard import
- (ident, Name.Namespace, '#pop'),
- ],
-
- 'using': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'preproc-error': [
+ (r'//[^\n\r]*', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
+ ],
+
+ 'string-single-interpol': [
+ (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
+ (r'\$\$', String.Escape),
+ (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
+ include('string-single'),
+ ],
+
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Single),
+ ],
+
+ 'string-double': [
+ (r'"', String.Double, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Double),
+ ],
+
+ 'string-interpol-close': [
+ (r'\$'+ident, String.Interpol),
+ (r'\}', String.Interpol, '#pop'),
+ ],
+
+ 'package': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'import': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\*', Keyword), # wildcard import
+ (r'\.', Punctuation, 'import-ident'),
+ (r'in', Keyword.Namespace, 'ident'),
+ default('#pop'),
+ ],
+
+ 'import-ident': [
+ include('spaces'),
+ (r'\*', Keyword, '#pop'), # wildcard import
+ (ident, Name.Namespace, '#pop'),
+ ],
+
+ 'using': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'preproc-error': [
(r'\s+', Whitespace),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- default('#pop'),
- ],
-
- 'preproc-expr': [
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ default('#pop'),
+ ],
+
+ 'preproc-expr': [
(r'\s+', Whitespace),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, '#pop'),
-
- # Float
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]+', Number.Float),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, '#pop'),
+
+ # Float
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]+', Number.Float),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- 'preproc-parenthesis': [
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ 'preproc-parenthesis': [
(r'\s+', Whitespace),
- (r'\)', Comment.Preproc, '#pop'),
- default('preproc-expr-in-parenthesis'),
- ],
-
- 'preproc-expr-chain': [
+ (r'\)', Comment.Preproc, '#pop'),
+ default('preproc-expr-in-parenthesis'),
+ ],
+
+ 'preproc-expr-chain': [
(r'\s+', Whitespace),
- (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
- default('#pop'),
- ],
-
- # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
- 'preproc-expr-in-parenthesis': [
+ (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
+ default('#pop'),
+ ],
+
+ # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
+ 'preproc-expr-in-parenthesis': [
(r'\s+', Whitespace),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc,
- ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc,
+ ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
-
- # String
- (r"'", String.Single,
- ('#pop', 'preproc-expr-chain', 'string-single')),
- (r'"', String.Double,
- ('#pop', 'preproc-expr-chain', 'string-double')),
- ],
-
- 'abstract': [
- include('spaces'),
- default(('#pop', 'abstract-body', 'abstract-relation',
- 'abstract-opaque', 'type-param-constraint', 'type-name')),
- ],
-
- 'abstract-body': [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'class-body')),
- ],
-
- 'abstract-opaque': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
- default('#pop'),
- ],
-
- 'abstract-relation': [
- include('spaces'),
- (r'(?:to|from)', Keyword.Declaration, 'type'),
- (r',', Punctuation),
- default('#pop'),
- ],
-
- 'meta': [
- include('spaces'),
- (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
- ],
-
- # optional colon
- 'meta-colon': [
- include('spaces'),
- (r':', Name.Decorator, '#pop'),
- default('#pop'),
- ],
-
- # same as 'ident' but sets the token to Name.Decorator instead of Name
- 'meta-ident': [
- include('spaces'),
- (ident, Name.Decorator, '#pop'),
- ],
-
- 'meta-body': [
- include('spaces'),
- (r'\(', Name.Decorator, ('#pop', 'meta-call')),
- default('#pop'),
- ],
-
- 'meta-call': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- default(('#pop', 'meta-call-sep', 'expr')),
- ],
-
- 'meta-call-sep': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- (r',', Punctuation, ('#pop', 'meta-call')),
- ],
-
- 'typedef': [
- include('spaces'),
- default(('#pop', 'typedef-body', 'type-param-constraint',
- 'type-name')),
- ],
-
- 'typedef-body': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
- ],
-
- 'enum': [
- include('spaces'),
- default(('#pop', 'enum-body', 'bracket-open',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'enum-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
- ],
-
- 'enum-member': [
- include('spaces'),
- (r'\(', Punctuation,
- ('#pop', 'semicolon', 'flag', 'function-param')),
- default(('#pop', 'semicolon', 'flag')),
- ],
-
- 'class': [
- include('spaces'),
- default(('#pop', 'class-body', 'bracket-open', 'extends',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'extends': [
- include('spaces'),
- (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
- (r',', Punctuation), # the comma is made optional here, since haxe2
- # requires the comma but haxe3 does not allow it
- default('#pop'),
- ],
-
- 'bracket-open': [
- include('spaces'),
- (r'\{', Punctuation, '#pop'),
- ],
-
- 'bracket-close': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'class-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (r'(?:static|public|private|override|dynamic|inline|macro)\b',
- Keyword.Declaration),
- default('class-member'),
- ],
-
- 'class-member': [
- include('spaces'),
- (r'(var)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'var')),
- (r'(function)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'class-method')),
- ],
-
- # local function, anonymous or not
- 'function-local': [
- include('spaces'),
- (ident_no_keyword, Name.Function,
- ('#pop', 'optional-expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- default(('#pop', 'optional-expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- ],
-
- 'optional-expr': [
- include('spaces'),
- include('expr'),
- default('#pop'),
- ],
-
- 'class-method': [
- include('spaces'),
- (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
- 'function-param', 'parenthesis-open',
- 'type-param-constraint')),
- ],
-
- # function arguments
- 'function-param': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (ident_no_keyword, Name,
- ('#pop', 'function-param-sep', 'assign', 'flag')),
- ],
-
- 'function-param-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'function-param')),
- ],
-
- 'prop-get-set': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
- default('#pop'),
- ],
-
- 'prop-get-set-opt': [
- include('spaces'),
- (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
- (ident_no_keyword, Text, '#pop'), # custom getter/setter
- ],
-
- 'expr-statement': [
- include('spaces'),
- # makes the semicolon optional here, just to avoid checking
- # whether the last one is a bracket or not.
- default(('#pop', 'optional-semicolon', 'expr')),
- ],
-
- 'expr': [
- include('spaces'),
- (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
- 'meta-ident', 'meta-colon')),
- (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
- (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
- (r'(?:static|public|private|override|dynamic|inline)\b',
- Keyword.Declaration),
- (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
- 'function-local')),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
- (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
- (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
- (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
- (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
- (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
- (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
- (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
- (r'(?:if)\b', Keyword, ('#pop', 'if')),
- (r'(?:do)\b', Keyword, ('#pop', 'do')),
- (r'(?:while)\b', Keyword, ('#pop', 'while')),
- (r'(?:for)\b', Keyword, ('#pop', 'for')),
- (r'(?:untyped|throw)\b', Keyword),
- (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
- (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
- (r'(?:continue|break)\b', Keyword, '#pop'),
- (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
- (ident_no_keyword, Name, ('#pop', 'expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
+
+ # String
+ (r"'", String.Single,
+ ('#pop', 'preproc-expr-chain', 'string-single')),
+ (r'"', String.Double,
+ ('#pop', 'preproc-expr-chain', 'string-double')),
+ ],
+
+ 'abstract': [
+ include('spaces'),
+ default(('#pop', 'abstract-body', 'abstract-relation',
+ 'abstract-opaque', 'type-param-constraint', 'type-name')),
+ ],
+
+ 'abstract-body': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ ],
+
+ 'abstract-opaque': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
+ default('#pop'),
+ ],
+
+ 'abstract-relation': [
+ include('spaces'),
+ (r'(?:to|from)', Keyword.Declaration, 'type'),
+ (r',', Punctuation),
+ default('#pop'),
+ ],
+
+ 'meta': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
+ ],
+
+ # optional colon
+ 'meta-colon': [
+ include('spaces'),
+ (r':', Name.Decorator, '#pop'),
+ default('#pop'),
+ ],
+
+ # same as 'ident' but sets the token to Name.Decorator instead of Name
+ 'meta-ident': [
+ include('spaces'),
+ (ident, Name.Decorator, '#pop'),
+ ],
+
+ 'meta-body': [
+ include('spaces'),
+ (r'\(', Name.Decorator, ('#pop', 'meta-call')),
+ default('#pop'),
+ ],
+
+ 'meta-call': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ default(('#pop', 'meta-call-sep', 'expr')),
+ ],
+
+ 'meta-call-sep': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ (r',', Punctuation, ('#pop', 'meta-call')),
+ ],
+
+ 'typedef': [
+ include('spaces'),
+ default(('#pop', 'typedef-body', 'type-param-constraint',
+ 'type-name')),
+ ],
+
+ 'typedef-body': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
+ ],
+
+ 'enum': [
+ include('spaces'),
+ default(('#pop', 'enum-body', 'bracket-open',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'enum-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
+ ],
+
+ 'enum-member': [
+ include('spaces'),
+ (r'\(', Punctuation,
+ ('#pop', 'semicolon', 'flag', 'function-param')),
+ default(('#pop', 'semicolon', 'flag')),
+ ],
+
+ 'class': [
+ include('spaces'),
+ default(('#pop', 'class-body', 'bracket-open', 'extends',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'extends': [
+ include('spaces'),
+ (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
+ (r',', Punctuation), # the comma is made optional here, since haxe2
+ # requires the comma but haxe3 does not allow it
+ default('#pop'),
+ ],
+
+ 'bracket-open': [
+ include('spaces'),
+ (r'\{', Punctuation, '#pop'),
+ ],
+
+ 'bracket-close': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'class-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (r'(?:static|public|private|override|dynamic|inline|macro)\b',
+ Keyword.Declaration),
+ default('class-member'),
+ ],
+
+ 'class-member': [
+ include('spaces'),
+ (r'(var)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'var')),
+ (r'(function)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'class-method')),
+ ],
+
+ # local function, anonymous or not
+ 'function-local': [
+ include('spaces'),
+ (ident_no_keyword, Name.Function,
+ ('#pop', 'optional-expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ default(('#pop', 'optional-expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ ],
+
+ 'optional-expr': [
+ include('spaces'),
+ include('expr'),
+ default('#pop'),
+ ],
+
+ 'class-method': [
+ include('spaces'),
+ (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
+ 'function-param', 'parenthesis-open',
+ 'type-param-constraint')),
+ ],
+
+ # function arguments
+ 'function-param': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (ident_no_keyword, Name,
+ ('#pop', 'function-param-sep', 'assign', 'flag')),
+ ],
+
+ 'function-param-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'function-param')),
+ ],
+
+ 'prop-get-set': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
+ default('#pop'),
+ ],
+
+ 'prop-get-set-opt': [
+ include('spaces'),
+ (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
+ (ident_no_keyword, Text, '#pop'), # custom getter/setter
+ ],
+
+ 'expr-statement': [
+ include('spaces'),
+ # makes the semicolon optional here, just to avoid checking
+ # whether the last one is a bracket or not.
+ default(('#pop', 'optional-semicolon', 'expr')),
+ ],
+
+ 'expr': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
+ 'meta-ident', 'meta-colon')),
+ (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
+ (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
+ (r'(?:static|public|private|override|dynamic|inline)\b',
+ Keyword.Declaration),
+ (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
+ 'function-local')),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
+ (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
+ (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
+ (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
+ (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
+ (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
+ (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
+ (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
+ (r'(?:if)\b', Keyword, ('#pop', 'if')),
+ (r'(?:do)\b', Keyword, ('#pop', 'do')),
+ (r'(?:while)\b', Keyword, ('#pop', 'while')),
+ (r'(?:for)\b', Keyword, ('#pop', 'for')),
+ (r'(?:untyped|throw)\b', Keyword),
+ (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
+ (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
+ (r'(?:continue|break)\b', Keyword, '#pop'),
+ (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
+ (ident_no_keyword, Name, ('#pop', 'expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, ('#pop', 'expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
-
- # String
- (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
- (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
-
- # EReg
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
+
+ # String
+ (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
+ (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
+
+ # EReg
(r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
-
- # Array
- (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
- ],
-
- 'expr-chain': [
- include('spaces'),
- (r'(?:\+\+|\-\-)', Operator),
- (binop, Operator, ('#pop', 'expr')),
- (r'(?:in)\b', Keyword, ('#pop', 'expr')),
- (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
- (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
- (r'\[', Punctuation, 'array-access'),
- (r'\(', Punctuation, 'call'),
- default('#pop'),
- ],
-
- # macro reification
- 'macro': [
- include('spaces'),
- include('meta'),
- (r':', Punctuation, ('#pop', 'type')),
-
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
- (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')),
- (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
- (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
-
- default(('#pop', 'expr')),
- ],
-
- 'macro-class': [
- (r'\{', Punctuation, ('#pop', 'class-body')),
- include('class')
- ],
-
- # cast can be written as "cast expr" or "cast(expr, type)"
- 'cast': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'cast-type', 'expr')),
- default(('#pop', 'expr')),
- ],
-
- # optionally give a type as the 2nd argument of cast()
- 'cast-type': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- 'catch': [
- include('spaces'),
- (r'(?:catch)\b', Keyword, ('expr', 'function-param',
- 'parenthesis-open')),
- default('#pop'),
- ],
-
- # do-while loop
- 'do': [
- include('spaces'),
- default(('#pop', 'do-while', 'expr')),
- ],
-
- # the while after do
- 'do-while': [
- include('spaces'),
- (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
- 'parenthesis-open')),
- ],
-
- 'while': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'for': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'if': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
- 'parenthesis')),
- ],
-
- 'else': [
- include('spaces'),
- (r'(?:else)\b', Keyword, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- 'switch': [
- include('spaces'),
- default(('#pop', 'switch-body', 'bracket-open', 'expr')),
- ],
-
- 'switch-body': [
- include('spaces'),
- (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'case': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- default(('#pop', 'case-sep', 'case-guard', 'expr')),
- ],
-
- 'case-sep': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'case')),
- ],
-
- 'case-guard': [
- include('spaces'),
- (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
- default('#pop'),
- ],
-
- # optional multiple expr under a case
- 'case-block': [
- include('spaces'),
- (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
- default('#pop'),
- ],
-
- 'new': [
- include('spaces'),
- default(('#pop', 'call', 'parenthesis-open', 'type')),
- ],
-
- 'array-decl': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- default(('#pop', 'array-decl-sep', 'expr')),
- ],
-
- 'array-decl-sep': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'array-decl')),
- ],
-
- 'array-access': [
- include('spaces'),
- default(('#pop', 'array-access-close', 'expr')),
- ],
-
- 'array-access-close': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- ],
-
- 'comma': [
- include('spaces'),
- (r',', Punctuation, '#pop'),
- ],
-
- 'colon': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- ],
-
- 'semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- ],
-
- 'optional-semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- default('#pop'),
- ],
-
- # identifier that CAN be a Haxe keyword
- 'ident': [
- include('spaces'),
- (ident, Name, '#pop'),
- ],
-
- 'dollar': [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
- default(('#pop', 'expr-chain')),
- ],
-
- 'type-name': [
- include('spaces'),
- (typeid, Name, '#pop'),
- ],
-
- 'type-full-name': [
- include('spaces'),
- (r'\.', Punctuation, 'ident'),
- default('#pop'),
- ],
-
- 'type': [
- include('spaces'),
- (r'\?', Punctuation),
- (ident, Name, ('#pop', 'type-check', 'type-full-name')),
- (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
- (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
- ],
-
- 'type-parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'type')),
- ],
-
- 'type-check': [
- include('spaces'),
- (r'->', Punctuation, ('#pop', 'type')),
- (r'<(?!=)', Punctuation, 'type-param'),
- default('#pop'),
- ],
-
- 'type-struct': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (r'>', Punctuation, ('comma', 'type')),
- (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
- include('class-body'),
- ],
-
- 'type-struct-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-struct')),
- ],
-
- # type-param can be a normal type or a constant literal...
- 'type-param-type': [
- # Float
- (r'\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
+
+ # Array
+ (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
+ ],
+
+ 'expr-chain': [
+ include('spaces'),
+ (r'(?:\+\+|\-\-)', Operator),
+ (binop, Operator, ('#pop', 'expr')),
+ (r'(?:in)\b', Keyword, ('#pop', 'expr')),
+ (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
+ (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
+ (r'\[', Punctuation, 'array-access'),
+ (r'\(', Punctuation, 'call'),
+ default('#pop'),
+ ],
+
+ # macro reification
+ 'macro': [
+ include('spaces'),
+ include('meta'),
+ (r':', Punctuation, ('#pop', 'type')),
+
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
+ (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')),
+ (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
+ (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
+
+ default(('#pop', 'expr')),
+ ],
+
+ 'macro-class': [
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ include('class')
+ ],
+
+ # cast can be written as "cast expr" or "cast(expr, type)"
+ 'cast': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'cast-type', 'expr')),
+ default(('#pop', 'expr')),
+ ],
+
+ # optionally give a type as the 2nd argument of cast()
+ 'cast-type': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ 'catch': [
+ include('spaces'),
+ (r'(?:catch)\b', Keyword, ('expr', 'function-param',
+ 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # do-while loop
+ 'do': [
+ include('spaces'),
+ default(('#pop', 'do-while', 'expr')),
+ ],
+
+ # the while after do
+ 'do-while': [
+ include('spaces'),
+ (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
+ 'parenthesis-open')),
+ ],
+
+ 'while': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'for': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'if': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
+ 'parenthesis')),
+ ],
+
+ 'else': [
+ include('spaces'),
+ (r'(?:else)\b', Keyword, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ 'switch': [
+ include('spaces'),
+ default(('#pop', 'switch-body', 'bracket-open', 'expr')),
+ ],
+
+ 'switch-body': [
+ include('spaces'),
+ (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'case': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ default(('#pop', 'case-sep', 'case-guard', 'expr')),
+ ],
+
+ 'case-sep': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'case')),
+ ],
+
+ 'case-guard': [
+ include('spaces'),
+ (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # optional multiple expr under a case
+ 'case-block': [
+ include('spaces'),
+ (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
+ default('#pop'),
+ ],
+
+ 'new': [
+ include('spaces'),
+ default(('#pop', 'call', 'parenthesis-open', 'type')),
+ ],
+
+ 'array-decl': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ default(('#pop', 'array-decl-sep', 'expr')),
+ ],
+
+ 'array-decl-sep': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'array-decl')),
+ ],
+
+ 'array-access': [
+ include('spaces'),
+ default(('#pop', 'array-access-close', 'expr')),
+ ],
+
+ 'array-access-close': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+
+ 'comma': [
+ include('spaces'),
+ (r',', Punctuation, '#pop'),
+ ],
+
+ 'colon': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ ],
+
+ 'semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ ],
+
+ 'optional-semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ default('#pop'),
+ ],
+
+ # identifier that CAN be a Haxe keyword
+ 'ident': [
+ include('spaces'),
+ (ident, Name, '#pop'),
+ ],
+
+ 'dollar': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
+ default(('#pop', 'expr-chain')),
+ ],
+
+ 'type-name': [
+ include('spaces'),
+ (typeid, Name, '#pop'),
+ ],
+
+ 'type-full-name': [
+ include('spaces'),
+ (r'\.', Punctuation, 'ident'),
+ default('#pop'),
+ ],
+
+ 'type': [
+ include('spaces'),
+ (r'\?', Punctuation),
+ (ident, Name, ('#pop', 'type-check', 'type-full-name')),
+ (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
+ (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
+ ],
+
+ 'type-parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'type')),
+ ],
+
+ 'type-check': [
+ include('spaces'),
+ (r'->', Punctuation, ('#pop', 'type')),
+ (r'<(?!=)', Punctuation, 'type-param'),
+ default('#pop'),
+ ],
+
+ 'type-struct': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (r'>', Punctuation, ('comma', 'type')),
+ (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
+ include('class-body'),
+ ],
+
+ 'type-struct-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-struct')),
+ ],
+
+ # type-param can be a normal type or a constant literal...
+ 'type-param-type': [
+ # Float
+ (r'\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+[eE][+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]*[eE][+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
(r'[0-9]+\.(?!' + ident + r'|\.\.)', Number.Float, '#pop'),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
-
- # EReg
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+
+ # EReg
(r'~/(\\\\|\\[^\\]|[^/\\\n])*/[gim]*', String.Regex, '#pop'),
-
- # Array
- (r'\[', Operator, ('#pop', 'array-decl')),
-
- include('type'),
- ],
-
- # type-param part of a type
- # ie. the <A,B> path in Map<A,B>
- 'type-param': [
- include('spaces'),
- default(('#pop', 'type-param-sep', 'type-param-type')),
- ],
-
- 'type-param-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param')),
- ],
-
- # optional type-param that may include constraint
- # ie. <T:Constraint, T2:(ConstraintA,ConstraintB)>
- 'type-param-constraint': [
- include('spaces'),
- (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- default('#pop'),
- ],
-
- 'type-param-constraint-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- ],
-
- # the optional constraint inside type-param
- 'type-param-constraint-flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
- default('#pop'),
- ],
-
- 'type-param-constraint-flag-type': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
- 'type')),
- default(('#pop', 'type')),
- ],
-
- 'type-param-constraint-flag-type-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, 'type'),
- ],
-
- # a parenthesis expr that contains exactly one expr
- 'parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'flag', 'expr')),
- ],
-
- 'parenthesis-open': [
- include('spaces'),
- (r'\(', Punctuation, '#pop'),
- ],
-
- 'parenthesis-close': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- ],
-
- 'var': [
- include('spaces'),
- (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
- ],
-
- # optionally more var declarations.
- 'var-sep': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'var')),
- default('#pop'),
- ],
-
- # optional assignment
- 'assign': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- # optional type flag
- 'flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- # colon as part of a ternary operator (?:)
- 'ternary': [
- include('spaces'),
- (r':', Operator, '#pop'),
- ],
-
- # function call
- 'call': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- default(('#pop', 'call-sep', 'expr')),
- ],
-
- # after a call param
- 'call-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'call')),
- ],
-
- # bracket can be block or object
- 'bracket': [
- include('spaces'),
- (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
- ('#pop', 'bracket-check')),
- (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
- (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
- default(('#pop', 'block')),
- ],
-
- 'bracket-check': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
- default(('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
- ],
-
- # code block
- 'block': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default('expr-statement'),
- ],
-
- # object in key-value pairs
- 'object': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
- ],
-
- # a key of an object
- 'ident-or-string': [
- include('spaces'),
- (ident_no_keyword, Name, '#pop'),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- # after a key-value pair in object
- 'object-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'object')),
- ],
-
-
-
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
-
-
-class HxmlLexer(RegexLexer):
- """
- Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
-
- .. versionadded:: 1.6
- """
- name = 'Hxml'
- aliases = ['haxeml', 'hxml']
- filenames = ['*.hxml']
-
- tokens = {
- 'root': [
- # Separator
- (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
- # Compiler switches with one dash
- (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
- # Compiler switches with two dashes
- (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
- r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
- # Targets and other options that take an argument
- (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
- r'cp|cmd)( +)(.+)',
- bygroups(Punctuation, Keyword, Whitespace, String)),
- # Options that take only numerical arguments
- (r'(-)(swf-version)( +)(\d+)',
+
+ # Array
+ (r'\[', Operator, ('#pop', 'array-decl')),
+
+ include('type'),
+ ],
+
+ # type-param part of a type
+ # ie. the <A,B> path in Map<A,B>
+ 'type-param': [
+ include('spaces'),
+ default(('#pop', 'type-param-sep', 'type-param-type')),
+ ],
+
+ 'type-param-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param')),
+ ],
+
+ # optional type-param that may include constraint
+ # ie. <T:Constraint, T2:(ConstraintA,ConstraintB)>
+ 'type-param-constraint': [
+ include('spaces'),
+ (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ ],
+
+ # the optional constraint inside type-param
+ 'type-param-constraint-flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-flag-type': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
+ 'type')),
+ default(('#pop', 'type')),
+ ],
+
+ 'type-param-constraint-flag-type-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, 'type'),
+ ],
+
+ # a parenthesis expr that contains exactly one expr
+ 'parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'flag', 'expr')),
+ ],
+
+ 'parenthesis-open': [
+ include('spaces'),
+ (r'\(', Punctuation, '#pop'),
+ ],
+
+ 'parenthesis-close': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+
+ 'var': [
+ include('spaces'),
+ (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
+ ],
+
+ # optionally more var declarations.
+ 'var-sep': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'var')),
+ default('#pop'),
+ ],
+
+ # optional assignment
+ 'assign': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ # optional type flag
+ 'flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ # colon as part of a ternary operator (?:)
+ 'ternary': [
+ include('spaces'),
+ (r':', Operator, '#pop'),
+ ],
+
+ # function call
+ 'call': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ default(('#pop', 'call-sep', 'expr')),
+ ],
+
+ # after a call param
+ 'call-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'call')),
+ ],
+
+ # bracket can be block or object
+ 'bracket': [
+ include('spaces'),
+ (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
+ ('#pop', 'bracket-check')),
+ (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
+ (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
+ default(('#pop', 'block')),
+ ],
+
+ 'bracket-check': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
+ default(('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
+ ],
+
+ # code block
+ 'block': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default('expr-statement'),
+ ],
+
+ # object in key-value pairs
+ 'object': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
+ ],
+
+ # a key of an object
+ 'ident-or-string': [
+ include('spaces'),
+ (ident_no_keyword, Name, '#pop'),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ # after a key-value pair in object
+ 'object-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'object')),
+ ],
+
+
+
+ }
+
+ def analyse_text(text):
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+
+
+class HxmlLexer(RegexLexer):
+ """
+ Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Hxml'
+ aliases = ['haxeml', 'hxml']
+ filenames = ['*.hxml']
+
+ tokens = {
+ 'root': [
+ # Separator
+ (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
+ # Compiler switches with one dash
+ (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
+ # Compiler switches with two dashes
+ (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
+ r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
+ # Targets and other options that take an argument
+ (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
+ r'cp|cmd)( +)(.+)',
+ bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Options that take only numerical arguments
+ (r'(-)(swf-version)( +)(\d+)',
bygroups(Punctuation, Keyword, Whitespace, Number.Integer)),
- # An option that defines the size, the fps and the background
- # color of a flash movie
- (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
- bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
- Punctuation, Number.Integer, Punctuation, Number.Integer,
- Punctuation, Number.Hex)),
- # options with two dashes that take arguments
- (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
- r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
- # Single line comment, multiline ones are not allowed.
- (r'#.*', Comment.Single)
- ]
- }
+ # An option that defines the size, the fps and the background
+ # color of a flash movie
+ (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
+ bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
+ Punctuation, Number.Integer, Punctuation, Number.Integer,
+ Punctuation, Number.Hex)),
+ # options with two dashes that take arguments
+ (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
+ r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Single line comment, multiline ones are not allowed.
+ (r'#.*', Comment.Single)
+ ]
+ }
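
A minimal usage sketch of the Haxe rule table restored above (only the public pygments.lexers.HaxeLexer and Lexer.get_tokens APIs are assumed; the Haxe snippet and its names are illustrative). Feeding a small class through the lexer exercises the state stack defined by the rules, e.g. 'expr', 'class-body' and 'string-single-interpol':

    from pygments.lexers import HaxeLexer

    code = "class Main { static function main() { var who = 'world'; trace('hi ${who}'); } }"

    # get_tokens() runs the state machine defined above and yields
    # (token_type, value) pairs; whitespace-only tokens are skipped here.
    for token_type, value in HaxeLexer().get_tokens(code):
        if value.strip():
            print(token_type, repr(value))
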
diff --git a/contrib/python/Pygments/py3/pygments/lexers/hdl.py b/contrib/python/Pygments/py3/pygments/lexers/hdl.py
index e96f79a475..24e1c36aa1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/hdl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/hdl.py
@@ -1,134 +1,134 @@
-"""
- pygments.lexers.hdl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for hardware description languages.
-
+"""
+ pygments.lexers.hdl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for hardware description languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, include, using, this, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, include, using, this, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
-
-
-class VerilogLexer(RegexLexer):
- """
- For verilog source code with preprocessor directives.
-
- .. versionadded:: 1.4
- """
- name = 'verilog'
- aliases = ['verilog', 'v']
- filenames = ['*.v']
- mimetypes = ['text/x-verilog']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- (r'^\s*`define', Comment.Preproc, 'macro'),
+
+__all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
+
+
+class VerilogLexer(RegexLexer):
+ """
+ For verilog source code with preprocessor directives.
+
+ .. versionadded:: 1.4
+ """
+ name = 'verilog'
+ aliases = ['verilog', 'v']
+ filenames = ['*.v']
+ mimetypes = ['text/x-verilog']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ (r'^\s*`define', Comment.Preproc, 'macro'),
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'[{}#@]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
- (r'([0-9]+)|(\'b)[01]+', Number.Bin),
- (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
- (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
- (r'\'[01xz]', Number),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;\']', Punctuation),
- (r'`[a-zA-Z_]\w*', Name.Constant),
-
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'[{}#@]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex),
+ (r'([0-9]+)|(\'b)[01]+', Number.Bin),
+ (r'([0-9]+)|(\'d)[0-9]+', Number.Integer),
+ (r'([0-9]+)|(\'o)[0-7]+', Number.Oct),
+ (r'\'[01xz]', Number),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;\']', Punctuation),
+ (r'`[a-zA-Z_]\w*', Name.Constant),
+
(r'^(\s*)(package)(\s+)', bygroups(Whitespace, Keyword.Namespace, Text)),
(r'^(\s*)(import)(\s+)', bygroups(Whitespace, Keyword.Namespace, Text),
- 'import'),
-
- (words((
- 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
- 'assign', 'automatic', 'begin', 'break', 'buf', 'bufif0', 'bufif1',
- 'case', 'casex', 'casez', 'cmos', 'const', 'continue', 'deassign',
- 'default', 'defparam', 'disable', 'do', 'edge', 'else', 'end', 'endcase',
- 'endfunction', 'endgenerate', 'endmodule', 'endpackage', 'endprimitive',
- 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'final', 'for',
- 'force', 'forever', 'fork', 'function', 'generate', 'genvar', 'highz0',
- 'highz1', 'if', 'initial', 'inout', 'input', 'integer', 'join', 'large',
- 'localparam', 'macromodule', 'medium', 'module', 'nand', 'negedge',
- 'nmos', 'nor', 'not', 'notif0', 'notif1', 'or', 'output', 'packed',
- 'parameter', 'pmos', 'posedge', 'primitive', 'pull0', 'pull1',
- 'pulldown', 'pullup', 'rcmos', 'ref', 'release', 'repeat', 'return',
- 'rnmos', 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 'scalared', 'signed',
- 'small', 'specify', 'specparam', 'strength', 'string', 'strong0',
- 'strong1', 'struct', 'table', 'task', 'tran', 'tranif0', 'tranif1',
- 'type', 'typedef', 'unsigned', 'var', 'vectored', 'void', 'wait',
- 'weak0', 'weak1', 'while', 'xnor', 'xor'), suffix=r'\b'),
- Keyword),
-
- (words((
- 'accelerate', 'autoexpand_vectornets', 'celldefine', 'default_nettype',
- 'else', 'elsif', 'endcelldefine', 'endif', 'endprotect', 'endprotected',
- 'expand_vectornets', 'ifdef', 'ifndef', 'include', 'noaccelerate',
- 'noexpand_vectornets', 'noremove_gatenames', 'noremove_netnames',
- 'nounconnected_drive', 'protect', 'protected', 'remove_gatenames',
- 'remove_netnames', 'resetall', 'timescale', 'unconnected_drive',
- 'undef'), prefix=r'`', suffix=r'\b'),
- Comment.Preproc),
-
- (words((
- 'bits', 'bitstoreal', 'bitstoshortreal', 'countdrivers', 'display', 'fclose',
- 'fdisplay', 'finish', 'floor', 'fmonitor', 'fopen', 'fstrobe', 'fwrite',
- 'getpattern', 'history', 'incsave', 'input', 'itor', 'key', 'list', 'log',
- 'monitor', 'monitoroff', 'monitoron', 'nokey', 'nolog', 'printtimescale',
- 'random', 'readmemb', 'readmemh', 'realtime', 'realtobits', 'reset',
- 'reset_count', 'reset_value', 'restart', 'rtoi', 'save', 'scale', 'scope',
- 'shortrealtobits', 'showscopes', 'showvariables', 'showvars', 'sreadmemb',
- 'sreadmemh', 'stime', 'stop', 'strobe', 'time', 'timeformat', 'write'),
- prefix=r'\$', suffix=r'\b'),
- Name.Builtin),
-
- (words((
- 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
- 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
+ 'import'),
+
+ (words((
+ 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
+ 'assign', 'automatic', 'begin', 'break', 'buf', 'bufif0', 'bufif1',
+ 'case', 'casex', 'casez', 'cmos', 'const', 'continue', 'deassign',
+ 'default', 'defparam', 'disable', 'do', 'edge', 'else', 'end', 'endcase',
+ 'endfunction', 'endgenerate', 'endmodule', 'endpackage', 'endprimitive',
+ 'endspecify', 'endtable', 'endtask', 'enum', 'event', 'final', 'for',
+ 'force', 'forever', 'fork', 'function', 'generate', 'genvar', 'highz0',
+ 'highz1', 'if', 'initial', 'inout', 'input', 'integer', 'join', 'large',
+ 'localparam', 'macromodule', 'medium', 'module', 'nand', 'negedge',
+ 'nmos', 'nor', 'not', 'notif0', 'notif1', 'or', 'output', 'packed',
+ 'parameter', 'pmos', 'posedge', 'primitive', 'pull0', 'pull1',
+ 'pulldown', 'pullup', 'rcmos', 'ref', 'release', 'repeat', 'return',
+ 'rnmos', 'rpmos', 'rtran', 'rtranif0', 'rtranif1', 'scalared', 'signed',
+ 'small', 'specify', 'specparam', 'strength', 'string', 'strong0',
+ 'strong1', 'struct', 'table', 'task', 'tran', 'tranif0', 'tranif1',
+ 'type', 'typedef', 'unsigned', 'var', 'vectored', 'void', 'wait',
+ 'weak0', 'weak1', 'while', 'xnor', 'xor'), suffix=r'\b'),
+ Keyword),
+
+ (words((
+ 'accelerate', 'autoexpand_vectornets', 'celldefine', 'default_nettype',
+ 'else', 'elsif', 'endcelldefine', 'endif', 'endprotect', 'endprotected',
+ 'expand_vectornets', 'ifdef', 'ifndef', 'include', 'noaccelerate',
+ 'noexpand_vectornets', 'noremove_gatenames', 'noremove_netnames',
+ 'nounconnected_drive', 'protect', 'protected', 'remove_gatenames',
+ 'remove_netnames', 'resetall', 'timescale', 'unconnected_drive',
+ 'undef'), prefix=r'`', suffix=r'\b'),
+ Comment.Preproc),
+
+ (words((
+ 'bits', 'bitstoreal', 'bitstoshortreal', 'countdrivers', 'display', 'fclose',
+ 'fdisplay', 'finish', 'floor', 'fmonitor', 'fopen', 'fstrobe', 'fwrite',
+ 'getpattern', 'history', 'incsave', 'input', 'itor', 'key', 'list', 'log',
+ 'monitor', 'monitoroff', 'monitoron', 'nokey', 'nolog', 'printtimescale',
+ 'random', 'readmemb', 'readmemh', 'realtime', 'realtobits', 'reset',
+ 'reset_count', 'reset_value', 'restart', 'rtoi', 'save', 'scale', 'scope',
+ 'shortrealtobits', 'showscopes', 'showvariables', 'showvars', 'sreadmemb',
+ 'sreadmemh', 'stime', 'stop', 'strobe', 'time', 'timeformat', 'write'),
+ prefix=r'\$', suffix=r'\b'),
+ Name.Builtin),
+
+ (words((
+ 'byte', 'shortint', 'int', 'longint', 'integer', 'time',
+ 'bit', 'logic', 'reg', 'supply0', 'supply1', 'tri', 'triand',
 'trior', 'tri0', 'tri1', 'trireg', 'uwire', 'wire', 'wand', 'wor',
- 'shortreal', 'real', 'realtime'), suffix=r'\b'),
- Keyword.Type),
+ 'shortreal', 'real', 'realtime'), suffix=r'\b'),
+ Keyword.Type),
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'\$?[a-zA-Z_]\w*', Name),
(r'\\(\S+)', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
(r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
(r'\n', Whitespace, '#pop'),
- ],
- 'import': [
- (r'[\w:]+\*?', Name.Namespace, '#pop')
- ]
- }
-
+ ],
+ 'import': [
+ (r'[\w:]+\*?', Name.Namespace, '#pop')
+ ]
+ }
+
def analyse_text(text):
"""Verilog code will use one of reg/wire/assign for sure, and that
is not common elsewhere."""
@@ -139,41 +139,41 @@ class VerilogLexer(RegexLexer):
result += 0.1
if 'assign' in text:
result += 0.1
-
+
return result
-class SystemVerilogLexer(RegexLexer):
- """
- Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
- 1800-2009 standard.
-
- .. versionadded:: 1.5
- """
- name = 'systemverilog'
- aliases = ['systemverilog', 'sv']
- filenames = ['*.sv', '*.svh']
- mimetypes = ['text/x-systemverilog']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
+class SystemVerilogLexer(RegexLexer):
+ """
+ Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
+ 1800-2009 standard.
+
+ .. versionadded:: 1.5
+ """
+ name = 'systemverilog'
+ aliases = ['systemverilog', 'sv']
+ filenames = ['*.sv', '*.svh']
+ mimetypes = ['text/x-systemverilog']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
(r'^(\s*)(`define)', bygroups(Whitespace, Comment.Preproc), 'macro'),
(r'^(\s*)(package)(\s+)', bygroups(Whitespace, Keyword.Namespace, Whitespace)),
(r'^(\s*)(import)(\s+)', bygroups(Whitespace, Keyword.Namespace, Whitespace), 'import'),
-
+
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'[{}#@]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'[{}#@]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
(r'([1-9][_0-9]*)?\s*\'[sS]?[bB]\s*[xXzZ?01][_xXzZ?01]*',
Number.Bin),
@@ -187,13 +187,13 @@ class SystemVerilogLexer(RegexLexer):
(r'\'[01xXzZ]', Number),
(r'[0-9][_0-9]*', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
(words(('inside', 'dist'), suffix=r'\b'), Operator.Word),
(r'[()\[\],.;\'$]', Punctuation),
- (r'`[a-zA-Z_]\w*', Name.Constant),
-
- (words((
+ (r'`[a-zA-Z_]\w*', Name.Constant),
+
+ (words((
'accept_on', 'alias', 'always', 'always_comb', 'always_ff',
'always_latch', 'and', 'assert', 'assign', 'assume', 'automatic',
'before', 'begin', 'bind', 'bins', 'binsof', 'break', 'buf',
@@ -235,8 +235,8 @@ class SystemVerilogLexer(RegexLexer):
'weak1', 'while', 'wildcard', 'with', 'within',
'xnor', 'xor'),
suffix=r'\b'),
- Keyword),
-
+ Keyword),
+
(r'(class)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Whitespace, Name.Class)),
(r'(extends)(\s+)([a-zA-Z_]\w*)',
@@ -244,7 +244,7 @@ class SystemVerilogLexer(RegexLexer):
(r'(endclass\b)(?:(\s*)(:)(\s*)([a-zA-Z_]\w*))?',
bygroups(Keyword.Declaration, Whitespace, Punctuation, Whitespace, Name.Class)),
- (words((
+ (words((
# Variable types
'bit', 'byte', 'chandle', 'const', 'event', 'int', 'integer',
'logic', 'longint', 'real', 'realtime', 'reg', 'shortint',
@@ -262,10 +262,10 @@ class SystemVerilogLexer(RegexLexer):
'`endcelldefine', '`endif', '`ifdef', '`ifndef', '`include',
'`line', '`nounconnected_drive', '`pragma', '`resetall',
'`timescale', '`unconnected_drive', '`undef', '`undefineall'),
- suffix=r'\b'),
- Comment.Preproc),
-
- (words((
+ suffix=r'\b'),
+ Comment.Preproc),
+
+ (words((
# Simulation control tasks (20.2)
'$exit', '$finish', '$stop',
# Simulation time functions (20.3)
@@ -343,123 +343,123 @@ class SystemVerilogLexer(RegexLexer):
'$dumpon', '$dumpports', '$dumpportsall', '$dumpportsflush',
'$dumpportslimit', '$dumpportsoff', '$dumpportson', '$dumpvars',
), suffix=r'\b'),
- Name.Builtin),
-
+ Name.Builtin),
+
(r'[a-zA-Z_]\w*:(?!:)', Name.Label),
(r'\$?[a-zA-Z_]\w*', Name),
(r'\\(\S+)', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
(r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
(r'//.*?$', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
(r'\n', Whitespace, '#pop'),
- ],
- 'import': [
- (r'[\w:]+\*?', Name.Namespace, '#pop')
- ]
- }
-
-
-class VhdlLexer(RegexLexer):
- """
- For VHDL source code.
-
- .. versionadded:: 1.5
- """
- name = 'vhdl'
- aliases = ['vhdl']
- filenames = ['*.vhdl', '*.vhd']
- mimetypes = ['text/x-vhdl']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
+ ],
+ 'import': [
+ (r'[\w:]+\*?', Name.Namespace, '#pop')
+ ]
+ }
+
+
+class VhdlLexer(RegexLexer):
+ """
+ For VHDL source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'vhdl'
+ aliases = ['vhdl']
+ filenames = ['*.vhdl', '*.vhd']
+ mimetypes = ['text/x-vhdl']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
- (r'--.*?$', Comment.Single),
- (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r"'[a-z_]\w*", Name.Attribute),
- (r'[()\[\],.;\']', Punctuation),
+ (r'--.*?$', Comment.Single),
+ (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r"'[a-z_]\w*", Name.Attribute),
+ (r'[()\[\],.;\']', Punctuation),
(r'"[^\n\\"]*"', String),
-
- (r'(library)(\s+)([a-z_]\w*)',
+
+ (r'(library)(\s+)([a-z_]\w*)',
bygroups(Keyword, Whitespace, Name.Namespace)),
(r'(use)(\s+)(entity)', bygroups(Keyword, Whitespace, Keyword)),
(r'(use)(\s+)([a-z_][\w.]*\.)(all)',
bygroups(Keyword, Whitespace, Name.Namespace, Keyword)),
- (r'(use)(\s+)([a-z_][\w.]*)',
+ (r'(use)(\s+)([a-z_][\w.]*)',
bygroups(Keyword, Whitespace, Name.Namespace)),
(r'(std|ieee)(\.[a-z_]\w*)',
bygroups(Name.Namespace, Name.Namespace)),
(words(('std', 'ieee', 'work'), suffix=r'\b'),
Name.Namespace),
- (r'(entity|component)(\s+)([a-z_]\w*)',
+ (r'(entity|component)(\s+)([a-z_]\w*)',
bygroups(Keyword, Whitespace, Name.Class)),
- (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)'
- r'(of)(\s+)([a-z_]\w*)(\s+)(is)',
+ (r'(architecture|configuration)(\s+)([a-z_]\w*)(\s+)'
+ r'(of)(\s+)([a-z_]\w*)(\s+)(is)',
bygroups(Keyword, Whitespace, Name.Class, Whitespace, Keyword, Whitespace,
Name.Class, Whitespace, Keyword)),
(r'([a-z_]\w*)(:)(\s+)(process|for)',
bygroups(Name.Class, Operator, Whitespace, Keyword)),
(r'(end)(\s+)', bygroups(using(this), Whitespace), 'endblock'),
-
- include('types'),
- include('keywords'),
- include('numbers'),
-
- (r'[a-z_]\w*', Name),
- ],
- 'endblock': [
- include('keywords'),
- (r'[a-z_]\w*', Name.Class),
+
+ include('types'),
+ include('keywords'),
+ include('numbers'),
+
+ (r'[a-z_]\w*', Name),
+ ],
+ 'endblock': [
+ include('keywords'),
+ (r'[a-z_]\w*', Name.Class),
(r'\s+', Whitespace),
- (r';', Punctuation, '#pop'),
- ],
- 'types': [
- (words((
- 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time',
- 'delay_length', 'natural', 'positive', 'string', 'bit_vector',
- 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector',
+ (r';', Punctuation, '#pop'),
+ ],
+ 'types': [
+ (words((
+ 'boolean', 'bit', 'character', 'severity_level', 'integer', 'time',
+ 'delay_length', 'natural', 'positive', 'string', 'bit_vector',
+ 'file_open_kind', 'file_open_status', 'std_ulogic', 'std_ulogic_vector',
'std_logic', 'std_logic_vector', 'signed', 'unsigned'), suffix=r'\b'),
- Keyword.Type),
- ],
- 'keywords': [
- (words((
- 'abs', 'access', 'after', 'alias', 'all', 'and',
- 'architecture', 'array', 'assert', 'attribute', 'begin', 'block',
- 'body', 'buffer', 'bus', 'case', 'component', 'configuration',
- 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end',
- 'entity', 'exit', 'file', 'for', 'function', 'generate',
- 'generic', 'group', 'guarded', 'if', 'impure', 'in',
- 'inertial', 'inout', 'is', 'label', 'library', 'linkage',
- 'literal', 'loop', 'map', 'mod', 'nand', 'new',
- 'next', 'nor', 'not', 'null', 'of', 'on',
- 'open', 'or', 'others', 'out', 'package', 'port',
- 'postponed', 'procedure', 'process', 'pure', 'range', 'record',
+ Keyword.Type),
+ ],
+ 'keywords': [
+ (words((
+ 'abs', 'access', 'after', 'alias', 'all', 'and',
+ 'architecture', 'array', 'assert', 'attribute', 'begin', 'block',
+ 'body', 'buffer', 'bus', 'case', 'component', 'configuration',
+ 'constant', 'disconnect', 'downto', 'else', 'elsif', 'end',
+ 'entity', 'exit', 'file', 'for', 'function', 'generate',
+ 'generic', 'group', 'guarded', 'if', 'impure', 'in',
+ 'inertial', 'inout', 'is', 'label', 'library', 'linkage',
+ 'literal', 'loop', 'map', 'mod', 'nand', 'new',
+ 'next', 'nor', 'not', 'null', 'of', 'on',
+ 'open', 'or', 'others', 'out', 'package', 'port',
+ 'postponed', 'procedure', 'process', 'pure', 'range', 'record',
'register', 'reject', 'rem', 'return', 'rol', 'ror', 'select',
'severity', 'signal', 'shared', 'sla', 'sll', 'sra',
- 'srl', 'subtype', 'then', 'to', 'transport', 'type',
- 'units', 'until', 'use', 'variable', 'wait', 'when',
- 'while', 'with', 'xnor', 'xor'), suffix=r'\b'),
- Keyword),
- ],
- 'numbers': [
- (r'\d{1,2}#[0-9a-f_]+#?', Number.Integer),
- (r'\d+', Number.Integer),
- (r'(\d+\.\d*|\.\d+|\d+)E[+-]?\d+', Number.Float),
- (r'X"[0-9a-f_]+"', Number.Hex),
- (r'O"[0-7_]+"', Number.Oct),
- (r'B"[01_]+"', Number.Bin),
- ],
- }
+ 'srl', 'subtype', 'then', 'to', 'transport', 'type',
+ 'units', 'until', 'use', 'variable', 'wait', 'when',
+ 'while', 'with', 'xnor', 'xor'), suffix=r'\b'),
+ Keyword),
+ ],
+ 'numbers': [
+ (r'\d{1,2}#[0-9a-f_]+#?', Number.Integer),
+ (r'\d+', Number.Integer),
+ (r'(\d+\.\d*|\.\d+|\d+)E[+-]?\d+', Number.Float),
+ (r'X"[0-9a-f_]+"', Number.Hex),
+ (r'O"[0-7_]+"', Number.Oct),
+ (r'B"[01_]+"', Number.Bin),
+ ],
+ }
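The restored VhdlLexer can be exercised through the regular Pygments API. The following is a minimal sketch only; the short VHDL fragment and the choice of TerminalFormatter are illustrative assumptions, not part of this change:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.hdl import VhdlLexer

# Tiny made-up VHDL fragment: the library/use clauses and the entity name
# should be tokenized as Keyword / Name.Namespace / Name.Class by the rules above.
vhdl_src = "library ieee;\nuse ieee.std_logic_1164.all;\nentity counter is\nend counter;\n"
print(highlight(vhdl_src, VhdlLexer(), TerminalFormatter()))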
diff --git a/contrib/python/Pygments/py3/pygments/lexers/hexdump.py b/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
index 041d7f6c25..b15cdd75ed 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
@@ -1,53 +1,53 @@
-"""
- pygments.lexers.hexdump
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for hexadecimal dumps.
-
+"""
+ pygments.lexers.hexdump
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for hexadecimal dumps.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import Text, Name, Number, String, Punctuation, Whitespace
+
+__all__ = ['HexdumpLexer']
+
+
+class HexdumpLexer(RegexLexer):
+ """
+ For typical hex dump output formats by the UNIX and GNU/Linux tools ``hexdump``,
+ ``hd``, ``hexcat``, ``od`` and ``xxd``, and the DOS tool ``DEBUG``. For example:
+
+ .. sourcecode:: hexdump
+
+ 00000000 7f 45 4c 46 02 01 01 00 00 00 00 00 00 00 00 00 |.ELF............|
+ 00000010 02 00 3e 00 01 00 00 00 c5 48 40 00 00 00 00 00 |..>......H@.....|
+
+ The specific supported formats are the outputs of:
+
+ * ``hexdump FILE``
+ * ``hexdump -C FILE`` -- the `canonical` format used in the example.
+ * ``hd FILE`` -- same as ``hexdump -C FILE``.
+ * ``hexcat FILE``
+ * ``od -t x1z FILE``
+ * ``xxd FILE``
+ * ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
-__all__ = ['HexdumpLexer']
-
-
-class HexdumpLexer(RegexLexer):
- """
- For typical hex dump output formats by the UNIX and GNU/Linux tools ``hexdump``,
- ``hd``, ``hexcat``, ``od`` and ``xxd``, and the DOS tool ``DEBUG``. For example:
-
- .. sourcecode:: hexdump
-
- 00000000 7f 45 4c 46 02 01 01 00 00 00 00 00 00 00 00 00 |.ELF............|
- 00000010 02 00 3e 00 01 00 00 00 c5 48 40 00 00 00 00 00 |..>......H@.....|
-
- The specific supported formats are the outputs of:
-
- * ``hexdump FILE``
- * ``hexdump -C FILE`` -- the `canonical` format used in the example.
- * ``hd FILE`` -- same as ``hexdump -C FILE``.
- * ``hexcat FILE``
- * ``od -t x1z FILE``
- * ``xxd FILE``
- * ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
-
- .. versionadded:: 2.1
- """
- name = 'Hexdump'
- aliases = ['hexdump']
-
- hd = r'[0-9A-Ha-h]'
-
- tokens = {
- 'root': [
+ .. versionadded:: 2.1
+ """
+ name = 'Hexdump'
+ aliases = ['hexdump']
+
+ hd = r'[0-9A-Ha-h]'
+
+ tokens = {
+ 'root': [
(r'\n', Whitespace),
- include('offset'),
+ include('offset'),
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
bygroups(Number.Hex, Punctuation, Number.Hex)),
- (hd+r'{2}', Number.Hex),
+ (hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\>)(.{16})(\<)$',
bygroups(Whitespace, Punctuation, String, Punctuation), 'bracket-strings'),
(r'(\s{2,3})(\|)(.{16})(\|)$',
@@ -59,44 +59,44 @@ class HexdumpLexer(RegexLexer):
(r'(\s{2,3})(.{1,15})$', bygroups(Whitespace, String)),
(r'(\s{2,3})(.{16}|.{20})$', bygroups(Whitespace, String), 'nonpiped-strings'),
(r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- 'offset': [
- (r'^('+hd+'+)(:)', bygroups(Name.Label, Punctuation), 'offset-mode'),
- (r'^'+hd+'+', Name.Label),
- ],
- 'offset-mode': [
+ (r'^\*', Punctuation),
+ ],
+ 'offset': [
+ (r'^('+hd+'+)(:)', bygroups(Name.Label, Punctuation), 'offset-mode'),
+ (r'^'+hd+'+', Name.Label),
+ ],
+ 'offset-mode': [
(r'\s', Whitespace, '#pop'),
- (hd+'+', Name.Label),
- (r':', Punctuation)
- ],
- 'piped-strings': [
+ (hd+'+', Name.Label),
+ (r':', Punctuation)
+ ],
+ 'piped-strings': [
(r'\n', Whitespace),
- include('offset'),
- (hd+r'{2}', Number.Hex),
+ include('offset'),
+ (hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\|)(.{1,16})(\|)$',
bygroups(Whitespace, Punctuation, String, Punctuation)),
(r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- 'bracket-strings': [
+ (r'^\*', Punctuation),
+ ],
+ 'bracket-strings': [
(r'\n', Whitespace),
- include('offset'),
- (hd+r'{2}', Number.Hex),
+ include('offset'),
+ (hd+r'{2}', Number.Hex),
(r'(\s{2,3})(\>)(.{1,16})(\<)$',
bygroups(Whitespace, Punctuation, String, Punctuation)),
(r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- 'nonpiped-strings': [
+ (r'^\*', Punctuation),
+ ],
+ 'nonpiped-strings': [
(r'\n', Whitespace),
- include('offset'),
+ include('offset'),
(r'('+hd+r'{2})(\-)('+hd+r'{2})',
bygroups(Number.Hex, Punctuation, Number.Hex)),
- (hd+r'{2}', Number.Hex),
+ (hd+r'{2}', Number.Hex),
(r'(\s{19,})(.{1,20}?)$', bygroups(Whitespace, String)),
(r'(\s{2,3})(.{1,20})$', bygroups(Whitespace, String)),
(r'\s', Whitespace),
- (r'^\*', Punctuation),
- ],
- }
+ (r'^\*', Punctuation),
+ ],
+ }
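As a quick sanity check of the HexdumpLexer rules above, the token stream can be inspected directly. This is a sketch only; the sample line is adapted from the docstring example rather than taken from a real dump:

from pygments.lexers.hexdump import HexdumpLexer

sample = ('00000000  7f 45 4c 46 02 01 01 00  00 00 00 00 00 00 00 00  '
          '|.ELF............|\n')
for token_type, value in HexdumpLexer().get_tokens(sample):
    # The offset should come back as Name.Label, the byte pairs as Number.Hex,
    # and the trailing |....| column as Punctuation/String (piped-strings state).
    print(token_type, repr(value))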
diff --git a/contrib/python/Pygments/py3/pygments/lexers/html.py b/contrib/python/Pygments/py3/pygments/lexers/html.py
index 2e29f453cd..07daae481f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/html.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/html.py
@@ -1,603 +1,603 @@
-"""
- pygments.lexers.html
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for HTML, XML and related markup.
-
+"""
+ pygments.lexers.html
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for HTML, XML and related markup.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- default, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation
-from pygments.util import looks_like_xml, html_doctype_matches
-
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.jvm import ScalaLexer
-from pygments.lexers.css import CssLexer, _indentation, _starts_block
-from pygments.lexers.ruby import RubyLexer
-
-__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+from pygments.util import looks_like_xml, html_doctype_matches
+
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.jvm import ScalaLexer
+from pygments.lexers.css import CssLexer, _indentation, _starts_block
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
'ScamlLexer', 'PugLexer']
-
-
-class HtmlLexer(RegexLexer):
- """
- For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is highlighted
- by the appropriate lexer.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
- mimetypes = ['text/html', 'application/xhtml+xml']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+
+
+class HtmlLexer(RegexLexer):
+ """
+ For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is highlighted
+ by the appropriate lexer.
+ """
+
+ name = 'HTML'
+ aliases = ['html']
+ filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
+ mimetypes = ['text/html', 'application/xhtml+xml']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
(r'<!--(.|\n)*?-->', Comment.Multiline),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'(<)(\s*)(script)(\s*)',
- bygroups(Punctuation, Text, Name.Tag, Text),
- ('script-content', 'tag')),
- (r'(<)(\s*)(style)(\s*)',
- bygroups(Punctuation, Text, Name.Tag, Text),
- ('style-content', 'tag')),
- # note: this allows tag names not used in HTML like <x:with-dash>,
- # this is to support yet-unknown template engines and the like
- (r'(<)(\s*)([\w:.-]+)',
- bygroups(Punctuation, Text, Name.Tag), 'tag'),
- (r'(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation)),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'([\w:-]+\s*)(=)(\s*)', bygroups(Name.Attribute, Operator, Text),
- 'attr'),
- (r'[\w:-]+', Name.Attribute),
- (r'(/?)(\s*)(>)', bygroups(Punctuation, Text, Punctuation), '#pop'),
- ],
- 'script-content': [
- (r'(<)(\s*)(/)(\s*)(script)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation), '#pop'),
- (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'(<)(\s*)(script)(\s*)',
+ bygroups(Punctuation, Text, Name.Tag, Text),
+ ('script-content', 'tag')),
+ (r'(<)(\s*)(style)(\s*)',
+ bygroups(Punctuation, Text, Name.Tag, Text),
+ ('style-content', 'tag')),
+ # note: this allows tag names not used in HTML like <x:with-dash>,
+ # this is to support yet-unknown template engines and the like
+ (r'(<)(\s*)([\w:.-]+)',
+ bygroups(Punctuation, Text, Name.Tag), 'tag'),
+ (r'(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation)),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'([\w:-]+\s*)(=)(\s*)', bygroups(Name.Attribute, Operator, Text),
+ 'attr'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'(/?)(\s*)(>)', bygroups(Punctuation, Text, Punctuation), '#pop'),
+ ],
+ 'script-content': [
+ (r'(<)(\s*)(/)(\s*)(script)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation), '#pop'),
+ (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
# fallback cases for when there is no closing script tag
# first look for newline and then go back into root state
# if that fails just read the rest of the file
# this is similar to the error handling logic in lexer.py
(r'.+?\n', using(JavascriptLexer), '#pop'),
(r'.+', using(JavascriptLexer), '#pop'),
- ],
- 'style-content': [
- (r'(<)(\s*)(/)(\s*)(style)(\s*)(>)',
- bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
- Punctuation),'#pop'),
- (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
+ ],
+ 'style-content': [
+ (r'(<)(\s*)(/)(\s*)(style)(\s*)(>)',
+ bygroups(Punctuation, Text, Punctuation, Text, Name.Tag, Text,
+ Punctuation),'#pop'),
+ (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
# fallback cases for when there is no closing style tag
# first look for newline and then go back into root state
# if that fails just read the rest of the file
# this is similar to the error handling logic in lexer.py
(r'.+?\n', using(CssLexer), '#pop'),
(r'.+', using(CssLexer), '#pop'),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if html_doctype_matches(text):
- return 0.5
-
-
-class DtdLexer(RegexLexer):
- """
- A lexer for DTDs (Document Type Definitions).
-
- .. versionadded:: 1.5
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- name = 'DTD'
- aliases = ['dtd']
- filenames = ['*.dtd']
- mimetypes = ['application/xml-dtd']
-
- tokens = {
- 'root': [
- include('common'),
-
- (r'(<!ELEMENT)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'element'),
- (r'(<!ATTLIST)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'attlist'),
- (r'(<!ENTITY)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Entity), 'entity'),
- (r'(<!NOTATION)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'notation'),
- (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
- bygroups(Keyword, Name.Entity, Text, Keyword)),
-
- (r'(<!DOCTYPE)(\s+)([^>\s]+)',
- bygroups(Keyword, Text, Name.Tag)),
- (r'PUBLIC|SYSTEM', Keyword.Constant),
- (r'[\[\]>]', Keyword),
- ],
-
- 'common': [
- (r'\s+', Text),
- (r'(%|&)[^;]*;', Name.Entity),
- ('<!--', Comment, 'comment'),
- (r'[(|)*,?+]', Operator),
- (r'"[^"]*"', String.Double),
- (r'\'[^\']*\'', String.Single),
- ],
-
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
-
- 'element': [
- include('common'),
- (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Tag),
- (r'>', Keyword, '#pop'),
- ],
-
- 'attlist': [
- include('common'),
- (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
- Keyword.Constant),
- (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
- (r'xml:space|xml:lang', Keyword.Reserved),
- (r'[^>\s|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
-
- 'entity': [
- include('common'),
- (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Entity),
- (r'>', Keyword, '#pop'),
- ],
-
- 'notation': [
- include('common'),
- (r'SYSTEM|PUBLIC', Keyword.Constant),
- (r'[^>\s|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if not looks_like_xml(text) and \
- ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
- return 0.8
-
-
-class XmlLexer(RegexLexer):
- """
- Generic lexer for XML (eXtensible Markup Language).
- """
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- name = 'XML'
- aliases = ['xml']
- filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
- '*.wsdl', '*.wsf']
- mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
- 'application/rss+xml', 'application/atom+xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if html_doctype_matches(text):
+ return 0.5
+
+
+class DtdLexer(RegexLexer):
+ """
+ A lexer for DTDs (Document Type Definitions).
+
+ .. versionadded:: 1.5
+ """
+
+ flags = re.MULTILINE | re.DOTALL
+
+ name = 'DTD'
+ aliases = ['dtd']
+ filenames = ['*.dtd']
+ mimetypes = ['application/xml-dtd']
+
+ tokens = {
+ 'root': [
+ include('common'),
+
+ (r'(<!ELEMENT)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'element'),
+ (r'(<!ATTLIST)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'attlist'),
+ (r'(<!ENTITY)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Entity), 'entity'),
+ (r'(<!NOTATION)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'notation'),
+ (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
+ bygroups(Keyword, Name.Entity, Text, Keyword)),
+
+ (r'(<!DOCTYPE)(\s+)([^>\s]+)',
+ bygroups(Keyword, Text, Name.Tag)),
+ (r'PUBLIC|SYSTEM', Keyword.Constant),
+ (r'[\[\]>]', Keyword),
+ ],
+
+ 'common': [
+ (r'\s+', Text),
+ (r'(%|&)[^;]*;', Name.Entity),
+ ('<!--', Comment, 'comment'),
+ (r'[(|)*,?+]', Operator),
+ (r'"[^"]*"', String.Double),
+ (r'\'[^\']*\'', String.Single),
+ ],
+
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+
+ 'element': [
+ include('common'),
+ (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Tag),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'attlist': [
+ include('common'),
+ (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
+ Keyword.Constant),
+ (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
+ (r'xml:space|xml:lang', Keyword.Reserved),
+ (r'[^>\s|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'entity': [
+ include('common'),
+ (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Entity),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'notation': [
+ include('common'),
+ (r'SYSTEM|PUBLIC', Keyword.Constant),
+ (r'[^>\s|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if not looks_like_xml(text) and \
+ ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
+ return 0.8
+
+
+class XmlLexer(RegexLexer):
+ """
+ Generic lexer for XML (eXtensible Markup Language).
+ """
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ name = 'XML'
+ aliases = ['xml']
+ filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
+ '*.wsdl', '*.wsf']
+ mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
+ 'application/rss+xml', 'application/atom+xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
(r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
(r'<!--(.|\n)*?-->', Comment.Multiline),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
(r'\s+', Text),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if looks_like_xml(text):
- return 0.45 # less than HTML
-
-
-class XsltLexer(XmlLexer):
- """
- A lexer for XSLT.
-
- .. versionadded:: 0.10
- """
-
- name = 'XSLT'
- aliases = ['xslt']
- filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
- mimetypes = ['application/xsl+xml', 'application/xslt+xml']
-
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if looks_like_xml(text):
+ return 0.45 # less than HTML
+
+
+class XsltLexer(XmlLexer):
+ """
+ A lexer for XSLT.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'XSLT'
+ aliases = ['xslt']
+ filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
+ mimetypes = ['application/xsl+xml', 'application/xslt+xml']
+
EXTRA_KEYWORDS = {
- 'apply-imports', 'apply-templates', 'attribute',
- 'attribute-set', 'call-template', 'choose', 'comment',
- 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
- 'for-each', 'if', 'import', 'include', 'key', 'message',
- 'namespace-alias', 'number', 'otherwise', 'output', 'param',
- 'preserve-space', 'processing-instruction', 'sort',
- 'strip-space', 'stylesheet', 'template', 'text', 'transform',
- 'value-of', 'variable', 'when', 'with-param'
+ 'apply-imports', 'apply-templates', 'attribute',
+ 'attribute-set', 'call-template', 'choose', 'comment',
+ 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
+ 'for-each', 'if', 'import', 'include', 'key', 'message',
+ 'namespace-alias', 'number', 'otherwise', 'output', 'param',
+ 'preserve-space', 'processing-instruction', 'sort',
+ 'strip-space', 'stylesheet', 'template', 'text', 'transform',
+ 'value-of', 'variable', 'when', 'with-param'
}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
- m = re.match('</?xsl:([^>]*)/?>?', value)
-
- if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
- yield index, Keyword, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- if looks_like_xml(text) and '<xsl' in text:
- return 0.8
-
-
-class HamlLexer(ExtendedRegexLexer):
- """
- For Haml markup.
-
- .. versionadded:: 1.3
- """
-
- name = 'Haml'
- aliases = ['haml']
- filenames = ['*.haml']
- mimetypes = ['text/x-haml']
-
- flags = re.IGNORECASE
- # Haml can include " |\n" anywhere,
- # which is ignored and used to wrap long lines.
- # To accommodate this, use this custom faux dot instead.

- _dot = r'(?: \|\n(?=.* \|)|.)'
-
- # In certain places, a comma at the end of the line
- # allows line wrapping as well.
- _comma_dot = r'(?:,\s*\n|' + _dot + ')'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
+ m = re.match('</?xsl:([^>]*)/?>?', value)
+
+ if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
+ yield index, Keyword, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
+ if looks_like_xml(text) and '<xsl' in text:
+ return 0.8
+
+
+class HamlLexer(ExtendedRegexLexer):
+ """
+ For Haml markup.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haml'
+ aliases = ['haml']
+ filenames = ['*.haml']
+ mimetypes = ['text/x-haml']
+
+ flags = re.IGNORECASE
+ # Haml can include " |\n" anywhere,
+ # which is ignored and used to wrap long lines.
+ # To accommodate this, use this custom faux dot instead.
+ _dot = r'(?: \|\n(?=.* \|)|.)'
+
+ # In certain places, a comma at the end of the line
+ # allows line wrapping as well.
+ _comma_dot = r'(?:,\s*\n|' + _dot + ')'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'haml-comment-block'), '#pop'),
- (r'(-)(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'haml-comment-block'), '#pop'),
+ (r'(-)(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
(r'\{(,\n|' + _dot + r')*?\}', using(RubyLexer)),
(r'\[' + _dot + r'*?\]', using(RubyLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
(r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
(r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'haml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'haml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-class ScamlLexer(ExtendedRegexLexer):
- """
- For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
-
- .. versionadded:: 1.4
- """
-
- name = 'Scaml'
- aliases = ['scaml']
- filenames = ['*.scaml']
- mimetypes = ['text/x-scaml']
-
- flags = re.IGNORECASE
- # Scaml does not yet support the " |\n" notation to
- # wrap long lines. Once it does, use the custom faux
- # dot instead.
- # _dot = r'(?: \|\n(?=.* \|)|.)'
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
+class ScamlLexer(ExtendedRegexLexer):
+ """
+ For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Scaml'
+ aliases = ['scaml']
+ filenames = ['*.scaml']
+ mimetypes = ['text/x-scaml']
+
+ flags = re.IGNORECASE
+ # Scaml does not yet support the " |\n" notation to
+ # wrap long lines. Once it does, use the custom faux
+ # dot instead.
+ # _dot = r'(?: \|\n(?=.* \|)|.)'
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
(r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
(r'\[' + _dot + r'*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
(r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
(r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
class PugLexer(ExtendedRegexLexer):
- """
+ """
For Pug markup.
Pug is a variant of Scaml, see:
- http://scalate.fusesource.org/documentation/scaml-reference.html
-
- .. versionadded:: 1.4
- """
-
+ http://scalate.fusesource.org/documentation/scaml-reference.html
+
+ .. versionadded:: 1.4
+ """
+
name = 'Pug'
aliases = ['pug', 'jade']
filenames = ['*.pug', '*.jade']
mimetypes = ['text/x-pug', 'text/x-jade']
-
- flags = re.IGNORECASE
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)), 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+
+ flags = re.IGNORECASE
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)), 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
(r'(/)(\[' + _dot + r'*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- (r'\|', Text, 'eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ (r'\|', Text, 'eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
(r'\{(,\n|' + _dot + r')*?\}', using(ScalaLexer)),
(r'\[' + _dot + r'*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
(r"'(\\\\|\\[^\\]|[^'\\\n])*'", String, '#pop'),
(r'"(\\\\|\\[^\\]|[^"\\\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
(r'(#\{)(' + _dot + r'*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
JadeLexer = PugLexer # compat
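Because HtmlLexer.analyse_text returns 0.5 for input with an HTML doctype while XmlLexer returns 0.45, guess_lexer should prefer the HTML lexer for such a document. A minimal sketch, with a made-up snippet:

from pygments.lexers import guess_lexer

snippet = '<!DOCTYPE html>\n<html><body><p>hello</p></body></html>\n'
# analyse_text scores: HtmlLexer 0.5 vs XmlLexer 0.45, so HtmlLexer is expected here.
print(type(guess_lexer(snippet)).__name__)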
diff --git a/contrib/python/Pygments/py3/pygments/lexers/idl.py b/contrib/python/Pygments/py3/pygments/lexers/idl.py
index 22b8346ac3..e978183b21 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/idl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/idl.py
@@ -1,262 +1,262 @@
-"""
- pygments.lexers.idl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for IDL.
-
+"""
+ pygments.lexers.idl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for IDL.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, Number, String
-
-__all__ = ['IDLLexer']
-
-
-class IDLLexer(RegexLexer):
- """
- Pygments Lexer for IDL (Interactive Data Language).
-
- .. versionadded:: 1.6
- """
- name = 'IDL'
- aliases = ['idl']
- filenames = ['*.pro']
- mimetypes = ['text/idl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- _RESERVED = (
- 'and', 'begin', 'break', 'case', 'common', 'compile_opt',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, Number, String
+
+__all__ = ['IDLLexer']
+
+
+class IDLLexer(RegexLexer):
+ """
+ Pygments Lexer for IDL (Interactive Data Language).
+
+ .. versionadded:: 1.6
+ """
+ name = 'IDL'
+ aliases = ['idl']
+ filenames = ['*.pro']
+ mimetypes = ['text/idl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ _RESERVED = (
+ 'and', 'begin', 'break', 'case', 'common', 'compile_opt',
'continue', 'do', 'else', 'end', 'endcase', 'endelse',
- 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
- 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
- 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
- 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
- 'repeat', 'switch', 'then', 'until', 'while', 'xor')
- """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
-
- _BUILTIN_LIB = (
- 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
- 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
- 'arg_present', 'array_equal', 'array_indices', 'arrow',
- 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
- 'a_correlate', 'bandpass_filter', 'bandreject_filter',
- 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
- 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
- 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
- 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
- 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
- 'bytscl', 'caldat', 'calendar', 'call_external',
- 'call_function', 'call_method', 'call_procedure', 'canny',
+ 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
+ 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
+ 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
+ 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
+ 'repeat', 'switch', 'then', 'until', 'while', 'xor')
+ """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
+
+ _BUILTIN_LIB = (
+ 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
+ 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
+ 'arg_present', 'array_equal', 'array_indices', 'arrow',
+ 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
+ 'a_correlate', 'bandpass_filter', 'bandreject_filter',
+ 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
+ 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
+ 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
+ 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
+ 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
+ 'bytscl', 'caldat', 'calendar', 'call_external',
+ 'call_function', 'call_method', 'call_procedure', 'canny',
'catch', 'cd', r'cdf_\w*', 'ceil', 'chebyshev',
- 'check_math',
- 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
- 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
- 'cmyk_convert', 'colorbar', 'colorize_sample',
- 'colormap_applicable', 'colormap_gradient',
- 'colormap_rotation', 'colortable', 'color_convert',
- 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
- 'command_line_args', 'complex', 'complexarr', 'complexround',
- 'compute_mesh_normals', 'cond', 'congrid', 'conj',
- 'constrained_min', 'contour', 'convert_coord', 'convol',
- 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
- 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
- 'create_view', 'crossp', 'crvlength', 'cti_test',
- 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
- 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
- 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
- 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
- 'cw_form', 'cw_fslider', 'cw_light_editor',
- 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
- 'cw_palette_editor', 'cw_palette_editor_get',
- 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
- 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
- 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
- 'define_msgblk', 'define_msgblk_from_file', 'defroi',
- 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
- 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
- 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
- 'dialog_printersetup', 'dialog_printjob',
- 'dialog_read_image', 'dialog_write_image', 'digital_filter',
- 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
- 'dlm_load', 'dlm_register', 'doc_library', 'double',
- 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
- 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
+ 'check_math',
+ 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
+ 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
+ 'cmyk_convert', 'colorbar', 'colorize_sample',
+ 'colormap_applicable', 'colormap_gradient',
+ 'colormap_rotation', 'colortable', 'color_convert',
+ 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
+ 'command_line_args', 'complex', 'complexarr', 'complexround',
+ 'compute_mesh_normals', 'cond', 'congrid', 'conj',
+ 'constrained_min', 'contour', 'convert_coord', 'convol',
+ 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
+ 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
+ 'create_view', 'crossp', 'crvlength', 'cti_test',
+ 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
+ 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
+ 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
+ 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
+ 'cw_form', 'cw_fslider', 'cw_light_editor',
+ 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
+ 'cw_palette_editor', 'cw_palette_editor_get',
+ 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
+ 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
+ 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
+ 'define_msgblk', 'define_msgblk_from_file', 'defroi',
+ 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
+ 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
+ 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
+ 'dialog_printersetup', 'dialog_printjob',
+ 'dialog_read_image', 'dialog_write_image', 'digital_filter',
+ 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
+ 'dlm_load', 'dlm_register', 'doc_library', 'double',
+ 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
+ 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
'eof', r'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
- 'erode', 'errorplot', 'errplot', 'estimator_filter',
- 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
- 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
- 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
- 'file_dirname', 'file_expand_path', 'file_info',
- 'file_lines', 'file_link', 'file_mkdir', 'file_move',
- 'file_poll_input', 'file_readlink', 'file_same',
- 'file_search', 'file_test', 'file_which', 'findgen',
- 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
- 'fltarr', 'flush', 'format_axis_values', 'free_lun',
- 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
- 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
- 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
- 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
- 'getwindows', 'get_drive_list', 'get_dxf_objects',
- 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
+ 'erode', 'errorplot', 'errplot', 'estimator_filter',
+ 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
+ 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
+ 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
+ 'file_dirname', 'file_expand_path', 'file_info',
+ 'file_lines', 'file_link', 'file_mkdir', 'file_move',
+ 'file_poll_input', 'file_readlink', 'file_same',
+ 'file_search', 'file_test', 'file_which', 'findgen',
+ 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
+ 'fltarr', 'flush', 'format_axis_values', 'free_lun',
+ 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
+ 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
+ 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
+ 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
+ 'getwindows', 'get_drive_list', 'get_dxf_objects',
+ 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
'greg2jul', r'grib_\w*', 'grid3', 'griddata',
- 'grid_input', 'grid_tps', 'gs_iter',
+ 'grid_input', 'grid_tps', 'gs_iter',
r'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
- 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
+ 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
'hanning', 'hash', r'hdf_\w*', 'heap_free',
- 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
- 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
- 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
- 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
- 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
- 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
- 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
- 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
- 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
- 'image_cont', 'image_statistics', 'imaginary', 'imap',
- 'indgen', 'intarr', 'interpol', 'interpolate',
- 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
- 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
- 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
- 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
- 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
- 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
- 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
- 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
- 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
- 'label_region', 'ladfit', 'laguerre', 'laplacian',
- 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
- 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
- 'la_gm_linear_model', 'la_hqr', 'la_invert',
- 'la_least_squares', 'la_least_square_equality',
- 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
- 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
- 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
- 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
- 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
- 'lngamma', 'lnp_test', 'loadct', 'locale_get',
- 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
- 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
- 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
- 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
- 'map_continents', 'map_grid', 'map_image', 'map_patch',
- 'map_proj_forward', 'map_proj_image', 'map_proj_info',
- 'map_proj_init', 'map_proj_inverse', 'map_set',
- 'matrix_multiply', 'matrix_power', 'max', 'md_test',
- 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
- 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
- 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
- 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
- 'message', 'min', 'min_curve_surf', 'mk_html_help',
- 'modifyct', 'moment', 'morph_close', 'morph_distance',
- 'morph_gradient', 'morph_hitormiss', 'morph_open',
- 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
+ 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
+ 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
+ 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
+ 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
+ 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
+ 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
+ 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
+ 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
+ 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
+ 'image_cont', 'image_statistics', 'imaginary', 'imap',
+ 'indgen', 'intarr', 'interpol', 'interpolate',
+ 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
+ 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
+ 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
+ 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
+ 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
+ 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
+ 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
+ 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
+ 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
+ 'label_region', 'ladfit', 'laguerre', 'laplacian',
+ 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
+ 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
+ 'la_gm_linear_model', 'la_hqr', 'la_invert',
+ 'la_least_squares', 'la_least_square_equality',
+ 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
+ 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
+ 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
+ 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
+ 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
+ 'lngamma', 'lnp_test', 'loadct', 'locale_get',
+ 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
+ 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
+ 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
+ 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
+ 'map_continents', 'map_grid', 'map_image', 'map_patch',
+ 'map_proj_forward', 'map_proj_image', 'map_proj_info',
+ 'map_proj_init', 'map_proj_inverse', 'map_set',
+ 'matrix_multiply', 'matrix_power', 'max', 'md_test',
+ 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
+ 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
+ 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
+ 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
+ 'message', 'min', 'min_curve_surf', 'mk_html_help',
+ 'modifyct', 'moment', 'morph_close', 'morph_distance',
+ 'morph_gradient', 'morph_hitormiss', 'morph_open',
+ 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
r'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
- 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
- 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
- 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
- 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
- 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
- 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
- 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
- 'polar_contour', 'polar_surface', 'poly', 'polyfill',
- 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
- 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
- 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
- 'print', 'printd', 'product', 'profile', 'profiler',
- 'profiles', 'project_vol', 'psafm', 'pseudo',
- 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
- 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
- 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
- 'query_csv', 'query_dicom', 'query_gif', 'query_image',
- 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
- 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
- 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
- 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
- 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
- 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
- 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
- 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
- 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
- 'read_xwd', 'real_part', 'rebin', 'recall_commands',
- 'recon3', 'reduce_colors', 'reform', 'region_grow',
- 'register_cursor', 'regress', 'replicate',
- 'replicate_inplace', 'resolve_all', 'resolve_routine',
- 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
- 'rot', 'rotate', 'round', 'routine_filepath',
- 'routine_info', 'rs_test', 'r_correlate', 'r_test',
- 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
- 'scope_traceback', 'scope_varfetch', 'scope_varname',
- 'search2d', 'search3d', 'sem_create', 'sem_delete',
- 'sem_lock', 'sem_release', 'setenv', 'set_plot',
- 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
- 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
- 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
- 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
- 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
- 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
- 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
- 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
- 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
- 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
- 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
- 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
- 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
- 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
- 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
- 'tanh', 'tek_color', 'temporary', 'tetra_clip',
- 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
- 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
- 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
- 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
- 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
- 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
- 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
- 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
- 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
- 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
- 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
- 'where', 'widget_base', 'widget_button', 'widget_combobox',
- 'widget_control', 'widget_displaycontextmen', 'widget_draw',
- 'widget_droplist', 'widget_event', 'widget_info',
- 'widget_label', 'widget_list', 'widget_propertysheet',
- 'widget_slider', 'widget_tab', 'widget_table',
- 'widget_text', 'widget_tree', 'widget_tree_move',
- 'widget_window', 'wiener_filter', 'window', 'writeu',
- 'write_bmp', 'write_csv', 'write_gif', 'write_image',
- 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
- 'write_png', 'write_ppm', 'write_spr', 'write_srf',
- 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
- 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
- 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
- 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
- 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
- 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
- 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
- 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
- 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
- 'xmtool', 'xobjview', 'xobjview_rotate',
- 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
- 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
- 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
- 'xyouts', 'zoom', 'zoom_24')
- """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
-
- tokens = {
- 'root': [
+ 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
+ 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
+ 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
+ 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
+ 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
+ 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
+ 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
+ 'polar_contour', 'polar_surface', 'poly', 'polyfill',
+ 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
+ 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
+ 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
+ 'print', 'printd', 'product', 'profile', 'profiler',
+ 'profiles', 'project_vol', 'psafm', 'pseudo',
+ 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
+ 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
+ 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
+ 'query_csv', 'query_dicom', 'query_gif', 'query_image',
+ 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
+ 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
+ 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
+ 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
+ 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
+ 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
+ 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
+ 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
+ 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
+ 'read_xwd', 'real_part', 'rebin', 'recall_commands',
+ 'recon3', 'reduce_colors', 'reform', 'region_grow',
+ 'register_cursor', 'regress', 'replicate',
+ 'replicate_inplace', 'resolve_all', 'resolve_routine',
+ 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
+ 'rot', 'rotate', 'round', 'routine_filepath',
+ 'routine_info', 'rs_test', 'r_correlate', 'r_test',
+ 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
+ 'scope_traceback', 'scope_varfetch', 'scope_varname',
+ 'search2d', 'search3d', 'sem_create', 'sem_delete',
+ 'sem_lock', 'sem_release', 'setenv', 'set_plot',
+ 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
+ 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
+ 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
+ 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
+ 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
+ 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
+ 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
+ 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
+ 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
+ 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
+ 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
+ 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
+ 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
+ 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
+ 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
+ 'tanh', 'tek_color', 'temporary', 'tetra_clip',
+ 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
+ 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
+ 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
+ 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
+ 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
+ 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
+ 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
+ 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
+ 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
+ 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
+ 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
+ 'where', 'widget_base', 'widget_button', 'widget_combobox',
+ 'widget_control', 'widget_displaycontextmen', 'widget_draw',
+ 'widget_droplist', 'widget_event', 'widget_info',
+ 'widget_label', 'widget_list', 'widget_propertysheet',
+ 'widget_slider', 'widget_tab', 'widget_table',
+ 'widget_text', 'widget_tree', 'widget_tree_move',
+ 'widget_window', 'wiener_filter', 'window', 'writeu',
+ 'write_bmp', 'write_csv', 'write_gif', 'write_image',
+ 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
+ 'write_png', 'write_ppm', 'write_spr', 'write_srf',
+ 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
+ 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
+ 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
+ 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
+ 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
+ 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
+ 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
+ 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
+ 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
+ 'xmtool', 'xobjview', 'xobjview_rotate',
+ 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
+ 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
+ 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
+ 'xyouts', 'zoom', 'zoom_24')
+ """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
+
+ tokens = {
+ 'root': [
(r'^\s*;.*?\n', Comment.Single),
- (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
- (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
- (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator),
- (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
- (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
- (r'"[^\"]*"', String.Double),
- (r"'[^\']*'", String.Single),
+ (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
+ (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
+ (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator),
+ (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
+ (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
+ (r'"[^\"]*"', String.Double),
+ (r"'[^\']*'", String.Single),
(r'\b[+\-]?([0-9]*\.[0-9]+|[0-9]+\.[0-9]*)(D|E)?([+\-]?[0-9]+)?\b',
Number.Float),
(r'\b\'[+\-]?[0-9A-F]+\'X(U?(S?|L{1,2})|B)\b', Number.Hex),
@@ -264,9 +264,9 @@ class IDLLexer(RegexLexer):
(r'\b[+\-]?[0-9]+U?L{1,2}\b', Number.Integer.Long),
(r'\b[+\-]?[0-9]+U?S?\b', Number.Integer),
(r'\b[+\-]?[0-9]+B\b', Number),
- (r'.', Text),
- ]
- }
+ (r'.', Text),
+ ]
+ }
def analyse_text(text):
"""endelse seems to be unique to IDL, endswitch is rare at least."""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/igor.py b/contrib/python/Pygments/py3/pygments/lexers/igor.py
index e843d081f1..426050e508 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/igor.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/igor.py
@@ -1,52 +1,52 @@
-"""
- pygments.lexers.igor
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Igor Pro.
-
+"""
+ pygments.lexers.igor
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Igor Pro.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Keyword, Name, String
-
-__all__ = ['IgorLexer']
-
-
-class IgorLexer(RegexLexer):
- """
- Pygments Lexer for Igor Pro procedure files (.ipf).
- See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
-
- .. versionadded:: 2.0
- """
-
- name = 'Igor'
- aliases = ['igor', 'igorpro']
- filenames = ['*.ipf']
- mimetypes = ['text/ipf']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- flowControl = (
- 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
- 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
- 'break', 'continue', 'return', 'AbortOnRTE', 'AbortOnValue'
- )
- types = (
- 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
- 'STRUCT', 'dfref', 'funcref', 'char', 'uchar', 'int16', 'uint16', 'int32',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Keyword, Name, String
+
+__all__ = ['IgorLexer']
+
+
+class IgorLexer(RegexLexer):
+ """
+ Pygments Lexer for Igor Pro procedure files (.ipf).
+ See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Igor'
+ aliases = ['igor', 'igorpro']
+ filenames = ['*.ipf']
+ mimetypes = ['text/ipf']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ flowControl = (
+ 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
+ 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
+ 'break', 'continue', 'return', 'AbortOnRTE', 'AbortOnValue'
+ )
+ types = (
+ 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
+ 'STRUCT', 'dfref', 'funcref', 'char', 'uchar', 'int16', 'uint16', 'int32',
'uint32', 'int64', 'uint64', 'float', 'double'
- )
- keywords = (
- 'override', 'ThreadSafe', 'MultiThread', 'static', 'Proc',
- 'Picture', 'Prompt', 'DoPrompt', 'macro', 'window', 'function', 'end',
- 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu'
- )
- operations = (
+ )
+ keywords = (
+ 'override', 'ThreadSafe', 'MultiThread', 'static', 'Proc',
+ 'Picture', 'Prompt', 'DoPrompt', 'macro', 'window', 'function', 'end',
+ 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu'
+ )
+ operations = (
'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio', 'AddMovieFrame',
'AddWavesToBoxPlot', 'AddWavesToViolinPlot', 'AdoptFiles', 'APMath', 'Append',
'AppendBoxPlot', 'AppendImage', 'AppendLayoutObject', 'AppendMatrixContour',
@@ -134,7 +134,7 @@ class IgorLexer(RegexLexer):
'ModifyContour', 'ModifyControl', 'ModifyControlList', 'ModifyFreeAxis',
'ModifyGizmo', 'ModifyGraph', 'ModifyImage', 'ModifyLayout', 'ModifyPanel',
'ModifyTable', 'ModifyViolinPlot', 'ModifyWaterfall', 'MoveDataFolder',
- 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
+ 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
'MoveWave', 'MoveWindow', 'MultiTaperPSD', 'MultiThreadingControl',
'NC_CloseFile', 'NC_DumpErrors', 'NC_Inquire', 'NC_ListAttributes',
'NC_ListObjects', 'NC_LoadData', 'NC_OpenFile', 'NeuralNetworkRun',
@@ -169,7 +169,7 @@ class IgorLexer(RegexLexer):
'StatsCircularTwoSampleTest', 'StatsCochranTest', 'StatsContingencyTable',
'StatsDIPTest', 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKDE', 'StatsKendallTauTest',
- 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
+ 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
'StatsLinearRegression', 'StatsMultiCorrelationTest', 'StatsNPMCTest',
'StatsNPNominalSRTest', 'StatsQuantiles', 'StatsRankCorrelationTest',
'StatsResample', 'StatsSample', 'StatsScheffeTest', 'StatsShapiroWilkTest',
@@ -188,8 +188,8 @@ class IgorLexer(RegexLexer):
'VISAReadBinaryWave', 'VISAReadWave', 'VISAWrite', 'VISAWriteBinary',
'VISAWriteBinaryWave', 'VISAWriteWave', 'WaveMeanStdv', 'WaveStats',
'WaveTransform', 'wfprintf', 'WignerTransform', 'WindowFunction', 'XLLoadWave'
- )
- functions = (
+ )
+ functions = (
'abs', 'acos', 'acosh', 'AddListItem', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD',
'alog', 'AnnotationInfo', 'AnnotationList', 'area', 'areaXY', 'asin', 'asinh',
'atan', 'atanh', 'atan2', 'AxisInfo', 'AxisList', 'AxisValFromPixel',
@@ -321,9 +321,9 @@ class IgorLexer(RegexLexer):
'StatsInvMaxwellCDF', 'StatsInvMooreCDF', 'StatsInvNBinomialCDF',
'StatsInvNCChiCDF', 'StatsInvNCFCDF', 'StatsInvNormalCDF', 'StatsInvParetoCDF',
'StatsInvPoissonCDF', 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
- 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
- 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
- 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
+ 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
+ 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
+ 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF', 'StatsLogNormalCDF',
'StatsLogNormalPDF', 'StatsMaxwellCDF', 'StatsMaxwellPDF', 'StatsMedian',
'StatsMooreCDF', 'StatsNBinomialCDF', 'StatsNBinomialPDF', 'StatsNCChiCDF',
@@ -333,7 +333,7 @@ class IgorLexer(RegexLexer):
'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF', 'StatsRayleighCDF',
'StatsRayleighPDF', 'StatsRectangularCDF', 'StatsRectangularPDF', 'StatsRunsCDF',
'StatsSpearmanRhoCDF', 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
- 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
+ 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise', 'StatsVonMisesPDF',
'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF', 'StatsWeibullPDF',
'StopMSTimer', 'StringByKey', 'stringCRC', 'StringFromList', 'StringList',
@@ -394,26 +394,26 @@ class IgorLexer(RegexLexer):
'zeromq_client_send', 'zeromq_handler_start', 'zeromq_handler_stop',
'zeromq_server_bind', 'zeromq_server_recv', 'zeromq_server_send', 'zeromq_set',
'zeromq_stop', 'zeromq_test_callfunction', 'zeromq_test_serializeWave', 'zeta'
- )
-
- tokens = {
- 'root': [
- (r'//.*$', Comment.Single),
- (r'"([^"\\]|\\.)*"', String),
- # Flow Control.
- (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword),
- # Types.
- (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- # Keywords.
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
- # Built-in operations.
- (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class),
- # Built-in functions.
- (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
- # Compiler directives.
+ )
+
+ tokens = {
+ 'root': [
+ (r'//.*$', Comment.Single),
+ (r'"([^"\\]|\\.)*"', String),
+ # Flow Control.
+ (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword),
+ # Types.
+ (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ # Keywords.
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ # Built-in operations.
+ (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class),
+ # Built-in functions.
+ (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
+ # Compiler directives.
(r'^#(include|pragma|define|undef|ifdef|ifndef|if|elif|else|endif)',
- Name.Decorator),
- (r'[^a-z"/]+$', Text),
- (r'.', Text),
- ],
- }
+ Name.Decorator),
+ (r'[^a-z"/]+$', Text),
+ (r'.', Text),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/inferno.py b/contrib/python/Pygments/py3/pygments/lexers/inferno.py
index befe42ab51..dccf71fef2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/inferno.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/inferno.py
@@ -1,95 +1,95 @@
-"""
- pygments.lexers.inferno
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Inferno os and all the related stuff.
-
+"""
+ pygments.lexers.inferno
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Inferno os and all the related stuff.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default
-from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
- Name, String, Number
-
-__all__ = ['LimboLexer']
-
-
-class LimboLexer(RegexLexer):
- """
- Lexer for `Limbo programming language <http://www.vitanuova.com/inferno/limbo.html>`_
-
- TODO:
- - maybe implement better var declaration highlighting
- - some simple syntax error highlighting
-
- .. versionadded:: 2.0
- """
- name = 'Limbo'
- aliases = ['limbo']
- filenames = ['*.b']
- mimetypes = ['text/limbo']
-
- tokens = {
- 'whitespace': [
- (r'^(\s*)([a-zA-Z_]\w*:(\s*)\n)',
- bygroups(Text, Name.Label)),
- (r'\n', Text),
- (r'\s+', Text),
- (r'#(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\', String), # stray backslash
- ],
- 'statements': [
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])', Number.Float),
- (r'16r[0-9a-fA-F]+', Number.Hex),
- (r'8r[0-7]+', Number.Oct),
- (r'((([1-3]\d)|([2-9]))r)?(\d+)', Number.Integer),
- (r'[()\[\],.]', Punctuation),
- (r'[~!%^&*+=|?:<>/-]|(->)|(<-)|(=>)|(::)', Operator),
- (r'(alt|break|case|continue|cyclic|do|else|exit'
- r'for|hd|if|implement|import|include|len|load|or'
- r'pick|return|spawn|tagof|tl|to|while)\b', Keyword),
- (r'(byte|int|big|real|string|array|chan|list|adt'
- r'|fn|ref|of|module|self|type)\b', Keyword.Type),
- (r'(con|iota|nil)\b', Keyword.Constant),
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Punctuation, Text, Comment, Operator, Keyword, \
+ Name, String, Number
+
+__all__ = ['LimboLexer']
+
+
+class LimboLexer(RegexLexer):
+ """
+ Lexer for `Limbo programming language <http://www.vitanuova.com/inferno/limbo.html>`_
+
+ TODO:
+ - maybe implement better var declaration highlighting
+ - some simple syntax error highlighting
+
+ .. versionadded:: 2.0
+ """
+ name = 'Limbo'
+ aliases = ['limbo']
+ filenames = ['*.b']
+ mimetypes = ['text/limbo']
+
+ tokens = {
+ 'whitespace': [
+ (r'^(\s*)([a-zA-Z_]\w*:(\s*)\n)',
+ bygroups(Text, Name.Label)),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'#(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\', String), # stray backslash
+ ],
+ 'statements': [
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])', Number.Float),
+ (r'16r[0-9a-fA-F]+', Number.Hex),
+ (r'8r[0-7]+', Number.Oct),
+ (r'((([1-3]\d)|([2-9]))r)?(\d+)', Number.Integer),
+ (r'[()\[\],.]', Punctuation),
+ (r'[~!%^&*+=|?:<>/-]|(->)|(<-)|(=>)|(::)', Operator),
+ (r'(alt|break|case|continue|cyclic|do|else|exit'
+ r'for|hd|if|implement|import|include|len|load|or'
+ r'pick|return|spawn|tagof|tl|to|while)\b', Keyword),
+ (r'(byte|int|big|real|string|array|chan|list|adt'
+ r'|fn|ref|of|module|self|type)\b', Keyword.Type),
+ (r'(con|iota|nil)\b', Keyword.Constant),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'root': [
- include('whitespace'),
- default('statement'),
- ],
- }
-
- def analyse_text(text):
- # Any limbo module implements something
- if re.search(r'^implement \w+;', text, re.MULTILINE):
- return 0.7
-
-# TODO:
-# - Make lexers for:
-# - asm sources
-# - man pages
-# - mkfiles
-# - module definitions
-# - namespace definitions
-# - shell scripts
-# - maybe keyfiles and fonts
-# they all seem to be quite similar to their equivalents
-# from unix world, so there should not be a lot of problems
+ ],
+ 'statement' : [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'root': [
+ include('whitespace'),
+ default('statement'),
+ ],
+ }
+
+ def analyse_text(text):
+ # Any limbo module implements something
+ if re.search(r'^implement \w+;', text, re.MULTILINE):
+ return 0.7
+
+# TODO:
+# - Make lexers for:
+# - asm sources
+# - man pages
+# - mkfiles
+# - module definitions
+# - namespace definitions
+# - shell scripts
+# - maybe keyfiles and fonts
+# they all seem to be quite similar to their equivalents
+# from unix world, so there should not be a lot of problems
diff --git a/contrib/python/Pygments/py3/pygments/lexers/installers.py b/contrib/python/Pygments/py3/pygments/lexers/installers.py
index 1f7b283146..635a28e067 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/installers.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/installers.py
@@ -1,327 +1,327 @@
-"""
- pygments.lexers.installers
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for installer/packager DSLs and formats.
-
+"""
+ pygments.lexers.installers
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for installer/packager DSLs and formats.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Punctuation, Generic, Number, Whitespace
-
-__all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
- 'DebianControlLexer']
-
-
-class NSISLexer(RegexLexer):
- """
- For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
-
- .. versionadded:: 1.6
- """
- name = 'NSIS'
- aliases = ['nsis', 'nsi', 'nsh']
- filenames = ['*.nsi', '*.nsh']
- mimetypes = ['text/x-nsis']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'[;#].*\n', Comment),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'str_double'),
- (r'`', String.Backtick, 'str_backtick'),
- include('macro'),
- include('interpol'),
- include('basic'),
- (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
- (r'/[a-z_]\w*', Name.Attribute),
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation, Generic, Number, Whitespace
+
+__all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
+ 'DebianControlLexer']
+
+
+class NSISLexer(RegexLexer):
+ """
+ For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
+
+ .. versionadded:: 1.6
+ """
+ name = 'NSIS'
+ aliases = ['nsis', 'nsi', 'nsh']
+ filenames = ['*.nsi', '*.nsh']
+ mimetypes = ['text/x-nsis']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'[;#].*\n', Comment),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'str_double'),
+ (r'`', String.Backtick, 'str_backtick'),
+ include('macro'),
+ include('interpol'),
+ include('basic'),
+ (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
+ (r'/[a-z_]\w*', Name.Attribute),
(r'\s+', Whitespace),
(r'[\w.]+', Text),
- ],
- 'basic': [
- (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
- bygroups(Text, Keyword, Text, Name.Function)),
- (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
- bygroups(Keyword.Namespace, Punctuation, Name.Function)),
- (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'(\b[ULS]|\B)([!<>=]?=|\<\>?|\>)\B', Operator),
- (r'[|+-]', Operator),
- (r'\\', Punctuation),
- (r'\b(Abort|Add(?:BrandingImage|Size)|'
- r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
- r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
- r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
- r'ComponentText|CopyFiles|CRCCheck|'
- r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
- r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
- r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
- r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
- r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
- r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
- r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
- r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
- r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
- r'InstDirError|LabelAddress|TempFileName)|'
- r'Goto|HideWindow|Icon|'
- r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
- r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
- r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
- r'IsWindow|LangString(?:UP)?|'
- r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
- r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
- r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
- r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
- r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
- r'Return|RMDir|SearchPath|Section(?:Divider|End|'
- r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
- r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
- r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
- r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
- r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
- r'Silent|StaticBkColor)|'
- r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
- r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
- r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
- r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
- r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
- r'XPStyle)\b', Keyword),
- (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
- r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
- r'HK(CC|CR|CU|DD|LM|PD|U)|'
- r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
- r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
- r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
- r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
- r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
- r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
- r'YESNO(?:CANCEL)?)|SET|SHCTX|'
- r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
- r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
- r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
- r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
- r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
- r'true|try|user|zlib)\b', Name.Constant),
- ],
- 'macro': [
- (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
- r'delfilefile|echo(?:message)?|else|endif|error|execute|'
- r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
- r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
- r'warning)\b', Comment.Preproc),
- ],
- 'interpol': [
- (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
- (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
- r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
- r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
- r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
- r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
- r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
- Name.Builtin),
- (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
- (r'\$[a-z_]\w*', Name.Variable),
- ],
- 'str_double': [
+ ],
+ 'basic': [
+ (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
+ bygroups(Text, Keyword, Text, Name.Function)),
+ (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
+ bygroups(Keyword.Namespace, Punctuation, Name.Function)),
+ (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
+ (r'(\b[ULS]|\B)([!<>=]?=|\<\>?|\>)\B', Operator),
+ (r'[|+-]', Operator),
+ (r'\\', Punctuation),
+ (r'\b(Abort|Add(?:BrandingImage|Size)|'
+ r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
+ r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
+ r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
+ r'ComponentText|CopyFiles|CRCCheck|'
+ r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
+ r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
+ r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
+ r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
+ r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
+ r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
+ r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
+ r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
+ r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
+ r'InstDirError|LabelAddress|TempFileName)|'
+ r'Goto|HideWindow|Icon|'
+ r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
+ r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
+ r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
+ r'IsWindow|LangString(?:UP)?|'
+ r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
+ r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
+ r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
+ r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
+ r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
+ r'Return|RMDir|SearchPath|Section(?:Divider|End|'
+ r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
+ r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
+ r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
+ r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
+ r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
+ r'Silent|StaticBkColor)|'
+ r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
+ r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
+ r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
+ r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
+ r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
+ r'XPStyle)\b', Keyword),
+ (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
+ r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
+ r'HK(CC|CR|CU|DD|LM|PD|U)|'
+ r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
+ r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
+ r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
+ r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
+ r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
+ r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
+ r'YESNO(?:CANCEL)?)|SET|SHCTX|'
+ r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
+ r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
+ r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
+ r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
+ r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
+ r'true|try|user|zlib)\b', Name.Constant),
+ ],
+ 'macro': [
+ (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
+ r'delfilefile|echo(?:message)?|else|endif|error|execute|'
+ r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
+ r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
+ r'warning)\b', Comment.Preproc),
+ ],
+ 'interpol': [
+ (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
+ (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
+ r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
+ r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
+ r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
+ r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
+ r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
+ Name.Builtin),
+ (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
+ (r'\$[a-z_]\w*', Name.Variable),
+ ],
+ 'str_double': [
(r'"', String.Double, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
(r'[^"]+', String.Double),
- ],
- 'str_backtick': [
+ ],
+ 'str_backtick': [
(r'`', String.Double, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
(r'[^`]+', String.Double),
- ],
- }
-
-
-class RPMSpecLexer(RegexLexer):
- """
- For RPM ``.spec`` files.
-
- .. versionadded:: 1.6
- """
-
- name = 'RPMSpec'
- aliases = ['spec']
- filenames = ['*.spec']
- mimetypes = ['text/x-rpm-spec']
-
- _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
- 'post[a-z]*|trigger[a-z]*|files)')
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment),
- include('basic'),
- ],
- 'description': [
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'changelog': [
- (r'\*.*\n', Generic.Subheading),
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'basic': [
- include('macro'),
- (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
- r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
- r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
- r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
- bygroups(Generic.Heading, Punctuation, using(this))),
- (r'^%description', Name.Decorator, 'description'),
- (r'^%changelog', Name.Decorator, 'changelog'),
- (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
- (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
- r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
- Keyword),
- include('interpol'),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'string'),
- (r'.', Text),
- ],
- 'macro': [
- (r'%define.*\n', Comment.Preproc),
- (r'%\{\!\?.*%define.*\}', Comment.Preproc),
- (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
- bygroups(Comment.Preproc, Text)),
- ],
- 'interpol': [
- (r'%\{?__[a-z_]+\}?', Name.Function),
- (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
- (r'%\{\?\w+\}', Name.Variable),
- (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
- (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
- ]
- }
-
-
-class SourcesListLexer(RegexLexer):
- """
- Lexer that highlights debian sources.list files.
-
- .. versionadded:: 0.7
- """
-
- name = 'Debian Sourcelist'
+ ],
+ }
+
+
+class RPMSpecLexer(RegexLexer):
+ """
+ For RPM ``.spec`` files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'RPMSpec'
+ aliases = ['spec']
+ filenames = ['*.spec']
+ mimetypes = ['text/x-rpm-spec']
+
+ _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
+ 'post[a-z]*|trigger[a-z]*|files)')
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment),
+ include('basic'),
+ ],
+ 'description': [
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'changelog': [
+ (r'\*.*\n', Generic.Subheading),
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ 'basic': [
+ include('macro'),
+ (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
+ r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
+ r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
+ r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
+ bygroups(Generic.Heading, Punctuation, using(this))),
+ (r'^%description', Name.Decorator, 'description'),
+ (r'^%changelog', Name.Decorator, 'changelog'),
+ (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
+ (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
+ r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
+ Keyword),
+ include('interpol'),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'string'),
+ (r'.', Text),
+ ],
+ 'macro': [
+ (r'%define.*\n', Comment.Preproc),
+ (r'%\{\!\?.*%define.*\}', Comment.Preproc),
+ (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
+ bygroups(Comment.Preproc, Text)),
+ ],
+ 'interpol': [
+ (r'%\{?__[a-z_]+\}?', Name.Function),
+ (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
+ (r'%\{\?\w+\}', Name.Variable),
+ (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
+ (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
+ ]
+ }
+
+
+class SourcesListLexer(RegexLexer):
+ """
+ Lexer that highlights debian sources.list files.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Debian Sourcelist'
aliases = ['debsources', 'sourceslist', 'sources.list']
- filenames = ['sources.list']
- mimetype = ['application/x-debian-sourceslist']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment),
- (r'^(deb(?:-src)?)(\s+)',
- bygroups(Keyword, Text), 'distribution')
- ],
- 'distribution': [
- (r'#.*?$', Comment, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\s$[]+', String),
- (r'\[', String.Other, 'escaped-distribution'),
- (r'\$', String),
- (r'\s+', Text, 'components')
- ],
- 'escaped-distribution': [
- (r'\]', String.Other, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\]$]+', String.Other),
- (r'\$', String.Other)
- ],
- 'components': [
- (r'#.*?$', Comment, '#pop:2'),
- (r'$', Text, '#pop:2'),
- (r'\s+', Text),
- (r'\S+', Keyword.Pseudo),
- ]
- }
-
- def analyse_text(text):
- for line in text.splitlines():
- line = line.strip()
- if line.startswith('deb ') or line.startswith('deb-src '):
- return True
-
-
-class DebianControlLexer(RegexLexer):
- """
- Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
-
+ filenames = ['sources.list']
+ mimetype = ['application/x-debian-sourceslist']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment),
+ (r'^(deb(?:-src)?)(\s+)',
+ bygroups(Keyword, Text), 'distribution')
+ ],
+ 'distribution': [
+ (r'#.*?$', Comment, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\s$[]+', String),
+ (r'\[', String.Other, 'escaped-distribution'),
+ (r'\$', String),
+ (r'\s+', Text, 'components')
+ ],
+ 'escaped-distribution': [
+ (r'\]', String.Other, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\]$]+', String.Other),
+ (r'\$', String.Other)
+ ],
+ 'components': [
+ (r'#.*?$', Comment, '#pop:2'),
+ (r'$', Text, '#pop:2'),
+ (r'\s+', Text),
+ (r'\S+', Keyword.Pseudo),
+ ]
+ }
+
+ def analyse_text(text):
+ for line in text.splitlines():
+ line = line.strip()
+ if line.startswith('deb ') or line.startswith('deb-src '):
+ return True
+
+
+class DebianControlLexer(RegexLexer):
+ """
+ Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
+
Specification of `control`` files is available at
https://www.debian.org/doc/debian-policy/ch-controlfields.html
- .. versionadded:: 0.9
- """
- name = 'Debian Control file'
+ .. versionadded:: 0.9
+ """
+ name = 'Debian Control file'
aliases = ['debcontrol', 'control']
- filenames = ['control']
-
- tokens = {
- 'root': [
- (r'^(Description)', Keyword, 'description'),
+ filenames = ['control']
+
+ tokens = {
+ 'root': [
+ (r'^(Description)', Keyword, 'description'),
(r'^(Maintainer|Uploaders)(:\s*)', bygroups(Keyword, Text),
'maintainer'),
(r'^((?:Build-|Pre-)?Depends(?:-Indep|-Arch)?)(:\s*)',
bygroups(Keyword, Text), 'depends'),
(r'^(Recommends|Suggests|Enhances)(:\s*)', bygroups(Keyword, Text),
'depends'),
- (r'^((?:Python-)?Version)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
- bygroups(Keyword, Whitespace, String)),
- ],
- 'maintainer': [
+ (r'^((?:Python-)?Version)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
+ bygroups(Keyword, Whitespace, String)),
+ ],
+ 'maintainer': [
(r'<[^>]+>$', Generic.Strong, '#pop'),
- (r'<[^>]+>', Generic.Strong),
- (r',\n?', Text),
+ (r'<[^>]+>', Generic.Strong),
+ (r',\n?', Text),
(r'[^,<]+$', Text, '#pop'),
(r'[^,<]+', Text),
- ],
- 'description': [
- (r'(.*)(Homepage)(: )(\S+)',
- bygroups(Text, String, Name, Name.Class)),
- (r':.*\n', Generic.Strong),
- (r' .*\n', Text),
- default('#pop'),
- ],
- 'depends': [
+ ],
+ 'description': [
+ (r'(.*)(Homepage)(: )(\S+)',
+ bygroups(Text, String, Name, Name.Class)),
+ (r':.*\n', Generic.Strong),
+ (r' .*\n', Text),
+ default('#pop'),
+ ],
+ 'depends': [
(r'(\$)(\{)(\w+\s*:\s*\w+)(\})',
bygroups(Operator, Text, Name.Entity, Text)),
- (r'\(', Text, 'depend_vers'),
- (r'\|', Operator),
+ (r'\(', Text, 'depend_vers'),
+ (r'\|', Operator),
(r',\n', Text),
(r'\n', Text, '#pop'),
(r'[,\s]', Text),
(r'[+.a-zA-Z0-9-]+', Name.Function),
- (r'\[.*?\]', Name.Entity),
- ],
- 'depend_vers': [
+ (r'\[.*?\]', Name.Entity),
+ ],
+ 'depend_vers': [
(r'\)', Text, '#pop'),
(r'([><=]+)(\s*)([^)]+)', bygroups(Operator, Text, Number)),
- ]
- }
+ ]
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py b/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
index c309d19248..fd4c285c49 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
@@ -1,1356 +1,1356 @@
-"""
- pygments.lexers.int_fiction
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for interactive fiction languages.
-
+"""
+ pygments.lexers.int_fiction
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for interactive fiction languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Generic
-
-__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
- 'Tads3Lexer']
-
-
-class Inform6Lexer(RegexLexer):
- """
- For `Inform 6 <http://inform-fiction.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6'
- aliases = ['inform6', 'i6']
- filenames = ['*.inf']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- _name = r'[a-zA-Z_]\w*'
-
- # Inform 7 maps these four character classes to their ASCII
- # equivalents. To support Inform 6 inclusions within Inform 7,
- # Inform6Lexer maps them too.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+
+__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
+ 'Tads3Lexer']
+
+
+class Inform6Lexer(RegexLexer):
+ """
+ For `Inform 6 <http://inform-fiction.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6'
+ aliases = ['inform6', 'i6']
+ filenames = ['*.inf']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _name = r'[a-zA-Z_]\w*'
+
+ # Inform 7 maps these four character classes to their ASCII
+ # equivalents. To support Inform 6 inclusions within Inform 7,
+ # Inform6Lexer maps them too.
_dash = '\\-\u2010-\u2014'
_dquote = '"\u201c\u201d'
_squote = "'\u2018\u2019"
_newline = '\\n\u0085\u2028\u2029'
-
- tokens = {
- 'root': [
- (r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
- 'directive'),
- default('directive')
- ],
- '_whitespace': [
- (r'\s+', Text),
- (r'![^%s]*' % _newline, Comment.Single)
- ],
- 'default': [
- include('_whitespace'),
- (r'\[', Punctuation, 'many-values'), # Array initialization
- (r':|(?=;)', Punctuation, '#pop'),
- (r'<', Punctuation), # Second angle bracket in an action statement
- default(('expression', '_expression'))
- ],
-
- # Expressions
- '_expression': [
- include('_whitespace'),
- (r'(?=sp\b)', Text, '#pop'),
- (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
- ('#pop', 'value')),
- (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
- (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
- ],
- 'expression': [
- include('_whitespace'),
- (r'\(', Punctuation, ('expression', '_expression')),
- (r'\)', Punctuation, '#pop'),
- (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
- (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
- (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
- (r',', Punctuation, '_expression'),
- (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
- Operator, '_expression'),
- (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
- '_expression'),
- (r'sp\b', Name),
- (r'\?~?', Name.Label, 'label?'),
- (r'[@{]', Error),
- default('#pop')
- ],
- '_assembly-expression': [
- (r'\(', Punctuation, ('#push', '_expression')),
- (r'[\[\]]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, '_expression'),
- (r'sp\b', Keyword.Pseudo),
- (r';', Punctuation, '#pop:3'),
- include('expression')
- ],
- '_for-expression': [
- (r'\)', Punctuation, '#pop:2'),
- (r':', Punctuation, '#pop'),
- include('expression')
- ],
- '_keyword-expression': [
- (r'(from|near|to)\b', Keyword, '_expression'),
- include('expression')
- ],
- '_list-expression': [
- (r',', Punctuation, '#pop'),
- include('expression')
- ],
- '_object-expression': [
- (r'has\b', Keyword.Declaration, '#pop'),
- include('_list-expression')
- ],
-
- # Values
- 'value': [
- include('_whitespace'),
- # Strings
- (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
+
+ tokens = {
+ 'root': [
+ (r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
+ 'directive'),
+ default('directive')
+ ],
+ '_whitespace': [
+ (r'\s+', Text),
+ (r'![^%s]*' % _newline, Comment.Single)
+ ],
+ 'default': [
+ include('_whitespace'),
+ (r'\[', Punctuation, 'many-values'), # Array initialization
+ (r':|(?=;)', Punctuation, '#pop'),
+ (r'<', Punctuation), # Second angle bracket in an action statement
+ default(('expression', '_expression'))
+ ],
+
+ # Expressions
+ '_expression': [
+ include('_whitespace'),
+ (r'(?=sp\b)', Text, '#pop'),
+ (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
+ ('#pop', 'value')),
+ (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
+ (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
+ ],
+ 'expression': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('expression', '_expression')),
+ (r'\)', Punctuation, '#pop'),
+ (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
+ (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
+ (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
+ (r',', Punctuation, '_expression'),
+ (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
+ Operator, '_expression'),
+ (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
+ '_expression'),
+ (r'sp\b', Name),
+ (r'\?~?', Name.Label, 'label?'),
+ (r'[@{]', Error),
+ default('#pop')
+ ],
+ '_assembly-expression': [
+ (r'\(', Punctuation, ('#push', '_expression')),
+ (r'[\[\]]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, '_expression'),
+ (r'sp\b', Keyword.Pseudo),
+ (r';', Punctuation, '#pop:3'),
+ include('expression')
+ ],
+ '_for-expression': [
+ (r'\)', Punctuation, '#pop:2'),
+ (r':', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_keyword-expression': [
+ (r'(from|near|to)\b', Keyword, '_expression'),
+ include('expression')
+ ],
+ '_list-expression': [
+ (r',', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_object-expression': [
+ (r'has\b', Keyword.Declaration, '#pop'),
+ include('_list-expression')
+ ],
+
+ # Values
+ 'value': [
+ include('_whitespace'),
+ # Strings
+ (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
(r'([%s])(@\{[0-9a-fA-F]*\})([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'([%s])(@.{2})([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
- # Numbers
- (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
- Number.Float, '#pop'),
- (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'\$\$[01]+', Number.Bin, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
- # Values prefixed by hashes
- (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
- (r'(#g\$)(%s)' % _name,
- bygroups(Operator, Name.Variable.Global), '#pop'),
- (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
- (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
- (r'#', Name.Builtin, ('#pop', 'system-constant')),
- # System functions
- (words((
- 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
- 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Metaclasses
- (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
- # Veneer routines
- (words((
- 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
- 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
- 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
- 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
- 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
- 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
- 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
- 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
- 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
- prefix='(?i)', suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Other built-in symbols
- (words((
- 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
- 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false',
- 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN',
- 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
- 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
- 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
- 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
- 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
- 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
- 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
- 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
- prefix='(?i)', suffix=r'\b'),
- Name.Builtin, '#pop'),
- # Other values
- (_name, Name, '#pop')
- ],
- # Strings
- 'dictionary-word': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _squote, String.Single),
- (r'[({]', String.Single),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'([%s])(@.{2})([%s])' % (_squote, _squote),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
+ # Numbers
+ (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
+ Number.Float, '#pop'),
+ (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'\$\$[01]+', Number.Bin, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+ # Values prefixed by hashes
+ (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
+ (r'(#g\$)(%s)' % _name,
+ bygroups(Operator, Name.Variable.Global), '#pop'),
+ (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
+ (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
+ (r'#', Name.Builtin, ('#pop', 'system-constant')),
+ # System functions
+ (words((
+ 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
+ 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Metaclasses
+ (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
+ # Veneer routines
+ (words((
+ 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
+ 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
+ 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
+ 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
+ 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
+ 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
+ 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
+ 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
+ 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other built-in symbols
+ (words((
+ 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
+ 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false',
+ 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN',
+ 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
+ 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
+ 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
+ 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
+ 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
+ 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
+ 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
+ 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other values
+ (_name, Name, '#pop')
+ ],
+ # Strings
+ 'dictionary-word': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _squote, String.Single),
+ (r'[({]', String.Single),
(r'@\{[0-9a-fA-F]*\}', String.Escape),
- (r'@.{2}', String.Escape),
- (r'[%s]' % _squote, String.Single, '#pop')
- ],
- 'string': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _dquote, String.Double),
- (r'[({]', String.Double),
- (r'\\', String.Escape),
- (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
- (_newline, _newline), String.Escape),
+ (r'@.{2}', String.Escape),
+ (r'[%s]' % _squote, String.Single, '#pop')
+ ],
+ 'string': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _dquote, String.Double),
+ (r'[({]', String.Double),
+ (r'\\', String.Escape),
+ (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
+ (_newline, _newline), String.Escape),
(r'@(\\\s*[%s]\s*)*\{((\\\s*[%s]\s*)*[0-9a-fA-F])*'
- r'(\\\s*[%s]\s*)*\}' % (_newline, _newline, _newline),
- String.Escape),
- (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
- String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- 'plain-string': [
- (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
- (r'[~^({\[\]]', String.Double),
- (r'\\', String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- # Names
- '_constant': [
- include('_whitespace'),
- (_name, Name.Constant, '#pop'),
- include('value')
- ],
- '_global': [
- include('_whitespace'),
- (_name, Name.Variable.Global, '#pop'),
- include('value')
- ],
- 'label?': [
- include('_whitespace'),
- (_name, Name.Label, '#pop'),
- default('#pop')
- ],
- 'variable?': [
- include('_whitespace'),
- (_name, Name.Variable, '#pop'),
- default('#pop')
- ],
- # Values after hashes
- 'obsolete-dictionary-word': [
- (r'\S\w*', String.Other, '#pop')
- ],
- 'system-constant': [
- include('_whitespace'),
- (_name, Name.Builtin, '#pop')
- ],
-
- # Directives
- 'directive': [
- include('_whitespace'),
- (r'#', Punctuation),
- (r';', Punctuation, '#pop'),
- (r'\[', Punctuation,
- ('default', 'statements', 'locals', 'routine-name?')),
- (words((
- 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
- 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
- 'version'), prefix='(?i)', suffix=r'\b'),
- Keyword, 'default'),
- (r'(?i)(array|global)\b', Keyword,
- ('default', 'directive-keyword?', '_global')),
- (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
- (r'(?i)class\b', Keyword,
- ('object-body', 'duplicates', 'class-name')),
- (r'(?i)(constant|default)\b', Keyword,
- ('default', 'expression', '_constant')),
- (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
- (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
- (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
- (r'(?i)import\b', Keyword, 'manifest'),
+ r'(\\\s*[%s]\s*)*\}' % (_newline, _newline, _newline),
+ String.Escape),
+ (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
+ String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ 'plain-string': [
+ (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
+ (r'[~^({\[\]]', String.Double),
+ (r'\\', String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ # Names
+ '_constant': [
+ include('_whitespace'),
+ (_name, Name.Constant, '#pop'),
+ include('value')
+ ],
+ '_global': [
+ include('_whitespace'),
+ (_name, Name.Variable.Global, '#pop'),
+ include('value')
+ ],
+ 'label?': [
+ include('_whitespace'),
+ (_name, Name.Label, '#pop'),
+ default('#pop')
+ ],
+ 'variable?': [
+ include('_whitespace'),
+ (_name, Name.Variable, '#pop'),
+ default('#pop')
+ ],
+ # Values after hashes
+ 'obsolete-dictionary-word': [
+ (r'\S\w*', String.Other, '#pop')
+ ],
+ 'system-constant': [
+ include('_whitespace'),
+ (_name, Name.Builtin, '#pop')
+ ],
+
+ # Directives
+ 'directive': [
+ include('_whitespace'),
+ (r'#', Punctuation),
+ (r';', Punctuation, '#pop'),
+ (r'\[', Punctuation,
+ ('default', 'statements', 'locals', 'routine-name?')),
+ (words((
+ 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
+ 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
+ 'version'), prefix='(?i)', suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(?i)(array|global)\b', Keyword,
+ ('default', 'directive-keyword?', '_global')),
+ (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
+ (r'(?i)class\b', Keyword,
+ ('object-body', 'duplicates', 'class-name')),
+ (r'(?i)(constant|default)\b', Keyword,
+ ('default', 'expression', '_constant')),
+ (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
+ (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
+ (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
+ (r'(?i)import\b', Keyword, 'manifest'),
(r'(?i)(include|link|origsource)\b', Keyword,
('default', 'before-plain-string?')),
- (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
- (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
- (r'(?i)(nearby|object)\b', Keyword,
- ('object-body', '_object-head')),
- (r'(?i)property\b', Keyword,
- ('default', 'alias?', '_constant', 'property-keyword*')),
- (r'(?i)replace\b', Keyword,
- ('default', 'routine-name?', 'routine-name?')),
- (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
- (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
- (r'(?i)trace\b', Keyword,
- ('default', 'trace-keyword?', 'trace-keyword?')),
- (r'(?i)zcharacter\b', Keyword,
- ('default', 'directive-keyword?', 'directive-keyword?')),
- (_name, Name.Class, ('object-body', '_object-head'))
- ],
- # [, Replace, Stub
- 'routine-name?': [
- include('_whitespace'),
- (_name, Name.Function, '#pop'),
- default('#pop')
- ],
- 'locals': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'\*', Punctuation),
- (r'"', String.Double, 'plain-string'),
- (_name, Name.Variable)
- ],
- # Array
- 'many-values': [
- include('_whitespace'),
- (r';', Punctuation),
- (r'\]', Punctuation, '#pop'),
- (r':', Error),
- default(('expression', '_expression'))
- ],
- # Attribute, Property
- 'alias?': [
- include('_whitespace'),
- (r'alias\b', Keyword, ('#pop', '_constant')),
- default('#pop')
- ],
- # Class, Object, Nearby
- 'class-name': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class, '#pop')
- ],
- 'duplicates': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
- default('#pop')
- ],
- '_object-head': [
- (r'[%s]>' % _dash, Punctuation),
- (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
- include('_global')
- ],
- 'object-body': [
- include('_whitespace'),
- (r';', Punctuation, '#pop:2'),
- (r',', Punctuation),
- (r'class\b', Keyword.Declaration, 'class-segment'),
- (r'(has|private|with)\b', Keyword.Declaration),
- (r':', Error),
- default(('_object-expression', '_expression'))
- ],
- 'class-segment': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class),
- default('value')
- ],
- # Extend, Verb
- 'grammar': [
- include('_whitespace'),
- (r'=', Punctuation, ('#pop', 'default')),
- (r'\*', Punctuation, ('#pop', 'grammar-line')),
- default('_directive-keyword')
- ],
- 'grammar-line': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'[/*]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, 'value'),
- (r'(noun|scope)\b', Keyword, '=routine'),
- default('_directive-keyword')
- ],
- '=routine': [
- include('_whitespace'),
- (r'=', Punctuation, 'routine-name?'),
- default('#pop')
- ],
- # Import
- 'manifest': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(?i)global\b', Keyword, '_global'),
- default('_global')
- ],
- # Include, Link, Message
- 'diagnostic': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
+ (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
+ (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
+ (r'(?i)(nearby|object)\b', Keyword,
+ ('object-body', '_object-head')),
+ (r'(?i)property\b', Keyword,
+ ('default', 'alias?', '_constant', 'property-keyword*')),
+ (r'(?i)replace\b', Keyword,
+ ('default', 'routine-name?', 'routine-name?')),
+ (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
+ (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
+ (r'(?i)trace\b', Keyword,
+ ('default', 'trace-keyword?', 'trace-keyword?')),
+ (r'(?i)zcharacter\b', Keyword,
+ ('default', 'directive-keyword?', 'directive-keyword?')),
+ (_name, Name.Class, ('object-body', '_object-head'))
+ ],
+ # [, Replace, Stub
+ 'routine-name?': [
+ include('_whitespace'),
+ (_name, Name.Function, '#pop'),
+ default('#pop')
+ ],
+ 'locals': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'\*', Punctuation),
+ (r'"', String.Double, 'plain-string'),
+ (_name, Name.Variable)
+ ],
+ # Array
+ 'many-values': [
+ include('_whitespace'),
+ (r';', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ (r':', Error),
+ default(('expression', '_expression'))
+ ],
+ # Attribute, Property
+ 'alias?': [
+ include('_whitespace'),
+ (r'alias\b', Keyword, ('#pop', '_constant')),
+ default('#pop')
+ ],
+ # Class, Object, Nearby
+ 'class-name': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class, '#pop')
+ ],
+ 'duplicates': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
+ default('#pop')
+ ],
+ '_object-head': [
+ (r'[%s]>' % _dash, Punctuation),
+ (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
+ include('_global')
+ ],
+ 'object-body': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop:2'),
+ (r',', Punctuation),
+ (r'class\b', Keyword.Declaration, 'class-segment'),
+ (r'(has|private|with)\b', Keyword.Declaration),
+ (r':', Error),
+ default(('_object-expression', '_expression'))
+ ],
+ 'class-segment': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class),
+ default('value')
+ ],
+ # Extend, Verb
+ 'grammar': [
+ include('_whitespace'),
+ (r'=', Punctuation, ('#pop', 'default')),
+ (r'\*', Punctuation, ('#pop', 'grammar-line')),
+ default('_directive-keyword')
+ ],
+ 'grammar-line': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'[/*]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, 'value'),
+ (r'(noun|scope)\b', Keyword, '=routine'),
+ default('_directive-keyword')
+ ],
+ '=routine': [
+ include('_whitespace'),
+ (r'=', Punctuation, 'routine-name?'),
+ default('#pop')
+ ],
+ # Import
+ 'manifest': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(?i)global\b', Keyword, '_global'),
+ default('_global')
+ ],
+ # Include, Link, Message
+ 'diagnostic': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
default(('#pop', 'before-plain-string?', 'directive-keyword?'))
- ],
+ ],
'before-plain-string?': [
- include('_whitespace'),
+ include('_whitespace'),
(r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')),
default('#pop')
- ],
- 'message-string': [
- (r'[~^]+', String.Escape),
- include('plain-string')
- ],
-
- # Keywords used in directives
- '_directive-keyword!': [
- include('_whitespace'),
- (words((
- 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
- 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi',
- 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private',
- 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating',
- 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
- Keyword, '#pop'),
+ ],
+ 'message-string': [
+ (r'[~^]+', String.Escape),
+ include('plain-string')
+ ],
+
+ # Keywords used in directives
+ '_directive-keyword!': [
+ include('_whitespace'),
+ (words((
+ 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
+ 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi',
+ 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private',
+ 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating',
+ 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
+ Keyword, '#pop'),
(r'static\b', Keyword),
- (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
- ],
- '_directive-keyword': [
- include('_directive-keyword!'),
- include('value')
- ],
- 'directive-keyword?': [
- include('_directive-keyword!'),
- default('#pop')
- ],
- 'property-keyword*': [
- include('_whitespace'),
- (r'(additive|long)\b', Keyword),
- default('#pop')
- ],
- 'trace-keyword?': [
- include('_whitespace'),
- (words((
- 'assembly', 'dictionary', 'expressions', 'lines', 'linker',
- 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
- Keyword, '#pop'),
- default('#pop')
- ],
-
- # Statements
- 'statements': [
- include('_whitespace'),
- (r'\]', Punctuation, '#pop'),
- (r'[;{}]', Punctuation),
- (words((
- 'box', 'break', 'continue', 'default', 'give', 'inversion',
- 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
- 'spaces', 'string', 'until'), suffix=r'\b'),
- Keyword, 'default'),
- (r'(do|else)\b', Keyword),
- (r'(font|style)\b', Keyword,
- ('default', 'miscellaneous-keyword?')),
- (r'for\b', Keyword, ('for', '(?')),
- (r'(if|switch|while)', Keyword,
- ('expression', '_expression', '(?')),
- (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
- (r'objectloop\b', Keyword,
- ('_keyword-expression', 'variable?', '(?')),
- (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
- (r'\.', Name.Label, 'label?'),
- (r'@', Keyword, 'opcode'),
- (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
- (r'<', Punctuation, 'default'),
- (r'move\b', Keyword,
- ('default', '_keyword-expression', '_expression')),
- default(('default', '_keyword-expression', '_expression'))
- ],
- 'miscellaneous-keyword?': [
- include('_whitespace'),
- (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
- Keyword, '#pop'),
- (r'(a|A|an|address|char|name|number|object|property|string|the|'
- r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
- '#pop'),
- (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
- '#pop'),
- default('#pop')
- ],
- '(?': [
- include('_whitespace'),
- (r'\(', Punctuation, '#pop'),
- default('#pop')
- ],
- 'for': [
- include('_whitespace'),
- (r';', Punctuation, ('_for-expression', '_expression')),
- default(('_for-expression', '_expression'))
- ],
- 'print-list': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r':', Error),
- default(('_list-expression', '_expression', '_list-expression', 'form'))
- ],
- 'form': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
- default('#pop')
- ],
-
- # Assembly
- 'opcode': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
- (_name, Keyword, 'operands')
- ],
- 'operands': [
- (r':', Error),
- default(('_assembly-expression', '_expression'))
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # 'in' is either a keyword or an operator.
- # If the token two tokens after 'in' is ')', 'in' is a keyword:
- # objectloop(a in b)
- # Otherwise, it is an operator:
- # objectloop(a in b && true)
- objectloop_queue = []
- objectloop_token_count = -1
- previous_token = None
- for index, token, value in RegexLexer.get_tokens_unprocessed(self,
- text):
- if previous_token is Name.Variable and value == 'in':
- objectloop_queue = [[index, token, value]]
- objectloop_token_count = 2
- elif objectloop_token_count > 0:
- if token not in Comment and token not in Text:
- objectloop_token_count -= 1
- objectloop_queue.append((index, token, value))
- else:
- if objectloop_token_count == 0:
- if objectloop_queue[-1][2] == ')':
- objectloop_queue[0][1] = Keyword
- while objectloop_queue:
- yield objectloop_queue.pop(0)
- objectloop_token_count = -1
- yield index, token, value
- if token not in Comment and token not in Text:
- previous_token = token
- while objectloop_queue:
- yield objectloop_queue.pop(0)
-
+ (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
+ ],
+ '_directive-keyword': [
+ include('_directive-keyword!'),
+ include('value')
+ ],
+ 'directive-keyword?': [
+ include('_directive-keyword!'),
+ default('#pop')
+ ],
+ 'property-keyword*': [
+ include('_whitespace'),
+ (r'(additive|long)\b', Keyword),
+ default('#pop')
+ ],
+ 'trace-keyword?': [
+ include('_whitespace'),
+ (words((
+ 'assembly', 'dictionary', 'expressions', 'lines', 'linker',
+ 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
+ Keyword, '#pop'),
+ default('#pop')
+ ],
+
+ # Statements
+ 'statements': [
+ include('_whitespace'),
+ (r'\]', Punctuation, '#pop'),
+ (r'[;{}]', Punctuation),
+ (words((
+ 'box', 'break', 'continue', 'default', 'give', 'inversion',
+ 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
+ 'spaces', 'string', 'until'), suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(do|else)\b', Keyword),
+ (r'(font|style)\b', Keyword,
+ ('default', 'miscellaneous-keyword?')),
+ (r'for\b', Keyword, ('for', '(?')),
+ (r'(if|switch|while)', Keyword,
+ ('expression', '_expression', '(?')),
+ (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
+ (r'objectloop\b', Keyword,
+ ('_keyword-expression', 'variable?', '(?')),
+ (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
+ (r'\.', Name.Label, 'label?'),
+ (r'@', Keyword, 'opcode'),
+ (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
+ (r'<', Punctuation, 'default'),
+ (r'move\b', Keyword,
+ ('default', '_keyword-expression', '_expression')),
+ default(('default', '_keyword-expression', '_expression'))
+ ],
+ 'miscellaneous-keyword?': [
+ include('_whitespace'),
+ (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
+ Keyword, '#pop'),
+ (r'(a|A|an|address|char|name|number|object|property|string|the|'
+ r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
+ '#pop'),
+ (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
+ '#pop'),
+ default('#pop')
+ ],
+ '(?': [
+ include('_whitespace'),
+ (r'\(', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'for': [
+ include('_whitespace'),
+ (r';', Punctuation, ('_for-expression', '_expression')),
+ default(('_for-expression', '_expression'))
+ ],
+ 'print-list': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r':', Error),
+ default(('_list-expression', '_expression', '_list-expression', 'form'))
+ ],
+ 'form': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
+ default('#pop')
+ ],
+
+ # Assembly
+ 'opcode': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
+ (_name, Keyword, 'operands')
+ ],
+ 'operands': [
+ (r':', Error),
+ default(('_assembly-expression', '_expression'))
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # 'in' is either a keyword or an operator.
+ # If the token two tokens after 'in' is ')', 'in' is a keyword:
+ # objectloop(a in b)
+ # Otherwise, it is an operator:
+ # objectloop(a in b && true)
+ objectloop_queue = []
+ objectloop_token_count = -1
+ previous_token = None
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self,
+ text):
+ if previous_token is Name.Variable and value == 'in':
+ objectloop_queue = [[index, token, value]]
+ objectloop_token_count = 2
+ elif objectloop_token_count > 0:
+ if token not in Comment and token not in Text:
+ objectloop_token_count -= 1
+ objectloop_queue.append((index, token, value))
+ else:
+ if objectloop_token_count == 0:
+ if objectloop_queue[-1][2] == ')':
+ objectloop_queue[0][1] = Keyword
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+ objectloop_token_count = -1
+ yield index, token, value
+ if token not in Comment and token not in Text:
+ previous_token = token
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+
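For reference, a minimal sketch of exercising the objectloop 'in' disambiguation implemented above (assuming Pygments is installed and this module is importable as pygments.lexers.int_fiction; the Inform 6 samples are illustrative only):

    from pygments.lexers.int_fiction import Inform6Lexer

    lexer = Inform6Lexer()
    for sample in ('[ Main; objectloop(a in b) print a; ];',
                   '[ Main; objectloop(a in b && true) print a; ];'):
        # Per the queue logic above, 'in' should come out as Keyword in the
        # first sample and keep its original operator classification in the
        # second; printing the token lets you verify either way.
        for _, token, value in lexer.get_tokens_unprocessed(sample):
            if value == 'in':
                print(repr(sample), '->', token)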
def analyse_text(text):
"""We try to find a keyword which seem relatively common, unfortunately
there is a decent overlap with Smalltalk keywords otherwise here.."""
result = 0
if re.search('\borigsource\b', text, re.IGNORECASE):
result += 0.05
-
+
return result
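The 0.05 bump only matters for lexer guessing; a small illustrative sketch (the sample text is made up) of how it surfaces through guess_lexer:

    from pygments.lexers import guess_lexer

    # analyse_text scores from every registered lexer are compared; the
    # 'origsource' hint nudges Inform 6 upward, but other heuristics may
    # still win, so treat the result as a guess, not a guarantee.
    sample = 'Origsource "kitchen.inf";\n[ Main; print "Hello^"; ];\n'
    print(guess_lexer(sample).name)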
-class Inform7Lexer(RegexLexer):
- """
- For `Inform 7 <http://inform7.com/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 7'
- aliases = ['inform7', 'i7']
- filenames = ['*.ni', '*.i7x']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- _dash = Inform6Lexer._dash
- _dquote = Inform6Lexer._dquote
- _newline = Inform6Lexer._newline
- _start = r'\A|(?<=[%s])' % _newline
-
- # There are three variants of Inform 7, differing in how to
- # interpret at signs and braces in I6T. In top-level inclusions, at
- # signs in the first column are inweb syntax. In phrase definitions
- # and use options, tokens in braces are treated as I7. Use options
- # also interpret "{N}".
- tokens = {}
- token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
-
- for level in token_variants:
- tokens[level] = {
- '+i6-root': list(Inform6Lexer.tokens['root']),
- '+i6t-root': [ # For Inform6TemplateLexer
- (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
- ('directive', '+p'))
- ],
- 'root': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]' % _dquote, Generic.Heading,
- ('+main', '+titling', '+titling-string')),
- default(('+main', '+heading?'))
- ],
- '+titling-string': [
- (r'[^%s]+' % _dquote, Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '#pop')
- ],
- '+titling': [
- (r'\[', Comment.Multiline, '+comment'),
- (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
- (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
- Text, ('#pop', '+heading?')),
- (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
- (r'[|%s]' % _newline, Generic.Heading)
- ],
- '+main': [
- (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
- (r'[%s]' % _dquote, String.Double, '+text'),
- (r':', Text, '+phrase-definition'),
- (r'(?i)\bas\b', Text, '+use-option'),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-not-inline'), Punctuation)),
- (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
- (_start, _dquote, _newline), Text, '+heading?'),
- (r'(?i)[a(|%s]' % _newline, Text)
- ],
- '+phrase-definition': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive',
- 'default', 'statements'),
- i6t='+i6t-inline'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+use-option': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-use-option'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+comment': [
- (r'[^\[\]]+', Comment.Multiline),
- (r'\[', Comment.Multiline, '#push'),
- (r'\]', Comment.Multiline, '#pop')
- ],
- '+text': [
- (r'[^\[%s]+' % _dquote, String.Double),
- (r'\[.*?\]', String.Interpol),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- '+heading?': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
- (r'[%s]{1,3}' % _dash, Text),
- (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
- Generic.Heading, '#pop'),
- default('#pop')
- ],
- '+documentation-heading': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(?i)documentation\s+', Text, '+documentation-heading2'),
- default('#pop')
- ],
- '+documentation-heading2': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s' % _dash, Text, '+documentation'),
- default('#pop:2')
- ],
- '+documentation': [
- (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
- (_start, _newline), Generic.Heading),
- (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
- Generic.Subheading),
- (r'((%s)\t.*?[%s])+' % (_start, _newline),
- using(this, state='+main')),
- (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
- (r'\[', Comment.Multiline, '+comment'),
- ],
- '+i6t-not-inline': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p')
- ],
- '+i6t-use-option': [
- include('+i6t-not-inline'),
- (r'(\{)(N)(\})', bygroups(Punctuation, Text, Punctuation))
- ],
- '+i6t-inline': [
- (r'(\{)(\S[^}]*)?(\})',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+i6t': [
- (r'(\{[%s])(![^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Comment.Single, Punctuation)),
- (r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation), '+lines'),
- (r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation)),
- (r'(\(\+)(.*?)(\+\)|\Z)',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+p': [
- (r'[^@]+', Comment.Preproc),
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc, '#pop'),
- (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading),
- (r'@', Comment.Preproc)
- ],
- '+lines': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p'),
- (r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
- (r'![^%s]*' % _newline, Comment.Single),
- (r'(\{)([%s]endlines)(\})' % _dash,
- bygroups(Punctuation, Keyword, Punctuation), '#pop'),
- (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
- ]
- }
- # Inform 7 can include snippets of Inform 6 template language,
- # so all of Inform6Lexer's states are copied here, with
- # modifications to account for template syntax. Inform7Lexer's
- # own states begin with '+' to avoid name conflicts. Some of
- # Inform6Lexer's states begin with '_': these are not modified.
- # They deal with template syntax either by including modified
- # states, or by matching r'' then pushing to modified states.
- for token in Inform6Lexer.tokens:
- if token == 'root':
- continue
- tokens[level][token] = list(Inform6Lexer.tokens[token])
- if not token.startswith('_'):
- tokens[level][token][:0] = [include('+i6t'), include(level)]
-
- def __init__(self, **options):
- level = options.get('i6t', '+i6t-not-inline')
- if level not in self._all_tokens:
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
- RegexLexer.__init__(self, **options)
-
-
-class Inform6TemplateLexer(Inform7Lexer):
- """
- For `Inform 6 template
- <http://inform7.com/sources/src/i6template/Woven/index.html>`_ code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6 template'
- aliases = ['i6t']
- filenames = ['*.i6t']
-
- def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
- return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
-
-
-class Tads3Lexer(RegexLexer):
- """
- For `TADS 3 <http://www.tads.org/>`_ source code.
- """
-
- name = 'TADS 3'
- aliases = ['tads3']
- filenames = ['*.t']
-
- flags = re.DOTALL | re.MULTILINE
-
- _comment_single = r'(?://(?:[^\\\n]|\\+[\w\W])*$)'
- _comment_multiline = r'(?:/\*(?:[^*]|\*(?!/))*\*/)'
- _escape = (r'(?:\\(?:[\n\\<>"\'^v bnrt]|u[\da-fA-F]{,4}|x[\da-fA-F]{,2}|'
- r'[0-3]?[0-7]{1,2}))')
- _name = r'(?:[_a-zA-Z]\w*)'
- _no_quote = r'(?=\s|\\?>)'
- _operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
- r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
- _ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
- _ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
-
- def _make_string_state(triple, double, verbatim=None, _escape=_escape):
- if verbatim:
- verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
- re.escape(c.upper()))
- for c in verbatim])
- char = r'"' if double else r"'"
- token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
- prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
- tag_state_name = '%sqt' % prefix
- state = []
- if triple:
- state += [
- (r'%s{3,}' % char, token, '#pop'),
- (r'\\%s+' % char, String.Escape),
- (char, token)
- ]
- else:
- state.append((char, token, '#pop'))
- state += [
- include('s/verbatim'),
- (r'[^\\<&{}%s]+' % char, token)
- ]
- if verbatim:
- # This regex can't use `(?i)` because escape sequences are
- # case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
- state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
- (_escape, verbatim),
- Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
- else:
- state += [
- (r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
- (char, char, escaped_quotes, _escape), Comment.Multiline),
- (r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/listing' % prefix, tag_state_name)),
- (r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/xmp' % prefix, tag_state_name)),
- (r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
- (char, char, escaped_quotes, _escape), Name.Tag,
- tag_state_name),
- include('s/entity')
- ]
- state += [
- include('s/escape'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
- (r'[\\&{}<]', token)
- ]
- return state
-
- def _make_tag_state(triple, double, _escape=_escape):
- char = r'"' if double else r"'"
- quantifier = r'{3,}' if triple else r''
- state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
- token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
- return [
- (r'%s%s' % (char, quantifier), token, '#pop:2'),
- (r'(\s|\\\n)+', Text),
- (r'(=)(\\?")', bygroups(Punctuation, String.Double),
- 'dqs/%s' % state_name),
- (r"(=)(\\?')", bygroups(Punctuation, String.Single),
- 'sqs/%s' % state_name),
- (r'=', Punctuation, 'uqs/%s' % state_name),
- (r'\\?>', Name.Tag, '#pop'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
- (r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
- (char, char, escaped_quotes, _escape), Name.Attribute),
- include('s/escape'),
- include('s/verbatim'),
- include('s/entity'),
- (r'[\\{}&]', Name.Attribute)
- ]
-
- def _make_attribute_value_state(terminator, host_triple, host_double,
- _escape=_escape):
- token = (String.Double if terminator == r'"' else
- String.Single if terminator == r"'" else String.Other)
- host_char = r'"' if host_double else r"'"
- host_quantifier = r'{3,}' if host_triple else r''
- host_token = String.Double if host_double else String.Single
- escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
- if host_triple else r'')
- return [
- (r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
- (r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
- token, '#pop'),
- include('s/verbatim'),
- include('s/entity'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (host_char, host_char, escaped_quotes, _escape), String.Interpol),
- (r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
- token),
- include('s/escape'),
- (r'["\'\s&{<}\\]', token)
- ]
-
- tokens = {
- 'root': [
+class Inform7Lexer(RegexLexer):
+ """
+ For `Inform 7 <http://inform7.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 7'
+ aliases = ['inform7', 'i7']
+ filenames = ['*.ni', '*.i7x']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _dash = Inform6Lexer._dash
+ _dquote = Inform6Lexer._dquote
+ _newline = Inform6Lexer._newline
+ _start = r'\A|(?<=[%s])' % _newline
+
+ # There are three variants of Inform 7, differing in how to
+ # interpret at signs and braces in I6T. In top-level inclusions, at
+ # signs in the first column are inweb syntax. In phrase definitions
+ # and use options, tokens in braces are treated as I7. Use options
+ # also interpret "{N}".
+ tokens = {}
+ token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
+
+ for level in token_variants:
+ tokens[level] = {
+ '+i6-root': list(Inform6Lexer.tokens['root']),
+ '+i6t-root': [ # For Inform6TemplateLexer
+ (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
+ ('directive', '+p'))
+ ],
+ 'root': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]' % _dquote, Generic.Heading,
+ ('+main', '+titling', '+titling-string')),
+ default(('+main', '+heading?'))
+ ],
+ '+titling-string': [
+ (r'[^%s]+' % _dquote, Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '#pop')
+ ],
+ '+titling': [
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
+ (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
+ Text, ('#pop', '+heading?')),
+ (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
+ (r'[|%s]' % _newline, Generic.Heading)
+ ],
+ '+main': [
+ (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
+ (r'[%s]' % _dquote, String.Double, '+text'),
+ (r':', Text, '+phrase-definition'),
+ (r'(?i)\bas\b', Text, '+use-option'),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-not-inline'), Punctuation)),
+ (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
+ (_start, _dquote, _newline), Text, '+heading?'),
+ (r'(?i)[a(|%s]' % _newline, Text)
+ ],
+ '+phrase-definition': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive',
+ 'default', 'statements'),
+ i6t='+i6t-inline'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+use-option': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-use-option'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+comment': [
+ (r'[^\[\]]+', Comment.Multiline),
+ (r'\[', Comment.Multiline, '#push'),
+ (r'\]', Comment.Multiline, '#pop')
+ ],
+ '+text': [
+ (r'[^\[%s]+' % _dquote, String.Double),
+ (r'\[.*?\]', String.Interpol),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ '+heading?': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
+ (r'[%s]{1,3}' % _dash, Text),
+ (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
+ Generic.Heading, '#pop'),
+ default('#pop')
+ ],
+ '+documentation-heading': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(?i)documentation\s+', Text, '+documentation-heading2'),
+ default('#pop')
+ ],
+ '+documentation-heading2': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s' % _dash, Text, '+documentation'),
+ default('#pop:2')
+ ],
+ '+documentation': [
+ (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
+ (_start, _newline), Generic.Heading),
+ (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
+ Generic.Subheading),
+ (r'((%s)\t.*?[%s])+' % (_start, _newline),
+ using(this, state='+main')),
+ (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ ],
+ '+i6t-not-inline': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p')
+ ],
+ '+i6t-use-option': [
+ include('+i6t-not-inline'),
+ (r'(\{)(N)(\})', bygroups(Punctuation, Text, Punctuation))
+ ],
+ '+i6t-inline': [
+ (r'(\{)(\S[^}]*)?(\})',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+i6t': [
+ (r'(\{[%s])(![^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Comment.Single, Punctuation)),
+ (r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation), '+lines'),
+ (r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation)),
+ (r'(\(\+)(.*?)(\+\)|\Z)',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+p': [
+ (r'[^@]+', Comment.Preproc),
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc, '#pop'),
+ (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading),
+ (r'@', Comment.Preproc)
+ ],
+ '+lines': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p'),
+ (r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
+ (r'![^%s]*' % _newline, Comment.Single),
+ (r'(\{)([%s]endlines)(\})' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation), '#pop'),
+ (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
+ ]
+ }
+ # Inform 7 can include snippets of Inform 6 template language,
+ # so all of Inform6Lexer's states are copied here, with
+ # modifications to account for template syntax. Inform7Lexer's
+ # own states begin with '+' to avoid name conflicts. Some of
+ # Inform6Lexer's states begin with '_': these are not modified.
+ # They deal with template syntax either by including modified
+ # states, or by matching r'' then pushing to modified states.
+ for token in Inform6Lexer.tokens:
+ if token == 'root':
+ continue
+ tokens[level][token] = list(Inform6Lexer.tokens[token])
+ if not token.startswith('_'):
+ tokens[level][token][:0] = [include('+i6t'), include(level)]
+
+ def __init__(self, **options):
+ level = options.get('i6t', '+i6t-not-inline')
+ if level not in self._all_tokens:
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+ RegexLexer.__init__(self, **options)
+
+
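As a usage note (option and state names are taken from the code above, not from a documented API), the 'i6t' option selects which of the three precompiled token tables the lexer uses; a minimal sketch:

    from pygments.lexers.int_fiction import Inform7Lexer

    # '+i6t-not-inline' is the default table; the other two variants are
    # '+i6t-inline' and '+i6t-use-option', per token_variants above.
    default_lexer = Inform7Lexer()
    inline_lexer = Inform7Lexer(i6t='+i6t-inline')
    print(default_lexer.name, inline_lexer.name)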
+class Inform6TemplateLexer(Inform7Lexer):
+ """
+ For `Inform 6 template
+ <http://inform7.com/sources/src/i6template/Woven/index.html>`_ code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6 template'
+ aliases = ['i6t']
+ filenames = ['*.i6t']
+
+ def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
+ return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
+
+
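A brief, hedged example of feeding template code through Inform6TemplateLexer (the I6T snippet is illustrative only):

    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexers.int_fiction import Inform6TemplateLexer

    # NullFormatter returns the text unchanged, so this simply checks that
    # the '+i6t-root' entry state tokenizes an '@p' heading without errors.
    i6t_source = '@p Purpose.\nSome template commentary here.\n'
    print(highlight(i6t_source, Inform6TemplateLexer(), NullFormatter()))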
+class Tads3Lexer(RegexLexer):
+ """
+ For `TADS 3 <http://www.tads.org/>`_ source code.
+ """
+
+ name = 'TADS 3'
+ aliases = ['tads3']
+ filenames = ['*.t']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ _comment_single = r'(?://(?:[^\\\n]|\\+[\w\W])*$)'
+ _comment_multiline = r'(?:/\*(?:[^*]|\*(?!/))*\*/)'
+ _escape = (r'(?:\\(?:[\n\\<>"\'^v bnrt]|u[\da-fA-F]{,4}|x[\da-fA-F]{,2}|'
+ r'[0-3]?[0-7]{1,2}))')
+ _name = r'(?:[_a-zA-Z]\w*)'
+ _no_quote = r'(?=\s|\\?>)'
+ _operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
+ r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
+ _ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
+ _ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
+
+ def _make_string_state(triple, double, verbatim=None, _escape=_escape):
+ if verbatim:
+ verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
+ re.escape(c.upper()))
+ for c in verbatim])
+ char = r'"' if double else r"'"
+ token = String.Double if double else String.Single
+ escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
+ prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
+ tag_state_name = '%sqt' % prefix
+ state = []
+ if triple:
+ state += [
+ (r'%s{3,}' % char, token, '#pop'),
+ (r'\\%s+' % char, String.Escape),
+ (char, token)
+ ]
+ else:
+ state.append((char, token, '#pop'))
+ state += [
+ include('s/verbatim'),
+ (r'[^\\<&{}%s]+' % char, token)
+ ]
+ if verbatim:
+ # This regex can't use `(?i)` because escape sequences are
+ # case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
+ state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
+ (_escape, verbatim),
+ Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
+ else:
+ state += [
+ (r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
+ (char, char, escaped_quotes, _escape), Comment.Multiline),
+ (r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
+ ('#pop', '%sqs/listing' % prefix, tag_state_name)),
+ (r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
+ ('#pop', '%sqs/xmp' % prefix, tag_state_name)),
+ (r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
+ (char, char, escaped_quotes, _escape), Name.Tag,
+ tag_state_name),
+ include('s/entity')
+ ]
+ state += [
+ include('s/escape'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (char, char, escaped_quotes, _escape), String.Interpol),
+ (r'[\\&{}<]', token)
+ ]
+ return state
+
+ def _make_tag_state(triple, double, _escape=_escape):
+ char = r'"' if double else r"'"
+ quantifier = r'{3,}' if triple else r''
+ state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
+ token = String.Double if double else String.Single
+ escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
+ return [
+ (r'%s%s' % (char, quantifier), token, '#pop:2'),
+ (r'(\s|\\\n)+', Text),
+ (r'(=)(\\?")', bygroups(Punctuation, String.Double),
+ 'dqs/%s' % state_name),
+ (r"(=)(\\?')", bygroups(Punctuation, String.Single),
+ 'sqs/%s' % state_name),
+ (r'=', Punctuation, 'uqs/%s' % state_name),
+ (r'\\?>', Name.Tag, '#pop'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (char, char, escaped_quotes, _escape), String.Interpol),
+ (r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
+ (char, char, escaped_quotes, _escape), Name.Attribute),
+ include('s/escape'),
+ include('s/verbatim'),
+ include('s/entity'),
+ (r'[\\{}&]', Name.Attribute)
+ ]
+
+ def _make_attribute_value_state(terminator, host_triple, host_double,
+ _escape=_escape):
+ token = (String.Double if terminator == r'"' else
+ String.Single if terminator == r"'" else String.Other)
+ host_char = r'"' if host_double else r"'"
+ host_quantifier = r'{3,}' if host_triple else r''
+ host_token = String.Double if host_double else String.Single
+ escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
+ if host_triple else r'')
+ return [
+ (r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
+ (r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
+ token, '#pop'),
+ include('s/verbatim'),
+ include('s/entity'),
+ (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
+ (host_char, host_char, escaped_quotes, _escape), String.Interpol),
+ (r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
+ token),
+ include('s/escape'),
+ (r'["\'\s&{<}\\]', token)
+ ]
+
+ tokens = {
+ 'root': [
('\ufeff', Text),
- (r'\{', Punctuation, 'object-body'),
- (r';+', Punctuation),
- (r'(?=(argcount|break|case|catch|continue|default|definingobj|'
- r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
- r'invokee|local|nil|new|operator|replaced|return|self|switch|'
- r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
- (r'(%s)(%s*)(\()' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation),
- ('block?/root', 'more/parameters', 'main/parameters')),
- include('whitespace'),
- (r'\++', Punctuation),
- (r'[^\s!"%-(*->@-_a-z{-~]+', Error), # Averts an infinite loop
- (r'(?!\Z)', Text, 'main/root')
- ],
- 'main/root': [
- include('main/basic'),
- default(('#pop', 'object-body/no-braces', 'classes', 'class'))
- ],
- 'object-body/no-braces': [
- (r';', Punctuation, '#pop'),
- (r'\{', Punctuation, ('#pop', 'object-body')),
- include('object-body')
- ],
- 'object-body': [
- (r';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (r':', Punctuation, ('classes', 'class')),
- (r'(%s?)(%s*)(\()' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation),
- ('block?', 'more/parameters', 'main/parameters')),
- (r'(%s)(%s*)(\{)' % (_name, _ws),
- bygroups(Name.Function, using(this, state='whitespace'),
- Punctuation), 'block'),
- (r'(%s)(%s*)(:)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace'),
- Punctuation),
- ('object-body/no-braces', 'classes', 'class')),
- include('whitespace'),
- (r'->|%s' % _operator, Punctuation, 'main'),
- default('main/object-body')
- ],
- 'main/object-body': [
- include('main/basic'),
- (r'(%s)(%s*)(=?)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace'),
- Punctuation), ('#pop', 'more', 'main')),
- default('#pop:2')
- ],
- 'block?/root': [
- (r'\{', Punctuation, ('#pop', 'block')),
- include('whitespace'),
+ (r'\{', Punctuation, 'object-body'),
+ (r';+', Punctuation),
+ (r'(?=(argcount|break|case|catch|continue|default|definingobj|'
+ r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
+ r'invokee|local|nil|new|operator|replaced|return|self|switch|'
+ r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
+ (r'(%s)(%s*)(\()' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation),
+ ('block?/root', 'more/parameters', 'main/parameters')),
+ include('whitespace'),
+ (r'\++', Punctuation),
+ (r'[^\s!"%-(*->@-_a-z{-~]+', Error), # Averts an infinite loop
+ (r'(?!\Z)', Text, 'main/root')
+ ],
+ 'main/root': [
+ include('main/basic'),
+ default(('#pop', 'object-body/no-braces', 'classes', 'class'))
+ ],
+ 'object-body/no-braces': [
+ (r';', Punctuation, '#pop'),
+ (r'\{', Punctuation, ('#pop', 'object-body')),
+ include('object-body')
+ ],
+ 'object-body': [
+ (r';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (r':', Punctuation, ('classes', 'class')),
+ (r'(%s?)(%s*)(\()' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation),
+ ('block?', 'more/parameters', 'main/parameters')),
+ (r'(%s)(%s*)(\{)' % (_name, _ws),
+ bygroups(Name.Function, using(this, state='whitespace'),
+ Punctuation), 'block'),
+ (r'(%s)(%s*)(:)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace'),
+ Punctuation),
+ ('object-body/no-braces', 'classes', 'class')),
+ include('whitespace'),
+ (r'->|%s' % _operator, Punctuation, 'main'),
+ default('main/object-body')
+ ],
+ 'main/object-body': [
+ include('main/basic'),
+ (r'(%s)(%s*)(=?)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace'),
+ Punctuation), ('#pop', 'more', 'main')),
+ default('#pop:2')
+ ],
+ 'block?/root': [
+ (r'\{', Punctuation, ('#pop', 'block')),
+ include('whitespace'),
(r'(?=[\[\'"<(:])', Text, # It might be a VerbRule macro.
- ('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
- # It might be a macro like DefineAction.
- default(('#pop', 'object-body/no-braces'))
- ],
- 'block?': [
- (r'\{', Punctuation, ('#pop', 'block')),
- include('whitespace'),
- default('#pop')
- ],
- 'block/basic': [
- (r'[;:]+', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- (r'default\b', Keyword.Reserved),
- (r'(%s)(%s*)(:)' % (_name, _ws),
- bygroups(Name.Label, using(this, state='whitespace'),
- Punctuation)),
- include('whitespace')
- ],
- 'block': [
- include('block/basic'),
- (r'(?!\Z)', Text, ('more', 'main'))
- ],
- 'block/embed': [
- (r'>>', String.Interpol, '#pop'),
- include('block/basic'),
- (r'(?!\Z)', Text, ('more/embed', 'main'))
- ],
- 'main/basic': [
- include('whitespace'),
- (r'\(', Punctuation, ('#pop', 'more', 'main')),
- (r'\[', Punctuation, ('#pop', 'more/list', 'main')),
- (r'\{', Punctuation, ('#pop', 'more/inner', 'main/inner',
- 'more/parameters', 'main/parameters')),
- (r'\*|\.{3}', Punctuation, '#pop'),
- (r'(?i)0x[\da-f]+', Number.Hex, '#pop'),
- (r'(\d+\.(?!\.)\d*|\.\d+)([eE][-+]?\d+)?|\d+[eE][-+]?\d+',
- Number.Float, '#pop'),
- (r'0[0-7]+', Number.Oct, '#pop'),
- (r'\d+', Number.Integer, '#pop'),
- (r'"""', String.Double, ('#pop', 'tdqs')),
- (r"'''", String.Single, ('#pop', 'tsqs')),
- (r'"', String.Double, ('#pop', 'dqs')),
- (r"'", String.Single, ('#pop', 'sqs')),
- (r'R"""', String.Regex, ('#pop', 'tdqr')),
- (r"R'''", String.Regex, ('#pop', 'tsqr')),
- (r'R"', String.Regex, ('#pop', 'dqr')),
- (r"R'", String.Regex, ('#pop', 'sqr')),
- # Two-token keywords
- (r'(extern)(%s+)(object\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved)),
- (r'(function|method)(%s*)(\()' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Punctuation),
- ('#pop', 'block?', 'more/parameters', 'main/parameters')),
- (r'(modify)(%s+)(grammar\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved),
- ('#pop', 'object-body/no-braces', ':', 'grammar')),
- (r'(new)(%s+(?=(?:function|method)\b))' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'))),
- (r'(object)(%s+)(template\b)' % _ws,
- bygroups(Keyword.Reserved, using(this, state='whitespace'),
- Keyword.Reserved), ('#pop', 'template')),
- (r'(string)(%s+)(template\b)' % _ws,
- bygroups(Keyword, using(this, state='whitespace'),
- Keyword.Reserved), ('#pop', 'function-name')),
- # Keywords
- (r'(argcount|definingobj|invokee|replaced|targetobj|targetprop)\b',
- Name.Builtin, '#pop'),
- (r'(break|continue|goto)\b', Keyword.Reserved, ('#pop', 'label')),
- (r'(case|extern|if|intrinsic|return|static|while)\b',
- Keyword.Reserved),
- (r'catch\b', Keyword.Reserved, ('#pop', 'catch')),
- (r'class\b', Keyword.Reserved,
- ('#pop', 'object-body/no-braces', 'class')),
- (r'(default|do|else|finally|try)\b', Keyword.Reserved, '#pop'),
- (r'(dictionary|property)\b', Keyword.Reserved,
- ('#pop', 'constants')),
- (r'enum\b', Keyword.Reserved, ('#pop', 'enum')),
- (r'export\b', Keyword.Reserved, ('#pop', 'main')),
- (r'(for|foreach)\b', Keyword.Reserved,
- ('#pop', 'more/inner', 'main/inner')),
- (r'(function|method)\b', Keyword.Reserved,
- ('#pop', 'block?', 'function-name')),
- (r'grammar\b', Keyword.Reserved,
- ('#pop', 'object-body/no-braces', 'grammar')),
- (r'inherited\b', Keyword.Reserved, ('#pop', 'inherited')),
- (r'local\b', Keyword.Reserved,
- ('#pop', 'more/local', 'main/local')),
- (r'(modify|replace|switch|throw|transient)\b', Keyword.Reserved,
- '#pop'),
- (r'new\b', Keyword.Reserved, ('#pop', 'class')),
- (r'(nil|true)\b', Keyword.Constant, '#pop'),
- (r'object\b', Keyword.Reserved, ('#pop', 'object-body/no-braces')),
- (r'operator\b', Keyword.Reserved, ('#pop', 'operator')),
- (r'propertyset\b', Keyword.Reserved,
- ('#pop', 'propertyset', 'main')),
- (r'self\b', Name.Builtin.Pseudo, '#pop'),
- (r'template\b', Keyword.Reserved, ('#pop', 'template')),
- # Operators
- (r'(__objref|defined)(%s*)(\()' % _ws,
- bygroups(Operator.Word, using(this, state='whitespace'),
- Operator), ('#pop', 'more/__objref', 'main')),
- (r'delegated\b', Operator.Word),
- # Compiler-defined macros and built-in properties
- (r'(__DATE__|__DEBUG|__LINE__|__FILE__|'
- r'__TADS_MACRO_FORMAT_VERSION|__TADS_SYS_\w*|__TADS_SYSTEM_NAME|'
- r'__TADS_VERSION_MAJOR|__TADS_VERSION_MINOR|__TADS3|__TIME__|'
- r'construct|finalize|grammarInfo|grammarTag|lexicalParent|'
- r'miscVocab|sourceTextGroup|sourceTextGroupName|'
- r'sourceTextGroupOrder|sourceTextOrder)\b', Name.Builtin, '#pop')
- ],
- 'main': [
- include('main/basic'),
- (_name, Name, '#pop'),
- default('#pop')
- ],
- 'more/basic': [
- (r'\(', Punctuation, ('more/list', 'main')),
- (r'\[', Punctuation, ('more', 'main')),
- (r'\.{3}', Punctuation),
- (r'->|\.\.', Punctuation, 'main'),
- (r'(?=;)|[:)\]]', Punctuation, '#pop'),
- include('whitespace'),
- (_operator, Operator, 'main'),
- (r'\?', Operator, ('main', 'more/conditional', 'main')),
- (r'(is|not)(%s+)(in\b)' % _ws,
- bygroups(Operator.Word, using(this, state='whitespace'),
- Operator.Word)),
- (r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
- ],
- 'more': [
- include('more/basic'),
- default('#pop')
- ],
- # Then expression (conditional operator)
- 'more/conditional': [
- (r':(?!:)', Operator, '#pop'),
- include('more')
- ],
- # Embedded expressions
- 'more/embed': [
- (r'>>', String.Interpol, '#pop:2'),
- include('more')
- ],
- # For/foreach loop initializer or short-form anonymous function
- 'main/inner': [
- (r'\(', Punctuation, ('#pop', 'more/inner', 'main/inner')),
- (r'local\b', Keyword.Reserved, ('#pop', 'main/local')),
- include('main')
- ],
- 'more/inner': [
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, 'main/inner'),
- (r'(in|step)\b', Keyword, 'main/inner'),
- include('more')
- ],
- # Local
- 'main/local': [
- (_name, Name.Variable, '#pop'),
- include('whitespace')
- ],
- 'more/local': [
- (r',', Punctuation, 'main/local'),
- include('more')
- ],
- # List
- 'more/list': [
- (r'[,:]', Punctuation, 'main'),
- include('more')
- ],
- # Parameter list
- 'main/parameters': [
- (r'(%s)(%s*)(?=:)' % (_name, _ws),
- bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
- (r'(%s)(%s+)(%s)' % (_name, _ws, _name),
- bygroups(Name.Class, using(this, state='whitespace'),
- Name.Variable), '#pop'),
- (r'\[+', Punctuation),
- include('main/basic'),
- (_name, Name.Variable, '#pop'),
- default('#pop')
- ],
- 'more/parameters': [
- (r'(:)(%s*(?=[?=,:)]))' % _ws,
- bygroups(Punctuation, using(this, state='whitespace'))),
- (r'[?\]]+', Punctuation),
- (r'[:)]', Punctuation, ('#pop', 'multimethod?')),
- (r',', Punctuation, 'main/parameters'),
- (r'=', Punctuation, ('more/parameter', 'main')),
- include('more')
- ],
- 'more/parameter': [
- (r'(?=[,)])', Text, '#pop'),
- include('more')
- ],
- 'multimethod?': [
- (r'multimethod\b', Keyword, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
-
- # Statements and expressions
- 'more/__objref': [
- (r',', Punctuation, 'mode'),
- (r'\)', Operator, '#pop'),
- include('more')
- ],
- 'mode': [
- (r'(error|warn)\b', Keyword, '#pop'),
- include('whitespace')
- ],
- 'catch': [
- (r'\(+', Punctuation),
- (_name, Name.Exception, ('#pop', 'variables')),
- include('whitespace')
- ],
- 'enum': [
- include('whitespace'),
- (r'token\b', Keyword, ('#pop', 'constants')),
- default(('#pop', 'constants'))
- ],
- 'grammar': [
- (r'\)+', Punctuation),
- (r'\(', Punctuation, 'grammar-tag'),
- (r':', Punctuation, 'grammar-rules'),
- (_name, Name.Class),
- include('whitespace')
- ],
- 'grammar-tag': [
- include('whitespace'),
- (r'"""([^\\"<]|""?(?!")|\\"+|\\.|<(?!<))+("{3,}|<<)|'
- r'R"""([^\\"]|""?(?!")|\\"+|\\.)+"{3,}|'
- r"'''([^\\'<]|''?(?!')|\\'+|\\.|<(?!<))+('{3,}|<<)|"
- r"R'''([^\\']|''?(?!')|\\'+|\\.)+'{3,}|"
- r'"([^\\"<]|\\.|<(?!<))+("|<<)|R"([^\\"]|\\.)+"|'
- r"'([^\\'<]|\\.|<(?!<))+('|<<)|R'([^\\']|\\.)+'|"
- r"([^)\s\\/]|/(?![/*]))+|\)", String.Other, '#pop')
- ],
- 'grammar-rules': [
- include('string'),
- include('whitespace'),
- (r'(\[)(%s*)(badness)' % _ws,
- bygroups(Punctuation, using(this, state='whitespace'), Keyword),
- 'main'),
- (r'->|%s|[()]' % _operator, Punctuation),
- (_name, Name.Constant),
- default('#pop:2')
- ],
- ':': [
- (r':', Punctuation, '#pop')
- ],
- 'function-name': [
- (r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
- (r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
- (_name, Name.Function, '#pop'),
- include('whitespace')
- ],
- 'inherited': [
- (r'<', Punctuation, ('#pop', 'classes', 'class')),
- include('whitespace'),
- (_name, Name.Class, '#pop'),
- default('#pop')
- ],
- 'operator': [
- (r'negate\b', Operator.Word, '#pop'),
- include('whitespace'),
- (_operator, Operator),
- default('#pop')
- ],
- 'propertyset': [
- (r'\(', Punctuation, ('more/parameters', 'main/parameters')),
- (r'\{', Punctuation, ('#pop', 'object-body')),
- include('whitespace')
- ],
- 'template': [
- (r'(?=;)', Text, '#pop'),
- include('string'),
- (r'inherited\b', Keyword.Reserved),
- include('whitespace'),
- (r'->|\?|%s' % _operator, Punctuation),
- (_name, Name.Variable)
- ],
-
- # Identifiers
- 'class': [
- (r'\*|\.{3}', Punctuation, '#pop'),
- (r'object\b', Keyword.Reserved, '#pop'),
- (r'transient\b', Keyword.Reserved),
- (_name, Name.Class, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
- 'classes': [
- (r'[:,]', Punctuation, 'class'),
- include('whitespace'),
- (r'>', Punctuation, '#pop'),
- default('#pop')
- ],
- 'constants': [
- (r',+', Punctuation),
- (r';', Punctuation, '#pop'),
- (r'property\b', Keyword.Reserved),
- (_name, Name.Constant),
- include('whitespace')
- ],
- 'label': [
- (_name, Name.Label, '#pop'),
- include('whitespace'),
- default('#pop')
- ],
- 'variables': [
- (r',+', Punctuation),
- (r'\)', Punctuation, '#pop'),
- include('whitespace'),
- (_name, Name.Variable)
- ],
-
- # Whitespace and comments
- 'whitespace': [
- (r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
- Comment.Preproc),
- (_comment_single, Comment.Single),
- (_comment_multiline, Comment.Multiline),
- (r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
- ],
-
- # Strings
- 'string': [
- (r'"""', String.Double, 'tdqs'),
- (r"'''", String.Single, 'tsqs'),
- (r'"', String.Double, 'dqs'),
- (r"'", String.Single, 'sqs')
- ],
- 's/escape': [
- (r'\{\{|\}\}|%s' % _escape, String.Escape)
- ],
- 's/verbatim': [
- (r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
- r'first\s+time|one\s+of|only|or|otherwise|'
- r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
- r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
- (r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
- r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
- String.Interpol, ('block/embed', 'more/embed', 'main'))
- ],
- 's/entity': [
- (r'(?i)&(#(x[\da-f]+|\d+)|[a-z][\da-z]*);?', Name.Entity)
- ],
- 'tdqs': _make_string_state(True, True),
- 'tsqs': _make_string_state(True, False),
- 'dqs': _make_string_state(False, True),
- 'sqs': _make_string_state(False, False),
- 'tdqs/listing': _make_string_state(True, True, 'listing'),
- 'tsqs/listing': _make_string_state(True, False, 'listing'),
- 'dqs/listing': _make_string_state(False, True, 'listing'),
- 'sqs/listing': _make_string_state(False, False, 'listing'),
- 'tdqs/xmp': _make_string_state(True, True, 'xmp'),
- 'tsqs/xmp': _make_string_state(True, False, 'xmp'),
- 'dqs/xmp': _make_string_state(False, True, 'xmp'),
- 'sqs/xmp': _make_string_state(False, False, 'xmp'),
-
- # Tags
- 'tdqt': _make_tag_state(True, True),
- 'tsqt': _make_tag_state(True, False),
- 'dqt': _make_tag_state(False, True),
- 'sqt': _make_tag_state(False, False),
- 'dqs/tdqt': _make_attribute_value_state(r'"', True, True),
- 'dqs/tsqt': _make_attribute_value_state(r'"', True, False),
- 'dqs/dqt': _make_attribute_value_state(r'"', False, True),
- 'dqs/sqt': _make_attribute_value_state(r'"', False, False),
- 'sqs/tdqt': _make_attribute_value_state(r"'", True, True),
- 'sqs/tsqt': _make_attribute_value_state(r"'", True, False),
- 'sqs/dqt': _make_attribute_value_state(r"'", False, True),
- 'sqs/sqt': _make_attribute_value_state(r"'", False, False),
- 'uqs/tdqt': _make_attribute_value_state(_no_quote, True, True),
- 'uqs/tsqt': _make_attribute_value_state(_no_quote, True, False),
- 'uqs/dqt': _make_attribute_value_state(_no_quote, False, True),
- 'uqs/sqt': _make_attribute_value_state(_no_quote, False, False),
-
- # Regular expressions
- 'tdqr': [
- (r'[^\\"]+', String.Regex),
- (r'\\"*', String.Regex),
- (r'"{3,}', String.Regex, '#pop'),
- (r'"', String.Regex)
- ],
- 'tsqr': [
- (r"[^\\']+", String.Regex),
- (r"\\'*", String.Regex),
- (r"'{3,}", String.Regex, '#pop'),
- (r"'", String.Regex)
- ],
- 'dqr': [
- (r'[^\\"]+', String.Regex),
- (r'\\"?', String.Regex),
- (r'"', String.Regex, '#pop')
- ],
- 'sqr': [
- (r"[^\\']+", String.Regex),
- (r"\\'?", String.Regex),
- (r"'", String.Regex, '#pop')
- ]
- }
-
- def get_tokens_unprocessed(self, text, **kwargs):
- pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
- if_false_level = 0
- for index, token, value in (
- RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
- if if_false_level == 0: # Not in a false #if
- if (token is Comment.Preproc and
- re.match(r'%sif%s+(0|nil)%s*$\n?' %
- (pp, self._ws_pp, self._ws_pp), value)):
- if_false_level = 1
- else: # In a false #if
- if token is Comment.Preproc:
- if (if_false_level == 1 and
- re.match(r'%sel(if|se)\b' % pp, value)):
- if_false_level = 0
- elif re.match(r'%sif' % pp, value):
- if_false_level += 1
- elif re.match(r'%sendif\b' % pp, value):
- if_false_level -= 1
- else:
- token = Comment
- yield index, token, value
+ ('#pop', 'object-body/no-braces', 'grammar', 'grammar-rules')),
+ # It might be a macro like DefineAction.
+ default(('#pop', 'object-body/no-braces'))
+ ],
+ 'block?': [
+ (r'\{', Punctuation, ('#pop', 'block')),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'block/basic': [
+ (r'[;:]+', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ (r'default\b', Keyword.Reserved),
+ (r'(%s)(%s*)(:)' % (_name, _ws),
+ bygroups(Name.Label, using(this, state='whitespace'),
+ Punctuation)),
+ include('whitespace')
+ ],
+ 'block': [
+ include('block/basic'),
+ (r'(?!\Z)', Text, ('more', 'main'))
+ ],
+ 'block/embed': [
+ (r'>>', String.Interpol, '#pop'),
+ include('block/basic'),
+ (r'(?!\Z)', Text, ('more/embed', 'main'))
+ ],
+ 'main/basic': [
+ include('whitespace'),
+ (r'\(', Punctuation, ('#pop', 'more', 'main')),
+ (r'\[', Punctuation, ('#pop', 'more/list', 'main')),
+ (r'\{', Punctuation, ('#pop', 'more/inner', 'main/inner',
+ 'more/parameters', 'main/parameters')),
+ (r'\*|\.{3}', Punctuation, '#pop'),
+ (r'(?i)0x[\da-f]+', Number.Hex, '#pop'),
+ (r'(\d+\.(?!\.)\d*|\.\d+)([eE][-+]?\d+)?|\d+[eE][-+]?\d+',
+ Number.Float, '#pop'),
+ (r'0[0-7]+', Number.Oct, '#pop'),
+ (r'\d+', Number.Integer, '#pop'),
+ (r'"""', String.Double, ('#pop', 'tdqs')),
+ (r"'''", String.Single, ('#pop', 'tsqs')),
+ (r'"', String.Double, ('#pop', 'dqs')),
+ (r"'", String.Single, ('#pop', 'sqs')),
+ (r'R"""', String.Regex, ('#pop', 'tdqr')),
+ (r"R'''", String.Regex, ('#pop', 'tsqr')),
+ (r'R"', String.Regex, ('#pop', 'dqr')),
+ (r"R'", String.Regex, ('#pop', 'sqr')),
+ # Two-token keywords
+ (r'(extern)(%s+)(object\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved)),
+ (r'(function|method)(%s*)(\()' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Punctuation),
+ ('#pop', 'block?', 'more/parameters', 'main/parameters')),
+ (r'(modify)(%s+)(grammar\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved),
+ ('#pop', 'object-body/no-braces', ':', 'grammar')),
+ (r'(new)(%s+(?=(?:function|method)\b))' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'))),
+ (r'(object)(%s+)(template\b)' % _ws,
+ bygroups(Keyword.Reserved, using(this, state='whitespace'),
+ Keyword.Reserved), ('#pop', 'template')),
+ (r'(string)(%s+)(template\b)' % _ws,
+ bygroups(Keyword, using(this, state='whitespace'),
+ Keyword.Reserved), ('#pop', 'function-name')),
+ # Keywords
+ (r'(argcount|definingobj|invokee|replaced|targetobj|targetprop)\b',
+ Name.Builtin, '#pop'),
+ (r'(break|continue|goto)\b', Keyword.Reserved, ('#pop', 'label')),
+ (r'(case|extern|if|intrinsic|return|static|while)\b',
+ Keyword.Reserved),
+ (r'catch\b', Keyword.Reserved, ('#pop', 'catch')),
+ (r'class\b', Keyword.Reserved,
+ ('#pop', 'object-body/no-braces', 'class')),
+ (r'(default|do|else|finally|try)\b', Keyword.Reserved, '#pop'),
+ (r'(dictionary|property)\b', Keyword.Reserved,
+ ('#pop', 'constants')),
+ (r'enum\b', Keyword.Reserved, ('#pop', 'enum')),
+ (r'export\b', Keyword.Reserved, ('#pop', 'main')),
+ (r'(for|foreach)\b', Keyword.Reserved,
+ ('#pop', 'more/inner', 'main/inner')),
+ (r'(function|method)\b', Keyword.Reserved,
+ ('#pop', 'block?', 'function-name')),
+ (r'grammar\b', Keyword.Reserved,
+ ('#pop', 'object-body/no-braces', 'grammar')),
+ (r'inherited\b', Keyword.Reserved, ('#pop', 'inherited')),
+ (r'local\b', Keyword.Reserved,
+ ('#pop', 'more/local', 'main/local')),
+ (r'(modify|replace|switch|throw|transient)\b', Keyword.Reserved,
+ '#pop'),
+ (r'new\b', Keyword.Reserved, ('#pop', 'class')),
+ (r'(nil|true)\b', Keyword.Constant, '#pop'),
+ (r'object\b', Keyword.Reserved, ('#pop', 'object-body/no-braces')),
+ (r'operator\b', Keyword.Reserved, ('#pop', 'operator')),
+ (r'propertyset\b', Keyword.Reserved,
+ ('#pop', 'propertyset', 'main')),
+ (r'self\b', Name.Builtin.Pseudo, '#pop'),
+ (r'template\b', Keyword.Reserved, ('#pop', 'template')),
+ # Operators
+ (r'(__objref|defined)(%s*)(\()' % _ws,
+ bygroups(Operator.Word, using(this, state='whitespace'),
+ Operator), ('#pop', 'more/__objref', 'main')),
+ (r'delegated\b', Operator.Word),
+ # Compiler-defined macros and built-in properties
+ (r'(__DATE__|__DEBUG|__LINE__|__FILE__|'
+ r'__TADS_MACRO_FORMAT_VERSION|__TADS_SYS_\w*|__TADS_SYSTEM_NAME|'
+ r'__TADS_VERSION_MAJOR|__TADS_VERSION_MINOR|__TADS3|__TIME__|'
+ r'construct|finalize|grammarInfo|grammarTag|lexicalParent|'
+ r'miscVocab|sourceTextGroup|sourceTextGroupName|'
+ r'sourceTextGroupOrder|sourceTextOrder)\b', Name.Builtin, '#pop')
+ ],
+ 'main': [
+ include('main/basic'),
+ (_name, Name, '#pop'),
+ default('#pop')
+ ],
+ 'more/basic': [
+ (r'\(', Punctuation, ('more/list', 'main')),
+ (r'\[', Punctuation, ('more', 'main')),
+ (r'\.{3}', Punctuation),
+ (r'->|\.\.', Punctuation, 'main'),
+ (r'(?=;)|[:)\]]', Punctuation, '#pop'),
+ include('whitespace'),
+ (_operator, Operator, 'main'),
+ (r'\?', Operator, ('main', 'more/conditional', 'main')),
+ (r'(is|not)(%s+)(in\b)' % _ws,
+ bygroups(Operator.Word, using(this, state='whitespace'),
+ Operator.Word)),
+ (r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
+ ],
+ 'more': [
+ include('more/basic'),
+ default('#pop')
+ ],
+ # Then expression (conditional operator)
+ 'more/conditional': [
+ (r':(?!:)', Operator, '#pop'),
+ include('more')
+ ],
+ # Embedded expressions
+ 'more/embed': [
+ (r'>>', String.Interpol, '#pop:2'),
+ include('more')
+ ],
+ # For/foreach loop initializer or short-form anonymous function
+ 'main/inner': [
+ (r'\(', Punctuation, ('#pop', 'more/inner', 'main/inner')),
+ (r'local\b', Keyword.Reserved, ('#pop', 'main/local')),
+ include('main')
+ ],
+ 'more/inner': [
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, 'main/inner'),
+ (r'(in|step)\b', Keyword, 'main/inner'),
+ include('more')
+ ],
+ # Local
+ 'main/local': [
+ (_name, Name.Variable, '#pop'),
+ include('whitespace')
+ ],
+ 'more/local': [
+ (r',', Punctuation, 'main/local'),
+ include('more')
+ ],
+ # List
+ 'more/list': [
+ (r'[,:]', Punctuation, 'main'),
+ include('more')
+ ],
+ # Parameter list
+ 'main/parameters': [
+ (r'(%s)(%s*)(?=:)' % (_name, _ws),
+ bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
+ (r'(%s)(%s+)(%s)' % (_name, _ws, _name),
+ bygroups(Name.Class, using(this, state='whitespace'),
+ Name.Variable), '#pop'),
+ (r'\[+', Punctuation),
+ include('main/basic'),
+ (_name, Name.Variable, '#pop'),
+ default('#pop')
+ ],
+ 'more/parameters': [
+ (r'(:)(%s*(?=[?=,:)]))' % _ws,
+ bygroups(Punctuation, using(this, state='whitespace'))),
+ (r'[?\]]+', Punctuation),
+ (r'[:)]', Punctuation, ('#pop', 'multimethod?')),
+ (r',', Punctuation, 'main/parameters'),
+ (r'=', Punctuation, ('more/parameter', 'main')),
+ include('more')
+ ],
+ 'more/parameter': [
+ (r'(?=[,)])', Text, '#pop'),
+ include('more')
+ ],
+ 'multimethod?': [
+ (r'multimethod\b', Keyword, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+
+ # Statements and expressions
+ 'more/__objref': [
+ (r',', Punctuation, 'mode'),
+ (r'\)', Operator, '#pop'),
+ include('more')
+ ],
+ 'mode': [
+ (r'(error|warn)\b', Keyword, '#pop'),
+ include('whitespace')
+ ],
+ 'catch': [
+ (r'\(+', Punctuation),
+ (_name, Name.Exception, ('#pop', 'variables')),
+ include('whitespace')
+ ],
+ 'enum': [
+ include('whitespace'),
+ (r'token\b', Keyword, ('#pop', 'constants')),
+ default(('#pop', 'constants'))
+ ],
+ 'grammar': [
+ (r'\)+', Punctuation),
+ (r'\(', Punctuation, 'grammar-tag'),
+ (r':', Punctuation, 'grammar-rules'),
+ (_name, Name.Class),
+ include('whitespace')
+ ],
+ 'grammar-tag': [
+ include('whitespace'),
+ (r'"""([^\\"<]|""?(?!")|\\"+|\\.|<(?!<))+("{3,}|<<)|'
+ r'R"""([^\\"]|""?(?!")|\\"+|\\.)+"{3,}|'
+ r"'''([^\\'<]|''?(?!')|\\'+|\\.|<(?!<))+('{3,}|<<)|"
+ r"R'''([^\\']|''?(?!')|\\'+|\\.)+'{3,}|"
+ r'"([^\\"<]|\\.|<(?!<))+("|<<)|R"([^\\"]|\\.)+"|'
+ r"'([^\\'<]|\\.|<(?!<))+('|<<)|R'([^\\']|\\.)+'|"
+ r"([^)\s\\/]|/(?![/*]))+|\)", String.Other, '#pop')
+ ],
+ 'grammar-rules': [
+ include('string'),
+ include('whitespace'),
+ (r'(\[)(%s*)(badness)' % _ws,
+ bygroups(Punctuation, using(this, state='whitespace'), Keyword),
+ 'main'),
+ (r'->|%s|[()]' % _operator, Punctuation),
+ (_name, Name.Constant),
+ default('#pop:2')
+ ],
+ ':': [
+ (r':', Punctuation, '#pop')
+ ],
+ 'function-name': [
+ (r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
+ (r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
+ (_name, Name.Function, '#pop'),
+ include('whitespace')
+ ],
+ 'inherited': [
+ (r'<', Punctuation, ('#pop', 'classes', 'class')),
+ include('whitespace'),
+ (_name, Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'operator': [
+ (r'negate\b', Operator.Word, '#pop'),
+ include('whitespace'),
+ (_operator, Operator),
+ default('#pop')
+ ],
+ 'propertyset': [
+ (r'\(', Punctuation, ('more/parameters', 'main/parameters')),
+ (r'\{', Punctuation, ('#pop', 'object-body')),
+ include('whitespace')
+ ],
+ 'template': [
+ (r'(?=;)', Text, '#pop'),
+ include('string'),
+ (r'inherited\b', Keyword.Reserved),
+ include('whitespace'),
+ (r'->|\?|%s' % _operator, Punctuation),
+ (_name, Name.Variable)
+ ],
+
+ # Identifiers
+ 'class': [
+ (r'\*|\.{3}', Punctuation, '#pop'),
+ (r'object\b', Keyword.Reserved, '#pop'),
+ (r'transient\b', Keyword.Reserved),
+ (_name, Name.Class, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'classes': [
+ (r'[:,]', Punctuation, 'class'),
+ include('whitespace'),
+ (r'>', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'constants': [
+ (r',+', Punctuation),
+ (r';', Punctuation, '#pop'),
+ (r'property\b', Keyword.Reserved),
+ (_name, Name.Constant),
+ include('whitespace')
+ ],
+ 'label': [
+ (_name, Name.Label, '#pop'),
+ include('whitespace'),
+ default('#pop')
+ ],
+ 'variables': [
+ (r',+', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ include('whitespace'),
+ (_name, Name.Variable)
+ ],
+
+ # Whitespace and comments
+ 'whitespace': [
+ (r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
+ Comment.Preproc),
+ (_comment_single, Comment.Single),
+ (_comment_multiline, Comment.Multiline),
+ (r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
+ ],
+
+ # Strings
+ 'string': [
+ (r'"""', String.Double, 'tdqs'),
+ (r"'''", String.Single, 'tsqs'),
+ (r'"', String.Double, 'dqs'),
+ (r"'", String.Single, 'sqs')
+ ],
+ 's/escape': [
+ (r'\{\{|\}\}|%s' % _escape, String.Escape)
+ ],
+ 's/verbatim': [
+ (r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
+ r'first\s+time|one\s+of|only|or|otherwise|'
+ r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
+ r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
+ (r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
+ r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
+ String.Interpol, ('block/embed', 'more/embed', 'main'))
+ ],
+ 's/entity': [
+ (r'(?i)&(#(x[\da-f]+|\d+)|[a-z][\da-z]*);?', Name.Entity)
+ ],
+ 'tdqs': _make_string_state(True, True),
+ 'tsqs': _make_string_state(True, False),
+ 'dqs': _make_string_state(False, True),
+ 'sqs': _make_string_state(False, False),
+ 'tdqs/listing': _make_string_state(True, True, 'listing'),
+ 'tsqs/listing': _make_string_state(True, False, 'listing'),
+ 'dqs/listing': _make_string_state(False, True, 'listing'),
+ 'sqs/listing': _make_string_state(False, False, 'listing'),
+ 'tdqs/xmp': _make_string_state(True, True, 'xmp'),
+ 'tsqs/xmp': _make_string_state(True, False, 'xmp'),
+ 'dqs/xmp': _make_string_state(False, True, 'xmp'),
+ 'sqs/xmp': _make_string_state(False, False, 'xmp'),
+
+ # Tags
+ 'tdqt': _make_tag_state(True, True),
+ 'tsqt': _make_tag_state(True, False),
+ 'dqt': _make_tag_state(False, True),
+ 'sqt': _make_tag_state(False, False),
+ 'dqs/tdqt': _make_attribute_value_state(r'"', True, True),
+ 'dqs/tsqt': _make_attribute_value_state(r'"', True, False),
+ 'dqs/dqt': _make_attribute_value_state(r'"', False, True),
+ 'dqs/sqt': _make_attribute_value_state(r'"', False, False),
+ 'sqs/tdqt': _make_attribute_value_state(r"'", True, True),
+ 'sqs/tsqt': _make_attribute_value_state(r"'", True, False),
+ 'sqs/dqt': _make_attribute_value_state(r"'", False, True),
+ 'sqs/sqt': _make_attribute_value_state(r"'", False, False),
+ 'uqs/tdqt': _make_attribute_value_state(_no_quote, True, True),
+ 'uqs/tsqt': _make_attribute_value_state(_no_quote, True, False),
+ 'uqs/dqt': _make_attribute_value_state(_no_quote, False, True),
+ 'uqs/sqt': _make_attribute_value_state(_no_quote, False, False),
+
+ # Regular expressions
+ 'tdqr': [
+ (r'[^\\"]+', String.Regex),
+ (r'\\"*', String.Regex),
+ (r'"{3,}', String.Regex, '#pop'),
+ (r'"', String.Regex)
+ ],
+ 'tsqr': [
+ (r"[^\\']+", String.Regex),
+ (r"\\'*", String.Regex),
+ (r"'{3,}", String.Regex, '#pop'),
+ (r"'", String.Regex)
+ ],
+ 'dqr': [
+ (r'[^\\"]+', String.Regex),
+ (r'\\"?', String.Regex),
+ (r'"', String.Regex, '#pop')
+ ],
+ 'sqr': [
+ (r"[^\\']+", String.Regex),
+ (r"\\'?", String.Regex),
+ (r"'", String.Regex, '#pop')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text, **kwargs):
+ pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
+ if_false_level = 0
+ for index, token, value in (
+ RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
+ if if_false_level == 0: # Not in a false #if
+ if (token is Comment.Preproc and
+ re.match(r'%sif%s+(0|nil)%s*$\n?' %
+ (pp, self._ws_pp, self._ws_pp), value)):
+ if_false_level = 1
+ else: # In a false #if
+ if token is Comment.Preproc:
+ if (if_false_level == 1 and
+ re.match(r'%sel(if|se)\b' % pp, value)):
+ if_false_level = 0
+ elif re.match(r'%sif' % pp, value):
+ if_false_level += 1
+ elif re.match(r'%sendif\b' % pp, value):
+ if_false_level -= 1
+ else:
+ token = Comment
+ yield index, token, value
def analyse_text(text):
"""This is a rather generic descriptive language without strong
diff --git a/contrib/python/Pygments/py3/pygments/lexers/iolang.py b/contrib/python/Pygments/py3/pygments/lexers/iolang.py
index c1fbe9084e..d47eec6ac7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/iolang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/iolang.py
@@ -1,62 +1,62 @@
-"""
- pygments.lexers.iolang
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Io language.
-
+"""
+ pygments.lexers.iolang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Io language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number
-
-__all__ = ['IoLexer']
-
-
-class IoLexer(RegexLexer):
- """
- For `Io <http://iolanguage.com/>`_ (a small, prototype-based
- programming language) source.
-
- .. versionadded:: 0.10
- """
- name = 'Io'
- filenames = ['*.io']
- aliases = ['io']
- mimetypes = ['text/x-iosrc']
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'#(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nestedcomment'),
- # DoubleQuotedString
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number
+
+__all__ = ['IoLexer']
+
+
+class IoLexer(RegexLexer):
+ """
+ For `Io <http://iolanguage.com/>`_ (a small, prototype-based
+ programming language) source.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Io'
+ filenames = ['*.io']
+ aliases = ['io']
+ mimetypes = ['text/x-iosrc']
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'#(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nestedcomment'),
+ # DoubleQuotedString
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # Operators
- (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
- Operator),
- # keywords
- (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
- Keyword),
- # constants
- (r'(nil|false|true)\b', Name.Constant),
- # names
- (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
- Name.Builtin),
+ # Operators
+ (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
+ Operator),
+ # keywords
+ (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
+ Keyword),
+ # constants
+ (r'(nil|false|true)\b', Name.Constant),
+ # names
+ (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
+ Name.Builtin),
(r'[a-zA-Z_]\w*', Name),
- # numbers
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'nestedcomment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ]
- }
+ # numbers
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'nestedcomment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ]
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/j.py b/contrib/python/Pygments/py3/pygments/lexers/j.py
index 8a3ddcbdd1..9220af2c46 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/j.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/j.py
@@ -1,145 +1,145 @@
-"""
- pygments.lexers.j
- ~~~~~~~~~~~~~~~~~
-
- Lexer for the J programming language.
-
+"""
+ pygments.lexers.j
+ ~~~~~~~~~~~~~~~~~
+
+ Lexer for the J programming language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \
- String, Text
-
-__all__ = ['JLexer']
-
-
-class JLexer(RegexLexer):
- """
- For `J <http://jsoftware.com/>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'J'
- aliases = ['j']
- filenames = ['*.ijs']
- mimetypes = ['text/x-j']
-
- validName = r'\b[a-zA-Z]\w*'
-
- tokens = {
- 'root': [
- # Shebang script
- (r'#!.*$', Comment.Preproc),
-
- # Comments
- (r'NB\..*', Comment.Single),
- (r'\n+\s*Note', Comment.Multiline, 'comment'),
- (r'\s*Note.*', Comment.Single),
-
- # Whitespace
- (r'\s+', Text),
-
- # Strings
- (r"'", String, 'singlequote'),
-
- # Definitions
- (r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'),
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, \
+ String, Text
+
+__all__ = ['JLexer']
+
+
+class JLexer(RegexLexer):
+ """
+ For `J <http://jsoftware.com/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'J'
+ aliases = ['j']
+ filenames = ['*.ijs']
+ mimetypes = ['text/x-j']
+
+ validName = r'\b[a-zA-Z]\w*'
+
+ tokens = {
+ 'root': [
+ # Shebang script
+ (r'#!.*$', Comment.Preproc),
+
+ # Comments
+ (r'NB\..*', Comment.Single),
+ (r'\n+\s*Note', Comment.Multiline, 'comment'),
+ (r'\s*Note.*', Comment.Single),
+
+ # Whitespace
+ (r'\s+', Text),
+
+ # Strings
+ (r"'", String, 'singlequote'),
+
+ # Definitions
+ (r'0\s+:\s*0|noun\s+define\s*$', Name.Entity, 'nounDefinition'),
(r'(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b',
- Name.Function, 'explicitDefinition'),
-
- # Flow Control
+ Name.Function, 'explicitDefinition'),
+
+ # Flow Control
(words(('for_', 'goto_', 'label_'), suffix=validName+r'\.'), Name.Label),
- (words((
- 'assert', 'break', 'case', 'catch', 'catchd',
- 'catcht', 'continue', 'do', 'else', 'elseif',
- 'end', 'fcase', 'for', 'if', 'return',
- 'select', 'throw', 'try', 'while', 'whilst',
+ (words((
+ 'assert', 'break', 'case', 'catch', 'catchd',
+ 'catcht', 'continue', 'do', 'else', 'elseif',
+ 'end', 'fcase', 'for', 'if', 'return',
+ 'select', 'throw', 'try', 'while', 'whilst',
), suffix=r'\.'), Name.Label),
-
- # Variable Names
- (validName, Name.Variable),
-
- # Standard Library
- (words((
- 'ARGV', 'CR', 'CRLF', 'DEL', 'Debug',
- 'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF',
- 'LF2', 'Note', 'TAB', 'alpha17', 'alpha27',
- 'apply', 'bind', 'boxopen', 'boxxopen', 'bx',
- 'clear', 'cutLF', 'cutopen', 'datatype', 'def',
- 'dfh', 'drop', 'each', 'echo', 'empty',
- 'erase', 'every', 'evtloop', 'exit', 'expand',
- 'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
- 'getenv', 'hfd', 'inv', 'inverse', 'iospath',
- 'isatty', 'isutf8', 'items', 'leaf', 'list',
+
+ # Variable Names
+ (validName, Name.Variable),
+
+ # Standard Library
+ (words((
+ 'ARGV', 'CR', 'CRLF', 'DEL', 'Debug',
+ 'EAV', 'EMPTY', 'FF', 'JVERSION', 'LF',
+ 'LF2', 'Note', 'TAB', 'alpha17', 'alpha27',
+ 'apply', 'bind', 'boxopen', 'boxxopen', 'bx',
+ 'clear', 'cutLF', 'cutopen', 'datatype', 'def',
+ 'dfh', 'drop', 'each', 'echo', 'empty',
+ 'erase', 'every', 'evtloop', 'exit', 'expand',
+ 'fetch', 'file2url', 'fixdotdot', 'fliprgb', 'getargs',
+ 'getenv', 'hfd', 'inv', 'inverse', 'iospath',
+ 'isatty', 'isutf8', 'items', 'leaf', 'list',
'nameclass', 'namelist', 'names', 'nc',
'nl', 'on', 'pick', 'rows',
- 'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
- 'sort', 'split', 'stderr', 'stdin', 'stdout',
- 'table', 'take', 'timespacex', 'timex', 'tmoutput',
- 'toCRLF', 'toHOST', 'toJ', 'tolower', 'toupper',
- 'type', 'ucp', 'ucpcount', 'usleep', 'utf8',
- 'uucp',
- )), Name.Function),
-
- # Copula
- (r'=[.:]', Operator),
-
- # Builtins
+ 'script', 'scriptd', 'sign', 'sminfo', 'smoutput',
+ 'sort', 'split', 'stderr', 'stdin', 'stdout',
+ 'table', 'take', 'timespacex', 'timex', 'tmoutput',
+ 'toCRLF', 'toHOST', 'toJ', 'tolower', 'toupper',
+ 'type', 'ucp', 'ucpcount', 'usleep', 'utf8',
+ 'uucp',
+ )), Name.Function),
+
+ # Copula
+ (r'=[.:]', Operator),
+
+ # Builtins
(r'[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/?]', Operator),
-
- # Short Keywords
- (r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved),
- (r'[aDiLpqsStux]\:', Keyword.Reserved),
- (r'(_[0-9])\:', Keyword.Constant),
-
- # Parens
- (r'\(', Punctuation, 'parentheses'),
-
- # Numbers
- include('numbers'),
- ],
-
- 'comment': [
- (r'[^)]', Comment.Multiline),
- (r'^\)', Comment.Multiline, '#pop'),
- (r'[)]', Comment.Multiline),
- ],
-
- 'explicitDefinition': [
- (r'\b[nmuvxy]\b', Name.Decorator),
- include('root'),
- (r'[^)]', Name),
- (r'^\)', Name.Label, '#pop'),
- (r'[)]', Name),
- ],
-
- 'numbers': [
- (r'\b_{1,2}\b', Number),
- (r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number),
- (r'_?\d+\.(?=\d+)', Number.Float),
- (r'_?\d+x', Number.Integer.Long),
- (r'_?\d+', Number.Integer),
- ],
-
- 'nounDefinition': [
- (r'[^)]', String),
- (r'^\)', Name.Label, '#pop'),
- (r'[)]', String),
- ],
-
- 'parentheses': [
- (r'\)', Punctuation, '#pop'),
- # include('nounDefinition'),
- include('explicitDefinition'),
- include('root'),
- ],
-
- 'singlequote': [
- (r"[^']", String),
- (r"''", String),
- (r"'", String, '#pop'),
- ],
- }
+
+ # Short Keywords
+ (r'[abCdDeEfHiIjLMoprtT]\.', Keyword.Reserved),
+ (r'[aDiLpqsStux]\:', Keyword.Reserved),
+ (r'(_[0-9])\:', Keyword.Constant),
+
+ # Parens
+ (r'\(', Punctuation, 'parentheses'),
+
+ # Numbers
+ include('numbers'),
+ ],
+
+ 'comment': [
+ (r'[^)]', Comment.Multiline),
+ (r'^\)', Comment.Multiline, '#pop'),
+ (r'[)]', Comment.Multiline),
+ ],
+
+ 'explicitDefinition': [
+ (r'\b[nmuvxy]\b', Name.Decorator),
+ include('root'),
+ (r'[^)]', Name),
+ (r'^\)', Name.Label, '#pop'),
+ (r'[)]', Name),
+ ],
+
+ 'numbers': [
+ (r'\b_{1,2}\b', Number),
+ (r'_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?', Number),
+ (r'_?\d+\.(?=\d+)', Number.Float),
+ (r'_?\d+x', Number.Integer.Long),
+ (r'_?\d+', Number.Integer),
+ ],
+
+ 'nounDefinition': [
+ (r'[^)]', String),
+ (r'^\)', Name.Label, '#pop'),
+ (r'[)]', String),
+ ],
+
+ 'parentheses': [
+ (r'\)', Punctuation, '#pop'),
+ # include('nounDefinition'),
+ include('explicitDefinition'),
+ include('root'),
+ ],
+
+ 'singlequote': [
+ (r"[^']", String),
+ (r"''", String),
+ (r"'", String, '#pop'),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/javascript.py b/contrib/python/Pygments/py3/pygments/lexers/javascript.py
index 7ddd1148e6..69b402ff39 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/javascript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/javascript.py
@@ -1,70 +1,70 @@
-"""
- pygments.lexers.javascript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for JavaScript and related languages.
-
+"""
+ pygments.lexers.javascript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for JavaScript and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import bygroups, combined, default, do_insertions, include, \
inherit, Lexer, RegexLexer, this, using, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Other, Generic
from pygments.util import get_bool_opt
-import pygments.unistring as uni
-
-__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
- 'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
+import pygments.unistring as uni
+
+__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
+ 'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
'CoffeeScriptLexer', 'MaskLexer', 'EarlGreyLexer', 'JuttleLexer',
'NodeConsoleLexer']
-
-JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
- ']|\\\\u[a-fA-F0-9]{4})')
-JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Mn', 'Mc', 'Nd', 'Pc') +
+
+JS_IDENT_START = ('(?:[$_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') +
+ ']|\\\\u[a-fA-F0-9]{4})')
+JS_IDENT_PART = ('(?:[$' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Mn', 'Mc', 'Nd', 'Pc') +
'\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
-JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
-
+JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
+
line_re = re.compile('.*?\n')
-
-class JavascriptLexer(RegexLexer):
- """
- For JavaScript source code.
- """
-
- name = 'JavaScript'
+
+class JavascriptLexer(RegexLexer):
+ """
+ For JavaScript source code.
+ """
+
+ name = 'JavaScript'
aliases = ['javascript', 'js']
filenames = ['*.js', '*.jsm', '*.mjs', '*.cjs']
- mimetypes = ['application/javascript', 'application/x-javascript',
- 'text/x-javascript', 'text/javascript']
-
- flags = re.DOTALL | re.UNICODE | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ mimetypes = ['application/javascript', 'application/x-javascript',
+ 'text/x-javascript', 'text/javascript']
+
+ flags = re.DOTALL | re.UNICODE | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'\A#! ?/.*?\n', Comment.Hashbang), # recognized by node.js
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
# Numeric literals
(r'0[bB][01]+n?', Number.Bin),
@@ -79,15 +79,15 @@ class JavascriptLexer(RegexLexer):
(r'\.\.\.|=>', Punctuation),
(r'\+\+|--|~|\?\?=?|\?|:|\\(?=\n)|'
r'(<<|>>>?|==?|!=?|(?:\*\*|\|\||&&|[-<>+*%&|^/]))=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
(r'(typeof|instanceof|in|void|delete|new)\b', Operator.Word, 'slashstartsregex'),
# Match stuff like: constructor
(r'\b(constructor|from|as)\b', Keyword.Reserved),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
r'throw|try|catch|finally|yield|await|async|this|of|static|export|'
r'import|debugger|extends|super)\b', Keyword, 'slashstartsregex'),
(r'(var|let|const|with|function|class)\b', Keyword.Declaration, 'slashstartsregex'),
@@ -95,11 +95,11 @@ class JavascriptLexer(RegexLexer):
(r'(abstract|boolean|byte|char|double|enum|final|float|goto|'
r'implements|int|interface|long|native|package|private|protected|'
r'public|short|synchronized|throws|transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
(r'(Array|Boolean|Date|BigInt|Function|Math|ArrayBuffer|'
r'Number|Object|RegExp|String|Promise|Proxy|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|parseFloat|parseInt|DataView|'
r'document|window|globalThis|global|Symbol|Intl|'
r'WeakSet|WeakMap|Set|Map|Reflect|JSON|Atomics|'
@@ -114,26 +114,26 @@ class JavascriptLexer(RegexLexer):
# Match stuff like: function() {...}
(r'([a-zA-Z_?.$][\w?.$]*)(?=\(\) \{)', Name.Other, 'slashstartsregex'),
- (JS_IDENT, Name.Other),
+ (JS_IDENT, Name.Other),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'`', String.Backtick, 'interp'),
- ],
- 'interp': [
- (r'`', String.Backtick, '#pop'),
+ (r'`', String.Backtick, 'interp'),
+ ],
+ 'interp': [
+ (r'`', String.Backtick, '#pop'),
(r'\\.', String.Backtick),
(r'\$\{', String.Interpol, 'interp-inside'),
- (r'\$', String.Backtick),
- (r'[^`\\$]+', String.Backtick),
- ],
- 'interp-inside': [
- # TODO: should this include single-line comments and allow nesting strings?
+ (r'\$', String.Backtick),
+ (r'[^`\\$]+', String.Backtick),
+ ],
+ 'interp-inside': [
+ # TODO: should this include single-line comments and allow nesting strings?
(r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- }
-
-
+ include('root'),
+ ],
+ }
+
+
class TypeScriptLexer(JavascriptLexer):
"""
For `TypeScript <http://typescriptlang.org/>`_ source code.
@@ -171,471 +171,471 @@ class TypeScriptLexer(JavascriptLexer):
}
-class KalLexer(RegexLexer):
- """
- For `Kal`_ source code.
-
- .. _Kal: http://rzimmerman.github.io/kal
-
-
- .. versionadded:: 2.0
- """
-
- name = 'Kal'
- aliases = ['kal']
- filenames = ['*.kal']
- mimetypes = ['text/kal', 'application/kal']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'###[^#].*?###', Comment.Multiline),
- (r'#(?!##[^#]).*?\n', Comment.Single),
- ],
- 'functiondef': [
- (r'[$a-zA-Z_][\w$]*\s*', Name.Function, '#pop'),
- include('commentsandwhitespace'),
- ],
- 'classdef': [
- (r'\binherits\s+from\b', Keyword),
- (r'[$a-zA-Z_][\w$]*\s*\n', Name.Class, '#pop'),
- (r'[$a-zA-Z_][\w$]*\s*', Name.Class),
- include('commentsandwhitespace'),
- ],
- 'listcomprehension': [
- (r'\]', Punctuation, '#pop'),
- (r'\b(property|value)\b', Keyword),
- include('root'),
- ],
- 'waitfor': [
- (r'\n', Punctuation, '#pop'),
- (r'\bfrom\b', Keyword),
- include('root'),
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+class KalLexer(RegexLexer):
+ """
+ For `Kal`_ source code.
+
+ .. _Kal: http://rzimmerman.github.io/kal
+
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Kal'
+ aliases = ['kal']
+ filenames = ['*.kal']
+ mimetypes = ['text/kal', 'application/kal']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'functiondef': [
+ (r'[$a-zA-Z_][\w$]*\s*', Name.Function, '#pop'),
+ include('commentsandwhitespace'),
+ ],
+ 'classdef': [
+ (r'\binherits\s+from\b', Keyword),
+ (r'[$a-zA-Z_][\w$]*\s*\n', Name.Class, '#pop'),
+ (r'[$a-zA-Z_][\w$]*\s*', Name.Class),
+ include('commentsandwhitespace'),
+ ],
+ 'listcomprehension': [
+ (r'\]', Punctuation, '#pop'),
+ (r'\b(property|value)\b', Keyword),
+ include('root'),
+ ],
+ 'waitfor': [
+ (r'\n', Punctuation, '#pop'),
+ (r'\bfrom\b', Keyword),
+ include('root'),
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gimuysd]+\b|\B)', String.Regex),
- (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
- Operator),
- (r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
- r'doesnt\s+exist)\b', Operator.Word),
- (r'(?:\([^()]+\))?\s*>', Name.Function),
- (r'[{(]', Punctuation),
- (r'\[', Punctuation, 'listcomprehension'),
- (r'[})\].,]', Punctuation),
- (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
- (r'\bclass\b', Keyword.Declaration, 'classdef'),
- (r'\b(safe\s+)?wait\s+for\b', Keyword, 'waitfor'),
- (r'\b(me|this)(\.[$a-zA-Z_][\w.$]*)?\b', Name.Variable.Instance),
- (r'(?<![.$])(for(\s+(parallel|series))?|in|of|while|until|'
- r'break|return|continue|'
- r'when|if|unless|else|otherwise|except\s+when|'
- r'throw|raise|fail\s+with|try|catch|finally|new|delete|'
- r'typeof|instanceof|super|run\s+in\s+parallel|'
- r'inherits\s+from)\b', Keyword),
- (r'(?<![.$])(true|false|yes|no|on|off|null|nothing|none|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
+ (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
+ Operator),
+ (r'\b(and|or|isnt|is|not|but|bitwise|mod|\^|xor|exists|'
+ r'doesnt\s+exist)\b', Operator.Word),
+ (r'(?:\([^()]+\))?\s*>', Name.Function),
+ (r'[{(]', Punctuation),
+ (r'\[', Punctuation, 'listcomprehension'),
+ (r'[})\].,]', Punctuation),
+ (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
+ (r'\bclass\b', Keyword.Declaration, 'classdef'),
+ (r'\b(safe\s+)?wait\s+for\b', Keyword, 'waitfor'),
+ (r'\b(me|this)(\.[$a-zA-Z_][\w.$]*)?\b', Name.Variable.Instance),
+ (r'(?<![.$])(for(\s+(parallel|series))?|in|of|while|until|'
+ r'break|return|continue|'
+ r'when|if|unless|else|otherwise|except\s+when|'
+ r'throw|raise|fail\s+with|try|catch|finally|new|delete|'
+ r'typeof|instanceof|super|run\s+in\s+parallel|'
+ r'inherits\s+from)\b', Keyword),
+ (r'(?<![.$])(true|false|yes|no|on|off|null|nothing|none|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|'
r'Number|Object|RegExp|String|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|'
r'window|globalThis|Symbol|print)\b', Name.Builtin),
- (r'[$a-zA-Z_][\w.$]*\s*(:|[+\-*/]?\=)?\b', Name.Variable),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all kal strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class LiveScriptLexer(RegexLexer):
- """
- For `LiveScript`_ source code.
-
+ (r'[$a-zA-Z_][\w.$]*\s*(:|[+\-*/]?\=)?\b', Name.Variable),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all kal strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single quoted strings don't need " escapses
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class LiveScriptLexer(RegexLexer):
+ """
+ For `LiveScript`_ source code.
+
.. _LiveScript: https://livescript.net/
-
- .. versionadded:: 1.6
- """
-
- name = 'LiveScript'
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'LiveScript'
aliases = ['livescript', 'live-script']
- filenames = ['*.ls']
- mimetypes = ['text/livescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'#.*?\n', Comment.Single),
- ],
- 'multilineregex': [
- include('commentsandwhitespace'),
+ filenames = ['*.ls']
+ mimetypes = ['text/livescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'#.*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ include('commentsandwhitespace'),
(r'//([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'/', String.Regex),
- (r'[^/#]+', String.Regex)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'//', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ (r'/', String.Regex),
+ (r'[^/#]+', String.Regex)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'//', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
(r'/', Operator, '#pop'),
- default('#pop'),
- ],
- 'root': [
+ default('#pop'),
+ ],
+ 'root': [
(r'\A(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
- r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
- (r'\+\+|&&|(?<![.$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
- r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
- r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
- r'[+*`%&|^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![.$])(for|own|in|of|while|until|loop|break|'
- r'return|continue|switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by|const|var|to|til)\b', Keyword,
- 'slashstartsregex'),
- (r'(?<![.$])(true|false|yes|no|on|off|'
- r'null|NaN|Infinity|undefined|void)\b',
- Keyword.Constant),
+ include('commentsandwhitespace'),
+ (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
+ r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
+ (r'\+\+|&&|(?<![.$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
+ r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+ r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
+ r'[+*`%&|^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![.$])(for|own|in|of|while|until|loop|break|'
+ r'return|continue|switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by|const|var|to|til)\b', Keyword,
+ 'slashstartsregex'),
+ (r'(?<![.$])(true|false|yes|no|on|off|'
+ r'null|NaN|Infinity|undefined|void)\b',
+ Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|'
r'Number|Object|RegExp|String|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|'
r'globalThis|Symbol|Symbol|BigInt)\b', Name.Builtin),
- (r'[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][\w-]*', Name.Other, 'slashstartsregex'),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
- (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- (r'\\\S+', String),
- (r'<\[.*?\]>', String),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all coffee script strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class DartLexer(RegexLexer):
- """
+ (r'[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w.\-:$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][\w-]*', Name.Other, 'slashstartsregex'),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
+ (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ (r'\\\S+', String),
+ (r'<\[.*?\]>', String),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all coffee script strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single quoted strings don't need " escapses
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class DartLexer(RegexLexer):
+ """
For `Dart <http://dart.dev/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Dart'
- aliases = ['dart']
- filenames = ['*.dart']
- mimetypes = ['text/x-dart']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('string_literal'),
- (r'#!(.*?)$', Comment.Preproc),
- (r'\b(import|export)\b', Keyword, 'import_decl'),
- (r'\b(library|source|part of|part)\b', Keyword),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Dart'
+ aliases = ['dart']
+ filenames = ['*.dart']
+ mimetypes = ['text/x-dart']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ include('string_literal'),
+ (r'#!(.*?)$', Comment.Preproc),
+ (r'\b(import|export)\b', Keyword, 'import_decl'),
+ (r'\b(library|source|part of|part)\b', Keyword),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
(r'\b(class|extension|mixin)\b(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
+ bygroups(Keyword.Declaration, Text), 'class'),
(r'\b(as|assert|break|case|catch|const|continue|default|do|else|finally|'
r'for|if|in|is|new|rethrow|return|super|switch|this|throw|try|while)\b',
- Keyword),
+ Keyword),
(r'\b(abstract|async|await|const|covariant|extends|external|factory|final|'
r'get|implements|late|native|on|operator|required|set|static|sync|typedef|'
r'var|with|yield)\b', Keyword.Declaration),
(r'\b(bool|double|dynamic|int|num|Function|Never|Null|Object|String|void)\b',
Keyword.Type),
- (r'\b(false|null|true)\b', Keyword.Constant),
- (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
+ (r'\b(false|null|true)\b', Keyword.Constant),
+ (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
(r'@[a-zA-Z_$]\w*', Name.Decorator),
- (r'[a-zA-Z_$]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[(){}\[\],.;]', Punctuation),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
- (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
- (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
- (r'\n', Text)
- # pseudo-keyword negate intentionally left out
- ],
- 'class': [
- (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
- ],
- 'import_decl': [
- include('string_literal'),
- (r'\s+', Text),
+ (r'[a-zA-Z_$]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[(){}\[\],.;]', Punctuation),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
+ (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
+ (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
+ (r'\n', Text)
+ # pseudo-keyword negate intentionally left out
+ ],
+ 'class': [
+ (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
+ ],
+ 'import_decl': [
+ include('string_literal'),
+ (r'\s+', Text),
(r'\b(as|deferred|show|hide)\b', Keyword),
- (r'[a-zA-Z_$]\w*', Name),
- (r'\,', Punctuation),
- (r'\;', Punctuation, '#pop')
- ],
- 'string_literal': [
- # Raw strings.
- (r'r"""([\w\W]*?)"""', String.Double),
- (r"r'''([\w\W]*?)'''", String.Single),
- (r'r"(.*?)"', String.Double),
- (r"r'(.*?)'", String.Single),
- # Normal Strings.
- (r'"""', String.Double, 'string_double_multiline'),
- (r"'''", String.Single, 'string_single_multiline'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single')
- ],
- 'string_common': [
- (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])",
- String.Escape),
- (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
- (r'(\$\{)(.*?)(\})',
- bygroups(String.Interpol, using(this), String.Interpol))
- ],
- 'string_double': [
- (r'"', String.Double, '#pop'),
- (r'[^"$\\\n]+', String.Double),
- include('string_common'),
- (r'\$+', String.Double)
- ],
- 'string_double_multiline': [
- (r'"""', String.Double, '#pop'),
- (r'[^"$\\]+', String.Double),
- include('string_common'),
- (r'(\$|\")+', String.Double)
- ],
- 'string_single': [
- (r"'", String.Single, '#pop'),
- (r"[^'$\\\n]+", String.Single),
- include('string_common'),
- (r'\$+', String.Single)
- ],
- 'string_single_multiline': [
- (r"'''", String.Single, '#pop'),
- (r'[^\'$\\]+', String.Single),
- include('string_common'),
- (r'(\$|\')+', String.Single)
- ]
- }
-
-
-class LassoLexer(RegexLexer):
- """
- For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
- syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
- HTML, use the `LassoHtmlLexer`.
-
- Additional options accepted:
-
- `builtinshighlighting`
- If given and ``True``, highlight builtin types, traits, methods, and
- members (default: ``True``).
- `requiredelimiters`
- If given and ``True``, only highlight code between delimiters as Lasso
- (default: ``False``).
-
- .. versionadded:: 1.6
- """
-
- name = 'Lasso'
- aliases = ['lasso', 'lassoscript']
- filenames = ['*.lasso', '*.lasso[89]']
- alias_filenames = ['*.incl', '*.inc', '*.las']
- mimetypes = ['text/x-lasso']
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'\,', Punctuation),
+ (r'\;', Punctuation, '#pop')
+ ],
+ 'string_literal': [
+ # Raw strings.
+ (r'r"""([\w\W]*?)"""', String.Double),
+ (r"r'''([\w\W]*?)'''", String.Single),
+ (r'r"(.*?)"', String.Double),
+ (r"r'(.*?)'", String.Single),
+ # Normal Strings.
+ (r'"""', String.Double, 'string_double_multiline'),
+ (r"'''", String.Single, 'string_single_multiline'),
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single')
+ ],
+ 'string_common': [
+ (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])",
+ String.Escape),
+ (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(String.Interpol, using(this), String.Interpol))
+ ],
+ 'string_double': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"$\\\n]+', String.Double),
+ include('string_common'),
+ (r'\$+', String.Double)
+ ],
+ 'string_double_multiline': [
+ (r'"""', String.Double, '#pop'),
+ (r'[^"$\\]+', String.Double),
+ include('string_common'),
+ (r'(\$|\")+', String.Double)
+ ],
+ 'string_single': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'$\\\n]+", String.Single),
+ include('string_common'),
+ (r'\$+', String.Single)
+ ],
+ 'string_single_multiline': [
+ (r"'''", String.Single, '#pop'),
+ (r'[^\'$\\]+', String.Single),
+ include('string_common'),
+ (r'(\$|\')+', String.Single)
+ ]
+ }
+
+
+class LassoLexer(RegexLexer):
+ """
+ For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
+ syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
+ HTML, use the `LassoHtmlLexer`.
+
+ Additional options accepted:
+
+ `builtinshighlighting`
+ If given and ``True``, highlight builtin types, traits, methods, and
+ members (default: ``True``).
+ `requiredelimiters`
+ If given and ``True``, only highlight code between delimiters as Lasso
+ (default: ``False``).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Lasso'
+ aliases = ['lasso', 'lassoscript']
+ filenames = ['*.lasso', '*.lasso[89]']
+ alias_filenames = ['*.incl', '*.inc', '*.las']
+ mimetypes = ['text/x-lasso']
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^#![ \S]+lasso9\b', Comment.Preproc, 'lasso'),
(r'(?=\[|<)', Other, 'delimiters'),
- (r'\s+', Other),
- default(('delimiters', 'lassofile')),
- ],
- 'delimiters': [
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Comment.Preproc, 'squarebrackets'),
+ (r'\s+', Other),
+ default(('delimiters', 'lassofile')),
+ ],
+ 'delimiters': [
+ (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+ (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
+ (r'\[', Comment.Preproc, 'squarebrackets'),
(r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
- (r'<(!--.*?-->)?', Other),
- (r'[^[<]+', Other),
- ],
- 'nosquarebrackets': [
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Other),
+ (r'<(!--.*?-->)?', Other),
+ (r'[^[<]+', Other),
+ ],
+ 'nosquarebrackets': [
+ (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
+ (r'\[', Other),
(r'<\?(lasso(script)?|=)', Comment.Preproc, 'anglebrackets'),
- (r'<(!--.*?-->)?', Other),
- (r'[^[<]+', Other),
- ],
- 'noprocess': [
- (r'\[/noprocess\]', Comment.Preproc, '#pop'),
- (r'\[', Other),
- (r'[^[]', Other),
- ],
- 'squarebrackets': [
- (r'\]', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'anglebrackets': [
- (r'\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'lassofile': [
- (r'\]|\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'whitespacecomments': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*\*!.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- ],
- 'lasso': [
- # whitespace/comments
- include('whitespacecomments'),
-
- # literals
- (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'(infinity|NaN)\b', Number),
- (r"'", String.Single, 'singlestring'),
- (r'"', String.Double, 'doublestring'),
- (r'`[^`]*`', String.Backtick),
-
- # names
- (r'\$[a-z_][\w.]*', Name.Variable),
+ (r'<(!--.*?-->)?', Other),
+ (r'[^[<]+', Other),
+ ],
+ 'noprocess': [
+ (r'\[/noprocess\]', Comment.Preproc, '#pop'),
+ (r'\[', Other),
+ (r'[^[]', Other),
+ ],
+ 'squarebrackets': [
+ (r'\]', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'anglebrackets': [
+ (r'\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'lassofile': [
+ (r'\]|\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'whitespacecomments': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*\*!.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ ],
+ 'lasso': [
+ # whitespace/comments
+ include('whitespacecomments'),
+
+ # literals
+ (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
+ (r'0x[\da-f]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'(infinity|NaN)\b', Number),
+ (r"'", String.Single, 'singlestring'),
+ (r'"', String.Double, 'doublestring'),
+ (r'`[^`]*`', String.Backtick),
+
+ # names
+ (r'\$[a-z_][\w.]*', Name.Variable),
(r'#([a-z_][\w.]*|\d+\b)', Name.Variable.Instance),
- (r"(\.\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
- (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
- (r'(\.\.?\s*)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
- (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Operator, Name.Other.Member)),
- (r'(?<!->)(self|inherited|currentcapture|givenblock)\b',
- Name.Builtin.Pseudo),
- (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
- r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
- r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
- r'Error_InvalidDatabase|Error_InvalidPassword|'
- r'Error_InvalidUsername|Error_ModuleNotFound|'
- r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
- r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
- r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
- r'Error_UpdateError)\b', Name.Exception),
-
- # definitions
- (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
- (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator,
- Name.Function), 'signature'),
- (r'(define)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
- (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*\())', bygroups(Keyword, Text, Name.Function),
- 'signature'),
- (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword, Text, Name.Function)),
-
- # keywords
- (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
- (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
- (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
- r'null|boolean|bytes|keyword|list|locale|queue|set|stack|'
- r'staticarray)\b', Keyword.Type),
- (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
- (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
- (r'require\b', Keyword, 'requiresection'),
- (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
- (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
- r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
- r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
- r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
+ (r"(\.\s*)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
+ (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
+ (r'(\.\.?\s*)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
+ (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Operator, Name.Other.Member)),
+ (r'(?<!->)(self|inherited|currentcapture|givenblock)\b',
+ Name.Builtin.Pseudo),
+ (r'-(?!infinity)[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
+ r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
+ r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
+ r'Error_InvalidDatabase|Error_InvalidPassword|'
+ r'Error_InvalidUsername|Error_ModuleNotFound|'
+ r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
+ r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
+ r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
+ r'Error_UpdateError)\b', Name.Exception),
+
+ # definitions
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator,
+ Name.Function), 'signature'),
+ (r'(define)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
+ (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*\())', bygroups(Keyword, Text, Name.Function),
+ 'signature'),
+ (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword, Text, Name.Function)),
+
+ # keywords
+ (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
+ (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
+ (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
+ r'null|boolean|bytes|keyword|list|locale|queue|set|stack|'
+ r'staticarray)\b', Keyword.Type),
+ (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
+ (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
+ (r'require\b', Keyword, 'requiresection'),
+ (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
+ (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
+ r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
+ r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
+ r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|Link_FirstRecord|'
r'Link_LastGroup|Link_LastRecord|Link_NextGroup|Link_NextRecord|'
r'Link_PrevGroup|Link_PrevRecord|Log|Loop|Output_None|Portal|'
@@ -650,778 +650,778 @@ class LassoLexer(RegexLexer):
r'join|let|match|max|min|on|order|parent|protected|provide|public|'
r'require|returnhome|skip|split_thread|sum|take|thread|to|trait|'
r'type|where|with|yield|yieldhome)\b',
- bygroups(Punctuation, Keyword)),
-
- # other
- (r',', Punctuation, 'commamember'),
- (r'(and|or|not)\b', Operator.Word),
- (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))',
- bygroups(Name, Name.Label, Operator)),
- (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
- (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
- bygroups(Operator, Operator.Word)),
- (r':=|[-+*/%=<>&|!?\\]+', Operator),
- (r'[{}():;,@^]', Punctuation),
- ],
- 'singlestring': [
- (r"'", String.Single, '#pop'),
- (r"[^'\\]+", String.Single),
- include('escape'),
- (r"\\", String.Single),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- include('escape'),
- (r'\\', String.Double),
- ],
- 'escape': [
- (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:\n\r]+:|'
- r'[abefnrtv?"\'\\]|$)', String.Escape),
- ],
- 'signature': [
- (r'=>', Operator, '#pop'),
- (r'\)', Punctuation, '#pop'),
- (r'[(,]', Punctuation, 'parameter'),
- include('lasso'),
- ],
- 'parameter': [
- (r'\)', Punctuation, '#pop'),
- (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
- (r'\.\.\.', Name.Builtin.Pseudo),
- include('lasso'),
- ],
- 'requiresection': [
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
- (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r',', Punctuation),
- include('whitespacecomments'),
- ],
- 'requiresignature': [
- (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
- (r'\)', Punctuation, '#pop:2'),
- (r'-?[a-z_][\w.]*', Name.Attribute),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r'\.\.\.', Name.Builtin.Pseudo),
- (r'[(,]', Punctuation),
- include('whitespacecomments'),
- ],
- 'commamember': [
- (r'(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
- Name.Function, 'signature'),
- include('whitespacecomments'),
- default('#pop'),
- ],
- }
-
- def __init__(self, **options):
- self.builtinshighlighting = get_bool_opt(
- options, 'builtinshighlighting', True)
- self.requiredelimiters = get_bool_opt(
- options, 'requiredelimiters', False)
-
- self._builtins = set()
- self._members = set()
- if self.builtinshighlighting:
- from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
+ bygroups(Punctuation, Keyword)),
+
+ # other
+ (r',', Punctuation, 'commamember'),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))',
+ bygroups(Name, Name.Label, Operator)),
+ (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
+ (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
+ bygroups(Operator, Operator.Word)),
+ (r':=|[-+*/%=<>&|!?\\]+', Operator),
+ (r'[{}():;,@^]', Punctuation),
+ ],
+ 'singlestring': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'\\]+", String.Single),
+ include('escape'),
+ (r"\\", String.Single),
+ ],
+ 'doublestring': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"\\]+', String.Double),
+ include('escape'),
+ (r'\\', String.Double),
+ ],
+ 'escape': [
+ (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:\n\r]+:|'
+ r'[abefnrtv?"\'\\]|$)', String.Escape),
+ ],
+ 'signature': [
+ (r'=>', Operator, '#pop'),
+ (r'\)', Punctuation, '#pop'),
+ (r'[(,]', Punctuation, 'parameter'),
+ include('lasso'),
+ ],
+ 'parameter': [
+ (r'\)', Punctuation, '#pop'),
+ (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ include('lasso'),
+ ],
+ 'requiresection': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
+ (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r',', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'requiresignature': [
+ (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
+ (r'\)', Punctuation, '#pop:2'),
+ (r'-?[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ (r'[(,]', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'commamember': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
+ Name.Function, 'signature'),
+ include('whitespacecomments'),
+ default('#pop'),
+ ],
+ }
+
+ def __init__(self, **options):
+ self.builtinshighlighting = get_bool_opt(
+ options, 'builtinshighlighting', True)
+ self.requiredelimiters = get_bool_opt(
+ options, 'requiredelimiters', False)
+
+ self._builtins = set()
+ self._members = set()
+ if self.builtinshighlighting:
+ from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
for key, value in BUILTINS.items():
- self._builtins.update(value)
+ self._builtins.update(value)
for key, value in MEMBERS.items():
- self._members.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.requiredelimiters:
- stack.append('delimiters')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if (token is Name.Other and value.lower() in self._builtins or
- token is Name.Other.Member and
- value.lower().rstrip('=') in self._members):
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- rv = 0.0
- if 'bin/lasso9' in text:
- rv += 0.8
- if re.search(r'<\?lasso', text, re.I):
- rv += 0.4
- if re.search(r'local\(', text, re.I):
- rv += 0.4
- return rv
-
-
-class ObjectiveJLexer(RegexLexer):
- """
- For Objective-J source code with preprocessor directives.
-
- .. versionadded:: 1.3
- """
-
- name = 'Objective-J'
- aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
- filenames = ['*.j']
- mimetypes = ['text/x-objective-j']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # function definition
- (r'^(' + _ws + r'[+-]' + _ws + r')([(a-zA-Z_].*?[^(])(' + _ws + r'\{)',
- bygroups(using(this), using(this, state='function_signature'),
- using(this))),
-
- # class definition
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- 'classname'),
- (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
- 'forward_classname'),
- (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
-
- include('statements'),
- ('[{()}]', Punctuation),
- (';', Punctuation),
- ],
- 'whitespace': [
- (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
-
- (r'#if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
-
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'<!--', Comment),
- ],
- 'slashstartsregex': [
- include('whitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop'),
- ],
- 'badregex': [
- (r'\n', Text, '#pop'),
- ],
- 'statements': [
- (r'(L|@)?"', String, 'string'),
- (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
+ self._members.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.requiredelimiters:
+ stack.append('delimiters')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if (token is Name.Other and value.lower() in self._builtins or
+ token is Name.Other.Member and
+ value.lower().rstrip('=') in self._members):
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
+ def analyse_text(text):
+ rv = 0.0
+ if 'bin/lasso9' in text:
+ rv += 0.8
+ if re.search(r'<\?lasso', text, re.I):
+ rv += 0.4
+ if re.search(r'local\(', text, re.I):
+ rv += 0.4
+ return rv
+
+
+class ObjectiveJLexer(RegexLexer):
+ """
+ For Objective-J source code with preprocessor directives.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Objective-J'
+ aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
+ filenames = ['*.j']
+ mimetypes = ['text/x-objective-j']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # function definition
+ (r'^(' + _ws + r'[+-]' + _ws + r')([(a-zA-Z_].*?[^(])(' + _ws + r'\{)',
+ bygroups(using(this), using(this, state='function_signature'),
+ using(this))),
+
+ # class definition
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ 'classname'),
+ (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
+ 'forward_classname'),
+ (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
+
+ include('statements'),
+ ('[{()}]', Punctuation),
+ (';', Punctuation),
+ ],
+ 'whitespace': [
+ (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+
+ (r'#if\s+0', Comment.Preproc, 'if0'),
+ (r'#', Comment.Preproc, 'macro'),
+
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'<!--', Comment),
+ ],
+ 'slashstartsregex': [
+ include('whitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop'),
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop'),
+ ],
+ 'statements': [
+ (r'(L|@)?"', String, 'string'),
+ (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
-
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
- r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
-
- (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-
- (r'(@selector|@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
- r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
-
- (r'(int|long|float|short|double|char|unsigned|signed|void|'
- r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
- Keyword.Type),
-
- (r'(self|super)\b', Name.Builtin),
-
- (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
- r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
- r'SQRT2)\b', Keyword.Constant),
-
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
+ r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
+
+ (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+
+ (r'(@selector|@private|@protected|@public|@encode|'
+ r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
+ r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
+
+ (r'(int|long|float|short|double|char|unsigned|signed|void|'
+ r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
+ Keyword.Type),
+
+ (r'(self|super)\b', Name.Builtin),
+
+ (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
+ r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
+ r'SQRT2)\b', Keyword.Constant),
+
(r'(Array|Boolean|Date|Error|Function|Math|'
r'Number|Object|RegExp|String|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
r'window|globalThis|Symbol)\b', Name.Builtin),
-
- (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
- bygroups(Name.Function, using(this))),
-
- (r'[$a-zA-Z_]\w*', Name),
- ],
- 'classname': [
- # interface definition that inherits
- (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
- r')([a-zA-Z_]\w*)?',
- bygroups(Name.Class, using(this), Name.Class), '#pop'),
- # interface definition for a category
- (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
- bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
- # simple interface / implementation
- (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
- ],
- 'forward_classname': [
- (r'([a-zA-Z_]\w*)(\s*,\s*)',
- bygroups(Name.Class, Text), '#push'),
- (r'([a-zA-Z_]\w*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop'),
- ],
- 'function_signature': [
- include('whitespace'),
-
- # start of a selector w/ parameters
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), 'function_parameters'),
-
- # no-param function
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), "#pop"),
-
- # no return type given, start of a selector w/ parameters
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups(Name.Function), 'function_parameters'),
-
- # no return type given, no-param function
- (r'([$a-zA-Z_]\w+)', # function name
- bygroups(Name.Function), "#pop"),
-
- default('#pop'),
- ],
- 'function_parameters': [
- include('whitespace'),
-
- # parameters
- (r'(\(' + _ws + ')' # open paren
- r'([^)]+)' # type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # param name
- bygroups(using(this), Keyword.Type, using(this), Text)),
-
- # one piece of a selector name
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- Name.Function),
-
- # smallest possible selector piece
- (r'(:)', Name.Function),
-
- # var args
- (r'(,' + _ws + r'\.\.\.)', using(this)),
-
- # param name
- (r'([$a-zA-Z_]\w+)', Text),
- ],
- 'expression': [
- (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
- Punctuation)),
- (r'(\))', Punctuation, "#pop"),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- def analyse_text(text):
+
+ (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
+ bygroups(Name.Function, using(this))),
+
+ (r'[$a-zA-Z_]\w*', Name),
+ ],
+ 'classname': [
+ # interface definition that inherits
+ (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
+ r')([a-zA-Z_]\w*)?',
+ bygroups(Name.Class, using(this), Name.Class), '#pop'),
+ # interface definition for a category
+ (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
+ bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
+ # simple interface / implementation
+ (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
+ ],
+ 'forward_classname': [
+ (r'([a-zA-Z_]\w*)(\s*,\s*)',
+ bygroups(Name.Class, Text), '#push'),
+ (r'([a-zA-Z_]\w*)(\s*;?)',
+ bygroups(Name.Class, Text), '#pop'),
+ ],
+ 'function_signature': [
+ include('whitespace'),
+
+ # start of a selector w/ parameters
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), 'function_parameters'),
+
+ # no-param function
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), "#pop"),
+
+ # no return type given, start of a selector w/ parameters
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(Name.Function), 'function_parameters'),
+
+ # no return type given, no-param function
+ (r'([$a-zA-Z_]\w+)', # function name
+ bygroups(Name.Function), "#pop"),
+
+ default('#pop'),
+ ],
+ 'function_parameters': [
+ include('whitespace'),
+
+ # parameters
+ (r'(\(' + _ws + ')' # open paren
+ r'([^)]+)' # type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # param name
+ bygroups(using(this), Keyword.Type, using(this), Text)),
+
+ # one piece of a selector name
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ Name.Function),
+
+ # smallest possible selector piece
+ (r'(:)', Name.Function),
+
+ # var args
+ (r'(,' + _ws + r'\.\.\.)', using(this)),
+
+ # param name
+ (r'([$a-zA-Z_]\w+)', Text),
+ ],
+ 'expression': [
+ (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
+ Punctuation)),
+ (r'(\))', Punctuation, "#pop"),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
if re.search(r'^\s*@import\s+[<"]', text, re.MULTILINE):
- # special directive found in most Objective-J files
- return True
- return False
-
-
-class CoffeeScriptLexer(RegexLexer):
- """
- For `CoffeeScript`_ source code.
-
- .. _CoffeeScript: http://coffeescript.org
-
- .. versionadded:: 1.3
- """
-
- name = 'CoffeeScript'
+ # special directive found in most Objective-J files
+ return True
+ return False
+
+
+class CoffeeScriptLexer(RegexLexer):
+ """
+ For `CoffeeScript`_ source code.
+
+ .. _CoffeeScript: http://coffeescript.org
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'CoffeeScript'
aliases = ['coffeescript', 'coffee-script', 'coffee']
- filenames = ['*.coffee']
- mimetypes = ['text/coffeescript']
-
+ filenames = ['*.coffee']
+ mimetypes = ['text/coffeescript']
+
_operator_re = (
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
r'\|\||\\(?=\n)|'
r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&|\^/])=?')
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'###[^#].*?###', Comment.Multiline),
- (r'#(?!##[^#]).*?\n', Comment.Single),
- ],
- 'multilineregex': [
- (r'[^/#]+', String.Regex),
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ (r'[^/#]+', String.Regex),
(r'///([gimuysd]+\b|\B)', String.Regex, '#pop'),
- (r'#\{', String.Interpol, 'interpoling_string'),
- (r'[/#]', String.Regex),
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'///', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ (r'#\{', String.Interpol, 'interpoling_string'),
+ (r'[/#]', String.Regex),
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'///', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
# This isn't really guarding against mishighlighting well-formed
# code, just the ability to infinite-loop between root and
# slashstartsregex.
(r'/', Operator, '#pop'),
- default('#pop'),
- ],
- 'root': [
- include('commentsandwhitespace'),
+ default('#pop'),
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
(r'\A(?=\s|/)', Text, 'slashstartsregex'),
(_operator_re, Operator, 'slashstartsregex'),
(r'(?:\([^()]*\))?\s*[=-]>', Name.Function, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![.$])(for|own|in|of|while|until|'
- r'loop|break|return|continue|'
- r'switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
- (r'(?<![.$])(true|false|yes|no|on|off|null|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![.$])(for|own|in|of|while|until|'
+ r'loop|break|return|continue|'
+ r'switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
+ (r'(?<![.$])(true|false|yes|no|on|off|null|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|'
r'Number|Object|RegExp|String|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|globalThis|Symbol)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w.:$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
(r'@?[$a-zA-Z_][\w$]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all coffee script strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
-            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
-            (r'#|\\.|"', String), # single quoted strings don't need " escapes
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#\{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class MaskLexer(RegexLexer):
- """
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all coffee script strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+            (r'#|\\.|"', String), # single quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#\{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class MaskLexer(RegexLexer):
+ """
For `Mask <https://github.com/atmajs/MaskJS>`__ markup.
-
- .. versionadded:: 2.0
- """
- name = 'Mask'
- aliases = ['mask']
- filenames = ['*.mask']
- mimetypes = ['text/x-mask']
-
- flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'[{};>]', Punctuation),
- (r"'''", String, 'string-trpl-single'),
- (r'"""', String, 'string-trpl-double'),
- (r"'", String, 'string-single'),
- (r'"', String, 'string-double'),
- (r'([\w-]+)', Name.Tag, 'node'),
- (r'([^.#;{>\s]+)', Name.Class, 'node'),
- (r'(#[\w-]+)', Name.Function, 'node'),
- (r'(\.[\w-]+)', Name.Variable.Class, 'node')
- ],
- 'string-base': [
- (r'\\.', String.Escape),
- (r'~\[', String.Interpol, 'interpolation'),
- (r'.', String.Single),
- ],
- 'string-single': [
- (r"'", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-double': [
- (r'"', String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-single': [
- (r"'''", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-double': [
- (r'"""', String.Single, '#pop'),
- include('string-base')
- ],
- 'interpolation': [
- (r'\]', String.Interpol, '#pop'),
- (r'\s*:', String.Interpol, 'expression'),
- (r'\s*\w+:', Name.Other),
- (r'[^\]]+', String.Interpol)
- ],
- 'expression': [
- (r'[^\]]+', using(JavascriptLexer), '#pop')
- ],
- 'node': [
- (r'\s+', Text),
- (r'\.', Name.Variable.Class, 'node-class'),
- (r'\#', Name.Function, 'node-id'),
- (r'style[ \t]*=', Name.Attribute, 'node-attr-style-value'),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'node-attr-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'[>{;]', Punctuation, '#pop')
- ],
- 'node-class': [
- (r'[\w-]+', Name.Variable.Class),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-id': [
- (r'[\w-]+', Name.Function),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-attr-value': [
- (r'\s+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r"'", String, 'string-single-pop2'),
- (r'"', String, 'string-double-pop2'),
- default('#pop')
- ],
- 'node-attr-style-value': [
- (r'\s+', Text),
- (r"'", String.Single, 'css-single-end'),
- (r'"', String.Single, 'css-double-end'),
- include('node-attr-value')
- ],
- 'css-base': [
- (r'\s+', Text),
- (r";", Punctuation),
- (r"[\w\-]+\s*:", Name.Builtin)
- ],
- 'css-single-end': [
- include('css-base'),
- (r"'", String.Single, '#pop:2'),
- (r"[^;']+", Name.Entity)
- ],
- 'css-double-end': [
- include('css-base'),
- (r'"', String.Single, '#pop:2'),
- (r'[^;"]+', Name.Entity)
- ],
- 'string-single-pop2': [
- (r"'", String.Single, '#pop:2'),
- include('string-base')
- ],
- 'string-double-pop2': [
- (r'"', String.Single, '#pop:2'),
- include('string-base')
- ],
- }
-
-
-class EarlGreyLexer(RegexLexer):
- """
- For `Earl-Grey`_ source code.
-
- .. _Earl-Grey: https://breuleux.github.io/earl-grey/
-
-    .. versionadded:: 2.1
- """
-
- name = 'Earl Grey'
- aliases = ['earl-grey', 'earlgrey', 'eg']
- filenames = ['*.eg']
- mimetypes = ['text/x-earl-grey']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- include('control'),
- (r'[^\S\n]+', Text),
- (r';;.*\n', Comment),
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mask'
+ aliases = ['mask']
+ filenames = ['*.mask']
+ mimetypes = ['text/x-mask']
+
+ flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'[{};>]', Punctuation),
+ (r"'''", String, 'string-trpl-single'),
+ (r'"""', String, 'string-trpl-double'),
+ (r"'", String, 'string-single'),
+ (r'"', String, 'string-double'),
+ (r'([\w-]+)', Name.Tag, 'node'),
+ (r'([^.#;{>\s]+)', Name.Class, 'node'),
+ (r'(#[\w-]+)', Name.Function, 'node'),
+ (r'(\.[\w-]+)', Name.Variable.Class, 'node')
+ ],
+ 'string-base': [
+ (r'\\.', String.Escape),
+ (r'~\[', String.Interpol, 'interpolation'),
+ (r'.', String.Single),
+ ],
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-double': [
+ (r'"', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-single': [
+ (r"'''", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-double': [
+ (r'"""', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'interpolation': [
+ (r'\]', String.Interpol, '#pop'),
+ (r'\s*:', String.Interpol, 'expression'),
+ (r'\s*\w+:', Name.Other),
+ (r'[^\]]+', String.Interpol)
+ ],
+ 'expression': [
+ (r'[^\]]+', using(JavascriptLexer), '#pop')
+ ],
+ 'node': [
+ (r'\s+', Text),
+ (r'\.', Name.Variable.Class, 'node-class'),
+ (r'\#', Name.Function, 'node-id'),
+ (r'style[ \t]*=', Name.Attribute, 'node-attr-style-value'),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'node-attr-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'[>{;]', Punctuation, '#pop')
+ ],
+ 'node-class': [
+ (r'[\w-]+', Name.Variable.Class),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-id': [
+ (r'[\w-]+', Name.Function),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-attr-value': [
+ (r'\s+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r"'", String, 'string-single-pop2'),
+ (r'"', String, 'string-double-pop2'),
+ default('#pop')
+ ],
+ 'node-attr-style-value': [
+ (r'\s+', Text),
+ (r"'", String.Single, 'css-single-end'),
+ (r'"', String.Single, 'css-double-end'),
+ include('node-attr-value')
+ ],
+ 'css-base': [
+ (r'\s+', Text),
+ (r";", Punctuation),
+ (r"[\w\-]+\s*:", Name.Builtin)
+ ],
+ 'css-single-end': [
+ include('css-base'),
+ (r"'", String.Single, '#pop:2'),
+ (r"[^;']+", Name.Entity)
+ ],
+ 'css-double-end': [
+ include('css-base'),
+ (r'"', String.Single, '#pop:2'),
+ (r'[^;"]+', Name.Entity)
+ ],
+ 'string-single-pop2': [
+ (r"'", String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ 'string-double-pop2': [
+ (r'"', String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ }
+
+
+class EarlGreyLexer(RegexLexer):
+ """
+ For `Earl-Grey`_ source code.
+
+ .. _Earl-Grey: https://breuleux.github.io/earl-grey/
+
+    .. versionadded:: 2.1
+ """
+
+ name = 'Earl Grey'
+ aliases = ['earl-grey', 'earlgrey', 'eg']
+ filenames = ['*.eg']
+ mimetypes = ['text/x-earl-grey']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ include('control'),
+ (r'[^\S\n]+', Text),
+ (r';;.*\n', Comment),
(r'[\[\]{}:(),;]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- include('errors'),
- (words((
- 'with', 'where', 'when', 'and', 'not', 'or', 'in',
- 'as', 'of', 'is'),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ include('errors'),
+ (words((
+ 'with', 'where', 'when', 'and', 'not', 'or', 'in',
+ 'as', 'of', 'is'),
prefix=r'(?<=\s|\[)', suffix=r'(?![\w$\-])'),
- Operator.Word),
+ Operator.Word),
(r'[*@]?->', Name.Function),
- (r'[+\-*/~^<>%&|?!@#.]*=', Operator.Word),
- (r'\.{2,3}', Operator.Word), # Range Operator
- (r'([+*/~^<>&|?!]+)|([#\-](?=\s))|@@+(?=\s)|=+', Operator),
+ (r'[+\-*/~^<>%&|?!@#.]*=', Operator.Word),
+ (r'\.{2,3}', Operator.Word), # Range Operator
+ (r'([+*/~^<>&|?!]+)|([#\-](?=\s))|@@+(?=\s)|=+', Operator),
(r'(?<![\w$\-])(var|let)(?:[^\w$])', Keyword.Declaration),
- include('keywords'),
- include('builtins'),
- include('assignment'),
- (r'''(?x)
+ include('keywords'),
+ include('builtins'),
+ include('assignment'),
+ (r'''(?x)
(?:()([a-zA-Z$_](?:[\w$\-]*[\w$])?)|
(?<=[\s{\[(])(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?))
- (?=.*%)''',
- bygroups(Punctuation, Name.Tag, Punctuation, Name.Class.Start), 'dbs'),
- (r'[rR]?`', String.Backtick, 'bt'),
- (r'[rR]?```', String.Backtick, 'tbt'),
+ (?=.*%)''',
+ bygroups(Punctuation, Name.Tag, Punctuation, Name.Class.Start), 'dbs'),
+ (r'[rR]?`', String.Backtick, 'bt'),
+ (r'[rR]?```', String.Backtick, 'tbt'),
(r'(?<=[\s\[{(,;])\.([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
r'(?=[\s\]}),;])', String.Symbol),
- include('nested'),
- (r'(?:[rR]|[rR]\.[gmi]{1,3})?"', String, combined('stringescape', 'dqs')),
- (r'(?:[rR]|[rR]\.[gmi]{1,3})?\'', String, combined('stringescape', 'sqs')),
- (r'"""', String, combined('stringescape', 'tdqs')),
- include('tuple'),
- include('import_paths'),
- include('name'),
- include('numbers'),
- ],
- 'dbs': [
+ include('nested'),
+ (r'(?:[rR]|[rR]\.[gmi]{1,3})?"', String, combined('stringescape', 'dqs')),
+ (r'(?:[rR]|[rR]\.[gmi]{1,3})?\'', String, combined('stringescape', 'sqs')),
+ (r'"""', String, combined('stringescape', 'tdqs')),
+ include('tuple'),
+ include('import_paths'),
+ include('name'),
+ include('numbers'),
+ ],
+ 'dbs': [
(r'(\.)([a-zA-Z$_](?:[\w$\-]*[\w$])?)(?=[.\[\s])',
- bygroups(Punctuation, Name.Class.DBS)),
+ bygroups(Punctuation, Name.Class.DBS)),
(r'(\[)([\^#][a-zA-Z$_](?:[\w$\-]*[\w$])?)(\])',
- bygroups(Punctuation, Name.Entity.DBS, Punctuation)),
- (r'\s+', Text),
- (r'%', Operator.DBS, '#pop'),
- ],
- 'import_paths': [
- (r'(?<=[\s:;,])(\.{1,3}(?:[\w\-]*/)*)(\w(?:[\w\-]*\w)*)(?=[\s;,])',
- bygroups(Text.Whitespace, Text)),
- ],
- 'assignment': [
+ bygroups(Punctuation, Name.Entity.DBS, Punctuation)),
+ (r'\s+', Text),
+ (r'%', Operator.DBS, '#pop'),
+ ],
+ 'import_paths': [
+ (r'(?<=[\s:;,])(\.{1,3}(?:[\w\-]*/)*)(\w(?:[\w\-]*\w)*)(?=[\s;,])',
+ bygroups(Text.Whitespace, Text)),
+ ],
+ 'assignment': [
(r'(\.)?([a-zA-Z$_](?:[\w$\-]*[\w$])?)'
- r'(?=\s+[+\-*/~^<>%&|?!@#.]*\=\s)',
- bygroups(Punctuation, Name.Variable))
- ],
- 'errors': [
- (words(('Error', 'TypeError', 'ReferenceError'),
+ r'(?=\s+[+\-*/~^<>%&|?!@#.]*\=\s)',
+ bygroups(Punctuation, Name.Variable))
+ ],
+ 'errors': [
+ (words(('Error', 'TypeError', 'ReferenceError'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Name.Exception),
- (r'''(?x)
+ Name.Exception),
+ (r'''(?x)
(?<![\w$])
E\.[\w$](?:[\w$\-]*[\w$])?
(?:\.[\w$](?:[\w$\-]*[\w$])?)*
(?=[({\[?!\s])''',
- Name.Exception),
- ],
- 'control': [
- (r'''(?x)
+ Name.Exception),
+ ],
+ 'control': [
+ (r'''(?x)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?!\n)\s+
- (?!and|as|each\*|each|in|is|mod|of|or|when|where|with)
+ (?!\n)\s+
+ (?!and|as|each\*|each|in|is|mod|of|or|when|where|with)
(?=(?:[+\-*/~^<>%&|?!@#.])?[a-zA-Z$_](?:[\w$-]*[\w$])?)''',
- Keyword.Control),
+ Keyword.Control),
(r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(?!\n)\s+(?=[\'"\d{\[(])',
- Keyword.Control),
- (r'''(?x)
- (?:
- (?<=[%=])|
- (?<=[=\-]>)|
- (?<=with|each|with)|
- (?<=each\*|where)
- )(\s+)
+ Keyword.Control),
+ (r'''(?x)
+ (?:
+ (?<=[%=])|
+ (?<=[=\-]>)|
+ (?<=with|each|with)|
+ (?<=each\*|where)
+ )(\s+)
([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
- bygroups(Text, Keyword.Control, Punctuation)),
- (r'''(?x)
- (?<![+\-*/~^<>%&|?!@#.])(\s+)
+ bygroups(Text, Keyword.Control, Punctuation)),
+ (r'''(?x)
+ (?<![+\-*/~^<>%&|?!@#.])(\s+)
([a-zA-Z$_](?:[\w$-]*[\w$])?)(:)''',
- bygroups(Text, Keyword.Control, Punctuation)),
- ],
- 'nested': [
- (r'''(?x)
+ bygroups(Text, Keyword.Control, Punctuation)),
+ ],
+ 'nested': [
+ (r'''(?x)
(?<=[\w$\]})])(\.)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
- (?=\s+with(?:\s|\n))''',
- bygroups(Punctuation, Name.Function)),
- (r'''(?x)
- (?<!\s)(\.)
+ (?=\s+with(?:\s|\n))''',
+ bygroups(Punctuation, Name.Function)),
+ (r'''(?x)
+ (?<!\s)(\.)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
(?=[}\]).,;:\s])''',
- bygroups(Punctuation, Name.Field)),
- (r'''(?x)
+ bygroups(Punctuation, Name.Field)),
+ (r'''(?x)
(?<=[\w$\]})])(\.)
([a-zA-Z$_](?:[\w$-]*[\w$])?)
(?=[\[{(:])''',
- bygroups(Punctuation, Name.Function)),
- ],
- 'keywords': [
- (words((
- 'each', 'each*', 'mod', 'await', 'break', 'chain',
- 'continue', 'elif', 'expr-value', 'if', 'match',
- 'return', 'yield', 'pass', 'else', 'require', 'var',
- 'let', 'async', 'method', 'gen'),
+ bygroups(Punctuation, Name.Function)),
+ ],
+ 'keywords': [
+ (words((
+ 'each', 'each*', 'mod', 'await', 'break', 'chain',
+ 'continue', 'elif', 'expr-value', 'if', 'match',
+ 'return', 'yield', 'pass', 'else', 'require', 'var',
+ 'let', 'async', 'method', 'gen'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Keyword.Pseudo),
- (words(('this', 'self', '@'),
+ Keyword.Pseudo),
+ (words(('this', 'self', '@'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
- Keyword.Constant),
- (words((
- 'Function', 'Object', 'Array', 'String', 'Number',
- 'Boolean', 'ErrorFactory', 'ENode', 'Promise'),
+ Keyword.Constant),
+ (words((
+ 'Function', 'Object', 'Array', 'String', 'Number',
+ 'Boolean', 'ErrorFactory', 'ENode', 'Promise'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$])'),
- Keyword.Type),
- ],
- 'builtins': [
- (words((
- 'send', 'object', 'keys', 'items', 'enumerate', 'zip',
- 'product', 'neighbours', 'predicate', 'equal',
- 'nequal', 'contains', 'repr', 'clone', 'range',
- 'getChecker', 'get-checker', 'getProperty', 'get-property',
- 'getProjector', 'get-projector', 'consume', 'take',
- 'promisify', 'spawn', 'constructor'),
+ Keyword.Type),
+ ],
+ 'builtins': [
+ (words((
+ 'send', 'object', 'keys', 'items', 'enumerate', 'zip',
+ 'product', 'neighbours', 'predicate', 'equal',
+ 'nequal', 'contains', 'repr', 'clone', 'range',
+ 'getChecker', 'get-checker', 'getProperty', 'get-property',
+ 'getProjector', 'get-projector', 'consume', 'take',
+ 'promisify', 'spawn', 'constructor'),
prefix=r'(?<![\w\-#.])', suffix=r'(?![\w\-.])'),
- Name.Builtin),
- (words((
- 'true', 'false', 'null', 'undefined'),
+ Name.Builtin),
+ (words((
+ 'true', 'false', 'null', 'undefined'),
prefix=r'(?<![\w\-$.])', suffix=r'(?![\w\-$.])'),
- Name.Constant),
- ],
- 'name': [
+ Name.Constant),
+ ],
+ 'name': [
(r'@([a-zA-Z$_](?:[\w$-]*[\w$])?)', Name.Variable.Instance),
(r'([a-zA-Z$_](?:[\w$-]*[\w$])?)(\+\+|\-\-)?',
- bygroups(Name.Symbol, Operator.Word))
- ],
- 'tuple': [
+ bygroups(Name.Symbol, Operator.Word))
+ ],
+ 'tuple': [
(r'#[a-zA-Z_][\w\-]*(?=[\s{(,;])', Name.Namespace)
- ],
- 'interpoling_string': [
- (r'\}', String.Interpol, '#pop'),
- include('root')
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'[^\\\'"]', String),
- (r'[\'"\\]', String),
- (r'\n', String) # All strings are multiline in EG
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape),
- (r'\{', String.Interpol, 'interpoling_string'),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- ],
- 'bt': [
- (r'`', String.Backtick, '#pop'),
- (r'(?<!`)\n', String.Backtick),
- (r'\^=?', String.Escape),
- (r'.+', String.Backtick),
- ],
- 'tbt': [
- (r'```', String.Backtick, '#pop'),
- (r'\n', String.Backtick),
- (r'\^=?', String.Escape),
+ ],
+ 'interpoling_string': [
+ (r'\}', String.Interpol, '#pop'),
+ include('root')
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'[^\\\'"]', String),
+ (r'[\'"\\]', String),
+ (r'\n', String) # All strings are multiline in EG
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape),
+ (r'\{', String.Interpol, 'interpoling_string'),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ ],
+ 'bt': [
+ (r'`', String.Backtick, '#pop'),
+ (r'(?<!`)\n', String.Backtick),
+ (r'\^=?', String.Escape),
+ (r'.+', String.Backtick),
+ ],
+ 'tbt': [
+ (r'```', String.Backtick, '#pop'),
+ (r'\n', String.Backtick),
+ (r'\^=?', String.Escape),
(r'[^`]+', String.Backtick),
- ],
- 'numbers': [
- (r'\d+\.(?!\.)\d*([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'8r[0-7]+', Number.Oct),
- (r'2r[01]+', Number.Bin),
- (r'16r[a-fA-F0-9]+', Number.Hex),
+ ],
+ 'numbers': [
+ (r'\d+\.(?!\.)\d*([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'8r[0-7]+', Number.Oct),
+ (r'2r[01]+', Number.Bin),
+ (r'16r[a-fA-F0-9]+', Number.Hex),
(r'([3-79]|[12][0-9]|3[0-6])r[a-zA-Z\d]+(\.[a-zA-Z\d]+)?',
Number.Radix),
- (r'\d+', Number.Integer)
- ],
- }
+ (r'\d+', Number.Integer)
+ ],
+ }
class JuttleLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/julia.py b/contrib/python/Pygments/py3/pygments/lexers/julia.py
index 390d5d7158..d81dbc1a22 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/julia.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/julia.py
@@ -1,53 +1,53 @@
-"""
- pygments.lexers.julia
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Julia language.
-
+"""
+ pygments.lexers.julia
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Julia language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
from pygments.util import shebang_matches
from pygments.lexers._julia_builtins import OPERATORS_LIST, DOTTED_OPERATORS_LIST, \
KEYWORD_LIST, BUILTIN_LIST, LITERAL_LIST
-
-__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
-
+
+__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
+
# see https://docs.julialang.org/en/v1/manual/variables/#Allowed-Variable-Names
allowed_variable = \
'(?:[a-zA-Z_\u00A1-\U0010ffff][a-zA-Z_0-9!\u00A1-\U0010ffff]*)'
# see https://github.com/JuliaLang/julia/blob/master/src/flisp/julia_opsuffs.h
operator_suffixes = r'[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹᴺᴼᴾᴿᵀᵁᵂᵃᵇᵈᵉᵍᵏᵐᵒᵖᵗᵘᵛᵝᵞᵟᵠᵡᵢᵣᵤᵥᵦᵧᵨᵩᵪᶜᶠᶥᶦᶫᶰᶸᶻᶿ′″‴‵‶‷⁗⁰ⁱ⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎ₐₑₒₓₕₖₗₘₙₚₛₜⱼⱽ]*'
-
-class JuliaLexer(RegexLexer):
- """
- For `Julia <http://julialang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Julia'
- aliases = ['julia', 'jl']
- filenames = ['*.jl']
- mimetypes = ['text/x-julia', 'application/x-julia']
-
- flags = re.MULTILINE | re.UNICODE
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'#=', Comment.Multiline, "blockcomment"),
- (r'#.*$', Comment),
+
+class JuliaLexer(RegexLexer):
+ """
+ For `Julia <http://julialang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Julia'
+ aliases = ['julia', 'jl']
+ filenames = ['*.jl']
+ mimetypes = ['text/x-julia', 'application/x-julia']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'#=', Comment.Multiline, "blockcomment"),
+ (r'#.*$', Comment),
(r'[\[\](),;]', Punctuation),
-
+
# symbols
# intercept range expressions first
(r'(' + allowed_variable + r')(\s*)(:)(' + allowed_variable + ')',
@@ -55,7 +55,7 @@ class JuliaLexer(RegexLexer):
# then match :name which does not follow closing brackets, digits, or the
# ::, <:, and :> operators
(r'(?<![\]):<>\d.])(:' + allowed_variable + ')', String.Symbol),
-
+
# type assertions - excludes expressions like ::typeof(sin) and ::avec[1]
(r'(?<=::)(\s*)(' + allowed_variable + r')\b(?![(\[])', bygroups(Text, Keyword.Type)),
# type comparisons
@@ -79,17 +79,17 @@ class JuliaLexer(RegexLexer):
# NOTE
# Patterns below work only for definition sites and thus hardly reliable.
#
- # functions
+ # functions
# (r'(function)(\s+)(' + allowed_variable + ')',
# bygroups(Keyword, Text, Name.Function)),
-
- # chars
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
- r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
-
- # try to match trailing transpose
+
+ # chars
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
+ r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
+
+ # try to match trailing transpose
(r'(?<=[.\w)\]])(\'' + operator_suffixes + ')+', Operator),
-
+
# raw strings
(r'(raw)(""")', bygroups(String.Affix, String), 'tqrawstring'),
(r'(raw)(")', bygroups(String.Affix, String), 'rawstring'),
@@ -129,10 +129,10 @@ class JuliaLexer(RegexLexer):
# builtin literals
(words(LITERAL_LIST, suffix=r'\b'), Name.Builtin),
- # names
+ # names
(allowed_variable, Name),
-
- # numbers
+
+ # numbers
(r'(\d+((_\d+)+)?\.(?!\.)(\d+((_\d+)+)?)?|\.\d+((_\d+)+)?)([eEf][+-]?[0-9]+)?', Number.Float),
(r'\d+((_\d+)+)?[eEf][+-]?[0-9]+', Number.Float),
(r'0x[a-fA-F0-9]+((_[a-fA-F0-9]+)+)?(\.([a-fA-F0-9]+((_[a-fA-F0-9]+)+)?)?)?p[+-]?\d+', Number.Float),
@@ -143,14 +143,14 @@ class JuliaLexer(RegexLexer):
# single dot operator matched last to permit e.g. ".1" as a float
(words(['.']), Operator),
- ],
-
- "blockcomment": [
- (r'[^=#]', Comment.Multiline),
- (r'#=', Comment.Multiline, '#push'),
- (r'=#', Comment.Multiline, '#pop'),
- (r'[=#]', Comment.Multiline),
- ],
+ ],
+
+ "blockcomment": [
+ (r'[^=#]', Comment.Multiline),
+ (r'#=', Comment.Multiline, '#push'),
+ (r'=#', Comment.Multiline, '#pop'),
+ (r'[=#]', Comment.Multiline),
+ ],
'curly': [
(r'\{', Punctuation, '#push'),
@@ -182,7 +182,7 @@ class JuliaLexer(RegexLexer):
include('root'),
],
- 'string': [
+ 'string': [
(r'(")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
# FIXME: This escape pattern is not perfect.
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
@@ -192,7 +192,7 @@ class JuliaLexer(RegexLexer):
String.Interpol),
(r'[^"$%\\]+', String),
(r'.', String),
- ],
+ ],
'tqstring': [
(r'(""")(' + allowed_variable + r'|\d+)?', bygroups(String, String.Affix), '#pop'),
(r'\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)', String.Escape),
@@ -226,33 +226,33 @@ class JuliaLexer(RegexLexer):
(r'[^\\`$]+', String.Backtick),
(r'.', String.Backtick),
],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'julia')
-
-
-class JuliaConsoleLexer(Lexer):
- """
- For Julia console sessions. Modeled after MatlabSessionLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Julia console'
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'julia')
+
+
+class JuliaConsoleLexer(Lexer):
+ """
+ For Julia console sessions. Modeled after MatlabSessionLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Julia console'
aliases = ['jlcon', 'julia-repl']
-
- def get_tokens_unprocessed(self, text):
- jllexer = JuliaLexer(**self.options)
+
+ def get_tokens_unprocessed(self, text):
+ jllexer = JuliaLexer(**self.options)
start = 0
- curcode = ''
- insertions = []
+ curcode = ''
+ insertions = []
output = False
error = False
-
+
for line in text.splitlines(True):
- if line.startswith('julia>'):
+ if line.startswith('julia>'):
insertions.append((len(curcode), [(0, Generic.Prompt, line[:6])]))
- curcode += line[6:]
+ curcode += line[6:]
output = False
error = False
elif line.startswith('help?>') or line.startswith('shell>'):
@@ -263,12 +263,12 @@ class JuliaConsoleLexer(Lexer):
elif line.startswith(' ') and not output:
insertions.append((len(curcode), [(0, Text, line[:6])]))
curcode += line[6:]
- else:
- if curcode:
+ else:
+ if curcode:
yield from do_insertions(
insertions, jllexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
+ curcode = ''
+ insertions = []
if line.startswith('ERROR: ') or error:
yield start, Generic.Error, line
error = True
@@ -276,7 +276,7 @@ class JuliaConsoleLexer(Lexer):
yield start, Generic.Output, line
output = True
start += len(line)
-
+
if curcode:
yield from do_insertions(
insertions, jllexer.get_tokens_unprocessed(curcode))
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jvm.py b/contrib/python/Pygments/py3/pygments/lexers/jvm.py
index 4ffc5c7fdf..3ad2f6dad7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jvm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jvm.py
@@ -1,78 +1,78 @@
-"""
- pygments.lexers.jvm
- ~~~~~~~~~~~~~~~~~~~
-
- Pygments lexers for JVM languages.
-
+"""
+ pygments.lexers.jvm
+ ~~~~~~~~~~~~~~~~~~~
+
+ Pygments lexers for JVM languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.util import shebang_matches
-from pygments import unistring as uni
-
-__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
- 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
- 'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+ this, combined, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import shebang_matches
+from pygments import unistring as uni
+
+__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
+ 'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
+ 'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
'PigLexer', 'GoloLexer', 'JasminLexer', 'SarlLexer']
-
-
-class JavaLexer(RegexLexer):
- """
+
+
+class JavaLexer(RegexLexer):
+ """
For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
- """
-
- name = 'Java'
- aliases = ['java']
- filenames = ['*.java']
- mimetypes = ['text/x-java']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- tokens = {
- 'root': [
+ """
+
+ name = 'Java'
+ aliases = ['java']
+ filenames = ['*.java']
+ mimetypes = ['text/x-java']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ tokens = {
+ 'root': [
(r'(^\s*)((?:(?:public|private|protected|static|strictfp)(?:\s+))*)(record)\b',
bygroups(Text, using(this), Keyword.Declaration), 'class'),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- # keywords: go before method names to avoid lexing "throw new XYZ"
- # as a method signature
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
- Keyword),
- # method names
- (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
- r'((?:[^\W\d]|\$)[\w$]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ # keywords: go before method names to avoid lexing "throw new XYZ"
+ # as a method signature
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
+ Keyword),
+ # method names
+ (r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
+ r'((?:[^\W\d]|\$)[\w$]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
- (r'@[^\W\d][\w.]*', Name.Decorator),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
+ (r'@[^\W\d][\w.]*', Name.Decorator),
+ (r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|sealed|static|strictfp|super|synchronized|throws|'
r'transient|volatile|yield)\b', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'(true|false|null)\b', Keyword.Constant),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)\b', Keyword.Declaration, 'class'),
(r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
'var'),
(r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
'import'),
(r'"', String, 'string'),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
Name.Attribute)),
(r'^(\s*)(default)(:)', bygroups(Text, Keyword, Punctuation)),
(r'^(\s*)((?:[^\W\d]|\$)[\w$]*)(:)', bygroups(Text, Name.Label,
Punctuation)),
- (r'([^\W\d]|\$)[\w$]*', Name),
+ (r'([^\W\d]|\$)[\w$]*', Name),
(r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
r'\.[0-9][0-9_]*)'
r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
@@ -87,18 +87,18 @@ class JavaLexer(RegexLexer):
(r'0|[1-9][0-9_]*[lL]?', Number.Integer),
(r'[~^*!%&\[\]<>|+=/?-]', Operator),
(r'[{}();:.,]', Punctuation),
- (r'\n', Text)
- ],
- 'class': [
+ (r'\n', Text)
+ ],
+ 'class': [
(r'\s+', Text),
- (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
- ],
+ (r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
+ ],
'var': [
(r'([^\W\d]|\$)[\w$]*', Name, '#pop')
],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
'string': [
(r'[^\\"]+', String),
(r'\\\\', String), # Escaped backslash
@@ -106,60 +106,60 @@ class JavaLexer(RegexLexer):
(r'\\', String), # Bare backslash
(r'"', String, '#pop'), # Closing quote
],
- }
-
-
-class AspectJLexer(JavaLexer):
- """
- For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'AspectJ'
- aliases = ['aspectj']
- filenames = ['*.aj']
- mimetypes = ['text/x-aspectj']
-
+ }
+
+
+class AspectJLexer(JavaLexer):
+ """
+ For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'AspectJ'
+ aliases = ['aspectj']
+ filenames = ['*.aj']
+ mimetypes = ['text/x-aspectj']
+
aj_keywords = {
- 'aspect', 'pointcut', 'privileged', 'call', 'execution',
- 'initialization', 'preinitialization', 'handler', 'get', 'set',
- 'staticinitialization', 'target', 'args', 'within', 'withincode',
- 'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
- 'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
- 'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
- 'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
- 'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
- 'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
+ 'aspect', 'pointcut', 'privileged', 'call', 'execution',
+ 'initialization', 'preinitialization', 'handler', 'get', 'set',
+ 'staticinitialization', 'target', 'args', 'within', 'withincode',
+ 'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
+ 'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
+ 'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
+ 'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
+ 'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
+ 'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
}
aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.aj_keywords:
- yield index, Keyword, value
- elif token is Name.Label and value in self.aj_inter_type:
- yield index, Keyword, value[:-1]
- yield index, Operator, value[-1]
- elif token is Name.Decorator and value in self.aj_inter_type_annotation:
- yield index, Keyword, value
- else:
- yield index, token, value
-
-
-class ScalaLexer(RegexLexer):
- """
- For `Scala <http://www.scala-lang.org>`_ source code.
- """
-
- name = 'Scala'
- aliases = ['scala']
- filenames = ['*.scala']
- mimetypes = ['text/x-scala']
-
- flags = re.MULTILINE | re.DOTALL
-
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in self.aj_keywords:
+ yield index, Keyword, value
+ elif token is Name.Label and value in self.aj_inter_type:
+ yield index, Keyword, value[:-1]
+ yield index, Operator, value[-1]
+ elif token is Name.Decorator and value in self.aj_inter_type_annotation:
+ yield index, Keyword, value
+ else:
+ yield index, token, value
+
+
+class ScalaLexer(RegexLexer):
+ """
+ For `Scala <http://www.scala-lang.org>`_ source code.
+ """
+
+ name = 'Scala'
+ aliases = ['scala']
+ filenames = ['*.scala']
+ mimetypes = ['text/x-scala']
+
+ flags = re.MULTILINE | re.DOTALL
+
opchar = '[!#%&*\\-\\/:?@^' + uni.combine('Sm', 'So') + ']'
letter = '[_\\$' + uni.combine('Ll', 'Lu', 'Lo', 'Nl', 'Lt') + ']'
upperLetter = '[' + uni.combine('Lu', 'Lt') + ']'
@@ -174,27 +174,27 @@ class ScalaLexer(RegexLexer):
anyId = r'(?:%s|%s)' % (plainid, backQuotedId)
notStartOfComment = r'(?!//|/\*)'
endOfLineMaybeWithComment = r'(?=\s*(//|$))'
-
+
keywords = (
'new', 'return', 'throw', 'classOf', 'isInstanceOf', 'asInstanceOf',
'else', 'if', 'then', 'do', 'while', 'for', 'yield', 'match', 'case',
'catch', 'finally', 'try'
)
-
+
operators = (
'<%', '=:=', '<:<', '<%<', '>:', '<:', '=', '==', '!=', '<=', '>=',
'<>', '<', '>', '<-', '←', '->', '→', '=>', '⇒', '?', '@', '|', '-',
'+', '*', '%', '~', '\\'
)
-
+
storage_modifiers = (
'private', 'protected', 'synchronized', '@volatile', 'abstract',
'final', 'lazy', 'sealed', 'implicit', 'override', '@transient',
'@native'
)
-
- tokens = {
- 'root': [
+
+ tokens = {
+ 'root': [
include('whitespace'),
include('comments'),
include('script-header'),
@@ -309,7 +309,7 @@ class ScalaLexer(RegexLexer):
],
'constants': [
(r'\b(this|super)\b', Name.Builtin.Pseudo),
- (r'(true|false|null)\b', Keyword.Constant),
+ (r'(true|false|null)\b', Keyword.Constant),
(r'0[xX][0-9a-fA-F_]*', Number.Hex),
(r'([0-9][0-9_]*\.[0-9][0-9_]*|\.[0-9][0-9_]*)'
r'([eE][+-]?[0-9][0-9_]*)?[fFdD]?', Number.Float),
@@ -317,22 +317,22 @@ class ScalaLexer(RegexLexer):
(r'[0-9]+([eE][+-]?[0-9]+)[fFdD]?', Number.Float),
(r'[0-9]+[lL]', Number.Integer.Long),
(r'[0-9]+', Number.Integer),
- (r'""".*?"""(?!")', String),
+ (r'""".*?"""(?!")', String),
(r'"(\\\\|\\"|[^"])*"', String),
(r"(')(\\.)(')", bygroups(String.Char, String.Escape, String.Char)),
(r"'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- ],
+ ],
"strings": [
(r'[fs]"""', String, 'interpolated-string-triple'),
(r'[fs]"', String, 'interpolated-string'),
(r'raw"(\\\\|\\"|[^"])*"', String),
- ],
+ ],
'symbols': [
(r"('%s)(?!')" % plainid, String.Symbol),
- ],
+ ],
'singleton-type': [
(r'(\.)(type)\b', bygroups(Punctuation, Keyword)),
- ],
+ ],
'inline': [
            # inline is a soft modifier, only highlighted if followed by if,
# match or parameters.
@@ -353,19 +353,19 @@ class ScalaLexer(RegexLexer):
],
# States
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
- ],
+ ],
'import-path': [
(r'(?<=[\n;:])', Text, '#pop'),
include('comments'),
(r'\b(given)\b', Keyword),
include('qualified-name'),
(r'\{', Punctuation, 'import-path-curly-brace'),
- ],
+ ],
'import-path-curly-brace': [
include('whitespace'),
include('comments'),
@@ -375,7 +375,7 @@ class ScalaLexer(RegexLexer):
(r',', Punctuation),
(r'[\[\]]', Punctuation),
include('qualified-name'),
- ],
+ ],
'export-path': [
(r'(?<=[\n;:])', Text, '#pop'),
include('comments'),
@@ -397,19 +397,19 @@ class ScalaLexer(RegexLexer):
include('qualified-name'),
],
'interpolated-string-triple': [
- (r'"""(?!")', String, '#pop'),
- (r'"', String),
+ (r'"""(?!")', String, '#pop'),
+ (r'"', String),
include('interpolated-string-common'),
- ],
+ ],
'interpolated-string': [
- (r'"', String, '#pop'),
+ (r'"', String, '#pop'),
include('interpolated-string-common'),
- ],
+ ],
'interpolated-string-brace': [
- (r'\}', String.Interpol, '#pop'),
+ (r'\}', String.Interpol, '#pop'),
(r'\{', Punctuation, 'interpolated-string-nested-brace'),
- include('root'),
- ],
+ include('root'),
+ ],
'interpolated-string-nested-brace': [
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
@@ -432,135 +432,135 @@ class ScalaLexer(RegexLexer):
(r'\$\{', String.Interpol, 'interpolated-string-brace'),
(r'\\.', String),
],
- }
-
-
-class GosuLexer(RegexLexer):
- """
- For Gosu source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Gosu'
- aliases = ['gosu']
- filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
- mimetypes = ['text/x-gosu']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
- r'index|while|do|continue|break|return|try|catch|finally|this|'
- r'throw|new|switch|case|default|eval|super|outer|classpath|'
- r'using)\b', Keyword),
- (r'(var|delegate|construct|function|private|internal|protected|'
- r'public|abstract|override|final|static|extends|transient|'
- r'implements|represents|readonly)\b', Keyword.Declaration),
- (r'(property\s+)(get|set)?', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
- (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(uses)(\s+)([\w.]+\*?)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'"', String, 'string'),
- (r'(\??[.#])([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'(:)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_$]\w*', Name),
- (r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\n', Text)
- ],
- 'templateText': [
- (r'(\\<)|(\\\$)', String),
- (r'(<%@\s+)(extends|params)',
- bygroups(Operator, Name.Decorator), 'stringTemplate'),
- (r'<%!--.*?--%>', Comment.Multiline),
- (r'(<%)|(<%=)', Operator, 'stringTemplate'),
- (r'\$\{', Operator, 'stringTemplateShorthand'),
- (r'.', String)
- ],
- 'string': [
- (r'"', String, '#pop'),
- include('templateText')
- ],
- 'stringTemplate': [
- (r'"', String, 'string'),
- (r'%>', Operator, '#pop'),
- include('root')
- ],
- 'stringTemplateShorthand': [
- (r'"', String, 'string'),
- (r'\{', Operator, 'stringTemplateShorthand'),
- (r'\}', Operator, '#pop'),
- include('root')
- ],
- }
-
-
-class GosuTemplateLexer(Lexer):
- """
- For Gosu templates.
-
- .. versionadded:: 1.5
- """
-
- name = 'Gosu Template'
- aliases = ['gst']
- filenames = ['*.gst']
- mimetypes = ['text/x-gosu-template']
-
- def get_tokens_unprocessed(self, text):
- lexer = GosuLexer()
- stack = ['templateText']
+ }
+
+
+class GosuLexer(RegexLexer):
+ """
+ For Gosu source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Gosu'
+ aliases = ['gosu']
+ filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
+ mimetypes = ['text/x-gosu']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
+ r'index|while|do|continue|break|return|try|catch|finally|this|'
+ r'throw|new|switch|case|default|eval|super|outer|classpath|'
+ r'using)\b', Keyword),
+ (r'(var|delegate|construct|function|private|internal|protected|'
+ r'public|abstract|override|final|static|extends|transient|'
+ r'implements|represents|readonly)\b', Keyword.Declaration),
+ (r'(property\s+)(get|set)?', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
+ (r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Class)),
+ (r'(uses)(\s+)([\w.]+\*?)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'"', String, 'string'),
+ (r'(\??[.#])([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'(:)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'templateText': [
+ (r'(\\<)|(\\\$)', String),
+ (r'(<%@\s+)(extends|params)',
+ bygroups(Operator, Name.Decorator), 'stringTemplate'),
+ (r'<%!--.*?--%>', Comment.Multiline),
+ (r'(<%)|(<%=)', Operator, 'stringTemplate'),
+ (r'\$\{', Operator, 'stringTemplateShorthand'),
+ (r'.', String)
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ include('templateText')
+ ],
+ 'stringTemplate': [
+ (r'"', String, 'string'),
+ (r'%>', Operator, '#pop'),
+ include('root')
+ ],
+ 'stringTemplateShorthand': [
+ (r'"', String, 'string'),
+ (r'\{', Operator, 'stringTemplateShorthand'),
+ (r'\}', Operator, '#pop'),
+ include('root')
+ ],
+ }
+
+
+class GosuTemplateLexer(Lexer):
+ """
+ For Gosu templates.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Gosu Template'
+ aliases = ['gst']
+ filenames = ['*.gst']
+ mimetypes = ['text/x-gosu-template']
+
+ def get_tokens_unprocessed(self, text):
+ lexer = GosuLexer()
+ stack = ['templateText']
yield from lexer.get_tokens_unprocessed(text, stack)
-
-
-class GroovyLexer(RegexLexer):
- """
- For `Groovy <http://groovy.codehaus.org/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Groovy'
- aliases = ['groovy']
- filenames = ['*.groovy','*.gradle']
- mimetypes = ['text/x-groovy']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # Groovy allows a file to start with a shebang
- (r'#!(.*?)$', Comment.Preproc, 'base'),
- default('base'),
- ],
- 'base': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
+
+
+class GroovyLexer(RegexLexer):
+ """
+ For `Groovy <http://groovy.codehaus.org/>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Groovy'
+ aliases = ['groovy']
+ filenames = ['*.groovy','*.gradle']
+ mimetypes = ['text/x-groovy']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # Groovy allows a file to start with a shebang
+ (r'#!(.*?)$', Comment.Preproc, 'base'),
+ default('base'),
+ ],
+ 'base': [
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
# keywords: go before method names to avoid lexing "throw new XYZ"
# as a method signature
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
- Keyword),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
+ Keyword),
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'('
@@ -571,536 +571,536 @@ class GroovyLexer(RegexLexer):
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'""".*?"""', String.Double),
- (r"'''.*?'''", String.Single),
+ (r'(abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
+ (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'""".*?"""', String.Double),
+ (r"'''.*?'''", String.Single),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'\$/((?!/\$).)*/\$', String),
+ (r'\$/((?!/\$).)*/\$', String),
(r'/(\\\\|\\[^\\]|[^/\\])*/', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'groovy')
-
-
-class IokeLexer(RegexLexer):
- """
- For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
- prototype based programming language) source.
-
- .. versionadded:: 1.4
- """
- name = 'Ioke'
- filenames = ['*.ik']
- aliases = ['ioke', 'ik']
- mimetypes = ['text/x-iokesrc']
- tokens = {
- 'interpolatableText': [
- (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
- r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
- (r'#\{', Punctuation, 'textInterpolationRoot')
- ],
-
- 'text': [
- (r'(?<!\\)"', String, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String)
- ],
-
- 'documentation': [
- (r'(?<!\\)"', String.Doc, '#pop'),
- include('interpolatableText'),
- (r'[^"]', String.Doc)
- ],
-
- 'textInterpolationRoot': [
- (r'\}', Punctuation, '#pop'),
- include('root')
- ],
-
- 'slashRegexp': [
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'groovy')
+
+
+class IokeLexer(RegexLexer):
+ """
+ For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
+ prototype based programming language) source.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Ioke'
+ filenames = ['*.ik']
+ aliases = ['ioke', 'ik']
+ mimetypes = ['text/x-iokesrc']
+ tokens = {
+ 'interpolatableText': [
+ (r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
+ r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
+ (r'#\{', Punctuation, 'textInterpolationRoot')
+ ],
+
+ 'text': [
+ (r'(?<!\\)"', String, '#pop'),
+ include('interpolatableText'),
+ (r'[^"]', String)
+ ],
+
+ 'documentation': [
+ (r'(?<!\\)"', String.Doc, '#pop'),
+ include('interpolatableText'),
+ (r'[^"]', String.Doc)
+ ],
+
+ 'textInterpolationRoot': [
+ (r'\}', Punctuation, '#pop'),
+ include('root')
+ ],
+
+ 'slashRegexp': [
(r'(?<!\\)/[im-psux]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\/', String.Regex),
- (r'[^/]', String.Regex)
- ],
-
- 'squareRegexp': [
+ include('interpolatableText'),
+ (r'\\/', String.Regex),
+ (r'[^/]', String.Regex)
+ ],
+
+ 'squareRegexp': [
(r'(?<!\\)][im-psux]*', String.Regex, '#pop'),
- include('interpolatableText'),
- (r'\\]', String.Regex),
- (r'[^\]]', String.Regex)
- ],
-
- 'squareText': [
- (r'(?<!\\)]', String, '#pop'),
- include('interpolatableText'),
- (r'[^\]]', String)
- ],
-
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
-
- # Comments
- (r';(.*?)\n', Comment),
- (r'\A#!(.*?)\n', Comment),
-
- # Regexps
- (r'#/', String.Regex, 'slashRegexp'),
- (r'#r\[', String.Regex, 'squareRegexp'),
-
- # Symbols
- (r':[\w!:?]+', String.Symbol),
- (r'[\w!:?]+:(?![\w!?])', String.Other),
+ include('interpolatableText'),
+ (r'\\]', String.Regex),
+ (r'[^\]]', String.Regex)
+ ],
+
+ 'squareText': [
+ (r'(?<!\\)]', String, '#pop'),
+ include('interpolatableText'),
+ (r'[^\]]', String)
+ ],
+
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+
+ # Comments
+ (r';(.*?)\n', Comment),
+ (r'\A#!(.*?)\n', Comment),
+
+ # Regexps
+ (r'#/', String.Regex, 'slashRegexp'),
+ (r'#r\[', String.Regex, 'squareRegexp'),
+
+ # Symbols
+ (r':[\w!:?]+', String.Symbol),
+ (r'[\w!:?]+:(?![\w!?])', String.Other),
(r':"(\\\\|\\[^\\]|[^"\\])*"', String.Symbol),
-
- # Documentation
- (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
- r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
- r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
-
- # Text
- (r'"', String, 'text'),
- (r'#\[', String, 'squareText'),
-
- # Mimic
- (r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
-
- # Assignment
- (r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
- Name.Variable),
-
- # keywords
- (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
- r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
- r'with)(?![\w!:?])', Keyword.Reserved),
-
- # Origin
- (r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
-
- # Base
- (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
- r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
- r'(?![\w!:?])', Keyword),
-
- # Ground
- (r'(stackTraceAsText)(?![\w!:?])', Keyword),
-
- # DefaultBehaviour Literals
- (r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Case
- (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
- r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Reflection
- (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
- r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
- r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
- r'(?![\w!:?])', Keyword),
-
- # DefaultBehaviour Aspects
- (r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour
- (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
- r'(?![\w!:?])', Keyword),
- (r'(use|destructuring)', Keyword.Reserved),
-
- # DefaultBehavior BaseBehavior
- (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
- r'documentation|identity|removeCell!|undefineCell)'
- r'(?![\w!:?])', Keyword),
-
- # DefaultBehavior Internal
- (r'(internal:compositeRegexp|internal:concatenateText|'
- r'internal:createDecimal|internal:createNumber|'
- r'internal:createRegexp|internal:createText)'
- r'(?![\w!:?])', Keyword.Reserved),
-
- # DefaultBehaviour Conditions
- (r'(availableRestarts|bind|error\!|findRestart|handle|'
- r'invokeRestart|rescue|restart|signal\!|warn\!)'
- r'(?![\w!:?])', Keyword.Reserved),
-
- # constants
- (r'(nil|false|true)(?![\w!:?])', Name.Constant),
-
- # names
- (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
- r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
- r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
- r'Conditions|Definitions|FlowControl|Internal|Literals|'
- r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
- r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
- r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
- r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
- r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
- r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
-
- # functions
+
+ # Documentation
+ (r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
+ r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
+ r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
+
+ # Text
+ (r'"', String, 'text'),
+ (r'#\[', String, 'squareText'),
+
+ # Mimic
+ (r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
+
+ # Assignment
+ (r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
+ Name.Variable),
+
+ # keywords
+ (r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
+ r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
+ r'with)(?![\w!:?])', Keyword.Reserved),
+
+ # Origin
+ (r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
+
+ # Base
+ (r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
+ r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
+ r'(?![\w!:?])', Keyword),
+
+ # Ground
+ (r'(stackTraceAsText)(?![\w!:?])', Keyword),
+
+ # DefaultBehaviour Literals
+ (r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour Case
+ (r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
+ r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour Reflection
+ (r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
+ r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
+ r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
+ r'(?![\w!:?])', Keyword),
+
+ # DefaultBehaviour Aspects
+ (r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour
+ (r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
+ r'(?![\w!:?])', Keyword),
+ (r'(use|destructuring)', Keyword.Reserved),
+
+ # DefaultBehavior BaseBehavior
+ (r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
+ r'documentation|identity|removeCell!|undefineCell)'
+ r'(?![\w!:?])', Keyword),
+
+ # DefaultBehavior Internal
+ (r'(internal:compositeRegexp|internal:concatenateText|'
+ r'internal:createDecimal|internal:createNumber|'
+ r'internal:createRegexp|internal:createText)'
+ r'(?![\w!:?])', Keyword.Reserved),
+
+ # DefaultBehaviour Conditions
+ (r'(availableRestarts|bind|error\!|findRestart|handle|'
+ r'invokeRestart|rescue|restart|signal\!|warn\!)'
+ r'(?![\w!:?])', Keyword.Reserved),
+
+ # constants
+ (r'(nil|false|true)(?![\w!:?])', Name.Constant),
+
+ # names
+ (r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
+ r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
+ r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
+ r'Conditions|Definitions|FlowControl|Internal|Literals|'
+ r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
+ r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
+ r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
+ r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
+ r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
+ r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
+
+ # functions
('(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
'(?![\\w!:?])', Name.Function),
-
- # Numbers
- (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
- (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'-?\d+', Number.Integer),
-
- (r'#\(', Punctuation),
-
- # Operators
- (r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
- r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
- r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
- r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
- r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
- r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
+
+ # Numbers
+ (r'-?0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'-?\d+', Number.Integer),
+
+ (r'#\(', Punctuation),
+
+ # Operators
+ (r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
+ r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
+ r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
+ r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
+ r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
+ r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
r'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
- (r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
- Operator),
-
- # Punctuation
- (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
-
- # kinds
- (r'[A-Z][\w!:?]*', Name.Class),
-
- # default cellnames
- (r'[a-z_][\w!:?]*', Name)
- ]
- }
-
-
-class ClojureLexer(RegexLexer):
- """
- Lexer for `Clojure <http://clojure.org/>`_ source code.
-
- .. versionadded:: 0.11
- """
- name = 'Clojure'
- aliases = ['clojure', 'clj']
- filenames = ['*.clj']
- mimetypes = ['text/x-clojure', 'application/x-clojure']
-
- special_forms = (
- '.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
- )
-
- # It's safe to consider 'ns' a declaration thing because it defines a new
- # namespace.
- declarations = (
- 'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
- 'defstruct', 'defonce', 'declare', 'definline', 'definterface',
- 'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
- )
-
- builtins = (
- '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
- 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
- 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
- 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
- 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
- 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
- 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
- 'butlast', 'byte', 'cast', 'char', 'children', 'class',
- 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
- 'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
- 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
- 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
- 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
- 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
- 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
- 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
- 'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
- 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
- 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
- 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
- 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
- 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
- 'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
- 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
- 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
- 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
- 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
- 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
- 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
- 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
- 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
- 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
- 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
- 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
- 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
- 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
- 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
- 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
- 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
- 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
- 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
- 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
- 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
- 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
- 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
- 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
- 'vector?', 'when', 'when-first', 'when-let', 'when-not',
- 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
- 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
-
- # valid names for identifiers
- # well, names can only not consist fully of numbers
- # but this should be good enough for now
-
- # TODO / should divide keywords/symbols into namespace/rest
- # but that's hard, so just pretend / is part of the name
+ (r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
+ Operator),
+
+ # Punctuation
+ (r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
+
+ # kinds
+ (r'[A-Z][\w!:?]*', Name.Class),
+
+ # default cellnames
+ (r'[a-z_][\w!:?]*', Name)
+ ]
+ }
+
+
+class ClojureLexer(RegexLexer):
+ """
+ Lexer for `Clojure <http://clojure.org/>`_ source code.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Clojure'
+ aliases = ['clojure', 'clj']
+ filenames = ['*.clj']
+ mimetypes = ['text/x-clojure', 'application/x-clojure']
+
+ special_forms = (
+ '.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
+ )
+
+ # It's safe to consider 'ns' a declaration thing because it defines a new
+ # namespace.
+ declarations = (
+ 'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
+ 'defstruct', 'defonce', 'declare', 'definline', 'definterface',
+ 'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
+ )
+
+ builtins = (
+ '*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
+ 'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
+ 'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
+ 'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
+ 'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
+ 'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
+ 'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
+ 'butlast', 'byte', 'cast', 'char', 'children', 'class',
+ 'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
+ 'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
+ 'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
+ 'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
+ 'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
+ 'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
+ 'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
+ 'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
+ 'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
+ 'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
+ 'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
+ 'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
+ 'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
+ 'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
+ 'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
+ 'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
+ 'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
+ 'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
+ 'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
+ 'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
+ 'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
+ 'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
+ 'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
+ 'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
+ 'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
+ 're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
+ 'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
+ 'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
+ 'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
+ 'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
+ 'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
+ 'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
+ 'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
+ 'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
+ 'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
+ 'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
+ 'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
+ 'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
+ 'vector?', 'when', 'when-first', 'when-let', 'when-not',
+ 'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
+ 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
+
+ # valid names for identifiers
+ # well, names can only not consist fully of numbers
+ # but this should be good enough for now
+
+ # TODO / should divide keywords/symbols into namespace/rest
+ # but that's hard, so just pretend / is part of the name
valid_name = r'(?!#)[\w!$%*+<=>?/.#|-]+'
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'[,\s]+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0x-?[abcdef\d]+', Number.Hex),
-
- # strings, symbols and characters
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'[,\s]+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ (r'0x-?[abcdef\d]+', Number.Hex),
+
+ # strings, symbols and characters
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
-
- # keywords
- (r'::?#?' + valid_name, String.Symbol),
-
- # special operators
- (r'~@|[`\'#^~&@]', Operator),
-
- # highlight the special forms
- (words(special_forms, suffix=' '), Keyword),
-
- # Technically, only the special forms are 'keywords'. The problem
- # is that only treating them as keywords means that things like
- # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
- # and weird for most styles. So, as a compromise we're going to
- # highlight them as Keyword.Declarations.
- (words(declarations, suffix=' '), Keyword.Declaration),
-
- # highlight the builtins
- (words(builtins, suffix=' '), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Clojure accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Clojure accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- ],
- }
-
-
-class ClojureScriptLexer(ClojureLexer):
- """
- Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
- source code.
-
- .. versionadded:: 2.0
- """
- name = 'ClojureScript'
- aliases = ['clojurescript', 'cljs']
- filenames = ['*.cljs']
- mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
-
-
-class TeaLangLexer(RegexLexer):
- """
- For `Tea <http://teatrove.org/>`_ source code. Only used within a
- TeaTemplateLexer.
-
- .. versionadded:: 1.5
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
- (r'(and|break|else|foreach|if|in|not|or|reverse)\b',
- Keyword),
- (r'(as|call|define)\b', Keyword.Declaration),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r"'" + valid_name, String.Symbol),
+ (r"\\(.|[a-z]+)", String.Char),
+
+ # keywords
+ (r'::?#?' + valid_name, String.Symbol),
+
+ # special operators
+ (r'~@|[`\'#^~&@]', Operator),
+
+ # highlight the special forms
+ (words(special_forms, suffix=' '), Keyword),
+
+ # Technically, only the special forms are 'keywords'. The problem
+ # is that only treating them as keywords means that things like
+ # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
+ # and weird for most styles. So, as a compromise we're going to
+ # highlight them as Keyword.Declarations.
+ (words(declarations, suffix=' '), Keyword.Declaration),
+
+ # highlight the builtins
+ (words(builtins, suffix=' '), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Clojure accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Clojure accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ ],
+ }
+
+
+class ClojureScriptLexer(ClojureLexer):
+ """
+ Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
+ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'ClojureScript'
+ aliases = ['clojurescript', 'cljs']
+ filenames = ['*.cljs']
+ mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
+
+
+class TeaLangLexer(RegexLexer):
+ """
+ For `Tea <http://teatrove.org/>`_ source code. Only used within a
+ TeaTemplateLexer.
+
+ .. versionadded:: 1.5
+ """
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
+ (r'(and|break|else|foreach|if|in|not|or|reverse)\b',
+ Keyword),
+ (r'(as|call|define)\b', Keyword.Declaration),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_\$]\w*', Name),
- (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'template': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class CeylonLexer(RegexLexer):
- """
- For `Ceylon <http://ceylon-lang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Ceylon'
- aliases = ['ceylon']
- filenames = ['*.ceylon']
- mimetypes = ['text/x-ceylon']
-
- flags = re.MULTILINE | re.DOTALL
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
- r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
- r'annotation|sealed)\b', Name.Decorator),
- (r'(break|case|catch|continue|else|finally|for|in|'
- r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
- r'nonempty|then|outer|assert|let)\b', Keyword),
- (r'(abstracts|extends|satisfies|'
- r'super|given|of|out|assign)\b', Keyword.Declaration),
- (r'(function|value|void|new)\b',
- Keyword.Type),
- (r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface|object|alias)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_\$]\w*', Name),
+ (r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'template': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class CeylonLexer(RegexLexer):
+ """
+ For `Ceylon <http://ceylon-lang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Ceylon'
+ aliases = ['ceylon']
+ filenames = ['*.ceylon']
+ mimetypes = ['text/x-ceylon']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
+ r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
+ r'annotation|sealed)\b', Name.Decorator),
+ (r'(break|case|catch|continue|else|finally|for|in|'
+ r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
+ r'nonempty|then|outer|assert|let)\b', Keyword),
+ (r'(abstracts|extends|satisfies|'
+ r'super|given|of|out|assign)\b', Keyword.Declaration),
+ (r'(function|value|void|new)\b',
+ Keyword.Type),
+ (r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface|object|alias)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
- (r'(\.)([a-z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
- (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
- (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
- Number.Float),
- (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
- (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
- Number.Float),
- (r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
- (r'#[0-9a-fA-F]+', Number.Hex),
- (r'\$([01]{4})(_[01]{4})+', Number.Bin),
- (r'\$[01]+', Number.Bin),
- (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
- (r'[0-9]+[kMGTP]?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[A-Za-z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[a-z][\w.]*',
- Name.Namespace, '#pop')
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- }
-
-
-class KotlinLexer(RegexLexer):
- """
- For `Kotlin <http://kotlinlang.org/>`_
- source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Kotlin'
- aliases = ['kotlin']
+ (r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
+ (r'(\.)([a-z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
+ (r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
+ (r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
+ Number.Float),
+ (r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
+ (r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
+ Number.Float),
+ (r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
+ (r'#[0-9a-fA-F]+', Number.Hex),
+ (r'\$([01]{4})(_[01]{4})+', Number.Bin),
+ (r'\$[01]+', Number.Bin),
+ (r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
+ (r'[0-9]+[kMGTP]?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[A-Za-z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[a-z][\w.]*',
+ Name.Namespace, '#pop')
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ }
+
+
+class KotlinLexer(RegexLexer):
+ """
+ For `Kotlin <http://kotlinlang.org/>`_
+ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Kotlin'
+ aliases = ['kotlin']
filenames = ['*.kt', '*.kts']
- mimetypes = ['text/x-kotlin']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
- '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
- 'Mn', 'Mc') + ']*')
+ mimetypes = ['text/x-kotlin']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
+ '[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
+ 'Mn', 'Mc') + ']*')
kt_space_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
'Mn', 'Mc', 'Zs')
+ r'\'~!%^&*()+=|\[\]:;,.<>/\?-]*')
-
+
kt_id = '(' + kt_name + '|`' + kt_space_name + '`)'
modifiers = (r'actual|abstract|annotation|companion|const|crossinline|'
@@ -1108,17 +1108,17 @@ class KotlinLexer(RegexLexer):
r'internal|lateinit|noinline|open|operator|override|private|'
r'protected|public|sealed|suspend|tailrec')
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
# Whitespaces
- (r'[^\S\n]+', Text),
+ (r'[^\S\n]+', Text),
(r'\s+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'\n', Text),
# Comments
- (r'//.*?\n', Comment.Single),
+ (r'//.*?\n', Comment.Single),
(r'^#!/.+?\n', Comment.Single), # shebang for kotlin scripts
- (r'/[*].*?[*]/', Comment.Multiline),
+ (r'/[*].*?[*]/', Comment.Multiline),
# Keywords
(r'as\?', Keyword),
(r'(as|break|by|catch|constructor|continue|do|dynamic|else|finally|'
@@ -1155,26 +1155,26 @@ class KotlinLexer(RegexLexer):
(r'((?:(?:' + modifiers + r')\s+)*)(fun)(\s+)',
bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'function'),
# Operators
- (r'::|!!|\?[:.]', Operator),
+ (r'::|!!|\?[:.]', Operator),
(r'[~^*!%&\[\]<>|+=/?-]', Operator),
# Punctuation
(r'[{}();:.,]', Punctuation),
# Strings
(r'"""', String, 'multiline_string'),
(r'"', String, 'string'),
- (r"'\\.'|'[^\\]'", String.Char),
+ (r"'\\.'|'[^\\]'", String.Char),
# Numbers
- (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
# Identifiers
(r'' + kt_id + r'((\?[^.])?)', Name) # additionally handle nullable types
- ],
- 'class': [
- (kt_id, Name.Class, '#pop')
- ],
+ ],
+ 'class': [
+ (kt_id, Name.Class, '#pop')
+ ],
'variable': [
(kt_id, Name.Variable, '#pop')
- ],
+ ],
'destructuring_assignment': [
(r',', Punctuation),
(r'\s+', Text),
@@ -1183,11 +1183,11 @@ class KotlinLexer(RegexLexer):
(r'<', Operator, 'generic'),
(r'\)', Punctuation, '#pop')
],
- 'function': [
+ 'function': [
(r'<', Operator, 'generic'),
(r'' + kt_id + r'(\.)' + kt_id, bygroups(Name, Punctuation, Name.Function), '#pop'),
- (kt_id, Name.Function, '#pop')
- ],
+ (kt_id, Name.Function, '#pop')
+ ],
'generic': [
(r'(>)(\s*)', bygroups(Operator, Text), '#pop'),
(r':', Punctuation),
@@ -1230,515 +1230,515 @@ class KotlinLexer(RegexLexer):
(r'\}', Punctuation, '#pop'),
include('root')
]
- }
-
-
-class XtendLexer(RegexLexer):
- """
- For `Xtend <http://xtend-lang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Xtend'
- aliases = ['xtend']
- filenames = ['*.xtend']
- mimetypes = ['text/x-xtend']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_$][\w$]*)' # method name
- r'(\s*)(\()', # signature start
- bygroups(using(this), Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
- r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
- Keyword),
- (r'(def|abstract|const|enum|extends|final|implements|native|private|'
- r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
- (r'(boolean|byte|char|double|float|int|long|short|void)\b',
- Keyword.Type),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
- 'class'),
- (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r"(''')", String, 'template'),
+ }
+
+
+class XtendLexer(RegexLexer):
+ """
+ For `Xtend <http://xtend-lang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Xtend'
+ aliases = ['xtend']
+ filenames = ['*.xtend']
+ mimetypes = ['text/x-xtend']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_$][\w$]*)' # method name
+ r'(\s*)(\()', # signature start
+ bygroups(using(this), Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
+ r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
+ Keyword),
+ (r'(def|abstract|const|enum|extends|final|implements|native|private|'
+ r'protected|public|static|strictfp|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Declaration),
+ (r'(boolean|byte|char|double|float|int|long|short|void)\b',
+ Keyword.Type),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
+ 'class'),
+ (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r"(''')", String, 'template'),
(r'(\u00BB)', String, 'template'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text)
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- 'template': [
- (r"'''", String, '#pop'),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text)
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ 'template': [
+ (r"'''", String, '#pop'),
(r'\u00AB', String, '#pop'),
- (r'.', String)
- ],
- }
-
-
-class PigLexer(RegexLexer):
- """
- For `Pig Latin <https://pig.apache.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pig'
- aliases = ['pig']
- filenames = ['*.pig']
- mimetypes = ['text/x-pig']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'--.*', Comment),
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
- include('keywords'),
- include('types'),
- include('builtins'),
- include('punct'),
- include('operators'),
- (r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text),
- (r'([a-z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[()#:]', Text),
- (r'[^(:#\'")\s]+', Text),
- (r'\S+\s+', Text) # TODO: make tests pass without \s+
- ],
- 'keywords': [
- (r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
- r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
- r'eval|exex|explain|filter|flatten|foreach|full|generate|group|'
- r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
- r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
- r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
- r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
- r'stream|through|union|using|void)\b', Keyword)
- ],
- 'builtins': [
- (r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
- r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
- r'TOKENIZE)\b', Name.Builtin)
- ],
- 'types': [
- (r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
- r'int|long|tuple)\b', Keyword.Type)
- ],
- 'punct': [
- (r'[;(){}\[\]]', Punctuation),
- ],
- 'operators': [
- (r'[#=,./%+\-?]', Operator),
- (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
- (r'(==|<=|<|>=|>|!=)', Operator),
- ],
- }
-
-
-class GoloLexer(RegexLexer):
- """
- For `Golo <http://golo-lang.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Golo'
- filenames = ['*.golo']
- aliases = ['golo']
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
-
- (r'#.*$', Comment),
-
- (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
- Operator),
- (r'(?<=[^-])(-)(?=[^-])', Operator),
-
- (r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
- (r'[]{}|(),[]', Punctuation),
-
- (r'(module|import)(\s+)',
- bygroups(Keyword.Namespace, Text),
- 'modname'),
- (r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
- (r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
-
- (r'(let|var)(\s+)',
- bygroups(Keyword.Declaration, Text),
- 'varname'),
- (r'(struct)(\s+)',
- bygroups(Keyword.Declaration, Text),
- 'structname'),
- (r'(function)(\s+)',
- bygroups(Keyword.Declaration, Text),
- 'funcname'),
-
- (r'(null|true|false)\b', Keyword.Constant),
- (r'(augment|pimp'
- r'|if|else|case|match|return'
- r'|case|when|then|otherwise'
- r'|while|for|foreach'
- r'|try|catch|finally|throw'
- r'|local'
- r'|continue|break)\b', Keyword),
-
- (r'(map|array|list|set|vector|tuple)(\[)',
- bygroups(Name.Builtin, Punctuation)),
- (r'(print|println|readln|raise|fun'
- r'|asInterfaceInstance)\b', Name.Builtin),
- (r'(`?[a-zA-Z_][\w$]*)(\()',
- bygroups(Name.Function, Punctuation)),
-
- (r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'-?\d[\d_]*L', Number.Integer.Long),
- (r'-?\d[\d_]*', Number.Integer),
-
+ (r'.', String)
+ ],
+ }
+
+
+class PigLexer(RegexLexer):
+ """
+ For `Pig Latin <https://pig.apache.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pig'
+ aliases = ['pig']
+ filenames = ['*.pig']
+ mimetypes = ['text/x-pig']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'--.*', Comment),
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
+ include('keywords'),
+ include('types'),
+ include('builtins'),
+ include('punct'),
+ include('operators'),
+ (r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text),
+ (r'([a-z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[()#:]', Text),
+ (r'[^(:#\'")\s]+', Text),
+ (r'\S+\s+', Text) # TODO: make tests pass without \s+
+ ],
+ 'keywords': [
+ (r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
+ r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
+ r'eval|exex|explain|filter|flatten|foreach|full|generate|group|'
+ r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
+ r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
+ r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
+ r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
+ r'stream|through|union|using|void)\b', Keyword)
+ ],
+ 'builtins': [
+ (r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
+ r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
+ r'TOKENIZE)\b', Name.Builtin)
+ ],
+ 'types': [
+ (r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
+ r'int|long|tuple)\b', Keyword.Type)
+ ],
+ 'punct': [
+ (r'[;(){}\[\]]', Punctuation),
+ ],
+ 'operators': [
+ (r'[#=,./%+\-?]', Operator),
+ (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
+ (r'(==|<=|<|>=|>|!=)', Operator),
+ ],
+ }
+
+
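Editor's note: the Pig lexer above builds its 'root' state by splicing in the 'keywords', 'types', 'builtins', 'punct' and 'operators' sub-states with include(), so each rule group can be maintained separately. A minimal usage sketch, assuming only the public Pygments API (the Pig snippet itself is illustrative):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import PigLexer  # resolved lazily from the lexer registry

pig_source = "a = LOAD 'input.txt' AS (line:chararray);\nDUMP a;"  # illustrative snippet
# include('keywords') and friends are expanded when the token definitions are
# compiled, so LOAD/AS hit the 'keywords' rules and chararray hits 'types'
# (the lexer is case-insensitive via re.IGNORECASE).
print(highlight(pig_source, PigLexer(), TerminalFormatter()))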
+class GoloLexer(RegexLexer):
+ """
+ For `Golo <http://golo-lang.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Golo'
+ filenames = ['*.golo']
+ aliases = ['golo']
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+
+ (r'#.*$', Comment),
+
+ (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|=|\.)',
+ Operator),
+ (r'(?<=[^-])(-)(?=[^-])', Operator),
+
+ (r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
+ (r'[]{}|(),[]', Punctuation),
+
+ (r'(module|import)(\s+)',
+ bygroups(Keyword.Namespace, Text),
+ 'modname'),
+ (r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
+ (r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
+
+ (r'(let|var)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'varname'),
+ (r'(struct)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'structname'),
+ (r'(function)(\s+)',
+ bygroups(Keyword.Declaration, Text),
+ 'funcname'),
+
+ (r'(null|true|false)\b', Keyword.Constant),
+ (r'(augment|pimp'
+ r'|if|else|case|match|return'
+ r'|case|when|then|otherwise'
+ r'|while|for|foreach'
+ r'|try|catch|finally|throw'
+ r'|local'
+ r'|continue|break)\b', Keyword),
+
+ (r'(map|array|list|set|vector|tuple)(\[)',
+ bygroups(Name.Builtin, Punctuation)),
+ (r'(print|println|readln|raise|fun'
+ r'|asInterfaceInstance)\b', Name.Builtin),
+ (r'(`?[a-zA-Z_][\w$]*)(\()',
+ bygroups(Name.Function, Punctuation)),
+
+ (r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'-?\d[\d_]*L', Number.Integer.Long),
+ (r'-?\d[\d_]*', Number.Integer),
+
(r'`?[a-zA-Z_][\w$]*', Name),
- (r'@[a-zA-Z_][\w$.]*', Name.Decorator),
-
- (r'"""', String, combined('stringescape', 'triplestring')),
- (r'"', String, combined('stringescape', 'doublestring')),
- (r"'", String, combined('stringescape', 'singlestring')),
- (r'----((.|\n)*?)----', String.Doc)
-
- ],
-
- 'funcname': [
- (r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
- ],
- 'modname': [
- (r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
- ],
- 'structname': [
- (r'`?[\w.]+\*?', Name.Class, '#pop')
- ],
- 'varname': [
- (r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
- ],
- 'string': [
- (r'[^\\\'"\n]+', String),
- (r'[\'"\\]', String)
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'triplestring': [
- (r'"""', String, '#pop'),
- include('string'),
- (r'\n', String),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- include('string'),
- ],
- 'singlestring': [
- (r"'", String, '#pop'),
- include('string'),
- ],
- 'operators': [
- (r'[#=,./%+\-?]', Operator),
- (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
- (r'(==|<=|<|>=|>|!=)', Operator),
- ],
- }
-
-
-class JasminLexer(RegexLexer):
- """
- For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Jasmin'
- aliases = ['jasmin', 'jasminxt']
- filenames = ['*.j']
-
- _whitespace = r' \n\t\r'
- _ws = r'(?:[%s]+)' % _whitespace
- _separator = r'%s:=' % _whitespace
- _break = r'(?=[%s]|$)' % _separator
- _name = r'[^%s]+' % _separator
- _unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
-
- tokens = {
- 'default': [
- (r'\n', Text, '#pop'),
- (r"'", String.Single, ('#pop', 'quote')),
- (r'"', String.Double, 'string'),
- (r'=', Punctuation),
- (r':', Punctuation, 'label'),
- (_ws, Text),
- (r';.*', Comment.Single),
- (r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
- (r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
- (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
- r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
- (r'\$%s' % _name, Name.Variable),
-
- # Directives
- (r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
- (r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
- r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
- r'annotation|bridge|class|default|enum|field|final|fpstrict|'
- r'interface|native|private|protected|public|signature|static|'
- r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
- Keyword.Reserved),
- (r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
- (r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
- r'invisibleparam|outer|visible|visibleparam)%s' % _break,
- Keyword.Reserved, 'class/convert-dots'),
- (r'\.field%s' % _break, Keyword.Reserved,
- ('descriptor/convert-dots', 'field')),
- (r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
- 'no-verification'),
- (r'\.method%s' % _break, Keyword.Reserved, 'method'),
- (r'\.set%s' % _break, Keyword.Reserved, 'var'),
- (r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
- (r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
- (r'is%s' % _break, Keyword.Reserved,
- ('descriptor/convert-dots', 'var')),
- (r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
- (r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
-
- # Instructions
- (words((
- 'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
- 'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
- 'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
- 'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
- 'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
- 'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
- 'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
- 'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
- 'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
- 'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
- 'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
- 'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
- 'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
- 'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
- 'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
- 'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
- 'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
- 'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
- 'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
- 'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
- 'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
- 'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
- (r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
- Keyword.Reserved, 'class/no-dots'),
- (r'invoke(dynamic|interface|nonvirtual|special|'
- r'static|virtual)%s' % _break, Keyword.Reserved,
- 'invocation'),
- (r'(getfield|putfield)%s' % _break, Keyword.Reserved,
- ('descriptor/no-dots', 'field')),
- (r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
- ('descriptor/no-dots', 'static')),
- (words((
- 'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
- 'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
- 'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
- 'ifnull', 'jsr', 'jsr_w'), suffix=_break),
- Keyword.Reserved, 'label'),
- (r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
- 'descriptor/convert-dots'),
- (r'tableswitch%s' % _break, Keyword.Reserved, 'table')
- ],
- 'quote': [
- (r"'", String.Single, '#pop'),
- (r'\\u[\da-fA-F]{4}', String.Escape),
- (r"[^'\\]+", String.Single)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
- String.Escape),
- (r'[^"\\]+', String.Double)
- ],
- 'root': [
- (r'\n+', Text),
- (r"'", String.Single, 'quote'),
- include('default'),
- (r'(%s)([ \t\r]*)(:)' % _name,
- bygroups(Name.Label, Text, Punctuation)),
- (_name, String.Other)
- ],
- 'annotation': [
- (r'\n', Text, ('#pop', 'annotation-body')),
- (r'default%s' % _break, Keyword.Reserved,
- ('#pop', 'annotation-default')),
- include('default')
- ],
- 'annotation-body': [
- (r'\n+', Text),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- (_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
- ],
- 'annotation-default': [
- (r'\n+', Text),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- default(('annotation-items', 'descriptor/no-dots'))
- ],
- 'annotation-items': [
- (r"'", String.Single, 'quote'),
- include('default'),
- (_name, String.Other)
- ],
- 'caught-exception': [
- (r'all%s' % _break, Keyword, '#pop'),
- include('exception')
- ],
- 'class/convert-dots': [
- include('default'),
- (r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class), '#pop')
- ],
- 'class/no-dots': [
- include('default'),
- (r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class), '#pop')
- ],
- 'descriptor/convert-dots': [
- include('default'),
- (r'\[+', Punctuation),
- (r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
- default('#pop')
- ],
- 'descriptor/no-dots': [
- include('default'),
- (r'\[+', Punctuation),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
- bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
- '#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
- default('#pop')
- ],
- 'descriptors/convert-dots': [
- (r'\)', Punctuation, '#pop'),
- default('descriptor/convert-dots')
- ],
- 'enclosing-method': [
- (_ws, Text),
- (r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
- default(('#pop', 'class/convert-dots'))
- ],
- 'exception': [
- include('default'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
- bygroups(Name.Namespace, Name.Exception), '#pop')
- ],
- 'field': [
- (r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
- include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
- '#pop')
- ],
- 'invocation': [
- include('default'),
- (r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
- ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
- 'descriptor/convert-dots'))
- ],
- 'label': [
- include('default'),
- (_name, Name.Label, '#pop')
- ],
- 'method': [
- include('default'),
- (r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
- ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
- 'descriptor/convert-dots'))
- ],
- 'no-verification': [
- (r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
- include('default')
- ],
- 'static': [
- include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
- bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
- ],
- 'table': [
- (r'\n+', Text),
- (r'default%s' % _break, Keyword.Reserved, '#pop'),
- include('default'),
- (_name, Name.Label)
- ],
- 'var': [
- include('default'),
- (_name, Name.Variable, '#pop')
- ],
- 'verification': [
- include('default'),
- (r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
- _break, Keyword, '#pop'),
- (r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
- (r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
- ]
- }
-
- def analyse_text(text):
- score = 0
- if re.search(r'^\s*\.class\s', text, re.MULTILINE):
- score += 0.5
- if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
- score += 0.3
- if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
- r'inner|interface|limit|set|signature|stack)\b', text,
- re.MULTILINE):
- score += 0.6
+ (r'@[a-zA-Z_][\w$.]*', Name.Decorator),
+
+ (r'"""', String, combined('stringescape', 'triplestring')),
+ (r'"', String, combined('stringescape', 'doublestring')),
+ (r"'", String, combined('stringescape', 'singlestring')),
+ (r'----((.|\n)*?)----', String.Doc)
+
+ ],
+
+ 'funcname': [
+ (r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
+ ],
+ 'modname': [
+ (r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
+ ],
+ 'structname': [
+ (r'`?[\w.]+\*?', Name.Class, '#pop')
+ ],
+ 'varname': [
+ (r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\\'"\n]+', String),
+ (r'[\'"\\]', String)
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'triplestring': [
+ (r'"""', String, '#pop'),
+ include('string'),
+ (r'\n', String),
+ ],
+ 'doublestring': [
+ (r'"', String.Double, '#pop'),
+ include('string'),
+ ],
+ 'singlestring': [
+ (r"'", String, '#pop'),
+ include('string'),
+ ],
+ 'operators': [
+ (r'[#=,./%+\-?]', Operator),
+ (r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
+ (r'(==|<=|<|>=|>|!=)', Operator),
+ ],
+ }
+
+
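Editor's note: GoloLexer's string rules use combined('stringescape', ...), which pushes a single anonymous state containing the rules of both named states, so escape sequences keep their own token type inside each string flavour. A small standalone sketch of the same pattern (TinyStringLexer below is hypothetical, assuming only the documented RegexLexer helpers):

from pygments.lexer import RegexLexer, combined
from pygments.token import String, Text

class TinyStringLexer(RegexLexer):  # hypothetical example, not part of the diff above
    name = 'TinyString'
    tokens = {
        'root': [
            (r'\s+', Text),
            # entering a double quote pushes one anonymous state made of the
            # 'escape' rules followed by the 'dq' rules
            (r'"', String, combined('escape', 'dq')),
            (r'[^"\s]+', Text),
        ],
        'escape': [
            (r'\\.', String.Escape),
        ],
        'dq': [
            (r'"', String, '#pop'),   # pops the combined state as a whole
            (r'[^"\\]+', String),
        ],
    }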
+class JasminLexer(RegexLexer):
+ """
+ For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Jasmin'
+ aliases = ['jasmin', 'jasminxt']
+ filenames = ['*.j']
+
+ _whitespace = r' \n\t\r'
+ _ws = r'(?:[%s]+)' % _whitespace
+ _separator = r'%s:=' % _whitespace
+ _break = r'(?=[%s]|$)' % _separator
+ _name = r'[^%s]+' % _separator
+ _unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
+
+ tokens = {
+ 'default': [
+ (r'\n', Text, '#pop'),
+ (r"'", String.Single, ('#pop', 'quote')),
+ (r'"', String.Double, 'string'),
+ (r'=', Punctuation),
+ (r':', Punctuation, 'label'),
+ (_ws, Text),
+ (r';.*', Comment.Single),
+ (r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
+ (r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
+ (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
+ r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
+ (r'\$%s' % _name, Name.Variable),
+
+ # Directives
+ (r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
+ (r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
+ r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
+ r'annotation|bridge|class|default|enum|field|final|fpstrict|'
+ r'interface|native|private|protected|public|signature|static|'
+ r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
+ Keyword.Reserved),
+ (r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
+ (r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
+ r'invisibleparam|outer|visible|visibleparam)%s' % _break,
+ Keyword.Reserved, 'class/convert-dots'),
+ (r'\.field%s' % _break, Keyword.Reserved,
+ ('descriptor/convert-dots', 'field')),
+ (r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
+ 'no-verification'),
+ (r'\.method%s' % _break, Keyword.Reserved, 'method'),
+ (r'\.set%s' % _break, Keyword.Reserved, 'var'),
+ (r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
+ (r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
+ (r'is%s' % _break, Keyword.Reserved,
+ ('descriptor/convert-dots', 'var')),
+ (r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
+ (r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
+
+ # Instructions
+ (words((
+ 'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
+ 'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
+ 'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
+ 'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
+ 'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
+ 'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
+ 'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
+ 'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
+ 'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
+ 'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
+ 'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
+ 'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
+ 'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
+ 'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
+ 'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
+ 'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
+ 'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
+ 'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
+ 'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
+ 'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
+ 'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
+ 'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
+ (r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
+ Keyword.Reserved, 'class/no-dots'),
+ (r'invoke(dynamic|interface|nonvirtual|special|'
+ r'static|virtual)%s' % _break, Keyword.Reserved,
+ 'invocation'),
+ (r'(getfield|putfield)%s' % _break, Keyword.Reserved,
+ ('descriptor/no-dots', 'field')),
+ (r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
+ ('descriptor/no-dots', 'static')),
+ (words((
+ 'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
+ 'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
+ 'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
+ 'ifnull', 'jsr', 'jsr_w'), suffix=_break),
+ Keyword.Reserved, 'label'),
+ (r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
+ 'descriptor/convert-dots'),
+ (r'tableswitch%s' % _break, Keyword.Reserved, 'table')
+ ],
+ 'quote': [
+ (r"'", String.Single, '#pop'),
+ (r'\\u[\da-fA-F]{4}', String.Escape),
+ (r"[^'\\]+", String.Single)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
+ String.Escape),
+ (r'[^"\\]+', String.Double)
+ ],
+ 'root': [
+ (r'\n+', Text),
+ (r"'", String.Single, 'quote'),
+ include('default'),
+ (r'(%s)([ \t\r]*)(:)' % _name,
+ bygroups(Name.Label, Text, Punctuation)),
+ (_name, String.Other)
+ ],
+ 'annotation': [
+ (r'\n', Text, ('#pop', 'annotation-body')),
+ (r'default%s' % _break, Keyword.Reserved,
+ ('#pop', 'annotation-default')),
+ include('default')
+ ],
+ 'annotation-body': [
+ (r'\n+', Text),
+ (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ (_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
+ ],
+ 'annotation-default': [
+ (r'\n+', Text),
+ (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ default(('annotation-items', 'descriptor/no-dots'))
+ ],
+ 'annotation-items': [
+ (r"'", String.Single, 'quote'),
+ include('default'),
+ (_name, String.Other)
+ ],
+ 'caught-exception': [
+ (r'all%s' % _break, Keyword, '#pop'),
+ include('exception')
+ ],
+ 'class/convert-dots': [
+ include('default'),
+ (r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class), '#pop')
+ ],
+ 'class/no-dots': [
+ include('default'),
+ (r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
+ (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class), '#pop')
+ ],
+ 'descriptor/convert-dots': [
+ include('default'),
+ (r'\[+', Punctuation),
+ (r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ default('#pop')
+ ],
+ 'descriptor/no-dots': [
+ include('default'),
+ (r'\[+', Punctuation),
+ (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
+ '#pop'),
+ (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ default('#pop')
+ ],
+ 'descriptors/convert-dots': [
+ (r'\)', Punctuation, '#pop'),
+ default('descriptor/convert-dots')
+ ],
+ 'enclosing-method': [
+ (_ws, Text),
+ (r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
+ default(('#pop', 'class/convert-dots'))
+ ],
+ 'exception': [
+ include('default'),
+ (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Exception), '#pop')
+ ],
+ 'field': [
+ (r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
+ include('default'),
+ (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
+ '#pop')
+ ],
+ 'invocation': [
+ include('default'),
+ (r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
+ ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
+ 'descriptor/convert-dots'))
+ ],
+ 'label': [
+ include('default'),
+ (_name, Name.Label, '#pop')
+ ],
+ 'method': [
+ include('default'),
+ (r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
+ ('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
+ 'descriptor/convert-dots'))
+ ],
+ 'no-verification': [
+ (r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
+ include('default')
+ ],
+ 'static': [
+ include('default'),
+ (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
+ (_unqualified_name, _separator, _unqualified_name, _name),
+ bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
+ ],
+ 'table': [
+ (r'\n+', Text),
+ (r'default%s' % _break, Keyword.Reserved, '#pop'),
+ include('default'),
+ (_name, Name.Label)
+ ],
+ 'var': [
+ include('default'),
+ (_name, Name.Variable, '#pop')
+ ],
+ 'verification': [
+ include('default'),
+ (r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
+ _break, Keyword, '#pop'),
+ (r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
+ (r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
+ ]
+ }
+
+ def analyse_text(text):
+ score = 0
+ if re.search(r'^\s*\.class\s', text, re.MULTILINE):
+ score += 0.5
+ if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
+ score += 0.3
+ if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
+ r'inner|interface|limit|set|signature|stack)\b', text,
+ re.MULTILINE):
+ score += 0.6
return min(score, 1.0)
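Editor's note: JasminLexer.analyse_text returns a score between 0 and 1; Pygments consults these scores when asked to pick a lexer for unknown text, and the directive check is weighted highest because a '.class'/'.limit' style directive is the strongest Jasmin signal. A short sketch of how such scores are consumed, assuming the public guess_lexer helper (the sample text is illustrative):

from pygments.lexers import guess_lexer

sample = ".class public HelloWorld\n.super java/lang/Object\n"
# guess_lexer runs analyse_text on the registered lexers and keeps the best
# scorer; the '.class' directive alone already contributes 0.5 here.
lexer = guess_lexer(sample)
print(lexer.name)  # expected to print 'Jasmin' for input like this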
diff --git a/contrib/python/Pygments/py3/pygments/lexers/lisp.py b/contrib/python/Pygments/py3/pygments/lexers/lisp.py
index 5628e336ca..00b7b4e2d0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/lisp.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/lisp.py
@@ -1,100 +1,100 @@
-"""
- pygments.lexers.lisp
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Lispy languages.
-
+"""
+ pygments.lexers.lisp
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Lispy languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal, Error
-
-from pygments.lexers.python import PythonLexer
-
-__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal, Error
+
+from pygments.lexers.python import PythonLexer
+
+__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer',
'XtlangLexer', 'FennelLexer']
-
-
-class SchemeLexer(RegexLexer):
- """
- A Scheme lexer, parsing a stream and outputting the tokens
- needed to highlight scheme code.
- This lexer could be most probably easily subclassed to parse
- other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp.
-
- This parser is checked with pastes from the LISP pastebin
- at http://paste.lisp.org/ to cover as much syntax as possible.
-
- It supports the full Scheme syntax as defined in R5RS.
-
- .. versionadded:: 0.6
- """
- name = 'Scheme'
- aliases = ['scheme', 'scm']
- filenames = ['*.scm', '*.ss']
- mimetypes = ['text/x-scheme', 'application/x-scheme']
-
+
+
+class SchemeLexer(RegexLexer):
+ """
+ A Scheme lexer, parsing a stream and outputting the tokens
+ needed to highlight scheme code.
+ This lexer could be most probably easily subclassed to parse
+ other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp.
+
+ This parser is checked with pastes from the LISP pastebin
+ at http://paste.lisp.org/ to cover as much syntax as possible.
+
+ It supports the full Scheme syntax as defined in R5RS.
+
+ .. versionadded:: 0.6
+ """
+ name = 'Scheme'
+ aliases = ['scheme', 'scm']
+ filenames = ['*.scm', '*.ss']
+ mimetypes = ['text/x-scheme', 'application/x-scheme']
+
flags = re.DOTALL | re.MULTILINE
-    # list of known keywords and builtins taken from vim 6.4 scheme.vim
- # syntax file.
- keywords = (
- 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
- 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
- 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
- 'let-syntax', 'letrec-syntax', 'syntax-rules'
- )
- builtins = (
- '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
- 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
- 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
- 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
- 'cadr', 'call-with-current-continuation', 'call-with-input-file',
- 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
- 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
- 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
- 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
- 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
- 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
- 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
- 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
- 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
- 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
- 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
- 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
- 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
- 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
- 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
- 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
- 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
- 'null?', 'number->string', 'number?', 'numerator', 'odd?',
- 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
- 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
- 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
- 'remainder', 'reverse', 'round', 'scheme-report-environment',
- 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
- 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
- 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
- 'string-copy', 'string-fill!', 'string-length', 'string-ref',
- 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
- 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
- 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
- 'vector', 'vector->list', 'vector-fill!', 'vector-length',
- 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
- 'with-output-to-file', 'write', 'write-char', 'zero?'
- )
-
- # valid names for identifiers
- # well, names can only not consist fully of numbers
-    # well, the only restriction is that names may not consist entirely of numbers
- valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
-
+    # list of known keywords and builtins taken from vim 6.4 scheme.vim
+ # syntax file.
+ keywords = (
+ 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
+ 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
+ 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
+ 'let-syntax', 'letrec-syntax', 'syntax-rules'
+ )
+ builtins = (
+ '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
+ 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
+ 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
+ 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
+ 'cadr', 'call-with-current-continuation', 'call-with-input-file',
+ 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
+ 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
+ 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
+ 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
+ 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
+ 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
+ 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
+ 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
+ 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
+ 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
+ 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
+ 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
+ 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
+ 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
+ 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
+ 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
+ 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
+ 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
+ 'null?', 'number->string', 'number?', 'numerator', 'odd?',
+ 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
+ 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
+ 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
+ 'remainder', 'reverse', 'round', 'scheme-report-environment',
+ 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
+ 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
+ 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
+ 'string-copy', 'string-fill!', 'string-length', 'string-ref',
+ 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
+ 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
+ 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
+ 'vector', 'vector->list', 'vector-fill!', 'vector-length',
+ 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
+ 'with-output-to-file', 'write', 'write-char', 'zero?'
+ )
+
+ # valid names for identifiers
+ # well, names can only not consist fully of numbers
+    # well, the only restriction is that names may not consist entirely of numbers
+ valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
+
# The 'scheme-root' state parses as many expressions as needed, always
# delegating to the 'scheme-value' state. The latter parses one complete
# expression and immediately pops back. This is needed for the LilyPondLexer.
@@ -106,67 +106,67 @@ class SchemeLexer(RegexLexer):
# work to 'scheme-root'; this is so that LilyPondLexer can inherit
# 'scheme-root' and redefine 'root'.
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
default('scheme-root'),
],
'scheme-root': [
default('value'),
],
'value': [
- # the comments
- # and going to the end of the line
+ # the comments
+ # and going to the end of the line
(r';.*?$', Comment.Single),
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
-            # commented form (entire sexpr following)
- (r'#;\s*\(', Comment, 'commented-form'),
- # signifies that the program text that follows is written with the
- # lexical and datum syntax described in r6rs
- (r'#!r6rs', Comment),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- # numbers
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+            # commented form (entire sexpr following)
+ (r'#;\s*\(', Comment, 'commented-form'),
+ # signifies that the program text that follows is written with the
+ # lexical and datum syntax described in r6rs
+ (r'#!r6rs', Comment),
+
+ # whitespaces - usually not relevant
+ (r'\s+', Text),
+
+ # numbers
(r'-?\d+\.\d+', Number.Float, '#pop'),
(r'-?\d+', Number.Integer, '#pop'),
- # support for uncommon kinds of numbers -
- # have to figure out what the characters mean
- # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
- # strings, symbols and characters
+ # support for uncommon kinds of numbers -
+ # have to figure out what the characters mean
+ # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
+
+ # strings, symbols and characters
(r'"(\\\\|\\[^\\]|[^"\\])*"', String, "#pop"),
(r"'" + valid_name, String.Symbol, "#pop"),
(r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char, "#pop"),
-
- # constants
+
+ # constants
(r'(#t|#f)', Name.Constant, '#pop'),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords),
+
+ # special operators
+ (r"('|#|`|,@|,|\.)", Operator),
+
+ # highlight the keywords
+ ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords),
Keyword,
'#pop'),
-
- # first variable in a quoted string like
- # '(this is syntactic sugar)
+
+ # first variable in a quoted string like
+ # '(this is syntactic sugar)
(r"(?<='\()" + valid_name, Name.Variable, '#pop'),
(r"(?<=#\()" + valid_name, Name.Variable, '#pop'),
-
- # highlight the builtins
+
+ # highlight the builtins
(r"(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins),
Name.Builtin,
'#pop'),
-
- # the remaining functions
+
+ # the remaining functions
(r'(?<=\()' + valid_name, Name.Function, '#pop'),
- # find the remaining variables
+ # find the remaining variables
(valid_name, Name.Variable, '#pop'),
-
- # the famous parentheses!
+
+ # the famous parentheses!
# Push scheme-root to enter a state that will parse as many things
# as needed in the parentheses.
@@ -175,327 +175,327 @@ class SchemeLexer(RegexLexer):
# we get back to a state parsing expressions as needed in the
# enclosing context.
(r'\)|\]', Punctuation, '#pop:3'),
- ],
- 'multiline-comment': [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form': [
- (r'\(', Comment, '#push'),
- (r'\)', Comment, '#pop'),
- (r'[^()]+', Comment),
- ],
- }
-
-
-class CommonLispLexer(RegexLexer):
- """
- A Common Lisp lexer.
-
- .. versionadded:: 0.9
- """
- name = 'Common Lisp'
- aliases = ['common-lisp', 'cl', 'lisp']
- filenames = ['*.cl', '*.lisp']
- mimetypes = ['text/x-common-lisp']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- # couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
-
- # symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
-
- def __init__(self, **options):
- from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
- SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
- BUILTIN_TYPES, BUILTIN_CLASSES
- self.builtin_function = BUILTIN_FUNCTIONS
- self.special_forms = SPECIAL_FORMS
- self.macros = MACROS
- self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
- self.declarations = DECLARATIONS
- self.builtin_types = BUILTIN_TYPES
- self.builtin_classes = BUILTIN_CLASSES
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in self.builtin_function:
- yield index, Name.Builtin, value
- continue
- if value in self.special_forms:
- yield index, Keyword, value
- continue
- if value in self.macros:
- yield index, Name.Builtin, value
- continue
- if value in self.lambda_list_keywords:
- yield index, Keyword, value
- continue
- if value in self.declarations:
- yield index, Keyword, value
- continue
- if value in self.builtin_types:
- yield index, Keyword.Type, value
- continue
- if value in self.builtin_classes:
- yield index, Name.Class, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- default('body'),
- ],
- 'multiline-comment': [
- (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form': [
- (r'\(', Comment.Preproc, '#push'),
- (r'\)', Comment.Preproc, '#pop'),
- (r'[^()]+', Comment.Preproc),
- ],
- 'body': [
- # whitespace
- (r'\s+', Text),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
-
- # encoding comment (?)
- (r'#\d*Y.*$', Comment.Special),
-
- # strings and characters
- (r'"(\\.|\\\n|[^"\\])*"', String),
- # quoting
- (r":" + symbol, String.Symbol),
- (r"::" + symbol, String.Symbol),
- (r":#" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
- terminated, Number.Float),
-
- # sharpsign strings and characters
- (r"#\\." + terminated, String.Char),
- (r"#\\" + symbol, String.Char),
-
- # vector
- (r'#\(', Operator, 'body'),
-
- # bitstring
- (r'#\d*\*[01]*', Literal.Other),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read-time and load-time evaluation
- (r'#[.,]', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#b[+-]?[01]+(/[01]+)?', Number.Bin),
-
- # octal rational
- (r'#o[+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#x[+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
-
- # complex
- (r'(#c)(\()', bygroups(Number, Punctuation), 'body'),
-
- # array
- (r'(#\d+a)(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # structure
- (r'(#s)(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # path
- (r'#p?"(\\.|[^"])*"', Literal.Other),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # read-time comment
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment, '#push'),
+ (r'\)', Comment, '#pop'),
+ (r'[^()]+', Comment),
+ ],
+ }
+
+
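Editor's note: the restructured SchemeLexer drives everything through default() transitions: 'root' enters 'scheme-root', 'scheme-root' enters 'value' for each expression, an opening parenthesis pushes another 'scheme-root', and the closing parenthesis pops three states at once so control returns to the enclosing expression context (this indirection is what lets LilyPondLexer inherit 'scheme-root' while redefining 'root'). One way to watch the resulting token stream, assuming the standard get_tokens API (the snippet is illustrative):

from pygments.lexers import SchemeLexer

code = "(define (square x) (* x x))"  # illustrative snippet
for token_type, value in SchemeLexer().get_tokens(code):
    print(token_type, repr(value))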
+class CommonLispLexer(RegexLexer):
+ """
+ A Common Lisp lexer.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Common Lisp'
+ aliases = ['common-lisp', 'cl', 'lisp']
+ filenames = ['*.cl', '*.lisp']
+ mimetypes = ['text/x-common-lisp']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # couple of useful regexes
+
+ # characters that are not macro-characters and can be used to begin a symbol
+ nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
+ constituent = nonmacro + '|[#.:]'
+ terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
+
+ # symbol token, reverse-engineered from hyperspec
+ # Take a deep breath...
+ symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
+
+ def __init__(self, **options):
+ from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
+ SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
+ BUILTIN_TYPES, BUILTIN_CLASSES
+ self.builtin_function = BUILTIN_FUNCTIONS
+ self.special_forms = SPECIAL_FORMS
+ self.macros = MACROS
+ self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
+ self.declarations = DECLARATIONS
+ self.builtin_types = BUILTIN_TYPES
+ self.builtin_classes = BUILTIN_CLASSES
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Variable:
+ if value in self.builtin_function:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.special_forms:
+ yield index, Keyword, value
+ continue
+ if value in self.macros:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.lambda_list_keywords:
+ yield index, Keyword, value
+ continue
+ if value in self.declarations:
+ yield index, Keyword, value
+ continue
+ if value in self.builtin_types:
+ yield index, Keyword.Type, value
+ continue
+ if value in self.builtin_classes:
+ yield index, Name.Class, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ default('body'),
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment.Preproc, '#push'),
+ (r'\)', Comment.Preproc, '#pop'),
+ (r'[^()]+', Comment.Preproc),
+ ],
+ 'body': [
+ # whitespace
+ (r'\s+', Text),
+
+ # single-line comment
+ (r';.*$', Comment.Single),
+
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+
+ # encoding comment (?)
+ (r'#\d*Y.*$', Comment.Special),
+
+ # strings and characters
+ (r'"(\\.|\\\n|[^"\\])*"', String),
+ # quoting
+ (r":" + symbol, String.Symbol),
+ (r"::" + symbol, String.Symbol),
+ (r":#" + symbol, String.Symbol),
+ (r"'" + symbol, String.Symbol),
+ (r"'", Operator),
+ (r"`", Operator),
+
+ # decimal numbers
+ (r'[-+]?\d+\.?' + terminated, Number.Integer),
+ (r'[-+]?\d+/\d+' + terminated, Number),
+ (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
+ terminated, Number.Float),
+
+ # sharpsign strings and characters
+ (r"#\\." + terminated, String.Char),
+ (r"#\\" + symbol, String.Char),
+
+ # vector
+ (r'#\(', Operator, 'body'),
+
+ # bitstring
+ (r'#\d*\*[01]*', Literal.Other),
+
+ # uninterned symbol
+ (r'#:' + symbol, String.Symbol),
+
+ # read-time and load-time evaluation
+ (r'#[.,]', Operator),
+
+ # function shorthand
+ (r'#\'', Name.Function),
+
+ # binary rational
+ (r'#b[+-]?[01]+(/[01]+)?', Number.Bin),
+
+ # octal rational
+ (r'#o[+-]?[0-7]+(/[0-7]+)?', Number.Oct),
+
+ # hex rational
+ (r'#x[+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
+
+ # radix rational
+ (r'#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
+
+ # complex
+ (r'(#c)(\()', bygroups(Number, Punctuation), 'body'),
+
+ # array
+ (r'(#\d+a)(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # structure
+ (r'(#s)(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # path
+ (r'#p?"(\\.|[^"])*"', Literal.Other),
+
+ # reference
+ (r'#\d+=', Operator),
+ (r'#\d+#', Operator),
+
+ # read-time comment
(r'#+nil' + terminated + r'\s*\(', Comment.Preproc, 'commented-form'),
-
- # read-time conditional
- (r'#[+-]', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
+
+ # read-time conditional
+ (r'#[+-]', Operator),
+
+ # special operators that should have been parsed already
+ (r'(,@|,|\.)', Operator),
+
+ # special constants
+ (r'(t|nil)' + terminated, Name.Constant),
+
+ # functions and variables
(r'\*' + symbol + r'\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- }
-
-
-class HyLexer(RegexLexer):
- """
- Lexer for `Hy <http://hylang.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Hy'
- aliases = ['hylang']
- filenames = ['*.hy']
- mimetypes = ['text/x-hy', 'application/x-hy']
-
- special_forms = (
- 'cond', 'for', '->', '->>', 'car',
- 'cdr', 'first', 'rest', 'let', 'when', 'unless',
- 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
- ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
- 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
- 'foreach', 'while',
- 'eval-and-compile', 'eval-when-compile'
- )
-
- declarations = (
- 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
- )
-
- hy_builtins = ()
-
- hy_core = (
- 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
- 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
- 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
- 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
- )
-
- builtins = hy_builtins + hy_core
-
- # valid names for identifiers
-    # well, the only restriction is that names may not consist entirely of numbers
- # but this should be good enough for now
+ (symbol, Name.Variable),
+
+ # parentheses
+ (r'\(', Punctuation, 'body'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ }
+
+
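Editor's note: CommonLispLexer keeps its regexes generic and reclassifies tokens afterwards: its get_tokens_unprocessed override re-yields every Name.Variable whose value appears in one of the builtin tables loaded in __init__ under a more specific token type. A minimal sketch of that post-processing pattern (TinyWordLexer and its KNOWN set are hypothetical stand-ins for the _cl_builtins tables):

from pygments.lexer import RegexLexer
from pygments.token import Name, Text

class TinyWordLexer(RegexLexer):  # hypothetical example, not part of the diff above
    name = 'TinyWord'
    KNOWN = {'car', 'cdr', 'cons'}  # stand-in for the _cl_builtins tables
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'\S+', Name.Variable),
        ],
    }

    def get_tokens_unprocessed(self, text):
        # second pass over the regex output: promote known names, pass the rest through
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name.Variable and value in self.KNOWN:
                yield index, Name.Builtin, value
            else:
                yield index, token, value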
+class HyLexer(RegexLexer):
+ """
+ Lexer for `Hy <http://hylang.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Hy'
+ aliases = ['hylang']
+ filenames = ['*.hy']
+ mimetypes = ['text/x-hy', 'application/x-hy']
+
+ special_forms = (
+ 'cond', 'for', '->', '->>', 'car',
+ 'cdr', 'first', 'rest', 'let', 'when', 'unless',
+ 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
+ ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
+ 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
+ 'foreach', 'while',
+ 'eval-and-compile', 'eval-when-compile'
+ )
+
+ declarations = (
+ 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
+ )
+
+ hy_builtins = ()
+
+ hy_core = (
+ 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
+ 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
+ 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
+ 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
+ )
+
+ builtins = hy_builtins + hy_core
+
+ # valid names for identifiers
+    # well, the only restriction is that names may not consist entirely of numbers
+ # but this should be good enough for now
valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+'
-
- def _multi_escape(entries):
- return words(entries, suffix=' ')
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'[,\s]+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
-
- # strings, symbols and characters
+
+ def _multi_escape(entries):
+ return words(entries, suffix=' ')
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'[,\s]+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+
+ # strings, symbols and characters
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
- (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
-
- # keywords
- (r'::?' + valid_name, String.Symbol),
-
- # special operators
- (r'~@|[`\'#^~&@]', Operator),
-
- include('py-keywords'),
- include('py-builtins'),
-
- # highlight the special forms
- (_multi_escape(special_forms), Keyword),
-
- # Technically, only the special forms are 'keywords'. The problem
- # is that only treating them as keywords means that things like
- # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
- # and weird for most styles. So, as a compromise we're going to
- # highlight them as Keyword.Declarations.
- (_multi_escape(declarations), Keyword.Declaration),
-
- # highlight the builtins
- (_multi_escape(builtins), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Hy accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Hy accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
-
- ],
- 'py-keywords': PythonLexer.tokens['keywords'],
- 'py-builtins': PythonLexer.tokens['builtins'],
- }
-
- def analyse_text(text):
- if '(import ' in text or '(defn ' in text:
- return 0.9
-
-
-class RacketLexer(RegexLexer):
- """
- Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
- known as PLT Scheme).
-
- .. versionadded:: 1.6
- """
-
- name = 'Racket'
- aliases = ['racket', 'rkt']
- filenames = ['*.rkt', '*.rktd', '*.rktl']
- mimetypes = ['text/x-racket', 'application/x-racket']
-
- # Generated by example.rkt
- _keywords = (
+ (r"'" + valid_name, String.Symbol),
+ (r"\\(.|[a-z]+)", String.Char),
+ (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+
+ # keywords
+ (r'::?' + valid_name, String.Symbol),
+
+ # special operators
+ (r'~@|[`\'#^~&@]', Operator),
+
+ include('py-keywords'),
+ include('py-builtins'),
+
+ # highlight the special forms
+ (_multi_escape(special_forms), Keyword),
+
+ # Technically, only the special forms are 'keywords'. The problem
+ # is that only treating them as keywords means that things like
+ # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
+ # and weird for most styles. So, as a compromise we're going to
+ # highlight them as Keyword.Declarations.
+ (_multi_escape(declarations), Keyword.Declaration),
+
+ # highlight the builtins
+ (_multi_escape(builtins), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Hy accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Hy accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+
+ ],
+ 'py-keywords': PythonLexer.tokens['keywords'],
+ 'py-builtins': PythonLexer.tokens['builtins'],
+ }
+
+ def analyse_text(text):
+ if '(import ' in text or '(defn ' in text:
+ return 0.9
+
+
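Editor's note: HyLexer gets Python-level keyword and builtin highlighting for free by aliasing PythonLexer.tokens['keywords'] and ['builtins'] as its own states and pulling them in with include(). The same state-sharing trick works for any lexer whose tokens attribute is a plain dict; a small sketch (PyFlavouredLexer is hypothetical, the PythonLexer state name is the one used above):

from pygments.lexer import RegexLexer, include
from pygments.lexers.python import PythonLexer
from pygments.token import Name, Text

class PyFlavouredLexer(RegexLexer):  # hypothetical example, not part of the diff above
    name = 'PyFlavoured'
    tokens = {
        'root': [
            (r'\s+', Text),
            include('py-keywords'),   # borrowed rules are tried before the fallback
            (r'\w+', Name),
        ],
        # referencing another lexer's state list shares those rules verbatim
        'py-keywords': PythonLexer.tokens['keywords'],
    }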
+class RacketLexer(RegexLexer):
+ """
+ Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
+ known as PLT Scheme).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Racket'
+ aliases = ['racket', 'rkt']
+ filenames = ['*.rkt', '*.rktd', '*.rktl']
+ mimetypes = ['text/x-racket', 'application/x-racket']
+
+ # Generated by example.rkt
+ _keywords = (
'#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
'#%plain-app', '#%plain-lambda', '#%plain-module-begin',
'#%printing-module-begin', '#%provide', '#%require',
@@ -589,10 +589,10 @@ class RacketLexer(RegexLexer):
'when', 'with-continuation-mark', 'with-contract',
'with-contract-continuation-mark', 'with-handlers', 'with-handlers*',
'with-method', 'with-syntax', 'λ'
- )
-
- # Generated by example.rkt
- _builtins = (
+ )
+
+ # Generated by example.rkt
+ _builtins = (
'*', '*list/c', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c',
'>', '>/c', '>=', '>=/c', 'abort-current-continuation', 'abs',
'absolute-path?', 'acos', 'add-between', 'add1', 'alarm-evt',
@@ -1269,959 +1269,959 @@ class RacketLexer(RegexLexer):
'write-special-avail*', 'write-special-evt', 'write-string',
'write-to-file', 'writeln', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a',
'~e', '~r', '~s', '~v'
- )
-
- _opening_parenthesis = r'[([{]'
- _closing_parenthesis = r'[)\]}]'
- _delimiters = r'()[\]{}",\'`;\s'
+ )
+
+ _opening_parenthesis = r'[([{]'
+ _closing_parenthesis = r'[)\]}]'
+ _delimiters = r'()[\]{}",\'`;\s'
_symbol = r'(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
- _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
- _exponent = r'(?:[defls][-+]?\d+)'
- _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
- _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
- r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
- _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
- _exponent)
- _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
- _inexact_special = r'(?:(?:inf|nan)\.[0f])'
- _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
- _inexact_special)
- _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
-
- tokens = {
- 'root': [
- (_closing_parenthesis, Error),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'datum': [
- (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
+ _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
+ _exponent = r'(?:[defls][-+]?\d+)'
+ _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
+ _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
+ r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
+ _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
+ _exponent)
+ _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
+ _inexact_special = r'(?:(?:inf|nan)\.[0f])'
+ _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
+ _inexact_special)
+ _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
+
+ tokens = {
+ 'root': [
+ (_closing_parenthesis, Error),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'datum': [
+ (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
(r';[^\n\r\x85\u2028\u2029]*', Comment.Single),
- (r'#\|', Comment.Multiline, 'block-comment'),
-
- # Whitespaces
- (r'(?u)\s+', Text),
-
- # Numbers: Keep in mind Racket reader hash prefixes, which
- # can denote the base or the type. These don't map neatly
- # onto Pygments token types; some judgment calls here.
-
- # #d or no prefix
- (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
- Number.Integer, '#pop'),
- (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
- (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
- (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
- (_exact_decimal_prefix, _inexact_normal_no_hashes,
- _inexact_normal_no_hashes, _inexact_normal_no_hashes,
- _delimiters), Number, '#pop'),
-
- # Inexact without explicit #i
- (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
- (_inexact_real, _inexact_unsigned, _inexact_unsigned,
- _inexact_real, _inexact_real, _delimiters), Number.Float,
- '#pop'),
-
- # The remaining extflonums
- (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
- (_inexact_simple, _delimiters), Number.Float, '#pop'),
-
- # #b
+ (r'#\|', Comment.Multiline, 'block-comment'),
+
+ # Whitespaces
+ (r'(?u)\s+', Text),
+
+ # Numbers: Keep in mind Racket reader hash prefixes, which
+ # can denote the base or the type. These don't map neatly
+ # onto Pygments token types; some judgment calls here.
+
+ # #d or no prefix
+ (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
+ Number.Integer, '#pop'),
+ (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
+ (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
+ (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
+ (_exact_decimal_prefix, _inexact_normal_no_hashes,
+ _inexact_normal_no_hashes, _inexact_normal_no_hashes,
+ _delimiters), Number, '#pop'),
+
+ # Inexact without explicit #i
+ (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
+ (_inexact_real, _inexact_unsigned, _inexact_unsigned,
+ _inexact_real, _inexact_real, _delimiters), Number.Float,
+ '#pop'),
+
+ # The remaining extflonums
+ (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
+ (_inexact_simple, _delimiters), Number.Float, '#pop'),
+
+ # #b
(r'(?iu)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
-
- # #o
+
+ # #o
(r'(?iu)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
-
- # #x
+
+ # #x
(r'(?iu)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
-
- # #i is always inexact, i.e. float
+
+ # #i is always inexact, i.e. float
(r'(?iu)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
-
- # Strings and characters
- (r'#?"', String.Double, ('#pop', 'string')),
- (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
- (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
- (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
- (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
-
- # Constants
- (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
-
- # Keyword argument names (e.g. #:keyword)
+
+ # Strings and characters
+ (r'#?"', String.Double, ('#pop', 'string')),
+ (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
+ (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
+ (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
+ (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
+
+ # Constants
+ (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
+
+ # Keyword argument names (e.g. #:keyword)
(r'(?u)#:%s' % _symbol, Keyword.Declaration, '#pop'),
-
- # Reader extensions
- (r'(#lang |#!)(\S+)',
- bygroups(Keyword.Namespace, Name.Namespace)),
- (r'#reader', Keyword.Namespace, 'quoted-datum'),
-
- # Other syntax
- (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
- (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
- Operator, ('#pop', 'quoted-datum'))
- ],
- 'datum*': [
- (r'`|,@?', Operator),
- (_symbol, String.Symbol, '#pop'),
- (r'[|\\]', Error),
- default('#pop')
- ],
- 'list': [
- (_closing_parenthesis, Punctuation, '#pop')
- ],
- 'unquoted-datum': [
- include('datum'),
- (r'quote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quoted-datum')),
- (r'`', Operator, ('#pop', 'quasiquoted-datum')),
- (r'quasiquote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quasiquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
- (words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
- Keyword, '#pop'),
- (words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
- Name.Builtin, '#pop'),
- (_symbol, Name, '#pop'),
- include('datum*')
- ],
- 'unquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'quasiquoted-datum': [
- include('datum'),
- (r',@?', Operator, ('#pop', 'unquoted-datum')),
- (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'unquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
- include('datum*')
- ],
- 'quasiquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quasiquoted-datum')
- ],
- 'quoted-datum': [
- include('datum'),
- (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
- include('datum*')
- ],
- 'quoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quoted-datum')
- ],
- 'block-comment': [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^#|]+|.', Comment.Multiline)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
- r'U[\da-fA-F]{1,8}|.)', String.Escape),
- (r'[^\\"]+', String.Double)
- ]
- }
-
-
-class NewLispLexer(RegexLexer):
- """
+
+ # Reader extensions
+ (r'(#lang |#!)(\S+)',
+ bygroups(Keyword.Namespace, Name.Namespace)),
+ (r'#reader', Keyword.Namespace, 'quoted-datum'),
+
+ # Other syntax
+ (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
+ (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
+ Operator, ('#pop', 'quoted-datum'))
+ ],
+ 'datum*': [
+ (r'`|,@?', Operator),
+ (_symbol, String.Symbol, '#pop'),
+ (r'[|\\]', Error),
+ default('#pop')
+ ],
+ 'list': [
+ (_closing_parenthesis, Punctuation, '#pop')
+ ],
+ 'unquoted-datum': [
+ include('datum'),
+ (r'quote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quoted-datum')),
+ (r'`', Operator, ('#pop', 'quasiquoted-datum')),
+ (r'quasiquote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quasiquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
+ (words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Keyword, '#pop'),
+ (words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Name.Builtin, '#pop'),
+ (_symbol, Name, '#pop'),
+ include('datum*')
+ ],
+ 'unquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'quasiquoted-datum': [
+ include('datum'),
+ (r',@?', Operator, ('#pop', 'unquoted-datum')),
+ (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'unquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
+ include('datum*')
+ ],
+ 'quasiquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quasiquoted-datum')
+ ],
+ 'quoted-datum': [
+ include('datum'),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
+ include('datum*')
+ ],
+ 'quoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quoted-datum')
+ ],
+ 'block-comment': [
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^#|]+|.', Comment.Multiline)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
+ r'U[\da-fA-F]{1,8}|.)', String.Escape),
+ (r'[^\\"]+', String.Double)
+ ]
+ }
+
+
+class NewLispLexer(RegexLexer):
+ """
For `newLISP. <http://www.newlisp.org/>`_ source code (version 10.3.0).
-
- .. versionadded:: 1.5
- """
-
- name = 'NewLisp'
- aliases = ['newlisp']
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'NewLisp'
+ aliases = ['newlisp']
filenames = ['*.lsp', '*.nl', '*.kif']
- mimetypes = ['text/x-newlisp', 'application/x-newlisp']
-
- flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
-
- # list of built-in functions for newLISP version 10.3
- builtins = (
- '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
- '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
- '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
- '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
- 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'append-file',
- 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
- 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
- 'base64-enc', 'bayes-query', 'bayes-train', 'begin',
- 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback',
- 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
- 'close', 'command-event', 'cond', 'cons', 'constant',
- 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
- 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
- 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
- 'def-new', 'default', 'define-macro', 'define',
- 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
- 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
- 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
- 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
- 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
- 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
- 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
- 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
- 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
- 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
- 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
- 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
- 'last', 'legal?', 'length', 'let', 'letex', 'letn',
- 'list?', 'list', 'load', 'local', 'log', 'lookup',
- 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
- 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
- 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
- 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
- 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
- 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
- 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
- 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
- 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
- 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
- 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
- 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
- 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
- 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
- 'read-key', 'read-line', 'read-utf8', 'reader-event',
- 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
- 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
- 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
- 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
- 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
- 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
- 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
- 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
- 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
- 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
- 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
- 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
- 'write', 'write-char', 'write-file', 'write-line',
- 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
- )
-
- # valid names
- valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
-
- tokens = {
- 'root': [
- # shebang
- (r'#!(.*?)$', Comment.Preproc),
- # comments starting with semicolon
- (r';.*$', Comment.Single),
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # whitespace
- (r'\s+', Text),
-
- # strings, symbols and characters
+ mimetypes = ['text/x-newlisp', 'application/x-newlisp']
+
+ flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
+
+ # list of built-in functions for newLISP version 10.3
+ builtins = (
+ '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
+ '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
+ '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
+ '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
+ 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'append-file',
+ 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
+ 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
+ 'base64-enc', 'bayes-query', 'bayes-train', 'begin',
+ 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback',
+ 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
+ 'close', 'command-event', 'cond', 'cons', 'constant',
+ 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
+ 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
+ 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
+ 'def-new', 'default', 'define-macro', 'define',
+ 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
+ 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
+ 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
+ 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
+ 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
+ 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
+ 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
+ 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
+ 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
+ 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
+ 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
+ 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
+ 'last', 'legal?', 'length', 'let', 'letex', 'letn',
+ 'list?', 'list', 'load', 'local', 'log', 'lookup',
+ 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
+ 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
+ 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
+ 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
+ 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
+ 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
+ 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
+ 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
+ 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
+ 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
+ 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
+ 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
+ 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
+ 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
+ 'read-key', 'read-line', 'read-utf8', 'reader-event',
+ 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
+ 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
+ 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
+ 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
+ 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
+ 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
+ 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
+ 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
+ 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
+ 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
+ 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
+ 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
+ 'write', 'write-char', 'write-file', 'write-line',
+ 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
+ )
+
+ # valid names
+ valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
+
+ tokens = {
+ 'root': [
+ # shebang
+ (r'#!(.*?)$', Comment.Preproc),
+ # comments starting with semicolon
+ (r';.*$', Comment.Single),
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # strings, symbols and characters
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-
- # braces
- (r'\{', String, "bracestring"),
-
- # [text] ... [/text] delimited strings
- (r'\[text\]*', String, "tagstring"),
-
- # 'special' operators...
- (r"('|:)", Operator),
-
- # highlight the builtins
- (words(builtins, suffix=r'\b'),
- Keyword),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Variable),
-
- # the remaining variables
- (valid_name, String.Symbol),
-
- # parentheses
- (r'(\(|\))', Punctuation),
- ],
-
- # braced strings...
- 'bracestring': [
- (r'\{', String, "#push"),
- (r'\}', String, "#pop"),
- ('[^{}]+', String),
- ],
-
- # tagged [text]...[/text] delimited strings...
- 'tagstring': [
- (r'(?s)(.*?)(\[/text\])', String, '#pop'),
- ],
- }
-
-
-class EmacsLispLexer(RegexLexer):
- """
- An ELisp lexer, parsing a stream and outputting the tokens
- needed to highlight elisp code.
-
- .. versionadded:: 2.1
- """
- name = 'EmacsLisp'
+
+ # braces
+ (r'\{', String, "bracestring"),
+
+ # [text] ... [/text] delimited strings
+ (r'\[text\]*', String, "tagstring"),
+
+ # 'special' operators...
+ (r"('|:)", Operator),
+
+ # highlight the builtins
+ (words(builtins, suffix=r'\b'),
+ Keyword),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Variable),
+
+ # the remaining variables
+ (valid_name, String.Symbol),
+
+ # parentheses
+ (r'(\(|\))', Punctuation),
+ ],
+
+ # braced strings...
+ 'bracestring': [
+ (r'\{', String, "#push"),
+ (r'\}', String, "#pop"),
+ ('[^{}]+', String),
+ ],
+
+ # tagged [text]...[/text] delimited strings...
+ 'tagstring': [
+ (r'(?s)(.*?)(\[/text\])', String, '#pop'),
+ ],
+ }
+
+
+class EmacsLispLexer(RegexLexer):
+ """
+ An ELisp lexer, parsing a stream and outputting the tokens
+ needed to highlight elisp code.
+
+ .. versionadded:: 2.1
+ """
+ name = 'EmacsLisp'
aliases = ['emacs-lisp', 'elisp', 'emacs']
- filenames = ['*.el']
- mimetypes = ['text/x-elisp', 'application/x-elisp']
-
- flags = re.MULTILINE
-
- # couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\]\'\n,;`])' # whitespace or terminating macro characters
-
- # symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
-
+ filenames = ['*.el']
+ mimetypes = ['text/x-elisp', 'application/x-elisp']
+
+ flags = re.MULTILINE
+
+ # couple of useful regexes
+
+ # characters that are not macro-characters and can be used to begin a symbol
+ nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
+ constituent = nonmacro + '|[#.:]'
+ terminated = r'(?=[ "()\]\'\n,;`])' # whitespace or terminating macro characters
+
+ # symbol token, reverse-engineered from hyperspec
+ # Take a deep breath...
+ symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
+
macros = {
- 'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
- 'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
- 'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
- 'cl-defsubst', 'cl-deftype', 'cl-defun', 'cl-destructuring-bind',
- 'cl-do', 'cl-do*', 'cl-do-all-symbols', 'cl-do-symbols', 'cl-dolist',
- 'cl-dotimes', 'cl-ecase', 'cl-etypecase', 'eval-when', 'cl-eval-when', 'cl-flet',
- 'cl-flet*', 'cl-function', 'cl-incf', 'cl-labels', 'cl-letf',
- 'cl-letf*', 'cl-load-time-value', 'cl-locally', 'cl-loop',
- 'cl-macrolet', 'cl-multiple-value-bind', 'cl-multiple-value-setq',
- 'cl-progv', 'cl-psetf', 'cl-psetq', 'cl-pushnew', 'cl-remf',
- 'cl-return', 'cl-return-from', 'cl-rotatef', 'cl-shiftf',
- 'cl-symbol-macrolet', 'cl-tagbody', 'cl-the', 'cl-typecase',
- 'combine-after-change-calls', 'condition-case-unless-debug', 'decf',
- 'declaim', 'declare', 'declare-function', 'def-edebug-spec',
- 'defadvice', 'defclass', 'defcustom', 'defface', 'defgeneric',
- 'defgroup', 'define-advice', 'define-alternatives',
- 'define-compiler-macro', 'define-derived-mode', 'define-generic-mode',
- 'define-global-minor-mode', 'define-globalized-minor-mode',
- 'define-minor-mode', 'define-modify-macro',
- 'define-obsolete-face-alias', 'define-obsolete-function-alias',
- 'define-obsolete-variable-alias', 'define-setf-expander',
- 'define-skeleton', 'defmacro', 'defmethod', 'defsetf', 'defstruct',
- 'defsubst', 'deftheme', 'deftype', 'defun', 'defvar-local',
- 'delay-mode-hooks', 'destructuring-bind', 'do', 'do*',
- 'do-all-symbols', 'do-symbols', 'dolist', 'dont-compile', 'dotimes',
- 'dotimes-with-progress-reporter', 'ecase', 'ert-deftest', 'etypecase',
- 'eval-and-compile', 'eval-when-compile', 'flet', 'ignore-errors',
- 'incf', 'labels', 'lambda', 'letrec', 'lexical-let', 'lexical-let*',
- 'loop', 'multiple-value-bind', 'multiple-value-setq', 'noreturn',
- 'oref', 'oref-default', 'oset', 'oset-default', 'pcase',
- 'pcase-defmacro', 'pcase-dolist', 'pcase-exhaustive', 'pcase-let',
- 'pcase-let*', 'pop', 'psetf', 'psetq', 'push', 'pushnew', 'remf',
- 'return', 'rotatef', 'rx', 'save-match-data', 'save-selected-window',
- 'save-window-excursion', 'setf', 'setq-local', 'shiftf',
- 'track-mouse', 'typecase', 'unless', 'use-package', 'when',
- 'while-no-input', 'with-case-table', 'with-category-table',
- 'with-coding-priority', 'with-current-buffer', 'with-demoted-errors',
- 'with-eval-after-load', 'with-file-modes', 'with-local-quit',
- 'with-output-to-string', 'with-output-to-temp-buffer',
- 'with-parsed-tramp-file-name', 'with-selected-frame',
- 'with-selected-window', 'with-silent-modifications', 'with-slots',
- 'with-syntax-table', 'with-temp-buffer', 'with-temp-file',
- 'with-temp-message', 'with-timeout', 'with-tramp-connection-property',
- 'with-tramp-file-property', 'with-tramp-progress-reporter',
- 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
- 'return-from',
+ 'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
+ 'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
+ 'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
+ 'cl-defsubst', 'cl-deftype', 'cl-defun', 'cl-destructuring-bind',
+ 'cl-do', 'cl-do*', 'cl-do-all-symbols', 'cl-do-symbols', 'cl-dolist',
+ 'cl-dotimes', 'cl-ecase', 'cl-etypecase', 'eval-when', 'cl-eval-when', 'cl-flet',
+ 'cl-flet*', 'cl-function', 'cl-incf', 'cl-labels', 'cl-letf',
+ 'cl-letf*', 'cl-load-time-value', 'cl-locally', 'cl-loop',
+ 'cl-macrolet', 'cl-multiple-value-bind', 'cl-multiple-value-setq',
+ 'cl-progv', 'cl-psetf', 'cl-psetq', 'cl-pushnew', 'cl-remf',
+ 'cl-return', 'cl-return-from', 'cl-rotatef', 'cl-shiftf',
+ 'cl-symbol-macrolet', 'cl-tagbody', 'cl-the', 'cl-typecase',
+ 'combine-after-change-calls', 'condition-case-unless-debug', 'decf',
+ 'declaim', 'declare', 'declare-function', 'def-edebug-spec',
+ 'defadvice', 'defclass', 'defcustom', 'defface', 'defgeneric',
+ 'defgroup', 'define-advice', 'define-alternatives',
+ 'define-compiler-macro', 'define-derived-mode', 'define-generic-mode',
+ 'define-global-minor-mode', 'define-globalized-minor-mode',
+ 'define-minor-mode', 'define-modify-macro',
+ 'define-obsolete-face-alias', 'define-obsolete-function-alias',
+ 'define-obsolete-variable-alias', 'define-setf-expander',
+ 'define-skeleton', 'defmacro', 'defmethod', 'defsetf', 'defstruct',
+ 'defsubst', 'deftheme', 'deftype', 'defun', 'defvar-local',
+ 'delay-mode-hooks', 'destructuring-bind', 'do', 'do*',
+ 'do-all-symbols', 'do-symbols', 'dolist', 'dont-compile', 'dotimes',
+ 'dotimes-with-progress-reporter', 'ecase', 'ert-deftest', 'etypecase',
+ 'eval-and-compile', 'eval-when-compile', 'flet', 'ignore-errors',
+ 'incf', 'labels', 'lambda', 'letrec', 'lexical-let', 'lexical-let*',
+ 'loop', 'multiple-value-bind', 'multiple-value-setq', 'noreturn',
+ 'oref', 'oref-default', 'oset', 'oset-default', 'pcase',
+ 'pcase-defmacro', 'pcase-dolist', 'pcase-exhaustive', 'pcase-let',
+ 'pcase-let*', 'pop', 'psetf', 'psetq', 'push', 'pushnew', 'remf',
+ 'return', 'rotatef', 'rx', 'save-match-data', 'save-selected-window',
+ 'save-window-excursion', 'setf', 'setq-local', 'shiftf',
+ 'track-mouse', 'typecase', 'unless', 'use-package', 'when',
+ 'while-no-input', 'with-case-table', 'with-category-table',
+ 'with-coding-priority', 'with-current-buffer', 'with-demoted-errors',
+ 'with-eval-after-load', 'with-file-modes', 'with-local-quit',
+ 'with-output-to-string', 'with-output-to-temp-buffer',
+ 'with-parsed-tramp-file-name', 'with-selected-frame',
+ 'with-selected-window', 'with-silent-modifications', 'with-slots',
+ 'with-syntax-table', 'with-temp-buffer', 'with-temp-file',
+ 'with-temp-message', 'with-timeout', 'with-tramp-connection-property',
+ 'with-tramp-file-property', 'with-tramp-progress-reporter',
+ 'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
+ 'return-from',
}
-
+
special_forms = {
- 'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
- 'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
- 'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
- 'save-restriction', 'setq', 'setq-default', 'subr-arity',
- 'unwind-protect', 'while',
+ 'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
+ 'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
+ 'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
+ 'save-restriction', 'setq', 'setq-default', 'subr-arity',
+ 'unwind-protect', 'while',
}
-
+
builtin_function = {
- '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
- 'Snarf-documentation', 'abort-recursive-edit', 'abs',
- 'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
- 'active-minibuffer-window', 'add-face-text-property',
- 'add-name-to-file', 'add-text-properties', 'all-completions',
- 'append', 'apply', 'apropos-internal', 'aref', 'arrayp', 'aset',
- 'ash', 'asin', 'assoc', 'assoc-string', 'assq', 'atan', 'atom',
- 'autoload', 'autoload-do-load', 'backtrace', 'backtrace--locals',
- 'backtrace-debug', 'backtrace-eval', 'backtrace-frame',
- 'backward-char', 'backward-prefix-chars', 'barf-if-buffer-read-only',
- 'base64-decode-region', 'base64-decode-string',
- 'base64-encode-region', 'base64-encode-string', 'beginning-of-line',
- 'bidi-find-overridden-directionality', 'bidi-resolved-levels',
- 'bitmap-spec-p', 'bobp', 'bolp', 'bool-vector',
- 'bool-vector-count-consecutive', 'bool-vector-count-population',
- 'bool-vector-exclusive-or', 'bool-vector-intersection',
- 'bool-vector-not', 'bool-vector-p', 'bool-vector-set-difference',
- 'bool-vector-subsetp', 'bool-vector-union', 'boundp',
- 'buffer-base-buffer', 'buffer-chars-modified-tick',
- 'buffer-enable-undo', 'buffer-file-name', 'buffer-has-markers-at',
- 'buffer-list', 'buffer-live-p', 'buffer-local-value',
- 'buffer-local-variables', 'buffer-modified-p', 'buffer-modified-tick',
- 'buffer-name', 'buffer-size', 'buffer-string', 'buffer-substring',
- 'buffer-substring-no-properties', 'buffer-swap-text', 'bufferp',
- 'bury-buffer-internal', 'byte-code', 'byte-code-function-p',
- 'byte-to-position', 'byte-to-string', 'byteorder',
- 'call-interactively', 'call-last-kbd-macro', 'call-process',
- 'call-process-region', 'cancel-kbd-macro-events', 'capitalize',
- 'capitalize-region', 'capitalize-word', 'car', 'car-less-than-car',
- 'car-safe', 'case-table-p', 'category-docstring',
- 'category-set-mnemonics', 'category-table', 'category-table-p',
- 'ccl-execute', 'ccl-execute-on-string', 'ccl-program-p', 'cdr',
- 'cdr-safe', 'ceiling', 'char-after', 'char-before',
- 'char-category-set', 'char-charset', 'char-equal', 'char-or-string-p',
- 'char-resolve-modifiers', 'char-syntax', 'char-table-extra-slot',
- 'char-table-p', 'char-table-parent', 'char-table-range',
- 'char-table-subtype', 'char-to-string', 'char-width', 'characterp',
- 'charset-after', 'charset-id-internal', 'charset-plist',
- 'charset-priority-list', 'charsetp', 'check-coding-system',
- 'check-coding-systems-region', 'clear-buffer-auto-save-failure',
- 'clear-charset-maps', 'clear-face-cache', 'clear-font-cache',
- 'clear-image-cache', 'clear-string', 'clear-this-command-keys',
- 'close-font', 'clrhash', 'coding-system-aliases',
- 'coding-system-base', 'coding-system-eol-type', 'coding-system-p',
- 'coding-system-plist', 'coding-system-priority-list',
- 'coding-system-put', 'color-distance', 'color-gray-p',
- 'color-supported-p', 'combine-after-change-execute',
- 'command-error-default-function', 'command-remapping', 'commandp',
- 'compare-buffer-substrings', 'compare-strings',
- 'compare-window-configurations', 'completing-read',
- 'compose-region-internal', 'compose-string-internal',
- 'composition-get-gstring', 'compute-motion', 'concat', 'cons',
- 'consp', 'constrain-to-field', 'continue-process',
- 'controlling-tty-p', 'coordinates-in-window-p', 'copy-alist',
- 'copy-category-table', 'copy-file', 'copy-hash-table', 'copy-keymap',
- 'copy-marker', 'copy-sequence', 'copy-syntax-table', 'copysign',
- 'cos', 'current-active-maps', 'current-bidi-paragraph-direction',
- 'current-buffer', 'current-case-table', 'current-column',
- 'current-global-map', 'current-idle-time', 'current-indentation',
- 'current-input-mode', 'current-local-map', 'current-message',
- 'current-minor-mode-maps', 'current-time', 'current-time-string',
- 'current-time-zone', 'current-window-configuration',
- 'cygwin-convert-file-name-from-windows',
- 'cygwin-convert-file-name-to-windows', 'daemon-initialized',
- 'daemonp', 'dbus--init-bus', 'dbus-get-unique-name',
- 'dbus-message-internal', 'debug-timer-check', 'declare-equiv-charset',
- 'decode-big5-char', 'decode-char', 'decode-coding-region',
- 'decode-coding-string', 'decode-sjis-char', 'decode-time',
- 'default-boundp', 'default-file-modes', 'default-printer-name',
- 'default-toplevel-value', 'default-value', 'define-category',
- 'define-charset-alias', 'define-charset-internal',
- 'define-coding-system-alias', 'define-coding-system-internal',
- 'define-fringe-bitmap', 'define-hash-table-test', 'define-key',
- 'define-prefix-command', 'delete',
- 'delete-all-overlays', 'delete-and-extract-region', 'delete-char',
- 'delete-directory-internal', 'delete-field', 'delete-file',
- 'delete-frame', 'delete-other-windows-internal', 'delete-overlay',
- 'delete-process', 'delete-region', 'delete-terminal',
- 'delete-window-internal', 'delq', 'describe-buffer-bindings',
- 'describe-vector', 'destroy-fringe-bitmap', 'detect-coding-region',
- 'detect-coding-string', 'ding', 'directory-file-name',
- 'directory-files', 'directory-files-and-attributes', 'discard-input',
- 'display-supports-face-attributes-p', 'do-auto-save', 'documentation',
- 'documentation-property', 'downcase', 'downcase-region',
- 'downcase-word', 'draw-string', 'dump-colors', 'dump-emacs',
- 'dump-face', 'dump-frame-glyph-matrix', 'dump-glyph-matrix',
- 'dump-glyph-row', 'dump-redisplay-history', 'dump-tool-bar-row',
- 'elt', 'emacs-pid', 'encode-big5-char', 'encode-char',
- 'encode-coding-region', 'encode-coding-string', 'encode-sjis-char',
- 'encode-time', 'end-kbd-macro', 'end-of-line', 'eobp', 'eolp', 'eq',
- 'eql', 'equal', 'equal-including-properties', 'erase-buffer',
- 'error-message-string', 'eval', 'eval-buffer', 'eval-region',
- 'event-convert-list', 'execute-kbd-macro', 'exit-recursive-edit',
- 'exp', 'expand-file-name', 'expt', 'external-debugging-output',
- 'face-attribute-relative-p', 'face-attributes-as-vector', 'face-font',
- 'fboundp', 'fceiling', 'fetch-bytecode', 'ffloor',
- 'field-beginning', 'field-end', 'field-string',
- 'field-string-no-properties', 'file-accessible-directory-p',
- 'file-acl', 'file-attributes', 'file-attributes-lessp',
- 'file-directory-p', 'file-executable-p', 'file-exists-p',
- 'file-locked-p', 'file-modes', 'file-name-absolute-p',
- 'file-name-all-completions', 'file-name-as-directory',
- 'file-name-completion', 'file-name-directory',
- 'file-name-nondirectory', 'file-newer-than-file-p', 'file-readable-p',
- 'file-regular-p', 'file-selinux-context', 'file-symlink-p',
- 'file-system-info', 'file-system-info', 'file-writable-p',
- 'fillarray', 'find-charset-region', 'find-charset-string',
- 'find-coding-systems-region-internal', 'find-composition-internal',
- 'find-file-name-handler', 'find-font', 'find-operation-coding-system',
- 'float', 'float-time', 'floatp', 'floor', 'fmakunbound',
- 'following-char', 'font-at', 'font-drive-otf', 'font-face-attributes',
- 'font-family-list', 'font-get', 'font-get-glyphs',
- 'font-get-system-font', 'font-get-system-normal-font', 'font-info',
- 'font-match-p', 'font-otf-alternates', 'font-put',
- 'font-shape-gstring', 'font-spec', 'font-variation-glyphs',
- 'font-xlfd-name', 'fontp', 'fontset-font', 'fontset-info',
- 'fontset-list', 'fontset-list-all', 'force-mode-line-update',
- 'force-window-update', 'format', 'format-mode-line',
- 'format-network-address', 'format-time-string', 'forward-char',
- 'forward-comment', 'forward-line', 'forward-word',
- 'frame-border-width', 'frame-bottom-divider-width',
- 'frame-can-run-window-configuration-change-hook', 'frame-char-height',
- 'frame-char-width', 'frame-face-alist', 'frame-first-window',
- 'frame-focus', 'frame-font-cache', 'frame-fringe-width', 'frame-list',
- 'frame-live-p', 'frame-or-buffer-changed-p', 'frame-parameter',
- 'frame-parameters', 'frame-pixel-height', 'frame-pixel-width',
- 'frame-pointer-visible-p', 'frame-right-divider-width',
- 'frame-root-window', 'frame-scroll-bar-height',
- 'frame-scroll-bar-width', 'frame-selected-window', 'frame-terminal',
- 'frame-text-cols', 'frame-text-height', 'frame-text-lines',
- 'frame-text-width', 'frame-total-cols', 'frame-total-lines',
- 'frame-visible-p', 'framep', 'frexp', 'fringe-bitmaps-at-pos',
- 'fround', 'fset', 'ftruncate', 'funcall', 'funcall-interactively',
- 'function-equal', 'functionp', 'gap-position', 'gap-size',
- 'garbage-collect', 'gc-status', 'generate-new-buffer-name', 'get',
- 'get-buffer', 'get-buffer-create', 'get-buffer-process',
- 'get-buffer-window', 'get-byte', 'get-char-property',
- 'get-char-property-and-overlay', 'get-file-buffer', 'get-file-char',
- 'get-internal-run-time', 'get-load-suffixes', 'get-pos-property',
- 'get-process', 'get-screen-color', 'get-text-property',
- 'get-unicode-property-internal', 'get-unused-category',
- 'get-unused-iso-final-char', 'getenv-internal', 'gethash',
- 'gfile-add-watch', 'gfile-rm-watch', 'global-key-binding',
- 'gnutls-available-p', 'gnutls-boot', 'gnutls-bye', 'gnutls-deinit',
- 'gnutls-error-fatalp', 'gnutls-error-string', 'gnutls-errorp',
- 'gnutls-get-initstage', 'gnutls-peer-status',
- 'gnutls-peer-status-warning-describe', 'goto-char', 'gpm-mouse-start',
- 'gpm-mouse-stop', 'group-gid', 'group-real-gid',
- 'handle-save-session', 'handle-switch-frame', 'hash-table-count',
- 'hash-table-p', 'hash-table-rehash-size',
- 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
- 'hash-table-weakness', 'iconify-frame', 'identity', 'image-flush',
- 'image-mask-p', 'image-metadata', 'image-size', 'imagemagick-types',
- 'imagep', 'indent-to', 'indirect-function', 'indirect-variable',
- 'init-image-library', 'inotify-add-watch', 'inotify-rm-watch',
- 'input-pending-p', 'insert', 'insert-and-inherit',
- 'insert-before-markers', 'insert-before-markers-and-inherit',
- 'insert-buffer-substring', 'insert-byte', 'insert-char',
- 'insert-file-contents', 'insert-startup-screen', 'int86',
- 'integer-or-marker-p', 'integerp', 'interactive-form', 'intern',
- 'intern-soft', 'internal--track-mouse', 'internal-char-font',
- 'internal-complete-buffer', 'internal-copy-lisp-face',
- 'internal-default-process-filter',
- 'internal-default-process-sentinel', 'internal-describe-syntax-value',
- 'internal-event-symbol-parse-modifiers',
- 'internal-face-x-get-resource', 'internal-get-lisp-face-attribute',
- 'internal-lisp-face-attribute-values', 'internal-lisp-face-empty-p',
- 'internal-lisp-face-equal-p', 'internal-lisp-face-p',
- 'internal-make-lisp-face', 'internal-make-var-non-special',
- 'internal-merge-in-global-face',
- 'internal-set-alternative-font-family-alist',
- 'internal-set-alternative-font-registry-alist',
- 'internal-set-font-selection-order',
- 'internal-set-lisp-face-attribute',
- 'internal-set-lisp-face-attribute-from-resource',
- 'internal-show-cursor', 'internal-show-cursor-p', 'interrupt-process',
- 'invisible-p', 'invocation-directory', 'invocation-name', 'isnan',
- 'iso-charset', 'key-binding', 'key-description',
- 'keyboard-coding-system', 'keymap-parent', 'keymap-prompt', 'keymapp',
- 'keywordp', 'kill-all-local-variables', 'kill-buffer', 'kill-emacs',
- 'kill-local-variable', 'kill-process', 'last-nonminibuffer-frame',
- 'lax-plist-get', 'lax-plist-put', 'ldexp', 'length',
- 'libxml-parse-html-region', 'libxml-parse-xml-region',
- 'line-beginning-position', 'line-end-position', 'line-pixel-height',
- 'list', 'list-fonts', 'list-system-processes', 'listp', 'load',
- 'load-average', 'local-key-binding', 'local-variable-if-set-p',
- 'local-variable-p', 'locale-info', 'locate-file-internal',
- 'lock-buffer', 'log', 'logand', 'logb', 'logior', 'lognot', 'logxor',
- 'looking-at', 'lookup-image', 'lookup-image-map', 'lookup-key',
- 'lower-frame', 'lsh', 'macroexpand', 'make-bool-vector',
- 'make-byte-code', 'make-category-set', 'make-category-table',
- 'make-char', 'make-char-table', 'make-directory-internal',
- 'make-frame-invisible', 'make-frame-visible', 'make-hash-table',
- 'make-indirect-buffer', 'make-keymap', 'make-list',
- 'make-local-variable', 'make-marker', 'make-network-process',
- 'make-overlay', 'make-serial-process', 'make-sparse-keymap',
- 'make-string', 'make-symbol', 'make-symbolic-link', 'make-temp-name',
- 'make-terminal-frame', 'make-variable-buffer-local',
- 'make-variable-frame-local', 'make-vector', 'makunbound',
- 'map-char-table', 'map-charset-chars', 'map-keymap',
- 'map-keymap-internal', 'mapatoms', 'mapc', 'mapcar', 'mapconcat',
- 'maphash', 'mark-marker', 'marker-buffer', 'marker-insertion-type',
- 'marker-position', 'markerp', 'match-beginning', 'match-data',
- 'match-end', 'matching-paren', 'max', 'max-char', 'md5', 'member',
- 'memory-info', 'memory-limit', 'memory-use-counts', 'memq', 'memql',
- 'menu-bar-menu-at-x-y', 'menu-or-popup-active-p',
- 'menu-or-popup-active-p', 'merge-face-attribute', 'message',
- 'message-box', 'message-or-box', 'min',
- 'minibuffer-completion-contents', 'minibuffer-contents',
- 'minibuffer-contents-no-properties', 'minibuffer-depth',
- 'minibuffer-prompt', 'minibuffer-prompt-end',
- 'minibuffer-selected-window', 'minibuffer-window', 'minibufferp',
- 'minor-mode-key-binding', 'mod', 'modify-category-entry',
- 'modify-frame-parameters', 'modify-syntax-entry',
- 'mouse-pixel-position', 'mouse-position', 'move-overlay',
- 'move-point-visually', 'move-to-column', 'move-to-window-line',
- 'msdos-downcase-filename', 'msdos-long-file-names', 'msdos-memget',
- 'msdos-memput', 'msdos-mouse-disable', 'msdos-mouse-enable',
- 'msdos-mouse-init', 'msdos-mouse-p', 'msdos-remember-default-colors',
- 'msdos-set-keyboard', 'msdos-set-mouse-buttons',
- 'multibyte-char-to-unibyte', 'multibyte-string-p', 'narrow-to-region',
- 'natnump', 'nconc', 'network-interface-info',
- 'network-interface-list', 'new-fontset', 'newline-cache-check',
- 'next-char-property-change', 'next-frame', 'next-overlay-change',
- 'next-property-change', 'next-read-file-uses-dialog-p',
- 'next-single-char-property-change', 'next-single-property-change',
- 'next-window', 'nlistp', 'nreverse', 'nth', 'nthcdr', 'null',
- 'number-or-marker-p', 'number-to-string', 'numberp',
- 'open-dribble-file', 'open-font', 'open-termscript',
- 'optimize-char-table', 'other-buffer', 'other-window-for-scrolling',
- 'overlay-buffer', 'overlay-end', 'overlay-get', 'overlay-lists',
- 'overlay-properties', 'overlay-put', 'overlay-recenter',
- 'overlay-start', 'overlayp', 'overlays-at', 'overlays-in',
- 'parse-partial-sexp', 'play-sound-internal', 'plist-get',
- 'plist-member', 'plist-put', 'point', 'point-marker', 'point-max',
- 'point-max-marker', 'point-min', 'point-min-marker',
- 'pos-visible-in-window-p', 'position-bytes', 'posix-looking-at',
- 'posix-search-backward', 'posix-search-forward', 'posix-string-match',
- 'posn-at-point', 'posn-at-x-y', 'preceding-char',
- 'prefix-numeric-value', 'previous-char-property-change',
- 'previous-frame', 'previous-overlay-change',
- 'previous-property-change', 'previous-single-char-property-change',
- 'previous-single-property-change', 'previous-window', 'prin1',
- 'prin1-to-string', 'princ', 'print', 'process-attributes',
- 'process-buffer', 'process-coding-system', 'process-command',
- 'process-connection', 'process-contact', 'process-datagram-address',
- 'process-exit-status', 'process-filter', 'process-filter-multibyte-p',
- 'process-id', 'process-inherit-coding-system-flag', 'process-list',
- 'process-mark', 'process-name', 'process-plist',
- 'process-query-on-exit-flag', 'process-running-child-p',
- 'process-send-eof', 'process-send-region', 'process-send-string',
- 'process-sentinel', 'process-status', 'process-tty-name',
- 'process-type', 'processp', 'profiler-cpu-log',
- 'profiler-cpu-running-p', 'profiler-cpu-start', 'profiler-cpu-stop',
- 'profiler-memory-log', 'profiler-memory-running-p',
- 'profiler-memory-start', 'profiler-memory-stop', 'propertize',
- 'purecopy', 'put', 'put-text-property',
- 'put-unicode-property-internal', 'puthash', 'query-font',
- 'query-fontset', 'quit-process', 'raise-frame', 'random', 'rassoc',
- 'rassq', 're-search-backward', 're-search-forward', 'read',
- 'read-buffer', 'read-char', 'read-char-exclusive',
- 'read-coding-system', 'read-command', 'read-event',
- 'read-from-minibuffer', 'read-from-string', 'read-function',
- 'read-key-sequence', 'read-key-sequence-vector',
- 'read-no-blanks-input', 'read-non-nil-coding-system', 'read-string',
- 'read-variable', 'recent-auto-save-p', 'recent-doskeys',
- 'recent-keys', 'recenter', 'recursion-depth', 'recursive-edit',
- 'redirect-debugging-output', 'redirect-frame-focus', 'redisplay',
- 'redraw-display', 'redraw-frame', 'regexp-quote', 'region-beginning',
- 'region-end', 'register-ccl-program', 'register-code-conversion-map',
- 'remhash', 'remove-list-of-text-properties', 'remove-text-properties',
- 'rename-buffer', 'rename-file', 'replace-match',
- 'reset-this-command-lengths', 'resize-mini-window-internal',
- 'restore-buffer-modified-p', 'resume-tty', 'reverse', 'round',
- 'run-hook-with-args', 'run-hook-with-args-until-failure',
- 'run-hook-with-args-until-success', 'run-hook-wrapped', 'run-hooks',
- 'run-window-configuration-change-hook', 'run-window-scroll-functions',
- 'safe-length', 'scan-lists', 'scan-sexps', 'scroll-down',
- 'scroll-left', 'scroll-other-window', 'scroll-right', 'scroll-up',
- 'search-backward', 'search-forward', 'secure-hash', 'select-frame',
- 'select-window', 'selected-frame', 'selected-window',
- 'self-insert-command', 'send-string-to-terminal', 'sequencep',
- 'serial-process-configure', 'set', 'set-buffer',
- 'set-buffer-auto-saved', 'set-buffer-major-mode',
- 'set-buffer-modified-p', 'set-buffer-multibyte', 'set-case-table',
- 'set-category-table', 'set-char-table-extra-slot',
- 'set-char-table-parent', 'set-char-table-range', 'set-charset-plist',
- 'set-charset-priority', 'set-coding-system-priority',
- 'set-cursor-size', 'set-default', 'set-default-file-modes',
- 'set-default-toplevel-value', 'set-file-acl', 'set-file-modes',
- 'set-file-selinux-context', 'set-file-times', 'set-fontset-font',
- 'set-frame-height', 'set-frame-position', 'set-frame-selected-window',
- 'set-frame-size', 'set-frame-width', 'set-fringe-bitmap-face',
- 'set-input-interrupt-mode', 'set-input-meta-mode', 'set-input-mode',
- 'set-keyboard-coding-system-internal', 'set-keymap-parent',
- 'set-marker', 'set-marker-insertion-type', 'set-match-data',
- 'set-message-beep', 'set-minibuffer-window',
- 'set-mouse-pixel-position', 'set-mouse-position',
- 'set-network-process-option', 'set-output-flow-control',
- 'set-process-buffer', 'set-process-coding-system',
- 'set-process-datagram-address', 'set-process-filter',
- 'set-process-filter-multibyte',
- 'set-process-inherit-coding-system-flag', 'set-process-plist',
- 'set-process-query-on-exit-flag', 'set-process-sentinel',
- 'set-process-window-size', 'set-quit-char',
- 'set-safe-terminal-coding-system-internal', 'set-screen-color',
- 'set-standard-case-table', 'set-syntax-table',
- 'set-terminal-coding-system-internal', 'set-terminal-local-value',
- 'set-terminal-parameter', 'set-text-properties', 'set-time-zone-rule',
- 'set-visited-file-modtime', 'set-window-buffer',
- 'set-window-combination-limit', 'set-window-configuration',
- 'set-window-dedicated-p', 'set-window-display-table',
- 'set-window-fringes', 'set-window-hscroll', 'set-window-margins',
- 'set-window-new-normal', 'set-window-new-pixel',
- 'set-window-new-total', 'set-window-next-buffers',
- 'set-window-parameter', 'set-window-point', 'set-window-prev-buffers',
- 'set-window-redisplay-end-trigger', 'set-window-scroll-bars',
- 'set-window-start', 'set-window-vscroll', 'setcar', 'setcdr',
- 'setplist', 'show-face-resources', 'signal', 'signal-process', 'sin',
- 'single-key-description', 'skip-chars-backward', 'skip-chars-forward',
- 'skip-syntax-backward', 'skip-syntax-forward', 'sleep-for', 'sort',
- 'sort-charsets', 'special-variable-p', 'split-char',
- 'split-window-internal', 'sqrt', 'standard-case-table',
- 'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
- 'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
+ '%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
+ 'Snarf-documentation', 'abort-recursive-edit', 'abs',
+ 'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
+ 'active-minibuffer-window', 'add-face-text-property',
+ 'add-name-to-file', 'add-text-properties', 'all-completions',
+ 'append', 'apply', 'apropos-internal', 'aref', 'arrayp', 'aset',
+ 'ash', 'asin', 'assoc', 'assoc-string', 'assq', 'atan', 'atom',
+ 'autoload', 'autoload-do-load', 'backtrace', 'backtrace--locals',
+ 'backtrace-debug', 'backtrace-eval', 'backtrace-frame',
+ 'backward-char', 'backward-prefix-chars', 'barf-if-buffer-read-only',
+ 'base64-decode-region', 'base64-decode-string',
+ 'base64-encode-region', 'base64-encode-string', 'beginning-of-line',
+ 'bidi-find-overridden-directionality', 'bidi-resolved-levels',
+ 'bitmap-spec-p', 'bobp', 'bolp', 'bool-vector',
+ 'bool-vector-count-consecutive', 'bool-vector-count-population',
+ 'bool-vector-exclusive-or', 'bool-vector-intersection',
+ 'bool-vector-not', 'bool-vector-p', 'bool-vector-set-difference',
+ 'bool-vector-subsetp', 'bool-vector-union', 'boundp',
+ 'buffer-base-buffer', 'buffer-chars-modified-tick',
+ 'buffer-enable-undo', 'buffer-file-name', 'buffer-has-markers-at',
+ 'buffer-list', 'buffer-live-p', 'buffer-local-value',
+ 'buffer-local-variables', 'buffer-modified-p', 'buffer-modified-tick',
+ 'buffer-name', 'buffer-size', 'buffer-string', 'buffer-substring',
+ 'buffer-substring-no-properties', 'buffer-swap-text', 'bufferp',
+ 'bury-buffer-internal', 'byte-code', 'byte-code-function-p',
+ 'byte-to-position', 'byte-to-string', 'byteorder',
+ 'call-interactively', 'call-last-kbd-macro', 'call-process',
+ 'call-process-region', 'cancel-kbd-macro-events', 'capitalize',
+ 'capitalize-region', 'capitalize-word', 'car', 'car-less-than-car',
+ 'car-safe', 'case-table-p', 'category-docstring',
+ 'category-set-mnemonics', 'category-table', 'category-table-p',
+ 'ccl-execute', 'ccl-execute-on-string', 'ccl-program-p', 'cdr',
+ 'cdr-safe', 'ceiling', 'char-after', 'char-before',
+ 'char-category-set', 'char-charset', 'char-equal', 'char-or-string-p',
+ 'char-resolve-modifiers', 'char-syntax', 'char-table-extra-slot',
+ 'char-table-p', 'char-table-parent', 'char-table-range',
+ 'char-table-subtype', 'char-to-string', 'char-width', 'characterp',
+ 'charset-after', 'charset-id-internal', 'charset-plist',
+ 'charset-priority-list', 'charsetp', 'check-coding-system',
+ 'check-coding-systems-region', 'clear-buffer-auto-save-failure',
+ 'clear-charset-maps', 'clear-face-cache', 'clear-font-cache',
+ 'clear-image-cache', 'clear-string', 'clear-this-command-keys',
+ 'close-font', 'clrhash', 'coding-system-aliases',
+ 'coding-system-base', 'coding-system-eol-type', 'coding-system-p',
+ 'coding-system-plist', 'coding-system-priority-list',
+ 'coding-system-put', 'color-distance', 'color-gray-p',
+ 'color-supported-p', 'combine-after-change-execute',
+ 'command-error-default-function', 'command-remapping', 'commandp',
+ 'compare-buffer-substrings', 'compare-strings',
+ 'compare-window-configurations', 'completing-read',
+ 'compose-region-internal', 'compose-string-internal',
+ 'composition-get-gstring', 'compute-motion', 'concat', 'cons',
+ 'consp', 'constrain-to-field', 'continue-process',
+ 'controlling-tty-p', 'coordinates-in-window-p', 'copy-alist',
+ 'copy-category-table', 'copy-file', 'copy-hash-table', 'copy-keymap',
+ 'copy-marker', 'copy-sequence', 'copy-syntax-table', 'copysign',
+ 'cos', 'current-active-maps', 'current-bidi-paragraph-direction',
+ 'current-buffer', 'current-case-table', 'current-column',
+ 'current-global-map', 'current-idle-time', 'current-indentation',
+ 'current-input-mode', 'current-local-map', 'current-message',
+ 'current-minor-mode-maps', 'current-time', 'current-time-string',
+ 'current-time-zone', 'current-window-configuration',
+ 'cygwin-convert-file-name-from-windows',
+ 'cygwin-convert-file-name-to-windows', 'daemon-initialized',
+ 'daemonp', 'dbus--init-bus', 'dbus-get-unique-name',
+ 'dbus-message-internal', 'debug-timer-check', 'declare-equiv-charset',
+ 'decode-big5-char', 'decode-char', 'decode-coding-region',
+ 'decode-coding-string', 'decode-sjis-char', 'decode-time',
+ 'default-boundp', 'default-file-modes', 'default-printer-name',
+ 'default-toplevel-value', 'default-value', 'define-category',
+ 'define-charset-alias', 'define-charset-internal',
+ 'define-coding-system-alias', 'define-coding-system-internal',
+ 'define-fringe-bitmap', 'define-hash-table-test', 'define-key',
+ 'define-prefix-command', 'delete',
+ 'delete-all-overlays', 'delete-and-extract-region', 'delete-char',
+ 'delete-directory-internal', 'delete-field', 'delete-file',
+ 'delete-frame', 'delete-other-windows-internal', 'delete-overlay',
+ 'delete-process', 'delete-region', 'delete-terminal',
+ 'delete-window-internal', 'delq', 'describe-buffer-bindings',
+ 'describe-vector', 'destroy-fringe-bitmap', 'detect-coding-region',
+ 'detect-coding-string', 'ding', 'directory-file-name',
+ 'directory-files', 'directory-files-and-attributes', 'discard-input',
+ 'display-supports-face-attributes-p', 'do-auto-save', 'documentation',
+ 'documentation-property', 'downcase', 'downcase-region',
+ 'downcase-word', 'draw-string', 'dump-colors', 'dump-emacs',
+ 'dump-face', 'dump-frame-glyph-matrix', 'dump-glyph-matrix',
+ 'dump-glyph-row', 'dump-redisplay-history', 'dump-tool-bar-row',
+ 'elt', 'emacs-pid', 'encode-big5-char', 'encode-char',
+ 'encode-coding-region', 'encode-coding-string', 'encode-sjis-char',
+ 'encode-time', 'end-kbd-macro', 'end-of-line', 'eobp', 'eolp', 'eq',
+ 'eql', 'equal', 'equal-including-properties', 'erase-buffer',
+ 'error-message-string', 'eval', 'eval-buffer', 'eval-region',
+ 'event-convert-list', 'execute-kbd-macro', 'exit-recursive-edit',
+ 'exp', 'expand-file-name', 'expt', 'external-debugging-output',
+ 'face-attribute-relative-p', 'face-attributes-as-vector', 'face-font',
+ 'fboundp', 'fceiling', 'fetch-bytecode', 'ffloor',
+ 'field-beginning', 'field-end', 'field-string',
+ 'field-string-no-properties', 'file-accessible-directory-p',
+ 'file-acl', 'file-attributes', 'file-attributes-lessp',
+ 'file-directory-p', 'file-executable-p', 'file-exists-p',
+ 'file-locked-p', 'file-modes', 'file-name-absolute-p',
+ 'file-name-all-completions', 'file-name-as-directory',
+ 'file-name-completion', 'file-name-directory',
+ 'file-name-nondirectory', 'file-newer-than-file-p', 'file-readable-p',
+ 'file-regular-p', 'file-selinux-context', 'file-symlink-p',
+ 'file-system-info', 'file-system-info', 'file-writable-p',
+ 'fillarray', 'find-charset-region', 'find-charset-string',
+ 'find-coding-systems-region-internal', 'find-composition-internal',
+ 'find-file-name-handler', 'find-font', 'find-operation-coding-system',
+ 'float', 'float-time', 'floatp', 'floor', 'fmakunbound',
+ 'following-char', 'font-at', 'font-drive-otf', 'font-face-attributes',
+ 'font-family-list', 'font-get', 'font-get-glyphs',
+ 'font-get-system-font', 'font-get-system-normal-font', 'font-info',
+ 'font-match-p', 'font-otf-alternates', 'font-put',
+ 'font-shape-gstring', 'font-spec', 'font-variation-glyphs',
+ 'font-xlfd-name', 'fontp', 'fontset-font', 'fontset-info',
+ 'fontset-list', 'fontset-list-all', 'force-mode-line-update',
+ 'force-window-update', 'format', 'format-mode-line',
+ 'format-network-address', 'format-time-string', 'forward-char',
+ 'forward-comment', 'forward-line', 'forward-word',
+ 'frame-border-width', 'frame-bottom-divider-width',
+ 'frame-can-run-window-configuration-change-hook', 'frame-char-height',
+ 'frame-char-width', 'frame-face-alist', 'frame-first-window',
+ 'frame-focus', 'frame-font-cache', 'frame-fringe-width', 'frame-list',
+ 'frame-live-p', 'frame-or-buffer-changed-p', 'frame-parameter',
+ 'frame-parameters', 'frame-pixel-height', 'frame-pixel-width',
+ 'frame-pointer-visible-p', 'frame-right-divider-width',
+ 'frame-root-window', 'frame-scroll-bar-height',
+ 'frame-scroll-bar-width', 'frame-selected-window', 'frame-terminal',
+ 'frame-text-cols', 'frame-text-height', 'frame-text-lines',
+ 'frame-text-width', 'frame-total-cols', 'frame-total-lines',
+ 'frame-visible-p', 'framep', 'frexp', 'fringe-bitmaps-at-pos',
+ 'fround', 'fset', 'ftruncate', 'funcall', 'funcall-interactively',
+ 'function-equal', 'functionp', 'gap-position', 'gap-size',
+ 'garbage-collect', 'gc-status', 'generate-new-buffer-name', 'get',
+ 'get-buffer', 'get-buffer-create', 'get-buffer-process',
+ 'get-buffer-window', 'get-byte', 'get-char-property',
+ 'get-char-property-and-overlay', 'get-file-buffer', 'get-file-char',
+ 'get-internal-run-time', 'get-load-suffixes', 'get-pos-property',
+ 'get-process', 'get-screen-color', 'get-text-property',
+ 'get-unicode-property-internal', 'get-unused-category',
+ 'get-unused-iso-final-char', 'getenv-internal', 'gethash',
+ 'gfile-add-watch', 'gfile-rm-watch', 'global-key-binding',
+ 'gnutls-available-p', 'gnutls-boot', 'gnutls-bye', 'gnutls-deinit',
+ 'gnutls-error-fatalp', 'gnutls-error-string', 'gnutls-errorp',
+ 'gnutls-get-initstage', 'gnutls-peer-status',
+ 'gnutls-peer-status-warning-describe', 'goto-char', 'gpm-mouse-start',
+ 'gpm-mouse-stop', 'group-gid', 'group-real-gid',
+ 'handle-save-session', 'handle-switch-frame', 'hash-table-count',
+ 'hash-table-p', 'hash-table-rehash-size',
+ 'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
+ 'hash-table-weakness', 'iconify-frame', 'identity', 'image-flush',
+ 'image-mask-p', 'image-metadata', 'image-size', 'imagemagick-types',
+ 'imagep', 'indent-to', 'indirect-function', 'indirect-variable',
+ 'init-image-library', 'inotify-add-watch', 'inotify-rm-watch',
+ 'input-pending-p', 'insert', 'insert-and-inherit',
+ 'insert-before-markers', 'insert-before-markers-and-inherit',
+ 'insert-buffer-substring', 'insert-byte', 'insert-char',
+ 'insert-file-contents', 'insert-startup-screen', 'int86',
+ 'integer-or-marker-p', 'integerp', 'interactive-form', 'intern',
+ 'intern-soft', 'internal--track-mouse', 'internal-char-font',
+ 'internal-complete-buffer', 'internal-copy-lisp-face',
+ 'internal-default-process-filter',
+ 'internal-default-process-sentinel', 'internal-describe-syntax-value',
+ 'internal-event-symbol-parse-modifiers',
+ 'internal-face-x-get-resource', 'internal-get-lisp-face-attribute',
+ 'internal-lisp-face-attribute-values', 'internal-lisp-face-empty-p',
+ 'internal-lisp-face-equal-p', 'internal-lisp-face-p',
+ 'internal-make-lisp-face', 'internal-make-var-non-special',
+ 'internal-merge-in-global-face',
+ 'internal-set-alternative-font-family-alist',
+ 'internal-set-alternative-font-registry-alist',
+ 'internal-set-font-selection-order',
+ 'internal-set-lisp-face-attribute',
+ 'internal-set-lisp-face-attribute-from-resource',
+ 'internal-show-cursor', 'internal-show-cursor-p', 'interrupt-process',
+ 'invisible-p', 'invocation-directory', 'invocation-name', 'isnan',
+ 'iso-charset', 'key-binding', 'key-description',
+ 'keyboard-coding-system', 'keymap-parent', 'keymap-prompt', 'keymapp',
+ 'keywordp', 'kill-all-local-variables', 'kill-buffer', 'kill-emacs',
+ 'kill-local-variable', 'kill-process', 'last-nonminibuffer-frame',
+ 'lax-plist-get', 'lax-plist-put', 'ldexp', 'length',
+ 'libxml-parse-html-region', 'libxml-parse-xml-region',
+ 'line-beginning-position', 'line-end-position', 'line-pixel-height',
+ 'list', 'list-fonts', 'list-system-processes', 'listp', 'load',
+ 'load-average', 'local-key-binding', 'local-variable-if-set-p',
+ 'local-variable-p', 'locale-info', 'locate-file-internal',
+ 'lock-buffer', 'log', 'logand', 'logb', 'logior', 'lognot', 'logxor',
+ 'looking-at', 'lookup-image', 'lookup-image-map', 'lookup-key',
+ 'lower-frame', 'lsh', 'macroexpand', 'make-bool-vector',
+ 'make-byte-code', 'make-category-set', 'make-category-table',
+ 'make-char', 'make-char-table', 'make-directory-internal',
+ 'make-frame-invisible', 'make-frame-visible', 'make-hash-table',
+ 'make-indirect-buffer', 'make-keymap', 'make-list',
+ 'make-local-variable', 'make-marker', 'make-network-process',
+ 'make-overlay', 'make-serial-process', 'make-sparse-keymap',
+ 'make-string', 'make-symbol', 'make-symbolic-link', 'make-temp-name',
+ 'make-terminal-frame', 'make-variable-buffer-local',
+ 'make-variable-frame-local', 'make-vector', 'makunbound',
+ 'map-char-table', 'map-charset-chars', 'map-keymap',
+ 'map-keymap-internal', 'mapatoms', 'mapc', 'mapcar', 'mapconcat',
+ 'maphash', 'mark-marker', 'marker-buffer', 'marker-insertion-type',
+ 'marker-position', 'markerp', 'match-beginning', 'match-data',
+ 'match-end', 'matching-paren', 'max', 'max-char', 'md5', 'member',
+ 'memory-info', 'memory-limit', 'memory-use-counts', 'memq', 'memql',
+ 'menu-bar-menu-at-x-y', 'menu-or-popup-active-p',
+ 'menu-or-popup-active-p', 'merge-face-attribute', 'message',
+ 'message-box', 'message-or-box', 'min',
+ 'minibuffer-completion-contents', 'minibuffer-contents',
+ 'minibuffer-contents-no-properties', 'minibuffer-depth',
+ 'minibuffer-prompt', 'minibuffer-prompt-end',
+ 'minibuffer-selected-window', 'minibuffer-window', 'minibufferp',
+ 'minor-mode-key-binding', 'mod', 'modify-category-entry',
+ 'modify-frame-parameters', 'modify-syntax-entry',
+ 'mouse-pixel-position', 'mouse-position', 'move-overlay',
+ 'move-point-visually', 'move-to-column', 'move-to-window-line',
+ 'msdos-downcase-filename', 'msdos-long-file-names', 'msdos-memget',
+ 'msdos-memput', 'msdos-mouse-disable', 'msdos-mouse-enable',
+ 'msdos-mouse-init', 'msdos-mouse-p', 'msdos-remember-default-colors',
+ 'msdos-set-keyboard', 'msdos-set-mouse-buttons',
+ 'multibyte-char-to-unibyte', 'multibyte-string-p', 'narrow-to-region',
+ 'natnump', 'nconc', 'network-interface-info',
+ 'network-interface-list', 'new-fontset', 'newline-cache-check',
+ 'next-char-property-change', 'next-frame', 'next-overlay-change',
+ 'next-property-change', 'next-read-file-uses-dialog-p',
+ 'next-single-char-property-change', 'next-single-property-change',
+ 'next-window', 'nlistp', 'nreverse', 'nth', 'nthcdr', 'null',
+ 'number-or-marker-p', 'number-to-string', 'numberp',
+ 'open-dribble-file', 'open-font', 'open-termscript',
+ 'optimize-char-table', 'other-buffer', 'other-window-for-scrolling',
+ 'overlay-buffer', 'overlay-end', 'overlay-get', 'overlay-lists',
+ 'overlay-properties', 'overlay-put', 'overlay-recenter',
+ 'overlay-start', 'overlayp', 'overlays-at', 'overlays-in',
+ 'parse-partial-sexp', 'play-sound-internal', 'plist-get',
+ 'plist-member', 'plist-put', 'point', 'point-marker', 'point-max',
+ 'point-max-marker', 'point-min', 'point-min-marker',
+ 'pos-visible-in-window-p', 'position-bytes', 'posix-looking-at',
+ 'posix-search-backward', 'posix-search-forward', 'posix-string-match',
+ 'posn-at-point', 'posn-at-x-y', 'preceding-char',
+ 'prefix-numeric-value', 'previous-char-property-change',
+ 'previous-frame', 'previous-overlay-change',
+ 'previous-property-change', 'previous-single-char-property-change',
+ 'previous-single-property-change', 'previous-window', 'prin1',
+ 'prin1-to-string', 'princ', 'print', 'process-attributes',
+ 'process-buffer', 'process-coding-system', 'process-command',
+ 'process-connection', 'process-contact', 'process-datagram-address',
+ 'process-exit-status', 'process-filter', 'process-filter-multibyte-p',
+ 'process-id', 'process-inherit-coding-system-flag', 'process-list',
+ 'process-mark', 'process-name', 'process-plist',
+ 'process-query-on-exit-flag', 'process-running-child-p',
+ 'process-send-eof', 'process-send-region', 'process-send-string',
+ 'process-sentinel', 'process-status', 'process-tty-name',
+ 'process-type', 'processp', 'profiler-cpu-log',
+ 'profiler-cpu-running-p', 'profiler-cpu-start', 'profiler-cpu-stop',
+ 'profiler-memory-log', 'profiler-memory-running-p',
+ 'profiler-memory-start', 'profiler-memory-stop', 'propertize',
+ 'purecopy', 'put', 'put-text-property',
+ 'put-unicode-property-internal', 'puthash', 'query-font',
+ 'query-fontset', 'quit-process', 'raise-frame', 'random', 'rassoc',
+ 'rassq', 're-search-backward', 're-search-forward', 'read',
+ 'read-buffer', 'read-char', 'read-char-exclusive',
+ 'read-coding-system', 'read-command', 'read-event',
+ 'read-from-minibuffer', 'read-from-string', 'read-function',
+ 'read-key-sequence', 'read-key-sequence-vector',
+ 'read-no-blanks-input', 'read-non-nil-coding-system', 'read-string',
+ 'read-variable', 'recent-auto-save-p', 'recent-doskeys',
+ 'recent-keys', 'recenter', 'recursion-depth', 'recursive-edit',
+ 'redirect-debugging-output', 'redirect-frame-focus', 'redisplay',
+ 'redraw-display', 'redraw-frame', 'regexp-quote', 'region-beginning',
+ 'region-end', 'register-ccl-program', 'register-code-conversion-map',
+ 'remhash', 'remove-list-of-text-properties', 'remove-text-properties',
+ 'rename-buffer', 'rename-file', 'replace-match',
+ 'reset-this-command-lengths', 'resize-mini-window-internal',
+ 'restore-buffer-modified-p', 'resume-tty', 'reverse', 'round',
+ 'run-hook-with-args', 'run-hook-with-args-until-failure',
+ 'run-hook-with-args-until-success', 'run-hook-wrapped', 'run-hooks',
+ 'run-window-configuration-change-hook', 'run-window-scroll-functions',
+ 'safe-length', 'scan-lists', 'scan-sexps', 'scroll-down',
+ 'scroll-left', 'scroll-other-window', 'scroll-right', 'scroll-up',
+ 'search-backward', 'search-forward', 'secure-hash', 'select-frame',
+ 'select-window', 'selected-frame', 'selected-window',
+ 'self-insert-command', 'send-string-to-terminal', 'sequencep',
+ 'serial-process-configure', 'set', 'set-buffer',
+ 'set-buffer-auto-saved', 'set-buffer-major-mode',
+ 'set-buffer-modified-p', 'set-buffer-multibyte', 'set-case-table',
+ 'set-category-table', 'set-char-table-extra-slot',
+ 'set-char-table-parent', 'set-char-table-range', 'set-charset-plist',
+ 'set-charset-priority', 'set-coding-system-priority',
+ 'set-cursor-size', 'set-default', 'set-default-file-modes',
+ 'set-default-toplevel-value', 'set-file-acl', 'set-file-modes',
+ 'set-file-selinux-context', 'set-file-times', 'set-fontset-font',
+ 'set-frame-height', 'set-frame-position', 'set-frame-selected-window',
+ 'set-frame-size', 'set-frame-width', 'set-fringe-bitmap-face',
+ 'set-input-interrupt-mode', 'set-input-meta-mode', 'set-input-mode',
+ 'set-keyboard-coding-system-internal', 'set-keymap-parent',
+ 'set-marker', 'set-marker-insertion-type', 'set-match-data',
+ 'set-message-beep', 'set-minibuffer-window',
+ 'set-mouse-pixel-position', 'set-mouse-position',
+ 'set-network-process-option', 'set-output-flow-control',
+ 'set-process-buffer', 'set-process-coding-system',
+ 'set-process-datagram-address', 'set-process-filter',
+ 'set-process-filter-multibyte',
+ 'set-process-inherit-coding-system-flag', 'set-process-plist',
+ 'set-process-query-on-exit-flag', 'set-process-sentinel',
+ 'set-process-window-size', 'set-quit-char',
+ 'set-safe-terminal-coding-system-internal', 'set-screen-color',
+ 'set-standard-case-table', 'set-syntax-table',
+ 'set-terminal-coding-system-internal', 'set-terminal-local-value',
+ 'set-terminal-parameter', 'set-text-properties', 'set-time-zone-rule',
+ 'set-visited-file-modtime', 'set-window-buffer',
+ 'set-window-combination-limit', 'set-window-configuration',
+ 'set-window-dedicated-p', 'set-window-display-table',
+ 'set-window-fringes', 'set-window-hscroll', 'set-window-margins',
+ 'set-window-new-normal', 'set-window-new-pixel',
+ 'set-window-new-total', 'set-window-next-buffers',
+ 'set-window-parameter', 'set-window-point', 'set-window-prev-buffers',
+ 'set-window-redisplay-end-trigger', 'set-window-scroll-bars',
+ 'set-window-start', 'set-window-vscroll', 'setcar', 'setcdr',
+ 'setplist', 'show-face-resources', 'signal', 'signal-process', 'sin',
+ 'single-key-description', 'skip-chars-backward', 'skip-chars-forward',
+ 'skip-syntax-backward', 'skip-syntax-forward', 'sleep-for', 'sort',
+ 'sort-charsets', 'special-variable-p', 'split-char',
+ 'split-window-internal', 'sqrt', 'standard-case-table',
+ 'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
+ 'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
'string=', 'string<', 'string>', 'string-as-multibyte',
'string-as-unibyte', 'string-bytes', 'string-collate-equalp',
'string-collate-lessp', 'string-equal', 'string-greaterp',
- 'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
- 'string-match', 'string-to-char', 'string-to-multibyte',
- 'string-to-number', 'string-to-syntax', 'string-to-unibyte',
- 'string-width', 'stringp', 'subr-name', 'subrp',
- 'subst-char-in-region', 'substitute-command-keys',
- 'substitute-in-file-name', 'substring', 'substring-no-properties',
- 'suspend-emacs', 'suspend-tty', 'suspicious-object', 'sxhash',
- 'symbol-function', 'symbol-name', 'symbol-plist', 'symbol-value',
- 'symbolp', 'syntax-table', 'syntax-table-p', 'system-groups',
- 'system-move-file-to-trash', 'system-name', 'system-users', 'tan',
- 'terminal-coding-system', 'terminal-list', 'terminal-live-p',
- 'terminal-local-value', 'terminal-name', 'terminal-parameter',
- 'terminal-parameters', 'terpri', 'test-completion',
- 'text-char-description', 'text-properties-at', 'text-property-any',
- 'text-property-not-all', 'this-command-keys',
- 'this-command-keys-vector', 'this-single-command-keys',
- 'this-single-command-raw-keys', 'time-add', 'time-less-p',
- 'time-subtract', 'tool-bar-get-system-style', 'tool-bar-height',
- 'tool-bar-pixel-width', 'top-level', 'trace-redisplay',
- 'trace-to-stderr', 'translate-region-internal', 'transpose-regions',
- 'truncate', 'try-completion', 'tty-display-color-cells',
- 'tty-display-color-p', 'tty-no-underline',
- 'tty-suppress-bold-inverse-default-colors', 'tty-top-frame',
- 'tty-type', 'type-of', 'undo-boundary', 'unencodable-char-position',
- 'unhandled-file-name-directory', 'unibyte-char-to-multibyte',
- 'unibyte-string', 'unicode-property-table-internal', 'unify-charset',
- 'unintern', 'unix-sync', 'unlock-buffer', 'upcase', 'upcase-initials',
- 'upcase-initials-region', 'upcase-region', 'upcase-word',
- 'use-global-map', 'use-local-map', 'user-full-name',
- 'user-login-name', 'user-real-login-name', 'user-real-uid',
- 'user-uid', 'variable-binding-locus', 'vconcat', 'vector',
- 'vector-or-char-table-p', 'vectorp', 'verify-visited-file-modtime',
- 'vertical-motion', 'visible-frame-list', 'visited-file-modtime',
- 'w16-get-clipboard-data', 'w16-selection-exists-p',
- 'w16-set-clipboard-data', 'w32-battery-status',
- 'w32-default-color-map', 'w32-define-rgb-color',
- 'w32-display-monitor-attributes-list', 'w32-frame-menu-bar-size',
- 'w32-frame-rect', 'w32-get-clipboard-data',
- 'w32-get-codepage-charset', 'w32-get-console-codepage',
- 'w32-get-console-output-codepage', 'w32-get-current-locale-id',
- 'w32-get-default-locale-id', 'w32-get-keyboard-layout',
- 'w32-get-locale-info', 'w32-get-valid-codepages',
- 'w32-get-valid-keyboard-layouts', 'w32-get-valid-locale-ids',
- 'w32-has-winsock', 'w32-long-file-name', 'w32-reconstruct-hot-key',
- 'w32-register-hot-key', 'w32-registered-hot-keys',
- 'w32-selection-exists-p', 'w32-send-sys-command',
- 'w32-set-clipboard-data', 'w32-set-console-codepage',
- 'w32-set-console-output-codepage', 'w32-set-current-locale',
- 'w32-set-keyboard-layout', 'w32-set-process-priority',
- 'w32-shell-execute', 'w32-short-file-name', 'w32-toggle-lock-key',
- 'w32-unload-winsock', 'w32-unregister-hot-key', 'w32-window-exists-p',
- 'w32notify-add-watch', 'w32notify-rm-watch',
- 'waiting-for-user-input-p', 'where-is-internal', 'widen',
- 'widget-apply', 'widget-get', 'widget-put',
- 'window-absolute-pixel-edges', 'window-at', 'window-body-height',
- 'window-body-width', 'window-bottom-divider-width', 'window-buffer',
- 'window-combination-limit', 'window-configuration-frame',
- 'window-configuration-p', 'window-dedicated-p',
- 'window-display-table', 'window-edges', 'window-end', 'window-frame',
- 'window-fringes', 'window-header-line-height', 'window-hscroll',
- 'window-inside-absolute-pixel-edges', 'window-inside-edges',
- 'window-inside-pixel-edges', 'window-left-child',
- 'window-left-column', 'window-line-height', 'window-list',
- 'window-list-1', 'window-live-p', 'window-margins',
- 'window-minibuffer-p', 'window-mode-line-height', 'window-new-normal',
- 'window-new-pixel', 'window-new-total', 'window-next-buffers',
- 'window-next-sibling', 'window-normal-size', 'window-old-point',
- 'window-parameter', 'window-parameters', 'window-parent',
- 'window-pixel-edges', 'window-pixel-height', 'window-pixel-left',
- 'window-pixel-top', 'window-pixel-width', 'window-point',
- 'window-prev-buffers', 'window-prev-sibling',
- 'window-redisplay-end-trigger', 'window-resize-apply',
- 'window-resize-apply-total', 'window-right-divider-width',
- 'window-scroll-bar-height', 'window-scroll-bar-width',
- 'window-scroll-bars', 'window-start', 'window-system',
- 'window-text-height', 'window-text-pixel-size', 'window-text-width',
- 'window-top-child', 'window-top-line', 'window-total-height',
- 'window-total-width', 'window-use-time', 'window-valid-p',
- 'window-vscroll', 'windowp', 'write-char', 'write-region',
- 'x-backspace-delete-keys-p', 'x-change-window-property',
- 'x-change-window-property', 'x-close-connection',
- 'x-close-connection', 'x-create-frame', 'x-create-frame',
- 'x-delete-window-property', 'x-delete-window-property',
- 'x-disown-selection-internal', 'x-display-backing-store',
- 'x-display-backing-store', 'x-display-color-cells',
- 'x-display-color-cells', 'x-display-grayscale-p',
- 'x-display-grayscale-p', 'x-display-list', 'x-display-list',
- 'x-display-mm-height', 'x-display-mm-height', 'x-display-mm-width',
- 'x-display-mm-width', 'x-display-monitor-attributes-list',
- 'x-display-pixel-height', 'x-display-pixel-height',
- 'x-display-pixel-width', 'x-display-pixel-width', 'x-display-planes',
- 'x-display-planes', 'x-display-save-under', 'x-display-save-under',
- 'x-display-screens', 'x-display-screens', 'x-display-visual-class',
- 'x-display-visual-class', 'x-family-fonts', 'x-file-dialog',
- 'x-file-dialog', 'x-file-dialog', 'x-focus-frame', 'x-frame-geometry',
- 'x-frame-geometry', 'x-get-atom-name', 'x-get-resource',
- 'x-get-selection-internal', 'x-hide-tip', 'x-hide-tip',
- 'x-list-fonts', 'x-load-color-file', 'x-menu-bar-open-internal',
- 'x-menu-bar-open-internal', 'x-open-connection', 'x-open-connection',
- 'x-own-selection-internal', 'x-parse-geometry', 'x-popup-dialog',
- 'x-popup-menu', 'x-register-dnd-atom', 'x-select-font',
- 'x-select-font', 'x-selection-exists-p', 'x-selection-owner-p',
- 'x-send-client-message', 'x-server-max-request-size',
- 'x-server-max-request-size', 'x-server-vendor', 'x-server-vendor',
- 'x-server-version', 'x-server-version', 'x-show-tip', 'x-show-tip',
- 'x-synchronize', 'x-synchronize', 'x-uses-old-gtk-dialog',
- 'x-window-property', 'x-window-property', 'x-wm-set-size-hint',
- 'xw-color-defined-p', 'xw-color-defined-p', 'xw-color-values',
- 'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
- 'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
- 'forward-point',
+ 'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
+ 'string-match', 'string-to-char', 'string-to-multibyte',
+ 'string-to-number', 'string-to-syntax', 'string-to-unibyte',
+ 'string-width', 'stringp', 'subr-name', 'subrp',
+ 'subst-char-in-region', 'substitute-command-keys',
+ 'substitute-in-file-name', 'substring', 'substring-no-properties',
+ 'suspend-emacs', 'suspend-tty', 'suspicious-object', 'sxhash',
+ 'symbol-function', 'symbol-name', 'symbol-plist', 'symbol-value',
+ 'symbolp', 'syntax-table', 'syntax-table-p', 'system-groups',
+ 'system-move-file-to-trash', 'system-name', 'system-users', 'tan',
+ 'terminal-coding-system', 'terminal-list', 'terminal-live-p',
+ 'terminal-local-value', 'terminal-name', 'terminal-parameter',
+ 'terminal-parameters', 'terpri', 'test-completion',
+ 'text-char-description', 'text-properties-at', 'text-property-any',
+ 'text-property-not-all', 'this-command-keys',
+ 'this-command-keys-vector', 'this-single-command-keys',
+ 'this-single-command-raw-keys', 'time-add', 'time-less-p',
+ 'time-subtract', 'tool-bar-get-system-style', 'tool-bar-height',
+ 'tool-bar-pixel-width', 'top-level', 'trace-redisplay',
+ 'trace-to-stderr', 'translate-region-internal', 'transpose-regions',
+ 'truncate', 'try-completion', 'tty-display-color-cells',
+ 'tty-display-color-p', 'tty-no-underline',
+ 'tty-suppress-bold-inverse-default-colors', 'tty-top-frame',
+ 'tty-type', 'type-of', 'undo-boundary', 'unencodable-char-position',
+ 'unhandled-file-name-directory', 'unibyte-char-to-multibyte',
+ 'unibyte-string', 'unicode-property-table-internal', 'unify-charset',
+ 'unintern', 'unix-sync', 'unlock-buffer', 'upcase', 'upcase-initials',
+ 'upcase-initials-region', 'upcase-region', 'upcase-word',
+ 'use-global-map', 'use-local-map', 'user-full-name',
+ 'user-login-name', 'user-real-login-name', 'user-real-uid',
+ 'user-uid', 'variable-binding-locus', 'vconcat', 'vector',
+ 'vector-or-char-table-p', 'vectorp', 'verify-visited-file-modtime',
+ 'vertical-motion', 'visible-frame-list', 'visited-file-modtime',
+ 'w16-get-clipboard-data', 'w16-selection-exists-p',
+ 'w16-set-clipboard-data', 'w32-battery-status',
+ 'w32-default-color-map', 'w32-define-rgb-color',
+ 'w32-display-monitor-attributes-list', 'w32-frame-menu-bar-size',
+ 'w32-frame-rect', 'w32-get-clipboard-data',
+ 'w32-get-codepage-charset', 'w32-get-console-codepage',
+ 'w32-get-console-output-codepage', 'w32-get-current-locale-id',
+ 'w32-get-default-locale-id', 'w32-get-keyboard-layout',
+ 'w32-get-locale-info', 'w32-get-valid-codepages',
+ 'w32-get-valid-keyboard-layouts', 'w32-get-valid-locale-ids',
+ 'w32-has-winsock', 'w32-long-file-name', 'w32-reconstruct-hot-key',
+ 'w32-register-hot-key', 'w32-registered-hot-keys',
+ 'w32-selection-exists-p', 'w32-send-sys-command',
+ 'w32-set-clipboard-data', 'w32-set-console-codepage',
+ 'w32-set-console-output-codepage', 'w32-set-current-locale',
+ 'w32-set-keyboard-layout', 'w32-set-process-priority',
+ 'w32-shell-execute', 'w32-short-file-name', 'w32-toggle-lock-key',
+ 'w32-unload-winsock', 'w32-unregister-hot-key', 'w32-window-exists-p',
+ 'w32notify-add-watch', 'w32notify-rm-watch',
+ 'waiting-for-user-input-p', 'where-is-internal', 'widen',
+ 'widget-apply', 'widget-get', 'widget-put',
+ 'window-absolute-pixel-edges', 'window-at', 'window-body-height',
+ 'window-body-width', 'window-bottom-divider-width', 'window-buffer',
+ 'window-combination-limit', 'window-configuration-frame',
+ 'window-configuration-p', 'window-dedicated-p',
+ 'window-display-table', 'window-edges', 'window-end', 'window-frame',
+ 'window-fringes', 'window-header-line-height', 'window-hscroll',
+ 'window-inside-absolute-pixel-edges', 'window-inside-edges',
+ 'window-inside-pixel-edges', 'window-left-child',
+ 'window-left-column', 'window-line-height', 'window-list',
+ 'window-list-1', 'window-live-p', 'window-margins',
+ 'window-minibuffer-p', 'window-mode-line-height', 'window-new-normal',
+ 'window-new-pixel', 'window-new-total', 'window-next-buffers',
+ 'window-next-sibling', 'window-normal-size', 'window-old-point',
+ 'window-parameter', 'window-parameters', 'window-parent',
+ 'window-pixel-edges', 'window-pixel-height', 'window-pixel-left',
+ 'window-pixel-top', 'window-pixel-width', 'window-point',
+ 'window-prev-buffers', 'window-prev-sibling',
+ 'window-redisplay-end-trigger', 'window-resize-apply',
+ 'window-resize-apply-total', 'window-right-divider-width',
+ 'window-scroll-bar-height', 'window-scroll-bar-width',
+ 'window-scroll-bars', 'window-start', 'window-system',
+ 'window-text-height', 'window-text-pixel-size', 'window-text-width',
+ 'window-top-child', 'window-top-line', 'window-total-height',
+ 'window-total-width', 'window-use-time', 'window-valid-p',
+ 'window-vscroll', 'windowp', 'write-char', 'write-region',
+ 'x-backspace-delete-keys-p', 'x-change-window-property',
+ 'x-change-window-property', 'x-close-connection',
+ 'x-close-connection', 'x-create-frame', 'x-create-frame',
+ 'x-delete-window-property', 'x-delete-window-property',
+ 'x-disown-selection-internal', 'x-display-backing-store',
+ 'x-display-backing-store', 'x-display-color-cells',
+ 'x-display-color-cells', 'x-display-grayscale-p',
+ 'x-display-grayscale-p', 'x-display-list', 'x-display-list',
+ 'x-display-mm-height', 'x-display-mm-height', 'x-display-mm-width',
+ 'x-display-mm-width', 'x-display-monitor-attributes-list',
+ 'x-display-pixel-height', 'x-display-pixel-height',
+ 'x-display-pixel-width', 'x-display-pixel-width', 'x-display-planes',
+ 'x-display-planes', 'x-display-save-under', 'x-display-save-under',
+ 'x-display-screens', 'x-display-screens', 'x-display-visual-class',
+ 'x-display-visual-class', 'x-family-fonts', 'x-file-dialog',
+ 'x-file-dialog', 'x-file-dialog', 'x-focus-frame', 'x-frame-geometry',
+ 'x-frame-geometry', 'x-get-atom-name', 'x-get-resource',
+ 'x-get-selection-internal', 'x-hide-tip', 'x-hide-tip',
+ 'x-list-fonts', 'x-load-color-file', 'x-menu-bar-open-internal',
+ 'x-menu-bar-open-internal', 'x-open-connection', 'x-open-connection',
+ 'x-own-selection-internal', 'x-parse-geometry', 'x-popup-dialog',
+ 'x-popup-menu', 'x-register-dnd-atom', 'x-select-font',
+ 'x-select-font', 'x-selection-exists-p', 'x-selection-owner-p',
+ 'x-send-client-message', 'x-server-max-request-size',
+ 'x-server-max-request-size', 'x-server-vendor', 'x-server-vendor',
+ 'x-server-version', 'x-server-version', 'x-show-tip', 'x-show-tip',
+ 'x-synchronize', 'x-synchronize', 'x-uses-old-gtk-dialog',
+ 'x-window-property', 'x-window-property', 'x-wm-set-size-hint',
+ 'xw-color-defined-p', 'xw-color-defined-p', 'xw-color-values',
+ 'xw-color-values', 'xw-display-color-p', 'xw-display-color-p',
+ 'yes-or-no-p', 'zlib-available-p', 'zlib-decompress-region',
+ 'forward-point',
}
-
+
builtin_function_highlighted = {
- 'defvaralias', 'provide', 'require',
- 'with-no-warnings', 'define-widget', 'with-electric-help',
- 'throw', 'defalias', 'featurep'
+ 'defvaralias', 'provide', 'require',
+ 'with-no-warnings', 'define-widget', 'with-electric-help',
+ 'throw', 'defalias', 'featurep'
}
-
+
lambda_list_keywords = {
- '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
- '&rest', '&whole',
+ '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
+ '&rest', '&whole',
}
-
+
error_keywords = {
- 'cl-assert', 'cl-check-type', 'error', 'signal',
- 'user-error', 'warn',
+ 'cl-assert', 'cl-check-type', 'error', 'signal',
+ 'user-error', 'warn',
}
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in EmacsLispLexer.builtin_function:
- yield index, Name.Function, value
- continue
- if value in EmacsLispLexer.special_forms:
- yield index, Keyword, value
- continue
- if value in EmacsLispLexer.error_keywords:
- yield index, Name.Exception, value
- continue
- if value in EmacsLispLexer.builtin_function_highlighted:
- yield index, Name.Builtin, value
- continue
- if value in EmacsLispLexer.macros:
- yield index, Name.Builtin, value
- continue
- if value in EmacsLispLexer.lambda_list_keywords:
- yield index, Keyword.Pseudo, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- default('body'),
- ],
- 'body': [
- # whitespace
- (r'\s+', Text),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # strings and characters
- (r'"', String, 'string'),
- (r'\?([^\\]|\\.)', String.Char),
- # quoting
- (r":" + symbol, Name.Builtin),
- (r"::" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
- terminated, Number.Float),
-
- # vectors
- (r'\[|\]', Punctuation),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read syntax for char tables
- (r'#\^\^?', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
-
- # octal rational
- (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.|:)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Variable:
+ if value in EmacsLispLexer.builtin_function:
+ yield index, Name.Function, value
+ continue
+ if value in EmacsLispLexer.special_forms:
+ yield index, Keyword, value
+ continue
+ if value in EmacsLispLexer.error_keywords:
+ yield index, Name.Exception, value
+ continue
+ if value in EmacsLispLexer.builtin_function_highlighted:
+ yield index, Name.Builtin, value
+ continue
+ if value in EmacsLispLexer.macros:
+ yield index, Name.Builtin, value
+ continue
+ if value in EmacsLispLexer.lambda_list_keywords:
+ yield index, Keyword.Pseudo, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ default('body'),
+ ],
+ 'body': [
+ # whitespace
+ (r'\s+', Text),
+
+ # single-line comment
+ (r';.*$', Comment.Single),
+
+ # strings and characters
+ (r'"', String, 'string'),
+ (r'\?([^\\]|\\.)', String.Char),
+ # quoting
+ (r":" + symbol, Name.Builtin),
+ (r"::" + symbol, String.Symbol),
+ (r"'" + symbol, String.Symbol),
+ (r"'", Operator),
+ (r"`", Operator),
+
+ # decimal numbers
+ (r'[-+]?\d+\.?' + terminated, Number.Integer),
+ (r'[-+]?\d+/\d+' + terminated, Number),
+ (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
+ terminated, Number.Float),
+
+ # vectors
+ (r'\[|\]', Punctuation),
+
+ # uninterned symbol
+ (r'#:' + symbol, String.Symbol),
+
+ # read syntax for char tables
+ (r'#\^\^?', Operator),
+
+ # function shorthand
+ (r'#\'', Name.Function),
+
+ # binary rational
+ (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
+
+ # octal rational
+ (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
+
+ # hex rational
+ (r'#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?', Number.Hex),
+
+ # radix rational
+ (r'#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?', Number),
+
+ # reference
+ (r'#\d+=', Operator),
+ (r'#\d+#', Operator),
+
+ # special operators that should have been parsed already
+ (r'(,@|,|\.|:)', Operator),
+
+ # special constants
+ (r'(t|nil)' + terminated, Name.Constant),
+
+ # functions and variables
(r'\*' + symbol + r'\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'#\(', Operator, 'body'),
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- 'string': [
- (r'[^"\\`]+', String),
- (r'`%s\'' % symbol, String.Symbol),
- (r'`', String),
- (r'\\.', String),
- (r'\\\n', String),
- (r'"', String, '#pop'),
- ],
- }
-
-
-class ShenLexer(RegexLexer):
- """
- Lexer for `Shen <http://shenlanguage.org/>`_ source code.
-
- .. versionadded:: 2.1
- """
- name = 'Shen'
- aliases = ['shen']
- filenames = ['*.shen']
- mimetypes = ['text/x-shen', 'application/x-shen']
-
+ (symbol, Name.Variable),
+
+ # parentheses
+ (r'#\(', Operator, 'body'),
+ (r'\(', Punctuation, 'body'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'[^"\\`]+', String),
+ (r'`%s\'' % symbol, String.Symbol),
+ (r'`', String),
+ (r'\\.', String),
+ (r'\\\n', String),
+ (r'"', String, '#pop'),
+ ],
+ }
+
+
+class ShenLexer(RegexLexer):
+ """
+ Lexer for `Shen <http://shenlanguage.org/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Shen'
+ aliases = ['shen']
+ filenames = ['*.shen']
+ mimetypes = ['text/x-shen', 'application/x-shen']
+
DECLARATIONS = (
'datatype', 'define', 'defmacro', 'defprolog', 'defcc',
'synonyms', 'declare', 'package', 'type', 'function',
)
-
+
SPECIAL_FORMS = (
'lambda', 'get', 'let', 'if', 'cases', 'cond', 'put', 'time', 'freeze',
'value', 'load', '$', 'protect', 'or', 'and', 'not', 'do', 'output',
'prolog?', 'trap-error', 'error', 'make-string', '/.', 'set', '@p',
'@s', '@v',
)
-
+
BUILTINS = (
'==', '=', '*', '+', '-', '/', '<', '>', '>=', '<=', '<-address',
'<-vector', 'abort', 'absvector', 'absvector?', 'address->', 'adjoin',
@@ -2248,197 +2248,197 @@ class ShenLexer(RegexLexer):
'verified', 'version', 'warn', 'when', 'write-byte', 'write-to-file',
'y-or-n?',
)
-
+
BUILTINS_ANYWHERE = ('where', 'skip', '>>', '_', '!', '<e>', '<!>')
-
+
MAPPINGS = {s: Keyword for s in DECLARATIONS}
- MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
- MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
-
+ MAPPINGS.update((s, Name.Builtin) for s in BUILTINS)
+ MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
+
valid_symbol_chars = r'[\w!$%*+,<=>?/.\'@&#:-]'
- valid_name = '%s+' % valid_symbol_chars
- symbol_name = r'[a-z!$%%*+,<=>?/.\'@&#_-]%s*' % valid_symbol_chars
- variable = r'[A-Z]%s*' % valid_symbol_chars
-
- tokens = {
- 'string': [
- (r'"', String, '#pop'),
- (r'c#\d{1,3};', String.Escape),
- (r'~[ARS%]', String.Interpol),
- (r'(?s).', String),
- ],
-
- 'root': [
- (r'(?s)\\\*.*?\*\\', Comment.Multiline), # \* ... *\
- (r'\\\\.*', Comment.Single), # \\ ...
- (r'\s+', Text),
- (r'_{5,}', Punctuation),
- (r'={5,}', Punctuation),
- (r'(;|:=|\||--?>|<--?)', Punctuation),
- (r'(:-|:|\{|\})', Literal),
- (r'[+-]*\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'[+-]*\d+', Number.Integer),
- (r'"', String, 'string'),
- (variable, Name.Variable),
- (r'(true|false|<>|\[\])', Keyword.Pseudo),
- (symbol_name, Literal),
- (r'(\[|\]|\(|\))', Punctuation),
- ],
- }
-
- def get_tokens_unprocessed(self, text):
- tokens = RegexLexer.get_tokens_unprocessed(self, text)
- tokens = self._process_symbols(tokens)
- tokens = self._process_declarations(tokens)
- return tokens
-
- def _relevant(self, token):
- return token not in (Text, Comment.Single, Comment.Multiline)
-
- def _process_declarations(self, tokens):
- opening_paren = False
- for index, token, value in tokens:
- yield index, token, value
- if self._relevant(token):
- if opening_paren and token == Keyword and value in self.DECLARATIONS:
- declaration = value
+ valid_name = '%s+' % valid_symbol_chars
+ symbol_name = r'[a-z!$%%*+,<=>?/.\'@&#_-]%s*' % valid_symbol_chars
+ variable = r'[A-Z]%s*' % valid_symbol_chars
+
+ tokens = {
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'c#\d{1,3};', String.Escape),
+ (r'~[ARS%]', String.Interpol),
+ (r'(?s).', String),
+ ],
+
+ 'root': [
+ (r'(?s)\\\*.*?\*\\', Comment.Multiline), # \* ... *\
+ (r'\\\\.*', Comment.Single), # \\ ...
+ (r'\s+', Text),
+ (r'_{5,}', Punctuation),
+ (r'={5,}', Punctuation),
+ (r'(;|:=|\||--?>|<--?)', Punctuation),
+ (r'(:-|:|\{|\})', Literal),
+ (r'[+-]*\d*\.\d+(e[+-]?\d+)?', Number.Float),
+ (r'[+-]*\d+', Number.Integer),
+ (r'"', String, 'string'),
+ (variable, Name.Variable),
+ (r'(true|false|<>|\[\])', Keyword.Pseudo),
+ (symbol_name, Literal),
+ (r'(\[|\]|\(|\))', Punctuation),
+ ],
+ }
+
+ def get_tokens_unprocessed(self, text):
+ tokens = RegexLexer.get_tokens_unprocessed(self, text)
+ tokens = self._process_symbols(tokens)
+ tokens = self._process_declarations(tokens)
+ return tokens
+
+ def _relevant(self, token):
+ return token not in (Text, Comment.Single, Comment.Multiline)
+
+ def _process_declarations(self, tokens):
+ opening_paren = False
+ for index, token, value in tokens:
+ yield index, token, value
+ if self._relevant(token):
+ if opening_paren and token == Keyword and value in self.DECLARATIONS:
+ declaration = value
yield from self._process_declaration(declaration, tokens)
- opening_paren = value == '(' and token == Punctuation
-
- def _process_symbols(self, tokens):
- opening_paren = False
- for index, token, value in tokens:
- if opening_paren and token in (Literal, Name.Variable):
- token = self.MAPPINGS.get(value, Name.Function)
- elif token == Literal and value in self.BUILTINS_ANYWHERE:
- token = Name.Builtin
- opening_paren = value == '(' and token == Punctuation
- yield index, token, value
-
- def _process_declaration(self, declaration, tokens):
- for index, token, value in tokens:
- if self._relevant(token):
- break
- yield index, token, value
-
- if declaration == 'datatype':
- prev_was_colon = False
- token = Keyword.Type if token == Literal else token
- yield index, token, value
- for index, token, value in tokens:
- if prev_was_colon and token == Literal:
- token = Keyword.Type
- yield index, token, value
- if self._relevant(token):
- prev_was_colon = token == Literal and value == ':'
- elif declaration == 'package':
- token = Name.Namespace if token == Literal else token
- yield index, token, value
- elif declaration == 'define':
- token = Name.Function if token == Literal else token
- yield index, token, value
- for index, token, value in tokens:
- if self._relevant(token):
- break
- yield index, token, value
- if value == '{' and token == Literal:
- yield index, Punctuation, value
- for index, token, value in self._process_signature(tokens):
- yield index, token, value
- else:
- yield index, token, value
- else:
- token = Name.Function if token == Literal else token
- yield index, token, value
-
+ opening_paren = value == '(' and token == Punctuation
+
+ def _process_symbols(self, tokens):
+ opening_paren = False
+ for index, token, value in tokens:
+ if opening_paren and token in (Literal, Name.Variable):
+ token = self.MAPPINGS.get(value, Name.Function)
+ elif token == Literal and value in self.BUILTINS_ANYWHERE:
+ token = Name.Builtin
+ opening_paren = value == '(' and token == Punctuation
+ yield index, token, value
+
+ def _process_declaration(self, declaration, tokens):
+ for index, token, value in tokens:
+ if self._relevant(token):
+ break
+ yield index, token, value
+
+ if declaration == 'datatype':
+ prev_was_colon = False
+ token = Keyword.Type if token == Literal else token
+ yield index, token, value
+ for index, token, value in tokens:
+ if prev_was_colon and token == Literal:
+ token = Keyword.Type
+ yield index, token, value
+ if self._relevant(token):
+ prev_was_colon = token == Literal and value == ':'
+ elif declaration == 'package':
+ token = Name.Namespace if token == Literal else token
+ yield index, token, value
+ elif declaration == 'define':
+ token = Name.Function if token == Literal else token
+ yield index, token, value
+ for index, token, value in tokens:
+ if self._relevant(token):
+ break
+ yield index, token, value
+ if value == '{' and token == Literal:
+ yield index, Punctuation, value
+ for index, token, value in self._process_signature(tokens):
+ yield index, token, value
+ else:
+ yield index, token, value
+ else:
+ token = Name.Function if token == Literal else token
+ yield index, token, value
+
return
-
- def _process_signature(self, tokens):
- for index, token, value in tokens:
- if token == Literal and value == '}':
- yield index, Punctuation, value
+
+ def _process_signature(self, tokens):
+ for index, token, value in tokens:
+ if token == Literal and value == '}':
+ yield index, Punctuation, value
return
- elif token in (Literal, Name.Function):
- token = Name.Variable if value.istitle() else Keyword.Type
- yield index, token, value
-
-
+ elif token in (Literal, Name.Function):
+ token = Name.Variable if value.istitle() else Keyword.Type
+ yield index, token, value
+
+
class CPSALexer(RegexLexer):
- """
- A CPSA lexer based on the CPSA language as of version 2.2.12
-
- .. versionadded:: 2.1
- """
- name = 'CPSA'
- aliases = ['cpsa']
- filenames = ['*.cpsa']
- mimetypes = []
-
- # list of known keywords and builtins taken form vim 6.4 scheme.vim
- # syntax file.
- _keywords = (
- 'herald', 'vars', 'defmacro', 'include', 'defprotocol', 'defrole',
- 'defskeleton', 'defstrand', 'deflistener', 'non-orig', 'uniq-orig',
- 'pen-non-orig', 'precedes', 'trace', 'send', 'recv', 'name', 'text',
- 'skey', 'akey', 'data', 'mesg',
- )
- _builtins = (
- 'cat', 'enc', 'hash', 'privk', 'pubk', 'invk', 'ltk', 'gen', 'exp',
- )
-
- # valid names for identifiers
- # well, names can only not consist fully of numbers
- # but this should be good enough for now
+ """
+ A CPSA lexer based on the CPSA language as of version 2.2.12
+
+ .. versionadded:: 2.1
+ """
+ name = 'CPSA'
+ aliases = ['cpsa']
+ filenames = ['*.cpsa']
+ mimetypes = []
+
+ # list of known keywords and builtins taken form vim 6.4 scheme.vim
+ # syntax file.
+ _keywords = (
+ 'herald', 'vars', 'defmacro', 'include', 'defprotocol', 'defrole',
+ 'defskeleton', 'defstrand', 'deflistener', 'non-orig', 'uniq-orig',
+ 'pen-non-orig', 'precedes', 'trace', 'send', 'recv', 'name', 'text',
+ 'skey', 'akey', 'data', 'mesg',
+ )
+ _builtins = (
+ 'cat', 'enc', 'hash', 'privk', 'pubk', 'invk', 'ltk', 'gen', 'exp',
+ )
+
+ # valid names for identifiers
+ # well, names can only not consist fully of numbers
+ # but this should be good enough for now
valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- # support for uncommon kinds of numbers -
- # have to figure out what the characters mean
- # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
- # strings, symbols and characters
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'\s+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ # support for uncommon kinds of numbers -
+ # have to figure out what the characters mean
+ # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
+
+ # strings, symbols and characters
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'" + valid_name, String.Symbol),
+ (r"'" + valid_name, String.Symbol),
(r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
-
- # constants
- (r'(#t|#f)', Name.Constant),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- (words(_keywords, suffix=r'\b'), Keyword),
-
- # first variable in a quoted string like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable),
- (r"(?<=#\()" + valid_name, Name.Variable),
-
- # highlight the builtins
- (words(_builtins, prefix=r'(?<=\()', suffix=r'\b'), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- (r'(\[|\])', Punctuation),
- ],
- }
+
+ # constants
+ (r'(#t|#f)', Name.Constant),
+
+ # special operators
+ (r"('|#|`|,@|,|\.)", Operator),
+
+ # highlight the keywords
+ (words(_keywords, suffix=r'\b'), Keyword),
+
+ # first variable in a quoted string like
+ # '(this is syntactic sugar)
+ (r"(?<='\()" + valid_name, Name.Variable),
+ (r"(?<=#\()" + valid_name, Name.Variable),
+
+ # highlight the builtins
+ (words(_builtins, prefix=r'(?<=\()', suffix=r'\b'), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ (r'(\[|\])', Punctuation),
+ ],
+ }
class XtlangLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/make.py b/contrib/python/Pygments/py3/pygments/lexers/make.py
index 3e317e819e..d115a65469 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/make.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/make.py
@@ -1,205 +1,205 @@
-"""
- pygments.lexers.make
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Makefiles and similar.
-
+"""
+ pygments.lexers.make
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Makefiles and similar.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
- do_insertions, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
+ do_insertions, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Punctuation, Whitespace
-from pygments.lexers.shell import BashLexer
-
-__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']
-
-
-class MakefileLexer(Lexer):
- """
- Lexer for BSD and GNU make extensions (lenient enough to handle both in
- the same file even).
-
- *Rewritten in Pygments 0.10.*
- """
-
- name = 'Makefile'
- aliases = ['make', 'makefile', 'mf', 'bsdmake']
- filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
- mimetypes = ['text/x-makefile']
-
- r_special = re.compile(
- r'^(?:'
- # BSD Make
- r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
- # GNU Make
- r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
- # GNU Automake
- r'\s*(if|else|endif))(?=\s)')
- r_comment = re.compile(r'^\s*@?#')
-
- def get_tokens_unprocessed(self, text):
- ins = []
- lines = text.splitlines(True)
- done = ''
- lex = BaseMakefileLexer(**self.options)
- backslashflag = False
- for line in lines:
- if self.r_special.match(line) or backslashflag:
- ins.append((len(done), [(0, Comment.Preproc, line)]))
- backslashflag = line.strip().endswith('\\')
- elif self.r_comment.match(line):
- ins.append((len(done), [(0, Comment, line)]))
- else:
- done += line
+from pygments.lexers.shell import BashLexer
+
+__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']
+
+
+class MakefileLexer(Lexer):
+ """
+ Lexer for BSD and GNU make extensions (lenient enough to handle both in
+ the same file even).
+
+ *Rewritten in Pygments 0.10.*
+ """
+
+ name = 'Makefile'
+ aliases = ['make', 'makefile', 'mf', 'bsdmake']
+ filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
+ mimetypes = ['text/x-makefile']
+
+ r_special = re.compile(
+ r'^(?:'
+ # BSD Make
+ r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
+ # GNU Make
+ r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
+ # GNU Automake
+ r'\s*(if|else|endif))(?=\s)')
+ r_comment = re.compile(r'^\s*@?#')
+
+ def get_tokens_unprocessed(self, text):
+ ins = []
+ lines = text.splitlines(True)
+ done = ''
+ lex = BaseMakefileLexer(**self.options)
+ backslashflag = False
+ for line in lines:
+ if self.r_special.match(line) or backslashflag:
+ ins.append((len(done), [(0, Comment.Preproc, line)]))
+ backslashflag = line.strip().endswith('\\')
+ elif self.r_comment.match(line):
+ ins.append((len(done), [(0, Comment, line)]))
+ else:
+ done += line
yield from do_insertions(ins, lex.get_tokens_unprocessed(done))
-
- def analyse_text(text):
- # Many makefiles have $(BIG_CAPS) style variables
- if re.search(r'\$\([A-Z_]+\)', text):
- return 0.1
-
-
-class BaseMakefileLexer(RegexLexer):
- """
- Lexer for simple Makefiles (no preprocessing).
-
- .. versionadded:: 0.10
- """
-
- name = 'Base Makefile'
- aliases = ['basemake']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- # recipes (need to allow spaces because of expandtabs)
- (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
- # special variables
- (r'\$[<@$+%?|*]', Keyword),
+
+ def analyse_text(text):
+ # Many makefiles have $(BIG_CAPS) style variables
+ if re.search(r'\$\([A-Z_]+\)', text):
+ return 0.1
+
+
+class BaseMakefileLexer(RegexLexer):
+ """
+ Lexer for simple Makefiles (no preprocessing).
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Base Makefile'
+ aliases = ['basemake']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ # recipes (need to allow spaces because of expandtabs)
+ (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
+ # special variables
+ (r'\$[<@$+%?|*]', Keyword),
(r'\s+', Whitespace),
- (r'#.*?\n', Comment),
+ (r'#.*?\n', Comment),
(r'((?:un)?export)(\s+)(?=[\w${}\t -]+\n)',
- bygroups(Keyword, Text), 'export'),
+ bygroups(Keyword, Text), 'export'),
(r'(?:un)?export\s+', Keyword),
- # assignment
+ # assignment
(r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
- bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
- # strings
+ bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
+ # strings
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- # targets
- (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
- 'block-header'),
- # expansions
- (r'\$\(', Keyword, 'expansion'),
- ],
- 'expansion': [
+ # targets
+ (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
+ 'block-header'),
+ # expansions
+ (r'\$\(', Keyword, 'expansion'),
+ ],
+ 'expansion': [
(r'[^\w$().-]+', Text),
(r'[\w.-]+', Name.Variable),
- (r'\$', Keyword),
- (r'\(', Keyword, '#push'),
- (r'\)', Keyword, '#pop'),
- ],
- 'export': [
- (r'[\w${}-]+', Name.Variable),
- (r'\n', Text, '#pop'),
+ (r'\$', Keyword),
+ (r'\(', Keyword, '#push'),
+ (r'\)', Keyword, '#pop'),
+ ],
+ 'export': [
+ (r'[\w${}-]+', Name.Variable),
+ (r'\n', Text, '#pop'),
(r'\s+', Whitespace),
- ],
- 'block-header': [
- (r'[,|]', Punctuation),
- (r'#.*?\n', Comment, '#pop'),
- (r'\\\n', Text), # line continuation
- (r'\$\(', Keyword, 'expansion'),
- (r'[a-zA-Z_]+', Name),
- (r'\n', Text, '#pop'),
- (r'.', Text),
- ],
- }
-
-
-class CMakeLexer(RegexLexer):
- """
- Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
-
- .. versionadded:: 1.2
- """
- name = 'CMake'
- aliases = ['cmake']
- filenames = ['*.cmake', 'CMakeLists.txt']
- mimetypes = ['text/x-cmake']
-
- tokens = {
- 'root': [
- # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
- # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
- # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
- # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
- # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
- # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
- # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
- # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
- # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
- # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
- # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
- # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
- # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
- # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
- # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
- # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
- # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
- # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
- # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
- # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
- # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
- # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
- # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
- # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
- # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
- # r'COUNTARGS)\b', Name.Builtin, 'args'),
- (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
- Punctuation), 'args'),
- include('keywords'),
- include('ws')
- ],
- 'args': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
- (r'(\$ENV\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
- (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
- (r'(?s)".*?"', String.Double),
- (r'\\\S+', String),
- (r'[^)$"# \t\n]+', String),
- (r'\n', Text), # explicitly legal
- include('keywords'),
- include('ws')
- ],
- 'string': [
-
- ],
- 'keywords': [
- (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
- r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
- ],
- 'ws': [
+ ],
+ 'block-header': [
+ (r'[,|]', Punctuation),
+ (r'#.*?\n', Comment, '#pop'),
+ (r'\\\n', Text), # line continuation
+ (r'\$\(', Keyword, 'expansion'),
+ (r'[a-zA-Z_]+', Name),
+ (r'\n', Text, '#pop'),
+ (r'.', Text),
+ ],
+ }
+
+
+class CMakeLexer(RegexLexer):
+ """
+ Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
+
+ .. versionadded:: 1.2
+ """
+ name = 'CMake'
+ aliases = ['cmake']
+ filenames = ['*.cmake', 'CMakeLists.txt']
+ mimetypes = ['text/x-cmake']
+
+ tokens = {
+ 'root': [
+ # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
+ # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
+ # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
+ # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
+ # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
+ # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
+ # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
+ # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
+ # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
+ # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
+ # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
+ # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
+ # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
+ # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
+ # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
+ # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
+ # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
+ # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
+ # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
+ # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
+ # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
+ # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
+ # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
+ # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
+ # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
+ # r'COUNTARGS)\b', Name.Builtin, 'args'),
+ (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
+ Punctuation), 'args'),
+ include('keywords'),
+ include('ws')
+ ],
+ 'args': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
+ (r'(\$ENV\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
+ (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
+ (r'(?s)".*?"', String.Double),
+ (r'\\\S+', String),
+ (r'[^)$"# \t\n]+', String),
+ (r'\n', Text), # explicitly legal
+ include('keywords'),
+ include('ws')
+ ],
+ 'string': [
+
+ ],
+ 'keywords': [
+ (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
+ r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
+ ],
+ 'ws': [
(r'[ \t]+', Whitespace),
- (r'#.*\n', Comment),
- ]
- }
-
- def analyse_text(text):
+ (r'#.*\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
exp = (
r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*'
r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*'
r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*'
r'(#[^\n]*)?$'
)
- if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
- return 0.8
- return 0.0
+ if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
+ return 0.8
+ return 0.0
diff --git a/contrib/python/Pygments/py3/pygments/lexers/markup.py b/contrib/python/Pygments/py3/pygments/lexers/markup.py
index e1a8429ef0..859398a133 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/markup.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/markup.py
@@ -1,499 +1,499 @@
-"""
- pygments.lexers.markup
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for non-HTML markup languages.
-
+"""
+ pygments.lexers.markup
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for non-HTML markup languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexers.html import XmlLexer
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.css import CssLexer
-
-from pygments.lexer import RegexLexer, DelegatingLexer, include, bygroups, \
- using, this, do_insertions, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other
-from pygments.util import get_bool_opt, ClassNotFound
-
-__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
- 'MozPreprocHashLexer', 'MozPreprocPercentLexer',
- 'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.css import CssLexer
+
+from pygments.lexer import RegexLexer, DelegatingLexer, include, bygroups, \
+ using, this, do_insertions, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other
+from pygments.util import get_bool_opt, ClassNotFound
+
+__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
+ 'MozPreprocHashLexer', 'MozPreprocPercentLexer',
+ 'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
'MozPreprocCssLexer', 'MarkdownLexer', 'TiddlyWiki5Lexer']
-
-
-class BBCodeLexer(RegexLexer):
- """
- A lexer that highlights BBCode(-like) syntax.
-
- .. versionadded:: 0.6
- """
-
- name = 'BBCode'
- aliases = ['bbcode']
- mimetypes = ['text/x-bbcode']
-
- tokens = {
- 'root': [
- (r'[^[]+', Text),
- # tag/end tag begin
- (r'\[/?\w+', Keyword, 'tag'),
- # stray bracket
- (r'\[', Text),
- ],
- 'tag': [
- (r'\s+', Text),
- # attribute with value
- (r'(\w+)(=)("?[^\s"\]]+"?)',
- bygroups(Name.Attribute, Operator, String)),
- # tag argument (a la [color=green])
- (r'(=)("?[^\s"\]]+"?)',
- bygroups(Operator, String)),
- # tag end
- (r'\]', Keyword, '#pop'),
- ],
- }
-
-
-class MoinWikiLexer(RegexLexer):
- """
- For MoinMoin (and Trac) Wiki markup.
-
- .. versionadded:: 0.7
- """
-
- name = 'MoinMoin/Trac Wiki markup'
- aliases = ['trac-wiki', 'moin']
- filenames = []
- mimetypes = ['text/x-trac-wiki']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
- (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
- # Titles
- (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
- bygroups(Generic.Heading, using(this), Generic.Heading, String)),
- # Literal code blocks, with optional shebang
- (r'(\{\{\{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
- (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
- # Lists
- (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
- (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
- # Other Formatting
- (r'\[\[\w+.*?\]\]', Keyword), # Macro
- (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
- bygroups(Keyword, String, Keyword)), # Link
- (r'^----+$', Keyword), # Horizontal rules
- (r'[^\n\'\[{!_~^,|]+', Text),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'codeblock': [
- (r'\}\}\}', Name.Builtin, '#pop'),
- # these blocks are allowed to be nested in Trac, but not MoinMoin
- (r'\{\{\{', Text, '#push'),
- (r'[^{}]+', Comment.Preproc), # slurp boring text
- (r'.', Comment.Preproc), # allow loose { or }
- ],
- }
-
-
-class RstLexer(RegexLexer):
- """
- For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
-
- .. versionadded:: 0.7
-
- Additional options accepted:
-
- `handlecodeblocks`
- Highlight the contents of ``.. sourcecode:: language``,
- ``.. code:: language`` and ``.. code-block:: language``
- directives with a lexer for the given language (default:
- ``True``).
-
- .. versionadded:: 0.8
- """
- name = 'reStructuredText'
+
+
+class BBCodeLexer(RegexLexer):
+ """
+ A lexer that highlights BBCode(-like) syntax.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'BBCode'
+ aliases = ['bbcode']
+ mimetypes = ['text/x-bbcode']
+
+ tokens = {
+ 'root': [
+ (r'[^[]+', Text),
+ # tag/end tag begin
+ (r'\[/?\w+', Keyword, 'tag'),
+ # stray bracket
+ (r'\[', Text),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ # attribute with value
+ (r'(\w+)(=)("?[^\s"\]]+"?)',
+ bygroups(Name.Attribute, Operator, String)),
+ # tag argument (a la [color=green])
+ (r'(=)("?[^\s"\]]+"?)',
+ bygroups(Operator, String)),
+ # tag end
+ (r'\]', Keyword, '#pop'),
+ ],
+ }
+
+
+class MoinWikiLexer(RegexLexer):
+ """
+ For MoinMoin (and Trac) Wiki markup.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'MoinMoin/Trac Wiki markup'
+ aliases = ['trac-wiki', 'moin']
+ filenames = []
+ mimetypes = ['text/x-trac-wiki']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'^#.*$', Comment),
+ (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
+ # Titles
+ (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
+ bygroups(Generic.Heading, using(this), Generic.Heading, String)),
+ # Literal code blocks, with optional shebang
+ (r'(\{\{\{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
+ (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
+ # Lists
+ (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
+ (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
+ # Other Formatting
+ (r'\[\[\w+.*?\]\]', Keyword), # Macro
+ (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
+ bygroups(Keyword, String, Keyword)), # Link
+ (r'^----+$', Keyword), # Horizontal rules
+ (r'[^\n\'\[{!_~^,|]+', Text),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'codeblock': [
+ (r'\}\}\}', Name.Builtin, '#pop'),
+ # these blocks are allowed to be nested in Trac, but not MoinMoin
+ (r'\{\{\{', Text, '#push'),
+ (r'[^{}]+', Comment.Preproc), # slurp boring text
+ (r'.', Comment.Preproc), # allow loose { or }
+ ],
+ }
+
+
+class RstLexer(RegexLexer):
+ """
+ For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
+
+ .. versionadded:: 0.7
+
+ Additional options accepted:
+
+ `handlecodeblocks`
+ Highlight the contents of ``.. sourcecode:: language``,
+ ``.. code:: language`` and ``.. code-block:: language``
+ directives with a lexer for the given language (default:
+ ``True``).
+
+ .. versionadded:: 0.8
+ """
+ name = 'reStructuredText'
aliases = ['restructuredtext', 'rst', 'rest']
- filenames = ['*.rst', '*.rest']
- mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
- flags = re.MULTILINE
-
- def _handle_sourcecode(self, match):
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), Punctuation, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator.Word, match.group(3)
- yield match.start(4), Punctuation, match.group(4)
- yield match.start(5), Text, match.group(5)
- yield match.start(6), Keyword, match.group(6)
- yield match.start(7), Text, match.group(7)
-
- # lookup lexer if wanted and existing
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group(6).strip())
- except ClassNotFound:
- pass
- indention = match.group(8)
- indention_size = len(indention)
- code = (indention + match.group(9) + match.group(10) + match.group(11))
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(8), String, code
- return
-
- # highlight the lines with the lexer.
- ins = []
- codelines = code.splitlines(True)
- code = ''
- for line in codelines:
- if len(line) > indention_size:
- ins.append((len(code), [(0, Text, line[:indention_size])]))
- code += line[indention_size:]
- else:
- code += line
+ filenames = ['*.rst', '*.rest']
+ mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
+ flags = re.MULTILINE
+
+ def _handle_sourcecode(self, match):
+ from pygments.lexers import get_lexer_by_name
+
+ # section header
+ yield match.start(1), Punctuation, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator.Word, match.group(3)
+ yield match.start(4), Punctuation, match.group(4)
+ yield match.start(5), Text, match.group(5)
+ yield match.start(6), Keyword, match.group(6)
+ yield match.start(7), Text, match.group(7)
+
+ # lookup lexer if wanted and existing
+ lexer = None
+ if self.handlecodeblocks:
+ try:
+ lexer = get_lexer_by_name(match.group(6).strip())
+ except ClassNotFound:
+ pass
+ indention = match.group(8)
+ indention_size = len(indention)
+ code = (indention + match.group(9) + match.group(10) + match.group(11))
+
+ # no lexer for this language. handle it like it was a code block
+ if lexer is None:
+ yield match.start(8), String, code
+ return
+
+ # highlight the lines with the lexer.
+ ins = []
+ codelines = code.splitlines(True)
+ code = ''
+ for line in codelines:
+ if len(line) > indention_size:
+ ins.append((len(code), [(0, Text, line[:indention_size])]))
+ code += line[indention_size:]
+ else:
+ code += line
yield from do_insertions(ins, lexer.get_tokens_unprocessed(code))
-
- # from docutils.parsers.rst.states
+
+ # from docutils.parsers.rst.states
closers = '\'")]}>\u2019\u201d\xbb!?'
unicode_delimiters = '\u2010\u2011\u2012\u2013\u2014\u00a0'
- end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
- % (re.escape(unicode_delimiters),
- re.escape(closers)))
-
- tokens = {
- 'root': [
- # Heading with overline
- (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
- r'(.+)(\n)(\1)(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading,
- Text, Generic.Heading, Text)),
- # Plain heading
- (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
- r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading, Text)),
- # Bulleted lists
- (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered lists
- (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered, but keep words at BOL from becoming lists
- (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Line blocks
- (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
- bygroups(Text, Operator, using(this, state='inline'))),
- # Sourcecode directives
- (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
+ end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
+ % (re.escape(unicode_delimiters),
+ re.escape(closers)))
+
+ tokens = {
+ 'root': [
+ # Heading with overline
+ (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
+ r'(.+)(\n)(\1)(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading,
+ Text, Generic.Heading, Text)),
+ # Plain heading
+ (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
+ r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading, Text)),
+ # Bulleted lists
+ (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered lists
+ (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered, but keep words at BOL from becoming lists
+ (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Line blocks
+ (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
+ bygroups(Text, Operator, using(this, state='inline'))),
+ # Sourcecode directives
+ (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*)?\n)+)',
- _handle_sourcecode),
- # A directive
- (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
- using(this, state='inline'))),
- # A reference target
- (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A footnote/citation target
- (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A substitution def
- (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
- Punctuation, Text, using(this, state='inline'))),
- # Comments
- (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
+ _handle_sourcecode),
+ # A directive
+ (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
+ using(this, state='inline'))),
+ # A reference target
+ (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A footnote/citation target
+ (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A substitution def
+ (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
+ Punctuation, Text, using(this, state='inline'))),
+ # Comments
+ (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
# Field list marker
(r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
bygroups(Text, Name.Class, Text)),
- # Definition list
- (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
- bygroups(using(this, state='inline'), using(this, state='inline'))),
- # Code blocks
+ # Definition list
+ (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
+ bygroups(using(this, state='inline'), using(this, state='inline'))),
+ # Code blocks
(r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*)?\n)+)',
- bygroups(String.Escape, Text, String, String, Text, String)),
- include('inline'),
- ],
- 'inline': [
- (r'\\.', Text), # escape
- (r'``', String, 'literal'), # code
- (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
- bygroups(String, String.Interpol, String)),
- (r'`.+?`__?', String), # reference
- (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
- bygroups(Name.Variable, Name.Attribute)), # role
- (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
- bygroups(Name.Attribute, Name.Variable)), # role (content first)
- (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
- (r'\*.+?\*', Generic.Emph), # Emphasis
- (r'\[.*?\]_', String), # Footnote or citation
- (r'<.+?>', Name.Tag), # Hyperlink
- (r'[^\\\n\[*`:]+', Text),
- (r'.', Text),
- ],
- 'literal': [
- (r'[^`]+', String),
- (r'``' + end_string_suffix, String, '#pop'),
- (r'`', String),
- ]
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
-
- def analyse_text(text):
- if text[:2] == '..' and text[2:3] != '.':
- return 0.3
- p1 = text.find("\n")
- p2 = text.find("\n", p1 + 1)
- if (p2 > -1 and # has two lines
- p1 * 2 + 1 == p2 and # they are the same length
- text[p1+1] in '-=' and # the next line both starts and ends with
- text[p1+1] == text[p2-1]): # ...a sufficiently high header
- return 0.5
-
-
-class TexLexer(RegexLexer):
- """
- Lexer for the TeX and LaTeX typesetting languages.
- """
-
- name = 'TeX'
- aliases = ['tex', 'latex']
- filenames = ['*.tex', '*.aux', '*.toc']
- mimetypes = ['text/x-tex', 'text/x-latex']
-
- tokens = {
- 'general': [
- (r'%.*?\n', Comment),
- (r'[{}]', Name.Builtin),
- (r'[&_^]', Name.Builtin),
- ],
- 'root': [
- (r'\\\[', String.Backtick, 'displaymath'),
- (r'\\\(', String, 'inlinemath'),
- (r'\$\$', String.Backtick, 'displaymath'),
- (r'\$', String, 'inlinemath'),
- (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
- (r'\\$', Keyword),
- include('general'),
- (r'[^\\$%&_^{}]+', Text),
- ],
- 'math': [
- (r'\\([a-zA-Z]+|.)', Name.Variable),
- include('general'),
- (r'[0-9]+', Number),
- (r'[-=!+*/()\[\]]', Operator),
- (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
- ],
- 'inlinemath': [
- (r'\\\)', String, '#pop'),
- (r'\$', String, '#pop'),
- include('math'),
- ],
- 'displaymath': [
- (r'\\\]', String, '#pop'),
- (r'\$\$', String, '#pop'),
- (r'\$', Name.Builtin),
- include('math'),
- ],
- 'command': [
- (r'\[.*?\]', Name.Attribute),
- (r'\*', Keyword),
- default('#pop'),
- ],
- }
-
- def analyse_text(text):
- for start in ("\\documentclass", "\\input", "\\documentstyle",
- "\\relax"):
- if text[:len(start)] == start:
- return True
-
-
-class GroffLexer(RegexLexer):
- """
- Lexer for the (g)roff typesetting language, supporting groff
- extensions. Mainly useful for highlighting manpage sources.
-
- .. versionadded:: 0.6
- """
-
- name = 'Groff'
- aliases = ['groff', 'nroff', 'man']
+ bygroups(String.Escape, Text, String, String, Text, String)),
+ include('inline'),
+ ],
+ 'inline': [
+ (r'\\.', Text), # escape
+ (r'``', String, 'literal'), # code
+ (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
+ bygroups(String, String.Interpol, String)),
+ (r'`.+?`__?', String), # reference
+ (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
+ bygroups(Name.Variable, Name.Attribute)), # role
+ (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
+ bygroups(Name.Attribute, Name.Variable)), # role (content first)
+ (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
+ (r'\*.+?\*', Generic.Emph), # Emphasis
+ (r'\[.*?\]_', String), # Footnote or citation
+ (r'<.+?>', Name.Tag), # Hyperlink
+ (r'[^\\\n\[*`:]+', Text),
+ (r'.', Text),
+ ],
+ 'literal': [
+ (r'[^`]+', String),
+ (r'``' + end_string_suffix, String, '#pop'),
+ (r'`', String),
+ ]
+ }
+
+ def __init__(self, **options):
+ self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
+ RegexLexer.__init__(self, **options)
+
+ def analyse_text(text):
+ if text[:2] == '..' and text[2:3] != '.':
+ return 0.3
+ p1 = text.find("\n")
+ p2 = text.find("\n", p1 + 1)
+ if (p2 > -1 and # has two lines
+ p1 * 2 + 1 == p2 and # they are the same length
+ text[p1+1] in '-=' and # the next line both starts and ends with
+ text[p1+1] == text[p2-1]): # ...a sufficiently high header
+ return 0.5
+
+
+class TexLexer(RegexLexer):
+ """
+ Lexer for the TeX and LaTeX typesetting languages.
+ """
+
+ name = 'TeX'
+ aliases = ['tex', 'latex']
+ filenames = ['*.tex', '*.aux', '*.toc']
+ mimetypes = ['text/x-tex', 'text/x-latex']
+
+ tokens = {
+ 'general': [
+ (r'%.*?\n', Comment),
+ (r'[{}]', Name.Builtin),
+ (r'[&_^]', Name.Builtin),
+ ],
+ 'root': [
+ (r'\\\[', String.Backtick, 'displaymath'),
+ (r'\\\(', String, 'inlinemath'),
+ (r'\$\$', String.Backtick, 'displaymath'),
+ (r'\$', String, 'inlinemath'),
+ (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
+ (r'\\$', Keyword),
+ include('general'),
+ (r'[^\\$%&_^{}]+', Text),
+ ],
+ 'math': [
+ (r'\\([a-zA-Z]+|.)', Name.Variable),
+ include('general'),
+ (r'[0-9]+', Number),
+ (r'[-=!+*/()\[\]]', Operator),
+ (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
+ ],
+ 'inlinemath': [
+ (r'\\\)', String, '#pop'),
+ (r'\$', String, '#pop'),
+ include('math'),
+ ],
+ 'displaymath': [
+ (r'\\\]', String, '#pop'),
+ (r'\$\$', String, '#pop'),
+ (r'\$', Name.Builtin),
+ include('math'),
+ ],
+ 'command': [
+ (r'\[.*?\]', Name.Attribute),
+ (r'\*', Keyword),
+ default('#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ for start in ("\\documentclass", "\\input", "\\documentstyle",
+ "\\relax"):
+ if text[:len(start)] == start:
+ return True
+
+
+class GroffLexer(RegexLexer):
+ """
+ Lexer for the (g)roff typesetting language, supporting groff
+ extensions. Mainly useful for highlighting manpage sources.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'Groff'
+ aliases = ['groff', 'nroff', 'man']
filenames = ['*.[1-9]', '*.man', '*.1p', '*.3pm']
- mimetypes = ['application/x-troff', 'text/troff']
-
- tokens = {
- 'root': [
- (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
- (r'\.', Punctuation, 'request'),
- # Regular characters, slurp till we find a backslash or newline
- (r'[^\\\n]+', Text, 'textline'),
- default('textline'),
- ],
- 'textline': [
- include('escapes'),
- (r'[^\\\n]+', Text),
- (r'\n', Text, '#pop'),
- ],
- 'escapes': [
- # groff has many ways to write escapes.
- (r'\\"[^\n]*', Comment),
- (r'\\[fn]\w', String.Escape),
- (r'\\\(.{2}', String.Escape),
- (r'\\.\[.*\]', String.Escape),
- (r'\\.', String.Escape),
- (r'\\\n', Text, 'request'),
- ],
- 'request': [
- (r'\n', Text, '#pop'),
- include('escapes'),
- (r'"[^\n"]+"', String.Double),
- (r'\d+', Number),
- (r'\S+', String),
- (r'\s+', Text),
- ],
- }
-
- def analyse_text(text):
- if text[:1] != '.':
- return False
- if text[:3] == '.\\"':
- return True
- if text[:4] == '.TH ':
- return True
- if text[1:3].isalnum() and text[3].isspace():
- return 0.9
-
-
-class MozPreprocHashLexer(RegexLexer):
- """
- Lexer for Mozilla Preprocessor files (with '#' as the marker).
-
- Other data is left untouched.
-
- .. versionadded:: 2.0
- """
- name = 'mozhashpreproc'
- aliases = [name]
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^#', Comment.Preproc, ('expr', 'exprstart')),
- (r'.+', Other),
- ],
- 'exprstart': [
- (r'(literal)(.*)', bygroups(Comment.Preproc, Text), '#pop:2'),
- (words((
- 'define', 'undef', 'if', 'ifdef', 'ifndef', 'else', 'elif',
- 'elifdef', 'elifndef', 'endif', 'expand', 'filter', 'unfilter',
- 'include', 'includesubst', 'error')),
- Comment.Preproc, '#pop'),
- ],
- 'expr': [
- (words(('!', '!=', '==', '&&', '||')), Operator),
- (r'(defined)(\()', bygroups(Keyword, Punctuation)),
- (r'\)', Punctuation),
- (r'[0-9]+', Number.Decimal),
- (r'__\w+?__', Name.Variable),
- (r'@\w+?@', Name.Class),
- (r'\w+', Name),
- (r'\n', Text, '#pop'),
- (r'\s+', Text),
- (r'\S', Punctuation),
- ],
- }
-
-
-class MozPreprocPercentLexer(MozPreprocHashLexer):
- """
- Lexer for Mozilla Preprocessor files (with '%' as the marker).
-
- Other data is left untouched.
-
- .. versionadded:: 2.0
- """
- name = 'mozpercentpreproc'
- aliases = [name]
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'^%', Comment.Preproc, ('expr', 'exprstart')),
- (r'.+', Other),
- ],
- }
-
-
-class MozPreprocXulLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `XmlLexer`.
-
- .. versionadded:: 2.0
- """
- name = "XUL+mozpreproc"
- aliases = ['xul+mozpreproc']
- filenames = ['*.xul.in']
- mimetypes = []
-
- def __init__(self, **options):
+ mimetypes = ['application/x-troff', 'text/troff']
+
+ tokens = {
+ 'root': [
+ (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
+ (r'\.', Punctuation, 'request'),
+ # Regular characters, slurp till we find a backslash or newline
+ (r'[^\\\n]+', Text, 'textline'),
+ default('textline'),
+ ],
+ 'textline': [
+ include('escapes'),
+ (r'[^\\\n]+', Text),
+ (r'\n', Text, '#pop'),
+ ],
+ 'escapes': [
+ # groff has many ways to write escapes.
+ (r'\\"[^\n]*', Comment),
+ (r'\\[fn]\w', String.Escape),
+ (r'\\\(.{2}', String.Escape),
+ (r'\\.\[.*\]', String.Escape),
+ (r'\\.', String.Escape),
+ (r'\\\n', Text, 'request'),
+ ],
+ 'request': [
+ (r'\n', Text, '#pop'),
+ include('escapes'),
+ (r'"[^\n"]+"', String.Double),
+ (r'\d+', Number),
+ (r'\S+', String),
+ (r'\s+', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ if text[:1] != '.':
+ return False
+ if text[:3] == '.\\"':
+ return True
+ if text[:4] == '.TH ':
+ return True
+ if text[1:3].isalnum() and text[3].isspace():
+ return 0.9
+
+
+class MozPreprocHashLexer(RegexLexer):
+ """
+ Lexer for Mozilla Preprocessor files (with '#' as the marker).
+
+ Other data is left untouched.
+
+ .. versionadded:: 2.0
+ """
+ name = 'mozhashpreproc'
+ aliases = [name]
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^#', Comment.Preproc, ('expr', 'exprstart')),
+ (r'.+', Other),
+ ],
+ 'exprstart': [
+ (r'(literal)(.*)', bygroups(Comment.Preproc, Text), '#pop:2'),
+ (words((
+ 'define', 'undef', 'if', 'ifdef', 'ifndef', 'else', 'elif',
+ 'elifdef', 'elifndef', 'endif', 'expand', 'filter', 'unfilter',
+ 'include', 'includesubst', 'error')),
+ Comment.Preproc, '#pop'),
+ ],
+ 'expr': [
+ (words(('!', '!=', '==', '&&', '||')), Operator),
+ (r'(defined)(\()', bygroups(Keyword, Punctuation)),
+ (r'\)', Punctuation),
+ (r'[0-9]+', Number.Decimal),
+ (r'__\w+?__', Name.Variable),
+ (r'@\w+?@', Name.Class),
+ (r'\w+', Name),
+ (r'\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'\S', Punctuation),
+ ],
+ }
+
+
+class MozPreprocPercentLexer(MozPreprocHashLexer):
+ """
+ Lexer for Mozilla Preprocessor files (with '%' as the marker).
+
+ Other data is left untouched.
+
+ .. versionadded:: 2.0
+ """
+ name = 'mozpercentpreproc'
+ aliases = [name]
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^%', Comment.Preproc, ('expr', 'exprstart')),
+ (r'.+', Other),
+ ],
+ }
+
+
+class MozPreprocXulLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `XmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "XUL+mozpreproc"
+ aliases = ['xul+mozpreproc']
+ filenames = ['*.xul.in']
+ mimetypes = []
+
+ def __init__(self, **options):
super().__init__(XmlLexer, MozPreprocHashLexer, **options)
-
-
-class MozPreprocJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `JavascriptLexer`.
-
- .. versionadded:: 2.0
- """
- name = "Javascript+mozpreproc"
- aliases = ['javascript+mozpreproc']
- filenames = ['*.js.in']
- mimetypes = []
-
- def __init__(self, **options):
+
+
+class MozPreprocJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `JavascriptLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "Javascript+mozpreproc"
+ aliases = ['javascript+mozpreproc']
+ filenames = ['*.js.in']
+ mimetypes = []
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, MozPreprocHashLexer, **options)
-
-
-class MozPreprocCssLexer(DelegatingLexer):
- """
- Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
- `CssLexer`.
-
- .. versionadded:: 2.0
- """
- name = "CSS+mozpreproc"
- aliases = ['css+mozpreproc']
- filenames = ['*.css.in']
- mimetypes = []
-
- def __init__(self, **options):
+
+
+class MozPreprocCssLexer(DelegatingLexer):
+ """
+ Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
+ `CssLexer`.
+
+ .. versionadded:: 2.0
+ """
+ name = "CSS+mozpreproc"
+ aliases = ['css+mozpreproc']
+ filenames = ['*.css.in']
+ mimetypes = []
+
+ def __init__(self, **options):
super().__init__(CssLexer, MozPreprocPercentLexer, **options)
-
+
class MarkdownLexer(RegexLexer):
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/math.py b/contrib/python/Pygments/py3/pygments/lexers/math.py
index 88f810e70f..b523581e46 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/math.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/math.py
@@ -1,20 +1,20 @@
-"""
- pygments.lexers.math
- ~~~~~~~~~~~~~~~~~~~~
-
- Just export lexers that were contained in this module.
-
+"""
+ pygments.lexers.math
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexers that were contained in this module.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.python import NumPyLexer
-from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
- OctaveLexer, ScilabLexer
-from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
-from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
-from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
-from pygments.lexers.idl import IDLLexer
-from pygments.lexers.algebra import MuPADLexer
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.python import NumPyLexer
+from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
+ OctaveLexer, ScilabLexer
+from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
+from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
+from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
+from pygments.lexers.idl import IDLLexer
+from pygments.lexers.algebra import MuPADLexer
+
+__all__ = []
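As the hunk above shows, math.py is only a compatibility shim: it re-exports lexers that now live in other modules and leaves __all__ empty. A quick sketch of what that preserves (illustrative, not part of the patch):

    from pygments.lexers.math import MatlabLexer          # legacy import path
    from pygments.lexers.matlab import MatlabLexer as M   # canonical location

    assert MatlabLexer is M   # the shim re-exports the very same class
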
diff --git a/contrib/python/Pygments/py3/pygments/lexers/matlab.py b/contrib/python/Pygments/py3/pygments/lexers/matlab.py
index 445063935b..82ac29ecf1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/matlab.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/matlab.py
@@ -1,39 +1,39 @@
-"""
- pygments.lexers.matlab
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Matlab and related languages.
-
+"""
+ pygments.lexers.matlab
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Matlab and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \
do_insertions, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
-
-from pygments.lexers import _scilab_builtins
-
-__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
-
-
-class MatlabLexer(RegexLexer):
- """
- For Matlab source code.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab'
- aliases = ['matlab']
- filenames = ['*.m']
- mimetypes = ['text/matlab']
-
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+from pygments.lexers import _scilab_builtins
+
+__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
+
+
+class MatlabLexer(RegexLexer):
+ """
+ For Matlab source code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab'
+ aliases = ['matlab']
+ filenames = ['*.m']
+ mimetypes = ['text/matlab']
+
_operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\'
- tokens = {
+ tokens = {
'expressions': [
# operators:
(_operators, Operator),
@@ -61,19 +61,19 @@ class MatlabLexer(RegexLexer):
(r'\s+', Whitespace),
(r'.', Text),
],
- 'root': [
- # line starting with '!' is sent as a system command. not sure what
- # label to use...
- (r'^!.*', String.Other),
- (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
- (r'%.*$', Comment),
+ 'root': [
+ # line starting with '!' is sent as a system command. not sure what
+ # label to use...
+ (r'^!.*', String.Other),
+ (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
+ (r'%.*$', Comment),
(r'(\s*^\s*)(function)\b', bygroups(Whitespace, Keyword), 'deffunc'),
(r'(\s*^\s*)(properties)(\s+)(\()',
bygroups(Whitespace, Keyword, Whitespace, Punctuation),
('defprops', 'propattrs')),
(r'(\s*^\s*)(properties)\b',
bygroups(Whitespace, Keyword), 'defprops'),
-
+
# from 'iskeyword' on version 9.4 (R2018a):
# Check that there is no preceding dot, as keywords are valid field
# names.
@@ -84,7 +84,7 @@ class MatlabLexer(RegexLexer):
'try', 'while'),
prefix=r'(?<!\.)(\s*)(', suffix=r')\b'),
bygroups(Whitespace, Keyword)),
-
+
(
words(
[
@@ -2654,10 +2654,10 @@ class MatlabLexer(RegexLexer):
),
Name.Builtin
),
-
- # line continuation with following comment:
+
+ # line continuation with following comment:
(r'(\.\.\.)(.*)$', bygroups(Keyword, Comment)),
-
+
# command form:
# "How MATLAB Recognizes Command Syntax" specifies that an operator
# is recognized if it is either surrounded by spaces or by no
@@ -2669,21 +2669,21 @@ class MatlabLexer(RegexLexer):
bygroups(Whitespace, Name, Whitespace), 'commandargs'),
include('expressions')
- ],
- 'blockcomment': [
- (r'^\s*%\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'deffunc': [
+ ],
+ 'blockcomment': [
+ (r'^\s*%\}', Comment.Multiline, '#pop'),
+ (r'^.*\n', Comment.Multiline),
+ (r'.', Comment.Multiline),
+ ],
+ 'deffunc': [
(r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
(r'(\s*)([a-zA-Z_]\w*)',
bygroups(Whitespace, Name.Function), '#pop'),
- ],
+ ],
'propattrs': [
(r'(\w+)(\s*)(=)(\s*)(\d+)',
bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
@@ -2718,9 +2718,9 @@ class MatlabLexer(RegexLexer):
(";", Punctuation, '#pop'),
default('#pop'),
]
- }
-
- def analyse_text(text):
+ }
+
+ def analyse_text(text):
# function declaration.
first_non_comment = next((line for line in text.splitlines()
if not re.match(r'^\s*%', text)), '').strip()
@@ -2729,53 +2729,53 @@ class MatlabLexer(RegexLexer):
return 1.
# comment
elif re.search(r'^\s*%', text, re.M):
- return 0.2
+ return 0.2
# system cmd
elif re.search(r'^!\w+', text, re.M):
- return 0.2
-
-
-line_re = re.compile('.*?\n')
-
-
-class MatlabSessionLexer(Lexer):
- """
- For Matlab sessions. Modeled after PythonConsoleLexer.
- Contributed by Ken Schutte <kschutte@csail.mit.edu>.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab session'
- aliases = ['matlabsession']
-
- def get_tokens_unprocessed(self, text):
- mlexer = MatlabLexer(**self.options)
-
- curcode = ''
- insertions = []
+ return 0.2
+
+
+line_re = re.compile('.*?\n')
+
+
+class MatlabSessionLexer(Lexer):
+ """
+ For Matlab sessions. Modeled after PythonConsoleLexer.
+ Contributed by Ken Schutte <kschutte@csail.mit.edu>.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab session'
+ aliases = ['matlabsession']
+
+ def get_tokens_unprocessed(self, text):
+ mlexer = MatlabLexer(**self.options)
+
+ curcode = ''
+ insertions = []
continuation = False
-
- for match in line_re.finditer(text):
- line = match.group()
-
- if line.startswith('>> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:3])]))
- curcode += line[3:]
-
- elif line.startswith('>>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:2])]))
- curcode += line[2:]
-
- elif line.startswith('???'):
-
- idx = len(curcode)
-
- # without is showing error on same line as before...?
- # line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
+
+ for match in line_re.finditer(text):
+ line = match.group()
+
+ if line.startswith('>> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:3])]))
+ curcode += line[3:]
+
+ elif line.startswith('>>'):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:2])]))
+ curcode += line[2:]
+
+ elif line.startswith('???'):
+
+ idx = len(curcode)
+
+ # without is showing error on same line as before...?
+ # line = "\n" + line
+ token = (0, Generic.Traceback, line)
+ insertions.append((idx, [token]))
elif continuation:
# line_start is the length of the most recent prompt symbol
line_start = len(insertions[-1][-1][-1])
@@ -2787,15 +2787,15 @@ class MatlabSessionLexer(Lexer):
curcode += line[line_start:]
else:
curcode += line
- else:
- if curcode:
+ else:
+ if curcode:
yield from do_insertions(
insertions, mlexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
-
- yield match.start(), Generic.Output, line
-
+ curcode = ''
+ insertions = []
+
+ yield match.start(), Generic.Output, line
+
# Does not allow continuation if a comment is included after the ellipses.
# Continues any line that ends with ..., even comments (lines that start with %)
if line.strip().endswith('...'):
@@ -2803,355 +2803,355 @@ class MatlabSessionLexer(Lexer):
else:
continuation = False
- if curcode: # or item:
+ if curcode: # or item:
yield from do_insertions(
insertions, mlexer.get_tokens_unprocessed(curcode))
-
-
-class OctaveLexer(RegexLexer):
- """
- For GNU Octave source code.
-
- .. versionadded:: 1.5
- """
- name = 'Octave'
- aliases = ['octave']
- filenames = ['*.m']
- mimetypes = ['text/octave']
-
- # These lists are generated automatically.
- # Run the following in bash shell:
- #
- # First dump all of the Octave manual into a plain text file:
- #
- # $ info octave --subnodes -o octave-manual
- #
- # Now grep through it:
-
- # for i in \
- # "Built-in Function" "Command" "Function File" \
- # "Loadable Function" "Mapping Function";
- # do
- # perl -e '@name = qw('"$i"');
- # print lc($name[0]),"_kw = [\n"';
- #
- # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
- # octave-manual | sort | uniq ;
- # echo "]" ;
- # echo;
- # done
-
- # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
-
- builtin_kw = (
- "addlistener", "addpath", "addproperty", "all",
- "and", "any", "argnames", "argv", "assignin",
- "atexit", "autoload",
- "available_graphics_toolkits", "beep_on_error",
- "bitand", "bitmax", "bitor", "bitshift", "bitxor",
- "cat", "cell", "cellstr", "char", "class", "clc",
- "columns", "command_line_path",
- "completion_append_char", "completion_matches",
- "complex", "confirm_recursive_rmdir", "cputime",
- "crash_dumps_octave_core", "ctranspose", "cumprod",
- "cumsum", "debug_on_error", "debug_on_interrupt",
- "debug_on_warning", "default_save_options",
- "dellistener", "diag", "diff", "disp",
- "doc_cache_file", "do_string_escapes", "double",
- "drawnow", "e", "echo_executing_commands", "eps",
- "eq", "errno", "errno_list", "error", "eval",
- "evalin", "exec", "exist", "exit", "eye", "false",
- "fclear", "fclose", "fcntl", "fdisp", "feof",
- "ferror", "feval", "fflush", "fgetl", "fgets",
- "fieldnames", "file_in_loadpath", "file_in_path",
- "filemarker", "filesep", "find_dir_in_path",
- "fixed_point_format", "fnmatch", "fopen", "fork",
- "formula", "fprintf", "fputs", "fread", "freport",
- "frewind", "fscanf", "fseek", "fskipl", "ftell",
- "functions", "fwrite", "ge", "genpath", "get",
- "getegid", "getenv", "geteuid", "getgid",
- "getpgrp", "getpid", "getppid", "getuid", "glob",
- "gt", "gui_mode", "history_control",
- "history_file", "history_size",
- "history_timestamp_format_string", "home",
- "horzcat", "hypot", "ifelse",
- "ignore_function_time_stamp", "inferiorto",
- "info_file", "info_program", "inline", "input",
- "intmax", "intmin", "ipermute",
- "is_absolute_filename", "isargout", "isbool",
- "iscell", "iscellstr", "ischar", "iscomplex",
- "isempty", "isfield", "isfloat", "isglobal",
- "ishandle", "isieee", "isindex", "isinteger",
- "islogical", "ismatrix", "ismethod", "isnull",
- "isnumeric", "isobject", "isreal",
- "is_rooted_relative_filename", "issorted",
- "isstruct", "isvarname", "kbhit", "keyboard",
- "kill", "lasterr", "lasterror", "lastwarn",
- "ldivide", "le", "length", "link", "linspace",
- "logical", "lstat", "lt", "make_absolute_filename",
- "makeinfo_program", "max_recursion_depth", "merge",
- "methods", "mfilename", "minus", "mislocked",
- "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
- "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
- "munlock", "nargin", "nargout",
- "native_float_format", "ndims", "ne", "nfields",
- "nnz", "norm", "not", "numel", "nzmax",
- "octave_config_info", "octave_core_file_limit",
- "octave_core_file_name",
- "octave_core_file_options", "ones", "or",
- "output_max_field_width", "output_precision",
- "page_output_immediately", "page_screen_output",
- "path", "pathsep", "pause", "pclose", "permute",
- "pi", "pipe", "plus", "popen", "power",
- "print_empty_dimensions", "printf",
- "print_struct_array_contents", "prod",
- "program_invocation_name", "program_name",
- "putenv", "puts", "pwd", "quit", "rats", "rdivide",
- "readdir", "readlink", "read_readline_init_file",
- "realmax", "realmin", "rehash", "rename",
- "repelems", "re_read_readline_init_file", "reset",
- "reshape", "resize", "restoredefaultpath",
- "rethrow", "rmdir", "rmfield", "rmpath", "rows",
- "save_header_format_string", "save_precision",
- "saving_history", "scanf", "set", "setenv",
- "shell_cmd", "sighup_dumps_octave_core",
- "sigterm_dumps_octave_core", "silent_functions",
- "single", "size", "size_equal", "sizemax",
- "sizeof", "sleep", "source", "sparse_auto_mutate",
- "split_long_rows", "sprintf", "squeeze", "sscanf",
- "stat", "stderr", "stdin", "stdout", "strcmp",
- "strcmpi", "string_fill_char", "strncmp",
- "strncmpi", "struct", "struct_levels_to_print",
- "strvcat", "subsasgn", "subsref", "sum", "sumsq",
- "superiorto", "suppress_verbose_help_message",
- "symlink", "system", "tic", "tilde_expand",
- "times", "tmpfile", "tmpnam", "toc", "toupper",
- "transpose", "true", "typeinfo", "umask", "uminus",
- "uname", "undo_string_escapes", "unlink", "uplus",
- "upper", "usage", "usleep", "vec", "vectorize",
- "vertcat", "waitpid", "warning", "warranty",
- "whos_line_format", "yes_or_no", "zeros",
- "inf", "Inf", "nan", "NaN")
-
- command_kw = ("close", "load", "who", "whos")
-
- function_kw = (
- "accumarray", "accumdim", "acosd", "acotd",
- "acscd", "addtodate", "allchild", "ancestor",
- "anova", "arch_fit", "arch_rnd", "arch_test",
- "area", "arma_rnd", "arrayfun", "ascii", "asctime",
- "asecd", "asind", "assert", "atand",
- "autoreg_matrix", "autumn", "axes", "axis", "bar",
- "barh", "bartlett", "bartlett_test", "beep",
- "betacdf", "betainv", "betapdf", "betarnd",
- "bicgstab", "bicubic", "binary", "binocdf",
- "binoinv", "binopdf", "binornd", "bitcmp",
- "bitget", "bitset", "blackman", "blanks",
- "blkdiag", "bone", "box", "brighten", "calendar",
- "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
- "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
- "chisquare_test_homogeneity",
- "chisquare_test_independence", "circshift", "cla",
- "clabel", "clf", "clock", "cloglog", "closereq",
- "colon", "colorbar", "colormap", "colperm",
- "comet", "common_size", "commutation_matrix",
- "compan", "compare_versions", "compass",
- "computer", "cond", "condest", "contour",
- "contourc", "contourf", "contrast", "conv",
- "convhull", "cool", "copper", "copyfile", "cor",
- "corrcoef", "cor_test", "cosd", "cotd", "cov",
- "cplxpair", "cross", "cscd", "cstrcat", "csvread",
- "csvwrite", "ctime", "cumtrapz", "curl", "cut",
- "cylinder", "date", "datenum", "datestr",
- "datetick", "datevec", "dblquad", "deal",
- "deblank", "deconv", "delaunay", "delaunayn",
- "delete", "demo", "detrend", "diffpara", "diffuse",
- "dir", "discrete_cdf", "discrete_inv",
- "discrete_pdf", "discrete_rnd", "display",
- "divergence", "dlmwrite", "dos", "dsearch",
- "dsearchn", "duplication_matrix", "durbinlevinson",
- "ellipsoid", "empirical_cdf", "empirical_inv",
- "empirical_pdf", "empirical_rnd", "eomday",
- "errorbar", "etime", "etreeplot", "example",
- "expcdf", "expinv", "expm", "exppdf", "exprnd",
- "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
- "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
- "factorial", "fail", "fcdf", "feather", "fftconv",
- "fftfilt", "fftshift", "figure", "fileattrib",
- "fileparts", "fill", "findall", "findobj",
- "findstr", "finv", "flag", "flipdim", "fliplr",
- "flipud", "fpdf", "fplot", "fractdiff", "freqz",
- "freqz_plot", "frnd", "fsolve",
- "f_test_regression", "ftp", "fullfile", "fzero",
- "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
- "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
- "geoinv", "geopdf", "geornd", "getfield", "ginput",
- "glpk", "gls", "gplot", "gradient",
- "graphics_toolkit", "gray", "grid", "griddata",
- "griddatan", "gtext", "gunzip", "gzip", "hadamard",
- "hamming", "hankel", "hanning", "hggroup",
- "hidden", "hilb", "hist", "histc", "hold", "hot",
- "hotelling_test", "housh", "hsv", "hurst",
- "hygecdf", "hygeinv", "hygepdf", "hygernd",
- "idivide", "ifftshift", "image", "imagesc",
- "imfinfo", "imread", "imshow", "imwrite", "index",
- "info", "inpolygon", "inputname", "interpft",
- "interpn", "intersect", "invhilb", "iqr", "isa",
- "isdefinite", "isdir", "is_duplicate_entry",
- "isequal", "isequalwithequalnans", "isfigure",
- "ishermitian", "ishghandle", "is_leap_year",
- "isletter", "ismac", "ismember", "ispc", "isprime",
- "isprop", "isscalar", "issquare", "isstrprop",
- "issymmetric", "isunix", "is_valid_file_id",
- "isvector", "jet", "kendall",
- "kolmogorov_smirnov_cdf",
- "kolmogorov_smirnov_test", "kruskal_wallis_test",
- "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
- "laplace_pdf", "laplace_rnd", "legend", "legendre",
- "license", "line", "linkprop", "list_primes",
- "loadaudio", "loadobj", "logistic_cdf",
- "logistic_inv", "logistic_pdf", "logistic_rnd",
- "logit", "loglog", "loglogerr", "logm", "logncdf",
- "logninv", "lognpdf", "lognrnd", "logspace",
- "lookfor", "ls_command", "lsqnonneg", "magic",
- "mahalanobis", "manova", "matlabroot",
- "mcnemar_test", "mean", "meansq", "median", "menu",
- "mesh", "meshc", "meshgrid", "meshz", "mexext",
- "mget", "mkpp", "mode", "moment", "movefile",
- "mpoles", "mput", "namelengthmax", "nargchk",
- "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
- "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
- "nonzeros", "normcdf", "normest", "norminv",
- "normpdf", "normrnd", "now", "nthroot", "null",
- "ocean", "ols", "onenormest", "optimget",
- "optimset", "orderfields", "orient", "orth",
- "pack", "pareto", "parseparams", "pascal", "patch",
- "pathdef", "pcg", "pchip", "pcolor", "pcr",
- "peaks", "periodogram", "perl", "perms", "pie",
- "pink", "planerot", "playaudio", "plot",
- "plotmatrix", "plotyy", "poisscdf", "poissinv",
- "poisspdf", "poissrnd", "polar", "poly",
- "polyaffine", "polyarea", "polyderiv", "polyfit",
- "polygcd", "polyint", "polyout", "polyreduce",
- "polyval", "polyvalm", "postpad", "powerset",
- "ppder", "ppint", "ppjumps", "ppplot", "ppval",
- "pqpnonneg", "prepad", "primes", "print",
- "print_usage", "prism", "probit", "qp", "qqplot",
- "quadcc", "quadgk", "quadl", "quadv", "quiver",
- "qzhess", "rainbow", "randi", "range", "rank",
- "ranks", "rat", "reallog", "realpow", "realsqrt",
- "record", "rectangle_lw", "rectangle_sw",
- "rectint", "refresh", "refreshdata",
- "regexptranslate", "repmat", "residue", "ribbon",
- "rindex", "roots", "rose", "rosser", "rotdim",
- "rref", "run", "run_count", "rundemos", "run_test",
- "runtests", "saveas", "saveaudio", "saveobj",
- "savepath", "scatter", "secd", "semilogx",
- "semilogxerr", "semilogy", "semilogyerr",
- "setaudio", "setdiff", "setfield", "setxor",
- "shading", "shift", "shiftdim", "sign_test",
- "sinc", "sind", "sinetone", "sinewave", "skewness",
- "slice", "sombrero", "sortrows", "spaugment",
- "spconvert", "spdiags", "spearman", "spectral_adf",
- "spectral_xdf", "specular", "speed", "spencer",
- "speye", "spfun", "sphere", "spinmap", "spline",
- "spones", "sprand", "sprandn", "sprandsym",
- "spring", "spstats", "spy", "sqp", "stairs",
- "statistics", "std", "stdnormal_cdf",
- "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
- "stem", "stft", "strcat", "strchr", "strjust",
- "strmatch", "strread", "strsplit", "strtok",
- "strtrim", "strtrunc", "structfun", "studentize",
- "subplot", "subsindex", "subspace", "substr",
- "substruct", "summer", "surf", "surface", "surfc",
- "surfl", "surfnorm", "svds", "swapbytes",
- "sylvester_matrix", "symvar", "synthesis", "table",
- "tand", "tar", "tcdf", "tempdir", "tempname",
- "test", "text", "textread", "textscan", "tinv",
- "title", "toeplitz", "tpdf", "trace", "trapz",
- "treelayout", "treeplot", "triangle_lw",
- "triangle_sw", "tril", "trimesh", "triplequad",
- "triplot", "trisurf", "triu", "trnd", "tsearchn",
- "t_test", "t_test_regression", "type", "unidcdf",
- "unidinv", "unidpdf", "unidrnd", "unifcdf",
- "unifinv", "unifpdf", "unifrnd", "union", "unique",
- "unix", "unmkpp", "unpack", "untabify", "untar",
- "unwrap", "unzip", "u_test", "validatestring",
- "vander", "var", "var_test", "vech", "ver",
- "version", "view", "voronoi", "voronoin",
- "waitforbuttonpress", "wavread", "wavwrite",
- "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
- "welch_test", "what", "white", "whitebg",
- "wienrnd", "wilcoxon_test", "wilkinson", "winter",
- "xlabel", "xlim", "ylabel", "yulewalker", "zip",
- "zlabel", "z_test")
-
- loadable_kw = (
- "airy", "amd", "balance", "besselh", "besseli",
- "besselj", "besselk", "bessely", "bitpack",
- "bsxfun", "builtin", "ccolamd", "cellfun",
- "cellslices", "chol", "choldelete", "cholinsert",
- "cholinv", "cholshift", "cholupdate", "colamd",
- "colloc", "convhulln", "convn", "csymamd",
- "cummax", "cummin", "daspk", "daspk_options",
- "dasrt", "dasrt_options", "dassl", "dassl_options",
- "dbclear", "dbdown", "dbstack", "dbstatus",
- "dbstop", "dbtype", "dbup", "dbwhere", "det",
- "dlmread", "dmperm", "dot", "eig", "eigs",
- "endgrent", "endpwent", "etree", "fft", "fftn",
- "fftw", "filter", "find", "full", "gcd",
- "getgrent", "getgrgid", "getgrnam", "getpwent",
- "getpwnam", "getpwuid", "getrusage", "givens",
- "gmtime", "gnuplot_binary", "hess", "ifft",
- "ifftn", "inv", "isdebugmode", "issparse", "kron",
- "localtime", "lookup", "lsode", "lsode_options",
- "lu", "luinc", "luupdate", "matrix_type", "max",
- "min", "mktime", "pinv", "qr", "qrdelete",
- "qrinsert", "qrshift", "qrupdate", "quad",
- "quad_options", "qz", "rand", "rande", "randg",
- "randn", "randp", "randperm", "rcond", "regexp",
- "regexpi", "regexprep", "schur", "setgrent",
- "setpwent", "sort", "spalloc", "sparse", "spparms",
- "sprank", "sqrtm", "strfind", "strftime",
- "strptime", "strrep", "svd", "svd_driver", "syl",
- "symamd", "symbfact", "symrcm", "time", "tsearch",
- "typecast", "urlread", "urlwrite")
-
- mapping_kw = (
- "abs", "acos", "acosh", "acot", "acoth", "acsc",
- "acsch", "angle", "arg", "asec", "asech", "asin",
- "asinh", "atan", "atanh", "beta", "betainc",
- "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
- "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
- "erfcx", "erfinv", "exp", "finite", "fix", "floor",
- "fmod", "gamma", "gammainc", "gammaln", "imag",
- "isalnum", "isalpha", "isascii", "iscntrl",
- "isdigit", "isfinite", "isgraph", "isinf",
- "islower", "isna", "isnan", "isprint", "ispunct",
- "isspace", "isupper", "isxdigit", "lcm", "lgamma",
- "log", "lower", "mod", "real", "rem", "round",
- "roundb", "sec", "sech", "sign", "sin", "sinh",
- "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
-
- builtin_consts = (
- "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
- "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
- "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
- "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
- "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
- "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
- "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
- "WSTOPSIG", "WTERMSIG", "WUNTRACED")
-
- tokens = {
- 'root': [
+
+
+class OctaveLexer(RegexLexer):
+ """
+ For GNU Octave source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Octave'
+ aliases = ['octave']
+ filenames = ['*.m']
+ mimetypes = ['text/octave']
+
+ # These lists are generated automatically.
+ # Run the following in bash shell:
+ #
+ # First dump all of the Octave manual into a plain text file:
+ #
+ # $ info octave --subnodes -o octave-manual
+ #
+ # Now grep through it:
+
+ # for i in \
+ # "Built-in Function" "Command" "Function File" \
+ # "Loadable Function" "Mapping Function";
+ # do
+ # perl -e '@name = qw('"$i"');
+ # print lc($name[0]),"_kw = [\n"';
+ #
+ # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
+ # octave-manual | sort | uniq ;
+ # echo "]" ;
+ # echo;
+ # done
+
+ # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
+
+ builtin_kw = (
+ "addlistener", "addpath", "addproperty", "all",
+ "and", "any", "argnames", "argv", "assignin",
+ "atexit", "autoload",
+ "available_graphics_toolkits", "beep_on_error",
+ "bitand", "bitmax", "bitor", "bitshift", "bitxor",
+ "cat", "cell", "cellstr", "char", "class", "clc",
+ "columns", "command_line_path",
+ "completion_append_char", "completion_matches",
+ "complex", "confirm_recursive_rmdir", "cputime",
+ "crash_dumps_octave_core", "ctranspose", "cumprod",
+ "cumsum", "debug_on_error", "debug_on_interrupt",
+ "debug_on_warning", "default_save_options",
+ "dellistener", "diag", "diff", "disp",
+ "doc_cache_file", "do_string_escapes", "double",
+ "drawnow", "e", "echo_executing_commands", "eps",
+ "eq", "errno", "errno_list", "error", "eval",
+ "evalin", "exec", "exist", "exit", "eye", "false",
+ "fclear", "fclose", "fcntl", "fdisp", "feof",
+ "ferror", "feval", "fflush", "fgetl", "fgets",
+ "fieldnames", "file_in_loadpath", "file_in_path",
+ "filemarker", "filesep", "find_dir_in_path",
+ "fixed_point_format", "fnmatch", "fopen", "fork",
+ "formula", "fprintf", "fputs", "fread", "freport",
+ "frewind", "fscanf", "fseek", "fskipl", "ftell",
+ "functions", "fwrite", "ge", "genpath", "get",
+ "getegid", "getenv", "geteuid", "getgid",
+ "getpgrp", "getpid", "getppid", "getuid", "glob",
+ "gt", "gui_mode", "history_control",
+ "history_file", "history_size",
+ "history_timestamp_format_string", "home",
+ "horzcat", "hypot", "ifelse",
+ "ignore_function_time_stamp", "inferiorto",
+ "info_file", "info_program", "inline", "input",
+ "intmax", "intmin", "ipermute",
+ "is_absolute_filename", "isargout", "isbool",
+ "iscell", "iscellstr", "ischar", "iscomplex",
+ "isempty", "isfield", "isfloat", "isglobal",
+ "ishandle", "isieee", "isindex", "isinteger",
+ "islogical", "ismatrix", "ismethod", "isnull",
+ "isnumeric", "isobject", "isreal",
+ "is_rooted_relative_filename", "issorted",
+ "isstruct", "isvarname", "kbhit", "keyboard",
+ "kill", "lasterr", "lasterror", "lastwarn",
+ "ldivide", "le", "length", "link", "linspace",
+ "logical", "lstat", "lt", "make_absolute_filename",
+ "makeinfo_program", "max_recursion_depth", "merge",
+ "methods", "mfilename", "minus", "mislocked",
+ "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
+ "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
+ "munlock", "nargin", "nargout",
+ "native_float_format", "ndims", "ne", "nfields",
+ "nnz", "norm", "not", "numel", "nzmax",
+ "octave_config_info", "octave_core_file_limit",
+ "octave_core_file_name",
+ "octave_core_file_options", "ones", "or",
+ "output_max_field_width", "output_precision",
+ "page_output_immediately", "page_screen_output",
+ "path", "pathsep", "pause", "pclose", "permute",
+ "pi", "pipe", "plus", "popen", "power",
+ "print_empty_dimensions", "printf",
+ "print_struct_array_contents", "prod",
+ "program_invocation_name", "program_name",
+ "putenv", "puts", "pwd", "quit", "rats", "rdivide",
+ "readdir", "readlink", "read_readline_init_file",
+ "realmax", "realmin", "rehash", "rename",
+ "repelems", "re_read_readline_init_file", "reset",
+ "reshape", "resize", "restoredefaultpath",
+ "rethrow", "rmdir", "rmfield", "rmpath", "rows",
+ "save_header_format_string", "save_precision",
+ "saving_history", "scanf", "set", "setenv",
+ "shell_cmd", "sighup_dumps_octave_core",
+ "sigterm_dumps_octave_core", "silent_functions",
+ "single", "size", "size_equal", "sizemax",
+ "sizeof", "sleep", "source", "sparse_auto_mutate",
+ "split_long_rows", "sprintf", "squeeze", "sscanf",
+ "stat", "stderr", "stdin", "stdout", "strcmp",
+ "strcmpi", "string_fill_char", "strncmp",
+ "strncmpi", "struct", "struct_levels_to_print",
+ "strvcat", "subsasgn", "subsref", "sum", "sumsq",
+ "superiorto", "suppress_verbose_help_message",
+ "symlink", "system", "tic", "tilde_expand",
+ "times", "tmpfile", "tmpnam", "toc", "toupper",
+ "transpose", "true", "typeinfo", "umask", "uminus",
+ "uname", "undo_string_escapes", "unlink", "uplus",
+ "upper", "usage", "usleep", "vec", "vectorize",
+ "vertcat", "waitpid", "warning", "warranty",
+ "whos_line_format", "yes_or_no", "zeros",
+ "inf", "Inf", "nan", "NaN")
+
+ command_kw = ("close", "load", "who", "whos")
+
+ function_kw = (
+ "accumarray", "accumdim", "acosd", "acotd",
+ "acscd", "addtodate", "allchild", "ancestor",
+ "anova", "arch_fit", "arch_rnd", "arch_test",
+ "area", "arma_rnd", "arrayfun", "ascii", "asctime",
+ "asecd", "asind", "assert", "atand",
+ "autoreg_matrix", "autumn", "axes", "axis", "bar",
+ "barh", "bartlett", "bartlett_test", "beep",
+ "betacdf", "betainv", "betapdf", "betarnd",
+ "bicgstab", "bicubic", "binary", "binocdf",
+ "binoinv", "binopdf", "binornd", "bitcmp",
+ "bitget", "bitset", "blackman", "blanks",
+ "blkdiag", "bone", "box", "brighten", "calendar",
+ "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
+ "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
+ "chisquare_test_homogeneity",
+ "chisquare_test_independence", "circshift", "cla",
+ "clabel", "clf", "clock", "cloglog", "closereq",
+ "colon", "colorbar", "colormap", "colperm",
+ "comet", "common_size", "commutation_matrix",
+ "compan", "compare_versions", "compass",
+ "computer", "cond", "condest", "contour",
+ "contourc", "contourf", "contrast", "conv",
+ "convhull", "cool", "copper", "copyfile", "cor",
+ "corrcoef", "cor_test", "cosd", "cotd", "cov",
+ "cplxpair", "cross", "cscd", "cstrcat", "csvread",
+ "csvwrite", "ctime", "cumtrapz", "curl", "cut",
+ "cylinder", "date", "datenum", "datestr",
+ "datetick", "datevec", "dblquad", "deal",
+ "deblank", "deconv", "delaunay", "delaunayn",
+ "delete", "demo", "detrend", "diffpara", "diffuse",
+ "dir", "discrete_cdf", "discrete_inv",
+ "discrete_pdf", "discrete_rnd", "display",
+ "divergence", "dlmwrite", "dos", "dsearch",
+ "dsearchn", "duplication_matrix", "durbinlevinson",
+ "ellipsoid", "empirical_cdf", "empirical_inv",
+ "empirical_pdf", "empirical_rnd", "eomday",
+ "errorbar", "etime", "etreeplot", "example",
+ "expcdf", "expinv", "expm", "exppdf", "exprnd",
+ "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
+ "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
+ "factorial", "fail", "fcdf", "feather", "fftconv",
+ "fftfilt", "fftshift", "figure", "fileattrib",
+ "fileparts", "fill", "findall", "findobj",
+ "findstr", "finv", "flag", "flipdim", "fliplr",
+ "flipud", "fpdf", "fplot", "fractdiff", "freqz",
+ "freqz_plot", "frnd", "fsolve",
+ "f_test_regression", "ftp", "fullfile", "fzero",
+ "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
+ "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
+ "geoinv", "geopdf", "geornd", "getfield", "ginput",
+ "glpk", "gls", "gplot", "gradient",
+ "graphics_toolkit", "gray", "grid", "griddata",
+ "griddatan", "gtext", "gunzip", "gzip", "hadamard",
+ "hamming", "hankel", "hanning", "hggroup",
+ "hidden", "hilb", "hist", "histc", "hold", "hot",
+ "hotelling_test", "housh", "hsv", "hurst",
+ "hygecdf", "hygeinv", "hygepdf", "hygernd",
+ "idivide", "ifftshift", "image", "imagesc",
+ "imfinfo", "imread", "imshow", "imwrite", "index",
+ "info", "inpolygon", "inputname", "interpft",
+ "interpn", "intersect", "invhilb", "iqr", "isa",
+ "isdefinite", "isdir", "is_duplicate_entry",
+ "isequal", "isequalwithequalnans", "isfigure",
+ "ishermitian", "ishghandle", "is_leap_year",
+ "isletter", "ismac", "ismember", "ispc", "isprime",
+ "isprop", "isscalar", "issquare", "isstrprop",
+ "issymmetric", "isunix", "is_valid_file_id",
+ "isvector", "jet", "kendall",
+ "kolmogorov_smirnov_cdf",
+ "kolmogorov_smirnov_test", "kruskal_wallis_test",
+ "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
+ "laplace_pdf", "laplace_rnd", "legend", "legendre",
+ "license", "line", "linkprop", "list_primes",
+ "loadaudio", "loadobj", "logistic_cdf",
+ "logistic_inv", "logistic_pdf", "logistic_rnd",
+ "logit", "loglog", "loglogerr", "logm", "logncdf",
+ "logninv", "lognpdf", "lognrnd", "logspace",
+ "lookfor", "ls_command", "lsqnonneg", "magic",
+ "mahalanobis", "manova", "matlabroot",
+ "mcnemar_test", "mean", "meansq", "median", "menu",
+ "mesh", "meshc", "meshgrid", "meshz", "mexext",
+ "mget", "mkpp", "mode", "moment", "movefile",
+ "mpoles", "mput", "namelengthmax", "nargchk",
+ "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
+ "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
+ "nonzeros", "normcdf", "normest", "norminv",
+ "normpdf", "normrnd", "now", "nthroot", "null",
+ "ocean", "ols", "onenormest", "optimget",
+ "optimset", "orderfields", "orient", "orth",
+ "pack", "pareto", "parseparams", "pascal", "patch",
+ "pathdef", "pcg", "pchip", "pcolor", "pcr",
+ "peaks", "periodogram", "perl", "perms", "pie",
+ "pink", "planerot", "playaudio", "plot",
+ "plotmatrix", "plotyy", "poisscdf", "poissinv",
+ "poisspdf", "poissrnd", "polar", "poly",
+ "polyaffine", "polyarea", "polyderiv", "polyfit",
+ "polygcd", "polyint", "polyout", "polyreduce",
+ "polyval", "polyvalm", "postpad", "powerset",
+ "ppder", "ppint", "ppjumps", "ppplot", "ppval",
+ "pqpnonneg", "prepad", "primes", "print",
+ "print_usage", "prism", "probit", "qp", "qqplot",
+ "quadcc", "quadgk", "quadl", "quadv", "quiver",
+ "qzhess", "rainbow", "randi", "range", "rank",
+ "ranks", "rat", "reallog", "realpow", "realsqrt",
+ "record", "rectangle_lw", "rectangle_sw",
+ "rectint", "refresh", "refreshdata",
+ "regexptranslate", "repmat", "residue", "ribbon",
+ "rindex", "roots", "rose", "rosser", "rotdim",
+ "rref", "run", "run_count", "rundemos", "run_test",
+ "runtests", "saveas", "saveaudio", "saveobj",
+ "savepath", "scatter", "secd", "semilogx",
+ "semilogxerr", "semilogy", "semilogyerr",
+ "setaudio", "setdiff", "setfield", "setxor",
+ "shading", "shift", "shiftdim", "sign_test",
+ "sinc", "sind", "sinetone", "sinewave", "skewness",
+ "slice", "sombrero", "sortrows", "spaugment",
+ "spconvert", "spdiags", "spearman", "spectral_adf",
+ "spectral_xdf", "specular", "speed", "spencer",
+ "speye", "spfun", "sphere", "spinmap", "spline",
+ "spones", "sprand", "sprandn", "sprandsym",
+ "spring", "spstats", "spy", "sqp", "stairs",
+ "statistics", "std", "stdnormal_cdf",
+ "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
+ "stem", "stft", "strcat", "strchr", "strjust",
+ "strmatch", "strread", "strsplit", "strtok",
+ "strtrim", "strtrunc", "structfun", "studentize",
+ "subplot", "subsindex", "subspace", "substr",
+ "substruct", "summer", "surf", "surface", "surfc",
+ "surfl", "surfnorm", "svds", "swapbytes",
+ "sylvester_matrix", "symvar", "synthesis", "table",
+ "tand", "tar", "tcdf", "tempdir", "tempname",
+ "test", "text", "textread", "textscan", "tinv",
+ "title", "toeplitz", "tpdf", "trace", "trapz",
+ "treelayout", "treeplot", "triangle_lw",
+ "triangle_sw", "tril", "trimesh", "triplequad",
+ "triplot", "trisurf", "triu", "trnd", "tsearchn",
+ "t_test", "t_test_regression", "type", "unidcdf",
+ "unidinv", "unidpdf", "unidrnd", "unifcdf",
+ "unifinv", "unifpdf", "unifrnd", "union", "unique",
+ "unix", "unmkpp", "unpack", "untabify", "untar",
+ "unwrap", "unzip", "u_test", "validatestring",
+ "vander", "var", "var_test", "vech", "ver",
+ "version", "view", "voronoi", "voronoin",
+ "waitforbuttonpress", "wavread", "wavwrite",
+ "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
+ "welch_test", "what", "white", "whitebg",
+ "wienrnd", "wilcoxon_test", "wilkinson", "winter",
+ "xlabel", "xlim", "ylabel", "yulewalker", "zip",
+ "zlabel", "z_test")
+
+ loadable_kw = (
+ "airy", "amd", "balance", "besselh", "besseli",
+ "besselj", "besselk", "bessely", "bitpack",
+ "bsxfun", "builtin", "ccolamd", "cellfun",
+ "cellslices", "chol", "choldelete", "cholinsert",
+ "cholinv", "cholshift", "cholupdate", "colamd",
+ "colloc", "convhulln", "convn", "csymamd",
+ "cummax", "cummin", "daspk", "daspk_options",
+ "dasrt", "dasrt_options", "dassl", "dassl_options",
+ "dbclear", "dbdown", "dbstack", "dbstatus",
+ "dbstop", "dbtype", "dbup", "dbwhere", "det",
+ "dlmread", "dmperm", "dot", "eig", "eigs",
+ "endgrent", "endpwent", "etree", "fft", "fftn",
+ "fftw", "filter", "find", "full", "gcd",
+ "getgrent", "getgrgid", "getgrnam", "getpwent",
+ "getpwnam", "getpwuid", "getrusage", "givens",
+ "gmtime", "gnuplot_binary", "hess", "ifft",
+ "ifftn", "inv", "isdebugmode", "issparse", "kron",
+ "localtime", "lookup", "lsode", "lsode_options",
+ "lu", "luinc", "luupdate", "matrix_type", "max",
+ "min", "mktime", "pinv", "qr", "qrdelete",
+ "qrinsert", "qrshift", "qrupdate", "quad",
+ "quad_options", "qz", "rand", "rande", "randg",
+ "randn", "randp", "randperm", "rcond", "regexp",
+ "regexpi", "regexprep", "schur", "setgrent",
+ "setpwent", "sort", "spalloc", "sparse", "spparms",
+ "sprank", "sqrtm", "strfind", "strftime",
+ "strptime", "strrep", "svd", "svd_driver", "syl",
+ "symamd", "symbfact", "symrcm", "time", "tsearch",
+ "typecast", "urlread", "urlwrite")
+
+ mapping_kw = (
+ "abs", "acos", "acosh", "acot", "acoth", "acsc",
+ "acsch", "angle", "arg", "asec", "asech", "asin",
+ "asinh", "atan", "atanh", "beta", "betainc",
+ "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
+ "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
+ "erfcx", "erfinv", "exp", "finite", "fix", "floor",
+ "fmod", "gamma", "gammainc", "gammaln", "imag",
+ "isalnum", "isalpha", "isascii", "iscntrl",
+ "isdigit", "isfinite", "isgraph", "isinf",
+ "islower", "isna", "isnan", "isprint", "ispunct",
+ "isspace", "isupper", "isxdigit", "lcm", "lgamma",
+ "log", "lower", "mod", "real", "rem", "round",
+ "roundb", "sec", "sech", "sign", "sin", "sinh",
+ "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
+
+ builtin_consts = (
+ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
+ "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
+ "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
+ "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
+ "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
+ "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
+ "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
+ "WSTOPSIG", "WTERMSIG", "WUNTRACED")
+
+ tokens = {
+ 'root': [
(r'%\{\s*\n', Comment.Multiline, 'percentblockcomment'),
(r'#\{\s*\n', Comment.Multiline, 'hashblockcomment'),
- (r'[%#].*$', Comment),
+ (r'[%#].*$', Comment),
(r'^\s*function\b', Keyword, 'deffunc'),
-
- # from 'iskeyword' on hg changeset 8cc154f45e37
- (words((
+
+ # from 'iskeyword' on hg changeset 8cc154f45e37
+ (words((
'__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef',
'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch',
'end_unwind_protect', 'endclassdef', 'endevents', 'endfor',
@@ -3160,42 +3160,42 @@ class OctaveLexer(RegexLexer):
'methods', 'otherwise', 'persistent', 'properties', 'return',
'set', 'static', 'switch', 'try', 'until', 'unwind_protect',
'unwind_protect_cleanup', 'while'), suffix=r'\b'),
- Keyword),
-
- (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
- suffix=r'\b'), Name.Builtin),
-
- (words(builtin_consts, suffix=r'\b'), Name.Constant),
-
- # operators in Octave but not Matlab:
- (r'-=|!=|!|/=|--', Operator),
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators in Octave but not Matlab requiring escape for re:
- (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-
- # punctuation:
- (r'[\[\](){}:@.,]', Punctuation),
- (r'=|:|;', Punctuation),
-
- (r'"[^"]*"', String),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
- (r'(?<![\w)\].])\'', String, 'string'),
-
- (r'[a-zA-Z_]\w*', Name),
+ Keyword),
+
+ (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
+ suffix=r'\b'), Name.Builtin),
+
+ (words(builtin_consts, suffix=r'\b'), Name.Constant),
+
+ # operators in Octave but not Matlab:
+ (r'-=|!=|!|/=|--', Operator),
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators in Octave but not Matlab requiring escape for re:
+ (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+
+ # punctuation:
+ (r'[\[\](){}:@.,]', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ (r'"[^"]*"', String),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+ (r'(?<![\w)\].])\'', String, 'string'),
+
+ (r'[a-zA-Z_]\w*', Name),
(r'\s+', Text),
- (r'.', Text),
- ],
+ (r'.', Text),
+ ],
'percentblockcomment': [
(r'^\s*%\}', Comment.Multiline, '#pop'),
(r'^.*\n', Comment.Multiline),
@@ -3206,89 +3206,89 @@ class OctaveLexer(RegexLexer):
(r'^.*\n', Comment.Multiline),
(r'.', Comment.Multiline),
],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'deffunc': [
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ ],
+ 'deffunc': [
(r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
(r'(\s*)([a-zA-Z_]\w*)',
bygroups(Whitespace, Name.Function), '#pop'),
- ],
- }
-
+ ],
+ }
+
def analyse_text(text):
"""Octave is quite hard to spot, and it looks like Matlab as well."""
return 0
-
-
-class ScilabLexer(RegexLexer):
- """
- For Scilab source code.
-
- .. versionadded:: 1.5
- """
- name = 'Scilab'
- aliases = ['scilab']
- filenames = ['*.sci', '*.sce', '*.tst']
- mimetypes = ['text/scilab']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
+
+
+class ScilabLexer(RegexLexer):
+ """
+ For Scilab source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Scilab'
+ aliases = ['scilab']
+ filenames = ['*.sci', '*.sce', '*.tst']
+ mimetypes = ['text/scilab']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
(r'^\s*function\b', Keyword, 'deffunc'),
-
- (words((
- '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
- 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
- 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
- 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
- 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
- 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
- Keyword),
-
- (words(_scilab_builtins.functions_kw +
- _scilab_builtins.commands_kw +
- _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
-
- (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
-
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
- # punctuation:
+
+ (words((
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
+ 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
+ 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
+ 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
+ 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
+ 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ Keyword),
+
+ (words(_scilab_builtins.functions_kw +
+ _scilab_builtins.commands_kw +
+ _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
+
+ (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
+
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+ # punctuation:
(r'[\[\](){}@.,=:;]+', Punctuation),
-
- (r'"[^"]*"', String),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w)\].])\'+', Operator),
- (r'(?<![\w)\].])\'', String, 'string'),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- (r'[a-zA-Z_]\w*', Name),
+
+ (r'"[^"]*"', String),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w)\].])\'+', Operator),
+ (r'(?<![\w)\].])\'', String, 'string'),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ (r'[a-zA-Z_]\w*', Name),
(r'\s+', Whitespace),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- (r'.', String, '#pop'),
- ],
- 'deffunc': [
+ (r'.', Text),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ (r'.', String, '#pop'),
+ ],
+ 'deffunc': [
(r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Whitespace, Text, Whitespace, Punctuation,
- Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Whitespace), '#pop'),
- # function with no args
- (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
- ],
- }
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
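For reference, a minimal sketch of how the transpose-vs-string quote rules above behave in practice (assuming a stock Pygments install; the Octave snippet itself is made up and is not part of this patch):

from pygments.lexers.matlab import OctaveLexer

lexer = OctaveLexer()
# After an identifier, ' should lex as the transpose operator; at the start
# of a literal it should open a single-quoted string instead.
for tok_type, value in lexer.get_tokens("y = x'; s = 'hello';"):
    if value == "'":
        print(tok_type, repr(value))
# prints roughly: Token.Operator "'" followed by Token.Literal.String "'"
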
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ml.py b/contrib/python/Pygments/py3/pygments/lexers/ml.py
index 60bd8b9dbc..d8ca014ba6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ml.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ml.py
@@ -1,770 +1,770 @@
-"""
- pygments.lexers.ml
- ~~~~~~~~~~~~~~~~~~
-
- Lexers for ML family languages.
-
+"""
+ pygments.lexers.ml
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for ML family languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer', 'ReasonLexer', 'FStarLexer']
-
-
-class SMLLexer(RegexLexer):
- """
- For the Standard ML language.
-
- .. versionadded:: 1.5
- """
-
- name = 'Standard ML'
- aliases = ['sml']
- filenames = ['*.sml', '*.sig', '*.fun']
- mimetypes = ['text/x-standardml', 'application/x-standardml']
-
+
+
+class SMLLexer(RegexLexer):
+ """
+ For the Standard ML language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Standard ML'
+ aliases = ['sml']
+ filenames = ['*.sml', '*.sig', '*.fun']
+ mimetypes = ['text/x-standardml', 'application/x-standardml']
+
alphanumid_reserved = {
- # Core
- 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
- 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
- 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
- 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
- # Modules
- 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
- 'struct', 'structure', 'where',
+ # Core
+ 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
+ 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
+ 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
+ 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
+ # Modules
+ 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
+ 'struct', 'structure', 'where',
}
-
+
symbolicid_reserved = {
- # Core
+ # Core
':', r'\|', '=', '=>', '->', '#',
- # Modules
- ':>',
+ # Modules
+ ':>',
}
-
+
nonid_reserved = {'(', ')', '[', ']', '{', '}', ',', ';', '...', '_'}
-
- alphanumid_re = r"[a-zA-Z][\w']*"
- symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
-
- # A character constant is a sequence of the form #s, where s is a string
- # constant denoting a string of size one character. This setup just parses
- # the entire string as either a String.Double or a String.Char (depending
- # on the argument), even if the String.Char is an erroneous
- # multiple-character string.
- def stringy(whatkind):
- return [
- (r'[^"\\]', whatkind),
- (r'\\[\\"abtnvfr]', String.Escape),
- # Control-character notation is used for codes < 32,
- # where \^@ == \000
- (r'\\\^[\x40-\x5e]', String.Escape),
- # Docs say 'decimal digits'
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\\s+\\', String.Interpol),
- (r'"', whatkind, '#pop'),
- ]
-
- # Callbacks for distinguishing tokens and reserved words
- def long_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved:
- token = Error
- else:
- token = Name.Namespace
- yield match.start(1), token, match.group(1)
- yield match.start(2), Punctuation, match.group(2)
-
- def end_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved:
- token = Error
- elif match.group(1) in self.symbolicid_reserved:
- token = Error
- else:
- token = Name
- yield match.start(1), token, match.group(1)
-
- def id_callback(self, match):
- str = match.group(1)
- if str in self.alphanumid_reserved:
- token = Keyword.Reserved
- elif str in self.symbolicid_reserved:
- token = Punctuation
- else:
- token = Name
- yield match.start(1), token, str
-
- tokens = {
- # Whitespace and comments are (almost) everywhere
- 'whitespace': [
- (r'\s+', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
-
- 'delimiters': [
- # This lexer treats these delimiters specially:
- # Delimiters define scopes, and the scope is how the meaning of
- # the `|' is resolved - is it a case/handle expression, or function
- # definition by cases? (This is not how the Definition works, but
- # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
- (r'\(|\[|\{', Punctuation, 'main'),
- (r'\)|\]|\}', Punctuation, '#pop'),
- (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
- (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
- (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
- ],
-
- 'core': [
- # Punctuation that doesn't overlap symbolic identifiers
- (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
- Punctuation),
-
- # Special constants: strings, floats, numbers in decimal and hex
- (r'#"', String.Char, 'char'),
- (r'"', String.Double, 'string'),
- (r'~?0x[0-9a-fA-F]+', Number.Hex),
- (r'0wx[0-9a-fA-F]+', Number.Hex),
- (r'0w\d+', Number.Integer),
- (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
- (r'~?\d+\.\d+', Number.Float),
- (r'~?\d+[eE]~?\d+', Number.Float),
- (r'~?\d+', Number.Integer),
-
- # Labels
- (r'#\s*[1-9][0-9]*', Name.Label),
- (r'#\s*(%s)' % alphanumid_re, Name.Label),
- (r'#\s+(%s)' % symbolicid_re, Name.Label),
- # Some reserved words trigger a special, local lexer state change
- (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
+
+ alphanumid_re = r"[a-zA-Z][\w']*"
+ symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
+
+ # A character constant is a sequence of the form #s, where s is a string
+ # constant denoting a string of size one character. This setup just parses
+ # the entire string as either a String.Double or a String.Char (depending
+ # on the argument), even if the String.Char is an erroneous
+ # multiple-character string.
+ def stringy(whatkind):
+ return [
+ (r'[^"\\]', whatkind),
+ (r'\\[\\"abtnvfr]', String.Escape),
+ # Control-character notation is used for codes < 32,
+ # where \^@ == \000
+ (r'\\\^[\x40-\x5e]', String.Escape),
+ # Docs say 'decimal digits'
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\\s+\\', String.Interpol),
+ (r'"', whatkind, '#pop'),
+ ]
+
+ # Callbacks for distinguishing tokens and reserved words
+ def long_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ else:
+ token = Name.Namespace
+ yield match.start(1), token, match.group(1)
+ yield match.start(2), Punctuation, match.group(2)
+
+ def end_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ elif match.group(1) in self.symbolicid_reserved:
+ token = Error
+ else:
+ token = Name
+ yield match.start(1), token, match.group(1)
+
+ def id_callback(self, match):
+ str = match.group(1)
+ if str in self.alphanumid_reserved:
+ token = Keyword.Reserved
+ elif str in self.symbolicid_reserved:
+ token = Punctuation
+ else:
+ token = Name
+ yield match.start(1), token, str
+
+ tokens = {
+ # Whitespace and comments are (almost) everywhere
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+
+ 'delimiters': [
+ # This lexer treats these delimiters specially:
+ # Delimiters define scopes, and the scope is how the meaning of
+ # the `|' is resolved - is it a case/handle expression, or function
+ # definition by cases? (This is not how the Definition works, but
+ # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
+ (r'\(|\[|\{', Punctuation, 'main'),
+ (r'\)|\]|\}', Punctuation, '#pop'),
+ (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
+ (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
+ (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
+ ],
+
+ 'core': [
+ # Punctuation that doesn't overlap symbolic identifiers
+ (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
+ Punctuation),
+
+ # Special constants: strings, floats, numbers in decimal and hex
+ (r'#"', String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ (r'~?0x[0-9a-fA-F]+', Number.Hex),
+ (r'0wx[0-9a-fA-F]+', Number.Hex),
+ (r'0w\d+', Number.Integer),
+ (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+\.\d+', Number.Float),
+ (r'~?\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+', Number.Integer),
+
+ # Labels
+ (r'#\s*[1-9][0-9]*', Name.Label),
+ (r'#\s*(%s)' % alphanumid_re, Name.Label),
+ (r'#\s+(%s)' % symbolicid_re, Name.Label),
+ # Some reserved words trigger a special, local lexer state change
+ (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
(r'\b(exception)\b(?!\')', Keyword.Reserved, 'ename'),
- (r'\b(functor|include|open|signature|structure)\b(?!\')',
- Keyword.Reserved, 'sname'),
- (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
-
- # Regular identifiers, long and otherwise
- (r'\'[\w\']*', Name.Decorator),
- (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
- (r'(%s)' % alphanumid_re, id_callback),
- (r'(%s)' % symbolicid_re, id_callback),
- ],
- 'dotted': [
- (r'(%s)(\.)' % alphanumid_re, long_id_callback),
- (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
- (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
- (r'\s+', Error),
- (r'\S+', Error),
- ],
-
-
- # Main parser (prevents errors in files that have scoping errors)
- 'root': [
- default('main')
- ],
-
- # In this scope, I expect '|' to not be followed by a function name,
- # and I expect 'and' to be followed by a binding site
- 'main': [
- include('whitespace'),
-
- # Special behavior of val/and/fun
- (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
- (r'\b(fun)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main-fun', 'fname')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # In this scope, I expect '|' and 'and' to be followed by a function
- 'main-fun': [
- include('whitespace'),
-
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- # Special behavior of val/and/fun
- (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
- (r'\b(val)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main', 'vname')),
-
- # Special behavior of '|' and '|'-manipulating keywords
- (r'\|', Punctuation, 'fname'),
- (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Character and string parsers
- 'char': stringy(String.Char),
- 'string': stringy(String.Double),
-
- 'breakout': [
- (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
- ],
-
- # Dealing with what comes after module system keywords
- 'sname': [
- include('whitespace'),
- include('breakout'),
-
- (r'(%s)' % alphanumid_re, Name.Namespace),
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
- 'fname': [
- include('whitespace'),
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
-
- # Ignore interesting function declarations like "fun (x + y) = ..."
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'val' (or 'and') keyword
- 'vname': [
- include('whitespace'),
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
-
- # Ignore interesting patterns like 'val (x, y)'
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'type' (or 'and') keyword
- 'tname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # A type binding includes most identifiers
- 'typbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Dealing with what comes after the 'datatype' (or 'and') keyword
- 'dname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[\w\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'(=)(\s*)(datatype)',
- bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
- (r'=(?!%s)' % symbolicid_re, Punctuation,
- ('#pop', 'datbind', 'datcon')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # common case - A | B | C of int
- 'datbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
- (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- (r'(\|)(\s*)(%s)' % alphanumid_re,
- bygroups(Punctuation, Text, Name.Class)),
- (r'(\|)(\s+)(%s)' % symbolicid_re,
- bygroups(Punctuation, Text, Name.Class)),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Dealing with what comes after an exception
- 'ename': [
- include('whitespace'),
-
+ (r'\b(functor|include|open|signature|structure)\b(?!\')',
+ Keyword.Reserved, 'sname'),
+ (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
+
+ # Regular identifiers, long and otherwise
+ (r'\'[\w\']*', Name.Decorator),
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
+ (r'(%s)' % alphanumid_re, id_callback),
+ (r'(%s)' % symbolicid_re, id_callback),
+ ],
+ 'dotted': [
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback),
+ (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
+ (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
+ (r'\s+', Error),
+ (r'\S+', Error),
+ ],
+
+
+ # Main parser (prevents errors in files that have scoping errors)
+ 'root': [
+ default('main')
+ ],
+
+ # In this scope, I expect '|' to not be followed by a function name,
+ # and I expect 'and' to be followed by a binding site
+ 'main': [
+ include('whitespace'),
+
+ # Special behavior of val/and/fun
+ (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
+ (r'\b(fun)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main-fun', 'fname')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # In this scope, I expect '|' and 'and' to be followed by a function
+ 'main-fun': [
+ include('whitespace'),
+
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ # Special behavior of val/and/fun
+ (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
+ (r'\b(val)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main', 'vname')),
+
+ # Special behavior of '|' and '|'-manipulating keywords
+ (r'\|', Punctuation, 'fname'),
+ (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Character and string parsers
+ 'char': stringy(String.Char),
+ 'string': stringy(String.Double),
+
+ 'breakout': [
+ (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
+ ],
+
+ # Dealing with what comes after module system keywords
+ 'sname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'(%s)' % alphanumid_re, Name.Namespace),
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
+ 'fname': [
+ include('whitespace'),
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
+
+ # Ignore interesting function declarations like "fun (x + y) = ..."
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'val' (or 'and') keyword
+ 'vname': [
+ include('whitespace'),
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
+
+ # Ignore interesting patterns like 'val (x, y)'
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'type' (or 'and') keyword
+ 'tname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # A type binding includes most identifiers
+ 'typbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Dealing with what comes after the 'datatype' (or 'and') keyword
+ 'dname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'(=)(\s*)(datatype)',
+ bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation,
+ ('#pop', 'datbind', 'datcon')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # common case - A | B | C of int
+ 'datbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
+ (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
+
+ (r'(\|)(\s*)(%s)' % alphanumid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+ (r'(\|)(\s+)(%s)' % symbolicid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Dealing with what comes after an exception
+ 'ename': [
+ include('whitespace'),
+
(r'(and\b)(\s+)(%s)' % alphanumid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
(r'(and\b)(\s*)(%s)' % symbolicid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
(r'(%s)|(%s)' % (alphanumid_re, symbolicid_re), Name.Class),
-
+
default('#pop'),
- ],
-
- 'datcon': [
- include('whitespace'),
- (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Series of type variables
- 'tyvarseq': [
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- (r'\'[\w\']*', Name.Decorator),
- (alphanumid_re, Name),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (symbolicid_re, Name),
- ],
-
- 'comment': [
- (r'[^(*)]', Comment.Multiline),
- (r'\(\*', Comment.Multiline, '#push'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[(*)]', Comment.Multiline),
- ],
- }
-
-
-class OcamlLexer(RegexLexer):
- """
- For the OCaml language.
-
- .. versionadded:: 0.7
- """
-
- name = 'OCaml'
- aliases = ['ocaml']
- filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
- mimetypes = ['text/x-ocaml']
-
- keywords = (
- 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- 'downto', 'else', 'end', 'exception', 'external', 'false',
- 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
- )
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
- r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
- '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
- )
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'\(\*(?![)])', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- default('#pop'),
- ],
- }
-
-
-class OpaLexer(RegexLexer):
- """
- Lexer for the Opa language (http://opalang.org).
-
- .. versionadded:: 1.5
- """
-
- name = 'Opa'
- aliases = ['opa']
- filenames = ['*.opa']
- mimetypes = ['text/x-opa']
-
- # most of these aren't strictly keywords
- # but if you color only real keywords, you might just
- # as well not color anything
- keywords = (
- 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
- 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
- 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
- 'type', 'val', 'with', 'xml_parser',
- )
-
- # matches both stuff and `stuff`
- ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
-
- op_re = r'[.=\-<>,@~%/+?*&^!]'
- punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
- # because they are also used for inserts
-
- tokens = {
- # copied from the caml lexer, should be adapted
- 'escape-sequence': [
- (r'\\[\\"\'ntr}]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
-
- # factorizing these rules, because they are inserted many times
- 'comments': [
- (r'/\*', Comment, 'nested-comment'),
- (r'//.*?$', Comment),
- ],
- 'comments-and-spaces': [
- include('comments'),
- (r'\s+', Text),
- ],
-
- 'root': [
- include('comments-and-spaces'),
- # keywords
- (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
- # directives
- # we could parse the actual set of directives instead of anything
- # starting with @, but this is troublesome
- # because it needs to be adjusted all the time
- # and assuming we parse only sources that compile, it is useless
- (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
-
- # number literals
- (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+[eE][+\-]?\d+', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'\d+', Number.Integer),
- # color literals
- (r'#[\da-fA-F]{3,6}', Number.Integer),
-
- # string literals
- (r'"', String.Double, 'string'),
- # char literal, should be checked because this is the regexp from
- # the caml lexer
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
- String.Char),
-
- # this is meant to deal with embedded exprs in strings
- # every time we find a '}' we pop a state so that if we were
- # inside a string, we are back in the string state
- # as a consequence, we must also push a state every time we find a
- # '{' or else we will have errors when parsing {} for instance
- (r'\{', Operator, '#push'),
- (r'\}', Operator, '#pop'),
-
- # html literals
- # this is much stricter than the actual parser,
- # since a<b would not be parsed as html
- # but then again, the parser is way too lax, and we can't hope
- # to have something as tolerant
- (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
-
- # db path
- # matching the '[_]' in '/a[_]' because it is a part
- # of the syntax of the db path definition
- # unfortunately, I don't know how to match the ']' in
- # /a[1], so this is somewhat inconsistent
- (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
- # putting the same color on <- as on db path, since
- # it can be used only to mean Db.write
- (r'<-(?!'+op_re+r')', Name.Variable),
-
- # 'modules'
- # although modules are not distinguished by their names as in caml
- # the standard library seems to follow the convention that only
- # module names are capitalized
- (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
-
- # operators
- # = has a special role because this is the only
- # way to syntactically distinguish binding constructions
- # unfortunately, this colors the equal in {x=2} too
- (r'=(?!'+op_re+r')', Keyword),
- (r'(%s)+' % op_re, Operator),
- (r'(%s)+' % punc_re, Operator),
-
- # coercions
- (r':', Operator, 'type'),
- # type variables
- # we need this rule because we don't parse specially type
- # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
- ("'"+ident_re, Keyword.Type),
-
- # id literal, #something, or #{expr}
- (r'#'+ident_re, String.Single),
- (r'#(?=\{)', String.Single),
-
- # identifiers
- # this avoids coloring the '2' in 'a2' as an integer
- (ident_re, Text),
-
- # default, not sure if that is needed or not
- # (r'.', Text),
- ],
-
- # it is quite painful to have to parse types to know where they end
- # this is the general rule for a type
- # a type is either:
- # * -> ty
- # * type-with-slash
- # * type-with-slash -> ty
- # * type-with-slash (, type-with-slash)+ -> ty
- #
- # the code is pretty funky in here, but this code would roughly
- # translate in caml to:
- # let rec type stream =
- # match stream with
- # | [< "->"; stream >] -> type stream
- # | [< ""; stream >] ->
- # type_with_slash stream
- # type_lhs_1 stream;
- # and type_1 stream = ...
- 'type': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type),
- default(('#pop', 'type-lhs-1', 'type-with-slash')),
- ],
-
- # parses all the atomic or closed constructions in the syntax of type
- # expressions: record types, tuple types, type constructors, basic type
- # and type variables
- 'type-1': [
- include('comments-and-spaces'),
- (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (r'~?\{', Keyword.Type, ('#pop', 'type-record')),
- (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (ident_re, Keyword.Type, '#pop'),
- ("'"+ident_re, Keyword.Type),
- # this case is not in the syntax but sometimes
- # we think we are parsing types when in fact we are parsing
- # some css, so we just pop the states until we get back into
- # the root state
- default('#pop'),
- ],
-
- # type-with-slash is either:
- # * type-1
- # * type-1 (/ type-1)+
- 'type-with-slash': [
- include('comments-and-spaces'),
- default(('#pop', 'slash-type-1', 'type-1')),
- ],
- 'slash-type-1': [
- include('comments-and-spaces'),
- ('/', Keyword.Type, ('#pop', 'type-1')),
- # same remark as above
- default('#pop'),
- ],
-
- # we go in this state after having parsed a type-with-slash
- # while trying to parse a type
- # and at this point we must determine if we are parsing an arrow
- # type (in which case we must continue parsing) or not (in which
- # case we stop)
- 'type-lhs-1': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
- default('#pop'),
- ],
- 'type-arrow': [
- include('comments-and-spaces'),
- # the lookahead here allows parsing f(x : int, y : float -> truc)
- # correctly
- (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- # same remark as above
- default('#pop'),
- ],
-
- # no need to do precise parsing for tuples and records
- # because they are closed constructions, so we can simply
- # find the closing delimiter
- # note that this function would not work if the source
- # contained identifiers like `{)` (although it could be patched
- # to support it)
- 'type-tuple': [
- include('comments-and-spaces'),
- (r'[^()/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\(', Keyword.Type, '#push'),
- (r'\)', Keyword.Type, '#pop'),
- ],
- 'type-record': [
- include('comments-and-spaces'),
- (r'[^{}/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\{', Keyword.Type, '#push'),
- (r'\}', Keyword.Type, '#pop'),
- ],
-
- # 'type-tuple': [
- # include('comments-and-spaces'),
- # (r'\)', Keyword.Type, '#pop'),
- # default(('#pop', 'type-tuple-1', 'type-1')),
- # ],
- # 'type-tuple-1': [
- # include('comments-and-spaces'),
- # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
- # (r',', Keyword.Type, 'type-1'),
- # ],
- # 'type-record':[
- # include('comments-and-spaces'),
- # (r'\}', Keyword.Type, '#pop'),
- # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
- # ],
- # 'type-record-field-expr': [
- #
- # ],
-
- 'nested-comment': [
- (r'[^/*]+', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[/*]', Comment),
- ],
-
- # the copy pasting between string and single-string
- # is kinda sad. Is there a way to avoid that??
- 'string': [
- (r'[^\\"{]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'\{', Operator, 'root'),
- include('escape-sequence'),
- ],
- 'single-string': [
- (r'[^\\\'{]+', String.Double),
- (r'\'', String.Double, '#pop'),
- (r'\{', Operator, 'root'),
- include('escape-sequence'),
- ],
-
- # all the html stuff
- # can't really reuse some existing html parser
- # because we must be able to parse embedded expressions
-
- # we are in this state after someone parsed the '<' that
- # started the html literal
- 'html-open-tag': [
- (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- # we are in this state after someone parsed the '</' that
- # started the end of the closing tag
- 'html-end-tag': [
- # this is a star, because </> is allowed
- (r'[\w\-:]*>', String.Single, '#pop'),
- ],
-
- # we are in this state after having parsed '<ident(:ident)?'
- # we thus parse a possibly empty list of attributes
- 'html-attr': [
- (r'\s+', Text),
- (r'[\w\-:]+=', String.Single, 'html-attr-value'),
- (r'/>', String.Single, '#pop'),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- 'html-attr-value': [
- (r"'", String.Single, ('#pop', 'single-string')),
- (r'"', String.Single, ('#pop', 'string')),
- (r'#'+ident_re, String.Single, '#pop'),
- (r'#(?=\{)', String.Single, ('#pop', 'root')),
- (r'[^"\'{`=<>]+', String.Single, '#pop'),
- (r'\{', Operator, ('#pop', 'root')), # this is a tail call!
- ],
-
- # we should probably deal with '\' escapes here
- 'html-content': [
- (r'<!--', Comment, 'html-comment'),
- (r'</', String.Single, ('#pop', 'html-end-tag')),
- (r'<', String.Single, 'html-open-tag'),
- (r'\{', Operator, 'root'),
- (r'[^<{]+', String.Single),
- ],
-
- 'html-comment': [
- (r'-->', Comment, '#pop'),
- (r'[^\-]+|-', Comment),
- ],
- }
+ ],
+
+ 'datcon': [
+ include('whitespace'),
+ (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Series of type variables
+ 'tyvarseq': [
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ (r'\'[\w\']*', Name.Decorator),
+ (alphanumid_re, Name),
+ (r',', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ (symbolicid_re, Name),
+ ],
+
+ 'comment': [
+ (r'[^(*)]', Comment.Multiline),
+ (r'\(\*', Comment.Multiline, '#push'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[(*)]', Comment.Multiline),
+ ],
+ }
+
+
+class OcamlLexer(RegexLexer):
+ """
+ For the OCaml language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'OCaml'
+ aliases = ['ocaml']
+ filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
+ mimetypes = ['text/x-ocaml']
+
+ keywords = (
+ 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ 'downto', 'else', 'end', 'exception', 'external', 'false',
+ 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
+ )
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
+ r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+ '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~'
+ )
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\"\'ntbr]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
+ (r'\(\*(?![)])', Comment, 'comment'),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'\d[\d_]*', Number.Integer),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ include('escape-sequence'),
+ (r'\\\n', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ default('#pop'),
+ ],
+ }
+
+
+class OpaLexer(RegexLexer):
+ """
+ Lexer for the Opa language (http://opalang.org).
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Opa'
+ aliases = ['opa']
+ filenames = ['*.opa']
+ mimetypes = ['text/x-opa']
+
+ # most of these aren't strictly keywords
+ # but if you color only real keywords, you might just
+ # as well not color anything
+ keywords = (
+ 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
+ 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
+ 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
+ 'type', 'val', 'with', 'xml_parser',
+ )
+
+ # matches both stuff and `stuff`
+ ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
+
+ op_re = r'[.=\-<>,@~%/+?*&^!]'
+ punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
+ # because they are also used for inserts
+
+ tokens = {
+ # copied from the caml lexer, should be adapted
+ 'escape-sequence': [
+ (r'\\[\\"\'ntr}]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+
+ # factorizing these rules, because they are inserted many times
+ 'comments': [
+ (r'/\*', Comment, 'nested-comment'),
+ (r'//.*?$', Comment),
+ ],
+ 'comments-and-spaces': [
+ include('comments'),
+ (r'\s+', Text),
+ ],
+
+ 'root': [
+ include('comments-and-spaces'),
+ # keywords
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ # directives
+ # we could parse the actual set of directives instead of anything
+ # starting with @, but this is troublesome
+ # because it needs to be adjusted all the time
+ # and assuming we parse only sources that compile, it is useless
+ (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
+
+ # number literals
+ (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+[eE][+\-]?\d+', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'\d+', Number.Integer),
+ # color literals
+ (r'#[\da-fA-F]{3,6}', Number.Integer),
+
+ # string literals
+ (r'"', String.Double, 'string'),
+ # char literal, should be checked because this is the regexp from
+ # the caml lexer
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
+ String.Char),
+
+ # this is meant to deal with embedded exprs in strings
+ # every time we find a '}' we pop a state so that if we were
+ # inside a string, we are back in the string state
+ # as a consequence, we must also push a state every time we find a
+ # '{' or else we will have errors when parsing {} for instance
+ (r'\{', Operator, '#push'),
+ (r'\}', Operator, '#pop'),
+
+ # html literals
+ # this is much stricter than the actual parser,
+ # since a<b would not be parsed as html
+ # but then again, the parser is way too lax, and we can't hope
+ # to have something as tolerant
+ (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
+
+ # db path
+ # matching the '[_]' in '/a[_]' because it is a part
+ # of the syntax of the db path definition
+ # unfortunately, I don't know how to match the ']' in
+ # /a[1], so this is somewhat inconsistent
+ (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
+ # putting the same color on <- as on db path, since
+ # it can be used only to mean Db.write
+ (r'<-(?!'+op_re+r')', Name.Variable),
+
+ # 'modules'
+ # although modules are not distinguished by their names as in caml
+ # the standard library seems to follow the convention that only
+ # module names are capitalized
+ (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
+
+ # operators
+ # = has a special role because this is the only
+ # way to syntactically distinguish binding constructions
+ # unfortunately, this colors the equal in {x=2} too
+ (r'=(?!'+op_re+r')', Keyword),
+ (r'(%s)+' % op_re, Operator),
+ (r'(%s)+' % punc_re, Operator),
+
+ # coercions
+ (r':', Operator, 'type'),
+ # type variables
+ # we need this rule because we don't parse specially type
+ # definitions so in "type t('a) = ...", "'a" is parsed by 'root'
+ ("'"+ident_re, Keyword.Type),
+
+ # id literal, #something, or #{expr}
+ (r'#'+ident_re, String.Single),
+ (r'#(?=\{)', String.Single),
+
+ # identifiers
+ # this avoids coloring the '2' in 'a2' as an integer
+ (ident_re, Text),
+
+ # default, not sure if that is needed or not
+ # (r'.', Text),
+ ],
+
+ # it is quite painful to have to parse types to know where they end
+ # this is the general rule for a type
+ # a type is either:
+ # * -> ty
+ # * type-with-slash
+ # * type-with-slash -> ty
+ # * type-with-slash (, type-with-slash)+ -> ty
+ #
+ # the code is pretty funky in here, but this code would roughly
+ # translate in caml to:
+ # let rec type stream =
+ # match stream with
+ # | [< "->"; stream >] -> type stream
+ # | [< ""; stream >] ->
+ # type_with_slash stream
+ # type_lhs_1 stream;
+ # and type_1 stream = ...
+ 'type': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type),
+ default(('#pop', 'type-lhs-1', 'type-with-slash')),
+ ],
+
+ # parses all the atomic or closed constructions in the syntax of type
+ # expressions: record types, tuple types, type constructors, basic type
+ # and type variables
+ 'type-1': [
+ include('comments-and-spaces'),
+ (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (r'~?\{', Keyword.Type, ('#pop', 'type-record')),
+ (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (ident_re, Keyword.Type, '#pop'),
+ ("'"+ident_re, Keyword.Type),
+ # this case is not in the syntax but sometimes
+ # we think we are parsing types when in fact we are parsing
+ # some css, so we just pop the states until we get back into
+ # the root state
+ default('#pop'),
+ ],
+
+ # type-with-slash is either:
+ # * type-1
+ # * type-1 (/ type-1)+
+ 'type-with-slash': [
+ include('comments-and-spaces'),
+ default(('#pop', 'slash-type-1', 'type-1')),
+ ],
+ 'slash-type-1': [
+ include('comments-and-spaces'),
+ ('/', Keyword.Type, ('#pop', 'type-1')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # we go in this state after having parsed a type-with-slash
+ # while trying to parse a type
+ # and at this point we must determine if we are parsing an arrow
+ # type (in which case we must continue parsing) or not (in which
+ # case we stop)
+ 'type-lhs-1': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
+ default('#pop'),
+ ],
+ 'type-arrow': [
+ include('comments-and-spaces'),
+ # the lookahead here allows parsing f(x : int, y : float -> truc)
+ # correctly
+ (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # no need to do precise parsing for tuples and records
+ # because they are closed constructions, so we can simply
+ # find the closing delimiter
+ # note that this function would not work if the source
+ # contained identifiers like `{)` (although it could be patched
+ # to support it)
+ 'type-tuple': [
+ include('comments-and-spaces'),
+ (r'[^()/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\(', Keyword.Type, '#push'),
+ (r'\)', Keyword.Type, '#pop'),
+ ],
+ 'type-record': [
+ include('comments-and-spaces'),
+ (r'[^{}/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\{', Keyword.Type, '#push'),
+ (r'\}', Keyword.Type, '#pop'),
+ ],
+
+ # 'type-tuple': [
+ # include('comments-and-spaces'),
+ # (r'\)', Keyword.Type, '#pop'),
+ # default(('#pop', 'type-tuple-1', 'type-1')),
+ # ],
+ # 'type-tuple-1': [
+ # include('comments-and-spaces'),
+ # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
+ # (r',', Keyword.Type, 'type-1'),
+ # ],
+ # 'type-record':[
+ # include('comments-and-spaces'),
+ # (r'\}', Keyword.Type, '#pop'),
+ # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
+ # ],
+ # 'type-record-field-expr': [
+ #
+ # ],
+
+ 'nested-comment': [
+ (r'[^/*]+', Comment),
+ (r'/\*', Comment, '#push'),
+ (r'\*/', Comment, '#pop'),
+ (r'[/*]', Comment),
+ ],
+
+ # the copy pasting between string and single-string
+ # is kinda sad. Is there a way to avoid that??
+ 'string': [
+ (r'[^\\"{]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'\{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+ 'single-string': [
+ (r'[^\\\'{]+', String.Double),
+ (r'\'', String.Double, '#pop'),
+ (r'\{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
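# Editorial sketch (not part of the patch): the 'string'/'single-string'
# duplication noted above could be avoided by generating both state bodies
# from a small helper. `_quoted_state` is a hypothetical name, not a
# Pygments API; it would be called while building the `tokens` dict.
from pygments.lexer import include
from pygments.token import Operator, String

def _quoted_state(quote):
    return [
        (r'[^\\' + quote + r'{]+', String.Double),   # plain characters
        (quote, String.Double, '#pop'),              # closing quote
        (r'\{', Operator, 'root'),                   # embedded expression
        include('escape-sequence'),
    ]

# e.g. tokens['string'] = _quoted_state('"'); tokens['single-string'] = _quoted_state("'")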
+
+ # all the html stuff
+ # can't really reuse some existing html parser
+ # because we must be able to parse embedded expressions
+
+ # we are in this state after someone parsed the '<' that
+ # started the html literal
+ 'html-open-tag': [
+ (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ # we are in this state after someone parsed the '</' that
+ # started the end of the closing tag
+ 'html-end-tag': [
+ # this is a star, because </> is allowed
+ (r'[\w\-:]*>', String.Single, '#pop'),
+ ],
+
+ # we are in this state after having parsed '<ident(:ident)?'
+ # we thus parse a possibly empty list of attributes
+ 'html-attr': [
+ (r'\s+', Text),
+ (r'[\w\-:]+=', String.Single, 'html-attr-value'),
+ (r'/>', String.Single, '#pop'),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ 'html-attr-value': [
+ (r"'", String.Single, ('#pop', 'single-string')),
+ (r'"', String.Single, ('#pop', 'string')),
+ (r'#'+ident_re, String.Single, '#pop'),
+ (r'#(?=\{)', String.Single, ('#pop', 'root')),
+ (r'[^"\'{`=<>]+', String.Single, '#pop'),
+ (r'\{', Operator, ('#pop', 'root')), # this is a tail call!
+ ],
+
+ # we should probably deal with '\' escapes here
+ 'html-content': [
+ (r'<!--', Comment, 'html-comment'),
+ (r'</', String.Single, ('#pop', 'html-end-tag')),
+ (r'<', String.Single, 'html-open-tag'),
+ (r'\{', Operator, 'root'),
+ (r'[^<{]+', String.Single),
+ ],
+
+ 'html-comment': [
+ (r'-->', Comment, '#pop'),
+ (r'[^\-]+|-', Comment),
+ ],
+ }
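# Editorial sketch (not part of the patch): the tuple form used above, e.g.
# default(('#pop', 'type-lhs-1', 'type-with-slash')), pops the current state
# and pushes 'type-lhs-1' then 'type-with-slash', so the lexer continues in
# 'type-with-slash' and falls back to 'type-lhs-1' once that state pops.
# The toy lexer below only illustrates this stack behaviour; it is not part
# of Pygments and the state names are made up.
from pygments.lexer import RegexLexer, default
from pygments.token import Keyword, Text

class _ArrowDemoLexer(RegexLexer):
    """Tokenises arrow types such as 'int -> float -> bool'."""
    tokens = {
        'root': [
            (r'\s+', Text),
            # push 'rhs' then 'lhs'; 'lhs' (top of stack) is processed first
            default(('rhs', 'lhs')),
        ],
        'lhs': [
            (r'\s+', Text),
            (r'\w+', Keyword.Type, '#pop'),
        ],
        'rhs': [
            (r'\s+', Text),
            (r'->', Keyword.Type, 'lhs'),   # keep consuming '-> ident'
            default('#pop'),
        ],
    }

for ttype, value in _ArrowDemoLexer().get_tokens('int -> float -> bool'):
    print(ttype, repr(value))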
class ReasonLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/modeling.py b/contrib/python/Pygments/py3/pygments/lexers/modeling.py
index b00a7f10b3..7cd9144e96 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/modeling.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/modeling.py
@@ -1,365 +1,365 @@
-"""
- pygments.lexers.modeling
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for modeling languages.
-
+"""
+ pygments.lexers.modeling
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for modeling languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
-
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers import _stan_builtins
-
-__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
-
-
-class ModelicaLexer(RegexLexer):
- """
- For `Modelica <http://www.modelica.org/>`_ source code.
-
- .. versionadded:: 1.1
- """
- name = 'Modelica'
- aliases = ['modelica']
- filenames = ['*.mo']
- mimetypes = ['text/x-modelica']
-
- flags = re.DOTALL | re.MULTILINE
-
- _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
-
- tokens = {
- 'whitespace': [
+
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers import _stan_builtins
+
+__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
+
+
+class ModelicaLexer(RegexLexer):
+ """
+ For `Modelica <http://www.modelica.org/>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Modelica'
+ aliases = ['modelica']
+ filenames = ['*.mo']
+ mimetypes = ['text/x-modelica']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
+
+ tokens = {
+ 'whitespace': [
(r'[\s\ufeff]+', Text),
- (r'//[^\n]*\n?', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'root': [
- include('whitespace'),
- (r'"', String.Double, 'string'),
- (r'[()\[\]{},;]+', Punctuation),
- (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
- (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
- (r'\d+', Number.Integer),
- (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
- r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
- r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
- r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
- r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
- r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
- r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
- r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
- r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
- r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
- r'transpose|vector|zeros)\b', Name.Builtin),
- (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
- r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
+ (r'//[^\n]*\n?', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'"', String.Double, 'string'),
+ (r'[()\[\]{},;]+', Punctuation),
+ (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
+ (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
+ r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
+ r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
+ r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
+ r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
+ r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
+ r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
+ r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
+ r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
+ r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
+ r'transpose|vector|zeros)\b', Name.Builtin),
+ (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
+ r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
- r'output|parameter|partial|protected|public|pure|redeclare|'
- r'replaceable|return|stream|then|when|while)\b',
- Keyword.Reserved),
- (r'(and|not|or)\b', Operator.Word),
- (r'(block|class|connector|end|function|model|operator|package|'
- r'record|type)\b', Keyword.Reserved, 'class'),
- (r'(false|true)\b', Keyword.Constant),
- (r'within\b', Keyword.Reserved, 'package-prefix'),
- (_name, Name)
- ],
- 'class': [
- include('whitespace'),
- (r'(function|record)\b', Keyword.Reserved),
- (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
- (_name, Name.Class, '#pop'),
- default('#pop')
- ],
- 'package-prefix': [
- include('whitespace'),
- (_name, Name.Namespace, '#pop'),
- default('#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\[\'"?\\abfnrtv]', String.Escape),
- (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
- using(HtmlLexer)),
- (r'<|\\?[^"\\<]+', String.Double)
- ]
- }
-
-
-class BugsLexer(RegexLexer):
- """
- Pygments Lexer for `OpenBugs <http://www.openbugs.net/>`_ and WinBugs
- models.
-
- .. versionadded:: 1.6
- """
-
- name = 'BUGS'
- aliases = ['bugs', 'winbugs', 'openbugs']
- filenames = ['*.bug']
-
- _FUNCTIONS = (
- # Scalar functions
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
- 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
- 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
- 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
- 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
- 'trunc',
- # Vector functions
- 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
- 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
- 'sd', 'sort', 'sum',
- # Special
- 'D', 'I', 'F', 'T', 'C')
- """ OpenBUGS built-in functions
-
- From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
-
- This also includes
-
- - T, C, I : Truncation and censoring.
- ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
- - D : ODE
- - F : Functional http://www.openbugs.info/Examples/Functionals.html
-
- """
-
- _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
- 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
- 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
- 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
- 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
- 'dmt', 'dwish')
- """ OpenBUGS built-in distributions
-
- Functions from
- http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
- """
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'comments': [
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model)(\s+)(\{)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # Reserved Words
- (r'(for|in)(?![\w.])', Keyword.Reserved),
- # Built-in Functions
- (r'(%s)(?=\s*\()'
- % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
- Name.Builtin),
- # Regular variable names
- (r'[A-Za-z][\w.]*', Name),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- # Punctuation
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
- # Infix and prefix operators
- (r'\+|-|\*|/', Operator),
- # Block
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r"^\s*model\s*{", text, re.M):
- return 0.7
- else:
- return 0.0
-
-
-class JagsLexer(RegexLexer):
- """
- Pygments Lexer for JAGS.
-
- .. versionadded:: 1.6
- """
-
- name = 'JAGS'
- aliases = ['jags']
- filenames = ['*.jag', '*.bug']
-
- # JAGS
- _FUNCTIONS = (
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cos', 'cosh', 'cloglog',
- 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
- 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
- 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
- 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
- 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
- # Truncation/Censoring (should I include)
- 'T', 'I')
- # Distributions with density, probability and quartile functions
- _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
- ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
- 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
- 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
- # Other distributions without density and probability
- _OTHER_DISTRIBUTIONS = (
- 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
- 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
- 'dnbinom', 'dweibull', 'ddirich')
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'names': [
- # Regular variable names
- (r'[a-zA-Z][\w.]*\b', Name),
- ],
- 'comments': [
- # do not use stateful comments
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model|data)(\s+)(\{)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- (r'var(?![\w.])', Keyword.Declaration),
- # Reserved Words
- (r'(for|in)(?![\w.])', Keyword.Reserved),
- # Builtins
- # Need to use lookahead because . is a valid char
- (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
- + _DISTRIBUTIONS
- + _OTHER_DISTRIBUTIONS),
- Name.Builtin),
- # Names
- include('names'),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- (r'<-|~', Operator),
- # # JAGS includes many more than OpenBUGS
- (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*model\s*\{', text, re.M):
- if re.search(r'^\s*data\s*\{', text, re.M):
- return 0.9
- elif re.search(r'^\s*var', text, re.M):
- return 0.9
- else:
- return 0.3
- else:
- return 0
-
-
-class StanLexer(RegexLexer):
- """Pygments Lexer for Stan models.
-
- The Stan modeling language is specified in the *Stan Modeling Language
+ r'output|parameter|partial|protected|public|pure|redeclare|'
+ r'replaceable|return|stream|then|when|while)\b',
+ Keyword.Reserved),
+ (r'(and|not|or)\b', Operator.Word),
+ (r'(block|class|connector|end|function|model|operator|package|'
+ r'record|type)\b', Keyword.Reserved, 'class'),
+ (r'(false|true)\b', Keyword.Constant),
+ (r'within\b', Keyword.Reserved, 'package-prefix'),
+ (_name, Name)
+ ],
+ 'class': [
+ include('whitespace'),
+ (r'(function|record)\b', Keyword.Reserved),
+ (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
+ (_name, Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'package-prefix': [
+ include('whitespace'),
+ (_name, Name.Namespace, '#pop'),
+ default('#pop')
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\[\'"?\\abfnrtv]', String.Escape),
+ (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
+ using(HtmlLexer)),
+ (r'<|\\?[^"\\<]+', String.Double)
+ ]
+ }
+
+
+class BugsLexer(RegexLexer):
+ """
+ Pygments Lexer for `OpenBugs <http://www.openbugs.net/>`_ and WinBugs
+ models.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'BUGS'
+ aliases = ['bugs', 'winbugs', 'openbugs']
+ filenames = ['*.bug']
+
+ _FUNCTIONS = (
+ # Scalar functions
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
+ 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
+ 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
+ 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
+ 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
+ 'trunc',
+ # Vector functions
+ 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
+ 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
+ 'sd', 'sort', 'sum',
+ # Special
+ 'D', 'I', 'F', 'T', 'C')
+ """ OpenBUGS built-in functions
+
+ From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
+
+ This also includes
+
+ - T, C, I : Truncation and censoring.
+ ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
+ - D : ODE
+ - F : Functional http://www.openbugs.info/Examples/Functionals.html
+
+ """
+
+ _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
+ 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
+ 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
+ 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
+ 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
+ 'dmt', 'dwish')
+ """ OpenBUGS built-in distributions
+
+ Functions from
+ http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
+ """
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model)(\s+)(\{)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ # Reserved Words
+ (r'(for|in)(?![\w.])', Keyword.Reserved),
+ # Built-in Functions
+ (r'(%s)(?=\s*\()'
+ % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
+ Name.Builtin),
+ # Regular variable names
+ (r'[A-Za-z][\w.]*', Name),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ # Punctuation
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ # SLexer makes these tokens Operators.
+ (r'<-|~', Operator),
+ # Infix and prefix operators
+ (r'\+|-|\*|/', Operator),
+ # Block
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r"^\s*model\s*{", text, re.M):
+ return 0.7
+ else:
+ return 0.0
+
+
+class JagsLexer(RegexLexer):
+ """
+ Pygments Lexer for JAGS.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'JAGS'
+ aliases = ['jags']
+ filenames = ['*.jag', '*.bug']
+
+ # JAGS
+ _FUNCTIONS = (
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cos', 'cosh', 'cloglog',
+ 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
+ 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
+ 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
+ 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
+ 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
+ # Truncation/Censoring (should I include)
+ 'T', 'I')
+ # Distributions with density, probability and quartile functions
+ _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
+ ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
+ 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
+ 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
+ # Other distributions without density and probability
+ _OTHER_DISTRIBUTIONS = (
+ 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
+ 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
+ 'dnbinom', 'dweibull', 'ddirich')
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'names': [
+ # Regular variable names
+ (r'[a-zA-Z][\w.]*\b', Name),
+ ],
+ 'comments': [
+ # do not use stateful comments
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model|data)(\s+)(\{)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ (r'var(?![\w.])', Keyword.Declaration),
+ # Reserved Words
+ (r'(for|in)(?![\w.])', Keyword.Reserved),
+ # Builtins
+ # Need to use lookahead because . is a valid char
+ (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
+ + _DISTRIBUTIONS
+ + _OTHER_DISTRIBUTIONS),
+ Name.Builtin),
+ # Names
+ include('names'),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ (r'<-|~', Operator),
+ # # JAGS includes many more than OpenBUGS
+ (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*model\s*\{', text, re.M):
+ if re.search(r'^\s*data\s*\{', text, re.M):
+ return 0.9
+ elif re.search(r'^\s*var', text, re.M):
+ return 0.9
+ else:
+ return 0.3
+ else:
+ return 0
+
+
+class StanLexer(RegexLexer):
+ """Pygments Lexer for Stan models.
+
+ The Stan modeling language is specified in the *Stan Modeling Language
User's Guide and Reference Manual, v2.17.0*,
`pdf <https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf>`__.
-
- .. versionadded:: 1.6
- """
-
- name = 'Stan'
- aliases = ['stan']
- filenames = ['*.stan']
-
- tokens = {
- 'whitespace': [
- (r"\s+", Text),
- ],
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'(//|#).*$', Comment.Single),
- ],
- 'root': [
- # Stan is more restrictive on strings than this regex
- (r'"[^"]*"', String),
- # Comments
- include('comments'),
- # block start
- include('whitespace'),
- # Block start
- (r'(%s)(\s*)(\{)' %
- r'|'.join(('functions', 'data', r'transformed\s+?data',
- 'parameters', r'transformed\s+parameters',
- 'model', r'generated\s+quantities')),
- bygroups(Keyword.Namespace, Text, Punctuation)),
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Stan'
+ aliases = ['stan']
+ filenames = ['*.stan']
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'(//|#).*$', Comment.Single),
+ ],
+ 'root': [
+ # Stan is more restrictive on strings than this regex
+ (r'"[^"]*"', String),
+ # Comments
+ include('comments'),
+ # block start
+ include('whitespace'),
+ # Block start
+ (r'(%s)(\s*)(\{)' %
+ r'|'.join(('functions', 'data', r'transformed\s+?data',
+ 'parameters', r'transformed\s+parameters',
+ 'model', r'generated\s+quantities')),
+ bygroups(Keyword.Namespace, Text, Punctuation)),
# target keyword
(r'target\s*\+=', Keyword),
- # Reserved Words
- (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
- # Truncation
- (r'T(?=\s*\[)', Keyword),
- # Data types
- (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
+ # Reserved Words
+ (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
+ # Truncation
+ (r'T(?=\s*\[)', Keyword),
+ # Data types
+ (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
# < should be punctuation, but elsewhere I can't tell if it is in
# a range constraint
(r'(<)(\s*)(upper|lower)(\s*)(=)',
bygroups(Operator, Whitespace, Keyword, Whitespace, Punctuation)),
(r'(,)(\s*)(upper)(\s*)(=)',
bygroups(Punctuation, Whitespace, Keyword, Whitespace, Punctuation)),
- # Punctuation
+ # Punctuation
(r"[;,\[\]()]", Punctuation),
- # Builtin
+ # Builtin
(r'(%s)(?=\s*\()' % '|'.join(_stan_builtins.FUNCTIONS), Name.Builtin),
(r'(~)(\s*)(%s)(?=\s*\()' % '|'.join(_stan_builtins.DISTRIBUTIONS),
bygroups(Operator, Whitespace, Name.Builtin)),
- # Special names ending in __, like lp__
- (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
- (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
- # user-defined functions
- (r'[A-Za-z]\w*(?=\s*\()', Name.Function),
- # Regular variable names
- (r'[A-Za-z]\w*\b', Name),
- # Real Literals
+ # Special names ending in __, like lp__
+ (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
+ (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
+ # user-defined functions
+ (r'[A-Za-z]\w*(?=\s*\()', Name.Function),
+ # Regular variable names
+ (r'[A-Za-z]\w*\b', Name),
+ # Real Literals
(r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?', Number.Float),
(r'\.[0-9]+([eE][+-]?[0-9]+)?', Number.Float),
- # Integer Literals
+ # Integer Literals
(r'[0-9]+', Number.Integer),
- # Assignment operators
+ # Assignment operators
(r'<-|(?:\+|-|\.?/|\.?\*|=)?=|~', Operator),
- # Infix, prefix and postfix operators (and = )
+ # Infix, prefix and postfix operators (and = )
(r"\+|-|\.?\*|\.?/|\\|'|\^|!=?|<=?|>=?|\|\||&&|%|\?|:", Operator),
- # Block delimiters
- (r'[{}]', Punctuation),
+ # Block delimiters
+ (r'[{}]', Punctuation),
# Distribution |
(r'\|', Punctuation)
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*parameters\s*\{', text, re.M):
- return 1.0
- else:
- return 0.0
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*parameters\s*\{', text, re.M):
+ return 1.0
+ else:
+ return 0.0
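# Editorial sketch (not part of the patch): the analyse_text() heuristics
# above are what pygments.lexers.guess_lexer() relies on when no filename is
# available. The two model snippets below are illustrative toy inputs.
from pygments.lexers import guess_lexer

stan_src = "parameters { real mu; }\nmodel { mu ~ normal(0, 1); }\n"
jags_src = "data { D <- dim(y) }\nmodel { for (i in 1:N) { y[i] ~ dnorm(mu, tau) } }\n"

print(guess_lexer(stan_src).name)   # expected: 'Stan' (analyse_text scores 1.0)
print(guess_lexer(jags_src).name)   # expected: 'JAGS' (model + data blocks score 0.9)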
diff --git a/contrib/python/Pygments/py3/pygments/lexers/modula2.py b/contrib/python/Pygments/py3/pygments/lexers/modula2.py
index cad2f4fd40..deae285112 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/modula2.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/modula2.py
@@ -1,1563 +1,1563 @@
-"""
- pygments.lexers.modula2
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Multi-Dialect Lexer for Modula-2.
-
+"""
+ pygments.lexers.modula2
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Multi-Dialect Lexer for Modula-2.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
- String, Number, Punctuation, Error
-
-__all__ = ['Modula2Lexer']
-
-
-# Multi-Dialect Modula-2 Lexer
-class Modula2Lexer(RegexLexer):
- """
- For `Modula-2 <http://www.modula2.org/>`_ source code.
-
- The Modula-2 lexer supports several dialects. By default, it operates in
- fallback mode, recognising the *combined* literals, punctuation symbols
- and operators of all supported dialects, and the *combined* reserved words
- and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
- differentiating between library defined identifiers.
-
- To select a specific dialect, a dialect option may be passed
- or a dialect tag may be embedded into a source file.
-
- Dialect Options:
-
- `m2pim`
- Select PIM Modula-2 dialect.
- `m2iso`
- Select ISO Modula-2 dialect.
- `m2r10`
- Select Modula-2 R10 dialect.
- `objm2`
- Select Objective Modula-2 dialect.
-
- The PIM and ISO dialect options may be qualified with a language extension.
-
- Language Extensions:
-
- `+aglet`
- Select Aglet Modula-2 extensions, available with m2iso.
- `+gm2`
- Select GNU Modula-2 extensions, available with m2pim.
- `+p1`
- Select p1 Modula-2 extensions, available with m2iso.
- `+xds`
- Select XDS Modula-2 extensions, available with m2iso.
-
-
- Passing a Dialect Option via Unix Commandline Interface
-
- Dialect options may be passed to the lexer using the `dialect` key.
- Only one such option should be passed. If multiple dialect options are
- passed, the first valid option is used and any subsequent options are ignored.
-
- Examples:
-
- `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
- Use ISO dialect to render input to HTML output
- `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
- Use ISO dialect with p1 extensions to render input to RTF output
-
-
- Embedding a Dialect Option within a source file
-
- A dialect option may be embedded in a source file in the form of a dialect
- tag, a specially formatted comment that specifies a dialect option.
-
- Dialect Tag EBNF::
-
- dialectTag :
- OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
-
- dialectOption :
- 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
- 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
-
- Prefix : '!' ;
-
- OpeningCommentDelim : '(*' ;
-
- ClosingCommentDelim : '*)' ;
-
- No whitespace is permitted between the tokens of a dialect tag.
-
- In the event that a source file contains multiple dialect tags, the first
- tag that contains a valid dialect option will be used and any subsequent
- dialect tags will be ignored. Ideally, a dialect tag should be placed
- at the beginning of a source file.
-
- An embedded dialect tag overrides a dialect option set via command line.
-
- Examples:
-
- ``(*!m2r10*) DEFINITION MODULE Foobar; ...``
- Use Modula2 R10 dialect to render this source file.
- ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...``
- Use PIM dialect with GNU extensions to render this source file.
-
-
- Algol Publication Mode:
-
- In Algol publication mode, source text is rendered for publication of
- algorithms in scientific papers and academic texts, following the format
- of the Revised Algol-60 Language Report. It is activated by passing
- one of two corresponding styles as an option:
-
- `algol`
- render reserved words lowercase underline boldface
- and builtins lowercase boldface italic
- `algol_nu`
- render reserved words lowercase boldface (no underlining)
- and builtins lowercase boldface italic
-
- The lexer automatically performs the required lowercase conversion when
- this mode is activated.
-
- Example:
-
- ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input``
- Render input file in Algol publication mode to LaTeX output.
-
-
- Rendering Mode of First Class ADT Identifiers:
-
- The rendering of standard library first class ADT identifiers is controlled
- by option flag "treat_stdlib_adts_as_builtins".
-
- When this option is turned on, standard library ADT identifiers are rendered
- as builtins. When it is turned off, they are rendered as ordinary library
- identifiers.
-
- `treat_stdlib_adts_as_builtins` (default: On)
-
- The option is useful for dialects that support ADTs as first class objects
- and provide ADTs in the standard library that would otherwise be built-in.
-
- At present, only Modula-2 R10 supports library ADTs as first class objects
- and therefore, no ADT identifiers are defined for any other dialects.
-
- Example:
-
- ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
- Render standard library ADTs as ordinary library types.
-
- .. versionadded:: 1.3
-
- .. versionchanged:: 2.1
- Added multi-dialect support.
- """
- name = 'Modula-2'
- aliases = ['modula2', 'm2']
- filenames = ['*.def', '*.mod']
- mimetypes = ['text/x-modula2']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'dialecttags': [
- # PIM Dialect Tag
- (r'\(\*!m2pim\*\)', Comment.Special),
- # ISO Dialect Tag
- (r'\(\*!m2iso\*\)', Comment.Special),
- # M2R10 Dialect Tag
- (r'\(\*!m2r10\*\)', Comment.Special),
- # ObjM2 Dialect Tag
- (r'\(\*!objm2\*\)', Comment.Special),
- # Aglet Extensions Dialect Tag
- (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
- # GNU Extensions Dialect Tag
- (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
- # p1 Extensions Dialect Tag
- (r'\(\*!m2iso\+p1\*\)', Comment.Special),
- # XDS Extensions Dialect Tag
- (r'\(\*!m2iso\+xds\*\)', Comment.Special),
- ],
- 'identifiers': [
- (r'([a-zA-Z_$][\w$]*)', Name),
- ],
- 'prefixed_number_literals': [
- #
- # Base-2, whole number
- (r'0b[01]+(\'[01]+)*', Number.Bin),
- #
- # Base-16, whole number
- (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
- ],
- 'plain_number_literals': [
- #
- # Base-10, real number with exponent
- (r'[0-9]+(\'[0-9]+)*' # integral part
- r'\.[0-9]+(\'[0-9]+)*' # fractional part
- r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent
- Number.Float),
- #
- # Base-10, real number without exponent
- (r'[0-9]+(\'[0-9]+)*' # integral part
- r'\.[0-9]+(\'[0-9]+)*', # fractional part
- Number.Float),
- #
- # Base-10, whole number
- (r'[0-9]+(\'[0-9]+)*', Number.Integer),
- ],
- 'suffixed_number_literals': [
- #
- # Base-8, whole number
- (r'[0-7]+B', Number.Oct),
- #
- # Base-8, character code
- (r'[0-7]+C', Number.Oct),
- #
- # Base-16, number
- (r'[0-9A-F]+H', Number.Hex),
- ],
- 'string_literals': [
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
+ String, Number, Punctuation, Error
+
+__all__ = ['Modula2Lexer']
+
+
+# Multi-Dialect Modula-2 Lexer
+class Modula2Lexer(RegexLexer):
+ """
+ For `Modula-2 <http://www.modula2.org/>`_ source code.
+
+ The Modula-2 lexer supports several dialects. By default, it operates in
+ fallback mode, recognising the *combined* literals, punctuation symbols
+ and operators of all supported dialects, and the *combined* reserved words
+ and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
+ differentiating between library defined identifiers.
+
+ To select a specific dialect, a dialect option may be passed
+ or a dialect tag may be embedded into a source file.
+
+ Dialect Options:
+
+ `m2pim`
+ Select PIM Modula-2 dialect.
+ `m2iso`
+ Select ISO Modula-2 dialect.
+ `m2r10`
+ Select Modula-2 R10 dialect.
+ `objm2`
+ Select Objective Modula-2 dialect.
+
+ The PIM and ISO dialect options may be qualified with a language extension.
+
+ Language Extensions:
+
+ `+aglet`
+ Select Aglet Modula-2 extensions, available with m2iso.
+ `+gm2`
+ Select GNU Modula-2 extensions, available with m2pim.
+ `+p1`
+ Select p1 Modula-2 extensions, available with m2iso.
+ `+xds`
+ Select XDS Modula-2 extensions, available with m2iso.
+
+
+ Passing a Dialect Option via Unix Commandline Interface
+
+ Dialect options may be passed to the lexer using the `dialect` key.
+ Only one such option should be passed. If multiple dialect options are
+ passed, the first valid option is used and any subsequent options are ignored.
+
+ Examples:
+
+ `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
+ Use ISO dialect to render input to HTML output
+ `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
+ Use ISO dialect with p1 extensions to render input to RTF output
+
+
+ Embedding a Dialect Option within a source file
+
+ A dialect option may be embedded in a source file in the form of a dialect
+ tag, a specially formatted comment that specifies a dialect option.
+
+ Dialect Tag EBNF::
+
+ dialectTag :
+ OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
+
+ dialectOption :
+ 'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
+ 'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
+
+ Prefix : '!' ;
+
+ OpeningCommentDelim : '(*' ;
+
+ ClosingCommentDelim : '*)' ;
+
+ No whitespace is permitted between the tokens of a dialect tag.
+
+ In the event that a source file contains multiple dialect tags, the first
+ tag that contains a valid dialect option will be used and any subsequent
+ dialect tags will be ignored. Ideally, a dialect tag should be placed
+ at the beginning of a source file.
+
+ An embedded dialect tag overrides a dialect option set via command line.
+
+ Examples:
+
+ ``(*!m2r10*) DEFINITION MODULE Foobar; ...``
+ Use Modula2 R10 dialect to render this source file.
+ ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...``
+ Use PIM dialect with GNU extensions to render this source file.
+
+
+ Algol Publication Mode:
+
+ In Algol publication mode, source text is rendered for publication of
+ algorithms in scientific papers and academic texts, following the format
+ of the Revised Algol-60 Language Report. It is activated by passing
+ one of two corresponding styles as an option:
+
+ `algol`
+ render reserved words lowercase underline boldface
+ and builtins lowercase boldface italic
+ `algol_nu`
+ render reserved words lowercase boldface (no underlining)
+ and builtins lowercase boldface italic
+
+ The lexer automatically performs the required lowercase conversion when
+ this mode is activated.
+
+ Example:
+
+ ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input``
+ Render input file in Algol publication mode to LaTeX output.
+
+
+ Rendering Mode of First Class ADT Identifiers:
+
+ The rendering of standard library first class ADT identifiers is controlled
+ by option flag "treat_stdlib_adts_as_builtins".
+
+ When this option is turned on, standard library ADT identifiers are rendered
+ as builtins. When it is turned off, they are rendered as ordinary library
+ identifiers.
+
+ `treat_stdlib_adts_as_builtins` (default: On)
+
+ The option is useful for dialects that support ADTs as first class objects
+ and provide ADTs in the standard library that would otherwise be built-in.
+
+ At present, only Modula-2 R10 supports library ADTs as first class objects
+ and therefore, no ADT identifiers are defined for any other dialects.
+
+ Example:
+
+ ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
+ Render standard library ADTs as ordinary library types.
+
+ .. versionadded:: 1.3
+
+ .. versionchanged:: 2.1
+ Added multi-dialect support.
+ """
+ name = 'Modula-2'
+ aliases = ['modula2', 'm2']
+ filenames = ['*.def', '*.mod']
+ mimetypes = ['text/x-modula2']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'whitespace': [
+ (r'\n+', Text), # blank lines
+ (r'\s+', Text), # whitespace
+ ],
+ 'dialecttags': [
+ # PIM Dialect Tag
+ (r'\(\*!m2pim\*\)', Comment.Special),
+ # ISO Dialect Tag
+ (r'\(\*!m2iso\*\)', Comment.Special),
+ # M2R10 Dialect Tag
+ (r'\(\*!m2r10\*\)', Comment.Special),
+ # ObjM2 Dialect Tag
+ (r'\(\*!objm2\*\)', Comment.Special),
+ # Aglet Extensions Dialect Tag
+ (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
+ # GNU Extensions Dialect Tag
+ (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
+ # p1 Extensions Dialect Tag
+ (r'\(\*!m2iso\+p1\*\)', Comment.Special),
+ # XDS Extensions Dialect Tag
+ (r'\(\*!m2iso\+xds\*\)', Comment.Special),
+ ],
+ 'identifiers': [
+ (r'([a-zA-Z_$][\w$]*)', Name),
+ ],
+ 'prefixed_number_literals': [
+ #
+ # Base-2, whole number
+ (r'0b[01]+(\'[01]+)*', Number.Bin),
+ #
+ # Base-16, whole number
+ (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
+ ],
+ 'plain_number_literals': [
+ #
+ # Base-10, real number with exponent
+ (r'[0-9]+(\'[0-9]+)*' # integral part
+ r'\.[0-9]+(\'[0-9]+)*' # fractional part
+ r'[eE][+-]?[0-9]+(\'[0-9]+)*', # exponent
+ Number.Float),
+ #
+ # Base-10, real number without exponent
+ (r'[0-9]+(\'[0-9]+)*' # integral part
+ r'\.[0-9]+(\'[0-9]+)*', # fractional part
+ Number.Float),
+ #
+ # Base-10, whole number
+ (r'[0-9]+(\'[0-9]+)*', Number.Integer),
+ ],
+ 'suffixed_number_literals': [
+ #
+ # Base-8, whole number
+ (r'[0-7]+B', Number.Oct),
+ #
+ # Base-8, character code
+ (r'[0-7]+C', Number.Oct),
+ #
+ # Base-16, number
+ (r'[0-9A-F]+H', Number.Hex),
+ ],
+ 'string_literals': [
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ],
- 'digraph_operators': [
- # Dot Product Operator
- (r'\*\.', Operator),
- # Array Concatenation Operator
- (r'\+>', Operator), # M2R10 + ObjM2
- # Inequality Operator
- (r'<>', Operator), # ISO + PIM
- # Less-Or-Equal, Subset
- (r'<=', Operator),
- # Greater-Or-Equal, Superset
- (r'>=', Operator),
- # Identity Operator
- (r'==', Operator), # M2R10 + ObjM2
- # Type Conversion Operator
- (r'::', Operator), # M2R10 + ObjM2
- # Assignment Symbol
- (r':=', Operator),
- # Postfix Increment Mutator
- (r'\+\+', Operator), # M2R10 + ObjM2
- # Postfix Decrement Mutator
- (r'--', Operator), # M2R10 + ObjM2
- ],
- 'unigraph_operators': [
- # Arithmetic Operators
- (r'[+-]', Operator),
- (r'[*/]', Operator),
- # ISO 80000-2 compliant Set Difference Operator
- (r'\\', Operator), # M2R10 + ObjM2
- # Relational Operators
- (r'[=#<>]', Operator),
- # Dereferencing Operator
- (r'\^', Operator),
- # Dereferencing Operator Synonym
- (r'@', Operator), # ISO
- # Logical AND Operator Synonym
- (r'&', Operator), # PIM + ISO
- # Logical NOT Operator Synonym
- (r'~', Operator), # PIM + ISO
- # Smalltalk Message Prefix
- (r'`', Operator), # ObjM2
- ],
- 'digraph_punctuation': [
- # Range Constructor
- (r'\.\.', Punctuation),
- # Opening Chevron Bracket
- (r'<<', Punctuation), # M2R10 + ISO
- # Closing Chevron Bracket
- (r'>>', Punctuation), # M2R10 + ISO
- # Blueprint Punctuation
- (r'->', Punctuation), # M2R10 + ISO
- # Distinguish |# and # in M2 R10
- (r'\|#', Punctuation),
- # Distinguish ## and # in M2 R10
- (r'##', Punctuation),
- # Distinguish |* and * in M2 R10
- (r'\|\*', Punctuation),
- ],
- 'unigraph_punctuation': [
- # Common Punctuation
+ ],
+ 'digraph_operators': [
+ # Dot Product Operator
+ (r'\*\.', Operator),
+ # Array Concatenation Operator
+ (r'\+>', Operator), # M2R10 + ObjM2
+ # Inequality Operator
+ (r'<>', Operator), # ISO + PIM
+ # Less-Or-Equal, Subset
+ (r'<=', Operator),
+ # Greater-Or-Equal, Superset
+ (r'>=', Operator),
+ # Identity Operator
+ (r'==', Operator), # M2R10 + ObjM2
+ # Type Conversion Operator
+ (r'::', Operator), # M2R10 + ObjM2
+ # Assignment Symbol
+ (r':=', Operator),
+ # Postfix Increment Mutator
+ (r'\+\+', Operator), # M2R10 + ObjM2
+ # Postfix Decrement Mutator
+ (r'--', Operator), # M2R10 + ObjM2
+ ],
+ 'unigraph_operators': [
+ # Arithmetic Operators
+ (r'[+-]', Operator),
+ (r'[*/]', Operator),
+ # ISO 80000-2 compliant Set Difference Operator
+ (r'\\', Operator), # M2R10 + ObjM2
+ # Relational Operators
+ (r'[=#<>]', Operator),
+ # Dereferencing Operator
+ (r'\^', Operator),
+ # Dereferencing Operator Synonym
+ (r'@', Operator), # ISO
+ # Logical AND Operator Synonym
+ (r'&', Operator), # PIM + ISO
+ # Logical NOT Operator Synonym
+ (r'~', Operator), # PIM + ISO
+ # Smalltalk Message Prefix
+ (r'`', Operator), # ObjM2
+ ],
+ 'digraph_punctuation': [
+ # Range Constructor
+ (r'\.\.', Punctuation),
+ # Opening Chevron Bracket
+ (r'<<', Punctuation), # M2R10 + ISO
+ # Closing Chevron Bracket
+ (r'>>', Punctuation), # M2R10 + ISO
+ # Blueprint Punctuation
+ (r'->', Punctuation), # M2R10 + ISO
+ # Distinguish |# and # in M2 R10
+ (r'\|#', Punctuation),
+ # Distinguish ## and # in M2 R10
+ (r'##', Punctuation),
+ # Distinguish |* and * in M2 R10
+ (r'\|\*', Punctuation),
+ ],
+ 'unigraph_punctuation': [
+ # Common Punctuation
(r'[()\[\]{},.:;|]', Punctuation),
- # Case Label Separator Synonym
- (r'!', Punctuation), # ISO
- # Blueprint Punctuation
- (r'\?', Punctuation), # M2R10 + ObjM2
- ],
- 'comments': [
- # Single Line Comment
- (r'^//.*?\n', Comment.Single), # M2R10 + ObjM2
- # Block Comment
- (r'\(\*([^$].*?)\*\)', Comment.Multiline),
- # Template Block Comment
- (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2
- ],
- 'pragmas': [
- # ISO Style Pragmas
- (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2
- # Pascal Style Pragmas
- (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM
- ],
- 'root': [
- include('whitespace'),
- include('dialecttags'),
- include('pragmas'),
- include('comments'),
- include('identifiers'),
- include('suffixed_number_literals'), # PIM + ISO
- include('prefixed_number_literals'), # M2R10 + ObjM2
- include('plain_number_literals'),
- include('string_literals'),
- include('digraph_punctuation'),
- include('digraph_operators'),
- include('unigraph_punctuation'),
- include('unigraph_operators'),
- ]
- }
-
-# C o m m o n D a t a s e t s
-
- # Common Reserved Words Dataset
- common_reserved_words = (
- # 37 common reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
- 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
- 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
- 'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
- 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
- )
-
- # Common Builtins Dataset
- common_builtins = (
- # 16 common builtins
- 'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
- 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
- 'TRUE',
- )
-
- # Common Pseudo-Module Builtins Dataset
- common_pseudo_builtins = (
- # 4 common pseudo builtins
- 'ADDRESS', 'BYTE', 'WORD', 'ADR'
- )
-
-# P I M M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for PIM Modula-2
- pim_lexemes_to_reject = (
- '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
- '+>', '->', '<<', '>>', '|#', '##',
- )
-
- # PIM Modula-2 Additional Reserved Words Dataset
- pim_additional_reserved_words = (
- # 3 additional reserved words
- 'EXPORT', 'QUALIFIED', 'WITH',
- )
-
- # PIM Modula-2 Additional Builtins Dataset
- pim_additional_builtins = (
- # 16 additional builtins
- 'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
- 'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
- )
-
- # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
- pim_additional_pseudo_builtins = (
- # 5 additional pseudo builtins
- 'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
- )
-
-# I S O M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for ISO Modula-2
- iso_lexemes_to_reject = (
- '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
- '<<', '>>', '|#', '##',
- )
-
- # ISO Modula-2 Additional Reserved Words Dataset
- iso_additional_reserved_words = (
- # 9 additional reserved words (ISO 10514-1)
- 'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
- 'REM', 'RETRY', 'WITH',
- # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
- 'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
- 'REVEAL', 'TRACED', 'UNSAFEGUARDED',
- )
-
- # ISO Modula-2 Additional Builtins Dataset
- iso_additional_builtins = (
- # 26 additional builtins (ISO 10514-1)
- 'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
- 'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE', 'LENGTH',
- 'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
- 'TRUNC', 'UNINTERRUBTIBLE', 'VAL',
- # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
- 'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
- )
-
- # ISO Modula-2 Additional Pseudo-Module Builtins Dataset
- iso_additional_pseudo_builtins = (
- # 14 additional builtins (SYSTEM)
- 'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
- 'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
- 'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
- # 13 additional builtins (COROUTINES)
- 'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
- 'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
- 'NEWCOROUTINE', 'PROT', 'TRANSFER',
- # 9 additional builtins (EXCEPTIONS)
- 'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
- 'ExceptionSource', 'GetMessage', 'IsCurrentSource',
- 'IsExceptionalExecution', 'RAISE',
- # 3 additional builtins (TERMINATION)
- 'TERMINATION', 'IsTerminating', 'HasHalted',
- # 4 additional builtins (M2EXCEPTION)
- 'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
- 'indexException', 'rangeException', 'caseSelectException',
- 'invalidLocation', 'functionException', 'wholeValueException',
- 'wholeDivException', 'realValueException', 'realDivException',
- 'complexValueException', 'complexDivException', 'protException',
- 'sysException', 'coException', 'exException',
- )
-
-# M o d u l a - 2 R 1 0 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for Modula-2 R10
- m2r10_lexemes_to_reject = (
- '!', '`', '@', '$', '%', '&', '<>',
- )
-
- # Modula-2 R10 reserved words in addition to the common set
- m2r10_additional_reserved_words = (
- # 12 additional reserved words
- 'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
- 'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
- # 2 additional reserved words with symbolic assembly option
- 'ASM', 'REG',
- )
-
- # Modula-2 R10 builtins in addition to the common set
- m2r10_additional_builtins = (
- # 26 additional builtins
- 'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
- 'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
- 'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
- 'UNICHAR', 'WRITE', 'WRITEF',
- )
-
- # Modula-2 R10 Additional Pseudo-Module Builtins Dataset
- m2r10_additional_pseudo_builtins = (
- # 13 additional builtins (TPROPERTIES)
- 'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
- 'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
- 'TMAXEXP', 'TMINEXP',
- # 4 additional builtins (CONVERSION)
- 'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
- # 35 additional builtins (UNSAFE)
- 'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
- 'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
- 'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
- 'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
- 'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
- # 11 additional builtins (ATOMIC)
- 'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
- 'BWNAND', 'BWOR', 'BWXOR',
- # 7 additional builtins (COMPILER)
- 'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
- 'HASH',
- # 5 additional builtins (ASSEMBLER)
- 'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
- )
-
-# O b j e c t i v e M o d u l a - 2 D a t a s e t s
-
- # Lexemes to Mark as Error Tokens for Objective Modula-2
- objm2_lexemes_to_reject = (
- '!', '$', '%', '&', '<>',
- )
-
- # Objective Modula-2 Extensions
- # reserved words in addition to Modula-2 R10
- objm2_additional_reserved_words = (
- # 16 additional reserved words
- 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
- 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
- 'SUPER', 'TRY',
- )
-
- # Objective Modula-2 Extensions
- # builtins in addition to Modula-2 R10
- objm2_additional_builtins = (
- # 3 additional builtins
- 'OBJECT', 'NO', 'YES',
- )
-
- # Objective Modula-2 Extensions
- # pseudo-module builtins in addition to Modula-2 R10
- objm2_additional_pseudo_builtins = (
- # None
- )
-
-# A g l e t M o d u l a - 2 D a t a s e t s
-
- # Aglet Extensions
- # reserved words in addition to ISO Modula-2
- aglet_additional_reserved_words = (
- # None
- )
-
- # Aglet Extensions
- # builtins in addition to ISO Modula-2
- aglet_additional_builtins = (
- # 9 additional builtins
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
- )
-
- # Aglet Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- aglet_additional_pseudo_builtins = (
- # None
- )
-
-# G N U M o d u l a - 2 D a t a s e t s
-
- # GNU Extensions
- # reserved words in addition to PIM Modula-2
- gm2_additional_reserved_words = (
- # 10 additional reserved words
- 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
- '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
- )
-
- # GNU Extensions
- # builtins in addition to PIM Modula-2
- gm2_additional_builtins = (
- # 21 additional builtins
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
- 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
- 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
- )
-
- # GNU Extensions
- # pseudo-module builtins in addition to PIM Modula-2
- gm2_additional_pseudo_builtins = (
- # None
- )
-
-# p 1 M o d u l a - 2 D a t a s e t s
-
- # p1 Extensions
- # reserved words in addition to ISO Modula-2
- p1_additional_reserved_words = (
- # None
- )
-
- # p1 Extensions
- # builtins in addition to ISO Modula-2
- p1_additional_builtins = (
- # None
- )
-
- # p1 Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- p1_additional_pseudo_builtins = (
- # 1 additional builtin
- 'BCD',
- )
-
-# X D S M o d u l a - 2 D a t a s e t s
-
- # XDS Extensions
- # reserved words in addition to ISO Modula-2
- xds_additional_reserved_words = (
- # 1 additional reserved word
- 'SEQ',
- )
-
- # XDS Extensions
- # builtins in addition to ISO Modula-2
- xds_additional_builtins = (
- # 9 additional builtins
- 'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
- 'LONGCARD', 'SHORTCARD', 'SHORTINT',
- )
-
- # XDS Modula-2 Extensions
- # pseudo-module builtins in addition to ISO Modula-2
- xds_additional_pseudo_builtins = (
- # 22 additional builtins (SYSTEM)
- 'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
- 'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
- 'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void',
- # 3 additional builtins (COMPILER)
- 'COMPILER', 'OPTION', 'EQUATION'
- )
-
-# P I M S t a n d a r d L i b r a r y D a t a s e t s
-
- # PIM Modula-2 Standard Library Modules Dataset
- pim_stdlib_module_identifiers = (
- 'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
- )
-
- # PIM Modula-2 Standard Library Types Dataset
- pim_stdlib_type_identifiers = (
- 'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
- 'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
- 'DirectoryCommand',
- )
-
- # PIM Modula-2 Standard Library Procedures Dataset
- pim_stdlib_proc_identifiers = (
- 'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
- 'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
- 'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
- 'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
- 'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
- 'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
- 'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
- 'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
- 'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
- 'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE',
- )
-
- # PIM Modula-2 Standard Library Variables Dataset
- pim_stdlib_var_identifiers = (
- 'Done', 'termCH', 'in', 'out'
- )
-
- # PIM Modula-2 Standard Library Constants Dataset
- pim_stdlib_const_identifiers = (
- 'EOL',
- )
-
-# I S O S t a n d a r d L i b r a r y D a t a s e t s
-
- # ISO Modula-2 Standard Library Modules Dataset
- iso_stdlib_module_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Types Dataset
- iso_stdlib_type_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Procedures Dataset
- iso_stdlib_proc_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Variables Dataset
- iso_stdlib_var_identifiers = (
- # TO DO
- )
-
- # ISO Modula-2 Standard Library Constants Dataset
- iso_stdlib_const_identifiers = (
- # TO DO
- )
-
-# M 2 R 1 0 S t a n d a r d L i b r a r y D a t a s e t s
-
- # Modula-2 R10 Standard Library ADTs Dataset
- m2r10_stdlib_adt_identifiers = (
- 'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
- 'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
- 'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
- 'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
- 'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
- 'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
- 'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
- 'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
- 'INT64', 'INT128', 'STRING', 'UNISTRING',
- )
-
- # Modula-2 R10 Standard Library Blueprints Dataset
- m2r10_stdlib_blueprint_identifiers = (
- 'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
- 'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
- 'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
- 'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
- 'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
- 'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
- 'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
- )
-
- # Modula-2 R10 Standard Library Modules Dataset
- m2r10_stdlib_module_identifiers = (
- 'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
- 'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
- 'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
- 'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
- 'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
- )
-
- # Modula-2 R10 Standard Library Types Dataset
- m2r10_stdlib_type_identifiers = (
- 'File', 'Status',
- # TO BE COMPLETED
- )
-
- # Modula-2 R10 Standard Library Procedures Dataset
- m2r10_stdlib_proc_identifiers = (
- 'ALLOCATE', 'DEALLOCATE', 'SIZE',
- # TO BE COMPLETED
- )
-
- # Modula-2 R10 Standard Library Variables Dataset
- m2r10_stdlib_var_identifiers = (
- 'stdIn', 'stdOut', 'stdErr',
- )
-
- # Modula-2 R10 Standard Library Constants Dataset
- m2r10_stdlib_const_identifiers = (
- 'pi', 'tau',
- )
-
-# D i a l e c t s
-
- # Dialect modes
- dialects = (
- 'unknown',
- 'm2pim', 'm2iso', 'm2r10', 'objm2',
- 'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
- )
-
-# D a t a b a s e s
-
- # Lexemes to Mark as Errors Database
- lexemes_to_reject_db = {
- # Lexemes to reject for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Lexemes to reject for PIM Modula-2
- 'm2pim': (
- pim_lexemes_to_reject,
- ),
- # Lexemes to reject for ISO Modula-2
- 'm2iso': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for Modula-2 R10
- 'm2r10': (
- m2r10_lexemes_to_reject,
- ),
- # Lexemes to reject for Objective Modula-2
- 'objm2': (
- objm2_lexemes_to_reject,
- ),
- # Lexemes to reject for Aglet Modula-2
- 'm2iso+aglet': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for GNU Modula-2
- 'm2pim+gm2': (
- pim_lexemes_to_reject,
- ),
- # Lexemes to reject for p1 Modula-2
- 'm2iso+p1': (
- iso_lexemes_to_reject,
- ),
- # Lexemes to reject for XDS Modula-2
- 'm2iso+xds': (
- iso_lexemes_to_reject,
- ),
- }
-
- # Reserved Words Database
- reserved_words_db = {
- # Reserved words for unknown dialect
- 'unknown': (
- common_reserved_words,
- pim_additional_reserved_words,
- iso_additional_reserved_words,
- m2r10_additional_reserved_words,
- ),
-
- # Reserved words for PIM Modula-2
- 'm2pim': (
- common_reserved_words,
- pim_additional_reserved_words,
- ),
-
- # Reserved words for Modula-2 R10
- 'm2iso': (
- common_reserved_words,
- iso_additional_reserved_words,
- ),
-
- # Reserved words for ISO Modula-2
- 'm2r10': (
- common_reserved_words,
- m2r10_additional_reserved_words,
- ),
-
- # Reserved words for Objective Modula-2
- 'objm2': (
- common_reserved_words,
- m2r10_additional_reserved_words,
- objm2_additional_reserved_words,
- ),
-
- # Reserved words for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_reserved_words,
- iso_additional_reserved_words,
- aglet_additional_reserved_words,
- ),
-
- # Reserved words for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_reserved_words,
- pim_additional_reserved_words,
- gm2_additional_reserved_words,
- ),
-
- # Reserved words for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_reserved_words,
- iso_additional_reserved_words,
- p1_additional_reserved_words,
- ),
-
- # Reserved words for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_reserved_words,
- iso_additional_reserved_words,
- xds_additional_reserved_words,
- ),
- }
-
- # Builtins Database
- builtins_db = {
- # Builtins for unknown dialect
- 'unknown': (
- common_builtins,
- pim_additional_builtins,
- iso_additional_builtins,
- m2r10_additional_builtins,
- ),
-
- # Builtins for PIM Modula-2
- 'm2pim': (
- common_builtins,
- pim_additional_builtins,
- ),
-
- # Builtins for ISO Modula-2
- 'm2iso': (
- common_builtins,
- iso_additional_builtins,
- ),
-
-        # Builtins for Modula-2 R10
- 'm2r10': (
- common_builtins,
- m2r10_additional_builtins,
- ),
-
- # Builtins for Objective Modula-2
- 'objm2': (
- common_builtins,
- m2r10_additional_builtins,
- objm2_additional_builtins,
- ),
-
- # Builtins for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_builtins,
- iso_additional_builtins,
- aglet_additional_builtins,
- ),
-
- # Builtins for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_builtins,
- pim_additional_builtins,
- gm2_additional_builtins,
- ),
-
- # Builtins for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_builtins,
- iso_additional_builtins,
- p1_additional_builtins,
- ),
-
- # Builtins for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_builtins,
- iso_additional_builtins,
- xds_additional_builtins,
- ),
- }
-
- # Pseudo-Module Builtins Database
- pseudo_builtins_db = {
- # Builtins for unknown dialect
- 'unknown': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- iso_additional_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- ),
-
- # Builtins for PIM Modula-2
- 'm2pim': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- ),
-
- # Builtins for ISO Modula-2
- 'm2iso': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- ),
-
-        # Builtins for Modula-2 R10
- 'm2r10': (
- common_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- ),
-
- # Builtins for Objective Modula-2
- 'objm2': (
- common_pseudo_builtins,
- m2r10_additional_pseudo_builtins,
- objm2_additional_pseudo_builtins,
- ),
-
- # Builtins for Aglet Modula-2 Extensions
- 'm2iso+aglet': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- aglet_additional_pseudo_builtins,
- ),
-
- # Builtins for GNU Modula-2 Extensions
- 'm2pim+gm2': (
- common_pseudo_builtins,
- pim_additional_pseudo_builtins,
- gm2_additional_pseudo_builtins,
- ),
-
- # Builtins for p1 Modula-2 Extensions
- 'm2iso+p1': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- p1_additional_pseudo_builtins,
- ),
-
- # Builtins for XDS Modula-2 Extensions
- 'm2iso+xds': (
- common_pseudo_builtins,
- iso_additional_pseudo_builtins,
- xds_additional_pseudo_builtins,
- ),
- }
-
- # Standard Library ADTs Database
- stdlib_adts_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library ADTs for PIM Modula-2
- 'm2pim': (
- # No first class library types
- ),
-
- # Standard Library ADTs for ISO Modula-2
- 'm2iso': (
- # No first class library types
- ),
-
- # Standard Library ADTs for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library ADTs for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library ADTs for Aglet Modula-2
- 'm2iso+aglet': (
- # No first class library types
- ),
-
- # Standard Library ADTs for GNU Modula-2
- 'm2pim+gm2': (
- # No first class library types
- ),
-
- # Standard Library ADTs for p1 Modula-2
- 'm2iso+p1': (
- # No first class library types
- ),
-
- # Standard Library ADTs for XDS Modula-2
- 'm2iso+xds': (
- # No first class library types
- ),
- }
-
- # Standard Library Modules Database
- stdlib_modules_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Modules for PIM Modula-2
- 'm2pim': (
- pim_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for ISO Modula-2
- 'm2iso': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_blueprint_identifiers,
- m2r10_stdlib_module_identifiers,
- m2r10_stdlib_adt_identifiers,
- ),
-
- # Standard Library Modules for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_blueprint_identifiers,
- m2r10_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_module_identifiers,
- ),
-
- # Standard Library Modules for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_module_identifiers,
- ),
- }
-
- # Standard Library Types Database
- stdlib_types_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Types for PIM Modula-2
- 'm2pim': (
- pim_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for ISO Modula-2
- 'm2iso': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_type_identifiers,
- ),
-
- # Standard Library Types for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_type_identifiers,
- ),
- }
-
- # Standard Library Procedures Database
- stdlib_procedures_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Procedures for PIM Modula-2
- 'm2pim': (
- pim_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for ISO Modula-2
- 'm2iso': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_proc_identifiers,
- ),
-
- # Standard Library Procedures for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_proc_identifiers,
- ),
- }
-
- # Standard Library Variables Database
- stdlib_variables_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Variables for PIM Modula-2
- 'm2pim': (
- pim_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for ISO Modula-2
- 'm2iso': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_var_identifiers,
- ),
-
- # Standard Library Variables for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_var_identifiers,
- ),
- }
-
- # Standard Library Constants Database
- stdlib_constants_db = {
- # Empty entry for unknown dialect
- 'unknown': (
- # LEAVE THIS EMPTY
- ),
- # Standard Library Constants for PIM Modula-2
- 'm2pim': (
- pim_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for ISO Modula-2
- 'm2iso': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Modula-2 R10
- 'm2r10': (
- m2r10_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Objective Modula-2
- 'objm2': (
- m2r10_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for Aglet Modula-2
- 'm2iso+aglet': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for GNU Modula-2
- 'm2pim+gm2': (
- pim_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for p1 Modula-2
- 'm2iso+p1': (
- iso_stdlib_const_identifiers,
- ),
-
- # Standard Library Constants for XDS Modula-2
- 'm2iso+xds': (
- iso_stdlib_const_identifiers,
- ),
- }
-
-# M e t h o d s
-
- # initialise a lexer instance
- def __init__(self, **options):
- #
- # check dialect options
- #
- dialects = get_list_opt(options, 'dialect', [])
- #
- for dialect_option in dialects:
- if dialect_option in self.dialects[1:-1]:
- # valid dialect option found
- self.set_dialect(dialect_option)
- break
- #
- # Fallback Mode (DEFAULT)
- else:
- # no valid dialect option
- self.set_dialect('unknown')
- #
- self.dialect_set_by_tag = False
- #
- # check style options
- #
- styles = get_list_opt(options, 'style', [])
- #
- # use lowercase mode for Algol style
- if 'algol' in styles or 'algol_nu' in styles:
- self.algol_publication_mode = True
- else:
- self.algol_publication_mode = False
- #
- # Check option flags
- #
- self.treat_stdlib_adts_as_builtins = get_bool_opt(
- options, 'treat_stdlib_adts_as_builtins', True)
- #
- # call superclass initialiser
- RegexLexer.__init__(self, **options)
-
- # Set lexer to a specified dialect
- def set_dialect(self, dialect_id):
- #
- # if __debug__:
- # print 'entered set_dialect with arg: ', dialect_id
- #
- # check dialect name against known dialects
- if dialect_id not in self.dialects:
- dialect = 'unknown' # default
- else:
- dialect = dialect_id
- #
- # compose lexemes to reject set
- lexemes_to_reject_set = set()
- # add each list of reject lexemes for this dialect
- for list in self.lexemes_to_reject_db[dialect]:
- lexemes_to_reject_set.update(set(list))
- #
- # compose reserved words set
- reswords_set = set()
- # add each list of reserved words for this dialect
- for list in self.reserved_words_db[dialect]:
- reswords_set.update(set(list))
- #
- # compose builtins set
- builtins_set = set()
- # add each list of builtins for this dialect excluding reserved words
- for list in self.builtins_db[dialect]:
- builtins_set.update(set(list).difference(reswords_set))
- #
- # compose pseudo-builtins set
- pseudo_builtins_set = set()
-        # add each list of pseudo-builtins for this dialect excluding reserved words
- for list in self.pseudo_builtins_db[dialect]:
- pseudo_builtins_set.update(set(list).difference(reswords_set))
- #
- # compose ADTs set
- adts_set = set()
- # add each list of ADTs for this dialect excluding reserved words
- for list in self.stdlib_adts_db[dialect]:
- adts_set.update(set(list).difference(reswords_set))
- #
- # compose modules set
- modules_set = set()
-        # add each list of modules for this dialect excluding builtins
- for list in self.stdlib_modules_db[dialect]:
- modules_set.update(set(list).difference(builtins_set))
- #
- # compose types set
- types_set = set()
- # add each list of types for this dialect excluding builtins
- for list in self.stdlib_types_db[dialect]:
- types_set.update(set(list).difference(builtins_set))
- #
- # compose procedures set
- procedures_set = set()
- # add each list of procedures for this dialect excluding builtins
- for list in self.stdlib_procedures_db[dialect]:
- procedures_set.update(set(list).difference(builtins_set))
- #
- # compose variables set
- variables_set = set()
- # add each list of variables for this dialect excluding builtins
- for list in self.stdlib_variables_db[dialect]:
- variables_set.update(set(list).difference(builtins_set))
- #
- # compose constants set
- constants_set = set()
- # add each list of constants for this dialect excluding builtins
- for list in self.stdlib_constants_db[dialect]:
- constants_set.update(set(list).difference(builtins_set))
- #
- # update lexer state
- self.dialect = dialect
- self.lexemes_to_reject = lexemes_to_reject_set
- self.reserved_words = reswords_set
- self.builtins = builtins_set
- self.pseudo_builtins = pseudo_builtins_set
- self.adts = adts_set
- self.modules = modules_set
- self.types = types_set
- self.procedures = procedures_set
- self.variables = variables_set
- self.constants = constants_set
- #
- # if __debug__:
- # print 'exiting set_dialect'
- # print ' self.dialect: ', self.dialect
- # print ' self.lexemes_to_reject: ', self.lexemes_to_reject
- # print ' self.reserved_words: ', self.reserved_words
- # print ' self.builtins: ', self.builtins
- # print ' self.pseudo_builtins: ', self.pseudo_builtins
- # print ' self.adts: ', self.adts
- # print ' self.modules: ', self.modules
- # print ' self.types: ', self.types
- # print ' self.procedures: ', self.procedures
- # print ' self.variables: ', self.variables
- # print ' self.types: ', self.types
- # print ' self.constants: ', self.constants
-
- # Extracts a dialect name from a dialect tag comment string and checks
- # the extracted name against known dialects. If a match is found, the
- # matching name is returned, otherwise dialect id 'unknown' is returned
- def get_dialect_from_dialect_tag(self, dialect_tag):
- #
- # if __debug__:
- # print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
- #
- # constants
- left_tag_delim = '(*!'
- right_tag_delim = '*)'
- left_tag_delim_len = len(left_tag_delim)
- right_tag_delim_len = len(right_tag_delim)
- indicator_start = left_tag_delim_len
- indicator_end = -(right_tag_delim_len)
- #
- # check comment string for dialect indicator
- if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
- and dialect_tag.startswith(left_tag_delim) \
- and dialect_tag.endswith(right_tag_delim):
- #
- # if __debug__:
- # print 'dialect tag found'
- #
- # extract dialect indicator
- indicator = dialect_tag[indicator_start:indicator_end]
- #
- # if __debug__:
- # print 'extracted: ', indicator
- #
- # check against known dialects
- for index in range(1, len(self.dialects)):
- #
- # if __debug__:
- # print 'dialects[', index, ']: ', self.dialects[index]
- #
- if indicator == self.dialects[index]:
- #
- # if __debug__:
- # print 'matching dialect found'
- #
- # indicator matches known dialect
- return indicator
- else:
- # indicator does not match any dialect
- return 'unknown' # default
- else:
- # invalid indicator string
- return 'unknown' # default
-
- # intercept the token stream, modify token attributes and return them
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- #
- # check for dialect tag if dialect has not been set by tag
- if not self.dialect_set_by_tag and token == Comment.Special:
- indicated_dialect = self.get_dialect_from_dialect_tag(value)
- if indicated_dialect != 'unknown':
- # token is a dialect indicator
- # reset reserved words and builtins
- self.set_dialect(indicated_dialect)
- self.dialect_set_by_tag = True
- #
- # check for reserved words, predefined and stdlib identifiers
- if token is Name:
- if value in self.reserved_words:
- token = Keyword.Reserved
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.builtins:
- token = Name.Builtin
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.pseudo_builtins:
- token = Name.Builtin.Pseudo
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.adts:
- if not self.treat_stdlib_adts_as_builtins:
- token = Name.Namespace
- else:
- token = Name.Builtin.Pseudo
- if self.algol_publication_mode:
- value = value.lower()
- #
- elif value in self.modules:
- token = Name.Namespace
- #
- elif value in self.types:
- token = Name.Class
- #
- elif value in self.procedures:
- token = Name.Function
- #
- elif value in self.variables:
- token = Name.Variable
- #
- elif value in self.constants:
- token = Name.Constant
- #
- elif token in Number:
- #
- # mark prefix number literals as error for PIM and ISO dialects
- if self.dialect not in ('unknown', 'm2r10', 'objm2'):
- if "'" in value or value[0:2] in ('0b', '0x', '0u'):
- token = Error
- #
- elif self.dialect in ('m2r10', 'objm2'):
- # mark base-8 number literals as errors for M2 R10 and ObjM2
- if token is Number.Oct:
- token = Error
- # mark suffix base-16 literals as errors for M2 R10 and ObjM2
- elif token is Number.Hex and 'H' in value:
- token = Error
- # mark real numbers with E as errors for M2 R10 and ObjM2
- elif token is Number.Float and 'E' in value:
- token = Error
- #
- elif token in Comment:
- #
- # mark single line comment as error for PIM and ISO dialects
- if token is Comment.Single:
- if self.dialect not in ('unknown', 'm2r10', 'objm2'):
- token = Error
- #
- if token is Comment.Preproc:
- # mark ISO pragma as error for PIM dialects
- if value.startswith('<*') and \
- self.dialect.startswith('m2pim'):
- token = Error
- # mark PIM pragma as comment for other dialects
- elif value.startswith('(*$') and \
- self.dialect != 'unknown' and \
- not self.dialect.startswith('m2pim'):
- token = Comment.Multiline
- #
- else: # token is neither Name nor Comment
- #
- # mark lexemes matching the dialect's error token set as errors
- if value in self.lexemes_to_reject:
- token = Error
- #
- # substitute lexemes when in Algol mode
- if self.algol_publication_mode:
- if value == '#':
+ # Case Label Separator Synonym
+ (r'!', Punctuation), # ISO
+ # Blueprint Punctuation
+ (r'\?', Punctuation), # M2R10 + ObjM2
+ ],
+ 'comments': [
+ # Single Line Comment
+ (r'^//.*?\n', Comment.Single), # M2R10 + ObjM2
+ # Block Comment
+ (r'\(\*([^$].*?)\*\)', Comment.Multiline),
+ # Template Block Comment
+ (r'/\*(.*?)\*/', Comment.Multiline), # M2R10 + ObjM2
+ ],
+ 'pragmas': [
+ # ISO Style Pragmas
+ (r'<\*.*?\*>', Comment.Preproc), # ISO, M2R10 + ObjM2
+ # Pascal Style Pragmas
+ (r'\(\*\$.*?\*\)', Comment.Preproc), # PIM
+ ],
+ 'root': [
+ include('whitespace'),
+ include('dialecttags'),
+ include('pragmas'),
+ include('comments'),
+ include('identifiers'),
+ include('suffixed_number_literals'), # PIM + ISO
+ include('prefixed_number_literals'), # M2R10 + ObjM2
+ include('plain_number_literals'),
+ include('string_literals'),
+ include('digraph_punctuation'),
+ include('digraph_operators'),
+ include('unigraph_punctuation'),
+ include('unigraph_operators'),
+ ]
+ }
+
+# C o m m o n D a t a s e t s
+
+ # Common Reserved Words Dataset
+ common_reserved_words = (
+ # 37 common reserved words
+ 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
+ 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
+ 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
+ 'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
+ 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
+ )
+
+ # Common Builtins Dataset
+ common_builtins = (
+ # 16 common builtins
+ 'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
+ 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
+ 'TRUE',
+ )
+
+ # Common Pseudo-Module Builtins Dataset
+ common_pseudo_builtins = (
+ # 4 common pseudo builtins
+ 'ADDRESS', 'BYTE', 'WORD', 'ADR'
+ )
+
+# P I M M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for PIM Modula-2
+ pim_lexemes_to_reject = (
+ '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
+ '+>', '->', '<<', '>>', '|#', '##',
+ )
+
+ # PIM Modula-2 Additional Reserved Words Dataset
+ pim_additional_reserved_words = (
+ # 3 additional reserved words
+ 'EXPORT', 'QUALIFIED', 'WITH',
+ )
+
+ # PIM Modula-2 Additional Builtins Dataset
+ pim_additional_builtins = (
+ # 16 additional builtins
+ 'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
+ 'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
+ )
+
+ # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
+ pim_additional_pseudo_builtins = (
+ # 5 additional pseudo builtins
+ 'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
+ )
+
+# I S O M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for ISO Modula-2
+ iso_lexemes_to_reject = (
+ '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
+ '<<', '>>', '|#', '##',
+ )
+
+ # ISO Modula-2 Additional Reserved Words Dataset
+ iso_additional_reserved_words = (
+ # 9 additional reserved words (ISO 10514-1)
+ 'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
+ 'REM', 'RETRY', 'WITH',
+ # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
+ 'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
+ 'REVEAL', 'TRACED', 'UNSAFEGUARDED',
+ )
+
+ # ISO Modula-2 Additional Builtins Dataset
+ iso_additional_builtins = (
+ # 26 additional builtins (ISO 10514-1)
+ 'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
+ 'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE', 'LENGTH',
+ 'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
+ 'TRUNC', 'UNINTERRUBTIBLE', 'VAL',
+ # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
+ 'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
+ )
+
+ # ISO Modula-2 Additional Pseudo-Module Builtins Dataset
+ iso_additional_pseudo_builtins = (
+ # 14 additional builtins (SYSTEM)
+ 'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
+ 'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
+ 'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
+ # 13 additional builtins (COROUTINES)
+ 'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
+ 'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
+ 'NEWCOROUTINE', 'PROT', 'TRANSFER',
+ # 9 additional builtins (EXCEPTIONS)
+ 'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
+ 'ExceptionSource', 'GetMessage', 'IsCurrentSource',
+ 'IsExceptionalExecution', 'RAISE',
+ # 3 additional builtins (TERMINATION)
+ 'TERMINATION', 'IsTerminating', 'HasHalted',
+ # 4 additional builtins (M2EXCEPTION)
+ 'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
+ 'indexException', 'rangeException', 'caseSelectException',
+ 'invalidLocation', 'functionException', 'wholeValueException',
+ 'wholeDivException', 'realValueException', 'realDivException',
+ 'complexValueException', 'complexDivException', 'protException',
+ 'sysException', 'coException', 'exException',
+ )
+
+# M o d u l a - 2 R 1 0 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for Modula-2 R10
+ m2r10_lexemes_to_reject = (
+ '!', '`', '@', '$', '%', '&', '<>',
+ )
+
+ # Modula-2 R10 reserved words in addition to the common set
+ m2r10_additional_reserved_words = (
+ # 12 additional reserved words
+ 'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
+ 'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
+ # 2 additional reserved words with symbolic assembly option
+ 'ASM', 'REG',
+ )
+
+ # Modula-2 R10 builtins in addition to the common set
+ m2r10_additional_builtins = (
+ # 26 additional builtins
+ 'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
+ 'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
+ 'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
+ 'UNICHAR', 'WRITE', 'WRITEF',
+ )
+
+ # Modula-2 R10 Additional Pseudo-Module Builtins Dataset
+ m2r10_additional_pseudo_builtins = (
+ # 13 additional builtins (TPROPERTIES)
+ 'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
+ 'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
+ 'TMAXEXP', 'TMINEXP',
+ # 4 additional builtins (CONVERSION)
+ 'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
+ # 35 additional builtins (UNSAFE)
+ 'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
+ 'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
+ 'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
+ 'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
+ 'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
+ # 11 additional builtins (ATOMIC)
+ 'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
+ 'BWNAND', 'BWOR', 'BWXOR',
+ # 7 additional builtins (COMPILER)
+ 'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
+ 'HASH',
+ # 5 additional builtins (ASSEMBLER)
+ 'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
+ )
+
+# O b j e c t i v e M o d u l a - 2 D a t a s e t s
+
+ # Lexemes to Mark as Error Tokens for Objective Modula-2
+ objm2_lexemes_to_reject = (
+ '!', '$', '%', '&', '<>',
+ )
+
+ # Objective Modula-2 Extensions
+ # reserved words in addition to Modula-2 R10
+ objm2_additional_reserved_words = (
+ # 16 additional reserved words
+ 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
+ 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
+ 'SUPER', 'TRY',
+ )
+
+ # Objective Modula-2 Extensions
+ # builtins in addition to Modula-2 R10
+ objm2_additional_builtins = (
+ # 3 additional builtins
+ 'OBJECT', 'NO', 'YES',
+ )
+
+ # Objective Modula-2 Extensions
+ # pseudo-module builtins in addition to Modula-2 R10
+ objm2_additional_pseudo_builtins = (
+ # None
+ )
+
+# A g l e t M o d u l a - 2 D a t a s e t s
+
+ # Aglet Extensions
+ # reserved words in addition to ISO Modula-2
+ aglet_additional_reserved_words = (
+ # None
+ )
+
+ # Aglet Extensions
+ # builtins in addition to ISO Modula-2
+ aglet_additional_builtins = (
+ # 9 additional builtins
+ 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+ 'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
+ )
+
+ # Aglet Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ aglet_additional_pseudo_builtins = (
+ # None
+ )
+
+# G N U M o d u l a - 2 D a t a s e t s
+
+ # GNU Extensions
+ # reserved words in addition to PIM Modula-2
+ gm2_additional_reserved_words = (
+ # 10 additional reserved words
+ 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
+ '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
+ )
+
+ # GNU Extensions
+ # builtins in addition to PIM Modula-2
+ gm2_additional_builtins = (
+ # 21 additional builtins
+ 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+ 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
+ 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
+ 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
+ )
+
+ # GNU Extensions
+ # pseudo-module builtins in addition to PIM Modula-2
+ gm2_additional_pseudo_builtins = (
+ # None
+ )
+
+# p 1 M o d u l a - 2 D a t a s e t s
+
+ # p1 Extensions
+ # reserved words in addition to ISO Modula-2
+ p1_additional_reserved_words = (
+ # None
+ )
+
+ # p1 Extensions
+ # builtins in addition to ISO Modula-2
+ p1_additional_builtins = (
+ # None
+ )
+
+ # p1 Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ p1_additional_pseudo_builtins = (
+ # 1 additional builtin
+ 'BCD',
+ )
+
+# X D S M o d u l a - 2 D a t a s e t s
+
+ # XDS Extensions
+ # reserved words in addition to ISO Modula-2
+ xds_additional_reserved_words = (
+ # 1 additional reserved word
+ 'SEQ',
+ )
+
+ # XDS Extensions
+ # builtins in addition to ISO Modula-2
+ xds_additional_builtins = (
+ # 9 additional builtins
+ 'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
+ 'LONGCARD', 'SHORTCARD', 'SHORTINT',
+ )
+
+ # XDS Modula-2 Extensions
+ # pseudo-module builtins in addition to ISO Modula-2
+ xds_additional_pseudo_builtins = (
+ # 22 additional builtins (SYSTEM)
+ 'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
+ 'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
+ 'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void'
+ # 3 additional builtins (COMPILER)
+ 'COMPILER', 'OPTION', 'EQUATION'
+ )
+
+# P I M S t a n d a r d L i b r a r y D a t a s e t s
+
+ # PIM Modula-2 Standard Library Modules Dataset
+ pim_stdlib_module_identifiers = (
+ 'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
+ )
+
+ # PIM Modula-2 Standard Library Types Dataset
+ pim_stdlib_type_identifiers = (
+ 'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
+ 'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
+ 'DirectoryCommand',
+ )
+
+ # PIM Modula-2 Standard Library Procedures Dataset
+ pim_stdlib_proc_identifiers = (
+ 'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
+ 'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
+ 'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
+ 'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
+ 'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
+ 'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
+ 'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
+ 'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
+ 'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
+ 'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE',
+ )
+
+ # PIM Modula-2 Standard Library Variables Dataset
+ pim_stdlib_var_identifiers = (
+ 'Done', 'termCH', 'in', 'out'
+ )
+
+ # PIM Modula-2 Standard Library Constants Dataset
+ pim_stdlib_const_identifiers = (
+ 'EOL',
+ )
+
+# I S O S t a n d a r d L i b r a r y D a t a s e t s
+
+ # ISO Modula-2 Standard Library Modules Dataset
+ iso_stdlib_module_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Types Dataset
+ iso_stdlib_type_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Procedures Dataset
+ iso_stdlib_proc_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Variables Dataset
+ iso_stdlib_var_identifiers = (
+ # TO DO
+ )
+
+ # ISO Modula-2 Standard Library Constants Dataset
+ iso_stdlib_const_identifiers = (
+ # TO DO
+ )
+
+# M 2 R 1 0 S t a n d a r d L i b r a r y D a t a s e t s
+
+ # Modula-2 R10 Standard Library ADTs Dataset
+ m2r10_stdlib_adt_identifiers = (
+ 'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
+ 'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
+ 'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
+ 'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
+ 'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
+ 'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
+ 'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
+ 'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
+ 'INT64', 'INT128', 'STRING', 'UNISTRING',
+ )
+
+ # Modula-2 R10 Standard Library Blueprints Dataset
+ m2r10_stdlib_blueprint_identifiers = (
+ 'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
+ 'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
+ 'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
+ 'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
+ 'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
+ 'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
+ 'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
+ )
+
+ # Modula-2 R10 Standard Library Modules Dataset
+ m2r10_stdlib_module_identifiers = (
+ 'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
+ 'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
+ 'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
+ 'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
+ 'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
+ )
+
+ # Modula-2 R10 Standard Library Types Dataset
+ m2r10_stdlib_type_identifiers = (
+ 'File', 'Status',
+ # TO BE COMPLETED
+ )
+
+ # Modula-2 R10 Standard Library Procedures Dataset
+ m2r10_stdlib_proc_identifiers = (
+ 'ALLOCATE', 'DEALLOCATE', 'SIZE',
+ # TO BE COMPLETED
+ )
+
+ # Modula-2 R10 Standard Library Variables Dataset
+ m2r10_stdlib_var_identifiers = (
+ 'stdIn', 'stdOut', 'stdErr',
+ )
+
+ # Modula-2 R10 Standard Library Constants Dataset
+ m2r10_stdlib_const_identifiers = (
+ 'pi', 'tau',
+ )
+
+# D i a l e c t s
+
+ # Dialect modes
+ dialects = (
+ 'unknown',
+ 'm2pim', 'm2iso', 'm2r10', 'objm2',
+ 'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
+ )
+
+# D a t a b a s e s
+
+ # Lexemes to Mark as Errors Database
+ lexemes_to_reject_db = {
+ # Lexemes to reject for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Lexemes to reject for PIM Modula-2
+ 'm2pim': (
+ pim_lexemes_to_reject,
+ ),
+ # Lexemes to reject for ISO Modula-2
+ 'm2iso': (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Modula-2 R10
+ 'm2r10': (
+ m2r10_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Objective Modula-2
+ 'objm2': (
+ objm2_lexemes_to_reject,
+ ),
+ # Lexemes to reject for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_lexemes_to_reject,
+ ),
+ # Lexemes to reject for p1 Modula-2
+ 'm2iso+p1': (
+ iso_lexemes_to_reject,
+ ),
+ # Lexemes to reject for XDS Modula-2
+ 'm2iso+xds': (
+ iso_lexemes_to_reject,
+ ),
+ }
+
+ # Reserved Words Database
+ reserved_words_db = {
+ # Reserved words for unknown dialect
+ 'unknown': (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ iso_additional_reserved_words,
+ m2r10_additional_reserved_words,
+ ),
+
+ # Reserved words for PIM Modula-2
+ 'm2pim': (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ ),
+
+        # Reserved words for ISO Modula-2
+ 'm2iso': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ ),
+
+        # Reserved words for Modula-2 R10
+ 'm2r10': (
+ common_reserved_words,
+ m2r10_additional_reserved_words,
+ ),
+
+ # Reserved words for Objective Modula-2
+ 'objm2': (
+ common_reserved_words,
+ m2r10_additional_reserved_words,
+ objm2_additional_reserved_words,
+ ),
+
+ # Reserved words for Aglet Modula-2 Extensions
+ 'm2iso+aglet': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ aglet_additional_reserved_words,
+ ),
+
+ # Reserved words for GNU Modula-2 Extensions
+ 'm2pim+gm2': (
+ common_reserved_words,
+ pim_additional_reserved_words,
+ gm2_additional_reserved_words,
+ ),
+
+ # Reserved words for p1 Modula-2 Extensions
+ 'm2iso+p1': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ p1_additional_reserved_words,
+ ),
+
+ # Reserved words for XDS Modula-2 Extensions
+ 'm2iso+xds': (
+ common_reserved_words,
+ iso_additional_reserved_words,
+ xds_additional_reserved_words,
+ ),
+ }
+
+ # Builtins Database
+ builtins_db = {
+ # Builtins for unknown dialect
+ 'unknown': (
+ common_builtins,
+ pim_additional_builtins,
+ iso_additional_builtins,
+ m2r10_additional_builtins,
+ ),
+
+ # Builtins for PIM Modula-2
+ 'm2pim': (
+ common_builtins,
+ pim_additional_builtins,
+ ),
+
+ # Builtins for ISO Modula-2
+ 'm2iso': (
+ common_builtins,
+ iso_additional_builtins,
+ ),
+
+        # Builtins for Modula-2 R10
+ 'm2r10': (
+ common_builtins,
+ m2r10_additional_builtins,
+ ),
+
+ # Builtins for Objective Modula-2
+ 'objm2': (
+ common_builtins,
+ m2r10_additional_builtins,
+ objm2_additional_builtins,
+ ),
+
+ # Builtins for Aglet Modula-2 Extensions
+ 'm2iso+aglet': (
+ common_builtins,
+ iso_additional_builtins,
+ aglet_additional_builtins,
+ ),
+
+ # Builtins for GNU Modula-2 Extensions
+ 'm2pim+gm2': (
+ common_builtins,
+ pim_additional_builtins,
+ gm2_additional_builtins,
+ ),
+
+ # Builtins for p1 Modula-2 Extensions
+ 'm2iso+p1': (
+ common_builtins,
+ iso_additional_builtins,
+ p1_additional_builtins,
+ ),
+
+ # Builtins for XDS Modula-2 Extensions
+ 'm2iso+xds': (
+ common_builtins,
+ iso_additional_builtins,
+ xds_additional_builtins,
+ ),
+ }
+
+ # Pseudo-Module Builtins Database
+ pseudo_builtins_db = {
+ # Builtins for unknown dialect
+ 'unknown': (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ ),
+
+ # Builtins for PIM Modula-2
+ 'm2pim': (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ ),
+
+ # Builtins for ISO Modula-2
+ 'm2iso': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ ),
+
+        # Builtins for Modula-2 R10
+ 'm2r10': (
+ common_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ ),
+
+ # Builtins for Objective Modula-2
+ 'objm2': (
+ common_pseudo_builtins,
+ m2r10_additional_pseudo_builtins,
+ objm2_additional_pseudo_builtins,
+ ),
+
+ # Builtins for Aglet Modula-2 Extensions
+ 'm2iso+aglet': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ aglet_additional_pseudo_builtins,
+ ),
+
+ # Builtins for GNU Modula-2 Extensions
+ 'm2pim+gm2': (
+ common_pseudo_builtins,
+ pim_additional_pseudo_builtins,
+ gm2_additional_pseudo_builtins,
+ ),
+
+ # Builtins for p1 Modula-2 Extensions
+ 'm2iso+p1': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ p1_additional_pseudo_builtins,
+ ),
+
+ # Builtins for XDS Modula-2 Extensions
+ 'm2iso+xds': (
+ common_pseudo_builtins,
+ iso_additional_pseudo_builtins,
+ xds_additional_pseudo_builtins,
+ ),
+ }
+
+ # Standard Library ADTs Database
+ stdlib_adts_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library ADTs for PIM Modula-2
+ 'm2pim': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for ISO Modula-2
+ 'm2iso': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library ADTs for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library ADTs for Aglet Modula-2
+ 'm2iso+aglet': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for GNU Modula-2
+ 'm2pim+gm2': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for p1 Modula-2
+ 'm2iso+p1': (
+ # No first class library types
+ ),
+
+ # Standard Library ADTs for XDS Modula-2
+ 'm2iso+xds': (
+ # No first class library types
+ ),
+ }
+
+ # Standard Library Modules Database
+ stdlib_modules_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Modules for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_blueprint_identifiers,
+ m2r10_stdlib_module_identifiers,
+ m2r10_stdlib_adt_identifiers,
+ ),
+
+ # Standard Library Modules for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_blueprint_identifiers,
+ m2r10_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_module_identifiers,
+ ),
+
+ # Standard Library Modules for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_module_identifiers,
+ ),
+ }
+
+ # Standard Library Types Database
+ stdlib_types_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Types for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_type_identifiers,
+ ),
+
+ # Standard Library Types for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_type_identifiers,
+ ),
+ }
+
+ # Standard Library Procedures Database
+ stdlib_procedures_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Procedures for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_proc_identifiers,
+ ),
+
+ # Standard Library Procedures for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_proc_identifiers,
+ ),
+ }
+
+ # Standard Library Variables Database
+ stdlib_variables_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Variables for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_var_identifiers,
+ ),
+
+ # Standard Library Variables for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_var_identifiers,
+ ),
+ }
+
+ # Standard Library Constants Database
+ stdlib_constants_db = {
+ # Empty entry for unknown dialect
+ 'unknown': (
+ # LEAVE THIS EMPTY
+ ),
+ # Standard Library Constants for PIM Modula-2
+ 'm2pim': (
+ pim_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for ISO Modula-2
+ 'm2iso': (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Modula-2 R10
+ 'm2r10': (
+ m2r10_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Objective Modula-2
+ 'objm2': (
+ m2r10_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for Aglet Modula-2
+ 'm2iso+aglet': (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for GNU Modula-2
+ 'm2pim+gm2': (
+ pim_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for p1 Modula-2
+ 'm2iso+p1': (
+ iso_stdlib_const_identifiers,
+ ),
+
+ # Standard Library Constants for XDS Modula-2
+ 'm2iso+xds': (
+ iso_stdlib_const_identifiers,
+ ),
+ }
+
+# M e t h o d s
+
+ # initialise a lexer instance
+ def __init__(self, **options):
+ #
+ # check dialect options
+ #
+ dialects = get_list_opt(options, 'dialect', [])
+ #
+ for dialect_option in dialects:
+ if dialect_option in self.dialects[1:-1]:
+ # valid dialect option found
+ self.set_dialect(dialect_option)
+ break
+ #
+ # Fallback Mode (DEFAULT)
+ else:
+ # no valid dialect option
+ self.set_dialect('unknown')
+ #
+ self.dialect_set_by_tag = False
+ #
+ # check style options
+ #
+ styles = get_list_opt(options, 'style', [])
+ #
+ # use lowercase mode for Algol style
+ if 'algol' in styles or 'algol_nu' in styles:
+ self.algol_publication_mode = True
+ else:
+ self.algol_publication_mode = False
+ #
+ # Check option flags
+ #
+ self.treat_stdlib_adts_as_builtins = get_bool_opt(
+ options, 'treat_stdlib_adts_as_builtins', True)
+ #
+ # call superclass initialiser
+ RegexLexer.__init__(self, **options)
+
+ # Set lexer to a specified dialect
+ def set_dialect(self, dialect_id):
+ #
+ # if __debug__:
+ # print 'entered set_dialect with arg: ', dialect_id
+ #
+ # check dialect name against known dialects
+ if dialect_id not in self.dialects:
+ dialect = 'unknown' # default
+ else:
+ dialect = dialect_id
+ #
+ # compose lexemes to reject set
+ lexemes_to_reject_set = set()
+ # add each list of reject lexemes for this dialect
+ for list in self.lexemes_to_reject_db[dialect]:
+ lexemes_to_reject_set.update(set(list))
+ #
+ # compose reserved words set
+ reswords_set = set()
+ # add each list of reserved words for this dialect
+ for list in self.reserved_words_db[dialect]:
+ reswords_set.update(set(list))
+ #
+ # compose builtins set
+ builtins_set = set()
+ # add each list of builtins for this dialect excluding reserved words
+ for list in self.builtins_db[dialect]:
+ builtins_set.update(set(list).difference(reswords_set))
+ #
+ # compose pseudo-builtins set
+ pseudo_builtins_set = set()
+        # add each list of pseudo-builtins for this dialect excluding reserved words
+ for list in self.pseudo_builtins_db[dialect]:
+ pseudo_builtins_set.update(set(list).difference(reswords_set))
+ #
+ # compose ADTs set
+ adts_set = set()
+ # add each list of ADTs for this dialect excluding reserved words
+ for list in self.stdlib_adts_db[dialect]:
+ adts_set.update(set(list).difference(reswords_set))
+ #
+ # compose modules set
+ modules_set = set()
+        # add each list of modules for this dialect excluding builtins
+ for list in self.stdlib_modules_db[dialect]:
+ modules_set.update(set(list).difference(builtins_set))
+ #
+ # compose types set
+ types_set = set()
+ # add each list of types for this dialect excluding builtins
+ for list in self.stdlib_types_db[dialect]:
+ types_set.update(set(list).difference(builtins_set))
+ #
+ # compose procedures set
+ procedures_set = set()
+ # add each list of procedures for this dialect excluding builtins
+ for list in self.stdlib_procedures_db[dialect]:
+ procedures_set.update(set(list).difference(builtins_set))
+ #
+ # compose variables set
+ variables_set = set()
+ # add each list of variables for this dialect excluding builtins
+ for list in self.stdlib_variables_db[dialect]:
+ variables_set.update(set(list).difference(builtins_set))
+ #
+ # compose constants set
+ constants_set = set()
+ # add each list of constants for this dialect excluding builtins
+ for list in self.stdlib_constants_db[dialect]:
+ constants_set.update(set(list).difference(builtins_set))
+ #
+ # update lexer state
+ self.dialect = dialect
+ self.lexemes_to_reject = lexemes_to_reject_set
+ self.reserved_words = reswords_set
+ self.builtins = builtins_set
+ self.pseudo_builtins = pseudo_builtins_set
+ self.adts = adts_set
+ self.modules = modules_set
+ self.types = types_set
+ self.procedures = procedures_set
+ self.variables = variables_set
+ self.constants = constants_set
+ #
+ # if __debug__:
+ # print 'exiting set_dialect'
+ # print ' self.dialect: ', self.dialect
+ # print ' self.lexemes_to_reject: ', self.lexemes_to_reject
+ # print ' self.reserved_words: ', self.reserved_words
+ # print ' self.builtins: ', self.builtins
+ # print ' self.pseudo_builtins: ', self.pseudo_builtins
+ # print ' self.adts: ', self.adts
+ # print ' self.modules: ', self.modules
+ # print ' self.types: ', self.types
+ # print ' self.procedures: ', self.procedures
+ # print ' self.variables: ', self.variables
+ # print ' self.types: ', self.types
+ # print ' self.constants: ', self.constants
+
+ # Extracts a dialect name from a dialect tag comment string and checks
+ # the extracted name against known dialects. If a match is found, the
+ # matching name is returned, otherwise dialect id 'unknown' is returned
+ def get_dialect_from_dialect_tag(self, dialect_tag):
+ #
+ # if __debug__:
+ # print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
+ #
+ # constants
+ left_tag_delim = '(*!'
+ right_tag_delim = '*)'
+ left_tag_delim_len = len(left_tag_delim)
+ right_tag_delim_len = len(right_tag_delim)
+ indicator_start = left_tag_delim_len
+ indicator_end = -(right_tag_delim_len)
+ #
+ # check comment string for dialect indicator
+ if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
+ and dialect_tag.startswith(left_tag_delim) \
+ and dialect_tag.endswith(right_tag_delim):
+ #
+ # if __debug__:
+ # print 'dialect tag found'
+ #
+ # extract dialect indicator
+ indicator = dialect_tag[indicator_start:indicator_end]
+ #
+ # if __debug__:
+ # print 'extracted: ', indicator
+ #
+ # check against known dialects
+ for index in range(1, len(self.dialects)):
+ #
+ # if __debug__:
+ # print 'dialects[', index, ']: ', self.dialects[index]
+ #
+ if indicator == self.dialects[index]:
+ #
+ # if __debug__:
+ # print 'matching dialect found'
+ #
+ # indicator matches known dialect
+ return indicator
+ else:
+ # indicator does not match any dialect
+ return 'unknown' # default
+ else:
+ # invalid indicator string
+ return 'unknown' # default
+
+ # intercept the token stream, modify token attributes and return them
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ #
+ # check for dialect tag if dialect has not been set by tag
+ if not self.dialect_set_by_tag and token == Comment.Special:
+ indicated_dialect = self.get_dialect_from_dialect_tag(value)
+ if indicated_dialect != 'unknown':
+ # token is a dialect indicator
+ # reset reserved words and builtins
+ self.set_dialect(indicated_dialect)
+ self.dialect_set_by_tag = True
+ #
+ # check for reserved words, predefined and stdlib identifiers
+ if token is Name:
+ if value in self.reserved_words:
+ token = Keyword.Reserved
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.builtins:
+ token = Name.Builtin
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.pseudo_builtins:
+ token = Name.Builtin.Pseudo
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.adts:
+ if not self.treat_stdlib_adts_as_builtins:
+ token = Name.Namespace
+ else:
+ token = Name.Builtin.Pseudo
+ if self.algol_publication_mode:
+ value = value.lower()
+ #
+ elif value in self.modules:
+ token = Name.Namespace
+ #
+ elif value in self.types:
+ token = Name.Class
+ #
+ elif value in self.procedures:
+ token = Name.Function
+ #
+ elif value in self.variables:
+ token = Name.Variable
+ #
+ elif value in self.constants:
+ token = Name.Constant
+ #
+ elif token in Number:
+ #
+ # mark prefix number literals as error for PIM and ISO dialects
+ if self.dialect not in ('unknown', 'm2r10', 'objm2'):
+ if "'" in value or value[0:2] in ('0b', '0x', '0u'):
+ token = Error
+ #
+ elif self.dialect in ('m2r10', 'objm2'):
+ # mark base-8 number literals as errors for M2 R10 and ObjM2
+ if token is Number.Oct:
+ token = Error
+ # mark suffix base-16 literals as errors for M2 R10 and ObjM2
+ elif token is Number.Hex and 'H' in value:
+ token = Error
+ # mark real numbers with E as errors for M2 R10 and ObjM2
+ elif token is Number.Float and 'E' in value:
+ token = Error
+ #
+ elif token in Comment:
+ #
+ # mark single line comment as error for PIM and ISO dialects
+ if token is Comment.Single:
+ if self.dialect not in ('unknown', 'm2r10', 'objm2'):
+ token = Error
+ #
+ if token is Comment.Preproc:
+ # mark ISO pragma as error for PIM dialects
+ if value.startswith('<*') and \
+ self.dialect.startswith('m2pim'):
+ token = Error
+ # mark PIM pragma as comment for other dialects
+ elif value.startswith('(*$') and \
+ self.dialect != 'unknown' and \
+ not self.dialect.startswith('m2pim'):
+ token = Comment.Multiline
+ #
+ else: # token is neither Name nor Comment
+ #
+ # mark lexemes matching the dialect's error token set as errors
+ if value in self.lexemes_to_reject:
+ token = Error
+ #
+ # substitute lexemes when in Algol mode
+ if self.algol_publication_mode:
+ if value == '#':
value = '≠'
- elif value == '<=':
+ elif value == '<=':
value = '≤'
- elif value == '>=':
+ elif value == '>=':
value = '≥'
- elif value == '==':
+ elif value == '==':
value = '≡'
- elif value == '*.':
+ elif value == '*.':
value = '•'
-
- # return result
- yield index, token, value
+
+ # return result
+ yield index, token, value
def analyse_text(text):
"""It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nimrod.py b/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
index ce6ba87537..5b672c3388 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
@@ -1,47 +1,47 @@
-"""
- pygments.lexers.nimrod
- ~~~~~~~~~~~~~~~~~~~~~~
-
+"""
+ pygments.lexers.nimrod
+ ~~~~~~~~~~~~~~~~~~~~~~
+
Lexer for the Nim language (formerly known as Nimrod).
-
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-
-__all__ = ['NimrodLexer']
-
-
-class NimrodLexer(RegexLexer):
- """
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['NimrodLexer']
+
+
+class NimrodLexer(RegexLexer):
+ """
For `Nim <http://nim-lang.org/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Nimrod'
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Nimrod'
aliases = ['nimrod', 'nim']
- filenames = ['*.nim', '*.nimrod']
+ filenames = ['*.nim', '*.nimrod']
mimetypes = ['text/x-nim']
-
- flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
-
- def underscorize(words):
- newWords = []
- new = ""
- for word in words:
- for ch in word:
- new += (ch + "_?")
- newWords.append(new)
- new = ""
- return "|".join(newWords)
-
- keywords = [
+
+ flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
+
+ def underscorize(words):
+ newWords = []
+ new = ""
+ for word in words:
+ for ch in word:
+ new += (ch + "_?")
+ newWords.append(new)
+ new = ""
+ return "|".join(newWords)
+
+ keywords = [
'addr', 'and', 'as', 'asm', 'bind', 'block', 'break', 'case',
'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
@@ -50,109 +50,109 @@ class NimrodLexer(RegexLexer):
'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise',
'ref', 'return', 'shl', 'shr', 'static', 'template', 'try',
'tuple', 'type', 'using', 'when', 'while', 'xor'
- ]
-
- keywordsPseudo = [
- 'nil', 'true', 'false'
- ]
-
- opWords = [
- 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
- 'notin', 'is', 'isnot'
- ]
-
- types = [
- 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
- 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
- ]
-
- tokens = {
- 'root': [
- (r'##.*$', String.Doc),
- (r'#.*$', Comment),
- (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
- (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
- Punctuation),
-
- # Strings
- (r'(?:[\w]+)"', String, 'rdqs'),
- (r'"""', String, 'tdqs'),
- ('"', String, 'dqs'),
-
- # Char
- ("'", String.Char, 'chars'),
-
- # Keywords
- (r'(%s)\b' % underscorize(opWords), Operator.Word),
- (r'(p_?r_?o_?c_?\s)(?![(\[\]])', Keyword, 'funcname'),
- (r'(%s)\b' % underscorize(keywords), Keyword),
- (r'(%s)\b' % underscorize(['from', 'import', 'include']),
- Keyword.Namespace),
- (r'(v_?a_?r)\b', Keyword.Declaration),
- (r'(%s)\b' % underscorize(types), Keyword.Type),
- (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
- # Identifiers
- (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
- # Numbers
- (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
- Number.Float, ('float-suffix', 'float-number')),
- (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
- (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
- (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
- (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
- # Whitespace
- (r'\s+', Text),
- (r'.+$', Error),
- ],
- 'chars': [
- (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
- (r"'", String.Char, '#pop'),
- (r".", String.Char)
- ],
- 'strings': [
- (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
- (r'[^\\\'"$\n]+', String),
- # quotes, dollars and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'\$', String)
- # newlines are an error (use "nl" state)
- ],
- 'dqs': [
- (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
- String.Escape),
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'rdqs': [
- (r'"(?!")', String, '#pop'),
- (r'""', String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""(?!")', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'funcname': [
- (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
- (r'`.+`', Name.Function, '#pop')
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'float-number': [
- (r'\.(?!\.)[0-9_]*', Number.Float),
- (r'e[+-]?[0-9][0-9_]*', Number.Float),
- default('#pop')
- ],
- 'float-suffix': [
- (r'\'f(32|64)', Number.Float),
- default('#pop')
- ],
- 'int-suffix': [
- (r'\'i(32|64)', Number.Integer.Long),
- (r'\'i(8|16)', Number.Integer),
- default('#pop')
- ],
- }
+ ]
+
+ keywordsPseudo = [
+ 'nil', 'true', 'false'
+ ]
+
+ opWords = [
+ 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
+ 'notin', 'is', 'isnot'
+ ]
+
+ types = [
+ 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
+ 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'##.*$', String.Doc),
+ (r'#.*$', Comment),
+ (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
+ (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
+ Punctuation),
+
+ # Strings
+ (r'(?:[\w]+)"', String, 'rdqs'),
+ (r'"""', String, 'tdqs'),
+ ('"', String, 'dqs'),
+
+ # Char
+ ("'", String.Char, 'chars'),
+
+ # Keywords
+ (r'(%s)\b' % underscorize(opWords), Operator.Word),
+ (r'(p_?r_?o_?c_?\s)(?![(\[\]])', Keyword, 'funcname'),
+ (r'(%s)\b' % underscorize(keywords), Keyword),
+ (r'(%s)\b' % underscorize(['from', 'import', 'include']),
+ Keyword.Namespace),
+ (r'(v_?a_?r)\b', Keyword.Declaration),
+ (r'(%s)\b' % underscorize(types), Keyword.Type),
+ (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
+ # Identifiers
+ (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
+ # Numbers
+ (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
+ Number.Float, ('float-suffix', 'float-number')),
+ (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
+ (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
+ (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
+ (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
+ # Whitespace
+ (r'\s+', Text),
+ (r'.+$', Error),
+ ],
+ 'chars': [
+ (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
+ (r"'", String.Char, '#pop'),
+ (r".", String.Char)
+ ],
+ 'strings': [
+ (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
+ (r'[^\\\'"$\n]+', String),
+ # quotes, dollars and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'\$', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'dqs': [
+ (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
+ String.Escape),
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'rdqs': [
+ (r'"(?!")', String, '#pop'),
+ (r'""', String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""(?!")', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'funcname': [
+ (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
+ (r'`.+`', Name.Function, '#pop')
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'float-number': [
+ (r'\.(?!\.)[0-9_]*', Number.Float),
+ (r'e[+-]?[0-9][0-9_]*', Number.Float),
+ default('#pop')
+ ],
+ 'float-suffix': [
+ (r'\'f(32|64)', Number.Float),
+ default('#pop')
+ ],
+ 'int-suffix': [
+ (r'\'i(32|64)', Number.Integer.Long),
+ (r'\'i(8|16)', Number.Integer),
+ default('#pop')
+ ],
+ }
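The underscorize() helper above encodes Nim's style-insensitivity: every keyword character may be followed by an optional underscore, and the whole alternation is compiled with re.IGNORECASE. A minimal sketch of the same idea outside the lexer, with illustrative inputs:

    import re

    def underscorize(words):
        # each character may be followed by an optional '_', mirroring the helper above
        return "|".join("".join(ch + "_?" for ch in word) for word in words)

    pattern = re.compile(r'(%s)\b' % underscorize(['proc', 'when']), re.IGNORECASE)
    assert pattern.match('proc')
    assert pattern.match('p_roc')   # underscores inside identifiers are ignored in Nim
    assert pattern.match('WHEN')    # case differences are ignored as well
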
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nit.py b/contrib/python/Pygments/py3/pygments/lexers/nit.py
index 7a15d78f99..ff7a107850 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nit.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nit.py
@@ -1,63 +1,63 @@
-"""
- pygments.lexers.nit
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for the Nit language.
-
+"""
+ pygments.lexers.nit
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Nit language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['NitLexer']
-
-
-class NitLexer(RegexLexer):
- """
- For `nit <http://nitlanguage.org>`_ source.
-
- .. versionadded:: 2.0
- """
-
- name = 'Nit'
- aliases = ['nit']
- filenames = ['*.nit']
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- (words((
- 'package', 'module', 'import', 'class', 'abstract', 'interface',
- 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
- 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
- 'public', 'protected', 'private', 'intrude', 'if', 'then',
- 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
- 'implies', 'return', 'continue', 'break', 'abort', 'assert',
- 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
- 'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t( ])'),
- Keyword),
- (r'[A-Z]\w*', Name.Class),
- (r'"""(([^\'\\]|\\.)|\\r|\\n)*((\{\{?)?(""?\{\{?)*""""*)', String), # Simple long string
- (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
- r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
- (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?(\{\{?""?)*\{\{\{\{*)', String), # Start long string
- (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(""?)?(\{\{?""?)*\{\{\{\{*', String), # Mid long string
- (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(\{\{?)?(""?\{\{?)*""""*', String), # End long string
- (r'"(\\.|([^"}{\\]))*"', String), # Simple String
- (r'"(\\.|([^"}{\\]))*\{', String), # Start string
- (r'\}(\\.|([^"}{\\]))*\{', String), # Mid String
- (r'\}(\\.|([^"}{\\]))*"', String), # End String
- (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
- (r'[0-9]+', Number.Integer),
- (r'[0-9]*.[0-9]+', Number.Float),
- (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
- (r'[a-z]\w*', Name),
- (r'_\w+', Name.Variable.Instance),
- (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
- (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
- (r'`\{[^`]*`\}', Text), # Extern blocks won't be Lexed by Nit
- (r'[\r\n\t ]+', Text),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['NitLexer']
+
+
+class NitLexer(RegexLexer):
+ """
+ For `nit <http://nitlanguage.org>`_ source.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nit'
+ aliases = ['nit']
+ filenames = ['*.nit']
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment.Single),
+ (words((
+ 'package', 'module', 'import', 'class', 'abstract', 'interface',
+ 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
+ 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
+ 'public', 'protected', 'private', 'intrude', 'if', 'then',
+ 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
+ 'implies', 'return', 'continue', 'break', 'abort', 'assert',
+ 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
+ 'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t( ])'),
+ Keyword),
+ (r'[A-Z]\w*', Name.Class),
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*((\{\{?)?(""?\{\{?)*""""*)', String), # Simple long string
+ (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
+ r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?(\{\{?""?)*\{\{\{\{*)', String), # Start long string
+ (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(""?)?(\{\{?""?)*\{\{\{\{*', String), # Mid long string
+ (r'\}\}\}(((\\.|[^\'\\])|\\r|\\n))*(\{\{?)?(""?\{\{?)*""""*', String), # End long string
+ (r'"(\\.|([^"}{\\]))*"', String), # Simple String
+ (r'"(\\.|([^"}{\\]))*\{', String), # Start string
+ (r'\}(\\.|([^"}{\\]))*\{', String), # Mid String
+ (r'\}(\\.|([^"}{\\]))*"', String), # End String
+ (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
+ (r'[0-9]+', Number.Integer),
+ (r'[0-9]*.[0-9]+', Number.Float),
+ (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
+ (r'[a-z]\w*', Name),
+ (r'_\w+', Name.Variable.Instance),
+ (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
+ (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
+ (r'`\{[^`]*`\}', Text), # Extern blocks won't be Lexed by Nit
+ (r'[\r\n\t ]+', Text),
+ ],
+ }
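For reference, a hedged usage sketch (the Nit snippet is illustrative, not from the patch) showing the lexer above driven through the standard Pygments pipeline:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.nit import NitLexer

    code = 'fun hello do print "Hello, Nit!"\n'
    # highlight() runs NitLexer over the snippet and renders ANSI-coloured output
    print(highlight(code, NitLexer(), TerminalFormatter()))
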
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nix.py b/contrib/python/Pygments/py3/pygments/lexers/nix.py
index bd7afe7440..23e4dbc336 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nix.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nix.py
@@ -1,135 +1,135 @@
-"""
- pygments.lexers.nix
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the NixOS Nix language.
-
+"""
+ pygments.lexers.nix
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the NixOS Nix language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-
-__all__ = ['NixLexer']
-
-
-class NixLexer(RegexLexer):
- """
- For the `Nix language <http://nixos.org/nix/>`_.
-
- .. versionadded:: 2.0
- """
-
- name = 'Nix'
- aliases = ['nixos', 'nix']
- filenames = ['*.nix']
- mimetypes = ['text/x-nix']
-
- flags = re.MULTILINE | re.UNICODE
-
- keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
- 'else', 'then', '...']
- builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
- 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
- operators = ['++', '+', '?', '.', '!', '//', '==',
- '!=', '&&', '||', '->', '=']
-
- punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
-
- tokens = {
- 'root': [
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # multiline comments
- (r'/\*', Comment.Multiline, 'comment'),
-
- # whitespace
- (r'\s+', Text),
-
- # keywords
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
-
- # highlight the builtins
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
- Name.Builtin),
-
- (r'\b(true|false|null)\b', Name.Constant),
-
- # operators
- ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
- Operator),
-
- # word operators
- (r'\b(or|and)\b', Operator.Word),
-
- # punctuations
- ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
-
- # integers
- (r'[0-9]+', Number.Integer),
-
- # strings
- (r'"', String.Double, 'doublequote'),
- (r"''", String.Single, 'singlequote'),
-
- # paths
- (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
- (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
-
- # urls
- (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
-
- # names of variables
- (r'[\w-]+\s*=', String.Symbol),
- (r'[a-zA-Z_][\w\'-]*', Text),
-
- ],
- 'comment': [
- (r'[^/*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'singlequote': [
- (r"'''", String.Escape),
- (r"''\$\{", String.Escape),
- (r"''\n", String.Escape),
- (r"''\r", String.Escape),
- (r"''\t", String.Escape),
- (r"''", String.Single, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r"[^']", String.Single),
- ],
- 'doublequote': [
- (r'\\', String.Escape),
- (r'\\"', String.Escape),
- (r'\\$\{', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r'[^"]', String.Double),
- ],
- 'antiquote': [
- (r"\}", String.Interpol, '#pop'),
- # TODO: we should probably escape also here ''${ \${
- (r"\$\{", String.Interpol, '#push'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- rv = 0.0
- # TODO: let/in
- if re.search(r'import.+?<[^>]+>', text):
- rv += 0.4
- if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
- rv += 0.4
- if re.search(r'=\s+mkIf\s+', text):
- rv += 0.4
- if re.search(r'\{[a-zA-Z,\s]+\}:', text):
- rv += 0.1
- return rv
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['NixLexer']
+
+
+class NixLexer(RegexLexer):
+ """
+ For the `Nix language <http://nixos.org/nix/>`_.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nix'
+ aliases = ['nixos', 'nix']
+ filenames = ['*.nix']
+ mimetypes = ['text/x-nix']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
+ 'else', 'then', '...']
+ builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
+ 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
+ operators = ['++', '+', '?', '.', '!', '//', '==',
+ '!=', '&&', '||', '->', '=']
+
+ punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
+
+ tokens = {
+ 'root': [
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # multiline comments
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # keywords
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
+
+ # highlight the builtins
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
+ Name.Builtin),
+
+ (r'\b(true|false|null)\b', Name.Constant),
+
+ # operators
+ ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
+ Operator),
+
+ # word operators
+ (r'\b(or|and)\b', Operator.Word),
+
+ # punctuations
+ ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
+
+ # integers
+ (r'[0-9]+', Number.Integer),
+
+ # strings
+ (r'"', String.Double, 'doublequote'),
+ (r"''", String.Single, 'singlequote'),
+
+ # paths
+ (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
+ (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
+
+ # urls
+ (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
+
+ # names of variables
+ (r'[\w-]+\s*=', String.Symbol),
+ (r'[a-zA-Z_][\w\'-]*', Text),
+
+ ],
+ 'comment': [
+ (r'[^/*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'singlequote': [
+ (r"'''", String.Escape),
+ (r"''\$\{", String.Escape),
+ (r"''\n", String.Escape),
+ (r"''\r", String.Escape),
+ (r"''\t", String.Escape),
+ (r"''", String.Single, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r"[^']", String.Single),
+ ],
+ 'doublequote': [
+ (r'\\', String.Escape),
+ (r'\\"', String.Escape),
+ (r'\\$\{', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r'[^"]', String.Double),
+ ],
+ 'antiquote': [
+ (r"\}", String.Interpol, '#pop'),
+ # TODO: we should probably escape also here ''${ \${
+ (r"\$\{", String.Interpol, '#push'),
+ include('root'),
+ ],
+ }
+
+ def analyse_text(text):
+ rv = 0.0
+ # TODO: let/in
+ if re.search(r'import.+?<[^>]+>', text):
+ rv += 0.4
+ if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
+ rv += 0.4
+ if re.search(r'=\s+mkIf\s+', text):
+ rv += 0.4
+ if re.search(r'\{[a-zA-Z,\s]+\}:', text):
+ rv += 0.1
+ return rv
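The analyse_text() heuristic above rewards idioms that are rare outside Nix (angle-bracket imports, mkDerivation, attribute-set patterns), which is what lets guess_lexer() pick NixLexer for untyped snippets. A small sketch with an assumed snippet:

    from pygments.lexers import guess_lexer

    snippet = '''
    { stdenv, fetchurl }:
    stdenv.mkDerivation rec {
      name = "hello-1.0";
      src = import <nixpkgs> {};
    }
    '''
    # the regexes above score roughly 0.4 + 0.4 + 0.1 for this text,
    # so guess_lexer() is expected (not guaranteed) to report the Nix lexer
    print(guess_lexer(snippet).name)
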
diff --git a/contrib/python/Pygments/py3/pygments/lexers/oberon.py b/contrib/python/Pygments/py3/pygments/lexers/oberon.py
index 7010e910b5..266506f62e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/oberon.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/oberon.py
@@ -1,107 +1,107 @@
-"""
- pygments.lexers.oberon
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Oberon family languages.
-
+"""
+ pygments.lexers.oberon
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Oberon family languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['ComponentPascalLexer']
-
-
-class ComponentPascalLexer(RegexLexer):
- """
- For `Component Pascal <http://www.oberon.ch/pdf/CP-Lang.pdf>`_ source code.
-
- .. versionadded:: 2.1
- """
- name = 'Component Pascal'
- aliases = ['componentpascal', 'cp']
- filenames = ['*.cp', '*.cps']
- mimetypes = ['text/x-component-pascal']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comments'),
- include('punctuation'),
- include('numliterals'),
- include('strings'),
- include('operators'),
- include('builtins'),
- include('identifiers'),
- ],
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'comments': [
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ComponentPascalLexer']
+
+
+class ComponentPascalLexer(RegexLexer):
+ """
+ For `Component Pascal <http://www.oberon.ch/pdf/CP-Lang.pdf>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Component Pascal'
+ aliases = ['componentpascal', 'cp']
+ filenames = ['*.cp', '*.cps']
+ mimetypes = ['text/x-component-pascal']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+ include('punctuation'),
+ include('numliterals'),
+ include('strings'),
+ include('operators'),
+ include('builtins'),
+ include('identifiers'),
+ ],
+ 'whitespace': [
+ (r'\n+', Text), # blank lines
+ (r'\s+', Text), # whitespace
+ ],
+ 'comments': [
(r'\(\*([^$].*?)\*\)', Comment.Multiline),
- # TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
- ],
- 'punctuation': [
+ # TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
+ ],
+ 'punctuation': [
(r'[()\[\]{},.:;|]', Punctuation),
- ],
- 'numliterals': [
- (r'[0-9A-F]+X\b', Number.Hex), # char code
- (r'[0-9A-F]+[HL]\b', Number.Hex), # hexadecimal number
- (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
- (r'[0-9]+\.[0-9]+', Number.Float), # real number
- (r'[0-9]+', Number.Integer), # decimal whole number
- ],
- 'strings': [
- (r"'[^\n']*'", String), # single quoted string
- (r'"[^\n"]*"', String), # double quoted string
- ],
- 'operators': [
- # Arithmetic Operators
- (r'[+-]', Operator),
- (r'[*/]', Operator),
- # Relational Operators
- (r'[=#<>]', Operator),
- # Dereferencing Operator
- (r'\^', Operator),
- # Logical AND Operator
- (r'&', Operator),
- # Logical NOT Operator
- (r'~', Operator),
- # Assignment Symbol
- (r':=', Operator),
- # Range Constructor
- (r'\.\.', Operator),
- (r'\$', Operator),
- ],
- 'identifiers': [
+ ],
+ 'numliterals': [
+ (r'[0-9A-F]+X\b', Number.Hex), # char code
+ (r'[0-9A-F]+[HL]\b', Number.Hex), # hexadecimal number
+ (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
+ (r'[0-9]+\.[0-9]+', Number.Float), # real number
+ (r'[0-9]+', Number.Integer), # decimal whole number
+ ],
+ 'strings': [
+ (r"'[^\n']*'", String), # single quoted string
+ (r'"[^\n"]*"', String), # double quoted string
+ ],
+ 'operators': [
+ # Arithmetic Operators
+ (r'[+-]', Operator),
+ (r'[*/]', Operator),
+ # Relational Operators
+ (r'[=#<>]', Operator),
+ # Dereferencing Operator
+ (r'\^', Operator),
+ # Logical AND Operator
+ (r'&', Operator),
+ # Logical NOT Operator
+ (r'~', Operator),
+ # Assignment Symbol
+ (r':=', Operator),
+ # Range Constructor
+ (r'\.\.', Operator),
+ (r'\$', Operator),
+ ],
+ 'identifiers': [
(r'([a-zA-Z_$][\w$]*)', Name),
- ],
- 'builtins': [
- (words((
- 'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
- 'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
- ), suffix=r'\b'), Keyword.Type),
- (words((
- 'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
- 'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
- 'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
- 'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
- 'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
- 'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
- 'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
- 'VAR', 'WHILE', 'WITH'
- ), suffix=r'\b'), Keyword.Reserved),
- (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
- ]
- }
+ ],
+ 'builtins': [
+ (words((
+ 'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
+ 'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
+ ), suffix=r'\b'), Keyword.Type),
+ (words((
+ 'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
+ 'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
+ 'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
+ 'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
+ 'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
+ 'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
+ 'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
+ 'VAR', 'WHILE', 'WITH'
+ ), suffix=r'\b'), Keyword.Reserved),
+ (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
+ ]
+ }
def analyse_text(text):
"""The only other lexer using .cp is the C++ one, so we check if for
diff --git a/contrib/python/Pygments/py3/pygments/lexers/objective.py b/contrib/python/Pygments/py3/pygments/lexers/objective.py
index a4cc44b387..656e9ced76 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/objective.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/objective.py
@@ -1,425 +1,425 @@
-"""
- pygments.lexers.objective
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Objective-C family languages.
-
+"""
+ pygments.lexers.objective
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Objective-C family languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
- inherit, default
-from pygments.token import Text, Keyword, Name, String, Operator, \
- Number, Punctuation, Literal, Comment
-
-from pygments.lexers.c_cpp import CLexer, CppLexer
-
-__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
-
-
-def objective(baselexer):
- """
- Generate a subclass of baselexer that accepts the Objective-C syntax
- extensions.
- """
-
- # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
- # since that's quite common in ordinary C/C++ files. It's OK to match
- # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
- #
- # The upshot of this is that we CANNOT match @class or @interface
- _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
-
- # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
- # (note the identifier is *optional* when there is a ':'!)
- _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
- r'(?:[a-zA-Z_]\w*\s*\]|'
- r'(?:[a-zA-Z_]\w*)?:)')
-
- class GeneratedObjectiveCVariant(baselexer):
- """
- Implements Objective-C syntax on top of an existing C family lexer.
- """
-
- tokens = {
- 'statements': [
- (r'@"', String, 'string'),
- (r'@(YES|NO)', Number),
- (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'@0[0-7]+[Ll]?', Number.Oct),
- (r'@\d+[Ll]?', Number.Integer),
- (r'@\(', Literal, 'literal_number'),
- (r'@\[', Literal, 'literal_array'),
- (r'@\{', Literal, 'literal_dictionary'),
- (words((
- '@selector', '@private', '@protected', '@public', '@encode',
- '@synchronized', '@try', '@throw', '@catch', '@finally',
- '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
- '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
- 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
- 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
- 'out', 'inout', 'release', 'class', '@dynamic', '@optional',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
+ inherit, default
+from pygments.token import Text, Keyword, Name, String, Operator, \
+ Number, Punctuation, Literal, Comment
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+
+__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
+
+
+def objective(baselexer):
+ """
+ Generate a subclass of baselexer that accepts the Objective-C syntax
+ extensions.
+ """
+
+ # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
+ # since that's quite common in ordinary C/C++ files. It's OK to match
+ # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
+ #
+ # The upshot of this is that we CANNOT match @class or @interface
+ _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
+
+ # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
+ # (note the identifier is *optional* when there is a ':'!)
+ _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
+ r'(?:[a-zA-Z_]\w*\s*\]|'
+ r'(?:[a-zA-Z_]\w*)?:)')
+
+ class GeneratedObjectiveCVariant(baselexer):
+ """
+ Implements Objective-C syntax on top of an existing C family lexer.
+ """
+
+ tokens = {
+ 'statements': [
+ (r'@"', String, 'string'),
+ (r'@(YES|NO)', Number),
+ (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'@0[0-7]+[Ll]?', Number.Oct),
+ (r'@\d+[Ll]?', Number.Integer),
+ (r'@\(', Literal, 'literal_number'),
+ (r'@\[', Literal, 'literal_array'),
+ (r'@\{', Literal, 'literal_dictionary'),
+ (words((
+ '@selector', '@private', '@protected', '@public', '@encode',
+ '@synchronized', '@try', '@throw', '@catch', '@finally',
+ '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
+ '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
+ 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
+ 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
+ 'out', 'inout', 'release', 'class', '@dynamic', '@optional',
'@required', '@autoreleasepool', '@import'), suffix=r'\b'),
- Keyword),
- (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
- 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
- Keyword.Type),
- (r'@(true|false|YES|NO)\n', Name.Builtin),
- (r'(YES|NO|nil|self|super)\b', Name.Builtin),
- # Carbon types
- (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
- # Carbon built-ins
- (r'(TRUE|FALSE)\b', Name.Builtin),
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_classname')),
- (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_forward_classname')),
- # @ can also prefix other expressions like @{...} or @(...)
- (r'@', Punctuation),
- inherit,
- ],
- 'oc_classname': [
- # interface definition that inherits
+ Keyword),
+ (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
+ 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'@(true|false|YES|NO)\n', Name.Builtin),
+ (r'(YES|NO|nil|self|super)\b', Name.Builtin),
+ # Carbon types
+ (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
+ # Carbon built-ins
+ (r'(TRUE|FALSE)\b', Name.Builtin),
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_classname')),
+ (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_forward_classname')),
+ # @ can also prefix other expressions like @{...} or @(...)
+ (r'@', Punctuation),
+ inherit,
+ ],
+ 'oc_classname': [
+ # interface definition that inherits
(r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
- bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
- ('#pop', 'oc_ivars')),
+ bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
(r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- # interface definition for a category
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
+ # interface definition for a category
(r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
- bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
- ('#pop', 'oc_ivars')),
+ bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
(r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
- bygroups(Name.Class, Text, Name.Label), '#pop'),
- # simple interface / implementation
+ bygroups(Name.Class, Text, Name.Label), '#pop'),
+ # simple interface / implementation
(r'([a-zA-Z$_][\w$]*)(\s*)(\{)',
- bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
+ bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
(r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'oc_forward_classname': [
+ ],
+ 'oc_forward_classname': [
(r'([a-zA-Z$_][\w$]*)(\s*,\s*)',
- bygroups(Name.Class, Text), 'oc_forward_classname'),
+ bygroups(Name.Class, Text), 'oc_forward_classname'),
(r'([a-zA-Z$_][\w$]*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop')
- ],
- 'oc_ivars': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'root': [
- # methods
- (r'^([-+])(\s*)' # method marker
- r'(\(.*?\))?(\s*)' # return type
- r'([a-zA-Z$_][\w$]*:?)', # begin of method name
- bygroups(Punctuation, Text, using(this),
- Text, Name.Function),
- 'method'),
- inherit,
- ],
- 'method': [
- include('whitespace'),
- # TODO unsure if ellipses are allowed elsewhere, see
- # discussion in Issue 789
- (r',', Punctuation),
- (r'\.\.\.', Punctuation),
- (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
- bygroups(using(this), Text, Name.Variable)),
- (r'[a-zA-Z$_][\w$]*:', Name.Function),
- (';', Punctuation, '#pop'),
- (r'\{', Punctuation, 'function'),
- default('#pop'),
- ],
- 'literal_number': [
- (r'\(', Punctuation, 'literal_number_inner'),
- (r'\)', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_number_inner': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_array': [
- (r'\[', Punctuation, 'literal_array_inner'),
- (r'\]', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_array_inner': [
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_dictionary': [
- (r'\}', Literal, '#pop'),
- include('statement'),
- ],
- }
-
- def analyse_text(text):
- if _oc_keywords.search(text):
- return 1.0
- elif '@"' in text: # strings
- return 0.8
- elif re.search('@[0-9]+', text):
- return 0.7
- elif _oc_message.search(text):
- return 0.8
- return 0
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
- COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
- for index, token, value in \
- baselexer.get_tokens_unprocessed(self, text):
- if token is Name or token is Name.Class:
- if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
- or value in COCOA_PRIMITIVES:
- token = Name.Builtin.Pseudo
-
- yield index, token, value
-
- return GeneratedObjectiveCVariant
-
-
-class ObjectiveCLexer(objective(CLexer)):
- """
- For Objective-C source code with preprocessor directives.
- """
-
- name = 'Objective-C'
- aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
- filenames = ['*.m', '*.h']
- mimetypes = ['text/x-objective-c']
- priority = 0.05 # Lower than C
-
-
-class ObjectiveCppLexer(objective(CppLexer)):
- """
- For Objective-C++ source code with preprocessor directives.
- """
-
- name = 'Objective-C++'
- aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
- filenames = ['*.mm', '*.hh']
- mimetypes = ['text/x-objective-c++']
- priority = 0.05 # Lower than C++
-
-
-class LogosLexer(ObjectiveCppLexer):
- """
- For Logos + Objective-C source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
-
- name = 'Logos'
- aliases = ['logos']
- filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
- mimetypes = ['text/x-logos']
- priority = 0.25
-
- tokens = {
- 'statements': [
- (r'(%orig|%log)\b', Keyword),
- (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
- bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
- (r'(%init)\b(\()',
- bygroups(Keyword, Punctuation), 'logos_init_directive'),
- (r'(%init)(?=\s*;)', bygroups(Keyword)),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class), '#pop'),
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'logos_classname')),
- inherit,
- ],
- 'logos_init_directive': [
+ bygroups(Name.Class, Text), '#pop')
+ ],
+ 'oc_ivars': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'root': [
+ # methods
+ (r'^([-+])(\s*)' # method marker
+ r'(\(.*?\))?(\s*)' # return type
+ r'([a-zA-Z$_][\w$]*:?)', # begin of method name
+ bygroups(Punctuation, Text, using(this),
+ Text, Name.Function),
+ 'method'),
+ inherit,
+ ],
+ 'method': [
+ include('whitespace'),
+ # TODO unsure if ellipses are allowed elsewhere, see
+ # discussion in Issue 789
+ (r',', Punctuation),
+ (r'\.\.\.', Punctuation),
+ (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
+ bygroups(using(this), Text, Name.Variable)),
+ (r'[a-zA-Z$_][\w$]*:', Name.Function),
+ (';', Punctuation, '#pop'),
+ (r'\{', Punctuation, 'function'),
+ default('#pop'),
+ ],
+ 'literal_number': [
+ (r'\(', Punctuation, 'literal_number_inner'),
+ (r'\)', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_number_inner': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array': [
+ (r'\[', Punctuation, 'literal_array_inner'),
+ (r'\]', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array_inner': [
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_dictionary': [
+ (r'\}', Literal, '#pop'),
+ include('statement'),
+ ],
+ }
+
+ def analyse_text(text):
+ if _oc_keywords.search(text):
+ return 1.0
+ elif '@"' in text: # strings
+ return 0.8
+ elif re.search('@[0-9]+', text):
+ return 0.7
+ elif _oc_message.search(text):
+ return 0.8
+ return 0
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+ COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+ for index, token, value in \
+ baselexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Class:
+ if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+ or value in COCOA_PRIMITIVES:
+ token = Name.Builtin.Pseudo
+
+ yield index, token, value
+
+ return GeneratedObjectiveCVariant
+
+
+class ObjectiveCLexer(objective(CLexer)):
+ """
+ For Objective-C source code with preprocessor directives.
+ """
+
+ name = 'Objective-C'
+ aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
+ filenames = ['*.m', '*.h']
+ mimetypes = ['text/x-objective-c']
+ priority = 0.05 # Lower than C
+
+
+class ObjectiveCppLexer(objective(CppLexer)):
+ """
+ For Objective-C++ source code with preprocessor directives.
+ """
+
+ name = 'Objective-C++'
+ aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
+ filenames = ['*.mm', '*.hh']
+ mimetypes = ['text/x-objective-c++']
+ priority = 0.05 # Lower than C++
+
+
+class LogosLexer(ObjectiveCppLexer):
+ """
+ For Logos + Objective-C source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Logos'
+ aliases = ['logos']
+ filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
+ mimetypes = ['text/x-logos']
+ priority = 0.25
+
+ tokens = {
+ 'statements': [
+ (r'(%orig|%log)\b', Keyword),
+ (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
+ bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
+ (r'(%init)\b(\()',
+ bygroups(Keyword, Punctuation), 'logos_init_directive'),
+ (r'(%init)(?=\s*;)', bygroups(Keyword)),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class), '#pop'),
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'logos_classname')),
+ inherit,
+ ],
+ 'logos_init_directive': [
(r'\s+', Text),
- (',', Punctuation, ('logos_init_directive', '#pop')),
+ (',', Punctuation, ('logos_init_directive', '#pop')),
(r'([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
- bygroups(Name.Class, Text, Punctuation, Text, Text)),
+ bygroups(Name.Class, Text, Punctuation, Text, Text)),
(r'([a-zA-Z$_][\w$]*)', Name.Class),
(r'\)', Punctuation, '#pop'),
- ],
- 'logos_classname': [
+ ],
+ 'logos_classname': [
(r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
(r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'root': [
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- 'logos_classname'),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class)),
+ ],
+ 'root': [
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ 'logos_classname'),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class)),
(r'(%config)(\s*\(\s*)(\w+)(\s*=)(.*?)(\)\s*)',
- bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
- (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
- 'function'),
+ bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
+ (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
+ 'function'),
(r'(%new)(\s*)(\()(.*?)(\))',
- bygroups(Keyword, Text, Keyword, String, Keyword)),
- (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
- inherit,
- ],
- }
-
- _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
-
- def analyse_text(text):
- if LogosLexer._logos_keywords.search(text):
- return 1.0
- return 0
-
-
-class SwiftLexer(RegexLexer):
- """
- For `Swift <https://developer.apple.com/swift/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Swift'
- filenames = ['*.swift']
- aliases = ['swift']
- mimetypes = ['text/x-swift']
-
- tokens = {
- 'root': [
- # Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'//', Comment.Single, 'comment-single'),
- (r'/\*', Comment.Multiline, 'comment-multi'),
+ bygroups(Keyword, Text, Keyword, String, Keyword)),
+ (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
+ inherit,
+ ],
+ }
+
+ _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
+
+ def analyse_text(text):
+ if LogosLexer._logos_keywords.search(text):
+ return 1.0
+ return 0
+
+
+class SwiftLexer(RegexLexer):
+ """
+ For `Swift <https://developer.apple.com/swift/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Swift'
+ filenames = ['*.swift']
+ aliases = ['swift']
+ mimetypes = ['text/x-swift']
+
+ tokens = {
+ 'root': [
+ # Whitespace and Comments
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'//', Comment.Single, 'comment-single'),
+ (r'/\*', Comment.Multiline, 'comment-multi'),
(r'#(if|elseif|else|endif|available)\b', Comment.Preproc, 'preproc'),
-
- # Keywords
- include('keywords'),
-
- # Global Types
- (words((
- 'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
- 'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
- 'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
- 'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
- 'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
- 'EnumerateSequence', 'FilterCollectionView',
- 'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
- 'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
- 'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
- 'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
- 'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
- 'LazyForwardCollection', 'LazyRandomAccessCollection',
- 'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
- 'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
- 'Optional', 'PermutationGenerator', 'QuickLookObject',
- 'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
- 'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
- 'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
- 'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
- 'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
- 'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
- 'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
- 'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
- # Protocols
- 'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
- 'BidirectionalIndexType', 'BitwiseOperationsType',
- 'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
- 'CollectionType', 'Comparable', 'DebugPrintable',
- 'DictionaryLiteralConvertible', 'Equatable',
- 'ExtendedGraphemeClusterLiteralConvertible',
- 'ExtensibleCollectionType', 'FloatLiteralConvertible',
- 'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
- 'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
- 'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
- 'NilLiteralConvertible', 'OutputStreamType', 'Printable',
- 'RandomAccessIndexType', 'RangeReplaceableCollectionType',
- 'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
- 'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
- 'Streamable', 'Strideable', 'StringInterpolationConvertible',
- 'StringLiteralConvertible', 'UnicodeCodecType',
- 'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
- '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
- '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
- '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
- '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
- '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
- '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
- '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
- '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
- '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
- '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
- '_SwiftNSStringType', '_UnsignedIntegerType',
- # Variables
- 'C_ARGC', 'C_ARGV', 'Process',
- # Typealiases
- 'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
- 'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
- 'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
- 'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
- 'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
- 'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
- # Foundation/Cocoa
- 'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
- Name.Builtin),
- # Functions
- (words((
- 'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
- 'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
- 'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
- 'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
- 'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
- 'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
- 'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
- 'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
- 'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
- 'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
- 'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
- 'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
- 'withExtendedLifetime', 'withUnsafeMutablePointer',
- 'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
- 'withVaList'), suffix=r'\b'),
- Name.Builtin.Pseudo),
-
- # Implicit Block Variables
- (r'\$\d+', Name.Variable),
-
- # Binary Literal
- (r'0b[01_]+', Number.Bin),
- # Octal Literal
- (r'0o[0-7_]+', Number.Oct),
- # Hexadecimal Literal
- (r'0x[0-9a-fA-F_]+', Number.Hex),
- # Decimal Literal
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
- (r'[0-9][0-9_]*', Number.Integer),
- # String Literal
- (r'"', String, 'string'),
-
- # Operators and Punctuation
- (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
- (r'[/=\-+!*%<>&|^?~]+', Operator),
-
- # Identifier
- (r'[a-zA-Z_]\w*', Name)
- ],
- 'keywords': [
- (words((
+
+ # Keywords
+ include('keywords'),
+
+ # Global Types
+ (words((
+ 'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
+ 'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
+ 'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
+ 'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
+ 'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
+ 'EnumerateSequence', 'FilterCollectionView',
+ 'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
+ 'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
+ 'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
+ 'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
+ 'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
+ 'LazyForwardCollection', 'LazyRandomAccessCollection',
+ 'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
+ 'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
+ 'Optional', 'PermutationGenerator', 'QuickLookObject',
+ 'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
+ 'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
+ 'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
+ 'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
+ 'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
+ 'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
+ 'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
+ 'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
+ # Protocols
+ 'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
+ 'BidirectionalIndexType', 'BitwiseOperationsType',
+ 'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
+ 'CollectionType', 'Comparable', 'DebugPrintable',
+ 'DictionaryLiteralConvertible', 'Equatable',
+ 'ExtendedGraphemeClusterLiteralConvertible',
+ 'ExtensibleCollectionType', 'FloatLiteralConvertible',
+ 'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
+ 'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
+ 'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
+ 'NilLiteralConvertible', 'OutputStreamType', 'Printable',
+ 'RandomAccessIndexType', 'RangeReplaceableCollectionType',
+ 'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
+ 'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
+ 'Streamable', 'Strideable', 'StringInterpolationConvertible',
+ 'StringLiteralConvertible', 'UnicodeCodecType',
+ 'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
+ '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
+ '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
+ '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
+ '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
+ '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
+ '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
+ '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
+ '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
+ '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
+ '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
+ '_SwiftNSStringType', '_UnsignedIntegerType',
+ # Variables
+ 'C_ARGC', 'C_ARGV', 'Process',
+ # Typealiases
+ 'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
+ 'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
+ 'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
+ 'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
+ 'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
+ 'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
+ # Foundation/Cocoa
+ 'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
+ Name.Builtin),
+ # Functions
+ (words((
+ 'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
+ 'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
+ 'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
+ 'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
+ 'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
+ 'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
+ 'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
+ 'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
+ 'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
+ 'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
+ 'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
+ 'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
+ 'withExtendedLifetime', 'withUnsafeMutablePointer',
+ 'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
+ 'withVaList'), suffix=r'\b'),
+ Name.Builtin.Pseudo),
+
+ # Implicit Block Variables
+ (r'\$\d+', Name.Variable),
+
+ # Binary Literal
+ (r'0b[01_]+', Number.Bin),
+ # Octal Literal
+ (r'0o[0-7_]+', Number.Oct),
+ # Hexadecimal Literal
+ (r'0x[0-9a-fA-F_]+', Number.Hex),
+ # Decimal Literal
+ (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
+ (r'[0-9][0-9_]*', Number.Integer),
+ # String Literal
+ (r'"', String, 'string'),
+
+ # Operators and Punctuation
+ (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
+ (r'[/=\-+!*%<>&|^?~]+', Operator),
+
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name)
+ ],
+ 'keywords': [
+ (words((
'as', 'async', 'await', 'break', 'case', 'catch', 'continue', 'default', 'defer',
'do', 'else', 'fallthrough', 'for', 'guard', 'if', 'in', 'is',
'repeat', 'return', '#selector', 'switch', 'throw', 'try',
'where', 'while'), suffix=r'\b'),
- Keyword),
- (r'@availability\([^)]+\)', Keyword.Reserved),
- (words((
- 'associativity', 'convenience', 'dynamic', 'didSet', 'final',
+ Keyword),
+ (r'@availability\([^)]+\)', Keyword.Reserved),
+ (words((
+ 'associativity', 'convenience', 'dynamic', 'didSet', 'final',
'get', 'indirect', 'infix', 'inout', 'lazy', 'left', 'mutating',
'none', 'nonmutating', 'optional', 'override', 'postfix',
'precedence', 'prefix', 'Protocol', 'required', 'rethrows',
@@ -427,77 +427,77 @@ class SwiftLexer(RegexLexer):
'@availability', '@autoclosure', '@noreturn',
'@NSApplicationMain', '@NSCopying', '@NSManaged', '@objc',
'@UIApplicationMain', '@IBAction', '@IBDesignable',
- '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
- Keyword.Reserved),
- (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
+ '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
+ Keyword.Reserved),
+ (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
r'|__FILE__|__FUNCTION__|__LINE__|_'
r'|#(?:file|line|column|function))\b', Keyword.Constant),
- (r'import\b', Keyword.Declaration, 'module'),
- (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Class)),
- (r'(func)(\s+)([a-zA-Z_]\w*)',
- bygroups(Keyword.Declaration, Text, Name.Function)),
- (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
- Text, Name.Variable)),
- (words((
+ (r'import\b', Keyword.Declaration, 'module'),
+ (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Class)),
+ (r'(func)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Keyword.Declaration, Text, Name.Function)),
+ (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
+ Text, Name.Variable)),
+ (words((
'actor', 'associatedtype', 'class', 'deinit', 'enum', 'extension', 'func', 'import',
'init', 'internal', 'let', 'operator', 'private', 'protocol', 'public',
- 'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
- Keyword.Declaration)
- ],
- 'comment': [
- (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
- Comment.Special)
- ],
-
- # Nested
- 'comment-single': [
- (r'\n', Text, '#pop'),
- include('comment'),
- (r'[^\n]', Comment.Single)
- ],
- 'comment-multi': [
- include('comment'),
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'module': [
- (r'\n', Text, '#pop'),
- (r'[a-zA-Z_]\w*', Name.Class),
- include('root')
- ],
- 'preproc': [
- (r'\n', Text, '#pop'),
- include('keywords'),
- (r'[A-Za-z]\w*', Comment.Preproc),
- include('root')
- ],
- 'string': [
- (r'\\\(', String.Interpol, 'string-intp'),
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String)
- ],
- 'string-intp': [
- (r'\(', String.Interpol, '#push'),
- (r'\)', String.Interpol, '#pop'),
- include('root')
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
- COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name or token is Name.Class:
- if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
- or value in COCOA_PRIMITIVES:
- token = Name.Builtin.Pseudo
-
- yield index, token, value
+ 'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
+ Keyword.Declaration)
+ ],
+ 'comment': [
+ (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
+ Comment.Special)
+ ],
+
+ # Nested
+ 'comment-single': [
+ (r'\n', Text, '#pop'),
+ include('comment'),
+ (r'[^\n]', Comment.Single)
+ ],
+ 'comment-multi': [
+ include('comment'),
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'module': [
+ (r'\n', Text, '#pop'),
+ (r'[a-zA-Z_]\w*', Name.Class),
+ include('root')
+ ],
+ 'preproc': [
+ (r'\n', Text, '#pop'),
+ include('keywords'),
+ (r'[A-Za-z]\w*', Comment.Preproc),
+ include('root')
+ ],
+ 'string': [
+ (r'\\\(', String.Interpol, 'string-intp'),
+ (r'"', String, '#pop'),
+ (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
+ (r'[^\\"]+', String),
+ (r'\\', String)
+ ],
+ 'string-intp': [
+ (r'\(', String.Interpol, '#push'),
+ (r'\)', String.Interpol, '#pop'),
+ include('root')
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+ COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Class:
+ if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+ or value in COCOA_PRIMITIVES:
+ token = Name.Builtin.Pseudo
+
+ yield index, token, value
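Both the generated Objective-C variants and SwiftLexer re-tag Cocoa names by post-processing the base token stream in get_tokens_unprocessed(), turning Name/Name.Class values found in the Cocoa builtin lists into Name.Builtin.Pseudo. A small sketch of inspecting that stream (the Swift line is illustrative):

    from pygments.lexers.objective import SwiftLexer
    from pygments.token import Name

    # prints every Name-derived token; 'String' shows up as Token.Name.Builtin
    # because it is listed among the global types above
    for index, token, value in SwiftLexer().get_tokens_unprocessed('let s: String = "hi"'):
        if token in Name:
            print(index, token, repr(value))
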
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ooc.py b/contrib/python/Pygments/py3/pygments/lexers/ooc.py
index 0c74cdec4b..8e9ee156f8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ooc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ooc.py
@@ -1,84 +1,84 @@
-"""
- pygments.lexers.ooc
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Ooc language.
-
+"""
+ pygments.lexers.ooc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Ooc language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['OocLexer']
-
-
-class OocLexer(RegexLexer):
- """
- For `Ooc <http://ooc-lang.org/>`_ source code
-
- .. versionadded:: 1.2
- """
- name = 'Ooc'
- aliases = ['ooc']
- filenames = ['*.ooc']
- mimetypes = ['text/x-ooc']
-
- tokens = {
- 'root': [
- (words((
- 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
- 'this', 'super', 'new', 'const', 'final', 'static', 'import',
- 'use', 'extern', 'inline', 'proto', 'break', 'continue',
- 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
- 'switch', 'case', 'as', 'in', 'version', 'return', 'true',
- 'false', 'null'), prefix=r'\b', suffix=r'\b'),
- Keyword),
- (r'include\b', Keyword, 'include'),
- (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
- bygroups(Keyword, Text, Keyword, Text, Name.Class)),
- (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
- bygroups(Keyword, Text, Name.Function)),
- (r'\bfunc\b', Keyword),
- # Note: %= and ^= not listed on http://ooc-lang.org/syntax
- (r'//.*', Comment),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
- r'&&?|\|\|?|\^=?)', Operator),
- (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
- Name.Function)),
- (r'[A-Z][A-Z0-9_]+', Name.Constant),
- (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
-
- (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
- bygroups(Name.Function, Text)),
- (r'[a-z]\w*', Name.Variable),
-
- # : introduces types
- (r'[:(){}\[\];,]', Punctuation),
-
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'0c[0-9]+', Number.Oct),
- (r'0b[01]+', Number.Bin),
- (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
- (r'[0-9_]+', Number.Decimal),
-
- (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
- String.Double),
- (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'@', Punctuation), # pointer dereference
- (r'\.', Punctuation), # imports or chain operator
-
- (r'\\[ \t\n]', Text),
- (r'[ \t]+', Text),
- ],
- 'include': [
- (r'[\w/]+', Name),
- (r',', Punctuation),
- (r'[ \t]', Text),
- (r'[;\n]', Text, '#pop'),
- ],
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['OocLexer']
+
+
+class OocLexer(RegexLexer):
+ """
+ For `Ooc <http://ooc-lang.org/>`_ source code
+
+ .. versionadded:: 1.2
+ """
+ name = 'Ooc'
+ aliases = ['ooc']
+ filenames = ['*.ooc']
+ mimetypes = ['text/x-ooc']
+
+ tokens = {
+ 'root': [
+ (words((
+ 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'new', 'const', 'final', 'static', 'import',
+ 'use', 'extern', 'inline', 'proto', 'break', 'continue',
+ 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
+ 'switch', 'case', 'as', 'in', 'version', 'return', 'true',
+ 'false', 'null'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'include\b', Keyword, 'include'),
+ (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Class)),
+ (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'\bfunc\b', Keyword),
+ # Note: %= and ^= not listed on http://ooc-lang.org/syntax
+ (r'//.*', Comment),
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
+ r'&&?|\|\|?|\^=?)', Operator),
+ (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
+ Name.Function)),
+ (r'[A-Z][A-Z0-9_]+', Name.Constant),
+ (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
+
+ (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
+ bygroups(Name.Function, Text)),
+ (r'[a-z]\w*', Name.Variable),
+
+ # : introduces types
+ (r'[:(){}\[\];,]', Punctuation),
+
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'0c[0-9]+', Number.Oct),
+ (r'0b[01]+', Number.Bin),
+ (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
+ (r'[0-9_]+', Number.Decimal),
+
+ (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
+ String.Double),
+ (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'@', Punctuation), # pointer dereference
+ (r'\.', Punctuation), # imports or chain operator
+
+ (r'\\[ \t\n]', Text),
+ (r'[ \t]+', Text),
+ ],
+ 'include': [
+ (r'[\w/]+', Name),
+ (r',', Punctuation),
+ (r'[ \t]', Text),
+ (r'[;\n]', Text, '#pop'),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/other.py b/contrib/python/Pygments/py3/pygments/lexers/other.py
index b0930088e6..f9fb0e879e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/other.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/other.py
@@ -1,40 +1,40 @@
-"""
- pygments.lexers.other
- ~~~~~~~~~~~~~~~~~~~~~
-
- Just export lexer classes previously contained in this module.
-
+"""
+ pygments.lexers.other
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Just export lexer classes previously contained in this module.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
-from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
- TcshLexer
-from pygments.lexers.robotframework import RobotFrameworkLexer
-from pygments.lexers.testing import GherkinLexer
-from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
-from pygments.lexers.prolog import LogtalkLexer
-from pygments.lexers.snobol import SnobolLexer
-from pygments.lexers.rebol import RebolLexer
-from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
-from pygments.lexers.modeling import ModelicaLexer
-from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
- HybrisLexer
-from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
- AsymptoteLexer, PovrayLexer
-from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
- GoodDataCLLexer, MaqlLexer
-from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
-from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
- MscgenLexer, VGLLexer
-from pygments.lexers.basic import CbmBasicV2Lexer
-from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
-from pygments.lexers.ecl import ECLLexer
-from pygments.lexers.urbi import UrbiscriptLexer
-from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
-from pygments.lexers.installers import NSISLexer, RPMSpecLexer
-from pygments.lexers.textedit import AwkLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
+from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
+ TcshLexer
+from pygments.lexers.robotframework import RobotFrameworkLexer
+from pygments.lexers.testing import GherkinLexer
+from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
+from pygments.lexers.prolog import LogtalkLexer
+from pygments.lexers.snobol import SnobolLexer
+from pygments.lexers.rebol import RebolLexer
+from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
+from pygments.lexers.modeling import ModelicaLexer
+from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
+ HybrisLexer
+from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
+ AsymptoteLexer, PovrayLexer
+from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
+ GoodDataCLLexer, MaqlLexer
+from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
+from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
+ MscgenLexer, VGLLexer
+from pygments.lexers.basic import CbmBasicV2Lexer
+from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
+from pygments.lexers.ecl import ECLLexer
+from pygments.lexers.urbi import UrbiscriptLexer
+from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
+from pygments.lexers.installers import NSISLexer, RPMSpecLexer
+from pygments.lexers.textedit import AwkLexer
from pygments.lexers.smv import NuSMVLexer
-
-__all__ = []
+
+__all__ = []
diff --git a/contrib/python/Pygments/py3/pygments/lexers/parasail.py b/contrib/python/Pygments/py3/pygments/lexers/parasail.py
index 49d8d672e1..c2513fcc17 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/parasail.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/parasail.py
@@ -1,78 +1,78 @@
-"""
- pygments.lexers.parasail
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for ParaSail.
-
+"""
+ pygments.lexers.parasail
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for ParaSail.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Literal
-
-__all__ = ['ParaSailLexer']
-
-
-class ParaSailLexer(RegexLexer):
- """
- For `ParaSail <http://www.parasail-lang.org>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'ParaSail'
- aliases = ['parasail']
- filenames = ['*.psi', '*.psl']
- mimetypes = ['text/x-parasail']
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'\b(and|or|xor)=', Operator.Word),
- (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|'
- r'(is|not)\s+null)\b',
- Operator.Word),
- # Keywords
- (r'\b(abs|abstract|all|block|class|concurrent|const|continue|'
- r'each|end|exit|extends|exports|forward|func|global|implements|'
- r'import|in|interface|is|lambda|locked|new|not|null|of|op|'
- r'optional|private|queued|ref|return|reverse|separate|some|'
- r'type|until|var|with|'
- # Control flow
- r'if|then|else|elsif|case|for|while|loop)\b',
- Keyword.Reserved),
- (r'(abstract\s+)?(interface|class|op|func|type)',
- Keyword.Declaration),
- # Literals
- (r'"[^"]*"', String),
- (r'\\[\'ntrf"0]', String.Escape),
- (r'#[a-zA-Z]\w*', Literal), # Enumeration
- include('numbers'),
- (r"'[^']'", String.Char),
- (r'[a-zA-Z]\w*', Name),
- # Operators and Punctuation
- (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['ParaSailLexer']
+
+
+class ParaSailLexer(RegexLexer):
+ """
+ For `ParaSail <http://www.parasail-lang.org>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'ParaSail'
+ aliases = ['parasail']
+ filenames = ['*.psi', '*.psl']
+ mimetypes = ['text/x-parasail']
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'\b(and|or|xor)=', Operator.Word),
+ (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|'
+ r'(is|not)\s+null)\b',
+ Operator.Word),
+ # Keywords
+ (r'\b(abs|abstract|all|block|class|concurrent|const|continue|'
+ r'each|end|exit|extends|exports|forward|func|global|implements|'
+ r'import|in|interface|is|lambda|locked|new|not|null|of|op|'
+ r'optional|private|queued|ref|return|reverse|separate|some|'
+ r'type|until|var|with|'
+ # Control flow
+ r'if|then|else|elsif|case|for|while|loop)\b',
+ Keyword.Reserved),
+ (r'(abstract\s+)?(interface|class|op|func|type)',
+ Keyword.Declaration),
+ # Literals
+ (r'"[^"]*"', String),
+ (r'\\[\'ntrf"0]', String.Escape),
+ (r'#[a-zA-Z]\w*', Literal), # Enumeration
+ include('numbers'),
+ (r"'[^']'", String.Char),
+ (r'[a-zA-Z]\w*', Name),
+ # Operators and Punctuation
+ (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|'
r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\|=|\||/=|\+|-|\*|/|'
- r'\.\.|<\.\.|\.\.<|<\.\.<)',
- Operator),
- (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)',
- Punctuation),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex), # any base
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), # C-like hex
- (r'0[bB][01][01_]*', Number.Bin), # C-like bin
- (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*', # float exp
- Number.Float),
- (r'\d[0-9_]*\.\d[0-9_]*', Number.Float), # float
- (r'\d[0-9_]*', Number.Integer), # integer
- ],
- }
+ r'\.\.|<\.\.|\.\.<|<\.\.<)',
+ Operator),
+ (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)',
+ Punctuation),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex), # any base
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), # C-like hex
+ (r'0[bB][01][01_]*', Number.Bin), # C-like bin
+ (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*', # float exp
+ Number.Float),
+ (r'\d[0-9_]*\.\d[0-9_]*', Number.Float), # float
+ (r'\d[0-9_]*', Number.Integer), # integer
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/parsers.py b/contrib/python/Pygments/py3/pygments/lexers/parsers.py
index 0009082fc4..52e8f466e0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/parsers.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/parsers.py
@@ -1,799 +1,799 @@
-"""
- pygments.lexers.parsers
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for parser generators.
-
+"""
+ pygments.lexers.parsers
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for parser generators.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, \
- include, bygroups, using
-from pygments.token import Punctuation, Other, Text, Comment, Operator, \
- Keyword, Name, String, Number, Whitespace
-from pygments.lexers.jvm import JavaLexer
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers.objective import ObjectiveCLexer
-from pygments.lexers.d import DLexer
-from pygments.lexers.dotnet import CSharpLexer
-from pygments.lexers.ruby import RubyLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.perl import PerlLexer
-
-__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
- 'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
- 'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
- 'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
- 'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
- 'AntlrJavaLexer', 'AntlrActionScriptLexer',
- 'TreetopLexer', 'EbnfLexer']
-
-
-class RagelLexer(RegexLexer):
- """
- A pure `Ragel <http://www.complang.org/ragel/>`_ lexer. Use this for
- fragments of Ragel. For ``.rl`` files, use RagelEmbeddedLexer instead
- (or one of the language-specific subclasses).
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel'
- aliases = ['ragel']
- filenames = []
-
- tokens = {
- 'whitespace': [
- (r'\s+', Whitespace)
- ],
- 'comments': [
- (r'\#.*$', Comment),
- ],
- 'keywords': [
- (r'(access|action|alphtype)\b', Keyword),
- (r'(getkey|write|machine|include)\b', Keyword),
- (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
- (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
- ],
- 'numbers': [
- (r'0x[0-9A-Fa-f]+', Number.Hex),
- (r'[+-]?[0-9]+', Number.Integer),
- ],
- 'literals': [
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, DelegatingLexer, \
+ include, bygroups, using
+from pygments.token import Punctuation, Other, Text, Comment, Operator, \
+ Keyword, Name, String, Number, Whitespace
+from pygments.lexers.jvm import JavaLexer
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.objective import ObjectiveCLexer
+from pygments.lexers.d import DLexer
+from pygments.lexers.dotnet import CSharpLexer
+from pygments.lexers.ruby import RubyLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
+
+__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
+ 'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
+ 'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
+ 'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
+ 'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
+ 'AntlrJavaLexer', 'AntlrActionScriptLexer',
+ 'TreetopLexer', 'EbnfLexer']
+
+
+class RagelLexer(RegexLexer):
+ """
+ A pure `Ragel <http://www.complang.org/ragel/>`_ lexer. Use this for
+ fragments of Ragel. For ``.rl`` files, use RagelEmbeddedLexer instead
+ (or one of the language-specific subclasses).
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel'
+ aliases = ['ragel']
+ filenames = []
+
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Whitespace)
+ ],
+ 'comments': [
+ (r'\#.*$', Comment),
+ ],
+ 'keywords': [
+ (r'(access|action|alphtype)\b', Keyword),
+ (r'(getkey|write|machine|include)\b', Keyword),
+ (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
+ (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
+ ],
+ 'numbers': [
+ (r'0x[0-9A-Fa-f]+', Number.Hex),
+ (r'[+-]?[0-9]+', Number.Integer),
+ ],
+ 'literals': [
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String), # square bracket literals
(r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex), # regular expressions
- ],
- 'identifiers': [
- (r'[a-zA-Z_]\w*', Name.Variable),
- ],
- 'operators': [
- (r',', Operator), # Join
- (r'\||&|--?', Operator), # Union, Intersection and Subtraction
- (r'\.|<:|:>>?', Operator), # Concatenation
- (r':', Operator), # Label
- (r'->', Operator), # Epsilon Transition
- (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
- (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
- (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
- (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
- (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
- (r'>|@|\$|%', Operator), # Transition Actions and Priorities
- (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator), # Repetition
- (r'!|\^', Operator), # Negation
- (r'\(|\)', Operator), # Grouping
- ],
- 'root': [
- include('literals'),
- include('whitespace'),
- include('comments'),
- include('keywords'),
- include('numbers'),
- include('identifiers'),
- include('operators'),
- (r'\{', Punctuation, 'host'),
- (r'=', Operator),
- (r';', Punctuation),
- ],
- 'host': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^{}\'"/#]+', # exclude unsafe characters
- r'[^\\]\\[{}]', # allow escaped { or }
-
- # strings and comments may safely contain unsafe characters
+ ],
+ 'identifiers': [
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ ],
+ 'operators': [
+ (r',', Operator), # Join
+ (r'\||&|--?', Operator), # Union, Intersection and Subtraction
+ (r'\.|<:|:>>?', Operator), # Concatenation
+ (r':', Operator), # Label
+ (r'->', Operator), # Epsilon Transition
+ (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
+ (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
+ (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
+ (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
+ (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
+ (r'>|@|\$|%', Operator), # Transition Actions and Priorities
+ (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator), # Repetition
+ (r'!|\^', Operator), # Negation
+ (r'\(|\)', Operator), # Grouping
+ ],
+ 'root': [
+ include('literals'),
+ include('whitespace'),
+ include('comments'),
+ include('keywords'),
+ include('numbers'),
+ include('identifiers'),
+ include('operators'),
+ (r'\{', Punctuation, 'host'),
+ (r'=', Operator),
+ (r';', Punctuation),
+ ],
+ 'host': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^{}\'"/#]+', # exclude unsafe characters
+ r'[^\\]\\[{}]', # allow escaped { or }
+
+ # strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'\#.*$\n?', # ruby comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'\#.*$\n?', # ruby comment
+
+ # regular expression: There's no reason for it to start
+ # with a * and this stops confusion with comments.
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
- # / is safe now that we've handled regex and javadoc comments
- r'/',
- )) + r')+', Other),
-
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- }
-
-
-class RagelEmbeddedLexer(RegexLexer):
- """
- A lexer for `Ragel`_ embedded in a host language file.
-
- This will only highlight Ragel statements. If you want host-language
- highlighting, use the language-specific Ragel lexer.
-
- .. versionadded:: 1.1
- """
-
- name = 'Embedded Ragel'
- aliases = ['ragel-em']
- filenames = ['*.rl']
-
- tokens = {
- 'root': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^%\'"/#]+', # exclude unsafe characters
- r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
-
- # strings and comments may safely contain unsafe characters
+
+ # / is safe now that we've handled regex and javadoc comments
+ r'/',
+ )) + r')+', Other),
+
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ }
+
+
+class RagelEmbeddedLexer(RegexLexer):
+ """
+ A lexer for `Ragel`_ embedded in a host language file.
+
+ This will only highlight Ragel statements. If you want host-language
+ highlighting, use the language-specific Ragel lexer.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Embedded Ragel'
+ aliases = ['ragel-em']
+ filenames = ['*.rl']
+
+ tokens = {
+ 'root': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^%\'"/#]+', # exclude unsafe characters
+ r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
+
+ # strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', # regular expression
-
- # / is safe now that we've handled regex and javadoc comments
- r'/',
- )) + r')+', Other),
-
- # Single Line FSM.
- # Please don't put a quoted newline in a single line FSM.
- # That's just mean. It will break this.
- (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
- using(RagelLexer),
- Punctuation, Text)),
-
- # Multi Line FSM.
- (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
- ],
- 'multi-line-fsm': [
- (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
- r'(' + r'|'.join((
- r'[^}\'"\[/#]', # exclude unsafe characters
- r'\}(?=[^%]|$)', # } is okay as long as it's not followed by %
- r'\}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
- r'[^\\]\\[{}]', # ...and } is okay if it's escaped
-
- # allow / if it's preceded with one of these symbols
- # (ragel EOF actions)
- r'(>|\$|%|<|@|<>)/',
-
- # specifically allow regex followed immediately by *
- # so it doesn't get mistaken for a comment
+
+ # / is safe now that we've handled regex and javadoc comments
+ r'/',
+ )) + r')+', Other),
+
+ # Single Line FSM.
+ # Please don't put a quoted newline in a single line FSM.
+ # That's just mean. It will break this.
+ (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
+ using(RagelLexer),
+ Punctuation, Text)),
+
+ # Multi Line FSM.
+ (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
+ ],
+ 'multi-line-fsm': [
+ (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
+ r'(' + r'|'.join((
+ r'[^}\'"\[/#]', # exclude unsafe characters
+ r'\}(?=[^%]|$)', # } is okay as long as it's not followed by %
+ r'\}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
+ r'[^\\]\\[{}]', # ...and } is okay if it's escaped
+
+ # allow / if it's preceded with one of these symbols
+ # (ragel EOF actions)
+ r'(>|\$|%|<|@|<>)/',
+
+ # specifically allow regex followed immediately by *
+ # so it doesn't get mistaken for a comment
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*',
-
- # allow / as long as it's not followed by another / or by a *
- r'/(?=[^/*]|$)',
-
- # We want to match as many of these as we can in one block.
- # Not sure if we need the + sign here,
- # does it help performance?
- )) + r')+',
-
- # strings and comments may safely contain unsafe characters
+
+ # allow / as long as it's not followed by another / or by a *
+ r'/(?=[^/*]|$)',
+
+ # We want to match as many of these as we can in one block.
+ # Not sure if we need the + sign here,
+ # does it help performance?
+ )) + r')+',
+
+ # strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
r"\[(\\\\|\\[^\\]|[^\]\\])*\]", # square bracket literal
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
- )) + r')+', using(RagelLexer)),
-
- (r'\}%%', Punctuation, '#pop'),
- ]
- }
-
- def analyse_text(text):
- return '@LANG: indep' in text
-
-
-class RagelRubyLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a Ruby host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Ruby Host'
- aliases = ['ragel-ruby', 'ragel-rb']
- filenames = ['*.rl']
-
- def __init__(self, **options):
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
+ )) + r')+', using(RagelLexer)),
+
+ (r'\}%%', Punctuation, '#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ return '@LANG: indep' in text
+
+
+class RagelRubyLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a Ruby host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in Ruby Host'
+ aliases = ['ragel-ruby', 'ragel-rb']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: ruby' in text
-
-
-class RagelCLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a C host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in C Host'
- aliases = ['ragel-c']
- filenames = ['*.rl']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return '@LANG: ruby' in text
+
+
+class RagelCLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a C host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in C Host'
+ aliases = ['ragel-c']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
super().__init__(CLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: c' in text
-
-
-class RagelDLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a D host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in D Host'
- aliases = ['ragel-d']
- filenames = ['*.rl']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return '@LANG: c' in text
+
+
+class RagelDLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a D host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in D Host'
+ aliases = ['ragel-d']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
super().__init__(DLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: d' in text
-
-
-class RagelCppLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a CPP host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in CPP Host'
- aliases = ['ragel-cpp']
- filenames = ['*.rl']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return '@LANG: d' in text
+
+
+class RagelCppLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a CPP host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in CPP Host'
+ aliases = ['ragel-cpp']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
super().__init__(CppLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: c++' in text
-
-
-class RagelObjectiveCLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in an Objective C host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Objective C Host'
- aliases = ['ragel-objc']
- filenames = ['*.rl']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return '@LANG: c++' in text
+
+
+class RagelObjectiveCLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in an Objective C host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in Objective C Host'
+ aliases = ['ragel-objc']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: objc' in text
-
-
-class RagelJavaLexer(DelegatingLexer):
- """
- A lexer for `Ragel`_ in a Java host file.
-
- .. versionadded:: 1.1
- """
-
- name = 'Ragel in Java Host'
- aliases = ['ragel-java']
- filenames = ['*.rl']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return '@LANG: objc' in text
+
+
+class RagelJavaLexer(DelegatingLexer):
+ """
+ A lexer for `Ragel`_ in a Java host file.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Ragel in Java Host'
+ aliases = ['ragel-java']
+ filenames = ['*.rl']
+
+ def __init__(self, **options):
super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
-
- def analyse_text(text):
- return '@LANG: java' in text
-
-
-class AntlrLexer(RegexLexer):
- """
- Generic `ANTLR`_ Lexer.
- Should not be called directly; instead,
- use DelegatingLexer for your target language.
-
- .. versionadded:: 1.1
-
- .. _ANTLR: http://www.antlr.org/
- """
-
- name = 'ANTLR'
- aliases = ['antlr']
- filenames = []
-
- _id = r'[A-Za-z]\w*'
- _TOKEN_REF = r'[A-Z]\w*'
- _RULE_REF = r'[a-z]\w*'
- _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
- _INT = r'[0-9]+'
-
- tokens = {
- 'whitespace': [
- (r'\s+', Whitespace),
- ],
- 'comments': [
- (r'//.*$', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- ],
- 'root': [
- include('whitespace'),
- include('comments'),
-
- (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
- bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
- Punctuation)),
- # optionsSpec
- (r'options\b', Keyword, 'options'),
- # tokensSpec
- (r'tokens\b', Keyword, 'tokens'),
- # attrScope
+
+ def analyse_text(text):
+ return '@LANG: java' in text
+
+
+class AntlrLexer(RegexLexer):
+ """
+ Generic `ANTLR`_ Lexer.
+ Should not be called directly; instead,
+ use DelegatingLexer for your target language.
+
+ .. versionadded:: 1.1
+
+ .. _ANTLR: http://www.antlr.org/
+ """
+
+ name = 'ANTLR'
+ aliases = ['antlr']
+ filenames = []
+
+ _id = r'[A-Za-z]\w*'
+ _TOKEN_REF = r'[A-Z]\w*'
+ _RULE_REF = r'[a-z]\w*'
+ _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
+ _INT = r'[0-9]+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Whitespace),
+ ],
+ 'comments': [
+ (r'//.*$', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ ],
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+
+ (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
+ bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
+ Punctuation)),
+ # optionsSpec
+ (r'options\b', Keyword, 'options'),
+ # tokensSpec
+ (r'tokens\b', Keyword, 'tokens'),
+ # attrScope
(r'(scope)(\s*)(' + _id + r')(\s*)(\{)',
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
- Punctuation), 'action'),
- # exception
- (r'(catch|finally)\b', Keyword, 'exception'),
- # action
+ bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
+ Punctuation), 'action'),
+ # exception
+ (r'(catch|finally)\b', Keyword, 'exception'),
+ # action
(r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)',
- bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
- Name.Label, Whitespace, Punctuation), 'action'),
- # rule
- (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
- bygroups(Keyword, Whitespace, Name.Label, Punctuation),
- ('rule-alts', 'rule-prelims')),
- ],
- 'exception': [
- (r'\n', Whitespace, '#pop'),
- (r'\s', Whitespace),
- include('comments'),
-
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- ],
- 'rule-prelims': [
- include('whitespace'),
- include('comments'),
-
- (r'returns\b', Keyword),
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- # throwsSpec
- (r'(throws)(\s+)(' + _id + ')',
- bygroups(Keyword, Whitespace, Name.Label)),
- (r'(,)(\s*)(' + _id + ')',
- bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
- # optionsSpec
- (r'options\b', Keyword, 'options'),
- # ruleScopeSpec - scope followed by target language code or name of action
- # TODO finish implementing other possibilities for scope
- # L173 ANTLRv3.g from ANTLR book
- (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
- 'action'),
+ bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
+ Name.Label, Whitespace, Punctuation), 'action'),
+ # rule
+ (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
+ bygroups(Keyword, Whitespace, Name.Label, Punctuation),
+ ('rule-alts', 'rule-prelims')),
+ ],
+ 'exception': [
+ (r'\n', Whitespace, '#pop'),
+ (r'\s', Whitespace),
+ include('comments'),
+
+ (r'\[', Punctuation, 'nested-arg-action'),
+ (r'\{', Punctuation, 'action'),
+ ],
+ 'rule-prelims': [
+ include('whitespace'),
+ include('comments'),
+
+ (r'returns\b', Keyword),
+ (r'\[', Punctuation, 'nested-arg-action'),
+ (r'\{', Punctuation, 'action'),
+ # throwsSpec
+ (r'(throws)(\s+)(' + _id + ')',
+ bygroups(Keyword, Whitespace, Name.Label)),
+ (r'(,)(\s*)(' + _id + ')',
+ bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
+ # optionsSpec
+ (r'options\b', Keyword, 'options'),
+ # ruleScopeSpec - scope followed by target language code or name of action
+ # TODO finish implementing other possibilities for scope
+ # L173 ANTLRv3.g from ANTLR book
+ (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
+ 'action'),
(r'(scope)(\s+)(' + _id + r')(\s*)(;)',
- bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
- # ruleAction
+ bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
+ # ruleAction
(r'(@' + _id + r')(\s*)(\{)',
- bygroups(Name.Label, Whitespace, Punctuation), 'action'),
- # finished prelims, go to rule alts!
- (r':', Punctuation, '#pop')
- ],
- 'rule-alts': [
- include('whitespace'),
- include('comments'),
-
- # These might need to go in a separate 'block' state triggered by (
- (r'options\b', Keyword, 'options'),
- (r':', Punctuation),
-
- # literals
+ bygroups(Name.Label, Whitespace, Punctuation), 'action'),
+ # finished prelims, go to rule alts!
+ (r':', Punctuation, '#pop')
+ ],
+ 'rule-alts': [
+ include('whitespace'),
+ include('comments'),
+
+ # These might need to go in a separate 'block' state triggered by (
+ (r'options\b', Keyword, 'options'),
+ (r':', Punctuation),
+
+ # literals
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'<<([^>]|>[^>])>>', String),
- # identifiers
- # Tokens start with a capital letter.
- (r'\$?[A-Z_]\w*', Name.Constant),
- # Rules start with a lowercase letter.
- (r'\$?[a-z_]\w*', Name.Variable),
- # operators
- (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
- (r',', Punctuation),
- (r'\[', Punctuation, 'nested-arg-action'),
- (r'\{', Punctuation, 'action'),
- (r';', Punctuation, '#pop')
- ],
- 'tokens': [
- include('whitespace'),
- include('comments'),
- (r'\{', Punctuation),
- (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
+ (r'<<([^>]|>[^>])>>', String),
+ # identifiers
+ # Tokens start with a capital letter.
+ (r'\$?[A-Z_]\w*', Name.Constant),
+ # Rules start with a lowercase letter.
+ (r'\$?[a-z_]\w*', Name.Variable),
+ # operators
+ (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
+ (r',', Punctuation),
+ (r'\[', Punctuation, 'nested-arg-action'),
+ (r'\{', Punctuation, 'action'),
+ (r';', Punctuation, '#pop')
+ ],
+ 'tokens': [
+ include('whitespace'),
+ include('comments'),
+ (r'\{', Punctuation),
+ (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
+ r')?(\s*)(;)',
- bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
- String, Whitespace, Punctuation)),
- (r'\}', Punctuation, '#pop'),
- ],
- 'options': [
- include('whitespace'),
- include('comments'),
- (r'\{', Punctuation),
- (r'(' + _id + r')(\s*)(=)(\s*)(' +
+ bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
+ String, Whitespace, Punctuation)),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'options': [
+ include('whitespace'),
+ include('comments'),
+ (r'\{', Punctuation),
+ (r'(' + _id + r')(\s*)(=)(\s*)(' +
'|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
- bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
- Text, Whitespace, Punctuation)),
- (r'\}', Punctuation, '#pop'),
- ],
- 'action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^${}\'"/\\]+', # exclude unsafe characters
-
- # strings and comments may safely contain unsafe characters
+ bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
+ Text, Whitespace, Punctuation)),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'action': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^${}\'"/\\]+', # exclude unsafe characters
+
+ # strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+
+ # regular expression: There's no reason for it to start
+ # with a * and this stops confusion with comments.
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
- # backslashes are okay, as long as we are not backslashing a %
- r'\\(?!%)',
-
- # Now that we've handled regex and javadoc comments
- # it's safe to let / through.
- r'/',
- )) + r')+', Other),
- (r'(\\)(%)', bygroups(Punctuation, Other)),
- (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
- bygroups(Name.Variable, Punctuation, Name.Property)),
- (r'\{', Punctuation, '#push'),
- (r'\}', Punctuation, '#pop'),
- ],
- 'nested-arg-action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks.
- r'[^$\[\]\'"/]+', # exclude unsafe characters
-
- # strings and comments may safely contain unsafe characters
+
+ # backslashes are okay, as long as we are not backslashing a %
+ r'\\(?!%)',
+
+ # Now that we've handled regex and javadoc comments
+ # it's safe to let / through.
+ r'/',
+ )) + r')+', Other),
+ (r'(\\)(%)', bygroups(Punctuation, Other)),
+ (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
+ bygroups(Name.Variable, Punctuation, Name.Property)),
+ (r'\{', Punctuation, '#push'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'nested-arg-action': [
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks.
+ r'[^$\[\]\'"/]+', # exclude unsafe characters
+
+ # strings and comments may safely contain unsafe characters
r'"(\\\\|\\[^\\]|[^"\\])*"',
r"'(\\\\|\\[^\\]|[^'\\])*'",
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
-
- # regular expression: There's no reason for it to start
- # with a * and this stops confusion with comments.
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+
+ # regular expression: There's no reason for it to start
+ # with a * and this stops confusion with comments.
r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
- # Now that we've handled regex and javadoc comments
- # it's safe to let / through.
- r'/',
- )) + r')+', Other),
-
-
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
- bygroups(Name.Variable, Punctuation, Name.Property)),
- (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
- ]
- }
-
- def analyse_text(text):
- return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
-
-
-# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
-
-class AntlrCppLexer(DelegatingLexer):
- """
- `ANTLR`_ with CPP Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With CPP Target'
- aliases = ['antlr-cpp']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
+
+ # Now that we've handled regex and javadoc comments
+ # it's safe to let / through.
+ r'/',
+ )) + r')+', Other),
+
+
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
+ bygroups(Name.Variable, Punctuation, Name.Property)),
+ (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
+ ]
+ }
+
+ def analyse_text(text):
+ return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
+
+
+# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
+
+class AntlrCppLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with CPP Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With CPP Target'
+ aliases = ['antlr-cpp']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
super().__init__(CppLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
-
-
-class AntlrObjectiveCLexer(DelegatingLexer):
- """
- `ANTLR`_ with Objective-C Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With ObjectiveC Target'
- aliases = ['antlr-objc']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
+
+
+class AntlrObjectiveCLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Objective-C Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With ObjectiveC Target'
+ aliases = ['antlr-objc']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
super().__init__(ObjectiveCLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
-
-
-class AntlrCSharpLexer(DelegatingLexer):
- """
- `ANTLR`_ with C# Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With C# Target'
- aliases = ['antlr-csharp', 'antlr-c#']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
+
+
+class AntlrCSharpLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with C# Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With C# Target'
+ aliases = ['antlr-csharp', 'antlr-c#']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
super().__init__(CSharpLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
-
-
-class AntlrPythonLexer(DelegatingLexer):
- """
- `ANTLR`_ with Python Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Python Target'
- aliases = ['antlr-python']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
+
+
+class AntlrPythonLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Python Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Python Target'
+ aliases = ['antlr-python']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
super().__init__(PythonLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
-
-
-class AntlrJavaLexer(DelegatingLexer):
- """
- `ANTLR`_ with Java Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Java Target'
- aliases = ['antlr-java']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
+
+
+class AntlrJavaLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Java Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Java Target'
+ aliases = ['antlr-java']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
super().__init__(JavaLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- # Antlr language is Java by default
- return AntlrLexer.analyse_text(text) and 0.9
-
-
-class AntlrRubyLexer(DelegatingLexer):
- """
- `ANTLR`_ with Ruby Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Ruby Target'
- aliases = ['antlr-ruby', 'antlr-rb']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ # Antlr language is Java by default
+ return AntlrLexer.analyse_text(text) and 0.9
+
+
+class AntlrRubyLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Ruby Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Ruby Target'
+ aliases = ['antlr-ruby', 'antlr-rb']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
super().__init__(RubyLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
-
-
-class AntlrPerlLexer(DelegatingLexer):
- """
- `ANTLR`_ with Perl Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With Perl Target'
- aliases = ['antlr-perl']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
+
+
+class AntlrPerlLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with Perl Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With Perl Target'
+ aliases = ['antlr-perl']
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
super().__init__(PerlLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
-
-
-class AntlrActionScriptLexer(DelegatingLexer):
- """
- `ANTLR`_ with ActionScript Target
-
- .. versionadded:: 1.1
- """
-
- name = 'ANTLR With ActionScript Target'
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
+
+
+class AntlrActionScriptLexer(DelegatingLexer):
+ """
+ `ANTLR`_ with ActionScript Target
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'ANTLR With ActionScript Target'
aliases = ['antlr-actionscript', 'antlr-as']
- filenames = ['*.G', '*.g']
-
- def __init__(self, **options):
- from pygments.lexers.actionscript import ActionScriptLexer
+ filenames = ['*.G', '*.g']
+
+ def __init__(self, **options):
+ from pygments.lexers.actionscript import ActionScriptLexer
super().__init__(ActionScriptLexer, AntlrLexer, **options)
-
- def analyse_text(text):
- return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
-
-
-class TreetopBaseLexer(RegexLexer):
- """
- A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
- Not for direct use; use TreetopLexer instead.
-
- .. versionadded:: 1.6
- """
-
- tokens = {
- 'root': [
- include('space'),
- (r'require[ \t]+[^\n\r]+[\n\r]', Other),
- (r'module\b', Keyword.Namespace, 'module'),
- (r'grammar\b', Keyword, 'grammar'),
- ],
- 'module': [
- include('space'),
- include('end'),
- (r'module\b', Keyword, '#push'),
- (r'grammar\b', Keyword, 'grammar'),
- (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
- ],
- 'grammar': [
- include('space'),
- include('end'),
- (r'rule\b', Keyword, 'rule'),
- (r'include\b', Keyword, 'include'),
- (r'[A-Z]\w*', Name),
- ],
- 'include': [
- include('space'),
- (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
- ],
- 'rule': [
- include('space'),
- include('end'),
+
+ def analyse_text(text):
+ return AntlrLexer.analyse_text(text) and \
+ re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+
+
+class TreetopBaseLexer(RegexLexer):
+ """
+ A base lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
+ Not for direct use; use TreetopLexer instead.
+
+ .. versionadded:: 1.6
+ """
+
+ tokens = {
+ 'root': [
+ include('space'),
+ (r'require[ \t]+[^\n\r]+[\n\r]', Other),
+ (r'module\b', Keyword.Namespace, 'module'),
+ (r'grammar\b', Keyword, 'grammar'),
+ ],
+ 'module': [
+ include('space'),
+ include('end'),
+ (r'module\b', Keyword, '#push'),
+ (r'grammar\b', Keyword, 'grammar'),
+ (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
+ ],
+ 'grammar': [
+ include('space'),
+ include('end'),
+ (r'rule\b', Keyword, 'rule'),
+ (r'include\b', Keyword, 'include'),
+ (r'[A-Z]\w*', Name),
+ ],
+ 'include': [
+ include('space'),
+ (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
+ ],
+ 'rule': [
+ include('space'),
+ include('end'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'[A-Za-z_]\w*', Name),
- (r'[()]', Punctuation),
- (r'[?+*/&!~]', Operator),
- (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
- (r'([0-9]*)(\.\.)([0-9]*)',
- bygroups(Number.Integer, Operator, Number.Integer)),
- (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
- (r'\{', Punctuation, 'inline_module'),
- (r'\.', String.Regex),
- ],
- 'inline_module': [
- (r'\{', Other, 'ruby'),
- (r'\}', Punctuation, '#pop'),
- (r'[^{}]+', Other),
- ],
- 'ruby': [
- (r'\{', Other, '#push'),
- (r'\}', Other, '#pop'),
- (r'[^{}]+', Other),
- ],
- 'space': [
- (r'[ \t\n\r]+', Whitespace),
- (r'#[^\n]*', Comment.Single),
- ],
- 'end': [
- (r'end\b', Keyword, '#pop'),
- ],
- }
-
-
-class TreetopLexer(DelegatingLexer):
- """
- A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
-
- .. versionadded:: 1.6
- """
-
- name = 'Treetop'
- aliases = ['treetop']
- filenames = ['*.treetop', '*.tt']
-
- def __init__(self, **options):
+ (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
+ (r'[A-Za-z_]\w*', Name),
+ (r'[()]', Punctuation),
+ (r'[?+*/&!~]', Operator),
+ (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
+ (r'([0-9]*)(\.\.)([0-9]*)',
+ bygroups(Number.Integer, Operator, Number.Integer)),
+ (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
+ (r'\{', Punctuation, 'inline_module'),
+ (r'\.', String.Regex),
+ ],
+ 'inline_module': [
+ (r'\{', Other, 'ruby'),
+ (r'\}', Punctuation, '#pop'),
+ (r'[^{}]+', Other),
+ ],
+ 'ruby': [
+ (r'\{', Other, '#push'),
+ (r'\}', Other, '#pop'),
+ (r'[^{}]+', Other),
+ ],
+ 'space': [
+ (r'[ \t\n\r]+', Whitespace),
+ (r'#[^\n]*', Comment.Single),
+ ],
+ 'end': [
+ (r'end\b', Keyword, '#pop'),
+ ],
+ }
+
+
+class TreetopLexer(DelegatingLexer):
+ """
+ A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Treetop'
+ aliases = ['treetop']
+ filenames = ['*.treetop', '*.tt']
+
+ def __init__(self, **options):
super().__init__(RubyLexer, TreetopBaseLexer, **options)
-
-
-class EbnfLexer(RegexLexer):
- """
- Lexer for `ISO/IEC 14977 EBNF
- <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
- grammars.
-
- .. versionadded:: 2.0
- """
-
- name = 'EBNF'
- aliases = ['ebnf']
- filenames = ['*.ebnf']
- mimetypes = ['text/x-ebnf']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'=', Operator, 'production'),
- ],
- 'production': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'"[^"]*"', String.Double),
- (r"'[^']*'", String.Single),
- (r'(\?[^?]*\?)', Name.Entity),
- (r'[\[\]{}(),|]', Punctuation),
- (r'-', Operator),
- (r';', Punctuation, '#pop'),
- (r'\.', Punctuation, '#pop'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'comment_start': [
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^*)]', Comment.Multiline),
- include('comment_start'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[*)]', Comment.Multiline),
- ],
- 'identifier': [
- (r'([a-zA-Z][\w \-]*)', Keyword),
- ],
- }
+
+
+class EbnfLexer(RegexLexer):
+ """
+ Lexer for `ISO/IEC 14977 EBNF
+ <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
+ grammars.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'EBNF'
+ aliases = ['ebnf']
+ filenames = ['*.ebnf']
+ mimetypes = ['text/x-ebnf']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'=', Operator, 'production'),
+ ],
+ 'production': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'"[^"]*"', String.Double),
+ (r"'[^']*'", String.Single),
+ (r'(\?[^?]*\?)', Name.Entity),
+ (r'[\[\]{}(),|]', Punctuation),
+ (r'-', Operator),
+ (r';', Punctuation, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'comment_start': [
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+ 'comment': [
+ (r'[^*)]', Comment.Multiline),
+ include('comment_start'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[*)]', Comment.Multiline),
+ ],
+ 'identifier': [
+ (r'([a-zA-Z][\w \-]*)', Keyword),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pascal.py b/contrib/python/Pygments/py3/pygments/lexers/pascal.py
index 0d1ac3fdb7..1561e8227d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pascal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pascal.py
@@ -1,643 +1,643 @@
-"""
- pygments.lexers.pascal
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Pascal family languages.
-
+"""
+ pygments.lexers.pascal
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Pascal family languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
- using, this, default
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error
-from pygments.scanner import Scanner
-
-# compatibility import
-from pygments.lexers.modula2 import Modula2Lexer
-
-__all__ = ['DelphiLexer', 'AdaLexer']
-
-
-class DelphiLexer(Lexer):
- """
- For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
- Turbo Pascal and Free Pascal source code.
-
- Additional options accepted:
-
- `turbopascal`
- Highlight Turbo Pascal specific keywords (default: ``True``).
- `delphi`
- Highlight Borland Delphi specific keywords (default: ``True``).
- `freepascal`
- Highlight Free Pascal specific keywords (default: ``True``).
- `units`
- A list of units that should be considered builtin, supported are
- ``System``, ``SysUtils``, ``Classes`` and ``Math``.
- Default is to consider all of them builtin.
- """
- name = 'Delphi'
- aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
+ using, this, default
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.scanner import Scanner
+
+# compatibility import
+from pygments.lexers.modula2 import Modula2Lexer
+
+__all__ = ['DelphiLexer', 'AdaLexer']
+
+
+class DelphiLexer(Lexer):
+ """
+ For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
+ Turbo Pascal and Free Pascal source code.
+
+ Additional options accepted:
+
+ `turbopascal`
+ Highlight Turbo Pascal specific keywords (default: ``True``).
+ `delphi`
+ Highlight Borland Delphi specific keywords (default: ``True``).
+ `freepascal`
+ Highlight Free Pascal specific keywords (default: ``True``).
+ `units`
+ A list of units that should be considered builtin, supported are
+ ``System``, ``SysUtils``, ``Classes`` and ``Math``.
+ Default is to consider all of them builtin.
+ """
+ name = 'Delphi'
+ aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
filenames = ['*.pas', '*.dpr']
- mimetypes = ['text/x-pascal']
-
- TURBO_PASCAL_KEYWORDS = (
- 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
- 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
- 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
- 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
- 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
- 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
- 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
- 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
- )
-
- DELPHI_KEYWORDS = (
- 'as', 'class', 'except', 'exports', 'finalization', 'finally',
- 'initialization', 'is', 'library', 'on', 'property', 'raise',
- 'threadvar', 'try'
- )
-
- FREE_PASCAL_KEYWORDS = (
- 'dispose', 'exit', 'false', 'new', 'true'
- )
-
+ mimetypes = ['text/x-pascal']
+
+ TURBO_PASCAL_KEYWORDS = (
+ 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
+ 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
+ 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
+ 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
+ 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
+ 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
+ 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
+ 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
+ )
+
+ DELPHI_KEYWORDS = (
+ 'as', 'class', 'except', 'exports', 'finalization', 'finally',
+ 'initialization', 'is', 'library', 'on', 'property', 'raise',
+ 'threadvar', 'try'
+ )
+
+ FREE_PASCAL_KEYWORDS = (
+ 'dispose', 'exit', 'false', 'new', 'true'
+ )
+
BLOCK_KEYWORDS = {
- 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
- 'finalization', 'function', 'implementation', 'initialization',
- 'label', 'library', 'operator', 'procedure', 'program', 'property',
- 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
+ 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
+ 'finalization', 'function', 'implementation', 'initialization',
+ 'label', 'library', 'operator', 'procedure', 'program', 'property',
+ 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
}
-
+
FUNCTION_MODIFIERS = {
- 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
- 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
- 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
- 'override', 'assembler'
+ 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
+ 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
+ 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
+ 'override', 'assembler'
}
-
- # XXX: those aren't global. but currently we know no way for defining
- # them just for the type context.
+
+ # XXX: those aren't global. but currently we know no way for defining
+ # them just for the type context.
DIRECTIVES = {
- 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
- 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
- 'published', 'public'
+ 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
+ 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
+ 'published', 'public'
}
-
+
BUILTIN_TYPES = {
- 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
- 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
- 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
- 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
- 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
- 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
- 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
- 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
- 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
- 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
- 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
- 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
- 'widechar', 'widestring', 'word', 'wordbool'
+ 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
+ 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
+ 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
+ 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
+ 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
+ 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
+ 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
+ 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
+ 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
+ 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
+ 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
+ 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
+ 'widechar', 'widestring', 'word', 'wordbool'
}
-
- BUILTIN_UNITS = {
- 'System': (
- 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
- 'append', 'arctan', 'assert', 'assigned', 'assignfile',
- 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
- 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
- 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
- 'dispose', 'doubletocomp', 'endthread', 'enummodules',
- 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
- 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
- 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
- 'findresourcehinstance', 'flush', 'frac', 'freemem',
- 'get8087cw', 'getdir', 'getlasterror', 'getmem',
- 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
- 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
- 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
- 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
- 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
- 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
- 'randomize', 'read', 'readln', 'reallocmem',
- 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
- 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
- 'set8087cw', 'setlength', 'setlinebreakstyle',
- 'setmemorymanager', 'setstring', 'settextbuf',
- 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
- 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
- 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
- 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
- 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
- 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
- 'widecharlentostring', 'widecharlentostrvar',
- 'widechartostring', 'widechartostrvar',
- 'widestringtoucs4string', 'write', 'writeln'
- ),
- 'SysUtils': (
- 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
- 'allocmem', 'ansicomparefilename', 'ansicomparestr',
- 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
- 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
- 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
- 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
- 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
- 'ansistrscan', 'ansistrupper', 'ansiuppercase',
- 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
- 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
- 'callterminateprocs', 'changefileext', 'charlength',
- 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
- 'comparetext', 'createdir', 'createguid', 'currentyear',
- 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
- 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
- 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
- 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
- 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
- 'exceptionerrormessage', 'excludetrailingbackslash',
- 'excludetrailingpathdelimiter', 'expandfilename',
- 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
- 'extractfiledrive', 'extractfileext', 'extractfilename',
- 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
- 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
- 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
- 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
- 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
- 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
- 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
- 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
- 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
- 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
- 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
- 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
- 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
- 'includetrailingbackslash', 'includetrailingpathdelimiter',
- 'incmonth', 'initializepackage', 'interlockeddecrement',
- 'interlockedexchange', 'interlockedexchangeadd',
- 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
- 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
- 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
- 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
- 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
- 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
- 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
- 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
- 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
- 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
- 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
- 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
- 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
- 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
- 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
- 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
- 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
- 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
- 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
- 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
- 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
- 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
- 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
- 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
- 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
- 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
- 'wraptext'
- ),
- 'Classes': (
- 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
- 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
- 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
- 'groupdescendantswith', 'hextobin', 'identtoint',
- 'initinheritedcomponent', 'inttoident', 'invalidpoint',
- 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
- 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
- 'pointsequal', 'readcomponentres', 'readcomponentresex',
- 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
- 'registerclasses', 'registercomponents', 'registerintegerconsts',
- 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
- 'teststreamformat', 'unregisterclass', 'unregisterclasses',
- 'unregisterintegerconsts', 'unregistermoduleclasses',
- 'writecomponentresfile'
- ),
- 'Math': (
- 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
- 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
- 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
- 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
- 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
- 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
- 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
- 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
- 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
- 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
- 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
- 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
- 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
- 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
- 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
- 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
- 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
- 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
- 'tan', 'tanh', 'totalvariance', 'variance'
- )
- }
-
+
+ BUILTIN_UNITS = {
+ 'System': (
+ 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
+ 'append', 'arctan', 'assert', 'assigned', 'assignfile',
+ 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
+ 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
+ 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
+ 'dispose', 'doubletocomp', 'endthread', 'enummodules',
+ 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
+ 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
+ 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
+ 'findresourcehinstance', 'flush', 'frac', 'freemem',
+ 'get8087cw', 'getdir', 'getlasterror', 'getmem',
+ 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
+ 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
+ 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
+ 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
+ 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
+ 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
+ 'randomize', 'read', 'readln', 'reallocmem',
+ 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
+ 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
+ 'set8087cw', 'setlength', 'setlinebreakstyle',
+ 'setmemorymanager', 'setstring', 'settextbuf',
+ 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
+ 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
+ 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
+ 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
+ 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
+ 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
+ 'widecharlentostring', 'widecharlentostrvar',
+ 'widechartostring', 'widechartostrvar',
+ 'widestringtoucs4string', 'write', 'writeln'
+ ),
+ 'SysUtils': (
+ 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
+ 'allocmem', 'ansicomparefilename', 'ansicomparestr',
+ 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
+ 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
+ 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
+ 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
+ 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
+ 'ansistrscan', 'ansistrupper', 'ansiuppercase',
+ 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
+ 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
+ 'callterminateprocs', 'changefileext', 'charlength',
+ 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
+ 'comparetext', 'createdir', 'createguid', 'currentyear',
+ 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
+ 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
+ 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
+ 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
+ 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
+ 'exceptionerrormessage', 'excludetrailingbackslash',
+ 'excludetrailingpathdelimiter', 'expandfilename',
+ 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
+ 'extractfiledrive', 'extractfileext', 'extractfilename',
+ 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
+ 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
+ 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
+ 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
+ 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
+ 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
+ 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
+ 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
+ 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
+ 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
+ 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
+ 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
+ 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
+ 'includetrailingbackslash', 'includetrailingpathdelimiter',
+ 'incmonth', 'initializepackage', 'interlockeddecrement',
+ 'interlockedexchange', 'interlockedexchangeadd',
+ 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
+ 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
+ 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
+ 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
+ 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
+ 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
+ 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
+ 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
+ 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
+ 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
+ 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
+ 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
+ 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
+ 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
+ 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
+ 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
+ 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
+ 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
+ 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
+ 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
+ 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
+ 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
+ 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
+ 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
+ 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
+ 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
+ 'wraptext'
+ ),
+ 'Classes': (
+ 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
+ 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
+ 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
+ 'groupdescendantswith', 'hextobin', 'identtoint',
+ 'initinheritedcomponent', 'inttoident', 'invalidpoint',
+ 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
+ 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
+ 'pointsequal', 'readcomponentres', 'readcomponentresex',
+ 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
+ 'registerclasses', 'registercomponents', 'registerintegerconsts',
+ 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
+ 'teststreamformat', 'unregisterclass', 'unregisterclasses',
+ 'unregisterintegerconsts', 'unregistermoduleclasses',
+ 'writecomponentresfile'
+ ),
+ 'Math': (
+ 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
+ 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
+ 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
+ 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
+ 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
+ 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
+ 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
+ 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
+ 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
+ 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
+ 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
+ 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
+ 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
+ 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
+ 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
+ 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
+ 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
+ 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
+ 'tan', 'tanh', 'totalvariance', 'variance'
+ )
+ }
+
ASM_REGISTERS = {
- 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
- 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
- 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
- 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
- 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
- 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
- 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
- 'xmm6', 'xmm7'
+ 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
+ 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
+ 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
+ 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
+ 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
+ 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
+ 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
+ 'xmm6', 'xmm7'
}
-
+
ASM_INSTRUCTIONS = {
- 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
- 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
- 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
- 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
- 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
- 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
- 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
- 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
- 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
- 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
- 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
- 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
- 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
- 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
- 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
- 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
- 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
- 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
- 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
- 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
- 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
- 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
- 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
- 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
- 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
- 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
- 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
- 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
- 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
- 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
- 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
- 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
- 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
- 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
- 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
- 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
- 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
- 'xlatb', 'xor'
+ 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
+ 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
+ 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
+ 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
+ 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
+ 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
+ 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
+ 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
+ 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
+ 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
+ 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
+ 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
+ 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
+ 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
+ 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
+ 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
+ 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
+ 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
+ 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
+ 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
+ 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
+ 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
+ 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
+ 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
+ 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
+ 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
+ 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
+ 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
+ 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
+ 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
+ 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
+ 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
+ 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
+ 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
+ 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
+ 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
+ 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
+ 'xlatb', 'xor'
}
-
- def __init__(self, **options):
- Lexer.__init__(self, **options)
- self.keywords = set()
- if get_bool_opt(options, 'turbopascal', True):
- self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
- if get_bool_opt(options, 'delphi', True):
- self.keywords.update(self.DELPHI_KEYWORDS)
- if get_bool_opt(options, 'freepascal', True):
- self.keywords.update(self.FREE_PASCAL_KEYWORDS)
- self.builtins = set()
- for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
- self.builtins.update(self.BUILTIN_UNITS[unit])
-
- def get_tokens_unprocessed(self, text):
- scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
- stack = ['initial']
- in_function_block = False
- in_property_block = False
- was_dot = False
- next_token_is_function = False
- next_token_is_property = False
- collect_labels = False
- block_labels = set()
- brace_balance = [0, 0]
-
- while not scanner.eos:
- token = Error
-
- if stack[-1] == 'initial':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
- token = Operator
- # stop label highlighting on next ";"
- if collect_labels and scanner.match == ';':
- collect_labels = False
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- # abort function naming ``foo = Function(...)``
- next_token_is_function = False
- # if we are in a function block we count the open
-                    # braces because otherwise it's impossible to
- # determine the end of the modifier context
- if in_function_block or in_property_block:
- if scanner.match == '(':
- brace_balance[0] += 1
- elif scanner.match == ')':
- brace_balance[0] -= 1
- elif scanner.match == '[':
- brace_balance[1] += 1
- elif scanner.match == ']':
- brace_balance[1] -= 1
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name == 'result':
- token = Name.Builtin.Pseudo
- elif lowercase_name in self.keywords:
- token = Keyword
-                        # if we are in a special block and a
-                        # block-ending keyword occurs (and the parentheses
-                        # are balanced), we end the current block context
- if (in_function_block or in_property_block) and \
- lowercase_name in self.BLOCK_KEYWORDS and \
- brace_balance[0] <= 0 and \
- brace_balance[1] <= 0:
- in_function_block = False
- in_property_block = False
- brace_balance = [0, 0]
- block_labels = set()
- if lowercase_name in ('label', 'goto'):
- collect_labels = True
- elif lowercase_name == 'asm':
- stack.append('asm')
- elif lowercase_name == 'property':
- in_property_block = True
- next_token_is_property = True
- elif lowercase_name in ('procedure', 'operator',
- 'function', 'constructor',
- 'destructor'):
- in_function_block = True
- next_token_is_function = True
- # we are in a function block and the current name
- # is in the set of registered modifiers. highlight
- # it as pseudo keyword
- elif in_function_block and \
- lowercase_name in self.FUNCTION_MODIFIERS:
- token = Keyword.Pseudo
- # if we are in a property highlight some more
- # modifiers
- elif in_property_block and \
- lowercase_name in ('read', 'write'):
- token = Keyword.Pseudo
- next_token_is_function = True
- # if the last iteration set next_token_is_function
- # to true we now want this name highlighted as
- # function. so do that and reset the state
- elif next_token_is_function:
- # Look if the next token is a dot. If yes it's
- # not a function, but a class name and the
- # part after the dot a function name
- if scanner.test(r'\s*\.\s*'):
- token = Name.Class
- # it's not a dot, our job is done
- else:
- token = Name.Function
- next_token_is_function = False
- # same for properties
- elif next_token_is_property:
- token = Name.Property
- next_token_is_property = False
- # Highlight this token as label and add it
- # to the list of known labels
- elif collect_labels:
- token = Name.Label
- block_labels.add(scanner.match.lower())
- # name is in list of known labels
- elif lowercase_name in block_labels:
- token = Name.Label
- elif lowercase_name in self.BUILTIN_TYPES:
- token = Keyword.Type
- elif lowercase_name in self.DIRECTIVES:
- token = Keyword.Pseudo
- # builtins are just builtins if the token
- # before isn't a dot
- elif not was_dot and lowercase_name in self.builtins:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
- token = String.Char
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
-                    # if the stack depth is greater than one, pop
- if len(stack) > 1:
- stack.pop()
- scanner.get_char()
-
- elif stack[-1] == 'string':
- if scanner.scan(r"''"):
- token = String.Escape
- elif scanner.scan(r"'"):
- token = String
- stack.pop()
- elif scanner.scan(r"[^']*"):
- token = String
- else:
- scanner.get_char()
- stack.pop()
-
- elif stack[-1] == 'asm':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'end'):
- token = Keyword
- stack.pop()
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
- token = Name.Label
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name in self.ASM_INSTRUCTIONS:
- token = Keyword
- elif lowercase_name in self.ASM_REGISTERS:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
- token = Operator
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- scanner.get_char()
- stack.pop()
-
-            # remember whether the last non-whitespace token was a dot
- if scanner.match.strip():
- was_dot = scanner.match == '.'
- yield scanner.start_pos, token, scanner.match or ''
-
-
-class AdaLexer(RegexLexer):
- """
- For Ada source code.
-
- .. versionadded:: 1.3
- """
-
- name = 'Ada'
- aliases = ['ada', 'ada95', 'ada2005']
- filenames = ['*.adb', '*.ads', '*.ada']
- mimetypes = ['text/x-ada']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'--.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
- (r'(subtype|type)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
- (r'task|protected', Keyword.Declaration),
- (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
- (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
- (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
- Comment.Preproc)),
- (r'(true|false|null)\b', Keyword.Constant),
- (words((
- 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count',
- 'Cursor', 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator',
- 'Integer', 'Long_Float', 'Long_Integer', 'Long_Long_Float',
- 'Long_Long_Integer', 'Natural', 'Positive', 'Reference_Type',
- 'Short_Float', 'Short_Integer', 'Short_Short_Float',
- 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'),
- suffix=r'\b'),
- Keyword.Type),
- (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word),
- (r'generic|private', Keyword.Declaration),
- (r'package', Keyword.Declaration, 'package'),
- (r'array\b', Keyword.Reserved, 'array_def'),
- (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'(\w+)(\s*)(:)(\s*)(constant)',
- bygroups(Name.Constant, Text, Punctuation, Text,
- Keyword.Reserved)),
- (r'<<\w+>>', Name.Label),
- (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
- bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
- (words((
- 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
- 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
- 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
- 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
- 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
- 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
+
+ def __init__(self, **options):
+ Lexer.__init__(self, **options)
+ self.keywords = set()
+ if get_bool_opt(options, 'turbopascal', True):
+ self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
+ if get_bool_opt(options, 'delphi', True):
+ self.keywords.update(self.DELPHI_KEYWORDS)
+ if get_bool_opt(options, 'freepascal', True):
+ self.keywords.update(self.FREE_PASCAL_KEYWORDS)
+ self.builtins = set()
+ for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
+ self.builtins.update(self.BUILTIN_UNITS[unit])
+
+ def get_tokens_unprocessed(self, text):
+ scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
+ stack = ['initial']
+ in_function_block = False
+ in_property_block = False
+ was_dot = False
+ next_token_is_function = False
+ next_token_is_property = False
+ collect_labels = False
+ block_labels = set()
+ brace_balance = [0, 0]
+
+ while not scanner.eos:
+ token = Error
+
+ if stack[-1] == 'initial':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
+ token = Operator
+ # stop label highlighting on next ";"
+ if collect_labels and scanner.match == ';':
+ collect_labels = False
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ # abort function naming ``foo = Function(...)``
+ next_token_is_function = False
+ # if we are in a function block we count the open
+                    # braces because otherwise it's impossible to
+ # determine the end of the modifier context
+ if in_function_block or in_property_block:
+ if scanner.match == '(':
+ brace_balance[0] += 1
+ elif scanner.match == ')':
+ brace_balance[0] -= 1
+ elif scanner.match == '[':
+ brace_balance[1] += 1
+ elif scanner.match == ']':
+ brace_balance[1] -= 1
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name == 'result':
+ token = Name.Builtin.Pseudo
+ elif lowercase_name in self.keywords:
+ token = Keyword
+                        # if we are in a special block and a
+                        # block-ending keyword occurs (and the parentheses
+                        # are balanced), we end the current block context
+ if (in_function_block or in_property_block) and \
+ lowercase_name in self.BLOCK_KEYWORDS and \
+ brace_balance[0] <= 0 and \
+ brace_balance[1] <= 0:
+ in_function_block = False
+ in_property_block = False
+ brace_balance = [0, 0]
+ block_labels = set()
+ if lowercase_name in ('label', 'goto'):
+ collect_labels = True
+ elif lowercase_name == 'asm':
+ stack.append('asm')
+ elif lowercase_name == 'property':
+ in_property_block = True
+ next_token_is_property = True
+ elif lowercase_name in ('procedure', 'operator',
+ 'function', 'constructor',
+ 'destructor'):
+ in_function_block = True
+ next_token_is_function = True
+ # we are in a function block and the current name
+ # is in the set of registered modifiers. highlight
+ # it as pseudo keyword
+ elif in_function_block and \
+ lowercase_name in self.FUNCTION_MODIFIERS:
+ token = Keyword.Pseudo
+ # if we are in a property highlight some more
+ # modifiers
+ elif in_property_block and \
+ lowercase_name in ('read', 'write'):
+ token = Keyword.Pseudo
+ next_token_is_function = True
+ # if the last iteration set next_token_is_function
+ # to true we now want this name highlighted as
+ # function. so do that and reset the state
+ elif next_token_is_function:
+ # Look if the next token is a dot. If yes it's
+ # not a function, but a class name and the
+ # part after the dot a function name
+ if scanner.test(r'\s*\.\s*'):
+ token = Name.Class
+ # it's not a dot, our job is done
+ else:
+ token = Name.Function
+ next_token_is_function = False
+ # same for properties
+ elif next_token_is_property:
+ token = Name.Property
+ next_token_is_property = False
+ # Highlight this token as label and add it
+ # to the list of known labels
+ elif collect_labels:
+ token = Name.Label
+ block_labels.add(scanner.match.lower())
+ # name is in list of known labels
+ elif lowercase_name in block_labels:
+ token = Name.Label
+ elif lowercase_name in self.BUILTIN_TYPES:
+ token = Keyword.Type
+ elif lowercase_name in self.DIRECTIVES:
+ token = Keyword.Pseudo
+ # builtins are just builtins if the token
+ # before isn't a dot
+ elif not was_dot and lowercase_name in self.builtins:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
+ token = String.Char
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+                    # if the stack depth is greater than one, pop
+ if len(stack) > 1:
+ stack.pop()
+ scanner.get_char()
+
+ elif stack[-1] == 'string':
+ if scanner.scan(r"''"):
+ token = String.Escape
+ elif scanner.scan(r"'"):
+ token = String
+ stack.pop()
+ elif scanner.scan(r"[^']*"):
+ token = String
+ else:
+ scanner.get_char()
+ stack.pop()
+
+ elif stack[-1] == 'asm':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'end'):
+ token = Keyword
+ stack.pop()
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
+ token = Name.Label
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name in self.ASM_INSTRUCTIONS:
+ token = Keyword
+ elif lowercase_name in self.ASM_REGISTERS:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
+ token = Operator
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+ scanner.get_char()
+ stack.pop()
+
+            # remember whether the last non-whitespace token was a dot
+ if scanner.match.strip():
+ was_dot = scanner.match == '.'
+ yield scanner.start_pos, token, scanner.match or ''
+
+
+class AdaLexer(RegexLexer):
+ """
+ For Ada source code.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Ada'
+ aliases = ['ada', 'ada95', 'ada2005']
+ filenames = ['*.adb', '*.ads', '*.ada']
+ mimetypes = ['text/x-ada']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
+ (r'(subtype|type)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
+ (r'task|protected', Keyword.Declaration),
+ (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
+ (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
+ (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
+ Comment.Preproc)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (words((
+ 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count',
+ 'Cursor', 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator',
+ 'Integer', 'Long_Float', 'Long_Integer', 'Long_Long_Float',
+ 'Long_Long_Integer', 'Natural', 'Positive', 'Reference_Type',
+ 'Short_Float', 'Short_Integer', 'Short_Short_Float',
+ 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'),
+ suffix=r'\b'),
+ Keyword.Type),
+ (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word),
+ (r'generic|private', Keyword.Declaration),
+ (r'package', Keyword.Declaration, 'package'),
+ (r'array\b', Keyword.Reserved, 'array_def'),
+ (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'(\w+)(\s*)(:)(\s*)(constant)',
+ bygroups(Name.Constant, Text, Punctuation, Text,
+ Keyword.Reserved)),
+ (r'<<\w+>>', Name.Label),
+ (r'(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
+ bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
+ (words((
+ 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
+ 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
+ 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
+ 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
+ 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
+ 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
'return', 'reverse', 'select', 'separate', 'some', 'subtype',
'synchronized', 'task', 'tagged', 'terminate', 'then', 'type', 'until',
'when', 'while', 'xor'), prefix=r'\b', suffix=r'\b'),
- Keyword.Reserved),
- (r'"[^"]*"', String),
- include('attribute'),
- include('numbers'),
- (r"'[^']'", String.Character),
- (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
- (r'[*<>+=/&-]', Operator),
- (r'\n+', Text),
- ],
- 'numbers': [
+ Keyword.Reserved),
+ (r'"[^"]*"', String),
+ include('attribute'),
+ include('numbers'),
+ (r"'[^']'", String.Character),
+ (r'(\w+)(\s*|[(,])', bygroups(Name, using(this))),
+ (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
+ (r'[*<>+=/&-]', Operator),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
(r'[0-9_]+#[0-9a-f_\.]+#', Number.Hex),
- (r'[0-9_]+\.[0-9_]*', Number.Float),
- (r'[0-9_]+', Number.Integer),
- ],
- 'attribute': [
- (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
- ],
- 'subprogram': [
- (r'\(', Punctuation, ('#pop', 'formal_part')),
- (r';', Punctuation, '#pop'),
- (r'is\b', Keyword.Reserved, '#pop'),
- (r'"[^"]+"|\w+', Name.Function),
- include('root'),
- ],
- 'end': [
- ('(if|case|record|loop|select)', Keyword.Reserved),
+ (r'[0-9_]+\.[0-9_]*', Number.Float),
+ (r'[0-9_]+', Number.Integer),
+ ],
+ 'attribute': [
+ (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
+ ],
+ 'subprogram': [
+ (r'\(', Punctuation, ('#pop', 'formal_part')),
+ (r';', Punctuation, '#pop'),
+ (r'is\b', Keyword.Reserved, '#pop'),
+ (r'"[^"]+"|\w+', Name.Function),
+ include('root'),
+ ],
+ 'end': [
+ ('(if|case|record|loop|select)', Keyword.Reserved),
(r'"[^"]+"|[\w.]+', Name.Function),
(r'\s+', Text),
- (';', Punctuation, '#pop'),
- ],
- 'type_def': [
- (r';', Punctuation, '#pop'),
- (r'\(', Punctuation, 'formal_part'),
- (r'with|and|use', Keyword.Reserved),
- (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
- (r'record\b', Keyword.Reserved, ('record_def')),
- (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
- include('root'),
- ],
- 'array_def': [
- (r';', Punctuation, '#pop'),
- (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)),
- include('root'),
- ],
- 'record_def': [
- (r'end record', Keyword.Reserved, '#pop'),
- include('root'),
- ],
- 'import': [
- (r'[\w.]+', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'formal_part': [
- (r'\)', Punctuation, '#pop'),
- (r'\w+', Name.Variable),
- (r',|:[^=]', Punctuation),
- (r'(in|not|null|out|access)\b', Keyword.Reserved),
- include('root'),
- ],
- 'package': [
- ('body', Keyword.Declaration),
+ (';', Punctuation, '#pop'),
+ ],
+ 'type_def': [
+ (r';', Punctuation, '#pop'),
+ (r'\(', Punctuation, 'formal_part'),
+ (r'with|and|use', Keyword.Reserved),
+ (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
+ (r'record\b', Keyword.Reserved, ('record_def')),
+ (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'array_def': [
+ (r';', Punctuation, '#pop'),
+ (r'(\w+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)),
+ include('root'),
+ ],
+ 'record_def': [
+ (r'end record', Keyword.Reserved, '#pop'),
+ include('root'),
+ ],
+ 'import': [
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'formal_part': [
+ (r'\)', Punctuation, '#pop'),
+ (r'\w+', Name.Variable),
+ (r',|:[^=]', Punctuation),
+ (r'(in|not|null|out|access)\b', Keyword.Reserved),
+ include('root'),
+ ],
+ 'package': [
+ ('body', Keyword.Declaration),
(r'is\s+new|renames', Keyword.Reserved),
- ('is', Keyword.Reserved, '#pop'),
- (';', Punctuation, '#pop'),
+ ('is', Keyword.Reserved, '#pop'),
+ (';', Punctuation, '#pop'),
(r'\(', Punctuation, 'package_instantiation'),
(r'([\w.]+)', Name.Class),
- include('root'),
- ],
- 'package_instantiation': [
- (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)),
- (r'[\w.\'"]', Text),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- }
+ include('root'),
+ ],
+ 'package_instantiation': [
+ (r'("[^"]+"|\w+)(\s+)(=>)', bygroups(Name.Variable, Text, Punctuation)),
+ (r'[\w.\'"]', Text),
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ }
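A minimal usage sketch for the two lexers restored above, purely illustrative and not part of the commit. It assumes the upstream Pygments layout in which DelphiLexer and AdaLexer are importable from pygments.lexers.pascal, and it exercises the options documented in the DelphiLexer docstring (turbopascal, delphi, freepascal, units); the sample sources are made up.

# Illustrative sketch only; assumes pygments.lexers.pascal as the import path.
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.pascal import AdaLexer, DelphiLexer

PASCAL_SRC = """\
program Hello;
begin
  WriteLn('Hello, world');
end.
"""

# Restrict builtin highlighting to the System unit and drop the
# Free Pascal keyword set, per the options in the docstring above.
delphi = DelphiLexer(freepascal=False, units=['System'])
print(highlight(PASCAL_SRC, delphi, TerminalFormatter()))

# AdaLexer is a plain RegexLexer and takes no extra options.
print(highlight('procedure Ping is begin null; end Ping;',
                AdaLexer(), TerminalFormatter()))

With units=['System'], names from SysUtils, Classes and Math are not collected into self.builtins, so they fall back to plain Name tokens instead of Name.Builtin.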
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pawn.py b/contrib/python/Pygments/py3/pygments/lexers/pawn.py
index 5a303e4949..59616b4a8b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pawn.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pawn.py
@@ -1,89 +1,89 @@
-"""
- pygments.lexers.pawn
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Pawn languages.
-
+"""
+ pygments.lexers.pawn
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Pawn languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
-from pygments.util import get_bool_opt
-
-__all__ = ['SourcePawnLexer', 'PawnLexer']
-
-
-class SourcePawnLexer(RegexLexer):
- """
- For SourcePawn source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
- name = 'SourcePawn'
- aliases = ['sp']
- filenames = ['*.sp']
- mimetypes = ['text/x-sourcepawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
+from pygments.util import get_bool_opt
+
+__all__ = ['SourcePawnLexer', 'PawnLexer']
+
+
+class SourcePawnLexer(RegexLexer):
+ """
+ For SourcePawn source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+ name = 'SourcePawn'
+ aliases = ['sp']
+ filenames = ['*.sp']
+ mimetypes = ['text/x-sourcepawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
(r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(case|const|continue|native|'
- r'default|else|enum|for|if|new|operator|'
- r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(case|const|continue|native|'
+ r'default|else|enum|for|if|new|operator|'
+ r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
SM_TYPES = {'Action', 'bool', 'Float', 'Plugin', 'String', 'any',
'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
@@ -103,97 +103,97 @@ class SourcePawnLexer(RegexLexer):
'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
'TopMenuPosition', 'TopMenuObject', 'UserMsg'}
-
- def __init__(self, **options):
- self.smhighlighting = get_bool_opt(options,
- 'sourcemod', True)
-
- self._functions = set()
- if self.smhighlighting:
- from pygments.lexers._sourcemod_builtins import FUNCTIONS
- self._functions.update(FUNCTIONS)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.smhighlighting:
- if value in self.SM_TYPES:
- token = Keyword.Type
- elif value in self._functions:
- token = Name.Builtin
- yield index, token, value
-
-
-class PawnLexer(RegexLexer):
- """
- For Pawn source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pawn'
- aliases = ['pawn']
- filenames = ['*.p', '*.pwn', '*.inc']
- mimetypes = ['text/x-pawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
+
+ def __init__(self, **options):
+ self.smhighlighting = get_bool_opt(options,
+ 'sourcemod', True)
+
+ self._functions = set()
+ if self.smhighlighting:
+ from pygments.lexers._sourcemod_builtins import FUNCTIONS
+ self._functions.update(FUNCTIONS)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.smhighlighting:
+ if value in self.SM_TYPES:
+ token = Keyword.Type
+ elif value in self._functions:
+ token = Name.Builtin
+ yield index, token, value
+
+
+class PawnLexer(RegexLexer):
+ """
+ For Pawn source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pawn'
+ aliases = ['pawn']
+ filenames = ['*.p', '*.pwn', '*.inc']
+ mimetypes = ['text/x-pawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
(r'^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(switch|case|default|const|new|static|char|continue|break|'
- r'if|else|for|while|do|operator|enum|'
- r'public|return|sizeof|tagof|state|goto)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(switch|case|default|const|new|static|char|continue|break|'
+ r'if|else|for|while|do|operator|enum|'
+ r'public|return|sizeof|tagof|state|goto)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
def analyse_text(text):
"""This is basically C. There is a keyword which doesn't exist in C
diff --git a/contrib/python/Pygments/py3/pygments/lexers/perl.py b/contrib/python/Pygments/py3/pygments/lexers/perl.py
index bac325bb45..63ff22717f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/perl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/perl.py
@@ -1,135 +1,135 @@
-"""
- pygments.lexers.perl
- ~~~~~~~~~~~~~~~~~~~~
-
+"""
+ pygments.lexers.perl
+ ~~~~~~~~~~~~~~~~~~~~
+
Lexers for Perl, Raku and related languages.
-
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- using, this, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-from pygments.util import shebang_matches
-
-__all__ = ['PerlLexer', 'Perl6Lexer']
-
-
-class PerlLexer(RegexLexer):
- """
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ using, this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import shebang_matches
+
+__all__ = ['PerlLexer', 'Perl6Lexer']
+
+
+class PerlLexer(RegexLexer):
+ """
For `Perl <https://www.perl.org>`_ source code.
- """
-
- name = 'Perl'
- aliases = ['perl', 'pl']
+ """
+
+ name = 'Perl'
+ aliases = ['perl', 'pl']
filenames = ['*.pl', '*.pm', '*.t', '*.perl']
- mimetypes = ['text/x-perl', 'application/x-perl']
-
- flags = re.DOTALL | re.MULTILINE
- # TODO: give this to a perl guy who knows how to parse perl...
- tokens = {
- 'balanced-regex': [
- (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\A\#!.+?$', Comment.Hashbang),
- (r'\#.*?$', Comment.Single),
- (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
- (words((
- 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
- 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
+ mimetypes = ['text/x-perl', 'application/x-perl']
+
+ flags = re.DOTALL | re.MULTILINE
+ # TODO: give this to a perl guy who knows how to parse perl...
+ tokens = {
+ 'balanced-regex': [
+ (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
+ ],
+ 'root': [
+ (r'\A\#!.+?$', Comment.Hashbang),
+ (r'\#.*?$', Comment.Single),
+ (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
+ (words((
+ 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
+ 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
'unless', 'until', 'while', 'print', 'new', 'BEGIN',
- 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
- Keyword),
- (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
- bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
- (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
- # common delimiters
- (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
- String.Regex),
- (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
- (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
- (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
- String.Regex),
- (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
- String.Regex),
- # balanced delimiters
- (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
- 'balanced-regex'),
- (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
- 'balanced-regex'),
-
- (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
- (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
- String.Regex),
- (r'\s+', Text),
- (words((
- 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
- 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
- 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
- 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
- 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
- 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
- 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
- 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
- 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
- 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
- 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
- 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
- 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
+ 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
+ bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
+ (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+ # common delimiters
+ (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
+ String.Regex),
+ (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
+ (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
+ (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
+ String.Regex),
+ (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
+ String.Regex),
+ # balanced delimiters
+ (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
+ 'balanced-regex'),
+ (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
+ 'balanced-regex'),
+
+ (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
+ (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+ (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
+ String.Regex),
+ (r'\s+', Text),
+ (words((
+ 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
+ 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
+ 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
+ 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
+ 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
+ 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
+ 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
+ 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
+ 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
+ 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
+ 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
+ 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
+ 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'oct', 'open',
'opendir', 'ord', 'our', 'pack', 'pipe', 'pop', 'pos', 'printf',
- 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
+ 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename',
- 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
- 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
- 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
- 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
- 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
- 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
- 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
- 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
- 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
- Name.Builtin),
- (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
+ 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
+ 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
+ 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
+ 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
+ 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
+ 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
+ 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
+ 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
+ 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
+ Name.Builtin),
+ (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
(r'(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)',
bygroups(String, String, String.Delimiter, String, String.Delimiter, Text)),
- (r'__END__', Comment.Preproc, 'end-part'),
- (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
- (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
- (r'[$@%#]+', Name.Variable, 'varname'),
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
- (r'<([^\s>]+)>', String.Regex),
- (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
- (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
- (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
- (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
- (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
+ (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
+ (r'[$@%#]+', Name.Variable, 'varname'),
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
+ (r'<([^\s>]+)>', String.Regex),
+ (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
+ (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
+ (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
+ (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
+ (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
(r'(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
bygroups(Keyword, Text, Name.Namespace)),
(r'(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
@@ -138,107 +138,107 @@ class PerlLexer(RegexLexer):
(words((
'no', 'package', 'require', 'use'), suffix=r'\b'),
Keyword),
- (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&^|!\\~]=?', Operator),
- (r'[()\[\]:;,<>/?{}]', Punctuation), # yes, there's no shortage
- # of punctuation in Perl!
- (r'(?=\w)', Name, 'name'),
- ],
- 'format': [
- (r'\.\n', String.Interpol, '#pop'),
- (r'[^\n]*\n', String.Interpol),
- ],
- 'varname': [
- (r'\s+', Text),
- (r'\{', Punctuation, '#pop'), # hash syntax?
- (r'\)|,', Punctuation, '#pop'), # argument specifier
- (r'\w+::', Name.Namespace),
- (r'[\w:]+', Name.Variable, '#pop'),
- ],
- 'name': [
+ (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&^|!\\~]=?', Operator),
+ (r'[()\[\]:;,<>/?{}]', Punctuation), # yes, there's no shortage
+ # of punctuation in Perl!
+ (r'(?=\w)', Name, 'name'),
+ ],
+ 'format': [
+ (r'\.\n', String.Interpol, '#pop'),
+ (r'[^\n]*\n', String.Interpol),
+ ],
+ 'varname': [
+ (r'\s+', Text),
+ (r'\{', Punctuation, '#pop'), # hash syntax?
+ (r'\)|,', Punctuation, '#pop'), # argument specifier
+ (r'\w+::', Name.Namespace),
+ (r'[\w:]+', Name.Variable, '#pop'),
+ ],
+ 'name': [
(r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)', Name.Namespace, '#pop'),
(r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::', Name.Namespace, '#pop'),
- (r'[\w:]+', Name, '#pop'),
- (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
- (r'(?=\W)', Text, '#pop'),
- ],
- 'funcname': [
- (r'[a-zA-Z_]\w*[!?]?', Name.Function),
- (r'\s+', Text),
- # argument declaration
- (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
- (r';', Punctuation, '#pop'),
- (r'.*?\{', Punctuation, '#pop'),
- ],
- 'cb-string': [
- (r'\\[{}\\]', String.Other),
- (r'\\', String.Other),
- (r'\{', String.Other, 'cb-string'),
- (r'\}', String.Other, '#pop'),
- (r'[^{}\\]+', String.Other)
- ],
- 'rb-string': [
- (r'\\[()\\]', String.Other),
- (r'\\', String.Other),
- (r'\(', String.Other, 'rb-string'),
- (r'\)', String.Other, '#pop'),
- (r'[^()]+', String.Other)
- ],
- 'sb-string': [
- (r'\\[\[\]\\]', String.Other),
- (r'\\', String.Other),
- (r'\[', String.Other, 'sb-string'),
- (r'\]', String.Other, '#pop'),
- (r'[^\[\]]+', String.Other)
- ],
- 'lt-string': [
- (r'\\[<>\\]', String.Other),
- (r'\\', String.Other),
- (r'\<', String.Other, 'lt-string'),
- (r'\>', String.Other, '#pop'),
- (r'[^<>]+', String.Other)
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'perl'):
- return True
+ (r'[\w:]+', Name, '#pop'),
+ (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
+ (r'(?=\W)', Text, '#pop'),
+ ],
+ 'funcname': [
+ (r'[a-zA-Z_]\w*[!?]?', Name.Function),
+ (r'\s+', Text),
+ # argument declaration
+ (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
+ (r';', Punctuation, '#pop'),
+ (r'.*?\{', Punctuation, '#pop'),
+ ],
+ 'cb-string': [
+ (r'\\[{}\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\{', String.Other, 'cb-string'),
+ (r'\}', String.Other, '#pop'),
+ (r'[^{}\\]+', String.Other)
+ ],
+ 'rb-string': [
+ (r'\\[()\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\(', String.Other, 'rb-string'),
+ (r'\)', String.Other, '#pop'),
+ (r'[^()]+', String.Other)
+ ],
+ 'sb-string': [
+ (r'\\[\[\]\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\[', String.Other, 'sb-string'),
+ (r'\]', String.Other, '#pop'),
+ (r'[^\[\]]+', String.Other)
+ ],
+ 'lt-string': [
+ (r'\\[<>\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\<', String.Other, 'lt-string'),
+ (r'\>', String.Other, '#pop'),
+ (r'[^<>]+', String.Other)
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if shebang_matches(text, r'perl'):
+ return True
result = 0
if re.search(r'(?:my|our)\s+[$@%(]', text):
result += 0.9
-
+
if ':=' in text:
# := is not valid Perl, but it appears in unicon, so we should
# become less confident if we think we found Perl with :=
result /= 2
-
+
return result
-class Perl6Lexer(ExtendedRegexLexer):
- """
+class Perl6Lexer(ExtendedRegexLexer):
+ """
For `Raku <https://www.raku.org>`_ (a.k.a. Perl 6) source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Perl6'
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Perl6'
aliases = ['perl6', 'pl6', 'raku']
- filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
+ filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
'*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod',
'*.rakutest', '*.rakudoc']
- mimetypes = ['text/x-perl6', 'application/x-perl6']
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
+ mimetypes = ['text/x-perl6', 'application/x-perl6']
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
PERL6_IDENTIFIER_RANGE = r"['\w:-]"
-
- PERL6_KEYWORDS = (
+
+ PERL6_KEYWORDS = (
#Phasers
'BEGIN','CATCH','CHECK','CLOSE','CONTROL','DOC','END','ENTER','FIRST',
'INIT','KEEP','LAST','LEAVE','NEXT','POST','PRE','QUIT','UNDO',
@@ -252,9 +252,9 @@ class Perl6Lexer(ExtendedRegexLexer):
'when','while','with','without',
#Traits
'export','native','repr','required','rw','symbol',
- )
-
- PERL6_BUILTINS = (
+ )
+
+ PERL6_BUILTINS = (
'ACCEPTS','abs','abs2rel','absolute','accept','accessed','acos',
'acosec','acosech','acosh','acotan','acotanh','acquire','act','action',
'actions','add','add_attribute','add_enum_value','add_fallback',
@@ -368,9 +368,9 @@ class Perl6Lexer(ExtendedRegexLexer):
'whole-second','WHY','wordcase','words','workaround','wrap','write',
'write-to','x','yada','year','yield','yyyy-mm-dd','z','zip','zip-latest',
- )
-
- PERL6_BUILTIN_CLASSES = (
+ )
+
+ PERL6_BUILTIN_CLASSES = (
#Booleans
'False','True',
#Classes
@@ -406,24 +406,24 @@ class Perl6Lexer(ExtendedRegexLexer):
'Telemetry::Period','Telemetry::Sampler','Thread','ThreadPoolScheduler',
'UInt','uint16','uint32','uint64','uint8','Uni','utf8','Variable',
'Version','VM','Whatever','WhateverCode','WrapHandle'
- )
-
- PERL6_OPERATORS = (
- 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
- 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
- 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
- '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
- '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
- 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
- '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
- '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
- '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
+ )
+
+ PERL6_OPERATORS = (
+ 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
+ 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
+ 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
+ '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
+ '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
+ 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
+ '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
+ '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
+ '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
'not', '<==', '==>', '<<==', '==>>','unicmp',
- )
-
- # Perl 6 has a *lot* of possible bracketing characters
- # this list was lifted from STD.pm6 (https://github.com/perl6/std)
- PERL6_BRACKETS = {
+ )
+
+ # Perl 6 has a *lot* of possible bracketing characters
+ # this list was lifted from STD.pm6 (https://github.com/perl6/std)
+ PERL6_BRACKETS = {
'\u0028': '\u0029', '\u003c': '\u003e', '\u005b': '\u005d',
'\u007b': '\u007d', '\u00ab': '\u00bb', '\u0f3a': '\u0f3b',
'\u0f3c': '\u0f3d', '\u169b': '\u169c', '\u2018': '\u2019',
@@ -488,244 +488,244 @@ class Perl6Lexer(ExtendedRegexLexer):
'\ufe59': '\ufe5a', '\ufe5b': '\ufe5c', '\ufe5d': '\ufe5e',
'\uff08': '\uff09', '\uff1c': '\uff1e', '\uff3b': '\uff3d',
'\uff5b': '\uff5d', '\uff5f': '\uff60', '\uff62': '\uff63',
- }
-
- def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
- if boundary_regex_fragment is None:
- return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
- suffix + r')\b'
- else:
- return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
- r'|'.join(re.escape(x) for x in words) + r')' + suffix + r'(?!' + \
- boundary_regex_fragment + r')'
-
- def brackets_callback(token_class):
- def callback(lexer, match, context):
- groups = match.groupdict()
- opening_chars = groups['delimiter']
- n_chars = len(opening_chars)
- adverbs = groups.get('adverbs')
-
- closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
- text = context.text
-
- if closer is None: # it's not a mirrored character, which means we
- # just need to look for the next occurrence
-
- end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
- else: # we need to look for the corresponding closing character,
- # keep nesting in mind
- closing_chars = closer * n_chars
- nesting_level = 1
-
- search_pos = match.start('delimiter')
-
- while nesting_level > 0:
- next_open_pos = text.find(opening_chars, search_pos + n_chars)
- next_close_pos = text.find(closing_chars, search_pos + n_chars)
-
- if next_close_pos == -1:
- next_close_pos = len(text)
- nesting_level = 0
- elif next_open_pos != -1 and next_open_pos < next_close_pos:
- nesting_level += 1
- search_pos = next_open_pos
- else: # next_close_pos < next_open_pos
- nesting_level -= 1
- search_pos = next_close_pos
-
- end_pos = next_close_pos
-
- if end_pos < 0: # if we didn't find a closer, just highlight the
- # rest of the text in this class
- end_pos = len(text)
-
- if adverbs is not None and re.search(r':to\b', adverbs):
- heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
- end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
- r'\s*$', text[end_pos:], re.MULTILINE)
-
- if end_heredoc:
- end_pos += end_heredoc.end()
- else:
- end_pos = len(text)
-
- yield match.start(), token_class, text[match.start():end_pos + n_chars]
- context.pos = end_pos + n_chars
-
- return callback
-
- def opening_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
-
- # if we encounter an opening brace and we're one level
- # below a token state, it means we need to increment
- # the nesting level for braces so we know later when
- # we should return to the token rules.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level += 1
-
- def closing_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
-
- # if we encounter a free closing brace and we're one level
- # below a token state, it means we need to check the nesting
- # level to see if we need to return to the token state.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level -= 1
- if context.perl6_token_nesting_level == 0:
- stack.pop()
-
- def embedded_perl6_callback(lexer, match, context):
- context.perl6_token_nesting_level = 1
- yield match.start(), Text, context.text[match.start():match.end()]
- context.pos = match.end()
- context.stack.append('root')
-
- # If you're modifying these rules, be careful if you need to process '{' or '}'
- # characters. We have special logic for processing these characters (due to the fact
- # that you can nest Perl 6 code in regex blocks), so if you need to process one of
- # them, make sure you also process the corresponding one!
- tokens = {
- 'common': [
- (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
- brackets_callback(Comment.Multiline)),
+ }
+
+ def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
+ if boundary_regex_fragment is None:
+ return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
+ suffix + r')\b'
+ else:
+ return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
+ r'|'.join(re.escape(x) for x in words) + r')' + suffix + r'(?!' + \
+ boundary_regex_fragment + r')'
+
+ def brackets_callback(token_class):
+ def callback(lexer, match, context):
+ groups = match.groupdict()
+ opening_chars = groups['delimiter']
+ n_chars = len(opening_chars)
+ adverbs = groups.get('adverbs')
+
+ closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
+ text = context.text
+
+ if closer is None: # it's not a mirrored character, which means we
+ # just need to look for the next occurrence
+
+ end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
+ else: # we need to look for the corresponding closing character,
+ # keep nesting in mind
+ closing_chars = closer * n_chars
+ nesting_level = 1
+
+ search_pos = match.start('delimiter')
+
+ while nesting_level > 0:
+ next_open_pos = text.find(opening_chars, search_pos + n_chars)
+ next_close_pos = text.find(closing_chars, search_pos + n_chars)
+
+ if next_close_pos == -1:
+ next_close_pos = len(text)
+ nesting_level = 0
+ elif next_open_pos != -1 and next_open_pos < next_close_pos:
+ nesting_level += 1
+ search_pos = next_open_pos
+ else: # next_close_pos < next_open_pos
+ nesting_level -= 1
+ search_pos = next_close_pos
+
+ end_pos = next_close_pos
+
+ if end_pos < 0: # if we didn't find a closer, just highlight the
+ # rest of the text in this class
+ end_pos = len(text)
+
+ if adverbs is not None and re.search(r':to\b', adverbs):
+ heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
+ end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
+ r'\s*$', text[end_pos:], re.MULTILINE)
+
+ if end_heredoc:
+ end_pos += end_heredoc.end()
+ else:
+ end_pos = len(text)
+
+ yield match.start(), token_class, text[match.start():end_pos + n_chars]
+ context.pos = end_pos + n_chars
+
+ return callback
+
+ def opening_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter an opening brace and we're one level
+ # below a token state, it means we need to increment
+ # the nesting level for braces so we know later when
+ # we should return to the token rules.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level += 1
+
+ def closing_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter a free closing brace and we're one level
+ # below a token state, it means we need to check the nesting
+ # level to see if we need to return to the token state.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level -= 1
+ if context.perl6_token_nesting_level == 0:
+ stack.pop()
+
+ def embedded_perl6_callback(lexer, match, context):
+ context.perl6_token_nesting_level = 1
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+ context.stack.append('root')
+
+ # If you're modifying these rules, be careful if you need to process '{' or '}'
+ # characters. We have special logic for processing these characters (due to the fact
+ # that you can nest Perl 6 code in regex blocks), so if you need to process one of
+ # them, make sure you also process the corresponding one!
+ tokens = {
+ 'common': [
+ (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
+ brackets_callback(Comment.Multiline)),
(r'#[^\n]*$', Comment.Single),
- (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
- (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
- (r'^=.*?\n\s*?\n', Comment.Multiline),
- (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
- bygroups(Keyword, Name), 'token-sym-brackets'),
+ (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
+ (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
+ (r'^=.*?\n\s*?\n', Comment.Multiline),
+ (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
+ bygroups(Keyword, Name), 'token-sym-brackets'),
(r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + r')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
- bygroups(Keyword, Name), 'pre-token'),
- # deal with a special case in the Perl 6 grammar (role q { ... })
- (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
- (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
- (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
- Name.Builtin),
- (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
- # copied from PerlLexer
+ bygroups(Keyword, Name), 'pre-token'),
+ # deal with a special case in the Perl 6 grammar (role q { ... })
+ (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
+ (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
+ (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
+ Name.Builtin),
+ (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
+ # copied from PerlLexer
(r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable),
- (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
- (r'::\?\w+', Name.Variable.Global),
+ Name.Variable),
+ (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
+ (r'::\?\w+', Name.Variable.Global),
(r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable.Global),
- (r'\$(?:<.*?>)+', Name.Variable),
- (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
- r'(?P=first_char)*)', brackets_callback(String)),
- # copied from PerlLexer
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'm\w+(?=\()', Name),
- (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^\w:\s])'
- r'(?P=first_char)*)', brackets_callback(String.Regex)),
- (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
- String.Regex),
- (r'<[^\s=].*?\S>', String),
- (_build_word_match(PERL6_OPERATORS), Operator),
- (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- ],
- 'root': [
- include('common'),
- (r'\{', opening_brace_callback),
- (r'\}', closing_brace_callback),
- (r'.+?', Text),
- ],
- 'pre-token': [
- include('common'),
- (r'\{', Text, ('#pop', 'token')),
- (r'.+?', Text),
- ],
- 'token-sym-brackets': [
- (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
- brackets_callback(Name), ('#pop', 'pre-token')),
- default(('#pop', 'pre-token')),
- ],
- 'token': [
- (r'\}', Text, '#pop'),
- (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
- # make sure that quotes in character classes aren't treated as strings
- (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
- # make sure that '#' characters in quotes aren't treated as comments
- (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
- (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
+ Name.Variable.Global),
+ (r'\$(?:<.*?>)+', Name.Variable),
+ (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
+ r'(?P=first_char)*)', brackets_callback(String)),
+ # copied from PerlLexer
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'm\w+(?=\()', Name),
+ (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^\w:\s])'
+ r'(?P=first_char)*)', brackets_callback(String.Regex)),
+ (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
+ String.Regex),
+ (r'<[^\s=].*?\S>', String),
+ (_build_word_match(PERL6_OPERATORS), Operator),
+ (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ ],
+ 'root': [
+ include('common'),
+ (r'\{', opening_brace_callback),
+ (r'\}', closing_brace_callback),
+ (r'.+?', Text),
+ ],
+ 'pre-token': [
+ include('common'),
+ (r'\{', Text, ('#pop', 'token')),
+ (r'.+?', Text),
+ ],
+ 'token-sym-brackets': [
+ (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
+ brackets_callback(Name), ('#pop', 'pre-token')),
+ default(('#pop', 'pre-token')),
+ ],
+ 'token': [
+ (r'\}', Text, '#pop'),
+ (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
+ # make sure that quotes in character classes aren't treated as strings
+ (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
+ # make sure that '#' characters in quotes aren't treated as comments
+ (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
+ (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
(r'#.*?$', Comment.Single),
- (r'\{', embedded_perl6_callback),
- ('.+?', String.Regex),
- ],
- }
-
- def analyse_text(text):
- def strip_pod(lines):
- in_pod = False
- stripped_lines = []
-
- for line in lines:
- if re.match(r'^=(?:end|cut)', line):
- in_pod = False
- elif re.match(r'^=\w+', line):
- in_pod = True
- elif not in_pod:
- stripped_lines.append(line)
-
- return stripped_lines
-
- # XXX handle block comments
- lines = text.splitlines()
- lines = strip_pod(lines)
- text = '\n'.join(lines)
-
- if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
- return True
-
- saw_perl_decl = False
- rating = False
-
- # check for my/our/has declarations
+ (r'\{', embedded_perl6_callback),
+ ('.+?', String.Regex),
+ ],
+ }
+
+ def analyse_text(text):
+ def strip_pod(lines):
+ in_pod = False
+ stripped_lines = []
+
+ for line in lines:
+ if re.match(r'^=(?:end|cut)', line):
+ in_pod = False
+ elif re.match(r'^=\w+', line):
+ in_pod = True
+ elif not in_pod:
+ stripped_lines.append(line)
+
+ return stripped_lines
+
+ # XXX handle block comments
+ lines = text.splitlines()
+ lines = strip_pod(lines)
+ text = '\n'.join(lines)
+
+ if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
+ return True
+
+ saw_perl_decl = False
+ rating = False
+
+ # check for my/our/has declarations
if re.search(r"(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE +
r"+\s+)?[$@%&(]", text):
- rating = 0.8
- saw_perl_decl = True
-
- for line in lines:
- line = re.sub('#.*', '', line)
+ rating = 0.8
+ saw_perl_decl = True
+
+ for line in lines:
+ line = re.sub('#.*', '', line)
if re.match(r'^\s*$', line):
- continue
-
- # match v6; use v6; use v6.0; use v6.0.0;
+ continue
+
+ # match v6; use v6; use v6.0; use v6.0.0;
if re.match(r'^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
- return True
- # match class, module, role, enum, grammar declarations
+ return True
+ # match class, module, role, enum, grammar declarations
class_decl = re.match(r'^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
- if class_decl:
- if saw_perl_decl or class_decl.group('scope') is not None:
- return True
- rating = 0.05
- continue
- break
-
+ if class_decl:
+ if saw_perl_decl or class_decl.group('scope') is not None:
+ return True
+ rating = 0.05
+ continue
+ break
+
if ':=' in text:
# Same logic as above for PerlLexer
rating /= 2
- return rating
-
- def __init__(self, **options):
+ return rating
+
+ def __init__(self, **options):
super().__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
+ self.encoding = options.get('encoding', 'utf-8')
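(Both analyse_text heuristics in the hunk above return a score that pygments.lexers.guess_lexer compares across every registered lexer: a matching shebang is normalised to 1.0, a 'my'/'our' declaration adds 0.9 for Perl 5 (or 0.8 plus the v6/class-declaration checks for Raku), and a ':=' occurrence halves the result. A hedged sketch of how that surfaces through the public API, assuming a stock Pygments install; the two snippets are made up for illustration:

    from pygments.lexers import guess_lexer
    from pygments.lexers.perl import PerlLexer, Perl6Lexer

    perl_snippet = "#!/usr/bin/perl\nmy %h = (a => 1);\nprint join ',', keys %h;\n"
    raku_snippet = "use v6;\nmy @xs = 1, 2, 3;\nsay @xs.elems;\n"

    # The shebang makes PerlLexer.analyse_text return True (normalised to 1.0),
    # so guess_lexer should normally settle on Perl here.
    print(guess_lexer(perl_snippet).name)

    # 'use v6;' trips Perl6Lexer.analyse_text, which outranks the 0.9 that the
    # 'my @xs' declaration earns from the Perl 5 heuristic.
    print(guess_lexer(raku_snippet).name)

    # The raw scores can also be inspected directly.
    print(PerlLexer.analyse_text(perl_snippet), Perl6Lexer.analyse_text(raku_snippet))
)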
diff --git a/contrib/python/Pygments/py3/pygments/lexers/php.py b/contrib/python/Pygments/py3/pygments/lexers/php.py
index 3ba299ac0a..a182b6930c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/php.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/php.py
@@ -1,91 +1,91 @@
-"""
- pygments.lexers.php
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for PHP and related languages.
-
+"""
+ pygments.lexers.php
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for PHP and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \
using, this, words, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Other, Generic
from pygments.util import get_bool_opt, get_list_opt, shebang_matches
-
+
__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer']
-
+
line_re = re.compile('.*?\n')
-
-
-class ZephirLexer(RegexLexer):
- """
- For `Zephir language <http://zephir-lang.com/>`_ source code.
-
- Zephir is a compiled high level language aimed
- to the creation of C-extensions for PHP.
-
- .. versionadded:: 2.0
- """
-
- name = 'Zephir'
- aliases = ['zephir']
- filenames = ['*.zep']
-
- zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
- zephir_type = ['bit', 'bits', 'string']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
+
+
+class ZephirLexer(RegexLexer):
+ """
+ For `Zephir language <http://zephir-lang.com/>`_ source code.
+
+ Zephir is a compiled high level language aimed
+ to the creation of C-extensions for PHP.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Zephir'
+ aliases = ['zephir']
+ filenames = ['*.zep']
+
+ zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
+ zephir_type = ['bit', 'bits', 'string']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'/', Operator, '#pop'),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
(r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
- r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
- r'empty)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
- r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
- r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
- r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_][\w\\]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
+ r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
+ r'empty)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
+ r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
+ r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
+ r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_][\w\\]*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
-
+ ]
+ }
+
+
class PsyshConsoleLexer(Lexer):
"""
For `PsySH`_ console output, such as:
@@ -135,108 +135,108 @@ class PsyshConsoleLexer(Lexer):
phplexer.get_tokens_unprocessed(curcode))
-class PhpLexer(RegexLexer):
- """
- For `PHP <http://www.php.net/>`_ source code.
- For PHP embedded in HTML, use the `HtmlPhpLexer`.
-
- Additional options accepted:
-
- `startinline`
- If given and ``True`` the lexer starts highlighting with
- php code (i.e.: no starting ``<?php`` required). The default
- is ``False``.
- `funcnamehighlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabledmodules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted
- except the special ``'unknown'`` module that includes functions
- that are known to php but are undocumented.
-
- To get a list of allowed modules have a look into the
- `_php_builtins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._php_builtins import MODULES
- >>> MODULES.keys()
- ['PHP Options/Info', 'Zip', 'dba', ...]
-
- In fact the names of those modules match the module names from
- the php documentation.
- """
-
- name = 'PHP'
- aliases = ['php', 'php3', 'php4', 'php5']
- filenames = ['*.php', '*.php[345]', '*.inc']
- mimetypes = ['text/x-php']
-
- # Note that a backslash is included in the following two patterns
- # PHP uses a backslash as a namespace separator
- _ident_char = r'[\\\w]|[^\x00-\x7f]'
- _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
- _ident_end = r'(?:' + _ident_char + ')*'
- _ident_inner = _ident_begin + _ident_end
-
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'<\?(php)?', Comment.Preproc, 'php'),
- (r'[^<]+', Other),
- (r'<', Other)
- ],
- 'php': [
- (r'\?>', Comment.Preproc, '#pop'),
+class PhpLexer(RegexLexer):
+ """
+ For `PHP <http://www.php.net/>`_ source code.
+ For PHP embedded in HTML, use the `HtmlPhpLexer`.
+
+ Additional options accepted:
+
+ `startinline`
+ If given and ``True`` the lexer starts highlighting with
+ php code (i.e.: no starting ``<?php`` required). The default
+ is ``False``.
+ `funcnamehighlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabledmodules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted
+ except the special ``'unknown'`` module that includes functions
+ that are known to php but are undocumented.
+
+ To get a list of allowed modules have a look into the
+ `_php_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._php_builtins import MODULES
+ >>> MODULES.keys()
+ ['PHP Options/Info', 'Zip', 'dba', ...]
+
+ In fact the names of those modules match the module names from
+ the php documentation.
+ """
+
+ name = 'PHP'
+ aliases = ['php', 'php3', 'php4', 'php5']
+ filenames = ['*.php', '*.php[345]', '*.inc']
+ mimetypes = ['text/x-php']
+
+ # Note that a backslash is included in the following two patterns
+ # PHP uses a backslash as a namespace separator
+ _ident_char = r'[\\\w]|[^\x00-\x7f]'
+ _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
+ _ident_end = r'(?:' + _ident_char + ')*'
+ _ident_inner = _ident_begin + _ident_end
+
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'<\?(php)?', Comment.Preproc, 'php'),
+ (r'[^<]+', Other),
+ (r'<', Other)
+ ],
+ 'php': [
+ (r'\?>', Comment.Preproc, '#pop'),
(r'(<<<)([\'"]?)(' + _ident_inner + r')(\2\n.*?\n\s*)(\3)(;?)(\n)',
bygroups(String, String, String.Delimiter, String, String.Delimiter,
Punctuation, Text)),
- (r'\s+', Text),
- (r'#.*?\n', Comment.Single),
- (r'//.*?\n', Comment.Single),
- # put the empty comment here, it is otherwise seen as
- # the start of a docstring
- (r'/\*\*/', Comment.Multiline),
- (r'/\*\*.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(->|::)(\s*)(' + _ident_inner + ')',
- bygroups(Operator, Text, Name.Attribute)),
- (r'[~!%^&*+=|:.<>/@-]+', Operator),
- (r'\?', Operator), # don't add to the charclass above!
- (r'[\[\]{}();,]+', Punctuation),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
- (r'(function)(\s+)(&?)(\s*)',
- bygroups(Keyword, Text, Operator, Text), 'functionname'),
- (r'(const)(\s+)(' + _ident_inner + ')',
- bygroups(Keyword, Text, Name.Constant)),
- (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
- r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
- r'FALSE|print|for|require|continue|foreach|require_once|'
- r'declare|return|default|static|do|switch|die|stdClass|'
- r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
+ (r'\s+', Text),
+ (r'#.*?\n', Comment.Single),
+ (r'//.*?\n', Comment.Single),
+ # put the empty comment here, it is otherwise seen as
+ # the start of a docstring
+ (r'/\*\*/', Comment.Multiline),
+ (r'/\*\*.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(->|::)(\s*)(' + _ident_inner + ')',
+ bygroups(Operator, Text, Name.Attribute)),
+ (r'[~!%^&*+=|:.<>/@-]+', Operator),
+ (r'\?', Operator), # don't add to the charclass above!
+ (r'[\[\]{}();,]+', Punctuation),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
+ (r'(function)(\s+)(&?)(\s*)',
+ bygroups(Keyword, Text, Operator, Text), 'functionname'),
+ (r'(const)(\s+)(' + _ident_inner + ')',
+ bygroups(Keyword, Text, Name.Constant)),
+ (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
+ r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
+ r'FALSE|print|for|require|continue|foreach|require_once|'
+ r'declare|return|default|static|do|switch|die|stdClass|'
+ r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
r'virtual|endfor|include_once|while|endforeach|global|'
r'endif|list|endswitch|new|endwhile|not|'
r'array|E_ALL|NULL|final|php_user_filter|interface|'
- r'implements|public|private|protected|abstract|clone|try|'
- r'catch|throw|this|use|namespace|trait|yield|'
- r'finally)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
+ r'implements|public|private|protected|abstract|clone|try|'
+ r'catch|throw|this|use|namespace|trait|yield|'
+ r'finally)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
include('magicconstants'),
(r'\$\{\$+' + _ident_inner + r'\}', Name.Variable),
- (r'\$+' + _ident_inner, Name.Variable),
- (_ident_inner, Name.Other),
- (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
- (r'\d+e[+-]?[0-9]+', Number.Float),
- (r'0[0-7]+', Number.Oct),
- (r'0x[a-f0-9]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'0b[01]+', Number.Bin),
- (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
- (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
- (r'"', String.Double, 'string'),
- ],
+ (r'\$+' + _ident_inner, Name.Variable),
+ (_ident_inner, Name.Other),
+ (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
+ (r'\d+e[+-]?[0-9]+', Number.Float),
+ (r'0[0-7]+', Number.Oct),
+ (r'0x[a-f0-9]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'0b[01]+', Number.Bin),
+ (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
+ (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
+ (r'"', String.Double, 'string'),
+ ],
'magicfuncs': [
# source: http://php.net/manual/en/language.oop5.magic.php
(words((
@@ -253,68 +253,68 @@ class PhpLexer(RegexLexer):
suffix=r'\b'),
Name.Constant),
],
- 'classname': [
- (_ident_inner, Name.Class, '#pop')
- ],
- 'functionname': [
+ 'classname': [
+ (_ident_inner, Name.Class, '#pop')
+ ],
+ 'functionname': [
include('magicfuncs'),
(_ident_inner, Name.Function, '#pop'),
default('#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'[^{$"\\]+', String.Double),
- (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'[^{$"\\]+', String.Double),
+ (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
(r'\$' + _ident_inner + r'(\[\S+?\]|->' + _ident_inner + ')?',
- String.Interpol),
- (r'(\{\$\{)(.*?)(\}\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\{)(\$.*?)(\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\$\{)(\S+)(\})',
- bygroups(String.Interpol, Name.Variable, String.Interpol)),
+ String.Interpol),
+ (r'(\{\$\{)(.*?)(\}\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\{)(\$.*?)(\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\$\{)(\S+)(\})',
+ bygroups(String.Interpol, Name.Variable, String.Interpol)),
(r'[${\\]', String.Double)
- ],
- }
-
- def __init__(self, **options):
- self.funcnamehighlighting = get_bool_opt(
- options, 'funcnamehighlighting', True)
- self.disabledmodules = get_list_opt(
- options, 'disabledmodules', ['unknown'])
- self.startinline = get_bool_opt(options, 'startinline', False)
-
- # private option argument for the lexer itself
- if '_startinline' in options:
- self.startinline = options.pop('_startinline')
-
- # collect activated functions in a set
- self._functions = set()
- if self.funcnamehighlighting:
- from pygments.lexers._php_builtins import MODULES
+ ],
+ }
+
+ def __init__(self, **options):
+ self.funcnamehighlighting = get_bool_opt(
+ options, 'funcnamehighlighting', True)
+ self.disabledmodules = get_list_opt(
+ options, 'disabledmodules', ['unknown'])
+ self.startinline = get_bool_opt(options, 'startinline', False)
+
+ # private option argument for the lexer itself
+ if '_startinline' in options:
+ self.startinline = options.pop('_startinline')
+
+ # collect activated functions in a set
+ self._functions = set()
+ if self.funcnamehighlighting:
+ from pygments.lexers._php_builtins import MODULES
for key, value in MODULES.items():
- if key not in self.disabledmodules:
- self._functions.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.startinline:
- stack.append('php')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Other:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
+ if key not in self.disabledmodules:
+ self._functions.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.startinline:
+ stack.append('php')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Other:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
+ def analyse_text(text):
if shebang_matches(text, r'php'):
return True
- rv = 0.0
- if re.search(r'<\?(?!xml)', text):
- rv += 0.3
- return rv
+ rv = 0.0
+ if re.search(r'<\?(?!xml)', text):
+ rv += 0.3
+ return rv
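(The PhpLexer.__init__/get_tokens_unprocessed pair restored above collects the enabled builtin names from pygments.lexers._php_builtins and rewrites Name.Other tokens to Name.Builtin when funcnamehighlighting is on, while startinline drops the requirement for an opening <?php tag. A small sketch of those options in use, assuming a stock Pygments install; the PHP snippet is illustrative only:

    from pygments import highlight
    from pygments.lexers.php import PhpLexer
    from pygments.formatters import HtmlFormatter

    php_code = "$items = array(1, 2, 3);\necho count($items);\n"

    # startinline=True lexes bare PHP without a '<?php' prefix; with
    # funcnamehighlighting left at its default (True), 'count' should be
    # emitted as Name.Builtin rather than Name.Other.
    lexer = PhpLexer(startinline=True, disabledmodules=['unknown'])
    print(highlight(php_code, lexer, HtmlFormatter()))

    # The unprocessed token stream can be inspected as well.
    for _, token, value in lexer.get_tokens_unprocessed(php_code):
        if value.strip():
            print(token, repr(value))
)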
diff --git a/contrib/python/Pygments/py3/pygments/lexers/praat.py b/contrib/python/Pygments/py3/pygments/lexers/praat.py
index 8fbae8c520..112d583acb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/praat.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/praat.py
@@ -1,217 +1,217 @@
-"""
- pygments.lexers.praat
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Praat
-
+"""
+ pygments.lexers.praat
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Praat
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups, include
-from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, Number, \
- Operator
-
-__all__ = ['PraatLexer']
-
-
-class PraatLexer(RegexLexer):
- """
- For `Praat <http://www.praat.org>`_ scripts.
-
- .. versionadded:: 2.1
- """
-
- name = 'Praat'
- aliases = ['praat']
- filenames = ['*.praat', '*.proc', '*.psc']
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, bygroups, include
+from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, Number, \
+ Operator
+
+__all__ = ['PraatLexer']
+
+
+class PraatLexer(RegexLexer):
+ """
+ For `Praat <http://www.praat.org>`_ scripts.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Praat'
+ aliases = ['praat']
+ filenames = ['*.praat', '*.proc', '*.psc']
+
keywords = (
- 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
- 'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
- 'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
- 'editor', 'endeditor', 'clearinfo',
+ 'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
+ 'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
+ 'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
+ 'editor', 'endeditor', 'clearinfo',
)
-
+
functions_string = (
- 'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
- 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
- 'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
- 'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
+ 'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
+ 'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
+ 'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
+ 'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
)
-
+
functions_numeric = (
- 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
- 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
- 'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
- 'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
- 'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
- 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
- 'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
- 'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
- 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
- 'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
- 'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
- 'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
- 'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
+ 'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
+ 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
+ 'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
+ 'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
+ 'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
+ 'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
+ 'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
+ 'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
+ 'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
+ 'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
+ 'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
+ 'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
+ 'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
- 'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
- 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
- 'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
- 'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
- 'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
- 'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
- 'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
+ 'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
+ 'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
+ 'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
+ 'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
+ 'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
+ 'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
+ 'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
- 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
+ 'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
- 'writeInfo', 'writeInfoLine',
+ 'writeInfo', 'writeInfoLine',
)
-
+
functions_array = (
- 'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
+ 'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
)
-
+
objects = (
- 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
- 'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
- 'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
- 'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
- 'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
- 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
- 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
- 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
- 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
- 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
- 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
- 'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
- 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
- 'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
- 'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
- 'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
+ 'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
+ 'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
+ 'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
+ 'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
+ 'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
+ 'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
+ 'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
+ 'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
+ 'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
+ 'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
+ 'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
+ 'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
+ 'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
+ 'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
+ 'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
+ 'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
- 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
- 'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
- 'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
- 'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
- 'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
- 'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
- 'Weight', 'WordList',
+ 'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
+ 'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
+ 'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
+ 'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
+ 'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
+ 'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
+ 'Weight', 'WordList',
)
-
+
variables_numeric = (
- 'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
+ 'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
)
-
+
variables_string = (
- 'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
- 'preferencesDirectory', 'newline', 'temporaryDirectory',
- 'defaultDirectory',
+ 'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
+ 'preferencesDirectory', 'newline', 'temporaryDirectory',
+ 'defaultDirectory',
)
-
+
object_attributes = (
'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
)
- tokens = {
- 'root': [
- (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
- (r'^#.*?$', Comment.Single),
- (r';[^\n]*', Comment.Single),
- (r'\s+', Text),
-
- (r'\bprocedure\b', Keyword, 'procedure_definition'),
- (r'\bcall\b', Keyword, 'procedure_call'),
- (r'@', Name.Function, 'procedure_call'),
-
- include('function_call'),
-
- (words(keywords, suffix=r'\b'), Keyword),
-
- (r'(\bform\b)(\s+)([^\n]+)',
- bygroups(Keyword, Text, String), 'old_form'),
-
- (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
- r'include|execute|system(?:_nocheck)?)(\s+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
-
- include('variable_name'),
- include('number'),
-
- (r'"', String, 'string'),
-
- (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
-
- (r'\b[A-Z]', Keyword, 'command'),
- (r'(\.{3}|[)(,])', Punctuation),
- ],
- 'command': [
- (r'( ?[\w()-]+ ?)', Keyword),
+ tokens = {
+ 'root': [
+ (r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
+ (r'^#.*?$', Comment.Single),
+ (r';[^\n]*', Comment.Single),
+ (r'\s+', Text),
+
+ (r'\bprocedure\b', Keyword, 'procedure_definition'),
+ (r'\bcall\b', Keyword, 'procedure_call'),
+ (r'@', Name.Function, 'procedure_call'),
+
+ include('function_call'),
+
+ (words(keywords, suffix=r'\b'), Keyword),
+
+ (r'(\bform\b)(\s+)([^\n]+)',
+ bygroups(Keyword, Text, String), 'old_form'),
+
+ (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
+ r'include|execute|system(?:_nocheck)?)(\s+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
+
+ include('variable_name'),
+ include('number'),
+
+ (r'"', String, 'string'),
+
+ (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
+
+ (r'\b[A-Z]', Keyword, 'command'),
+ (r'(\.{3}|[)(,])', Punctuation),
+ ],
+ 'command': [
+ (r'( ?[\w()-]+ ?)', Keyword),
include('string_interpolated'),
- (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
- (r':', Keyword, ('#pop', 'comma_list')),
+ (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
+ (r':', Keyword, ('#pop', 'comma_list')),
(r'\s', Text, '#pop'),
- ],
- 'procedure_call': [
- (r'\s+', Text),
- (r'([\w.]+)(:|\s*\()',
- bygroups(Name.Function, Text), '#pop'),
- (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
- ],
- 'procedure_definition': [
- (r'\s', Text),
- (r'([\w.]+)(\s*?[(:])',
- bygroups(Name.Function, Text), '#pop'),
- (r'([\w.]+)([^\n]*)',
- bygroups(Name.Function, Text), '#pop'),
- ],
- 'function_call': [
- (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
+ ],
+ 'procedure_call': [
+ (r'\s+', Text),
+ (r'([\w.]+)(:|\s*\()',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
+ ],
+ 'procedure_definition': [
+ (r'\s', Text),
+ (r'([\w.]+)(\s*?[(:])',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([\w.]+)([^\n]*)',
+ bygroups(Name.Function, Text), '#pop'),
+ ],
+ 'function_call': [
+ (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
- ],
- 'function': [
- (r'\s+', Text),
- (r':', Punctuation, ('#pop', 'comma_list')),
- (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
- ],
- 'comma_list': [
- (r'(\s*\n\s*)(\.{3})', bygroups(Text, Punctuation)),
-
- (r'(\s*[])\n])', Text, '#pop'),
-
- (r'\s+', Text),
- (r'"', String, 'string'),
- (r'\b(if|then|else|fi|endif)\b', Keyword),
-
- include('function_call'),
- include('variable_name'),
- include('operator'),
- include('number'),
-
+ ],
+ 'function': [
+ (r'\s+', Text),
+ (r':', Punctuation, ('#pop', 'comma_list')),
+ (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
+ ],
+ 'comma_list': [
+ (r'(\s*\n\s*)(\.{3})', bygroups(Text, Punctuation)),
+
+ (r'(\s*[])\n])', Text, '#pop'),
+
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ (r'\b(if|then|else|fi|endif)\b', Keyword),
+
+ include('function_call'),
+ include('variable_name'),
+ include('operator'),
+ include('number'),
+
(r'[()]', Text),
- (r',', Punctuation),
- ],
- 'old_arguments': [
- (r'\n', Text, '#pop'),
-
- include('variable_name'),
- include('operator'),
- include('number'),
-
- (r'"', String, 'string'),
- (r'[^\n]', Text),
- ],
- 'number': [
+ (r',', Punctuation),
+ ],
+ 'old_arguments': [
+ (r'\n', Text, '#pop'),
+
+ include('variable_name'),
+ include('operator'),
+ include('number'),
+
+ (r'"', String, 'string'),
+ (r'[^\n]', Text),
+ ],
+ 'number': [
(r'\n', Text, '#pop'),
- (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
- ],
+ (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
+ ],
'object_reference': [
include('string_interpolated'),
(r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
@@ -220,82 +220,82 @@ class PraatLexer(RegexLexer):
(r'\$', Name.Builtin),
(r'\[', Text, '#pop'),
- ],
- 'variable_name': [
- include('operator'),
- include('number'),
-
- (words(variables_string, suffix=r'\$'), Name.Variable.Global),
+ ],
+ 'variable_name': [
+ include('operator'),
+ include('number'),
+
+ (words(variables_string, suffix=r'\$'), Name.Variable.Global),
(words(variables_numeric,
suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
Name.Variable.Global),
-
+
(words(objects, prefix=r'\b', suffix=r"(_)"),
bygroups(Name.Builtin, Name.Builtin),
'object_reference'),
-
+
(r'\.?_?[a-z][\w.]*(\$|#)?', Text),
- (r'[\[\]]', Punctuation, 'comma_list'),
+ (r'[\[\]]', Punctuation, 'comma_list'),
include('string_interpolated'),
- ],
- 'operator': [
+ ],
+ 'operator': [
(r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
(r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
- ],
- 'string_interpolated': [
+ ],
+ 'string_interpolated': [
(r'\'[_a-z][^\[\]\'":]*(\[([\d,]+|"[\w,]+")\])?(:[0-9]+)?\'',
- String.Interpol),
- ],
- 'string_unquoted': [
- (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
-
- (r'\n', Text, '#pop'),
- (r'\s', Text),
+ String.Interpol),
+ ],
+ 'string_unquoted': [
+ (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
+
+ (r'\n', Text, '#pop'),
+ (r'\s', Text),
include('string_interpolated'),
- (r"'", String),
- (r"[^'\n]+", String),
- ],
- 'string': [
- (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
-
- (r'"', String, '#pop'),
+ (r"'", String),
+ (r"[^'\n]+", String),
+ ],
+ 'string': [
+ (r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
+
+ (r'"', String, '#pop'),
include('string_interpolated'),
- (r"'", String),
- (r'[^\'"\n]+', String),
- ],
- 'old_form': [
+ (r"'", String),
+ (r'[^\'"\n]+', String),
+ ],
+ 'old_form': [
(r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
- (r'\s+', Text),
-
- (r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
- bygroups(Keyword, Text), 'number'),
-
- (r'(option|button)([ \t]+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'(sentence|text)([ \t]+\S+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'(word)([ \t]+\S+[ \t]*)(\S+)?([ \t]+.*)?',
- bygroups(Keyword, Text, String, Text)),
-
- (r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)',
- bygroups(Keyword, Text, Name.Variable)),
-
-            # Ideally, processing of the number would happen in the 'number'
-            # state, but that doesn't seem to work.
- (r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?'
- r'(?:[eE][-+]?\d+)?%?)',
- bygroups(Keyword, Text, Operator, Number)),
-
- (r'(comment)(\s+)',
- bygroups(Keyword, Text), 'string_unquoted'),
-
- (r'\bendform\b', Keyword, '#pop'),
- ]
- }
+ (r'\s+', Text),
+
+ (r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
+ bygroups(Keyword, Text), 'number'),
+
+ (r'(option|button)([ \t]+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'(sentence|text)([ \t]+\S+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'(word)([ \t]+\S+[ \t]*)(\S+)?([ \t]+.*)?',
+ bygroups(Keyword, Text, String, Text)),
+
+ (r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)',
+ bygroups(Keyword, Text, Name.Variable)),
+
+            # Ideally, processing of the number would happen in the 'number'
+            # state, but that doesn't seem to work.
+ (r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?'
+ r'(?:[eE][-+]?\d+)?%?)',
+ bygroups(Keyword, Text, Operator, Number)),
+
+ (r'(comment)(\s+)',
+ bygroups(Keyword, Text), 'string_unquoted'),
+
+ (r'\bendform\b', Keyword, '#pop'),
+ ]
+ }
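
The PraatLexer restored above is a plain Pygments RegexLexer: the class-level tuples (keywords, functions_string, functions_numeric, objects, and so on) are fed into the 'tokens' state machine through words(). A minimal sketch of exercising it through the public highlight() API, assuming Pygments is importable from this tree; the sample Praat script and the names in it are invented for illustration:

    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexers.praat import PraatLexer

    # A tiny script touching an old-style form, a numeric field and a
    # built-in string function, so several lexer states get exercised.
    sample = (
        'form Sine\n'
        '    positive Frequency 440\n'
        'endform\n'
        'writeInfoLine: "frequency = ", frequency\n'
    )

    # NullFormatter drops all styling; this only checks that the restored
    # lexer tokenizes the script without raising.
    print(highlight(sample, PraatLexer(), NullFormatter()))
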
diff --git a/contrib/python/Pygments/py3/pygments/lexers/prolog.py b/contrib/python/Pygments/py3/pygments/lexers/prolog.py
index 21c813625d..1ef27478f2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/prolog.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/prolog.py
@@ -1,305 +1,305 @@
-"""
- pygments.lexers.prolog
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Prolog and Prolog-like languages.
-
+"""
+ pygments.lexers.prolog
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Prolog and Prolog-like languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['PrologLexer', 'LogtalkLexer']
-
-
-class PrologLexer(RegexLexer):
- """
- Lexer for Prolog files.
- """
- name = 'Prolog'
- aliases = ['prolog']
- filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
- mimetypes = ['text/x-prolog']
-
- flags = re.UNICODE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'nested-comment'),
- (r'%.*', Comment.Single),
- # character literal
- (r'0\'.', String.Char),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # literal with prepended base
- (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer),
- (r'[\[\](){}|.,;!]', Punctuation),
- (r':-|-->', Punctuation),
- (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['PrologLexer', 'LogtalkLexer']
+
+
+class PrologLexer(RegexLexer):
+ """
+ Lexer for Prolog files.
+ """
+ name = 'Prolog'
+ aliases = ['prolog']
+ filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
+ mimetypes = ['text/x-prolog']
+
+ flags = re.UNICODE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'nested-comment'),
+ (r'%.*', Comment.Single),
+ # character literal
+ (r'0\'.', String.Char),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # literal with prepended base
+ (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'[\[\](){}|.,;!]', Punctuation),
+ (r':-|-->', Punctuation),
+ (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
r'\\[0-7]+\\|\\["\\abcefnrstv]|[^\\"])*"', String.Double),
- (r"'(?:''|[^'])*'", String.Atom), # quoted atom
-            # Must not be followed by an atom.
- # (r'=(?=\s|[a-zA-Z\[])', Operator),
- (r'is\b', Operator),
- (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
- Operator),
- (r'(mod|div|not)\b', Operator),
- (r'_', Keyword), # The don't-care variable
- (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
+ (r"'(?:''|[^'])*'", String.Atom), # quoted atom
+            # Must not be followed by an atom.
+ # (r'=(?=\s|[a-zA-Z\[])', Operator),
+ (r'is\b', Operator),
+ (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
+ Operator),
+ (r'(mod|div|not)\b', Operator),
+ (r'_', Keyword), # The don't-care variable
+ (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
(r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
r'(\s*)(:-|-->)',
- bygroups(Name.Function, Text, Operator)), # function defn
+ bygroups(Name.Function, Text, Operator)), # function defn
(r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
r'(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
+ bygroups(Name.Function, Text, Punctuation)),
(r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
- String.Atom), # atom, characters
- # This one includes !
+ String.Atom), # atom, characters
+ # This one includes !
(r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+',
- String.Atom), # atom, graphics
- (r'[A-Z_]\w*', Name.Variable),
+ String.Atom), # atom, graphics
+ (r'[A-Z_]\w*', Name.Variable),
(r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
- ],
- 'nested-comment': [
- (r'\*/', Comment.Multiline, '#pop'),
- (r'/\*', Comment.Multiline, '#push'),
- (r'[^*/]+', Comment.Multiline),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
- def analyse_text(text):
- return ':-' in text
-
-
-class LogtalkLexer(RegexLexer):
- """
- For `Logtalk <http://logtalk.org/>`_ source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'Logtalk'
- aliases = ['logtalk']
- filenames = ['*.lgt', '*.logtalk']
- mimetypes = ['text/x-logtalk']
-
- tokens = {
- 'root': [
- # Directives
- (r'^\s*:-\s', Punctuation, 'directive'),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- # Numbers
+ ],
+ 'nested-comment': [
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'[^*/]+', Comment.Multiline),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+ def analyse_text(text):
+ return ':-' in text
+
+
+class LogtalkLexer(RegexLexer):
+ """
+ For `Logtalk <http://logtalk.org/>`_ source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Logtalk'
+ aliases = ['logtalk']
+ filenames = ['*.lgt', '*.logtalk']
+ mimetypes = ['text/x-logtalk']
+
+ tokens = {
+ 'root': [
+ # Directives
+ (r'^\s*:-\s', Punctuation, 'directive'),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Numbers
(r"0'[\\]?.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
(r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Event handlers
- (r'(after|before)(?=[(])', Keyword),
- # Message forwarding handler
- (r'forward(?=[(])', Keyword),
- # Execution-context methods
+ # Event handlers
+ (r'(after|before)(?=[(])', Keyword),
+ # Message forwarding handler
+ (r'forward(?=[(])', Keyword),
+ # Execution-context methods
(r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
- # Reflection
- (r'(current_predicate|predicate_property)(?=[(])', Keyword),
- # DCGs and term expansion
- (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
- # Entity
- (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
- (r'(object|protocol|category)_property(?=[(])', Keyword),
- # Entity relations
- (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
- (r'extends_(object|protocol|category)(?=[(])', Keyword),
- (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
- (r'(instantiat|specializ)es_class(?=[(])', Keyword),
- # Events
- (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
- # Flags
+ # Reflection
+ (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+ # DCGs and term expansion
+ (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+ # Entity
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(object|protocol|category)_property(?=[(])', Keyword),
+ # Entity relations
+ (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
+ (r'extends_(object|protocol|category)(?=[(])', Keyword),
+ (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+ (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+ # Events
+ (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+ # Flags
(r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
- # Compiling, loading, and library paths
+ # Compiling, loading, and library paths
(r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
- (r'\blogtalk_make\b', Keyword),
- # Database
- (r'(clause|retract(all)?)(?=[(])', Keyword),
- (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
- # Control constructs
- (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+ (r'\blogtalk_make\b', Keyword),
+ # Database
+ (r'(clause|retract(all)?)(?=[(])', Keyword),
+ (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+ # Control constructs
+ (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
(r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
(r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
- # All solutions
- (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+ # All solutions
+ (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
# Multi-threading predicates
(r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
# Engine predicates
(r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
- # Term unification
- (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
- # Term creation and decomposition
- (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
- # Evaluable functors
- (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
- (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
- (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
- # Other arithmetic functors
- (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
- # Term testing
+ # Term unification
+ (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
+ # Term creation and decomposition
+ (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
+ # Evaluable functors
+ (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
+ (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+ (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
+ # Other arithmetic functors
+ (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
+ # Term testing
(r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
- # Term comparison
- (r'compare(?=[(])', Keyword),
- # Stream selection and control
- (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
- (r'(open|close)(?=[(])', Keyword),
- (r'flush_output(?=[(])', Keyword),
- (r'(at_end_of_stream|flush_output)\b', Keyword),
- (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
- # Character and byte input/output
- (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
- (r'\bnl\b', Keyword),
- # Term input/output
- (r'read(_term)?(?=[(])', Keyword),
- (r'write(q|_(canonical|term))?(?=[(])', Keyword),
- (r'(current_)?op(?=[(])', Keyword),
- (r'(current_)?char_conversion(?=[(])', Keyword),
- # Atomic term processing
- (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
- (r'(char_code|sub_atom)(?=[(])', Keyword),
- (r'number_c(har|ode)s(?=[(])', Keyword),
-            # Implementation defined hook functions
- (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
- (r'\bhalt\b', Keyword),
- (r'halt(?=[(])', Keyword),
- # Message sending operators
- (r'(::|:|\^\^)', Operator),
- # External call
- (r'[{}]', Keyword),
- # Logic and control
- (r'(ignore|once)(?=[(])', Keyword),
- (r'\brepeat\b', Keyword),
- # Sorting
- (r'(key)?sort(?=[(])', Keyword),
- # Bitwise functors
- (r'(>>|<<|/\\|\\\\|\\)', Operator),
- # Predicate aliases
- (r'\bas\b', Operator),
-            # Arithmetic evaluation
- (r'\bis\b', Keyword),
-            # Arithmetic comparison
- (r'(=:=|=\\=|<|=<|>=|>)', Operator),
- # Term creation and decomposition
- (r'=\.\.', Operator),
- # Term unification
- (r'(=|\\=)', Operator),
- # Term comparison
- (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
- # Evaluable functors
- (r'(//|[-+*/])', Operator),
- (r'\b(e|pi|div|mod|rem)\b', Operator),
-            # Other arithmetic functors
- (r'\b\*\*\b', Operator),
- # DCG rules
- (r'-->', Operator),
- # Control constructs
- (r'([!;]|->)', Operator),
- # Logic and control
- (r'\\+', Operator),
- # Mode operators
- (r'[?@]', Operator),
- # Existential quantifier
- (r'\^', Operator),
- # Strings
+ # Term comparison
+ (r'compare(?=[(])', Keyword),
+ # Stream selection and control
+ (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+ (r'(open|close)(?=[(])', Keyword),
+ (r'flush_output(?=[(])', Keyword),
+ (r'(at_end_of_stream|flush_output)\b', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ # Character and byte input/output
+ (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+ (r'\bnl\b', Keyword),
+ # Term input/output
+ (r'read(_term)?(?=[(])', Keyword),
+ (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+ (r'(current_)?op(?=[(])', Keyword),
+ (r'(current_)?char_conversion(?=[(])', Keyword),
+ # Atomic term processing
+ (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+ (r'(char_code|sub_atom)(?=[(])', Keyword),
+ (r'number_c(har|ode)s(?=[(])', Keyword),
+            # Implementation defined hook functions
+ (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+ (r'\bhalt\b', Keyword),
+ (r'halt(?=[(])', Keyword),
+ # Message sending operators
+ (r'(::|:|\^\^)', Operator),
+ # External call
+ (r'[{}]', Keyword),
+ # Logic and control
+ (r'(ignore|once)(?=[(])', Keyword),
+ (r'\brepeat\b', Keyword),
+ # Sorting
+ (r'(key)?sort(?=[(])', Keyword),
+ # Bitwise functors
+ (r'(>>|<<|/\\|\\\\|\\)', Operator),
+ # Predicate aliases
+ (r'\bas\b', Operator),
+            # Arithmetic evaluation
+ (r'\bis\b', Keyword),
+            # Arithmetic comparison
+ (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+ # Term creation and decomposition
+ (r'=\.\.', Operator),
+ # Term unification
+ (r'(=|\\=)', Operator),
+ # Term comparison
+ (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+ # Evaluable functors
+ (r'(//|[-+*/])', Operator),
+ (r'\b(e|pi|div|mod|rem)\b', Operator),
+            # Other arithmetic functors
+ (r'\b\*\*\b', Operator),
+ # DCG rules
+ (r'-->', Operator),
+ # Control constructs
+ (r'([!;]|->)', Operator),
+ # Logic and control
+ (r'\\+', Operator),
+ # Mode operators
+ (r'[?@]', Operator),
+ # Existential quantifier
+ (r'\^', Operator),
+ # Strings
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Punctuation
- (r'[()\[\],.|]', Text),
- # Atoms
+ (r'[()\[\],.|]', Text),
+ # Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- ],
-
- 'quoted_atom': [
- (r"''", String),
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
- (r"[^\\'\n]+", String),
- (r'\\', String),
- ],
-
- 'directive': [
- # Conditional compilation directives
- (r'(el)?if(?=[(])', Keyword, 'root'),
+ (r"'", String, 'quoted_atom'),
+ ],
+
+ 'quoted_atom': [
+ (r"''", String),
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r"[^\\'\n]+", String),
+ (r'\\', String),
+ ],
+
+ 'directive': [
+ # Conditional compilation directives
+ (r'(el)?if(?=[(])', Keyword, 'root'),
(r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
- # Entity directives
- (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ # Entity directives
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
(r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
- # Predicate scope directives
- (r'(public|protected|private)(?=[(])', Keyword, 'root'),
- # Other directives
- (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
- (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
+ # Predicate scope directives
+ (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+ # Other directives
+ (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
+ (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
(r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
(r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
- (r'op(?=[(])', Keyword, 'root'),
- (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
+ (r'op(?=[(])', Keyword, 'root'),
+ (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
(r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
(r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
- ],
-
- 'entityrelations': [
- (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
- # Numbers
+ ],
+
+ 'entityrelations': [
+ (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ # Numbers
(r"0'[\\]?.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
- # Variables
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
(r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
- # Atoms
+ # Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
- (r"'", String, 'quoted_atom'),
- # Strings
+ (r"'", String, 'quoted_atom'),
+ # Strings
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # End of entity-opening directive
- (r'([)]\.)', Text, 'root'),
- # Scope operator
- (r'(::)', Operator),
+ # End of entity-opening directive
+ (r'([)]\.)', Text, 'root'),
+ # Scope operator
+ (r'(::)', Operator),
# Punctuation
- (r'[()\[\],.|]', Text),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/', Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- ]
- }
-
- def analyse_text(text):
- if ':- object(' in text:
- return 1.0
- elif ':- protocol(' in text:
- return 1.0
- elif ':- category(' in text:
- return 1.0
+ (r'[()\[\],.|]', Text),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/', Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if ':- object(' in text:
+ return 1.0
+ elif ':- protocol(' in text:
+ return 1.0
+ elif ':- category(' in text:
+ return 1.0
elif re.search(r'^:-\s[a-z]', text, re.M):
- return 0.9
- else:
- return 0.0
+ return 0.9
+ else:
+ return 0.0
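
Both lexers in the prolog.py diff above implement analyse_text(), the hook that pygments.lexers.guess_lexer() consults when only raw text is available: PrologLexer merely checks whether ':-' occurs, while LogtalkLexer gives entity-opening directives such as ':- object(' a full score. A minimal sketch of that heuristic, assuming Pygments is importable; the two source snippets are invented for illustration:

    from pygments.lexers.prolog import LogtalkLexer, PrologLexer

    prolog_src = 'grandparent(X, Z) :- parent(X, Y), parent(Y, Z).\n'
    logtalk_src = ':- object(greeter).\n\n    :- public(hello/0).\n\n:- end_object.\n'

    # Pygments normalizes analyse_text() return values to a float in
    # [0.0, 1.0], so both calls should report full confidence here.
    print(PrologLexer.analyse_text(prolog_src))    # ':-' is present
    print(LogtalkLexer.analyse_text(logtalk_src))  # ':- object(' scores 1.0
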
diff --git a/contrib/python/Pygments/py3/pygments/lexers/python.py b/contrib/python/Pygments/py3/pygments/lexers/python.py
index 2901d7b982..1c1a527831 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/python.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/python.py
@@ -1,32 +1,32 @@
-"""
- pygments.lexers.python
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Python and related languages.
-
+"""
+ pygments.lexers.python
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Python and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
import keyword
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
default, words, combined, do_insertions, this
-from pygments.util import get_bool_opt, shebang_matches
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other, Error
-from pygments import unistring as uni
-
-__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
+from pygments.util import get_bool_opt, shebang_matches
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other, Error
+from pygments import unistring as uni
+
+__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
'Python2Lexer', 'Python2TracebackLexer',
'CythonLexer', 'DgLexer', 'NumPyLexer']
-
-line_re = re.compile('.*?\n')
-
-
-class PythonLexer(RegexLexer):
- """
+
+line_re = re.compile('.*?\n')
+
+
+class PythonLexer(RegexLexer):
+ """
For `Python <http://www.python.org>`_ source code (version 3.x).
.. versionadded:: 0.10
@@ -34,9 +34,9 @@ class PythonLexer(RegexLexer):
.. versionchanged:: 2.5
This is now the default ``PythonLexer``. It is still available as the
alias ``Python3Lexer``.
- """
-
- name = 'Python'
+ """
+
+ name = 'Python'
aliases = ['python', 'py', 'sage', 'python3', 'py3']
filenames = [
'*.py',
@@ -60,13 +60,13 @@ class PythonLexer(RegexLexer):
]
mimetypes = ['text/x-python', 'application/x-python',
'text/x-python3', 'application/x-python3']
-
+
flags = re.MULTILINE | re.UNICODE
uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
- def innerstring_rules(ttype):
- return [
+ def innerstring_rules(ttype):
+ return [
# the old style '%s' % (...) string formatting (still valid in Py3)
(r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsaux%]', String.Interpol),
@@ -418,43 +418,43 @@ class Python2Lexer(RegexLexer):
def innerstring_rules(ttype):
return [
- # the old style '%s' % (...) string formatting
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ # the old style '%s' % (...) string formatting
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- # backslashes, quotes and formatting signs must be parsed one at a time
- (r'[^\\\'"%\n]+', ttype),
- (r'[\'"\\]', ttype),
- # unhandled string formatting sign
- (r'%', ttype),
- # newlines are an error (use "nl" state)
- ]
-
- tokens = {
- 'root': [
- (r'\n', Text),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r'%', ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
(r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
bygroups(Text, String.Affix, String.Doc)),
(r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
bygroups(Text, String.Affix, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'\A#!.+$', Comment.Hashbang),
- (r'#.*$', Comment.Single),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
- include('keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('builtins'),
+ (r'[^\S\n]+', Text),
+ (r'\A#!.+$', Comment.Hashbang),
+ (r'#.*$', Comment.Single),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
+ include('keywords'),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
+ include('builtins'),
include('magicfuncs'),
include('magicvars'),
- include('backtick'),
+ include('backtick'),
('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
bygroups(String.Affix, String.Double), 'tdqs'),
("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
@@ -471,40 +471,40 @@ class Python2Lexer(RegexLexer):
combined('stringescape', 'dqs')),
("([uUbB]?)(')", bygroups(String.Affix, String.Single),
combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (words((
- 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
- 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
- 'print', 'raise', 'return', 'try', 'while', 'yield',
- 'yield from', 'as', 'with'), suffix=r'\b'),
- Keyword),
- ],
- 'builtins': [
- (words((
- '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
- 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
- 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
- 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
- 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
- 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
- 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
- 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
- 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
- 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
- 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
- prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
+ 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+ 'print', 'raise', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
+ 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
+ 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
+ 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+ 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
+ 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
+ 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
+ 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
+ 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
+ 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
(r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls'
- r')\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
- 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
- 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
- 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
+ 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
+ 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
'MemoryError', 'NameError',
'NotImplementedError', 'OSError', 'OverflowError', 'OverflowWarning',
'PendingDeprecationWarning', 'ReferenceError',
@@ -514,8 +514,8 @@ class Python2Lexer(RegexLexer):
'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
'WindowsError', 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
+ Name.Exception),
+ ],
'magicfuncs': [
(words((
'__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__',
@@ -547,210 +547,210 @@ class Python2Lexer(RegexLexer):
suffix=r'\b'),
Name.Variable.Magic),
],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@[\w.]+', Name.Decorator),
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@[\w.]+', Name.Decorator),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
+ ],
+ 'funcname': [
include('magicfuncs'),
(r'[a-zA-Z_]\w*', Name.Function, '#pop'),
default('#pop'),
- ],
- 'classname': [
+ ],
+ 'classname': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'as\b', Keyword.Namespace),
- (r',', Operator),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'import\b', Keyword.Namespace, '#pop'),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Name.Builtin.Pseudo, '#pop'),
- # sadly, in "raise x from y" y will be highlighted as namespace too
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # anything else here also means "raise x from y" and is therefore
- # not an error
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings-single': innerstring_rules(String.Single),
- 'strings-double': innerstring_rules(String.Double),
- 'dqs': [
- (r'"', String.Double, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings-double')
- ],
- 'sqs': [
- (r"'", String.Single, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings-single')
- ],
- 'tdqs': [
- (r'"""', String.Double, '#pop'),
- include('strings-double'),
- (r'\n', String.Double)
- ],
- 'tsqs': [
- (r"'''", String.Single, '#pop'),
- include('strings-single'),
- (r'\n', String.Single)
- ],
- }
-
- def analyse_text(text):
+ ],
+ 'import': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'as\b', Keyword.Namespace),
+ (r',', Operator),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'import\b', Keyword.Namespace, '#pop'),
+ # if None occurs here, it's "raise x from None", since None can
+ # never be a module name
+ (r'None\b', Name.Builtin.Pseudo, '#pop'),
+ # sadly, in "raise x from y" y will be highlighted as namespace too
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # anything else here also means "raise x from y" and is therefore
+ # not an error
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings-single': innerstring_rules(String.Single),
+ 'strings-double': innerstring_rules(String.Double),
+ 'dqs': [
+ (r'"', String.Double, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('strings-double')
+ ],
+ 'sqs': [
+ (r"'", String.Single, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('strings-single')
+ ],
+ 'tdqs': [
+ (r'"""', String.Double, '#pop'),
+ include('strings-double'),
+ (r'\n', String.Double)
+ ],
+ 'tsqs': [
+ (r"'''", String.Single, '#pop'),
+ include('strings-single'),
+ (r'\n', String.Single)
+ ],
+ }
+
+ def analyse_text(text):
return shebang_matches(text, r'pythonw?2(\.\d)?')
-
-
-class PythonConsoleLexer(Lexer):
- """
- For Python console output or doctests, such as:
-
- .. sourcecode:: pycon
-
- >>> a = 'foo'
- >>> print a
- foo
- >>> 1 / 0
- Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- ZeroDivisionError: integer division or modulo by zero
-
- Additional options:
-
- `python3`
+
+
+class PythonConsoleLexer(Lexer):
+ """
+ For Python console output or doctests, such as:
+
+ .. sourcecode:: pycon
+
+ >>> a = 'foo'
+ >>> print a
+ foo
+ >>> 1 / 0
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ZeroDivisionError: integer division or modulo by zero
+
+ Additional options:
+
+ `python3`
Use Python 3 lexer for code. Default is ``True``.
-
- .. versionadded:: 1.0
+
+ .. versionadded:: 1.0
.. versionchanged:: 2.5
Now defaults to ``True``.
- """
- name = 'Python console session'
- aliases = ['pycon']
- mimetypes = ['text/x-python-doctest']
-
- def __init__(self, **options):
+ """
+ name = 'Python console session'
+ aliases = ['pycon']
+ mimetypes = ['text/x-python-doctest']
+
+ def __init__(self, **options):
self.python3 = get_bool_opt(options, 'python3', True)
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- if self.python3:
- pylexer = PythonLexer(**self.options)
- tblexer = PythonTracebackLexer(**self.options)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ if self.python3:
+ pylexer = PythonLexer(**self.options)
+ tblexer = PythonTracebackLexer(**self.options)
else:
pylexer = Python2Lexer(**self.options)
tblexer = Python2TracebackLexer(**self.options)
-
- curcode = ''
- insertions = []
- curtb = ''
- tbindex = 0
- tb = 0
- for match in line_re.finditer(text):
- line = match.group()
+
+ curcode = ''
+ insertions = []
+ curtb = ''
+ tbindex = 0
+ tb = 0
+ for match in line_re.finditer(text):
+ line = match.group()
if line.startswith('>>> ') or line.startswith('... '):
- tb = 0
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:4])]))
- curcode += line[4:]
+ tb = 0
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:4])]))
+ curcode += line[4:]
elif line.rstrip() == '...' and not tb:
- # only a new >>> prompt can end an exception block
- # otherwise an ellipsis in place of the traceback frames
- # will be mishandled
- insertions.append((len(curcode),
+ # only a new >>> prompt can end an exception block
+ # otherwise an ellipsis in place of the traceback frames
+ # will be mishandled
+ insertions.append((len(curcode),
[(0, Generic.Prompt, '...')]))
- curcode += line[3:]
- else:
- if curcode:
+ curcode += line[3:]
+ else:
+ if curcode:
yield from do_insertions(
insertions, pylexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
+ curcode = ''
+ insertions = []
if (line.startswith('Traceback (most recent call last):') or
re.match(' File "[^"]+", line \\d+\\n$', line)):
- tb = 1
- curtb = line
- tbindex = match.start()
- elif line == 'KeyboardInterrupt\n':
- yield match.start(), Name.Class, line
- elif tb:
- curtb += line
+ tb = 1
+ curtb = line
+ tbindex = match.start()
+ elif line == 'KeyboardInterrupt\n':
+ yield match.start(), Name.Class, line
+ elif tb:
+ curtb += line
if not (line.startswith(' ') or line.strip() == '...'):
- tb = 0
- for i, t, v in tblexer.get_tokens_unprocessed(curtb):
- yield tbindex+i, t, v
- curtb = ''
- else:
- yield match.start(), Generic.Output, line
- if curcode:
+ tb = 0
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+ curtb = ''
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
yield from do_insertions(insertions,
pylexer.get_tokens_unprocessed(curcode))
- if curtb:
- for i, t, v in tblexer.get_tokens_unprocessed(curtb):
- yield tbindex+i, t, v
-
-
-class PythonTracebackLexer(RegexLexer):
- """
+ if curtb:
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+
+
+class PythonTracebackLexer(RegexLexer):
+ """
For Python 3.x tracebacks, with support for chained exceptions.
-
+
.. versionadded:: 1.0
.. versionchanged:: 2.5
This is now the default ``PythonTracebackLexer``. It is still available
as the alias ``Python3TracebackLexer``.
- """
-
- name = 'Python Traceback'
+ """
+
+ name = 'Python Traceback'
aliases = ['pytb', 'py3tb']
filenames = ['*.pytb', '*.py3tb']
mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
-
- tokens = {
- 'root': [
+
+ tokens = {
+ 'root': [
(r'\n', Text),
(r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
(r'^During handling of the above exception, another '
r'exception occurred:\n\n', Generic.Traceback),
(r'^The above exception was the direct cause of the '
r'following exception:\n\n', Generic.Traceback),
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^.*\n', Other),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
bygroups(Text, using(PythonLexer), Text), 'markers'),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
(r'^([a-zA-Z_][\w.]*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
+ bygroups(Generic.Error, Text), '#pop')
+ ],
'markers': [
# Either `PEP 657 <https://www.python.org/dev/peps/pep-0657/>`
# error locations in Python 3.11+, or single-caret markers
@@ -760,283 +760,283 @@ class PythonTracebackLexer(RegexLexer):
'#pop'),
default('#pop'),
],
- }
-
-
+ }
+
+
Python3TracebackLexer = PythonTracebackLexer
class Python2TracebackLexer(RegexLexer):
- """
+ """
For Python tracebacks.
-
+
.. versionadded:: 0.7
.. versionchanged:: 2.5
This class has been renamed from ``PythonTracebackLexer``.
``PythonTracebackLexer`` now refers to the Python 3 variant.
- """
-
+ """
+
name = 'Python 2.x Traceback'
aliases = ['py2tb']
filenames = ['*.py2tb']
mimetypes = ['text/x-python2-traceback']
-
- tokens = {
- 'root': [
+
+ tokens = {
+ 'root': [
# Cover both (most recent call last) and (innermost last)
# The optional ^C allows us to catch keyboard interrupt signals.
(r'^(\^C)?(Traceback.*\n)',
bygroups(Text, Generic.Traceback), 'intb'),
# SyntaxError starts with this.
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
(r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
bygroups(Text, using(Python2Lexer), Text), 'marker'),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_]\w*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Text), '#pop')
+ ],
'marker': [
# For syntax errors.
(r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
default('#pop'),
],
- }
-
-
-class CythonLexer(RegexLexer):
- """
- For Pyrex and `Cython <http://cython.org>`_ source code.
-
- .. versionadded:: 1.1
- """
-
- name = 'Cython'
- aliases = ['cython', 'pyx', 'pyrex']
- filenames = ['*.pyx', '*.pxd', '*.pxi']
- mimetypes = ['text/x-cython', 'application/x-cython']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'(<)([a-zA-Z0-9.?]+)(>)',
- bygroups(Punctuation, Keyword.Type, Punctuation)),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
- (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
- bygroups(Keyword, Number.Integer, Operator, Name, Operator,
- Name, Punctuation)),
- include('keywords'),
- (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
- # (should actually start a block with only cdefs)
- (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
- (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
- include('builtins'),
- include('backtick'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
- ('[uU]?"""', String, combined('stringescape', 'tdqs')),
- ("[uU]?'''", String, combined('stringescape', 'tsqs')),
- ('[uU]?"', String, combined('stringescape', 'dqs')),
- ("[uU]?'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (words((
+ }
+
+
+class CythonLexer(RegexLexer):
+ """
+ For Pyrex and `Cython <http://cython.org>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Cython'
+ aliases = ['cython', 'pyx', 'pyrex']
+ filenames = ['*.pyx', '*.pxd', '*.pxi']
+ mimetypes = ['text/x-cython', 'application/x-cython']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'(<)([a-zA-Z0-9.?]+)(>)',
+ bygroups(Punctuation, Keyword.Type, Punctuation)),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
+ (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
+ bygroups(Keyword, Number.Integer, Operator, Name, Operator,
+ Name, Punctuation)),
+ include('keywords'),
+ (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
+ # (should actually start a block with only cdefs)
+ (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
+ (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
+ include('builtins'),
+ include('backtick'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+ ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[uU]?"', String, combined('stringescape', 'dqs')),
+ ("[uU]?'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
- 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
- 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
- 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
- Keyword),
- (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
- ],
- 'builtins': [
- (words((
+ 'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
+ 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
+ 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
+ ],
+ 'builtins': [
+ (words((
'__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
- 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
- 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
- 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
- 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
- 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
- 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
+ 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
+ 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
+ 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
+ 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
+ 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
- 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
- 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
- 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
- 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
- r')\b', Name.Builtin.Pseudo),
- (words((
- 'ArithmeticError', 'AssertionError', 'AttributeError',
- 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
- 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
- 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
- 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
- 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
- 'OSError', 'OverflowError', 'OverflowWarning',
- 'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError',
- 'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError',
- 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
- 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
- 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
- 'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
- 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Exception),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@\w+', Name.Decorator),
+ 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
+ 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
+ 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
+ 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
+ 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
+ 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
+ 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
+ 'OSError', 'OverflowError', 'OverflowWarning',
+ 'PendingDeprecationWarning', 'ReferenceError', 'RuntimeError',
+ 'RuntimeWarning', 'StandardError', 'StopIteration', 'SyntaxError',
+ 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
+ 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
+ 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@\w+', Name.Decorator),
(r'[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
+ ],
+ 'funcname': [
(r'[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'cdef': [
- (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
- (r'(struct|enum|union|class)\b', Keyword),
- (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
- bygroups(Name.Function, Text), '#pop'),
- (r'([a-zA-Z_]\w*)(\s*)(,)',
- bygroups(Name.Function, Text, Punctuation)),
- (r'from\b', Keyword, '#pop'),
- (r'as\b', Keyword),
- (r':', Punctuation, '#pop'),
- (r'(?=["\'])', Text, '#pop'),
- (r'[a-zA-Z_]\w*', Keyword.Type),
- (r'.', Text),
- ],
- 'classname': [
+ ],
+ 'cdef': [
+ (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
+ (r'(struct|enum|union|class)\b', Keyword),
+ (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([a-zA-Z_]\w*)(\s*)(,)',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'from\b', Keyword, '#pop'),
+ (r'as\b', Keyword),
+ (r':', Punctuation, '#pop'),
+ (r'(?=["\'])', Text, '#pop'),
+ (r'[a-zA-Z_]\w*', Keyword.Type),
+ (r'.', Text),
+ ],
+ 'classname': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ ],
+ 'import': [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
-
-class DgLexer(RegexLexer):
- """
- Lexer for `dg <http://pyos.github.com/dg>`_,
- a functional and object-oriented programming language
- running on the CPython 3 VM.
-
- .. versionadded:: 1.6
- """
- name = 'dg'
- aliases = ['dg']
- filenames = ['*.dg']
- mimetypes = ['text/x-dg']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment.Single),
-
- (r'(?i)0b[01]+', Number.Bin),
- (r'(?i)0o[0-7]+', Number.Oct),
- (r'(?i)0x[0-9a-f]+', Number.Hex),
- (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
- (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
- (r'(?i)[+-]?[0-9]+j?', Number.Integer),
-
- (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
- (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
- (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
- (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
-
- (r"`\w+'*`", Operator),
- (r'\b(and|in|is|or|where)\b', Operator.Word),
- (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
-
- (words((
- 'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
- 'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
+
+
+class DgLexer(RegexLexer):
+ """
+ Lexer for `dg <http://pyos.github.com/dg>`_,
+ a functional and object-oriented programming language
+ running on the CPython 3 VM.
+
+ .. versionadded:: 1.6
+ """
+ name = 'dg'
+ aliases = ['dg']
+ filenames = ['*.dg']
+ mimetypes = ['text/x-dg']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment.Single),
+
+ (r'(?i)0b[01]+', Number.Bin),
+ (r'(?i)0o[0-7]+', Number.Oct),
+ (r'(?i)0x[0-9a-f]+', Number.Hex),
+ (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+j?', Number.Integer),
+
+ (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
+ (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
+ (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
+ (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
+
+ (r"`\w+'*`", Operator),
+ (r'\b(and|in|is|or|where)\b', Operator.Word),
+ (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
+
+ (words((
+ 'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
+ 'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
'property', 'range', 'set', 'set\'', 'slice', 'staticmethod', 'str',
'super', 'tuple', 'tuple\'', 'type'),
prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
- Name.Builtin),
- (words((
- '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
- 'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
+ Name.Builtin),
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
+ 'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
'eval', 'exhaust', 'filter', 'flip', 'foldl1?', 'format', 'fst',
'getattr', 'globals', 'hasattr', 'hash', 'head', 'hex', 'id', 'init',
'input', 'isinstance', 'issubclass', 'iter', 'iterate', 'last', 'len',
@@ -1044,144 +1044,144 @@ class DgLexer(RegexLexer):
'print', 'repr', 'reversed', 'round', 'setattr', 'scanl1?', 'snd',
'sorted', 'sum', 'tail', 'take', 'takewhile', 'vars', 'zip'),
prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
- Name.Builtin),
- (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
- Name.Builtin.Pseudo),
-
- (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
- Name.Exception),
- (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
- r"SystemExit)(?!['\w])", Name.Exception),
-
- (r"(?<![\w.])(except|finally|for|if|import|not|otherwise|raise|"
- r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
-
- (r"[A-Z_]+'*(?!['\w])", Name),
- (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
- (r"\w+'*", Name),
-
- (r'[()]', Punctuation),
- (r'.', Error),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'string': [
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ Name.Builtin),
+ (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
+ Name.Builtin.Pseudo),
+
+ (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
+ Name.Exception),
+ (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
+ r"SystemExit)(?!['\w])", Name.Exception),
+
+ (r"(?<![\w.])(except|finally|for|if|import|not|otherwise|raise|"
+ r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
+
+ (r"[A-Z_]+'*(?!['\w])", Name),
+ (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
+ (r"\w+'*", Name),
+
+ (r'[()]', Punctuation),
+ (r'.', Error),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'string': [
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
'[hlL]?[E-GXc-giorsux%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String),
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop')
- ],
- 'sqs': [
- (r"'", String, '#pop')
- ],
- 'tdqs': [
- (r'"""', String, '#pop')
- ],
- 'tsqs': [
- (r"'''", String, '#pop')
- ],
- }
-
-
-class NumPyLexer(PythonLexer):
- """
- A Python lexer recognizing Numerical Python builtins.
-
- .. versionadded:: 0.10
- """
-
- name = 'NumPy'
- aliases = ['numpy']
-
- # override the mimetypes to not inherit them from python
- mimetypes = []
- filenames = []
-
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String),
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop')
+ ],
+ }
+
+
+class NumPyLexer(PythonLexer):
+ """
+ A Python lexer recognizing Numerical Python builtins.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'NumPy'
+ aliases = ['numpy']
+
+ # override the mimetypes to not inherit them from python
+ mimetypes = []
+ filenames = []
+
EXTRA_KEYWORDS = {
- 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
- 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
- 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
- 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
- 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
- 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
- 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
- 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
- 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
- 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
- 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
- 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
- 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
- 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
- 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
- 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
- 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
- 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
- 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
- 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
- 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
- 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
- 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
- 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
- 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
- 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
- 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
- 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
- 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
- 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
- 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
- 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
- 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
- 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
- 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
- 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
- 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
- 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
- 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
- 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
- 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
- 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
- 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
- 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
- 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
- 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
- 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
- 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
- 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
- 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
- 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
- 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
- 'set_numeric_ops', 'set_printoptions', 'set_string_function',
- 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
- 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
- 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
- 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
- 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
- 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
- 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
- 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
- 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
- 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
- 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
+ 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
+ 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
+ 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
+ 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
+ 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
+ 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
+ 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
+ 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
+ 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
+ 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
+ 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
+ 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
+ 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
+ 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
+ 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
+ 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
+ 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
+ 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
+ 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
+ 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
+ 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
+ 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
+ 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
+ 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
+ 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
+ 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
+ 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
+ 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
+ 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
+ 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
+ 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
+ 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
+ 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
+ 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
+ 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
+ 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
+ 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
+ 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
+ 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
+ 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
+ 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
+ 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
+ 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
+ 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
+ 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
+ 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
+ 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
+ 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
+ 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
+ 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
+ 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
+ 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
+ 'set_numeric_ops', 'set_printoptions', 'set_string_function',
+ 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
+ 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
+ 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
+ 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
+ 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
+ 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
+ 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
+ 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
+ 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
+ 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
+ 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
}
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- PythonLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Keyword.Pseudo, value
- else:
- yield index, token, value
-
- def analyse_text(text):
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ PythonLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Keyword.Pseudo, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
ltext = text[:1000]
return (shebang_matches(text, r'pythonw?(3(\.\d)?)?') or
'import ' in ltext) \
diff --git a/contrib/python/Pygments/py3/pygments/lexers/qvt.py b/contrib/python/Pygments/py3/pygments/lexers/qvt.py
index 72817f09c1..69dfe94c1d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/qvt.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/qvt.py
@@ -1,105 +1,105 @@
-"""
- pygments.lexers.qvt
- ~~~~~~~~~~~~~~~~~~~
-
- Lexer for QVT Operational language.
-
+"""
+ pygments.lexers.qvt
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for QVT Operational language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
from pygments.lexer import RegexLexer, bygroups, include, combined, default, \
words
-from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
- Name, String, Number
-
-__all__ = ['QVToLexer']
-
-
-class QVToLexer(RegexLexer):
+from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
+ Name, String, Number
+
+__all__ = ['QVToLexer']
+
+
+class QVToLexer(RegexLexer):
"""
- For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
-
- Reference for implementing this: «Meta Object Facility (MOF) 2.0
- Query/View/Transformation Specification», Version 1.1 - January 2011
- (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
- particular.
-
- Notable tokens assignments:
-
- - Name.Class is assigned to the identifier following any of the following
- keywords: metamodel, class, exception, primitive, enum, transformation
- or library
-
- - Name.Function is assigned to the names of mappings and queries
-
- - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
- 'self' and 'result'.
- """
- # With obvious borrowings & inspiration from the Java, Python and C lexers
-
- name = 'QVTO'
- aliases = ['qvto', 'qvt']
- filenames = ['*.qvto']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'(--|//)(\s*)(directive:)?(.*)$',
- bygroups(Comment, Comment, Comment.Preproc, Comment)),
- # Uncomment the following if you want to distinguish between
- # '/*' and '/**', à la javadoc
+ For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
+
+ Reference for implementing this: «Meta Object Facility (MOF) 2.0
+ Query/View/Transformation Specification», Version 1.1 - January 2011
+ (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
+ particular.
+
+ Notable tokens assignments:
+
+ - Name.Class is assigned to the identifier following any of the following
+ keywords: metamodel, class, exception, primitive, enum, transformation
+ or library
+
+ - Name.Function is assigned to the names of mappings and queries
+
+ - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
+ 'self' and 'result'.
+ """
+ # With obvious borrowings & inspiration from the Java, Python and C lexers
+
+ name = 'QVTO'
+ aliases = ['qvto', 'qvt']
+ filenames = ['*.qvto']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'(--|//)(\s*)(directive:)?(.*)$',
+ bygroups(Comment, Comment, Comment.Preproc, Comment)),
+ # Uncomment the following if you want to distinguish between
+ # '/*' and '/**', à la javadoc
# (r'/[*]{2}(.|\n)*?[*]/', Comment.Multiline),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'\\\n', Text),
- (r'(and|not|or|xor|##?)\b', Operator.Word),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'\\\n', Text),
+ (r'(and|not|or|xor|##?)\b', Operator.Word),
(r'(:{1,2}=|[-+]=)\b', Operator.Word),
(r'(@|<<|>>)\b', Keyword), # stereotypes
(r'!=|<>|==|=|!->|->|>=|<=|[.]{3}|[+/*%=<>&|.~]', Operator),
- (r'[]{}:(),;[]', Punctuation),
- (r'(true|false|unlimited|null)\b', Keyword.Constant),
- (r'(this|self|result)\b', Name.Builtin.Pseudo),
- (r'(var)\b', Keyword.Declaration),
- (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'(true|false|unlimited|null)\b', Keyword.Constant),
+ (r'(this|self|result)\b', Name.Builtin.Pseudo),
+ (r'(var)\b', Keyword.Declaration),
+ (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
(r'(metamodel|class|exception|primitive|enum|transformation|'
r'library)(\s+)(\w+)',
- bygroups(Keyword.Word, Text, Name.Class)),
+ bygroups(Keyword.Word, Text, Name.Class)),
(r'(exception)(\s+)(\w+)',
bygroups(Keyword.Word, Text, Name.Exception)),
- (r'(main)\b', Name.Function),
+ (r'(main)\b', Name.Function),
(r'(mapping|helper|query)(\s+)',
bygroups(Keyword.Declaration, Text), 'operation'),
- (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
- (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
- Keyword.Type),
- include('keywords'),
- ('"', String, combined('stringescape', 'dqs')),
- ("'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
+ (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
+ (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
+ Keyword.Type),
+ include('keywords'),
+ ('"', String, combined('stringescape', 'dqs')),
+ ("'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
# (r'([a-zA-Z_]\w*)(::)([a-zA-Z_]\w*)',
- # bygroups(Text, Text, Text)),
+ # bygroups(Text, Text, Text)),
],
-
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
+
+ 'fromimport': [
+ (r'(?:[ \t]|\\\n)+', Text),
(r'[a-zA-Z_][\w.]*', Name.Namespace),
default('#pop'),
],
-
- 'operation': [
- (r'::', Text),
+
+ 'operation': [
+ (r'::', Text),
(r'(.*::)([a-zA-Z_]\w*)([ \t]*)(\()',
bygroups(Text, Name.Function, Text, Punctuation), '#pop')
],
-
- 'assert': [
- (r'(warning|error|fatal)\b', Keyword, '#pop'),
+
+ 'assert': [
+ (r'(warning|error|fatal)\b', Keyword, '#pop'),
default('#pop'), # all else: go back
],
-
- 'keywords': [
+
+ 'keywords': [
(words((
'abstract', 'access', 'any', 'assert', 'blackbox', 'break',
'case', 'collect', 'collectNested', 'collectOne', 'collectselect',
@@ -117,35 +117,35 @@ class QVToLexer(RegexLexer):
'sortedBy', 'static', 'switch', 'tag', 'then', 'try', 'typedef',
'unlimited', 'uses', 'when', 'where', 'while', 'with', 'xcollect',
'xmap', 'xselect'), suffix=r'\b'), Keyword),
+ ],
+
+ # There is no need to distinguish between String.Single and
+ # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
+ 'strings': [
+ (r'[^\\\'"\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
],
-
- # There is no need to distinguish between String.Single and
- # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
- 'strings': [
- (r'[^\\\'"\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- ],
- 'stringescape': [
- (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
- ],
+ 'stringescape': [
+ (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
+ ],
'dqs': [ # double-quoted string
- (r'"', String, '#pop'),
- (r'\\\\|\\"', String.Escape),
- include('strings')
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"', String.Escape),
+ include('strings')
],
'sqs': [ # single-quoted string
- (r"'", String, '#pop'),
- (r"\\\\|\\'", String.Escape),
- include('strings')
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'", String.Escape),
+ include('strings')
],
- 'name': [
+ 'name': [
(r'[a-zA-Z_]\w*', Name),
],
- # numbers: excerpt taken from the python lexer
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer)
- ],
+ # numbers: excerpt taken from the python lexer
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/r.py b/contrib/python/Pygments/py3/pygments/lexers/r.py
index 44168a7ad5..d325a43cf9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/r.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/r.py
@@ -1,190 +1,190 @@
-"""
- pygments.lexers.r
- ~~~~~~~~~~~~~~~~~
-
- Lexers for the R/S languages.
-
+"""
+ pygments.lexers.r
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for the R/S languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import Lexer, RegexLexer, include, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class RConsoleLexer(Lexer):
- """
- For R console transcripts or R CMD BATCH output files.
- """
-
- name = 'RConsole'
- aliases = ['rconsole', 'rout']
- filenames = ['*.Rout']
-
- def get_tokens_unprocessed(self, text):
- slexer = SLexer(**self.options)
-
- current_code_block = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('>') or line.startswith('+'):
- # Colorize the prompt as such,
- # then put rest of line into current_code_block
- insertions.append((len(current_code_block),
- [(0, Generic.Prompt, line[:2])]))
- current_code_block += line[2:]
- else:
- # We have reached a non-prompt line!
- # If we have stored prompt lines, need to process them first.
- if current_code_block:
- # Weave together the prompts and highlight code.
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class RConsoleLexer(Lexer):
+ """
+ For R console transcripts or R CMD BATCH output files.
+ """
+
+ name = 'RConsole'
+ aliases = ['rconsole', 'rout']
+ filenames = ['*.Rout']
+
+ def get_tokens_unprocessed(self, text):
+ slexer = SLexer(**self.options)
+
+ current_code_block = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith('>') or line.startswith('+'):
+ # Colorize the prompt as such,
+ # then put rest of line into current_code_block
+ insertions.append((len(current_code_block),
+ [(0, Generic.Prompt, line[:2])]))
+ current_code_block += line[2:]
+ else:
+ # We have reached a non-prompt line!
+ # If we have stored prompt lines, need to process them first.
+ if current_code_block:
+ # Weave together the prompts and highlight code.
yield from do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block))
- # Reset vars for next code block.
- current_code_block = ''
- insertions = []
- # Now process the actual line itself, this is output from R.
- yield match.start(), Generic.Output, line
-
- # If we happen to end on a code block with nothing after it, need to
- # process the last code block. This is neither elegant nor DRY so
- # should be changed.
- if current_code_block:
+ # Reset vars for next code block.
+ current_code_block = ''
+ insertions = []
+ # Now process the actual line itself, this is output from R.
+ yield match.start(), Generic.Output, line
+
+ # If we happen to end on a code block with nothing after it, need to
+ # process the last code block. This is neither elegant nor DRY so
+ # should be changed.
+ if current_code_block:
yield from do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block))
-
-
-class SLexer(RegexLexer):
- """
- For S, S-plus, and R source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'S'
- aliases = ['splus', 's', 'r']
- filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
- mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
- 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
-
+
+
+class SLexer(RegexLexer):
+ """
+ For S, S-plus, and R source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'S'
+ aliases = ['splus', 's', 'r']
+ filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
+ mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
+ 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
+
valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
- tokens = {
- 'comments': [
- (r'#.*$', Comment.Single),
- ],
- 'valid_name': [
+ tokens = {
+ 'comments': [
+ (r'#.*$', Comment.Single),
+ ],
+ 'valid_name': [
(valid_name, Name),
- ],
- 'punctuation': [
- (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
- ],
- 'keywords': [
- (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
- r'(?![\w.])',
- Keyword.Reserved),
- ],
- 'operators': [
- (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
+ ],
+ 'punctuation': [
+ (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
+ ],
+ 'keywords': [
+ (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
+ r'(?![\w.])',
+ Keyword.Reserved),
+ ],
+ 'operators': [
+ (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
(r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
- ],
- 'builtin_symbols': [
- (r'(NULL|NA(_(integer|real|complex|character)_)?|'
- r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
- r'(?![\w.])',
- Keyword.Constant),
- (r'(T|F)\b', Name.Builtin.Pseudo),
- ],
- 'numbers': [
- # hex number
- (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
- # decimal number
- (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
- Number),
- ],
- 'statements': [
- include('comments'),
- # whitespaces
- (r'\s+', Text),
- (r'\'', String, 'string_squote'),
- (r'\"', String, 'string_dquote'),
- include('builtin_symbols'),
+ ],
+ 'builtin_symbols': [
+ (r'(NULL|NA(_(integer|real|complex|character)_)?|'
+ r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
+ r'(?![\w.])',
+ Keyword.Constant),
+ (r'(T|F)\b', Name.Builtin.Pseudo),
+ ],
+ 'numbers': [
+ # hex number
+ (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
+ # decimal number
+ (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
+ Number),
+ ],
+ 'statements': [
+ include('comments'),
+ # whitespaces
+ (r'\s+', Text),
+ (r'\'', String, 'string_squote'),
+ (r'\"', String, 'string_dquote'),
+ include('builtin_symbols'),
include('valid_name'),
- include('numbers'),
- include('keywords'),
- include('punctuation'),
- include('operators'),
- ],
- 'root': [
+ include('numbers'),
+ include('keywords'),
+ include('punctuation'),
+ include('operators'),
+ ],
+ 'root': [
# calls:
(r'(%s)\s*(?=\()' % valid_name, Name.Function),
- include('statements'),
- # blocks:
- (r'\{|\}', Punctuation),
- # (r'\{', Punctuation, 'block'),
- (r'.', Text),
- ],
- # 'block': [
- # include('statements'),
- # ('\{', Punctuation, '#push'),
- # ('\}', Punctuation, '#pop')
- # ],
- 'string_squote': [
- (r'([^\'\\]|\\.)*\'', String, '#pop'),
- ],
- 'string_dquote': [
- (r'([^"\\]|\\.)*"', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
- return 0.11
-
-
-class RdLexer(RegexLexer):
- """
- Pygments Lexer for R documentation (Rd) files
-
- This is a very minimal implementation, highlighting little more
- than the macros. A description of Rd syntax is found in `Writing R
- Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
+ include('statements'),
+ # blocks:
+ (r'\{|\}', Punctuation),
+ # (r'\{', Punctuation, 'block'),
+ (r'.', Text),
+ ],
+ # 'block': [
+ # include('statements'),
+ # ('\{', Punctuation, '#push'),
+ # ('\}', Punctuation, '#pop')
+ # ],
+ 'string_squote': [
+ (r'([^\'\\]|\\.)*\'', String, '#pop'),
+ ],
+ 'string_dquote': [
+ (r'([^"\\]|\\.)*"', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
+ return 0.11
+
+
+class RdLexer(RegexLexer):
+ """
+ Pygments Lexer for R documentation (Rd) files
+
+ This is a very minimal implementation, highlighting little more
+ than the macros. A description of Rd syntax is found in `Writing R
+ Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
-
- .. versionadded:: 1.6
- """
- name = 'Rd'
- aliases = ['rd']
- filenames = ['*.Rd']
- mimetypes = ['text/x-r-doc']
-
- # To account for verbatim / LaTeX-like / and R-like areas
- # would require parsing.
- tokens = {
- 'root': [
- # catch escaped brackets and percent sign
- (r'\\[\\{}%]', String.Escape),
- # comments
- (r'%.*$', Comment),
- # special macros with no arguments
- (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
- # macros
- (r'\\[a-zA-Z]+\b', Keyword),
- # special preprocessor macros
- (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
- # non-escaped brackets
- (r'[{}]', Name.Builtin),
- # everything else
- (r'[^\\%\n{}]+', Text),
- (r'.', Text),
- ]
- }
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rd'
+ aliases = ['rd']
+ filenames = ['*.Rd']
+ mimetypes = ['text/x-r-doc']
+
+ # To account for verbatim / LaTeX-like / and R-like areas
+ # would require parsing.
+ tokens = {
+ 'root': [
+ # catch escaped brackets and percent sign
+ (r'\\[\\{}%]', String.Escape),
+ # comments
+ (r'%.*$', Comment),
+ # special macros with no arguments
+ (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
+ # macros
+ (r'\\[a-zA-Z]+\b', Keyword),
+ # special preprocessor macros
+ (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
+ # non-escaped brackets
+ (r'[{}]', Name.Builtin),
+ # everything else
+ (r'[^\\%\n{}]+', Text),
+ (r'.', Text),
+ ]
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rdf.py b/contrib/python/Pygments/py3/pygments/lexers/rdf.py
index bd7a4f690c..bd60e8b764 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rdf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rdf.py
@@ -1,33 +1,33 @@
-"""
- pygments.lexers.rdf
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for semantic web and RDF query languages and markup.
-
+"""
+ pygments.lexers.rdf
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for semantic web and RDF query languages and markup.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default
-from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
- Whitespace, Name, Literal, Comment, Text
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default
+from pygments.token import Keyword, Punctuation, String, Number, Operator, Generic, \
+ Whitespace, Name, Literal, Comment, Text
+
__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
-
-
-class SparqlLexer(RegexLexer):
- """
+
+
+class SparqlLexer(RegexLexer):
+ """
Lexer for `SPARQL <https://www.w3.org/TR/sparql11-query/>`_ query language.
-
- .. versionadded:: 2.0
- """
- name = 'SPARQL'
- aliases = ['sparql']
- filenames = ['*.rq', '*.sparql']
- mimetypes = ['application/sparql-query']
-
+
+ .. versionadded:: 2.0
+ """
+ name = 'SPARQL'
+ aliases = ['sparql']
+ filenames = ['*.rq', '*.sparql']
+ mimetypes = ['application/sparql-query']
+
# character group definitions ::
PN_CHARS_BASE_GRP = ('a-zA-Z'
@@ -56,136 +56,136 @@ class SparqlLexer(RegexLexer):
PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
- # terminal productions ::
-
+ # terminal productions ::
+
PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
+
PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
+
PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
+
HEX = '[' + HEX_GRP + ']'
-
+
PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
- IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
-
+
+ IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
+
BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
'.]*' + PN_CHARS + ')?'
-
+
PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
+
VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
'0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
-
- PERCENT = '%' + HEX + HEX
-
- PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
- PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
+
+ PERCENT = '%' + HEX + HEX
+
+ PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
+
+ PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
+
PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
'(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
PN_CHARS_GRP + ':]|' + PLX + '))?')
-
- EXPONENT = r'[eE][+-]?\d+'
-
- # Lexer token definitions ::
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # keywords ::
+
+ EXPONENT = r'[eE][+-]?\d+'
+
+ # Lexer token definitions ::
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # keywords ::
(r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
- r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
+ r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
- r'using\s+named|using|graph|default|named|all|optional|service|'
+ r'using\s+named|using|graph|default|named|all|optional|service|'
r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
- (r'(a)\b', Keyword),
- # IRIs ::
- ('(' + IRIREF + ')', Name.Label),
- # blank nodes ::
- ('(' + BLANK_NODE_LABEL + ')', Name.Label),
- # # variables ::
- ('[?$]' + VARNAME, Name.Variable),
- # prefixed names ::
+ (r'(a)\b', Keyword),
+ # IRIs ::
+ ('(' + IRIREF + ')', Name.Label),
+ # blank nodes ::
+ ('(' + BLANK_NODE_LABEL + ')', Name.Label),
+ # # variables ::
+ ('[?$]' + VARNAME, Name.Variable),
+ # prefixed names ::
(r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
- bygroups(Name.Namespace, Punctuation, Name.Tag)),
- # function names ::
+ bygroups(Name.Namespace, Punctuation, Name.Tag)),
+ # function names ::
(r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
- r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
- r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
+ r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
+ r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
- r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
- r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
- r'count|sum|min|max|avg|sample|group_concat|separator)\b',
- Name.Function),
- # boolean literals ::
- (r'(true|false)', Keyword.Constant),
- # double literals ::
+ r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
+ r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
+ r'count|sum|min|max|avg|sample|group_concat|separator)\b',
+ Name.Function),
+ # boolean literals ::
+ (r'(true|false)', Keyword.Constant),
+ # double literals ::
(r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
- # decimal literals ::
- (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
- # integer literals ::
- (r'[+\-]?\d+', Number.Integer),
- # operators ::
- (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
- # punctuation characters ::
- (r'[(){}.;,:^\[\]]', Punctuation),
- # line comments ::
- (r'#[^\n]*', Comment),
- # strings ::
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String.Escape, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (r'u' + HEX + '{4}', String.Escape, '#pop'),
- (r'U' + HEX + '{8}', String.Escape, '#pop'),
- (r'.', String.Escape, '#pop'),
- ],
- 'end-of-string': [
- (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Name.Function), '#pop:2'),
- (r'\^\^', Operator, '#pop:2'),
- default('#pop:2'),
- ],
- }
-
-
-class TurtleLexer(RegexLexer):
- """
- Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
-
- .. versionadded:: 2.1
- """
- name = 'Turtle'
- aliases = ['turtle']
- filenames = ['*.ttl']
- mimetypes = ['text/turtle', 'application/x-turtle']
-
+ # decimal literals ::
+ (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
+ # integer literals ::
+ (r'[+\-]?\d+', Number.Integer),
+ # operators ::
+ (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
+ # punctuation characters ::
+ (r'[(){}.;,:^\[\]]', Punctuation),
+ # line comments ::
+ (r'#[^\n]*', Comment),
+ # strings ::
+ (r'"""', String, 'triple-double-quoted-string'),
+ (r'"', String, 'single-double-quoted-string'),
+ (r"'''", String, 'triple-single-quoted-string'),
+ (r"'", String, 'single-single-quoted-string'),
+ ],
+ 'triple-double-quoted-string': [
+ (r'"""', String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-double-quoted-string': [
+ (r'"', String, 'end-of-string'),
+ (r'[^"\\\n]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'triple-single-quoted-string': [
+ (r"'''", String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String.Escape, 'string-escape'),
+ ],
+ 'single-single-quoted-string': [
+ (r"'", String, 'end-of-string'),
+ (r"[^'\\\n]+", String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'string-escape': [
+ (r'u' + HEX + '{4}', String.Escape, '#pop'),
+ (r'U' + HEX + '{8}', String.Escape, '#pop'),
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'end-of-string': [
+ (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
+ bygroups(Operator, Name.Function), '#pop:2'),
+ (r'\^\^', Operator, '#pop:2'),
+ default('#pop:2'),
+ ],
+ }
+
+
+class TurtleLexer(RegexLexer):
+ """
+ Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Turtle'
+ aliases = ['turtle']
+ filenames = ['*.ttl']
+ mimetypes = ['text/turtle', 'application/x-turtle']
+
# character group definitions ::
PN_CHARS_BASE_GRP = ('a-zA-Z'
'\u00c0-\u00d6'
@@ -199,7 +199,7 @@ class TurtleLexer(RegexLexer):
'\u3001-\ud7ff'
'\uf900-\ufdcf'
'\ufdf0-\ufffd')
-
+
PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
PN_CHARS_GRP = (PN_CHARS_U_GRP +
@@ -233,80 +233,80 @@ class TurtleLexer(RegexLexer):
'(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
PN_CHARS_GRP + ':]|' + PLX + '))?')
- patterns = {
+ patterns = {
'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)', # Simplified character range
- 'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
- }
-
- tokens = {
- 'root': [
+ 'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
+ }
+
+ tokens = {
+ 'root': [
(r'\s+', Text),
-
- # Base / prefix
- (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
- bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
- Punctuation)),
- (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
- bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
- Name.Variable, Whitespace, Punctuation)),
-
- # The shorthand predicate 'a'
- (r'(?<=\s)a(?=\s)', Keyword.Type),
-
- # IRIREF
- (r'%(IRIREF)s' % patterns, Name.Variable),
-
- # PrefixedName
+
+ # Base / prefix
+ (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
+ bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
+ Punctuation)),
+ (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
+ bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
+ Name.Variable, Whitespace, Punctuation)),
+
+ # The shorthand predicate 'a'
+ (r'(?<=\s)a(?=\s)', Keyword.Type),
+
+ # IRIREF
+ (r'%(IRIREF)s' % patterns, Name.Variable),
+
+ # PrefixedName
(r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
bygroups(Name.Namespace, Punctuation, Name.Tag)),
-
- # Comment
- (r'#[^\n]+', Comment),
-
- (r'\b(true|false)\b', Literal),
- (r'[+\-]?\d*\.\d+', Number.Float),
- (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
- (r'[+\-]?\d+', Number.Integer),
- (r'[\[\](){}.;,:^]', Punctuation),
-
- (r'"""', String, 'triple-double-quoted-string'),
- (r'"', String, 'single-double-quoted-string'),
- (r"'''", String, 'triple-single-quoted-string'),
- (r"'", String, 'single-single-quoted-string'),
- ],
- 'triple-double-quoted-string': [
- (r'"""', String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-double-quoted-string': [
- (r'"', String, 'end-of-string'),
- (r'[^"\\\n]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'triple-single-quoted-string': [
- (r"'''", String, 'end-of-string'),
- (r'[^\\]+', String),
- (r'\\', String, 'string-escape'),
- ],
- 'single-single-quoted-string': [
- (r"'", String, 'end-of-string'),
- (r"[^'\\\n]+", String),
- (r'\\', String, 'string-escape'),
- ],
- 'string-escape': [
- (r'.', String, '#pop'),
- ],
- 'end-of-string': [
+
+ # Comment
+ (r'#[^\n]+', Comment),
+
+ (r'\b(true|false)\b', Literal),
+ (r'[+\-]?\d*\.\d+', Number.Float),
+ (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
+ (r'[+\-]?\d+', Number.Integer),
+ (r'[\[\](){}.;,:^]', Punctuation),
+
+ (r'"""', String, 'triple-double-quoted-string'),
+ (r'"', String, 'single-double-quoted-string'),
+ (r"'''", String, 'triple-single-quoted-string'),
+ (r"'", String, 'single-single-quoted-string'),
+ ],
+ 'triple-double-quoted-string': [
+ (r'"""', String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-double-quoted-string': [
+ (r'"', String, 'end-of-string'),
+ (r'[^"\\\n]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'triple-single-quoted-string': [
+ (r"'''", String, 'end-of-string'),
+ (r'[^\\]+', String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'single-single-quoted-string': [
+ (r"'", String, 'end-of-string'),
+ (r"[^'\\\n]+", String),
+ (r'\\', String, 'string-escape'),
+ ],
+ 'string-escape': [
+ (r'.', String, '#pop'),
+ ],
+ 'end-of-string': [
(r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
- bygroups(Operator, Generic.Emph), '#pop:2'),
-
- (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
-
- default('#pop:2'),
-
- ],
- }
+ bygroups(Operator, Generic.Emph), '#pop:2'),
+
+ (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
+
+ default('#pop:2'),
+
+ ],
+ }
# Turtle and Tera Term macro files share the same file extension
# but each has a recognizable and distinct syntax.
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rebol.py b/contrib/python/Pygments/py3/pygments/lexers/rebol.py
index 57480a1cb9..1dea1d19f3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rebol.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rebol.py
@@ -1,430 +1,430 @@
-"""
- pygments.lexers.rebol
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the REBOL and related languages.
-
+"""
+ pygments.lexers.rebol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the REBOL and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Generic, Whitespace
-
-__all__ = ['RebolLexer', 'RedLexer']
-
-
-class RebolLexer(RegexLexer):
- """
- A `REBOL <http://www.rebol.com/>`_ lexer.
-
- .. versionadded:: 1.1
- """
- name = 'REBOL'
- aliases = ['rebol']
- filenames = ['*.r', '*.r3', '*.reb']
- mimetypes = ['text/x-rebol']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(
- r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
- r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
- r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
- r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
- r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
- r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
- r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
- r'while|compress|decompress|secure|open|close|read|read-io|'
- r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
- r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
- r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
- r'browse|launch|stats|get-modes|set-modes|to-local-file|'
- r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
- r'hide|draw|show|size-text|textinfo|offset-to-caret|'
- r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
- r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
- r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
- r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
- r'rsa-encrypt)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(
- r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
- r'minimum|maximum|negate|complement|absolute|random|head|tail|'
- r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
- r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
- r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
- r'copy)$', word):
- yield match.start(), Name.Function, word
- elif re.match(
- r'(error|source|input|license|help|install|echo|Usage|with|func|'
- r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
- r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
- r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
- r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
- r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
- r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
- r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
- r'write-user|save-user|set-user-name|protect-system|parse-xml|'
- r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
- r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
- r'request-dir|center-face|do-events|net-error|decode-url|'
- r'parse-header|parse-header-date|parse-email-addrs|import-email|'
- r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
- r'find-key-face|do-face|viewtop|confine|find-window|'
- r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
- r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
- r'read-thru|load-thru|do-thru|launch-thru|load-image|'
- r'request-download|do-face-alt|set-font|set-para|get-style|'
- r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
- r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
- r'resize-face|load-stock|load-stock-block|notify|request|flash|'
- r'request-color|request-pass|request-text|request-list|'
- r'request-date|request-file|dbug|editor|link-relative-path|'
- r'emailer|parse-error)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(
- r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
- r'return|exit|break)$', word):
- yield match.start(), Name.Exception, word
- elif re.match('REBOL$', word):
- yield match.start(), Generic.Heading, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Whitespace
+
+__all__ = ['RebolLexer', 'RedLexer']
+
+
+class RebolLexer(RegexLexer):
+ """
+ A `REBOL <http://www.rebol.com/>`_ lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'REBOL'
+ aliases = ['rebol']
+ filenames = ['*.r', '*.r3', '*.reb']
+ mimetypes = ['text/x-rebol']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(
+ r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
+ r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
+ r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
+ r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
+ r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
+ r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
+ r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
+ r'while|compress|decompress|secure|open|close|read|read-io|'
+ r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
+ r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
+ r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
+ r'browse|launch|stats|get-modes|set-modes|to-local-file|'
+ r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
+ r'hide|draw|show|size-text|textinfo|offset-to-caret|'
+ r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
+ r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
+ r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
+ r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
+ r'rsa-encrypt)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(
+ r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
+ r'minimum|maximum|negate|complement|absolute|random|head|tail|'
+ r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
+ r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
+ r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
+ r'copy)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(
+ r'(error|source|input|license|help|install|echo|Usage|with|func|'
+ r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
+ r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
+ r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
+ r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
+ r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
+ r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
+ r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
+ r'write-user|save-user|set-user-name|protect-system|parse-xml|'
+ r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
+ r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
+ r'request-dir|center-face|do-events|net-error|decode-url|'
+ r'parse-header|parse-header-date|parse-email-addrs|import-email|'
+ r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
+ r'find-key-face|do-face|viewtop|confine|find-window|'
+ r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
+ r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
+ r'read-thru|load-thru|do-thru|launch-thru|load-image|'
+ r'request-download|do-face-alt|set-font|set-para|get-style|'
+ r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
+ r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
+ r'resize-face|load-stock|load-stock-block|notify|request|flash|'
+ r'request-color|request-pass|request-text|request-list|'
+ r'request-date|request-file|dbug|editor|link-relative-path|'
+ r'emailer|parse-error)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(
+ r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
+ r'return|exit|break)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match('REBOL$', word):
+ yield match.start(), Generic.Heading, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
- word):
- yield match.start(), Operator, word
+ word):
+ yield match.start(), Operator, word
elif re.match(r".*\?$", word):
- yield match.start(), Keyword, word
+ yield match.start(), Keyword, word
elif re.match(r".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'[^R]+', Comment),
- (r'REBOL\s+\[', Generic.Strong, 'script'),
- (r'R', Comment)
- ],
- 'script': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#\{[0-9a-f]*\}', Number.Hex),
- (r'2#\{', Number.Hex, 'bin2'),
- (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
- (r'"', String, 'string'),
- (r'\{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(^{")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
- r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+X\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]()]', Generic.Strong),
- (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
- (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s\{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{"\[]+', Comment),
- (r'/[^(^{")\s/[\]]*', Name.Attribute),
- (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(^{")\s]+)', Text),
- ],
- 'string': [
- (r'[^(^")]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(^{})]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[(|)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([01]\s*){8}', Number.Hex),
- (r'\}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(^")]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(^{})]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'\{', Comment, '#push'),
- (r'\}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'\{', Comment, "commentString2"),
- (r'[^(\[\]"{)]+', Comment),
- ],
- }
-
- def analyse_text(text):
- """
- Check if the code contains a REBOL header, in which case it is probably not R code
- """
- if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
- # The code starts with REBOL header
- return 1.0
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'REBOL\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#\{[0-9a-f]*\}', Number.Hex),
+ (r'2#\{', Number.Hex, 'bin2'),
+ (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+ (r'"', String, 'string'),
+ (r'\{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(^{")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
+ r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+X\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]()]', Generic.Strong),
+ (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
+ (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s\{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{"\[]+', Comment),
+ (r'/[^(^{")\s/[\]]*', Name.Attribute),
+ (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(^{")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(^")]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(^{})]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[(|)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([01]\s*){8}', Number.Hex),
+ (r'\}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(^")]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(^{})]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'\{', Comment, '#push'),
+ (r'\}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'\{', Comment, "commentString2"),
+ (r'[^(\[\]"{)]+', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check if the code contains a REBOL header, in which case it is probably not R code
+ """
+ if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
+ # The code starts with REBOL header
+ return 1.0
elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
- # The code contains REBOL header but also some text before it
- return 0.5
-
-
-class RedLexer(RegexLexer):
- """
- A `Red-language <http://www.red-lang.org/>`_ lexer.
-
- .. versionadded:: 2.0
- """
- name = 'Red'
- aliases = ['red', 'red/system']
- filenames = ['*.red', '*.reds']
- mimetypes = ['text/x-red', 'text/x-red-system']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
- r'foreach|forall|func|function|does|has|switch|'
- r'case|reduce|compose|get|set|print|prin|equal\?|'
- r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
- r'greater-or-equal\?|same\?|not|type\?|stats|'
- r'bind|union|replace|charset|routine)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
- r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
- r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
- r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
- r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
- r'update|write)$', word):
- yield match.start(), Name.Function, word
- elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
- r'none|crlf|dot|null-byte)$', word):
- yield match.start(), Name.Builtin.Pseudo, word
- elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
- r'#switch|#default|#get-definition)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
- r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
- r'quote|forever)$', word):
- yield match.start(), Name.Exception, word
- elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
- r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
- r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
- r'any-struct\?|none\?|word\?|any-series\?)$', word):
- yield match.start(), Keyword, word
- elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
+ # The code contains REBOL header but also some text before it
+ return 0.5
+
+
+class RedLexer(RegexLexer):
+ """
+ A `Red-language <http://www.red-lang.org/>`_ lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Red'
+ aliases = ['red', 'red/system']
+ filenames = ['*.red', '*.reds']
+ mimetypes = ['text/x-red', 'text/x-red-system']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
+ r'foreach|forall|func|function|does|has|switch|'
+ r'case|reduce|compose|get|set|print|prin|equal\?|'
+ r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+ r'greater-or-equal\?|same\?|not|type\?|stats|'
+ r'bind|union|replace|charset|routine)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+ r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+ r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+ r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+ r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
+ r'update|write)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
+ r'none|crlf|dot|null-byte)$', word):
+ yield match.start(), Name.Builtin.Pseudo, word
+ elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
+ r'#switch|#default|#get-definition)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
+ r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
+ r'quote|forever)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
+ r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
+ r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
+ r'any-struct\?|none\?|word\?|any-series\?)$', word):
+ yield match.start(), Keyword, word
+ elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
r'<<<|>>>|<<|>>|<|>%)$', word):
- yield match.start(), Operator, word
+ yield match.start(), Operator, word
elif re.match(r".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- elif re.match(":.*", word):
- yield match.start(), Generic.Subheading, word # get-word
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'[^R]+', Comment),
- (r'Red/System\s+\[', Generic.Strong, 'script'),
- (r'Red\s+\[', Generic.Strong, 'script'),
- (r'R', Comment)
- ],
- 'script': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#\{[0-9a-f\s]*\}', Number.Hex),
- (r'2#\{', Number.Hex, 'bin2'),
- (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
- (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
- bygroups(Number.Hex, Name.Variable, Whitespace)),
- (r'"', String, 'string'),
- (r'\{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(^{")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
- r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+X\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]()]', Generic.Strong),
- (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
- (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s\{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{"\[]+', Comment),
- (r'/[^(^{^")\s/[\]]*', Name.Attribute),
- (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(^{")\s]+)', Text),
- ],
- 'string': [
- (r'[^(^")]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(^{})]+', String),
- (escape_re, String.Escape),
- (r'[(|)]+', String),
- (r'\^.', String.Escape),
- (r'\{', String, '#push'),
- (r'\}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[(|)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([01]\s*){8}', Number.Hex),
- (r'\}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(^")]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(^{})]+', Comment),
- (escape_re, Comment),
- (r'[(|)]+', Comment),
- (r'\^.', Comment),
- (r'\{', Comment, '#push'),
- (r'\}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'\{', Comment, "commentString2"),
- (r'[^(\[\]"{)]+', Comment),
- ],
- }
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ elif re.match(":.*", word):
+ yield match.start(), Generic.Subheading, word # get-word
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'Red/System\s+\[', Generic.Strong, 'script'),
+ (r'Red\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#\{[0-9a-f\s]*\}', Number.Hex),
+ (r'2#\{', Number.Hex, 'bin2'),
+ (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
+ (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
+ bygroups(Number.Hex, Name.Variable, Whitespace)),
+ (r'"', String, 'string'),
+ (r'\{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(^{")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
+ r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+X\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]()]', Generic.Strong),
+ (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # url
+ (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s\{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{"\[]+', Comment),
+ (r'/[^(^{^")\s/[\]]*', Name.Attribute),
+ (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(^{")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(^")]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(^{})]+', String),
+ (escape_re, String.Escape),
+ (r'[(|)]+', String),
+ (r'\^.', String.Escape),
+ (r'\{', String, '#push'),
+ (r'\}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[(|)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([01]\s*){8}', Number.Hex),
+ (r'\}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(^")]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(^{})]+', Comment),
+ (escape_re, Comment),
+ (r'[(|)]+', Comment),
+ (r'\^.', Comment),
+ (r'\{', Comment, '#push'),
+ (r'\}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'\{', Comment, "commentString2"),
+ (r'[^(\[\]"{)]+', Comment),
+ ],
+ }
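A hedged usage sketch (not part of the patch, assuming pygments is installed) for the two lexers defined above: iterating get_tokens() makes the per-word classification done by word_callback (builtins, functions, operators, lit-words, and so on) directly visible.

# Illustrative sketch only, not part of the diff.
from pygments.lexers import RebolLexer

code = 'REBOL [Title: "demo"]\nprint reduce [1 + 2]\n'
for token_type, value in RebolLexer().get_tokens(code):
    if value.strip():                      # skip pure-whitespace tokens
        print(f'{str(token_type):30} {value!r}')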
diff --git a/contrib/python/Pygments/py3/pygments/lexers/resource.py b/contrib/python/Pygments/py3/pygments/lexers/resource.py
index 3ed176a181..0223375bcc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/resource.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/resource.py
@@ -1,84 +1,84 @@
-"""
- pygments.lexers.resource
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for resource definition files.
-
+"""
+ pygments.lexers.resource
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for resource definition files.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Comment, String, Number, Operator, Text, \
- Keyword, Name
-
-__all__ = ['ResourceLexer']
-
-
-class ResourceLexer(RegexLexer):
- """Lexer for `ICU Resource bundles
- <http://userguide.icu-project.org/locale/resources>`_.
-
- .. versionadded:: 2.0
- """
- name = 'ResourceBundle'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Comment, String, Number, Operator, Text, \
+ Keyword, Name
+
+__all__ = ['ResourceLexer']
+
+
+class ResourceLexer(RegexLexer):
+ """Lexer for `ICU Resource bundles
+ <http://userguide.icu-project.org/locale/resources>`_.
+
+ .. versionadded:: 2.0
+ """
+ name = 'ResourceBundle'
aliases = ['resourcebundle', 'resource']
filenames = []
-
- _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
- ':int', ':alias')
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- (r'//.*?$', Comment),
- (r'"', String, 'string'),
- (r'-?\d+', Number.Integer),
- (r'[,{}]', Operator),
- (r'([^\s{:]+)(\s*)(%s?)' % '|'.join(_types),
- bygroups(Name, Text, Keyword)),
- (r'\s+', Text),
- (words(_types), Keyword),
- ],
- 'string': [
- (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
- r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
- (r'\{', String.Escape, 'msgname'),
- (r'"', String, '#pop')
- ],
- 'msgname': [
- (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
- ],
- 'message': [
- (r'\{', String.Escape, 'msgname'),
- (r'\}', String.Escape, '#pop'),
- (r'(,)(\s*)([a-z]+)(\s*\})',
- bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
- (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
- bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
- String.Escape, Operator.Word, String.Escape, Operator,
- String.Escape, Number.Integer, String.Escape), 'choice'),
- (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
- bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
- String.Escape), 'choice'),
- (r'\s+', String.Escape)
- ],
- 'choice': [
- (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
- bygroups(Operator, Number.Integer, String.Escape), 'message'),
- (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
- (r'\}', String.Escape, ('#pop', '#pop')),
- (r'\s+', String.Escape)
- ],
- 'str': [
- (r'\}', String.Escape, '#pop'),
- (r'\{', String.Escape, 'msgname'),
- (r'[^{}]+', String)
- ]
- }
-
- def analyse_text(text):
+
+ _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
+ ':int', ':alias')
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment),
+ (r'"', String, 'string'),
+ (r'-?\d+', Number.Integer),
+ (r'[,{}]', Operator),
+ (r'([^\s{:]+)(\s*)(%s?)' % '|'.join(_types),
+ bygroups(Name, Text, Keyword)),
+ (r'\s+', Text),
+ (words(_types), Keyword),
+ ],
+ 'string': [
+ (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
+ r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
+ (r'\{', String.Escape, 'msgname'),
+ (r'"', String, '#pop')
+ ],
+ 'msgname': [
+ (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
+ ],
+ 'message': [
+ (r'\{', String.Escape, 'msgname'),
+ (r'\}', String.Escape, '#pop'),
+ (r'(,)(\s*)([a-z]+)(\s*\})',
+ bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
+ (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
+ bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+ String.Escape, Operator.Word, String.Escape, Operator,
+ String.Escape, Number.Integer, String.Escape), 'choice'),
+ (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
+ bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
+ String.Escape), 'choice'),
+ (r'\s+', String.Escape)
+ ],
+ 'choice': [
+ (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
+ bygroups(Operator, Number.Integer, String.Escape), 'message'),
+ (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
+ (r'\}', String.Escape, ('#pop', '#pop')),
+ (r'\s+', String.Escape)
+ ],
+ 'str': [
+ (r'\}', String.Escape, '#pop'),
+ (r'\{', String.Escape, 'msgname'),
+ (r'[^{}]+', String)
+ ]
+ }
+
+ def analyse_text(text):
if text.startswith('root:table'):
return 1.0
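Because ResourceLexer declares filenames = [], it is normally requested by one of its aliases (or picked up by the analyse_text() check above, which scores text starting with "root:table" at 1.0). A minimal sketch, assuming pygments is installed; the sample bundle is only a rough approximation of ICU resource syntax.

# Illustrative sketch only, not part of the diff.
from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('resourcebundle')
sample = 'root:table { greeting:string { "hello" } }\n'
for token_type, value in lexer.get_tokens(sample):
    if value.strip():
        print(f'{str(token_type):25} {value!r}')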
diff --git a/contrib/python/Pygments/py3/pygments/lexers/roboconf.py b/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
index 4380113831..bbffc1c84e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
@@ -1,81 +1,81 @@
-"""
- pygments.lexers.roboconf
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Roboconf DSL.
-
+"""
+ pygments.lexers.roboconf
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Roboconf DSL.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, re
-from pygments.token import Text, Operator, Keyword, Name, Comment
-
-__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
-
-
-class RoboconfGraphLexer(RegexLexer):
- """
- Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ graph files.
-
- .. versionadded:: 2.1
- """
- name = 'Roboconf Graph'
- aliases = ['roboconf-graph']
- filenames = ['*.graph']
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
- # Skip white spaces
- (r'\s+', Text),
-
- # There is one operator
- (r'=', Operator),
-
- # Keywords
- (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
- (words((
- 'installer', 'extends', 'exports', 'imports', 'facets',
- 'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
-
- # Comments
- (r'#.*\n', Comment),
-
- # Default
- (r'[^#]', Text),
- (r'.*\n', Text)
- ]
- }
-
-
-class RoboconfInstancesLexer(RegexLexer):
- """
- Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ instances files.
-
- .. versionadded:: 2.1
- """
- name = 'Roboconf Instances'
- aliases = ['roboconf-instances']
- filenames = ['*.instances']
-
- flags = re.IGNORECASE | re.MULTILINE
- tokens = {
- 'root': [
-
- # Skip white spaces
- (r'\s+', Text),
-
- # Keywords
- (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
- (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
- (r'\s*[\w.-]+\s*:', Name),
-
- # Comments
- (r'#.*\n', Comment),
-
- # Default
- (r'[^#]', Text),
- (r'.*\n', Text)
- ]
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, re
+from pygments.token import Text, Operator, Keyword, Name, Comment
+
+__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
+
+
+class RoboconfGraphLexer(RegexLexer):
+ """
+ Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ graph files.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Roboconf Graph'
+ aliases = ['roboconf-graph']
+ filenames = ['*.graph']
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+ # Skip white spaces
+ (r'\s+', Text),
+
+ # There is one operator
+ (r'=', Operator),
+
+ # Keywords
+ (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+ (words((
+ 'installer', 'extends', 'exports', 'imports', 'facets',
+ 'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
+
+ # Comments
+ (r'#.*\n', Comment),
+
+ # Default
+ (r'[^#]', Text),
+ (r'.*\n', Text)
+ ]
+ }
+
+
+class RoboconfInstancesLexer(RegexLexer):
+ """
+ Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ instances files.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Roboconf Instances'
+ aliases = ['roboconf-instances']
+ filenames = ['*.instances']
+
+ flags = re.IGNORECASE | re.MULTILINE
+ tokens = {
+ 'root': [
+
+ # Skip white spaces
+ (r'\s+', Text),
+
+ # Keywords
+ (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
+ (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
+ (r'\s*[\w.-]+\s*:', Name),
+
+ # Comments
+ (r'#.*\n', Comment),
+
+ # Default
+ (r'[^#]', Text),
+ (r'.*\n', Text)
+ ]
+ }
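Both Roboconf lexers above lean on the words() helper plus a handful of plain regexes, and register *.graph and *.instances filename patterns. A minimal sketch, assuming pygments is installed; the graph snippet is only an illustrative Roboconf-like sample, not taken from the patch.

# Illustrative sketch only, not part of the diff.
from pygments.lexers import get_lexer_for_filename

graph_src = (
    'import common.graph\n'
    '# a component with one child\n'
    'vm {\n'
    '    installer: target;\n'
    '    children: mysql;\n'
    '}\n'
)
lexer = get_lexer_for_filename('app.graph')   # resolved via filenames = ['*.graph']
for token_type, value in lexer.get_tokens(graph_src):
    if value.strip():
        print(f'{str(token_type):20} {value!r}')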
diff --git a/contrib/python/Pygments/py3/pygments/lexers/robotframework.py b/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
index 3c212f5e20..b2092df3d6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
@@ -1,551 +1,551 @@
-"""
- pygments.lexers.robotframework
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for Robot Framework.
-
+"""
+ pygments.lexers.robotframework
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for Robot Framework.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# Copyright 2012 Nokia Siemens Networks Oyj
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Token
-
-__all__ = ['RobotFrameworkLexer']
-
-
-HEADING = Token.Generic.Heading
-SETTING = Token.Keyword.Namespace
-IMPORT = Token.Name.Namespace
-TC_KW_NAME = Token.Generic.Subheading
-KEYWORD = Token.Name.Function
-ARGUMENT = Token.String
-VARIABLE = Token.Name.Variable
-COMMENT = Token.Comment
-SEPARATOR = Token.Punctuation
-SYNTAX = Token.Punctuation
-GHERKIN = Token.Generic.Emph
-ERROR = Token.Error
-
-
-def normalize(string, remove=''):
- string = string.lower()
- for char in remove + ' ':
- if char in string:
- string = string.replace(char, '')
- return string
-
-
-class RobotFrameworkLexer(Lexer):
- """
- For `Robot Framework <http://robotframework.org>`_ test data.
-
- Supports both space and pipe separated plain text formats.
-
- .. versionadded:: 1.6
- """
- name = 'RobotFramework'
- aliases = ['robotframework']
+ :license: BSD, see LICENSE for details.
+"""
+
+# Copyright 2012 Nokia Siemens Networks Oyj
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from pygments.lexer import Lexer
+from pygments.token import Token
+
+__all__ = ['RobotFrameworkLexer']
+
+
+HEADING = Token.Generic.Heading
+SETTING = Token.Keyword.Namespace
+IMPORT = Token.Name.Namespace
+TC_KW_NAME = Token.Generic.Subheading
+KEYWORD = Token.Name.Function
+ARGUMENT = Token.String
+VARIABLE = Token.Name.Variable
+COMMENT = Token.Comment
+SEPARATOR = Token.Punctuation
+SYNTAX = Token.Punctuation
+GHERKIN = Token.Generic.Emph
+ERROR = Token.Error
+
+
+def normalize(string, remove=''):
+ string = string.lower()
+ for char in remove + ' ':
+ if char in string:
+ string = string.replace(char, '')
+ return string
+
+
+class RobotFrameworkLexer(Lexer):
+ """
+ For `Robot Framework <http://robotframework.org>`_ test data.
+
+ Supports both space and pipe separated plain text formats.
+
+ .. versionadded:: 1.6
+ """
+ name = 'RobotFramework'
+ aliases = ['robotframework']
filenames = ['*.robot']
- mimetypes = ['text/x-robotframework']
-
- def __init__(self, **options):
- options['tabsize'] = 2
- options['encoding'] = 'UTF-8'
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- row_tokenizer = RowTokenizer()
- var_tokenizer = VariableTokenizer()
- index = 0
- for row in text.splitlines():
- for value, token in row_tokenizer.tokenize(row):
- for value, token in var_tokenizer.tokenize(value, token):
- if value:
+ mimetypes = ['text/x-robotframework']
+
+ def __init__(self, **options):
+ options['tabsize'] = 2
+ options['encoding'] = 'UTF-8'
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ row_tokenizer = RowTokenizer()
+ var_tokenizer = VariableTokenizer()
+ index = 0
+ for row in text.splitlines():
+ for value, token in row_tokenizer.tokenize(row):
+ for value, token in var_tokenizer.tokenize(value, token):
+ if value:
yield index, token, str(value)
- index += len(value)
-
-
+ index += len(value)
+
+
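A brief usage sketch (not part of the patch, assuming pygments is installed) for the lexer just defined: get_tokens_unprocessed() feeds each row through the RowTokenizer and VariableTokenizer classes that follow.

# Illustrative sketch only, not part of the diff. The Robot file content is an
# assumed example.
from pygments.lexers import RobotFrameworkLexer

robot_src = (
    '*** Test Cases ***\n'
    'Example\n'
    '    Log    ${MESSAGE}\n'
)
for index, token_type, value in RobotFrameworkLexer().get_tokens_unprocessed(robot_src):
    print(index, str(token_type), repr(value))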
class VariableTokenizer:
-
- def tokenize(self, string, token):
- var = VariableSplitter(string, identifiers='$@%&')
- if var.start < 0 or token in (COMMENT, ERROR):
- yield string, token
- return
- for value, token in self._tokenize(var, string, token):
- if value:
- yield value, token
-
- def _tokenize(self, var, string, orig_token):
- before = string[:var.start]
- yield before, orig_token
- yield var.identifier + '{', SYNTAX
+
+ def tokenize(self, string, token):
+ var = VariableSplitter(string, identifiers='$@%&')
+ if var.start < 0 or token in (COMMENT, ERROR):
+ yield string, token
+ return
+ for value, token in self._tokenize(var, string, token):
+ if value:
+ yield value, token
+
+ def _tokenize(self, var, string, orig_token):
+ before = string[:var.start]
+ yield before, orig_token
+ yield var.identifier + '{', SYNTAX
yield from self.tokenize(var.base, VARIABLE)
- yield '}', SYNTAX
+ yield '}', SYNTAX
if var.index is not None:
- yield '[', SYNTAX
+ yield '[', SYNTAX
yield from self.tokenize(var.index, VARIABLE)
- yield ']', SYNTAX
+ yield ']', SYNTAX
yield from self.tokenize(string[var.end:], orig_token)
-
-
+
+
class RowTokenizer:
-
- def __init__(self):
- self._table = UnknownTable()
- self._splitter = RowSplitter()
- testcases = TestCaseTable()
- settings = SettingTable(testcases.set_default_template)
- variables = VariableTable()
- keywords = KeywordTable()
- self._tables = {'settings': settings, 'setting': settings,
- 'metadata': settings,
- 'variables': variables, 'variable': variables,
- 'testcases': testcases, 'testcase': testcases,
+
+ def __init__(self):
+ self._table = UnknownTable()
+ self._splitter = RowSplitter()
+ testcases = TestCaseTable()
+ settings = SettingTable(testcases.set_default_template)
+ variables = VariableTable()
+ keywords = KeywordTable()
+ self._tables = {'settings': settings, 'setting': settings,
+ 'metadata': settings,
+ 'variables': variables, 'variable': variables,
+ 'testcases': testcases, 'testcase': testcases,
'tasks': testcases, 'task': testcases,
- 'keywords': keywords, 'keyword': keywords,
- 'userkeywords': keywords, 'userkeyword': keywords}
-
- def tokenize(self, row):
- commented = False
- heading = False
- for index, value in enumerate(self._splitter.split(row)):
- # First value, and every second after that, is a separator.
- index, separator = divmod(index-1, 2)
- if value.startswith('#'):
- commented = True
- elif index == 0 and value.startswith('*'):
- self._table = self._start_table(value)
- heading = True
+ 'keywords': keywords, 'keyword': keywords,
+ 'userkeywords': keywords, 'userkeyword': keywords}
+
+ def tokenize(self, row):
+ commented = False
+ heading = False
+ for index, value in enumerate(self._splitter.split(row)):
+ # First value, and every second after that, is a separator.
+ index, separator = divmod(index-1, 2)
+ if value.startswith('#'):
+ commented = True
+ elif index == 0 and value.startswith('*'):
+ self._table = self._start_table(value)
+ heading = True
yield from self._tokenize(value, index, commented,
separator, heading)
- self._table.end_row()
-
- def _start_table(self, header):
- name = normalize(header, remove='*')
- return self._tables.get(name, UnknownTable())
-
- def _tokenize(self, value, index, commented, separator, heading):
- if commented:
- yield value, COMMENT
- elif separator:
- yield value, SEPARATOR
- elif heading:
- yield value, HEADING
- else:
+ self._table.end_row()
+
+ def _start_table(self, header):
+ name = normalize(header, remove='*')
+ return self._tables.get(name, UnknownTable())
+
+ def _tokenize(self, value, index, commented, separator, heading):
+ if commented:
+ yield value, COMMENT
+ elif separator:
+ yield value, SEPARATOR
+ elif heading:
+ yield value, HEADING
+ else:
yield from self._table.tokenize(value, index)
-
-
+
+
class RowSplitter:
- _space_splitter = re.compile('( {2,})')
+ _space_splitter = re.compile('( {2,})')
_pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
-
- def split(self, row):
- splitter = (row.startswith('| ') and self._split_from_pipes
- or self._split_from_spaces)
+
+ def split(self, row):
+ splitter = (row.startswith('| ') and self._split_from_pipes
+ or self._split_from_spaces)
yield from splitter(row)
- yield '\n'
-
- def _split_from_spaces(self, row):
- yield '' # Start with (pseudo)separator similarly as with pipes
+ yield '\n'
+
+ def _split_from_spaces(self, row):
+ yield '' # Start with (pseudo)separator similarly as with pipes
yield from self._space_splitter.split(row)
-
- def _split_from_pipes(self, row):
- _, separator, rest = self._pipe_splitter.split(row, 1)
- yield separator
- while self._pipe_splitter.search(rest):
- cell, separator, rest = self._pipe_splitter.split(rest, 1)
- yield cell
- yield separator
- yield rest
-
-
+
+ def _split_from_pipes(self, row):
+ _, separator, rest = self._pipe_splitter.split(row, 1)
+ yield separator
+ while self._pipe_splitter.search(rest):
+ cell, separator, rest = self._pipe_splitter.split(rest, 1)
+ yield cell
+ yield separator
+ yield rest
+
+
class Tokenizer:
- _tokens = None
-
- def __init__(self):
- self._index = 0
-
- def tokenize(self, value):
- values_and_tokens = self._tokenize(value, self._index)
- self._index += 1
- if isinstance(values_and_tokens, type(Token)):
- values_and_tokens = [(value, values_and_tokens)]
- return values_and_tokens
-
- def _tokenize(self, value, index):
- index = min(index, len(self._tokens) - 1)
- return self._tokens[index]
-
- def _is_assign(self, value):
- if value.endswith('='):
- value = value[:-1].strip()
- var = VariableSplitter(value, identifiers='$@&')
- return var.start == 0 and var.end == len(value)
-
-
-class Comment(Tokenizer):
- _tokens = (COMMENT,)
-
-
-class Setting(Tokenizer):
- _tokens = (SETTING, ARGUMENT)
- _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
+ _tokens = None
+
+ def __init__(self):
+ self._index = 0
+
+ def tokenize(self, value):
+ values_and_tokens = self._tokenize(value, self._index)
+ self._index += 1
+ if isinstance(values_and_tokens, type(Token)):
+ values_and_tokens = [(value, values_and_tokens)]
+ return values_and_tokens
+
+ def _tokenize(self, value, index):
+ index = min(index, len(self._tokens) - 1)
+ return self._tokens[index]
+
+ def _is_assign(self, value):
+ if value.endswith('='):
+ value = value[:-1].strip()
+ var = VariableSplitter(value, identifiers='$@&')
+ return var.start == 0 and var.end == len(value)
+
+
+class Comment(Tokenizer):
+ _tokens = (COMMENT,)
+
+
+class Setting(Tokenizer):
+ _tokens = (SETTING, ARGUMENT)
+ _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
- _import_settings = ('library', 'resource', 'variables')
- _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
+ _import_settings = ('library', 'resource', 'variables')
+ _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
'testtimeout','tasktimeout')
- _custom_tokenizer = None
-
- def __init__(self, template_setter=None):
- Tokenizer.__init__(self)
- self._template_setter = template_setter
-
- def _tokenize(self, value, index):
- if index == 1 and self._template_setter:
- self._template_setter(value)
- if index == 0:
- normalized = normalize(value)
- if normalized in self._keyword_settings:
- self._custom_tokenizer = KeywordCall(support_assign=False)
- elif normalized in self._import_settings:
- self._custom_tokenizer = ImportSetting()
- elif normalized not in self._other_settings:
- return ERROR
- elif self._custom_tokenizer:
- return self._custom_tokenizer.tokenize(value)
- return Tokenizer._tokenize(self, value, index)
-
-
-class ImportSetting(Tokenizer):
- _tokens = (IMPORT, ARGUMENT)
-
-
-class TestCaseSetting(Setting):
- _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
- 'template')
- _import_settings = ()
- _other_settings = ('documentation', 'tags', 'timeout')
-
- def _tokenize(self, value, index):
- if index == 0:
- type = Setting._tokenize(self, value[1:-1], index)
- return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
- return Setting._tokenize(self, value, index)
-
-
-class KeywordSetting(TestCaseSetting):
- _keyword_settings = ('teardown',)
- _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
-
-
-class Variable(Tokenizer):
- _tokens = (SYNTAX, ARGUMENT)
-
- def _tokenize(self, value, index):
- if index == 0 and not self._is_assign(value):
- return ERROR
- return Tokenizer._tokenize(self, value, index)
-
-
-class KeywordCall(Tokenizer):
- _tokens = (KEYWORD, ARGUMENT)
-
- def __init__(self, support_assign=True):
- Tokenizer.__init__(self)
- self._keyword_found = not support_assign
- self._assigns = 0
-
- def _tokenize(self, value, index):
- if not self._keyword_found and self._is_assign(value):
- self._assigns += 1
- return SYNTAX # VariableTokenizer tokenizes this later.
- if self._keyword_found:
- return Tokenizer._tokenize(self, value, index - self._assigns)
- self._keyword_found = True
- return GherkinTokenizer().tokenize(value, KEYWORD)
-
-
+ _custom_tokenizer = None
+
+ def __init__(self, template_setter=None):
+ Tokenizer.__init__(self)
+ self._template_setter = template_setter
+
+ def _tokenize(self, value, index):
+ if index == 1 and self._template_setter:
+ self._template_setter(value)
+ if index == 0:
+ normalized = normalize(value)
+ if normalized in self._keyword_settings:
+ self._custom_tokenizer = KeywordCall(support_assign=False)
+ elif normalized in self._import_settings:
+ self._custom_tokenizer = ImportSetting()
+ elif normalized not in self._other_settings:
+ return ERROR
+ elif self._custom_tokenizer:
+ return self._custom_tokenizer.tokenize(value)
+ return Tokenizer._tokenize(self, value, index)
+
+
+class ImportSetting(Tokenizer):
+ _tokens = (IMPORT, ARGUMENT)
+
+
+class TestCaseSetting(Setting):
+ _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
+ 'template')
+ _import_settings = ()
+ _other_settings = ('documentation', 'tags', 'timeout')
+
+ def _tokenize(self, value, index):
+ if index == 0:
+ type = Setting._tokenize(self, value[1:-1], index)
+ return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
+ return Setting._tokenize(self, value, index)
+
+
+class KeywordSetting(TestCaseSetting):
+ _keyword_settings = ('teardown',)
+ _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
+
+
+class Variable(Tokenizer):
+ _tokens = (SYNTAX, ARGUMENT)
+
+ def _tokenize(self, value, index):
+ if index == 0 and not self._is_assign(value):
+ return ERROR
+ return Tokenizer._tokenize(self, value, index)
+
+
+class KeywordCall(Tokenizer):
+ _tokens = (KEYWORD, ARGUMENT)
+
+ def __init__(self, support_assign=True):
+ Tokenizer.__init__(self)
+ self._keyword_found = not support_assign
+ self._assigns = 0
+
+ def _tokenize(self, value, index):
+ if not self._keyword_found and self._is_assign(value):
+ self._assigns += 1
+ return SYNTAX # VariableTokenizer tokenizes this later.
+ if self._keyword_found:
+ return Tokenizer._tokenize(self, value, index - self._assigns)
+ self._keyword_found = True
+ return GherkinTokenizer().tokenize(value, KEYWORD)
+
+
class GherkinTokenizer:
- _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)
-
- def tokenize(self, value, token):
- match = self._gherkin_prefix.match(value)
- if not match:
- return [(value, token)]
- end = match.end()
- return [(value[:end], GHERKIN), (value[end:], token)]
-
-
-class TemplatedKeywordCall(Tokenizer):
- _tokens = (ARGUMENT,)
-
-
-class ForLoop(Tokenizer):
-
- def __init__(self):
- Tokenizer.__init__(self)
- self._in_arguments = False
-
- def _tokenize(self, value, index):
- token = self._in_arguments and ARGUMENT or SYNTAX
- if value.upper() in ('IN', 'IN RANGE'):
- self._in_arguments = True
- return token
-
-
+ _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)
+
+ def tokenize(self, value, token):
+ match = self._gherkin_prefix.match(value)
+ if not match:
+ return [(value, token)]
+ end = match.end()
+ return [(value[:end], GHERKIN), (value[end:], token)]
+
+
+class TemplatedKeywordCall(Tokenizer):
+ _tokens = (ARGUMENT,)
+
+
+class ForLoop(Tokenizer):
+
+ def __init__(self):
+ Tokenizer.__init__(self)
+ self._in_arguments = False
+
+ def _tokenize(self, value, index):
+ token = self._in_arguments and ARGUMENT or SYNTAX
+ if value.upper() in ('IN', 'IN RANGE'):
+ self._in_arguments = True
+ return token
+
+
class _Table:
- _tokenizer_class = None
-
- def __init__(self, prev_tokenizer=None):
- self._tokenizer = self._tokenizer_class()
- self._prev_tokenizer = prev_tokenizer
- self._prev_values_on_row = []
-
- def tokenize(self, value, index):
- if self._continues(value, index):
- self._tokenizer = self._prev_tokenizer
- yield value, SYNTAX
- else:
+ _tokenizer_class = None
+
+ def __init__(self, prev_tokenizer=None):
+ self._tokenizer = self._tokenizer_class()
+ self._prev_tokenizer = prev_tokenizer
+ self._prev_values_on_row = []
+
+ def tokenize(self, value, index):
+ if self._continues(value, index):
+ self._tokenizer = self._prev_tokenizer
+ yield value, SYNTAX
+ else:
yield from self._tokenize(value, index)
- self._prev_values_on_row.append(value)
-
- def _continues(self, value, index):
- return value == '...' and all(self._is_empty(t)
- for t in self._prev_values_on_row)
-
- def _is_empty(self, value):
- return value in ('', '\\')
-
- def _tokenize(self, value, index):
- return self._tokenizer.tokenize(value)
-
- def end_row(self):
- self.__init__(prev_tokenizer=self._tokenizer)
-
-
-class UnknownTable(_Table):
- _tokenizer_class = Comment
-
- def _continues(self, value, index):
- return False
-
-
-class VariableTable(_Table):
- _tokenizer_class = Variable
-
-
-class SettingTable(_Table):
- _tokenizer_class = Setting
-
- def __init__(self, template_setter, prev_tokenizer=None):
- _Table.__init__(self, prev_tokenizer)
- self._template_setter = template_setter
-
- def _tokenize(self, value, index):
- if index == 0 and normalize(value) == 'testtemplate':
- self._tokenizer = Setting(self._template_setter)
- return _Table._tokenize(self, value, index)
-
- def end_row(self):
- self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
-
-
-class TestCaseTable(_Table):
- _setting_class = TestCaseSetting
- _test_template = None
- _default_template = None
-
- @property
- def _tokenizer_class(self):
- if self._test_template or (self._default_template and
- self._test_template is not False):
- return TemplatedKeywordCall
- return KeywordCall
-
- def _continues(self, value, index):
- return index > 0 and _Table._continues(self, value, index)
-
- def _tokenize(self, value, index):
- if index == 0:
- if value:
- self._test_template = None
- return GherkinTokenizer().tokenize(value, TC_KW_NAME)
- if index == 1 and self._is_setting(value):
- if self._is_template(value):
- self._test_template = False
- self._tokenizer = self._setting_class(self.set_test_template)
- else:
- self._tokenizer = self._setting_class()
- if index == 1 and self._is_for_loop(value):
- self._tokenizer = ForLoop()
- if index == 1 and self._is_empty(value):
- return [(value, SYNTAX)]
- return _Table._tokenize(self, value, index)
-
- def _is_setting(self, value):
- return value.startswith('[') and value.endswith(']')
-
- def _is_template(self, value):
- return normalize(value) == '[template]'
-
- def _is_for_loop(self, value):
- return value.startswith(':') and normalize(value, remove=':') == 'for'
-
- def set_test_template(self, template):
- self._test_template = self._is_template_set(template)
-
- def set_default_template(self, template):
- self._default_template = self._is_template_set(template)
-
- def _is_template_set(self, template):
- return normalize(template) not in ('', '\\', 'none', '${empty}')
-
-
-class KeywordTable(TestCaseTable):
- _tokenizer_class = KeywordCall
- _setting_class = KeywordSetting
-
- def _is_template(self, value):
- return False
-
-
-# Following code copied directly from Robot Framework 2.7.5.
-
-class VariableSplitter:
-
- def __init__(self, string, identifiers):
- self.identifier = None
- self.base = None
- self.index = None
- self.start = -1
- self.end = -1
- self._identifiers = identifiers
- self._may_have_internal_variables = False
- try:
- self._split(string)
- except ValueError:
- pass
- else:
- self._finalize()
-
- def get_replaced_base(self, variables):
- if self._may_have_internal_variables:
- return variables.replace_string(self.base)
- return self.base
-
- def _finalize(self):
- self.identifier = self._variable_chars[0]
- self.base = ''.join(self._variable_chars[2:-1])
- self.end = self.start + len(self._variable_chars)
- if self._has_list_or_dict_variable_index():
- self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
- self.end += len(self._list_and_dict_variable_index_chars)
-
- def _has_list_or_dict_variable_index(self):
- return self._list_and_dict_variable_index_chars\
- and self._list_and_dict_variable_index_chars[-1] == ']'
-
- def _split(self, string):
- start_index, max_index = self._find_variable(string)
- self.start = start_index
- self._open_curly = 1
- self._state = self._variable_state
- self._variable_chars = [string[start_index], '{']
- self._list_and_dict_variable_index_chars = []
- self._string = string
- start_index += 2
- for index, char in enumerate(string[start_index:]):
- index += start_index # Giving start to enumerate only in Py 2.6+
- try:
- self._state(char, index)
- except StopIteration:
- return
- if index == max_index and not self._scanning_list_variable_index():
- return
-
- def _scanning_list_variable_index(self):
- return self._state in [self._waiting_list_variable_index_state,
- self._list_variable_index_state]
-
- def _find_variable(self, string):
- max_end_index = string.rfind('}')
- if max_end_index == -1:
- raise ValueError('No variable end found')
- if self._is_escaped(string, max_end_index):
- return self._find_variable(string[:max_end_index])
- start_index = self._find_start_index(string, 1, max_end_index)
- if start_index == -1:
- raise ValueError('No variable start found')
- return start_index, max_end_index
-
- def _find_start_index(self, string, start, end):
- index = string.find('{', start, end) - 1
- if index < 0:
- return -1
- if self._start_index_is_ok(string, index):
- return index
- return self._find_start_index(string, index+2, end)
-
- def _start_index_is_ok(self, string, index):
- return string[index] in self._identifiers\
- and not self._is_escaped(string, index)
-
- def _is_escaped(self, string, index):
- escaped = False
- while index > 0 and string[index-1] == '\\':
- index -= 1
- escaped = not escaped
- return escaped
-
- def _variable_state(self, char, index):
- self._variable_chars.append(char)
- if char == '}' and not self._is_escaped(self._string, index):
- self._open_curly -= 1
- if self._open_curly == 0:
- if not self._is_list_or_dict_variable():
- raise StopIteration
- self._state = self._waiting_list_variable_index_state
- elif char in self._identifiers:
- self._state = self._internal_variable_start_state
-
- def _is_list_or_dict_variable(self):
- return self._variable_chars[0] in ('@','&')
-
- def _internal_variable_start_state(self, char, index):
- self._state = self._variable_state
- if char == '{':
- self._variable_chars.append(char)
- self._open_curly += 1
- self._may_have_internal_variables = True
- else:
- self._variable_state(char, index)
-
- def _waiting_list_variable_index_state(self, char, index):
- if char != '[':
- raise StopIteration
- self._list_and_dict_variable_index_chars.append(char)
- self._state = self._list_variable_index_state
-
- def _list_variable_index_state(self, char, index):
- self._list_and_dict_variable_index_chars.append(char)
- if char == ']':
- raise StopIteration
+ self._prev_values_on_row.append(value)
+
+ def _continues(self, value, index):
+ return value == '...' and all(self._is_empty(t)
+ for t in self._prev_values_on_row)
+
+ def _is_empty(self, value):
+ return value in ('', '\\')
+
+ def _tokenize(self, value, index):
+ return self._tokenizer.tokenize(value)
+
+ def end_row(self):
+ self.__init__(prev_tokenizer=self._tokenizer)
+
+
+class UnknownTable(_Table):
+ _tokenizer_class = Comment
+
+ def _continues(self, value, index):
+ return False
+
+
+class VariableTable(_Table):
+ _tokenizer_class = Variable
+
+
+class SettingTable(_Table):
+ _tokenizer_class = Setting
+
+ def __init__(self, template_setter, prev_tokenizer=None):
+ _Table.__init__(self, prev_tokenizer)
+ self._template_setter = template_setter
+
+ def _tokenize(self, value, index):
+ if index == 0 and normalize(value) == 'testtemplate':
+ self._tokenizer = Setting(self._template_setter)
+ return _Table._tokenize(self, value, index)
+
+ def end_row(self):
+ self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
+
+
+class TestCaseTable(_Table):
+ _setting_class = TestCaseSetting
+ _test_template = None
+ _default_template = None
+
+ @property
+ def _tokenizer_class(self):
+ if self._test_template or (self._default_template and
+ self._test_template is not False):
+ return TemplatedKeywordCall
+ return KeywordCall
+
+ def _continues(self, value, index):
+ return index > 0 and _Table._continues(self, value, index)
+
+ def _tokenize(self, value, index):
+ if index == 0:
+ if value:
+ self._test_template = None
+ return GherkinTokenizer().tokenize(value, TC_KW_NAME)
+ if index == 1 and self._is_setting(value):
+ if self._is_template(value):
+ self._test_template = False
+ self._tokenizer = self._setting_class(self.set_test_template)
+ else:
+ self._tokenizer = self._setting_class()
+ if index == 1 and self._is_for_loop(value):
+ self._tokenizer = ForLoop()
+ if index == 1 and self._is_empty(value):
+ return [(value, SYNTAX)]
+ return _Table._tokenize(self, value, index)
+
+ def _is_setting(self, value):
+ return value.startswith('[') and value.endswith(']')
+
+ def _is_template(self, value):
+ return normalize(value) == '[template]'
+
+ def _is_for_loop(self, value):
+ return value.startswith(':') and normalize(value, remove=':') == 'for'
+
+ def set_test_template(self, template):
+ self._test_template = self._is_template_set(template)
+
+ def set_default_template(self, template):
+ self._default_template = self._is_template_set(template)
+
+ def _is_template_set(self, template):
+ return normalize(template) not in ('', '\\', 'none', '${empty}')
+
+
+class KeywordTable(TestCaseTable):
+ _tokenizer_class = KeywordCall
+ _setting_class = KeywordSetting
+
+ def _is_template(self, value):
+ return False
+
+
+# Following code copied directly from Robot Framework 2.7.5.
+
+class VariableSplitter:
+
+ def __init__(self, string, identifiers):
+ self.identifier = None
+ self.base = None
+ self.index = None
+ self.start = -1
+ self.end = -1
+ self._identifiers = identifiers
+ self._may_have_internal_variables = False
+ try:
+ self._split(string)
+ except ValueError:
+ pass
+ else:
+ self._finalize()
+
+ def get_replaced_base(self, variables):
+ if self._may_have_internal_variables:
+ return variables.replace_string(self.base)
+ return self.base
+
+ def _finalize(self):
+ self.identifier = self._variable_chars[0]
+ self.base = ''.join(self._variable_chars[2:-1])
+ self.end = self.start + len(self._variable_chars)
+ if self._has_list_or_dict_variable_index():
+ self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
+ self.end += len(self._list_and_dict_variable_index_chars)
+
+ def _has_list_or_dict_variable_index(self):
+ return self._list_and_dict_variable_index_chars\
+ and self._list_and_dict_variable_index_chars[-1] == ']'
+
+ def _split(self, string):
+ start_index, max_index = self._find_variable(string)
+ self.start = start_index
+ self._open_curly = 1
+ self._state = self._variable_state
+ self._variable_chars = [string[start_index], '{']
+ self._list_and_dict_variable_index_chars = []
+ self._string = string
+ start_index += 2
+ for index, char in enumerate(string[start_index:]):
+ index += start_index # Giving start to enumerate only in Py 2.6+
+ try:
+ self._state(char, index)
+ except StopIteration:
+ return
+ if index == max_index and not self._scanning_list_variable_index():
+ return
+
+ def _scanning_list_variable_index(self):
+ return self._state in [self._waiting_list_variable_index_state,
+ self._list_variable_index_state]
+
+ def _find_variable(self, string):
+ max_end_index = string.rfind('}')
+ if max_end_index == -1:
+ raise ValueError('No variable end found')
+ if self._is_escaped(string, max_end_index):
+ return self._find_variable(string[:max_end_index])
+ start_index = self._find_start_index(string, 1, max_end_index)
+ if start_index == -1:
+ raise ValueError('No variable start found')
+ return start_index, max_end_index
+
+ def _find_start_index(self, string, start, end):
+ index = string.find('{', start, end) - 1
+ if index < 0:
+ return -1
+ if self._start_index_is_ok(string, index):
+ return index
+ return self._find_start_index(string, index+2, end)
+
+ def _start_index_is_ok(self, string, index):
+ return string[index] in self._identifiers\
+ and not self._is_escaped(string, index)
+
+ def _is_escaped(self, string, index):
+ escaped = False
+ while index > 0 and string[index-1] == '\\':
+ index -= 1
+ escaped = not escaped
+ return escaped
+
+ def _variable_state(self, char, index):
+ self._variable_chars.append(char)
+ if char == '}' and not self._is_escaped(self._string, index):
+ self._open_curly -= 1
+ if self._open_curly == 0:
+ if not self._is_list_or_dict_variable():
+ raise StopIteration
+ self._state = self._waiting_list_variable_index_state
+ elif char in self._identifiers:
+ self._state = self._internal_variable_start_state
+
+ def _is_list_or_dict_variable(self):
+ return self._variable_chars[0] in ('@','&')
+
+ def _internal_variable_start_state(self, char, index):
+ self._state = self._variable_state
+ if char == '{':
+ self._variable_chars.append(char)
+ self._open_curly += 1
+ self._may_have_internal_variables = True
+ else:
+ self._variable_state(char, index)
+
+ def _waiting_list_variable_index_state(self, char, index):
+ if char != '[':
+ raise StopIteration
+ self._list_and_dict_variable_index_chars.append(char)
+ self._state = self._list_variable_index_state
+
+ def _list_variable_index_state(self, char, index):
+ self._list_and_dict_variable_index_chars.append(char)
+ if char == ']':
+ raise StopIteration
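For reference, a small self-contained sketch (not part of the commit) of the Gherkin-prefix splitting performed by the GherkinTokenizer in the hunk above; the 'GHERKIN' and 'KEYWORD' strings below are stand-ins for the real token types defined elsewhere in this module.

    import re

    # Mirrors GherkinTokenizer._gherkin_prefix from the hunk above.
    _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)

    def split_gherkin(value, token='KEYWORD'):
        # Split a leading BDD prefix off a keyword name, as the tokenizer does.
        match = _gherkin_prefix.match(value)
        if not match:
            return [(value, token)]
        end = match.end()
        return [(value[:end], 'GHERKIN'), (value[end:], token)]

    print(split_gherkin('Given user is logged in'))
    # -> [('Given ', 'GHERKIN'), ('user is logged in', 'KEYWORD')]
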
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ruby.py b/contrib/python/Pygments/py3/pygments/lexers/ruby.py
index 2b2f923eb9..38e80f0913 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ruby.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ruby.py
@@ -1,334 +1,334 @@
-"""
- pygments.lexers.ruby
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Ruby and related languages.
-
+"""
+ pygments.lexers.ruby
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Ruby and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
- bygroups, default, LexerContext, do_insertions, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Generic
-from pygments.util import shebang_matches
-
-__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
-
-line_re = re.compile('.*?\n')
-
-
-RUBY_OPERATORS = (
- '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
- '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
-)
-
-
-class RubyLexer(ExtendedRegexLexer):
- """
- For `Ruby <http://www.ruby-lang.org>`_ source code.
- """
-
- name = 'Ruby'
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
+ bygroups, default, LexerContext, do_insertions, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
+
+line_re = re.compile('.*?\n')
+
+
+RUBY_OPERATORS = (
+ '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
+ '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
+)
+
+
+class RubyLexer(ExtendedRegexLexer):
+ """
+ For `Ruby <http://www.ruby-lang.org>`_ source code.
+ """
+
+ name = 'Ruby'
aliases = ['ruby', 'rb', 'duby']
- filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+ filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
'*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
- mimetypes = ['text/x-ruby', 'application/x-ruby']
-
- flags = re.DOTALL | re.MULTILINE
-
- def heredoc_callback(self, match, ctx):
- # okay, this is the hardest part of parsing Ruby...
+ mimetypes = ['text/x-ruby', 'application/x-ruby']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ def heredoc_callback(self, match, ctx):
+ # okay, this is the hardest part of parsing Ruby...
# match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
-
- start = match.start(1)
+
+ start = match.start(1)
yield start, Operator, match.group(1) # <<[-~]?
yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
yield match.start(3), String.Delimiter, match.group(3) # heredoc name
yield match.start(4), String.Heredoc, match.group(4) # quote again
-
- heredocstack = ctx.__dict__.setdefault('heredocstack', [])
- outermost = not bool(heredocstack)
+
+ heredocstack = ctx.__dict__.setdefault('heredocstack', [])
+ outermost = not bool(heredocstack)
heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
-
- ctx.pos = match.start(5)
- ctx.end = match.end(5)
+
+ ctx.pos = match.start(5)
+ ctx.end = match.end(5)
# this may find other heredocs, so limit the recursion depth
if len(heredocstack) < 100:
yield from self.get_tokens_unprocessed(context=ctx)
else:
yield ctx.pos, String.Heredoc, match.group(5)
- ctx.pos = match.end()
-
- if outermost:
- # this is the outer heredoc again, now we can process them all
- for tolerant, hdname in heredocstack:
- lines = []
- for match in line_re.finditer(ctx.text, ctx.pos):
- if tolerant:
- check = match.group().strip()
- else:
- check = match.group().rstrip()
- if check == hdname:
- for amatch in lines:
- yield amatch.start(), String.Heredoc, amatch.group()
+ ctx.pos = match.end()
+
+ if outermost:
+ # this is the outer heredoc again, now we can process them all
+ for tolerant, hdname in heredocstack:
+ lines = []
+ for match in line_re.finditer(ctx.text, ctx.pos):
+ if tolerant:
+ check = match.group().strip()
+ else:
+ check = match.group().rstrip()
+ if check == hdname:
+ for amatch in lines:
+ yield amatch.start(), String.Heredoc, amatch.group()
yield match.start(), String.Delimiter, match.group()
- ctx.pos = match.end()
- break
- else:
- lines.append(match)
- else:
- # end of heredoc not found -- error!
- for amatch in lines:
- yield amatch.start(), Error, amatch.group()
- ctx.end = len(ctx.text)
- del heredocstack[:]
-
- def gen_rubystrings_rules():
- def intp_regex_callback(self, match, ctx):
- yield match.start(1), String.Regex, match.group(1) # begin
- nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
- ctx.pos = match.end()
-
- def intp_string_callback(self, match, ctx):
- yield match.start(1), String.Other, match.group(1)
- nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Other, match.group(4) # end
- ctx.pos = match.end()
-
- states = {}
- states['strings'] = [
- # easy ones
- (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
- (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
+ ctx.pos = match.end()
+ break
+ else:
+ lines.append(match)
+ else:
+ # end of heredoc not found -- error!
+ for amatch in lines:
+ yield amatch.start(), Error, amatch.group()
+ ctx.end = len(ctx.text)
+ del heredocstack[:]
+
+ def gen_rubystrings_rules():
+ def intp_regex_callback(self, match, ctx):
+ yield match.start(1), String.Regex, match.group(1) # begin
+ nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
+ ctx.pos = match.end()
+
+ def intp_string_callback(self, match, ctx):
+ yield match.start(1), String.Other, match.group(1)
+ nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Other, match.group(4) # end
+ ctx.pos = match.end()
+
+ states = {}
+ states['strings'] = [
+ # easy ones
+ (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
+ (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
(r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
- (r':"', String.Symbol, 'simple-sym'),
- (r'([a-zA-Z_]\w*)(:)(?!:)',
- bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
+ (r':"', String.Symbol, 'simple-sym'),
+ (r'([a-zA-Z_]\w*)(:)(?!:)',
+ bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
(r'"', String.Double, 'simple-string-double'),
(r"'", String.Single, 'simple-string-single'),
- (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
- ]
-
+ (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
+ ]
+
# quoted string and symbol
for name, ttype, end in ('string-double', String.Double, '"'), \
('string-single', String.Single, "'"),\
- ('sym', String.Symbol, '"'), \
- ('backtick', String.Backtick, '`'):
- states['simple-'+name] = [
- include('string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
- (r'[\\#]', ttype),
- (end, ttype, '#pop'),
- ]
-
- # braced quoted strings
- for lbrace, rbrace, bracecc, name in \
- ('\\{', '\\}', '{}', 'cb'), \
- ('\\[', '\\]', '\\[\\]', 'sb'), \
- ('\\(', '\\)', '()', 'pa'), \
- ('<', '>', '<>', 'ab'):
- states[name+'-intp-string'] = [
- (r'\\[\\' + bracecc + ']', String.Other),
- (lbrace, String.Other, '#push'),
- (rbrace, String.Other, '#pop'),
- include('string-intp-escaped'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
- name+'-intp-string'))
- states[name+'-string'] = [
- (r'\\[\\' + bracecc + ']', String.Other),
- (lbrace, String.Other, '#push'),
- (rbrace, String.Other, '#pop'),
- (r'[\\#' + bracecc + ']', String.Other),
- (r'[^\\#' + bracecc + ']+', String.Other),
- ]
- states['strings'].append((r'%[qsw]' + lbrace, String.Other,
- name+'-string'))
- states[name+'-regex'] = [
- (r'\\[\\' + bracecc + ']', String.Regex),
- (lbrace, String.Regex, '#push'),
- (rbrace + '[mixounse]*', String.Regex, '#pop'),
- include('string-intp'),
- (r'[\\#' + bracecc + ']', String.Regex),
- (r'[^\\#' + bracecc + ']+', String.Regex),
- ]
- states['strings'].append((r'%r' + lbrace, String.Regex,
- name+'-regex'))
-
- # these must come after %<brace>!
- states['strings'] += [
- # %r regex
- (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
- intp_regex_callback),
- # regular fancy strings with qsw
- (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
- (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- # special forms of fancy strings after operators or
- # in method calls with braces
- (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # and because of fixed width lookbehinds the whole thing a
- # second time for line startings...
- (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # all regular fancy strings without qsw
- (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- ]
-
- return states
-
- tokens = {
- 'root': [
- (r'\A#!.+?$', Comment.Hashbang),
- (r'#.*?$', Comment.Single),
- (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
- # keywords
- (words((
- 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
- 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
- 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
- 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
- Keyword),
- # start of function, class and module names
- (r'(module)(\s+)([a-zA-Z_]\w*'
- r'(?:::[a-zA-Z_]\w*)*)',
- bygroups(Keyword, Text, Name.Namespace)),
- (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- # special methods
- (words((
- 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
- 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
- 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
- suffix=r'\b'),
- Keyword.Pseudo),
- (r'(not|and|or)\b', Operator.Word),
- (words((
- 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
- 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
- 'private_method_defined', 'protected_method_defined',
- 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
- Name.Builtin),
- (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
- (words((
- 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
- 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
- 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
- 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
- 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
- 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
- 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
- 'instance_method', 'instance_methods',
- 'instance_variable_get', 'instance_variable_set', 'instance_variables',
- 'lambda', 'load', 'local_variables', 'loop',
- 'method', 'method_missing', 'methods', 'module_eval', 'name',
- 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
- 'private_instance_methods',
- 'private_methods', 'proc', 'protected_instance_methods',
- 'protected_methods', 'public_class_method',
- 'public_instance_methods', 'public_methods',
- 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
- 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
- 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
- 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
- 'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
- Name.Builtin),
- (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
- # normal heredocs
+ ('sym', String.Symbol, '"'), \
+ ('backtick', String.Backtick, '`'):
+ states['simple-'+name] = [
+ include('string-intp-escaped'),
+ (r'[^\\%s#]+' % end, ttype),
+ (r'[\\#]', ttype),
+ (end, ttype, '#pop'),
+ ]
+
+ # braced quoted strings
+ for lbrace, rbrace, bracecc, name in \
+ ('\\{', '\\}', '{}', 'cb'), \
+ ('\\[', '\\]', '\\[\\]', 'sb'), \
+ ('\\(', '\\)', '()', 'pa'), \
+ ('<', '>', '<>', 'ab'):
+ states[name+'-intp-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ include('string-intp-escaped'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
+ name+'-intp-string'))
+ states[name+'-string'] = [
+ (r'\\[\\' + bracecc + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ (r'[\\#' + bracecc + ']', String.Other),
+ (r'[^\\#' + bracecc + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[qsw]' + lbrace, String.Other,
+ name+'-string'))
+ states[name+'-regex'] = [
+ (r'\\[\\' + bracecc + ']', String.Regex),
+ (lbrace, String.Regex, '#push'),
+ (rbrace + '[mixounse]*', String.Regex, '#pop'),
+ include('string-intp'),
+ (r'[\\#' + bracecc + ']', String.Regex),
+ (r'[^\\#' + bracecc + ']+', String.Regex),
+ ]
+ states['strings'].append((r'%r' + lbrace, String.Regex,
+ name+'-regex'))
+
+ # these must come after %<brace>!
+ states['strings'] += [
+ # %r regex
+ (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
+ intp_regex_callback),
+ # regular fancy strings with qsw
+ (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
+ (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ # special forms of fancy strings after operators or
+ # in method calls with braces
+ (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # and because of fixed width lookbehinds the whole thing a
+ # second time for line startings...
+ (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # all regular fancy strings without qsw
+ (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ ]
+
+ return states
+
+ tokens = {
+ 'root': [
+ (r'\A#!.+?$', Comment.Hashbang),
+ (r'#.*?$', Comment.Single),
+ (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
+ # keywords
+ (words((
+ 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
+ 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
+ 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
+ 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ # start of function, class and module names
+ (r'(module)(\s+)([a-zA-Z_]\w*'
+ r'(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ # special methods
+ (words((
+ 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
+ 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
+ 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
+ suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'(not|and|or)\b', Operator.Word),
+ (words((
+ 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
+ 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
+ 'private_method_defined', 'protected_method_defined',
+ 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
+ Name.Builtin),
+ (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
+ (words((
+ 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
+ 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
+ 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
+ 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
+ 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
+ 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
+ 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
+ 'instance_method', 'instance_methods',
+ 'instance_variable_get', 'instance_variable_set', 'instance_variables',
+ 'lambda', 'load', 'local_variables', 'loop',
+ 'method', 'method_missing', 'methods', 'module_eval', 'name',
+ 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
+ 'private_instance_methods',
+ 'private_methods', 'proc', 'protected_instance_methods',
+ 'protected_methods', 'public_class_method',
+ 'public_instance_methods', 'public_methods',
+ 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
+ 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
+ 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
+ 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
+ 'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
+ # normal heredocs
(r'(?<!\w)(<<[-~]?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
- heredoc_callback),
- # empty string heredocs
+ heredoc_callback),
+ # empty string heredocs
(r'(<<[-~]?)("|\')()(\2)(.*?\n)', heredoc_callback),
- (r'__END__', Comment.Preproc, 'end-part'),
- # multiline regex (after keywords or assignments)
- (r'(?:^|(?<=[=<>~!:])|'
- r'(?<=(?:\s|;)when\s)|'
- r'(?<=(?:\s|;)or\s)|'
- r'(?<=(?:\s|;)and\s)|'
- r'(?<=\.index\s)|'
- r'(?<=\.scan\s)|'
- r'(?<=\.sub\s)|'
- r'(?<=\.sub!\s)|'
- r'(?<=\.gsub\s)|'
- r'(?<=\.gsub!\s)|'
- r'(?<=\.match\s)|'
- r'(?<=(?:\s|;)if\s)|'
- r'(?<=(?:\s|;)elsif\s)|'
- r'(?<=^when\s)|'
- r'(?<=^index\s)|'
- r'(?<=^scan\s)|'
- r'(?<=^sub\s)|'
- r'(?<=^gsub\s)|'
- r'(?<=^sub!\s)|'
- r'(?<=^gsub!\s)|'
- r'(?<=^match\s)|'
- r'(?<=^if\s)|'
- r'(?<=^elsif\s)'
- r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
- # multiline regex (in method calls or subscripts)
- (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
- # multiline regex (this time the funny no whitespace rule)
- (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
- 'multiline-regex'),
- # lex numbers and ignore following regular expressions which
- # are division operators in fact (grrrr. i hate that. any
- # better ideas?)
- # since pygments 0.7 we also eat a "?" operator after numbers
- # so that the char operator does not work. Chars are not allowed
- # there so that you can use the ternary operator.
- # stupid example:
- # x>=0?n[x]:""
- (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- # Names
- (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
- (r'\$\w+', Name.Variable.Global),
- (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
- (r'\$-[0adFiIlpvw]', Name.Variable.Global),
- (r'::', Operator),
- include('strings'),
- # chars
- (r'\?(\\[MC]-)*' # modifiers
- r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
- r'(?!\w)',
- String.Char),
- (r'[A-Z]\w+', Name.Constant),
- # this is needed because ruby attributes can look
- # like keywords (class) or like this: ` ?!?
- (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
- bygroups(Operator, Name.Operator)),
- (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
- bygroups(Operator, Name)),
- (r'[a-zA-Z_]\w*[!?]?', Name),
- (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&!^|~]=?', Operator),
- (r'[(){};,/?:\\]', Punctuation),
- (r'\s+', Text)
- ],
- 'funcname': [
- (r'\(', Punctuation, 'defexpr'),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ # multiline regex (after keywords or assignments)
+ (r'(?:^|(?<=[=<>~!:])|'
+ r'(?<=(?:\s|;)when\s)|'
+ r'(?<=(?:\s|;)or\s)|'
+ r'(?<=(?:\s|;)and\s)|'
+ r'(?<=\.index\s)|'
+ r'(?<=\.scan\s)|'
+ r'(?<=\.sub\s)|'
+ r'(?<=\.sub!\s)|'
+ r'(?<=\.gsub\s)|'
+ r'(?<=\.gsub!\s)|'
+ r'(?<=\.match\s)|'
+ r'(?<=(?:\s|;)if\s)|'
+ r'(?<=(?:\s|;)elsif\s)|'
+ r'(?<=^when\s)|'
+ r'(?<=^index\s)|'
+ r'(?<=^scan\s)|'
+ r'(?<=^sub\s)|'
+ r'(?<=^gsub\s)|'
+ r'(?<=^sub!\s)|'
+ r'(?<=^gsub!\s)|'
+ r'(?<=^match\s)|'
+ r'(?<=^if\s)|'
+ r'(?<=^elsif\s)'
+ r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
+ # multiline regex (in method calls or subscripts)
+ (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
+ # multiline regex (this time the funny no whitespace rule)
+ (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
+ 'multiline-regex'),
+ # lex numbers and ignore following regular expressions which
+ # are division operators in fact (grrrr. i hate that. any
+ # better ideas?)
+ # since pygments 0.7 we also eat a "?" operator after numbers
+ # so that the char operator does not work. Chars are not allowed
+ # there so that you can use the ternary operator.
+ # stupid example:
+ # x>=0?n[x]:""
+ (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ # Names
+ (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
+ (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+ (r'\$\w+', Name.Variable.Global),
+ (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
+ (r'\$-[0adFiIlpvw]', Name.Variable.Global),
+ (r'::', Operator),
+ include('strings'),
+ # chars
+ (r'\?(\\[MC]-)*' # modifiers
+ r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
+ r'(?!\w)',
+ String.Char),
+ (r'[A-Z]\w+', Name.Constant),
+ # this is needed because ruby attributes can look
+ # like keywords (class) or like this: ` ?!?
+ (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
+ bygroups(Operator, Name.Operator)),
+ (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
+ bygroups(Operator, Name)),
+ (r'[a-zA-Z_]\w*[!?]?', Name),
+ (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&!^|~]=?', Operator),
+ (r'[(){};,/?:\\]', Punctuation),
+ (r'\s+', Text)
+ ],
+ 'funcname': [
+ (r'\(', Punctuation, 'defexpr'),
(r'(?:([a-zA-Z_]\w*)(\.))?' # optional scope name, like "self."
r'('
r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?' # method name
@@ -336,128 +336,128 @@ class RubyLexer(ExtendedRegexLexer):
r'|\[\]=?' # or element reference/assignment override
r'|`' # or the undocumented backtick override
r')',
- bygroups(Name.Class, Operator, Name.Function), '#pop'),
- default('#pop')
- ],
- 'classname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'<<', Operator, '#pop'),
- (r'[A-Z_]\w*', Name.Class, '#pop'),
- default('#pop')
- ],
- 'defexpr': [
- (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
- (r'\(', Operator, '#push'),
- include('root')
- ],
- 'in-intp': [
- (r'\{', String.Interpol, '#push'),
- (r'\}', String.Interpol, '#pop'),
- include('root'),
- ],
- 'string-intp': [
- (r'#\{', String.Interpol, 'in-intp'),
- (r'#@@?[a-zA-Z_]\w*', String.Interpol),
- (r'#\$[a-zA-Z_]\w*', String.Interpol)
- ],
- 'string-intp-escaped': [
- include('string-intp'),
- (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
- String.Escape)
- ],
- 'interpolated-regex': [
- include('string-intp'),
- (r'[\\#]', String.Regex),
- (r'[^\\#]+', String.Regex),
- ],
- 'interpolated-string': [
- include('string-intp'),
- (r'[\\#]', String.Other),
- (r'[^\\#]+', String.Other),
- ],
- 'multiline-regex': [
- include('string-intp'),
- (r'\\\\', String.Regex),
- (r'\\/', String.Regex),
- (r'[\\#]', String.Regex),
- (r'[^\\/#]+', String.Regex),
- (r'/[mixounse]*', String.Regex, '#pop'),
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
- tokens.update(gen_rubystrings_rules())
-
- def analyse_text(text):
- return shebang_matches(text, r'ruby(1\.\d)?')
-
-
-class RubyConsoleLexer(Lexer):
- """
- For Ruby interactive console (**irb**) output like:
-
- .. sourcecode:: rbcon
-
- irb(main):001:0> a = 1
- => 1
- irb(main):002:0> puts a
- 1
- => nil
- """
- name = 'Ruby irb session'
- aliases = ['rbcon', 'irb']
- mimetypes = ['text/x-ruby-shellsession']
-
+ bygroups(Name.Class, Operator, Name.Function), '#pop'),
+ default('#pop')
+ ],
+ 'classname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'<<', Operator, '#pop'),
+ (r'[A-Z_]\w*', Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'defexpr': [
+ (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
+ (r'\(', Operator, '#push'),
+ include('root')
+ ],
+ 'in-intp': [
+ (r'\{', String.Interpol, '#push'),
+ (r'\}', String.Interpol, '#pop'),
+ include('root'),
+ ],
+ 'string-intp': [
+ (r'#\{', String.Interpol, 'in-intp'),
+ (r'#@@?[a-zA-Z_]\w*', String.Interpol),
+ (r'#\$[a-zA-Z_]\w*', String.Interpol)
+ ],
+ 'string-intp-escaped': [
+ include('string-intp'),
+ (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
+ String.Escape)
+ ],
+ 'interpolated-regex': [
+ include('string-intp'),
+ (r'[\\#]', String.Regex),
+ (r'[^\\#]+', String.Regex),
+ ],
+ 'interpolated-string': [
+ include('string-intp'),
+ (r'[\\#]', String.Other),
+ (r'[^\\#]+', String.Other),
+ ],
+ 'multiline-regex': [
+ include('string-intp'),
+ (r'\\\\', String.Regex),
+ (r'\\/', String.Regex),
+ (r'[\\#]', String.Regex),
+ (r'[^\\/#]+', String.Regex),
+ (r'/[mixounse]*', String.Regex, '#pop'),
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+ tokens.update(gen_rubystrings_rules())
+
+ def analyse_text(text):
+ return shebang_matches(text, r'ruby(1\.\d)?')
+
+
+class RubyConsoleLexer(Lexer):
+ """
+ For Ruby interactive console (**irb**) output like:
+
+ .. sourcecode:: rbcon
+
+ irb(main):001:0> a = 1
+ => 1
+ irb(main):002:0> puts a
+ 1
+ => nil
+ """
+ name = 'Ruby irb session'
+ aliases = ['rbcon', 'irb']
+ mimetypes = ['text/x-ruby-shellsession']
+
_prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
r'|>> |\?> ')
-
- def get_tokens_unprocessed(self, text):
- rblexer = RubyLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
+
+ def get_tokens_unprocessed(self, text):
+ rblexer = RubyLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
yield from do_insertions(
insertions, rblexer.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
yield from do_insertions(
insertions, rblexer.get_tokens_unprocessed(curcode))
-
-
-class FancyLexer(RegexLexer):
- """
- Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
-
- Fancy is a self-hosted, pure object-oriented, dynamic,
- class-based, concurrent general-purpose programming language
- running on Rubinius, the Ruby VM.
-
- .. versionadded:: 1.5
- """
- name = 'Fancy'
- filenames = ['*.fy', '*.fancypack']
- aliases = ['fancy', 'fy']
- mimetypes = ['text/x-fancysrc']
-
- tokens = {
- # copied from PerlLexer:
- 'balanced-regex': [
+
+
+class FancyLexer(RegexLexer):
+ """
+ Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
+
+ Fancy is a self-hosted, pure object-oriented, dynamic,
+ class-based, concurrent general-purpose programming language
+ running on Rubinius, the Ruby VM.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fancy'
+ filenames = ['*.fy', '*.fancypack']
+ aliases = ['fancy', 'fy']
+ mimetypes = ['text/x-fancysrc']
+
+ tokens = {
+ # copied from PerlLexer:
+ 'balanced-regex': [
(r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
(r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
(r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
(r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
(r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
@@ -465,59 +465,59 @@ class FancyLexer(RegexLexer):
(r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
(r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
(r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\s+', Text),
-
- # balanced delimiters (copied from PerlLexer):
+ ],
+ 'root': [
+ (r'\s+', Text),
+
+ # balanced delimiters (copied from PerlLexer):
(r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
(r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
(r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
(r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
(r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
-
- # Comments
- (r'#(.*?)\n', Comment.Single),
- # Symbols
- (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
- # Multi-line DoubleQuotedString
+ (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
+
+ # Comments
+ (r'#(.*?)\n', Comment.Single),
+ # Symbols
+ (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
+ # Multi-line DoubleQuotedString
(r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
- # DoubleQuotedString
+ # DoubleQuotedString
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # keywords
- (r'(def|class|try|catch|finally|retry|return|return_local|match|'
- r'case|->|=>)\b', Keyword),
- # constants
- (r'(self|super|nil|false|true)\b', Name.Constant),
- (r'[(){};,/?|:\\]', Punctuation),
- # names
- (words((
- 'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
- 'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
- 'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
- 'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
- Name.Builtin),
- # functions
- (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
- # operators, must be below functions
- (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
+ # keywords
+ (r'(def|class|try|catch|finally|retry|return|return_local|match|'
+ r'case|->|=>)\b', Keyword),
+ # constants
+ (r'(self|super|nil|false|true)\b', Name.Constant),
+ (r'[(){};,/?|:\\]', Punctuation),
+ # names
+ (words((
+ 'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
+ 'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
+ 'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
+ 'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
+ Name.Builtin),
+ # functions
+ (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
+ # operators, must be below functions
+ (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
(r'[A-Z]\w*', Name.Constant),
(r'@[a-zA-Z_]\w*', Name.Variable.Instance),
(r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- ('@@?', Operator),
+ ('@@?', Operator),
(r'[a-zA-Z_]\w*', Name),
- # numbers - / checks are necessary to avoid mismarking regexes,
- # see comment in RubyLexer
- (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ]
- }
+ # numbers - / checks are necessary to avoid mismarking regexes,
+ # see comment in RubyLexer
+ (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ]
+ }
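For reference, a minimal sketch (not part of this diff) of driving the RubyLexer and RubyConsoleLexer defined above through the standard Pygments API; the sample inputs are invented and the terminal formatter is just one of several available formatters.

    from pygments import highlight
    from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer
    from pygments.formatters import TerminalFormatter

    ruby_code = 'puts "sum: #{1 + 2}"\n'
    irb_session = 'irb(main):001:0> a = 1\n=> 1\n'

    # Highlight plain Ruby source and an irb transcript with the lexers above.
    print(highlight(ruby_code, RubyLexer(), TerminalFormatter()))
    print(highlight(irb_session, RubyConsoleLexer(), TerminalFormatter()))
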
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rust.py b/contrib/python/Pygments/py3/pygments/lexers/rust.py
index d01f73e4a4..3fa841afa4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rust.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rust.py
@@ -1,31 +1,31 @@
-"""
- pygments.lexers.rust
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the Rust language.
-
+"""
+ pygments.lexers.rust
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Rust language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Whitespace
-
-__all__ = ['RustLexer']
-
-
-class RustLexer(RegexLexer):
- """
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['RustLexer']
+
+
+class RustLexer(RegexLexer):
+ """
Lexer for the Rust programming language (version 1.47).
-
- .. versionadded:: 1.6
- """
- name = 'Rust'
- filenames = ['*.rs', '*.rs.in']
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rust'
+ filenames = ['*.rs', '*.rs.in']
aliases = ['rust', 'rs']
mimetypes = ['text/rust', 'text/x-rust']
-
+
keyword_types = (words((
'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool',
@@ -63,27 +63,27 @@ class RustLexer(RegexLexer):
'vec', 'write', 'writeln',
), suffix=r'!'), Name.Function.Magic)
- tokens = {
- 'root': [
- # rust allows a file to start with a shebang, but if the first line
+ tokens = {
+ 'root': [
+ # rust allows a file to start with a shebang, but if the first line
# starts with #![ then it's not a shebang but a crate attribute.
- (r'#![^[\r\n].*$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- # Whitespace and Comments
- (r'\n', Whitespace),
- (r'\s+', Whitespace),
- (r'//!.*?\n', String.Doc),
- (r'///(\n|[^/].*?\n)', String.Doc),
- (r'//(.*?)\n', Comment.Single),
- (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
- (r'/\*!', String.Doc, 'doccomment'),
- (r'/\*', Comment.Multiline, 'comment'),
-
- # Macro parameters
- (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
- # Keywords
+ (r'#![^[\r\n].*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ # Whitespace and Comments
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ (r'//!.*?\n', String.Doc),
+ (r'///(\n|[^/].*?\n)', String.Doc),
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
+ (r'/\*!', String.Doc, 'doccomment'),
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # Macro parameters
+ (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
+ # Keywords
(words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn',
'else', 'extern', 'for', 'if', 'impl', 'in', 'loop',
'match', 'move', 'mut', 'pub', 'ref', 'return', 'static',
@@ -92,10 +92,10 @@ class RustLexer(RegexLexer):
(words(('abstract', 'become', 'do', 'final', 'macro', 'override',
'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'),
suffix=r'\b'), Keyword.Reserved),
- (r'(true|false)\b', Keyword.Constant),
+ (r'(true|false)\b', Keyword.Constant),
(r'self\b', Name.Builtin.Pseudo),
(r'mod\b', Keyword, 'modname'),
- (r'let\b', Keyword.Declaration),
+ (r'let\b', Keyword.Declaration),
(r'fn\b', Keyword, 'funcname'),
(r'(struct|enum|type|union)\b', Keyword, 'typename'),
(r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
@@ -108,68 +108,68 @@ class RustLexer(RegexLexer):
(r'::\b', Text),
# Types in positions.
(r'(?::|->)', Text, 'typename'),
- # Labels
+ # Labels
(r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
bygroups(Keyword, Text.Whitespace, Name.Label)),
# Character literals
- (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
- String.Char),
- (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
- String.Char),
+ (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+ String.Char),
+ (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
+ String.Char),
# Binary literals
- (r'0b[01_]+', Number.Bin, 'number_lit'),
+ (r'0b[01_]+', Number.Bin, 'number_lit'),
# Octal literals
- (r'0o[0-7_]+', Number.Oct, 'number_lit'),
+ (r'0o[0-7_]+', Number.Oct, 'number_lit'),
# Hexadecimal literals
- (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
# Decimal literals
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
'number_lit'),
- (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
+ (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
# String literals
- (r'b"', String, 'bytestring'),
- (r'"', String, 'string'),
+ (r'b"', String, 'bytestring'),
+ (r'"', String, 'string'),
(r'(?s)b?r(#*)".*?"\1', String),
-
+
# Lifetime names
(r"'", Operator, 'lifetime'),
-
- # Operators and Punctuation
+
+ # Operators and Punctuation
(r'\.\.=?', Operator),
- (r'[{}()\[\],.;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?]', Operator),
-
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[+\-*/%&|<>^!~@=:?]', Operator),
+
# Identifiers
- (r'[a-zA-Z_]\w*', Name),
+ (r'[a-zA-Z_]\w*', Name),
# Raw identifiers
(r'r#[a-zA-Z_]\w*', Name),
-
- # Attributes
- (r'#!?\[', Comment.Preproc, 'attribute['),
+
+ # Attributes
+ (r'#!?\[', Comment.Preproc, 'attribute['),
# Misc
# Lone hashes: not used in Rust syntax, but allowed in macro
# arguments, most famously for quote::quote!()
(r'#', Text),
- ],
- 'comment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- 'doccomment': [
- (r'[^*/]+', String.Doc),
- (r'/\*', String.Doc, '#push'),
- (r'\*/', String.Doc, '#pop'),
- (r'[*/]', String.Doc),
- ],
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'doccomment': [
+ (r'[^*/]+', String.Doc),
+ (r'/\*', String.Doc, '#push'),
+ (r'\*/', String.Doc, '#pop'),
+ (r'[*/]', String.Doc),
+ ],
'modname': [
(r'\s+', Text),
(r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
@@ -194,29 +194,29 @@ class RustLexer(RegexLexer):
(r"[a-zA-Z_]+\w*", Name.Attribute),
default('#pop'),
],
- 'number_lit': [
- (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
- (r'f(32|64)', Keyword, '#pop'),
- default('#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
- r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String),
- ],
- 'bytestring': [
- (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
- include('string'),
- ],
- 'attribute_common': [
- (r'"', String, 'string'),
- (r'\[', Comment.Preproc, 'attribute['),
- ],
- 'attribute[': [
- include('attribute_common'),
+ 'number_lit': [
+ (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
+ (r'f(32|64)', Keyword, '#pop'),
+ default('#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
+ r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
+ (r'[^\\"]+', String),
+ (r'\\', String),
+ ],
+ 'bytestring': [
+ (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
+ include('string'),
+ ],
+ 'attribute_common': [
+ (r'"', String, 'string'),
+ (r'\[', Comment.Preproc, 'attribute['),
+ ],
+ 'attribute[': [
+ include('attribute_common'),
(r'\]', Comment.Preproc, '#pop'),
(r'[^"\]\[]+', Comment.Preproc),
- ],
- }
+ ],
+ }
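For reference, a minimal sketch (not part of this diff) showing the token stream the RustLexer above produces, for instance how an integer literal's type suffix is consumed by the 'number_lit' state; the input snippet is illustrative only.

    from pygments.lexers.rust import RustLexer

    # Print (token type, value) pairs for a tiny Rust snippet.
    for ttype, value in RustLexer().get_tokens('let x: u32 = 5u32;\n'):
        print(ttype, repr(value))
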
diff --git a/contrib/python/Pygments/py3/pygments/lexers/scripting.py b/contrib/python/Pygments/py3/pygments/lexers/scripting.py
index 9a1e63d66a..61808a1b36 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/scripting.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/scripting.py
@@ -1,948 +1,948 @@
-"""
- pygments.lexers.scripting
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for scripting and embedded languages.
-
+"""
+ pygments.lexers.scripting
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scripting and embedded languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
- words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Whitespace, Other
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
+ words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Whitespace, Other
from pygments.util import get_bool_opt, get_list_opt
-
-__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
- 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
+
+__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
+ 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
-
-
-class LuaLexer(RegexLexer):
- """
- For `Lua <http://www.lua.org>`_ source code.
-
- Additional options accepted:
-
- `func_name_highlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabled_modules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted.
-
- To get a list of allowed modules have a look into the
- `_lua_builtins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._lua_builtins import MODULES
- >>> MODULES.keys()
- ['string', 'coroutine', 'modules', 'io', 'basic', ...]
- """
-
- name = 'Lua'
- aliases = ['lua']
- filenames = ['*.lua', '*.wlua']
- mimetypes = ['text/x-lua', 'application/x-lua']
-
+
+
+class LuaLexer(RegexLexer):
+ """
+ For `Lua <http://www.lua.org>`_ source code.
+
+ Additional options accepted:
+
+ `func_name_highlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabled_modules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted.
+
+ To get a list of allowed modules have a look into the
+ `_lua_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._lua_builtins import MODULES
+ >>> MODULES.keys()
+ ['string', 'coroutine', 'modules', 'io', 'basic', ...]
+ """
+
+ name = 'Lua'
+ aliases = ['lua']
+ filenames = ['*.lua', '*.wlua']
+ mimetypes = ['text/x-lua', 'application/x-lua']
+
_comment_multiline = r'(?:--\[(?P<level>=*)\[[\w\W]*?\](?P=level)\])'
_comment_single = r'(?:--.*$)'
_space = r'(?:\s+)'
_s = r'(?:%s|%s|%s)' % (_comment_multiline, _comment_single, _space)
_name = r'(?:[^\W\d]\w*)'
- tokens = {
- 'root': [
+ tokens = {
+ 'root': [
# Lua allows a file to start with a shebang.
(r'#!.*', Comment.Preproc),
- default('base'),
- ],
+ default('base'),
+ ],
'ws': [
(_comment_multiline, Comment.Multiline),
(_comment_single, Comment.Single),
(_space, Text),
],
- 'base': [
+ 'base': [
include('ws'),
-
+
(r'(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?', Number.Hex),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'\d+', Number.Integer),
-
- # multiline strings
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
-
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ # multiline strings
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+
(r'::', Punctuation, 'label'),
(r'\.{3}', Punctuation),
(r'[=<>|~&+\-*/%#^]+|\.\.', Operator),
- (r'[\[\]{}().,:;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
-
- ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
+ (r'[\[\]{}().,:;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+
+ ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
r'while)\b', Keyword.Reserved),
(r'goto\b', Keyword.Reserved, 'goto'),
- (r'(local)\b', Keyword.Declaration),
- (r'(true|false|nil)\b', Keyword.Constant),
-
+ (r'(local)\b', Keyword.Declaration),
+ (r'(true|false|nil)\b', Keyword.Constant),
+
(r'(function)\b', Keyword.Reserved, 'funcname'),
-
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
-
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
-
- 'funcname': [
+
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+
+ 'funcname': [
include('ws'),
(r'[.:]', Punctuation),
(r'%s(?=%s*[.:])' % (_name, _s), Name.Class),
(_name, Name.Function, '#pop'),
- # inline function
+ # inline function
(r'\(', Punctuation, '#pop'),
- ],
-
+ ],
+
'goto': [
include('ws'),
(_name, Name.Label, '#pop'),
- ],
-
+ ],
+
'label': [
include('ws'),
(r'::', Punctuation, '#pop'),
(_name, Name.Label),
],
- 'stringescape': [
+ 'stringescape': [
(r'\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|'
r'u\{[0-9a-fA-F]+\})', String.Escape),
- ],
-
- 'sqs': [
+ ],
+
+ 'sqs': [
(r"'", String.Single, '#pop'),
(r"[^\\']+", String.Single),
- ],
-
- 'dqs': [
+ ],
+
+ 'dqs': [
(r'"', String.Double, '#pop'),
(r'[^\\"]+', String.Double),
- ]
- }
-
- def __init__(self, **options):
- self.func_name_highlighting = get_bool_opt(
- options, 'func_name_highlighting', True)
- self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
-
- self._functions = set()
- if self.func_name_highlighting:
- from pygments.lexers._lua_builtins import MODULES
+ ]
+ }
+
+ def __init__(self, **options):
+ self.func_name_highlighting = get_bool_opt(
+ options, 'func_name_highlighting', True)
+ self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
+
+ self._functions = set()
+ if self.func_name_highlighting:
+ from pygments.lexers._lua_builtins import MODULES
for mod, func in MODULES.items():
- if mod not in self.disabled_modules:
- self._functions.update(func)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- elif '.' in value:
- a, b = value.split('.')
- yield index, Name, a
+ if mod not in self.disabled_modules:
+ self._functions.update(func)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ elif '.' in value:
+ a, b = value.split('.')
+ yield index, Name, a
yield index + len(a), Punctuation, '.'
- yield index + len(a) + 1, Name, b
- continue
- yield index, token, value
-
-class MoonScriptLexer(LuaLexer):
- """
- For `MoonScript <http://moonscript.org>`_ source code.
-
- .. versionadded:: 1.5
- """
-
+ yield index + len(a) + 1, Name, b
+ continue
+ yield index, token, value
+
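As a quick sanity check of the options the docstring above describes, here is a minimal usage sketch (assuming the restored py2 package is importable as `pygments`; the sample Lua snippet and the TerminalFormatter choice are illustrative, not part of the diff):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import LuaLexer

    code = u'print(string.upper("hi"))'

    # With 'string' listed in disabled_modules, 'string.upper' is not tagged
    # Name.Builtin; it falls through to the dotted-name splitting in
    # get_tokens_unprocessed and is emitted as plain Name / Punctuation / Name.
    lexer = LuaLexer(func_name_highlighting=True, disabled_modules=['string'])
    print(highlight(code, lexer, TerminalFormatter()))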
+class MoonScriptLexer(LuaLexer):
+ """
+ For `MoonScript <http://moonscript.org>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
name = 'MoonScript'
aliases = ['moonscript', 'moon']
filenames = ['*.moon']
- mimetypes = ['text/x-moonscript', 'application/x-moonscript']
-
- tokens = {
- 'root': [
- (r'#!(.*?)$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- ('--.*$', Comment.Single),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'(?i)0x[0-9a-f]*', Number.Hex),
- (r'\d+', Number.Integer),
+ mimetypes = ['text/x-moonscript', 'application/x-moonscript']
+
+ tokens = {
+ 'root': [
+ (r'#!(.*?)$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ ('--.*$', Comment.Single),
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ (r'(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
(r'\n', Whitespace),
- (r'[^\S\n]+', Text),
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
- (r'(->|=>)', Name.Function),
- (r':[a-zA-Z_]\w*', Name.Variable),
- (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
- (r'[;,]', Punctuation),
- (r'[\[\]{}()]', Keyword.Type),
- (r'[a-zA-Z_]\w*:', Name.Variable),
- (words((
- 'class', 'extends', 'if', 'then', 'super', 'do', 'with',
- 'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
- 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
- 'break'), suffix=r'\b'),
- Keyword),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(and|or|not)\b', Operator.Word),
- (r'(self)\b', Name.Builtin.Pseudo),
- (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
- (r'[A-Z]\w*', Name.Class), # proper name
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
- 'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
- ],
- 'sqs': [
- ("'", String.Single, '#pop'),
+ (r'[^\S\n]+', Text),
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+ (r'(->|=>)', Name.Function),
+ (r':[a-zA-Z_]\w*', Name.Variable),
+ (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
+ (r'[;,]', Punctuation),
+ (r'[\[\]{}()]', Keyword.Type),
+ (r'[a-zA-Z_]\w*:', Name.Variable),
+ (words((
+ 'class', 'extends', 'if', 'then', 'super', 'do', 'with',
+ 'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
+ 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
+ 'break'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'(self)\b', Name.Builtin.Pseudo),
+ (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
+ (r'[A-Z]\w*', Name.Class), # proper name
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+ 'sqs': [
+ ("'", String.Single, '#pop'),
("[^']+", String)
- ],
- 'dqs': [
- ('"', String.Double, '#pop'),
+ ],
+ 'dqs': [
+ ('"', String.Double, '#pop'),
('[^"]+', String)
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # set . as Operator instead of Punctuation
- for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
- if token == Punctuation and value == ".":
- token = Operator
- yield index, token, value
-
-
-class ChaiscriptLexer(RegexLexer):
- """
- For `ChaiScript <http://chaiscript.com/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'ChaiScript'
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # set . as Operator instead of Punctuation
+ for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
+ if token == Punctuation and value == ".":
+ token = Operator
+ yield index, token, value
+
+
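The overridden get_tokens_unprocessed above only re-tags the '.' produced by LuaLexer's dotted-name splitting; a small sketch of the effect (the sample MoonScript line is illustrative):

    from pygments.lexers import MoonScriptLexer

    # For `tbl.field`, the '.' comes back as Token.Operator rather than
    # Token.Punctuation because of the override above.
    for tok, value in MoonScriptLexer().get_tokens(u'x = tbl.field\n'):
        print("%s %r" % (tok, value))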
+class ChaiscriptLexer(RegexLexer):
+ """
+ For `ChaiScript <http://chaiscript.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'ChaiScript'
aliases = ['chaiscript', 'chai']
- filenames = ['*.chai']
- mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'^\#.*?\n', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'[=+\-*/]', Operator),
- (r'(for|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch'
- r')\b', Keyword, 'slashstartsregex'),
- (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(attr|def|fun)\b', Keyword.Reserved),
- (r'(true|false)\b', Keyword.Constant),
- (r'(eval|throw)\b', Name.Builtin),
- (r'`\S+`', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"', String.Double, 'dqstring'),
+ filenames = ['*.chai']
+ mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'^\#.*?\n', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'[=+\-*/]', Operator),
+ (r'(for|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch'
+ r')\b', Keyword, 'slashstartsregex'),
+ (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(attr|def|fun)\b', Keyword.Reserved),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(eval|throw)\b', Name.Builtin),
+ (r'`\S+`', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"', String.Double, 'dqstring'),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ],
- 'dqstring': [
- (r'\$\{[^"}]+?\}', String.Interpol),
- (r'\$', String.Double),
- (r'\\\\', String.Double),
- (r'\\"', String.Double),
- (r'[^\\"$]+', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- }
-
-
-class LSLLexer(RegexLexer):
- """
- For Second Life's Linden Scripting Language source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'LSL'
- aliases = ['lsl']
- filenames = ['*.lsl']
- mimetypes = ['text/x-lsl']
-
- flags = re.MULTILINE
-
- lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
- lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
- lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
- lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
- lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
- lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
- lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
- lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
- lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
- lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
- lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
- lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
- lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
- lsl_invalid_illegal = r'\b(?:event)\b'
- lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
- lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
- lsl_reserved_log = r'\b(?:print)\b'
- lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
-
- tokens = {
- 'root':
- [
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String.Double, 'string'),
- (lsl_keywords, Keyword),
- (lsl_types, Keyword.Type),
- (lsl_states, Name.Class),
- (lsl_events, Name.Builtin),
- (lsl_functions_builtin, Name.Function),
- (lsl_constants_float, Keyword.Constant),
- (lsl_constants_integer, Keyword.Constant),
- (lsl_constants_integer_boolean, Keyword.Constant),
- (lsl_constants_rotation, Keyword.Constant),
- (lsl_constants_string, Keyword.Constant),
- (lsl_constants_vector, Keyword.Constant),
- (lsl_invalid_broken, Error),
- (lsl_invalid_deprecated, Error),
- (lsl_invalid_illegal, Error),
- (lsl_invalid_unimplemented, Error),
- (lsl_reserved_godmode, Keyword.Reserved),
- (lsl_reserved_log, Keyword.Reserved),
- (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (lsl_operators, Operator),
- (r':=?', Error),
- (r'[,;{}()\[\]]', Punctuation),
- (r'\n+', Whitespace),
- (r'\s+', Whitespace)
- ],
- 'comment':
- [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'string':
- [
- (r'\\([nt"\\])', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\\.', Error),
- (r'[^"\\]+', String.Double),
- ]
- }
-
-
-class AppleScriptLexer(RegexLexer):
- """
- For `AppleScript source code
- <http://developer.apple.com/documentation/AppleScript/
- Conceptual/AppleScriptLangGuide>`_,
- including `AppleScript Studio
- <http://developer.apple.com/documentation/AppleScript/
- Reference/StudioReference>`_.
- Contributed by Andreas Amann <aamann@mac.com>.
-
- .. versionadded:: 1.0
- """
-
- name = 'AppleScript'
- aliases = ['applescript']
- filenames = ['*.applescript']
-
- flags = re.MULTILINE | re.DOTALL
-
- Identifiers = r'[a-zA-Z]\w*'
-
- # XXX: use words() for all of these
- Literals = ('AppleScript', 'current application', 'false', 'linefeed',
- 'missing value', 'pi', 'quote', 'result', 'return', 'space',
- 'tab', 'text item delimiters', 'true', 'version')
- Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
- 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
- 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
- 'text ', 'unit types', '(?:Unicode )?text', 'string')
- BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
- 'paragraph', 'word', 'year')
- HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
- 'aside from', 'at', 'below', 'beneath', 'beside',
- 'between', 'for', 'given', 'instead of', 'on', 'onto',
- 'out of', 'over', 'since')
- Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
- 'choose application', 'choose color', 'choose file( name)?',
- 'choose folder', 'choose from list',
- 'choose remote application', 'clipboard info',
- 'close( access)?', 'copy', 'count', 'current date', 'delay',
- 'delete', 'display (alert|dialog)', 'do shell script',
- 'duplicate', 'exists', 'get eof', 'get volume settings',
- 'info for', 'launch', 'list (disks|folder)', 'load script',
- 'log', 'make', 'mount volume', 'new', 'offset',
- 'open( (for access|location))?', 'path to', 'print', 'quit',
- 'random number', 'read', 'round', 'run( script)?',
- 'say', 'scripting components',
- 'set (eof|the clipboard to|volume)', 'store script',
- 'summarize', 'system attribute', 'system info',
- 'the clipboard', 'time to GMT', 'write', 'quoted form')
- References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
- 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
- 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
- 'before', 'behind', 'every', 'front', 'index', 'last',
- 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
- Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
- "isn't", "isn't equal( to)?", "is not equal( to)?",
- "doesn't equal", "does not equal", "(is )?greater than",
- "comes after", "is not less than or equal( to)?",
- "isn't less than or equal( to)?", "(is )?less than",
- "comes before", "is not greater than or equal( to)?",
- "isn't greater than or equal( to)?",
- "(is )?greater than or equal( to)?", "is not less than",
- "isn't less than", "does not come before",
- "doesn't come before", "(is )?less than or equal( to)?",
- "is not greater than", "isn't greater than",
- "does not come after", "doesn't come after", "starts? with",
- "begins? with", "ends? with", "contains?", "does not contain",
- "doesn't contain", "is in", "is contained by", "is not in",
- "is not contained by", "isn't contained by", "div", "mod",
- "not", "(a )?(ref( to)?|reference to)", "is", "does")
- Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
- 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
- 'try', 'until', 'using terms from', 'while', 'whith',
- 'with timeout( of)?', 'with transaction', 'by', 'continue',
- 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
- Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
- Reserved = ('but', 'put', 'returning', 'the')
- StudioClasses = ('action cell', 'alert reply', 'application', 'box',
- 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
- 'clip view', 'color well', 'color-panel',
- 'combo box( item)?', 'control',
- 'data( (cell|column|item|row|source))?', 'default entry',
- 'dialog reply', 'document', 'drag info', 'drawer',
- 'event', 'font(-panel)?', 'formatter',
- 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
- 'movie( view)?', 'open-panel', 'outline view', 'panel',
- 'pasteboard', 'plugin', 'popup button',
- 'progress indicator', 'responder', 'save-panel',
- 'scroll view', 'secure text field( cell)?', 'slider',
- 'sound', 'split view', 'stepper', 'tab view( item)?',
- 'table( (column|header cell|header view|view))',
- 'text( (field( cell)?|view))?', 'toolbar( item)?',
- 'user-defaults', 'view', 'window')
- StudioEvents = ('accept outline drop', 'accept table drop', 'action',
- 'activated', 'alert ended', 'awake from nib', 'became key',
- 'became main', 'begin editing', 'bounds changed',
- 'cell value', 'cell value changed', 'change cell value',
- 'change item value', 'changed', 'child of item',
- 'choose menu item', 'clicked', 'clicked toolbar item',
- 'closed', 'column clicked', 'column moved',
- 'column resized', 'conclude drop', 'data representation',
- 'deminiaturized', 'dialog ended', 'document nib name',
- 'double clicked', 'drag( (entered|exited|updated))?',
- 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
- 'item value', 'item value changed', 'items changed',
- 'keyboard down', 'keyboard up', 'launched',
- 'load data representation', 'miniaturized', 'mouse down',
- 'mouse dragged', 'mouse entered', 'mouse exited',
- 'mouse moved', 'mouse up', 'moved',
- 'number of browser rows', 'number of items',
- 'number of rows', 'open untitled', 'opened', 'panel ended',
- 'parameters updated', 'plugin loaded', 'prepare drop',
- 'prepare outline drag', 'prepare outline drop',
- 'prepare table drag', 'prepare table drop',
- 'read from file', 'resigned active', 'resigned key',
- 'resigned main', 'resized( sub views)?',
- 'right mouse down', 'right mouse dragged',
- 'right mouse up', 'rows changed', 'scroll wheel',
- 'selected tab view item', 'selection changed',
- 'selection changing', 'should begin editing',
- 'should close', 'should collapse item',
- 'should end editing', 'should expand item',
- 'should open( untitled)?',
- 'should quit( after last window closed)?',
- 'should select column', 'should select item',
- 'should select row', 'should select tab view item',
- 'should selection change', 'should zoom', 'shown',
- 'update menu item', 'update parameters',
- 'update toolbar item', 'was hidden', 'was miniaturized',
- 'will become active', 'will close', 'will dismiss',
- 'will display browser cell', 'will display cell',
- 'will display item cell', 'will display outline cell',
- 'will finish launching', 'will hide', 'will miniaturize',
- 'will move', 'will open', 'will pop up', 'will quit',
- 'will resign active', 'will resize( sub views)?',
- 'will select tab view item', 'will show', 'will zoom',
- 'write to file', 'zoomed')
- StudioCommands = ('animate', 'append', 'call method', 'center',
- 'close drawer', 'close panel', 'display',
- 'display alert', 'display dialog', 'display panel', 'go',
- 'hide', 'highlight', 'increment', 'item for',
- 'load image', 'load movie', 'load nib', 'load panel',
- 'load sound', 'localized string', 'lock focus', 'log',
- 'open drawer', 'path for', 'pause', 'perform action',
- 'play', 'register', 'resume', 'scroll', 'select( all)?',
- 'show', 'size to fit', 'start', 'step back',
- 'step forward', 'stop', 'synchronize', 'unlock focus',
- 'update')
- StudioProperties = ('accepts arrow key', 'action method', 'active',
- 'alignment', 'allowed identifiers',
- 'allows branch selection', 'allows column reordering',
- 'allows column resizing', 'allows column selection',
- 'allows customization',
- 'allows editing text attributes',
- 'allows empty selection', 'allows mixed state',
- 'allows multiple selection', 'allows reordering',
- 'allows undo', 'alpha( value)?', 'alternate image',
- 'alternate increment value', 'alternate title',
- 'animation delay', 'associated file name',
- 'associated object', 'auto completes', 'auto display',
- 'auto enables items', 'auto repeat',
- 'auto resizes( outline column)?',
- 'auto save expanded items', 'auto save name',
- 'auto save table columns', 'auto saves configuration',
- 'auto scroll', 'auto sizes all columns to fit',
- 'auto sizes cells', 'background color', 'bezel state',
- 'bezel style', 'bezeled', 'border rect', 'border type',
- 'bordered', 'bounds( rotation)?', 'box type',
- 'button returned', 'button type',
- 'can choose directories', 'can choose files',
- 'can draw', 'can hide',
- 'cell( (background color|size|type))?', 'characters',
- 'class', 'click count', 'clicked( data)? column',
- 'clicked data item', 'clicked( data)? row',
- 'closeable', 'collating', 'color( (mode|panel))',
- 'command key down', 'configuration',
- 'content(s| (size|view( margins)?))?', 'context',
- 'continuous', 'control key down', 'control size',
- 'control tint', 'control view',
- 'controller visible', 'coordinate system',
- 'copies( on scroll)?', 'corner view', 'current cell',
- 'current column', 'current( field)? editor',
- 'current( menu)? item', 'current row',
- 'current tab view item', 'data source',
- 'default identifiers', 'delta (x|y|z)',
- 'destination window', 'directory', 'display mode',
- 'displayed cell', 'document( (edited|rect|view))?',
- 'double value', 'dragged column', 'dragged distance',
- 'dragged items', 'draws( cell)? background',
- 'draws grid', 'dynamically scrolls', 'echos bullets',
- 'edge', 'editable', 'edited( data)? column',
- 'edited data item', 'edited( data)? row', 'enabled',
- 'enclosing scroll view', 'ending page',
- 'error handling', 'event number', 'event type',
- 'excluded from windows menu', 'executable path',
- 'expanded', 'fax number', 'field editor', 'file kind',
- 'file name', 'file type', 'first responder',
- 'first visible column', 'flipped', 'floating',
- 'font( panel)?', 'formatter', 'frameworks path',
- 'frontmost', 'gave up', 'grid color', 'has data items',
- 'has horizontal ruler', 'has horizontal scroller',
- 'has parent data item', 'has resize indicator',
- 'has shadow', 'has sub menu', 'has vertical ruler',
- 'has vertical scroller', 'header cell', 'header view',
- 'hidden', 'hides when deactivated', 'highlights by',
- 'horizontal line scroll', 'horizontal page scroll',
- 'horizontal ruler view', 'horizontally resizable',
- 'icon image', 'id', 'identifier',
- 'ignores multiple clicks',
- 'image( (alignment|dims when disabled|frame style|scaling))?',
- 'imports graphics', 'increment value',
- 'indentation per level', 'indeterminate', 'index',
- 'integer value', 'intercell spacing', 'item height',
- 'key( (code|equivalent( modifier)?|window))?',
- 'knob thickness', 'label', 'last( visible)? column',
- 'leading offset', 'leaf', 'level', 'line scroll',
- 'loaded', 'localized sort', 'location', 'loop mode',
- 'main( (bunde|menu|window))?', 'marker follows cell',
- 'matrix mode', 'maximum( content)? size',
- 'maximum visible columns',
- 'menu( form representation)?', 'miniaturizable',
- 'miniaturized', 'minimized image', 'minimized title',
- 'minimum column width', 'minimum( content)? size',
- 'modal', 'modified', 'mouse down state',
- 'movie( (controller|file|rect))?', 'muted', 'name',
- 'needs display', 'next state', 'next text',
- 'number of tick marks', 'only tick mark values',
- 'opaque', 'open panel', 'option key down',
- 'outline table column', 'page scroll', 'pages across',
- 'pages down', 'palette label', 'pane splitter',
- 'parent data item', 'parent window', 'pasteboard',
- 'path( (names|separator))?', 'playing',
- 'plays every frame', 'plays selection only', 'position',
- 'preferred edge', 'preferred type', 'pressure',
- 'previous text', 'prompt', 'properties',
- 'prototype cell', 'pulls down', 'rate',
- 'released when closed', 'repeated',
- 'requested print time', 'required file type',
- 'resizable', 'resized column', 'resource path',
- 'returns records', 'reuses columns', 'rich text',
- 'roll over', 'row height', 'rulers visible',
- 'save panel', 'scripts path', 'scrollable',
- 'selectable( identifiers)?', 'selected cell',
- 'selected( data)? columns?', 'selected data items?',
- 'selected( data)? rows?', 'selected item identifier',
- 'selection by rect', 'send action on arrow key',
- 'sends action when done editing', 'separates columns',
- 'separator item', 'sequence number', 'services menu',
- 'shared frameworks path', 'shared support path',
- 'sheet', 'shift key down', 'shows alpha',
- 'shows state by', 'size( mode)?',
- 'smart insert delete enabled', 'sort case sensitivity',
- 'sort column', 'sort order', 'sort type',
- 'sorted( data rows)?', 'sound', 'source( mask)?',
- 'spell checking enabled', 'starting page', 'state',
- 'string value', 'sub menu', 'super menu', 'super view',
- 'tab key traverses cells', 'tab state', 'tab type',
- 'tab view', 'table view', 'tag', 'target( printer)?',
- 'text color', 'text container insert',
- 'text container origin', 'text returned',
- 'tick mark position', 'time stamp',
- 'title(d| (cell|font|height|position|rect))?',
- 'tool tip', 'toolbar', 'trailing offset', 'transparent',
- 'treat packages as directories', 'truncated labels',
- 'types', 'unmodified characters', 'update views',
- 'use sort indicator', 'user defaults',
- 'uses data source', 'uses ruler',
- 'uses threaded animation',
- 'uses title from previous column', 'value wraps',
- 'version',
- 'vertical( (line scroll|page scroll|ruler view))?',
- 'vertically resizable', 'view',
- 'visible( document rect)?', 'volume', 'width', 'window',
- 'windows menu', 'wraps', 'zoomable', 'zoomed')
-
- tokens = {
- 'root': [
- (r'\s+', Text),
+ ],
+ 'dqstring': [
+ (r'\$\{[^"}]+?\}', String.Interpol),
+ (r'\$', String.Double),
+ (r'\\\\', String.Double),
+ (r'\\"', String.Double),
+ (r'[^\\"$]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
+
+
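One detail worth noting in the ChaiScript rules above is the dqstring state, which tags ${...} inside double-quoted strings as String.Interpol; a small sketch (the input line is illustrative):

    from pygments.lexers import get_lexer_by_name

    # '${name}' is emitted as Token.Literal.String.Interpol, while the rest of
    # the quoted text stays String.Double.
    chai = get_lexer_by_name('chai')
    for tok, value in chai.get_tokens(u'var s = "hi ${name}"\n'):
        print("%s %r" % (tok, value))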
+class LSLLexer(RegexLexer):
+ """
+ For Second Life's Linden Scripting Language source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'LSL'
+ aliases = ['lsl']
+ filenames = ['*.lsl']
+ mimetypes = ['text/x-lsl']
+
+ flags = re.MULTILINE
+
+ lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
+ lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
+ lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
+ lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
+ lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
+ lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
+ lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
+ lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
+ lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
+ lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
+ lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
+ lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
+ lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
+ lsl_invalid_illegal = r'\b(?:event)\b'
+ lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
+ lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
+ lsl_reserved_log = r'\b(?:print)\b'
+ lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
+
+ tokens = {
+ 'root':
+ [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String.Double, 'string'),
+ (lsl_keywords, Keyword),
+ (lsl_types, Keyword.Type),
+ (lsl_states, Name.Class),
+ (lsl_events, Name.Builtin),
+ (lsl_functions_builtin, Name.Function),
+ (lsl_constants_float, Keyword.Constant),
+ (lsl_constants_integer, Keyword.Constant),
+ (lsl_constants_integer_boolean, Keyword.Constant),
+ (lsl_constants_rotation, Keyword.Constant),
+ (lsl_constants_string, Keyword.Constant),
+ (lsl_constants_vector, Keyword.Constant),
+ (lsl_invalid_broken, Error),
+ (lsl_invalid_deprecated, Error),
+ (lsl_invalid_illegal, Error),
+ (lsl_invalid_unimplemented, Error),
+ (lsl_reserved_godmode, Keyword.Reserved),
+ (lsl_reserved_log, Keyword.Reserved),
+ (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (lsl_operators, Operator),
+ (r':=?', Error),
+ (r'[,;{}()\[\]]', Punctuation),
+ (r'\n+', Whitespace),
+ (r'\s+', Whitespace)
+ ],
+ 'comment':
+ [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'string':
+ [
+ (r'\\([nt"\\])', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\\.', Error),
+ (r'[^"\\]+', String.Double),
+ ]
+ }
+
+
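Since LSLLexer registers the 'lsl' alias and *.lsl filename above, it is reachable through the usual Pygments entry points; a minimal sketch (the LSL snippet and the NullFormatter choice are illustrative):

    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexers import get_lexer_by_name

    lsl_code = u'default { state_entry() { llSay(0, "Hello"); } }'

    # 'lsl' resolves to LSLLexer; NullFormatter simply echoes the tokenized text.
    print(highlight(lsl_code, get_lexer_by_name('lsl'), NullFormatter()))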
+class AppleScriptLexer(RegexLexer):
+ """
+ For `AppleScript source code
+ <http://developer.apple.com/documentation/AppleScript/
+ Conceptual/AppleScriptLangGuide>`_,
+ including `AppleScript Studio
+ <http://developer.apple.com/documentation/AppleScript/
+ Reference/StudioReference>`_.
+ Contributed by Andreas Amann <aamann@mac.com>.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'AppleScript'
+ aliases = ['applescript']
+ filenames = ['*.applescript']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ Identifiers = r'[a-zA-Z]\w*'
+
+ # XXX: use words() for all of these
+ Literals = ('AppleScript', 'current application', 'false', 'linefeed',
+ 'missing value', 'pi', 'quote', 'result', 'return', 'space',
+ 'tab', 'text item delimiters', 'true', 'version')
+ Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
+ 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
+ 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
+ 'text ', 'unit types', '(?:Unicode )?text', 'string')
+ BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
+ 'paragraph', 'word', 'year')
+ HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
+ 'aside from', 'at', 'below', 'beneath', 'beside',
+ 'between', 'for', 'given', 'instead of', 'on', 'onto',
+ 'out of', 'over', 'since')
+ Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
+ 'choose application', 'choose color', 'choose file( name)?',
+ 'choose folder', 'choose from list',
+ 'choose remote application', 'clipboard info',
+ 'close( access)?', 'copy', 'count', 'current date', 'delay',
+ 'delete', 'display (alert|dialog)', 'do shell script',
+ 'duplicate', 'exists', 'get eof', 'get volume settings',
+ 'info for', 'launch', 'list (disks|folder)', 'load script',
+ 'log', 'make', 'mount volume', 'new', 'offset',
+ 'open( (for access|location))?', 'path to', 'print', 'quit',
+ 'random number', 'read', 'round', 'run( script)?',
+ 'say', 'scripting components',
+ 'set (eof|the clipboard to|volume)', 'store script',
+ 'summarize', 'system attribute', 'system info',
+ 'the clipboard', 'time to GMT', 'write', 'quoted form')
+ References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
+ 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
+ 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
+ 'before', 'behind', 'every', 'front', 'index', 'last',
+ 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
+ Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
+ "isn't", "isn't equal( to)?", "is not equal( to)?",
+ "doesn't equal", "does not equal", "(is )?greater than",
+ "comes after", "is not less than or equal( to)?",
+ "isn't less than or equal( to)?", "(is )?less than",
+ "comes before", "is not greater than or equal( to)?",
+ "isn't greater than or equal( to)?",
+ "(is )?greater than or equal( to)?", "is not less than",
+ "isn't less than", "does not come before",
+ "doesn't come before", "(is )?less than or equal( to)?",
+ "is not greater than", "isn't greater than",
+ "does not come after", "doesn't come after", "starts? with",
+ "begins? with", "ends? with", "contains?", "does not contain",
+ "doesn't contain", "is in", "is contained by", "is not in",
+ "is not contained by", "isn't contained by", "div", "mod",
+ "not", "(a )?(ref( to)?|reference to)", "is", "does")
+ Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
+ 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
+ 'try', 'until', 'using terms from', 'while', 'whith',
+ 'with timeout( of)?', 'with transaction', 'by', 'continue',
+ 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
+ Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
+ Reserved = ('but', 'put', 'returning', 'the')
+ StudioClasses = ('action cell', 'alert reply', 'application', 'box',
+ 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
+ 'clip view', 'color well', 'color-panel',
+ 'combo box( item)?', 'control',
+ 'data( (cell|column|item|row|source))?', 'default entry',
+ 'dialog reply', 'document', 'drag info', 'drawer',
+ 'event', 'font(-panel)?', 'formatter',
+ 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
+ 'movie( view)?', 'open-panel', 'outline view', 'panel',
+ 'pasteboard', 'plugin', 'popup button',
+ 'progress indicator', 'responder', 'save-panel',
+ 'scroll view', 'secure text field( cell)?', 'slider',
+ 'sound', 'split view', 'stepper', 'tab view( item)?',
+ 'table( (column|header cell|header view|view))',
+ 'text( (field( cell)?|view))?', 'toolbar( item)?',
+ 'user-defaults', 'view', 'window')
+ StudioEvents = ('accept outline drop', 'accept table drop', 'action',
+ 'activated', 'alert ended', 'awake from nib', 'became key',
+ 'became main', 'begin editing', 'bounds changed',
+ 'cell value', 'cell value changed', 'change cell value',
+ 'change item value', 'changed', 'child of item',
+ 'choose menu item', 'clicked', 'clicked toolbar item',
+ 'closed', 'column clicked', 'column moved',
+ 'column resized', 'conclude drop', 'data representation',
+ 'deminiaturized', 'dialog ended', 'document nib name',
+ 'double clicked', 'drag( (entered|exited|updated))?',
+ 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
+ 'item value', 'item value changed', 'items changed',
+ 'keyboard down', 'keyboard up', 'launched',
+ 'load data representation', 'miniaturized', 'mouse down',
+ 'mouse dragged', 'mouse entered', 'mouse exited',
+ 'mouse moved', 'mouse up', 'moved',
+ 'number of browser rows', 'number of items',
+ 'number of rows', 'open untitled', 'opened', 'panel ended',
+ 'parameters updated', 'plugin loaded', 'prepare drop',
+ 'prepare outline drag', 'prepare outline drop',
+ 'prepare table drag', 'prepare table drop',
+ 'read from file', 'resigned active', 'resigned key',
+ 'resigned main', 'resized( sub views)?',
+ 'right mouse down', 'right mouse dragged',
+ 'right mouse up', 'rows changed', 'scroll wheel',
+ 'selected tab view item', 'selection changed',
+ 'selection changing', 'should begin editing',
+ 'should close', 'should collapse item',
+ 'should end editing', 'should expand item',
+ 'should open( untitled)?',
+ 'should quit( after last window closed)?',
+ 'should select column', 'should select item',
+ 'should select row', 'should select tab view item',
+ 'should selection change', 'should zoom', 'shown',
+ 'update menu item', 'update parameters',
+ 'update toolbar item', 'was hidden', 'was miniaturized',
+ 'will become active', 'will close', 'will dismiss',
+ 'will display browser cell', 'will display cell',
+ 'will display item cell', 'will display outline cell',
+ 'will finish launching', 'will hide', 'will miniaturize',
+ 'will move', 'will open', 'will pop up', 'will quit',
+ 'will resign active', 'will resize( sub views)?',
+ 'will select tab view item', 'will show', 'will zoom',
+ 'write to file', 'zoomed')
+ StudioCommands = ('animate', 'append', 'call method', 'center',
+ 'close drawer', 'close panel', 'display',
+ 'display alert', 'display dialog', 'display panel', 'go',
+ 'hide', 'highlight', 'increment', 'item for',
+ 'load image', 'load movie', 'load nib', 'load panel',
+ 'load sound', 'localized string', 'lock focus', 'log',
+ 'open drawer', 'path for', 'pause', 'perform action',
+ 'play', 'register', 'resume', 'scroll', 'select( all)?',
+ 'show', 'size to fit', 'start', 'step back',
+ 'step forward', 'stop', 'synchronize', 'unlock focus',
+ 'update')
+ StudioProperties = ('accepts arrow key', 'action method', 'active',
+ 'alignment', 'allowed identifiers',
+ 'allows branch selection', 'allows column reordering',
+ 'allows column resizing', 'allows column selection',
+ 'allows customization',
+ 'allows editing text attributes',
+ 'allows empty selection', 'allows mixed state',
+ 'allows multiple selection', 'allows reordering',
+ 'allows undo', 'alpha( value)?', 'alternate image',
+ 'alternate increment value', 'alternate title',
+ 'animation delay', 'associated file name',
+ 'associated object', 'auto completes', 'auto display',
+ 'auto enables items', 'auto repeat',
+ 'auto resizes( outline column)?',
+ 'auto save expanded items', 'auto save name',
+ 'auto save table columns', 'auto saves configuration',
+ 'auto scroll', 'auto sizes all columns to fit',
+ 'auto sizes cells', 'background color', 'bezel state',
+ 'bezel style', 'bezeled', 'border rect', 'border type',
+ 'bordered', 'bounds( rotation)?', 'box type',
+ 'button returned', 'button type',
+ 'can choose directories', 'can choose files',
+ 'can draw', 'can hide',
+ 'cell( (background color|size|type))?', 'characters',
+ 'class', 'click count', 'clicked( data)? column',
+ 'clicked data item', 'clicked( data)? row',
+ 'closeable', 'collating', 'color( (mode|panel))',
+ 'command key down', 'configuration',
+ 'content(s| (size|view( margins)?))?', 'context',
+ 'continuous', 'control key down', 'control size',
+ 'control tint', 'control view',
+ 'controller visible', 'coordinate system',
+ 'copies( on scroll)?', 'corner view', 'current cell',
+ 'current column', 'current( field)? editor',
+ 'current( menu)? item', 'current row',
+ 'current tab view item', 'data source',
+ 'default identifiers', 'delta (x|y|z)',
+ 'destination window', 'directory', 'display mode',
+ 'displayed cell', 'document( (edited|rect|view))?',
+ 'double value', 'dragged column', 'dragged distance',
+ 'dragged items', 'draws( cell)? background',
+ 'draws grid', 'dynamically scrolls', 'echos bullets',
+ 'edge', 'editable', 'edited( data)? column',
+ 'edited data item', 'edited( data)? row', 'enabled',
+ 'enclosing scroll view', 'ending page',
+ 'error handling', 'event number', 'event type',
+ 'excluded from windows menu', 'executable path',
+ 'expanded', 'fax number', 'field editor', 'file kind',
+ 'file name', 'file type', 'first responder',
+ 'first visible column', 'flipped', 'floating',
+ 'font( panel)?', 'formatter', 'frameworks path',
+ 'frontmost', 'gave up', 'grid color', 'has data items',
+ 'has horizontal ruler', 'has horizontal scroller',
+ 'has parent data item', 'has resize indicator',
+ 'has shadow', 'has sub menu', 'has vertical ruler',
+ 'has vertical scroller', 'header cell', 'header view',
+ 'hidden', 'hides when deactivated', 'highlights by',
+ 'horizontal line scroll', 'horizontal page scroll',
+ 'horizontal ruler view', 'horizontally resizable',
+ 'icon image', 'id', 'identifier',
+ 'ignores multiple clicks',
+ 'image( (alignment|dims when disabled|frame style|scaling))?',
+ 'imports graphics', 'increment value',
+ 'indentation per level', 'indeterminate', 'index',
+ 'integer value', 'intercell spacing', 'item height',
+ 'key( (code|equivalent( modifier)?|window))?',
+ 'knob thickness', 'label', 'last( visible)? column',
+ 'leading offset', 'leaf', 'level', 'line scroll',
+ 'loaded', 'localized sort', 'location', 'loop mode',
+ 'main( (bundle|menu|window))?', 'marker follows cell',
+ 'matrix mode', 'maximum( content)? size',
+ 'maximum visible columns',
+ 'menu( form representation)?', 'miniaturizable',
+ 'miniaturized', 'minimized image', 'minimized title',
+ 'minimum column width', 'minimum( content)? size',
+ 'modal', 'modified', 'mouse down state',
+ 'movie( (controller|file|rect))?', 'muted', 'name',
+ 'needs display', 'next state', 'next text',
+ 'number of tick marks', 'only tick mark values',
+ 'opaque', 'open panel', 'option key down',
+ 'outline table column', 'page scroll', 'pages across',
+ 'pages down', 'palette label', 'pane splitter',
+ 'parent data item', 'parent window', 'pasteboard',
+ 'path( (names|separator))?', 'playing',
+ 'plays every frame', 'plays selection only', 'position',
+ 'preferred edge', 'preferred type', 'pressure',
+ 'previous text', 'prompt', 'properties',
+ 'prototype cell', 'pulls down', 'rate',
+ 'released when closed', 'repeated',
+ 'requested print time', 'required file type',
+ 'resizable', 'resized column', 'resource path',
+ 'returns records', 'reuses columns', 'rich text',
+ 'roll over', 'row height', 'rulers visible',
+ 'save panel', 'scripts path', 'scrollable',
+ 'selectable( identifiers)?', 'selected cell',
+ 'selected( data)? columns?', 'selected data items?',
+ 'selected( data)? rows?', 'selected item identifier',
+ 'selection by rect', 'send action on arrow key',
+ 'sends action when done editing', 'separates columns',
+ 'separator item', 'sequence number', 'services menu',
+ 'shared frameworks path', 'shared support path',
+ 'sheet', 'shift key down', 'shows alpha',
+ 'shows state by', 'size( mode)?',
+ 'smart insert delete enabled', 'sort case sensitivity',
+ 'sort column', 'sort order', 'sort type',
+ 'sorted( data rows)?', 'sound', 'source( mask)?',
+ 'spell checking enabled', 'starting page', 'state',
+ 'string value', 'sub menu', 'super menu', 'super view',
+ 'tab key traverses cells', 'tab state', 'tab type',
+ 'tab view', 'table view', 'tag', 'target( printer)?',
+ 'text color', 'text container insert',
+ 'text container origin', 'text returned',
+ 'tick mark position', 'time stamp',
+ 'title(d| (cell|font|height|position|rect))?',
+ 'tool tip', 'toolbar', 'trailing offset', 'transparent',
+ 'treat packages as directories', 'truncated labels',
+ 'types', 'unmodified characters', 'update views',
+ 'use sort indicator', 'user defaults',
+ 'uses data source', 'uses ruler',
+ 'uses threaded animation',
+ 'uses title from previous column', 'value wraps',
+ 'version',
+ 'vertical( (line scroll|page scroll|ruler view))?',
+ 'vertically resizable', 'view',
+ 'visible( document rect)?', 'volume', 'width', 'window',
+ 'windows menu', 'wraps', 'zoomable', 'zoomed')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'¬\n', String.Escape),
- (r"'s\s+", Text), # This is a possessive, consider moving
- (r'(--|#).*?$', Comment),
- (r'\(\*', Comment.Multiline, 'comment'),
- (r'[(){}!,.:]', Punctuation),
+ (r"'s\s+", Text), # This is a possessive, consider moving
+ (r'(--|#).*?$', Comment),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ (r'[(){}!,.:]', Punctuation),
(r'(«)([^»]+)(»)',
- bygroups(Text, Name.Builtin, Text)),
- (r'\b((?:considering|ignoring)\s*)'
- r'(application responses|case|diacriticals|hyphens|'
- r'numeric strings|punctuation|white space)',
- bygroups(Keyword, Name.Builtin)),
+ bygroups(Text, Name.Builtin, Text)),
+ (r'\b((?:considering|ignoring)\s*)'
+ r'(application responses|case|diacriticals|hyphens|'
+ r'numeric strings|punctuation|white space)',
+ bygroups(Keyword, Name.Builtin)),
(r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
- (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
- (r'^(\s*(?:on|end)\s+)'
- r'(%s)' % '|'.join(StudioEvents[::-1]),
- bygroups(Keyword, Name.Function)),
- (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\b(as )(%s)\b' % '|'.join(Classes),
- bygroups(Keyword, Name.Class)),
- (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
- (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(Control), Keyword),
- (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
- (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
- (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
- (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
+ (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
+ (r'^(\s*(?:on|end)\s+)'
+ r'(%s)' % '|'.join(StudioEvents[::-1]),
+ bygroups(Keyword, Name.Function)),
+ (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\b(as )(%s)\b' % '|'.join(Classes),
+ bygroups(Keyword, Name.Class)),
+ (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
+ (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(Control), Keyword),
+ (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
+ (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
+ (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
+ (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r'\b(%s)\b' % Identifiers, Name.Variable),
- (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
- (r'[-+]?\d+', Number.Integer),
- ],
- 'comment': [
+ (r'\b(%s)\b' % Identifiers, Name.Variable),
+ (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ 'comment': [
(r'\(\*', Comment.Multiline, '#push'),
(r'\*\)', Comment.Multiline, '#pop'),
- ('[^*(]+', Comment.Multiline),
- ('[*(]', Comment.Multiline),
- ],
- }
-
-
-class RexxLexer(RegexLexer):
- """
- `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
- a wide range of different platforms with its roots found on mainframe
- systems. It is popular for I/O- and data based tasks and can act as glue
- language to bind different applications together.
-
- .. versionadded:: 2.0
- """
- name = 'Rexx'
- aliases = ['rexx', 'arexx']
- filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
- mimetypes = ['text/x-rexx']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
+ ('[^*(]+', Comment.Multiline),
+ ('[*(]', Comment.Multiline),
+ ],
+ }
+
+
+class RexxLexer(RegexLexer):
+ """
+ `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
+ a wide range of different platforms with its roots found on mainframe
+ systems. It is popular for I/O- and data-based tasks and can act as a
+ glue language to bind different applications together.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Rexx'
+ aliases = ['rexx', 'arexx']
+ filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
+ mimetypes = ['text/x-rexx']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String, 'string_double'),
- (r"'", String, 'string_single'),
- (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
- (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration)),
- (r'([a-z_]\w*)(\s*)(:)',
- bygroups(Name.Label, Whitespace, Operator)),
- include('function'),
- include('keyword'),
- include('operator'),
- (r'[a-z_]\w*', Text),
- ],
- 'function': [
- (words((
- 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
- 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
- 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
- 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
- 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
- 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
- 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
- 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
- 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
- 'xrange'), suffix=r'(\s*)(\()'),
- bygroups(Name.Builtin, Whitespace, Operator)),
- ],
- 'keyword': [
- (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
- r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
- r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
- r'while)\b', Keyword.Reserved),
- ],
- 'operator': [
- (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
- r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
- r'¬>>|¬>|¬|\.|,)', Operator),
- ],
- 'string_double': [
- (r'[^"\n]+', String),
- (r'""', String),
- (r'"', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'string_single': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String, 'string_double'),
+ (r"'", String, 'string_single'),
+ (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
+ (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
+ bygroups(Name.Function, Whitespace, Operator, Whitespace,
+ Keyword.Declaration)),
+ (r'([a-z_]\w*)(\s*)(:)',
+ bygroups(Name.Label, Whitespace, Operator)),
+ include('function'),
+ include('keyword'),
+ include('operator'),
+ (r'[a-z_]\w*', Text),
+ ],
+ 'function': [
+ (words((
+ 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
+ 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
+ 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
+ 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
+ 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
+ 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
+ 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
+ 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
+ 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
+ 'xrange'), suffix=r'(\s*)(\()'),
+ bygroups(Name.Builtin, Whitespace, Operator)),
+ ],
+ 'keyword': [
+ (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
+ r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
+ r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
+ r'while)\b', Keyword.Reserved),
+ ],
+ 'operator': [
+ (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
+ r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
+ r'¬>>|¬>|¬|\.|,)', Operator),
+ ],
+ 'string_double': [
+ (r'[^"\n]+', String),
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'string_single': [
(r'[^\'\n]+', String),
- (r'\'\'', String),
- (r'\'', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'comment': [
- (r'[^*]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ]
- }
-
- _c = lambda s: re.compile(s, re.MULTILINE)
- _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
- _ADDRESS_PATTERN = _c(r'^\s*address\s+')
- _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
- _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
- _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
- _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
- _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
- PATTERNS_AND_WEIGHTS = (
- (_ADDRESS_COMMAND_PATTERN, 0.2),
- (_ADDRESS_PATTERN, 0.05),
- (_DO_WHILE_PATTERN, 0.1),
- (_ELSE_DO_PATTERN, 0.1),
- (_IF_THEN_DO_PATTERN, 0.1),
- (_PROCEDURE_PATTERN, 0.5),
- (_PARSE_ARG_PATTERN, 0.2),
- )
-
- def analyse_text(text):
- """
- Check for inital comment and patterns that distinguish Rexx from other
- C-like languages.
- """
- if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
- # Header matches MVS Rexx requirements, this is certainly a Rexx
- # script.
- return 1.0
- elif text.startswith('/*'):
- # Header matches general Rexx requirements; the source code might
- # still be any language using C comments such as C++, C# or Java.
- lowerText = text.lower()
- result = sum(weight
- for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
- if pattern.search(lowerText)) + 0.01
- return min(result, 1.0)
-
-
-class MOOCodeLexer(RegexLexer):
- """
- For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
- language).
-
- .. versionadded:: 0.9
- """
- name = 'MOOCode'
- filenames = ['*.moo']
- aliases = ['moocode', 'moo']
- mimetypes = ['text/x-moocode']
-
- tokens = {
- 'root': [
- # Numbers
- (r'(0|[1-9][0-9_]*)', Number.Integer),
- # Strings
+ (r'\'\'', String),
+ (r'\'', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'comment': [
+ (r'[^*]+', Comment.Multiline),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'\*', Comment.Multiline),
+ ]
+ }
+
+ _c = lambda s: re.compile(s, re.MULTILINE)
+ _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
+ _ADDRESS_PATTERN = _c(r'^\s*address\s+')
+ _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
+ _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
+ _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
+ _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
+ _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
+ PATTERNS_AND_WEIGHTS = (
+ (_ADDRESS_COMMAND_PATTERN, 0.2),
+ (_ADDRESS_PATTERN, 0.05),
+ (_DO_WHILE_PATTERN, 0.1),
+ (_ELSE_DO_PATTERN, 0.1),
+ (_IF_THEN_DO_PATTERN, 0.1),
+ (_PROCEDURE_PATTERN, 0.5),
+ (_PARSE_ARG_PATTERN, 0.2),
+ )
+
+ def analyse_text(text):
+ """
+ Check for initial comment and patterns that distinguish Rexx from other
+ C-like languages.
+ """
+ if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
+ # Header matches MVS Rexx requirements; this is certainly a Rexx
+ # script.
+ return 1.0
+ elif text.startswith('/*'):
+ # Header matches general Rexx requirements; the source code might
+ # still be any language using C comments such as C++, C# or Java.
+ lowerText = text.lower()
+ result = sum(weight
+ for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
+ if pattern.search(lowerText)) + 0.01
+ return min(result, 1.0)
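A minimal usage sketch of the heuristic above (not part of this patch; it assumes RexxLexer is importable from pygments.lexers.scripting as in a stock Pygments install, and the sample strings are invented for illustration). An explicit "rexx" header short-circuits to 1.0, while a bare C-style comment only earns the 0.01 baseline unless the weighted patterns match:

    from pygments.lexers.scripting import RexxLexer

    rexx_header = "/* rexx */\nsay 'hello'\n"
    c_like = "/* generic header */\nint main(void) { return 0; }\n"

    print(RexxLexer.analyse_text(rexx_header))  # 1.0 - the comment names rexx explicitly
    print(RexxLexer.analyse_text(c_like))       # 0.01 - comment header, but no weighted pattern matched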
+
+
+class MOOCodeLexer(RegexLexer):
+ """
+ For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
+ language).
+
+ .. versionadded:: 0.9
+ """
+ name = 'MOOCode'
+ filenames = ['*.moo']
+ aliases = ['moocode', 'moo']
+ mimetypes = ['text/x-moocode']
+
+ tokens = {
+ 'root': [
+ # Numbers
+ (r'(0|[1-9][0-9_]*)', Number.Integer),
+ # Strings
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- # exceptions
- (r'(E_PERM|E_DIV)', Name.Exception),
- # db-refs
- (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
- # Keywords
- (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
- r'|endwhile|break|continue|return|try'
- r'|except|endtry|finally|in)\b', Keyword),
- # builtins
- (r'(random|length)', Name.Builtin),
- # special variables
- (r'(player|caller|this|args)', Name.Variable.Instance),
- # skip whitespace
- (r'\s+', Text),
- (r'\n', Text),
- # other operators
- (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
- # function call
- (r'(\w+)(\()', bygroups(Name.Function, Operator)),
- # variables
- (r'(\w+)', Text),
- ]
- }
-
-
-class HybrisLexer(RegexLexer):
- """
- For `Hybris <http://www.hybris-lang.org>`_ source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'Hybris'
- aliases = ['hybris', 'hy']
- filenames = ['*.hy', '*.hyb']
- mimetypes = ['text/x-hybris', 'application/x-hybris']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:function|method|operator\s+)+?)'
- r'([a-zA-Z_]\w*)'
- r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w.]*', Name.Decorator),
- (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
- r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
- (r'(extends|private|protected|public|static|throws|function|method|'
- r'operator)\b', Keyword.Declaration),
- (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
- r'__INC_PATH__)\b', Keyword.Constant),
- (r'(class|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import|include)(\s+)',
- bygroups(Keyword.Namespace, Text), 'import'),
- (words((
- 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
- 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
- 'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
- 'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
- 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
- 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
- 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
- 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
- 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
- 'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
- 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
- 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
- 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
- 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
- 'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
- 'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
- 'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
- 'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
- 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
- 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
- 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
- 'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
- 'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
- 'contains', 'join'), suffix=r'\b'),
- Name.Builtin),
- (words((
- 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
- 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
- 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
- Keyword.Type),
+ # exceptions
+ (r'(E_PERM|E_DIV)', Name.Exception),
+ # db-refs
+ (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
+ # Keywords
+ (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
+ r'|endwhile|break|continue|return|try'
+ r'|except|endtry|finally|in)\b', Keyword),
+ # builtins
+ (r'(random|length)', Name.Builtin),
+ # special variables
+ (r'(player|caller|this|args)', Name.Variable.Instance),
+ # skip whitespace
+ (r'\s+', Text),
+ (r'\n', Text),
+ # other operators
+ (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
+ # function call
+ (r'(\w+)(\()', bygroups(Name.Function, Operator)),
+ # variables
+ (r'(\w+)', Text),
+ ]
+ }
+
+
+class HybrisLexer(RegexLexer):
+ """
+ For `Hybris <http://www.hybris-lang.org>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Hybris'
+ aliases = ['hybris', 'hy']
+ filenames = ['*.hy', '*.hyb']
+ mimetypes = ['text/x-hybris', 'application/x-hybris']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:function|method|operator\s+)+?)'
+ r'([a-zA-Z_]\w*)'
+ r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w.]*', Name.Decorator),
+ (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
+ r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
+ (r'(extends|private|protected|public|static|throws|function|method|'
+ r'operator)\b', Keyword.Declaration),
+ (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
+ r'__INC_PATH__)\b', Keyword.Constant),
+ (r'(class|struct)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import|include)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'import'),
+ (words((
+ 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
+ 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
+ 'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
+ 'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
+ 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
+ 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
+ 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
+ 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
+ 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
+ 'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
+ 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
+ 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
+ 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
+ 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
+ 'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
+ 'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
+ 'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
+ 'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
+ 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
+ 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
+ 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
+ 'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
+ 'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
+ 'contains', 'join'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
+ 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
+ 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
+ Keyword.Type),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
+
def analyse_text(text):
"""public method and private method don't seem to be quite common
elsewhere."""
@@ -950,283 +950,283 @@ class HybrisLexer(RegexLexer):
if re.search(r'\b(?:public|private)\s+method\b', text):
result += 0.01
return result
-
-
-
-class EasytrieveLexer(RegexLexer):
- """
- Easytrieve Plus is a programming language for extracting, filtering and
- converting sequential data. Furthermore it can layout data for reports.
- It is mainly used on mainframe platforms and can access several of the
- mainframe's native file formats. It is somewhat comparable to awk.
-
- .. versionadded:: 2.1
- """
- name = 'Easytrieve'
- aliases = ['easytrieve']
- filenames = ['*.ezt', '*.mac']
- mimetypes = ['text/x-easytrieve']
- flags = 0
-
- # Note: We cannot use r'\b' at the start and end of keywords because
- # Easytrieve Plus delimiter characters are:
- #
- # * space ( )
- # * apostrophe (')
- # * period (.)
- # * comma (,)
- # * paranthesis ( and )
- # * colon (:)
- #
- # Additionally words end once a '*' appears, indicatins a comment.
- _DELIMITERS = r' \'.,():\n'
- _DELIMITERS_OR_COMENT = _DELIMITERS + '*'
- _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
- _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
- _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMENT + ']'
+
+
+
+class EasytrieveLexer(RegexLexer):
+ """
+ Easytrieve Plus is a programming language for extracting, filtering and
+ converting sequential data. Furthermore, it can lay out data for reports.
+ It is mainly used on mainframe platforms and can access several of the
+ mainframe's native file formats. It is somewhat comparable to awk.
+
+ .. versionadded:: 2.1
+ """
+ name = 'Easytrieve'
+ aliases = ['easytrieve']
+ filenames = ['*.ezt', '*.mac']
+ mimetypes = ['text/x-easytrieve']
+ flags = 0
+
+ # Note: We cannot use r'\b' at the start and end of keywords because
+ # Easytrieve Plus delimiter characters are:
+ #
+ # * space ( )
+ # * apostrophe (')
+ # * period (.)
+ # * comma (,)
+ # * parentheses ( and )
+ # * colon (:)
+ #
+ # Additionally, words end once a '*' appears, indicating a comment.
+ _DELIMITERS = r' \'.,():\n'
+ _DELIMITERS_OR_COMMENT = _DELIMITERS + '*'
+ _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
+ _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
+ _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMMENT + ']'
_OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]'
- _KEYWORDS = [
- 'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
- 'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
- 'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
- 'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
- 'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
- 'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
- 'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
- 'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
- 'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
- 'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
- 'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
- 'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
- 'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
- 'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
- 'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
- 'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
- 'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
- 'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
- 'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
- 'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
- 'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
- 'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
- 'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
- 'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
- 'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
- 'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
- 'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
- 'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
- 'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
- 'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
- 'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
- 'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
- ]
-
- tokens = {
- 'root': [
- (r'\*.*\n', Comment.Single),
- (r'\n+', Whitespace),
- # Macro argument
- (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
- 'after_macro_argument'),
- # Macro call
- (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
- (r'(FILE|MACRO|REPORT)(\s+)',
- bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
- (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
- bygroups(Keyword.Declaration, Operator)),
- (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
- bygroups(Keyword.Reserved, Operator)),
- (_OPERATORS_PATTERN, Operator),
- # Procedure declaration
- (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration, Whitespace)),
- (r'[0-9]+\.[0-9]*', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String),
- (r'\s+', Whitespace),
- # Everything else just belongs to a name
+ _KEYWORDS = [
+ 'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
+ 'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
+ 'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
+ 'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
+ 'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
+ 'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
+ 'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
+ 'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
+ 'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
+ 'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
+ 'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
+ 'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
+ 'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
+ 'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
+ 'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
+ 'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
+ 'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
+ 'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
+ 'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
+ 'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
+ 'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
+ 'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
+ 'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
+ 'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
+ 'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
+ 'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
+ 'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
+ 'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
+ 'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
+ 'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
+ 'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
+ 'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\*.*\n', Comment.Single),
+ (r'\n+', Whitespace),
+ # Macro argument
+ (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
+ 'after_macro_argument'),
+ # Macro call
+ (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
+ (r'(FILE|MACRO|REPORT)(\s+)',
+ bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
+ (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
+ bygroups(Keyword.Declaration, Operator)),
+ (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
+ bygroups(Keyword.Reserved, Operator)),
+ (_OPERATORS_PATTERN, Operator),
+ # Procedure declaration
+ (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
+ bygroups(Name.Function, Whitespace, Operator, Whitespace,
+ Keyword.Declaration, Whitespace)),
+ (r'[0-9]+\.[0-9]*', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String),
+ (r'\s+', Whitespace),
+ # Everything else just belongs to a name
(_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
- ],
- 'after_declaration': [
- (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
+ ],
+ 'after_declaration': [
+ (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
default('#pop'),
- ],
- 'after_macro_argument': [
- (r'\*.*\n', Comment.Single, '#pop'),
- (r'\s+', Whitespace, '#pop'),
- (_OPERATORS_PATTERN, Operator, '#pop'),
- (r"'(''|[^'])*'", String, '#pop'),
- # Everything else just belongs to a name
+ ],
+ 'after_macro_argument': [
+ (r'\*.*\n', Comment.Single, '#pop'),
+ (r'\s+', Whitespace, '#pop'),
+ (_OPERATORS_PATTERN, Operator, '#pop'),
+ (r"'(''|[^'])*'", String, '#pop'),
+ # Everything else just belongs to a name
(_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
- ],
- }
- _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
- _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')
-
- def analyse_text(text):
- """
- Perform a structural analysis for basic Easytrieve constructs.
- """
- result = 0.0
- lines = text.split('\n')
- hasEndProc = False
- hasHeaderComment = False
- hasFile = False
- hasJob = False
- hasProc = False
- hasParm = False
- hasReport = False
-
- def isCommentLine(line):
- return EasytrieveLexer._COMMENT_LINE_REGEX.match(lines[0]) is not None
-
- def isEmptyLine(line):
- return not bool(line.strip())
-
- # Remove possible empty lines and header comments.
- while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
- if not isEmptyLine(lines[0]):
- hasHeaderComment = True
- del lines[0]
-
- if EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
- # Looks like an Easytrieve macro.
- result = 0.4
- if hasHeaderComment:
- result += 0.4
- else:
- # Scan the source for lines starting with indicators.
- for line in lines:
- words = line.split()
- if (len(words) >= 2):
- firstWord = words[0]
- if not hasReport:
- if not hasJob:
- if not hasFile:
- if not hasParm:
- if firstWord == 'PARM':
- hasParm = True
- if firstWord == 'FILE':
- hasFile = True
- if firstWord == 'JOB':
- hasJob = True
- elif firstWord == 'PROC':
- hasProc = True
- elif firstWord == 'END-PROC':
- hasEndProc = True
- elif firstWord == 'REPORT':
- hasReport = True
-
- # Weight the findings.
- if hasJob and (hasProc == hasEndProc):
- if hasHeaderComment:
- result += 0.1
- if hasParm:
- if hasProc:
- # Found PARM, JOB and PROC/END-PROC:
- # pretty sure this is Easytrieve.
- result += 0.8
- else:
- # Found PARAM and JOB: probably this is Easytrieve
- result += 0.5
- else:
- # Found JOB and possibly other keywords: might be Easytrieve
- result += 0.11
- if hasParm:
- # Note: PARAM is not a proper English word, so this is
- # regarded a much better indicator for Easytrieve than
- # the other words.
- result += 0.2
- if hasFile:
- result += 0.01
- if hasReport:
- result += 0.01
- assert 0.0 <= result <= 1.0
- return result
-
-
-class JclLexer(RegexLexer):
- """
+ ],
+ }
+ _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
+ _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')
+
+ def analyse_text(text):
+ """
+ Perform a structural analysis for basic Easytrieve constructs.
+ """
+ result = 0.0
+ lines = text.split('\n')
+ hasEndProc = False
+ hasHeaderComment = False
+ hasFile = False
+ hasJob = False
+ hasProc = False
+ hasParm = False
+ hasReport = False
+
+ def isCommentLine(line):
+ return EasytrieveLexer._COMMENT_LINE_REGEX.match(line) is not None
+
+ def isEmptyLine(line):
+ return not bool(line.strip())
+
+ # Remove possible empty lines and header comments.
+ while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
+ if not isEmptyLine(lines[0]):
+ hasHeaderComment = True
+ del lines[0]
+
+ if EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
+ # Looks like an Easytrieve macro.
+ result = 0.4
+ if hasHeaderComment:
+ result += 0.4
+ else:
+ # Scan the source for lines starting with indicators.
+ for line in lines:
+ words = line.split()
+ if (len(words) >= 2):
+ firstWord = words[0]
+ if not hasReport:
+ if not hasJob:
+ if not hasFile:
+ if not hasParm:
+ if firstWord == 'PARM':
+ hasParm = True
+ if firstWord == 'FILE':
+ hasFile = True
+ if firstWord == 'JOB':
+ hasJob = True
+ elif firstWord == 'PROC':
+ hasProc = True
+ elif firstWord == 'END-PROC':
+ hasEndProc = True
+ elif firstWord == 'REPORT':
+ hasReport = True
+
+ # Weight the findings.
+ if hasJob and (hasProc == hasEndProc):
+ if hasHeaderComment:
+ result += 0.1
+ if hasParm:
+ if hasProc:
+ # Found PARM, JOB and PROC/END-PROC:
+ # pretty sure this is Easytrieve.
+ result += 0.8
+ else:
+ # Found PARM and JOB: probably this is Easytrieve
+ result += 0.5
+ else:
+ # Found JOB and possibly other keywords: might be Easytrieve
+ result += 0.11
+ if hasParm:
+ # Note: PARM is not a proper English word, so it is
+ # regarded as a much better indicator for Easytrieve
+ # than the other words.
+ result += 0.2
+ if hasFile:
+ result += 0.01
+ if hasReport:
+ result += 0.01
+ assert 0.0 <= result <= 1.0
+ return result
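A hedged sketch of how this scoring behaves on a tiny macro (illustrative input; the pygments.lexers.scripting import path is assumed for this vendored module):

    from pygments.lexers.scripting import EasytrieveLexer

    macro_source = (
        "* Header comment describing the macro.\n"
        "MACRO\n"
        "   DISPLAY 'HELLO'\n"
    )
    # 0.4 for the MACRO header plus 0.4 for the leading comment line -> 0.8
    print(EasytrieveLexer.analyse_text(macro_source))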
+
+
+class JclLexer(RegexLexer):
+ """
`Job Control Language (JCL)
<http://publibz.boulder.ibm.com/cgi-bin/bookmgr_OS390/BOOKS/IEA2B570/CCONTENTS>`_
- is a scripting language used on mainframe platforms to instruct the system
- on how to run a batch job or start a subsystem. It is somewhat
- comparable to MS DOS batch and Unix shell scripts.
-
- .. versionadded:: 2.1
- """
- name = 'JCL'
- aliases = ['jcl']
- filenames = ['*.jcl']
- mimetypes = ['text/x-jcl']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'//\*.*\n', Comment.Single),
- (r'//', Keyword.Pseudo, 'statement'),
- (r'/\*', Keyword.Pseudo, 'jes2_statement'),
- # TODO: JES3 statement
- (r'.*\n', Other) # Input text or inline code in any language.
- ],
- 'statement': [
- (r'\s*\n', Whitespace, '#pop'),
+ is a scripting language used on mainframe platforms to instruct the system
+ on how to run a batch job or start a subsystem. It is somewhat
+ comparable to MS DOS batch and Unix shell scripts.
+
+ .. versionadded:: 2.1
+ """
+ name = 'JCL'
+ aliases = ['jcl']
+ filenames = ['*.jcl']
+ mimetypes = ['text/x-jcl']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'//\*.*\n', Comment.Single),
+ (r'//', Keyword.Pseudo, 'statement'),
+ (r'/\*', Keyword.Pseudo, 'jes2_statement'),
+ # TODO: JES3 statement
+ (r'.*\n', Other) # Input text or inline code in any language.
+ ],
+ 'statement': [
+ (r'\s*\n', Whitespace, '#pop'),
(r'([a-z]\w*)(\s+)(exec|job)(\s*)',
- bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
- 'option'),
+ bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
+ 'option'),
(r'[a-z]\w*', Name.Variable, 'statement_command'),
- (r'\s+', Whitespace, 'statement_command'),
- ],
- 'statement_command': [
- (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
- r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
- include('option')
- ],
- 'jes2_statement': [
- (r'\s*\n', Whitespace, '#pop'),
- (r'\$', Keyword, 'option'),
- (r'\b(jobparam|message|netacct|notify|output|priority|route|'
- r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
- ],
- 'option': [
- # (r'\n', Text, 'root'),
- (r'\*', Name.Builtin),
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=&%]', Operator),
+ (r'\s+', Whitespace, 'statement_command'),
+ ],
+ 'statement_command': [
+ (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
+ r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
+ include('option')
+ ],
+ 'jes2_statement': [
+ (r'\s*\n', Whitespace, '#pop'),
+ (r'\$', Keyword, 'option'),
+ (r'\b(jobparam|message|netacct|notify|output|priority|route|'
+ r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
+ ],
+ 'option': [
+ # (r'\n', Text, 'root'),
+ (r'\*', Name.Builtin),
+ (r'[\[\](){}<>;,]', Punctuation),
+ (r'[-+*/=&%]', Operator),
(r'[a-z_]\w*', Name),
(r'\d+\.\d*', Number.Float),
(r'\.\d+', Number.Float),
(r'\d+', Number.Integer),
- (r"'", String, 'option_string'),
- (r'[ \t]+', Whitespace, 'option_comment'),
- (r'\.', Punctuation),
- ],
- 'option_string': [
- (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
- (r"''", String),
- (r"[^']", String),
- (r"'", String, '#pop'),
- ],
- 'option_comment': [
- # (r'\n', Text, 'root'),
- (r'.+', Comment.Single),
- ]
- }
-
- _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
- re.IGNORECASE)
-
- def analyse_text(text):
- """
- Recognize JCL job by header.
- """
- result = 0.0
- lines = text.split('\n')
- if len(lines) > 0:
- if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
- result = 1.0
- assert 0.0 <= result <= 1.0
- return result
+ (r"'", String, 'option_string'),
+ (r'[ \t]+', Whitespace, 'option_comment'),
+ (r'\.', Punctuation),
+ ],
+ 'option_string': [
+ (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
+ (r"''", String),
+ (r"[^']", String),
+ (r"'", String, '#pop'),
+ ],
+ 'option_comment': [
+ # (r'\n', Text, 'root'),
+ (r'.+', Comment.Single),
+ ]
+ }
+
+ _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
+ re.IGNORECASE)
+
+ def analyse_text(text):
+ """
+ Recognize JCL job by header.
+ """
+ result = 0.0
+ lines = text.split('\n')
+ if len(lines) > 0:
+ if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
+ result = 1.0
+ assert 0.0 <= result <= 1.0
+ return result
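As a quick, hedged illustration (sample job card invented; import path assumed to be the usual pygments.lexers.scripting), only the first line matters to this check:

    from pygments.lexers.scripting import JclLexer

    jcl = "//MYJOB    JOB (ACCT),'EXAMPLE'\n//STEP1   EXEC PGM=IEFBR14\n"
    print(JclLexer.analyse_text(jcl))          # 1.0 - the first line matches the JOB header pattern
    print(JclLexer.analyse_text("echo hi\n"))  # 0.0 - no JCL job card on the first line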
class MiniScriptLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/shell.py b/contrib/python/Pygments/py3/pygments/lexers/shell.py
index fd26a4b3ea..0e3881376c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/shell.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/shell.py
@@ -1,132 +1,132 @@
-"""
- pygments.lexers.shell
- ~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various shells.
-
+"""
+ pygments.lexers.shell
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various shells.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
- include, default, this, using, words
-from pygments.token import Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import shebang_matches
-
-
-__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
+ include, default, this, using, words
+from pygments.token import Punctuation, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Generic
+from pygments.util import shebang_matches
+
+
+__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
'ExeclineLexer']
-
-line_re = re.compile('.*?\n')
-
-
-class BashLexer(RegexLexer):
- """
+
+line_re = re.compile('.*?\n')
+
+
+class BashLexer(RegexLexer):
+ """
Lexer for (ba|k|z|)sh shell scripts.
-
- .. versionadded:: 0.6
- """
-
- name = 'Bash'
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'Bash'
aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
- filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
+ filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'*.exheres-0', '*.exlib', '*.zsh',
'.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
'.kshrc', 'kshrc',
'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- include('interp'),
- ],
- 'interp': [
- (r'\$\(\(', Keyword, 'math'),
- (r'\$\(', Keyword, 'paren'),
- (r'\$\{#?', String.Interpol, 'curly'),
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'`', String.Backtick, 'backticks'),
+ include('data'),
+ include('interp'),
+ ],
+ 'interp': [
+ (r'\$\(\(', Keyword, 'math'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\$\{#?', String.Interpol, 'curly'),
(r'\$[a-zA-Z_]\w*', Name.Variable), # user variable
- (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
- (r'\$', Text),
- ],
- 'basic': [
+ (r'\$(?:\d+|[#$?!_*@-])', Name.Variable), # builtin
+ (r'\$', Text),
+ ],
+ 'basic': [
(r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
- r'select|continue|until|esac|elif)(\s*)\b',
- bygroups(Keyword, Text)),
- (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
- r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
- r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
- r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
- r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
- r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
- Name.Builtin),
- (r'\A#!.+\n', Comment.Hashbang),
- (r'#.*\n', Comment.Single),
- (r'\\[\w\W]', String.Escape),
+ r'select|continue|until|esac|elif)(\s*)\b',
+ bygroups(Keyword, Text)),
+ (r'\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|'
+ r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
+ r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
+ r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
+ r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
+ r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
+ Name.Builtin),
+ (r'\A#!.+\n', Comment.Hashbang),
+ (r'#.*\n', Comment.Single),
+ (r'\\[\w\W]', String.Escape),
(r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]', Operator),
- (r'<<<', Operator), # here-string
- (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r'&&|\|\|', Operator),
- ],
- 'data': [
+ (r'[\[\]{}()=]', Operator),
+ (r'<<<', Operator), # here-string
+ (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r'&&|\|\|', Operator),
+ ],
+ 'data': [
(r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r"(?s)'.*?'", String.Single),
- (r';', Punctuation),
- (r'&', Punctuation),
- (r'\|', Punctuation),
- (r'\s+', Text),
+ (r'"', String.Double, 'string'),
+ (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r"(?s)'.*?'", String.Single),
+ (r';', Punctuation),
+ (r'&', Punctuation),
+ (r'\|', Punctuation),
+ (r'\s+', Text),
(r'\d+\b', Number),
- (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
- (r'<', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'curly': [
- (r'\}', String.Interpol, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$\\]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'math': [
- (r'\)\)', Keyword, '#pop'),
- (r'[-+*/%^|&]|\*\*|\|\|', Operator),
- (r'\d+#\d+', Number),
- (r'\d+#(?! )', Number),
- (r'\d+', Number),
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'(ba|z|)sh'):
- return 1
- if text.startswith('$ '):
- return 0.2
-
-
+ (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
+ (r'<', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+ include('interp'),
+ ],
+ 'curly': [
+ (r'\}', String.Interpol, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$\\]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'math': [
+ (r'\)\)', Keyword, '#pop'),
+ (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+ (r'\d+#\d+', Number),
+ (r'\d+#(?! )', Number),
+ (r'\d+', Number),
+ include('root'),
+ ],
+ 'backticks': [
+ (r'`', String.Backtick, '#pop'),
+ include('root'),
+ ],
+ }
+
+ def analyse_text(text):
+ if shebang_matches(text, r'(ba|z|)sh'):
+ return 1
+ if text.startswith('$ '):
+ return 0.2
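A small sketch of the two signals used above (not part of this patch; sample inputs are illustrative):

    from pygments.lexers.shell import BashLexer

    print(BashLexer.analyse_text("#!/usr/bin/env bash\necho hi\n"))  # 1 - shebang names an sh-like shell
    print(BashLexer.analyse_text("$ ls\n"))                          # 0.2 - prompt-like start, weak signal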
+
+
class SlurmBashLexer(BashLexer):
"""
Lexer for (ba|k|z|)sh Slurm scripts.
@@ -150,25 +150,25 @@ class SlurmBashLexer(BashLexer):
yield index, token, value
-class ShellSessionBaseLexer(Lexer):
- """
+class ShellSessionBaseLexer(Lexer):
+ """
Base lexer for shell sessions.
-
- .. versionadded:: 2.1
- """
+
+ .. versionadded:: 2.1
+ """
_venv = re.compile(r'^(\([^)]*\))(\s*)')
- def get_tokens_unprocessed(self, text):
- innerlexer = self._innerLexerCls(**self.options)
-
- pos = 0
- curcode = ''
- insertions = []
+ def get_tokens_unprocessed(self, text):
+ innerlexer = self._innerLexerCls(**self.options)
+
+ pos = 0
+ curcode = ''
+ insertions = []
backslash_continuation = False
-
- for match in line_re.finditer(text):
- line = match.group()
+
+ for match in line_re.finditer(text):
+ line = match.group()
venv_match = self._venv.match(line)
if venv_match:
@@ -183,15 +183,15 @@ class ShellSessionBaseLexer(Lexer):
m = self._ps1rgx.match(line)
if m:
- # To support output lexers (say diff output), the output
- # needs to be broken by prompts whenever the output lexer
- # changes.
- if not insertions:
- pos = match.start()
-
- insertions.append((len(curcode),
- [(0, Generic.Prompt, m.group(1))]))
- curcode += m.group(2)
+ # To support output lexers (say diff output), the output
+ # needs to be broken by prompts whenever the output lexer
+ # changes.
+ if not insertions:
+ pos = match.start()
+
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, m.group(1))]))
+ curcode += m.group(2)
backslash_continuation = curcode.endswith('\\\n')
elif backslash_continuation:
if line.startswith(self._ps2):
@@ -201,327 +201,327 @@ class ShellSessionBaseLexer(Lexer):
else:
curcode += line
backslash_continuation = curcode.endswith('\\\n')
- else:
- if insertions:
- toks = innerlexer.get_tokens_unprocessed(curcode)
- for i, t, v in do_insertions(insertions, toks):
- yield pos+i, t, v
- yield match.start(), Generic.Output, line
- insertions = []
- curcode = ''
- if insertions:
- for i, t, v in do_insertions(insertions,
- innerlexer.get_tokens_unprocessed(curcode)):
- yield pos+i, t, v
-
-
-class BashSessionLexer(ShellSessionBaseLexer):
- """
+ else:
+ if insertions:
+ toks = innerlexer.get_tokens_unprocessed(curcode)
+ for i, t, v in do_insertions(insertions, toks):
+ yield pos+i, t, v
+ yield match.start(), Generic.Output, line
+ insertions = []
+ curcode = ''
+ if insertions:
+ for i, t, v in do_insertions(insertions,
+ innerlexer.get_tokens_unprocessed(curcode)):
+ yield pos+i, t, v
+
+
+class BashSessionLexer(ShellSessionBaseLexer):
+ """
Lexer for Bash shell sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 1.1
- """
-
- name = 'Bash Session'
- aliases = ['console', 'shell-session']
- filenames = ['*.sh-session', '*.shell-session']
- mimetypes = ['application/x-shell-session', 'application/x-sh-session']
-
- _innerLexerCls = BashLexer
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Bash Session'
+ aliases = ['console', 'shell-session']
+ filenames = ['*.sh-session', '*.shell-session']
+ mimetypes = ['application/x-shell-session', 'application/x-sh-session']
+
+ _innerLexerCls = BashLexer
_ps1rgx = re.compile(
- r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
+ r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
_ps2 = '> '
-
-
-class BatchLexer(RegexLexer):
- """
- Lexer for the DOS/Windows Batch file format.
-
- .. versionadded:: 0.7
- """
- name = 'Batchfile'
+
+
+class BatchLexer(RegexLexer):
+ """
+ Lexer for the DOS/Windows Batch file format.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Batchfile'
aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
- filenames = ['*.bat', '*.cmd']
- mimetypes = ['application/x-dos-batch']
-
- flags = re.MULTILINE | re.IGNORECASE
-
- _nl = r'\n\x1a'
- _punct = r'&<>|'
- _ws = r'\t\v\f\r ,;=\xa0'
+ filenames = ['*.bat', '*.cmd']
+ mimetypes = ['application/x-dos-batch']
+
+ flags = re.MULTILINE | re.IGNORECASE
+
+ _nl = r'\n\x1a'
+ _punct = r'&<>|'
+ _ws = r'\t\v\f\r ,;=\xa0'
_nlws = r'\s\x1a\xa0,;='
- _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
- _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
- (_nl, _ws, _nl, _punct))
- _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
- _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
+ _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
+ _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
+ (_nl, _ws, _nl, _punct))
+ _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
+ _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
_label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl)
_label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl)
- _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
- _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
+ _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
+ _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
_string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
- _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
- r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
- r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
- r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
- r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
- (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
+ _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
+ r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
+ r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
+ r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
+ r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
+ (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
_core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct)
_core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct)
- _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
- _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
- _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token))
-
- def _make_begin_state(compound, _core_token=_core_token,
- _core_token_compound=_core_token_compound,
- _keyword_terminator=_keyword_terminator,
- _nl=_nl, _punct=_punct, _string=_string,
- _space=_space, _start_label=_start_label,
- _stoken=_stoken, _token_terminator=_token_terminator,
- _variable=_variable, _ws=_ws):
- rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
- ')' if compound else '')
- rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
- rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
- set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
- suffix = ''
- if compound:
- _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
- _token_terminator = r'(?:(?=\))|%s)' % _token_terminator
- suffix = '/compound'
- return [
- ((r'\)', Punctuation, '#pop') if compound else
- (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
- Comment.Single)),
- (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
- (_space, using(this, state='text')),
- include('redirect%s' % suffix),
- (r'[%s]+' % _nl, Text),
- (r'\(', Punctuation, 'root/compound'),
- (r'@+', Punctuation),
- (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
- r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
- (_nl, _token_terminator, _space,
- _core_token_compound if compound else _core_token, _nl, _nl),
- bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
- (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
- (_keyword_terminator, rest, _nl, _nl, rest),
- bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
- (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
- 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
- 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
- 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
- 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
- 'title', 'type', 'ver', 'verify', 'vol'),
- suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
- (r'(call)(%s?)(:)' % _space,
- bygroups(Keyword, using(this, state='text'), Punctuation),
- 'call%s' % suffix),
- (r'call%s' % _keyword_terminator, Keyword),
- (r'(for%s(?!\^))(%s)(/f%s)' %
- (_token_terminator, _space, _token_terminator),
- bygroups(Keyword, using(this, state='text'), Keyword),
- ('for/f', 'for')),
- (r'(for%s(?!\^))(%s)(/l%s)' %
- (_token_terminator, _space, _token_terminator),
- bygroups(Keyword, using(this, state='text'), Keyword),
- ('for/l', 'for')),
- (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
- (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
- bygroups(Keyword, using(this, state='text'), Punctuation),
- 'label%s' % suffix),
- (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
- (_token_terminator, _space, _token_terminator, _space,
- _token_terminator, _space),
- bygroups(Keyword, using(this, state='text'), Keyword,
- using(this, state='text'), Keyword,
- using(this, state='text')), ('(?', 'if')),
- (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
- (_token_terminator, _space, _stoken, _keyword_terminator,
- rest_of_line_compound if compound else rest_of_line),
- Comment.Single, 'follow%s' % suffix),
- (r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
- bygroups(Keyword, using(this, state='text'), Keyword),
- 'arithmetic%s' % suffix),
- (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
- r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
- (_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
- ')' if compound else '', _nl, _nl),
- bygroups(Keyword, using(this, state='text'), Keyword,
- using(this, state='text'), using(this, state='variable'),
- Punctuation),
- 'follow%s' % suffix),
- default('follow%s' % suffix)
- ]
-
- def _make_follow_state(compound, _label=_label,
- _label_compound=_label_compound, _nl=_nl,
- _space=_space, _start_label=_start_label,
- _token=_token, _token_compound=_token_compound,
- _ws=_ws):
- suffix = '/compound' if compound else ''
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state += [
- (r'%s([%s]*)(%s)(.*)' %
- (_start_label, _ws, _label_compound if compound else _label),
- bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
- include('redirect%s' % suffix),
- (r'(?=[%s])' % _nl, Text, '#pop'),
- (r'\|\|?|&&?', Punctuation, '#pop'),
- include('text')
- ]
- return state
-
- def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
+ _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
+ _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
+ _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
+ (_punct, _string, _variable, _core_token))
+
+ def _make_begin_state(compound, _core_token=_core_token,
+ _core_token_compound=_core_token_compound,
+ _keyword_terminator=_keyword_terminator,
+ _nl=_nl, _punct=_punct, _string=_string,
+ _space=_space, _start_label=_start_label,
+ _stoken=_stoken, _token_terminator=_token_terminator,
+ _variable=_variable, _ws=_ws):
+ rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
+ ')' if compound else '')
+ rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
+ rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
+ set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
+ suffix = ''
+ if compound:
+ _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
+ _token_terminator = r'(?:(?=\))|%s)' % _token_terminator
+ suffix = '/compound'
+ return [
+ ((r'\)', Punctuation, '#pop') if compound else
+ (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
+ Comment.Single)),
+ (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
+ (_space, using(this, state='text')),
+ include('redirect%s' % suffix),
+ (r'[%s]+' % _nl, Text),
+ (r'\(', Punctuation, 'root/compound'),
+ (r'@+', Punctuation),
+ (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
+ r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
+ (_nl, _token_terminator, _space,
+ _core_token_compound if compound else _core_token, _nl, _nl),
+ bygroups(Keyword, using(this, state='text')),
+ 'follow%s' % suffix),
+ (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
+ (_keyword_terminator, rest, _nl, _nl, rest),
+ bygroups(Keyword, using(this, state='text')),
+ 'follow%s' % suffix),
+ (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
+ 'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
+ 'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
+ 'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
+ 'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
+ 'title', 'type', 'ver', 'verify', 'vol'),
+ suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
+ (r'(call)(%s?)(:)' % _space,
+ bygroups(Keyword, using(this, state='text'), Punctuation),
+ 'call%s' % suffix),
+ (r'call%s' % _keyword_terminator, Keyword),
+ (r'(for%s(?!\^))(%s)(/f%s)' %
+ (_token_terminator, _space, _token_terminator),
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ ('for/f', 'for')),
+ (r'(for%s(?!\^))(%s)(/l%s)' %
+ (_token_terminator, _space, _token_terminator),
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ ('for/l', 'for')),
+ (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
+ (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
+ bygroups(Keyword, using(this, state='text'), Punctuation),
+ 'label%s' % suffix),
+ (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
+ (_token_terminator, _space, _token_terminator, _space,
+ _token_terminator, _space),
+ bygroups(Keyword, using(this, state='text'), Keyword,
+ using(this, state='text'), Keyword,
+ using(this, state='text')), ('(?', 'if')),
+ (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
+ (_token_terminator, _space, _stoken, _keyword_terminator,
+ rest_of_line_compound if compound else rest_of_line),
+ Comment.Single, 'follow%s' % suffix),
+ (r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
+ bygroups(Keyword, using(this, state='text'), Keyword),
+ 'arithmetic%s' % suffix),
+ (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
+ r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
+ (_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
+ ')' if compound else '', _nl, _nl),
+ bygroups(Keyword, using(this, state='text'), Keyword,
+ using(this, state='text'), using(this, state='variable'),
+ Punctuation),
+ 'follow%s' % suffix),
+ default('follow%s' % suffix)
+ ]
+
+ def _make_follow_state(compound, _label=_label,
+ _label_compound=_label_compound, _nl=_nl,
+ _space=_space, _start_label=_start_label,
+ _token=_token, _token_compound=_token_compound,
+ _ws=_ws):
+ suffix = '/compound' if compound else ''
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state += [
+ (r'%s([%s]*)(%s)(.*)' %
+ (_start_label, _ws, _label_compound if compound else _label),
+ bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
+ include('redirect%s' % suffix),
+ (r'(?=[%s])' % _nl, Text, '#pop'),
+ (r'\|\|?|&&?', Punctuation, '#pop'),
+ include('text')
+ ]
+ return state
+
+ def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
_string=_string, _variable=_variable,
_ws=_ws, _nlws=_nlws):
- op = r'=+\-*/!~'
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state += [
- (r'0[0-7]+', Number.Oct),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'[(),]+', Punctuation),
- (r'([%s]|%%|\^\^)+' % op, Operator),
+ op = r'=+\-*/!~'
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state += [
+ (r'0[0-7]+', Number.Oct),
+ (r'0x[\da-f]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'[(),]+', Punctuation),
+ (r'([%s]|%%|\^\^)+' % op, Operator),
(r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' %
(_string, _variable, _nl, op, _nlws, _punct, _nlws,
- r'[^)]' if compound else r'[\w\W]'),
- using(this, state='variable')),
- (r'(?=[\x00|&])', Text, '#pop'),
- include('follow')
- ]
- return state
-
- def _make_call_state(compound, _label=_label,
- _label_compound=_label_compound):
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
- bygroups(Punctuation, Name.Label), '#pop'))
- return state
-
- def _make_label_state(compound, _label=_label,
- _label_compound=_label_compound, _nl=_nl,
- _punct=_punct, _string=_string, _variable=_variable):
- state = []
- if compound:
- state.append((r'(?=\))', Text, '#pop'))
- state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
- (_label_compound if compound else _label, _string,
- _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
- _punct, r')' if compound else ''),
- bygroups(Name.Label, Comment.Single), '#pop'))
- return state
-
- def _make_redirect_state(compound,
- _core_token_compound=_core_token_compound,
- _nl=_nl, _punct=_punct, _stoken=_stoken,
- _string=_string, _space=_space,
+ r'[^)]' if compound else r'[\w\W]'),
+ using(this, state='variable')),
+ (r'(?=[\x00|&])', Text, '#pop'),
+ include('follow')
+ ]
+ return state
+
+ def _make_call_state(compound, _label=_label,
+ _label_compound=_label_compound):
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
+ bygroups(Punctuation, Name.Label), '#pop'))
+ return state
+
+ def _make_label_state(compound, _label=_label,
+ _label_compound=_label_compound, _nl=_nl,
+ _punct=_punct, _string=_string, _variable=_variable):
+ state = []
+ if compound:
+ state.append((r'(?=\))', Text, '#pop'))
+ state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
+ (_label_compound if compound else _label, _string,
+ _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
+ _punct, r')' if compound else ''),
+ bygroups(Name.Label, Comment.Single), '#pop'))
+ return state
+
+ def _make_redirect_state(compound,
+ _core_token_compound=_core_token_compound,
+ _nl=_nl, _punct=_punct, _stoken=_stoken,
+ _string=_string, _space=_space,
_variable=_variable, _nlws=_nlws):
- stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token_compound))
- return [
+ stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
+ (_punct, _string, _variable, _core_token_compound))
+ return [
(r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' %
(_nlws, _nlws),
- bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
+ bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
(r'((?:(?<=[%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
(_nlws, _nl, _space, stoken_compound if compound else _stoken),
- bygroups(Number.Integer, Punctuation, using(this, state='text')))
- ]
-
- tokens = {
- 'root': _make_begin_state(False),
- 'follow': _make_follow_state(False),
- 'arithmetic': _make_arithmetic_state(False),
- 'call': _make_call_state(False),
- 'label': _make_label_state(False),
- 'redirect': _make_redirect_state(False),
- 'root/compound': _make_begin_state(True),
- 'follow/compound': _make_follow_state(True),
- 'arithmetic/compound': _make_arithmetic_state(True),
- 'call/compound': _make_call_state(True),
- 'label/compound': _make_label_state(True),
- 'redirect/compound': _make_redirect_state(True),
- 'variable-or-escape': [
- (_variable, Name.Variable),
- (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (_variable, Name.Variable),
- (r'\^!|%%', String.Escape),
- (r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
- default('#pop')
- ],
- 'sqstring': [
- include('variable-or-escape'),
- (r'[^%]+|%', String.Single)
- ],
- 'bqstring': [
- include('variable-or-escape'),
- (r'[^%]+|%', String.Backtick)
- ],
- 'text': [
- (r'"', String.Double, 'string'),
- include('variable-or-escape'),
+ bygroups(Number.Integer, Punctuation, using(this, state='text')))
+ ]
+
+ tokens = {
+ 'root': _make_begin_state(False),
+ 'follow': _make_follow_state(False),
+ 'arithmetic': _make_arithmetic_state(False),
+ 'call': _make_call_state(False),
+ 'label': _make_label_state(False),
+ 'redirect': _make_redirect_state(False),
+ 'root/compound': _make_begin_state(True),
+ 'follow/compound': _make_follow_state(True),
+ 'arithmetic/compound': _make_arithmetic_state(True),
+ 'call/compound': _make_call_state(True),
+ 'label/compound': _make_label_state(True),
+ 'redirect/compound': _make_redirect_state(True),
+ 'variable-or-escape': [
+ (_variable, Name.Variable),
+ (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (_variable, Name.Variable),
+ (r'\^!|%%', String.Escape),
+ (r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
+ default('#pop')
+ ],
+ 'sqstring': [
+ include('variable-or-escape'),
+ (r'[^%]+|%', String.Single)
+ ],
+ 'bqstring': [
+ include('variable-or-escape'),
+ (r'[^%]+|%', String.Backtick)
+ ],
+ 'text': [
+ (r'"', String.Double, 'string'),
+ include('variable-or-escape'),
(r'[^"%%^%s%s\d)]+|.' % (_nlws, _punct), Text)
- ],
- 'variable': [
- (r'"', String.Double, 'string'),
- include('variable-or-escape'),
- (r'[^"%%^%s]+|.' % _nl, Name.Variable)
- ],
- 'for': [
- (r'(%s)(in)(%s)(\()' % (_space, _space),
- bygroups(using(this, state='text'), Keyword,
- using(this, state='text'), Punctuation), '#pop'),
- include('follow')
- ],
- 'for2': [
- (r'\)', Punctuation),
- (r'(%s)(do%s)' % (_space, _token_terminator),
- bygroups(using(this, state='text'), Keyword), '#pop'),
- (r'[%s]+' % _nl, Text),
- include('follow')
- ],
- 'for/f': [
+ ],
+ 'variable': [
+ (r'"', String.Double, 'string'),
+ include('variable-or-escape'),
+ (r'[^"%%^%s]+|.' % _nl, Name.Variable)
+ ],
+ 'for': [
+ (r'(%s)(in)(%s)(\()' % (_space, _space),
+ bygroups(using(this, state='text'), Keyword,
+ using(this, state='text'), Punctuation), '#pop'),
+ include('follow')
+ ],
+ 'for2': [
+ (r'\)', Punctuation),
+ (r'(%s)(do%s)' % (_space, _token_terminator),
+ bygroups(using(this, state='text'), Keyword), '#pop'),
+ (r'[%s]+' % _nl, Text),
+ include('follow')
+ ],
+ 'for/f': [
(r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws),
- bygroups(String.Double, using(this, state='string'), Text,
- Punctuation)),
- (r'"', String.Double, ('#pop', 'for2', 'string')),
+ bygroups(String.Double, using(this, state='string'), Text,
+ Punctuation)),
+ (r'"', String.Double, ('#pop', 'for2', 'string')),
(r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws),
- bygroups(using(this, state='sqstring'), Text, Punctuation)),
+ bygroups(using(this, state='sqstring'), Text, Punctuation)),
(r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws),
- bygroups(using(this, state='bqstring'), Text, Punctuation)),
- include('for2')
- ],
- 'for/l': [
- (r'-?\d+', Number.Integer),
- include('for2')
- ],
- 'if': [
- (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
- (_token_terminator, _space),
- bygroups(Keyword, using(this, state='text'),
- Number.Integer), '#pop'),
- (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
- bygroups(Keyword, using(this, state='text'),
- using(this, state='variable')), '#pop'),
- (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
- bygroups(Keyword, using(this, state='text')), '#pop'),
- (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
- bygroups(using(this, state='arithmetic'), Operator.Word,
- using(this, state='arithmetic')), '#pop'),
+ bygroups(using(this, state='bqstring'), Text, Punctuation)),
+ include('for2')
+ ],
+ 'for/l': [
+ (r'-?\d+', Number.Integer),
+ include('for2')
+ ],
+ 'if': [
+ (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
+ (_token_terminator, _space),
+ bygroups(Keyword, using(this, state='text'),
+ Number.Integer), '#pop'),
+ (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
+ bygroups(Keyword, using(this, state='text'),
+ using(this, state='variable')), '#pop'),
+ (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
+ bygroups(Keyword, using(this, state='text')), '#pop'),
+ (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
+ bygroups(using(this, state='arithmetic'), Operator.Word,
+ using(this, state='arithmetic')), '#pop'),
(_stoken, using(this, state='text'), ('#pop', 'if2')),
],
'if2': [
@@ -529,157 +529,157 @@ class BatchLexer(RegexLexer):
bygroups(using(this, state='text'), Operator,
using(this, state='text')), '#pop'),
(r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
- bygroups(using(this, state='text'), Operator.Word,
- using(this, state='text')), '#pop')
- ],
- '(?': [
- (_space, using(this, state='text')),
- (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
- default('#pop')
- ],
- 'else?': [
- (_space, using(this, state='text')),
- (r'else%s' % _token_terminator, Keyword, '#pop'),
- default('#pop')
- ]
- }
-
-
-class MSDOSSessionLexer(ShellSessionBaseLexer):
- """
+ bygroups(using(this, state='text'), Operator.Word,
+ using(this, state='text')), '#pop')
+ ],
+ '(?': [
+ (_space, using(this, state='text')),
+ (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
+ default('#pop')
+ ],
+ 'else?': [
+ (_space, using(this, state='text')),
+ (r'else%s' % _token_terminator, Keyword, '#pop'),
+ default('#pop')
+ ]
+ }
+
+
+class MSDOSSessionLexer(ShellSessionBaseLexer):
+ """
Lexer for MS DOS shell sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 2.1
- """
-
- name = 'MSDOS Session'
- aliases = ['doscon']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = BatchLexer
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'MSDOS Session'
+ aliases = ['doscon']
+ filenames = []
+ mimetypes = []
+
+ _innerLexerCls = BatchLexer
_ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
- _ps2 = 'More? '
-
-
-class TcshLexer(RegexLexer):
- """
- Lexer for tcsh scripts.
-
- .. versionadded:: 0.10
- """
-
- name = 'Tcsh'
- aliases = ['tcsh', 'csh']
- filenames = ['*.tcsh', '*.csh']
- mimetypes = ['application/x-csh']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\$\(', Keyword, 'paren'),
- (r'\$\{#?', Keyword, 'curly'),
- (r'`', String.Backtick, 'backticks'),
- include('data'),
- ],
- 'basic': [
- (r'\b(if|endif|else|while|then|foreach|case|default|'
- r'continue|goto|breaksw|end|switch|endsw)\s*\b',
- Keyword),
- (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
- r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
- r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
- r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
- r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
- r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
- r'source|stop|suspend|source|suspend|telltc|time|'
- r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
- r'ver|wait|warp|watchlog|where|which)\s*\b',
- Name.Builtin),
- (r'#.*', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r';', Punctuation),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'\$#?(\w+|.)', Name.Variable),
- ],
- 'curly': [
- (r'\}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'backticks': [
- (r'`', String.Backtick, '#pop'),
- include('root'),
- ],
- }
-
-
-class TcshSessionLexer(ShellSessionBaseLexer):
- """
+ _ps2 = 'More? '
+
+
+class TcshLexer(RegexLexer):
+ """
+ Lexer for tcsh scripts.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Tcsh'
+ aliases = ['tcsh', 'csh']
+ filenames = ['*.tcsh', '*.csh']
+ mimetypes = ['application/x-csh']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\$\{#?', Keyword, 'curly'),
+ (r'`', String.Backtick, 'backticks'),
+ include('data'),
+ ],
+ 'basic': [
+ (r'\b(if|endif|else|while|then|foreach|case|default|'
+ r'continue|goto|breaksw|end|switch|endsw)\s*\b',
+ Keyword),
+ (r'\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|'
+ r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
+ r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
+ r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
+ r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
+ r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
+ r'source|stop|suspend|source|suspend|telltc|time|'
+ r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
+ r'ver|wait|warp|watchlog|where|which)\s*\b',
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ (r'\$#?(\w+|.)', Name.Variable),
+ ],
+ 'curly': [
+ (r'\}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'backticks': [
+ (r'`', String.Backtick, '#pop'),
+ include('root'),
+ ],
+ }
+
+
+class TcshSessionLexer(ShellSessionBaseLexer):
+ """
Lexer for Tcsh sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 2.1
- """
-
- name = 'Tcsh Session'
- aliases = ['tcshcon']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = TcshLexer
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Tcsh Session'
+ aliases = ['tcshcon']
+ filenames = []
+ mimetypes = []
+
+ _innerLexerCls = TcshLexer
_ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
- _ps2 = '? '
-
-
-class PowerShellLexer(RegexLexer):
- """
- For Windows PowerShell code.
-
- .. versionadded:: 1.5
- """
- name = 'PowerShell'
+ _ps2 = '? '
+
+
+class PowerShellLexer(RegexLexer):
+ """
+ For Windows PowerShell code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'PowerShell'
aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
- filenames = ['*.ps1', '*.psm1']
- mimetypes = ['text/x-powershell']
-
- flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
-
- keywords = (
- 'while validateset validaterange validatepattern validatelength '
- 'validatecount until trap switch return ref process param parameter in '
- 'if global: function foreach for finally filter end elseif else '
- 'dynamicparam do default continue cmdletbinding break begin alias \\? '
- '% #script #private #local #global mandatory parametersetname position '
- 'valuefrompipeline valuefrompipelinebypropertyname '
- 'valuefromremainingarguments helpmessage try catch throw').split()
-
- operators = (
- 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
- 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
- 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
- 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
- 'lt match ne not notcontains notlike notmatch or regex replace '
- 'wildcard').split()
-
- verbs = (
+ filenames = ['*.ps1', '*.psm1']
+ mimetypes = ['text/x-powershell']
+
+ flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
+
+ keywords = (
+ 'while validateset validaterange validatepattern validatelength '
+ 'validatecount until trap switch return ref process param parameter in '
+ 'if global: function foreach for finally filter end elseif else '
+ 'dynamicparam do default continue cmdletbinding break begin alias \\? '
+ '% #script #private #local #global mandatory parametersetname position '
+ 'valuefrompipeline valuefrompipelinebypropertyname '
+ 'valuefromremainingarguments helpmessage try catch throw').split()
+
+ operators = (
+ 'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
+ 'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
+ 'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
+ 'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
+ 'lt match ne not notcontains notlike notmatch or regex replace '
+ 'wildcard').split()
+
+ verbs = (
'write where watch wait use update unregister unpublish unprotect '
'unlock uninstall undo unblock trace test tee take sync switch '
'suspend submit stop step start split sort skip show set send select '
@@ -691,7 +691,7 @@ class PowerShellLexer(RegexLexer):
'dismount disconnect disable deny debug cxnew copy convertto '
'convertfrom convert connect confirm compress complete compare close '
'clear checkpoint block backup assert approve aggregate add').split()
-
+
aliases_ = (
'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
@@ -704,154 +704,154 @@ class PowerShellLexer(RegexLexer):
'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
'trcm type wget where wjb write').split()
- commenthelp = (
- 'component description example externalhelp forwardhelpcategory '
- 'forwardhelptargetname functionality inputs link '
- 'notes outputs parameter remotehelprunspace role synopsis').split()
-
- tokens = {
- 'root': [
- # we need to count pairs of parentheses for correct highlight
- # of '$(...)' blocks in strings
- (r'\(', Punctuation, 'child'),
- (r'\s+', Text),
- (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
- bygroups(Comment, String.Doc, Comment)),
- (r'#[^\n]*?$', Comment),
- (r'(&lt;|<)#', Comment.Multiline, 'multline'),
- (r'@"\n', String.Heredoc, 'heredoc-double'),
- (r"@'\n.*?\n'@", String.Heredoc),
- # escaped syntax
- (r'`[\'"$@-]', Punctuation),
- (r'"', String.Double, 'string'),
- (r"'([^']|'')*'", String.Single),
- (r'(\$|@@|@)((global|script|private|env):)?\w+',
- Name.Variable),
- (r'(%s)\b' % '|'.join(keywords), Keyword),
- (r'-(%s)\b' % '|'.join(operators), Operator),
- (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
+ commenthelp = (
+ 'component description example externalhelp forwardhelpcategory '
+ 'forwardhelptargetname functionality inputs link '
+ 'notes outputs parameter remotehelprunspace role synopsis').split()
+
+ tokens = {
+ 'root': [
+ # we need to count pairs of parentheses for correct highlight
+ # of '$(...)' blocks in strings
+ (r'\(', Punctuation, 'child'),
+ (r'\s+', Text),
+ (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
+ bygroups(Comment, String.Doc, Comment)),
+ (r'#[^\n]*?$', Comment),
+ (r'(&lt;|<)#', Comment.Multiline, 'multline'),
+ (r'@"\n', String.Heredoc, 'heredoc-double'),
+ (r"@'\n.*?\n'@", String.Heredoc),
+ # escaped syntax
+ (r'`[\'"$@-]', Punctuation),
+ (r'"', String.Double, 'string'),
+ (r"'([^']|'')*'", String.Single),
+ (r'(\$|@@|@)((global|script|private|env):)?\w+',
+ Name.Variable),
+ (r'(%s)\b' % '|'.join(keywords), Keyword),
+ (r'-(%s)\b' % '|'.join(operators), Operator),
+ (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
(r'(%s)\s' % '|'.join(aliases_), Name.Builtin),
- (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
- (r'-[a-z_]\w*', Name),
- (r'\w+', Name),
+ (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
+ (r'-[a-z_]\w*', Name),
+ (r'\w+', Name),
(r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
- ],
- 'child': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'multline': [
- (r'[^#&.]+', Comment.Multiline),
- (r'#(>|&gt;)', Comment.Multiline, '#pop'),
- (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
- (r'[#&.]', Comment.Multiline),
- ],
- 'string': [
- (r"`[0abfnrtv'\"$`]", String.Escape),
- (r'[^$`"]+', String.Double),
- (r'\$\(', Punctuation, 'child'),
- (r'""', String.Double),
- (r'[`$]', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'heredoc-double': [
- (r'\n"@', String.Heredoc, '#pop'),
- (r'\$\(', Punctuation, 'child'),
- (r'[^@\n]+"]', String.Heredoc),
- (r".", String.Heredoc),
- ]
- }
-
-
-class PowerShellSessionLexer(ShellSessionBaseLexer):
- """
+ ],
+ 'child': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'multline': [
+ (r'[^#&.]+', Comment.Multiline),
+ (r'#(>|&gt;)', Comment.Multiline, '#pop'),
+ (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
+ (r'[#&.]', Comment.Multiline),
+ ],
+ 'string': [
+ (r"`[0abfnrtv'\"$`]", String.Escape),
+ (r'[^$`"]+', String.Double),
+ (r'\$\(', Punctuation, 'child'),
+ (r'""', String.Double),
+ (r'[`$]', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'heredoc-double': [
+ (r'\n"@', String.Heredoc, '#pop'),
+ (r'\$\(', Punctuation, 'child'),
+ (r'[^@\n]+"]', String.Heredoc),
+ (r".", String.Heredoc),
+ ]
+ }
+
+
+class PowerShellSessionLexer(ShellSessionBaseLexer):
+ """
Lexer for PowerShell sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 2.1
- """
-
- name = 'PowerShell Session'
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'PowerShell Session'
aliases = ['pwsh-session', 'ps1con']
- filenames = []
- mimetypes = []
-
- _innerLexerCls = PowerShellLexer
+ filenames = []
+ mimetypes = []
+
+ _innerLexerCls = PowerShellLexer
_ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
- _ps2 = '>> '
-
-
-class FishShellLexer(RegexLexer):
- """
- Lexer for Fish shell scripts.
-
- .. versionadded:: 2.1
- """
-
- name = 'Fish'
- aliases = ['fish', 'fishshell']
- filenames = ['*.fish', '*.load']
- mimetypes = ['application/x-fish']
-
- tokens = {
- 'root': [
- include('basic'),
- include('data'),
- include('interp'),
- ],
- 'interp': [
- (r'\$\(\(', Keyword, 'math'),
- (r'\(', Keyword, 'paren'),
- (r'\$#?(\w+|.)', Name.Variable),
- ],
- 'basic': [
- (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
- r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
- r'cd|count|test)(\s*)\b',
- bygroups(Keyword, Text)),
- (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
- r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
- r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
- r'fish_update_completions|fishd|funced|funcsave|functions|help|'
- r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
- r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
- r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
- Name.Builtin),
- (r'#.*\n', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]()=]', Operator),
- (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- ],
- 'data': [
- (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
- (r'"', String.Double, 'string'),
- (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r"(?s)'.*?'", String.Single),
- (r';', Punctuation),
- (r'&|\||\^|<|>', Operator),
- (r'\s+', Text),
- (r'\d+(?= |\Z)', Number),
- (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
- include('interp'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- 'math': [
- (r'\)\)', Keyword, '#pop'),
- (r'[-+*/%^|&]|\*\*|\|\|', Operator),
- (r'\d+#\d+', Number),
- (r'\d+#(?! )', Number),
- (r'\d+', Number),
- include('root'),
- ],
- }
+ _ps2 = '>> '
+
+
+class FishShellLexer(RegexLexer):
+ """
+ Lexer for Fish shell scripts.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'Fish'
+ aliases = ['fish', 'fishshell']
+ filenames = ['*.fish', '*.load']
+ mimetypes = ['application/x-fish']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ include('data'),
+ include('interp'),
+ ],
+ 'interp': [
+ (r'\$\(\(', Keyword, 'math'),
+ (r'\(', Keyword, 'paren'),
+ (r'\$#?(\w+|.)', Name.Variable),
+ ],
+ 'basic': [
+ (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
+ r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
+ r'cd|count|test)(\s*)\b',
+ bygroups(Keyword, Text)),
+ (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
+ r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
+ r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
+ r'fish_update_completions|fishd|funced|funcsave|functions|help|'
+ r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
+ r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
+ r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
+ Name.Builtin),
+ (r'#.*\n', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]()=]', Operator),
+ (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ ],
+ 'data': [
+ (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
+ (r'"', String.Double, 'string'),
+ (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r"(?s)'.*?'", String.Single),
+ (r';', Punctuation),
+ (r'&|\||\^|<|>', Operator),
+ (r'\s+', Text),
+ (r'\d+(?= |\Z)', Number),
+ (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+ include('interp'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ 'math': [
+ (r'\)\)', Keyword, '#pop'),
+ (r'[-+*/%^|&]|\*\*|\|\|', Operator),
+ (r'\d+#\d+', Number),
+ (r'\d+#(?! )', Number),
+ (r'\d+', Number),
+ include('root'),
+ ],
+ }
class ExeclineLexer(RegexLexer):
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py b/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
index ebb3311dd2..e67c4cf1c1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
@@ -1,194 +1,194 @@
-"""
- pygments.lexers.smalltalk
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for Smalltalk and related languages.
-
+"""
+ pygments.lexers.smalltalk
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Smalltalk and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SmalltalkLexer', 'NewspeakLexer']
-
-
-class SmalltalkLexer(RegexLexer):
- """
- For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
- Contributed by Stefan Matthias Aust.
- Rewritten by Nils Winter.
-
- .. versionadded:: 0.10
- """
- name = 'Smalltalk'
- filenames = ['*.st']
- aliases = ['smalltalk', 'squeak', 'st']
- mimetypes = ['text/x-smalltalk']
-
- tokens = {
- 'root': [
- (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
- include('squeak fileout'),
- include('whitespaces'),
- include('method definition'),
- (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
- include('objects'),
- (r'\^|\:=|\_', Operator),
- # temporaries
- (r'[\]({}.;!]', Text),
- ],
- 'method definition': [
- # Not perfect can't allow whitespaces at the beginning and the
- # without breaking everything
- (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
- (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
- bygroups(Name.Function, Text, Name.Variable, Text)),
- ],
- 'blockvariables': [
- include('whitespaces'),
- (r'(:)(\s*)(\w+)',
- bygroups(Operator, Text, Name.Variable)),
- (r'\|', Operator, '#pop'),
- default('#pop'), # else pop
- ],
- 'literals': [
- (r"'(''|[^'])*'", String, 'afterobject'),
- (r'\$.', String.Char, 'afterobject'),
- (r'#\(', String.Symbol, 'parenth'),
- (r'\)', Text, 'afterobject'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
- ],
- '_parenth_helper': [
- include('whitespaces'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
- (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
- # literals
- (r"'(''|[^'])*'", String),
- (r'\$.', String.Char),
- (r'#*\(', String.Symbol, 'inner_parenth'),
- ],
- 'parenth': [
- # This state is a bit tricky since
- # we can't just pop this state
- (r'\)', String.Symbol, ('root', 'afterobject')),
- include('_parenth_helper'),
- ],
- 'inner_parenth': [
- (r'\)', String.Symbol, '#pop'),
- include('_parenth_helper'),
- ],
- 'whitespaces': [
- # skip whitespace and comments
- (r'\s+', Text),
- (r'"(""|[^"])*"', Comment),
- ],
- 'objects': [
- (r'\[', Text, 'blockvariables'),
- (r'\]', Text, 'afterobject'),
- (r'\b(self|super|true|false|nil|thisContext)\b',
- Name.Builtin.Pseudo, 'afterobject'),
- (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
- (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
- (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
- String.Symbol, 'afterobject'),
- include('literals'),
- ],
- 'afterobject': [
- (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
- include('whitespaces'),
- (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
- Name.Builtin, '#pop'),
- (r'\b(new\b(?!:))', Name.Builtin),
- (r'\:=|\_', Operator, '#pop'),
- (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
- (r'\b[a-zA-Z]+\w*', Name.Function),
- (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
- (r'\.', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'[\])}]', Text),
- (r'[\[({]', Text, '#pop'),
- ],
- 'squeak fileout': [
- # Squeak fileout format (optional)
- (r'^"(""|[^"])*"!', Keyword),
- (r"^'(''|[^'])*'!", Keyword),
- (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
- bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
- (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
- bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
- (r'^(\w+)( subclass: )(#\w+)'
- r'(\s+instanceVariableNames: )(.*?)'
- r'(\s+classVariableNames: )(.*?)'
- r'(\s+poolDictionaries: )(.*?)'
- r'(\s+category: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
- String, Keyword, String, Keyword, String, Keyword)),
- (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String, Keyword)),
- (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
- (r'! !$', Keyword),
- ],
- }
-
-
-class NewspeakLexer(RegexLexer):
- """
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SmalltalkLexer', 'NewspeakLexer']
+
+
+class SmalltalkLexer(RegexLexer):
+ """
+ For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
+ Contributed by Stefan Matthias Aust.
+ Rewritten by Nils Winter.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Smalltalk'
+ filenames = ['*.st']
+ aliases = ['smalltalk', 'squeak', 'st']
+ mimetypes = ['text/x-smalltalk']
+
+ tokens = {
+ 'root': [
+ (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
+ include('squeak fileout'),
+ include('whitespaces'),
+ include('method definition'),
+ (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
+ include('objects'),
+ (r'\^|\:=|\_', Operator),
+ # temporaries
+ (r'[\]({}.;!]', Text),
+ ],
+ 'method definition': [
+ # Not perfect can't allow whitespaces at the beginning and the
+ # without breaking everything
+ (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
+ (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
+ bygroups(Name.Function, Text, Name.Variable, Text)),
+ ],
+ 'blockvariables': [
+ include('whitespaces'),
+ (r'(:)(\s*)(\w+)',
+ bygroups(Operator, Text, Name.Variable)),
+ (r'\|', Operator, '#pop'),
+ default('#pop'), # else pop
+ ],
+ 'literals': [
+ (r"'(''|[^'])*'", String, 'afterobject'),
+ (r'\$.', String.Char, 'afterobject'),
+ (r'#\(', String.Symbol, 'parenth'),
+ (r'\)', Text, 'afterobject'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
+ ],
+ '_parenth_helper': [
+ include('whitespaces'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
+ (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
+ # literals
+ (r"'(''|[^'])*'", String),
+ (r'\$.', String.Char),
+ (r'#*\(', String.Symbol, 'inner_parenth'),
+ ],
+ 'parenth': [
+ # This state is a bit tricky since
+ # we can't just pop this state
+ (r'\)', String.Symbol, ('root', 'afterobject')),
+ include('_parenth_helper'),
+ ],
+ 'inner_parenth': [
+ (r'\)', String.Symbol, '#pop'),
+ include('_parenth_helper'),
+ ],
+ 'whitespaces': [
+ # skip whitespace and comments
+ (r'\s+', Text),
+ (r'"(""|[^"])*"', Comment),
+ ],
+ 'objects': [
+ (r'\[', Text, 'blockvariables'),
+ (r'\]', Text, 'afterobject'),
+ (r'\b(self|super|true|false|nil|thisContext)\b',
+ Name.Builtin.Pseudo, 'afterobject'),
+ (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
+ (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
+ (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
+ String.Symbol, 'afterobject'),
+ include('literals'),
+ ],
+ 'afterobject': [
+ (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
+ include('whitespaces'),
+ (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
+ Name.Builtin, '#pop'),
+ (r'\b(new\b(?!:))', Name.Builtin),
+ (r'\:=|\_', Operator, '#pop'),
+ (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
+ (r'\b[a-zA-Z]+\w*', Name.Function),
+ (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ (r';', Punctuation),
+ (r'[\])}]', Text),
+ (r'[\[({]', Text, '#pop'),
+ ],
+ 'squeak fileout': [
+ # Squeak fileout format (optional)
+ (r'^"(""|[^"])*"!', Keyword),
+ (r"^'(''|[^'])*'!", Keyword),
+ (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
+ (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
+ (r'^(\w+)( subclass: )(#\w+)'
+ r'(\s+instanceVariableNames: )(.*?)'
+ r'(\s+classVariableNames: )(.*?)'
+ r'(\s+poolDictionaries: )(.*?)'
+ r'(\s+category: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
+ String, Keyword, String, Keyword, String, Keyword)),
+ (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String, Keyword)),
+ (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
+ (r'! !$', Keyword),
+ ],
+ }
+
+
+class NewspeakLexer(RegexLexer):
+ """
For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
-
- .. versionadded:: 1.1
- """
- name = 'Newspeak'
- filenames = ['*.ns2']
- aliases = ['newspeak', ]
- mimetypes = ['text/x-newspeak']
-
- tokens = {
- 'root': [
- (r'\b(Newsqueak2)\b', Keyword.Declaration),
- (r"'[^']*'", String),
- (r'\b(class)(\s+)(\w+)(\s*)',
- bygroups(Keyword.Declaration, Text, Name.Class, Text)),
- (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
- Keyword),
- (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'(\w+)(\s*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- (r'<\w+>', Comment.Special),
- include('expressionstat'),
- include('whitespace')
- ],
-
- 'expressionstat': [
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'\d+', Number.Integer),
- (r':\w+', Name.Variable),
- (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
- (r'\w+:', Name.Function),
- (r'\w+', Name.Variable),
- (r'\(|\)', Punctuation),
- (r'\[|\]', Punctuation),
- (r'\{|\}', Punctuation),
-
- (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
- (r'\.|;', Punctuation),
- include('whitespace'),
- include('literals'),
- ],
- 'literals': [
- (r'\$.', String),
- (r"'[^']*'", String),
- (r"#'[^']*'", String.Symbol),
- (r"#\w+:?", String.Symbol),
- (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
- ],
- 'whitespace': [
- (r'\s+', Text),
- (r'"[^"]*"', Comment)
- ],
- }
+
+ .. versionadded:: 1.1
+ """
+ name = 'Newspeak'
+ filenames = ['*.ns2']
+ aliases = ['newspeak', ]
+ mimetypes = ['text/x-newspeak']
+
+ tokens = {
+ 'root': [
+ (r'\b(Newsqueak2)\b', Keyword.Declaration),
+ (r"'[^']*'", String),
+ (r'\b(class)(\s+)(\w+)(\s*)',
+ bygroups(Keyword.Declaration, Text, Name.Class, Text)),
+ (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
+ Keyword),
+ (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'(\w+)(\s*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ (r'<\w+>', Comment.Special),
+ include('expressionstat'),
+ include('whitespace')
+ ],
+
+ 'expressionstat': [
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r':\w+', Name.Variable),
+ (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
+ (r'\w+:', Name.Function),
+ (r'\w+', Name.Variable),
+ (r'\(|\)', Punctuation),
+ (r'\[|\]', Punctuation),
+ (r'\{|\}', Punctuation),
+
+ (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
+ (r'\.|;', Punctuation),
+ include('whitespace'),
+ include('literals'),
+ ],
+ 'literals': [
+ (r'\$.', String),
+ (r"'[^']*'", String),
+ (r"#'[^']*'", String.Symbol),
+ (r"#\w+:?", String.Symbol),
+ (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'"[^"]*"', Comment)
+ ],
+ }
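
SmalltalkLexer and NewspeakLexer are ordinary RegexLexers, so they can be exercised directly. An illustrative sketch, assuming a stock Pygments installation and the 'smalltalk' alias registered above:

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    st = "Transcript showCr: 'Hello, world'."
    for ttype, value in lex(st, get_lexer_by_name('smalltalk')):
        if value.strip():
            # Expect roughly: Name.Class, Name.Function, String, Punctuation.
            print(ttype, repr(value))
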
diff --git a/contrib/python/Pygments/py3/pygments/lexers/snobol.py b/contrib/python/Pygments/py3/pygments/lexers/snobol.py
index b5719c3a5a..cee252f24a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/snobol.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/snobol.py
@@ -1,82 +1,82 @@
-"""
- pygments.lexers.snobol
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for the SNOBOL language.
-
+"""
+ pygments.lexers.snobol
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the SNOBOL language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SnobolLexer']
-
-
-class SnobolLexer(RegexLexer):
- """
- Lexer for the SNOBOL4 programming language.
-
- Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
- Does not require spaces around binary operators.
-
- .. versionadded:: 1.5
- """
-
- name = "Snobol"
- aliases = ["snobol"]
- filenames = ['*.snobol']
- mimetypes = ['text/x-snobol']
-
- tokens = {
- # root state, start of line
- # comments, continuation lines, and directives start in column 1
- # as do labels
- 'root': [
- (r'\*.*\n', Comment),
- (r'[+.] ', Punctuation, 'statement'),
- (r'-.*\n', Comment),
- (r'END\s*\n', Name.Label, 'heredoc'),
- (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
- (r'\s+', Text, 'statement'),
- ],
- # statement state, line after continuation or label
- 'statement': [
- (r'\s*\n', Text, '#pop'),
- (r'\s+', Text),
- (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
- r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
- r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
- r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
- Name.Builtin),
- (r'[A-Za-z][\w.]*', Name),
- # ASCII equivalents of original operators
- # | for the EBCDIC equivalent, ! likewise
- # \ for EBCDIC negation
- (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
- (r'"[^"]*"', String),
- (r"'[^']*'", String),
- # Accept SPITBOL syntax for real numbers
- # as well as Macro SNOBOL4
- (r'[0-9]+(?=[^.EeDd])', Number.Integer),
- (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
- # Goto
- (r':', Punctuation, 'goto'),
- (r'[()<>,;]', Punctuation),
- ],
- # Goto block
- 'goto': [
- (r'\s*\n', Text, "#pop:2"),
- (r'\s+', Text),
- (r'F|S', Keyword),
- (r'(\()([A-Za-z][\w.]*)(\))',
- bygroups(Punctuation, Name.Label, Punctuation))
- ],
- # everything after the END statement is basically one
- # big heredoc.
- 'heredoc': [
- (r'.*\n', String.Heredoc)
- ]
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SnobolLexer']
+
+
+class SnobolLexer(RegexLexer):
+ """
+ Lexer for the SNOBOL4 programming language.
+
+ Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+ Does not require spaces around binary operators.
+
+ .. versionadded:: 1.5
+ """
+
+ name = "Snobol"
+ aliases = ["snobol"]
+ filenames = ['*.snobol']
+ mimetypes = ['text/x-snobol']
+
+ tokens = {
+ # root state, start of line
+ # comments, continuation lines, and directives start in column 1
+ # as do labels
+ 'root': [
+ (r'\*.*\n', Comment),
+ (r'[+.] ', Punctuation, 'statement'),
+ (r'-.*\n', Comment),
+ (r'END\s*\n', Name.Label, 'heredoc'),
+ (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
+ (r'\s+', Text, 'statement'),
+ ],
+ # statement state, line after continuation or label
+ 'statement': [
+ (r'\s*\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
+ r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
+ r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
+ r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
+ Name.Builtin),
+ (r'[A-Za-z][\w.]*', Name),
+ # ASCII equivalents of original operators
+ # | for the EBCDIC equivalent, ! likewise
+ # \ for EBCDIC negation
+ (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
+ (r'"[^"]*"', String),
+ (r"'[^']*'", String),
+ # Accept SPITBOL syntax for real numbers
+ # as well as Macro SNOBOL4
+ (r'[0-9]+(?=[^.EeDd])', Number.Integer),
+ (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
+ # Goto
+ (r':', Punctuation, 'goto'),
+ (r'[()<>,;]', Punctuation),
+ ],
+ # Goto block
+ 'goto': [
+ (r'\s*\n', Text, "#pop:2"),
+ (r'\s+', Text),
+ (r'F|S', Keyword),
+ (r'(\()([A-Za-z][\w.]*)(\))',
+ bygroups(Punctuation, Name.Label, Punctuation))
+ ],
+ # everything after the END statement is basically one
+ # big heredoc.
+ 'heredoc': [
+ (r'.*\n', String.Heredoc)
+ ]
+ }
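
SnobolLexer is column-sensitive: comments, labels and continuation markers are recognised only at the start of a line, and everything after an END line is lexed as one big heredoc. A small sketch, assuming a stock Pygments installation and the 'snobol' alias declared above:

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    src = "* a comment line\n          OUTPUT = 'HELLO'\nEND\nanything after END\n"
    for ttype, value in lex(src, get_lexer_by_name('snobol')):
        if value.strip():
            print(ttype, repr(value))
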
diff --git a/contrib/python/Pygments/py3/pygments/lexers/special.py b/contrib/python/Pygments/py3/pygments/lexers/special.py
index bff6652c56..b108cf1aef 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/special.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/special.py
@@ -1,40 +1,40 @@
-"""
- pygments.lexers.special
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Special lexers.
-
+"""
+ pygments.lexers.special
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Special lexers.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
import ast
-import re
-
-from pygments.lexer import Lexer
+import re
+
+from pygments.lexer import Lexer
from pygments.token import Token, Error, Text, Generic
from pygments.util import get_choice_opt
-
-
+
+
__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
-
-
-class TextLexer(Lexer):
- """
- "Null" lexer, doesn't highlight anything.
- """
- name = 'Text only'
- aliases = ['text']
- filenames = ['*.txt']
- mimetypes = ['text/plain']
+
+
+class TextLexer(Lexer):
+ """
+ "Null" lexer, doesn't highlight anything.
+ """
+ name = 'Text only'
+ aliases = ['text']
+ filenames = ['*.txt']
+ mimetypes = ['text/plain']
priority = 0.01
-
- def get_tokens_unprocessed(self, text):
- yield 0, Text, text
-
+
+ def get_tokens_unprocessed(self, text):
+ yield 0, Text, text
+
def analyse_text(text):
return TextLexer.priority
-
+
class OutputLexer(Lexer):
"""
@@ -49,32 +49,32 @@ class OutputLexer(Lexer):
yield 0, Generic.Output, text
-_ttype_cache = {}
-
+_ttype_cache = {}
+
line_re = re.compile('.*?\n')
-
-
-class RawTokenLexer(Lexer):
- """
+
+
+class RawTokenLexer(Lexer):
+ """
Recreate a token stream formatted with the `RawTokenFormatter`.
-
- Additional options accepted:
-
- `compress`
- If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
- the given compression algorithm before lexing (default: ``""``).
- """
- name = 'Raw token data'
+
+ Additional options accepted:
+
+ `compress`
+ If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
+ the given compression algorithm before lexing (default: ``""``).
+ """
+ name = 'Raw token data'
aliases = []
- filenames = []
- mimetypes = ['application/x-pygments-tokens']
-
- def __init__(self, **options):
- self.compress = get_choice_opt(options, 'compress',
- ['', 'none', 'gz', 'bz2'], '')
- Lexer.__init__(self, **options)
-
- def get_tokens(self, text):
+ filenames = []
+ mimetypes = ['application/x-pygments-tokens']
+
+ def __init__(self, **options):
+ self.compress = get_choice_opt(options, 'compress',
+ ['', 'none', 'gz', 'bz2'], '')
+ Lexer.__init__(self, **options)
+
+ def get_tokens(self, text):
if self.compress:
if isinstance(text, str):
text = text.encode('latin1')
@@ -89,31 +89,31 @@ class RawTokenLexer(Lexer):
yield Error, text.decode('latin1')
if isinstance(text, bytes):
text = text.decode('latin1')
-
+
# do not call Lexer.get_tokens() because stripping is not optional.
text = text.strip('\n') + '\n'
- for i, t, v in self.get_tokens_unprocessed(text):
- yield t, v
-
- def get_tokens_unprocessed(self, text):
- length = 0
- for match in line_re.finditer(text):
- try:
+ for i, t, v in self.get_tokens_unprocessed(text):
+ yield t, v
+
+ def get_tokens_unprocessed(self, text):
+ length = 0
+ for match in line_re.finditer(text):
+ try:
ttypestr, val = match.group().rstrip().split('\t', 1)
- ttype = _ttype_cache.get(ttypestr)
- if not ttype:
- ttype = Token
- ttypes = ttypestr.split('.')[1:]
- for ttype_ in ttypes:
- if not ttype_ or not ttype_[0].isupper():
- raise ValueError('malformed token name')
- ttype = getattr(ttype, ttype_)
- _ttype_cache[ttypestr] = ttype
+ ttype = _ttype_cache.get(ttypestr)
+ if not ttype:
+ ttype = Token
+ ttypes = ttypestr.split('.')[1:]
+ for ttype_ in ttypes:
+ if not ttype_ or not ttype_[0].isupper():
+ raise ValueError('malformed token name')
+ ttype = getattr(ttype, ttype_)
+ _ttype_cache[ttypestr] = ttype
val = ast.literal_eval(val)
if not isinstance(val, str):
raise ValueError('expected str')
except (SyntaxError, ValueError):
val = match.group()
ttype = Error
- yield length, ttype, val
- length += len(val)
+ yield length, ttype, val
+ length += len(val)
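Quick illustration (not part of the commit): RawTokenLexer is the inverse of RawTokenFormatter, which dumps one "Token.Type<TAB>'repr'" pair per line. A hedged round-trip sketch using only public Pygments entry points:

    from pygments import highlight
    from pygments.lexers.python import PythonLexer
    from pygments.lexers.special import RawTokenLexer
    from pygments.formatters import RawTokenFormatter

    source = "print('hi')\n"

    # RawTokenFormatter emits the token stream as bytes, one token per line.
    raw = highlight(source, PythonLexer(), RawTokenFormatter())

    # RawTokenLexer parses those lines back; malformed lines become Error tokens.
    for ttype, value in RawTokenLexer().get_tokens(raw):
        print(ttype, repr(value))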
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sql.py b/contrib/python/Pygments/py3/pygments/lexers/sql.py
index 752f135005..096380eb14 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sql.py
@@ -1,51 +1,51 @@
-"""
- pygments.lexers.sql
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for various SQL dialects and related interactive sessions.
-
- Postgres specific lexers:
-
- `PostgresLexer`
- A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
- lexer are:
-
- - keywords and data types list parsed from the PG docs (run the
- `_postgres_builtins` module to update them);
- - Content of $-strings parsed using a specific lexer, e.g. the content
- of a PL/Python function is parsed using the Python lexer;
- - parse PG specific constructs: E-strings, $-strings, U&-strings,
- different operators and punctuation.
-
- `PlPgsqlLexer`
- A lexer for the PL/pgSQL language. Adds a few specific construct on
- top of the PG SQL lexer (such as <<label>>).
-
- `PostgresConsoleLexer`
- A lexer to highlight an interactive psql session:
-
- - identifies the prompt and does its best to detect the end of command
- in multiline statement where not all the lines are prefixed by a
- prompt, telling them apart from the output;
- - highlights errors in the output and notification levels;
- - handles psql backslash commands.
-
- The ``tests/examplefiles`` contains a few test files with data to be
- parsed by these lexers.
-
+"""
+ pygments.lexers.sql
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various SQL dialects and related interactive sessions.
+
+ Postgres specific lexers:
+
+ `PostgresLexer`
+ A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
+ lexer are:
+
+ - keywords and data types list parsed from the PG docs (run the
+ `_postgres_builtins` module to update them);
+ - Content of $-strings parsed using a specific lexer, e.g. the content
+ of a PL/Python function is parsed using the Python lexer;
+ - parse PG specific constructs: E-strings, $-strings, U&-strings,
+ different operators and punctuation.
+
+ `PlPgsqlLexer`
+ A lexer for the PL/pgSQL language. Adds a few specific constructs on
+ top of the PG SQL lexer (such as <<label>>).
+
+ `PostgresConsoleLexer`
+ A lexer to highlight an interactive psql session:
+
+ - identifies the prompt and does its best to detect the end of command
+ in multiline statement where not all the lines are prefixed by a
+ prompt, telling them apart from the output;
+ - highlights errors in the output and notification levels;
+ - handles psql backslash commands.
+
+ The ``tests/examplefiles`` contains a few test files with data to be
+ parsed by these lexers.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
Keyword, Name, String, Number, Generic, Literal
-from pygments.lexers import get_lexer_by_name, ClassNotFound
-
-from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
- PSEUDO_TYPES, PLPGSQL_KEYWORDS
+from pygments.lexers import get_lexer_by_name, ClassNotFound
+
+from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
+ PSEUDO_TYPES, PLPGSQL_KEYWORDS
from pygments.lexers._mysql_builtins import \
MYSQL_CONSTANTS, \
MYSQL_DATATYPES, \
@@ -54,19 +54,19 @@ from pygments.lexers._mysql_builtins import \
MYSQL_OPTIMIZER_HINTS
from pygments.lexers import _tsql_builtins
-
-
-__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
+
+
+__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
'SqlLexer', 'TransactSqlLexer', 'MySqlLexer',
'SqliteConsoleLexer', 'RqlLexer']
-
-line_re = re.compile('.*?\n')
+
+line_re = re.compile('.*?\n')
sqlite_prompt_re = re.compile(r'^(?:sqlite| ...)>(?= )')
-
-language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
-
+
+language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
+
do_re = re.compile(r'\bDO\b', re.IGNORECASE)
-
+
# Regular expressions for analyse_text()
name_between_bracket_re = re.compile(r'\[[a-zA-Z_]\w*\]')
name_between_backtick_re = re.compile(r'`[a-zA-Z_]\w*`')
@@ -75,27 +75,27 @@ tsql_declare_re = re.compile(r'\bdeclare\s+@', re.IGNORECASE)
tsql_variable_re = re.compile(r'@[a-zA-Z_]\w*\b')
-def language_callback(lexer, match):
- """Parse the content of a $-string using a lexer
-
+def language_callback(lexer, match):
+ """Parse the content of a $-string using a lexer
+
The lexer is chosen looking for a nearby LANGUAGE or assumed as
plpgsql if inside a DO statement and no LANGUAGE has been found.
- """
+ """
lx = None
- m = language_re.match(lexer.text[match.end():match.end()+100])
- if m is not None:
+ m = language_re.match(lexer.text[match.end():match.end()+100])
+ if m is not None:
lx = lexer._get_lexer(m.group(1))
- else:
- m = list(language_re.finditer(
- lexer.text[max(0, match.start()-100):match.start()]))
- if m:
+ else:
+ m = list(language_re.finditer(
+ lexer.text[max(0, match.start()-100):match.start()]))
+ if m:
lx = lexer._get_lexer(m[-1].group(1))
else:
m = list(do_re.finditer(
lexer.text[max(0, match.start()-25):match.start()]))
if m:
lx = lexer._get_lexer('plpgsql')
-
+
# 1 = $, 2 = delimiter, 3 = $
yield (match.start(1), String, match.group(1))
yield (match.start(2), String.Delimiter, match.group(2))
@@ -103,94 +103,94 @@ def language_callback(lexer, match):
# 4 = string contents
if lx:
yield from lx.get_tokens_unprocessed(match.group(4))
- else:
+ else:
yield (match.start(4), String, match.group(4))
# 5 = $, 6 = delimiter, 7 = $
yield (match.start(5), String, match.group(5))
yield (match.start(6), String.Delimiter, match.group(6))
yield (match.start(7), String, match.group(7))
-
-
+
+
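Quick illustration (not part of the commit): language_callback() above hands the body of a dollar-quoted string to the lexer named in a nearby LANGUAGE clause, so the body's token types come from that lexer rather than from the SQL rules. A hedged sketch:

    from pygments.lexers.sql import PostgresLexer

    sql = (
        "CREATE FUNCTION mylen(s text) RETURNS int AS $$\n"
        "return len(s)\n"
        "$$ LANGUAGE plpython3u;\n"
    )

    for index, token, value in PostgresLexer().get_tokens_unprocessed(sql):
        print(index, token, repr(value))
    # 'return len(s)' should be tokenized by the Python lexer: _get_lexer()
    # strips the 'pl' prefix and the trailing 'u' from 'plpython3u' and
    # resolves the remainder via get_lexer_by_name().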
class PostgresBase:
- """Base class for Postgres-related lexers.
-
- This is implemented as a mixin to avoid the Lexer metaclass kicking in.
- this way the different lexer don't have a common Lexer ancestor. If they
- had, _tokens could be created on this ancestor and not updated for the
- other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
- seem to suggest that regexp lexers are not really subclassable.
- """
- def get_tokens_unprocessed(self, text, *args):
- # Have a copy of the entire text to be used by `language_callback`.
- self.text = text
+ """Base class for Postgres-related lexers.
+
+ This is implemented as a mixin to avoid the Lexer metaclass kicking in;
+ this way the different lexers don't have a common Lexer ancestor. If they
+ had, _tokens could be created on this ancestor and not updated for the
+ other classes, resulting e.g. in PL/pgSQL parsed as SQL. This shortcoming
+ seems to suggest that regexp lexers are not really subclassable.
+ """
+ def get_tokens_unprocessed(self, text, *args):
+ # Have a copy of the entire text to be used by `language_callback`.
+ self.text = text
yield from super().get_tokens_unprocessed(text, *args)
-
- def _get_lexer(self, lang):
- if lang.lower() == 'sql':
- return get_lexer_by_name('postgresql', **self.options)
-
- tries = [lang]
- if lang.startswith('pl'):
- tries.append(lang[2:])
- if lang.endswith('u'):
- tries.append(lang[:-1])
- if lang.startswith('pl') and lang.endswith('u'):
- tries.append(lang[2:-1])
-
+
+ def _get_lexer(self, lang):
+ if lang.lower() == 'sql':
+ return get_lexer_by_name('postgresql', **self.options)
+
+ tries = [lang]
+ if lang.startswith('pl'):
+ tries.append(lang[2:])
+ if lang.endswith('u'):
+ tries.append(lang[:-1])
+ if lang.startswith('pl') and lang.endswith('u'):
+ tries.append(lang[2:-1])
+
for lx in tries:
- try:
+ try:
return get_lexer_by_name(lx, **self.options)
- except ClassNotFound:
- pass
- else:
- # TODO: better logging
- # print >>sys.stderr, "language not found:", lang
- return None
-
-
-class PostgresLexer(PostgresBase, RegexLexer):
- """
- Lexer for the PostgreSQL dialect of SQL.
-
- .. versionadded:: 1.5
- """
-
- name = 'PostgreSQL SQL dialect'
- aliases = ['postgresql', 'postgres']
- mimetypes = ['text/x-postgresql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
+ except ClassNotFound:
+ pass
+ else:
+ # TODO: better logging
+ # print >>sys.stderr, "language not found:", lang
+ return None
+
+
+class PostgresLexer(PostgresBase, RegexLexer):
+ """
+ Lexer for the PostgreSQL dialect of SQL.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'PostgreSQL SQL dialect'
+ aliases = ['postgresql', 'postgres']
+ mimetypes = ['text/x-postgresql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
(r'--.*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
(r'(' + '|'.join(s.replace(" ", r"\s+")
for s in DATATYPES + PSEUDO_TYPES) + r')\b',
Name.Builtin),
- (words(KEYWORDS, suffix=r'\b'), Keyword),
- (r'[+*/<>=~!@#%^&|`?-]+', Operator),
- (r'::', Operator), # cast
- (r'\$\d+', Name.Variable),
- (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
- (r'[0-9]+', Number.Integer),
+ (words(KEYWORDS, suffix=r'\b'), Keyword),
+ (r'[+*/<>=~!@#%^&|`?-]+', Operator),
+ (r'::', Operator), # cast
+ (r'\$\d+', Name.Variable),
+ (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
+ (r'[0-9]+', Number.Integer),
(r"((?:E|U&)?)(')", bygroups(String.Affix, String.Single), 'string'),
# quoted identifier
(r'((?:U&)?)(")', bygroups(String.Affix, String.Name), 'quoted-ident'),
(r'(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)', language_callback),
- (r'[a-z_]\w*', Name),
-
- # psql variable in SQL
- (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
-
- (r'[;:()\[\]{},.]', Punctuation),
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ],
+ (r'[a-z_]\w*', Name),
+
+ # psql variable in SQL
+ (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
+
+ (r'[;:()\[\]{},.]', Punctuation),
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ],
'string': [
(r"[^']+", String.Single),
(r"''", String.Single),
@@ -201,191 +201,191 @@ class PostgresLexer(PostgresBase, RegexLexer):
(r'""', String.Name),
(r'"', String.Name, '#pop'),
],
- }
-
-
-class PlPgsqlLexer(PostgresBase, RegexLexer):
- """
- Handle the extra syntax in Pl/pgSQL language.
-
- .. versionadded:: 1.5
- """
- name = 'PL/pgSQL'
- aliases = ['plpgsql']
- mimetypes = ['text/x-plpgsql']
-
- flags = re.IGNORECASE
+ }
+
+
+class PlPgsqlLexer(PostgresBase, RegexLexer):
+ """
+ Handle the extra syntax in Pl/pgSQL language.
+
+ .. versionadded:: 1.5
+ """
+ name = 'PL/pgSQL'
+ aliases = ['plpgsql']
+ mimetypes = ['text/x-plpgsql']
+
+ flags = re.IGNORECASE
tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
-
- # extend the keywords list
- for i, pattern in enumerate(tokens['root']):
- if pattern[1] == Keyword:
- tokens['root'][i] = (
- words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
- Keyword)
- del i
- break
- else:
- assert 0, "SQL keywords not found"
-
- # Add specific PL/pgSQL rules (before the SQL ones)
- tokens['root'][:0] = [
- (r'\%[a-z]\w*\b', Name.Builtin), # actually, a datatype
- (r':=', Operator),
- (r'\<\<[a-z]\w*\>\>', Name.Label),
- (r'\#[a-z]\w*\b', Keyword.Pseudo), # #variable_conflict
- ]
-
-
-class PsqlRegexLexer(PostgresBase, RegexLexer):
- """
- Extend the PostgresLexer adding support specific for psql commands.
-
- This is not a complete psql lexer yet as it lacks prompt support
- and output rendering.
- """
-
- name = 'PostgreSQL console - regexp based lexer'
- aliases = [] # not public
-
- flags = re.IGNORECASE
+
+ # extend the keywords list
+ for i, pattern in enumerate(tokens['root']):
+ if pattern[1] == Keyword:
+ tokens['root'][i] = (
+ words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
+ Keyword)
+ del i
+ break
+ else:
+ assert 0, "SQL keywords not found"
+
+ # Add specific PL/pgSQL rules (before the SQL ones)
+ tokens['root'][:0] = [
+ (r'\%[a-z]\w*\b', Name.Builtin), # actually, a datatype
+ (r':=', Operator),
+ (r'\<\<[a-z]\w*\>\>', Name.Label),
+ (r'\#[a-z]\w*\b', Keyword.Pseudo), # #variable_conflict
+ ]
+
+
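Quick illustration (not part of the commit): PlPgsqlLexer copies the Postgres token table and prepends a handful of PL/pgSQL-only rules, so labels and := assignments are recognized without re-declaring the SQL rules. A hedged sketch of the expected behaviour:

    from pygments.lexers.sql import PlPgsqlLexer
    from pygments.token import Name, Operator

    snippet = "<<outer>>\nDECLARE\n  n integer := 10;\n"

    tokens = list(PlPgsqlLexer().get_tokens(snippet))
    # The prepended rules should produce these exact pairs:
    assert (Name.Label, '<<outer>>') in tokens
    assert (Operator, ':=') in tokens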
+class PsqlRegexLexer(PostgresBase, RegexLexer):
+ """
+ Extend the PostgresLexer adding support specific to psql commands.
+
+ This is not a complete psql lexer yet as it lacks prompt support
+ and output rendering.
+ """
+
+ name = 'PostgreSQL console - regexp based lexer'
+ aliases = [] # not public
+
+ flags = re.IGNORECASE
tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
-
- tokens['root'].append(
- (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
- tokens['psql-command'] = [
- (r'\n', Text, 'root'),
+
+ tokens['root'].append(
+ (r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
+ tokens['psql-command'] = [
+ (r'\n', Text, 'root'),
(r'\s+', Whitespace),
- (r'\\[^\s]+', Keyword.Pseudo),
- (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Backtick),
- (r"[^\s]+", String.Symbol),
- ]
-
-
-re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
-re_psql_command = re.compile(r'\s*\\')
-re_end_command = re.compile(r';\s*(--.*?)?$')
-re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
-re_error = re.compile(r'(ERROR|FATAL):')
-re_message = re.compile(
- r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
- r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
-
-
+ (r'\\[^\s]+', Keyword.Pseudo),
+ (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable),
+ (r"'(''|[^'])*'", String.Single),
+ (r"`([^`])*`", String.Backtick),
+ (r"[^\s]+", String.Symbol),
+ ]
+
+
+re_prompt = re.compile(r'^(\S.*?)??[=\-\(\$\'\"][#>]')
+re_psql_command = re.compile(r'\s*\\')
+re_end_command = re.compile(r';\s*(--.*?)?$')
+re_psql_command = re.compile(r'(\s*)(\\.+?)(\s+)$')
+re_error = re.compile(r'(ERROR|FATAL):')
+re_message = re.compile(
+ r'((?:DEBUG|INFO|NOTICE|WARNING|ERROR|'
+ r'FATAL|HINT|DETAIL|CONTEXT|LINE [0-9]+):)(.*?\n)')
+
+
class lookahead:
- """Wrap an iterator and allow pushing back an item."""
- def __init__(self, x):
- self.iter = iter(x)
- self._nextitem = None
-
- def __iter__(self):
- return self
-
- def send(self, i):
- self._nextitem = i
- return i
-
- def __next__(self):
- if self._nextitem is not None:
- ni = self._nextitem
- self._nextitem = None
- return ni
- return next(self.iter)
- next = __next__
-
-
-class PostgresConsoleLexer(Lexer):
- """
- Lexer for psql sessions.
-
- .. versionadded:: 1.5
- """
-
- name = 'PostgreSQL console (psql)'
- aliases = ['psql', 'postgresql-console', 'postgres-console']
- mimetypes = ['text/x-postgresql-psql']
-
- def get_tokens_unprocessed(self, data):
- sql = PsqlRegexLexer(**self.options)
-
- lines = lookahead(line_re.findall(data))
-
- # prompt-output cycle
- while 1:
-
- # consume the lines of the command: start with an optional prompt
- # and continue until the end of command is detected
- curcode = ''
- insertions = []
+ """Wrap an iterator and allow pushing back an item."""
+ def __init__(self, x):
+ self.iter = iter(x)
+ self._nextitem = None
+
+ def __iter__(self):
+ return self
+
+ def send(self, i):
+ self._nextitem = i
+ return i
+
+ def __next__(self):
+ if self._nextitem is not None:
+ ni = self._nextitem
+ self._nextitem = None
+ return ni
+ return next(self.iter)
+ next = __next__
+
+
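Quick illustration (not part of the commit): the lookahead wrapper above exists so the console lexer below can push a line back when, while emitting output, it discovers the line is really the next prompt. Its behaviour in isolation, assuming it is imported from this module:

    from pygments.lexers.sql import lookahead

    it = lookahead(['a', 'b', 'c'])
    assert next(it) == 'a'
    it.send('a')            # push the item back
    assert next(it) == 'a'  # returned again before the underlying iterator resumes
    assert next(it) == 'b'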
+class PostgresConsoleLexer(Lexer):
+ """
+ Lexer for psql sessions.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'PostgreSQL console (psql)'
+ aliases = ['psql', 'postgresql-console', 'postgres-console']
+ mimetypes = ['text/x-postgresql-psql']
+
+ def get_tokens_unprocessed(self, data):
+ sql = PsqlRegexLexer(**self.options)
+
+ lines = lookahead(line_re.findall(data))
+
+ # prompt-output cycle
+ while 1:
+
+ # consume the lines of the command: start with an optional prompt
+ # and continue until the end of command is detected
+ curcode = ''
+ insertions = []
for line in lines:
- # Identify a shell prompt in case of psql commandline example
- if line.startswith('$') and not curcode:
- lexer = get_lexer_by_name('console', **self.options)
+ # Identify a shell prompt in case of psql commandline example
+ if line.startswith('$') and not curcode:
+ lexer = get_lexer_by_name('console', **self.options)
yield from lexer.get_tokens_unprocessed(line)
- break
-
- # Identify a psql prompt
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- insertions.append((len(curcode),
- [(0, Generic.Prompt, mprompt.group())]))
- curcode += line[len(mprompt.group()):]
- else:
- curcode += line
-
- # Check if this is the end of the command
- # TODO: better handle multiline comments at the end with
- # a lexer with an external state?
- if re_psql_command.match(curcode) \
- or re_end_command.search(curcode):
- break
-
- # Emit the combined stream of command and prompt(s)
+ break
+
+ # Identify a psql prompt
+ mprompt = re_prompt.match(line)
+ if mprompt is not None:
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, mprompt.group())]))
+ curcode += line[len(mprompt.group()):]
+ else:
+ curcode += line
+
+ # Check if this is the end of the command
+ # TODO: better handle multiline comments at the end with
+ # a lexer with an external state?
+ if re_psql_command.match(curcode) \
+ or re_end_command.search(curcode):
+ break
+
+ # Emit the combined stream of command and prompt(s)
yield from do_insertions(insertions,
sql.get_tokens_unprocessed(curcode))
-
- # Emit the output lines
- out_token = Generic.Output
+
+ # Emit the output lines
+ out_token = Generic.Output
for line in lines:
- mprompt = re_prompt.match(line)
- if mprompt is not None:
- # push the line back to have it processed by the prompt
- lines.send(line)
- break
-
- mmsg = re_message.match(line)
- if mmsg is not None:
- if mmsg.group(1).startswith("ERROR") \
- or mmsg.group(1).startswith("FATAL"):
- out_token = Generic.Error
- yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
- yield (mmsg.start(2), out_token, mmsg.group(2))
- else:
- yield (0, out_token, line)
+ mprompt = re_prompt.match(line)
+ if mprompt is not None:
+ # push the line back to have it processed by the prompt
+ lines.send(line)
+ break
+
+ mmsg = re_message.match(line)
+ if mmsg is not None:
+ if mmsg.group(1).startswith("ERROR") \
+ or mmsg.group(1).startswith("FATAL"):
+ out_token = Generic.Error
+ yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
+ yield (mmsg.start(2), out_token, mmsg.group(2))
+ else:
+ yield (0, out_token, line)
else:
return
-
-
-class SqlLexer(RegexLexer):
- """
- Lexer for Structured Query Language. Currently, this lexer does
- not recognize any special syntax except ANSI SQL.
- """
-
- name = 'SQL'
- aliases = ['sql']
- filenames = ['*.sql']
- mimetypes = ['text/x-sql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
+
+
+class SqlLexer(RegexLexer):
+ """
+ Lexer for Structured Query Language. Currently, this lexer does
+ not recognize any special syntax except ANSI SQL.
+ """
+
+ name = 'SQL'
+ aliases = ['sql']
+ filenames = ['*.sql']
+ mimetypes = ['text/x-sql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
(r'--.*\n?', Comment.Single),
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (words((
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (words((
'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER',
'AGGREGATE', 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE',
'AND', 'ANY', 'ARE', 'AS', 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT',
@@ -393,8 +393,8 @@ class SqlLexer(RegexLexer):
'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', 'BIT_LENGTH', 'BOTH', 'BREADTH',
'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', 'CASCADE',
'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
- 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
- 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
+ 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
+ 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE',
'CLUSTER', 'COALESCE', 'COBOL', 'COLLATE', 'COLLATION',
'COLLATION_CATALOG', 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN',
@@ -407,7 +407,7 @@ class SqlLexer(RegexLexer):
'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', 'CURRENT_PATH',
'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
- 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
+ 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE',
'DEFERRED', 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS',
'DEREF', 'DESC', 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR',
@@ -473,32 +473,32 @@ class SqlLexer(RegexLexer):
'VERSION', 'VERSIONS', 'VERSIONING', 'VIEW',
'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', 'WITH', 'WITHOUT', 'WORK',
'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
- Keyword),
- (words((
+ Keyword),
+ (words((
'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
- Name.Builtin),
- (r'[+*/<>=~!@#%^&|`?-]', Operator),
- (r'[0-9]+', Number.Integer),
- # TODO: Backslash escapes?
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
- (r'[a-z_][\w$]*', Name), # allow $s in strings for Oracle
- (r'[;:()\[\],.]', Punctuation)
- ],
- 'multiline-comments': [
- (r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[^/*]+', Comment.Multiline),
- (r'[/*]', Comment.Multiline)
- ]
- }
-
+ Name.Builtin),
+ (r'[+*/<>=~!@#%^&|`?-]', Operator),
+ (r'[0-9]+', Number.Integer),
+ # TODO: Backslash escapes?
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
+ (r'[a-z_][\w$]*', Name), # allow $s in strings for Oracle
+ (r'[;:()\[\],.]', Punctuation)
+ ],
+ 'multiline-comments': [
+ (r'/\*', Comment.Multiline, 'multiline-comments'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^/*]+', Comment.Multiline),
+ (r'[/*]', Comment.Multiline)
+ ]
+ }
+
def analyse_text(self, text):
return
-
+
class TransactSqlLexer(RegexLexer):
"""
@@ -584,22 +584,22 @@ class TransactSqlLexer(RegexLexer):
return rating
-class MySqlLexer(RegexLexer):
+class MySqlLexer(RegexLexer):
"""The Oracle MySQL lexer.
This lexer does not attempt to maintain strict compatibility with
MariaDB syntax or keywords. Although MySQL and MariaDB's common code
history suggests there may be significant overlap between the two,
compatibility between the two is not a target for this lexer.
- """
-
- name = 'MySQL'
- aliases = ['mysql']
- mimetypes = ['text/x-mysql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
+ """
+
+ name = 'MySQL'
+ aliases = ['mysql']
+ mimetypes = ['text/x-mysql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
# Comments
@@ -676,7 +676,7 @@ class MySqlLexer(RegexLexer):
# Punctuation
(r'[(),.;]', Punctuation),
- ],
+ ],
# Multiline comment substates
# ---------------------------
@@ -691,7 +691,7 @@ class MySqlLexer(RegexLexer):
'multiline-comment': [
(r'[^*]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
+ (r'\*/', Comment.Multiline, '#pop'),
(r'\*', Comment.Multiline),
],
@@ -746,8 +746,8 @@ class MySqlLexer(RegexLexer):
(r'``', Name.Quoted.Escape),
(r'`', Name.Quoted, '#pop'),
],
- }
-
+ }
+
def analyse_text(text):
rating = 0
name_between_backtick_count = len(
@@ -765,75 +765,75 @@ class MySqlLexer(RegexLexer):
elif name_between_backtick_count > 0:
rating += 0.1
return rating
-
-
-class SqliteConsoleLexer(Lexer):
- """
- Lexer for example sessions using sqlite3.
-
- .. versionadded:: 0.11
- """
-
- name = 'sqlite3con'
- aliases = ['sqlite3']
- filenames = ['*.sqlite3-console']
- mimetypes = ['text/x-sqlite3-console']
-
- def get_tokens_unprocessed(self, data):
- sql = SqlLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(data):
- line = match.group()
+
+
+class SqliteConsoleLexer(Lexer):
+ """
+ Lexer for example sessions using sqlite3.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'sqlite3con'
+ aliases = ['sqlite3']
+ filenames = ['*.sqlite3-console']
+ mimetypes = ['text/x-sqlite3-console']
+
+ def get_tokens_unprocessed(self, data):
+ sql = SqlLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(data):
+ line = match.group()
prompt_match = sqlite_prompt_re.match(line)
if prompt_match is not None:
- insertions.append((len(curcode),
+ insertions.append((len(curcode),
[(0, Generic.Prompt, line[:7])]))
insertions.append((len(curcode),
[(7, Whitespace, ' ')]))
- curcode += line[8:]
- else:
- if curcode:
+ curcode += line[8:]
+ else:
+ if curcode:
yield from do_insertions(insertions,
sql.get_tokens_unprocessed(curcode))
- curcode = ''
- insertions = []
- if line.startswith('SQL error: '):
- yield (match.start(), Generic.Traceback, line)
- else:
- yield (match.start(), Generic.Output, line)
- if curcode:
+ curcode = ''
+ insertions = []
+ if line.startswith('SQL error: '):
+ yield (match.start(), Generic.Traceback, line)
+ else:
+ yield (match.start(), Generic.Output, line)
+ if curcode:
yield from do_insertions(insertions,
sql.get_tokens_unprocessed(curcode))
-
-
-class RqlLexer(RegexLexer):
- """
- Lexer for Relation Query Language.
-
- `RQL <http://www.logilab.org/project/rql>`_
-
- .. versionadded:: 2.0
- """
- name = 'RQL'
- aliases = ['rql']
- filenames = ['*.rql']
- mimetypes = ['text/x-rql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
+
+
+class RqlLexer(RegexLexer):
+ """
+ Lexer for Relation Query Language.
+
+ `RQL <http://www.logilab.org/project/rql>`_
+
+ .. versionadded:: 2.0
+ """
+ name = 'RQL'
+ aliases = ['rql']
+ filenames = ['*.rql']
+ mimetypes = ['text/x-rql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
(r'\s+', Whitespace),
- (r'(DELETE|SET|INSERT|UNION|DISTINCT|WITH|WHERE|BEING|OR'
- r'|AND|NOT|GROUPBY|HAVING|ORDERBY|ASC|DESC|LIMIT|OFFSET'
- r'|TODAY|NOW|TRUE|FALSE|NULL|EXISTS)\b', Keyword),
- (r'[+*/<>=%-]', Operator),
- (r'(Any|is|instance_of|CWEType|CWRelation)\b', Name.Builtin),
- (r'[0-9]+', Number.Integer),
- (r'[A-Z_]\w*\??', Name),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Single),
- (r'[;:()\[\],.]', Punctuation)
- ],
- }
+ (r'(DELETE|SET|INSERT|UNION|DISTINCT|WITH|WHERE|BEING|OR'
+ r'|AND|NOT|GROUPBY|HAVING|ORDERBY|ASC|DESC|LIMIT|OFFSET'
+ r'|TODAY|NOW|TRUE|FALSE|NULL|EXISTS)\b', Keyword),
+ (r'[+*/<>=%-]', Operator),
+ (r'(Any|is|instance_of|CWEType|CWRelation)\b', Name.Builtin),
+ (r'[0-9]+', Number.Integer),
+ (r'[A-Z_]\w*\??', Name),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Single),
+ (r'[;:()\[\],.]', Punctuation)
+ ],
+ }
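Quick illustration (not part of the commit): a hedged sketch of the prompt/output cycle implemented by PostgresConsoleLexer above, fed with a tiny psql transcript:

    from pygments.lexers.sql import PostgresConsoleLexer

    session = (
        "postgres=# SELECT 1;\n"
        " ?column?\n"
        "----------\n"
        "        1\n"
        "(1 row)\n"
    )

    for index, token, value in PostgresConsoleLexer().get_tokens_unprocessed(session):
        print(index, token, repr(value))
    # 'postgres=#' should be emitted as Generic.Prompt, the statement is lexed
    # by PsqlRegexLexer, and the result rows come back as Generic.Output.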
diff --git a/contrib/python/Pygments/py3/pygments/lexers/supercollider.py b/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
index 724674f5e6..ecde68f484 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
@@ -1,92 +1,92 @@
-"""
- pygments.lexers.supercollider
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for SuperCollider
-
+"""
+ pygments.lexers.supercollider
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for SuperCollider
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexer import RegexLexer, include, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['SuperColliderLexer']
-
-
-class SuperColliderLexer(RegexLexer):
- """
- For `SuperCollider <http://supercollider.github.io/>`_ source code.
-
- .. versionadded:: 2.1
- """
-
- name = 'SuperCollider'
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SuperColliderLexer']
+
+
+class SuperColliderLexer(RegexLexer):
+ """
+ For `SuperCollider <http://supercollider.github.io/>`_ source code.
+
+ .. versionadded:: 2.1
+ """
+
+ name = 'SuperCollider'
aliases = ['supercollider', 'sc']
- filenames = ['*.sc', '*.scd']
- mimetypes = ['application/supercollider', 'text/supercollider', ]
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
+ filenames = ['*.sc', '*.scd']
+ mimetypes = ['application/supercollider', 'text/supercollider', ]
+
+ flags = re.DOTALL | re.MULTILINE
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
default('#pop'),
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (words((
- 'for', 'in', 'while', 'do', 'break', 'return', 'continue',
- 'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
- 'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
- 'void'), suffix=r'\b'),
- Keyword, 'slashstartsregex'),
- (words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
- Keyword.Declaration, 'slashstartsregex'),
- (words((
- '(abstract', 'boolean', 'byte', 'char', 'class', 'const',
- 'debugger', 'double', 'enum', 'export', 'extends', 'final',
- 'float', 'goto', 'implements', 'import', 'int', 'interface',
- 'long', 'native', 'package', 'private', 'protected', 'public',
- 'short', 'static', 'super', 'synchronized', 'throws',
- 'transient', 'volatile'), suffix=r'\b'),
- Keyword.Reserved),
- (words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
- (words((
- 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (words((
+ 'for', 'in', 'while', 'do', 'break', 'return', 'continue',
+ 'switch', 'case', 'default', 'if', 'else', 'throw', 'try',
+ 'catch', 'finally', 'new', 'delete', 'typeof', 'instanceof',
+ 'void'), suffix=r'\b'),
+ Keyword, 'slashstartsregex'),
+ (words(('var', 'let', 'with', 'function', 'arg'), suffix=r'\b'),
+ Keyword.Declaration, 'slashstartsregex'),
+ (words((
+ '(abstract', 'boolean', 'byte', 'char', 'class', 'const',
+ 'debugger', 'double', 'enum', 'export', 'extends', 'final',
+ 'float', 'goto', 'implements', 'import', 'int', 'interface',
+ 'long', 'native', 'package', 'private', 'protected', 'public',
+ 'short', 'static', 'super', 'synchronized', 'throws',
+ 'transient', 'volatile'), suffix=r'\b'),
+ Keyword.Reserved),
+ (words(('true', 'false', 'nil', 'inf'), suffix=r'\b'), Keyword.Constant),
+ (words((
+ 'Array', 'Boolean', 'Date', 'Error', 'Function', 'Number',
'Object', 'Packages', 'RegExp', 'String',
- 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
- 'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
- 'thisThread', 'this'), suffix=r'\b'),
- Name.Builtin),
+ 'isFinite', 'isNaN', 'parseFloat', 'parseInt', 'super',
+ 'thisFunctionDef', 'thisFunction', 'thisMethod', 'thisProcess',
+ 'thisThread', 'this'), suffix=r'\b'),
+ Name.Builtin),
(r'[$a-zA-Z_]\w*', Name.Other),
(r'\\?[$a-zA-Z_]\w*', String.Symbol),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
+ ]
+ }
def analyse_text(text):
"""We're searching for a common function and a unique keyword here."""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tcl.py b/contrib/python/Pygments/py3/pygments/lexers/tcl.py
index 7be07357aa..9f67fd3a2d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tcl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tcl.py
@@ -1,144 +1,144 @@
-"""
- pygments.lexers.tcl
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for Tcl and related languages.
-
+"""
+ pygments.lexers.tcl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Tcl and related languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Whitespace
-from pygments.util import shebang_matches
-
-__all__ = ['TclLexer']
-
-
-class TclLexer(RegexLexer):
- """
- For Tcl source code.
-
- .. versionadded:: 0.10
- """
-
- keyword_cmds_re = words((
- 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
- 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
- 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
- 'vwait', 'while'), prefix=r'\b', suffix=r'\b')
-
- builtin_cmds_re = words((
- 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
- 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
- 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
- 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
- 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
- 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
- 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
- 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
- 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')
-
- name = 'Tcl'
- aliases = ['tcl']
- filenames = ['*.tcl', '*.rvt']
- mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
-
- def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
- return [
- (keyword_cmds_re, Keyword, 'params' + context),
- (builtin_cmds_re, Name.Builtin, 'params' + context),
- (r'([\w.-]+)', Name.Variable, 'params' + context),
- (r'#', Comment, 'comment'),
- ]
-
- tokens = {
- 'root': [
- include('command'),
- include('basic'),
- include('data'),
- (r'\}', Keyword), # HACK: somehow we miscounted our braces
- ],
- 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
- 'command-in-brace': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-brace"),
- 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-bracket"),
- 'command-in-paren': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-paren"),
- 'basic': [
- (r'\(', Keyword, 'paren'),
- (r'\[', Keyword, 'bracket'),
- (r'\{', Keyword, 'brace'),
- (r'"', String.Double, 'string'),
- (r'(eq|ne|in|ni)\b', Operator.Word),
- (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
- ],
- 'data': [
+from pygments.util import shebang_matches
+
+__all__ = ['TclLexer']
+
+
+class TclLexer(RegexLexer):
+ """
+ For Tcl source code.
+
+ .. versionadded:: 0.10
+ """
+
+ keyword_cmds_re = words((
+ 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
+ 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
+ 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
+ 'vwait', 'while'), prefix=r'\b', suffix=r'\b')
+
+ builtin_cmds_re = words((
+ 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
+ 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
+ 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
+ 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
+ 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
+ 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
+ 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
+ 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
+ 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')
+
+ name = 'Tcl'
+ aliases = ['tcl']
+ filenames = ['*.tcl', '*.rvt']
+ mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
+
+ def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
+ return [
+ (keyword_cmds_re, Keyword, 'params' + context),
+ (builtin_cmds_re, Name.Builtin, 'params' + context),
+ (r'([\w.-]+)', Name.Variable, 'params' + context),
+ (r'#', Comment, 'comment'),
+ ]
+
+ tokens = {
+ 'root': [
+ include('command'),
+ include('basic'),
+ include('data'),
+ (r'\}', Keyword), # HACK: somehow we miscounted our braces
+ ],
+ 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
+ 'command-in-brace': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-brace"),
+ 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-bracket"),
+ 'command-in-paren': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-paren"),
+ 'basic': [
+ (r'\(', Keyword, 'paren'),
+ (r'\[', Keyword, 'bracket'),
+ (r'\{', Keyword, 'brace'),
+ (r'"', String.Double, 'string'),
+ (r'(eq|ne|in|ni)\b', Operator.Word),
+ (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
+ ],
+ 'data': [
(r'\s+', Whitespace),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'0[0-7]+', Number.Oct),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\$([\w.:-]+)', Name.Variable),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'0[0-7]+', Number.Oct),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\$([\w.:-]+)', Name.Variable),
(r'([\w.,@:-]+)', Text),
- ],
- 'params': [
- (r';', Keyword, '#pop'),
- (r'\n', Text, '#pop'),
- (r'(else|elseif|then)\b', Keyword),
- include('basic'),
- include('data'),
- ],
- 'params-in-brace': [
- (r'\}', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-paren': [
- (r'\)', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-bracket': [
- (r'\]', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'string': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
- (r'"', String.Double, '#pop')
- ],
- 'string-square': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
- (r'\]', String.Double, '#pop')
- ],
- 'brace': [
- (r'\}', Keyword, '#pop'),
- include('command-in-brace'),
- include('basic'),
- include('data'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('command-in-paren'),
- include('basic'),
- include('data'),
- ],
- 'bracket': [
- (r'\]', Keyword, '#pop'),
- include('command-in-bracket'),
- include('basic'),
- include('data'),
- ],
- 'comment': [
- (r'.*[^\\]\n', Comment, '#pop'),
- (r'.*\\\n', Comment),
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'(tcl)')
+ ],
+ 'params': [
+ (r';', Keyword, '#pop'),
+ (r'\n', Text, '#pop'),
+ (r'(else|elseif|then)\b', Keyword),
+ include('basic'),
+ include('data'),
+ ],
+ 'params-in-brace': [
+ (r'\}', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-paren': [
+ (r'\)', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-bracket': [
+ (r'\]', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'string': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
+ (r'"', String.Double, '#pop')
+ ],
+ 'string-square': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
+ (r'\]', String.Double, '#pop')
+ ],
+ 'brace': [
+ (r'\}', Keyword, '#pop'),
+ include('command-in-brace'),
+ include('basic'),
+ include('data'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('command-in-paren'),
+ include('basic'),
+ include('data'),
+ ],
+ 'bracket': [
+ (r'\]', Keyword, '#pop'),
+ include('command-in-bracket'),
+ include('basic'),
+ include('data'),
+ ],
+ 'comment': [
+ (r'.*[^\\]\n', Comment, '#pop'),
+ (r'.*\\\n', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'(tcl)')
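Quick illustration (not part of the commit): analyse_text() above defers to shebang_matches(), so lexer guessing can pick Tcl from a tclsh shebang. A minimal sketch:

    from pygments.lexers import guess_lexer

    script = '#!/usr/bin/env tclsh\nputs "hello"\n'
    print(guess_lexer(script).name)  # expected to print 'Tcl'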
diff --git a/contrib/python/Pygments/py3/pygments/lexers/templates.py b/contrib/python/Pygments/py3/pygments/lexers/templates.py
index 548e14afe2..de1570be1d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/templates.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/templates.py
@@ -1,1809 +1,1809 @@
-"""
- pygments.lexers.templates
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various template engines' markup.
-
+"""
+ pygments.lexers.templates
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various template engines' markup.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexers.html import HtmlLexer, XmlLexer
-from pygments.lexers.javascript import JavascriptLexer, LassoLexer
-from pygments.lexers.css import CssLexer
-from pygments.lexers.php import PhpLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.perl import PerlLexer
-from pygments.lexers.jvm import JavaLexer, TeaLangLexer
-from pygments.lexers.data import YamlLexer
-from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
- include, using, this, default, combined
-from pygments.token import Error, Punctuation, Whitespace, \
- Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
-from pygments.util import html_doctype_matches, looks_like_xml
-
-__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
- 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
- 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
- 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
- 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
- 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
- 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
- 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
- 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
- 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
- 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
- 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
- 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
- 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
- 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
- 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
- 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
- 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
- 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexers.html import HtmlLexer, XmlLexer
+from pygments.lexers.javascript import JavascriptLexer, LassoLexer
+from pygments.lexers.css import CssLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
+from pygments.lexers.jvm import JavaLexer, TeaLangLexer
+from pygments.lexers.data import YamlLexer
+from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
+ include, using, this, default, combined
+from pygments.token import Error, Punctuation, Whitespace, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
+from pygments.util import html_doctype_matches, looks_like_xml
+
+__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
+ 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
+ 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
+ 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
+ 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
+ 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
+ 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
+ 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
+ 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
+ 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
+ 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
+ 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
+ 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
+ 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
+ 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
+ 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
+ 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
+ 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
+ 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']
-
-
-class ErbLexer(Lexer):
- """
- Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
- lexer.
-
- Just highlights ruby code between the preprocessor directives, other data
- is left untouched by the lexer.
-
- All options are also forwarded to the `RubyLexer`.
- """
-
- name = 'ERB'
- aliases = ['erb']
- mimetypes = ['application/x-ruby-templating']
-
- _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
-
- def __init__(self, **options):
- from pygments.lexers.ruby import RubyLexer
- self.ruby_lexer = RubyLexer(**options)
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- """
- Since ERB doesn't allow "<%" and other tags inside of ruby
- blocks we have to use a split approach here that fails for
- that too.
- """
- tokens = self._block_re.split(text)
- tokens.reverse()
- state = idx = 0
- try:
- while True:
- # text
- if state == 0:
- val = tokens.pop()
- yield idx, Other, val
- idx += len(val)
- state = 1
- # block starts
- elif state == 1:
- tag = tokens.pop()
- # literals
- if tag in ('<%%', '%%>'):
- yield idx, Other, tag
- idx += 3
- state = 0
- # comment
- elif tag == '<%#':
- yield idx, Comment.Preproc, tag
- val = tokens.pop()
- yield idx + 3, Comment, val
- idx += 3 + len(val)
- state = 2
- # blocks or output
- elif tag in ('<%', '<%=', '<%-'):
- yield idx, Comment.Preproc, tag
- idx += len(tag)
- data = tokens.pop()
- r_idx = 0
- for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(data):
- yield r_idx + idx, r_token, r_value
- idx += len(data)
- state = 2
- elif tag in ('%>', '-%>'):
- yield idx, Error, tag
- idx += len(tag)
- state = 0
- # % raw ruby statements
- else:
- yield idx, Comment.Preproc, tag[0]
- r_idx = 0
- for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
- yield idx + 1 + r_idx, r_token, r_value
- idx += len(tag)
- state = 0
- # block ends
- elif state == 2:
- tag = tokens.pop()
- if tag not in ('%>', '-%>'):
- yield idx, Other, tag
- else:
- yield idx, Comment.Preproc, tag
- idx += len(tag)
- state = 0
- except IndexError:
- return
-
- def analyse_text(text):
- if '<%' in text and '%>' in text:
- return 0.4
-
-
-class SmartyLexer(RegexLexer):
- """
- Generic `Smarty <http://smarty.php.net/>`_ template lexer.
-
- Just highlights smarty code between the preprocessor directives, other
- data is left untouched by the lexer.
- """
-
- name = 'Smarty'
- aliases = ['smarty']
- filenames = ['*.tpl']
- mimetypes = ['application/x-smarty']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'(\{)(\*.*?\*)(\})',
- bygroups(Comment.Preproc, Comment, Comment.Preproc)),
- (r'(\{php\})(.*?)(\{/php\})',
- bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
- Comment.Preproc)),
- (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
- bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
- (r'\{', Comment.Preproc, 'smarty')
- ],
- 'smarty': [
- (r'\s+', Text),
- (r'\{', Comment.Preproc, '#push'),
- (r'\}', Comment.Preproc, '#pop'),
- (r'#[a-zA-Z_]\w*#', Name.Variable),
- (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
- (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
- (r'(true|false|null)\b', Keyword.Constant),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+
+
+class ErbLexer(Lexer):
+ """
+ Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
+ lexer.
+
+ Just highlights ruby code between the preprocessor directives, other data
+ is left untouched by the lexer.
+
+ All options are also forwarded to the `RubyLexer`.
+ """
+
+ name = 'ERB'
+ aliases = ['erb']
+ mimetypes = ['application/x-ruby-templating']
+
+ _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
+
+ def __init__(self, **options):
+ from pygments.lexers.ruby import RubyLexer
+ self.ruby_lexer = RubyLexer(**options)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ """
+ Since ERB doesn't allow "<%" and other tags inside of ruby
+ blocks we have to use a split approach here that fails for
+ that too.
+ """
+ tokens = self._block_re.split(text)
+ tokens.reverse()
+ state = idx = 0
+ try:
+ while True:
+ # text
+ if state == 0:
+ val = tokens.pop()
+ yield idx, Other, val
+ idx += len(val)
+ state = 1
+ # block starts
+ elif state == 1:
+ tag = tokens.pop()
+ # literals
+ if tag in ('<%%', '%%>'):
+ yield idx, Other, tag
+ idx += 3
+ state = 0
+ # comment
+ elif tag == '<%#':
+ yield idx, Comment.Preproc, tag
+ val = tokens.pop()
+ yield idx + 3, Comment, val
+ idx += 3 + len(val)
+ state = 2
+ # blocks or output
+ elif tag in ('<%', '<%=', '<%-'):
+ yield idx, Comment.Preproc, tag
+ idx += len(tag)
+ data = tokens.pop()
+ r_idx = 0
+ for r_idx, r_token, r_value in \
+ self.ruby_lexer.get_tokens_unprocessed(data):
+ yield r_idx + idx, r_token, r_value
+ idx += len(data)
+ state = 2
+ elif tag in ('%>', '-%>'):
+ yield idx, Error, tag
+ idx += len(tag)
+ state = 0
+ # % raw ruby statements
+ else:
+ yield idx, Comment.Preproc, tag[0]
+ r_idx = 0
+ for r_idx, r_token, r_value in \
+ self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
+ yield idx + 1 + r_idx, r_token, r_value
+ idx += len(tag)
+ state = 0
+ # block ends
+ elif state == 2:
+ tag = tokens.pop()
+ if tag not in ('%>', '-%>'):
+ yield idx, Other, tag
+ else:
+ yield idx, Comment.Preproc, tag
+ idx += len(tag)
+ state = 0
+ except IndexError:
+ return
+
+ def analyse_text(text):
+ if '<%' in text and '%>' in text:
+ return 0.4
+
+
+class SmartyLexer(RegexLexer):
+ """
+ Generic `Smarty <http://smarty.php.net/>`_ template lexer.
+
+ Just highlights smarty code between the preprocessor directives, other
+ data is left untouched by the lexer.
+ """
+
+ name = 'Smarty'
+ aliases = ['smarty']
+ filenames = ['*.tpl']
+ mimetypes = ['application/x-smarty']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+ (r'(\{)(\*.*?\*)(\})',
+ bygroups(Comment.Preproc, Comment, Comment.Preproc)),
+ (r'(\{php\})(.*?)(\{/php\})',
+ bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
+ Comment.Preproc)),
+ (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
+ bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
+ (r'\{', Comment.Preproc, 'smarty')
+ ],
+ 'smarty': [
+ (r'\s+', Text),
+ (r'\{', Comment.Preproc, '#push'),
+ (r'\}', Comment.Preproc, '#pop'),
+ (r'#[a-zA-Z_]\w*#', Name.Variable),
+ (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'[a-zA-Z_]\w*', Name.Attribute)
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
+ (r'[a-zA-Z_]\w*', Name.Attribute)
+ ]
+ }
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
- rv += 0.15
+ rv += 0.15
if re.search(r'\{include\s+file=.*?\}', text):
- rv += 0.15
+ rv += 0.15
if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
- rv += 0.15
+ rv += 0.15
if re.search(r'\{\$.*?\}', text):
- rv += 0.01
- return rv
-
-
-class VelocityLexer(RegexLexer):
- """
- Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
-
- Just highlights velocity directives and variable references, other
- data is left untouched by the lexer.
- """
-
- name = 'Velocity'
- aliases = ['velocity']
- filenames = ['*.vm', '*.fhtml']
-
- flags = re.MULTILINE | re.DOTALL
-
- identifier = r'[a-zA-Z_]\w*'
-
- tokens = {
- 'root': [
- (r'[^{#$]+', Other),
- (r'(#)(\*.*?\*)(#)',
- bygroups(Comment.Preproc, Comment, Comment.Preproc)),
- (r'(##)(.*?$)',
- bygroups(Comment.Preproc, Comment)),
- (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
- bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
- 'directiveparams'),
- (r'(#\{?)(' + identifier + r')(\}|\b)',
- bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
+ rv += 0.01
+ return rv
+
+
+class VelocityLexer(RegexLexer):
+ """
+ Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
+
+ Just highlights velocity directives and variable references, other
+ data is left untouched by the lexer.
+ """
+
+ name = 'Velocity'
+ aliases = ['velocity']
+ filenames = ['*.vm', '*.fhtml']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ identifier = r'[a-zA-Z_]\w*'
+
+ tokens = {
+ 'root': [
+ (r'[^{#$]+', Other),
+ (r'(#)(\*.*?\*)(#)',
+ bygroups(Comment.Preproc, Comment, Comment.Preproc)),
+ (r'(##)(.*?$)',
+ bygroups(Comment.Preproc, Comment)),
+ (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
+ bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
+ 'directiveparams'),
+ (r'(#\{?)(' + identifier + r')(\}|\b)',
+ bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
(r'\$!?\{?', Punctuation, 'variable')
- ],
- 'variable': [
- (identifier, Name.Variable),
- (r'\(', Punctuation, 'funcparams'),
- (r'(\.)(' + identifier + r')',
- bygroups(Punctuation, Name.Variable), '#push'),
- (r'\}', Punctuation, '#pop'),
- default('#pop')
- ],
- 'directiveparams': [
- (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
- Operator),
- (r'\[', Operator, 'rangeoperator'),
- (r'\b' + identifier + r'\b', Name.Function),
- include('funcparams')
- ],
- 'rangeoperator': [
- (r'\.\.', Operator),
- include('funcparams'),
- (r'\]', Operator, '#pop')
- ],
- 'funcparams': [
+ ],
+ 'variable': [
+ (identifier, Name.Variable),
+ (r'\(', Punctuation, 'funcparams'),
+ (r'(\.)(' + identifier + r')',
+ bygroups(Punctuation, Name.Variable), '#push'),
+ (r'\}', Punctuation, '#pop'),
+ default('#pop')
+ ],
+ 'directiveparams': [
+ (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
+ Operator),
+ (r'\[', Operator, 'rangeoperator'),
+ (r'\b' + identifier + r'\b', Name.Function),
+ include('funcparams')
+ ],
+ 'rangeoperator': [
+ (r'\.\.', Operator),
+ include('funcparams'),
+ (r'\]', Operator, '#pop')
+ ],
+ 'funcparams': [
(r'\$!?\{?', Punctuation, 'variable'),
- (r'\s+', Text),
+ (r'\s+', Text),
(r'[,:]', Punctuation),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- (r"\b[0-9]+\b", Number),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
+ (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ (r"\b[0-9]+\b", Number),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
- rv += 0.25
+ rv += 0.25
if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
- rv += 0.15
+ rv += 0.15
if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
- rv += 0.15
+ rv += 0.15
if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
- r'(\.\w+(\([^)]*\))?)*\}?', text):
- rv += 0.01
- return rv
-
-
-class VelocityHtmlLexer(DelegatingLexer):
- """
- Subclass of the `VelocityLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- """
-
- name = 'HTML+Velocity'
- aliases = ['html+velocity']
- alias_filenames = ['*.html', '*.fhtml']
- mimetypes = ['text/html+velocity']
-
- def __init__(self, **options):
+ r'(\.\w+(\([^)]*\))?)*\}?', text):
+ rv += 0.01
+ return rv
+
+
+class VelocityHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `VelocityLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+
+ """
+
+ name = 'HTML+Velocity'
+ aliases = ['html+velocity']
+ alias_filenames = ['*.html', '*.fhtml']
+ mimetypes = ['text/html+velocity']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, VelocityLexer, **options)
-
-
-class VelocityXmlLexer(DelegatingLexer):
- """
- Subclass of the `VelocityLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- """
-
- name = 'XML+Velocity'
- aliases = ['xml+velocity']
- alias_filenames = ['*.xml', '*.vm']
- mimetypes = ['application/xml+velocity']
-
- def __init__(self, **options):
+
+
+class VelocityXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `VelocityLexer` that highlights unlexed data
+ with the `XmlLexer`.
+
+ """
+
+ name = 'XML+Velocity'
+ aliases = ['xml+velocity']
+ alias_filenames = ['*.xml', '*.vm']
+ mimetypes = ['application/xml+velocity']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, VelocityLexer, **options)
-
- def analyse_text(text):
- rv = VelocityLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class DjangoLexer(RegexLexer):
- """
- Generic `django <http://www.djangoproject.com/documentation/templates/>`_
+
+ def analyse_text(text):
+ rv = VelocityLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
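Note: VelocityHtmlLexer and VelocityXmlLexer, like the many DelegatingLexer subclasses further down, all work the same way: the template lexer runs first, and whatever it leaves as Token.Other is re-lexed by the root lexer (HTML or XML here). A minimal sketch of that composition, assuming only a stock Pygments install (the markup sample is illustrative):

    from pygments.lexers.templates import VelocityHtmlLexer

    sample = '<p>Hello $name, you have ${count} new messages</p>'
    for token_type, value in VelocityHtmlLexer().get_tokens(sample):
        # HTML markup comes back as HTML tokens, $-references as Velocity tokens
        print(token_type, repr(value))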
+
+
+class DjangoLexer(RegexLexer):
+ """
+ Generic `django <http://www.djangoproject.com/documentation/templates/>`_
and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.
-
- It just highlights django/jinja code between the preprocessor directives,
- other data is left untouched by the lexer.
- """
-
- name = 'Django/Jinja'
- aliases = ['django', 'jinja']
- mimetypes = ['application/x-django-templating', 'application/x-jinja']
-
- flags = re.M | re.S
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'\{\{', Comment.Preproc, 'var'),
- # jinja/django comments
+
+ It just highlights django/jinja code between the preprocessor directives,
+ other data is left untouched by the lexer.
+ """
+
+ name = 'Django/Jinja'
+ aliases = ['django', 'jinja']
+ mimetypes = ['application/x-django-templating', 'application/x-jinja']
+
+ flags = re.M | re.S
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+ (r'\{\{', Comment.Preproc, 'var'),
+ # jinja/django comments
(r'\{#.*?#\}', Comment),
- # django comments
- (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Comment, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # raw jinja blocks
- (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Text, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # filter blocks
- (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
- 'block'),
- (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword), 'block'),
- (r'\{', Other)
- ],
- 'varnames': [
- (r'(\|)(\s*)([a-zA-Z_]\w*)',
- bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
- (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
- r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
- Keyword),
- (r'(loop|block|super|forloop)\b', Name.Builtin),
- (r'[a-zA-Z_][\w-]*', Name.Variable),
- (r'\.\w+', Name.Variable),
+ # django comments
+ (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Comment, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ # raw jinja blocks
+ (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Text, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ # filter blocks
+ (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
+ 'block'),
+ (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
+ bygroups(Comment.Preproc, Text, Keyword), 'block'),
+ (r'\{', Other)
+ ],
+ 'varnames': [
+ (r'(\|)(\s*)([a-zA-Z_]\w*)',
+ bygroups(Operator, Text, Name.Function)),
+ (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
+ (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
+ r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
+ Keyword),
+ (r'(loop|block|super|forloop)\b', Name.Builtin),
+ (r'[a-zA-Z_][\w-]*', Name.Variable),
+ (r'\.\w+', Name.Variable),
(r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ],
- 'var': [
- (r'\s+', Text),
- (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames')
- ],
- 'block': [
- (r'\s+', Text),
- (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames'),
- (r'.', Punctuation)
- ]
- }
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'\{%\s*(block|extends)', text) is not None:
- rv += 0.4
- if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
- rv += 0.1
- if re.search(r'\{\{.*?\}\}', text) is not None:
- rv += 0.1
- return rv
-
-
-class MyghtyLexer(RegexLexer):
- """
- Generic `myghty templates`_ lexer. Code that isn't Myghty
- markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.6
-
- .. _myghty templates: http://www.myghty.org/
- """
-
- name = 'Myghty'
- aliases = ['myghty']
- filenames = ['*.myt', 'autodelegate']
- mimetypes = ['application/x-myghty']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ],
+ 'var': [
+ (r'\s+', Text),
+ (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames')
+ ],
+ 'block': [
+ (r'\s+', Text),
+ (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames'),
+ (r'.', Punctuation)
+ ]
+ }
+
+ def analyse_text(text):
+ rv = 0.0
+ if re.search(r'\{%\s*(block|extends)', text) is not None:
+ rv += 0.4
+ if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
+ rv += 0.1
+ if re.search(r'\{\{.*?\}\}', text) is not None:
+ rv += 0.1
+ return rv
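Note: the analyse_text scores defined throughout this module feed pygments.lexers.guess_lexer, which picks the highest-scoring lexer for unidentified input. A short sketch, assuming only that Pygments is importable (the template string is illustrative):

    from pygments.lexers import guess_lexer

    text = '{% extends "base.html" %}\n{% block content %}{{ user.name }}{% endblock %}'
    lexer = guess_lexer(text)
    print(lexer.name)  # expected to resolve to a Django/Jinja-flavoured lexer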
+
+
+class MyghtyLexer(RegexLexer):
+ """
+ Generic `myghty templates`_ lexer. Code that isn't Myghty
+ markup is yielded as `Token.Other`.
+
+ .. versionadded:: 0.6
+
+ .. _myghty templates: http://www.myghty.org/
+ """
+
+ name = 'Myghty'
+ aliases = ['myghty']
+ filenames = ['*.myt', 'autodelegate']
+ mimetypes = ['application/x-myghty']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Text, Name.Function, Name.Tag,
- using(this), Name.Tag)),
+ bygroups(Name.Tag, Text, Name.Function, Name.Tag,
+ using(this), Name.Tag)),
(r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Name.Function, Name.Tag,
- using(PythonLexer), Name.Tag)),
- (r'(<&[^|])(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
+ bygroups(Name.Tag, Name.Function, Name.Tag,
+ using(PythonLexer), Name.Tag)),
+ (r'(<&[^|])(.*?)(,.*?)?(&>)',
+ bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
- (r'</&>', Name.Tag),
+ bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
+ (r'</&>', Name.Tag),
(r'(?s)(<%!?)(.*?)(%>)',
- bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
- (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
- (r'(?<=^)(%)([^\n]*)(\n|\Z)',
- bygroups(Name.Tag, using(PythonLexer), Other)),
- (r"""(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=[%#]) | # an eval or comment line
- (?=</?[%&]) | # a substitution or block or
- # call start or end
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )""", bygroups(Other, Operator)),
- ]
- }
-
-
-class MyghtyHtmlLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'HTML+Myghty'
- aliases = ['html+myghty']
- mimetypes = ['text/html+myghty']
-
- def __init__(self, **options):
+ bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
+ (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
+ (r'(?<=^)(%)([^\n]*)(\n|\Z)',
+ bygroups(Name.Tag, using(PythonLexer), Other)),
+ (r"""(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?<=\n)(?=[%#]) | # an eval or comment line
+ (?=</?[%&]) | # a substitution or block or
+ # call start or end
+ # - don't consume
+ (\\\n) | # an escaped newline
+ \Z # end of string
+ )""", bygroups(Other, Operator)),
+ ]
+ }
+
+
+class MyghtyHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'HTML+Myghty'
+ aliases = ['html+myghty']
+ mimetypes = ['text/html+myghty']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, MyghtyLexer, **options)
-
-
-class MyghtyXmlLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'XML+Myghty'
- aliases = ['xml+myghty']
- mimetypes = ['application/xml+myghty']
-
- def __init__(self, **options):
+
+
+class MyghtyXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `XmlLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'XML+Myghty'
+ aliases = ['xml+myghty']
+ mimetypes = ['application/xml+myghty']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, MyghtyLexer, **options)
-
-
-class MyghtyJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `JavascriptLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'JavaScript+Myghty'
+
+
+class MyghtyJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `JavascriptLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'JavaScript+Myghty'
aliases = ['javascript+myghty', 'js+myghty']
- mimetypes = ['application/x-javascript+myghty',
- 'text/x-javascript+myghty',
-                 'text/javascript+myghty']
-
- def __init__(self, **options):
+ mimetypes = ['application/x-javascript+myghty',
+ 'text/x-javascript+myghty',
+                 'text/javascript+myghty']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, MyghtyLexer, **options)
-
-
-class MyghtyCssLexer(DelegatingLexer):
- """
- Subclass of the `MyghtyLexer` that highlights unlexed data
- with the `CssLexer`.
-
- .. versionadded:: 0.6
- """
-
- name = 'CSS+Myghty'
- aliases = ['css+myghty']
- mimetypes = ['text/css+myghty']
-
- def __init__(self, **options):
+
+
+class MyghtyCssLexer(DelegatingLexer):
+ """
+ Subclass of the `MyghtyLexer` that highlights unlexed data
+ with the `CssLexer`.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'CSS+Myghty'
+ aliases = ['css+myghty']
+ mimetypes = ['text/css+myghty']
+
+ def __init__(self, **options):
super().__init__(CssLexer, MyghtyLexer, **options)
-
-
-class MasonLexer(RegexLexer):
- """
- Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
- Mason markup is HTML.
-
- .. _mason templates: http://www.masonhq.com/
-
- .. versionadded:: 1.4
- """
- name = 'Mason'
- aliases = ['mason']
- filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
- mimetypes = ['application/x-mason']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
+
+
+class MasonLexer(RegexLexer):
+ """
+ Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
+ Mason markup is HTML.
+
+ .. _mason templates: http://www.masonhq.com/
+
+ .. versionadded:: 1.4
+ """
+ name = 'Mason'
+ aliases = ['mason']
+ filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
+ mimetypes = ['application/x-mason']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
(r'(?s)(<%doc>)(.*?)(</%doc>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+ bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
(r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
- bygroups(Name.Tag, Text, Name.Function, Name.Tag,
- using(this), Name.Tag)),
+ bygroups(Name.Tag, Text, Name.Function, Name.Tag,
+ using(this), Name.Tag)),
(r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
(r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+ bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
- bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
- (r'</&>', Name.Tag),
+ bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
+ (r'</&>', Name.Tag),
(r'(?s)(<%!?)(.*?)(%>)',
- bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
- (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
- (r'(?<=^)(%)([^\n]*)(\n|\Z)',
- bygroups(Name.Tag, using(PerlLexer), Other)),
- (r"""(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=[%#]) | # an eval or comment line
- (?=</?[%&]) | # a substitution or block or
- # call start or end
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )""", bygroups(using(HtmlLexer), Operator)),
- ]
- }
-
- def analyse_text(text):
- result = 0.0
+ bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
+ (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
+ (r'(?<=^)(%)([^\n]*)(\n|\Z)',
+ bygroups(Name.Tag, using(PerlLexer), Other)),
+ (r"""(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?<=\n)(?=[%#]) | # an eval or comment line
+ (?=</?[%&]) | # a substitution or block or
+ # call start or end
+ # - don't consume
+ (\\\n) | # an escaped newline
+ \Z # end of string
+ )""", bygroups(using(HtmlLexer), Operator)),
+ ]
+ }
+
+ def analyse_text(text):
+ result = 0.0
if re.search(r'</%(class|doc|init)>', text) is not None:
- result = 1.0
- elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
- result = 0.11
- return result
-
-
-class MakoLexer(RegexLexer):
- """
- Generic `mako templates`_ lexer. Code that isn't Mako
- markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.7
-
- .. _mako templates: http://www.makotemplates.org/
- """
-
- name = 'Mako'
- aliases = ['mako']
- filenames = ['*.mao']
- mimetypes = ['application/x-mako']
-
- tokens = {
- 'root': [
- (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
+ result = 1.0
+ elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
+ result = 0.11
+ return result
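Note: MasonLexer.analyse_text jumps straight to 1.0 when it sees a closing </%class>, </%doc> or </%init> tag, so such components should win lexer guessing outright. A quick sketch (the component text is illustrative):

    from pygments.lexers import guess_lexer

    component = "<%init>\nmy $who = 'world';\n</%init>\nHello, <% $who %>!\n"
    print(guess_lexer(component).name)  # expected to report the Mason lexer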
+
+
+class MakoLexer(RegexLexer):
+ """
+ Generic `mako templates`_ lexer. Code that isn't Mako
+ markup is yielded as `Token.Other`.
+
+ .. versionadded:: 0.7
+
+ .. _mako templates: http://www.makotemplates.org/
+ """
+
+ name = 'Mako'
+ aliases = ['mako']
+ filenames = ['*.mao']
+ mimetypes = ['application/x-mako']
+
+ tokens = {
+ 'root': [
+ (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
- (r'(\s*)(%)([^\n]*)(\n|\Z)',
+ (r'(\s*)(%)([^\n]*)(\n|\Z)',
bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
- (r'(\s*)(##[^\n]*)(\n|\Z)',
+ (r'(\s*)(##[^\n]*)(\n|\Z)',
bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
(r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
- (r'(<%)([\w.:]+)',
- bygroups(Comment.Preproc, Name.Builtin), 'tag'),
- (r'(</%)([\w.:]+)(>)',
- bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
- (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
+ (r'(<%)([\w.:]+)',
+ bygroups(Comment.Preproc, Name.Builtin), 'tag'),
+ (r'(</%)([\w.:]+)(>)',
+ bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
+ (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
(r'(?s)(<%(?:!?))(.*?)(%>)',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(\$\{)(.*?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'''(?sx)
- (.+?) # anything, followed by:
- (?:
- (?<=\n)(?=%|\#\#) | # an eval or comment line
- (?=\#\*) | # multiline comment
- (?=</?%) | # a python block
- # call start or end
- (?=\$\{) | # a substitution
- (?<=\n)(?=\s*%) |
- # - don't consume
- (\\\n) | # an escaped newline
- \Z # end of string
- )
- ''', bygroups(Other, Operator)),
- (r'\s+', Text),
- ],
- 'ondeftags': [
- (r'<%', Comment.Preproc),
- (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
- include('tag'),
- ],
- 'tag': [
- (r'((?:\w+)\s*=)(\s*)(".*?")',
- bygroups(Name.Attribute, Text, String)),
- (r'/?\s*>', Comment.Preproc, '#pop'),
- (r'\s+', Text),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
-
-class MakoHtmlLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `HtmlLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'HTML+Mako'
- aliases = ['html+mako']
- mimetypes = ['text/html+mako']
-
- def __init__(self, **options):
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'''(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?<=\n)(?=%|\#\#) | # an eval or comment line
+ (?=\#\*) | # multiline comment
+ (?=</?%) | # a python block
+ # call start or end
+ (?=\$\{) | # a substitution
+ (?<=\n)(?=\s*%) |
+ # - don't consume
+ (\\\n) | # an escaped newline
+ \Z # end of string
+ )
+ ''', bygroups(Other, Operator)),
+ (r'\s+', Text),
+ ],
+ 'ondeftags': [
+ (r'<%', Comment.Preproc),
+ (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
+ include('tag'),
+ ],
+ 'tag': [
+ (r'((?:\w+)\s*=)(\s*)(".*?")',
+ bygroups(Name.Attribute, Text, String)),
+ (r'/?\s*>', Comment.Preproc, '#pop'),
+ (r'\s+', Text),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
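Note: a minimal sketch of feeding the MakoLexer above through an HTML formatter; the snippet and the full=True option are illustrative, not part of this patch:

    from pygments import highlight
    from pygments.lexers.templates import MakoLexer
    from pygments.formatters import HtmlFormatter

    mako_src = '% for item in items:\n    <li>${item}</li>\n% endfor\n'
    html = highlight(mako_src, MakoLexer(), HtmlFormatter(full=True))
    print(html[:200])  # start of a complete, styled HTML document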
+
+
+class MakoHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'HTML+Mako'
+ aliases = ['html+mako']
+ mimetypes = ['text/html+mako']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, MakoLexer, **options)
-
-
-class MakoXmlLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `XmlLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'XML+Mako'
- aliases = ['xml+mako']
- mimetypes = ['application/xml+mako']
-
- def __init__(self, **options):
+
+
+class MakoXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `XmlLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'XML+Mako'
+ aliases = ['xml+mako']
+ mimetypes = ['application/xml+mako']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, MakoLexer, **options)
-
-
-class MakoJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `JavascriptLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'JavaScript+Mako'
+
+
+class MakoJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `JavascriptLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'JavaScript+Mako'
aliases = ['javascript+mako', 'js+mako']
- mimetypes = ['application/x-javascript+mako',
- 'text/x-javascript+mako',
- 'text/javascript+mako']
-
- def __init__(self, **options):
+ mimetypes = ['application/x-javascript+mako',
+ 'text/x-javascript+mako',
+ 'text/javascript+mako']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, MakoLexer, **options)
-
-
-class MakoCssLexer(DelegatingLexer):
- """
- Subclass of the `MakoLexer` that highlights unlexed data
- with the `CssLexer`.
-
- .. versionadded:: 0.7
- """
-
- name = 'CSS+Mako'
- aliases = ['css+mako']
- mimetypes = ['text/css+mako']
-
- def __init__(self, **options):
+
+
+class MakoCssLexer(DelegatingLexer):
+ """
+ Subclass of the `MakoLexer` that highlights unlexed data
+ with the `CssLexer`.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'CSS+Mako'
+ aliases = ['css+mako']
+ mimetypes = ['text/css+mako']
+
+ def __init__(self, **options):
super().__init__(CssLexer, MakoLexer, **options)
-
-
-# Genshi and Cheetah lexers courtesy of Matt Good.
-
-class CheetahPythonLexer(Lexer):
- """
- Lexer for handling Cheetah's special $ tokens in Python syntax.
- """
-
- def get_tokens_unprocessed(self, text):
- pylexer = PythonLexer(**self.options)
- for pos, type_, value in pylexer.get_tokens_unprocessed(text):
- if type_ == Token.Error and value == '$':
- type_ = Comment.Preproc
- yield pos, type_, value
-
-
-class CheetahLexer(RegexLexer):
- """
- Generic `cheetah templates`_ lexer. Code that isn't Cheetah
- markup is yielded as `Token.Other`. This also works for
- `spitfire templates`_ which use the same syntax.
-
- .. _cheetah templates: http://www.cheetahtemplate.org/
- .. _spitfire templates: http://code.google.com/p/spitfire/
- """
-
- name = 'Cheetah'
- aliases = ['cheetah', 'spitfire']
- filenames = ['*.tmpl', '*.spt']
- mimetypes = ['application/x-cheetah', 'application/x-spitfire']
-
- tokens = {
- 'root': [
- (r'(##[^\n]*)$',
- (bygroups(Comment))),
- (r'#[*](.|\n)*?[*]#', Comment),
- (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
- (r'#slurp$', Comment.Preproc),
- (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
- (bygroups(Comment.Preproc, using(CheetahPythonLexer),
- Comment.Preproc))),
- # TODO support other Python syntax like $foo['bar']
- (r'(\$)([a-zA-Z_][\w.]*\w)',
- bygroups(Comment.Preproc, using(CheetahPythonLexer))),
+
+
+# Genshi and Cheetah lexers courtesy of Matt Good.
+
+class CheetahPythonLexer(Lexer):
+ """
+ Lexer for handling Cheetah's special $ tokens in Python syntax.
+ """
+
+ def get_tokens_unprocessed(self, text):
+ pylexer = PythonLexer(**self.options)
+ for pos, type_, value in pylexer.get_tokens_unprocessed(text):
+ if type_ == Token.Error and value == '$':
+ type_ = Comment.Preproc
+ yield pos, type_, value
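Note: CheetahPythonLexer is a thin post-processing wrapper: it re-labels the '$' character, which the plain PythonLexer reports as Token.Error, so Cheetah placeholders render as preprocessor markup instead of errors. One way to observe the remapping (the input string is illustrative):

    from pygments.lexers.templates import CheetahPythonLexer

    for pos, tok, value in CheetahPythonLexer().get_tokens_unprocessed('$foo + 1'):
        print(pos, tok, repr(value))
    # the leading '$' should come back as Comment.Preproc rather than Error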
+
+
+class CheetahLexer(RegexLexer):
+ """
+ Generic `cheetah templates`_ lexer. Code that isn't Cheetah
+ markup is yielded as `Token.Other`. This also works for
+ `spitfire templates`_ which use the same syntax.
+
+ .. _cheetah templates: http://www.cheetahtemplate.org/
+ .. _spitfire templates: http://code.google.com/p/spitfire/
+ """
+
+ name = 'Cheetah'
+ aliases = ['cheetah', 'spitfire']
+ filenames = ['*.tmpl', '*.spt']
+ mimetypes = ['application/x-cheetah', 'application/x-spitfire']
+
+ tokens = {
+ 'root': [
+ (r'(##[^\n]*)$',
+ (bygroups(Comment))),
+ (r'#[*](.|\n)*?[*]#', Comment),
+ (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
+ (r'#slurp$', Comment.Preproc),
+ (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
+ (bygroups(Comment.Preproc, using(CheetahPythonLexer),
+ Comment.Preproc))),
+ # TODO support other Python syntax like $foo['bar']
+ (r'(\$)([a-zA-Z_][\w.]*\w)',
+ bygroups(Comment.Preproc, using(CheetahPythonLexer))),
(r'(?s)(\$\{!?)(.*?)(\})',
- bygroups(Comment.Preproc, using(CheetahPythonLexer),
- Comment.Preproc)),
- (r'''(?sx)
- (.+?) # anything, followed by:
- (?:
- (?=\#[#a-zA-Z]*) | # an eval comment
- (?=\$[a-zA-Z_{]) | # a substitution
- \Z # end of string
- )
- ''', Other),
- (r'\s+', Text),
- ],
- }
-
-
-class CheetahHtmlLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `HtmlLexer`.
- """
-
- name = 'HTML+Cheetah'
- aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
- mimetypes = ['text/html+cheetah', 'text/html+spitfire']
-
- def __init__(self, **options):
+ bygroups(Comment.Preproc, using(CheetahPythonLexer),
+ Comment.Preproc)),
+ (r'''(?sx)
+ (.+?) # anything, followed by:
+ (?:
+ (?=\#[#a-zA-Z]*) | # an eval comment
+ (?=\$[a-zA-Z_{]) | # a substitution
+ \Z # end of string
+ )
+ ''', Other),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class CheetahHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `HtmlLexer`.
+ """
+
+ name = 'HTML+Cheetah'
+ aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
+ mimetypes = ['text/html+cheetah', 'text/html+spitfire']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, CheetahLexer, **options)
-
-
-class CheetahXmlLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `XmlLexer`.
- """
-
- name = 'XML+Cheetah'
- aliases = ['xml+cheetah', 'xml+spitfire']
- mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
-
- def __init__(self, **options):
+
+
+class CheetahXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `XmlLexer`.
+ """
+
+ name = 'XML+Cheetah'
+ aliases = ['xml+cheetah', 'xml+spitfire']
+ mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, CheetahLexer, **options)
-
-
-class CheetahJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `CheetahLexer` that highlights unlexed data
- with the `JavascriptLexer`.
- """
-
- name = 'JavaScript+Cheetah'
+
+
+class CheetahJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `CheetahLexer` that highlights unlexed data
+ with the `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Cheetah'
aliases = ['javascript+cheetah', 'js+cheetah',
'javascript+spitfire', 'js+spitfire']
- mimetypes = ['application/x-javascript+cheetah',
- 'text/x-javascript+cheetah',
- 'text/javascript+cheetah',
- 'application/x-javascript+spitfire',
- 'text/x-javascript+spitfire',
- 'text/javascript+spitfire']
-
- def __init__(self, **options):
+ mimetypes = ['application/x-javascript+cheetah',
+ 'text/x-javascript+cheetah',
+ 'text/javascript+cheetah',
+ 'application/x-javascript+spitfire',
+ 'text/x-javascript+spitfire',
+ 'text/javascript+spitfire']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, CheetahLexer, **options)
-
-
-class GenshiTextLexer(RegexLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
- templates.
- """
-
- name = 'Genshi Text'
- aliases = ['genshitext']
- mimetypes = ['application/x-genshi-text', 'text/x-genshi']
-
- tokens = {
- 'root': [
- (r'[^#$\s]+', Other),
- (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
- (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
- include('variable'),
- (r'[#$\s]', Other),
- ],
- 'directive': [
- (r'\n', Text, '#pop'),
- (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
- (r'(choose|when|with)([^\S\n]+)(.*)',
- bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
- (r'(choose|otherwise)\b', Keyword, '#pop'),
- (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
- ],
- 'variable': [
- (r'(?<!\$)(\$\{)(.+?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
- Name.Variable),
- ]
- }
-
-
-class GenshiMarkupLexer(RegexLexer):
- """
- Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
- `GenshiLexer`.
- """
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^<$]+', Other),
- (r'(<\?python)(.*?)(\?>)',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- # yield style and script blocks as Other
- (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
- (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
+
+
+class GenshiTextLexer(RegexLexer):
+ """
+ A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
+ templates.
+ """
+
+ name = 'Genshi Text'
+ aliases = ['genshitext']
+ mimetypes = ['application/x-genshi-text', 'text/x-genshi']
+
+ tokens = {
+ 'root': [
+ (r'[^#$\s]+', Other),
+ (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
+ (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
+ include('variable'),
+ (r'[#$\s]', Other),
+ ],
+ 'directive': [
+ (r'\n', Text, '#pop'),
+ (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
+ (r'(choose|when|with)([^\S\n]+)(.*)',
+ bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
+ (r'(choose|otherwise)\b', Keyword, '#pop'),
+ (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
+ ],
+ 'variable': [
+ (r'(?<!\$)(\$\{)(.+?)(\})',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
+ Name.Variable),
+ ]
+ }
+
+
+class GenshiMarkupLexer(RegexLexer):
+ """
+ Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
+ `GenshiLexer`.
+ """
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'[^<$]+', Other),
+ (r'(<\?python)(.*?)(\?>)',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ # yield style and script blocks as Other
+ (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
+ (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
(r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
- include('variable'),
- (r'[<$]', Other),
- ],
- 'pytag': [
- (r'\s+', Text),
- (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'pyattr': [
- ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
- ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
- (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- ('"', String, 'attr-dstring'),
- ("'", String, 'attr-sstring'),
- (r'[^\s>]*', String, '#pop')
- ],
- 'attr-dstring': [
- ('"', String, '#pop'),
- include('strings'),
- ("'", String)
- ],
- 'attr-sstring': [
- ("'", String, '#pop'),
- include('strings'),
- ("'", String)
- ],
- 'strings': [
- ('[^"\'$]+', String),
- include('variable')
- ],
- 'variable': [
- (r'(?<!\$)(\$\{)(.+?)(\})',
- bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
- (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
- Name.Variable),
- ]
- }
-
-
-class HtmlGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
- `kid <http://kid-templating.org/>`_ kid HTML templates.
- """
-
- name = 'HTML+Genshi'
- aliases = ['html+genshi', 'html+kid']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+genshi']
-
- def __init__(self, **options):
+ include('variable'),
+ (r'[<$]', Other),
+ ],
+ 'pytag': [
+ (r'\s+', Text),
+ (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'pyattr': [
+ ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
+ ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
+ (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
+ ('"', String, 'attr-dstring'),
+ ("'", String, 'attr-sstring'),
+ (r'[^\s>]*', String, '#pop')
+ ],
+ 'attr-dstring': [
+ ('"', String, '#pop'),
+ include('strings'),
+ ("'", String)
+ ],
+ 'attr-sstring': [
+ ("'", String, '#pop'),
+ include('strings'),
+ ("'", String)
+ ],
+ 'strings': [
+ ('[^"\'$]+', String),
+ include('variable')
+ ],
+ 'variable': [
+ (r'(?<!\$)(\$\{)(.+?)(\})',
+ bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
+ (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
+ Name.Variable),
+ ]
+ }
+
+
+class HtmlGenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
+ `kid <http://kid-templating.org/>`_ kid HTML templates.
+ """
+
+ name = 'HTML+Genshi'
+ aliases = ['html+genshi', 'html+kid']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml']
+ mimetypes = ['text/html+genshi']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'\$\{.*?\}', text) is not None:
- rv += 0.2
+ rv += 0.2
if re.search(r'py:(.*?)=["\']', text) is not None:
- rv += 0.2
- return rv + HtmlLexer.analyse_text(text) - 0.01
-
-
-class GenshiLexer(DelegatingLexer):
- """
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
- `kid <http://kid-templating.org/>`_ kid XML templates.
- """
-
- name = 'Genshi'
- aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
- filenames = ['*.kid']
- alias_filenames = ['*.xml']
- mimetypes = ['application/x-genshi', 'application/x-kid']
-
- def __init__(self, **options):
+ rv += 0.2
+ return rv + HtmlLexer.analyse_text(text) - 0.01
+
+
+class GenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
+ `kid <http://kid-templating.org/>`_ kid XML templates.
+ """
+
+ name = 'Genshi'
+ aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
+ filenames = ['*.kid']
+ alias_filenames = ['*.xml']
+ mimetypes = ['application/x-genshi', 'application/x-kid']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, GenshiMarkupLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'\$\{.*?\}', text) is not None:
- rv += 0.2
+ rv += 0.2
if re.search(r'py:(.*?)=["\']', text) is not None:
- rv += 0.2
- return rv + XmlLexer.analyse_text(text) - 0.01
-
-
-class JavascriptGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights javascript code in genshi text templates.
- """
-
- name = 'JavaScript+Genshi Text'
- aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
- 'javascript+genshi']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+genshi',
- 'text/x-javascript+genshi',
- 'text/javascript+genshi']
-
- def __init__(self, **options):
+ rv += 0.2
+ return rv + XmlLexer.analyse_text(text) - 0.01
+
+
+class JavascriptGenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights javascript code in genshi text templates.
+ """
+
+ name = 'JavaScript+Genshi Text'
+ aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
+ 'javascript+genshi']
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+genshi',
+ 'text/x-javascript+genshi',
+ 'text/javascript+genshi']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, GenshiTextLexer, **options)
-
- def analyse_text(text):
- return GenshiLexer.analyse_text(text) - 0.05
-
-
-class CssGenshiLexer(DelegatingLexer):
- """
- A lexer that highlights CSS definitions in genshi text templates.
- """
-
- name = 'CSS+Genshi Text'
- aliases = ['css+genshitext', 'css+genshi']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+genshi']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return GenshiLexer.analyse_text(text) - 0.05
+
+
+class CssGenshiLexer(DelegatingLexer):
+ """
+ A lexer that highlights CSS definitions in genshi text templates.
+ """
+
+ name = 'CSS+Genshi Text'
+ aliases = ['css+genshitext', 'css+genshi']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+genshi']
+
+ def __init__(self, **options):
super().__init__(CssLexer, GenshiTextLexer, **options)
-
- def analyse_text(text):
- return GenshiLexer.analyse_text(text) - 0.05
-
-
-class RhtmlLexer(DelegatingLexer):
- """
- Subclass of the ERB lexer that highlights the unlexed data with the
- html lexer.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'RHTML'
- aliases = ['rhtml', 'html+erb', 'html+ruby']
- filenames = ['*.rhtml']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+ruby']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return GenshiLexer.analyse_text(text) - 0.05
+
+
+class RhtmlLexer(DelegatingLexer):
+ """
+ Subclass of the ERB lexer that highlights the unlexed data with the
+ html lexer.
+
+ Nested Javascript and CSS is highlighted too.
+ """
+
+ name = 'RHTML'
+ aliases = ['rhtml', 'html+erb', 'html+ruby']
+ filenames = ['*.rhtml']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml']
+ mimetypes = ['text/html+ruby']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, ErbLexer, **options)
-
- def analyse_text(text):
- rv = ErbLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- # one more than the XmlErbLexer returns
- rv += 0.5
- return rv
-
-
-class XmlErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights data outside preprocessor
- directives with the `XmlLexer`.
- """
-
- name = 'XML+Ruby'
+
+ def analyse_text(text):
+ rv = ErbLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ # one more than the XmlErbLexer returns
+ rv += 0.5
+ return rv
+
+
+class XmlErbLexer(DelegatingLexer):
+ """
+ Subclass of `ErbLexer` which highlights data outside preprocessor
+ directives with the `XmlLexer`.
+ """
+
+ name = 'XML+Ruby'
aliases = ['xml+ruby', 'xml+erb']
- alias_filenames = ['*.xml']
- mimetypes = ['application/xml+ruby']
-
- def __init__(self, **options):
+ alias_filenames = ['*.xml']
+ mimetypes = ['application/xml+ruby']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, ErbLexer, **options)
-
- def analyse_text(text):
- rv = ErbLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
- """
-
- name = 'CSS+Ruby'
+
+ def analyse_text(text):
+ rv = ErbLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssErbLexer(DelegatingLexer):
+ """
+ Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
+ """
+
+ name = 'CSS+Ruby'
aliases = ['css+ruby', 'css+erb']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+ruby']
-
- def __init__(self, **options):
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+ruby']
+
+ def __init__(self, **options):
super().__init__(CssLexer, ErbLexer, **options)
-
- def analyse_text(text):
- return ErbLexer.analyse_text(text) - 0.05
-
-
-class JavascriptErbLexer(DelegatingLexer):
- """
- Subclass of `ErbLexer` which highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Ruby'
+
+ def analyse_text(text):
+ return ErbLexer.analyse_text(text) - 0.05
+
+
+class JavascriptErbLexer(DelegatingLexer):
+ """
+ Subclass of `ErbLexer` which highlights unlexed data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Ruby'
aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+ruby',
- 'text/x-javascript+ruby',
- 'text/javascript+ruby']
-
- def __init__(self, **options):
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+ruby',
+ 'text/x-javascript+ruby',
+ 'text/javascript+ruby']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, ErbLexer, **options)
-
- def analyse_text(text):
- return ErbLexer.analyse_text(text) - 0.05
-
-
-class HtmlPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+PHP'
- aliases = ['html+php']
- filenames = ['*.phtml']
- alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
- '*.php[345]']
- mimetypes = ['application/x-php',
- 'application/x-httpd-php', 'application/x-httpd-php3',
- 'application/x-httpd-php4', 'application/x-httpd-php5']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return ErbLexer.analyse_text(text) - 0.05
+
+
+class HtmlPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
+
+ Nested Javascript and CSS is highlighted too.
+ """
+
+ name = 'HTML+PHP'
+ aliases = ['html+php']
+ filenames = ['*.phtml']
+ alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
+ '*.php[345]']
+ mimetypes = ['application/x-php',
+ 'application/x-httpd-php', 'application/x-httpd-php3',
+ 'application/x-httpd-php4', 'application/x-httpd-php5']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, PhpLexer, **options)
-
- def analyse_text(text):
- rv = PhpLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
- """
-
- name = 'XML+PHP'
- aliases = ['xml+php']
- alias_filenames = ['*.xml', '*.php', '*.php[345]']
- mimetypes = ['application/xml+php']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = PhpLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ rv += 0.5
+ return rv
+
+
+class XmlPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
+ """
+
+ name = 'XML+PHP'
+ aliases = ['xml+php']
+ alias_filenames = ['*.xml', '*.php', '*.php[345]']
+ mimetypes = ['application/xml+php']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, PhpLexer, **options)
-
- def analyse_text(text):
- rv = PhpLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
- """
-
- name = 'CSS+PHP'
- aliases = ['css+php']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+php']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = PhpLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
+ """
+
+ name = 'CSS+PHP'
+ aliases = ['css+php']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+php']
+
+ def __init__(self, **options):
super().__init__(CssLexer, PhpLexer, **options)
-
- def analyse_text(text):
- return PhpLexer.analyse_text(text) - 0.05
-
-
-class JavascriptPhpLexer(DelegatingLexer):
- """
- Subclass of `PhpLexer` which highlights unmatched data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+PHP'
+
+ def analyse_text(text):
+ return PhpLexer.analyse_text(text) - 0.05
+
+
+class JavascriptPhpLexer(DelegatingLexer):
+ """
+ Subclass of `PhpLexer` which highlights unmatched data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+PHP'
aliases = ['javascript+php', 'js+php']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+php',
- 'text/x-javascript+php',
- 'text/javascript+php']
-
- def __init__(self, **options):
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+php',
+ 'text/x-javascript+php',
+ 'text/javascript+php']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, PhpLexer, **options)
-
- def analyse_text(text):
- return PhpLexer.analyse_text(text)
-
-
-class HtmlSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+Smarty'
- aliases = ['html+smarty']
- alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
- mimetypes = ['text/html+smarty']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return PhpLexer.analyse_text(text)
+
+
+class HtmlSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ Nested Javascript and CSS is highlighted too.
+ """
+
+ name = 'HTML+Smarty'
+ aliases = ['html+smarty']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
+ mimetypes = ['text/html+smarty']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- rv = SmartyLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `XmlLexer`.
- """
-
- name = 'XML+Smarty'
- aliases = ['xml+smarty']
- alias_filenames = ['*.xml', '*.tpl']
- mimetypes = ['application/xml+smarty']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = SmartyLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ rv += 0.5
+ return rv
+
+
+class XmlSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `XmlLexer`.
+ """
+
+ name = 'XML+Smarty'
+ aliases = ['xml+smarty']
+ alias_filenames = ['*.xml', '*.tpl']
+ mimetypes = ['application/xml+smarty']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- rv = SmartyLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `CssLexer`.
- """
-
- name = 'CSS+Smarty'
- aliases = ['css+smarty']
- alias_filenames = ['*.css', '*.tpl']
- mimetypes = ['text/css+smarty']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = SmartyLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `CssLexer`.
+ """
+
+ name = 'CSS+Smarty'
+ aliases = ['css+smarty']
+ alias_filenames = ['*.css', '*.tpl']
+ mimetypes = ['text/css+smarty']
+
+ def __init__(self, **options):
super().__init__(CssLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- return SmartyLexer.analyse_text(text) - 0.05
-
-
-class JavascriptSmartyLexer(DelegatingLexer):
- """
- Subclass of the `SmartyLexer` that highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Smarty'
+
+ def analyse_text(text):
+ return SmartyLexer.analyse_text(text) - 0.05
+
+
+class JavascriptSmartyLexer(DelegatingLexer):
+ """
+ Subclass of the `SmartyLexer` that highlights unlexed data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Smarty'
aliases = ['javascript+smarty', 'js+smarty']
- alias_filenames = ['*.js', '*.tpl']
- mimetypes = ['application/x-javascript+smarty',
- 'text/x-javascript+smarty',
- 'text/javascript+smarty']
-
- def __init__(self, **options):
+ alias_filenames = ['*.js', '*.tpl']
+ mimetypes = ['application/x-javascript+smarty',
+ 'text/x-javascript+smarty',
+ 'text/javascript+smarty']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, SmartyLexer, **options)
-
- def analyse_text(text):
- return SmartyLexer.analyse_text(text) - 0.05
-
-
-class HtmlDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- Nested Javascript and CSS is highlighted too.
- """
-
- name = 'HTML+Django/Jinja'
- aliases = ['html+django', 'html+jinja', 'htmldjango']
- alias_filenames = ['*.html', '*.htm', '*.xhtml']
- mimetypes = ['text/html+django', 'text/html+jinja']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return SmartyLexer.analyse_text(text) - 0.05
+
+
+class HtmlDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ Nested Javascript and CSS is highlighted too.
+ """
+
+ name = 'HTML+Django/Jinja'
+ aliases = ['html+django', 'html+jinja', 'htmldjango']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml']
+ mimetypes = ['text/html+django', 'text/html+jinja']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- rv = DjangoLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text):
- rv += 0.5
- return rv
-
-
-class XmlDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `XmlLexer`.
- """
-
- name = 'XML+Django/Jinja'
- aliases = ['xml+django', 'xml+jinja']
- alias_filenames = ['*.xml']
- mimetypes = ['application/xml+django', 'application/xml+jinja']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = DjangoLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text):
+ rv += 0.5
+ return rv
+
+
+class XmlDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `XmlLexer`.
+ """
+
+ name = 'XML+Django/Jinja'
+ aliases = ['xml+django', 'xml+jinja']
+ alias_filenames = ['*.xml']
+ mimetypes = ['application/xml+django', 'application/xml+jinja']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- rv = DjangoLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class CssDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `CssLexer`.
- """
-
- name = 'CSS+Django/Jinja'
- aliases = ['css+django', 'css+jinja']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+django', 'text/css+jinja']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = DjangoLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class CssDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `CssLexer`.
+ """
+
+ name = 'CSS+Django/Jinja'
+ aliases = ['css+django', 'css+jinja']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+django', 'text/css+jinja']
+
+ def __init__(self, **options):
super().__init__(CssLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JavascriptDjangoLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `JavascriptLexer`.
- """
-
- name = 'JavaScript+Django/Jinja'
+
+ def analyse_text(text):
+ return DjangoLexer.analyse_text(text) - 0.05
+
+
+class JavascriptDjangoLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `JavascriptLexer`.
+ """
+
+ name = 'JavaScript+Django/Jinja'
aliases = ['javascript+django', 'js+django',
'javascript+jinja', 'js+jinja']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+django',
- 'application/x-javascript+jinja',
- 'text/x-javascript+django',
- 'text/x-javascript+jinja',
- 'text/javascript+django',
- 'text/javascript+jinja']
-
- def __init__(self, **options):
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+django',
+ 'application/x-javascript+jinja',
+ 'text/x-javascript+django',
+ 'text/x-javascript+jinja',
+ 'text/javascript+django',
+ 'text/javascript+jinja']
+
+ def __init__(self, **options):
super().__init__(JavascriptLexer, DjangoLexer, **options)
-
- def analyse_text(text):
- return DjangoLexer.analyse_text(text) - 0.05
-
-
-class JspRootLexer(RegexLexer):
- """
- Base for the `JspLexer`. Yields `Token.Other` for area outside of
- JSP tags.
-
- .. versionadded:: 0.7
- """
-
- tokens = {
- 'root': [
- (r'<%\S?', Keyword, 'sec'),
- # FIXME: I want to make these keywords but still parse attributes.
- (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
- Keyword),
- (r'[^<]+', Other),
- (r'<', Other),
- ],
- 'sec': [
- (r'%>', Keyword, '#pop'),
- # note: '\w\W' != '.' without DOTALL.
- (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
- ],
- }
-
-
-class JspLexer(DelegatingLexer):
- """
- Lexer for Java Server Pages.
-
- .. versionadded:: 0.7
- """
- name = 'Java Server Page'
- aliases = ['jsp']
- filenames = ['*.jsp']
- mimetypes = ['application/x-jsp']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ return DjangoLexer.analyse_text(text) - 0.05
+
+
+class JspRootLexer(RegexLexer):
+ """
+ Base for the `JspLexer`. Yields `Token.Other` for area outside of
+ JSP tags.
+
+ .. versionadded:: 0.7
+ """
+
+ tokens = {
+ 'root': [
+ (r'<%\S?', Keyword, 'sec'),
+ # FIXME: I want to make these keywords but still parse attributes.
+ (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
+ Keyword),
+ (r'[^<]+', Other),
+ (r'<', Other),
+ ],
+ 'sec': [
+ (r'%>', Keyword, '#pop'),
+ # note: '\w\W' != '.' without DOTALL.
+ (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
+ ],
+ }
+
+
+class JspLexer(DelegatingLexer):
+ """
+ Lexer for Java Server Pages.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Java Server Page'
+ aliases = ['jsp']
+ filenames = ['*.jsp']
+ mimetypes = ['application/x-jsp']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, JspRootLexer, **options)
-
- def analyse_text(text):
- rv = JavaLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class EvoqueLexer(RegexLexer):
- """
- For files using the Evoque templating system.
-
- .. versionadded:: 1.1
- """
- name = 'Evoque'
- aliases = ['evoque']
- filenames = ['*.evoque']
- mimetypes = ['application/x-evoque']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'[^#$]+', Other),
- (r'#\[', Comment.Multiline, 'comment'),
- (r'\$\$', Other),
- # svn keywords
- (r'\$\w+:[^$\n]*\$', Comment.Multiline),
- # directives: begin, end
- (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- String, Punctuation)),
- # directives: evoque, overlay
- # see doc for handling first name arg: /directives/evoque/
- # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
- # should be using(PythonLexer), not passed out as String
+
+ def analyse_text(text):
+ rv = JavaLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ if '<%' in text and '%>' in text:
+ rv += 0.1
+ return rv
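Note: JspLexer composes three layers: JspRootLexer finds the <% ... %> regions and hands their contents to JavaLexer, and everything it yields as Token.Other is re-lexed as XML. A minimal sketch using the registered alias (the page snippet is illustrative):

    from pygments.lexers import get_lexer_by_name

    jsp = get_lexer_by_name('jsp')
    page = '<html><body><% out.println("hi"); %></body></html>'
    for tok, value in jsp.get_tokens(page):
        print(tok, repr(value))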
+
+
+class EvoqueLexer(RegexLexer):
+ """
+ For files using the Evoque templating system.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Evoque'
+ aliases = ['evoque']
+ filenames = ['*.evoque']
+ mimetypes = ['application/x-evoque']
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'[^#$]+', Other),
+ (r'#\[', Comment.Multiline, 'comment'),
+ (r'\$\$', Other),
+ # svn keywords
+ (r'\$\w+:[^$\n]*\$', Comment.Multiline),
+ # directives: begin, end
+ (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
+ bygroups(Punctuation, Name.Builtin, Punctuation, None,
+ String, Punctuation)),
+ # directives: evoque, overlay
+ # see doc for handling first name arg: /directives/evoque/
+ # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
+ # should be using(PythonLexer), not passed out as String
(r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
- r'(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- String, using(PythonLexer), Punctuation)),
- # directives: if, for, prefer, test
- (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
- bygroups(Punctuation, Name.Builtin, Punctuation, None,
- using(PythonLexer), Punctuation)),
- # directive clauses (no {} expression)
- (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
- # expressions
- (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
- bygroups(Punctuation, None, using(PythonLexer),
- Name.Builtin, None, None, Punctuation)),
- (r'#', Other),
- ],
- 'comment': [
- (r'[^\]#]', Comment.Multiline),
- (r'#\[', Comment.Multiline, '#push'),
- (r'\]#', Comment.Multiline, '#pop'),
- (r'[\]#]', Comment.Multiline)
- ],
- }
-
+ r'(.*?)((?(4)%)\})',
+ bygroups(Punctuation, Name.Builtin, Punctuation, None,
+ String, using(PythonLexer), Punctuation)),
+ # directives: if, for, prefer, test
+ (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
+ bygroups(Punctuation, Name.Builtin, Punctuation, None,
+ using(PythonLexer), Punctuation)),
+ # directive clauses (no {} expression)
+ (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
+ # expressions
+ (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
+ bygroups(Punctuation, None, using(PythonLexer),
+ Name.Builtin, None, None, Punctuation)),
+ (r'#', Other),
+ ],
+ 'comment': [
+ (r'[^\]#]', Comment.Multiline),
+ (r'#\[', Comment.Multiline, '#push'),
+ (r'\]#', Comment.Multiline, '#pop'),
+ (r'[\]#]', Comment.Multiline)
+ ],
+ }
+
def analyse_text(text):
"""Evoque templates use $evoque, which is unique."""
if '$evoque' in text:
return 1
-
-class EvoqueHtmlLexer(DelegatingLexer):
- """
- Subclass of the `EvoqueLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 1.1
- """
- name = 'HTML+Evoque'
- aliases = ['html+evoque']
- filenames = ['*.html']
- mimetypes = ['text/html+evoque']
-
- def __init__(self, **options):
+
+class EvoqueHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `EvoqueLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 1.1
+ """
+ name = 'HTML+Evoque'
+ aliases = ['html+evoque']
+ filenames = ['*.html']
+ mimetypes = ['text/html+evoque']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, EvoqueLexer, **options)
-
+
def analyse_text(text):
return EvoqueLexer.analyse_text(text)
-
-
-class EvoqueXmlLexer(DelegatingLexer):
- """
- Subclass of the `EvoqueLexer` that highlights unlexed data with the
- `XmlLexer`.
-
- .. versionadded:: 1.1
- """
- name = 'XML+Evoque'
- aliases = ['xml+evoque']
- filenames = ['*.xml']
- mimetypes = ['application/xml+evoque']
-
- def __init__(self, **options):
+
+
+class EvoqueXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `EvoqueLexer` that highlights unlexed data with the
+ `XmlLexer`.
+
+ .. versionadded:: 1.1
+ """
+ name = 'XML+Evoque'
+ aliases = ['xml+evoque']
+ filenames = ['*.xml']
+ mimetypes = ['application/xml+evoque']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, EvoqueLexer, **options)
-
+
def analyse_text(text):
return EvoqueLexer.analyse_text(text)
-
-
-class ColdfusionLexer(RegexLexer):
- """
- Coldfusion statements
- """
- name = 'cfstatement'
- aliases = ['cfs']
- filenames = []
- mimetypes = []
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'//.*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
- (r'\+\+|--', Operator),
- (r'[-+*/^&=!]', Operator),
- (r'<=|>=|<|>|==', Operator),
- (r'mod\b', Operator),
- (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
- (r'\|\||&&', Operator),
- (r'\?', Operator),
- (r'"', String.Double, 'string'),
- # There is a special rule for allowing html in single quoted
- # strings, evidently.
- (r"'.*?'", String.Single),
- (r'\d+', Number),
- (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
- r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
- r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(application|session|client|cookie|super|this|variables|arguments)\b',
- Name.Constant),
- (r'([a-z_$][\w.]*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-z_$][\w.]*', Name.Variable),
- (r'[()\[\]{};:,.\\]', Punctuation),
- (r'\s+', Text),
- ],
- 'string': [
- (r'""', String.Double),
- (r'#.+?#', String.Interp),
- (r'[^"#]+', String.Double),
- (r'#', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- }
-
-
-class ColdfusionMarkupLexer(RegexLexer):
- """
- Coldfusion markup only
- """
- name = 'Coldfusion'
- aliases = ['cf']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'[^<]+', Other),
- include('tags'),
- (r'<[^<>]*', Other),
- ],
- 'tags': [
- (r'<!---', Comment.Multiline, 'cfcomment'),
- (r'(?s)<!--.*?-->', Comment),
- (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
- (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
- bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
- # negative lookbehind is for strings with embedded >
- (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
- r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
- r'mailpart|mail|header|content|zip|image|lock|argument|try|'
- r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
- bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
- ],
- 'cfoutput': [
- (r'[^#<]+', Other),
- (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
- Punctuation)),
- # (r'<cfoutput.*?>', Name.Builtin, '#push'),
- (r'</cfoutput.*?>', Name.Builtin, '#pop'),
- include('tags'),
- (r'(?s)<[^<>]*', Other),
- (r'#', Other),
- ],
- 'cfcomment': [
- (r'<!---', Comment.Multiline, '#push'),
- (r'--->', Comment.Multiline, '#pop'),
- (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
- ],
- }
-
-
-class ColdfusionHtmlLexer(DelegatingLexer):
- """
- Coldfusion markup in html
- """
- name = 'Coldfusion HTML'
- aliases = ['cfm']
- filenames = ['*.cfm', '*.cfml']
- mimetypes = ['application/x-coldfusion']
-
- def __init__(self, **options):
+
+
+class ColdfusionLexer(RegexLexer):
+ """
+ Coldfusion statements
+ """
+ name = 'cfstatement'
+ aliases = ['cfs']
+ filenames = []
+ mimetypes = []
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
+ (r'\+\+|--', Operator),
+ (r'[-+*/^&=!]', Operator),
+ (r'<=|>=|<|>|==', Operator),
+ (r'mod\b', Operator),
+ (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
+ (r'\|\||&&', Operator),
+ (r'\?', Operator),
+ (r'"', String.Double, 'string'),
+ # There is a special rule for allowing html in single quoted
+ # strings, evidently.
+ (r"'.*?'", String.Single),
+ (r'\d+', Number),
+ (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
+ r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
+ r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'(application|session|client|cookie|super|this|variables|arguments)\b',
+ Name.Constant),
+ (r'([a-z_$][\w.]*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-z_$][\w.]*', Name.Variable),
+ (r'[()\[\]{};:,.\\]', Punctuation),
+ (r'\s+', Text),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'#.+?#', String.Interp),
+ (r'[^"#]+', String.Double),
+ (r'#', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
+
+
+class ColdfusionMarkupLexer(RegexLexer):
+ """
+ Coldfusion markup only
+ """
+ name = 'Coldfusion'
+ aliases = ['cf']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'[^<]+', Other),
+ include('tags'),
+ (r'<[^<>]*', Other),
+ ],
+ 'tags': [
+ (r'<!---', Comment.Multiline, 'cfcomment'),
+ (r'(?s)<!--.*?-->', Comment),
+ (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
+ (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
+ bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
+ # negative lookbehind is for strings with embedded >
+ (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
+ r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
+ r'mailpart|mail|header|content|zip|image|lock|argument|try|'
+ r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
+ bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
+ ],
+ 'cfoutput': [
+ (r'[^#<]+', Other),
+ (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
+ Punctuation)),
+ # (r'<cfoutput.*?>', Name.Builtin, '#push'),
+ (r'</cfoutput.*?>', Name.Builtin, '#pop'),
+ include('tags'),
+ (r'(?s)<[^<>]*', Other),
+ (r'#', Other),
+ ],
+ 'cfcomment': [
+ (r'<!---', Comment.Multiline, '#push'),
+ (r'--->', Comment.Multiline, '#pop'),
+ (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
+ ],
+ }
+
+
+class ColdfusionHtmlLexer(DelegatingLexer):
+ """
+ Coldfusion markup in html
+ """
+ name = 'Coldfusion HTML'
+ aliases = ['cfm']
+ filenames = ['*.cfm', '*.cfml']
+ mimetypes = ['application/x-coldfusion']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
-
-
-class ColdfusionCFCLexer(DelegatingLexer):
- """
- Coldfusion markup/script components
-
- .. versionadded:: 2.0
- """
- name = 'Coldfusion CFC'
- aliases = ['cfc']
- filenames = ['*.cfc']
- mimetypes = []
-
- def __init__(self, **options):
+
+
+class ColdfusionCFCLexer(DelegatingLexer):
+ """
+ Coldfusion markup/script components
+
+ .. versionadded:: 2.0
+ """
+ name = 'Coldfusion CFC'
+ aliases = ['cfc']
+ filenames = ['*.cfc']
+ mimetypes = []
+
+ def __init__(self, **options):
super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
-
-
-class SspLexer(DelegatingLexer):
- """
- Lexer for Scalate Server Pages.
-
- .. versionadded:: 1.4
- """
- name = 'Scalate Server Page'
- aliases = ['ssp']
- filenames = ['*.ssp']
- mimetypes = ['application/x-ssp']
-
- def __init__(self, **options):
+
+
+class SspLexer(DelegatingLexer):
+ """
+ Lexer for Scalate Server Pages.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Scalate Server Page'
+ aliases = ['ssp']
+ filenames = ['*.ssp']
+ mimetypes = ['application/x-ssp']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, JspRootLexer, **options)
-
- def analyse_text(text):
- rv = 0.0
+
+ def analyse_text(text):
+ rv = 0.0
if re.search(r'val \w+\s*:', text):
- rv += 0.6
- if looks_like_xml(text):
- rv += 0.2
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class TeaTemplateRootLexer(RegexLexer):
- """
- Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
- code blocks.
-
- .. versionadded:: 1.5
- """
-
- tokens = {
- 'root': [
- (r'<%\S?', Keyword, 'sec'),
- (r'[^<]+', Other),
- (r'<', Other),
- ],
- 'sec': [
- (r'%>', Keyword, '#pop'),
- # note: '\w\W' != '.' without DOTALL.
- (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
- ],
- }
-
-
-class TeaTemplateLexer(DelegatingLexer):
- """
- Lexer for `Tea Templates <http://teatrove.org/>`_.
-
- .. versionadded:: 1.5
- """
- name = 'Tea'
- aliases = ['tea']
- filenames = ['*.tea']
- mimetypes = ['text/x-tea']
-
- def __init__(self, **options):
+ rv += 0.6
+ if looks_like_xml(text):
+ rv += 0.2
+ if '<%' in text and '%>' in text:
+ rv += 0.1
+ return rv
+
+
+class TeaTemplateRootLexer(RegexLexer):
+ """
+ Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
+ code blocks.
+
+ .. versionadded:: 1.5
+ """
+
+ tokens = {
+ 'root': [
+ (r'<%\S?', Keyword, 'sec'),
+ (r'[^<]+', Other),
+ (r'<', Other),
+ ],
+ 'sec': [
+ (r'%>', Keyword, '#pop'),
+ # note: '\w\W' != '.' without DOTALL.
+ (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
+ ],
+ }
+
+
+class TeaTemplateLexer(DelegatingLexer):
+ """
+ Lexer for `Tea Templates <http://teatrove.org/>`_.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Tea'
+ aliases = ['tea']
+ filenames = ['*.tea']
+ mimetypes = ['text/x-tea']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
-
- def analyse_text(text):
- rv = TeaLangLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- if '<%' in text and '%>' in text:
- rv += 0.1
- return rv
-
-
-class LassoHtmlLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `HtmlLexer`.
-
- Nested JavaScript and CSS is also highlighted.
-
- .. versionadded:: 1.6
- """
-
- name = 'HTML+Lasso'
- aliases = ['html+lasso']
- alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
- '*.incl', '*.inc', '*.las']
- mimetypes = ['text/html+lasso',
- 'application/x-httpd-lasso',
- 'application/x-httpd-lasso[89]']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = TeaLangLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ if '<%' in text and '%>' in text:
+ rv += 0.1
+ return rv
+
+
+class LassoHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `HtmlLexer`.
+
+ Nested JavaScript and CSS is also highlighted.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'HTML+Lasso'
+ aliases = ['html+lasso']
+ alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
+ '*.incl', '*.inc', '*.las']
+ mimetypes = ['text/html+lasso',
+ 'application/x-httpd-lasso',
+ 'application/x-httpd-lasso[89]']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.01
- if html_doctype_matches(text): # same as HTML lexer
- rv += 0.5
- return rv
-
-
-class LassoXmlLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `XmlLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'XML+Lasso'
- aliases = ['xml+lasso']
- alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
- '*.incl', '*.inc', '*.las']
- mimetypes = ['application/xml+lasso']
-
- def __init__(self, **options):
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.01
+ if html_doctype_matches(text): # same as HTML lexer
+ rv += 0.5
+ return rv
+
+
+class LassoXmlLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `XmlLexer`.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'XML+Lasso'
+ aliases = ['xml+lasso']
+ alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
+ '*.incl', '*.inc', '*.las']
+ mimetypes = ['application/xml+lasso']
+
+ def __init__(self, **options):
super().__init__(XmlLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.01
- if looks_like_xml(text):
- rv += 0.4
- return rv
-
-
-class LassoCssLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `CssLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'CSS+Lasso'
- aliases = ['css+lasso']
- alias_filenames = ['*.css']
- mimetypes = ['text/css+lasso']
-
- def __init__(self, **options):
- options['requiredelimiters'] = True
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.01
+ if looks_like_xml(text):
+ rv += 0.4
+ return rv
+
+
+class LassoCssLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `CssLexer`.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'CSS+Lasso'
+ aliases = ['css+lasso']
+ alias_filenames = ['*.css']
+ mimetypes = ['text/css+lasso']
+
+ def __init__(self, **options):
+ options['requiredelimiters'] = True
super().__init__(CssLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.05
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.05
if re.search(r'\w+:[^;]+;', text):
- rv += 0.1
- if 'padding:' in text:
- rv += 0.1
- return rv
-
-
-class LassoJavascriptLexer(DelegatingLexer):
- """
- Subclass of the `LassoLexer` which highlights unhandled data with the
- `JavascriptLexer`.
-
- .. versionadded:: 1.6
- """
-
- name = 'JavaScript+Lasso'
+ rv += 0.1
+ if 'padding:' in text:
+ rv += 0.1
+ return rv
+
+
+class LassoJavascriptLexer(DelegatingLexer):
+ """
+ Subclass of the `LassoLexer` which highlights unhandled data with the
+ `JavascriptLexer`.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'JavaScript+Lasso'
aliases = ['javascript+lasso', 'js+lasso']
- alias_filenames = ['*.js']
- mimetypes = ['application/x-javascript+lasso',
- 'text/x-javascript+lasso',
- 'text/javascript+lasso']
-
- def __init__(self, **options):
- options['requiredelimiters'] = True
+ alias_filenames = ['*.js']
+ mimetypes = ['application/x-javascript+lasso',
+ 'text/x-javascript+lasso',
+ 'text/javascript+lasso']
+
+ def __init__(self, **options):
+ options['requiredelimiters'] = True
super().__init__(JavascriptLexer, LassoLexer, **options)
-
- def analyse_text(text):
- rv = LassoLexer.analyse_text(text) - 0.05
- return rv
-
-
-class HandlebarsLexer(RegexLexer):
- """
- Generic `handlebars <http://handlebarsjs.com/>` template lexer.
-
- Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
- Everything else is left for a delegating lexer.
-
- .. versionadded:: 2.0
- """
-
- name = "Handlebars"
- aliases = ['handlebars']
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
-
+
+ def analyse_text(text):
+ rv = LassoLexer.analyse_text(text) - 0.05
+ return rv
+
+
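The small analyse_text() adjustments defined on these Lasso lexers (and on most lexers in this file) feed Pygments' content-based guessing. A brief illustrative sketch; the snippet passed in is invented and no particular winner is implied:

    from pygments.lexers import guess_lexer

    # guess_lexer() runs analyse_text() for every registered lexer and
    # instantiates the class with the highest score.
    snippet = '<html><body>{{ greeting }}</body></html>'
    print(guess_lexer(snippet).name)
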
+class HandlebarsLexer(RegexLexer):
+ """
+ Generic `handlebars <http://handlebarsjs.com/>` template lexer.
+
+ Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
+ Everything else is left for a delegating lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "Handlebars"
+ aliases = ['handlebars']
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+
# Comment start {{! }} or {{!--
- (r'\{\{!.*\}\}', Comment),
-
+ (r'\{\{!.*\}\}', Comment),
+
# HTML Escaping open {{{expression
- (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
+ (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),
# {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
(r'(\{\{)([#~/]+)([^\s}]*)',
bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
- (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
- ],
-
- 'tag': [
- (r'\s+', Text),
+ (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
+ ],
+
+ 'tag': [
+ (r'\s+', Text),
# HTML Escaping close }}}
- (r'\}\}\}', Comment.Special, '#pop'),
+ (r'\}\}\}', Comment.Special, '#pop'),
# blockClose}}, includes optional tilde ~
(r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),
-
- # {{opt=something}}
+
+ # {{opt=something}}
(r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),
-
+
# Partials {{> ...}}
(r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
(r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
@@ -1832,354 +1832,354 @@ class HandlebarsLexer(RegexLexer):
'generic': [
include('variable'),
- # borrowed from DjangoLexer
+ # borrowed from DjangoLexer
(r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ]
- }
-
-
-class HandlebarsHtmlLexer(DelegatingLexer):
- """
- Subclass of the `HandlebarsLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 2.0
- """
-
- name = "HTML+Handlebars"
- aliases = ["html+handlebars"]
- filenames = ['*.handlebars', '*.hbs']
- mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
-
- def __init__(self, **options):
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ]
+ }
+
+
+class HandlebarsHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `HandlebarsLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "HTML+Handlebars"
+ aliases = ["html+handlebars"]
+ filenames = ['*.handlebars', '*.hbs']
+ mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, HandlebarsLexer, **options)
-
-
-class YamlJinjaLexer(DelegatingLexer):
- """
- Subclass of the `DjangoLexer` that highlights unlexed data with the
- `YamlLexer`.
-
- Commonly used in Saltstack salt states.
-
- .. versionadded:: 2.0
- """
-
- name = 'YAML+Jinja'
- aliases = ['yaml+jinja', 'salt', 'sls']
- filenames = ['*.sls']
- mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
-
- def __init__(self, **options):
+
+
+class YamlJinjaLexer(DelegatingLexer):
+ """
+ Subclass of the `DjangoLexer` that highlights unlexed data with the
+ `YamlLexer`.
+
+ Commonly used in Saltstack salt states.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'YAML+Jinja'
+ aliases = ['yaml+jinja', 'salt', 'sls']
+ filenames = ['*.sls']
+ mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
+
+ def __init__(self, **options):
super().__init__(YamlLexer, DjangoLexer, **options)
-
-
-class LiquidLexer(RegexLexer):
- """
- Lexer for `Liquid templates
- <http://www.rubydoc.info/github/Shopify/liquid>`_.
-
- .. versionadded:: 2.0
- """
- name = 'liquid'
- aliases = ['liquid']
- filenames = ['*.liquid']
-
- tokens = {
- 'root': [
- (r'[^{]+', Text),
- # tags and block tags
- (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
- # output tags
- (r'(\{\{)(\s*)([^\s}]+)',
- bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
- 'output'),
- (r'\{', Text)
- ],
-
- 'tag-or-block': [
- # builtin logic blocks
- (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
- (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
- combined('end-of-block', 'whitespace', 'generic')),
- (r'(else)(\s*)(%\})',
- bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
-
- # other builtin blocks
- (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
- Whitespace, Punctuation), '#pop'),
- (r'(comment)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
- (r'(raw)(\s*)(%\})',
- bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
-
- # end of block
- (r'(end(case|unless|if))(\s*)(%\})',
- bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
- (r'(end([^\s%]+))(\s*)(%\})',
- bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
-
- # builtin tags (assign and include are handled together with usual tags)
- (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
- bygroups(Name.Tag, Whitespace,
- using(this, state='generic'), Punctuation, Whitespace),
- 'variable-tag-markup'),
-
- # other tags or blocks
- (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
- ],
-
- 'output': [
- include('whitespace'),
+
+
+class LiquidLexer(RegexLexer):
+ """
+ Lexer for `Liquid templates
+ <http://www.rubydoc.info/github/Shopify/liquid>`_.
+
+ .. versionadded:: 2.0
+ """
+ name = 'liquid'
+ aliases = ['liquid']
+ filenames = ['*.liquid']
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Text),
+ # tags and block tags
+ (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
+ # output tags
+ (r'(\{\{)(\s*)([^\s}]+)',
+ bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
+ 'output'),
+ (r'\{', Text)
+ ],
+
+ 'tag-or-block': [
+ # builtin logic blocks
+ (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
+ (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
+ combined('end-of-block', 'whitespace', 'generic')),
+ (r'(else)(\s*)(%\})',
+ bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
+
+ # other builtin blocks
+ (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
+ Whitespace, Punctuation), '#pop'),
+ (r'(comment)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
+ (r'(raw)(\s*)(%\})',
+ bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
+
+ # end of block
+ (r'(end(case|unless|if))(\s*)(%\})',
+ bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
+ (r'(end([^\s%]+))(\s*)(%\})',
+ bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
+
+ # builtin tags (assign and include are handled together with usual tags)
+ (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
+ bygroups(Name.Tag, Whitespace,
+ using(this, state='generic'), Punctuation, Whitespace),
+ 'variable-tag-markup'),
+
+ # other tags or blocks
+ (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
+ ],
+
+ 'output': [
+ include('whitespace'),
(r'\}\}', Punctuation, '#pop'), # end of output
-
- (r'\|', Punctuation, 'filters')
- ],
-
- 'filters': [
- include('whitespace'),
- (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
-
- (r'([^\s|:]+)(:?)(\s*)',
- bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
- ],
-
- 'filter-markup': [
- (r'\|', Punctuation, '#pop'),
- include('end-of-tag'),
- include('default-param-markup')
- ],
-
- 'condition': [
- include('end-of-block'),
- include('whitespace'),
-
- (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
- bygroups(using(this, state = 'generic'), Whitespace, Operator,
- Whitespace, using(this, state = 'generic'), Whitespace,
- Punctuation)),
- (r'\b!', Operator),
- (r'\bnot\b', Operator.Word),
- (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
- bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
- Whitespace, using(this, state = 'generic'))),
-
- include('generic'),
- include('whitespace')
- ],
-
- 'generic-value': [
- include('generic'),
- include('end-at-whitespace')
- ],
-
- 'operator': [
- (r'(\s*)((=|!|>|<)=?)(\s*)',
- bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
- (r'(\s*)(\bcontains\b)(\s*)',
- bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
- ],
-
- 'end-of-tag': [
- (r'\}\}', Punctuation, '#pop')
- ],
-
- 'end-of-block': [
- (r'%\}', Punctuation, ('#pop', '#pop'))
- ],
-
- 'end-at-whitespace': [
- (r'\s+', Whitespace, '#pop')
- ],
-
- # states for unknown markup
- 'param-markup': [
- include('whitespace'),
- # params with colons or equals
- (r'([^\s=:]+)(\s*)(=|:)',
- bygroups(Name.Attribute, Whitespace, Operator)),
- # explicit variables
- (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
- bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
- Whitespace, Punctuation)),
-
- include('string'),
- include('number'),
- include('keyword'),
- (r',', Punctuation)
- ],
-
- 'default-param-markup': [
- include('param-markup'),
- (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
- ],
-
- 'variable-param-markup': [
- include('param-markup'),
- include('variable'),
- (r'.', Text) # fallback
- ],
-
- 'tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
- include('default-param-markup')
- ],
-
- 'variable-tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
- include('variable-param-markup')
- ],
-
- # states for different values types
- 'keyword': [
- (r'\b(false|true)\b', Keyword.Constant)
- ],
-
- 'variable': [
- (r'[a-zA-Z_]\w*', Name.Variable),
- (r'(?<=\w)\.(?=\w)', Punctuation)
- ],
-
- 'string': [
- (r"'[^']*'", String.Single),
- (r'"[^"]*"', String.Double)
- ],
-
- 'number': [
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer)
- ],
-
- 'generic': [ # decides for variable, string, keyword or number
- include('keyword'),
- include('string'),
- include('number'),
- include('variable')
- ],
-
- 'whitespace': [
- (r'[ \t]+', Whitespace)
- ],
-
- # states for builtin blocks
- 'comment': [
- (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
- bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
- Punctuation), ('#pop', '#pop')),
- (r'.', Comment)
- ],
-
- 'raw': [
- (r'[^{]+', Text),
- (r'(\{%)(\s*)(endraw)(\s*)(%\})',
- bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
- Punctuation), '#pop'),
- (r'\{', Text)
- ],
- }
-
-
-class TwigLexer(RegexLexer):
- """
- `Twig <http://twig.sensiolabs.org/>`_ template lexer.
-
- It just highlights Twig code between the preprocessor directives,
- other data is left untouched by the lexer.
-
- .. versionadded:: 2.0
- """
-
- name = 'Twig'
- aliases = ['twig']
- mimetypes = ['application/x-twig']
-
- flags = re.M | re.S
-
- # Note that a backslash is included in the following two patterns
- # PHP uses a backslash as a namespace separator
- _ident_char = r'[\\\w-]|[^\x00-\x7f]'
- _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
- _ident_end = r'(?:' + _ident_char + ')*'
- _ident_inner = _ident_begin + _ident_end
-
- tokens = {
- 'root': [
- (r'[^{]+', Other),
- (r'\{\{', Comment.Preproc, 'var'),
- # twig comments
- (r'\{\#.*?\#\}', Comment),
- # raw twig blocks
- (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Other, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
- r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
- bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
- Other, Comment.Preproc, Text, Keyword, Text,
- Comment.Preproc)),
- # filter blocks
- (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
- bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
- 'tag'),
- (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
- bygroups(Comment.Preproc, Text, Keyword), 'tag'),
- (r'\{', Other),
- ],
- 'varnames': [
- (r'(\|)(\s*)(%s)' % _ident_inner,
- bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
- (r'(in|not|and|b-and|or|b-or|b-xor|is'
- r'if|elseif|else|import'
- r'constant|defined|divisibleby|empty|even|iterable|odd|sameas'
- r'matches|starts\s+with|ends\s+with)\b',
- Keyword),
- (r'(loop|block|parent)\b', Name.Builtin),
- (_ident_inner, Name.Variable),
- (r'\.' + _ident_inner, Name.Variable),
- (r'\.[0-9]+', Number),
+
+ (r'\|', Punctuation, 'filters')
+ ],
+
+ 'filters': [
+ include('whitespace'),
+ (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
+
+ (r'([^\s|:]+)(:?)(\s*)',
+ bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
+ ],
+
+ 'filter-markup': [
+ (r'\|', Punctuation, '#pop'),
+ include('end-of-tag'),
+ include('default-param-markup')
+ ],
+
+ 'condition': [
+ include('end-of-block'),
+ include('whitespace'),
+
+ (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
+ bygroups(using(this, state = 'generic'), Whitespace, Operator,
+ Whitespace, using(this, state = 'generic'), Whitespace,
+ Punctuation)),
+ (r'\b!', Operator),
+ (r'\bnot\b', Operator.Word),
+ (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
+ bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
+ Whitespace, using(this, state = 'generic'))),
+
+ include('generic'),
+ include('whitespace')
+ ],
+
+ 'generic-value': [
+ include('generic'),
+ include('end-at-whitespace')
+ ],
+
+ 'operator': [
+ (r'(\s*)((=|!|>|<)=?)(\s*)',
+ bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
+ (r'(\s*)(\bcontains\b)(\s*)',
+ bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
+ ],
+
+ 'end-of-tag': [
+ (r'\}\}', Punctuation, '#pop')
+ ],
+
+ 'end-of-block': [
+ (r'%\}', Punctuation, ('#pop', '#pop'))
+ ],
+
+ 'end-at-whitespace': [
+ (r'\s+', Whitespace, '#pop')
+ ],
+
+ # states for unknown markup
+ 'param-markup': [
+ include('whitespace'),
+ # params with colons or equals
+ (r'([^\s=:]+)(\s*)(=|:)',
+ bygroups(Name.Attribute, Whitespace, Operator)),
+ # explicit variables
+ (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
+ bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
+ Whitespace, Punctuation)),
+
+ include('string'),
+ include('number'),
+ include('keyword'),
+ (r',', Punctuation)
+ ],
+
+ 'default-param-markup': [
+ include('param-markup'),
+ (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
+ ],
+
+ 'variable-param-markup': [
+ include('param-markup'),
+ include('variable'),
+ (r'.', Text) # fallback
+ ],
+
+ 'tag-markup': [
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ include('default-param-markup')
+ ],
+
+ 'variable-tag-markup': [
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ include('variable-param-markup')
+ ],
+
+ # states for different values types
+ 'keyword': [
+ (r'\b(false|true)\b', Keyword.Constant)
+ ],
+
+ 'variable': [
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ (r'(?<=\w)\.(?=\w)', Punctuation)
+ ],
+
+ 'string': [
+ (r"'[^']*'", String.Single),
+ (r'"[^"]*"', String.Double)
+ ],
+
+ 'number': [
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+
+ 'generic': [ # decides for variable, string, keyword or number
+ include('keyword'),
+ include('string'),
+ include('number'),
+ include('variable')
+ ],
+
+ 'whitespace': [
+ (r'[ \t]+', Whitespace)
+ ],
+
+ # states for builtin blocks
+ 'comment': [
+ (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
+ bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
+ Punctuation), ('#pop', '#pop')),
+ (r'.', Comment)
+ ],
+
+ 'raw': [
+ (r'[^{]+', Text),
+ (r'(\{%)(\s*)(endraw)(\s*)(%\})',
+ bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
+ Punctuation), '#pop'),
+ (r'\{', Text)
+ ],
+ }
+
+
+class TwigLexer(RegexLexer):
+ """
+ `Twig <http://twig.sensiolabs.org/>`_ template lexer.
+
+ It just highlights Twig code between the preprocessor directives,
+ other data is left untouched by the lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Twig'
+ aliases = ['twig']
+ mimetypes = ['application/x-twig']
+
+ flags = re.M | re.S
+
+ # Note that a backslash is included in the following two patterns
+ # PHP uses a backslash as a namespace separator
+ _ident_char = r'[\\\w-]|[^\x00-\x7f]'
+ _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
+ _ident_end = r'(?:' + _ident_char + ')*'
+ _ident_inner = _ident_begin + _ident_end
+
+ tokens = {
+ 'root': [
+ (r'[^{]+', Other),
+ (r'\{\{', Comment.Preproc, 'var'),
+ # twig comments
+ (r'\{\#.*?\#\}', Comment),
+ # raw twig blocks
+ (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Other, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
+ r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
+ bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
+ Other, Comment.Preproc, Text, Keyword, Text,
+ Comment.Preproc)),
+ # filter blocks
+ (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
+ bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
+ 'tag'),
+ (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
+ bygroups(Comment.Preproc, Text, Keyword), 'tag'),
+ (r'\{', Other),
+ ],
+ 'varnames': [
+ (r'(\|)(\s*)(%s)' % _ident_inner,
+ bygroups(Operator, Text, Name.Function)),
+ (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
+ (r'(in|not|and|b-and|or|b-or|b-xor|is'
+ r'if|elseif|else|import'
+ r'constant|defined|divisibleby|empty|even|iterable|odd|sameas'
+ r'matches|starts\s+with|ends\s+with)\b',
+ Keyword),
+ (r'(loop|block|parent)\b', Name.Builtin),
+ (_ident_inner, Name.Variable),
+ (r'\.' + _ident_inner, Name.Variable),
+ (r'\.[0-9]+', Number),
(r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
- (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
- r"0[xX][0-9a-fA-F]+[Ll]?", Number),
- ],
- 'var': [
- (r'\s+', Text),
- (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames')
- ],
- 'tag': [
- (r'\s+', Text),
- (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
- include('varnames'),
- (r'.', Punctuation),
- ],
- }
-
-
-class TwigHtmlLexer(DelegatingLexer):
- """
- Subclass of the `TwigLexer` that highlights unlexed data with the
- `HtmlLexer`.
-
- .. versionadded:: 2.0
- """
-
- name = "HTML+Twig"
- aliases = ["html+twig"]
- filenames = ['*.twig']
- mimetypes = ['text/html+twig']
-
- def __init__(self, **options):
+ (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
+ (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
+ r"0[xX][0-9a-fA-F]+[Ll]?", Number),
+ ],
+ 'var': [
+ (r'\s+', Text),
+ (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames')
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
+ include('varnames'),
+ (r'.', Punctuation),
+ ],
+ }
+
+
+class TwigHtmlLexer(DelegatingLexer):
+ """
+ Subclass of the `TwigLexer` that highlights unlexed data with the
+ `HtmlLexer`.
+
+ .. versionadded:: 2.0
+ """
+
+ name = "HTML+Twig"
+ aliases = ["html+twig"]
+ filenames = ['*.twig']
+ mimetypes = ['text/html+twig']
+
+ def __init__(self, **options):
super().__init__(HtmlLexer, TwigLexer, **options)
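All of the template lexers in this file are registered under the aliases shown in their class bodies, so they can be reached through the ordinary Pygments entry points. A minimal usage sketch (the template string is invented):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    source = '{% for user in users %}<b>{{ user.name }}</b>{% endfor %}'
    lexer = get_lexer_by_name('html+twig')  # alias registered by TwigHtmlLexer
    print(highlight(source, lexer, HtmlFormatter()))
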
diff --git a/contrib/python/Pygments/py3/pygments/lexers/testing.py b/contrib/python/Pygments/py3/pygments/lexers/testing.py
index e52f572e88..18d1abd79b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/testing.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/testing.py
@@ -1,209 +1,209 @@
-"""
- pygments.lexers.testing
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for testing languages.
-
+"""
+ pygments.lexers.testing
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for testing languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Comment, Keyword, Name, String, Number, Generic, Text
-
-__all__ = ['GherkinLexer', 'TAPLexer']
-
-
-class GherkinLexer(RegexLexer):
- """
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Comment, Keyword, Name, String, Number, Generic, Text
+
+__all__ = ['GherkinLexer', 'TAPLexer']
+
+
+class GherkinLexer(RegexLexer):
+ """
For `Gherkin <https://github.com/aslakhellesoy/gherkin/>` syntax.
-
- .. versionadded:: 1.2
- """
- name = 'Gherkin'
+
+ .. versionadded:: 1.2
+ """
+ name = 'Gherkin'
aliases = ['gherkin', 'cucumber']
- filenames = ['*.feature']
- mimetypes = ['text/x-gherkin']
-
+ filenames = ['*.feature']
+ mimetypes = ['text/x-gherkin']
+
feature_keywords = '^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
feature_element_keywords = '^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
examples_keywords = '^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
step_keywords = '^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假设|假如|假定|但是|但し|並且|并且|同時|同时|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\\* )'
-
- tokens = {
- 'comments': [
- (r'^\s*#.*$', Comment),
- ],
- 'feature_elements': [
- (step_keywords, Keyword, "step_content_stack"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'feature_elements_on_stack': [
- (step_keywords, Keyword, "#pop:2"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table': [
- (r"\s+\|", Keyword, 'examples_table_header'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table_header': [
- (r"\s+\|\s*$", Keyword, "#pop:2"),
- include('comments'),
- (r"\\\|", Name.Variable),
- (r"\s*\|", Keyword),
- (r"[^|]", Name.Variable),
- ],
- 'scenario_sections_on_stack': [
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- "feature_elements_on_stack"),
- ],
- 'narrative': [
- include('scenario_sections_on_stack'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'table_vars': [
- (r'(<[^>]+>)', Name.Variable),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
- ],
- 'string': [
- include('table_vars'),
- (r'(\s|.)', String),
- ],
- 'py_string': [
- (r'"""', Keyword, "#pop"),
- include('string'),
- ],
- 'step_content_root': [
- (r"$", Keyword, "#pop"),
- include('step_content'),
- ],
- 'step_content_stack': [
- (r"$", Keyword, "#pop:2"),
- include('step_content'),
- ],
- 'step_content': [
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- include('comments'),
- (r'(\s|.)', Name.Function),
- ],
- 'table_content': [
- (r"\s+\|\s*$", Keyword, "#pop"),
- include('comments'),
- (r"\\\|", String),
- (r"\s*\|", Keyword),
- include('string'),
- ],
- 'double_string': [
- (r'"', Name.Function, "#pop"),
- include('string'),
- ],
- 'root': [
- (r'\n', Name.Function),
- include('comments'),
- (r'"""', Keyword, "py_string"),
- (r'\s+\|', Keyword, 'table_content'),
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
- (step_keywords, bygroups(Name.Function, Keyword),
- 'step_content_root'),
- (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
- 'narrative'),
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'feature_elements'),
- (examples_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'examples_table'),
- (r'(\s|.)', Name.Function),
- ]
- }
-
+
+ tokens = {
+ 'comments': [
+ (r'^\s*#.*$', Comment),
+ ],
+ 'feature_elements': [
+ (step_keywords, Keyword, "step_content_stack"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'feature_elements_on_stack': [
+ (step_keywords, Keyword, "#pop:2"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table': [
+ (r"\s+\|", Keyword, 'examples_table_header'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table_header': [
+ (r"\s+\|\s*$", Keyword, "#pop:2"),
+ include('comments'),
+ (r"\\\|", Name.Variable),
+ (r"\s*\|", Keyword),
+ (r"[^|]", Name.Variable),
+ ],
+ 'scenario_sections_on_stack': [
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ "feature_elements_on_stack"),
+ ],
+ 'narrative': [
+ include('scenario_sections_on_stack'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'table_vars': [
+ (r'(<[^>]+>)', Name.Variable),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
+ ],
+ 'string': [
+ include('table_vars'),
+ (r'(\s|.)', String),
+ ],
+ 'py_string': [
+ (r'"""', Keyword, "#pop"),
+ include('string'),
+ ],
+ 'step_content_root': [
+ (r"$", Keyword, "#pop"),
+ include('step_content'),
+ ],
+ 'step_content_stack': [
+ (r"$", Keyword, "#pop:2"),
+ include('step_content'),
+ ],
+ 'step_content': [
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ include('comments'),
+ (r'(\s|.)', Name.Function),
+ ],
+ 'table_content': [
+ (r"\s+\|\s*$", Keyword, "#pop"),
+ include('comments'),
+ (r"\\\|", String),
+ (r"\s*\|", Keyword),
+ include('string'),
+ ],
+ 'double_string': [
+ (r'"', Name.Function, "#pop"),
+ include('string'),
+ ],
+ 'root': [
+ (r'\n', Name.Function),
+ include('comments'),
+ (r'"""', Keyword, "py_string"),
+ (r'\s+\|', Keyword, 'table_content'),
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
+ (step_keywords, bygroups(Name.Function, Keyword),
+ 'step_content_root'),
+ (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
+ 'narrative'),
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'feature_elements'),
+ (examples_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'examples_table'),
+ (r'(\s|.)', Name.Function),
+ ]
+ }
+
def analyse_text(self, text):
return
-
-
-class TAPLexer(RegexLexer):
- """
- For Test Anything Protocol (TAP) output.
-
- .. versionadded:: 2.1
- """
- name = 'TAP'
- aliases = ['tap']
- filenames = ['*.tap']
-
- tokens = {
- 'root': [
- # A TAP version may be specified.
- (r'^TAP version \d+\n', Name.Namespace),
-
- # Specify a plan with a plan line.
+
+
+class TAPLexer(RegexLexer):
+ """
+ For Test Anything Protocol (TAP) output.
+
+ .. versionadded:: 2.1
+ """
+ name = 'TAP'
+ aliases = ['tap']
+ filenames = ['*.tap']
+
+ tokens = {
+ 'root': [
+ # A TAP version may be specified.
+ (r'^TAP version \d+\n', Name.Namespace),
+
+ # Specify a plan with a plan line.
(r'^1\.\.\d+', Keyword.Declaration, 'plan'),
-
- # A test failure
- (r'^(not ok)([^\S\n]*)(\d*)',
- bygroups(Generic.Error, Text, Number.Integer), 'test'),
-
- # A test success
- (r'^(ok)([^\S\n]*)(\d*)',
- bygroups(Keyword.Reserved, Text, Number.Integer), 'test'),
-
- # Diagnostics start with a hash.
- (r'^#.*\n', Comment),
-
- # TAP's version of an abort statement.
- (r'^Bail out!.*\n', Generic.Error),
-
- # TAP ignores any unrecognized lines.
- (r'^.*\n', Text),
- ],
- 'plan': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Text),
-
- # A plan may have a directive with it.
- (r'#', Comment, 'directive'),
-
- # Or it could just end.
- (r'\n', Comment, '#pop'),
-
- # Anything else is wrong.
- (r'.*\n', Generic.Error, '#pop'),
- ],
- 'test': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Text),
-
- # A test may have a directive with it.
- (r'#', Comment, 'directive'),
-
- (r'\S+', Text),
-
- (r'\n', Text, '#pop'),
- ],
- 'directive': [
- # Consume whitespace (but not newline).
- (r'[^\S\n]+', Comment),
-
- # Extract todo items.
- (r'(?i)\bTODO\b', Comment.Preproc),
-
- # Extract skip items.
- (r'(?i)\bSKIP\S*', Comment.Preproc),
-
- (r'\S+', Comment),
-
- (r'\n', Comment, '#pop:2'),
- ],
- }
+
+ # A test failure
+ (r'^(not ok)([^\S\n]*)(\d*)',
+ bygroups(Generic.Error, Text, Number.Integer), 'test'),
+
+ # A test success
+ (r'^(ok)([^\S\n]*)(\d*)',
+ bygroups(Keyword.Reserved, Text, Number.Integer), 'test'),
+
+ # Diagnostics start with a hash.
+ (r'^#.*\n', Comment),
+
+ # TAP's version of an abort statement.
+ (r'^Bail out!.*\n', Generic.Error),
+
+ # TAP ignores any unrecognized lines.
+ (r'^.*\n', Text),
+ ],
+ 'plan': [
+ # Consume whitespace (but not newline).
+ (r'[^\S\n]+', Text),
+
+ # A plan may have a directive with it.
+ (r'#', Comment, 'directive'),
+
+ # Or it could just end.
+ (r'\n', Comment, '#pop'),
+
+ # Anything else is wrong.
+ (r'.*\n', Generic.Error, '#pop'),
+ ],
+ 'test': [
+ # Consume whitespace (but not newline).
+ (r'[^\S\n]+', Text),
+
+ # A test may have a directive with it.
+ (r'#', Comment, 'directive'),
+
+ (r'\S+', Text),
+
+ (r'\n', Text, '#pop'),
+ ],
+ 'directive': [
+ # Consume whitespace (but not newline).
+ (r'[^\S\n]+', Comment),
+
+ # Extract todo items.
+ (r'(?i)\bTODO\b', Comment.Preproc),
+
+ # Extract skip items.
+ (r'(?i)\bSKIP\S*', Comment.Preproc),
+
+ (r'\S+', Comment),
+
+ (r'\n', Comment, '#pop:2'),
+ ],
+ }
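Both lexers in this module are plain RegexLexers and can be driven directly with get_tokens(); the TAP report below is made up for illustration:

    from pygments.lexers.testing import TAPLexer

    report = (
        "TAP version 13\n"
        "1..2\n"
        "ok 1 - parser accepts empty input\n"
        "not ok 2 - parser rejects bad input # TODO handle later\n"
    )
    for token_type, value in TAPLexer().get_tokens(report):
        print(token_type, repr(value))
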
diff --git a/contrib/python/Pygments/py3/pygments/lexers/text.py b/contrib/python/Pygments/py3/pygments/lexers/text.py
index 68e06594f7..1862b97119 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/text.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/text.py
@@ -1,25 +1,25 @@
-"""
- pygments.lexers.text
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for non-source code file types.
-
+"""
+ pygments.lexers.text
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for non-source code file types.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
- SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer
-from pygments.lexers.console import PyPyLogLexer
-from pygments.lexers.textedit import VimLexer
-from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
- TexLexer, GroffLexer
-from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
-from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
-from pygments.lexers.haxe import HxmlLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
+ SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer
+from pygments.lexers.console import PyPyLogLexer
+from pygments.lexers.textedit import VimLexer
+from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
+ TexLexer, GroffLexer
+from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
+from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
+from pygments.lexers.haxe import HxmlLexer
from pygments.lexers.sgf import SmartGameFormatLexer
-from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
-from pygments.lexers.data import YamlLexer
-from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
-
-__all__ = []
+from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
+from pygments.lexers.data import YamlLexer
+from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
+
+__all__ = []
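Since __all__ is empty, this module defines nothing of its own; it only re-imports lexers that historically lived in pygments/lexers/text.py so that old import paths keep resolving. For example (assuming an installed Pygments):

    # Both paths refer to the same class object.
    from pygments.lexers.diff import DiffLexer as new_path
    from pygments.lexers.text import DiffLexer as old_path

    assert old_path is new_path
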
diff --git a/contrib/python/Pygments/py3/pygments/lexers/textedit.py b/contrib/python/Pygments/py3/pygments/lexers/textedit.py
index 0e567bca1d..ecf8c60585 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/textedit.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/textedit.py
@@ -1,78 +1,78 @@
-"""
- pygments.lexers.textedit
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for languages related to text processing.
-
+"""
+ pygments.lexers.textedit
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for languages related to text processing.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from bisect import bisect
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from bisect import bisect
+
from pygments.lexer import RegexLexer, bygroups, default, include, this, using
-from pygments.lexers.python import PythonLexer
+from pygments.lexers.python import PythonLexer
from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
Punctuation, String, Text, Whitespace
-
+
__all__ = ['AwkLexer', 'SedLexer', 'VimLexer']
-
-
-class AwkLexer(RegexLexer):
- """
- For Awk scripts.
-
- .. versionadded:: 1.5
- """
-
- name = 'Awk'
- aliases = ['awk', 'gawk', 'mawk', 'nawk']
- filenames = ['*.awk']
- mimetypes = ['application/x-awk']
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'\B', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
- r'(\*\*|[-<>+*%\^/!=|])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(break|continue|do|while|exit|for|if|else|'
- r'return)\b', Keyword, 'slashstartsregex'),
- (r'function\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
- r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
- r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
- r'delete|system)\b', Keyword.Reserved),
- (r'(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|'
- r'FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|'
- r'RSTART|RT|SUBSEP)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+
+
+class AwkLexer(RegexLexer):
+ """
+ For Awk scripts.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Awk'
+ aliases = ['awk', 'gawk', 'mawk', 'nawk']
+ filenames = ['*.awk']
+ mimetypes = ['application/x-awk']
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'#.*$', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'\B', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
+ r'(\*\*|[-<>+*%\^/!=|])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(break|continue|do|while|exit|for|if|else|'
+ r'return)\b', Keyword, 'slashstartsregex'),
+ (r'function\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
+ r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
+ r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
+ r'delete|system)\b', Keyword.Reserved),
+ (r'(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|'
+ r'FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|'
+ r'RSTART|RT|SUBSEP)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
-
+ ]
+ }
+
+
class SedLexer(RegexLexer):
"""
Lexer for Sed script files.
@@ -108,95 +108,95 @@ class SedLexer(RegexLexer):
]
}
-class VimLexer(RegexLexer):
- """
- Lexer for VimL script files.
-
- .. versionadded:: 0.8
- """
- name = 'VimL'
- aliases = ['vim']
- filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
- '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
- mimetypes = ['text/x-vim']
- flags = re.MULTILINE
-
- _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
-
- tokens = {
- 'root': [
- (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
- bygroups(using(this), Keyword, Text, Operator, Text, Text,
- using(PythonLexer), Text)),
- (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
- bygroups(using(this), Keyword, Text, using(PythonLexer))),
-
- (r'^\s*".*', Comment),
-
- (r'[ \t]+', Text),
- # TODO: regexes can have other delims
+class VimLexer(RegexLexer):
+ """
+ Lexer for VimL script files.
+
+ .. versionadded:: 0.8
+ """
+ name = 'VimL'
+ aliases = ['vim']
+ filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
+ '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
+ mimetypes = ['text/x-vim']
+ flags = re.MULTILINE
+
+ _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
+
+ tokens = {
+ 'root': [
+ (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
+ bygroups(using(this), Keyword, Text, Operator, Text, Text,
+ using(PythonLexer), Text)),
+ (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
+ bygroups(using(this), Keyword, Text, using(PythonLexer))),
+
+ (r'^\s*".*', Comment),
+
+ (r'[ \t]+', Text),
+ # TODO: regexes can have other delims
(r'/[^/\\\n]*(?:\\[\s\S][^/\\\n]*)*/', String.Regex),
(r'"[^"\\\n]*(?:\\[\s\S][^"\\\n]*)*"', String.Double),
(r"'[^\n']*(?:''[^\n']*)*'", String.Single),
-
- # Who decided that doublequote was a good comment character??
- (r'(?<=\s)"[^\-:.%#=*].*', Comment),
- (r'-?\d+', Number),
- (r'#[0-9a-f]{6}', Number.Hex),
- (r'^:', Punctuation),
- (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
- (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
- Keyword),
- (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
- (r'\b\w+\b', Name.Other), # These are postprocessed below
- (r'.', Text),
- ],
- }
-
- def __init__(self, **options):
+
+ # Who decided that doublequote was a good comment character??
+ (r'(?<=\s)"[^\-:.%#=*].*', Comment),
+ (r'-?\d+', Number),
+ (r'#[0-9a-f]{6}', Number.Hex),
+ (r'^:', Punctuation),
+ (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
+ (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
+ Keyword),
+ (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
+ (r'\b\w+\b', Name.Other), # These are postprocessed below
+ (r'.', Text),
+ ],
+ }
+
+ def __init__(self, **options):
from pygments.lexers._vim_builtins import auto, command, option
- self._cmd = command
- self._opt = option
- self._aut = auto
-
- RegexLexer.__init__(self, **options)
-
- def is_in(self, w, mapping):
- r"""
- It's kind of difficult to decide if something might be a keyword
- in VimL because it allows you to abbreviate them. In fact,
- 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
- valid ways to call it so rather than making really awful regexps
- like::
-
- \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
-
- we match `\b\w+\b` and then call is_in() on those tokens. See
- `scripts/get_vimkw.py` for how the lists are extracted.
- """
- p = bisect(mapping, (w,))
- if p > 0:
- if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
- mapping[p-1][1][:len(w)] == w:
- return True
- if p < len(mapping):
- return mapping[p][0] == w[:len(mapping[p][0])] and \
- mapping[p][1][:len(w)] == w
- return False
-
- def get_tokens_unprocessed(self, text):
- # TODO: builtins are only subsequent tokens on lines
- # and 'keywords' only happen at the beginning except
- # for :au ones
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name.Other:
- if self.is_in(value, self._cmd):
- yield index, Keyword, value
- elif self.is_in(value, self._opt) or \
- self.is_in(value, self._aut):
- yield index, Name.Builtin, value
- else:
- yield index, Text, value
- else:
- yield index, token, value
+ self._cmd = command
+ self._opt = option
+ self._aut = auto
+
+ RegexLexer.__init__(self, **options)
+
+ def is_in(self, w, mapping):
+ r"""
+ It's kind of difficult to decide if something might be a keyword
+ in VimL because it allows you to abbreviate them. In fact,
+ 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
+ valid ways to call it so rather than making really awful regexps
+ like::
+
+ \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
+
+ we match `\b\w+\b` and then call is_in() on those tokens. See
+ `scripts/get_vimkw.py` for how the lists are extracted.
+ """
+ p = bisect(mapping, (w,))
+ if p > 0:
+ if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
+ mapping[p-1][1][:len(w)] == w:
+ return True
+ if p < len(mapping):
+ return mapping[p][0] == w[:len(mapping[p][0])] and \
+ mapping[p][1][:len(w)] == w
+ return False
+
+ def get_tokens_unprocessed(self, text):
+ # TODO: builtins are only subsequent tokens on lines
+ # and 'keywords' only happen at the beginning except
+ # for :au ones
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name.Other:
+ if self.is_in(value, self._cmd):
+ yield index, Keyword, value
+ elif self.is_in(value, self._opt) or \
+ self.is_in(value, self._aut):
+ yield index, Name.Builtin, value
+ else:
+ yield index, Text, value
+ else:
+ yield index, token, value
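The is_in() helper above sidesteps per-command abbreviation regexps by bisecting into a sorted list of (minimal_abbreviation, full_name) pairs: a word is accepted when it extends the minimal abbreviation and is itself a prefix of the full command name. A minimal standalone sketch of that lookup, with a made-up three-entry table standing in for pygments/lexers/_vim_builtins.py:

from bisect import bisect

# Hypothetical excerpt of the command table; the real, much larger list
# lives in pygments/lexers/_vim_builtins.py and is kept sorted.
mapping = sorted([('ab', 'abbreviate'), ('abc', 'abclear'), ('let', 'let')])

def is_in(w, mapping):
    # Locate the insertion point for the bare word, then test the two
    # neighbouring entries for an abbreviation/prefix match.
    p = bisect(mapping, (w,))
    if p > 0 and mapping[p - 1][0] == w[:len(mapping[p - 1][0])] \
            and mapping[p - 1][1][:len(w)] == w:
        return True
    if p < len(mapping):
        return mapping[p][0] == w[:len(mapping[p][0])] \
            and mapping[p][1][:len(w)] == w
    return False

print(is_in('abbre', mapping))  # True:  extends 'ab' and prefixes 'abbreviate'
print(is_in('abbrx', mapping))  # False: not a prefix of any full command name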
diff --git a/contrib/python/Pygments/py3/pygments/lexers/textfmts.py b/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
index 62d300a5b5..42ae5f05f7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
@@ -1,301 +1,301 @@
-"""
- pygments.lexers.textfmts
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for various text formats.
-
+"""
+ pygments.lexers.textfmts
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various text formats.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
from pygments.lexers import guess_lexer, get_lexer_by_name
from pygments.lexer import RegexLexer, bygroups, default, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Generic, Literal, Punctuation
-from pygments.util import ClassNotFound
-
+from pygments.util import ClassNotFound
+
__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer',
'NotmuchLexer', 'KernelLogLexer']
-
-
-class IrcLogsLexer(RegexLexer):
- """
- Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
- """
-
- name = 'IRC logs'
- aliases = ['irc']
- filenames = ['*.weechatlog']
- mimetypes = ['text/x-irclog']
-
- flags = re.VERBOSE | re.MULTILINE
- timestamp = r"""
- (
- # irssi / xchat and others
- (?: \[|\()? # Opening bracket or paren for the timestamp
- (?: # Timestamp
- (?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
- (?:\d{1,4})
- [T ])? # Date/time separator: T or space
- (?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
- (?: \d?\d)
- )
- (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
- |
- # weechat
- \d{4}\s\w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- |
- # xchat
- \w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- )?
- """
- tokens = {
- 'root': [
- # log start/end
- (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
- # hack
- ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
- # normal msgs
- ("^" + timestamp + r"""
- (\s*<.*?>\s*) # Nick """,
- bygroups(Comment.Preproc, Name.Tag), 'msg'),
- # /me msgs
- ("^" + timestamp + r"""
- (\s*[*]\s+) # Star
- (\S+\s+.*?\n) # Nick + rest of message """,
- bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
- # join/part msgs
- ("^" + timestamp + r"""
- (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
- (\S+\s+) # Nick + Space
- (.*?\n) # Rest of message """,
- bygroups(Comment.Preproc, Keyword, String, Comment)),
- (r"^.*?\n", Text),
- ],
- 'msg': [
- (r"\S+:(?!//)", Name.Attribute), # Prefix
- (r".*\n", Text, '#pop'),
- ],
- }
-
-
-class GettextLexer(RegexLexer):
- """
- Lexer for Gettext catalog files.
-
- .. versionadded:: 0.9
- """
- name = 'Gettext Catalog'
- aliases = ['pot', 'po']
- filenames = ['*.pot', '*.po']
- mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
-
- tokens = {
- 'root': [
- (r'^#,\s.*?$', Keyword.Type),
- (r'^#:\s.*?$', Keyword.Declaration),
- # (r'^#$', Comment),
- (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
- (r'^(")([A-Za-z-]+:)(.*")$',
- bygroups(String, Name.Property, String)),
- (r'^".*"$', String),
- (r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
- bygroups(Name.Variable, Text, String)),
- (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
- bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
- ]
- }
-
-
-class HttpLexer(RegexLexer):
- """
- Lexer for HTTP sessions.
-
- .. versionadded:: 1.5
- """
-
- name = 'HTTP'
- aliases = ['http']
-
- flags = re.DOTALL
-
+
+
+class IrcLogsLexer(RegexLexer):
+ """
+ Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
+ """
+
+ name = 'IRC logs'
+ aliases = ['irc']
+ filenames = ['*.weechatlog']
+ mimetypes = ['text/x-irclog']
+
+ flags = re.VERBOSE | re.MULTILINE
+ timestamp = r"""
+ (
+ # irssi / xchat and others
+ (?: \[|\()? # Opening bracket or paren for the timestamp
+ (?: # Timestamp
+ (?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
+ (?:\d{1,4})
+ [T ])? # Date/time separator: T or space
+ (?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
+ (?: \d?\d)
+ )
+ (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
+ |
+ # weechat
+ \d{4}\s\w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ |
+ # xchat
+ \w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ )?
+ """
+ tokens = {
+ 'root': [
+ # log start/end
+ (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
+ # hack
+ ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
+ # normal msgs
+ ("^" + timestamp + r"""
+ (\s*<.*?>\s*) # Nick """,
+ bygroups(Comment.Preproc, Name.Tag), 'msg'),
+ # /me msgs
+ ("^" + timestamp + r"""
+ (\s*[*]\s+) # Star
+ (\S+\s+.*?\n) # Nick + rest of message """,
+ bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
+ # join/part msgs
+ ("^" + timestamp + r"""
+ (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
+ (\S+\s+) # Nick + Space
+ (.*?\n) # Rest of message """,
+ bygroups(Comment.Preproc, Keyword, String, Comment)),
+ (r"^.*?\n", Text),
+ ],
+ 'msg': [
+ (r"\S+:(?!//)", Name.Attribute), # Prefix
+ (r".*\n", Text, '#pop'),
+ ],
+ }
+
+
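The timestamp pattern above is compiled with re.VERBOSE (see the flags attribute), so the whitespace and '#' comments inside it are annotation rather than part of the match. A reduced sketch of the same idea, keeping only a simplified version of the first (irssi/xchat-style) alternative:

import re

# Cut-down, illustrative pattern; not the full alternative shown above.
timestamp = re.compile(r"""
    \[?                      # optional opening bracket
    (?:\d?\d[:.])*(?:\d?\d)  # time as :/.-separated groups of 1 or 2 digits
    \]?\s+                   # optional closing bracket plus trailing space
""", re.VERBOSE)

print(bool(timestamp.match('[23:59] ')))    # True
print(bool(timestamp.match('23:59:59  ')))  # True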
+class GettextLexer(RegexLexer):
+ """
+ Lexer for Gettext catalog files.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Gettext Catalog'
+ aliases = ['pot', 'po']
+ filenames = ['*.pot', '*.po']
+ mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
+
+ tokens = {
+ 'root': [
+ (r'^#,\s.*?$', Keyword.Type),
+ (r'^#:\s.*?$', Keyword.Declaration),
+ # (r'^#$', Comment),
+ (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
+ (r'^(")([A-Za-z-]+:)(.*")$',
+ bygroups(String, Name.Property, String)),
+ (r'^".*"$', String),
+ (r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
+ bygroups(Name.Variable, Text, String)),
+ (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
+ bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
+ ]
+ }
+
+
+class HttpLexer(RegexLexer):
+ """
+ Lexer for HTTP sessions.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'HTTP'
+ aliases = ['http']
+
+ flags = re.DOTALL
+
def get_tokens_unprocessed(self, text, stack=('root',)):
"""Reset the content-type state."""
self.content_type = None
return RegexLexer.get_tokens_unprocessed(self, text, stack)
- def header_callback(self, match):
- if match.group(1).lower() == 'content-type':
- content_type = match.group(5).strip()
- if ';' in content_type:
- content_type = content_type[:content_type.find(';')].strip()
- self.content_type = content_type
- yield match.start(1), Name.Attribute, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator, match.group(3)
- yield match.start(4), Text, match.group(4)
- yield match.start(5), Literal, match.group(5)
- yield match.start(6), Text, match.group(6)
-
- def continuous_header_callback(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Literal, match.group(2)
- yield match.start(3), Text, match.group(3)
-
- def content_callback(self, match):
- content_type = getattr(self, 'content_type', None)
- content = match.group()
- offset = match.start()
- if content_type:
- from pygments.lexers import get_lexer_for_mimetype
- possible_lexer_mimetypes = [content_type]
- if '+' in content_type:
- # application/calendar+xml can be treated as application/xml
- # if there's not a better match.
- general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
- content_type)
- possible_lexer_mimetypes.append(general_type)
-
- for i in possible_lexer_mimetypes:
- try:
- lexer = get_lexer_for_mimetype(i)
- except ClassNotFound:
- pass
- else:
- for idx, token, value in lexer.get_tokens_unprocessed(content):
- yield offset + idx, token, value
- return
- yield offset, Text, content
-
- tokens = {
- 'root': [
- (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
+ def header_callback(self, match):
+ if match.group(1).lower() == 'content-type':
+ content_type = match.group(5).strip()
+ if ';' in content_type:
+ content_type = content_type[:content_type.find(';')].strip()
+ self.content_type = content_type
+ yield match.start(1), Name.Attribute, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator, match.group(3)
+ yield match.start(4), Text, match.group(4)
+ yield match.start(5), Literal, match.group(5)
+ yield match.start(6), Text, match.group(6)
+
+ def continuous_header_callback(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Literal, match.group(2)
+ yield match.start(3), Text, match.group(3)
+
+ def content_callback(self, match):
+ content_type = getattr(self, 'content_type', None)
+ content = match.group()
+ offset = match.start()
+ if content_type:
+ from pygments.lexers import get_lexer_for_mimetype
+ possible_lexer_mimetypes = [content_type]
+ if '+' in content_type:
+ # application/calendar+xml can be treated as application/xml
+ # if there's not a better match.
+ general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
+ content_type)
+ possible_lexer_mimetypes.append(general_type)
+
+ for i in possible_lexer_mimetypes:
+ try:
+ lexer = get_lexer_for_mimetype(i)
+ except ClassNotFound:
+ pass
+ else:
+ for idx, token, value in lexer.get_tokens_unprocessed(content):
+ yield offset + idx, token, value
+ return
+ yield offset, Text, content
+
+ tokens = {
+ 'root': [
+ (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)(\r?\n|\Z)',
- bygroups(Name.Function, Text, Name.Namespace, Text,
- Keyword.Reserved, Operator, Number, Text),
- 'headers'),
+ bygroups(Name.Function, Text, Name.Namespace, Text,
+ Keyword.Reserved, Operator, Number, Text),
+ 'headers'),
(r'(HTTP)(/)(1\.[01]|2(?:\.0)?|3)( +)(\d{3})(?:( +)([^\r\n]*))?(\r?\n|\Z)',
bygroups(Keyword.Reserved, Operator, Number, Text, Number, Text,
Name.Exception, Text),
- 'headers'),
- ],
- 'headers': [
- (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)', header_callback),
- (r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
- (r'\r?\n', Text, 'content')
- ],
- 'content': [
- (r'.+', content_callback)
- ]
- }
-
- def analyse_text(text):
- return text.startswith(('GET /', 'POST /', 'PUT /', 'DELETE /', 'HEAD /',
- 'OPTIONS /', 'TRACE /', 'PATCH /'))
-
-
-class TodotxtLexer(RegexLexer):
- """
- Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
-
- .. versionadded:: 2.0
- """
-
- name = 'Todotxt'
- aliases = ['todotxt']
- # *.todotxt is not a standard extension for Todo.txt files; including it
- # makes testing easier, and also makes autodetecting file type easier.
- filenames = ['todo.txt', '*.todotxt']
- mimetypes = ['text/x-todo']
-
- # Aliases mapping standard token types of Todo.txt format concepts
- CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
- IncompleteTaskText = Text # Incomplete tasks should look like plain text
-
- # Priority should have most emphasis to indicate importance of tasks
- Priority = Generic.Heading
- # Dates should have next most emphasis because time is important
- Date = Generic.Subheading
-
- # Project and context should have equal weight, and be in different colors
- Project = Generic.Error
- Context = String
-
- # If tag functionality is added, it should have the same weight as Project
- # and Context, and a different color. Generic.Traceback would work well.
-
- # Regex patterns for building up rules; dates, priorities, projects, and
- # contexts are all atomic
- # TODO: Make date regex more ISO 8601 compliant
- date_regex = r'\d{4,}-\d{2}-\d{2}'
- priority_regex = r'\([A-Z]\)'
- project_regex = r'\+\S+'
- context_regex = r'@\S+'
-
- # Compound regex expressions
- complete_one_date_regex = r'(x )(' + date_regex + r')'
- complete_two_date_regex = (complete_one_date_regex + r'( )(' +
- date_regex + r')')
- priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
-
- tokens = {
- # Should parse starting at beginning of line; each line is a task
- 'root': [
- # Complete task entry points: two total:
- # 1. Complete task with two dates
- (complete_two_date_regex, bygroups(CompleteTaskText, Date,
- CompleteTaskText, Date),
- 'complete'),
- # 2. Complete task with one date
- (complete_one_date_regex, bygroups(CompleteTaskText, Date),
- 'complete'),
-
- # Incomplete task entry points: six total:
- # 1. Priority plus date
- (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
- 'incomplete'),
- # 2. Priority only
- (priority_regex, Priority, 'incomplete'),
- # 3. Leading date
- (date_regex, Date, 'incomplete'),
- # 4. Leading context
- (context_regex, Context, 'incomplete'),
- # 5. Leading project
- (project_regex, Project, 'incomplete'),
- # 6. Non-whitespace catch-all
+ 'headers'),
+ ],
+ 'headers': [
+ (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)', header_callback),
+ (r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
+ (r'\r?\n', Text, 'content')
+ ],
+ 'content': [
+ (r'.+', content_callback)
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith(('GET /', 'POST /', 'PUT /', 'DELETE /', 'HEAD /',
+ 'OPTIONS /', 'TRACE /', 'PATCH /'))
+
+
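content_callback() above tries to highlight the message body with a lexer for the declared Content-Type and, for structured-syntax types such as application/calendar+xml, also tries the generic form before falling back to plain text. The reduction is a single substitution; in isolation it looks like this (the sample inputs are made up):

import re

def general_mimetype(content_type):
    # 'application/calendar+xml' is close enough to 'application/xml'
    # to reuse that lexer when no exact mimetype is registered.
    return re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2', content_type)

print(general_mimetype('application/calendar+xml'))  # application/xml
print(general_mimetype('application/hal+json'))      # application/json
print(general_mimetype('text/plain'))                # unchanged: no '+' suffix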
+class TodotxtLexer(RegexLexer):
+ """
+ Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Todotxt'
+ aliases = ['todotxt']
+ # *.todotxt is not a standard extension for Todo.txt files; including it
+ # makes testing easier, and also makes autodetecting file type easier.
+ filenames = ['todo.txt', '*.todotxt']
+ mimetypes = ['text/x-todo']
+
+ # Aliases mapping standard token types of Todo.txt format concepts
+ CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
+ IncompleteTaskText = Text # Incomplete tasks should look like plain text
+
+ # Priority should have most emphasis to indicate importance of tasks
+ Priority = Generic.Heading
+ # Dates should have next most emphasis because time is important
+ Date = Generic.Subheading
+
+ # Project and context should have equal weight, and be in different colors
+ Project = Generic.Error
+ Context = String
+
+ # If tag functionality is added, it should have the same weight as Project
+ # and Context, and a different color. Generic.Traceback would work well.
+
+ # Regex patterns for building up rules; dates, priorities, projects, and
+ # contexts are all atomic
+ # TODO: Make date regex more ISO 8601 compliant
+ date_regex = r'\d{4,}-\d{2}-\d{2}'
+ priority_regex = r'\([A-Z]\)'
+ project_regex = r'\+\S+'
+ context_regex = r'@\S+'
+
+ # Compound regex expressions
+ complete_one_date_regex = r'(x )(' + date_regex + r')'
+ complete_two_date_regex = (complete_one_date_regex + r'( )(' +
+ date_regex + r')')
+ priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
+
+ tokens = {
+ # Should parse starting at beginning of line; each line is a task
+ 'root': [
+ # Complete task entry points: two total:
+ # 1. Complete task with two dates
+ (complete_two_date_regex, bygroups(CompleteTaskText, Date,
+ CompleteTaskText, Date),
+ 'complete'),
+ # 2. Complete task with one date
+ (complete_one_date_regex, bygroups(CompleteTaskText, Date),
+ 'complete'),
+
+ # Incomplete task entry points: six total:
+ # 1. Priority plus date
+ (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
+ 'incomplete'),
+ # 2. Priority only
+ (priority_regex, Priority, 'incomplete'),
+ # 3. Leading date
+ (date_regex, Date, 'incomplete'),
+ # 4. Leading context
+ (context_regex, Context, 'incomplete'),
+ # 5. Leading project
+ (project_regex, Project, 'incomplete'),
+ # 6. Non-whitespace catch-all
(r'\S+', IncompleteTaskText, 'incomplete'),
- ],
-
- # Parse a complete task
- 'complete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', CompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
+ ],
+
+ # Parse a complete task
+ 'complete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', CompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
(r'\S+', CompleteTaskText),
- # Tokenize whitespace not containing a newline
+ # Tokenize whitespace not containing a newline
(r'\s+', CompleteTaskText),
- ],
-
- # Parse an incomplete task
- 'incomplete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', IncompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
+ ],
+
+ # Parse an incomplete task
+ 'incomplete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', IncompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
(r'\S+', IncompleteTaskText),
- # Tokenize whitespace not containing a newline
+ # Tokenize whitespace not containing a newline
(r'\s+', IncompleteTaskText),
- ],
- }
+ ],
+ }
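The compound patterns above are plain concatenations of the atomic date and priority regexes, so a completed entry with two dates yields four capture groups before the lexer enters the 'complete' state. A short sketch of how complete_two_date_regex decomposes such a line (the task text itself is invented):

import re

date_regex = r'\d{4,}-\d{2}-\d{2}'
complete_one_date_regex = r'(x )(' + date_regex + r')'
complete_two_date_regex = complete_one_date_regex + r'( )(' + date_regex + r')'

# Completion marker, completion date, separator, creation date.
line = 'x 2022-02-10 2022-02-01 pay the rent @home +chores'
print(re.match(complete_two_date_regex, line).groups())
# ('x ', '2022-02-10', ' ', '2022-02-01')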
class NotmuchLexer(RegexLexer):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/theorem.py b/contrib/python/Pygments/py3/pygments/lexers/theorem.py
index a7f4330a54..b0cda344a4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/theorem.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/theorem.py
@@ -1,398 +1,398 @@
-"""
- pygments.lexers.theorem
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for theorem-proving languages.
-
+"""
+ pygments.lexers.theorem
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for theorem-proving languages.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, default, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic
-
-__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
-
-
-class CoqLexer(RegexLexer):
- """
- For the `Coq <http://coq.inria.fr/>`_ theorem prover.
-
- .. versionadded:: 1.5
- """
-
- name = 'Coq'
- aliases = ['coq']
- filenames = ['*.v']
- mimetypes = ['text/x-coq']
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
+
+
+class CoqLexer(RegexLexer):
+ """
+ For the `Coq <http://coq.inria.fr/>`_ theorem prover.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Coq'
+ aliases = ['coq']
+ filenames = ['*.v']
+ mimetypes = ['text/x-coq']
+
flags = re.UNICODE
- keywords1 = (
- # Vernacular commands
- 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
+ keywords1 = (
+ # Vernacular commands
+ 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
'Variables', 'Parameter', 'Parameters', 'Axiom', 'Axioms', 'Hypothesis',
- 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
+ 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Example', 'Let',
'Ltac', 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
'Arguments', 'Types', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
- 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
+ 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
'Variant', 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Fact',
'Remark', 'Corollary', 'Proposition', 'Property', 'Goal',
'Proof', 'Restart', 'Save', 'Qed', 'Defined', 'Abort', 'Admitted',
'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
- 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
- 'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
- 'Universe', 'Polymorphic', 'Monomorphic', 'Context'
- )
- keywords2 = (
- # Gallina
- 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
- 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
- 'for', 'of', 'nosimpl', 'with', 'as',
- )
- keywords3 = (
- # Sorts
+ 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
+ 'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
+ 'Universe', 'Polymorphic', 'Monomorphic', 'Context'
+ )
+ keywords2 = (
+ # Gallina
+ 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
+ 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
+ 'for', 'of', 'nosimpl', 'with', 'as',
+ )
+ keywords3 = (
+ # Sorts
'Type', 'Prop', 'SProp',
- )
- keywords4 = (
- # Tactics
- 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
- 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
- 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
- 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
- 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
- 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
- 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
- 'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
- 'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
- 'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
- 'native_compute', 'subst',
- )
- keywords5 = (
- # Terminators
+ )
+ keywords4 = (
+ # Tactics
+ 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
+ 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
+ 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
+ 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
+ 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
+ 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
+ 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
+ 'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
+ 'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
+ 'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
+ 'native_compute', 'subst',
+ )
+ keywords5 = (
+ # Terminators
'by', 'now', 'done', 'exact', 'reflexivity',
'tauto', 'romega', 'omega', 'lia', 'nia', 'lra', 'nra', 'psatz',
- 'assumption', 'solve', 'contradiction', 'discriminate',
- 'congruence',
- )
- keywords6 = (
- # Control
- 'do', 'last', 'first', 'try', 'idtac', 'repeat',
- )
- # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- # 'downto', 'else', 'end', 'exception', 'external', 'false',
- # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- # 'type', 'val', 'virtual', 'when', 'while', 'with'
- keyopts = (
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
- '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
- '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
- r'/\\', r'\\/', r'\{\|', r'\|\}',
+ 'assumption', 'solve', 'contradiction', 'discriminate',
+ 'congruence',
+ )
+ keywords6 = (
+ # Control
+ 'do', 'last', 'first', 'try', 'idtac', 'repeat',
+ )
+ # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ # 'downto', 'else', 'end', 'exception', 'external', 'false',
+ # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ # 'type', 'val', 'virtual', 'when', 'while', 'with'
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
+ '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
+ '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
+ r'/\\', r'\\/', r'\{\|', r'\|\}',
# 'Π', 'Σ', # Not defined in the standard library
'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥',
- )
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\(\*', Comment, 'comment'),
- (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
- (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
- (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
- # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-
+ )
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\(\*', Comment, 'comment'),
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+ (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'\d[\d_]*', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name),
(r'\S', Name.Builtin.Pseudo),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^"]+', String.Double),
- (r'""', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z][a-z0-9_\']*', Name, '#pop'),
- default('#pop')
- ],
- }
-
- def analyse_text(text):
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z][a-z0-9_\']*', Name, '#pop'),
+ default('#pop')
+ ],
+ }
+
+ def analyse_text(text):
if 'Qed' in text and 'Proof' in text:
return 1
-
-
-class IsabelleLexer(RegexLexer):
- """
- For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
-
- .. versionadded:: 2.0
- """
-
- name = 'Isabelle'
- aliases = ['isabelle']
- filenames = ['*.thy']
- mimetypes = ['text/x-isabelle']
-
- keyword_minor = (
- 'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
- 'class_instance', 'class_relation', 'code_module', 'congs',
- 'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
- 'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
- 'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
- 'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
- 'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
- 'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
- 'type_constructor', 'unchecked', 'unsafe', 'where',
- )
-
- keyword_diag = (
- 'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
- 'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
- 'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
- 'print_abbrevs', 'print_antiquotations', 'print_attributes',
- 'print_binds', 'print_bnfs', 'print_bundles',
- 'print_case_translations', 'print_cases', 'print_claset',
- 'print_classes', 'print_codeproc', 'print_codesetup',
- 'print_coercions', 'print_commands', 'print_context',
- 'print_defn_rules', 'print_dependencies', 'print_facts',
- 'print_induct_rules', 'print_inductives', 'print_interps',
- 'print_locale', 'print_locales', 'print_methods', 'print_options',
- 'print_orders', 'print_quot_maps', 'print_quotconsts',
- 'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
- 'print_rules', 'print_simpset', 'print_state', 'print_statement',
- 'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
- 'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
- 'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
- 'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
- 'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
- )
-
- keyword_thy = ('theory', 'begin', 'end')
-
- keyword_section = ('header', 'chapter')
-
- keyword_subsection = (
- 'section', 'subsection', 'subsubsection', 'sect', 'subsect',
- 'subsubsect',
- )
-
- keyword_theory_decl = (
- 'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
- 'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
- 'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
- 'code_abort', 'code_class', 'code_const', 'code_datatype',
- 'code_identifier', 'code_include', 'code_instance', 'code_modulename',
- 'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
- 'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
- 'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
- 'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
- 'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
- 'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
- 'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
- 'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
- 'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
- 'lifting_forget', 'lifting_update', 'local_setup', 'locale',
- 'method_setup', 'nitpick_params', 'no_adhoc_overloading',
- 'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
- 'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
- 'overloading', 'parse_ast_translation', 'parse_translation',
- 'partial_function', 'primcorec', 'primrec', 'primrec_new',
- 'print_ast_translation', 'print_translation', 'quickcheck_generator',
- 'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
- 'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
- 'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
- 'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
- 'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
- 'text_raw', 'theorems', 'translations', 'type_notation',
- 'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
- 'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
- 'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
- 'bnf_axiomatization', 'cartouche', 'datatype_compat',
- 'free_constructors', 'functor', 'nominal_function',
- 'nominal_termination', 'permanent_interpretation',
- 'binds', 'defining', 'smt2_status', 'term_cartouche',
- 'boogie_file', 'text_cartouche',
- )
-
- keyword_theory_script = ('inductive_cases', 'inductive_simps')
-
- keyword_theory_goal = (
- 'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
- 'crunch', 'crunch_ignore',
- 'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
- 'lift_definition', 'nominal_inductive', 'nominal_inductive2',
- 'nominal_primrec', 'pcpodef', 'primcorecursive',
- 'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
- 'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
- 'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
- 'theorem', 'typedef', 'wrap_free_constructors',
- )
-
- keyword_qed = ('by', 'done', 'qed')
- keyword_abandon_proof = ('sorry', 'oops')
-
- keyword_proof_goal = ('have', 'hence', 'interpret')
-
- keyword_proof_block = ('next', 'proof')
-
- keyword_proof_chain = (
- 'finally', 'from', 'then', 'ultimately', 'with',
- )
-
- keyword_proof_decl = (
- 'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
- 'txt', 'txt_raw', 'unfolding', 'using', 'write',
- )
-
- keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
-
- keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
-
- keyword_proof_script = (
- 'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
- )
-
- operators = (
- '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
- '+', '-', '!', '?',
- )
-
- proof_operators = ('{', '}', '.', '..')
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'\(\*', Comment, 'comment'),
- (r'\{\*', Comment, 'text'),
-
- (words(operators), Operator),
- (words(proof_operators), Operator.Word),
-
- (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
-
- (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
- (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
- (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
-
- (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
- (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
-
- (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
-
- (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
- (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
-
- (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
-
- (r'\\<\w*>', Text.Symbol),
-
- (r"[^\W\d][.\w']*", Name),
- (r"\?[^\W\d][.\w']*", Name),
- (r"'[^\W\d][.\w']*", Name.Type),
-
- (r'\d[\d_]*', Name), # display numbers as name
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
-
- (r'"', String, 'string'),
- (r'`', String.Other, 'fact'),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'text': [
- (r'[^*}]+', Comment),
- (r'\*\}', Comment, '#pop'),
- (r'\*', Comment),
- (r'\}', Comment),
- ],
- 'string': [
- (r'[^"\\]+', String),
- (r'\\<\w*>', String.Symbol),
- (r'\\"', String),
- (r'\\', String),
- (r'"', String, '#pop'),
- ],
- 'fact': [
- (r'[^`\\]+', String.Other),
- (r'\\<\w*>', String.Symbol),
- (r'\\`', String.Other),
- (r'\\', String.Other),
- (r'`', String.Other, '#pop'),
- ],
- }
-
-
-class LeanLexer(RegexLexer):
- """
- For the `Lean <https://github.com/leanprover/lean>`_
- theorem prover.
-
- .. versionadded:: 2.0
- """
- name = 'Lean'
- aliases = ['lean']
- filenames = ['*.lean']
- mimetypes = ['text/x-lean']
-
- flags = re.MULTILINE | re.UNICODE
-
+
+
+class IsabelleLexer(RegexLexer):
+ """
+ For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Isabelle'
+ aliases = ['isabelle']
+ filenames = ['*.thy']
+ mimetypes = ['text/x-isabelle']
+
+ keyword_minor = (
+ 'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
+ 'class_instance', 'class_relation', 'code_module', 'congs',
+ 'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
+ 'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
+ 'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
+ 'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
+ 'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
+ 'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
+ 'type_constructor', 'unchecked', 'unsafe', 'where',
+ )
+
+ keyword_diag = (
+ 'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
+ 'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
+ 'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
+ 'print_abbrevs', 'print_antiquotations', 'print_attributes',
+ 'print_binds', 'print_bnfs', 'print_bundles',
+ 'print_case_translations', 'print_cases', 'print_claset',
+ 'print_classes', 'print_codeproc', 'print_codesetup',
+ 'print_coercions', 'print_commands', 'print_context',
+ 'print_defn_rules', 'print_dependencies', 'print_facts',
+ 'print_induct_rules', 'print_inductives', 'print_interps',
+ 'print_locale', 'print_locales', 'print_methods', 'print_options',
+ 'print_orders', 'print_quot_maps', 'print_quotconsts',
+ 'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
+ 'print_rules', 'print_simpset', 'print_state', 'print_statement',
+ 'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
+ 'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
+ 'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
+ 'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
+ 'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
+ )
+
+ keyword_thy = ('theory', 'begin', 'end')
+
+ keyword_section = ('header', 'chapter')
+
+ keyword_subsection = (
+ 'section', 'subsection', 'subsubsection', 'sect', 'subsect',
+ 'subsubsect',
+ )
+
+ keyword_theory_decl = (
+ 'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
+ 'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
+ 'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
+ 'code_abort', 'code_class', 'code_const', 'code_datatype',
+ 'code_identifier', 'code_include', 'code_instance', 'code_modulename',
+ 'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
+ 'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
+ 'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
+ 'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
+ 'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
+ 'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
+ 'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
+ 'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
+ 'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
+ 'lifting_forget', 'lifting_update', 'local_setup', 'locale',
+ 'method_setup', 'nitpick_params', 'no_adhoc_overloading',
+ 'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
+ 'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
+ 'overloading', 'parse_ast_translation', 'parse_translation',
+ 'partial_function', 'primcorec', 'primrec', 'primrec_new',
+ 'print_ast_translation', 'print_translation', 'quickcheck_generator',
+ 'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
+ 'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
+ 'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
+ 'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
+ 'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
+ 'text_raw', 'theorems', 'translations', 'type_notation',
+ 'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
+ 'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
+ 'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
+ 'bnf_axiomatization', 'cartouche', 'datatype_compat',
+ 'free_constructors', 'functor', 'nominal_function',
+ 'nominal_termination', 'permanent_interpretation',
+ 'binds', 'defining', 'smt2_status', 'term_cartouche',
+ 'boogie_file', 'text_cartouche',
+ )
+
+ keyword_theory_script = ('inductive_cases', 'inductive_simps')
+
+ keyword_theory_goal = (
+ 'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
+ 'crunch', 'crunch_ignore',
+ 'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
+ 'lift_definition', 'nominal_inductive', 'nominal_inductive2',
+ 'nominal_primrec', 'pcpodef', 'primcorecursive',
+ 'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
+ 'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
+ 'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
+ 'theorem', 'typedef', 'wrap_free_constructors',
+ )
+
+ keyword_qed = ('by', 'done', 'qed')
+ keyword_abandon_proof = ('sorry', 'oops')
+
+ keyword_proof_goal = ('have', 'hence', 'interpret')
+
+ keyword_proof_block = ('next', 'proof')
+
+ keyword_proof_chain = (
+ 'finally', 'from', 'then', 'ultimately', 'with',
+ )
+
+ keyword_proof_decl = (
+ 'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
+ 'txt', 'txt_raw', 'unfolding', 'using', 'write',
+ )
+
+ keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
+
+ keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
+
+ keyword_proof_script = (
+ 'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
+ )
+
+ operators = (
+ '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
+ '+', '-', '!', '?',
+ )
+
+ proof_operators = ('{', '}', '.', '..')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'\(\*', Comment, 'comment'),
+ (r'\{\*', Comment, 'text'),
+
+ (words(operators), Operator),
+ (words(proof_operators), Operator.Word),
+
+ (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
+ (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
+
+ (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+
+ (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
+
+ (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (r'\\<\w*>', Text.Symbol),
+
+ (r"[^\W\d][.\w']*", Name),
+ (r"\?[^\W\d][.\w']*", Name),
+ (r"'[^\W\d][.\w']*", Name.Type),
+
+ (r'\d[\d_]*', Name), # display numbers as name
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+
+ (r'"', String, 'string'),
+ (r'`', String.Other, 'fact'),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'text': [
+ (r'[^*}]+', Comment),
+ (r'\*\}', Comment, '#pop'),
+ (r'\*', Comment),
+ (r'\}', Comment),
+ ],
+ 'string': [
+ (r'[^"\\]+', String),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\"', String),
+ (r'\\', String),
+ (r'"', String, '#pop'),
+ ],
+ 'fact': [
+ (r'[^`\\]+', String.Other),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\`', String.Other),
+ (r'\\', String.Other),
+ (r'`', String.Other, '#pop'),
+ ],
+ }
+
+
+class LeanLexer(RegexLexer):
+ """
+ For the `Lean <https://github.com/leanprover/lean>`_
+ theorem prover.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Lean'
+ aliases = ['lean']
+ filenames = ['*.lean']
+ mimetypes = ['text/x-lean']
+
+ flags = re.MULTILINE | re.UNICODE
+
tokens = {
'root': [
(r'\s+', Text),
@@ -416,16 +416,16 @@ class LeanLexer(RegexLexer):
'inductive', 'coinductive', 'structure', 'extends',
'class', 'instance',
'abbreviation',
-
+
'noncomputable theory',
-
+
'noncomputable', 'mutual', 'meta',
-
+
'attribute',
-
+
'parameter', 'parameters',
'variable', 'variables',
-
+
'reserve', 'precedence',
'postfix', 'prefix', 'notation', 'infix', 'infixl', 'infixr',
@@ -454,26 +454,26 @@ class LeanLexer(RegexLexer):
r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name),
(r'0x[A-Za-z0-9]+', Number.Integer),
(r'0b[01]+', Number.Integer),
- (r'\d+', Number.Integer),
- (r'"', String.Double, 'string'),
+ (r'\d+', Number.Integer),
+ (r'"', String.Double, 'string'),
(r"'(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4})|.)'", String.Char),
(r'[~?][a-z][\w\']*:', Name.Variable),
(r'\S', Name.Builtin.Pseudo),
- ],
- 'comment': [
- (r'[^/-]', Comment.Multiline),
- (r'/-', Comment.Multiline, '#push'),
- (r'-/', Comment.Multiline, '#pop'),
- (r'[/-]', Comment.Multiline)
- ],
+ ],
+ 'comment': [
+ (r'[^/-]', Comment.Multiline),
+ (r'/-', Comment.Multiline, '#push'),
+ (r'-/', Comment.Multiline, '#pop'),
+ (r'[/-]', Comment.Multiline)
+ ],
'docstring': [
(r'[^/-]', String.Doc),
(r'-/', String.Doc, '#pop'),
(r'[/-]', String.Doc)
],
- 'string': [
- (r'[^\\"]+', String.Double),
+ 'string': [
+ (r'[^\\"]+', String.Double),
(r"(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4}))", String.Escape),
- ('"', String.Double, '#pop'),
- ],
- }
+ ('"', String.Double, '#pop'),
+ ],
+ }
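CoqLexer.analyse_text() above returns 1 (the maximum score) whenever both 'Proof' and 'Qed' occur, and that score is what pygments.lexers.guess_lexer() compares when classifying untyped input. A sketch of the interaction, assuming no other registered lexer claims a higher score for this snippet:

from pygments.lexers import guess_lexer

source = 'Lemma trivial : True.\nProof. exact I. Qed.\n'
# Both 'Proof' and 'Qed' occur, so CoqLexer reports the top score.
print(guess_lexer(source).name)  # expected: 'Coq'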
diff --git a/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py b/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
index 67ecd243cb..89c9e90085 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
@@ -1,51 +1,51 @@
-"""
- pygments.lexers.trafficscript
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexer for RiverBed's TrafficScript (RTS) language.
-
+"""
+ pygments.lexers.trafficscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for RiverBed's TrafficScript (RTS) language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import String, Number, Name, Keyword, Operator, Text, Comment
-
-__all__ = ['RtsLexer']
-
-
-class RtsLexer(RegexLexer):
- """
- For `Riverbed Stingray Traffic Manager <http://www.riverbed.com/stingray>`_
-
- .. versionadded:: 2.1
- """
- name = 'TrafficScript'
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import String, Number, Name, Keyword, Operator, Text, Comment
+
+__all__ = ['RtsLexer']
+
+
+class RtsLexer(RegexLexer):
+ """
+ For `Riverbed Stingray Traffic Manager <http://www.riverbed.com/stingray>`_
+
+ .. versionadded:: 2.1
+ """
+ name = 'TrafficScript'
aliases = ['trafficscript', 'rts']
- filenames = ['*.rts']
-
- tokens = {
- 'root' : [
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"', String, 'escapable-string'),
- (r'(0x[0-9a-fA-F]+|\d+)', Number),
- (r'\d+\.\d+', Number.Float),
- (r'\$[a-zA-Z](\w|_)*', Name.Variable),
- (r'(if|else|for(each)?|in|while|do|break|sub|return|import)', Keyword),
- (r'[a-zA-Z][\w.]*', Name.Function),
- (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator),
- (r'(>=|<=|==|!=|'
- r'&&|\|\||'
- r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|'
- r'>>|<<|'
- r'\+\+|--|=>)', Operator),
- (r'[ \t\r]+', Text),
- (r'#[^\n]*', Comment),
- ],
- 'escapable-string' : [
- (r'\\[tsn]', String.Escape),
- (r'[^"]', String),
- (r'"', String, '#pop'),
- ],
-
- }
+ filenames = ['*.rts']
+
+ tokens = {
+ 'root' : [
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"', String, 'escapable-string'),
+ (r'(0x[0-9a-fA-F]+|\d+)', Number),
+ (r'\d+\.\d+', Number.Float),
+ (r'\$[a-zA-Z](\w|_)*', Name.Variable),
+ (r'(if|else|for(each)?|in|while|do|break|sub|return|import)', Keyword),
+ (r'[a-zA-Z][\w.]*', Name.Function),
+ (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator),
+ (r'(>=|<=|==|!=|'
+ r'&&|\|\||'
+ r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|'
+ r'>>|<<|'
+ r'\+\+|--|=>)', Operator),
+ (r'[ \t\r]+', Text),
+ (r'#[^\n]*', Comment),
+ ],
+ 'escapable-string' : [
+ (r'\\[tsn]', String.Escape),
+ (r'[^"]', String),
+ (r'"', String, '#pop'),
+ ],
+
+ }
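With RtsLexer registered under the aliases above, TrafficScript sources can be tokenized through the normal registry like any other language. A minimal usage sketch, assuming the installed Pygments ships this lexer in its mapping:

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('rts')  # resolves to the TrafficScript RtsLexer
for token, value in lexer.get_tokens('$rate = 10;  # throttle\n'):
    print(token, repr(value))
# e.g. Token.Name.Variable '$rate', Token.Number '10', Token.Comment '# throttle'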
diff --git a/contrib/python/Pygments/py3/pygments/lexers/urbi.py b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
index d9c1c9f82c..2167b72658 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/urbi.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
@@ -1,135 +1,135 @@
-"""
- pygments.lexers.urbi
- ~~~~~~~~~~~~~~~~~~~~
-
- Lexers for UrbiScript language.
-
+"""
+ pygments.lexers.urbi
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for UrbiScript language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import ExtendedRegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
-
-__all__ = ['UrbiscriptLexer']
-
-
-class UrbiscriptLexer(ExtendedRegexLexer):
- """
- For UrbiScript source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'UrbiScript'
- aliases = ['urbiscript']
- filenames = ['*.u']
- mimetypes = ['application/x-urbiscript']
-
- flags = re.DOTALL
-
- # TODO
- # - handle Experimental and deprecated tags with specific tokens
- # - handle Angles and Durations with specific tokens
-
- def blob_callback(lexer, match, ctx):
- text_before_blob = match.group(1)
- blob_start = match.group(2)
- blob_size_str = match.group(3)
- blob_size = int(blob_size_str)
- yield match.start(), String, text_before_blob
- ctx.pos += len(text_before_blob)
-
- # if blob size doesn't match blob format (example : "\B(2)(aaa)")
- # yield blob as a string
- if ctx.text[match.end() + blob_size] != ")":
- result = "\\B(" + blob_size_str + ")("
- yield match.start(), String, result
- ctx.pos += len(result)
- return
-
-        # if blob is well formatted, yield as Escape
- blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
- yield match.start(), String.Escape, blob_text
- ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # comments
- (r'//.*?\n', Comment),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
- (words((
- 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
- 'continue', 'default', 'else', 'enum', 'every', 'external',
- 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
- 'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
- 'waituntil', 'whenever', 'while'), suffix=r'\b'),
- Keyword),
- (words((
- 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
- 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
- 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
- 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
- 'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
- 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
- Keyword.Reserved),
-            # deprecated keywords, use a meaningful token when available
- (r'(emit|foreach|internal|loopn|static)\b', Keyword),
-            # ignored keywords, use a meaningful token when available
- (r'(private|protected|public)\b', Keyword),
- (r'(var|do|const|function|class)\b', Keyword.Declaration),
- (r'(true|false|nil|void)\b', Keyword.Constant),
- (words((
- 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
- 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
- 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
- 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
- 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
- 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
- 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
- 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
- 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
- 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
- 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
- 'UValue', 'UVar'), suffix=r'\b'),
- Name.Builtin),
- (r'(?:this)\b', Name.Builtin.Pseudo),
- # don't match single | and &
- (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
- (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
- Operator.Word),
- (r'[{}\[\]()]+', Punctuation),
- (r'(?:;|\||,|&|\?|!)+', Punctuation),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # Float, Integer, Angle and Duration
- (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
- r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
- # handle binary blob in strings
- (r'"', String.Double, "string.double"),
- (r"'", String.Single, "string.single"),
- ],
- 'string.double': [
- (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['UrbiscriptLexer']
+
+
+class UrbiscriptLexer(ExtendedRegexLexer):
+ """
+ For UrbiScript source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'UrbiScript'
+ aliases = ['urbiscript']
+ filenames = ['*.u']
+ mimetypes = ['application/x-urbiscript']
+
+ flags = re.DOTALL
+
+ # TODO
+ # - handle Experimental and deprecated tags with specific tokens
+ # - handle Angles and Durations with specific tokens
+
+ def blob_callback(lexer, match, ctx):
+ text_before_blob = match.group(1)
+ blob_start = match.group(2)
+ blob_size_str = match.group(3)
+ blob_size = int(blob_size_str)
+ yield match.start(), String, text_before_blob
+ ctx.pos += len(text_before_blob)
+
+        # if blob size doesn't match blob format (example: "\B(2)(aaa)")
+ # yield blob as a string
+ if ctx.text[match.end() + blob_size] != ")":
+ result = "\\B(" + blob_size_str + ")("
+ yield match.start(), String, result
+ ctx.pos += len(result)
+ return
+
+        # if blob is well formed, yield as Escape
+ blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
+ yield match.start(), String.Escape, blob_text
+ ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # comments
+ (r'//.*?\n', Comment),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
+ (words((
+ 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
+ 'continue', 'default', 'else', 'enum', 'every', 'external',
+ 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
+ 'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
+ 'waituntil', 'whenever', 'while'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
+ 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
+ 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
+ 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
+ 'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
+ 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
+ Keyword.Reserved),
+            # deprecated keywords, use a meaningful token when available
+ (r'(emit|foreach|internal|loopn|static)\b', Keyword),
+            # ignored keywords, use a meaningful token when available
+ (r'(private|protected|public)\b', Keyword),
+ (r'(var|do|const|function|class)\b', Keyword.Declaration),
+ (r'(true|false|nil|void)\b', Keyword.Constant),
+ (words((
+ 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
+ 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
+ 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
+ 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
+ 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
+ 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
+ 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
+ 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
+ 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
+ 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
+ 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
+ 'UValue', 'UVar'), suffix=r'\b'),
+ Name.Builtin),
+ (r'(?:this)\b', Name.Builtin.Pseudo),
+ # don't match single | and &
+ (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
+ (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
+ Operator.Word),
+ (r'[{}\[\]()]+', Punctuation),
+ (r'(?:;|\||,|&|\?|!)+', Punctuation),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # Float, Integer, Angle and Duration
+ (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
+ r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
+ # handle binary blob in strings
+ (r'"', String.Double, "string.double"),
+ (r"'", String.Single, "string.single"),
+ ],
+ 'string.double': [
+ (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
(r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'),
- ],
- 'string.single': [
- (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
+ ],
+ 'string.single': [
+ (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
(r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'),
- ],
- # from http://pygments.org/docs/lexerdevelopment/#changing-states
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ]
- }
+ ],
+ # from http://pygments.org/docs/lexerdevelopment/#changing-states
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ]
+ }
def analyse_text(text):
"""This is fairly similar to C and others, but freezeif and
diff --git a/contrib/python/Pygments/py3/pygments/lexers/web.py b/contrib/python/Pygments/py3/pygments/lexers/web.py
index a186048888..972800d12c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/web.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/web.py
@@ -1,23 +1,23 @@
-"""
- pygments.lexers.web
- ~~~~~~~~~~~~~~~~~~~
-
- Just export previously exported lexers.
-
+"""
+ pygments.lexers.web
+ ~~~~~~~~~~~~~~~~~~~
+
+ Just export previously exported lexers.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
- HamlLexer, ScamlLexer, JadeLexer
-from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
-from pygments.lexers.javascript import JavascriptLexer, LiveScriptLexer, \
- DartLexer, TypeScriptLexer, LassoLexer, ObjectiveJLexer, CoffeeScriptLexer
-from pygments.lexers.actionscript import ActionScriptLexer, \
- ActionScript3Lexer, MxmlLexer
-from pygments.lexers.php import PhpLexer
-from pygments.lexers.webmisc import DuelLexer, XQueryLexer, SlimLexer, QmlLexer
-from pygments.lexers.data import JsonLexer
-JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
-
-__all__ = []
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
+ HamlLexer, ScamlLexer, JadeLexer
+from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
+from pygments.lexers.javascript import JavascriptLexer, LiveScriptLexer, \
+ DartLexer, TypeScriptLexer, LassoLexer, ObjectiveJLexer, CoffeeScriptLexer
+from pygments.lexers.actionscript import ActionScriptLexer, \
+ ActionScript3Lexer, MxmlLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.webmisc import DuelLexer, XQueryLexer, SlimLexer, QmlLexer
+from pygments.lexers.data import JsonLexer
+JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
+
+__all__ = []
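Because the module above only re-exports lexers that now live in dedicated modules, the old and new import paths resolve to the same classes; a short sketch, not part of the diff:

from pygments.lexers.data import JsonLexer as DataJsonLexer
from pygments.lexers.web import JSONLexer, JsonLexer

# pygments.lexers.web is a compatibility shim: both paths yield the same
# class, and JSONLexer remains an alias kept for Pygments 1.5-era code.
assert JsonLexer is DataJsonLexer
assert JSONLexer is JsonLexer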
diff --git a/contrib/python/Pygments/py3/pygments/lexers/webmisc.py b/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
index b1fd455525..c831169419 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
@@ -1,991 +1,991 @@
-"""
- pygments.lexers.webmisc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Lexers for misc. web stuff.
-
+"""
+ pygments.lexers.webmisc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc. web stuff.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
- default, using
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal, Whitespace
-
-from pygments.lexers.css import _indentation, _starts_block
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers.javascript import JavascriptLexer
-from pygments.lexers.ruby import RubyLexer
-
-__all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
-
-
-class DuelLexer(RegexLexer):
- """
- Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
- See http://duelengine.org/.
- See http://jsonml.org/jbst/.
-
- .. versionadded:: 1.4
- """
-
- name = 'Duel'
- aliases = ['duel', 'jbst', 'jsonml+bst']
- filenames = ['*.duel', '*.jbst']
- mimetypes = ['text/x-duel', 'text/x-jbst']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#!:]?)(.*?)(%>)',
- bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
- (r'(<%\$)(.*?)(:)(.*?)(%>)',
- bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
- (r'(<%--)(.*?)(--%>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)',
- bygroups(using(HtmlLexer),
- using(JavascriptLexer), using(HtmlLexer))),
- (r'(.+?)(?=<)', using(HtmlLexer)),
- (r'.+', using(HtmlLexer)),
- ],
- }
-
-
-class XQueryLexer(ExtendedRegexLexer):
- """
- An XQuery lexer, parsing a stream and outputting the tokens needed to
- highlight xquery code.
-
- .. versionadded:: 1.4
- """
- name = 'XQuery'
- aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
- filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
- mimetypes = ['text/xquery', 'application/xquery']
-
- xquery_parse_state = []
-
- # FIX UNICODE LATER
- # ncnamestartchar = (
+
+from pygments.lexers.css import _indentation, _starts_block
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
+
+
+class DuelLexer(RegexLexer):
+ """
+ Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
+ See http://duelengine.org/.
+ See http://jsonml.org/jbst/.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Duel'
+ aliases = ['duel', 'jbst', 'jsonml+bst']
+ filenames = ['*.duel', '*.jbst']
+ mimetypes = ['text/x-duel', 'text/x-jbst']
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'(<%[@=#!:]?)(.*?)(%>)',
+ bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
+ (r'(<%\$)(.*?)(:)(.*?)(%>)',
+ bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
+ (r'(<%--)(.*?)(--%>)',
+ bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+ (r'(<script.*?>)(.*?)(</script>)',
+ bygroups(using(HtmlLexer),
+ using(JavascriptLexer), using(HtmlLexer))),
+ (r'(.+?)(?=<)', using(HtmlLexer)),
+ (r'.+', using(HtmlLexer)),
+ ],
+ }
+
+
+class XQueryLexer(ExtendedRegexLexer):
+ """
+ An XQuery lexer, parsing a stream and outputting the tokens needed to
+ highlight xquery code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'XQuery'
+ aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
+ filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
+ mimetypes = ['text/xquery', 'application/xquery']
+
+ xquery_parse_state = []
+
+ # FIX UNICODE LATER
+ # ncnamestartchar = (
# r"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
# r"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
# r"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
# r"[\u10000-\uEFFFF]"
- # )
- ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
- # FIX UNICODE LATER
+ # )
+ ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
+ # FIX UNICODE LATER
# ncnamechar = ncnamestartchar + (r"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
# r"[\u203F-\u2040]")
- ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
- ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
- pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
- pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
- pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
- prefixedname = "%s:%s" % (ncname, ncname)
- unprefixedname = ncname
- qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
-
- entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
- charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
-
- stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
- stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
-
- # FIX UNICODE LATER
+ ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
+ ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
+ pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
+ pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
+ pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
+ prefixedname = "%s:%s" % (ncname, ncname)
+ unprefixedname = ncname
+ qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
+
+ entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
+ charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
+
+ stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
+ stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
+
+ # FIX UNICODE LATER
# elementcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
# r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
+ elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
# quotattrcontentchar = (r'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
# r'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
+ quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%()*+,\-./:;=?@\[\\\]^_\'`|~]'
# aposattrcontentchar = (r'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
# r'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]'
-
- # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
- # aposattrcontentchar
- # x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
-
- flags = re.DOTALL | re.MULTILINE | re.UNICODE
-
- def punctuation_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def operator_root_callback(lexer, match, ctx):
- yield match.start(), Operator, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def popstate_tag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
+ aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%()*+,\-./:;=?@\[\\\]^_`|~]'
+
+ # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
+ # aposattrcontentchar
+ # x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+
+ flags = re.DOTALL | re.MULTILINE | re.UNICODE
+
+ def punctuation_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def operator_root_callback(lexer, match, ctx):
+ yield match.start(), Operator, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def popstate_tag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
if lexer.xquery_parse_state:
ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- next_state = lexer.xquery_parse_state.pop()
- if next_state == 'occurrenceindicator':
- if re.match("[?*+]+", match.group(2)):
- yield match.start(), Punctuation, match.group(2)
- ctx.stack.append('operator')
- ctx.pos = match.end()
- else:
- ctx.stack.append('operator')
- ctx.pos = match.end(1)
- else:
- ctx.stack.append(next_state)
- ctx.pos = match.end(1)
-
- def popstate_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # if we have run out of our state stack, pop whatever is on the pygments
- # state stack
- if len(lexer.xquery_parse_state) == 0:
- ctx.stack.pop()
+ ctx.pos = match.end()
+
+ def popstate_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ ctx.pos = match.end()
+
+ def popstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ next_state = lexer.xquery_parse_state.pop()
+ if next_state == 'occurrenceindicator':
+ if re.match("[?*+]+", match.group(2)):
+ yield match.start(), Punctuation, match.group(2)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+ else:
+ ctx.stack.append('operator')
+ ctx.pos = match.end(1)
+ else:
+ ctx.stack.append(next_state)
+ ctx.pos = match.end(1)
+
+ def popstate_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # if we have run out of our state stack, pop whatever is on the pygments
+ # state stack
+ if len(lexer.xquery_parse_state) == 0:
+ ctx.stack.pop()
if not ctx.stack:
# make sure we have at least the root state on invalid inputs
ctx.stack = ['root']
- elif len(ctx.stack) > 1:
- ctx.stack.append(lexer.xquery_parse_state.pop())
- else:
-            # I don't know if I'll need this, but just in case, default back to root
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_element_content_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('element_content')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.pos = match.end()
-
- def pushstate_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_order_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_map_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate_withmode(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Keyword, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('kindtest')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtestforpi')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('occurrenceindicator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_operator_root_construct_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- cur_state = ctx.stack.pop()
- lexer.xquery_parse_state.append(cur_state)
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_operator_attribute_callback(lexer, match, ctx):
- yield match.start(), Name.Attribute, match.group(1)
- ctx.stack.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- tokens = {
- 'comment': [
- # xquery comments
+ elif len(ctx.stack) > 1:
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ else:
+            # I don't know if I'll need this, but just in case, default back to root
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_element_content_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('element_content')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.pos = match.end()
+
+ def pushstate_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_order_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_map_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate_withmode(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Keyword, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('kindtest')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtestforpi')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('occurrenceindicator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_construct_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ cur_state = ctx.stack.pop()
+ lexer.xquery_parse_state.append(cur_state)
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_attribute_callback(lexer, match, ctx):
+ yield match.start(), Name.Attribute, match.group(1)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ tokens = {
+ 'comment': [
+ # xquery comments
(r'[^:()]+', Comment),
(r'\(:', Comment, '#push'),
(r':\)', Comment, '#pop'),
(r'[:()]', Comment),
- ],
- 'whitespace': [
+ ],
+ 'whitespace': [
(r'\s+', Whitespace),
- ],
- 'operator': [
- include('whitespace'),
- (r'(\})', popstate_callback),
- (r'\(:', Comment, 'comment'),
-
- (r'(\{)', pushstate_root_callback),
- (r'then|else|external|at|div|except', Keyword, 'root'),
- (r'order by', Keyword, 'root'),
- (r'group by', Keyword, 'root'),
- (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
- (r'and|or', Operator.Word, 'root'),
- (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
- Operator.Word, 'root'),
- (r'return|satisfies|to|union|where|count|preserve\s+strip',
- Keyword, 'root'),
- (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=|!)',
- operator_root_callback),
- (r'(::|:|;|\[|//|/|,)',
- punctuation_root_callback),
- (r'(castable|cast)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(instance)(\s+)(of)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(treat)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
+ ],
+ 'operator': [
+ include('whitespace'),
+ (r'(\})', popstate_callback),
+ (r'\(:', Comment, 'comment'),
+
+ (r'(\{)', pushstate_root_callback),
+ (r'then|else|external|at|div|except', Keyword, 'root'),
+ (r'order by', Keyword, 'root'),
+ (r'group by', Keyword, 'root'),
+ (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
+ (r'and|or', Operator.Word, 'root'),
+ (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
+ Operator.Word, 'root'),
+ (r'return|satisfies|to|union|where|count|preserve\s+strip',
+ Keyword, 'root'),
+ (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=|!)',
+ operator_root_callback),
+ (r'(::|:|;|\[|//|/|,)',
+ punctuation_root_callback),
+ (r'(castable|cast)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(instance)(\s+)(of)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
+ (r'(treat)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
(r'(case)(\s+)(' + stringdouble + ')',
bygroups(Keyword, Text, String.Double), 'itemtype'),
(r'(case)(\s+)(' + stringsingle + ')',
bygroups(Keyword, Text, String.Single), 'itemtype'),
- (r'(case|as)\b', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)',
- bygroups(Punctuation, Text, Keyword), 'itemtype'),
- (r'\$', Name.Variable, 'varname'),
- (r'(for|let|previous|next)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
+ (r'(case|as)\b', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)',
+ bygroups(Punctuation, Text, Keyword), 'itemtype'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(for|let|previous|next)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable),
'varname'),
- # (r'\)|\?|\]', Punctuation, '#push'),
- (r'\)|\?|\]', Punctuation),
- (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
- (r'ascending|descending|default', Keyword, '#push'),
- (r'(allowing)(\s+)(empty)', bygroups(Keyword, Text, Keyword)),
- (r'external', Keyword),
- (r'(start|when|end)', Keyword, 'root'),
- (r'(only)(\s+)(end)', bygroups(Keyword, Text, Keyword), 'root'),
- (r'collation', Keyword, 'uritooperator'),
-
- # eXist specific XQUF
- (r'(into|following|preceding|with)', Keyword, 'root'),
-
- # support for current context on rhs of Simple Map Operator
- (r'\.', Operator),
-
- # finally catch all string literals and stay in operator state
- (stringdouble, String.Double),
- (stringsingle, String.Single),
-
- (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
- ],
- 'uritooperator': [
- (stringdouble, String.Double, '#pop'),
- (stringsingle, String.Single, '#pop'),
- ],
- 'namespacedecl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
- (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r',', Punctuation),
- (r'=', Operator),
- (r';', Punctuation, 'root'),
- (ncname, Name.Namespace),
- ],
- 'namespacekeyword': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double, 'namespacedecl'),
- (stringsingle, String.Single, 'namespacedecl'),
- (r'inherit|no-inherit', Keyword, 'root'),
- (r'namespace', Keyword, 'namespacedecl'),
- (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
- (r'preserve|no-preserve', Keyword),
- (r',', Punctuation),
- ],
+ # (r'\)|\?|\]', Punctuation, '#push'),
+ (r'\)|\?|\]', Punctuation),
+ (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
+ (r'ascending|descending|default', Keyword, '#push'),
+ (r'(allowing)(\s+)(empty)', bygroups(Keyword, Text, Keyword)),
+ (r'external', Keyword),
+ (r'(start|when|end)', Keyword, 'root'),
+ (r'(only)(\s+)(end)', bygroups(Keyword, Text, Keyword), 'root'),
+ (r'collation', Keyword, 'uritooperator'),
+
+ # eXist specific XQUF
+ (r'(into|following|preceding|with)', Keyword, 'root'),
+
+ # support for current context on rhs of Simple Map Operator
+ (r'\.', Operator),
+
+ # finally catch all string literals and stay in operator state
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+
+ (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
+ ],
+ 'uritooperator': [
+ (stringdouble, String.Double, '#pop'),
+ (stringsingle, String.Single, '#pop'),
+ ],
+ 'namespacedecl': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
+ (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r',', Punctuation),
+ (r'=', Operator),
+ (r';', Punctuation, 'root'),
+ (ncname, Name.Namespace),
+ ],
+ 'namespacekeyword': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double, 'namespacedecl'),
+ (stringsingle, String.Single, 'namespacedecl'),
+ (r'inherit|no-inherit', Keyword, 'root'),
+ (r'namespace', Keyword, 'namespacedecl'),
+ (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
+ (r'preserve|no-preserve', Keyword),
+ (r',', Punctuation),
+ ],
'annotationname': [
- (r'\(:', Comment, 'comment'),
- (qname, Name.Decorator),
- (r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)),
- (r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)),
+ (r'\(:', Comment, 'comment'),
+ (qname, Name.Decorator),
+ (r'(\()(' + stringdouble + ')', bygroups(Punctuation, String.Double)),
+ (r'(\()(' + stringsingle + ')', bygroups(Punctuation, String.Single)),
(r'(\,)(\s+)(' + stringdouble + ')',
bygroups(Punctuation, Text, String.Double)),
(r'(\,)(\s+)(' + stringsingle + ')',
bygroups(Punctuation, Text, String.Single)),
- (r'\)', Punctuation),
- (r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'),
+ (r'\)', Punctuation),
+ (r'(\s+)(\%)', bygroups(Text, Name.Decorator), 'annotationname'),
(r'(\s+)(variable)(\s+)(\$)',
bygroups(Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
(r'(\s+)(function)(\s+)',
bygroups(Text, Keyword.Declaration, Text), 'root')
- ],
- 'varname': [
- (r'\(:', Comment, 'comment'),
+ ],
+ 'varname': [
+ (r'\(:', Comment, 'comment'),
(r'(' + qname + r')(\()?', bygroups(Name, Punctuation), 'operator'),
- ],
- 'singletype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (ncname + r'(:\*)', Name.Variable, 'operator'),
- (qname, Name.Variable, 'operator'),
- ],
- 'itemtype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\$', Name.Variable, 'varname'),
- (r'(void)(\s*)(\()(\s*)(\))',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
- (r'(element|attribute|schema-element|schema-attribute|comment|text|'
- r'node|binary|document-node|empty-sequence)(\s*)(\()',
- pushstate_occurrenceindicator_kindtest_callback),
- # Marklogic specific type?
- (r'(processing-instruction)(\s*)(\()',
- bygroups(Keyword, Text, Punctuation),
- ('occurrenceindicator', 'kindtestforpi')),
- (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation),
- 'occurrenceindicator'),
- (r'(\(\#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
- (r';', Punctuation, '#pop'),
- (r'then|else', Keyword, '#pop'),
- (r'(at)(\s+)(' + stringdouble + ')',
- bygroups(Keyword, Text, String.Double), 'namespacedecl'),
- (r'(at)(\s+)(' + stringsingle + ')',
- bygroups(Keyword, Text, String.Single), 'namespacedecl'),
- (r'except|intersect|in|is|return|satisfies|to|union|where|count',
- Keyword, 'root'),
- (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
- (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
- (r'external|at', Keyword, 'root'),
- (r'(stable)(\s+)(order)(\s+)(by)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
- (r'(castable|cast)(\s+)(as)',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
- (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
+ ],
+ 'singletype': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (ncname + r'(:\*)', Name.Variable, 'operator'),
+ (qname, Name.Variable, 'operator'),
+ ],
+ 'itemtype': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(void)(\s*)(\()(\s*)(\))',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
+ (r'(element|attribute|schema-element|schema-attribute|comment|text|'
+ r'node|binary|document-node|empty-sequence)(\s*)(\()',
+ pushstate_occurrenceindicator_kindtest_callback),
+ # Marklogic specific type?
+ (r'(processing-instruction)(\s*)(\()',
+ bygroups(Keyword, Text, Punctuation),
+ ('occurrenceindicator', 'kindtestforpi')),
+ (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation),
+ 'occurrenceindicator'),
+ (r'(\(\#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
+ (r';', Punctuation, '#pop'),
+ (r'then|else', Keyword, '#pop'),
+ (r'(at)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'namespacedecl'),
+ (r'(at)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'namespacedecl'),
+ (r'except|intersect|in|is|return|satisfies|to|union|where|count',
+ Keyword, 'root'),
+ (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
+ (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
+ (r'external|at', Keyword, 'root'),
+ (r'(stable)(\s+)(order)(\s+)(by)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
+ (r'(castable|cast)(\s+)(as)',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
+ (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
(r'(case)(\s+)(' + stringdouble + ')',
bygroups(Keyword, Text, String.Double), 'itemtype'),
(r'(case)(\s+)(' + stringsingle + ')',
bygroups(Keyword, Text, String.Single), 'itemtype'),
- (r'case|as', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
- (ncname + r':\*', Keyword.Type, 'operator'),
- (r'(function|map|array)(\()', bygroups(Keyword.Type, Punctuation)),
- (qname, Keyword.Type, 'occurrenceindicator'),
- ],
- 'kindtest': [
- (r'\(:', Comment, 'comment'),
- (r'\{', Punctuation, 'root'),
- (r'(\))([*+?]?)', popstate_kindtest_callback),
- (r'\*', Name, 'closekindtest'),
- (qname, Name, 'closekindtest'),
- (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
- ],
- 'kindtestforpi': [
- (r'\(:', Comment, 'comment'),
- (r'\)', Punctuation, '#pop'),
- (ncname, Name.Variable),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- ],
- 'closekindtest': [
- (r'\(:', Comment, 'comment'),
- (r'(\))', popstate_callback),
- (r',', Punctuation),
- (r'(\{)', pushstate_operator_root_callback),
- (r'\?', Punctuation),
- ],
- 'xml_comment': [
- (r'(-->)', popstate_xmlcomment_callback),
- (r'[^-]{1,2}', Literal),
+ (r'case|as', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+ (ncname + r':\*', Keyword.Type, 'operator'),
+ (r'(function|map|array)(\()', bygroups(Keyword.Type, Punctuation)),
+ (qname, Keyword.Type, 'occurrenceindicator'),
+ ],
+ 'kindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'\{', Punctuation, 'root'),
+ (r'(\))([*+?]?)', popstate_kindtest_callback),
+ (r'\*', Name, 'closekindtest'),
+ (qname, Name, 'closekindtest'),
+ (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
+ ],
+ 'kindtestforpi': [
+ (r'\(:', Comment, 'comment'),
+ (r'\)', Punctuation, '#pop'),
+ (ncname, Name.Variable),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ ],
+ 'closekindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'(\))', popstate_callback),
+ (r',', Punctuation),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'\?', Punctuation),
+ ],
+ 'xml_comment': [
+ (r'(-->)', popstate_xmlcomment_callback),
+ (r'[^-]{1,2}', Literal),
(r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
Literal),
- ],
- 'processing_instruction': [
- (r'\s+', Text, 'processing_instruction_content'),
- (r'\?>', String.Doc, '#pop'),
- (pitarget, Name),
- ],
- 'processing_instruction_content': [
- (r'\?>', String.Doc, '#pop'),
+ ],
+ 'processing_instruction': [
+ (r'\s+', Text, 'processing_instruction_content'),
+ (r'\?>', String.Doc, '#pop'),
+ (pitarget, Name),
+ ],
+ 'processing_instruction_content': [
+ (r'\?>', String.Doc, '#pop'),
(r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
Literal),
- ],
- 'cdata_section': [
- (r']]>', String.Doc, '#pop'),
+ ],
+ 'cdata_section': [
+ (r']]>', String.Doc, '#pop'),
(r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
Literal),
- ],
- 'start_tag': [
- include('whitespace'),
- (r'(/>)', popstate_tag_callback),
- (r'>', Name.Tag, 'element_content'),
- (r'"', Punctuation, 'quot_attribute_content'),
- (r"'", Punctuation, 'apos_attribute_content'),
- (r'=', Operator),
- (qname, Name.Tag),
- ],
- 'quot_attribute_content': [
- (r'"', Punctuation, 'start_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'""', Name.Attribute),
- (quotattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'apos_attribute_content': [
- (r"'", Punctuation, 'start_tag'),
- (r'\{', Punctuation, 'root'),
- (r"''", Name.Attribute),
- (aposattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'element_content': [
- (r'</', Name.Tag, 'end_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'(<!--)', pushstate_element_content_xmlcomment_callback),
- (r'(<\?)', pushstate_element_content_processing_instruction_callback),
- (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
- (r'(<)', pushstate_element_content_starttag_callback),
- (elementcontentchar, Literal),
- (entityref, Literal),
- (charref, Literal),
- (r'\{\{|\}\}', Literal),
- ],
- 'end_tag': [
- include('whitespace'),
- (r'(>)', popstate_tag_callback),
- (qname, Name.Tag),
- ],
- 'xmlspace_decl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'preserve|strip', Keyword, '#pop'),
- ],
- 'declareordering': [
- (r'\(:', Comment, 'comment'),
- include('whitespace'),
- (r'ordered|unordered', Keyword, '#pop'),
- ],
- 'xqueryversion': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r'encoding', Keyword),
- (r';', Punctuation, '#pop'),
- ],
- 'pragma': [
- (qname, Name.Variable, 'pragmacontents'),
- ],
- 'pragmacontents': [
- (r'#\)', Punctuation, 'operator'),
+ ],
+ 'start_tag': [
+ include('whitespace'),
+ (r'(/>)', popstate_tag_callback),
+ (r'>', Name.Tag, 'element_content'),
+ (r'"', Punctuation, 'quot_attribute_content'),
+ (r"'", Punctuation, 'apos_attribute_content'),
+ (r'=', Operator),
+ (qname, Name.Tag),
+ ],
+ 'quot_attribute_content': [
+ (r'"', Punctuation, 'start_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'""', Name.Attribute),
+ (quotattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'apos_attribute_content': [
+ (r"'", Punctuation, 'start_tag'),
+ (r'\{', Punctuation, 'root'),
+ (r"''", Name.Attribute),
+ (aposattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'element_content': [
+ (r'</', Name.Tag, 'end_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'(<!--)', pushstate_element_content_xmlcomment_callback),
+ (r'(<\?)', pushstate_element_content_processing_instruction_callback),
+ (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
+ (r'(<)', pushstate_element_content_starttag_callback),
+ (elementcontentchar, Literal),
+ (entityref, Literal),
+ (charref, Literal),
+ (r'\{\{|\}\}', Literal),
+ ],
+ 'end_tag': [
+ include('whitespace'),
+ (r'(>)', popstate_tag_callback),
+ (qname, Name.Tag),
+ ],
+ 'xmlspace_decl': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'preserve|strip', Keyword, '#pop'),
+ ],
+ 'declareordering': [
+ (r'\(:', Comment, 'comment'),
+ include('whitespace'),
+ (r'ordered|unordered', Keyword, '#pop'),
+ ],
+ 'xqueryversion': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r'encoding', Keyword),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'pragma': [
+ (qname, Name.Variable, 'pragmacontents'),
+ ],
+ 'pragmacontents': [
+ (r'#\)', Punctuation, 'operator'),
(r'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|[\U00010000-\U0010FFFF]',
Literal),
- (r'(\s+)', Text),
- ],
- 'occurrenceindicator': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\*|\?|\+', Operator, 'operator'),
- (r':=', Operator, 'root'),
- default('operator'),
- ],
- 'option': [
- include('whitespace'),
- (qname, Name.Variable, '#pop'),
- ],
- 'qname_braren': [
- include('whitespace'),
- (r'(\{)', pushstate_operator_root_callback),
- (r'(\()', Punctuation, 'root'),
- ],
- 'element_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'attribute_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'root': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
-
- # handle operator state
- # order on numbers matters - handle most complex first
- (r'\d+(\.\d*)?[eE][+-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+)[eE][+-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
- (r'(\d+)', Number.Integer, 'operator'),
- (r'(\.\.|\.|\))', Punctuation, 'operator'),
- (r'(declare)(\s+)(construction)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (r'(declare)(\s+)(default)(\s+)(order)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
- (r'(declare)(\s+)(context)(\s+)(item)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
+ (r'(\s+)', Text),
+ ],
+ 'occurrenceindicator': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\*|\?|\+', Operator, 'operator'),
+ (r':=', Operator, 'root'),
+ default('operator'),
+ ],
+ 'option': [
+ include('whitespace'),
+ (qname, Name.Variable, '#pop'),
+ ],
+ 'qname_braren': [
+ include('whitespace'),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'(\()', Punctuation, 'root'),
+ ],
+ 'element_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'attribute_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+
+ # handle operator state
+ # order on numbers matters - handle most complex first
+ (r'\d+(\.\d*)?[eE][+-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+)[eE][+-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
+ (r'(\d+)', Number.Integer, 'operator'),
+ (r'(\.\.|\.|\))', Punctuation, 'operator'),
+ (r'(declare)(\s+)(construction)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
+ (r'(declare)(\s+)(default)(\s+)(order)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
+ (r'(declare)(\s+)(context)(\s+)(item)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration), 'operator'),
(ncname + r':\*', Name, 'operator'),
(r'\*:'+ncname, Name.Tag, 'operator'),
(r'\*', Name.Tag, 'operator'),
- (stringdouble, String.Double, 'operator'),
- (stringsingle, String.Single, 'operator'),
-
- (r'(\}|\])', popstate_callback),
-
- # NAMESPACE DECL
- (r'(declare)(\s+)(default)(\s+)(collation)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration)),
- (r'(module|declare)(\s+)(namespace)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
- (r'(declare)(\s+)(base-uri)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
-
- # NAMESPACE KEYWORD
- (r'(declare)(\s+)(default)(\s+)(element|function)',
+ (stringdouble, String.Double, 'operator'),
+ (stringsingle, String.Single, 'operator'),
+
+ (r'(\}|\])', popstate_callback),
+
+ # NAMESPACE DECL
+ (r'(declare)(\s+)(default)(\s+)(collation)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration)),
+ (r'(module|declare)(\s+)(namespace)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
+ (r'(declare)(\s+)(base-uri)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacedecl'),
+
+ # NAMESPACE KEYWORD
+ (r'(declare)(\s+)(default)(\s+)(element|function)',
bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Keyword.Declaration),
'namespacekeyword'),
- (r'(import)(\s+)(schema|module)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
- (r'(declare)(\s+)(copy-namespaces)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'),
-
- # VARNAMEs
- (r'(for|let|some|every)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
- bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
- (r'\$', Name.Variable, 'varname'),
- (r'(declare)(\s+)(variable)(\s+)(\$)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
-
- # ANNOTATED GLOBAL VARIABLES AND FUNCTIONS
- (r'(declare)(\s+)(\%)', bygroups(Keyword.Declaration, Text, Name.Decorator), 'annotationname'),
-
- # ITEMTYPE
- (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
-
- (r'(element|attribute|schema-element|schema-attribute|comment|'
- r'text|node|document-node|empty-sequence)(\s+)(\()',
- pushstate_operator_kindtest_callback),
-
- (r'(processing-instruction)(\s+)(\()',
- pushstate_operator_kindtestforpi_callback),
-
- (r'(<!--)', pushstate_operator_xmlcomment_callback),
-
- (r'(<\?)', pushstate_operator_processing_instruction_callback),
-
- (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
-
- # (r'</', Name.Tag, 'end_tag'),
- (r'(<)', pushstate_operator_starttag_callback),
-
- (r'(declare)(\s+)(boundary-space)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'xmlspace_decl'),
-
- (r'(validate)(\s+)(lax|strict)',
- pushstate_operator_root_validate_withmode),
- (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
- (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(switch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(element|attribute|namespace)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
-
- (r'(document|text|processing-instruction|comment)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
- # ATTRIBUTE
- (r'(attribute)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Text), 'attribute_qname'),
- # ELEMENT
- (r'(element)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Text), 'element_qname'),
- # PROCESSING_INSTRUCTION
- (r'(processing-instruction|namespace)(\s+)(' + ncname + r')(\s*)(\{)',
- bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
- 'operator'),
-
- (r'(declare|define)(\s+)(function)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration)),
-
- (r'(\{|\[)', pushstate_operator_root_callback),
-
- (r'(unordered|ordered)(\s*)(\{)',
- pushstate_operator_order_callback),
-
- (r'(map|array)(\s*)(\{)',
- pushstate_operator_map_callback),
-
- (r'(declare)(\s+)(ordering)',
- bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'declareordering'),
-
- (r'(xquery)(\s+)(version)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
-
- (r'(\(#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
-
- # sometimes return can occur in root state
- (r'return', Keyword),
-
- (r'(declare)(\s+)(option)', bygroups(Keyword.Declaration, Text, Keyword.Declaration),
- 'option'),
-
- # URI LITERALS - single and double quoted
- (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
- (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
-
- (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
- bygroups(Keyword, Punctuation)),
- (r'(descendant|following-sibling|following|parent|preceding-sibling'
- r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
-
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
-
- (r'then|else', Keyword),
-
- # eXist specific XQUF
- (r'(update)(\s*)(insert|delete|replace|value|rename)', bygroups(Keyword, Text, Keyword)),
- (r'(into|following|preceding|with)', Keyword),
-
- # Marklogic specific
- (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
- (r'(catch)(\s*)(\()(\$)',
- bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
-
-
- (r'(@'+qname+')', Name.Attribute, 'operator'),
- (r'(@'+ncname+')', Name.Attribute, 'operator'),
- (r'@\*:'+ncname, Name.Attribute, 'operator'),
- (r'@\*', Name.Attribute, 'operator'),
- (r'(@)', Name.Attribute, 'operator'),
-
- (r'//|/|\+|-|;|,|\(|\)', Punctuation),
-
- # STANDALONE QNAMES
- (qname + r'(?=\s*\{)', Name.Tag, 'qname_braren'),
- (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
- (r'(' + qname + ')(#)([0-9]+)', bygroups(Name.Function, Keyword.Type, Number.Integer)),
- (qname, Name.Tag, 'operator'),
- ]
- }
-
-
-class QmlLexer(RegexLexer):
- """
- For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
-
- .. versionadded:: 1.6
- """
-
- # QML is based on javascript, so much of this is taken from the
- # JavascriptLexer above.
-
- name = 'QML'
- aliases = ['qml', 'qbs']
- filenames = ['*.qml', '*.qbs']
- mimetypes = ['application/x-qml', 'application/x-qt.qbs+qml']
-
- # pasted from JavascriptLexer, with some additions
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- # QML insertions
- (r'\bid\s*:\s*[A-Za-z][\w.]*', Keyword.Declaration,
- 'slashstartsregex'),
- (r'\b[A-Za-z][\w.]*\s*:', Keyword, 'slashstartsregex'),
-
- # the rest from JavascriptLexer
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
+ (r'(import)(\s+)(schema|module)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
+ (r'(declare)(\s+)(copy-namespaces)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'namespacekeyword'),
+
+ # VARNAMEs
+ (r'(for|let|some|every)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'(for)(\s+)(tumbling|sliding)(\s+)(window)(\s+)(\$)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(declare)(\s+)(variable)(\s+)(\$)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration, Text, Name.Variable), 'varname'),
+
+ # ANNOTATED GLOBAL VARIABLES AND FUNCTIONS
+ (r'(declare)(\s+)(\%)', bygroups(Keyword.Declaration, Text, Name.Decorator), 'annotationname'),
+
+ # ITEMTYPE
+ (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+
+ (r'(element|attribute|schema-element|schema-attribute|comment|'
+ r'text|node|document-node|empty-sequence)(\s+)(\()',
+ pushstate_operator_kindtest_callback),
+
+ (r'(processing-instruction)(\s+)(\()',
+ pushstate_operator_kindtestforpi_callback),
+
+ (r'(<!--)', pushstate_operator_xmlcomment_callback),
+
+ (r'(<\?)', pushstate_operator_processing_instruction_callback),
+
+ (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
+
+ # (r'</', Name.Tag, 'end_tag'),
+ (r'(<)', pushstate_operator_starttag_callback),
+
+ (r'(declare)(\s+)(boundary-space)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'xmlspace_decl'),
+
+ (r'(validate)(\s+)(lax|strict)',
+ pushstate_operator_root_validate_withmode),
+ (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
+ (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+ (r'(switch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+ (r'(element|attribute|namespace)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+
+ (r'(document|text|processing-instruction|comment)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+ # ATTRIBUTE
+ (r'(attribute)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'attribute_qname'),
+ # ELEMENT
+ (r'(element)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'element_qname'),
+ # PROCESSING_INSTRUCTION
+ (r'(processing-instruction|namespace)(\s+)(' + ncname + r')(\s*)(\{)',
+ bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
+ 'operator'),
+
+ (r'(declare|define)(\s+)(function)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration)),
+
+ (r'(\{|\[)', pushstate_operator_root_callback),
+
+ (r'(unordered|ordered)(\s*)(\{)',
+ pushstate_operator_order_callback),
+
+ (r'(map|array)(\s*)(\{)',
+ pushstate_operator_map_callback),
+
+ (r'(declare)(\s+)(ordering)',
+ bygroups(Keyword.Declaration, Text, Keyword.Declaration), 'declareordering'),
+
+ (r'(xquery)(\s+)(version)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
+
+ (r'(\(#)(\s*)', bygroups(Punctuation, Text), 'pragma'),
+
+ # sometimes return can occur in root state
+ (r'return', Keyword),
+
+ (r'(declare)(\s+)(option)', bygroups(Keyword.Declaration, Text, Keyword.Declaration),
+ 'option'),
+
+ # URI LITERALS - single and double quoted
+ (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
+ (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
+
+ (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
+ bygroups(Keyword, Punctuation)),
+ (r'(descendant|following-sibling|following|parent|preceding-sibling'
+ r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
+
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+
+ (r'then|else', Keyword),
+
+ # eXist specific XQUF
+ (r'(update)(\s*)(insert|delete|replace|value|rename)', bygroups(Keyword, Text, Keyword)),
+ (r'(into|following|preceding|with)', Keyword),
+
+ # Marklogic specific
+ (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
+ (r'(catch)(\s*)(\()(\$)',
+ bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
+
+
+ (r'(@'+qname+')', Name.Attribute, 'operator'),
+ (r'(@'+ncname+')', Name.Attribute, 'operator'),
+ (r'@\*:'+ncname, Name.Attribute, 'operator'),
+ (r'@\*', Name.Attribute, 'operator'),
+ (r'(@)', Name.Attribute, 'operator'),
+
+ (r'//|/|\+|-|;|,|\(|\)', Punctuation),
+
+ # STANDALONE QNAMES
+ (qname + r'(?=\s*\{)', Name.Tag, 'qname_braren'),
+ (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
+ (r'(' + qname + ')(#)([0-9]+)', bygroups(Name.Function, Keyword.Type, Number.Integer)),
+ (qname, Name.Tag, 'operator'),
+ ]
+ }
+
+
+class QmlLexer(RegexLexer):
+ """
+ For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
+
+ .. versionadded:: 1.6
+ """
+
+ # QML is based on javascript, so much of this is taken from the
+ # JavascriptLexer above.
+
+ name = 'QML'
+ aliases = ['qml', 'qbs']
+ filenames = ['*.qml', '*.qbs']
+ mimetypes = ['application/x-qml', 'application/x-qt.qbs+qml']
+
+ # pasted from JavascriptLexer, with some additions
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ # QML insertions
+ (r'\bid\s*:\s*[A-Za-z][\w.]*', Keyword.Declaration,
+ 'slashstartsregex'),
+ (r'\b[A-Za-z][\w.]*\s*:', Keyword, 'slashstartsregex'),
+
+ # the rest from JavascriptLexer
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
- ]
- }
-
-
-class CirruLexer(RegexLexer):
+ ]
+ }
+
+
+class CirruLexer(RegexLexer):
r"""
- Syntax rules of Cirru can be found at:
- http://cirru.org/
-
- * using ``()`` for expressions, but restricted in a same line
- * using ``""`` for strings, with ``\`` for escaping chars
- * using ``$`` as folding operator
- * using ``,`` as unfolding operator
- * using indentations for nested blocks
-
- .. versionadded:: 2.0
- """
-
- name = 'Cirru'
- aliases = ['cirru']
- filenames = ['*.cirru']
- mimetypes = ['text/x-cirru']
- flags = re.MULTILINE
-
- tokens = {
- 'string': [
+ Syntax rules of Cirru can be found at:
+ http://cirru.org/
+
+ * using ``()`` for expressions, but restricted in a same line
+ * using ``""`` for strings, with ``\`` for escaping chars
+ * using ``$`` as folding operator
+ * using ``,`` as unfolding operator
+ * using indentations for nested blocks
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Cirru'
+ aliases = ['cirru']
+ filenames = ['*.cirru']
+ mimetypes = ['text/x-cirru']
+ flags = re.MULTILINE
+
+ tokens = {
+ 'string': [
(r'[^"\\\n]+', String),
- (r'\\', String.Escape, 'escape'),
- (r'"', String, '#pop'),
- ],
- 'escape': [
- (r'.', String.Escape, '#pop'),
- ],
- 'function': [
- (r'\,', Operator, '#pop'),
- (r'[^\s"()]+', Name.Function, '#pop'),
- (r'\)', Operator, '#pop'),
- (r'(?=\n)', Text, '#pop'),
- (r'\(', Operator, '#push'),
- (r'"', String, ('#pop', 'string')),
- (r'[ ]+', Text.Whitespace),
- ],
- 'line': [
- (r'(?<!\w)\$(?!\w)', Operator, 'function'),
- (r'\(', Operator, 'function'),
- (r'\)', Operator),
- (r'\n', Text, '#pop'),
- (r'"', String, 'string'),
- (r'[ ]+', Text.Whitespace),
- (r'[+-]?[\d.]+\b', Number),
- (r'[^\s"()]+', Name.Variable)
- ],
- 'root': [
- (r'^\n+', Text.Whitespace),
- default(('line', 'function')),
- ]
- }
-
-
-class SlimLexer(ExtendedRegexLexer):
- """
- For Slim markup.
-
- .. versionadded:: 2.0
- """
-
- name = 'Slim'
- aliases = ['slim']
- filenames = ['*.slim']
- mimetypes = ['text/x-slim']
-
- flags = re.IGNORECASE
- _dot = r'(?: \|\n(?=.* \|)|.)'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'([ \t]*==?)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- (r'[ \t]+[\w:-]+(?==)', Name.Attribute, 'html-attributes'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
- (r'(-)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- (r'[ \t]+\n', Punctuation, '#pop:2'),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(.*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'=', Punctuation),
- (r'"[^"]+"', using(RubyLexer), 'tag'),
- (r'\'[^\']+\'', using(RubyLexer), 'tag'),
- (r'\w+', Text, 'tag'),
- ],
-
- 'slim-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
- }
+ (r'\\', String.Escape, 'escape'),
+ (r'"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'function': [
+ (r'\,', Operator, '#pop'),
+ (r'[^\s"()]+', Name.Function, '#pop'),
+ (r'\)', Operator, '#pop'),
+ (r'(?=\n)', Text, '#pop'),
+ (r'\(', Operator, '#push'),
+ (r'"', String, ('#pop', 'string')),
+ (r'[ ]+', Text.Whitespace),
+ ],
+ 'line': [
+ (r'(?<!\w)\$(?!\w)', Operator, 'function'),
+ (r'\(', Operator, 'function'),
+ (r'\)', Operator),
+ (r'\n', Text, '#pop'),
+ (r'"', String, 'string'),
+ (r'[ ]+', Text.Whitespace),
+ (r'[+-]?[\d.]+\b', Number),
+ (r'[^\s"()]+', Name.Variable)
+ ],
+ 'root': [
+ (r'^\n+', Text.Whitespace),
+ default(('line', 'function')),
+ ]
+ }
+
+
+class SlimLexer(ExtendedRegexLexer):
+ """
+ For Slim markup.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Slim'
+ aliases = ['slim']
+ filenames = ['*.slim']
+ mimetypes = ['text/x-slim']
+
+ flags = re.IGNORECASE
+ _dot = r'(?: \|\n(?=.* \|)|.)'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'([ \t]*==?)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ (r'[ \t]+[\w:-]+(?==)', Name.Attribute, 'html-attributes'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
+ (r'(-)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ (r'[ \t]+\n', Punctuation, '#pop:2'),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(.*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'=', Punctuation),
+ (r'"[^"]+"', using(RubyLexer), 'tag'),
+ (r'\'[^\']+\'', using(RubyLexer), 'tag'),
+ (r'\w+', Text, 'tag'),
+ ],
+
+ 'slim-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+ }
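
For reference, the QML, Cirru and Slim lexers restored above are plain RegexLexer/ExtendedRegexLexer subclasses and can be driven through the usual Pygments entry points. A minimal sketch, assuming a standard Pygments installation (the 'qml' alias comes from the class above; the choice of TerminalFormatter is only illustrative):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import TerminalFormatter

    qml_source = 'Rectangle { id: root; width: 320; height: 240 }'
    # 'qml' resolves to QmlLexer through the aliases declared above
    print(highlight(qml_source, get_lexer_by_name('qml'), TerminalFormatter()))
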
diff --git a/contrib/python/Pygments/py3/pygments/lexers/x10.py b/contrib/python/Pygments/py3/pygments/lexers/x10.py
index a5aff36d49..a18e7eb839 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/x10.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/x10.py
@@ -1,65 +1,65 @@
-"""
- pygments.lexers.x10
- ~~~~~~~~~~~~~~~~~~~
-
- Lexers for the X10 programming language.
-
+"""
+ pygments.lexers.x10
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the X10 programming language.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
from pygments.token import Text, Comment, Keyword, String
-
-__all__ = ['X10Lexer']
-
-class X10Lexer(RegexLexer):
- """
- For the X10 language.
-
- .. versionadded:: 0.1
- """
-
- name = 'X10'
- aliases = ['x10', 'xten']
- filenames = ['*.x10']
- mimetypes = ['text/x-x10']
-
- keywords = (
- 'as', 'assert', 'async', 'at', 'athome', 'ateach', 'atomic',
- 'break', 'case', 'catch', 'class', 'clocked', 'continue',
- 'def', 'default', 'do', 'else', 'final', 'finally', 'finish',
- 'for', 'goto', 'haszero', 'here', 'if', 'import', 'in',
- 'instanceof', 'interface', 'isref', 'new', 'offer',
- 'operator', 'package', 'return', 'struct', 'switch', 'throw',
- 'try', 'type', 'val', 'var', 'when', 'while'
- )
-
- types = (
- 'void'
- )
-
- values = (
- 'false', 'null', 'self', 'super', 'this', 'true'
- )
-
- modifiers = (
- 'abstract', 'extends', 'implements', 'native', 'offers',
- 'private', 'property', 'protected', 'public', 'static',
- 'throws', 'transient'
- )
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
- (r'\b(%s)\b' % '|'.join(values), Keyword.Constant),
- (r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration),
+
+__all__ = ['X10Lexer']
+
+class X10Lexer(RegexLexer):
+ """
+ For the X10 language.
+
+ .. versionadded:: 0.1
+ """
+
+ name = 'X10'
+ aliases = ['x10', 'xten']
+ filenames = ['*.x10']
+ mimetypes = ['text/x-x10']
+
+ keywords = (
+ 'as', 'assert', 'async', 'at', 'athome', 'ateach', 'atomic',
+ 'break', 'case', 'catch', 'class', 'clocked', 'continue',
+ 'def', 'default', 'do', 'else', 'final', 'finally', 'finish',
+ 'for', 'goto', 'haszero', 'here', 'if', 'import', 'in',
+ 'instanceof', 'interface', 'isref', 'new', 'offer',
+ 'operator', 'package', 'return', 'struct', 'switch', 'throw',
+ 'try', 'type', 'val', 'var', 'when', 'while'
+ )
+
+ types = (
+ 'void'
+ )
+
+ values = (
+ 'false', 'null', 'self', 'super', 'this', 'true'
+ )
+
+ modifiers = (
+ 'abstract', 'extends', 'implements', 'native', 'offers',
+ 'private', 'property', 'protected', 'public', 'static',
+ 'throws', 'transient'
+ )
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
+ (r'\b(%s)\b' % '|'.join(values), Keyword.Constant),
+ (r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
- (r'.', Text)
- ],
- }
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
+ (r'.', Text)
+ ],
+ }
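
The X10Lexer above builds its keyword, value and modifier rules by joining the tuples into single \b(...)\b alternations. A minimal usage sketch, assuming the module is importable as pygments.lexers.x10 (per the file path in this diff) and using TerminalFormatter purely for illustration:

    from pygments import highlight
    from pygments.lexers.x10 import X10Lexer
    from pygments.formatters import TerminalFormatter

    code = 'val x:Long = 1;\nfinish async { Console.OUT.println(x); }'
    # 'val', 'finish' and 'async' are matched by the keyword alternation above
    print(highlight(code, X10Lexer(), TerminalFormatter()))
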
diff --git a/contrib/python/Pygments/py3/pygments/modeline.py b/contrib/python/Pygments/py3/pygments/modeline.py
index 047d86d6be..8b9d173b7d 100644
--- a/contrib/python/Pygments/py3/pygments/modeline.py
+++ b/contrib/python/Pygments/py3/pygments/modeline.py
@@ -1,43 +1,43 @@
-"""
- pygments.modeline
- ~~~~~~~~~~~~~~~~~
-
- A simple modeline parser (based on pymodeline).
-
+"""
+ pygments.modeline
+ ~~~~~~~~~~~~~~~~~
+
+ A simple modeline parser (based on pymodeline).
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-__all__ = ['get_filetype_from_buffer']
-
-
-modeline_re = re.compile(r'''
- (?: vi | vim | ex ) (?: [<=>]? \d* )? :
- .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
-''', re.VERBOSE)
-
-
-def get_filetype_from_line(l):
- m = modeline_re.search(l)
- if m:
- return m.group(1)
-
-
-def get_filetype_from_buffer(buf, max_lines=5):
- """
- Scan the buffer for modelines and return filetype if one is found.
- """
- lines = buf.splitlines()
- for l in lines[-1:-max_lines-1:-1]:
- ret = get_filetype_from_line(l)
- if ret:
- return ret
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+__all__ = ['get_filetype_from_buffer']
+
+
+modeline_re = re.compile(r'''
+ (?: vi | vim | ex ) (?: [<=>]? \d* )? :
+ .* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
+''', re.VERBOSE)
+
+
+def get_filetype_from_line(l):
+ m = modeline_re.search(l)
+ if m:
+ return m.group(1)
+
+
+def get_filetype_from_buffer(buf, max_lines=5):
+ """
+ Scan the buffer for modelines and return filetype if one is found.
+ """
+ lines = buf.splitlines()
+ for l in lines[-1:-max_lines-1:-1]:
+ ret = get_filetype_from_line(l)
+ if ret:
+ return ret
for i in range(max_lines, -1, -1):
if i < len(lines):
ret = get_filetype_from_line(lines[i])
if ret:
return ret
-
- return None
+
+ return None
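
get_filetype_from_buffer above checks the last max_lines lines (and then the first ones) for a Vim-style modeline and returns the captured filetype. A minimal sketch of the expected behaviour:

    from pygments.modeline import get_filetype_from_buffer

    buf = '#!/bin/sh\necho hello\n# vim: set ft=sh :\n'
    # the trailing modeline matches modeline_re, so 'sh' is returned
    print(get_filetype_from_buffer(buf))  # -> 'sh'
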
diff --git a/contrib/python/Pygments/py3/pygments/plugin.py b/contrib/python/Pygments/py3/pygments/plugin.py
index b1085b7ae4..b80be5e6d1 100644
--- a/contrib/python/Pygments/py3/pygments/plugin.py
+++ b/contrib/python/Pygments/py3/pygments/plugin.py
@@ -1,45 +1,45 @@
-"""
- pygments.plugin
- ~~~~~~~~~~~~~~~
-
- Pygments setuptools plugin interface. The methods defined
- here also work if setuptools isn't installed but they just
- return nothing.
-
- lexer plugins::
-
- [pygments.lexers]
- yourlexer = yourmodule:YourLexer
-
- formatter plugins::
-
- [pygments.formatters]
- yourformatter = yourformatter:YourFormatter
- /.ext = yourformatter:YourFormatter
-
- As you can see, you can define extensions for the formatter
- with a leading slash.
-
- syntax plugins::
-
- [pygments.styles]
- yourstyle = yourstyle:YourStyle
-
- filter plugin::
-
- [pygments.filter]
- yourfilter = yourfilter:YourFilter
-
-
+"""
+ pygments.plugin
+ ~~~~~~~~~~~~~~~
+
+ Pygments setuptools plugin interface. The methods defined
+ here also work if setuptools isn't installed but they just
+ return nothing.
+
+ lexer plugins::
+
+ [pygments.lexers]
+ yourlexer = yourmodule:YourLexer
+
+ formatter plugins::
+
+ [pygments.formatters]
+ yourformatter = yourformatter:YourFormatter
+ /.ext = yourformatter:YourFormatter
+
+ As you can see, you can define extensions for the formatter
+ with a leading slash.
+
+ syntax plugins::
+
+ [pygments.styles]
+ yourstyle = yourstyle:YourStyle
+
+ filter plugin::
+
+ [pygments.filter]
+ yourfilter = yourfilter:YourFilter
+
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-LEXER_ENTRY_POINT = 'pygments.lexers'
-FORMATTER_ENTRY_POINT = 'pygments.formatters'
-STYLE_ENTRY_POINT = 'pygments.styles'
-FILTER_ENTRY_POINT = 'pygments.filters'
-
-
+ :license: BSD, see LICENSE for details.
+"""
+LEXER_ENTRY_POINT = 'pygments.lexers'
+FORMATTER_ENTRY_POINT = 'pygments.formatters'
+STYLE_ENTRY_POINT = 'pygments.styles'
+FILTER_ENTRY_POINT = 'pygments.filters'
+
+
def iter_entry_points(group_name):
try:
import pkg_resources
@@ -49,21 +49,21 @@ def iter_entry_points(group_name):
return pkg_resources.iter_entry_points(group_name)
-def find_plugin_lexers():
+def find_plugin_lexers():
for entrypoint in iter_entry_points(LEXER_ENTRY_POINT):
- yield entrypoint.load()
-
-
-def find_plugin_formatters():
+ yield entrypoint.load()
+
+
+def find_plugin_formatters():
for entrypoint in iter_entry_points(FORMATTER_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_styles():
+ yield entrypoint.name, entrypoint.load()
+
+
+def find_plugin_styles():
for entrypoint in iter_entry_points(STYLE_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
-
-
-def find_plugin_filters():
+ yield entrypoint.name, entrypoint.load()
+
+
+def find_plugin_filters():
for entrypoint in iter_entry_points(FILTER_ENTRY_POINT):
- yield entrypoint.name, entrypoint.load()
+ yield entrypoint.name, entrypoint.load()
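
The entry-point group names above are what third-party distributions use to register extra lexers, formatters, styles and filters; find_plugin_lexers() and friends then yield whatever those entry points load. A hypothetical setup.py sketch for such a plugin (the package, module and class names are invented for illustration):

    from setuptools import setup

    setup(
        name='my-pygments-plugin',
        py_modules=['mylexermodule'],
        entry_points={
            'pygments.lexers': [
                'mylexer = mylexermodule:MyLexer',
            ],
        },
    )

Once such a package is installed, iter_entry_points('pygments.lexers') sees the entry and find_plugin_lexers() yields the loaded MyLexer class.
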
diff --git a/contrib/python/Pygments/py3/pygments/regexopt.py b/contrib/python/Pygments/py3/pygments/regexopt.py
index cb2c8e21a9..88cea0cc9f 100644
--- a/contrib/python/Pygments/py3/pygments/regexopt.py
+++ b/contrib/python/Pygments/py3/pygments/regexopt.py
@@ -1,91 +1,91 @@
-"""
- pygments.regexopt
- ~~~~~~~~~~~~~~~~~
-
- An algorithm that generates optimized regexes for matching long lists of
- literal strings.
-
+"""
+ pygments.regexopt
+ ~~~~~~~~~~~~~~~~~
+
+ An algorithm that generates optimized regexes for matching long lists of
+ literal strings.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from re import escape
-from os.path import commonprefix
-from itertools import groupby
-from operator import itemgetter
-
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from re import escape
+from os.path import commonprefix
+from itertools import groupby
+from operator import itemgetter
+
CS_ESCAPE = re.compile(r'[\[\^\\\-\]]')
-FIRST_ELEMENT = itemgetter(0)
-
-
-def make_charset(letters):
- return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
-
-
-def regex_opt_inner(strings, open_paren):
- """Return a regex that matches any string in the sorted list of strings."""
- close_paren = open_paren and ')' or ''
- # print strings, repr(open_paren)
- if not strings:
- # print '-> nothing left'
- return ''
- first = strings[0]
- if len(strings) == 1:
- # print '-> only 1 string'
- return open_paren + escape(first) + close_paren
- if not first:
- # print '-> first string empty'
- return open_paren + regex_opt_inner(strings[1:], '(?:') \
- + '?' + close_paren
- if len(first) == 1:
- # multiple one-char strings? make a charset
- oneletter = []
- rest = []
- for s in strings:
- if len(s) == 1:
- oneletter.append(s)
- else:
- rest.append(s)
- if len(oneletter) > 1: # do we have more than one oneletter string?
- if rest:
- # print '-> 1-character + rest'
- return open_paren + regex_opt_inner(rest, '') + '|' \
- + make_charset(oneletter) + close_paren
- # print '-> only 1-character'
+FIRST_ELEMENT = itemgetter(0)
+
+
+def make_charset(letters):
+ return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
+
+
+def regex_opt_inner(strings, open_paren):
+ """Return a regex that matches any string in the sorted list of strings."""
+ close_paren = open_paren and ')' or ''
+ # print strings, repr(open_paren)
+ if not strings:
+ # print '-> nothing left'
+ return ''
+ first = strings[0]
+ if len(strings) == 1:
+ # print '-> only 1 string'
+ return open_paren + escape(first) + close_paren
+ if not first:
+ # print '-> first string empty'
+ return open_paren + regex_opt_inner(strings[1:], '(?:') \
+ + '?' + close_paren
+ if len(first) == 1:
+ # multiple one-char strings? make a charset
+ oneletter = []
+ rest = []
+ for s in strings:
+ if len(s) == 1:
+ oneletter.append(s)
+ else:
+ rest.append(s)
+ if len(oneletter) > 1: # do we have more than one oneletter string?
+ if rest:
+ # print '-> 1-character + rest'
+ return open_paren + regex_opt_inner(rest, '') + '|' \
+ + make_charset(oneletter) + close_paren
+ # print '-> only 1-character'
return open_paren + make_charset(oneletter) + close_paren
- prefix = commonprefix(strings)
- if prefix:
- plen = len(prefix)
- # we have a prefix for all strings
- # print '-> prefix:', prefix
- return open_paren + escape(prefix) \
- + regex_opt_inner([s[plen:] for s in strings], '(?:') \
- + close_paren
- # is there a suffix?
- strings_rev = [s[::-1] for s in strings]
- suffix = commonprefix(strings_rev)
- if suffix:
- slen = len(suffix)
- # print '-> suffix:', suffix[::-1]
- return open_paren \
- + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
- + escape(suffix[::-1]) + close_paren
- # recurse on common 1-string prefixes
- # print '-> last resort'
- return open_paren + \
- '|'.join(regex_opt_inner(list(group[1]), '')
- for group in groupby(strings, lambda s: s[0] == first[0])) \
- + close_paren
-
-
-def regex_opt(strings, prefix='', suffix=''):
- """Return a compiled regex that matches any string in the given list.
-
- The strings to match must be literal strings, not regexes. They will be
- regex-escaped.
-
- *prefix* and *suffix* are pre- and appended to the final regex.
- """
- strings = sorted(strings)
- return prefix + regex_opt_inner(strings, '(') + suffix
+ prefix = commonprefix(strings)
+ if prefix:
+ plen = len(prefix)
+ # we have a prefix for all strings
+ # print '-> prefix:', prefix
+ return open_paren + escape(prefix) \
+ + regex_opt_inner([s[plen:] for s in strings], '(?:') \
+ + close_paren
+ # is there a suffix?
+ strings_rev = [s[::-1] for s in strings]
+ suffix = commonprefix(strings_rev)
+ if suffix:
+ slen = len(suffix)
+ # print '-> suffix:', suffix[::-1]
+ return open_paren \
+ + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
+ + escape(suffix[::-1]) + close_paren
+ # recurse on common 1-string prefixes
+ # print '-> last resort'
+ return open_paren + \
+ '|'.join(regex_opt_inner(list(group[1]), '')
+ for group in groupby(strings, lambda s: s[0] == first[0])) \
+ + close_paren
+
+
+def regex_opt(strings, prefix='', suffix=''):
+ """Return a compiled regex that matches any string in the given list.
+
+ The strings to match must be literal strings, not regexes. They will be
+ regex-escaped.
+
+ *prefix* and *suffix* are pre- and appended to the final regex.
+ """
+ strings = sorted(strings)
+ return prefix + regex_opt_inner(strings, '(') + suffix
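
regex_opt() above returns an optimized pattern string (callers compile it themselves), built recursively from common prefixes, suffixes and one-character sets. A small sketch of the intended behaviour; the exact shape of the generated alternation is an implementation detail:

    import re
    from pygments.regexopt import regex_opt

    words = ['for', 'foreach', 'forever']
    pattern = regex_opt(words, prefix=r'\b', suffix=r'\b')
    rx = re.compile(pattern)
    assert rx.match('foreach')        # listed words match as whole words
    assert not rx.match('fortune')    # other words sharing the prefix do not
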
diff --git a/contrib/python/Pygments/py3/pygments/scanner.py b/contrib/python/Pygments/py3/pygments/scanner.py
index 5f32a22c3c..a11b24bdcb 100644
--- a/contrib/python/Pygments/py3/pygments/scanner.py
+++ b/contrib/python/Pygments/py3/pygments/scanner.py
@@ -1,104 +1,104 @@
-"""
- pygments.scanner
- ~~~~~~~~~~~~~~~~
-
- This library implements a regex based scanner. Some languages
- like Pascal are easy to parse but have some keywords that
- depend on the context. Because of this it's impossible to lex
- that just by using a regular expression lexer like the
- `RegexLexer`.
-
- Have a look at the `DelphiLexer` to get an idea of how to use
- this scanner.
-
+"""
+ pygments.scanner
+ ~~~~~~~~~~~~~~~~
+
+ This library implements a regex based scanner. Some languages
+ like Pascal are easy to parse but have some keywords that
+ depend on the context. Because of this it's impossible to lex
+ that just by using a regular expression lexer like the
+ `RegexLexer`.
+
+ Have a look at the `DelphiLexer` to get an idea of how to use
+ this scanner.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import re
-
-
-class EndOfText(RuntimeError):
- """
- Raise if end of text is reached and the user
- tried to call a match function.
- """
-
-
+ :license: BSD, see LICENSE for details.
+"""
+import re
+
+
+class EndOfText(RuntimeError):
+ """
+ Raise if end of text is reached and the user
+ tried to call a match function.
+ """
+
+
class Scanner:
- """
- Simple scanner
-
- All method patterns are regular expression strings (not
- compiled expressions!)
- """
-
- def __init__(self, text, flags=0):
- """
- :param text: The text which should be scanned
- :param flags: default regular expression flags
- """
- self.data = text
- self.data_length = len(text)
- self.start_pos = 0
- self.pos = 0
- self.flags = flags
- self.last = None
- self.match = None
- self._re_cache = {}
-
- def eos(self):
- """`True` if the scanner reached the end of text."""
- return self.pos >= self.data_length
- eos = property(eos, eos.__doc__)
-
- def check(self, pattern):
- """
- Apply `pattern` on the current position and return
- the match object. (Doesn't touch pos). Use this for
- lookahead.
- """
- if self.eos:
- raise EndOfText()
- if pattern not in self._re_cache:
- self._re_cache[pattern] = re.compile(pattern, self.flags)
- return self._re_cache[pattern].match(self.data, self.pos)
-
- def test(self, pattern):
- """Apply a pattern on the current position and check
+ """
+ Simple scanner
+
+ All method patterns are regular expression strings (not
+ compiled expressions!)
+ """
+
+ def __init__(self, text, flags=0):
+ """
+ :param text: The text which should be scanned
+ :param flags: default regular expression flags
+ """
+ self.data = text
+ self.data_length = len(text)
+ self.start_pos = 0
+ self.pos = 0
+ self.flags = flags
+ self.last = None
+ self.match = None
+ self._re_cache = {}
+
+ def eos(self):
+ """`True` if the scanner reached the end of text."""
+ return self.pos >= self.data_length
+ eos = property(eos, eos.__doc__)
+
+ def check(self, pattern):
+ """
+ Apply `pattern` on the current position and return
+ the match object. (Doesn't touch pos). Use this for
+ lookahead.
+ """
+ if self.eos:
+ raise EndOfText()
+ if pattern not in self._re_cache:
+ self._re_cache[pattern] = re.compile(pattern, self.flags)
+ return self._re_cache[pattern].match(self.data, self.pos)
+
+ def test(self, pattern):
+ """Apply a pattern on the current position and check
         if it matches. Doesn't touch pos.
"""
- return self.check(pattern) is not None
-
- def scan(self, pattern):
- """
- Scan the text for the given pattern and update pos/match
- and related fields. The return value is a boolen that
- indicates if the pattern matched. The matched value is
- stored on the instance as ``match``, the last value is
- stored as ``last``. ``start_pos`` is the position of the
- pointer before the pattern was matched, ``pos`` is the
- end position.
- """
- if self.eos:
- raise EndOfText()
- if pattern not in self._re_cache:
- self._re_cache[pattern] = re.compile(pattern, self.flags)
- self.last = self.match
- m = self._re_cache[pattern].match(self.data, self.pos)
- if m is None:
- return False
- self.start_pos = m.start()
- self.pos = m.end()
- self.match = m.group()
- return True
-
- def get_char(self):
- """Scan exactly one char."""
- self.scan('.')
-
- def __repr__(self):
- return '<%s %d/%d>' % (
- self.__class__.__name__,
- self.pos,
- self.data_length
- )
+ return self.check(pattern) is not None
+
+ def scan(self, pattern):
+ """
+ Scan the text for the given pattern and update pos/match
+        and related fields. The return value is a boolean that
+ indicates if the pattern matched. The matched value is
+ stored on the instance as ``match``, the last value is
+ stored as ``last``. ``start_pos`` is the position of the
+ pointer before the pattern was matched, ``pos`` is the
+ end position.
+ """
+ if self.eos:
+ raise EndOfText()
+ if pattern not in self._re_cache:
+ self._re_cache[pattern] = re.compile(pattern, self.flags)
+ self.last = self.match
+ m = self._re_cache[pattern].match(self.data, self.pos)
+ if m is None:
+ return False
+ self.start_pos = m.start()
+ self.pos = m.end()
+ self.match = m.group()
+ return True
+
+ def get_char(self):
+ """Scan exactly one char."""
+ self.scan('.')
+
+ def __repr__(self):
+ return '<%s %d/%d>' % (
+ self.__class__.__name__,
+ self.pos,
+ self.data_length
+ )
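
Scanner above keeps a cursor (pos) into the text; scan() applies a pattern at that position, advances on success and stores the result in match, while check()/test() look ahead without moving. A minimal sketch:

    from pygments.scanner import Scanner

    s = Scanner('program Demo;')
    s.scan(r'\w+')
    print(s.match)   # -> 'program'
    s.scan(r'\s+')
    s.scan(r'\w+')
    print(s.match)   # -> 'Demo'
    print(s.eos)     # -> False, the trailing ';' is still unread
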
diff --git a/contrib/python/Pygments/py3/pygments/sphinxext.py b/contrib/python/Pygments/py3/pygments/sphinxext.py
index 644eb8c338..614fcef173 100644
--- a/contrib/python/Pygments/py3/pygments/sphinxext.py
+++ b/contrib/python/Pygments/py3/pygments/sphinxext.py
@@ -1,155 +1,155 @@
-"""
- pygments.sphinxext
- ~~~~~~~~~~~~~~~~~~
-
- Sphinx extension to generate automatic documentation of lexers,
- formatters and filters.
-
+"""
+ pygments.sphinxext
+ ~~~~~~~~~~~~~~~~~~
+
+ Sphinx extension to generate automatic documentation of lexers,
+ formatters and filters.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import sys
-
-from docutils import nodes
-from docutils.statemachine import ViewList
+ :license: BSD, see LICENSE for details.
+"""
+
+import sys
+
+from docutils import nodes
+from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive
-from sphinx.util.nodes import nested_parse_with_titles
-
-
-MODULEDOC = '''
-.. module:: %s
-
-%s
-%s
-'''
-
-LEXERDOC = '''
-.. class:: %s
-
- :Short names: %s
- :Filenames: %s
- :MIME types: %s
-
- %s
-
-'''
-
-FMTERDOC = '''
-.. class:: %s
-
- :Short names: %s
- :Filenames: %s
-
- %s
-
-'''
-
-FILTERDOC = '''
-.. class:: %s
-
- :Name: %s
-
- %s
-
-'''
-
-
-class PygmentsDoc(Directive):
- """
- A directive to collect all lexers/formatters/filters and generate
- autoclass directives for them.
- """
- has_content = False
- required_arguments = 1
- optional_arguments = 0
- final_argument_whitespace = False
- option_spec = {}
-
- def run(self):
- self.filenames = set()
- if self.arguments[0] == 'lexers':
- out = self.document_lexers()
- elif self.arguments[0] == 'formatters':
- out = self.document_formatters()
- elif self.arguments[0] == 'filters':
- out = self.document_filters()
- else:
- raise Exception('invalid argument for "pygmentsdoc" directive')
- node = nodes.compound()
- vl = ViewList(out.split('\n'), source='')
- nested_parse_with_titles(self.state, vl, node)
- for fn in self.filenames:
- self.state.document.settings.record_dependencies.add(fn)
- return node.children
-
- def document_lexers(self):
- from pygments.lexers._mapping import LEXERS
- out = []
- modules = {}
- moduledocstrings = {}
- for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
- module = data[0]
- mod = __import__(module, None, None, [classname])
- self.filenames.add(mod.__file__)
- cls = getattr(mod, classname)
- if not cls.__doc__:
- print("Warning: %s does not have a docstring." % classname)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- modules.setdefault(module, []).append((
- classname,
- ', '.join(data[2]) or 'None',
- ', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None',
- ', '.join(data[4]) or 'None',
- docstring))
- if module not in moduledocstrings:
- moddoc = mod.__doc__
- if isinstance(moddoc, bytes):
- moddoc = moddoc.decode('utf8')
- moduledocstrings[module] = moddoc
-
- for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
- if moduledocstrings[module] is None:
- raise Exception("Missing docstring for %s" % (module,))
- heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
- out.append(MODULEDOC % (module, heading, '-'*len(heading)))
- for data in lexers:
- out.append(LEXERDOC % data)
-
- return ''.join(out)
-
- def document_formatters(self):
- from pygments.formatters import FORMATTERS
-
- out = []
- for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
- module = data[0]
- mod = __import__(module, None, None, [classname])
- self.filenames.add(mod.__file__)
- cls = getattr(mod, classname)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- heading = cls.__name__
- out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
- ', '.join(data[3]).replace('*', '\\*') or 'None',
- docstring))
- return ''.join(out)
-
- def document_filters(self):
- from pygments.filters import FILTERS
-
- out = []
- for name, cls in FILTERS.items():
- self.filenames.add(sys.modules[cls.__module__].__file__)
- docstring = cls.__doc__
- if isinstance(docstring, bytes):
- docstring = docstring.decode('utf8')
- out.append(FILTERDOC % (cls.__name__, name, docstring))
- return ''.join(out)
-
-
-def setup(app):
- app.add_directive('pygmentsdoc', PygmentsDoc)
+from sphinx.util.nodes import nested_parse_with_titles
+
+
+MODULEDOC = '''
+.. module:: %s
+
+%s
+%s
+'''
+
+LEXERDOC = '''
+.. class:: %s
+
+ :Short names: %s
+ :Filenames: %s
+ :MIME types: %s
+
+ %s
+
+'''
+
+FMTERDOC = '''
+.. class:: %s
+
+ :Short names: %s
+ :Filenames: %s
+
+ %s
+
+'''
+
+FILTERDOC = '''
+.. class:: %s
+
+ :Name: %s
+
+ %s
+
+'''
+
+
+class PygmentsDoc(Directive):
+ """
+ A directive to collect all lexers/formatters/filters and generate
+ autoclass directives for them.
+ """
+ has_content = False
+ required_arguments = 1
+ optional_arguments = 0
+ final_argument_whitespace = False
+ option_spec = {}
+
+ def run(self):
+ self.filenames = set()
+ if self.arguments[0] == 'lexers':
+ out = self.document_lexers()
+ elif self.arguments[0] == 'formatters':
+ out = self.document_formatters()
+ elif self.arguments[0] == 'filters':
+ out = self.document_filters()
+ else:
+ raise Exception('invalid argument for "pygmentsdoc" directive')
+ node = nodes.compound()
+ vl = ViewList(out.split('\n'), source='')
+ nested_parse_with_titles(self.state, vl, node)
+ for fn in self.filenames:
+ self.state.document.settings.record_dependencies.add(fn)
+ return node.children
+
+ def document_lexers(self):
+ from pygments.lexers._mapping import LEXERS
+ out = []
+ modules = {}
+ moduledocstrings = {}
+ for classname, data in sorted(LEXERS.items(), key=lambda x: x[0]):
+ module = data[0]
+ mod = __import__(module, None, None, [classname])
+ self.filenames.add(mod.__file__)
+ cls = getattr(mod, classname)
+ if not cls.__doc__:
+ print("Warning: %s does not have a docstring." % classname)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ modules.setdefault(module, []).append((
+ classname,
+ ', '.join(data[2]) or 'None',
+ ', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None',
+ ', '.join(data[4]) or 'None',
+ docstring))
+ if module not in moduledocstrings:
+ moddoc = mod.__doc__
+ if isinstance(moddoc, bytes):
+ moddoc = moddoc.decode('utf8')
+ moduledocstrings[module] = moddoc
+
+ for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
+ if moduledocstrings[module] is None:
+ raise Exception("Missing docstring for %s" % (module,))
+ heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
+ out.append(MODULEDOC % (module, heading, '-'*len(heading)))
+ for data in lexers:
+ out.append(LEXERDOC % data)
+
+ return ''.join(out)
+
+ def document_formatters(self):
+ from pygments.formatters import FORMATTERS
+
+ out = []
+ for classname, data in sorted(FORMATTERS.items(), key=lambda x: x[0]):
+ module = data[0]
+ mod = __import__(module, None, None, [classname])
+ self.filenames.add(mod.__file__)
+ cls = getattr(mod, classname)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ heading = cls.__name__
+ out.append(FMTERDOC % (heading, ', '.join(data[2]) or 'None',
+ ', '.join(data[3]).replace('*', '\\*') or 'None',
+ docstring))
+ return ''.join(out)
+
+ def document_filters(self):
+ from pygments.filters import FILTERS
+
+ out = []
+ for name, cls in FILTERS.items():
+ self.filenames.add(sys.modules[cls.__module__].__file__)
+ docstring = cls.__doc__
+ if isinstance(docstring, bytes):
+ docstring = docstring.decode('utf8')
+ out.append(FILTERDOC % (cls.__name__, name, docstring))
+ return ''.join(out)
+
+
+def setup(app):
+ app.add_directive('pygmentsdoc', PygmentsDoc)
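
setup() above registers the directive under the name 'pygmentsdoc', and PygmentsDoc.run() accepts one of the arguments 'lexers', 'formatters' or 'filters'. In a Sphinx project the extension would be enabled roughly like this (a sketch; the project layout is an assumption):

    # conf.py of a Sphinx project using the extension
    extensions = [
        'pygments.sphinxext',
    ]

A reST source file then pulls in the generated class documentation with the directive '.. pygmentsdoc:: lexers' (or 'formatters' / 'filters').
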
diff --git a/contrib/python/Pygments/py3/pygments/style.py b/contrib/python/Pygments/py3/pygments/style.py
index 88ccaf263c..b036ea539b 100644
--- a/contrib/python/Pygments/py3/pygments/style.py
+++ b/contrib/python/Pygments/py3/pygments/style.py
@@ -1,15 +1,15 @@
-"""
- pygments.style
- ~~~~~~~~~~~~~~
-
- Basic style object.
-
+"""
+ pygments.style
+ ~~~~~~~~~~~~~~
+
+ Basic style object.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.token import Token, STANDARD_TYPES
-
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.token import Token, STANDARD_TYPES
+
# Default mapping of ansixxx to RGB colors.
_ansimap = {
# dark
@@ -53,78 +53,78 @@ _deprecated_ansicolors = {
'#ansiwhite': 'ansiwhite',
}
ansicolors = set(_ansimap)
-
-
-class StyleMeta(type):
-
- def __new__(mcs, name, bases, dct):
- obj = type.__new__(mcs, name, bases, dct)
- for token in STANDARD_TYPES:
- if token not in obj.styles:
- obj.styles[token] = ''
-
- def colorformat(text):
+
+
+class StyleMeta(type):
+
+ def __new__(mcs, name, bases, dct):
+ obj = type.__new__(mcs, name, bases, dct)
+ for token in STANDARD_TYPES:
+ if token not in obj.styles:
+ obj.styles[token] = ''
+
+ def colorformat(text):
if text in ansicolors:
return text
- if text[0:1] == '#':
- col = text[1:]
- if len(col) == 6:
- return col
- elif len(col) == 3:
+ if text[0:1] == '#':
+ col = text[1:]
+ if len(col) == 6:
+ return col
+ elif len(col) == 3:
return col[0] * 2 + col[1] * 2 + col[2] * 2
- elif text == '':
- return ''
+ elif text == '':
+ return ''
elif text.startswith('var') or text.startswith('calc'):
return text
- assert False, "wrong color format %r" % text
-
- _styles = obj._styles = {}
-
- for ttype in obj.styles:
- for token in ttype.split():
- if token in _styles:
- continue
- ndef = _styles.get(token.parent, None)
- styledefs = obj.styles.get(token, '').split()
- if not ndef or token is None:
- ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
- elif 'noinherit' in styledefs and token is not Token:
- ndef = _styles[Token][:]
- else:
- ndef = ndef[:]
- _styles[token] = ndef
- for styledef in obj.styles.get(token, '').split():
- if styledef == 'noinherit':
- pass
- elif styledef == 'bold':
- ndef[1] = 1
- elif styledef == 'nobold':
- ndef[1] = 0
- elif styledef == 'italic':
- ndef[2] = 1
- elif styledef == 'noitalic':
- ndef[2] = 0
- elif styledef == 'underline':
- ndef[3] = 1
- elif styledef == 'nounderline':
- ndef[3] = 0
- elif styledef[:3] == 'bg:':
- ndef[4] = colorformat(styledef[3:])
- elif styledef[:7] == 'border:':
- ndef[5] = colorformat(styledef[7:])
- elif styledef == 'roman':
- ndef[6] = 1
- elif styledef == 'sans':
- ndef[7] = 1
- elif styledef == 'mono':
- ndef[8] = 1
- else:
- ndef[0] = colorformat(styledef)
-
- return obj
-
- def style_for_token(cls, token):
- t = cls._styles[token]
+ assert False, "wrong color format %r" % text
+
+ _styles = obj._styles = {}
+
+ for ttype in obj.styles:
+ for token in ttype.split():
+ if token in _styles:
+ continue
+ ndef = _styles.get(token.parent, None)
+ styledefs = obj.styles.get(token, '').split()
+ if not ndef or token is None:
+ ndef = ['', 0, 0, 0, '', '', 0, 0, 0]
+ elif 'noinherit' in styledefs and token is not Token:
+ ndef = _styles[Token][:]
+ else:
+ ndef = ndef[:]
+ _styles[token] = ndef
+ for styledef in obj.styles.get(token, '').split():
+ if styledef == 'noinherit':
+ pass
+ elif styledef == 'bold':
+ ndef[1] = 1
+ elif styledef == 'nobold':
+ ndef[1] = 0
+ elif styledef == 'italic':
+ ndef[2] = 1
+ elif styledef == 'noitalic':
+ ndef[2] = 0
+ elif styledef == 'underline':
+ ndef[3] = 1
+ elif styledef == 'nounderline':
+ ndef[3] = 0
+ elif styledef[:3] == 'bg:':
+ ndef[4] = colorformat(styledef[3:])
+ elif styledef[:7] == 'border:':
+ ndef[5] = colorformat(styledef[7:])
+ elif styledef == 'roman':
+ ndef[6] = 1
+ elif styledef == 'sans':
+ ndef[7] = 1
+ elif styledef == 'mono':
+ ndef[8] = 1
+ else:
+ ndef[0] = colorformat(styledef)
+
+ return obj
+
+ def style_for_token(cls, token):
+ t = cls._styles[token]
ansicolor = bgansicolor = None
color = t[0]
if color in _deprecated_ansicolors:
@@ -139,42 +139,42 @@ class StyleMeta(type):
bgansicolor = bgcolor
bgcolor = _ansimap[bgcolor]
- return {
+ return {
'color': color or None,
- 'bold': bool(t[1]),
- 'italic': bool(t[2]),
- 'underline': bool(t[3]),
+ 'bold': bool(t[1]),
+ 'italic': bool(t[2]),
+ 'underline': bool(t[3]),
'bgcolor': bgcolor or None,
- 'border': t[5] or None,
- 'roman': bool(t[6]) or None,
- 'sans': bool(t[7]) or None,
- 'mono': bool(t[8]) or None,
+ 'border': t[5] or None,
+ 'roman': bool(t[6]) or None,
+ 'sans': bool(t[7]) or None,
+ 'mono': bool(t[8]) or None,
'ansicolor': ansicolor,
'bgansicolor': bgansicolor,
- }
-
- def list_styles(cls):
- return list(cls)
-
- def styles_token(cls, ttype):
- return ttype in cls._styles
-
- def __iter__(cls):
- for token in cls._styles:
- yield token, cls.style_for_token(token)
-
- def __len__(cls):
- return len(cls._styles)
-
-
+ }
+
+ def list_styles(cls):
+ return list(cls)
+
+ def styles_token(cls, ttype):
+ return ttype in cls._styles
+
+ def __iter__(cls):
+ for token in cls._styles:
+ yield token, cls.style_for_token(token)
+
+ def __len__(cls):
+ return len(cls._styles)
+
+
class Style(metaclass=StyleMeta):
-
- #: overall background color (``None`` means transparent)
- background_color = '#ffffff'
-
- #: highlight background color
- highlight_color = '#ffffcc'
-
+
+ #: overall background color (``None`` means transparent)
+ background_color = '#ffffff'
+
+ #: highlight background color
+ highlight_color = '#ffffcc'
+
#: line number font color
line_number_color = 'inherit'
@@ -187,8 +187,8 @@ class Style(metaclass=StyleMeta):
#: special line number background color
line_number_special_background_color = '#ffffc0'
- #: Style definitions for individual token types.
- styles = {}
+ #: Style definitions for individual token types.
+ styles = {}
# Attribute for lexers defined within Pygments. If set
# to True, the style is not shown in the style gallery
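
StyleMeta above fills in every standard token type and normalises colour/format strings when a Style subclass is created, so a custom style only needs to declare the rules it cares about. A minimal sketch (the class name and colours are illustrative):

    from pygments.style import Style
    from pygments.token import Comment, Keyword, String

    class DemoStyle(Style):
        background_color = '#f8f8f8'
        styles = {
            Comment: 'italic #888',
            Keyword: 'bold #005',
            String:  'bg:#eee #d20',
        }

    # the metaclass expanded '#005' to a full 6-digit colour and set the flag
    print(DemoStyle.style_for_token(Keyword)['color'])  # -> '000055'
    print(DemoStyle.style_for_token(Keyword)['bold'])   # -> True
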
diff --git a/contrib/python/Pygments/py3/pygments/styles/__init__.py b/contrib/python/Pygments/py3/pygments/styles/__init__.py
index d75de1a521..c844c1bf21 100644
--- a/contrib/python/Pygments/py3/pygments/styles/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/styles/__init__.py
@@ -1,47 +1,47 @@
-"""
- pygments.styles
- ~~~~~~~~~~~~~~~
-
- Contains built-in styles.
-
+"""
+ pygments.styles
+ ~~~~~~~~~~~~~~~
+
+ Contains built-in styles.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.plugin import find_plugin_styles
-from pygments.util import ClassNotFound
-
-
-#: Maps style names to 'submodule::classname'.
-STYLE_MAP = {
- 'default': 'default::DefaultStyle',
- 'emacs': 'emacs::EmacsStyle',
- 'friendly': 'friendly::FriendlyStyle',
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.plugin import find_plugin_styles
+from pygments.util import ClassNotFound
+
+
+#: Maps style names to 'submodule::classname'.
+STYLE_MAP = {
+ 'default': 'default::DefaultStyle',
+ 'emacs': 'emacs::EmacsStyle',
+ 'friendly': 'friendly::FriendlyStyle',
'friendly_grayscale': 'friendly_grayscale::FriendlyGrayscaleStyle',
- 'colorful': 'colorful::ColorfulStyle',
- 'autumn': 'autumn::AutumnStyle',
- 'murphy': 'murphy::MurphyStyle',
- 'manni': 'manni::ManniStyle',
+ 'colorful': 'colorful::ColorfulStyle',
+ 'autumn': 'autumn::AutumnStyle',
+ 'murphy': 'murphy::MurphyStyle',
+ 'manni': 'manni::ManniStyle',
'material': 'material::MaterialStyle',
- 'monokai': 'monokai::MonokaiStyle',
- 'perldoc': 'perldoc::PerldocStyle',
- 'pastie': 'pastie::PastieStyle',
- 'borland': 'borland::BorlandStyle',
- 'trac': 'trac::TracStyle',
- 'native': 'native::NativeStyle',
- 'fruity': 'fruity::FruityStyle',
- 'bw': 'bw::BlackWhiteStyle',
- 'vim': 'vim::VimStyle',
- 'vs': 'vs::VisualStudioStyle',
- 'tango': 'tango::TangoStyle',
- 'rrt': 'rrt::RrtStyle',
- 'xcode': 'xcode::XcodeStyle',
- 'igor': 'igor::IgorStyle',
- 'paraiso-light': 'paraiso_light::ParaisoLightStyle',
- 'paraiso-dark': 'paraiso_dark::ParaisoDarkStyle',
- 'lovelace': 'lovelace::LovelaceStyle',
- 'algol': 'algol::AlgolStyle',
- 'algol_nu': 'algol_nu::Algol_NuStyle',
+ 'monokai': 'monokai::MonokaiStyle',
+ 'perldoc': 'perldoc::PerldocStyle',
+ 'pastie': 'pastie::PastieStyle',
+ 'borland': 'borland::BorlandStyle',
+ 'trac': 'trac::TracStyle',
+ 'native': 'native::NativeStyle',
+ 'fruity': 'fruity::FruityStyle',
+ 'bw': 'bw::BlackWhiteStyle',
+ 'vim': 'vim::VimStyle',
+ 'vs': 'vs::VisualStudioStyle',
+ 'tango': 'tango::TangoStyle',
+ 'rrt': 'rrt::RrtStyle',
+ 'xcode': 'xcode::XcodeStyle',
+ 'igor': 'igor::IgorStyle',
+ 'paraiso-light': 'paraiso_light::ParaisoLightStyle',
+ 'paraiso-dark': 'paraiso_dark::ParaisoDarkStyle',
+ 'lovelace': 'lovelace::LovelaceStyle',
+ 'algol': 'algol::AlgolStyle',
+ 'algol_nu': 'algol_nu::Algol_NuStyle',
'arduino': 'arduino::ArduinoStyle',
'rainbow_dash': 'rainbow_dash::RainbowDashStyle',
'abap': 'abap::AbapStyle',
@@ -58,36 +58,36 @@ STYLE_MAP = {
'dracula': 'dracula::DraculaStyle',
'one-dark': 'onedark::OneDarkStyle',
'lilypond' : 'lilypond::LilyPondStyle',
-}
-
-
-def get_style_by_name(name):
- if name in STYLE_MAP:
- mod, cls = STYLE_MAP[name].split('::')
- builtin = "yes"
- else:
- for found_name, style in find_plugin_styles():
- if name == found_name:
- return style
- # perhaps it got dropped into our styles package
- builtin = ""
- mod = name
- cls = name.title() + "Style"
-
- try:
- mod = __import__('pygments.styles.' + mod, None, None, [cls])
- except ImportError:
- raise ClassNotFound("Could not find style module %r" % mod +
- (builtin and ", though it should be builtin") + ".")
- try:
- return getattr(mod, cls)
- except AttributeError:
- raise ClassNotFound("Could not find style class %r in style module." % cls)
-
-
-def get_all_styles():
+}
+
+
+def get_style_by_name(name):
+ if name in STYLE_MAP:
+ mod, cls = STYLE_MAP[name].split('::')
+ builtin = "yes"
+ else:
+ for found_name, style in find_plugin_styles():
+ if name == found_name:
+ return style
+ # perhaps it got dropped into our styles package
+ builtin = ""
+ mod = name
+ cls = name.title() + "Style"
+
+ try:
+ mod = __import__('pygments.styles.' + mod, None, None, [cls])
+ except ImportError:
+ raise ClassNotFound("Could not find style module %r" % mod +
+ (builtin and ", though it should be builtin") + ".")
+ try:
+ return getattr(mod, cls)
+ except AttributeError:
+ raise ClassNotFound("Could not find style class %r in style module." % cls)
+
+
+def get_all_styles():
"""Return a generator for all styles by name,
- both builtin and plugin."""
+ both builtin and plugin."""
yield from STYLE_MAP
- for name, _ in find_plugin_styles():
- yield name
+ for name, _ in find_plugin_styles():
+ yield name
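
get_style_by_name() above resolves the short names in STYLE_MAP (falling back to plugin styles or a module lookup), while get_all_styles() lists every available name. A minimal sketch:

    from pygments.styles import get_style_by_name, get_all_styles

    monokai = get_style_by_name('monokai')   # the MonokaiStyle class
    print(monokai.background_color)
    print(sorted(get_all_styles())[:3])      # built-in plus plugin style names
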
diff --git a/contrib/python/Pygments/py3/pygments/styles/algol.py b/contrib/python/Pygments/py3/pygments/styles/algol.py
index f293b0eca9..688eddf153 100644
--- a/contrib/python/Pygments/py3/pygments/styles/algol.py
+++ b/contrib/python/Pygments/py3/pygments/styles/algol.py
@@ -1,62 +1,62 @@
-"""
- pygments.styles.algol
- ~~~~~~~~~~~~~~~~~~~~~
-
- Algol publication style.
-
- This style renders source code for publication of algorithms in
- scientific papers and academic texts, where its format is frequently used.
-
- It is based on the style of the revised Algol-60 language report[1].
-
- o No colours, only black, white and shades of grey are used.
- o Keywords are rendered in lowercase underline boldface.
- o Builtins are rendered in lowercase boldface italic.
- o Docstrings and pragmas are rendered in dark grey boldface.
- o Library identifiers are rendered in dark grey boldface italic.
- o Comments are rendered in grey italic.
-
- To render keywords without underlining, refer to the `Algol_Nu` style.
-
- For lowercase conversion of keywords and builtins in languages where
- these are not or might not be lowercase, a supporting lexer is required.
- The Algol and Modula-2 lexers automatically convert to lowercase whenever
- this style is selected.
-
- [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
-
+"""
+ pygments.styles.algol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Algol publication style.
+
+ This style renders source code for publication of algorithms in
+ scientific papers and academic texts, where its format is frequently used.
+
+ It is based on the style of the revised Algol-60 language report[1].
+
+ o No colours, only black, white and shades of grey are used.
+ o Keywords are rendered in lowercase underline boldface.
+ o Builtins are rendered in lowercase boldface italic.
+ o Docstrings and pragmas are rendered in dark grey boldface.
+ o Library identifiers are rendered in dark grey boldface italic.
+ o Comments are rendered in grey italic.
+
+ To render keywords without underlining, refer to the `Algol_Nu` style.
+
+ For lowercase conversion of keywords and builtins in languages where
+ these are not or might not be lowercase, a supporting lexer is required.
+ The Algol and Modula-2 lexers automatically convert to lowercase whenever
+ this style is selected.
+
+ [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Operator
-
-
-class AlgolStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "italic #888",
- Comment.Preproc: "bold noitalic #888",
- Comment.Special: "bold noitalic #888",
-
- Keyword: "underline bold",
- Keyword.Declaration: "italic",
-
- Name.Builtin: "bold italic",
- Name.Builtin.Pseudo: "bold italic",
- Name.Namespace: "bold italic #666",
- Name.Class: "bold italic #666",
- Name.Function: "bold italic #666",
- Name.Variable: "bold italic #666",
- Name.Constant: "bold italic #666",
-
- Operator.Word: "bold",
-
- String: "italic #666",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Operator
+
+
+class AlgolStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #888",
+ Comment.Preproc: "bold noitalic #888",
+ Comment.Special: "bold noitalic #888",
+
+ Keyword: "underline bold",
+ Keyword.Declaration: "italic",
+
+ Name.Builtin: "bold italic",
+ Name.Builtin.Pseudo: "bold italic",
+ Name.Namespace: "bold italic #666",
+ Name.Class: "bold italic #666",
+ Name.Function: "bold italic #666",
+ Name.Variable: "bold italic #666",
+ Name.Constant: "bold italic #666",
+
+ Operator.Word: "bold",
+
+ String: "italic #666",
+
+ Error: "border:#FF0000"
+ }
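
Every style module touched by this patch follows the same shape: a Style subclass with class-level attributes (background_color, default_style, ...) and a styles dict mapping token types to style-definition strings such as 'bold', 'italic', '#rrggbb', 'bg:#rrggbb' or 'border:#rrggbb'. As a hedged sketch of that pattern, a minimal custom style might look like this (the class name and colours are invented for illustration):

from pygments.style import Style
from pygments.token import Comment, Keyword, String

class MiniStyle(Style):
    # White page background; unlisted tokens get no extra styling.
    background_color = "#ffffff"
    default_style = ""

    styles = {
        Comment: "italic #888",   # grey italic comments
        Keyword: "bold #008000",  # bold green keywords
        String:  "#BA2121",       # dark red string literals
    }
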
diff --git a/contrib/python/Pygments/py3/pygments/styles/algol_nu.py b/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
index fe2846693b..b8737b27b0 100644
--- a/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
+++ b/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
@@ -1,62 +1,62 @@
-"""
- pygments.styles.algol_nu
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Algol publication style without underlining of keywords.
-
- This style renders source code for publication of algorithms in
- scientific papers and academic texts, where its format is frequently used.
-
- It is based on the style of the revised Algol-60 language report[1].
-
- o No colours, only black, white and shades of grey are used.
- o Keywords are rendered in lowercase boldface.
- o Builtins are rendered in lowercase boldface italic.
- o Docstrings and pragmas are rendered in dark grey boldface.
- o Library identifiers are rendered in dark grey boldface italic.
- o Comments are rendered in grey italic.
-
- To render keywords with underlining, refer to the `Algol` style.
-
- For lowercase conversion of keywords and builtins in languages where
- these are not or might not be lowercase, a supporting lexer is required.
- The Algol and Modula-2 lexers automatically convert to lowercase whenever
- this style is selected.
-
- [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
-
+"""
+ pygments.styles.algol_nu
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Algol publication style without underlining of keywords.
+
+ This style renders source code for publication of algorithms in
+ scientific papers and academic texts, where its format is frequently used.
+
+ It is based on the style of the revised Algol-60 language report[1].
+
+ o No colours, only black, white and shades of grey are used.
+ o Keywords are rendered in lowercase boldface.
+ o Builtins are rendered in lowercase boldface italic.
+ o Docstrings and pragmas are rendered in dark grey boldface.
+ o Library identifiers are rendered in dark grey boldface italic.
+ o Comments are rendered in grey italic.
+
+ To render keywords with underlining, refer to the `Algol` style.
+
+ For lowercase conversion of keywords and builtins in languages where
+ these are not or might not be lowercase, a supporting lexer is required.
+ The Algol and Modula-2 lexers automatically convert to lowercase whenever
+ this style is selected.
+
+ [1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Operator
-
-
-class Algol_NuStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "italic #888",
- Comment.Preproc: "bold noitalic #888",
- Comment.Special: "bold noitalic #888",
-
- Keyword: "bold",
- Keyword.Declaration: "italic",
-
- Name.Builtin: "bold italic",
- Name.Builtin.Pseudo: "bold italic",
- Name.Namespace: "bold italic #666",
- Name.Class: "bold italic #666",
- Name.Function: "bold italic #666",
- Name.Variable: "bold italic #666",
- Name.Constant: "bold italic #666",
-
- Operator.Word: "bold",
-
- String: "italic #666",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Operator
+
+
+class Algol_NuStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic #888",
+ Comment.Preproc: "bold noitalic #888",
+ Comment.Special: "bold noitalic #888",
+
+ Keyword: "bold",
+ Keyword.Declaration: "italic",
+
+ Name.Builtin: "bold italic",
+ Name.Builtin.Pseudo: "bold italic",
+ Name.Namespace: "bold italic #666",
+ Name.Class: "bold italic #666",
+ Name.Function: "bold italic #666",
+ Name.Variable: "bold italic #666",
+ Name.Constant: "bold italic #666",
+
+ Operator.Word: "bold",
+
+ String: "italic #666",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/arduino.py b/contrib/python/Pygments/py3/pygments/styles/arduino.py
index 4dfe0f3ce2..26f0ee15b1 100644
--- a/contrib/python/Pygments/py3/pygments/styles/arduino.py
+++ b/contrib/python/Pygments/py3/pygments/styles/arduino.py
@@ -1,97 +1,97 @@
-"""
- pygments.styles.arduino
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Arduino® Syntax highlighting style.
-
+"""
+ pygments.styles.arduino
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Arduino® Syntax highlighting style.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
-
-
-class ArduinoStyle(Style):
+
+
+class ArduinoStyle(Style):
"""
- The Arduino® language style. This style is designed to highlight the
- Arduino source code, so expect the best results with it.
- """
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Whitespace: "", # class: 'w'
- Error: "#a61717", # class: 'err'
-
- Comment: "#95a5a6", # class: 'c'
- Comment.Multiline: "", # class: 'cm'
+ The Arduino® language style. This style is designed to highlight the
+ Arduino source code, so expect the best results with it.
+ """
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Whitespace: "", # class: 'w'
+ Error: "#a61717", # class: 'err'
+
+ Comment: "#95a5a6", # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
Comment.Preproc: "#728E00", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: "#728E00", # class: 'k'
- Keyword.Constant: "#00979D", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: "", # class: 'kn'
- Keyword.Pseudo: "#00979D", # class: 'kp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: "#728E00", # class: 'k'
+ Keyword.Constant: "#00979D", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: "", # class: 'kn'
+ Keyword.Pseudo: "#00979D", # class: 'kp'
Keyword.Reserved: "#00979D", # class: 'kr'
- Keyword.Type: "#00979D", # class: 'kt'
-
+ Keyword.Type: "#00979D", # class: 'kt'
+
Operator: "#728E00", # class: 'o'
- Operator.Word: "", # class: 'ow'
-
- Name: "#434f54", # class: 'n'
- Name.Attribute: "", # class: 'na'
+ Operator.Word: "", # class: 'ow'
+
+ Name: "#434f54", # class: 'n'
+ Name.Attribute: "", # class: 'na'
Name.Builtin: "#728E00", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: "", # class: 'nc'
- Name.Constant: "", # class: 'no'
- Name.Decorator: "", # class: 'nd'
- Name.Entity: "", # class: 'ni'
- Name.Exception: "", # class: 'ne'
- Name.Function: "#D35400", # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: "", # class: 'nn'
- Name.Other: "#728E00", # class: 'nx'
- Name.Tag: "", # class: 'nt'
- Name.Variable: "", # class: 'nv'
- Name.Variable.Class: "", # class: 'vc'
- Name.Variable.Global: "", # class: 'vg'
- Name.Variable.Instance: "", # class: 'vi'
-
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: "", # class: 'nc'
+ Name.Constant: "", # class: 'no'
+ Name.Decorator: "", # class: 'nd'
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: "", # class: 'ne'
+ Name.Function: "#D35400", # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: "", # class: 'nn'
+ Name.Other: "#728E00", # class: 'nx'
+ Name.Tag: "", # class: 'nt'
+ Name.Variable: "", # class: 'nv'
+ Name.Variable.Class: "", # class: 'vc'
+ Name.Variable.Global: "", # class: 'vg'
+ Name.Variable.Instance: "", # class: 'vi'
+
Number: "#8A7B52", # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- String: "#7F8C8D", # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: "", # class: 'sc'
- String.Doc: "", # class: 'sd'
- String.Double: "", # class: 's2'
- String.Escape: "", # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: "", # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: "", # class: 'gd',
- Generic.Emph: "", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "", # class: 'gh'
- Generic.Inserted: "", # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "", # class: 'gp'
- Generic.Strong: "", # class: 'gs'
- Generic.Subheading: "", # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ String: "#7F8C8D", # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: "", # class: 'sc'
+ String.Doc: "", # class: 'sd'
+ String.Double: "", # class: 's2'
+ String.Escape: "", # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: "", # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: "", # class: 'gd',
+ Generic.Emph: "", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "", # class: 'gh'
+ Generic.Inserted: "", # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "", # class: 'gp'
+ Generic.Strong: "", # class: 'gs'
+ Generic.Subheading: "", # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/autumn.py b/contrib/python/Pygments/py3/pygments/styles/autumn.py
index 85fd8982a9..65a8877eca 100644
--- a/contrib/python/Pygments/py3/pygments/styles/autumn.py
+++ b/contrib/python/Pygments/py3/pygments/styles/autumn.py
@@ -1,64 +1,64 @@
-"""
- pygments.styles.autumn
- ~~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by the terminal highlighting style.
-
+"""
+ pygments.styles.autumn
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by the terminal highlighting style.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class AutumnStyle(Style):
- """
- A colorful style, inspired by the terminal highlighting style.
- """
-
- default_style = ""
-
- styles = {
- Whitespace: '#bbbbbb',
-
- Comment: 'italic #aaaaaa',
- Comment.Preproc: 'noitalic #4c8317',
- Comment.Special: 'italic #0000aa',
-
- Keyword: '#0000aa',
- Keyword.Type: '#00aaaa',
-
- Operator.Word: '#0000aa',
-
- Name.Builtin: '#00aaaa',
- Name.Function: '#00aa00',
- Name.Class: 'underline #00aa00',
- Name.Namespace: 'underline #00aaaa',
- Name.Variable: '#aa0000',
- Name.Constant: '#aa0000',
- Name.Entity: 'bold #800',
- Name.Attribute: '#1e90ff',
- Name.Tag: 'bold #1e90ff',
- Name.Decorator: '#888888',
-
- String: '#aa5500',
- String.Symbol: '#0000aa',
- String.Regex: '#009999',
-
- Number: '#009999',
-
- Generic.Heading: 'bold #000080',
- Generic.Subheading: 'bold #800080',
- Generic.Deleted: '#aa0000',
- Generic.Inserted: '#00aa00',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: '#F00 bg:#FAA'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class AutumnStyle(Style):
+ """
+ A colorful style, inspired by the terminal highlighting style.
+ """
+
+ default_style = ""
+
+ styles = {
+ Whitespace: '#bbbbbb',
+
+ Comment: 'italic #aaaaaa',
+ Comment.Preproc: 'noitalic #4c8317',
+ Comment.Special: 'italic #0000aa',
+
+ Keyword: '#0000aa',
+ Keyword.Type: '#00aaaa',
+
+ Operator.Word: '#0000aa',
+
+ Name.Builtin: '#00aaaa',
+ Name.Function: '#00aa00',
+ Name.Class: 'underline #00aa00',
+ Name.Namespace: 'underline #00aaaa',
+ Name.Variable: '#aa0000',
+ Name.Constant: '#aa0000',
+ Name.Entity: 'bold #800',
+ Name.Attribute: '#1e90ff',
+ Name.Tag: 'bold #1e90ff',
+ Name.Decorator: '#888888',
+
+ String: '#aa5500',
+ String.Symbol: '#0000aa',
+ String.Regex: '#009999',
+
+ Number: '#009999',
+
+ Generic.Heading: 'bold #000080',
+ Generic.Subheading: 'bold #800080',
+ Generic.Deleted: '#aa0000',
+ Generic.Inserted: '#00aa00',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: '#F00 bg:#FAA'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/borland.py b/contrib/python/Pygments/py3/pygments/styles/borland.py
index 427e149f2f..5f6183721f 100644
--- a/contrib/python/Pygments/py3/pygments/styles/borland.py
+++ b/contrib/python/Pygments/py3/pygments/styles/borland.py
@@ -1,50 +1,50 @@
-"""
- pygments.styles.borland
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the style used in the Borland IDEs.
-
+"""
+ pygments.styles.borland
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the style used in the Borland IDEs.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class BorlandStyle(Style):
- """
- Style similar to the style used in the Borland IDEs.
- """
-
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
-
- Comment: 'italic #008800',
- Comment.Preproc: 'noitalic #008080',
- Comment.Special: 'noitalic bold',
-
- String: '#0000FF',
- String.Char: '#800080',
- Number: '#0000FF',
- Keyword: 'bold #000080',
- Operator.Word: 'bold',
- Name.Tag: 'bold #000080',
- Name.Attribute: '#FF0000',
-
- Generic.Heading: '#999999',
- Generic.Subheading: '#aaaaaa',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class BorlandStyle(Style):
+ """
+ Style similar to the style used in the Borland IDEs.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+
+ Comment: 'italic #008800',
+ Comment.Preproc: 'noitalic #008080',
+ Comment.Special: 'noitalic bold',
+
+ String: '#0000FF',
+ String.Char: '#800080',
+ Number: '#0000FF',
+ Keyword: 'bold #000080',
+ Operator.Word: 'bold',
+ Name.Tag: 'bold #000080',
+ Name.Attribute: '#FF0000',
+
+ Generic.Heading: '#999999',
+ Generic.Subheading: '#aaaaaa',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/bw.py b/contrib/python/Pygments/py3/pygments/styles/bw.py
index 1b38538761..42de46b201 100644
--- a/contrib/python/Pygments/py3/pygments/styles/bw.py
+++ b/contrib/python/Pygments/py3/pygments/styles/bw.py
@@ -1,48 +1,48 @@
-"""
- pygments.styles.bw
- ~~~~~~~~~~~~~~~~~~
-
- Simple black/white only style.
-
+"""
+ pygments.styles.bw
+ ~~~~~~~~~~~~~~~~~~
+
+ Simple black/white only style.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Operator, Generic
-
-
-class BlackWhiteStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "italic",
- Comment.Preproc: "noitalic",
-
- Keyword: "bold",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold",
-
- Operator.Word: "bold",
-
- Name.Class: "bold",
- Name.Namespace: "bold",
- Name.Exception: "bold",
- Name.Entity: "bold",
- Name.Tag: "bold",
-
- String: "italic",
- String.Interpol: "bold",
- String.Escape: "bold",
-
- Generic.Heading: "bold",
- Generic.Subheading: "bold",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Operator, Generic
+
+
+class BlackWhiteStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "italic",
+ Comment.Preproc: "noitalic",
+
+ Keyword: "bold",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "nobold",
+
+ Operator.Word: "bold",
+
+ Name.Class: "bold",
+ Name.Namespace: "bold",
+ Name.Exception: "bold",
+ Name.Entity: "bold",
+ Name.Tag: "bold",
+
+ String: "italic",
+ String.Interpol: "bold",
+ String.Escape: "bold",
+
+ Generic.Heading: "bold",
+ Generic.Subheading: "bold",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/colorful.py b/contrib/python/Pygments/py3/pygments/styles/colorful.py
index a67b8e3e61..c3815afea9 100644
--- a/contrib/python/Pygments/py3/pygments/styles/colorful.py
+++ b/contrib/python/Pygments/py3/pygments/styles/colorful.py
@@ -1,80 +1,80 @@
-"""
- pygments.styles.colorful
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by CodeRay.
-
+"""
+ pygments.styles.colorful
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by CodeRay.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class ColorfulStyle(Style):
- """
- A colorful style, inspired by CodeRay.
- """
-
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
-
- Comment: "#888",
- Comment.Preproc: "#579",
- Comment.Special: "bold #cc0000",
-
- Keyword: "bold #080",
- Keyword.Pseudo: "#038",
- Keyword.Type: "#339",
-
- Operator: "#333",
- Operator.Word: "bold #000",
-
- Name.Builtin: "#007020",
- Name.Function: "bold #06B",
- Name.Class: "bold #B06",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "bold #F00",
- Name.Variable: "#963",
- Name.Variable.Instance: "#33B",
- Name.Variable.Class: "#369",
- Name.Variable.Global: "bold #d70",
- Name.Constant: "bold #036",
- Name.Label: "bold #970",
- Name.Entity: "bold #800",
- Name.Attribute: "#00C",
- Name.Tag: "#070",
- Name.Decorator: "bold #555",
-
- String: "bg:#fff0f0",
- String.Char: "#04D bg:",
- String.Doc: "#D42 bg:",
- String.Interpol: "bg:#eee",
- String.Escape: "bold #666",
- String.Regex: "bg:#fff0ff #000",
- String.Symbol: "#A60 bg:",
- String.Other: "#D20",
-
- Number: "bold #60E",
- Number.Integer: "bold #00D",
- Number.Float: "bold #60E",
- Number.Hex: "bold #058",
- Number.Oct: "bold #40E",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "#F00 bg:#FAA"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class ColorfulStyle(Style):
+ """
+ A colorful style, inspired by CodeRay.
+ """
+
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+
+ Comment: "#888",
+ Comment.Preproc: "#579",
+ Comment.Special: "bold #cc0000",
+
+ Keyword: "bold #080",
+ Keyword.Pseudo: "#038",
+ Keyword.Type: "#339",
+
+ Operator: "#333",
+ Operator.Word: "bold #000",
+
+ Name.Builtin: "#007020",
+ Name.Function: "bold #06B",
+ Name.Class: "bold #B06",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "bold #F00",
+ Name.Variable: "#963",
+ Name.Variable.Instance: "#33B",
+ Name.Variable.Class: "#369",
+ Name.Variable.Global: "bold #d70",
+ Name.Constant: "bold #036",
+ Name.Label: "bold #970",
+ Name.Entity: "bold #800",
+ Name.Attribute: "#00C",
+ Name.Tag: "#070",
+ Name.Decorator: "bold #555",
+
+ String: "bg:#fff0f0",
+ String.Char: "#04D bg:",
+ String.Doc: "#D42 bg:",
+ String.Interpol: "bg:#eee",
+ String.Escape: "bold #666",
+ String.Regex: "bg:#fff0ff #000",
+ String.Symbol: "#A60 bg:",
+ String.Other: "#D20",
+
+ Number: "bold #60E",
+ Number.Integer: "bold #00D",
+ Number.Float: "bold #60E",
+ Number.Hex: "bold #058",
+ Number.Oct: "bold #40E",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "#F00 bg:#FAA"
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/default.py b/contrib/python/Pygments/py3/pygments/styles/default.py
index c69325f432..95b186bdd7 100644
--- a/contrib/python/Pygments/py3/pygments/styles/default.py
+++ b/contrib/python/Pygments/py3/pygments/styles/default.py
@@ -1,72 +1,72 @@
-"""
- pygments.styles.default
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- The default highlighting style.
-
+"""
+ pygments.styles.default
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ The default highlighting style.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class DefaultStyle(Style):
- """
- The default style (inspired by Emacs 22).
- """
-
- background_color = "#f8f8f8"
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class DefaultStyle(Style):
+ """
+ The default style (inspired by Emacs 22).
+ """
+
+ background_color = "#f8f8f8"
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
Comment: "italic #3D7B7B",
Comment.Preproc: "noitalic #9C6500",
-
- #Keyword: "bold #AA22FF",
- Keyword: "bold #008000",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold #B00040",
-
- Operator: "#666666",
- Operator.Word: "bold #AA22FF",
-
- Name.Builtin: "#008000",
- Name.Function: "#0000FF",
- Name.Class: "bold #0000FF",
- Name.Namespace: "bold #0000FF",
+
+ #Keyword: "bold #AA22FF",
+ Keyword: "bold #008000",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "nobold #B00040",
+
+ Operator: "#666666",
+ Operator.Word: "bold #AA22FF",
+
+ Name.Builtin: "#008000",
+ Name.Function: "#0000FF",
+ Name.Class: "bold #0000FF",
+ Name.Namespace: "bold #0000FF",
Name.Exception: "bold #CB3F38",
- Name.Variable: "#19177C",
- Name.Constant: "#880000",
+ Name.Variable: "#19177C",
+ Name.Constant: "#880000",
Name.Label: "#767600",
Name.Entity: "bold #717171",
Name.Attribute: "#687822",
- Name.Tag: "bold #008000",
- Name.Decorator: "#AA22FF",
-
- String: "#BA2121",
- String.Doc: "italic",
+ Name.Tag: "bold #008000",
+ Name.Decorator: "#AA22FF",
+
+ String: "#BA2121",
+ String.Doc: "italic",
String.Interpol: "bold #A45A77",
String.Escape: "bold #AA5D1F",
String.Regex: "#A45A77",
- #String.Symbol: "#B8860B",
- String.Symbol: "#19177C",
- String.Other: "#008000",
- Number: "#666666",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
+ #String.Symbol: "#B8860B",
+ String.Symbol: "#19177C",
+ String.Other: "#008000",
+ Number: "#666666",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
Generic.Inserted: "#008400",
Generic.Error: "#E40000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
Generic.Output: "#717171",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
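
Token styles resolve along the token hierarchy: a subtype such as Comment.Single inherits from Comment unless the dict above lists it explicitly, which is why only a handful of subtypes are overridden. A short inspection sketch (assuming a standard Pygments installation; the exact dict contents printed depend on the installed version):

from pygments.styles.default import DefaultStyle
from pygments.token import Comment

# Explicit entry: Comment.Preproc is listed above, so it gets its own colour.
print(DefaultStyle.style_for_token(Comment.Preproc))

# Inherited entry: Comment.Single is not listed and falls back to Comment.
print(DefaultStyle.style_for_token(Comment.Single))
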
diff --git a/contrib/python/Pygments/py3/pygments/styles/emacs.py b/contrib/python/Pygments/py3/pygments/styles/emacs.py
index e9cbc33da8..317e454978 100644
--- a/contrib/python/Pygments/py3/pygments/styles/emacs.py
+++ b/contrib/python/Pygments/py3/pygments/styles/emacs.py
@@ -1,71 +1,71 @@
-"""
- pygments.styles.emacs
- ~~~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by Emacs.
-
+"""
+ pygments.styles.emacs
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ A highlighting style for Pygments, inspired by Emacs.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class EmacsStyle(Style):
- """
- A style inspired by Emacs 22.
- """
-
- background_color = "#f8f8f8"
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #008800",
- Comment.Preproc: "noitalic",
- Comment.Special: "noitalic bold",
-
- Keyword: "bold #AA22FF",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "bold #00BB00",
-
- Operator: "#666666",
- Operator.Word: "bold #AA22FF",
-
- Name.Builtin: "#AA22FF",
- Name.Function: "#00A000",
- Name.Class: "#0000FF",
- Name.Namespace: "bold #0000FF",
- Name.Exception: "bold #D2413A",
- Name.Variable: "#B8860B",
- Name.Constant: "#880000",
- Name.Label: "#A0A000",
- Name.Entity: "bold #999999",
- Name.Attribute: "#BB4444",
- Name.Tag: "bold #008000",
- Name.Decorator: "#AA22FF",
-
- String: "#BB4444",
- String.Doc: "italic",
- String.Interpol: "bold #BB6688",
- String.Escape: "bold #BB6622",
- String.Regex: "#BB6688",
- String.Symbol: "#B8860B",
- String.Other: "#008000",
- Number: "#666666",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class EmacsStyle(Style):
+ """
+ A style inspired by Emacs 22.
+ """
+
+ background_color = "#f8f8f8"
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+ Comment: "italic #008800",
+ Comment.Preproc: "noitalic",
+ Comment.Special: "noitalic bold",
+
+ Keyword: "bold #AA22FF",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "bold #00BB00",
+
+ Operator: "#666666",
+ Operator.Word: "bold #AA22FF",
+
+ Name.Builtin: "#AA22FF",
+ Name.Function: "#00A000",
+ Name.Class: "#0000FF",
+ Name.Namespace: "bold #0000FF",
+ Name.Exception: "bold #D2413A",
+ Name.Variable: "#B8860B",
+ Name.Constant: "#880000",
+ Name.Label: "#A0A000",
+ Name.Entity: "bold #999999",
+ Name.Attribute: "#BB4444",
+ Name.Tag: "bold #008000",
+ Name.Decorator: "#AA22FF",
+
+ String: "#BB4444",
+ String.Doc: "italic",
+ String.Interpol: "bold #BB6688",
+ String.Escape: "bold #BB6622",
+ String.Regex: "#BB6688",
+ String.Symbol: "#B8860B",
+ String.Other: "#008000",
+ Number: "#666666",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/friendly.py b/contrib/python/Pygments/py3/pygments/styles/friendly.py
index 82f2479475..c354770760 100644
--- a/contrib/python/Pygments/py3/pygments/styles/friendly.py
+++ b/contrib/python/Pygments/py3/pygments/styles/friendly.py
@@ -1,72 +1,72 @@
-"""
- pygments.styles.friendly
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- A modern style based on the VIM pyte theme.
-
+"""
+ pygments.styles.friendly
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ A modern style based on the VIM pyte theme.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class FriendlyStyle(Style):
- """
- A modern style based on the VIM pyte theme.
- """
-
- background_color = "#f0f0f0"
- default_style = ""
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class FriendlyStyle(Style):
+ """
+ A modern style based on the VIM pyte theme.
+ """
+
+ background_color = "#f0f0f0"
+ default_style = ""
line_number_color = "#666666"
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "italic #60a0b0",
- Comment.Preproc: "noitalic #007020",
- Comment.Special: "noitalic bg:#fff0f0",
-
- Keyword: "bold #007020",
- Keyword.Pseudo: "nobold",
- Keyword.Type: "nobold #902000",
-
- Operator: "#666666",
- Operator.Word: "bold #007020",
-
- Name.Builtin: "#007020",
- Name.Function: "#06287e",
- Name.Class: "bold #0e84b5",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "#007020",
- Name.Variable: "#bb60d5",
- Name.Constant: "#60add5",
- Name.Label: "bold #002070",
- Name.Entity: "bold #d55537",
- Name.Attribute: "#4070a0",
- Name.Tag: "bold #062873",
- Name.Decorator: "bold #555555",
-
- String: "#4070a0",
- String.Doc: "italic",
- String.Interpol: "italic #70a0d0",
- String.Escape: "bold #4070a0",
- String.Regex: "#235388",
- String.Symbol: "#517918",
- String.Other: "#c65d09",
- Number: "#40a070",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+
+ styles = {
+ Whitespace: "#bbbbbb",
+ Comment: "italic #60a0b0",
+ Comment.Preproc: "noitalic #007020",
+ Comment.Special: "noitalic bg:#fff0f0",
+
+ Keyword: "bold #007020",
+ Keyword.Pseudo: "nobold",
+ Keyword.Type: "nobold #902000",
+
+ Operator: "#666666",
+ Operator.Word: "bold #007020",
+
+ Name.Builtin: "#007020",
+ Name.Function: "#06287e",
+ Name.Class: "bold #0e84b5",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "#007020",
+ Name.Variable: "#bb60d5",
+ Name.Constant: "#60add5",
+ Name.Label: "bold #002070",
+ Name.Entity: "bold #d55537",
+ Name.Attribute: "#4070a0",
+ Name.Tag: "bold #062873",
+ Name.Decorator: "bold #555555",
+
+ String: "#4070a0",
+ String.Doc: "italic",
+ String.Interpol: "italic #70a0d0",
+ String.Escape: "bold #4070a0",
+ String.Regex: "#235388",
+ String.Symbol: "#517918",
+ String.Other: "#c65d09",
+ Number: "#40a070",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/fruity.py b/contrib/python/Pygments/py3/pygments/styles/fruity.py
index 2446915d66..5114fa700a 100644
--- a/contrib/python/Pygments/py3/pygments/styles/fruity.py
+++ b/contrib/python/Pygments/py3/pygments/styles/fruity.py
@@ -1,41 +1,41 @@
-"""
- pygments.styles.fruity
- ~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of my "fruity" vim theme.
-
+"""
+ pygments.styles.fruity
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ pygments version of my "fruity" vim theme.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Token, Comment, Name, Keyword, \
- Generic, Number, String, Whitespace
-
-class FruityStyle(Style):
- """
- Pygments version of the "native" vim theme.
- """
-
- background_color = '#111111'
- highlight_color = '#333333'
-
- styles = {
- Whitespace: '#888888',
- Token: '#ffffff',
- Generic.Output: '#444444 bg:#222222',
- Keyword: '#fb660a bold',
- Keyword.Pseudo: 'nobold',
- Number: '#0086f7 bold',
- Name.Tag: '#fb660a bold',
- Name.Variable: '#fb660a',
- Comment: '#008800 bg:#0f140f italic',
- Name.Attribute: '#ff0086 bold',
- String: '#0086d2',
- Name.Function: '#ff0086 bold',
- Generic.Heading: '#ffffff bold',
- Keyword.Type: '#cdcaa9 bold',
- Generic.Subheading: '#ffffff bold',
- Name.Constant: '#0086d2',
- Comment.Preproc: '#ff0007 bold'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Token, Comment, Name, Keyword, \
+ Generic, Number, String, Whitespace
+
+class FruityStyle(Style):
+ """
+ Pygments version of the "native" vim theme.
+ """
+
+ background_color = '#111111'
+ highlight_color = '#333333'
+
+ styles = {
+ Whitespace: '#888888',
+ Token: '#ffffff',
+ Generic.Output: '#444444 bg:#222222',
+ Keyword: '#fb660a bold',
+ Keyword.Pseudo: 'nobold',
+ Number: '#0086f7 bold',
+ Name.Tag: '#fb660a bold',
+ Name.Variable: '#fb660a',
+ Comment: '#008800 bg:#0f140f italic',
+ Name.Attribute: '#ff0086 bold',
+ String: '#0086d2',
+ Name.Function: '#ff0086 bold',
+ Generic.Heading: '#ffffff bold',
+ Keyword.Type: '#cdcaa9 bold',
+ Generic.Subheading: '#ffffff bold',
+ Name.Constant: '#0086d2',
+ Comment.Preproc: '#ff0007 bold'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/igor.py b/contrib/python/Pygments/py3/pygments/styles/igor.py
index 278e9da1d4..131d4e00e7 100644
--- a/contrib/python/Pygments/py3/pygments/styles/igor.py
+++ b/contrib/python/Pygments/py3/pygments/styles/igor.py
@@ -1,28 +1,28 @@
-"""
- pygments.styles.igor
- ~~~~~~~~~~~~~~~~~~~~
-
- Igor Pro default style.
-
+"""
+ pygments.styles.igor
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Igor Pro default style.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String
-
-
-class IgorStyle(Style):
- """
- Pygments version of the official colors for Igor Pro procedures.
- """
- default_style = ""
-
- styles = {
- Comment: 'italic #FF0000',
- Keyword: '#0000FF',
- Name.Function: '#C34E00',
- Name.Decorator: '#CC00A3',
- Name.Class: '#007575',
- String: '#009C00'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String
+
+
+class IgorStyle(Style):
+ """
+ Pygments version of the official colors for Igor Pro procedures.
+ """
+ default_style = ""
+
+ styles = {
+ Comment: 'italic #FF0000',
+ Keyword: '#0000FF',
+ Name.Function: '#C34E00',
+ Name.Decorator: '#CC00A3',
+ Name.Class: '#007575',
+ String: '#009C00'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/lovelace.py b/contrib/python/Pygments/py3/pygments/styles/lovelace.py
index ec8d2a9106..5d5ce791cd 100644
--- a/contrib/python/Pygments/py3/pygments/styles/lovelace.py
+++ b/contrib/python/Pygments/py3/pygments/styles/lovelace.py
@@ -1,96 +1,96 @@
-"""
- pygments.styles.lovelace
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Lovelace by Miikka Salminen
-
- Pygments style by Miikka Salminen (https://github.com/miikkas)
- A desaturated, somewhat subdued style created for the Lovelace interactive
- learning environment.
-
+"""
+ pygments.styles.lovelace
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lovelace by Miikka Salminen
+
+ Pygments style by Miikka Salminen (https://github.com/miikkas)
+ A desaturated, somewhat subdued style created for the Lovelace interactive
+ learning environment.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Punctuation, Generic, Whitespace
-
-
-class LovelaceStyle(Style):
- """
- The style used in Lovelace interactive learning environment. Tries to avoid
- the "angry fruit salad" effect with desaturated and dim colours.
- """
- _KW_BLUE = '#2838b0'
- _NAME_GREEN = '#388038'
- _DOC_ORANGE = '#b85820'
- _OW_PURPLE = '#a848a8'
- _FUN_BROWN = '#785840'
- _STR_RED = '#b83838'
- _CLS_CYAN = '#287088'
- _ESCAPE_LIME = '#709030'
- _LABEL_CYAN = '#289870'
- _EXCEPT_YELLOW = '#908828'
-
- default_style = '#222222'
-
- styles = {
- Whitespace: '#a89028',
- Comment: 'italic #888888',
- Comment.Hashbang: _CLS_CYAN,
- Comment.Multiline: '#888888',
- Comment.Preproc: 'noitalic '+_LABEL_CYAN,
-
- Keyword: _KW_BLUE,
- Keyword.Constant: 'italic #444444',
- Keyword.Declaration: 'italic',
- Keyword.Type: 'italic',
-
- Operator: '#666666',
- Operator.Word: _OW_PURPLE,
-
- Punctuation: '#888888',
-
- Name.Attribute: _NAME_GREEN,
- Name.Builtin: _NAME_GREEN,
- Name.Builtin.Pseudo: 'italic',
- Name.Class: _CLS_CYAN,
- Name.Constant: _DOC_ORANGE,
- Name.Decorator: _CLS_CYAN,
- Name.Entity: _ESCAPE_LIME,
- Name.Exception: _EXCEPT_YELLOW,
- Name.Function: _FUN_BROWN,
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Punctuation, Generic, Whitespace
+
+
+class LovelaceStyle(Style):
+ """
+ The style used in Lovelace interactive learning environment. Tries to avoid
+ the "angry fruit salad" effect with desaturated and dim colours.
+ """
+ _KW_BLUE = '#2838b0'
+ _NAME_GREEN = '#388038'
+ _DOC_ORANGE = '#b85820'
+ _OW_PURPLE = '#a848a8'
+ _FUN_BROWN = '#785840'
+ _STR_RED = '#b83838'
+ _CLS_CYAN = '#287088'
+ _ESCAPE_LIME = '#709030'
+ _LABEL_CYAN = '#289870'
+ _EXCEPT_YELLOW = '#908828'
+
+ default_style = '#222222'
+
+ styles = {
+ Whitespace: '#a89028',
+ Comment: 'italic #888888',
+ Comment.Hashbang: _CLS_CYAN,
+ Comment.Multiline: '#888888',
+ Comment.Preproc: 'noitalic '+_LABEL_CYAN,
+
+ Keyword: _KW_BLUE,
+ Keyword.Constant: 'italic #444444',
+ Keyword.Declaration: 'italic',
+ Keyword.Type: 'italic',
+
+ Operator: '#666666',
+ Operator.Word: _OW_PURPLE,
+
+ Punctuation: '#888888',
+
+ Name.Attribute: _NAME_GREEN,
+ Name.Builtin: _NAME_GREEN,
+ Name.Builtin.Pseudo: 'italic',
+ Name.Class: _CLS_CYAN,
+ Name.Constant: _DOC_ORANGE,
+ Name.Decorator: _CLS_CYAN,
+ Name.Entity: _ESCAPE_LIME,
+ Name.Exception: _EXCEPT_YELLOW,
+ Name.Function: _FUN_BROWN,
Name.Function.Magic: _DOC_ORANGE,
- Name.Label: _LABEL_CYAN,
- Name.Namespace: _LABEL_CYAN,
- Name.Tag: _KW_BLUE,
- Name.Variable: '#b04040',
- Name.Variable.Global:_EXCEPT_YELLOW,
+ Name.Label: _LABEL_CYAN,
+ Name.Namespace: _LABEL_CYAN,
+ Name.Tag: _KW_BLUE,
+ Name.Variable: '#b04040',
+ Name.Variable.Global:_EXCEPT_YELLOW,
Name.Variable.Magic: _DOC_ORANGE,
-
- String: _STR_RED,
+
+ String: _STR_RED,
String.Affix: '#444444',
- String.Char: _OW_PURPLE,
+ String.Char: _OW_PURPLE,
String.Delimiter: _DOC_ORANGE,
- String.Doc: 'italic '+_DOC_ORANGE,
- String.Escape: _ESCAPE_LIME,
- String.Interpol: 'underline',
- String.Other: _OW_PURPLE,
- String.Regex: _OW_PURPLE,
-
- Number: '#444444',
-
- Generic.Deleted: '#c02828',
- Generic.Emph: 'italic',
- Generic.Error: '#c02828',
- Generic.Heading: '#666666',
- Generic.Subheading: '#444444',
- Generic.Inserted: _NAME_GREEN,
- Generic.Output: '#666666',
- Generic.Prompt: '#444444',
- Generic.Strong: 'bold',
- Generic.Traceback: _KW_BLUE,
-
- Error: 'bg:'+_OW_PURPLE,
- }
+ String.Doc: 'italic '+_DOC_ORANGE,
+ String.Escape: _ESCAPE_LIME,
+ String.Interpol: 'underline',
+ String.Other: _OW_PURPLE,
+ String.Regex: _OW_PURPLE,
+
+ Number: '#444444',
+
+ Generic.Deleted: '#c02828',
+ Generic.Emph: 'italic',
+ Generic.Error: '#c02828',
+ Generic.Heading: '#666666',
+ Generic.Subheading: '#444444',
+ Generic.Inserted: _NAME_GREEN,
+ Generic.Output: '#666666',
+ Generic.Prompt: '#444444',
+ Generic.Strong: 'bold',
+ Generic.Traceback: _KW_BLUE,
+
+ Error: 'bg:'+_OW_PURPLE,
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/manni.py b/contrib/python/Pygments/py3/pygments/styles/manni.py
index 167dcebd5a..e4edd7f670 100644
--- a/contrib/python/Pygments/py3/pygments/styles/manni.py
+++ b/contrib/python/Pygments/py3/pygments/styles/manni.py
@@ -1,74 +1,74 @@
-"""
- pygments.styles.manni
- ~~~~~~~~~~~~~~~~~~~~~
-
- A colorful style, inspired by the terminal highlighting style.
-
- This is a port of the style used in the `php port`_ of pygments
- by Manni. The style is called 'default' there.
-
+"""
+ pygments.styles.manni
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ A colorful style, inspired by the terminal highlighting style.
+
+ This is a port of the style used in the `php port`_ of pygments
+ by Manni. The style is called 'default' there.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class ManniStyle(Style):
- """
- A colorful style, inspired by the terminal highlighting style.
- """
-
- background_color = '#f0f3f3'
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: 'italic #0099FF',
- Comment.Preproc: 'noitalic #009999',
- Comment.Special: 'bold',
-
- Keyword: 'bold #006699',
- Keyword.Pseudo: 'nobold',
- Keyword.Type: '#007788',
-
- Operator: '#555555',
- Operator.Word: 'bold #000000',
-
- Name.Builtin: '#336666',
- Name.Function: '#CC00FF',
- Name.Class: 'bold #00AA88',
- Name.Namespace: 'bold #00CCFF',
- Name.Exception: 'bold #CC0000',
- Name.Variable: '#003333',
- Name.Constant: '#336600',
- Name.Label: '#9999FF',
- Name.Entity: 'bold #999999',
- Name.Attribute: '#330099',
- Name.Tag: 'bold #330099',
- Name.Decorator: '#9999FF',
-
- String: '#CC3300',
- String.Doc: 'italic',
- String.Interpol: '#AA0000',
- String.Escape: 'bold #CC3300',
- String.Regex: '#33AAAA',
- String.Symbol: '#FFCC33',
- String.Other: '#CC3300',
-
- Number: '#FF6600',
-
- Generic.Heading: 'bold #003300',
- Generic.Subheading: 'bold #003300',
- Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
- Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
- Generic.Error: '#FF0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: 'bold #000099',
- Generic.Output: '#AAAAAA',
- Generic.Traceback: '#99CC66',
-
- Error: 'bg:#FFAAAA #AA0000'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class ManniStyle(Style):
+ """
+ A colorful style, inspired by the terminal highlighting style.
+ """
+
+ background_color = '#f0f3f3'
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: 'italic #0099FF',
+ Comment.Preproc: 'noitalic #009999',
+ Comment.Special: 'bold',
+
+ Keyword: 'bold #006699',
+ Keyword.Pseudo: 'nobold',
+ Keyword.Type: '#007788',
+
+ Operator: '#555555',
+ Operator.Word: 'bold #000000',
+
+ Name.Builtin: '#336666',
+ Name.Function: '#CC00FF',
+ Name.Class: 'bold #00AA88',
+ Name.Namespace: 'bold #00CCFF',
+ Name.Exception: 'bold #CC0000',
+ Name.Variable: '#003333',
+ Name.Constant: '#336600',
+ Name.Label: '#9999FF',
+ Name.Entity: 'bold #999999',
+ Name.Attribute: '#330099',
+ Name.Tag: 'bold #330099',
+ Name.Decorator: '#9999FF',
+
+ String: '#CC3300',
+ String.Doc: 'italic',
+ String.Interpol: '#AA0000',
+ String.Escape: 'bold #CC3300',
+ String.Regex: '#33AAAA',
+ String.Symbol: '#FFCC33',
+ String.Other: '#CC3300',
+
+ Number: '#FF6600',
+
+ Generic.Heading: 'bold #003300',
+ Generic.Subheading: 'bold #003300',
+ Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
+ Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
+ Generic.Error: '#FF0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: 'bold #000099',
+ Generic.Output: '#AAAAAA',
+ Generic.Traceback: '#99CC66',
+
+ Error: 'bg:#FFAAAA #AA0000'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/monokai.py b/contrib/python/Pygments/py3/pygments/styles/monokai.py
index 9de82f35ef..436e1d2768 100644
--- a/contrib/python/Pygments/py3/pygments/styles/monokai.py
+++ b/contrib/python/Pygments/py3/pygments/styles/monokai.py
@@ -1,106 +1,106 @@
-"""
- pygments.styles.monokai
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Mimic the Monokai color scheme. Based on tango.py.
-
- http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
-
+"""
+ pygments.styles.monokai
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Mimic the Monokai color scheme. Based on tango.py.
+
+ http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, Token, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-class MonokaiStyle(Style):
- """
- This style mimics the Monokai color scheme.
- """
-
- background_color = "#272822"
- highlight_color = "#49483e"
-
- styles = {
- # No corresponding class for the following:
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+class MonokaiStyle(Style):
+ """
+ This style mimics the Monokai color scheme.
+ """
+
+ background_color = "#272822"
+ highlight_color = "#49483e"
+
+ styles = {
+ # No corresponding class for the following:
Token: "#f8f8f2", # class: ''
- Whitespace: "", # class: 'w'
- Error: "#960050 bg:#1e0010", # class: 'err'
- Other: "", # class 'x'
-
- Comment: "#75715e", # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: "#66d9ef", # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: "#f92672", # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: "", # class: 'kt'
-
- Operator: "#f92672", # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: "#f8f8f2", # class: 'p'
-
- Name: "#f8f8f2", # class: 'n'
- Name.Attribute: "#a6e22e", # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: "#a6e22e", # class: 'nc' - to be revised
- Name.Constant: "#66d9ef", # class: 'no' - to be revised
- Name.Decorator: "#a6e22e", # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: "#a6e22e", # class: 'ne'
- Name.Function: "#a6e22e", # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: "", # class: 'nn' - to be revised
- Name.Other: "#a6e22e", # class: 'nx'
- Name.Tag: "#f92672", # class: 'nt' - like a keyword
- Name.Variable: "", # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: "#ae81ff", # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: "#ae81ff", # class: 'l'
- Literal.Date: "#e6db74", # class: 'ld'
-
- String: "#e6db74", # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: "", # class: 'sc'
- String.Doc: "", # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: "#ae81ff", # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: "", # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
-
- Generic: "", # class: 'g'
- Generic.Deleted: "#f92672", # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "", # class: 'gh'
- Generic.Inserted: "#a6e22e", # class: 'gi'
+ Whitespace: "", # class: 'w'
+ Error: "#960050 bg:#1e0010", # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: "#75715e", # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: "#66d9ef", # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: "#f92672", # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: "", # class: 'kt'
+
+ Operator: "#f92672", # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: "#f8f8f2", # class: 'p'
+
+ Name: "#f8f8f2", # class: 'n'
+ Name.Attribute: "#a6e22e", # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: "#a6e22e", # class: 'nc' - to be revised
+ Name.Constant: "#66d9ef", # class: 'no' - to be revised
+ Name.Decorator: "#a6e22e", # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: "#a6e22e", # class: 'ne'
+ Name.Function: "#a6e22e", # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: "", # class: 'nn' - to be revised
+ Name.Other: "#a6e22e", # class: 'nx'
+ Name.Tag: "#f92672", # class: 'nt' - like a keyword
+ Name.Variable: "", # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: "#ae81ff", # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: "#ae81ff", # class: 'l'
+ Literal.Date: "#e6db74", # class: 'ld'
+
+ String: "#e6db74", # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: "", # class: 'sc'
+ String.Doc: "", # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: "#ae81ff", # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: "", # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: "#f92672", # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "", # class: 'gh'
+ Generic.Inserted: "#a6e22e", # class: 'gi'
Generic.Output: "#66d9ef", # class: 'go'
Generic.Prompt: "bold #f92672", # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.Subheading: "#75715e", # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "#75715e", # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
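
The short names noted in the comments above ('c', 'k', 'nt', ...) are the CSS classes that the HTML formatter emits, so any of these styles can be exported as a stylesheet. A brief sketch (the '.highlight' selector prefix is an arbitrary choice):

from pygments.formatters import HtmlFormatter

# Generate CSS rules for the Monokai style, scoped under a .highlight container.
css = HtmlFormatter(style='monokai').get_style_defs('.highlight')
print(css.splitlines()[0])
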
diff --git a/contrib/python/Pygments/py3/pygments/styles/murphy.py b/contrib/python/Pygments/py3/pygments/styles/murphy.py
index 9115c2d87f..2728e6838e 100644
--- a/contrib/python/Pygments/py3/pygments/styles/murphy.py
+++ b/contrib/python/Pygments/py3/pygments/styles/murphy.py
@@ -1,79 +1,79 @@
-"""
- pygments.styles.murphy
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Murphy's style from CodeRay.
-
+"""
+ pygments.styles.murphy
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Murphy's style from CodeRay.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class MurphyStyle(Style):
- """
- Murphy's style from CodeRay.
- """
-
- default_style = ""
-
- styles = {
- Whitespace: "#bbbbbb",
- Comment: "#666 italic",
- Comment.Preproc: "#579 noitalic",
- Comment.Special: "#c00 bold",
-
- Keyword: "bold #289",
- Keyword.Pseudo: "#08f",
- Keyword.Type: "#66f",
-
- Operator: "#333",
- Operator.Word: "bold #000",
-
- Name.Builtin: "#072",
- Name.Function: "bold #5ed",
- Name.Class: "bold #e9e",
- Name.Namespace: "bold #0e84b5",
- Name.Exception: "bold #F00",
- Name.Variable: "#036",
- Name.Variable.Instance: "#aaf",
- Name.Variable.Class: "#ccf",
- Name.Variable.Global: "#f84",
- Name.Constant: "bold #5ed",
- Name.Label: "bold #970",
- Name.Entity: "#800",
- Name.Attribute: "#007",
- Name.Tag: "#070",
- Name.Decorator: "bold #555",
-
- String: "bg:#e0e0ff",
- String.Char: "#88F bg:",
- String.Doc: "#D42 bg:",
- String.Interpol: "bg:#eee",
- String.Escape: "bold #666",
- String.Regex: "bg:#e0e0ff #000",
- String.Symbol: "#fc8 bg:",
- String.Other: "#f88",
-
- Number: "bold #60E",
- Number.Integer: "bold #66f",
- Number.Float: "bold #60E",
- Number.Hex: "bold #058",
- Number.Oct: "bold #40E",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#A00000",
- Generic.Inserted: "#00A000",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #c65d09",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "#F00 bg:#FAA"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class MurphyStyle(Style):
+ """
+ Murphy's style from CodeRay.
+ """
+
+ default_style = ""
+
+ styles = {
+ Whitespace: "#bbbbbb",
+ Comment: "#666 italic",
+ Comment.Preproc: "#579 noitalic",
+ Comment.Special: "#c00 bold",
+
+ Keyword: "bold #289",
+ Keyword.Pseudo: "#08f",
+ Keyword.Type: "#66f",
+
+ Operator: "#333",
+ Operator.Word: "bold #000",
+
+ Name.Builtin: "#072",
+ Name.Function: "bold #5ed",
+ Name.Class: "bold #e9e",
+ Name.Namespace: "bold #0e84b5",
+ Name.Exception: "bold #F00",
+ Name.Variable: "#036",
+ Name.Variable.Instance: "#aaf",
+ Name.Variable.Class: "#ccf",
+ Name.Variable.Global: "#f84",
+ Name.Constant: "bold #5ed",
+ Name.Label: "bold #970",
+ Name.Entity: "#800",
+ Name.Attribute: "#007",
+ Name.Tag: "#070",
+ Name.Decorator: "bold #555",
+
+ String: "bg:#e0e0ff",
+ String.Char: "#88F bg:",
+ String.Doc: "#D42 bg:",
+ String.Interpol: "bg:#eee",
+ String.Escape: "bold #666",
+ String.Regex: "bg:#e0e0ff #000",
+ String.Symbol: "#fc8 bg:",
+ String.Other: "#f88",
+
+ Number: "bold #60E",
+ Number.Integer: "bold #66f",
+ Number.Float: "bold #60E",
+ Number.Hex: "bold #058",
+ Number.Oct: "bold #40E",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#A00000",
+ Generic.Inserted: "#00A000",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #c65d09",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "#F00 bg:#FAA"
+ }
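
The value strings in the styles dicts above (murphy, native and the other themes touched by this commit) follow Pygments' compact style-definition syntax: an optional hex colour, the flags bold/nobold, italic/noitalic and underline, a background via bg:#rrggbb (a bare "bg:" clears an inherited background, as in "#88F bg:" above), and border:#rrggbb. A minimal sketch of that grammar on a throwaway style; the class name and token choices are illustrative only, not part of this diff:

from pygments.style import Style
from pygments.token import Comment, Keyword, String

class DemoStyle(Style):
    # Hypothetical style used only to illustrate the value-string grammar.
    default_style = ""
    styles = {
        Comment:      "italic #888",      # italic text, grey foreground
        Keyword:      "bold #289",        # bold, three-digit hex colour
        String:       "bg:#e0e0ff",       # background colour only
        String.Char:  "#88F bg:",         # new foreground, inherited background cleared
        Keyword.Type: "noitalic nobold",  # switch inherited flags back off
    }
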
diff --git a/contrib/python/Pygments/py3/pygments/styles/native.py b/contrib/python/Pygments/py3/pygments/styles/native.py
index 9c2f2c16b8..dc9e0a8039 100644
--- a/contrib/python/Pygments/py3/pygments/styles/native.py
+++ b/contrib/python/Pygments/py3/pygments/styles/native.py
@@ -1,65 +1,65 @@
-"""
- pygments.styles.native
- ~~~~~~~~~~~~~~~~~~~~~~
-
- pygments version of my "native" vim theme.
-
+"""
+ pygments.styles.native
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ pygments version of my "native" vim theme.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-
-
-class NativeStyle(Style):
- """
- Pygments version of the "native" vim theme.
- """
-
- background_color = '#202020'
- highlight_color = '#404040'
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Token, Whitespace
+
+
+class NativeStyle(Style):
+ """
+ Pygments version of the "native" vim theme.
+ """
+
+ background_color = '#202020'
+ highlight_color = '#404040'
line_number_color = '#aaaaaa'
-
- styles = {
- Token: '#d0d0d0',
- Whitespace: '#666666',
-
- Comment: 'italic #999999',
- Comment.Preproc: 'noitalic bold #cd2828',
- Comment.Special: 'noitalic bold #e50808 bg:#520000',
-
- Keyword: 'bold #6ab825',
- Keyword.Pseudo: 'nobold',
- Operator.Word: 'bold #6ab825',
-
- String: '#ed9d13',
- String.Other: '#ffa500',
-
- Number: '#3677a9',
-
- Name.Builtin: '#24909d',
- Name.Variable: '#40ffff',
- Name.Constant: '#40ffff',
- Name.Class: 'underline #447fcf',
- Name.Function: '#447fcf',
- Name.Namespace: 'underline #447fcf',
- Name.Exception: '#bbbbbb',
- Name.Tag: 'bold #6ab825',
- Name.Attribute: '#bbbbbb',
- Name.Decorator: '#ffa500',
-
- Generic.Heading: 'bold #ffffff',
- Generic.Subheading: 'underline #ffffff',
- Generic.Deleted: '#d22323',
- Generic.Inserted: '#589819',
- Generic.Error: '#d22323',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#aaaaaa',
- Generic.Output: '#cccccc',
- Generic.Traceback: '#d22323',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+
+ styles = {
+ Token: '#d0d0d0',
+ Whitespace: '#666666',
+
+ Comment: 'italic #999999',
+ Comment.Preproc: 'noitalic bold #cd2828',
+ Comment.Special: 'noitalic bold #e50808 bg:#520000',
+
+ Keyword: 'bold #6ab825',
+ Keyword.Pseudo: 'nobold',
+ Operator.Word: 'bold #6ab825',
+
+ String: '#ed9d13',
+ String.Other: '#ffa500',
+
+ Number: '#3677a9',
+
+ Name.Builtin: '#24909d',
+ Name.Variable: '#40ffff',
+ Name.Constant: '#40ffff',
+ Name.Class: 'underline #447fcf',
+ Name.Function: '#447fcf',
+ Name.Namespace: 'underline #447fcf',
+ Name.Exception: '#bbbbbb',
+ Name.Tag: 'bold #6ab825',
+ Name.Attribute: '#bbbbbb',
+ Name.Decorator: '#ffa500',
+
+ Generic.Heading: 'bold #ffffff',
+ Generic.Subheading: 'underline #ffffff',
+ Generic.Deleted: '#d22323',
+ Generic.Inserted: '#589819',
+ Generic.Error: '#d22323',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#aaaaaa',
+ Generic.Output: '#cccccc',
+ Generic.Traceback: '#d22323',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py b/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
index 0c98f0058d..24ac1973ee 100644
--- a/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
+++ b/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
@@ -1,121 +1,121 @@
-"""
- pygments.styles.paraiso_dark
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Paraíso (Dark) by Jan T. Sott
-
- Pygments template by Jan T. Sott (https://github.com/idleberg)
- Created with Base16 Builder by Chris Kempson
- (https://github.com/chriskempson/base16-builder).
-
+"""
+ pygments.styles.paraiso_dark
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Paraíso (Dark) by Jan T. Sott
+
+ Pygments template by Jan T. Sott (https://github.com/idleberg)
+ Created with Base16 Builder by Chris Kempson
+ (https://github.com/chriskempson/base16-builder).
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-BACKGROUND = "#2f1e2e"
-CURRENT_LINE = "#41323f"
-SELECTION = "#4f424c"
-FOREGROUND = "#e7e9db"
-COMMENT = "#776e71"
-RED = "#ef6155"
-ORANGE = "#f99b15"
-YELLOW = "#fec418"
-GREEN = "#48b685"
-AQUA = "#5bc4bf"
-BLUE = "#06b6ef"
-PURPLE = "#815ba4"
-
-
-class ParaisoDarkStyle(Style):
-
- default_style = ''
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- styles = {
- # No corresponding class for the following:
- Text: FOREGROUND, # class: ''
- Whitespace: "", # class: 'w'
- Error: RED, # class: 'err'
- Other: "", # class 'x'
-
- Comment: COMMENT, # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: PURPLE, # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: AQUA, # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: YELLOW, # class: 'kt'
-
- Operator: AQUA, # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: FOREGROUND, # class: 'p'
-
- Name: FOREGROUND, # class: 'n'
- Name.Attribute: BLUE, # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: YELLOW, # class: 'nc' - to be revised
- Name.Constant: RED, # class: 'no' - to be revised
- Name.Decorator: AQUA, # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: RED, # class: 'ne'
- Name.Function: BLUE, # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: YELLOW, # class: 'nn' - to be revised
- Name.Other: BLUE, # class: 'nx'
- Name.Tag: AQUA, # class: 'nt' - like a keyword
- Name.Variable: RED, # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: ORANGE, # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: ORANGE, # class: 'l'
- Literal.Date: GREEN, # class: 'ld'
-
- String: GREEN, # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: FOREGROUND, # class: 'sc'
- String.Doc: COMMENT, # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: ORANGE, # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: ORANGE, # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: RED, # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
- Generic.Inserted: GREEN, # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "bold " + COMMENT, # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.Subheading: "bold " + AQUA, # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Text, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+BACKGROUND = "#2f1e2e"
+CURRENT_LINE = "#41323f"
+SELECTION = "#4f424c"
+FOREGROUND = "#e7e9db"
+COMMENT = "#776e71"
+RED = "#ef6155"
+ORANGE = "#f99b15"
+YELLOW = "#fec418"
+GREEN = "#48b685"
+AQUA = "#5bc4bf"
+BLUE = "#06b6ef"
+PURPLE = "#815ba4"
+
+
+class ParaisoDarkStyle(Style):
+
+ default_style = ''
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ styles = {
+ # No corresponding class for the following:
+ Text: FOREGROUND, # class: ''
+ Whitespace: "", # class: 'w'
+ Error: RED, # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: COMMENT, # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: PURPLE, # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: AQUA, # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: YELLOW, # class: 'kt'
+
+ Operator: AQUA, # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: FOREGROUND, # class: 'p'
+
+ Name: FOREGROUND, # class: 'n'
+ Name.Attribute: BLUE, # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: YELLOW, # class: 'nc' - to be revised
+ Name.Constant: RED, # class: 'no' - to be revised
+ Name.Decorator: AQUA, # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: RED, # class: 'ne'
+ Name.Function: BLUE, # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: YELLOW, # class: 'nn' - to be revised
+ Name.Other: BLUE, # class: 'nx'
+ Name.Tag: AQUA, # class: 'nt' - like a keyword
+ Name.Variable: RED, # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: ORANGE, # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: ORANGE, # class: 'l'
+ Literal.Date: GREEN, # class: 'ld'
+
+ String: GREEN, # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: FOREGROUND, # class: 'sc'
+ String.Doc: COMMENT, # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: ORANGE, # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: ORANGE, # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: RED, # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
+ Generic.Inserted: GREEN, # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "bold " + COMMENT, # class: 'gp'
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "bold " + AQUA, # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
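
The two Paraíso files build their styles dicts from module-level palette constants (RED, AQUA, FOREGROUND, ...) and compose entries such as "bold " + FOREGROUND. How a style like this is consumed is not shown in the diff itself; as a rough sketch, assuming only the public highlight()/HtmlFormatter API and assuming the registered alias for ParaisoDarkStyle is 'paraiso-dark':

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

# Formatters resolve a registered style name to its Style class;
# 'paraiso-dark' is assumed to be the alias of ParaisoDarkStyle.
formatter = HtmlFormatter(style='paraiso-dark')
html = highlight("print('hi')", PythonLexer(), formatter)
css = formatter.get_style_defs('.highlight')  # CSS rules derived from the styles dict
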
diff --git a/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py b/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
index 4c9f1392ac..e2ed6f7600 100644
--- a/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
+++ b/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
@@ -1,121 +1,121 @@
-"""
- pygments.styles.paraiso_light
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Paraíso (Light) by Jan T. Sott
-
- Pygments template by Jan T. Sott (https://github.com/idleberg)
- Created with Base16 Builder by Chris Kempson
- (https://github.com/chriskempson/base16-builder).
-
+"""
+ pygments.styles.paraiso_light
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Paraíso (Light) by Jan T. Sott
+
+ Pygments template by Jan T. Sott (https://github.com/idleberg)
+ Created with Base16 Builder by Chris Kempson
+ (https://github.com/chriskempson/base16-builder).
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, Text, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-BACKGROUND = "#e7e9db"
-CURRENT_LINE = "#b9b6b0"
-SELECTION = "#a39e9b"
-FOREGROUND = "#2f1e2e"
-COMMENT = "#8d8687"
-RED = "#ef6155"
-ORANGE = "#f99b15"
-YELLOW = "#fec418"
-GREEN = "#48b685"
-AQUA = "#5bc4bf"
-BLUE = "#06b6ef"
-PURPLE = "#815ba4"
-
-
-class ParaisoLightStyle(Style):
-
- default_style = ''
-
- background_color = BACKGROUND
- highlight_color = SELECTION
-
- styles = {
- # No corresponding class for the following:
- Text: FOREGROUND, # class: ''
- Whitespace: "", # class: 'w'
- Error: RED, # class: 'err'
- Other: "", # class 'x'
-
- Comment: COMMENT, # class: 'c'
- Comment.Multiline: "", # class: 'cm'
- Comment.Preproc: "", # class: 'cp'
- Comment.Single: "", # class: 'c1'
- Comment.Special: "", # class: 'cs'
-
- Keyword: PURPLE, # class: 'k'
- Keyword.Constant: "", # class: 'kc'
- Keyword.Declaration: "", # class: 'kd'
- Keyword.Namespace: AQUA, # class: 'kn'
- Keyword.Pseudo: "", # class: 'kp'
- Keyword.Reserved: "", # class: 'kr'
- Keyword.Type: YELLOW, # class: 'kt'
-
- Operator: AQUA, # class: 'o'
- Operator.Word: "", # class: 'ow' - like keywords
-
- Punctuation: FOREGROUND, # class: 'p'
-
- Name: FOREGROUND, # class: 'n'
- Name.Attribute: BLUE, # class: 'na' - to be revised
- Name.Builtin: "", # class: 'nb'
- Name.Builtin.Pseudo: "", # class: 'bp'
- Name.Class: YELLOW, # class: 'nc' - to be revised
- Name.Constant: RED, # class: 'no' - to be revised
- Name.Decorator: AQUA, # class: 'nd' - to be revised
- Name.Entity: "", # class: 'ni'
- Name.Exception: RED, # class: 'ne'
- Name.Function: BLUE, # class: 'nf'
- Name.Property: "", # class: 'py'
- Name.Label: "", # class: 'nl'
- Name.Namespace: YELLOW, # class: 'nn' - to be revised
- Name.Other: BLUE, # class: 'nx'
- Name.Tag: AQUA, # class: 'nt' - like a keyword
- Name.Variable: RED, # class: 'nv' - to be revised
- Name.Variable.Class: "", # class: 'vc' - to be revised
- Name.Variable.Global: "", # class: 'vg' - to be revised
- Name.Variable.Instance: "", # class: 'vi' - to be revised
-
- Number: ORANGE, # class: 'm'
- Number.Float: "", # class: 'mf'
- Number.Hex: "", # class: 'mh'
- Number.Integer: "", # class: 'mi'
- Number.Integer.Long: "", # class: 'il'
- Number.Oct: "", # class: 'mo'
-
- Literal: ORANGE, # class: 'l'
- Literal.Date: GREEN, # class: 'ld'
-
- String: GREEN, # class: 's'
- String.Backtick: "", # class: 'sb'
- String.Char: FOREGROUND, # class: 'sc'
- String.Doc: COMMENT, # class: 'sd' - like a comment
- String.Double: "", # class: 's2'
- String.Escape: ORANGE, # class: 'se'
- String.Heredoc: "", # class: 'sh'
- String.Interpol: ORANGE, # class: 'si'
- String.Other: "", # class: 'sx'
- String.Regex: "", # class: 'sr'
- String.Single: "", # class: 's1'
- String.Symbol: "", # class: 'ss'
-
- Generic: "", # class: 'g'
- Generic.Deleted: RED, # class: 'gd',
- Generic.Emph: "italic", # class: 'ge'
- Generic.Error: "", # class: 'gr'
- Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
- Generic.Inserted: GREEN, # class: 'gi'
- Generic.Output: "", # class: 'go'
- Generic.Prompt: "bold " + COMMENT, # class: 'gp'
- Generic.Strong: "bold", # class: 'gs'
- Generic.Subheading: "bold " + AQUA, # class: 'gu'
- Generic.Traceback: "", # class: 'gt'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, Text, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+BACKGROUND = "#e7e9db"
+CURRENT_LINE = "#b9b6b0"
+SELECTION = "#a39e9b"
+FOREGROUND = "#2f1e2e"
+COMMENT = "#8d8687"
+RED = "#ef6155"
+ORANGE = "#f99b15"
+YELLOW = "#fec418"
+GREEN = "#48b685"
+AQUA = "#5bc4bf"
+BLUE = "#06b6ef"
+PURPLE = "#815ba4"
+
+
+class ParaisoLightStyle(Style):
+
+ default_style = ''
+
+ background_color = BACKGROUND
+ highlight_color = SELECTION
+
+ styles = {
+ # No corresponding class for the following:
+ Text: FOREGROUND, # class: ''
+ Whitespace: "", # class: 'w'
+ Error: RED, # class: 'err'
+ Other: "", # class 'x'
+
+ Comment: COMMENT, # class: 'c'
+ Comment.Multiline: "", # class: 'cm'
+ Comment.Preproc: "", # class: 'cp'
+ Comment.Single: "", # class: 'c1'
+ Comment.Special: "", # class: 'cs'
+
+ Keyword: PURPLE, # class: 'k'
+ Keyword.Constant: "", # class: 'kc'
+ Keyword.Declaration: "", # class: 'kd'
+ Keyword.Namespace: AQUA, # class: 'kn'
+ Keyword.Pseudo: "", # class: 'kp'
+ Keyword.Reserved: "", # class: 'kr'
+ Keyword.Type: YELLOW, # class: 'kt'
+
+ Operator: AQUA, # class: 'o'
+ Operator.Word: "", # class: 'ow' - like keywords
+
+ Punctuation: FOREGROUND, # class: 'p'
+
+ Name: FOREGROUND, # class: 'n'
+ Name.Attribute: BLUE, # class: 'na' - to be revised
+ Name.Builtin: "", # class: 'nb'
+ Name.Builtin.Pseudo: "", # class: 'bp'
+ Name.Class: YELLOW, # class: 'nc' - to be revised
+ Name.Constant: RED, # class: 'no' - to be revised
+ Name.Decorator: AQUA, # class: 'nd' - to be revised
+ Name.Entity: "", # class: 'ni'
+ Name.Exception: RED, # class: 'ne'
+ Name.Function: BLUE, # class: 'nf'
+ Name.Property: "", # class: 'py'
+ Name.Label: "", # class: 'nl'
+ Name.Namespace: YELLOW, # class: 'nn' - to be revised
+ Name.Other: BLUE, # class: 'nx'
+ Name.Tag: AQUA, # class: 'nt' - like a keyword
+ Name.Variable: RED, # class: 'nv' - to be revised
+ Name.Variable.Class: "", # class: 'vc' - to be revised
+ Name.Variable.Global: "", # class: 'vg' - to be revised
+ Name.Variable.Instance: "", # class: 'vi' - to be revised
+
+ Number: ORANGE, # class: 'm'
+ Number.Float: "", # class: 'mf'
+ Number.Hex: "", # class: 'mh'
+ Number.Integer: "", # class: 'mi'
+ Number.Integer.Long: "", # class: 'il'
+ Number.Oct: "", # class: 'mo'
+
+ Literal: ORANGE, # class: 'l'
+ Literal.Date: GREEN, # class: 'ld'
+
+ String: GREEN, # class: 's'
+ String.Backtick: "", # class: 'sb'
+ String.Char: FOREGROUND, # class: 'sc'
+ String.Doc: COMMENT, # class: 'sd' - like a comment
+ String.Double: "", # class: 's2'
+ String.Escape: ORANGE, # class: 'se'
+ String.Heredoc: "", # class: 'sh'
+ String.Interpol: ORANGE, # class: 'si'
+ String.Other: "", # class: 'sx'
+ String.Regex: "", # class: 'sr'
+ String.Single: "", # class: 's1'
+ String.Symbol: "", # class: 'ss'
+
+ Generic: "", # class: 'g'
+ Generic.Deleted: RED, # class: 'gd',
+ Generic.Emph: "italic", # class: 'ge'
+ Generic.Error: "", # class: 'gr'
+ Generic.Heading: "bold " + FOREGROUND, # class: 'gh'
+ Generic.Inserted: GREEN, # class: 'gi'
+ Generic.Output: "", # class: 'go'
+ Generic.Prompt: "bold " + COMMENT, # class: 'gp'
+ Generic.Strong: "bold", # class: 'gs'
+ Generic.Subheading: "bold " + AQUA, # class: 'gu'
+ Generic.Traceback: "", # class: 'gt'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/pastie.py b/contrib/python/Pygments/py3/pygments/styles/pastie.py
index 14ec7007f5..c4bea36ae1 100644
--- a/contrib/python/Pygments/py3/pygments/styles/pastie.py
+++ b/contrib/python/Pygments/py3/pygments/styles/pastie.py
@@ -1,74 +1,74 @@
-"""
- pygments.styles.pastie
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the `pastie`_ default style.
-
- .. _pastie: http://pastie.caboo.se/
-
+"""
+ pygments.styles.pastie
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the `pastie`_ default style.
+
+ .. _pastie: http://pastie.caboo.se/
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class PastieStyle(Style):
- """
- Style similar to the pastie default style.
- """
-
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: '#888888',
- Comment.Preproc: 'bold #cc0000',
- Comment.Special: 'bg:#fff0f0 bold #cc0000',
-
- String: 'bg:#fff0f0 #dd2200',
- String.Regex: 'bg:#fff0ff #008800',
- String.Other: 'bg:#f0fff0 #22bb22',
- String.Symbol: '#aa6600',
- String.Interpol: '#3333bb',
- String.Escape: '#0044dd',
-
- Operator.Word: '#008800',
-
- Keyword: 'bold #008800',
- Keyword.Pseudo: 'nobold',
- Keyword.Type: '#888888',
-
- Name.Class: 'bold #bb0066',
- Name.Exception: 'bold #bb0066',
- Name.Function: 'bold #0066bb',
- Name.Property: 'bold #336699',
- Name.Namespace: 'bold #bb0066',
- Name.Builtin: '#003388',
- Name.Variable: '#336699',
- Name.Variable.Class: '#336699',
- Name.Variable.Instance: '#3333bb',
- Name.Variable.Global: '#dd7700',
- Name.Constant: 'bold #003366',
- Name.Tag: 'bold #bb0066',
- Name.Attribute: '#336699',
- Name.Decorator: '#555555',
- Name.Label: 'italic #336699',
-
- Number: 'bold #0000DD',
-
- Generic.Heading: '#333',
- Generic.Subheading: '#666',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class PastieStyle(Style):
+ """
+ Style similar to the pastie default style.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: '#888888',
+ Comment.Preproc: 'bold #cc0000',
+ Comment.Special: 'bg:#fff0f0 bold #cc0000',
+
+ String: 'bg:#fff0f0 #dd2200',
+ String.Regex: 'bg:#fff0ff #008800',
+ String.Other: 'bg:#f0fff0 #22bb22',
+ String.Symbol: '#aa6600',
+ String.Interpol: '#3333bb',
+ String.Escape: '#0044dd',
+
+ Operator.Word: '#008800',
+
+ Keyword: 'bold #008800',
+ Keyword.Pseudo: 'nobold',
+ Keyword.Type: '#888888',
+
+ Name.Class: 'bold #bb0066',
+ Name.Exception: 'bold #bb0066',
+ Name.Function: 'bold #0066bb',
+ Name.Property: 'bold #336699',
+ Name.Namespace: 'bold #bb0066',
+ Name.Builtin: '#003388',
+ Name.Variable: '#336699',
+ Name.Variable.Class: '#336699',
+ Name.Variable.Instance: '#3333bb',
+ Name.Variable.Global: '#dd7700',
+ Name.Constant: 'bold #003366',
+ Name.Tag: 'bold #bb0066',
+ Name.Attribute: '#336699',
+ Name.Decorator: '#555555',
+ Name.Label: 'italic #336699',
+
+ Number: 'bold #0000DD',
+
+ Generic.Heading: '#333',
+ Generic.Subheading: '#666',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/perldoc.py b/contrib/python/Pygments/py3/pygments/styles/perldoc.py
index 5eae210fd8..1be5ac1a02 100644
--- a/contrib/python/Pygments/py3/pygments/styles/perldoc.py
+++ b/contrib/python/Pygments/py3/pygments/styles/perldoc.py
@@ -1,68 +1,68 @@
-"""
- pygments.styles.perldoc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the style used in the `perldoc`_ code blocks.
-
- .. _perldoc: http://perldoc.perl.org/
-
+"""
+ pygments.styles.perldoc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the style used in the `perldoc`_ code blocks.
+
+ .. _perldoc: http://perldoc.perl.org/
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class PerldocStyle(Style):
- """
- Style similar to the style used in the perldoc code blocks.
- """
-
- background_color = '#eeeedd'
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: '#228B22',
- Comment.Preproc: '#1e889b',
- Comment.Special: '#8B008B bold',
-
- String: '#CD5555',
- String.Heredoc: '#1c7e71 italic',
- String.Regex: '#B452CD',
- String.Other: '#cb6c20',
- String.Regex: '#1c7e71',
-
- Number: '#B452CD',
-
- Operator.Word: '#8B008B',
-
- Keyword: '#8B008B bold',
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class PerldocStyle(Style):
+ """
+ Style similar to the style used in the perldoc code blocks.
+ """
+
+ background_color = '#eeeedd'
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: '#228B22',
+ Comment.Preproc: '#1e889b',
+ Comment.Special: '#8B008B bold',
+
+ String: '#CD5555',
+ String.Heredoc: '#1c7e71 italic',
+ String.Regex: '#B452CD',
+ String.Other: '#cb6c20',
+ String.Regex: '#1c7e71',
+
+ Number: '#B452CD',
+
+ Operator.Word: '#8B008B',
+
+ Keyword: '#8B008B bold',
Keyword.Type: '#00688B',
-
- Name.Class: '#008b45 bold',
- Name.Exception: '#008b45 bold',
- Name.Function: '#008b45',
- Name.Namespace: '#008b45 underline',
- Name.Variable: '#00688B',
- Name.Constant: '#00688B',
- Name.Decorator: '#707a7c',
- Name.Tag: '#8B008B bold',
- Name.Attribute: '#658b00',
- Name.Builtin: '#658b00',
-
- Generic.Heading: 'bold #000080',
- Generic.Subheading: 'bold #800080',
- Generic.Deleted: '#aa0000',
- Generic.Inserted: '#00aa00',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+
+ Name.Class: '#008b45 bold',
+ Name.Exception: '#008b45 bold',
+ Name.Function: '#008b45',
+ Name.Namespace: '#008b45 underline',
+ Name.Variable: '#00688B',
+ Name.Constant: '#00688B',
+ Name.Decorator: '#707a7c',
+ Name.Tag: '#8B008B bold',
+ Name.Attribute: '#658b00',
+ Name.Builtin: '#658b00',
+
+ Generic.Heading: 'bold #000080',
+ Generic.Subheading: 'bold #800080',
+ Generic.Deleted: '#aa0000',
+ Generic.Inserted: '#00aa00',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/rrt.py b/contrib/python/Pygments/py3/pygments/styles/rrt.py
index 0b58152d4c..7c69c1104a 100644
--- a/contrib/python/Pygments/py3/pygments/styles/rrt.py
+++ b/contrib/python/Pygments/py3/pygments/styles/rrt.py
@@ -1,33 +1,33 @@
-"""
- pygments.styles.rrt
- ~~~~~~~~~~~~~~~~~~~
-
- pygments "rrt" theme, based on Zap and Emacs defaults.
-
+"""
+ pygments.styles.rrt
+ ~~~~~~~~~~~~~~~~~~~
+
+ pygments "rrt" theme, based on Zap and Emacs defaults.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
from pygments.token import Token, Comment, Name, Keyword, String
-
-
-class RrtStyle(Style):
- """
- Minimalistic "rrt" theme, based on Zap and Emacs defaults.
- """
-
- background_color = '#000000'
- highlight_color = '#0000ff'
-
- styles = {
+
+
+class RrtStyle(Style):
+ """
+ Minimalistic "rrt" theme, based on Zap and Emacs defaults.
+ """
+
+ background_color = '#000000'
+ highlight_color = '#0000ff'
+
+ styles = {
Token: '#dddddd',
- Comment: '#00ff00',
- Name.Function: '#ffff00',
- Name.Variable: '#eedd82',
- Name.Constant: '#7fffd4',
- Keyword: '#ff0000',
- Comment.Preproc: '#e5e5e5',
- String: '#87ceeb',
- Keyword.Type: '#ee82ee',
- }
+ Comment: '#00ff00',
+ Name.Function: '#ffff00',
+ Name.Variable: '#eedd82',
+ Name.Constant: '#7fffd4',
+ Keyword: '#ff0000',
+ Comment.Preproc: '#e5e5e5',
+ String: '#87ceeb',
+ Keyword.Type: '#ee82ee',
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/tango.py b/contrib/python/Pygments/py3/pygments/styles/tango.py
index 7a2f005b6c..0f1c6e8610 100644
--- a/contrib/python/Pygments/py3/pygments/styles/tango.py
+++ b/contrib/python/Pygments/py3/pygments/styles/tango.py
@@ -1,140 +1,140 @@
-"""
- pygments.styles.tango
- ~~~~~~~~~~~~~~~~~~~~~
-
- The Crunchy default Style inspired from the color palette from
- the Tango Icon Theme Guidelines.
-
- http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines
-
- Butter: #fce94f #edd400 #c4a000
- Orange: #fcaf3e #f57900 #ce5c00
- Chocolate: #e9b96e #c17d11 #8f5902
- Chameleon: #8ae234 #73d216 #4e9a06
- Sky Blue: #729fcf #3465a4 #204a87
- Plum: #ad7fa8 #75507b #5c35cc
- Scarlet Red:#ef2929 #cc0000 #a40000
- Aluminium: #eeeeec #d3d7cf #babdb6
- #888a85 #555753 #2e3436
-
- Not all of the above colors are used; other colors added:
- very light grey: #f8f8f8 (for background)
-
- This style can be used as a template as it includes all the known
- Token types, unlike most (if not all) of the styles included in the
- Pygments distribution.
-
- However, since Crunchy is intended to be used by beginners, we have strived
- to create a style that gloss over subtle distinctions between different
- categories.
-
- Taking Python for example, comments (Comment.*) and docstrings (String.Doc)
- have been chosen to have the same style. Similarly, keywords (Keyword.*),
- and Operator.Word (and, or, in) have been assigned the same style.
-
+"""
+ pygments.styles.tango
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ The Crunchy default Style inspired from the color palette from
+ the Tango Icon Theme Guidelines.
+
+ http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines
+
+ Butter: #fce94f #edd400 #c4a000
+ Orange: #fcaf3e #f57900 #ce5c00
+ Chocolate: #e9b96e #c17d11 #8f5902
+ Chameleon: #8ae234 #73d216 #4e9a06
+ Sky Blue: #729fcf #3465a4 #204a87
+ Plum: #ad7fa8 #75507b #5c35cc
+ Scarlet Red:#ef2929 #cc0000 #a40000
+ Aluminium: #eeeeec #d3d7cf #babdb6
+ #888a85 #555753 #2e3436
+
+ Not all of the above colors are used; other colors added:
+ very light grey: #f8f8f8 (for background)
+
+ This style can be used as a template as it includes all the known
+ Token types, unlike most (if not all) of the styles included in the
+ Pygments distribution.
+
+ However, since Crunchy is intended to be used by beginners, we have strived
+ to create a style that gloss over subtle distinctions between different
+ categories.
+
+ Taking Python for example, comments (Comment.*) and docstrings (String.Doc)
+ have been chosen to have the same style. Similarly, keywords (Keyword.*),
+ and Operator.Word (and, or, in) have been assigned the same style.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
-
-
-class TangoStyle(Style):
- """
- The Crunchy default Style inspired from the color palette from
- the Tango Icon Theme Guidelines.
- """
-
- # work in progress...
-
- background_color = "#f8f8f8"
- default_style = ""
-
- styles = {
- # No corresponding class for the following:
- #Text: "", # class: ''
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
+
+
+class TangoStyle(Style):
+ """
+ The Crunchy default Style inspired from the color palette from
+ the Tango Icon Theme Guidelines.
+ """
+
+ # work in progress...
+
+ background_color = "#f8f8f8"
+ default_style = ""
+
+ styles = {
+ # No corresponding class for the following:
+ #Text: "", # class: ''
Whitespace: "#f8f8f8", # class: 'w'
- Error: "#a40000 border:#ef2929", # class: 'err'
- Other: "#000000", # class 'x'
-
- Comment: "italic #8f5902", # class: 'c'
- Comment.Multiline: "italic #8f5902", # class: 'cm'
- Comment.Preproc: "italic #8f5902", # class: 'cp'
- Comment.Single: "italic #8f5902", # class: 'c1'
- Comment.Special: "italic #8f5902", # class: 'cs'
-
- Keyword: "bold #204a87", # class: 'k'
- Keyword.Constant: "bold #204a87", # class: 'kc'
- Keyword.Declaration: "bold #204a87", # class: 'kd'
- Keyword.Namespace: "bold #204a87", # class: 'kn'
- Keyword.Pseudo: "bold #204a87", # class: 'kp'
- Keyword.Reserved: "bold #204a87", # class: 'kr'
- Keyword.Type: "bold #204a87", # class: 'kt'
-
- Operator: "bold #ce5c00", # class: 'o'
- Operator.Word: "bold #204a87", # class: 'ow' - like keywords
-
- Punctuation: "bold #000000", # class: 'p'
-
- # because special names such as Name.Class, Name.Function, etc.
- # are not recognized as such later in the parsing, we choose them
- # to look the same as ordinary variables.
- Name: "#000000", # class: 'n'
- Name.Attribute: "#c4a000", # class: 'na' - to be revised
- Name.Builtin: "#204a87", # class: 'nb'
- Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
- Name.Class: "#000000", # class: 'nc' - to be revised
- Name.Constant: "#000000", # class: 'no' - to be revised
- Name.Decorator: "bold #5c35cc", # class: 'nd' - to be revised
- Name.Entity: "#ce5c00", # class: 'ni'
- Name.Exception: "bold #cc0000", # class: 'ne'
- Name.Function: "#000000", # class: 'nf'
- Name.Property: "#000000", # class: 'py'
- Name.Label: "#f57900", # class: 'nl'
- Name.Namespace: "#000000", # class: 'nn' - to be revised
- Name.Other: "#000000", # class: 'nx'
- Name.Tag: "bold #204a87", # class: 'nt' - like a keyword
- Name.Variable: "#000000", # class: 'nv' - to be revised
- Name.Variable.Class: "#000000", # class: 'vc' - to be revised
- Name.Variable.Global: "#000000", # class: 'vg' - to be revised
- Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
-
- # since the tango light blue does not show up well in text, we choose
- # a pure blue instead.
- Number: "bold #0000cf", # class: 'm'
- Number.Float: "bold #0000cf", # class: 'mf'
- Number.Hex: "bold #0000cf", # class: 'mh'
- Number.Integer: "bold #0000cf", # class: 'mi'
- Number.Integer.Long: "bold #0000cf", # class: 'il'
- Number.Oct: "bold #0000cf", # class: 'mo'
-
- Literal: "#000000", # class: 'l'
- Literal.Date: "#000000", # class: 'ld'
-
- String: "#4e9a06", # class: 's'
- String.Backtick: "#4e9a06", # class: 'sb'
- String.Char: "#4e9a06", # class: 'sc'
- String.Doc: "italic #8f5902", # class: 'sd' - like a comment
- String.Double: "#4e9a06", # class: 's2'
- String.Escape: "#4e9a06", # class: 'se'
- String.Heredoc: "#4e9a06", # class: 'sh'
- String.Interpol: "#4e9a06", # class: 'si'
- String.Other: "#4e9a06", # class: 'sx'
- String.Regex: "#4e9a06", # class: 'sr'
- String.Single: "#4e9a06", # class: 's1'
- String.Symbol: "#4e9a06", # class: 'ss'
-
- Generic: "#000000", # class: 'g'
- Generic.Deleted: "#a40000", # class: 'gd'
- Generic.Emph: "italic #000000", # class: 'ge'
- Generic.Error: "#ef2929", # class: 'gr'
- Generic.Heading: "bold #000080", # class: 'gh'
- Generic.Inserted: "#00A000", # class: 'gi'
- Generic.Output: "italic #000000", # class: 'go'
- Generic.Prompt: "#8f5902", # class: 'gp'
- Generic.Strong: "bold #000000", # class: 'gs'
- Generic.Subheading: "bold #800080", # class: 'gu'
- Generic.Traceback: "bold #a40000", # class: 'gt'
- }
+ Error: "#a40000 border:#ef2929", # class: 'err'
+ Other: "#000000", # class 'x'
+
+ Comment: "italic #8f5902", # class: 'c'
+ Comment.Multiline: "italic #8f5902", # class: 'cm'
+ Comment.Preproc: "italic #8f5902", # class: 'cp'
+ Comment.Single: "italic #8f5902", # class: 'c1'
+ Comment.Special: "italic #8f5902", # class: 'cs'
+
+ Keyword: "bold #204a87", # class: 'k'
+ Keyword.Constant: "bold #204a87", # class: 'kc'
+ Keyword.Declaration: "bold #204a87", # class: 'kd'
+ Keyword.Namespace: "bold #204a87", # class: 'kn'
+ Keyword.Pseudo: "bold #204a87", # class: 'kp'
+ Keyword.Reserved: "bold #204a87", # class: 'kr'
+ Keyword.Type: "bold #204a87", # class: 'kt'
+
+ Operator: "bold #ce5c00", # class: 'o'
+ Operator.Word: "bold #204a87", # class: 'ow' - like keywords
+
+ Punctuation: "bold #000000", # class: 'p'
+
+ # because special names such as Name.Class, Name.Function, etc.
+ # are not recognized as such later in the parsing, we choose them
+ # to look the same as ordinary variables.
+ Name: "#000000", # class: 'n'
+ Name.Attribute: "#c4a000", # class: 'na' - to be revised
+ Name.Builtin: "#204a87", # class: 'nb'
+ Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
+ Name.Class: "#000000", # class: 'nc' - to be revised
+ Name.Constant: "#000000", # class: 'no' - to be revised
+ Name.Decorator: "bold #5c35cc", # class: 'nd' - to be revised
+ Name.Entity: "#ce5c00", # class: 'ni'
+ Name.Exception: "bold #cc0000", # class: 'ne'
+ Name.Function: "#000000", # class: 'nf'
+ Name.Property: "#000000", # class: 'py'
+ Name.Label: "#f57900", # class: 'nl'
+ Name.Namespace: "#000000", # class: 'nn' - to be revised
+ Name.Other: "#000000", # class: 'nx'
+ Name.Tag: "bold #204a87", # class: 'nt' - like a keyword
+ Name.Variable: "#000000", # class: 'nv' - to be revised
+ Name.Variable.Class: "#000000", # class: 'vc' - to be revised
+ Name.Variable.Global: "#000000", # class: 'vg' - to be revised
+ Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
+
+ # since the tango light blue does not show up well in text, we choose
+ # a pure blue instead.
+ Number: "bold #0000cf", # class: 'm'
+ Number.Float: "bold #0000cf", # class: 'mf'
+ Number.Hex: "bold #0000cf", # class: 'mh'
+ Number.Integer: "bold #0000cf", # class: 'mi'
+ Number.Integer.Long: "bold #0000cf", # class: 'il'
+ Number.Oct: "bold #0000cf", # class: 'mo'
+
+ Literal: "#000000", # class: 'l'
+ Literal.Date: "#000000", # class: 'ld'
+
+ String: "#4e9a06", # class: 's'
+ String.Backtick: "#4e9a06", # class: 'sb'
+ String.Char: "#4e9a06", # class: 'sc'
+ String.Doc: "italic #8f5902", # class: 'sd' - like a comment
+ String.Double: "#4e9a06", # class: 's2'
+ String.Escape: "#4e9a06", # class: 'se'
+ String.Heredoc: "#4e9a06", # class: 'sh'
+ String.Interpol: "#4e9a06", # class: 'si'
+ String.Other: "#4e9a06", # class: 'sx'
+ String.Regex: "#4e9a06", # class: 'sr'
+ String.Single: "#4e9a06", # class: 's1'
+ String.Symbol: "#4e9a06", # class: 'ss'
+
+ Generic: "#000000", # class: 'g'
+ Generic.Deleted: "#a40000", # class: 'gd'
+ Generic.Emph: "italic #000000", # class: 'ge'
+ Generic.Error: "#ef2929", # class: 'gr'
+ Generic.Heading: "bold #000080", # class: 'gh'
+ Generic.Inserted: "#00A000", # class: 'gi'
+ Generic.Output: "italic #000000", # class: 'go'
+ Generic.Prompt: "#8f5902", # class: 'gp'
+ Generic.Strong: "bold #000000", # class: 'gs'
+ Generic.Subheading: "bold #800080", # class: 'gu'
+ Generic.Traceback: "bold #a40000", # class: 'gt'
+ }
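
The tango docstring above notes that the style covers every known token type and can serve as a template. A hypothetical sketch of that use, assuming the documented behaviour that HtmlFormatter's style option accepts either a registered name or a Style subclass; MyTango and the overridden colours are made up for illustration:

from pygments.formatters import HtmlFormatter
from pygments.styles.tango import TangoStyle
from pygments.token import Comment, String

class MyTango(TangoStyle):
    # Start from the full tango mapping and override a couple of entries.
    styles = dict(TangoStyle.styles)
    styles[Comment] = "italic #4e9a06"   # Chameleon green from the palette above
    styles[String] = "#8f5902"           # Chocolate instead of the default green

formatter = HtmlFormatter(style=MyTango)
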
diff --git a/contrib/python/Pygments/py3/pygments/styles/trac.py b/contrib/python/Pygments/py3/pygments/styles/trac.py
index b2a1fd4d2e..bcbf88bf52 100644
--- a/contrib/python/Pygments/py3/pygments/styles/trac.py
+++ b/contrib/python/Pygments/py3/pygments/styles/trac.py
@@ -1,62 +1,62 @@
-"""
- pygments.styles.trac
- ~~~~~~~~~~~~~~~~~~~~
-
- Port of the default trac highlighter design.
-
+"""
+ pygments.styles.trac
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Port of the default trac highlighter design.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace
-
-
-class TracStyle(Style):
- """
- Port of the default trac highlighter design.
- """
-
- default_style = ''
-
- styles = {
- Whitespace: '#bbbbbb',
- Comment: 'italic #999988',
- Comment.Preproc: 'bold noitalic #999999',
- Comment.Special: 'bold #999999',
-
- Operator: 'bold',
-
- String: '#bb8844',
- String.Regex: '#808000',
-
- Number: '#009999',
-
- Keyword: 'bold',
- Keyword.Type: '#445588',
-
- Name.Builtin: '#999999',
- Name.Function: 'bold #990000',
- Name.Class: 'bold #445588',
- Name.Exception: 'bold #990000',
- Name.Namespace: '#555555',
- Name.Variable: '#008080',
- Name.Constant: '#008080',
- Name.Tag: '#000080',
- Name.Attribute: '#008080',
- Name.Entity: '#800080',
-
- Generic.Heading: '#999999',
- Generic.Subheading: '#aaaaaa',
- Generic.Deleted: 'bg:#ffdddd #000000',
- Generic.Inserted: 'bg:#ddffdd #000000',
- Generic.Error: '#aa0000',
- Generic.Emph: 'italic',
- Generic.Strong: 'bold',
- Generic.Prompt: '#555555',
- Generic.Output: '#888888',
- Generic.Traceback: '#aa0000',
-
- Error: 'bg:#e3d2d2 #a61717'
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace
+
+
+class TracStyle(Style):
+ """
+ Port of the default trac highlighter design.
+ """
+
+ default_style = ''
+
+ styles = {
+ Whitespace: '#bbbbbb',
+ Comment: 'italic #999988',
+ Comment.Preproc: 'bold noitalic #999999',
+ Comment.Special: 'bold #999999',
+
+ Operator: 'bold',
+
+ String: '#bb8844',
+ String.Regex: '#808000',
+
+ Number: '#009999',
+
+ Keyword: 'bold',
+ Keyword.Type: '#445588',
+
+ Name.Builtin: '#999999',
+ Name.Function: 'bold #990000',
+ Name.Class: 'bold #445588',
+ Name.Exception: 'bold #990000',
+ Name.Namespace: '#555555',
+ Name.Variable: '#008080',
+ Name.Constant: '#008080',
+ Name.Tag: '#000080',
+ Name.Attribute: '#008080',
+ Name.Entity: '#800080',
+
+ Generic.Heading: '#999999',
+ Generic.Subheading: '#aaaaaa',
+ Generic.Deleted: 'bg:#ffdddd #000000',
+ Generic.Inserted: 'bg:#ddffdd #000000',
+ Generic.Error: '#aa0000',
+ Generic.Emph: 'italic',
+ Generic.Strong: 'bold',
+ Generic.Prompt: '#555555',
+ Generic.Output: '#888888',
+ Generic.Traceback: '#aa0000',
+
+ Error: 'bg:#e3d2d2 #a61717'
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/vim.py b/contrib/python/Pygments/py3/pygments/styles/vim.py
index f498606bdb..4cceb1f995 100644
--- a/contrib/python/Pygments/py3/pygments/styles/vim.py
+++ b/contrib/python/Pygments/py3/pygments/styles/vim.py
@@ -1,62 +1,62 @@
-"""
- pygments.styles.vim
- ~~~~~~~~~~~~~~~~~~~
-
- A highlighting style for Pygments, inspired by vim.
-
+"""
+ pygments.styles.vim
+ ~~~~~~~~~~~~~~~~~~~
+
+ A highlighting style for Pygments, inspired by vim.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Whitespace, Token
-
-
-class VimStyle(Style):
- """
- Styles somewhat like vim 7.0
- """
-
- background_color = "#000000"
- highlight_color = "#222222"
- default_style = "#cccccc"
-
- styles = {
- Token: "#cccccc",
- Whitespace: "",
- Comment: "#000080",
- Comment.Preproc: "",
- Comment.Special: "bold #cd0000",
-
- Keyword: "#cdcd00",
- Keyword.Declaration: "#00cd00",
- Keyword.Namespace: "#cd00cd",
- Keyword.Pseudo: "",
- Keyword.Type: "#00cd00",
-
- Operator: "#3399cc",
- Operator.Word: "#cdcd00",
-
- Name: "",
- Name.Class: "#00cdcd",
- Name.Builtin: "#cd00cd",
- Name.Exception: "bold #666699",
- Name.Variable: "#00cdcd",
-
- String: "#cd0000",
- Number: "#cd00cd",
-
- Generic.Heading: "bold #000080",
- Generic.Subheading: "bold #800080",
- Generic.Deleted: "#cd0000",
- Generic.Inserted: "#00cd00",
- Generic.Error: "#FF0000",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold #000080",
- Generic.Output: "#888",
- Generic.Traceback: "#04D",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Generic, Whitespace, Token
+
+
+class VimStyle(Style):
+ """
+ Styles somewhat like vim 7.0
+ """
+
+ background_color = "#000000"
+ highlight_color = "#222222"
+ default_style = "#cccccc"
+
+ styles = {
+ Token: "#cccccc",
+ Whitespace: "",
+ Comment: "#000080",
+ Comment.Preproc: "",
+ Comment.Special: "bold #cd0000",
+
+ Keyword: "#cdcd00",
+ Keyword.Declaration: "#00cd00",
+ Keyword.Namespace: "#cd00cd",
+ Keyword.Pseudo: "",
+ Keyword.Type: "#00cd00",
+
+ Operator: "#3399cc",
+ Operator.Word: "#cdcd00",
+
+ Name: "",
+ Name.Class: "#00cdcd",
+ Name.Builtin: "#cd00cd",
+ Name.Exception: "bold #666699",
+ Name.Variable: "#00cdcd",
+
+ String: "#cd0000",
+ Number: "#cd00cd",
+
+ Generic.Heading: "bold #000080",
+ Generic.Subheading: "bold #800080",
+ Generic.Deleted: "#cd0000",
+ Generic.Inserted: "#00cd00",
+ Generic.Error: "#FF0000",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold #000080",
+ Generic.Output: "#888",
+ Generic.Traceback: "#04D",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/vs.py b/contrib/python/Pygments/py3/pygments/styles/vs.py
index a504f912a2..1ca38d8c38 100644
--- a/contrib/python/Pygments/py3/pygments/styles/vs.py
+++ b/contrib/python/Pygments/py3/pygments/styles/vs.py
@@ -1,37 +1,37 @@
-"""
- pygments.styles.vs
- ~~~~~~~~~~~~~~~~~~
-
- Simple style with MS Visual Studio colors.
-
+"""
+ pygments.styles.vs
+ ~~~~~~~~~~~~~~~~~~
+
+ Simple style with MS Visual Studio colors.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Operator, Generic
-
-
-class VisualStudioStyle(Style):
-
- background_color = "#ffffff"
- default_style = ""
-
- styles = {
- Comment: "#008000",
- Comment.Preproc: "#0000ff",
- Keyword: "#0000ff",
- Operator.Word: "#0000ff",
- Keyword.Type: "#2b91af",
- Name.Class: "#2b91af",
- String: "#a31515",
-
- Generic.Heading: "bold",
- Generic.Subheading: "bold",
- Generic.Emph: "italic",
- Generic.Strong: "bold",
- Generic.Prompt: "bold",
-
- Error: "border:#FF0000"
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Operator, Generic
+
+
+class VisualStudioStyle(Style):
+
+ background_color = "#ffffff"
+ default_style = ""
+
+ styles = {
+ Comment: "#008000",
+ Comment.Preproc: "#0000ff",
+ Keyword: "#0000ff",
+ Operator.Word: "#0000ff",
+ Keyword.Type: "#2b91af",
+ Name.Class: "#2b91af",
+ String: "#a31515",
+
+ Generic.Heading: "bold",
+ Generic.Subheading: "bold",
+ Generic.Emph: "italic",
+ Generic.Strong: "bold",
+ Generic.Prompt: "bold",
+
+ Error: "border:#FF0000"
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/xcode.py b/contrib/python/Pygments/py3/pygments/styles/xcode.py
index fd8621e7d1..391a4c76d0 100644
--- a/contrib/python/Pygments/py3/pygments/styles/xcode.py
+++ b/contrib/python/Pygments/py3/pygments/styles/xcode.py
@@ -1,50 +1,50 @@
-"""
- pygments.styles.xcode
- ~~~~~~~~~~~~~~~~~~~~~
-
- Style similar to the `Xcode` default theme.
-
+"""
+ pygments.styles.xcode
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Style similar to the `Xcode` default theme.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pygments.style import Style
-from pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Literal
-
-
-class XcodeStyle(Style):
- """
- Style similar to the Xcode default colouring theme.
- """
-
- default_style = ''
-
- styles = {
- Comment: '#177500',
- Comment.Preproc: '#633820',
-
- String: '#C41A16',
- String.Char: '#2300CE',
-
- Operator: '#000000',
-
- Keyword: '#A90D91',
-
- Name: '#000000',
- Name.Attribute: '#836C28',
- Name.Class: '#3F6E75',
- Name.Function: '#000000',
- Name.Builtin: '#A90D91',
- # In Obj-C code this token is used to colour Cocoa types
- Name.Builtin.Pseudo: '#5B269A',
- Name.Variable: '#000000',
- Name.Tag: '#000000',
- Name.Decorator: '#000000',
- # Workaround for a BUG here: lexer treats multiline method signatres as labels
- Name.Label: '#000000',
-
- Literal: '#1C01CE',
- Number: '#1C01CE',
- Error: '#000000',
- }
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import Keyword, Name, Comment, String, Error, \
+ Number, Operator, Literal
+
+
+class XcodeStyle(Style):
+ """
+ Style similar to the Xcode default colouring theme.
+ """
+
+ default_style = ''
+
+ styles = {
+ Comment: '#177500',
+ Comment.Preproc: '#633820',
+
+ String: '#C41A16',
+ String.Char: '#2300CE',
+
+ Operator: '#000000',
+
+ Keyword: '#A90D91',
+
+ Name: '#000000',
+ Name.Attribute: '#836C28',
+ Name.Class: '#3F6E75',
+ Name.Function: '#000000',
+ Name.Builtin: '#A90D91',
+ # In Obj-C code this token is used to colour Cocoa types
+ Name.Builtin.Pseudo: '#5B269A',
+ Name.Variable: '#000000',
+ Name.Tag: '#000000',
+ Name.Decorator: '#000000',
+ # Workaround for a BUG here: lexer treats multiline method signatres as labels
+ Name.Label: '#000000',
+
+ Literal: '#1C01CE',
+ Number: '#1C01CE',
+ Error: '#000000',
+ }
diff --git a/contrib/python/Pygments/py3/pygments/token.py b/contrib/python/Pygments/py3/pygments/token.py
index 9013acb709..2fcb0f4466 100644
--- a/contrib/python/Pygments/py3/pygments/token.py
+++ b/contrib/python/Pygments/py3/pygments/token.py
@@ -1,212 +1,212 @@
-"""
- pygments.token
- ~~~~~~~~~~~~~~
-
- Basic token types and the standard tokens.
-
+"""
+ pygments.token
+ ~~~~~~~~~~~~~~
+
+ Basic token types and the standard tokens.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-class _TokenType(tuple):
- parent = None
-
- def split(self):
- buf = []
- node = self
- while node is not None:
- buf.append(node)
- node = node.parent
- buf.reverse()
- return buf
-
- def __init__(self, *args):
- # no need to call super.__init__
- self.subtypes = set()
-
- def __contains__(self, val):
- return self is val or (
- type(val) is self.__class__ and
- val[:len(self)] == self
- )
-
- def __getattr__(self, val):
- if not val or not val[0].isupper():
- return tuple.__getattribute__(self, val)
- new = _TokenType(self + (val,))
- setattr(self, val, new)
- self.subtypes.add(new)
- new.parent = self
- return new
-
- def __repr__(self):
- return 'Token' + (self and '.' or '') + '.'.join(self)
-
+ :license: BSD, see LICENSE for details.
+"""
+
+
+class _TokenType(tuple):
+ parent = None
+
+ def split(self):
+ buf = []
+ node = self
+ while node is not None:
+ buf.append(node)
+ node = node.parent
+ buf.reverse()
+ return buf
+
+ def __init__(self, *args):
+ # no need to call super.__init__
+ self.subtypes = set()
+
+ def __contains__(self, val):
+ return self is val or (
+ type(val) is self.__class__ and
+ val[:len(self)] == self
+ )
+
+ def __getattr__(self, val):
+ if not val or not val[0].isupper():
+ return tuple.__getattribute__(self, val)
+ new = _TokenType(self + (val,))
+ setattr(self, val, new)
+ self.subtypes.add(new)
+ new.parent = self
+ return new
+
+ def __repr__(self):
+ return 'Token' + (self and '.' or '') + '.'.join(self)
+
def __copy__(self):
# These instances are supposed to be singletons
return self
-
+
def __deepcopy__(self, memo):
# These instances are supposed to be singletons
return self
-
+
Token = _TokenType()
-# Special token types
+# Special token types
Text = Token.Text
Whitespace = Text.Whitespace
Escape = Token.Escape
Error = Token.Error
-# Text that doesn't belong to this lexer (e.g. HTML in PHP)
+# Text that doesn't belong to this lexer (e.g. HTML in PHP)
Other = Token.Other
-
-# Common token types for source code
+
+# Common token types for source code
Keyword = Token.Keyword
Name = Token.Name
Literal = Token.Literal
String = Literal.String
Number = Literal.Number
-Punctuation = Token.Punctuation
+Punctuation = Token.Punctuation
Operator = Token.Operator
Comment = Token.Comment
-
-# Generic types for non-source code
+
+# Generic types for non-source code
Generic = Token.Generic
-
+
# String and some others are not direct children of Token.
-# alias them:
-Token.Token = Token
-Token.String = String
-Token.Number = Number
-
-
-def is_token_subtype(ttype, other):
- """
- Return True if ``ttype`` is a subtype of ``other``.
-
- exists for backwards compatibility. use ``ttype in other`` now.
- """
- return ttype in other
-
-
-def string_to_tokentype(s):
- """
- Convert a string into a token type::
-
- >>> string_to_token('String.Double')
- Token.Literal.String.Double
- >>> string_to_token('Token.Literal.Number')
- Token.Literal.Number
- >>> string_to_token('')
- Token
-
- Tokens that are already tokens are returned unchanged:
-
- >>> string_to_token(String)
- Token.Literal.String
- """
- if isinstance(s, _TokenType):
- return s
- if not s:
- return Token
- node = Token
- for item in s.split('.'):
- node = getattr(node, item)
- return node
-
-
-# Map standard token types to short names, used in CSS class naming.
-# If you add a new item, please be sure to run this file to perform
-# a consistency check for duplicate values.
-STANDARD_TYPES = {
- Token: '',
-
- Text: '',
- Whitespace: 'w',
- Escape: 'esc',
- Error: 'err',
- Other: 'x',
-
- Keyword: 'k',
- Keyword.Constant: 'kc',
- Keyword.Declaration: 'kd',
- Keyword.Namespace: 'kn',
- Keyword.Pseudo: 'kp',
- Keyword.Reserved: 'kr',
- Keyword.Type: 'kt',
-
- Name: 'n',
- Name.Attribute: 'na',
- Name.Builtin: 'nb',
- Name.Builtin.Pseudo: 'bp',
- Name.Class: 'nc',
- Name.Constant: 'no',
- Name.Decorator: 'nd',
- Name.Entity: 'ni',
- Name.Exception: 'ne',
- Name.Function: 'nf',
+# alias them:
+Token.Token = Token
+Token.String = String
+Token.Number = Number
+
+
+def is_token_subtype(ttype, other):
+ """
+ Return True if ``ttype`` is a subtype of ``other``.
+
+ exists for backwards compatibility. use ``ttype in other`` now.
+ """
+ return ttype in other
+
+
+def string_to_tokentype(s):
+ """
+ Convert a string into a token type::
+
+ >>> string_to_token('String.Double')
+ Token.Literal.String.Double
+ >>> string_to_token('Token.Literal.Number')
+ Token.Literal.Number
+ >>> string_to_token('')
+ Token
+
+ Tokens that are already tokens are returned unchanged:
+
+ >>> string_to_token(String)
+ Token.Literal.String
+ """
+ if isinstance(s, _TokenType):
+ return s
+ if not s:
+ return Token
+ node = Token
+ for item in s.split('.'):
+ node = getattr(node, item)
+ return node
+
+
+# Map standard token types to short names, used in CSS class naming.
+# If you add a new item, please be sure to run this file to perform
+# a consistency check for duplicate values.
+STANDARD_TYPES = {
+ Token: '',
+
+ Text: '',
+ Whitespace: 'w',
+ Escape: 'esc',
+ Error: 'err',
+ Other: 'x',
+
+ Keyword: 'k',
+ Keyword.Constant: 'kc',
+ Keyword.Declaration: 'kd',
+ Keyword.Namespace: 'kn',
+ Keyword.Pseudo: 'kp',
+ Keyword.Reserved: 'kr',
+ Keyword.Type: 'kt',
+
+ Name: 'n',
+ Name.Attribute: 'na',
+ Name.Builtin: 'nb',
+ Name.Builtin.Pseudo: 'bp',
+ Name.Class: 'nc',
+ Name.Constant: 'no',
+ Name.Decorator: 'nd',
+ Name.Entity: 'ni',
+ Name.Exception: 'ne',
+ Name.Function: 'nf',
Name.Function.Magic: 'fm',
- Name.Property: 'py',
- Name.Label: 'nl',
- Name.Namespace: 'nn',
- Name.Other: 'nx',
- Name.Tag: 'nt',
- Name.Variable: 'nv',
- Name.Variable.Class: 'vc',
- Name.Variable.Global: 'vg',
- Name.Variable.Instance: 'vi',
+ Name.Property: 'py',
+ Name.Label: 'nl',
+ Name.Namespace: 'nn',
+ Name.Other: 'nx',
+ Name.Tag: 'nt',
+ Name.Variable: 'nv',
+ Name.Variable.Class: 'vc',
+ Name.Variable.Global: 'vg',
+ Name.Variable.Instance: 'vi',
Name.Variable.Magic: 'vm',
-
- Literal: 'l',
- Literal.Date: 'ld',
-
- String: 's',
+
+ Literal: 'l',
+ Literal.Date: 'ld',
+
+ String: 's',
String.Affix: 'sa',
- String.Backtick: 'sb',
- String.Char: 'sc',
+ String.Backtick: 'sb',
+ String.Char: 'sc',
String.Delimiter: 'dl',
- String.Doc: 'sd',
- String.Double: 's2',
- String.Escape: 'se',
- String.Heredoc: 'sh',
- String.Interpol: 'si',
- String.Other: 'sx',
- String.Regex: 'sr',
- String.Single: 's1',
- String.Symbol: 'ss',
-
- Number: 'm',
- Number.Bin: 'mb',
- Number.Float: 'mf',
- Number.Hex: 'mh',
- Number.Integer: 'mi',
- Number.Integer.Long: 'il',
- Number.Oct: 'mo',
-
- Operator: 'o',
- Operator.Word: 'ow',
-
- Punctuation: 'p',
-
- Comment: 'c',
- Comment.Hashbang: 'ch',
- Comment.Multiline: 'cm',
- Comment.Preproc: 'cp',
- Comment.PreprocFile: 'cpf',
- Comment.Single: 'c1',
- Comment.Special: 'cs',
-
- Generic: 'g',
- Generic.Deleted: 'gd',
- Generic.Emph: 'ge',
- Generic.Error: 'gr',
- Generic.Heading: 'gh',
- Generic.Inserted: 'gi',
- Generic.Output: 'go',
- Generic.Prompt: 'gp',
- Generic.Strong: 'gs',
- Generic.Subheading: 'gu',
- Generic.Traceback: 'gt',
-}
+ String.Doc: 'sd',
+ String.Double: 's2',
+ String.Escape: 'se',
+ String.Heredoc: 'sh',
+ String.Interpol: 'si',
+ String.Other: 'sx',
+ String.Regex: 'sr',
+ String.Single: 's1',
+ String.Symbol: 'ss',
+
+ Number: 'm',
+ Number.Bin: 'mb',
+ Number.Float: 'mf',
+ Number.Hex: 'mh',
+ Number.Integer: 'mi',
+ Number.Integer.Long: 'il',
+ Number.Oct: 'mo',
+
+ Operator: 'o',
+ Operator.Word: 'ow',
+
+ Punctuation: 'p',
+
+ Comment: 'c',
+ Comment.Hashbang: 'ch',
+ Comment.Multiline: 'cm',
+ Comment.Preproc: 'cp',
+ Comment.PreprocFile: 'cpf',
+ Comment.Single: 'c1',
+ Comment.Special: 'cs',
+
+ Generic: 'g',
+ Generic.Deleted: 'gd',
+ Generic.Emph: 'ge',
+ Generic.Error: 'gr',
+ Generic.Heading: 'gh',
+ Generic.Inserted: 'gi',
+ Generic.Output: 'go',
+ Generic.Prompt: 'gp',
+ Generic.Strong: 'gs',
+ Generic.Subheading: 'gu',
+ Generic.Traceback: 'gt',
+}
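The helpers restored above fit together as follows: string_to_tokentype walks a dotted name starting at Token, is_token_subtype is a thin wrapper around containment on the token hierarchy, and STANDARD_TYPES supplies the short names the HTML formatter uses as CSS classes. A minimal sketch of that interplay, assuming the vendored module is importable as pygments.token (the printed short names are read off the mapping above):

    from pygments.token import (
        STANDARD_TYPES, Keyword, String, Token,
        is_token_subtype, string_to_tokentype,
    )

    # string_to_tokentype resolves a dotted name starting at Token.
    ttype = string_to_tokentype('String.Double')
    assert ttype is Token.Literal.String.Double

    # Subtype checks are plain containment on the token hierarchy;
    # is_token_subtype is only kept for backwards compatibility.
    assert is_token_subtype(ttype, String)
    assert ttype in String

    # STANDARD_TYPES maps standard token types to short CSS class names.
    print(STANDARD_TYPES[ttype])                # s2
    print(STANDARD_TYPES[Keyword.Declaration])  # kd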
diff --git a/contrib/python/Pygments/py3/pygments/unistring.py b/contrib/python/Pygments/py3/pygments/unistring.py
index 2872985c14..e15580d296 100644
--- a/contrib/python/Pygments/py3/pygments/unistring.py
+++ b/contrib/python/Pygments/py3/pygments/unistring.py
@@ -1,153 +1,153 @@
-"""
- pygments.unistring
- ~~~~~~~~~~~~~~~~~~
-
- Strings of all Unicode characters of a certain category.
- Used for matching in Unicode-aware languages. Run to regenerate.
-
- Inspired by chartypes_create.py from the MoinMoin project.
-
+"""
+ pygments.unistring
+ ~~~~~~~~~~~~~~~~~~
+
+ Strings of all Unicode characters of a certain category.
+ Used for matching in Unicode-aware languages. Run to regenerate.
+
+ Inspired by chartypes_create.py from the MoinMoin project.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
+ :license: BSD, see LICENSE for details.
+"""
+
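The category strings defined below are raw character-class bodies meant to be interpolated into regular expressions; lexers combine them to build Unicode-aware identifier patterns. A small usage sketch, assuming the module is importable as pygments.unistring (the sample word is only an illustration):

    import re
    from pygments import unistring as uni

    # Lu and Ll (defined further down) are the uppercase/lowercase letter
    # classes; dropping them into [...] yields a Unicode-aware matcher.
    letters = re.compile('[%s%s]+' % (uni.Lu, uni.Ll))
    assert letters.match('Пример')  # Cyrillic letters are covered too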
Cc = '\x00-\x1f\x7f-\x9f'
-
+
Cf = '\xad\u0600-\u0605\u061c\u06dd\u070f\u08e2\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\U000110bd\U000110cd\U0001bca0-\U0001bca3\U0001d173-\U0001d17a\U000e0001\U000e0020-\U000e007f'
-
+
Cn = '\u0378-\u0379\u0380-\u0383\u038b\u038d\u03a2\u0530\u0557-\u0558\u058b-\u058c\u0590\u05c8-\u05cf\u05eb-\u05ee\u05f5-\u05ff\u061d\u070e\u074b-\u074c\u07b2-\u07bf\u07fb-\u07fc\u082e-\u082f\u083f\u085c-\u085d\u085f\u086b-\u089f\u08b5\u08be-\u08d2\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09c5-\u09c6\u09c9-\u09ca\u09cf-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09ff-\u0a00\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a50\u0a52-\u0a58\u0a5d\u0a5f-\u0a65\u0a77-\u0a80\u0a84\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0acf\u0ad1-\u0adf\u0ae4-\u0ae5\u0af2-\u0af8\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34\u0b3a-\u0b3b\u0b45-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b64-\u0b65\u0b78-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bcf\u0bd1-\u0bd6\u0bd8-\u0be5\u0bfb-\u0bff\u0c0d\u0c11\u0c29\u0c3a-\u0c3c\u0c45\u0c49\u0c4e-\u0c54\u0c57\u0c5b-\u0c5f\u0c64-\u0c65\u0c70-\u0c77\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbb\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce4-\u0ce5\u0cf0\u0cf3-\u0cff\u0d04\u0d0d\u0d11\u0d45\u0d49\u0d50-\u0d53\u0d64-\u0d65\u0d80-\u0d81\u0d84\u0d97-\u0d99\u0db2\u0dbc\u0dbe-\u0dbf\u0dc7-\u0dc9\u0dcb-\u0dce\u0dd5\u0dd7\u0de0-\u0de5\u0df0-\u0df1\u0df5-\u0e00\u0e3b-\u0e3e\u0e5c-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0edb\u0ee0-\u0eff\u0f48\u0f6d-\u0f70\u0f98\u0fbd\u0fcd\u0fdb-\u0fff\u10c6\u10c8-\u10cc\u10ce-\u10cf\u1249\u124e-\u124f\u1257\u1259\u125e-\u125f\u1289\u128e-\u128f\u12b1\u12b6-\u12b7\u12bf\u12c1\u12c6-\u12c7\u12d7\u1311\u1316-\u1317\u135b-\u135c\u137d-\u137f\u139a-\u139f\u13f6-\u13f7\u13fe-\u13ff\u169d-\u169f\u16f9-\u16ff\u170d\u1715-\u171f\u1737-\u173f\u1754-\u175f\u176d\u1771\u1774-\u177f\u17de-\u17df\u17ea-\u17ef\u17fa-\u17ff\u180f\u181a-\u181f\u1879-\u187f\u18ab-\u18af\u18f6-\u18ff\u191f\u192c-\u192f\u193c-\u193f\u1941-\u1943\u196e-\u196f\u1975-\u197f\u19ac-\u19af\u19ca-\u19cf\u19db-\u19dd\u1a1c-\u1a1d\u1a5f\u1a7d-\u1a7e\u1a8a-\u1a8f\u1a9a-\u1a9f\u1aae-\u1aaf\u1abf-\u1aff\u1b4c-\u1b4f\u1b7d-\u1b7f\u1bf4-\u1bfb\u1c38-\u1c3a\u1c4a-\u1c4c\u1c89-\u1c8f\u1cbb-\u1cbc\u1cc8-\u1ccf\u1cfa-\u1cff\u1dfa\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fc5\u1fd4-\u1fd5\u1fdc\u1ff0-\u1ff1\u1ff5\u1fff\u2065\u2072-\u2073\u208f\u209d-\u209f\u20c0-\u20cf\u20f1-\u20ff\u218c-\u218f\u2427-\u243f\u244b-\u245f\u2b74-\u2b75\u2b96-\u2b97\u2bc9\u2bff\u2c2f\u2c5f\u2cf4-\u2cf8\u2d26\u2d28-\u2d2c\u2d2e-\u2d2f\u2d68-\u2d6e\u2d71-\u2d7e\u2d97-\u2d9f\u2da7\u2daf\u2db7\u2dbf\u2dc7\u2dcf\u2dd7\u2ddf\u2e4f-\u2e7f\u2e9a\u2ef4-\u2eff\u2fd6-\u2fef\u2ffc-\u2fff\u3040\u3097-\u3098\u3100-\u3104\u3130\u318f\u31bb-\u31bf\u31e4-\u31ef\u321f\u32ff\u4db6-\u4dbf\u9ff0-\u9fff\ua48d-\ua48f\ua4c7-\ua4cf\ua62c-\ua63f\ua6f8-\ua6ff\ua7ba-\ua7f6\ua82c-\ua82f\ua83a-\ua83f\ua878-\ua87f\ua8c6-\ua8cd\ua8da-\ua8df\ua954-\ua95e\ua97d-\ua97f\ua9ce\ua9da-\ua9dd\ua9ff\uaa37-\uaa3f\uaa4e-\uaa4f\uaa5a-\uaa5b\uaac3-\uaada\uaaf7-\uab00\uab07-\uab08\uab0f-\uab10\uab17-\uab1f\uab27\uab2f\uab66-\uab6f\uabee-\uabef\uabfa-\uabff\ud7a4-\ud7af\ud7c7-\ud7ca\ud7fc-\ud7ff\ufa6e-\ufa6f\ufada-\ufaff\ufb07-\ufb12\ufb18-\ufb1c\ufb37\ufb3d\ufb3f\ufb42\ufb45\ufbc2-\ufbd2\ufd40-\ufd4f\ufd90-\ufd91\ufdc8-\ufdef\ufdfe-\ufdff\ufe1a-\ufe1f\ufe53\ufe67\ufe6c-\ufe6f\ufe75\ufefd-\uf
efe\uff00\uffbf-\uffc1\uffc8-\uffc9\uffd0-\uffd1\uffd8-\uffd9\uffdd-\uffdf\uffe7\uffef-\ufff8\ufffe-\uffff\U0001000c\U00010027\U0001003b\U0001003e\U0001004e-\U0001004f\U0001005e-\U0001007f\U000100fb-\U000100ff\U00010103-\U00010106\U00010134-\U00010136\U0001018f\U0001019c-\U0001019f\U000101a1-\U000101cf\U000101fe-\U0001027f\U0001029d-\U0001029f\U000102d1-\U000102df\U000102fc-\U000102ff\U00010324-\U0001032c\U0001034b-\U0001034f\U0001037b-\U0001037f\U0001039e\U000103c4-\U000103c7\U000103d6-\U000103ff\U0001049e-\U0001049f\U000104aa-\U000104af\U000104d4-\U000104d7\U000104fc-\U000104ff\U00010528-\U0001052f\U00010564-\U0001056e\U00010570-\U000105ff\U00010737-\U0001073f\U00010756-\U0001075f\U00010768-\U000107ff\U00010806-\U00010807\U00010809\U00010836\U00010839-\U0001083b\U0001083d-\U0001083e\U00010856\U0001089f-\U000108a6\U000108b0-\U000108df\U000108f3\U000108f6-\U000108fa\U0001091c-\U0001091e\U0001093a-\U0001093e\U00010940-\U0001097f\U000109b8-\U000109bb\U000109d0-\U000109d1\U00010a04\U00010a07-\U00010a0b\U00010a14\U00010a18\U00010a36-\U00010a37\U00010a3b-\U00010a3e\U00010a49-\U00010a4f\U00010a59-\U00010a5f\U00010aa0-\U00010abf\U00010ae7-\U00010aea\U00010af7-\U00010aff\U00010b36-\U00010b38\U00010b56-\U00010b57\U00010b73-\U00010b77\U00010b92-\U00010b98\U00010b9d-\U00010ba8\U00010bb0-\U00010bff\U00010c49-\U00010c7f\U00010cb3-\U00010cbf\U00010cf3-\U00010cf9\U00010d28-\U00010d2f\U00010d3a-\U00010e5f\U00010e7f-\U00010eff\U00010f28-\U00010f2f\U00010f5a-\U00010fff\U0001104e-\U00011051\U00011070-\U0001107e\U000110c2-\U000110cc\U000110ce-\U000110cf\U000110e9-\U000110ef\U000110fa-\U000110ff\U00011135\U00011147-\U0001114f\U00011177-\U0001117f\U000111ce-\U000111cf\U000111e0\U000111f5-\U000111ff\U00011212\U0001123f-\U0001127f\U00011287\U00011289\U0001128e\U0001129e\U000112aa-\U000112af\U000112eb-\U000112ef\U000112fa-\U000112ff\U00011304\U0001130d-\U0001130e\U00011311-\U00011312\U00011329\U00011331\U00011334\U0001133a\U00011345-\U00011346\U00011349-\U0001134a\U0001134e-\U0001134f\U00011351-\U00011356\U00011358-\U0001135c\U00011364-\U00011365\U0001136d-\U0001136f\U00011375-\U000113ff\U0001145a\U0001145c\U0001145f-\U0001147f\U000114c8-\U000114cf\U000114da-\U0001157f\U000115b6-\U000115b7\U000115de-\U000115ff\U00011645-\U0001164f\U0001165a-\U0001165f\U0001166d-\U0001167f\U000116b8-\U000116bf\U000116ca-\U000116ff\U0001171b-\U0001171c\U0001172c-\U0001172f\U00011740-\U000117ff\U0001183c-\U0001189f\U000118f3-\U000118fe\U00011900-\U000119ff\U00011a48-\U00011a4f\U00011a84-\U00011a85\U00011aa3-\U00011abf\U00011af9-\U00011bff\U00011c09\U00011c37\U00011c46-\U00011c4f\U00011c6d-\U00011c6f\U00011c90-\U00011c91\U00011ca8\U00011cb7-\U00011cff\U00011d07\U00011d0a\U00011d37-\U00011d39\U00011d3b\U00011d3e\U00011d48-\U00011d4f\U00011d5a-\U00011d5f\U00011d66\U00011d69\U00011d8f\U00011d92\U00011d99-\U00011d9f\U00011daa-\U00011edf\U00011ef9-\U00011fff\U0001239a-\U000123ff\U0001246f\U00012475-\U0001247f\U00012544-\U00012fff\U0001342f-\U000143ff\U00014647-\U000167ff\U00016a39-\U00016a3f\U00016a5f\U00016a6a-\U00016a6d\U00016a70-\U00016acf\U00016aee-\U00016aef\U00016af6-\U00016aff\U00016b46-\U00016b4f\U00016b5a\U00016b62\U00016b78-\U00016b7c\U00016b90-\U00016e3f\U00016e9b-\U00016eff\U00016f45-\U00016f4f\U00016f7f-\U00016f8e\U00016fa0-\U00016fdf\U00016fe2-\U00016fff\U000187f2-\U000187ff\U00018af3-\U0001afff\U0001b11f-\U0001b16f\U0001b2fc-\U0001bbff\U0001bc6b-\U0001bc6f\U0001bc7d-\U0001bc7f\U0001bc89-\U0001bc8f\U0001bc9a-\U0001bc9b\U0001bca4-\U0001cfff\U0001d0f6-\U0001d0ff\U0001d127-\U0001d128\U0001d1e9-\U0001d1ff\U0001d246-\U0001d2df\U000
1d2f4-\U0001d2ff\U0001d357-\U0001d35f\U0001d379-\U0001d3ff\U0001d455\U0001d49d\U0001d4a0-\U0001d4a1\U0001d4a3-\U0001d4a4\U0001d4a7-\U0001d4a8\U0001d4ad\U0001d4ba\U0001d4bc\U0001d4c4\U0001d506\U0001d50b-\U0001d50c\U0001d515\U0001d51d\U0001d53a\U0001d53f\U0001d545\U0001d547-\U0001d549\U0001d551\U0001d6a6-\U0001d6a7\U0001d7cc-\U0001d7cd\U0001da8c-\U0001da9a\U0001daa0\U0001dab0-\U0001dfff\U0001e007\U0001e019-\U0001e01a\U0001e022\U0001e025\U0001e02b-\U0001e7ff\U0001e8c5-\U0001e8c6\U0001e8d7-\U0001e8ff\U0001e94b-\U0001e94f\U0001e95a-\U0001e95d\U0001e960-\U0001ec70\U0001ecb5-\U0001edff\U0001ee04\U0001ee20\U0001ee23\U0001ee25-\U0001ee26\U0001ee28\U0001ee33\U0001ee38\U0001ee3a\U0001ee3c-\U0001ee41\U0001ee43-\U0001ee46\U0001ee48\U0001ee4a\U0001ee4c\U0001ee50\U0001ee53\U0001ee55-\U0001ee56\U0001ee58\U0001ee5a\U0001ee5c\U0001ee5e\U0001ee60\U0001ee63\U0001ee65-\U0001ee66\U0001ee6b\U0001ee73\U0001ee78\U0001ee7d\U0001ee7f\U0001ee8a\U0001ee9c-\U0001eea0\U0001eea4\U0001eeaa\U0001eebc-\U0001eeef\U0001eef2-\U0001efff\U0001f02c-\U0001f02f\U0001f094-\U0001f09f\U0001f0af-\U0001f0b0\U0001f0c0\U0001f0d0\U0001f0f6-\U0001f0ff\U0001f10d-\U0001f10f\U0001f16c-\U0001f16f\U0001f1ad-\U0001f1e5\U0001f203-\U0001f20f\U0001f23c-\U0001f23f\U0001f249-\U0001f24f\U0001f252-\U0001f25f\U0001f266-\U0001f2ff\U0001f6d5-\U0001f6df\U0001f6ed-\U0001f6ef\U0001f6fa-\U0001f6ff\U0001f774-\U0001f77f\U0001f7d9-\U0001f7ff\U0001f80c-\U0001f80f\U0001f848-\U0001f84f\U0001f85a-\U0001f85f\U0001f888-\U0001f88f\U0001f8ae-\U0001f8ff\U0001f90c-\U0001f90f\U0001f93f\U0001f971-\U0001f972\U0001f977-\U0001f979\U0001f97b\U0001f9a3-\U0001f9af\U0001f9ba-\U0001f9bf\U0001f9c3-\U0001f9cf\U0001fa00-\U0001fa5f\U0001fa6e-\U0001ffff\U0002a6d7-\U0002a6ff\U0002b735-\U0002b73f\U0002b81e-\U0002b81f\U0002cea2-\U0002ceaf\U0002ebe1-\U0002f7ff\U0002fa1e-\U000e0000\U000e0002-\U000e001f\U000e0080-\U000e00ff\U000e01f0-\U000effff\U000ffffe-\U000fffff\U0010fffe-\U0010ffff'
-
+
Co = '\ue000-\uf8ff\U000f0000-\U000ffffd\U00100000-\U0010fffd'
-
+
Cs = '\ud800-\udbff\\\udc00\udc01-\udfff'
-
+
Ll = 'a-z\xb5\xdf-\xf6\xf8-\xff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0529\u052b\u052d\u052f\u0560-\u0588\u10d0-\u10fa\u10fd-\u10ff\u13f8-\u13fd\u1c80-\u1c88\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\u
a685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua699\ua69b\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793-\ua795\ua797\ua799\ua79b\ua79d\ua79f\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7af\ua7b5\ua7b7\ua7b9\ua7fa\uab30-\uab5a\uab60-\uab65\uab70-\uabbf\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\U00010428-\U0001044f\U000104d8-\U000104fb\U00010cc0-\U00010cf2\U000118c0-\U000118df\U00016e60-\U00016e7f\U0001d41a-\U0001d433\U0001d44e-\U0001d454\U0001d456-\U0001d467\U0001d482-\U0001d49b\U0001d4b6-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d4cf\U0001d4ea-\U0001d503\U0001d51e-\U0001d537\U0001d552-\U0001d56b\U0001d586-\U0001d59f\U0001d5ba-\U0001d5d3\U0001d5ee-\U0001d607\U0001d622-\U0001d63b\U0001d656-\U0001d66f\U0001d68a-\U0001d6a5\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6e1\U0001d6fc-\U0001d714\U0001d716-\U0001d71b\U0001d736-\U0001d74e\U0001d750-\U0001d755\U0001d770-\U0001d788\U0001d78a-\U0001d78f\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7c9\U0001d7cb\U0001e922-\U0001e943'
-
+
Lm = '\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua69c-\ua69d\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\ua9e6\uaa70\uaadd\uaaf3-\uaaf4\uab5c-\uab5f\uff70\uff9e-\uff9f\U00016b40-\U00016b43\U00016f93-\U00016f9f\U00016fe0-\U00016fe1'
-
+
Lo = '\xaa\xba\u01bb\u01c0-\u01c3\u0294\u05d0-\u05ea\u05ef-\u05f2\u0620-\u063f\u0641-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u0800-\u0815\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0972-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32-\u0e33\u0e40-\u0e45\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2-\u0eb3\u0ebd\u0ec0-\u0ec4\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u1100-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16f1-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17dc\u1820-\u1842\u1844-\u1878\u1880-\u1884\u1887-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c77\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u2135-\u2138\u2d30-\u2d67\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3006\u303c\u3041-\u3096\u309f\u30a1-\u30fa\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua014\ua016-\ua48c\ua4d0-\ua4f7\ua500-\ua60b\ua610-\ua61f\ua62a-\ua62b\ua66e\ua6a0-\ua6e5\ua78f\ua7f7\ua7fb-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9e0-\ua9e4\ua9e7-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa6f\uaa71-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadc\uaae0-\uaaea\uaaf2\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff66-\uff6f\uff71-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U000
10080-\U000100fa\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U00010340\U00010342-\U00010349\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U00010450-\U0001049d\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016f00-\U00016f44\U00016f50\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001e800-\U0001e8c4\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
-
+
Lt = '\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc'
-
+
Lu = 'A-Z\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u037f\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0528\u052a\u052c\u052e\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u13a0-\u13f5\u1c90-\u1cba\u1cbd-\u1cbf\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua698\ua69a\ua722\ua
724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua796\ua798\ua79a\ua79c\ua79e\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa-\ua7ae\ua7b0-\ua7b4\ua7b6\ua7b8\uff21-\uff3a\U00010400-\U00010427\U000104b0-\U000104d3\U00010c80-\U00010cb2\U000118a0-\U000118bf\U00016e40-\U00016e5f\U0001d400-\U0001d419\U0001d434-\U0001d44d\U0001d468-\U0001d481\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b5\U0001d4d0-\U0001d4e9\U0001d504-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d538-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d56c-\U0001d585\U0001d5a0-\U0001d5b9\U0001d5d4-\U0001d5ed\U0001d608-\U0001d621\U0001d63c-\U0001d655\U0001d670-\U0001d689\U0001d6a8-\U0001d6c0\U0001d6e2-\U0001d6fa\U0001d71c-\U0001d734\U0001d756-\U0001d76e\U0001d790-\U0001d7a8\U0001d7ca\U0001e900-\U0001e921'
-
+
Mc = '\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u1cf7\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaa7d\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec\U00011000\U00011002\U00011082\U000110b0-\U000110b2\U000110b7-\U000110b8\U0001112c\U00011145-\U00011146\U00011182\U000111b3-\U000111b5\U000111bf-\U000111c0\U0001122c-\U0001122e\U00011232-\U00011233\U00011235\U000112e0-\U000112e2\U00011302-\U00011303\U0001133e-\U0001133f\U00011341-\U00011344\U00011347-\U00011348\U0001134b-\U0001134d\U00011357\U00011362-\U00011363\U00011435-\U00011437\U00011440-\U00011441\U00011445\U000114b0-\U000114b2\U000114b9\U000114bb-\U000114be\U000114c1\U000115af-\U000115b1\U000115b8-\U000115bb\U000115be\U00011630-\U00011632\U0001163b-\U0001163c\U0001163e\U000116ac\U000116ae-\U000116af\U000116b6\U00011720-\U00011721\U00011726\U0001182c-\U0001182e\U00011838\U00011a39\U00011a57-\U00011a58\U00011a97\U00011c2f\U00011c3e\U00011ca9\U00011cb1\U00011cb4\U00011d8a-\U00011d8e\U00011d93-\U00011d94\U00011d96\U00011ef5-\U00011ef6\U00016f51-\U00016f7e\U0001d165-\U0001d166\U0001d16d-\U0001d172'
-
+
Me = '\u0488-\u0489\u1abe\u20dd-\u20e0\u20e2-\u20e4\ua670-\ua672'
-
+
Mn = '\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d3-\u08e1\u08e3-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u09fe\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0afa-\u0aff\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c00\u0c04\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0c81\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d00-\u0d01\u0d3b-\u0d3c\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u1885-\u1886\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1ab0-\u1abd\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab-\u1bad\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1cf8-\u1cf9\u1dc0-\u1df9\u1dfb-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69e-\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4-\ua8c5\ua8e0-\ua8f1\ua8ff\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\ua9e5\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaa7c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\U000101fd\U000102e0\U00010376-\U0001037a\U00010a01-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a0f\U00010a38-\U00010a3a\U00010a3f\U00010ae5-\U00010ae6\U00010d24-\U00010d27\U00010f46-\U00010f50\U00011001\U00011038-\U00011046\U0001107f-\U00011081\U000110b3-\U000110b6\U000110b9-\U000110ba\U00011100-\U00011102\U00011127-\U0001112b\U0001112d-\U00011134\U00011173\U00011180-\U00011181\U000111b6-\U000111be\U000111c9-\U000111cc\U0001122f-\U00011231\U00011234\U00011236-\U00011237\U0001123e\U000112df\U000112e3-\U000112ea\U00011300-\U00011301\U0001133b-\U0001133c\U00011340\U00011366-\U0001136c\U00011370-\U00011374\U00011438-\U0001143f\U00011442-\U00011444\U00011446\U0001145e\U000114b3-\U000114b8\U000114ba\U000114bf-\U000114c0\U000114c2-\U000114c3\U000115b2-\U000115b5\U000115bc-\U000115bd\U000115bf-\U000115c0\U000115dc-\U000115dd\U00011633-\U0001163a\U0001163d\U0001163f-\U00011640\U000116ab\U000116ad\U000116b0-\U000116b5\U000116b7\U0001171d-\U0001171f\U00011722-\U00011725\U00011727-\U0001172b\U0001182f-\U00011837\U00011839-\U0001183a\U00011a01-\U00011a0a\U00011a33-\U00011a38\U00011a3b-\U00011a3e\U00011a47\U00011a51-\U00011a56\U00011a59-\U00011a5b\U00011a8a-\U00011a96\U00011a98-\U00011a99\U00011c30-\U00011c36\U00011c38-\U00011c3d\U00011c3f\U00011c92-\U00011ca7\U00011caa-\U00011cb0\U00011cb2-\U00011cb3\U00011cb5-\U00011cb6\U00011d31-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-
\U00011d45\U00011d47\U00011d90-\U00011d91\U00011d95\U00011d97\U00011ef3-\U00011ef4\U00016af0-\U00016af4\U00016b30-\U00016b36\U00016f8f-\U00016f92\U0001bc9d-\U0001bc9e\U0001d167-\U0001d169\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e8d0-\U0001e8d6\U0001e944-\U0001e94a\U000e0100-\U000e01ef'
-
+
Nd = '0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0de6-\u0def\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\ua9f0-\ua9f9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19\U000104a0-\U000104a9\U00010d30-\U00010d39\U00011066-\U0001106f\U000110f0-\U000110f9\U00011136-\U0001113f\U000111d0-\U000111d9\U000112f0-\U000112f9\U00011450-\U00011459\U000114d0-\U000114d9\U00011650-\U00011659\U000116c0-\U000116c9\U00011730-\U00011739\U000118e0-\U000118e9\U00011c50-\U00011c59\U00011d50-\U00011d59\U00011da0-\U00011da9\U00016a60-\U00016a69\U00016b50-\U00016b59\U0001d7ce-\U0001d7ff\U0001e950-\U0001e959'
-
+
Nl = '\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef\U00010140-\U00010174\U00010341\U0001034a\U000103d1-\U000103d5\U00012400-\U0001246e'
-
+
No = '\xb2-\xb3\xb9\xbc-\xbe\u09f4-\u09f9\u0b72-\u0b77\u0bf0-\u0bf2\u0c78-\u0c7e\u0d58-\u0d5e\u0d70-\u0d78\u0f2a-\u0f33\u1369-\u137c\u17f0-\u17f9\u19da\u2070\u2074-\u2079\u2080-\u2089\u2150-\u215f\u2189\u2460-\u249b\u24ea-\u24ff\u2776-\u2793\u2cfd\u3192-\u3195\u3220-\u3229\u3248-\u324f\u3251-\u325f\u3280-\u3289\u32b1-\u32bf\ua830-\ua835\U00010107-\U00010133\U00010175-\U00010178\U0001018a-\U0001018b\U000102e1-\U000102fb\U00010320-\U00010323\U00010858-\U0001085f\U00010879-\U0001087f\U000108a7-\U000108af\U000108fb-\U000108ff\U00010916-\U0001091b\U000109bc-\U000109bd\U000109c0-\U000109cf\U000109d2-\U000109ff\U00010a40-\U00010a48\U00010a7d-\U00010a7e\U00010a9d-\U00010a9f\U00010aeb-\U00010aef\U00010b58-\U00010b5f\U00010b78-\U00010b7f\U00010ba9-\U00010baf\U00010cfa-\U00010cff\U00010e60-\U00010e7e\U00010f1d-\U00010f26\U00010f51-\U00010f54\U00011052-\U00011065\U000111e1-\U000111f4\U0001173a-\U0001173b\U000118ea-\U000118f2\U00011c5a-\U00011c6c\U00016b5b-\U00016b61\U00016e80-\U00016e96\U0001d2e0-\U0001d2f3\U0001d360-\U0001d378\U0001e8c7-\U0001e8cf\U0001ec71-\U0001ecab\U0001ecad-\U0001ecaf\U0001ecb1-\U0001ecb4\U0001f100-\U0001f10c'
-
+
Pc = '_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f'
-
+
Pd = '\\-\u058a\u05be\u1400\u1806\u2010-\u2015\u2e17\u2e1a\u2e3a-\u2e3b\u2e40\u301c\u3030\u30a0\ufe31-\ufe32\ufe58\ufe63\uff0d'
-
+
Pe = ')\\]}\u0f3b\u0f3d\u169c\u2046\u207e\u208e\u2309\u230b\u232a\u2769\u276b\u276d\u276f\u2771\u2773\u2775\u27c6\u27e7\u27e9\u27eb\u27ed\u27ef\u2984\u2986\u2988\u298a\u298c\u298e\u2990\u2992\u2994\u2996\u2998\u29d9\u29db\u29fd\u2e23\u2e25\u2e27\u2e29\u3009\u300b\u300d\u300f\u3011\u3015\u3017\u3019\u301b\u301e-\u301f\ufd3e\ufe18\ufe36\ufe38\ufe3a\ufe3c\ufe3e\ufe40\ufe42\ufe44\ufe48\ufe5a\ufe5c\ufe5e\uff09\uff3d\uff5d\uff60\uff63'
-
+
Pf = '\xbb\u2019\u201d\u203a\u2e03\u2e05\u2e0a\u2e0d\u2e1d\u2e21'
-
+
Pi = '\xab\u2018\u201b-\u201c\u201f\u2039\u2e02\u2e04\u2e09\u2e0c\u2e1c\u2e20'
-
+
Po = "!-#%-'*,.-/:-;?-@\\\\\xa1\xa7\xb6-\xb7\xbf\u037e\u0387\u055a-\u055f\u0589\u05c0\u05c3\u05c6\u05f3-\u05f4\u0609-\u060a\u060c-\u060d\u061b\u061e-\u061f\u066a-\u066d\u06d4\u0700-\u070d\u07f7-\u07f9\u0830-\u083e\u085e\u0964-\u0965\u0970\u09fd\u0a76\u0af0\u0c84\u0df4\u0e4f\u0e5a-\u0e5b\u0f04-\u0f12\u0f14\u0f85\u0fd0-\u0fd4\u0fd9-\u0fda\u104a-\u104f\u10fb\u1360-\u1368\u166d-\u166e\u16eb-\u16ed\u1735-\u1736\u17d4-\u17d6\u17d8-\u17da\u1800-\u1805\u1807-\u180a\u1944-\u1945\u1a1e-\u1a1f\u1aa0-\u1aa6\u1aa8-\u1aad\u1b5a-\u1b60\u1bfc-\u1bff\u1c3b-\u1c3f\u1c7e-\u1c7f\u1cc0-\u1cc7\u1cd3\u2016-\u2017\u2020-\u2027\u2030-\u2038\u203b-\u203e\u2041-\u2043\u2047-\u2051\u2053\u2055-\u205e\u2cf9-\u2cfc\u2cfe-\u2cff\u2d70\u2e00-\u2e01\u2e06-\u2e08\u2e0b\u2e0e-\u2e16\u2e18-\u2e19\u2e1b\u2e1e-\u2e1f\u2e2a-\u2e2e\u2e30-\u2e39\u2e3c-\u2e3f\u2e41\u2e43-\u2e4e\u3001-\u3003\u303d\u30fb\ua4fe-\ua4ff\ua60d-\ua60f\ua673\ua67e\ua6f2-\ua6f7\ua874-\ua877\ua8ce-\ua8cf\ua8f8-\ua8fa\ua8fc\ua92e-\ua92f\ua95f\ua9c1-\ua9cd\ua9de-\ua9df\uaa5c-\uaa5f\uaade-\uaadf\uaaf0-\uaaf1\uabeb\ufe10-\ufe16\ufe19\ufe30\ufe45-\ufe46\ufe49-\ufe4c\ufe50-\ufe52\ufe54-\ufe57\ufe5f-\ufe61\ufe68\ufe6a-\ufe6b\uff01-\uff03\uff05-\uff07\uff0a\uff0c\uff0e-\uff0f\uff1a-\uff1b\uff1f-\uff20\uff3c\uff61\uff64-\uff65\U00010100-\U00010102\U0001039f\U000103d0\U0001056f\U00010857\U0001091f\U0001093f\U00010a50-\U00010a58\U00010a7f\U00010af0-\U00010af6\U00010b39-\U00010b3f\U00010b99-\U00010b9c\U00010f55-\U00010f59\U00011047-\U0001104d\U000110bb-\U000110bc\U000110be-\U000110c1\U00011140-\U00011143\U00011174-\U00011175\U000111c5-\U000111c8\U000111cd\U000111db\U000111dd-\U000111df\U00011238-\U0001123d\U000112a9\U0001144b-\U0001144f\U0001145b\U0001145d\U000114c6\U000115c1-\U000115d7\U00011641-\U00011643\U00011660-\U0001166c\U0001173c-\U0001173e\U0001183b\U00011a3f-\U00011a46\U00011a9a-\U00011a9c\U00011a9e-\U00011aa2\U00011c41-\U00011c45\U00011c70-\U00011c71\U00011ef7-\U00011ef8\U00012470-\U00012474\U00016a6e-\U00016a6f\U00016af5\U00016b37-\U00016b3b\U00016b44\U00016e97-\U00016e9a\U0001bc9f\U0001da87-\U0001da8b\U0001e95e-\U0001e95f"
-
+
Ps = '(\\[{\u0f3a\u0f3c\u169b\u201a\u201e\u2045\u207d\u208d\u2308\u230a\u2329\u2768\u276a\u276c\u276e\u2770\u2772\u2774\u27c5\u27e6\u27e8\u27ea\u27ec\u27ee\u2983\u2985\u2987\u2989\u298b\u298d\u298f\u2991\u2993\u2995\u2997\u29d8\u29da\u29fc\u2e22\u2e24\u2e26\u2e28\u2e42\u3008\u300a\u300c\u300e\u3010\u3014\u3016\u3018\u301a\u301d\ufd3f\ufe17\ufe35\ufe37\ufe39\ufe3b\ufe3d\ufe3f\ufe41\ufe43\ufe47\ufe59\ufe5b\ufe5d\uff08\uff3b\uff5b\uff5f\uff62'
-
+
Sc = '$\xa2-\xa5\u058f\u060b\u07fe-\u07ff\u09f2-\u09f3\u09fb\u0af1\u0bf9\u0e3f\u17db\u20a0-\u20bf\ua838\ufdfc\ufe69\uff04\uffe0-\uffe1\uffe5-\uffe6\U0001ecb0'
-
+
Sk = '\\^`\xa8\xaf\xb4\xb8\u02c2-\u02c5\u02d2-\u02df\u02e5-\u02eb\u02ed\u02ef-\u02ff\u0375\u0384-\u0385\u1fbd\u1fbf-\u1fc1\u1fcd-\u1fcf\u1fdd-\u1fdf\u1fed-\u1fef\u1ffd-\u1ffe\u309b-\u309c\ua700-\ua716\ua720-\ua721\ua789-\ua78a\uab5b\ufbb2-\ufbc1\uff3e\uff40\uffe3\U0001f3fb-\U0001f3ff'
-
+
Sm = '+<->|~\xac\xb1\xd7\xf7\u03f6\u0606-\u0608\u2044\u2052\u207a-\u207c\u208a-\u208c\u2118\u2140-\u2144\u214b\u2190-\u2194\u219a-\u219b\u21a0\u21a3\u21a6\u21ae\u21ce-\u21cf\u21d2\u21d4\u21f4-\u22ff\u2320-\u2321\u237c\u239b-\u23b3\u23dc-\u23e1\u25b7\u25c1\u25f8-\u25ff\u266f\u27c0-\u27c4\u27c7-\u27e5\u27f0-\u27ff\u2900-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2aff\u2b30-\u2b44\u2b47-\u2b4c\ufb29\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe9-\uffec\U0001d6c1\U0001d6db\U0001d6fb\U0001d715\U0001d735\U0001d74f\U0001d76f\U0001d789\U0001d7a9\U0001d7c3\U0001eef0-\U0001eef1'
-
+
So = '\xa6\xa9\xae\xb0\u0482\u058d-\u058e\u060e-\u060f\u06de\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0d4f\u0d79\u0f01-\u0f03\u0f13\u0f15-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcc\u0fce-\u0fcf\u0fd5-\u0fd8\u109e-\u109f\u1390-\u1399\u1940\u19de-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2117\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u214a\u214c-\u214d\u214f\u218a-\u218b\u2195-\u2199\u219c-\u219f\u21a1-\u21a2\u21a4-\u21a5\u21a7-\u21ad\u21af-\u21cd\u21d0-\u21d1\u21d3\u21d5-\u21f3\u2300-\u2307\u230c-\u231f\u2322-\u2328\u232b-\u237b\u237d-\u239a\u23b4-\u23db\u23e2-\u2426\u2440-\u244a\u249c-\u24e9\u2500-\u25b6\u25b8-\u25c0\u25c2-\u25f7\u2600-\u266e\u2670-\u2767\u2794-\u27bf\u2800-\u28ff\u2b00-\u2b2f\u2b45-\u2b46\u2b4d-\u2b73\u2b76-\u2b95\u2b98-\u2bc8\u2bca-\u2bfe\u2ce5-\u2cea\u2e80-\u2e99\u2e9b-\u2ef3\u2f00-\u2fd5\u2ff0-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3247\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u32fe\u3300-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ua836-\ua837\ua839\uaa77-\uaa79\ufdfd\uffe4\uffe8\uffed-\uffee\ufffc-\ufffd\U00010137-\U0001013f\U00010179-\U00010189\U0001018c-\U0001018e\U00010190-\U0001019b\U000101a0\U000101d0-\U000101fc\U00010877-\U00010878\U00010ac8\U0001173f\U00016b3c-\U00016b3f\U00016b45\U0001bc9c\U0001d000-\U0001d0f5\U0001d100-\U0001d126\U0001d129-\U0001d164\U0001d16a-\U0001d16c\U0001d183-\U0001d184\U0001d18c-\U0001d1a9\U0001d1ae-\U0001d1e8\U0001d200-\U0001d241\U0001d245\U0001d300-\U0001d356\U0001d800-\U0001d9ff\U0001da37-\U0001da3a\U0001da6d-\U0001da74\U0001da76-\U0001da83\U0001da85-\U0001da86\U0001ecac\U0001f000-\U0001f02b\U0001f030-\U0001f093\U0001f0a0-\U0001f0ae\U0001f0b1-\U0001f0bf\U0001f0c1-\U0001f0cf\U0001f0d1-\U0001f0f5\U0001f110-\U0001f16b\U0001f170-\U0001f1ac\U0001f1e6-\U0001f202\U0001f210-\U0001f23b\U0001f240-\U0001f248\U0001f250-\U0001f251\U0001f260-\U0001f265\U0001f300-\U0001f3fa\U0001f400-\U0001f6d4\U0001f6e0-\U0001f6ec\U0001f6f0-\U0001f6f9\U0001f700-\U0001f773\U0001f780-\U0001f7d8\U0001f800-\U0001f80b\U0001f810-\U0001f847\U0001f850-\U0001f859\U0001f860-\U0001f887\U0001f890-\U0001f8ad\U0001f900-\U0001f90b\U0001f910-\U0001f93e\U0001f940-\U0001f970\U0001f973-\U0001f976\U0001f97a\U0001f97c-\U0001f9a2\U0001f9b0-\U0001f9b9\U0001f9c0-\U0001f9c2\U0001f9d0-\U0001f9ff\U0001fa60-\U0001fa6d'
-
+
Zl = '\u2028'
-
+
Zp = '\u2029'
-
+
Zs = ' \xa0\u1680\u2000-\u200a\u202f\u205f\u3000'
-
+
xid_continue = '0-9A-Z_a-z\xaa\xb5\xb7\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0300-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u0483-\u0487\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u05d0-\u05ea\u05ef-\u05f2\u0610-\u061a\u0620-\u0669\u066e-\u06d3\u06d5-\u06dc\u06df-\u06e8\u06ea-\u06fc\u06ff\u0710-\u074a\u074d-\u07b1\u07c0-\u07f5\u07fa\u07fd\u0800-\u082d\u0840-\u085b\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u08d3-\u08e1\u08e3-\u0963\u0966-\u096f\u0971-\u0983\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bc-\u09c4\u09c7-\u09c8\u09cb-\u09ce\u09d7\u09dc-\u09dd\u09df-\u09e3\u09e6-\u09f1\u09fc\u09fe\u0a01-\u0a03\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a3c\u0a3e-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a59-\u0a5c\u0a5e\u0a66-\u0a75\u0a81-\u0a83\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abc-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ad0\u0ae0-\u0ae3\u0ae6-\u0aef\u0af9-\u0aff\u0b01-\u0b03\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3c-\u0b44\u0b47-\u0b48\u0b4b-\u0b4d\u0b56-\u0b57\u0b5c-\u0b5d\u0b5f-\u0b63\u0b66-\u0b6f\u0b71\u0b82-\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd0\u0bd7\u0be6-\u0bef\u0c00-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c58-\u0c5a\u0c60-\u0c63\u0c66-\u0c6f\u0c80-\u0c83\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbc-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5-\u0cd6\u0cde\u0ce0-\u0ce3\u0ce6-\u0cef\u0cf1-\u0cf2\u0d00-\u0d03\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d44\u0d46-\u0d48\u0d4a-\u0d4e\u0d54-\u0d57\u0d5f-\u0d63\u0d66-\u0d6f\u0d7a-\u0d7f\u0d82-\u0d83\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2-\u0df3\u0e01-\u0e3a\u0e40-\u0e4e\u0e50-\u0e59\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb9\u0ebb-\u0ebd\u0ec0-\u0ec4\u0ec6\u0ec8-\u0ecd\u0ed0-\u0ed9\u0edc-\u0edf\u0f00\u0f18-\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e-\u0f47\u0f49-\u0f6c\u0f71-\u0f84\u0f86-\u0f97\u0f99-\u0fbc\u0fc6\u1000-\u1049\u1050-\u109d\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u135d-\u135f\u1369-\u1371\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176c\u176e-\u1770\u1772-\u1773\u1780-\u17d3\u17d7\u17dc-\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u1820-\u1878\u1880-\u18aa\u18b0-\u18f5\u1900-\u191e\u1920-\u192b\u1930-\u193b\u1946-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u19d0-\u19da\u1a00-\u1a1b\u1a20-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1aa7\u1ab0-\u1abd\u1b00-\u1b4b\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1bf3\u1c00-\u1c37\u1c40-\u1c49\u1c4d-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1cd0-\u1cd2\u1cd4-\u1cf9\u1d00-\u1df9\u1dfb-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1f
d6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u203f-\u2040\u2054\u2071\u207f\u2090-\u209c\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d7f-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2de0-\u2dff\u3005-\u3007\u3021-\u302f\u3031-\u3035\u3038-\u303c\u3041-\u3096\u3099-\u309a\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua62b\ua640-\ua66f\ua674-\ua67d\ua67f-\ua6f1\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua827\ua840-\ua873\ua880-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f7\ua8fb\ua8fd-\ua92d\ua930-\ua953\ua960-\ua97c\ua980-\ua9c0\ua9cf-\ua9d9\ua9e0-\ua9fe\uaa00-\uaa36\uaa40-\uaa4d\uaa50-\uaa59\uaa60-\uaa76\uaa7a-\uaac2\uaadb-\uaadd\uaae0-\uaaef\uaaf2-\uaaf6\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabea\uabec-\uabed\uabf0-\uabf9\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe00-\ufe0f\ufe20-\ufe2f\ufe33-\ufe34\ufe4d-\ufe4f\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff10-\uff19\uff21-\uff3a\uff3f\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U000101fd\U00010280-\U0001029c\U000102a0-\U000102d0\U000102e0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U0001037a\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104a0-\U000104a9\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00-\U00010a03\U00010a05-\U00010a06\U00010a0c-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a38-\U00010a3a\U00010a3f\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae6\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d27\U00010d30-\U00010d39\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f50\U00011000-\U00011046\U00011066-\U0001106f\U0001107f-\U000110ba\U000110d0-\U000110e8\U000110f0-\U000110f9\U00011100-\U00011134\U00011136-\U0001113f\U00011144-\U00011146\U00011150-\U00011173\U00011176\U00011180-\U000111c4\U000111c9-\U000111cc\U000111d0-\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U00011237\U0001123e\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112ea\U000112f0-\U000112f9\U00011300-\U00011303\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133b-\U0001134
4\U00011347-\U00011348\U0001134b-\U0001134d\U00011350\U00011357\U0001135d-\U00011363\U00011366-\U0001136c\U00011370-\U00011374\U00011400-\U0001144a\U00011450-\U00011459\U0001145e\U00011480-\U000114c5\U000114c7\U000114d0-\U000114d9\U00011580-\U000115b5\U000115b8-\U000115c0\U000115d8-\U000115dd\U00011600-\U00011640\U00011644\U00011650-\U00011659\U00011680-\U000116b7\U000116c0-\U000116c9\U00011700-\U0001171a\U0001171d-\U0001172b\U00011730-\U00011739\U00011800-\U0001183a\U000118a0-\U000118e9\U000118ff\U00011a00-\U00011a3e\U00011a47\U00011a50-\U00011a83\U00011a86-\U00011a99\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c36\U00011c38-\U00011c40\U00011c50-\U00011c59\U00011c72-\U00011c8f\U00011c92-\U00011ca7\U00011ca9-\U00011cb6\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d36\U00011d3a\U00011d3c-\U00011d3d\U00011d3f-\U00011d47\U00011d50-\U00011d59\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d8e\U00011d90-\U00011d91\U00011d93-\U00011d98\U00011da0-\U00011da9\U00011ee0-\U00011ef6\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016a60-\U00016a69\U00016ad0-\U00016aed\U00016af0-\U00016af4\U00016b00-\U00016b36\U00016b40-\U00016b43\U00016b50-\U00016b59\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50-\U00016f7e\U00016f8f-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001bc9d-\U0001bc9e\U0001d165-\U0001d169\U0001d16d-\U0001d172\U0001d17b-\U0001d182\U0001d185-\U0001d18b\U0001d1aa-\U0001d1ad\U0001d242-\U0001d244\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001d7ce-\U0001d7ff\U0001da00-\U0001da36\U0001da3b-\U0001da6c\U0001da75\U0001da84\U0001da9b-\U0001da9f\U0001daa1-\U0001daaf\U0001e000-\U0001e006\U0001e008-\U0001e018\U0001e01b-\U0001e021\U0001e023-\U0001e024\U0001e026-\U0001e02a\U0001e800-\U0001e8c4\U0001e8d0-\U0001e8d6\U0001e900-\U0001e94a\U0001e950-\U0001e959\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d\U000e0100-\U000e01ef'
-
+
xid_start = 'A-Z_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376-\u0377\u037b-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e-\u066f\u0671-\u06d3\u06d5\u06e5-\u06e6\u06ee-\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4-\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f-\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc-\u09dd\u09df-\u09e1\u09f0-\u09f1\u09fc\u0a05-\u0a0a\u0a0f-\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32-\u0a33\u0a35-\u0a36\u0a38-\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2-\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0-\u0ae1\u0af9\u0b05-\u0b0c\u0b0f-\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32-\u0b33\u0b35-\u0b39\u0b3d\u0b5c-\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99-\u0b9a\u0b9c\u0b9e-\u0b9f\u0ba3-\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60-\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0-\u0ce1\u0cf1-\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e40-\u0e46\u0e81-\u0e82\u0e84\u0e87-\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa-\u0eab\u0ead-\u0eb0\u0eb2\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065-\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae-\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5-\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2-\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fef\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a-\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7b9\ua7f7-\ua801\
ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd-\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5-\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufbb1\ufbd3-\ufc5d\ufc64-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdf9\ufe71\ufe73\ufe77\ufe79\ufe7b\ufe7d\ufe7f-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uff9d\uffa0-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc\U00010000-\U0001000b\U0001000d-\U00010026\U00010028-\U0001003a\U0001003c-\U0001003d\U0001003f-\U0001004d\U00010050-\U0001005d\U00010080-\U000100fa\U00010140-\U00010174\U00010280-\U0001029c\U000102a0-\U000102d0\U00010300-\U0001031f\U0001032d-\U0001034a\U00010350-\U00010375\U00010380-\U0001039d\U000103a0-\U000103c3\U000103c8-\U000103cf\U000103d1-\U000103d5\U00010400-\U0001049d\U000104b0-\U000104d3\U000104d8-\U000104fb\U00010500-\U00010527\U00010530-\U00010563\U00010600-\U00010736\U00010740-\U00010755\U00010760-\U00010767\U00010800-\U00010805\U00010808\U0001080a-\U00010835\U00010837-\U00010838\U0001083c\U0001083f-\U00010855\U00010860-\U00010876\U00010880-\U0001089e\U000108e0-\U000108f2\U000108f4-\U000108f5\U00010900-\U00010915\U00010920-\U00010939\U00010980-\U000109b7\U000109be-\U000109bf\U00010a00\U00010a10-\U00010a13\U00010a15-\U00010a17\U00010a19-\U00010a35\U00010a60-\U00010a7c\U00010a80-\U00010a9c\U00010ac0-\U00010ac7\U00010ac9-\U00010ae4\U00010b00-\U00010b35\U00010b40-\U00010b55\U00010b60-\U00010b72\U00010b80-\U00010b91\U00010c00-\U00010c48\U00010c80-\U00010cb2\U00010cc0-\U00010cf2\U00010d00-\U00010d23\U00010f00-\U00010f1c\U00010f27\U00010f30-\U00010f45\U00011003-\U00011037\U00011083-\U000110af\U000110d0-\U000110e8\U00011103-\U00011126\U00011144\U00011150-\U00011172\U00011176\U00011183-\U000111b2\U000111c1-\U000111c4\U000111da\U000111dc\U00011200-\U00011211\U00011213-\U0001122b\U00011280-\U00011286\U00011288\U0001128a-\U0001128d\U0001128f-\U0001129d\U0001129f-\U000112a8\U000112b0-\U000112de\U00011305-\U0001130c\U0001130f-\U00011310\U00011313-\U00011328\U0001132a-\U00011330\U00011332-\U00011333\U00011335-\U00011339\U0001133d\U00011350\U0001135d-\U00011361\U00011400-\U00011434\U00011447-\U0001144a\U00011480-\U000114af\U000114c4-\U000114c5\U000114c7\U00011580-\U000115ae\U000115d8-\U000115db\U00011600-\U0001162f\U00011644\U00011680-\U000116aa\U00011700-\U0001171a\U00011800-\U0001182b\U000118a0-\U000118df\U000118ff\U00011a00\U00011a0b-\U00011a32\U00011a3a\U00011a50\U00011a5c-\U00011a83\U00011a86-\U00011a89\U00011a9d\U00011ac0-\U00011af8\U00011c00-\U00011c08\U00011c0a-\U00011c2e\U00011c40\U00011c72-\U00011c8f\U00011d00-\U00011d06\U00011d08-\U00011d09\U00011d0b-\U00011d30\U00011d46\U00011d60-\U00011d65\U00011d67-\U00011d68\U00011d6a-\U00011d89\U00011d98\U00011ee0-\U00011ef2\U00012000-\U00012399\U00012400-\U0001246e\U00012480-\U00012543\U00013000-\U0001342e\U00014400-\U00014646\U00016800-\U00016a38\U00016a40-\U00016a5e\U00016ad0-\U00016aed\U00016b00-\U00016b2f\U00016b40-\U00016b43\U00016b63-\U00016b77\U00016b7d-\U00016b8f\U00016e40-\U00016e7f\U00016f00-\U00016f44\U00016f50\U00016f93-\U00016f9f\U00016fe0-\U00016fe1\U00017000-\U000187f1\U00018800-\U00018af2\U0001b
000-\U0001b11e\U0001b170-\U0001b2fb\U0001bc00-\U0001bc6a\U0001bc70-\U0001bc7c\U0001bc80-\U0001bc88\U0001bc90-\U0001bc99\U0001d400-\U0001d454\U0001d456-\U0001d49c\U0001d49e-\U0001d49f\U0001d4a2\U0001d4a5-\U0001d4a6\U0001d4a9-\U0001d4ac\U0001d4ae-\U0001d4b9\U0001d4bb\U0001d4bd-\U0001d4c3\U0001d4c5-\U0001d505\U0001d507-\U0001d50a\U0001d50d-\U0001d514\U0001d516-\U0001d51c\U0001d51e-\U0001d539\U0001d53b-\U0001d53e\U0001d540-\U0001d544\U0001d546\U0001d54a-\U0001d550\U0001d552-\U0001d6a5\U0001d6a8-\U0001d6c0\U0001d6c2-\U0001d6da\U0001d6dc-\U0001d6fa\U0001d6fc-\U0001d714\U0001d716-\U0001d734\U0001d736-\U0001d74e\U0001d750-\U0001d76e\U0001d770-\U0001d788\U0001d78a-\U0001d7a8\U0001d7aa-\U0001d7c2\U0001d7c4-\U0001d7cb\U0001e800-\U0001e8c4\U0001e900-\U0001e943\U0001ee00-\U0001ee03\U0001ee05-\U0001ee1f\U0001ee21-\U0001ee22\U0001ee24\U0001ee27\U0001ee29-\U0001ee32\U0001ee34-\U0001ee37\U0001ee39\U0001ee3b\U0001ee42\U0001ee47\U0001ee49\U0001ee4b\U0001ee4d-\U0001ee4f\U0001ee51-\U0001ee52\U0001ee54\U0001ee57\U0001ee59\U0001ee5b\U0001ee5d\U0001ee5f\U0001ee61-\U0001ee62\U0001ee64\U0001ee67-\U0001ee6a\U0001ee6c-\U0001ee72\U0001ee74-\U0001ee77\U0001ee79-\U0001ee7c\U0001ee7e\U0001ee80-\U0001ee89\U0001ee8b-\U0001ee9b\U0001eea1-\U0001eea3\U0001eea5-\U0001eea9\U0001eeab-\U0001eebb\U00020000-\U0002a6d6\U0002a700-\U0002b734\U0002b740-\U0002b81d\U0002b820-\U0002cea1\U0002ceb0-\U0002ebe0\U0002f800-\U0002fa1d'
-
-cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
-
+
+cats = ['Cc', 'Cf', 'Cn', 'Co', 'Cs', 'Ll', 'Lm', 'Lo', 'Lt', 'Lu', 'Mc', 'Me', 'Mn', 'Nd', 'Nl', 'No', 'Pc', 'Pd', 'Pe', 'Pf', 'Pi', 'Po', 'Ps', 'Sc', 'Sk', 'Sm', 'So', 'Zl', 'Zp', 'Zs']
+
# Generated from unidata 11.0.0
-
-def combine(*args):
+
+def combine(*args):
return ''.join(globals()[cat] for cat in args)
-
-
-def allexcept(*args):
- newcats = cats[:]
- for arg in args:
- newcats.remove(arg)
+
+
+def allexcept(*args):
+ newcats = cats[:]
+ for arg in args:
+ newcats.remove(arg)
return ''.join(globals()[cat] for cat in newcats)
-
-
-def _handle_runs(char_list): # pragma: no cover
- buf = []
- for c in char_list:
- if len(c) == 1:
- if buf and buf[-1][1] == chr(ord(c)-1):
- buf[-1] = (buf[-1][0], c)
- else:
- buf.append((c, c))
- else:
- buf.append((c, c))
- for a, b in buf:
- if a == b:
- yield a
- else:
+
+
+def _handle_runs(char_list): # pragma: no cover
+ buf = []
+ for c in char_list:
+ if len(c) == 1:
+ if buf and buf[-1][1] == chr(ord(c)-1):
+ buf[-1] = (buf[-1][0], c)
+ else:
+ buf.append((c, c))
+ else:
+ buf.append((c, c))
+ for a, b in buf:
+ if a == b:
+ yield a
+ else:
yield '%s-%s' % (a, b)
-
-
-if __name__ == '__main__': # pragma: no cover
- import unicodedata
-
+
+
+if __name__ == '__main__': # pragma: no cover
+ import unicodedata
+
categories = {'xid_start': [], 'xid_continue': []}
-
- with open(__file__) as fp:
- content = fp.read()
-
- header = content[:content.find('Cc =')]
- footer = content[content.find("def combine("):]
-
- for code in range(0x110000):
- c = chr(code)
- cat = unicodedata.category(c)
- if ord(c) == 0xdc00:
- # Hack to avoid combining this combining with the preceeding high
- # surrogate, 0xdbff, when doing a repr.
+
+ with open(__file__) as fp:
+ content = fp.read()
+
+ header = content[:content.find('Cc =')]
+ footer = content[content.find("def combine("):]
+
+ for code in range(0x110000):
+ c = chr(code)
+ cat = unicodedata.category(c)
+ if ord(c) == 0xdc00:
+ # Hack to avoid combining this combining with the preceeding high
+ # surrogate, 0xdbff, when doing a repr.
c = '\\' + c
- elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
- # Escape regex metachars.
+ elif ord(c) in (0x2d, 0x5b, 0x5c, 0x5d, 0x5e):
+ # Escape regex metachars.
c = '\\' + c
categories.setdefault(cat, []).append(c)
- # XID_START and XID_CONTINUE are special categories used for matching
- # identifiers in Python 3.
- if c.isidentifier():
+ # XID_START and XID_CONTINUE are special categories used for matching
+ # identifiers in Python 3.
+ if c.isidentifier():
categories['xid_start'].append(c)
- if ('a' + c).isidentifier():
+ if ('a' + c).isidentifier():
categories['xid_continue'].append(c)
-
- with open(__file__, 'w') as fp:
- fp.write(header)
-
+
+ with open(__file__, 'w') as fp:
+ fp.write(header)
+
for cat in sorted(categories):
val = ''.join(_handle_runs(categories[cat]))
fp.write('%s = %a\n\n' % (cat, val))
-
+
cats = sorted(categories)
- cats.remove('xid_start')
- cats.remove('xid_continue')
- fp.write('cats = %r\n\n' % cats)
-
- fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
-
- fp.write(footer)
+ cats.remove('xid_start')
+ cats.remove('xid_continue')
+ fp.write('cats = %r\n\n' % cats)
+
+ fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
+
+ fp.write(footer)
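
Aside: the regeneration block above derives xid_start / xid_continue from str.isidentifier() and pre-escapes the regex metacharacters, so the generated strings can be dropped straight into a character class. A minimal usage sketch, assuming pygments.unistring exposes these names as in the file being restored here:

    import re
    from pygments import unistring as uni

    # Character classes produced by the generator loop above; combine()
    # simply concatenates the requested category strings.
    ident_re = re.compile('[%s][%s]*' % (uni.xid_start, uni.xid_continue))

    assert ident_re.fullmatch('переменная_1')   # Unicode identifiers are accepted
    assert not ident_re.fullmatch('1abc')       # identifiers may not start with a digit
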
diff --git a/contrib/python/Pygments/py3/pygments/util.py b/contrib/python/Pygments/py3/pygments/util.py
index 5d6ddc3f5b..75ff8a26bc 100644
--- a/contrib/python/Pygments/py3/pygments/util.py
+++ b/contrib/python/Pygments/py3/pygments/util.py
@@ -1,307 +1,307 @@
-"""
- pygments.util
- ~~~~~~~~~~~~~
-
- Utility functions.
-
+"""
+ pygments.util
+ ~~~~~~~~~~~~~
+
+ Utility functions.
+
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
from io import TextIOWrapper
-
-
-split_path_re = re.compile(r'[/\\ ]')
+
+
+split_path_re = re.compile(r'[/\\ ]')
doctype_lookup_re = re.compile(r'''
- <!DOCTYPE\s+(
- [a-zA-Z_][a-zA-Z0-9]*
- (?: \s+ # optional in HTML5
- [a-zA-Z_][a-zA-Z0-9]*\s+
- "[^"]*")?
- )
- [^>]*>
+ <!DOCTYPE\s+(
+ [a-zA-Z_][a-zA-Z0-9]*
+ (?: \s+ # optional in HTML5
+ [a-zA-Z_][a-zA-Z0-9]*\s+
+ "[^"]*")?
+ )
+ [^>]*>
''', re.DOTALL | re.MULTILINE | re.VERBOSE)
tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>',
re.UNICODE | re.IGNORECASE | re.DOTALL | re.MULTILINE)
-xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)
-
-
-class ClassNotFound(ValueError):
- """Raised if one of the lookup functions didn't find a matching class."""
-
-
-class OptionError(Exception):
- pass
-
-
-def get_choice_opt(options, optname, allowed, default=None, normcase=False):
- string = options.get(optname, default)
- if normcase:
- string = string.lower()
- if string not in allowed:
- raise OptionError('Value for option %s must be one of %s' %
- (optname, ', '.join(map(str, allowed))))
- return string
-
-
-def get_bool_opt(options, optname, default=None):
- string = options.get(optname, default)
- if isinstance(string, bool):
- return string
- elif isinstance(string, int):
- return bool(string)
+xml_decl_re = re.compile(r'\s*<\?xml[^>]*\?>', re.I)
+
+
+class ClassNotFound(ValueError):
+ """Raised if one of the lookup functions didn't find a matching class."""
+
+
+class OptionError(Exception):
+ pass
+
+
+def get_choice_opt(options, optname, allowed, default=None, normcase=False):
+ string = options.get(optname, default)
+ if normcase:
+ string = string.lower()
+ if string not in allowed:
+ raise OptionError('Value for option %s must be one of %s' %
+ (optname, ', '.join(map(str, allowed))))
+ return string
+
+
+def get_bool_opt(options, optname, default=None):
+ string = options.get(optname, default)
+ if isinstance(string, bool):
+ return string
+ elif isinstance(string, int):
+ return bool(string)
elif not isinstance(string, str):
- raise OptionError('Invalid type %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
- elif string.lower() in ('1', 'yes', 'true', 'on'):
- return True
- elif string.lower() in ('0', 'no', 'false', 'off'):
- return False
- else:
- raise OptionError('Invalid value %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
-
-
-def get_int_opt(options, optname, default=None):
- string = options.get(optname, default)
- try:
- return int(string)
- except TypeError:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
- except ValueError:
- raise OptionError('Invalid value %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
-
-
-def get_list_opt(options, optname, default=None):
- val = options.get(optname, default)
+ raise OptionError('Invalid type %r for option %s; use '
+ '1/0, yes/no, true/false, on/off' % (
+ string, optname))
+ elif string.lower() in ('1', 'yes', 'true', 'on'):
+ return True
+ elif string.lower() in ('0', 'no', 'false', 'off'):
+ return False
+ else:
+ raise OptionError('Invalid value %r for option %s; use '
+ '1/0, yes/no, true/false, on/off' % (
+ string, optname))
+
+
+def get_int_opt(options, optname, default=None):
+ string = options.get(optname, default)
+ try:
+ return int(string)
+ except TypeError:
+ raise OptionError('Invalid type %r for option %s; you '
+ 'must give an integer value' % (
+ string, optname))
+ except ValueError:
+ raise OptionError('Invalid value %r for option %s; you '
+ 'must give an integer value' % (
+ string, optname))
+
+
+def get_list_opt(options, optname, default=None):
+ val = options.get(optname, default)
if isinstance(val, str):
- return val.split()
- elif isinstance(val, (list, tuple)):
- return list(val)
- else:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give a list value' % (
- val, optname))
-
-
-def docstring_headline(obj):
- if not obj.__doc__:
- return ''
- res = []
- for line in obj.__doc__.strip().splitlines():
- if line.strip():
- res.append(" " + line.strip())
- else:
- break
- return ''.join(res).lstrip()
-
-
-def make_analysator(f):
- """Return a static text analyser function that returns float values."""
- def text_analyse(text):
- try:
- rv = f(text)
- except Exception:
- return 0.0
- if not rv:
- return 0.0
- try:
- return min(1.0, max(0.0, float(rv)))
- except (ValueError, TypeError):
- return 0.0
- text_analyse.__doc__ = f.__doc__
- return staticmethod(text_analyse)
-
-
-def shebang_matches(text, regex):
- r"""Check if the given regular expression matches the last part of the
- shebang if one exists.
-
- >>> from pygments.util import shebang_matches
- >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
- True
- >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
- True
- >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
- False
- >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
- False
- >>> shebang_matches('#!/usr/bin/startsomethingwith python',
- ... r'python(2\.\d)?')
- True
-
- It also checks for common windows executable file extensions::
-
- >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
- True
-
- Parameters (``'-f'`` or ``'--foo'`` are ignored so ``'perl'`` does
- the same as ``'perl -e'``)
-
- Note that this method automatically searches the whole string (eg:
- the regular expression is wrapped in ``'^$'``)
- """
- index = text.find('\n')
- if index >= 0:
- first_line = text[:index].lower()
- else:
- first_line = text.lower()
- if first_line.startswith('#!'):
- try:
- found = [x for x in split_path_re.split(first_line[2:].strip())
- if x and not x.startswith('-')][-1]
- except IndexError:
- return False
- regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
- if regex.search(found) is not None:
- return True
- return False
-
-
-def doctype_matches(text, regex):
- """Check if the doctype matches a regular expression (if present).
-
- Note that this method only checks the first part of a DOCTYPE.
- eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
- """
+ return val.split()
+ elif isinstance(val, (list, tuple)):
+ return list(val)
+ else:
+ raise OptionError('Invalid type %r for option %s; you '
+ 'must give a list value' % (
+ val, optname))
+
+
+def docstring_headline(obj):
+ if not obj.__doc__:
+ return ''
+ res = []
+ for line in obj.__doc__.strip().splitlines():
+ if line.strip():
+ res.append(" " + line.strip())
+ else:
+ break
+ return ''.join(res).lstrip()
+
+
+def make_analysator(f):
+ """Return a static text analyser function that returns float values."""
+ def text_analyse(text):
+ try:
+ rv = f(text)
+ except Exception:
+ return 0.0
+ if not rv:
+ return 0.0
+ try:
+ return min(1.0, max(0.0, float(rv)))
+ except (ValueError, TypeError):
+ return 0.0
+ text_analyse.__doc__ = f.__doc__
+ return staticmethod(text_analyse)
+
+
+def shebang_matches(text, regex):
+ r"""Check if the given regular expression matches the last part of the
+ shebang if one exists.
+
+ >>> from pygments.util import shebang_matches
+ >>> shebang_matches('#!/usr/bin/env python', r'python(2\.\d)?')
+ True
+ >>> shebang_matches('#!/usr/bin/python2.4', r'python(2\.\d)?')
+ True
+ >>> shebang_matches('#!/usr/bin/python-ruby', r'python(2\.\d)?')
+ False
+ >>> shebang_matches('#!/usr/bin/python/ruby', r'python(2\.\d)?')
+ False
+ >>> shebang_matches('#!/usr/bin/startsomethingwith python',
+ ... r'python(2\.\d)?')
+ True
+
+ It also checks for common windows executable file extensions::
+
+ >>> shebang_matches('#!C:\\Python2.4\\Python.exe', r'python(2\.\d)?')
+ True
+
+ Parameters (``'-f'`` or ``'--foo'`` are ignored so ``'perl'`` does
+ the same as ``'perl -e'``)
+
+ Note that this method automatically searches the whole string (eg:
+ the regular expression is wrapped in ``'^$'``)
+ """
+ index = text.find('\n')
+ if index >= 0:
+ first_line = text[:index].lower()
+ else:
+ first_line = text.lower()
+ if first_line.startswith('#!'):
+ try:
+ found = [x for x in split_path_re.split(first_line[2:].strip())
+ if x and not x.startswith('-')][-1]
+ except IndexError:
+ return False
+ regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
+ if regex.search(found) is not None:
+ return True
+ return False
+
+
+def doctype_matches(text, regex):
+ """Check if the doctype matches a regular expression (if present).
+
+ Note that this method only checks the first part of a DOCTYPE.
+ eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
+ """
m = doctype_lookup_re.search(text)
- if m is None:
- return False
+ if m is None:
+ return False
doctype = m.group(1)
- return re.compile(regex, re.I).match(doctype.strip()) is not None
-
-
-def html_doctype_matches(text):
- """Check if the file looks like it has a html doctype."""
- return doctype_matches(text, r'html')
-
-
-_looks_like_xml_cache = {}
-
-
-def looks_like_xml(text):
- """Check if a doctype exists or if we have some tags."""
- if xml_decl_re.match(text):
- return True
- key = hash(text)
- try:
- return _looks_like_xml_cache[key]
- except KeyError:
+ return re.compile(regex, re.I).match(doctype.strip()) is not None
+
+
+def html_doctype_matches(text):
+ """Check if the file looks like it has a html doctype."""
+ return doctype_matches(text, r'html')
+
+
+_looks_like_xml_cache = {}
+
+
+def looks_like_xml(text):
+ """Check if a doctype exists or if we have some tags."""
+ if xml_decl_re.match(text):
+ return True
+ key = hash(text)
+ try:
+ return _looks_like_xml_cache[key]
+ except KeyError:
m = doctype_lookup_re.search(text)
- if m is not None:
- return True
- rv = tag_re.search(text[:1000]) is not None
- _looks_like_xml_cache[key] = rv
- return rv
-
-
+ if m is not None:
+ return True
+ rv = tag_re.search(text[:1000]) is not None
+ _looks_like_xml_cache[key] = rv
+ return rv
+
+
def surrogatepair(c):
"""Given a unicode character code with length greater than 16 bits,
return the two 16 bit surrogate pair.
"""
- # From example D28 of:
- # http://www.unicode.org/book/ch03.pdf
- return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
-
-
-def format_lines(var_name, seq, raw=False, indent_level=0):
- """Formats a sequence of strings for output."""
- lines = []
- base_indent = ' ' * indent_level * 4
- inner_indent = ' ' * (indent_level + 1) * 4
- lines.append(base_indent + var_name + ' = (')
- if raw:
- # These should be preformatted reprs of, say, tuples.
- for i in seq:
- lines.append(inner_indent + i + ',')
- else:
- for i in seq:
- # Force use of single quotes
- r = repr(i + '"')
- lines.append(inner_indent + r[:-2] + r[-1] + ',')
- lines.append(base_indent + ')')
- return '\n'.join(lines)
-
-
-def duplicates_removed(it, already_seen=()):
- """
- Returns a list with duplicates removed from the iterable `it`.
-
- Order is preserved.
- """
- lst = []
- seen = set()
- for i in it:
- if i in seen or i in already_seen:
- continue
- lst.append(i)
- seen.add(i)
- return lst
-
-
+ # From example D28 of:
+ # http://www.unicode.org/book/ch03.pdf
+ return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
+
+
+def format_lines(var_name, seq, raw=False, indent_level=0):
+ """Formats a sequence of strings for output."""
+ lines = []
+ base_indent = ' ' * indent_level * 4
+ inner_indent = ' ' * (indent_level + 1) * 4
+ lines.append(base_indent + var_name + ' = (')
+ if raw:
+ # These should be preformatted reprs of, say, tuples.
+ for i in seq:
+ lines.append(inner_indent + i + ',')
+ else:
+ for i in seq:
+ # Force use of single quotes
+ r = repr(i + '"')
+ lines.append(inner_indent + r[:-2] + r[-1] + ',')
+ lines.append(base_indent + ')')
+ return '\n'.join(lines)
+
+
+def duplicates_removed(it, already_seen=()):
+ """
+ Returns a list with duplicates removed from the iterable `it`.
+
+ Order is preserved.
+ """
+ lst = []
+ seen = set()
+ for i in it:
+ if i in seen or i in already_seen:
+ continue
+ lst.append(i)
+ seen.add(i)
+ return lst
+
+
class Future:
- """Generic class to defer some work.
-
- Handled specially in RegexLexerMeta, to support regex string construction at
- first use.
- """
- def get(self):
- raise NotImplementedError
-
-
-def guess_decode(text):
- """Decode *text* with guessed encoding.
-
- First try UTF-8; this should fail for non-UTF-8 encodings.
- Then try the preferred locale encoding.
- Fall back to latin-1, which always works.
- """
- try:
- text = text.decode('utf-8')
- return text, 'utf-8'
- except UnicodeDecodeError:
- try:
- import locale
- prefencoding = locale.getpreferredencoding()
- text = text.decode()
- return text, prefencoding
- except (UnicodeDecodeError, LookupError):
- text = text.decode('latin1')
- return text, 'latin1'
-
-
-def guess_decode_from_terminal(text, term):
- """Decode *text* coming from terminal *term*.
-
- First try the terminal encoding, if given.
- Then try UTF-8. Then try the preferred locale encoding.
- Fall back to latin-1, which always works.
- """
- if getattr(term, 'encoding', None):
- try:
- text = text.decode(term.encoding)
- except UnicodeDecodeError:
- pass
- else:
- return text, term.encoding
- return guess_decode(text)
-
-
-def terminal_encoding(term):
- """Return our best guess of encoding for the given *term*."""
- if getattr(term, 'encoding', None):
- return term.encoding
- import locale
- return locale.getpreferredencoding()
-
-
+ """Generic class to defer some work.
+
+ Handled specially in RegexLexerMeta, to support regex string construction at
+ first use.
+ """
+ def get(self):
+ raise NotImplementedError
+
+
+def guess_decode(text):
+ """Decode *text* with guessed encoding.
+
+ First try UTF-8; this should fail for non-UTF-8 encodings.
+ Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ try:
+ text = text.decode('utf-8')
+ return text, 'utf-8'
+ except UnicodeDecodeError:
+ try:
+ import locale
+ prefencoding = locale.getpreferredencoding()
+ text = text.decode()
+ return text, prefencoding
+ except (UnicodeDecodeError, LookupError):
+ text = text.decode('latin1')
+ return text, 'latin1'
+
+
+def guess_decode_from_terminal(text, term):
+ """Decode *text* coming from terminal *term*.
+
+ First try the terminal encoding, if given.
+ Then try UTF-8. Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ if getattr(term, 'encoding', None):
+ try:
+ text = text.decode(term.encoding)
+ except UnicodeDecodeError:
+ pass
+ else:
+ return text, term.encoding
+ return guess_decode(text)
+
+
+def terminal_encoding(term):
+ """Return our best guess of encoding for the given *term*."""
+ if getattr(term, 'encoding', None):
+ return term.encoding
+ import locale
+ return locale.getpreferredencoding()
+
+
class UnclosingTextIOWrapper(TextIOWrapper):
# Don't close underlying buffer on destruction.
def close(self):
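
Aside: the helpers restored in pygments/util.py above are used throughout the lexers for option parsing and input sniffing; two common entry points can be exercised directly. A minimal sketch, assuming pygments.util is importable as in the restored file:

    from pygments.util import shebang_matches, guess_decode

    # shebang_matches() anchors the pattern against the last path component
    # of the shebang line and ignores '-f'-style arguments.
    assert shebang_matches('#!/usr/bin/env python3', r'python(3(\.\d)?)?')

    # guess_decode() tries UTF-8 first, then the preferred locale encoding,
    # and falls back to latin-1.
    text, enc = guess_decode(b'caf\xc3\xa9')
    assert (text, enc) == ('caf\u00e9', 'utf-8')
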
diff --git a/contrib/python/Pygments/py3/ya.make b/contrib/python/Pygments/py3/ya.make
index 3fab931499..da56118d98 100644
--- a/contrib/python/Pygments/py3/ya.make
+++ b/contrib/python/Pygments/py3/ya.make
@@ -1,7 +1,7 @@
# Generated by devtools/yamaker (pypi).
PY3_LIBRARY()
-
+
OWNER(blinkov g:python-contrib)
VERSION(2.11.2)
@@ -14,282 +14,282 @@ NO_CHECK_IMPORTS(
pygments.sphinxext
)
-PY_SRCS(
- TOP_LEVEL
- pygments/__init__.py
+PY_SRCS(
+ TOP_LEVEL
+ pygments/__init__.py
pygments/__main__.py
- pygments/cmdline.py
- pygments/console.py
- pygments/filter.py
- pygments/filters/__init__.py
- pygments/formatter.py
- pygments/formatters/__init__.py
- pygments/formatters/_mapping.py
- pygments/formatters/bbcode.py
+ pygments/cmdline.py
+ pygments/console.py
+ pygments/filter.py
+ pygments/filters/__init__.py
+ pygments/formatter.py
+ pygments/formatters/__init__.py
+ pygments/formatters/_mapping.py
+ pygments/formatters/bbcode.py
pygments/formatters/groff.py
- pygments/formatters/html.py
- pygments/formatters/img.py
- pygments/formatters/irc.py
- pygments/formatters/latex.py
- pygments/formatters/other.py
+ pygments/formatters/html.py
+ pygments/formatters/img.py
+ pygments/formatters/irc.py
+ pygments/formatters/latex.py
+ pygments/formatters/other.py
pygments/formatters/pangomarkup.py
- pygments/formatters/rtf.py
- pygments/formatters/svg.py
- pygments/formatters/terminal.py
- pygments/formatters/terminal256.py
- pygments/lexer.py
- pygments/lexers/__init__.py
- pygments/lexers/_asy_builtins.py
- pygments/lexers/_cl_builtins.py
- pygments/lexers/_cocoa_builtins.py
- pygments/lexers/_csound_builtins.py
+ pygments/formatters/rtf.py
+ pygments/formatters/svg.py
+ pygments/formatters/terminal.py
+ pygments/formatters/terminal256.py
+ pygments/lexer.py
+ pygments/lexers/__init__.py
+ pygments/lexers/_asy_builtins.py
+ pygments/lexers/_cl_builtins.py
+ pygments/lexers/_cocoa_builtins.py
+ pygments/lexers/_csound_builtins.py
pygments/lexers/_julia_builtins.py
- pygments/lexers/_lasso_builtins.py
+ pygments/lexers/_lasso_builtins.py
pygments/lexers/_lilypond_builtins.py
- pygments/lexers/_lua_builtins.py
- pygments/lexers/_mapping.py
- pygments/lexers/_mql_builtins.py
+ pygments/lexers/_lua_builtins.py
+ pygments/lexers/_mapping.py
+ pygments/lexers/_mql_builtins.py
pygments/lexers/_mysql_builtins.py
- pygments/lexers/_openedge_builtins.py
- pygments/lexers/_php_builtins.py
- pygments/lexers/_postgres_builtins.py
- pygments/lexers/_scilab_builtins.py
- pygments/lexers/_sourcemod_builtins.py
- pygments/lexers/_stan_builtins.py
+ pygments/lexers/_openedge_builtins.py
+ pygments/lexers/_php_builtins.py
+ pygments/lexers/_postgres_builtins.py
+ pygments/lexers/_scilab_builtins.py
+ pygments/lexers/_sourcemod_builtins.py
+ pygments/lexers/_stan_builtins.py
pygments/lexers/_stata_builtins.py
pygments/lexers/_tsql_builtins.py
pygments/lexers/_usd_builtins.py
pygments/lexers/_vbscript_builtins.py
- pygments/lexers/_vim_builtins.py
- pygments/lexers/actionscript.py
- pygments/lexers/agile.py
- pygments/lexers/algebra.py
- pygments/lexers/ambient.py
+ pygments/lexers/_vim_builtins.py
+ pygments/lexers/actionscript.py
+ pygments/lexers/agile.py
+ pygments/lexers/algebra.py
+ pygments/lexers/ambient.py
pygments/lexers/amdgpu.py
pygments/lexers/ampl.py
pygments/lexers/apdlexer.py
- pygments/lexers/apl.py
- pygments/lexers/archetype.py
+ pygments/lexers/apl.py
+ pygments/lexers/archetype.py
pygments/lexers/arrow.py
pygments/lexers/asc.py
- pygments/lexers/asm.py
- pygments/lexers/automation.py
+ pygments/lexers/asm.py
+ pygments/lexers/automation.py
pygments/lexers/bare.py
- pygments/lexers/basic.py
+ pygments/lexers/basic.py
pygments/lexers/bdd.py
pygments/lexers/bibtex.py
pygments/lexers/boa.py
- pygments/lexers/business.py
- pygments/lexers/c_cpp.py
- pygments/lexers/c_like.py
+ pygments/lexers/business.py
+ pygments/lexers/c_cpp.py
+ pygments/lexers/c_like.py
pygments/lexers/capnproto.py
pygments/lexers/cddl.py
- pygments/lexers/chapel.py
+ pygments/lexers/chapel.py
pygments/lexers/clean.py
- pygments/lexers/compiled.py
- pygments/lexers/configs.py
- pygments/lexers/console.py
+ pygments/lexers/compiled.py
+ pygments/lexers/configs.py
+ pygments/lexers/console.py
pygments/lexers/crystal.py
- pygments/lexers/csound.py
- pygments/lexers/css.py
- pygments/lexers/d.py
- pygments/lexers/dalvik.py
- pygments/lexers/data.py
+ pygments/lexers/csound.py
+ pygments/lexers/css.py
+ pygments/lexers/d.py
+ pygments/lexers/dalvik.py
+ pygments/lexers/data.py
pygments/lexers/devicetree.py
- pygments/lexers/diff.py
- pygments/lexers/dotnet.py
- pygments/lexers/dsls.py
- pygments/lexers/dylan.py
- pygments/lexers/ecl.py
- pygments/lexers/eiffel.py
- pygments/lexers/elm.py
+ pygments/lexers/diff.py
+ pygments/lexers/dotnet.py
+ pygments/lexers/dsls.py
+ pygments/lexers/dylan.py
+ pygments/lexers/ecl.py
+ pygments/lexers/eiffel.py
+ pygments/lexers/elm.py
pygments/lexers/elpi.py
pygments/lexers/email.py
- pygments/lexers/erlang.py
- pygments/lexers/esoteric.py
- pygments/lexers/ezhil.py
- pygments/lexers/factor.py
- pygments/lexers/fantom.py
- pygments/lexers/felix.py
+ pygments/lexers/erlang.py
+ pygments/lexers/esoteric.py
+ pygments/lexers/ezhil.py
+ pygments/lexers/factor.py
+ pygments/lexers/fantom.py
+ pygments/lexers/felix.py
pygments/lexers/floscript.py
pygments/lexers/forth.py
- pygments/lexers/fortran.py
- pygments/lexers/foxpro.py
+ pygments/lexers/fortran.py
+ pygments/lexers/foxpro.py
pygments/lexers/freefem.py
- pygments/lexers/functional.py
+ pygments/lexers/functional.py
pygments/lexers/futhark.py
pygments/lexers/gcodelexer.py
pygments/lexers/gdscript.py
- pygments/lexers/go.py
- pygments/lexers/grammar_notation.py
- pygments/lexers/graph.py
- pygments/lexers/graphics.py
+ pygments/lexers/go.py
+ pygments/lexers/grammar_notation.py
+ pygments/lexers/graph.py
+ pygments/lexers/graphics.py
pygments/lexers/graphviz.py
pygments/lexers/gsql.py
- pygments/lexers/haskell.py
- pygments/lexers/haxe.py
- pygments/lexers/hdl.py
- pygments/lexers/hexdump.py
- pygments/lexers/html.py
- pygments/lexers/idl.py
- pygments/lexers/igor.py
- pygments/lexers/inferno.py
- pygments/lexers/installers.py
- pygments/lexers/int_fiction.py
- pygments/lexers/iolang.py
- pygments/lexers/j.py
- pygments/lexers/javascript.py
+ pygments/lexers/haskell.py
+ pygments/lexers/haxe.py
+ pygments/lexers/hdl.py
+ pygments/lexers/hexdump.py
+ pygments/lexers/html.py
+ pygments/lexers/idl.py
+ pygments/lexers/igor.py
+ pygments/lexers/inferno.py
+ pygments/lexers/installers.py
+ pygments/lexers/int_fiction.py
+ pygments/lexers/iolang.py
+ pygments/lexers/j.py
+ pygments/lexers/javascript.py
pygments/lexers/jslt.py
- pygments/lexers/julia.py
- pygments/lexers/jvm.py
+ pygments/lexers/julia.py
+ pygments/lexers/jvm.py
pygments/lexers/kuin.py
pygments/lexers/lilypond.py
- pygments/lexers/lisp.py
- pygments/lexers/make.py
- pygments/lexers/markup.py
- pygments/lexers/math.py
- pygments/lexers/matlab.py
+ pygments/lexers/lisp.py
+ pygments/lexers/make.py
+ pygments/lexers/markup.py
+ pygments/lexers/math.py
+ pygments/lexers/matlab.py
pygments/lexers/maxima.py
pygments/lexers/meson.py
pygments/lexers/mime.py
- pygments/lexers/ml.py
- pygments/lexers/modeling.py
- pygments/lexers/modula2.py
+ pygments/lexers/ml.py
+ pygments/lexers/modeling.py
+ pygments/lexers/modula2.py
pygments/lexers/monte.py
pygments/lexers/mosel.py
pygments/lexers/ncl.py
- pygments/lexers/nimrod.py
- pygments/lexers/nit.py
- pygments/lexers/nix.py
- pygments/lexers/oberon.py
- pygments/lexers/objective.py
- pygments/lexers/ooc.py
- pygments/lexers/other.py
- pygments/lexers/parasail.py
- pygments/lexers/parsers.py
- pygments/lexers/pascal.py
- pygments/lexers/pawn.py
- pygments/lexers/perl.py
- pygments/lexers/php.py
+ pygments/lexers/nimrod.py
+ pygments/lexers/nit.py
+ pygments/lexers/nix.py
+ pygments/lexers/oberon.py
+ pygments/lexers/objective.py
+ pygments/lexers/ooc.py
+ pygments/lexers/other.py
+ pygments/lexers/parasail.py
+ pygments/lexers/parsers.py
+ pygments/lexers/pascal.py
+ pygments/lexers/pawn.py
+ pygments/lexers/perl.py
+ pygments/lexers/php.py
pygments/lexers/pointless.py
pygments/lexers/pony.py
- pygments/lexers/praat.py
+ pygments/lexers/praat.py
pygments/lexers/procfile.py
- pygments/lexers/prolog.py
+ pygments/lexers/prolog.py
pygments/lexers/promql.py
- pygments/lexers/python.py
- pygments/lexers/qvt.py
- pygments/lexers/r.py
- pygments/lexers/rdf.py
- pygments/lexers/rebol.py
- pygments/lexers/resource.py
+ pygments/lexers/python.py
+ pygments/lexers/qvt.py
+ pygments/lexers/r.py
+ pygments/lexers/rdf.py
+ pygments/lexers/rebol.py
+ pygments/lexers/resource.py
pygments/lexers/ride.py
pygments/lexers/rita.py
pygments/lexers/rnc.py
- pygments/lexers/roboconf.py
- pygments/lexers/robotframework.py
- pygments/lexers/ruby.py
- pygments/lexers/rust.py
+ pygments/lexers/roboconf.py
+ pygments/lexers/robotframework.py
+ pygments/lexers/ruby.py
+ pygments/lexers/rust.py
pygments/lexers/sas.py
pygments/lexers/savi.py
pygments/lexers/scdoc.py
- pygments/lexers/scripting.py
+ pygments/lexers/scripting.py
pygments/lexers/sgf.py
- pygments/lexers/shell.py
+ pygments/lexers/shell.py
pygments/lexers/sieve.py
pygments/lexers/slash.py
- pygments/lexers/smalltalk.py
+ pygments/lexers/smalltalk.py
pygments/lexers/smithy.py
pygments/lexers/smv.py
- pygments/lexers/snobol.py
+ pygments/lexers/snobol.py
pygments/lexers/solidity.py
pygments/lexers/sophia.py
- pygments/lexers/special.py
+ pygments/lexers/special.py
pygments/lexers/spice.py
- pygments/lexers/sql.py
+ pygments/lexers/sql.py
pygments/lexers/srcinfo.py
pygments/lexers/stata.py
- pygments/lexers/supercollider.py
- pygments/lexers/tcl.py
+ pygments/lexers/supercollider.py
+ pygments/lexers/tcl.py
pygments/lexers/teal.py
- pygments/lexers/templates.py
+ pygments/lexers/templates.py
pygments/lexers/teraterm.py
- pygments/lexers/testing.py
- pygments/lexers/text.py
- pygments/lexers/textedit.py
- pygments/lexers/textfmts.py
- pygments/lexers/theorem.py
+ pygments/lexers/testing.py
+ pygments/lexers/text.py
+ pygments/lexers/textedit.py
+ pygments/lexers/textfmts.py
+ pygments/lexers/theorem.py
pygments/lexers/thingsdb.py
pygments/lexers/tnt.py
- pygments/lexers/trafficscript.py
+ pygments/lexers/trafficscript.py
pygments/lexers/typoscript.py
pygments/lexers/unicon.py
- pygments/lexers/urbi.py
+ pygments/lexers/urbi.py
pygments/lexers/usd.py
pygments/lexers/varnish.py
pygments/lexers/verification.py
- pygments/lexers/web.py
+ pygments/lexers/web.py
pygments/lexers/webassembly.py
pygments/lexers/webidl.py
- pygments/lexers/webmisc.py
+ pygments/lexers/webmisc.py
pygments/lexers/whiley.py
- pygments/lexers/x10.py
+ pygments/lexers/x10.py
pygments/lexers/xorg.py
pygments/lexers/yang.py
pygments/lexers/zig.py
- pygments/modeline.py
- pygments/plugin.py
- pygments/regexopt.py
- pygments/scanner.py
- pygments/sphinxext.py
- pygments/style.py
- pygments/styles/__init__.py
+ pygments/modeline.py
+ pygments/plugin.py
+ pygments/regexopt.py
+ pygments/scanner.py
+ pygments/sphinxext.py
+ pygments/style.py
+ pygments/styles/__init__.py
pygments/styles/abap.py
- pygments/styles/algol.py
- pygments/styles/algol_nu.py
- pygments/styles/arduino.py
- pygments/styles/autumn.py
- pygments/styles/borland.py
- pygments/styles/bw.py
- pygments/styles/colorful.py
- pygments/styles/default.py
+ pygments/styles/algol.py
+ pygments/styles/algol_nu.py
+ pygments/styles/arduino.py
+ pygments/styles/autumn.py
+ pygments/styles/borland.py
+ pygments/styles/bw.py
+ pygments/styles/colorful.py
+ pygments/styles/default.py
pygments/styles/dracula.py
- pygments/styles/emacs.py
- pygments/styles/friendly.py
+ pygments/styles/emacs.py
+ pygments/styles/friendly.py
pygments/styles/friendly_grayscale.py
- pygments/styles/fruity.py
+ pygments/styles/fruity.py
pygments/styles/gruvbox.py
- pygments/styles/igor.py
+ pygments/styles/igor.py
pygments/styles/inkpot.py
pygments/styles/lilypond.py
- pygments/styles/lovelace.py
- pygments/styles/manni.py
+ pygments/styles/lovelace.py
+ pygments/styles/manni.py
pygments/styles/material.py
- pygments/styles/monokai.py
- pygments/styles/murphy.py
- pygments/styles/native.py
+ pygments/styles/monokai.py
+ pygments/styles/murphy.py
+ pygments/styles/native.py
pygments/styles/onedark.py
- pygments/styles/paraiso_dark.py
- pygments/styles/paraiso_light.py
- pygments/styles/pastie.py
- pygments/styles/perldoc.py
+ pygments/styles/paraiso_dark.py
+ pygments/styles/paraiso_light.py
+ pygments/styles/pastie.py
+ pygments/styles/perldoc.py
pygments/styles/rainbow_dash.py
- pygments/styles/rrt.py
+ pygments/styles/rrt.py
pygments/styles/sas.py
pygments/styles/solarized.py
pygments/styles/stata_dark.py
pygments/styles/stata_light.py
- pygments/styles/tango.py
- pygments/styles/trac.py
- pygments/styles/vim.py
- pygments/styles/vs.py
- pygments/styles/xcode.py
+ pygments/styles/tango.py
+ pygments/styles/trac.py
+ pygments/styles/vim.py
+ pygments/styles/vs.py
+ pygments/styles/xcode.py
pygments/styles/zenburn.py
- pygments/token.py
- pygments/unistring.py
- pygments/util.py
-)
-
+ pygments/token.py
+ pygments/unistring.py
+ pygments/util.py
+)
+
RESOURCE_FILES(
PREFIX contrib/python/Pygments/py3/
.dist-info/METADATA
@@ -297,4 +297,4 @@ RESOURCE_FILES(
.dist-info/top_level.txt
)
-END()
+END()
diff --git a/contrib/python/Pygments/ya.make b/contrib/python/Pygments/ya.make
index 9aec5a65a8..02ccf423b1 100644
--- a/contrib/python/Pygments/ya.make
+++ b/contrib/python/Pygments/ya.make
@@ -1,5 +1,5 @@
PY23_LIBRARY()
-
+
LICENSE(Service-Py23-Proxy)
OWNER(g:python-contrib)
@@ -13,7 +13,7 @@ ENDIF()
NO_LINT()
END()
-
+
RECURSE(
py2
py3
diff --git a/contrib/python/ya.make b/contrib/python/ya.make
index d01ced9f3a..c5d23707b5 100644
--- a/contrib/python/ya.make
+++ b/contrib/python/ya.make
@@ -817,7 +817,7 @@ RECURSE(
pyfst
pygit2
PyGithub
- Pygments
+ Pygments
pygrib
pygtrie
PyHamcrest
diff --git a/contrib/restricted/cityhash-1.0.2/COPYING b/contrib/restricted/cityhash-1.0.2/COPYING
index bf15194dd5..31ce3be304 100644
--- a/contrib/restricted/cityhash-1.0.2/COPYING
+++ b/contrib/restricted/cityhash-1.0.2/COPYING
@@ -1,19 +1,19 @@
-// Copyright (c) 2011 Google, Inc.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-// THE SOFTWARE.
+// Copyright (c) 2011 Google, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
diff --git a/contrib/restricted/cityhash-1.0.2/city.cc b/contrib/restricted/cityhash-1.0.2/city.cc
index 86d11fda6e..26ef0270d6 100644
--- a/contrib/restricted/cityhash-1.0.2/city.cc
+++ b/contrib/restricted/cityhash-1.0.2/city.cc
@@ -1,479 +1,479 @@
-// Copyright (c) 2011 Google, Inc.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-// THE SOFTWARE.
-//
-// CityHash, by Geoff Pike and Jyrki Alakuijala
-//
-// This file provides CityHash64() and related functions.
-//
-// It's probably possible to create even faster hash functions by
-// writing a program that systematically explores some of the space of
-// possible hash functions, by using SIMD instructions, or by
-// compromising on hash quality.
-
-#include "config.h"
-#include <city.h>
-
-#include <algorithm>
-#include <string.h> // for memcpy and memset
-
-#ifdef __SSE4_2__
-#include <citycrc.h>
-#include <nmmintrin.h>
-#endif
-
-using namespace std;
-
-
-#if !defined(WORDS_BIGENDIAN)
-
-#define uint32_in_expected_order(x) (x)
-#define uint64_in_expected_order(x) (x)
-
-#else
-
-#ifdef _MSC_VER
-#include <stdlib.h>
-#define bswap_32(x) _byteswap_ulong(x)
-#define bswap_64(x) _byteswap_uint64(x)
-
-#elif defined(__APPLE__)
-// Mac OS X / Darwin features
-#include <libkern/OSByteOrder.h>
-#define bswap_32(x) OSSwapInt32(x)
-#define bswap_64(x) OSSwapInt64(x)
-
-#else
-#include <byteswap.h>
-#endif
-
-#define uint32_in_expected_order(x) (bswap_32(x))
-#define uint64_in_expected_order(x) (bswap_64(x))
-
-#endif // WORDS_BIGENDIAN
-
-#if !defined(LIKELY)
-#if HAVE_BUILTIN_EXPECT
-#define LIKELY(x) (__builtin_expect(!!(x), 1))
-#else
-#define LIKELY(x) (x)
-#endif
-#endif
-
-namespace CityHash_v1_0_2
-{
-
-static uint64 UNALIGNED_LOAD64(const char *p) {
- uint64 result;
- memcpy(&result, p, sizeof(result));
- return result;
-}
-
-static uint32 UNALIGNED_LOAD32(const char *p) {
- uint32 result;
- memcpy(&result, p, sizeof(result));
- return result;
-}
-
-static uint64 Fetch64(const char *p) {
- return uint64_in_expected_order(UNALIGNED_LOAD64(p));
-}
-
-static uint32 Fetch32(const char *p) {
- return uint32_in_expected_order(UNALIGNED_LOAD32(p));
-}
-
-// Some primes between 2^63 and 2^64 for various uses.
-static const uint64 k0 = 0xc3a5c85c97cb3127ULL;
-static const uint64 k1 = 0xb492b66fbe98f273ULL;
-static const uint64 k2 = 0x9ae16a3b2f90404fULL;
-static const uint64 k3 = 0xc949d7c7509e6557ULL;
-
-// Bitwise right rotate. Normally this will compile to a single
-// instruction, especially if the shift is a manifest constant.
-static uint64 Rotate(uint64 val, int shift) {
- // Avoid shifting by 64: doing so yields an undefined result.
- return shift == 0 ? val : ((val >> shift) | (val << (64 - shift)));
-}
-
-// Equivalent to Rotate(), but requires the second arg to be non-zero.
-// On x86-64, and probably others, it's possible for this to compile
-// to a single instruction if both args are already in registers.
-static uint64 RotateByAtLeast1(uint64 val, int shift) {
- return (val >> shift) | (val << (64 - shift));
-}
-
-static uint64 ShiftMix(uint64 val) {
- return val ^ (val >> 47);
-}
-
-static uint64 HashLen16(uint64 u, uint64 v) {
- return Hash128to64(uint128(u, v));
-}
-
-static uint64 HashLen0to16(const char *s, size_t len) {
- if (len > 8) {
- uint64 a = Fetch64(s);
- uint64 b = Fetch64(s + len - 8);
- return HashLen16(a, RotateByAtLeast1(b + len, len)) ^ b;
- }
- if (len >= 4) {
- uint64 a = Fetch32(s);
- return HashLen16(len + (a << 3), Fetch32(s + len - 4));
- }
- if (len > 0) {
- uint8 a = s[0];
- uint8 b = s[len >> 1];
- uint8 c = s[len - 1];
- uint32 y = static_cast<uint32>(a) + (static_cast<uint32>(b) << 8);
- uint32 z = len + (static_cast<uint32>(c) << 2);
- return ShiftMix(y * k2 ^ z * k3) * k2;
- }
- return k2;
-}
-
-// This probably works well for 16-byte strings as well, but it may be overkill
-// in that case.
-static uint64 HashLen17to32(const char *s, size_t len) {
- uint64 a = Fetch64(s) * k1;
- uint64 b = Fetch64(s + 8);
- uint64 c = Fetch64(s + len - 8) * k2;
- uint64 d = Fetch64(s + len - 16) * k0;
- return HashLen16(Rotate(a - b, 43) + Rotate(c, 30) + d,
- a + Rotate(b ^ k3, 20) - c + len);
-}
-
-// Return a 16-byte hash for 48 bytes. Quick and dirty.
-// Callers do best to use "random-looking" values for a and b.
-static pair<uint64, uint64> WeakHashLen32WithSeeds(
- uint64 w, uint64 x, uint64 y, uint64 z, uint64 a, uint64 b) {
- a += w;
- b = Rotate(b + a + z, 21);
- uint64 c = a;
- a += x;
- a += y;
- b += Rotate(a, 44);
- return make_pair(a + z, b + c);
-}
-
-// Return a 16-byte hash for s[0] ... s[31], a, and b. Quick and dirty.
-static pair<uint64, uint64> WeakHashLen32WithSeeds(
- const char* s, uint64 a, uint64 b) {
- return WeakHashLen32WithSeeds(Fetch64(s),
- Fetch64(s + 8),
- Fetch64(s + 16),
- Fetch64(s + 24),
- a,
- b);
-}
-
-// Return an 8-byte hash for 33 to 64 bytes.
-static uint64 HashLen33to64(const char *s, size_t len) {
- uint64 z = Fetch64(s + 24);
- uint64 a = Fetch64(s) + (len + Fetch64(s + len - 16)) * k0;
- uint64 b = Rotate(a + z, 52);
- uint64 c = Rotate(a, 37);
- a += Fetch64(s + 8);
- c += Rotate(a, 7);
- a += Fetch64(s + 16);
- uint64 vf = a + z;
- uint64 vs = b + Rotate(a, 31) + c;
- a = Fetch64(s + 16) + Fetch64(s + len - 32);
- z = Fetch64(s + len - 8);
- b = Rotate(a + z, 52);
- c = Rotate(a, 37);
- a += Fetch64(s + len - 24);
- c += Rotate(a, 7);
- a += Fetch64(s + len - 16);
- uint64 wf = a + z;
- uint64 ws = b + Rotate(a, 31) + c;
- uint64 r = ShiftMix((vf + ws) * k2 + (wf + vs) * k0);
- return ShiftMix(r * k0 + vs) * k2;
-}
-
-uint64 CityHash64(const char *s, size_t len) {
- if (len <= 32) {
- if (len <= 16) {
- return HashLen0to16(s, len);
- } else {
- return HashLen17to32(s, len);
- }
- } else if (len <= 64) {
- return HashLen33to64(s, len);
- }
-
- // For strings over 64 bytes we hash the end first, and then as we
- // loop we keep 56 bytes of state: v, w, x, y, and z.
- uint64 x = Fetch64(s);
- uint64 y = Fetch64(s + len - 16) ^ k1;
- uint64 z = Fetch64(s + len - 56) ^ k0;
- pair<uint64, uint64> v = WeakHashLen32WithSeeds(s + len - 64, len, y);
- pair<uint64, uint64> w = WeakHashLen32WithSeeds(s + len - 32, len * k1, k0);
- z += ShiftMix(v.second) * k1;
- x = Rotate(z + x, 39) * k1;
- y = Rotate(y, 33) * k1;
-
- // Decrease len to the nearest multiple of 64, and operate on 64-byte chunks.
- len = (len - 1) & ~static_cast<size_t>(63);
- do {
- x = Rotate(x + y + v.first + Fetch64(s + 16), 37) * k1;
- y = Rotate(y + v.second + Fetch64(s + 48), 42) * k1;
- x ^= w.second;
- y ^= v.first;
- z = Rotate(z ^ w.first, 33);
- v = WeakHashLen32WithSeeds(s, v.second * k1, x + w.first);
- w = WeakHashLen32WithSeeds(s + 32, z + w.second, y);
- std::swap(z, x);
- s += 64;
- len -= 64;
- } while (len != 0);
- return HashLen16(HashLen16(v.first, w.first) + ShiftMix(y) * k1 + z,
- HashLen16(v.second, w.second) + x);
-}
-
-uint64 CityHash64WithSeed(const char *s, size_t len, uint64 seed) {
- return CityHash64WithSeeds(s, len, k2, seed);
-}
-
-uint64 CityHash64WithSeeds(const char *s, size_t len,
- uint64 seed0, uint64 seed1) {
- return HashLen16(CityHash64(s, len) - seed0, seed1);
-}
-
-// A subroutine for CityHash128(). Returns a decent 128-bit hash for strings
-// of any length representable in ssize_t. Based on City and Murmur.
-static uint128 CityMurmur(const char *s, size_t len, uint128 seed) {
- uint64 a = Uint128Low64(seed);
- uint64 b = Uint128High64(seed);
- uint64 c = 0;
- uint64 d = 0;
- ssize_t l = len - 16;
- if (l <= 0) { // len <= 16
- a = ShiftMix(a * k1) * k1;
- c = b * k1 + HashLen0to16(s, len);
- d = ShiftMix(a + (len >= 8 ? Fetch64(s) : c));
- } else { // len > 16
- c = HashLen16(Fetch64(s + len - 8) + k1, a);
- d = HashLen16(b + len, c + Fetch64(s + len - 16));
- a += d;
- do {
- a ^= ShiftMix(Fetch64(s) * k1) * k1;
- a *= k1;
- b ^= a;
- c ^= ShiftMix(Fetch64(s + 8) * k1) * k1;
- c *= k1;
- d ^= c;
- s += 16;
- l -= 16;
- } while (l > 0);
- }
- a = HashLen16(a, c);
- b = HashLen16(d, b);
- return uint128(a ^ b, HashLen16(b, a));
-}
-
-uint128 CityHash128WithSeed(const char *s, size_t len, uint128 seed) {
- if (len < 128) {
- return CityMurmur(s, len, seed);
- }
-
- // We expect len >= 128 to be the common case. Keep 56 bytes of state:
- // v, w, x, y, and z.
- pair<uint64, uint64> v, w;
- uint64 x = Uint128Low64(seed);
- uint64 y = Uint128High64(seed);
- uint64 z = len * k1;
- v.first = Rotate(y ^ k1, 49) * k1 + Fetch64(s);
- v.second = Rotate(v.first, 42) * k1 + Fetch64(s + 8);
- w.first = Rotate(y + z, 35) * k1 + x;
- w.second = Rotate(x + Fetch64(s + 88), 53) * k1;
-
- // This is the same inner loop as CityHash64(), manually unrolled.
- do {
- x = Rotate(x + y + v.first + Fetch64(s + 16), 37) * k1;
- y = Rotate(y + v.second + Fetch64(s + 48), 42) * k1;
- x ^= w.second;
- y ^= v.first;
- z = Rotate(z ^ w.first, 33);
- v = WeakHashLen32WithSeeds(s, v.second * k1, x + w.first);
- w = WeakHashLen32WithSeeds(s + 32, z + w.second, y);
- std::swap(z, x);
- s += 64;
- x = Rotate(x + y + v.first + Fetch64(s + 16), 37) * k1;
- y = Rotate(y + v.second + Fetch64(s + 48), 42) * k1;
- x ^= w.second;
- y ^= v.first;
- z = Rotate(z ^ w.first, 33);
- v = WeakHashLen32WithSeeds(s, v.second * k1, x + w.first);
- w = WeakHashLen32WithSeeds(s + 32, z + w.second, y);
- std::swap(z, x);
- s += 64;
- len -= 128;
- } while (LIKELY(len >= 128));
- y += Rotate(w.first, 37) * k0 + z;
- x += Rotate(v.first + z, 49) * k0;
- // If 0 < len < 128, hash up to 4 chunks of 32 bytes each from the end of s.
- for (size_t tail_done = 0; tail_done < len; ) {
- tail_done += 32;
- y = Rotate(y - x, 42) * k0 + v.second;
- w.first += Fetch64(s + len - tail_done + 16);
- x = Rotate(x, 49) * k0 + w.first;
- w.first += v.first;
- v = WeakHashLen32WithSeeds(s + len - tail_done, v.first, v.second);
- }
- // At this point our 48 bytes of state should contain more than
- // enough information for a strong 128-bit hash. We use two
- // different 48-byte-to-8-byte hashes to get a 16-byte final result.
- x = HashLen16(x, v.first);
- y = HashLen16(y, w.first);
- return uint128(HashLen16(x + v.second, w.second) + y,
- HashLen16(x + w.second, y + v.second));
-}
-
-uint128 CityHash128(const char *s, size_t len) {
- if (len >= 16) {
- return CityHash128WithSeed(s + 16,
- len - 16,
- uint128(Fetch64(s) ^ k3,
- Fetch64(s + 8)));
- } else if (len >= 8) {
- return CityHash128WithSeed(NULL,
- 0,
- uint128(Fetch64(s) ^ (len * k0),
- Fetch64(s + len - 8) ^ k1));
- } else {
- return CityHash128WithSeed(s, len, uint128(k0, k1));
- }
-}
-
-#ifdef __SSE4_2__
-
-// Requires len >= 240.
-static void CityHashCrc256Long(const char *s, size_t len,
- uint32 seed, uint64 *result) {
- uint64 a = Fetch64(s + 56) + k0;
- uint64 b = Fetch64(s + 96) + k0;
- uint64 c = result[1] = HashLen16(b, len);
- uint64 d = result[2] = Fetch64(s + 120) * k0 + len;
- uint64 e = Fetch64(s + 184) + seed;
- uint64 f = seed;
- uint64 g = 0;
- uint64 h = 0;
- uint64 i = 0;
- uint64 j = 0;
- uint64 t = c + d;
-
- // 240 bytes of input per iter.
- size_t iters = len / 240;
- len -= iters * 240;
- do {
-#define CHUNK(multiplier, z) \
- { \
- uint64 old_a = a; \
- a = Rotate(b, 41 ^ z) * multiplier + Fetch64(s); \
- b = Rotate(c, 27 ^ z) * multiplier + Fetch64(s + 8); \
- c = Rotate(d, 41 ^ z) * multiplier + Fetch64(s + 16); \
- d = Rotate(e, 33 ^ z) * multiplier + Fetch64(s + 24); \
- e = Rotate(t, 25 ^ z) * multiplier + Fetch64(s + 32); \
- t = old_a; \
- } \
- f = _mm_crc32_u64(f, a); \
- g = _mm_crc32_u64(g, b); \
- h = _mm_crc32_u64(h, c); \
- i = _mm_crc32_u64(i, d); \
- j = _mm_crc32_u64(j, e); \
- s += 40
-
- CHUNK(1, 1); CHUNK(k0, 0);
- CHUNK(1, 1); CHUNK(k0, 0);
- CHUNK(1, 1); CHUNK(k0, 0);
- } while (--iters > 0);
- j += i << 32;
- a = HashLen16(a, j);
- h += g << 32;
- b = b * k0 + h;
- c = HashLen16(c, f) + i;
- d = HashLen16(d, e);
- pair<uint64, uint64> v(j + e, HashLen16(h, t));
- h = v.second + f;
- // If 0 < len < 240, hash chunks of 32 bytes each from the end of s.
- for (size_t tail_done = 0; tail_done < len; ) {
- tail_done += 32;
- c = Rotate(c - a, 42) * k0 + v.second;
- d += Fetch64(s + len - tail_done + 16);
- a = Rotate(a, 49) * k0 + d;
- d += v.first;
- v = WeakHashLen32WithSeeds(s + len - tail_done, v.first, v.second);
- }
-
- // Final mix.
- e = HashLen16(a, d) + v.first;
- f = HashLen16(b, c) + a;
- g = HashLen16(v.first, v.second) + c;
- result[0] = e + f + g + h;
- a = ShiftMix((a + g) * k0) * k0 + b;
- result[1] += a + result[0];
- a = ShiftMix(a * k0) * k0 + c;
- result[2] += a + result[1];
- a = ShiftMix((a + e) * k0) * k0;
- result[3] = a + result[2];
-}
-
-// Requires len < 240.
-static void CityHashCrc256Short(const char *s, size_t len, uint64 *result) {
- char buf[240];
- memcpy(buf, s, len);
- memset(buf + len, 0, 240 - len);
- CityHashCrc256Long(buf, 240, ~static_cast<uint32>(len), result);
-}
-
-void CityHashCrc256(const char *s, size_t len, uint64 *result) {
- if (LIKELY(len >= 240)) {
- CityHashCrc256Long(s, len, 0, result);
- } else {
- CityHashCrc256Short(s, len, result);
- }
-}
-
-uint128 CityHashCrc128WithSeed(const char *s, size_t len, uint128 seed) {
- if (len <= 900) {
- return CityHash128WithSeed(s, len, seed);
- } else {
- uint64 result[4];
- CityHashCrc256(s, len, result);
- uint64 u = Uint128High64(seed) + result[0];
- uint64 v = Uint128Low64(seed) + result[1];
- return uint128(HashLen16(u, v + result[2]),
- HashLen16(Rotate(v, 32), u * k0 + result[3]));
- }
-}
-
-uint128 CityHashCrc128(const char *s, size_t len) {
- if (len <= 900) {
- return CityHash128(s, len);
- } else {
- uint64 result[4];
- CityHashCrc256(s, len, result);
- return uint128(result[2], result[3]);
- }
-}
-
-#endif // __SSE4_2__
-
-} // namespace CityHash_v1_0_2
+// Copyright (c) 2011 Google, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+//
+// CityHash, by Geoff Pike and Jyrki Alakuijala
+//
+// This file provides CityHash64() and related functions.
+//
+// It's probably possible to create even faster hash functions by
+// writing a program that systematically explores some of the space of
+// possible hash functions, by using SIMD instructions, or by
+// compromising on hash quality.
+
+#include "config.h"
+#include <city.h>
+
+#include <algorithm>
+#include <string.h> // for memcpy and memset
+
+#ifdef __SSE4_2__
+#include <citycrc.h>
+#include <nmmintrin.h>
+#endif
+
+using namespace std;
+
+
+#if !defined(WORDS_BIGENDIAN)
+
+#define uint32_in_expected_order(x) (x)
+#define uint64_in_expected_order(x) (x)
+
+#else
+
+#ifdef _MSC_VER
+#include <stdlib.h>
+#define bswap_32(x) _byteswap_ulong(x)
+#define bswap_64(x) _byteswap_uint64(x)
+
+#elif defined(__APPLE__)
+// Mac OS X / Darwin features
+#include <libkern/OSByteOrder.h>
+#define bswap_32(x) OSSwapInt32(x)
+#define bswap_64(x) OSSwapInt64(x)
+
+#else
+#include <byteswap.h>
+#endif
+
+#define uint32_in_expected_order(x) (bswap_32(x))
+#define uint64_in_expected_order(x) (bswap_64(x))
+
+#endif // WORDS_BIGENDIAN
+
+#if !defined(LIKELY)
+#if HAVE_BUILTIN_EXPECT
+#define LIKELY(x) (__builtin_expect(!!(x), 1))
+#else
+#define LIKELY(x) (x)
+#endif
+#endif
+
+namespace CityHash_v1_0_2
+{
+
+static uint64 UNALIGNED_LOAD64(const char *p) {
+ uint64 result;
+ memcpy(&result, p, sizeof(result));
+ return result;
+}
+
+static uint32 UNALIGNED_LOAD32(const char *p) {
+ uint32 result;
+ memcpy(&result, p, sizeof(result));
+ return result;
+}
+
+static uint64 Fetch64(const char *p) {
+ return uint64_in_expected_order(UNALIGNED_LOAD64(p));
+}
+
+static uint32 Fetch32(const char *p) {
+ return uint32_in_expected_order(UNALIGNED_LOAD32(p));
+}
+
+// Some primes between 2^63 and 2^64 for various uses.
+static const uint64 k0 = 0xc3a5c85c97cb3127ULL;
+static const uint64 k1 = 0xb492b66fbe98f273ULL;
+static const uint64 k2 = 0x9ae16a3b2f90404fULL;
+static const uint64 k3 = 0xc949d7c7509e6557ULL;
+
+// Bitwise right rotate. Normally this will compile to a single
+// instruction, especially if the shift is a manifest constant.
+static uint64 Rotate(uint64 val, int shift) {
+ // Avoid shifting by 64: doing so yields an undefined result.
+ return shift == 0 ? val : ((val >> shift) | (val << (64 - shift)));
+}
+
+// Equivalent to Rotate(), but requires the second arg to be non-zero.
+// On x86-64, and probably others, it's possible for this to compile
+// to a single instruction if both args are already in registers.
+static uint64 RotateByAtLeast1(uint64 val, int shift) {
+ return (val >> shift) | (val << (64 - shift));
+}
+
+static uint64 ShiftMix(uint64 val) {
+ return val ^ (val >> 47);
+}
+
+static uint64 HashLen16(uint64 u, uint64 v) {
+ return Hash128to64(uint128(u, v));
+}
+
+static uint64 HashLen0to16(const char *s, size_t len) {
+ if (len > 8) {
+ uint64 a = Fetch64(s);
+ uint64 b = Fetch64(s + len - 8);
+ return HashLen16(a, RotateByAtLeast1(b + len, len)) ^ b;
+ }
+ if (len >= 4) {
+ uint64 a = Fetch32(s);
+ return HashLen16(len + (a << 3), Fetch32(s + len - 4));
+ }
+ if (len > 0) {
+ uint8 a = s[0];
+ uint8 b = s[len >> 1];
+ uint8 c = s[len - 1];
+ uint32 y = static_cast<uint32>(a) + (static_cast<uint32>(b) << 8);
+ uint32 z = len + (static_cast<uint32>(c) << 2);
+ return ShiftMix(y * k2 ^ z * k3) * k2;
+ }
+ return k2;
+}
+
+// This probably works well for 16-byte strings as well, but it may be overkill
+// in that case.
+static uint64 HashLen17to32(const char *s, size_t len) {
+ uint64 a = Fetch64(s) * k1;
+ uint64 b = Fetch64(s + 8);
+ uint64 c = Fetch64(s + len - 8) * k2;
+ uint64 d = Fetch64(s + len - 16) * k0;
+ return HashLen16(Rotate(a - b, 43) + Rotate(c, 30) + d,
+ a + Rotate(b ^ k3, 20) - c + len);
+}
+
+// Return a 16-byte hash for 48 bytes. Quick and dirty.
+// Callers do best to use "random-looking" values for a and b.
+static pair<uint64, uint64> WeakHashLen32WithSeeds(
+ uint64 w, uint64 x, uint64 y, uint64 z, uint64 a, uint64 b) {
+ a += w;
+ b = Rotate(b + a + z, 21);
+ uint64 c = a;
+ a += x;
+ a += y;
+ b += Rotate(a, 44);
+ return make_pair(a + z, b + c);
+}
+
+// Return a 16-byte hash for s[0] ... s[31], a, and b. Quick and dirty.
+static pair<uint64, uint64> WeakHashLen32WithSeeds(
+ const char* s, uint64 a, uint64 b) {
+ return WeakHashLen32WithSeeds(Fetch64(s),
+ Fetch64(s + 8),
+ Fetch64(s + 16),
+ Fetch64(s + 24),
+ a,
+ b);
+}
+
+// Return an 8-byte hash for 33 to 64 bytes.
+static uint64 HashLen33to64(const char *s, size_t len) {
+ uint64 z = Fetch64(s + 24);
+ uint64 a = Fetch64(s) + (len + Fetch64(s + len - 16)) * k0;
+ uint64 b = Rotate(a + z, 52);
+ uint64 c = Rotate(a, 37);
+ a += Fetch64(s + 8);
+ c += Rotate(a, 7);
+ a += Fetch64(s + 16);
+ uint64 vf = a + z;
+ uint64 vs = b + Rotate(a, 31) + c;
+ a = Fetch64(s + 16) + Fetch64(s + len - 32);
+ z = Fetch64(s + len - 8);
+ b = Rotate(a + z, 52);
+ c = Rotate(a, 37);
+ a += Fetch64(s + len - 24);
+ c += Rotate(a, 7);
+ a += Fetch64(s + len - 16);
+ uint64 wf = a + z;
+ uint64 ws = b + Rotate(a, 31) + c;
+ uint64 r = ShiftMix((vf + ws) * k2 + (wf + vs) * k0);
+ return ShiftMix(r * k0 + vs) * k2;
+}
+
+uint64 CityHash64(const char *s, size_t len) {
+ if (len <= 32) {
+ if (len <= 16) {
+ return HashLen0to16(s, len);
+ } else {
+ return HashLen17to32(s, len);
+ }
+ } else if (len <= 64) {
+ return HashLen33to64(s, len);
+ }
+
+ // For strings over 64 bytes we hash the end first, and then as we
+ // loop we keep 56 bytes of state: v, w, x, y, and z.
+ uint64 x = Fetch64(s);
+ uint64 y = Fetch64(s + len - 16) ^ k1;
+ uint64 z = Fetch64(s + len - 56) ^ k0;
+ pair<uint64, uint64> v = WeakHashLen32WithSeeds(s + len - 64, len, y);
+ pair<uint64, uint64> w = WeakHashLen32WithSeeds(s + len - 32, len * k1, k0);
+ z += ShiftMix(v.second) * k1;
+ x = Rotate(z + x, 39) * k1;
+ y = Rotate(y, 33) * k1;
+
+ // Decrease len to the nearest multiple of 64, and operate on 64-byte chunks.
+ len = (len - 1) & ~static_cast<size_t>(63);
+ do {
+ x = Rotate(x + y + v.first + Fetch64(s + 16), 37) * k1;
+ y = Rotate(y + v.second + Fetch64(s + 48), 42) * k1;
+ x ^= w.second;
+ y ^= v.first;
+ z = Rotate(z ^ w.first, 33);
+ v = WeakHashLen32WithSeeds(s, v.second * k1, x + w.first);
+ w = WeakHashLen32WithSeeds(s + 32, z + w.second, y);
+ std::swap(z, x);
+ s += 64;
+ len -= 64;
+ } while (len != 0);
+ return HashLen16(HashLen16(v.first, w.first) + ShiftMix(y) * k1 + z,
+ HashLen16(v.second, w.second) + x);
+}
+
+uint64 CityHash64WithSeed(const char *s, size_t len, uint64 seed) {
+ return CityHash64WithSeeds(s, len, k2, seed);
+}
+
+uint64 CityHash64WithSeeds(const char *s, size_t len,
+ uint64 seed0, uint64 seed1) {
+ return HashLen16(CityHash64(s, len) - seed0, seed1);
+}
+
+// A subroutine for CityHash128(). Returns a decent 128-bit hash for strings
+// of any length representable in ssize_t. Based on City and Murmur.
+static uint128 CityMurmur(const char *s, size_t len, uint128 seed) {
+ uint64 a = Uint128Low64(seed);
+ uint64 b = Uint128High64(seed);
+ uint64 c = 0;
+ uint64 d = 0;
+ ssize_t l = len - 16;
+ if (l <= 0) { // len <= 16
+ a = ShiftMix(a * k1) * k1;
+ c = b * k1 + HashLen0to16(s, len);
+ d = ShiftMix(a + (len >= 8 ? Fetch64(s) : c));
+ } else { // len > 16
+ c = HashLen16(Fetch64(s + len - 8) + k1, a);
+ d = HashLen16(b + len, c + Fetch64(s + len - 16));
+ a += d;
+ do {
+ a ^= ShiftMix(Fetch64(s) * k1) * k1;
+ a *= k1;
+ b ^= a;
+ c ^= ShiftMix(Fetch64(s + 8) * k1) * k1;
+ c *= k1;
+ d ^= c;
+ s += 16;
+ l -= 16;
+ } while (l > 0);
+ }
+ a = HashLen16(a, c);
+ b = HashLen16(d, b);
+ return uint128(a ^ b, HashLen16(b, a));
+}
+
+uint128 CityHash128WithSeed(const char *s, size_t len, uint128 seed) {
+ if (len < 128) {
+ return CityMurmur(s, len, seed);
+ }
+
+ // We expect len >= 128 to be the common case. Keep 56 bytes of state:
+ // v, w, x, y, and z.
+ pair<uint64, uint64> v, w;
+ uint64 x = Uint128Low64(seed);
+ uint64 y = Uint128High64(seed);
+ uint64 z = len * k1;
+ v.first = Rotate(y ^ k1, 49) * k1 + Fetch64(s);
+ v.second = Rotate(v.first, 42) * k1 + Fetch64(s + 8);
+ w.first = Rotate(y + z, 35) * k1 + x;
+ w.second = Rotate(x + Fetch64(s + 88), 53) * k1;
+
+ // This is the same inner loop as CityHash64(), manually unrolled.
+ do {
+ x = Rotate(x + y + v.first + Fetch64(s + 16), 37) * k1;
+ y = Rotate(y + v.second + Fetch64(s + 48), 42) * k1;
+ x ^= w.second;
+ y ^= v.first;
+ z = Rotate(z ^ w.first, 33);
+ v = WeakHashLen32WithSeeds(s, v.second * k1, x + w.first);
+ w = WeakHashLen32WithSeeds(s + 32, z + w.second, y);
+ std::swap(z, x);
+ s += 64;
+ x = Rotate(x + y + v.first + Fetch64(s + 16), 37) * k1;
+ y = Rotate(y + v.second + Fetch64(s + 48), 42) * k1;
+ x ^= w.second;
+ y ^= v.first;
+ z = Rotate(z ^ w.first, 33);
+ v = WeakHashLen32WithSeeds(s, v.second * k1, x + w.first);
+ w = WeakHashLen32WithSeeds(s + 32, z + w.second, y);
+ std::swap(z, x);
+ s += 64;
+ len -= 128;
+ } while (LIKELY(len >= 128));
+ y += Rotate(w.first, 37) * k0 + z;
+ x += Rotate(v.first + z, 49) * k0;
+ // If 0 < len < 128, hash up to 4 chunks of 32 bytes each from the end of s.
+ for (size_t tail_done = 0; tail_done < len; ) {
+ tail_done += 32;
+ y = Rotate(y - x, 42) * k0 + v.second;
+ w.first += Fetch64(s + len - tail_done + 16);
+ x = Rotate(x, 49) * k0 + w.first;
+ w.first += v.first;
+ v = WeakHashLen32WithSeeds(s + len - tail_done, v.first, v.second);
+ }
+ // At this point our 48 bytes of state should contain more than
+ // enough information for a strong 128-bit hash. We use two
+ // different 48-byte-to-8-byte hashes to get a 16-byte final result.
+ x = HashLen16(x, v.first);
+ y = HashLen16(y, w.first);
+ return uint128(HashLen16(x + v.second, w.second) + y,
+ HashLen16(x + w.second, y + v.second));
+}
+
+uint128 CityHash128(const char *s, size_t len) {
+ if (len >= 16) {
+ return CityHash128WithSeed(s + 16,
+ len - 16,
+ uint128(Fetch64(s) ^ k3,
+ Fetch64(s + 8)));
+ } else if (len >= 8) {
+ return CityHash128WithSeed(NULL,
+ 0,
+ uint128(Fetch64(s) ^ (len * k0),
+ Fetch64(s + len - 8) ^ k1));
+ } else {
+ return CityHash128WithSeed(s, len, uint128(k0, k1));
+ }
+}
+
+#ifdef __SSE4_2__
+
+// Requires len >= 240.
+static void CityHashCrc256Long(const char *s, size_t len,
+ uint32 seed, uint64 *result) {
+ uint64 a = Fetch64(s + 56) + k0;
+ uint64 b = Fetch64(s + 96) + k0;
+ uint64 c = result[1] = HashLen16(b, len);
+ uint64 d = result[2] = Fetch64(s + 120) * k0 + len;
+ uint64 e = Fetch64(s + 184) + seed;
+ uint64 f = seed;
+ uint64 g = 0;
+ uint64 h = 0;
+ uint64 i = 0;
+ uint64 j = 0;
+ uint64 t = c + d;
+
+ // 240 bytes of input per iter.
+ size_t iters = len / 240;
+ len -= iters * 240;
+ do {
+#define CHUNK(multiplier, z) \
+ { \
+ uint64 old_a = a; \
+ a = Rotate(b, 41 ^ z) * multiplier + Fetch64(s); \
+ b = Rotate(c, 27 ^ z) * multiplier + Fetch64(s + 8); \
+ c = Rotate(d, 41 ^ z) * multiplier + Fetch64(s + 16); \
+ d = Rotate(e, 33 ^ z) * multiplier + Fetch64(s + 24); \
+ e = Rotate(t, 25 ^ z) * multiplier + Fetch64(s + 32); \
+ t = old_a; \
+ } \
+ f = _mm_crc32_u64(f, a); \
+ g = _mm_crc32_u64(g, b); \
+ h = _mm_crc32_u64(h, c); \
+ i = _mm_crc32_u64(i, d); \
+ j = _mm_crc32_u64(j, e); \
+ s += 40
+
+ CHUNK(1, 1); CHUNK(k0, 0);
+ CHUNK(1, 1); CHUNK(k0, 0);
+ CHUNK(1, 1); CHUNK(k0, 0);
+ } while (--iters > 0);
+ j += i << 32;
+ a = HashLen16(a, j);
+ h += g << 32;
+ b = b * k0 + h;
+ c = HashLen16(c, f) + i;
+ d = HashLen16(d, e);
+ pair<uint64, uint64> v(j + e, HashLen16(h, t));
+ h = v.second + f;
+ // If 0 < len < 240, hash chunks of 32 bytes each from the end of s.
+ for (size_t tail_done = 0; tail_done < len; ) {
+ tail_done += 32;
+ c = Rotate(c - a, 42) * k0 + v.second;
+ d += Fetch64(s + len - tail_done + 16);
+ a = Rotate(a, 49) * k0 + d;
+ d += v.first;
+ v = WeakHashLen32WithSeeds(s + len - tail_done, v.first, v.second);
+ }
+
+ // Final mix.
+ e = HashLen16(a, d) + v.first;
+ f = HashLen16(b, c) + a;
+ g = HashLen16(v.first, v.second) + c;
+ result[0] = e + f + g + h;
+ a = ShiftMix((a + g) * k0) * k0 + b;
+ result[1] += a + result[0];
+ a = ShiftMix(a * k0) * k0 + c;
+ result[2] += a + result[1];
+ a = ShiftMix((a + e) * k0) * k0;
+ result[3] = a + result[2];
+}
+
+// Requires len < 240.
+static void CityHashCrc256Short(const char *s, size_t len, uint64 *result) {
+ char buf[240];
+ memcpy(buf, s, len);
+ memset(buf + len, 0, 240 - len);
+ CityHashCrc256Long(buf, 240, ~static_cast<uint32>(len), result);
+}
+
+void CityHashCrc256(const char *s, size_t len, uint64 *result) {
+ if (LIKELY(len >= 240)) {
+ CityHashCrc256Long(s, len, 0, result);
+ } else {
+ CityHashCrc256Short(s, len, result);
+ }
+}
+
+uint128 CityHashCrc128WithSeed(const char *s, size_t len, uint128 seed) {
+ if (len <= 900) {
+ return CityHash128WithSeed(s, len, seed);
+ } else {
+ uint64 result[4];
+ CityHashCrc256(s, len, result);
+ uint64 u = Uint128High64(seed) + result[0];
+ uint64 v = Uint128Low64(seed) + result[1];
+ return uint128(HashLen16(u, v + result[2]),
+ HashLen16(Rotate(v, 32), u * k0 + result[3]));
+ }
+}
+
+uint128 CityHashCrc128(const char *s, size_t len) {
+ if (len <= 900) {
+ return CityHash128(s, len);
+ } else {
+ uint64 result[4];
+ CityHashCrc256(s, len, result);
+ return uint128(result[2], result[3]);
+ }
+}
+
+#endif // __SSE4_2__
+
+} // namespace CityHash_v1_0_2
diff --git a/contrib/restricted/cityhash-1.0.2/city.h b/contrib/restricted/cityhash-1.0.2/city.h
index 268827427d..76a51b05c4 100644
--- a/contrib/restricted/cityhash-1.0.2/city.h
+++ b/contrib/restricted/cityhash-1.0.2/city.h
@@ -1,104 +1,104 @@
-// Copyright (c) 2011 Google, Inc.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-// THE SOFTWARE.
-//
-// CityHash, by Geoff Pike and Jyrki Alakuijala
-//
-// This file provides a few functions for hashing strings. On x86-64
-// hardware in 2011, CityHash64() is faster than other high-quality
-// hash functions, such as Murmur. This is largely due to higher
-// instruction-level parallelism. CityHash64() and CityHash128() also perform
-// well on hash-quality tests.
-//
-// CityHash128() is optimized for relatively long strings and returns
-// a 128-bit hash. For strings more than about 2000 bytes it can be
-// faster than CityHash64().
-//
-// Functions in the CityHash family are not suitable for cryptography.
-//
-// WARNING: This code has not been tested on big-endian platforms!
-// It is known to work well on little-endian platforms that have a small penalty
-// for unaligned reads, such as current Intel and AMD moderate-to-high-end CPUs.
-//
-// By the way, for some hash functions, given strings a and b, the hash
-// of a+b is easily derived from the hashes of a and b. This property
-// doesn't hold for any hash functions in this file.
-
-#ifndef CITY_HASH_H_
-#define CITY_HASH_H_
-
-#include <stdlib.h> // for size_t.
-#include <stdint.h>
-#include <utility>
-
-/** This is a version of CityHash that predates the v1.0.3 algorithm change.
- * Why do we need exactly this version?
- * Although CityHash values are not recommended for persistent storage anywhere,
- * they have already been used that way in ClickHouse:
- * - for calculating checksums of compressed chunks and of data parts;
- * - this version of CityHash is exposed as the cityHash64 function in the ClickHouse SQL language;
- * - and it is already used by many users for data ordering, sampling and sharding.
- */
-namespace CityHash_v1_0_2
-{
-
-typedef uint8_t uint8;
-typedef uint32_t uint32;
-typedef uint64_t uint64;
-typedef std::pair<uint64, uint64> uint128;
-
-
-inline uint64 Uint128Low64(const uint128& x) { return x.first; }
-inline uint64 Uint128High64(const uint128& x) { return x.second; }
-
-// Hash function for a byte array.
-uint64 CityHash64(const char *buf, size_t len);
-
-// Hash function for a byte array. For convenience, a 64-bit seed is also
-// hashed into the result.
-uint64 CityHash64WithSeed(const char *buf, size_t len, uint64 seed);
-
-// Hash function for a byte array. For convenience, two seeds are also
-// hashed into the result.
-uint64 CityHash64WithSeeds(const char *buf, size_t len,
- uint64 seed0, uint64 seed1);
-
-// Hash function for a byte array.
-uint128 CityHash128(const char *s, size_t len);
-
-// Hash function for a byte array. For convenience, a 128-bit seed is also
-// hashed into the result.
-uint128 CityHash128WithSeed(const char *s, size_t len, uint128 seed);
-
-// Hash 128 input bits down to 64 bits of output.
-// This is intended to be a reasonably good hash function.
-inline uint64 Hash128to64(const uint128& x) {
- // Murmur-inspired hashing.
- const uint64 kMul = 0x9ddfea08eb382d69ULL;
- uint64 a = (Uint128Low64(x) ^ Uint128High64(x)) * kMul;
- a ^= (a >> 47);
- uint64 b = (Uint128High64(x) ^ a) * kMul;
- b ^= (b >> 47);
- b *= kMul;
- return b;
-}
-
-} // namespace CityHash_v1_0_2
-
-#endif // CITY_HASH_H_
+// Copyright (c) 2011 Google, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+//
+// CityHash, by Geoff Pike and Jyrki Alakuijala
+//
+// This file provides a few functions for hashing strings. On x86-64
+// hardware in 2011, CityHash64() is faster than other high-quality
+// hash functions, such as Murmur. This is largely due to higher
+// instruction-level parallelism. CityHash64() and CityHash128() also perform
+// well on hash-quality tests.
+//
+// CityHash128() is optimized for relatively long strings and returns
+// a 128-bit hash. For strings more than about 2000 bytes it can be
+// faster than CityHash64().
+//
+// Functions in the CityHash family are not suitable for cryptography.
+//
+// WARNING: This code has not been tested on big-endian platforms!
+// It is known to work well on little-endian platforms that have a small penalty
+// for unaligned reads, such as current Intel and AMD moderate-to-high-end CPUs.
+//
+// By the way, for some hash functions, given strings a and b, the hash
+// of a+b is easily derived from the hashes of a and b. This property
+// doesn't hold for any hash functions in this file.
+
+#ifndef CITY_HASH_H_
+#define CITY_HASH_H_
+
+#include <stdlib.h> // for size_t.
+#include <stdint.h>
+#include <utility>
+
+/** This is a version of CityHash that predates the v1.0.3 algorithm change.
+ * Why do we need exactly this version?
+ * Although CityHash values are not recommended for persistent storage anywhere,
+ * they have already been used that way in ClickHouse:
+ * - for calculating checksums of compressed chunks and of data parts;
+ * - this version of CityHash is exposed as the cityHash64 function in the ClickHouse SQL language;
+ * - and it is already used by many users for data ordering, sampling and sharding.
+ */
+namespace CityHash_v1_0_2
+{
+
+typedef uint8_t uint8;
+typedef uint32_t uint32;
+typedef uint64_t uint64;
+typedef std::pair<uint64, uint64> uint128;
+
+
+inline uint64 Uint128Low64(const uint128& x) { return x.first; }
+inline uint64 Uint128High64(const uint128& x) { return x.second; }
+
+// Hash function for a byte array.
+uint64 CityHash64(const char *buf, size_t len);
+
+// Hash function for a byte array. For convenience, a 64-bit seed is also
+// hashed into the result.
+uint64 CityHash64WithSeed(const char *buf, size_t len, uint64 seed);
+
+// Hash function for a byte array. For convenience, two seeds are also
+// hashed into the result.
+uint64 CityHash64WithSeeds(const char *buf, size_t len,
+ uint64 seed0, uint64 seed1);
+
+// Hash function for a byte array.
+uint128 CityHash128(const char *s, size_t len);
+
+// Hash function for a byte array. For convenience, a 128-bit seed is also
+// hashed into the result.
+uint128 CityHash128WithSeed(const char *s, size_t len, uint128 seed);
+
+// Hash 128 input bits down to 64 bits of output.
+// This is intended to be a reasonably good hash function.
+inline uint64 Hash128to64(const uint128& x) {
+ // Murmur-inspired hashing.
+ const uint64 kMul = 0x9ddfea08eb382d69ULL;
+ uint64 a = (Uint128Low64(x) ^ Uint128High64(x)) * kMul;
+ a ^= (a >> 47);
+ uint64 b = (Uint128High64(x) ^ a) * kMul;
+ b ^= (b >> 47);
+ b *= kMul;
+ return b;
+}
+
+} // namespace CityHash_v1_0_2
+
+#endif // CITY_HASH_H_
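For orientation, a minimal usage sketch of the declarations above. It assumes contrib/restricted/cityhash-1.0.2 is on the include path (the ya.make hunk further down adds it via a GLOBAL ADDINCL); the payload string is arbitrary.

    #include <city.h>

    #include <cstdio>
    #include <string>

    int main() {
        using namespace CityHash_v1_0_2;
        const std::string data = "example payload";

        uint64 h64 = CityHash64(data.data(), data.size());
        uint128 h128 = CityHash128(data.data(), data.size());
        uint64 folded = Hash128to64(h128);  // fold the 128-bit value down to 64 bits

        std::printf("%llu %llu\n",
                    static_cast<unsigned long long>(h64),
                    static_cast<unsigned long long>(folded));
        return 0;
    }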
diff --git a/contrib/restricted/cityhash-1.0.2/citycrc.h b/contrib/restricted/cityhash-1.0.2/citycrc.h
index f2d291e36d..1c50914229 100644
--- a/contrib/restricted/cityhash-1.0.2/citycrc.h
+++ b/contrib/restricted/cityhash-1.0.2/citycrc.h
@@ -1,51 +1,51 @@
-// Copyright (c) 2011 Google, Inc.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-// THE SOFTWARE.
-//
-// CityHash, by Geoff Pike and Jyrki Alakuijala
-//
-// This file declares the subset of the CityHash functions that require
-// _mm_crc32_u64(). See the CityHash README for details.
-//
-// Functions in the CityHash family are not suitable for cryptography.
-
-#ifndef CITY_HASH_CRC_H_
-#define CITY_HASH_CRC_H_
-
-#include <city.h>
-
-#ifdef __SSE4_2__
-
-namespace CityHash_v1_0_2
-{
-
-// Hash function for a byte array.
-uint128 CityHashCrc128(const char *s, size_t len);
-
-// Hash function for a byte array. For convenience, a 128-bit seed is also
-// hashed into the result.
-uint128 CityHashCrc128WithSeed(const char *s, size_t len, uint128 seed);
-
-// Hash function for a byte array. Sets result[0] ... result[3].
-void CityHashCrc256(const char *s, size_t len, uint64 *result);
-
-}
-
-#endif // __SSE4_2__
-#endif // CITY_HASH_CRC_H_
+// Copyright (c) 2011 Google, Inc.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+//
+// CityHash, by Geoff Pike and Jyrki Alakuijala
+//
+// This file declares the subset of the CityHash functions that require
+// _mm_crc32_u64(). See the CityHash README for details.
+//
+// Functions in the CityHash family are not suitable for cryptography.
+
+#ifndef CITY_HASH_CRC_H_
+#define CITY_HASH_CRC_H_
+
+#include <city.h>
+
+#ifdef __SSE4_2__
+
+namespace CityHash_v1_0_2
+{
+
+// Hash function for a byte array.
+uint128 CityHashCrc128(const char *s, size_t len);
+
+// Hash function for a byte array. For convenience, a 128-bit seed is also
+// hashed into the result.
+uint128 CityHashCrc128WithSeed(const char *s, size_t len, uint128 seed);
+
+// Hash function for a byte array. Sets result[0] ... result[3].
+void CityHashCrc256(const char *s, size_t len, uint64 *result);
+
+}
+
+#endif // __SSE4_2__
+#endif // CITY_HASH_CRC_H_
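The CRC-assisted entry points exist only when __SSE4_2__ is defined, so callers guard both the include and the call. A minimal sketch; Digest256 is a hypothetical wrapper name, not part of the library.

    #include <city.h>
    #ifdef __SSE4_2__
    #include <citycrc.h>
    #endif

    #include <cstddef>

    #ifdef __SSE4_2__
    // Fills out[0] ... out[3] with the 256-bit CRC-assisted hash of buf.
    void Digest256(const char* buf, size_t len, CityHash_v1_0_2::uint64 out[4]) {
        CityHash_v1_0_2::CityHashCrc256(buf, len, out);
    }
    #endif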
diff --git a/contrib/restricted/cityhash-1.0.2/config.h b/contrib/restricted/cityhash-1.0.2/config.h
index cca744a35c..d5ceb103f2 100644
--- a/contrib/restricted/cityhash-1.0.2/config.h
+++ b/contrib/restricted/cityhash-1.0.2/config.h
@@ -1,125 +1,125 @@
-/* config.h. Generated from config.h.in by configure. */
-/* config.h.in. Generated from configure.ac by autoheader. */
-
-/* Define if building universal (internal helper macro) */
-/* #undef AC_APPLE_UNIVERSAL_BUILD */
-
-/* Define to 1 if the compiler supports __builtin_expect. */
-#if _MSC_VER
-#define HAVE_BUILTIN_EXPECT 0
-#else
-#define HAVE_BUILTIN_EXPECT 1
-#endif
-
-/* Define to 1 if you have the <dlfcn.h> header file. */
-#define HAVE_DLFCN_H 1
-
-/* Define to 1 if you have the <inttypes.h> header file. */
-#define HAVE_INTTYPES_H 1
-
-/* Define to 1 if you have the <memory.h> header file. */
-#define HAVE_MEMORY_H 1
-
-/* Define to 1 if you have the <stdint.h> header file. */
-#define HAVE_STDINT_H 1
-
-/* Define to 1 if you have the <stdlib.h> header file. */
-#define HAVE_STDLIB_H 1
-
-/* Define to 1 if you have the <strings.h> header file. */
-#define HAVE_STRINGS_H 1
-
-/* Define to 1 if you have the <string.h> header file. */
-#define HAVE_STRING_H 1
-
-/* Define to 1 if you have the <sys/stat.h> header file. */
-#define HAVE_SYS_STAT_H 1
-
-/* Define to 1 if you have the <sys/types.h> header file. */
-#define HAVE_SYS_TYPES_H 1
-
-/* Define to 1 if you have the <unistd.h> header file. */
-#define HAVE_UNISTD_H 1
-
-/* Define to the sub-directory in which libtool stores uninstalled libraries.
- */
-#define LT_OBJDIR ".libs/"
-
-/* Define to the address where bug reports for this package should be sent. */
-#define PACKAGE_BUGREPORT "cityhash-discuss@googlegroups.com"
-
-/* Define to the full name of this package. */
-#define PACKAGE_NAME "CityHash"
-
-/* Define to the full name and version of this package. */
-#define PACKAGE_STRING "CityHash 1.0.2"
-
-/* Define to the one symbol short name of this package. */
-#define PACKAGE_TARNAME "cityhash"
-
-/* Define to the home page for this package. */
-#define PACKAGE_URL ""
-
-/* Define to the version of this package. */
-#define PACKAGE_VERSION "1.0.2"
-
-/* Define to 1 if you have the ANSI C header files. */
-#define STDC_HEADERS 1
-
-/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
- significant byte first (like Motorola and SPARC, unlike Intel). */
-#if defined AC_APPLE_UNIVERSAL_BUILD
-# if defined __BIG_ENDIAN__
-# define WORDS_BIGENDIAN 1
-# endif
-#else
-# ifndef WORDS_BIGENDIAN
-/* # undef WORDS_BIGENDIAN */
-# endif
-#endif
-
-/* Define for Solaris 2.5.1 so the uint32_t typedef from <sys/synch.h>,
- <pthread.h>, or <semaphore.h> is not used. If the typedef were allowed, the
- #define below would cause a syntax error. */
-/* #undef _UINT32_T */
-
-/* Define for Solaris 2.5.1 so the uint64_t typedef from <sys/synch.h>,
- <pthread.h>, or <semaphore.h> is not used. If the typedef were allowed, the
- #define below would cause a syntax error. */
-/* #undef _UINT64_T */
-
-/* Define for Solaris 2.5.1 so the uint8_t typedef from <sys/synch.h>,
- <pthread.h>, or <semaphore.h> is not used. If the typedef were allowed, the
- #define below would cause a syntax error. */
-/* #undef _UINT8_T */
-
-/* Define to `__inline__' or `__inline' if that's what the C compiler
- calls it, or to nothing if 'inline' is not supported under any name. */
-#ifndef __cplusplus
-/* #undef inline */
-#endif
-
-/* Define to `unsigned int' if <sys/types.h> does not define. */
-/* #undef size_t */
-
-/* Define to `int' if <sys/types.h> does not define. */
-/* #undef ssize_t */
-
-/* Define to the type of an unsigned integer type of width exactly 32 bits if
- such a type exists and the standard includes do not define it. */
-/* #undef uint32_t */
-
-/* Define to the type of an unsigned integer type of width exactly 64 bits if
- such a type exists and the standard includes do not define it. */
-/* #undef uint64_t */
-
-/* Define to the type of an unsigned integer type of width exactly 8 bits if
- such a type exists and the standard includes do not define it. */
-/* #undef uint8_t */
-
-#ifdef _MSC_VER
- #include <basetsd.h>
- typedef SSIZE_T ssize_t;
-#else
- #include <sys/types.h>
-#endif
+/* config.h. Generated from config.h.in by configure. */
+/* config.h.in. Generated from configure.ac by autoheader. */
+
+/* Define if building universal (internal helper macro) */
+/* #undef AC_APPLE_UNIVERSAL_BUILD */
+
+/* Define to 1 if the compiler supports __builtin_expect. */
+#if _MSC_VER
+#define HAVE_BUILTIN_EXPECT 0
+#else
+#define HAVE_BUILTIN_EXPECT 1
+#endif
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+ */
+#define LT_OBJDIR ".libs/"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "cityhash-discuss@googlegroups.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "CityHash"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "CityHash 1.0.2"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "cityhash"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.0.2"
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
+ significant byte first (like Motorola and SPARC, unlike Intel). */
+#if defined AC_APPLE_UNIVERSAL_BUILD
+# if defined __BIG_ENDIAN__
+# define WORDS_BIGENDIAN 1
+# endif
+#else
+# ifndef WORDS_BIGENDIAN
+/* # undef WORDS_BIGENDIAN */
+# endif
+#endif
+
+/* Define for Solaris 2.5.1 so the uint32_t typedef from <sys/synch.h>,
+ <pthread.h>, or <semaphore.h> is not used. If the typedef were allowed, the
+ #define below would cause a syntax error. */
+/* #undef _UINT32_T */
+
+/* Define for Solaris 2.5.1 so the uint64_t typedef from <sys/synch.h>,
+ <pthread.h>, or <semaphore.h> is not used. If the typedef were allowed, the
+ #define below would cause a syntax error. */
+/* #undef _UINT64_T */
+
+/* Define for Solaris 2.5.1 so the uint8_t typedef from <sys/synch.h>,
+ <pthread.h>, or <semaphore.h> is not used. If the typedef were allowed, the
+ #define below would cause a syntax error. */
+/* #undef _UINT8_T */
+
+/* Define to `__inline__' or `__inline' if that's what the C compiler
+ calls it, or to nothing if 'inline' is not supported under any name. */
+#ifndef __cplusplus
+/* #undef inline */
+#endif
+
+/* Define to `unsigned int' if <sys/types.h> does not define. */
+/* #undef size_t */
+
+/* Define to `int' if <sys/types.h> does not define. */
+/* #undef ssize_t */
+
+/* Define to the type of an unsigned integer type of width exactly 32 bits if
+ such a type exists and the standard includes do not define it. */
+/* #undef uint32_t */
+
+/* Define to the type of an unsigned integer type of width exactly 64 bits if
+ such a type exists and the standard includes do not define it. */
+/* #undef uint64_t */
+
+/* Define to the type of an unsigned integer type of width exactly 8 bits if
+ such a type exists and the standard includes do not define it. */
+/* #undef uint8_t */
+
+#ifdef _MSC_VER
+ #include <basetsd.h>
+ typedef SSIZE_T ssize_t;
+#else
+ #include <sys/types.h>
+#endif
diff --git a/contrib/restricted/cityhash-1.0.2/ya.make b/contrib/restricted/cityhash-1.0.2/ya.make
index db6cfb6311..19115879e0 100644
--- a/contrib/restricted/cityhash-1.0.2/ya.make
+++ b/contrib/restricted/cityhash-1.0.2/ya.make
@@ -2,27 +2,27 @@ OWNER(
g:clickhouse
g:cpp-contrib
)
-
-# Origin: https://github.com/google/cityhash.git
-
-LIBRARY()
-
-VERSION(bc38ef45ddbbe640e48db7b8ef65e80ea7f71298)
-
-PROVIDES(cityhash)
-
+
+# Origin: https://github.com/google/cityhash.git
+
+LIBRARY()
+
+VERSION(bc38ef45ddbbe640e48db7b8ef65e80ea7f71298)
+
+PROVIDES(cityhash)
+
LICENSE(MIT)
LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
-NO_UTIL()
-
+NO_UTIL()
+
ADDINCL(
GLOBAL contrib/restricted/cityhash-1.0.2
)
-
-SRCS(
+
+SRCS(
city.cc
-)
-
-END()
+)
+
+END()
diff --git a/contrib/restricted/ya.make b/contrib/restricted/ya.make
index b52a206733..0a4afc4334 100644
--- a/contrib/restricted/ya.make
+++ b/contrib/restricted/ya.make
@@ -11,7 +11,7 @@ RECURSE(
aws
blis
boost
- cityhash-1.0.2
+ cityhash-1.0.2
cmph
cpuinfo
exiv2
diff --git a/util/digest/ya.make b/util/digest/ya.make
index e378a7e419..43dd6a8763 100644
--- a/util/digest/ya.make
+++ b/util/digest/ya.make
@@ -1,7 +1,7 @@
OWNER(g:util)
SUBSCRIBER(g:util-subscribers)
-
-PROVIDES(cityhash)
+
+PROVIDES(cityhash)
RECURSE(
benchmark
diff --git a/util/folder/path.cpp b/util/folder/path.cpp
index bfe0c67d68..a8b3a4535a 100644
--- a/util/folder/path.cpp
+++ b/util/folder/path.cpp
@@ -258,23 +258,23 @@ void TFsPath::ListNames(TVector<TString>& children) const {
}
}
-bool TFsPath::Contains(const TString& component) const {
- if (!IsDefined()) {
- return false;
- }
-
- TFsPath path = *this;
- while (path.Parent() != path) {
- if (path.GetName() == component) {
- return true;
- }
-
- path = path.Parent();
- }
-
- return false;
-}
-
+bool TFsPath::Contains(const TString& component) const {
+ if (!IsDefined()) {
+ return false;
+ }
+
+ TFsPath path = *this;
+ while (path.Parent() != path) {
+ if (path.GetName() == component) {
+ return true;
+ }
+
+ path = path.Parent();
+ }
+
+ return false;
+}
+
void TFsPath::List(TVector<TFsPath>& files) const {
TVector<TString> names;
ListNames(names);
diff --git a/util/folder/path.h b/util/folder/path.h
index 2fb4d6b4ef..74785492dd 100644
--- a/util/folder/path.h
+++ b/util/folder/path.h
@@ -155,9 +155,9 @@ public:
void List(TVector<TFsPath>& children) const;
void ListNames(TVector<TString>& children) const;
- // Check if the path contains at least one component with a specific name.
- bool Contains(const TString& component) const;
-
+ // Check if the path contains at least one component with a specific name.
+ bool Contains(const TString& component) const;
+
// fails to delete non-empty directory
void DeleteIfExists() const;
// delete recursively. Does nothing if not exists
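A small sketch of the Contains() helper added above; it walks the component chain from the leaf up to (but not including) the root, so the literals below are only illustrative.

    #include <util/folder/path.h>

    bool ContainsExamples() {
        bool a = TFsPath("/tmp/cache/file.bin").Contains("cache");   // true: "cache" is a component
        bool b = TFsPath("/tmp/cache/file.bin").Contains("var");     // false: no such component
        bool c = TFsPath().Contains("anything");                     // false: path is not defined
        return a && !b && !c;
    }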
diff --git a/util/network/sock.h b/util/network/sock.h
index b10be2f715..88835bd676 100644
--- a/util/network/sock.h
+++ b/util/network/sock.h
@@ -1,6 +1,6 @@
#pragma once
-#include <util/folder/path.h>
+#include <util/folder/path.h>
#include <util/system/defaults.h>
#include <util/string/cast.h>
#include <util/stream/output.h>
@@ -89,10 +89,10 @@ struct TSockAddrLocal: public ISockAddr {
return TString(Path);
}
- TFsPath ToPath() const {
- return TFsPath(Path);
- }
-
+ TFsPath ToPath() const {
+ return TFsPath(Path);
+ }
+
int ResolveAddr() const {
if (in.sin_port == 0) {
int ret = 0;
@@ -182,10 +182,10 @@ struct TSockAddrLocal: public sockaddr_un, public ISockAddr {
return TString(sun_path);
}
- TFsPath ToPath() const {
- return TFsPath(sun_path);
- }
-
+ TFsPath ToPath() const {
+ return TFsPath(sun_path);
+ }
+
int Bind(SOCKET s, ui16 mode) const override {
(void)unlink(sun_path);
@@ -405,7 +405,7 @@ public:
class TStreamSocket: public TBaseSocket {
protected:
- explicit TStreamSocket(SOCKET fd)
+ explicit TStreamSocket(SOCKET fd)
: TBaseSocket(fd)
{
}
@@ -413,9 +413,9 @@ protected:
public:
TStreamSocket()
: TBaseSocket(INVALID_SOCKET)
- {
- }
-
+ {
+ }
+
ssize_t Send(const void* msg, size_t len, int flags = 0) {
const ssize_t ret = send((SOCKET) * this, (const char*)msg, (int)len, flags);
if (ret < 0)
@@ -452,15 +452,15 @@ public:
return ret;
}
- int Accept(TStreamSocket* acceptedSock, ISockAddr* acceptedAddr = nullptr) {
- SOCKET s = INVALID_SOCKET;
- if (acceptedAddr) {
- socklen_t acceptedSize = acceptedAddr->Size();
- s = accept((SOCKET) * this, acceptedAddr->SockAddr(), &acceptedSize);
- } else {
- s = accept((SOCKET) * this, nullptr, nullptr);
- }
-
+ int Accept(TStreamSocket* acceptedSock, ISockAddr* acceptedAddr = nullptr) {
+ SOCKET s = INVALID_SOCKET;
+ if (acceptedAddr) {
+ socklen_t acceptedSize = acceptedAddr->Size();
+ s = accept((SOCKET) * this, acceptedAddr->SockAddr(), &acceptedSize);
+ } else {
+ s = accept((SOCKET) * this, nullptr, nullptr);
+ }
+
if (s == INVALID_SOCKET)
return -errno;
diff --git a/util/network/socket.h b/util/network/socket.h
index 40c8648b40..21d2525e8f 100644
--- a/util/network/socket.h
+++ b/util/network/socket.h
@@ -236,7 +236,7 @@ private:
class TSocket;
-class TSocketHolder: public TMoveOnly {
+class TSocketHolder: public TMoveOnly {
public:
inline TSocketHolder()
: Fd_(INVALID_SOCKET)
@@ -249,10 +249,10 @@ public:
}
inline TSocketHolder(TSocketHolder&& other) noexcept {
- Fd_ = other.Fd_;
- other.Fd_ = INVALID_SOCKET;
- }
-
+ Fd_ = other.Fd_;
+ other.Fd_ = INVALID_SOCKET;
+ }
+
inline TSocketHolder& operator=(TSocketHolder&& other) noexcept {
Close();
Swap(other);
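The restored move constructor transfers ownership of the descriptor and leaves the source holding INVALID_SOCKET, so the fd is closed exactly once. A minimal sketch, assuming the SOCKET-taking constructor declared elsewhere in the class:

    #include <util/network/socket.h>

    #include <utility>

    void TransferOwnership(SOCKET fd) {
        TSocketHolder a(fd);
        TSocketHolder b(std::move(a));  // 'a' now holds INVALID_SOCKET
        // 'b' closes the descriptor when it goes out of scope; 'a' closes nothing.
    }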
diff --git a/util/random/entropy.cpp b/util/random/entropy.cpp
index 3617edb83d..5b7da84f56 100644
--- a/util/random/entropy.cpp
+++ b/util/random/entropy.cpp
@@ -177,25 +177,25 @@ namespace {
};
struct TDefaultTraits {
- THolder<TEntropyPoolStream> EP;
+ THolder<TEntropyPoolStream> EP;
TSeedStream SS;
inline TDefaultTraits() {
- Reset();
+ Reset();
}
inline IInputStream& EntropyPool() noexcept {
- return *EP;
+ return *EP;
}
inline IInputStream& Seed() noexcept {
return SS;
}
- inline void Reset() noexcept {
- EP.Reset(new TEntropyPoolStream(THostEntropy()));
- }
-
+ inline void Reset() noexcept {
+ EP.Reset(new TEntropyPoolStream(THostEntropy()));
+ }
+
static inline TDefaultTraits& Instance() {
auto res = SingletonWithPriority<TDefaultTraits, 0>();
@@ -216,6 +216,6 @@ IInputStream& Seed() {
return TRandomTraits::Instance().Seed();
}
-void ResetEntropyPool() {
- TRandomTraits::Instance().Reset();
+void ResetEntropyPool() {
+ TRandomTraits::Instance().Reset();
}
diff --git a/util/random/entropy.h b/util/random/entropy.h
index 62029c1b63..553d0a6fb2 100644
--- a/util/random/entropy.h
+++ b/util/random/entropy.h
@@ -16,6 +16,6 @@ IInputStream& EntropyPool();
IInputStream& Seed();
/*
- * Re-initialize entropy pool - useful after forking in multi-process programs.
+ * Re-initialize entropy pool - useful after forking in multi-process programs.
*/
-void ResetEntropyPool();
+void ResetEntropyPool();
diff --git a/util/random/random.cpp b/util/random/random.cpp
index 71f9323856..8c9f6bec77 100644
--- a/util/random/random.cpp
+++ b/util/random/random.cpp
@@ -86,8 +86,8 @@ DEF_RND(unsigned long)
DEF_RND(unsigned short)
DEF_RND(unsigned long long)
-#undef DEF_RND
-
+#undef DEF_RND
+
template <>
bool RandomNumber<bool>() {
return RandomNumber<ui8>() % 2 == 0;
@@ -113,11 +113,11 @@ template <>
long double RandomNumber<long double>() {
return RandomNumber<double>();
}
-
-void ResetRandomState() {
- *GetRndGen<ui32>() = TRndGen<ui32>();
- *GetRndGen<ui64>() = TRndGen<ui64>();
-}
+
+void ResetRandomState() {
+ *GetRndGen<ui32>() = TRndGen<ui32>();
+ *GetRndGen<ui64>() = TRndGen<ui64>();
+}
void SetRandomSeed(int seed) {
*GetRndGen<ui32>() = TRndGen<ui32>(seed);
diff --git a/util/random/random.h b/util/random/random.h
index 16b52d3995..623639d92c 100644
--- a/util/random/random.h
+++ b/util/random/random.h
@@ -18,11 +18,11 @@ T RandomNumber();
*/
template <class T>
T RandomNumber(T max);
-
-/*
- * Re-initialize random state - useful after forking in multi-process programs.
- */
-void ResetRandomState();
+
+/*
+ * Re-initialize random state - useful after forking in multi-process programs.
+ */
+void ResetRandomState();
/*
* Set random SEED
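Both reset hooks above are documented as fork helpers: a forked child shares the parent's PRNG and entropy-pool state until it re-seeds. A minimal POSIX-only sketch; only RandomNumber<bool>() from the hunk above is used.

    #include <util/random/entropy.h>
    #include <util/random/random.h>

    #include <unistd.h>

    void ForkWorker() {
        if (fork() == 0) {
            // Child: re-seed so parent and child stop drawing identical sequences.
            ResetEntropyPool();
            ResetRandomState();
            bool coin = RandomNumber<bool>();
            _exit(coin ? 1 : 0);
        }
    }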
diff --git a/ydb/library/yql/udfs/common/clickhouse/client/base/common/SimpleCache.h b/ydb/library/yql/udfs/common/clickhouse/client/base/common/SimpleCache.h
index c3bf019c22..b2d63bf52f 100644
--- a/ydb/library/yql/udfs/common/clickhouse/client/base/common/SimpleCache.h
+++ b/ydb/library/yql/udfs/common/clickhouse/client/base/common/SimpleCache.h
@@ -1,82 +1,82 @@
-#pragma once
-
-#include <map>
-#include <tuple>
-#include <mutex>
+#pragma once
+
+#include <map>
+#include <tuple>
+#include <mutex>
#include <common/function_traits.h>
-
-
-/** The simplest cache for a free function.
- * You can also pass a static class method or lambda without captures.
- * The size is unlimited. Values are stored permanently and never evicted.
- * But a single record, or the whole cache, can be dropped manually.
- * A mutex is used for synchronization.
- * Suitable only for the simplest cases.
- *
- * Usage
- *
- * SimpleCache<decltype(func), &func> func_cached;
- * std::cerr << func_cached(args...);
- */
-template <typename F, F* f>
-class SimpleCache
-{
-private:
- using Key = typename function_traits<F>::arguments_decay;
- using Result = typename function_traits<F>::result;
-
- std::map<Key, Result> cache;
- mutable std::mutex mutex;
-
-public:
- template <typename... Args>
- Result operator() (Args &&... args)
- {
+
+
+/** The simplest cache for a free function.
+ * You can also pass a static class method or lambda without captures.
+ * The size is unlimited. Values are stored permanently and never evicted.
+ * But a single record, or the whole cache, can be dropped manually.
+ * A mutex is used for synchronization.
+ * Suitable only for the simplest cases.
+ *
+ * Usage
+ *
+ * SimpleCache<decltype(func), &func> func_cached;
+ * std::cerr << func_cached(args...);
+ */
+template <typename F, F* f>
+class SimpleCache
+{
+private:
+ using Key = typename function_traits<F>::arguments_decay;
+ using Result = typename function_traits<F>::result;
+
+ std::map<Key, Result> cache;
+ mutable std::mutex mutex;
+
+public:
+ template <typename... Args>
+ Result operator() (Args &&... args)
+ {
Key key{std::forward<Args>(args)...};
- {
- std::lock_guard lock(mutex);
-
- auto it = cache.find(key);
-
- if (cache.end() != it)
- return it->second;
- }
-
- /// The calculations themselves are not done under mutex.
+ {
+ std::lock_guard lock(mutex);
+
+ auto it = cache.find(key);
+
+ if (cache.end() != it)
+ return it->second;
+ }
+
+ /// The calculations themselves are not done under mutex.
Result res = std::apply(f, key);
-
- {
- std::lock_guard lock(mutex);
-
- cache.emplace(std::forward_as_tuple(args...), res);
- }
-
- return res;
- }
-
- template <typename... Args>
- void update(Args &&... args)
- {
+
+ {
+ std::lock_guard lock(mutex);
+
+ cache.emplace(std::forward_as_tuple(args...), res);
+ }
+
+ return res;
+ }
+
+ template <typename... Args>
+ void update(Args &&... args)
+ {
Key key{std::forward<Args>(args)...};
Result res = std::apply(f, key);
- {
- std::lock_guard lock(mutex);
- cache[key] = std::move(res);
- }
- }
-
- size_t size() const
- {
- std::lock_guard lock(mutex);
- return cache.size();
- }
-
- void drop()
- {
- std::lock_guard lock(mutex);
- cache.clear();
- }
-};
+ {
+ std::lock_guard lock(mutex);
+ cache[key] = std::move(res);
+ }
+ }
+
+ size_t size() const
+ {
+ std::lock_guard lock(mutex);
+ return cache.size();
+ }
+
+ void drop()
+ {
+ std::lock_guard lock(mutex);
+ cache.clear();
+ }
+};
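Expanding the Usage note from the header comment into a concrete example. SlowPow is a hypothetical function; any free function (or captureless lambda) whose decayed argument tuple can serve as a std::map key works. The <common/...> include path mirrors the one used by the header itself.

    #include <common/SimpleCache.h>

    #include <cmath>
    #include <iostream>

    static double SlowPow(double base, int exp) {
        return std::pow(base, exp);  // stand-in for an expensive computation
    }

    int main() {
        SimpleCache<decltype(SlowPow), &SlowPow> cached_pow;

        std::cout << cached_pow(2.0, 10) << '\n';  // computed outside the mutex, then stored
        std::cout << cached_pow(2.0, 10) << '\n';  // served from the cache
        std::cout << cached_pow.size() << '\n';    // 1
        cached_pow.drop();                         // clear every entry
        return 0;
    }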
diff --git a/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp b/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp
index 054e9be907..97c64f2b69 100644
--- a/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp
+++ b/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp
@@ -1,39 +1,39 @@
-#include <common/getThreadId.h>
-
+#include <common/getThreadId.h>
+
#if defined(OS_ANDROID)
#include <sys/types.h>
- #include <unistd.h>
+ #include <unistd.h>
#elif defined(OS_LINUX)
#include <unistd.h>
- #include <syscall.h>
+ #include <syscall.h>
#elif defined(OS_FREEBSD)
- #include <pthread_np.h>
-#else
- #include <pthread.h>
- #include <stdexcept>
-#endif
-
-
-static thread_local uint64_t current_tid = 0;
-uint64_t getThreadId()
-{
- if (!current_tid)
- {
+ #include <pthread_np.h>
+#else
+ #include <pthread.h>
+ #include <stdexcept>
+#endif
+
+
+static thread_local uint64_t current_tid = 0;
+uint64_t getThreadId()
+{
+ if (!current_tid)
+ {
#if defined(OS_ANDROID)
current_tid = gettid();
#elif defined(OS_LINUX)
- current_tid = syscall(SYS_gettid); /// This call is always successful. - man gettid
+ current_tid = syscall(SYS_gettid); /// This call is always successful. - man gettid
#elif defined(OS_FREEBSD)
- current_tid = pthread_getthreadid_np();
+ current_tid = pthread_getthreadid_np();
#elif defined(OS_SUNOS)
// On Solaris-derived systems, this returns the ID of the LWP, analogous
// to a thread.
current_tid = static_cast<uint64_t>(pthread_self());
-#else
- if (0 != pthread_threadid_np(nullptr, &current_tid))
- throw std::logic_error("pthread_threadid_np returned error");
-#endif
- }
-
- return current_tid;
-}
+#else
+ if (0 != pthread_threadid_np(nullptr, &current_tid))
+ throw std::logic_error("pthread_threadid_np returned error");
+#endif
+ }
+
+ return current_tid;
+}
diff --git a/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.h b/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.h
index a1b5ff5f3e..2885b0f80b 100644
--- a/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.h
+++ b/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.h
@@ -1,5 +1,5 @@
-#pragma once
-#include <cstdint>
-
-/// Obtain the thread id from the OS. The value is cached in a thread-local variable.
-uint64_t getThreadId();
+#pragma once
+#include <cstdint>
+
+/// Obtain the thread id from the OS. The value is cached in a thread-local variable.
+uint64_t getThreadId();
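A one-line usage sketch: the first call on a given thread performs the OS query, later calls return the cached thread-local value.

    #include <common/getThreadId.h>

    #include <cstdio>

    void LogCurrentThread(const char* what) {
        std::printf("[tid %llu] %s\n",
                    static_cast<unsigned long long>(getThreadId()), what);
    }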
diff --git a/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.cpp b/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.cpp
index 49d566dac1..30ba1a6cb3 100644
--- a/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.cpp
+++ b/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.cpp
@@ -1,11 +1,11 @@
-/// This code was based on the code by Fedor Korotkiy (prime@yandex-team.ru) for YT product in Yandex.
-
+/// This code was based on the code by Fedor Korotkiy (prime@yandex-team.ru) for YT product in Yandex.
+
#include <common/defines.h>
-
-#if defined(__linux__) && !defined(THREAD_SANITIZER)
- #define USE_PHDR_CACHE 1
-#endif
-
+
+#if defined(__linux__) && !defined(THREAD_SANITIZER)
+ #define USE_PHDR_CACHE 1
+#endif
+
/// Thread Sanitizer uses dl_iterate_phdr function on initialization and fails if we provide our own.
#ifdef USE_PHDR_CACHE
@@ -20,105 +20,105 @@
# undef __msan_unpoison
# include <sanitizer/msan_interface.h>
# endif
-#endif
-
-#include <link.h>
-#include <dlfcn.h>
-#include <vector>
-#include <atomic>
-#include <cstddef>
-#include <stdexcept>
-
-
-namespace
-{
-
-// This is adapted from
-// https://github.com/scylladb/seastar/blob/master/core/exception_hacks.hh
-// https://github.com/scylladb/seastar/blob/master/core/exception_hacks.cc
-
-using DLIterateFunction = int (*) (int (*callback) (dl_phdr_info * info, size_t size, void * data), void * data);
-
-DLIterateFunction getOriginalDLIteratePHDR()
-{
- void * func = dlsym(RTLD_NEXT, "dl_iterate_phdr");
- if (!func)
- throw std::runtime_error("Cannot find dl_iterate_phdr function with dlsym");
- return reinterpret_cast<DLIterateFunction>(func);
-}
-
-
-using PHDRCache = std::vector<dl_phdr_info>;
-std::atomic<PHDRCache *> phdr_cache {};
-
-}
-
-
-extern "C"
-#ifndef __clang__
-[[gnu::visibility("default")]]
-[[gnu::externally_visible]]
-#endif
-int dl_iterate_phdr(int (*callback) (dl_phdr_info * info, size_t size, void * data), void * data)
-{
+#endif
+
+#include <link.h>
+#include <dlfcn.h>
+#include <vector>
+#include <atomic>
+#include <cstddef>
+#include <stdexcept>
+
+
+namespace
+{
+
+// This is adapted from
+// https://github.com/scylladb/seastar/blob/master/core/exception_hacks.hh
+// https://github.com/scylladb/seastar/blob/master/core/exception_hacks.cc
+
+using DLIterateFunction = int (*) (int (*callback) (dl_phdr_info * info, size_t size, void * data), void * data);
+
+DLIterateFunction getOriginalDLIteratePHDR()
+{
+ void * func = dlsym(RTLD_NEXT, "dl_iterate_phdr");
+ if (!func)
+ throw std::runtime_error("Cannot find dl_iterate_phdr function with dlsym");
+ return reinterpret_cast<DLIterateFunction>(func);
+}
+
+
+using PHDRCache = std::vector<dl_phdr_info>;
+std::atomic<PHDRCache *> phdr_cache {};
+
+}
+
+
+extern "C"
+#ifndef __clang__
+[[gnu::visibility("default")]]
+[[gnu::externally_visible]]
+#endif
+int dl_iterate_phdr(int (*callback) (dl_phdr_info * info, size_t size, void * data), void * data)
+{
auto * current_phdr_cache = phdr_cache.load();
- if (!current_phdr_cache)
- {
- // Cache is not yet populated, pass through to the original function.
- return getOriginalDLIteratePHDR()(callback, data);
- }
-
- int result = 0;
- for (auto & entry : *current_phdr_cache)
- {
- result = callback(&entry, offsetof(dl_phdr_info, dlpi_adds), data);
- if (result != 0)
- break;
- }
- return result;
-}
-
-
-extern "C"
-{
-#ifdef ADDRESS_SANITIZER
-void __lsan_ignore_object(const void *);
-#else
+ if (!current_phdr_cache)
+ {
+ // Cache is not yet populated, pass through to the original function.
+ return getOriginalDLIteratePHDR()(callback, data);
+ }
+
+ int result = 0;
+ for (auto & entry : *current_phdr_cache)
+ {
+ result = callback(&entry, offsetof(dl_phdr_info, dlpi_adds), data);
+ if (result != 0)
+ break;
+ }
+ return result;
+}
+
+
+extern "C"
+{
+#ifdef ADDRESS_SANITIZER
+void __lsan_ignore_object(const void *);
+#else
void __lsan_ignore_object(const void *) {} // NOLINT
-#endif
-}
-
-
-void updatePHDRCache()
-{
- // Fill out ELF header cache for access without locking.
- // This assumes no dynamic object loading/unloading after this point
-
- PHDRCache * new_phdr_cache = new PHDRCache;
- getOriginalDLIteratePHDR()([] (dl_phdr_info * info, size_t /*size*/, void * data)
- {
- // `info` is created by dl_iterate_phdr, which is a non-instrumented
- // libc function, so we have to unpoison it manually.
- __msan_unpoison(info, sizeof(*info));
-
- reinterpret_cast<PHDRCache *>(data)->push_back(*info);
- return 0;
- }, new_phdr_cache);
- phdr_cache.store(new_phdr_cache);
-
- /// Memory is intentionally leaked.
- __lsan_ignore_object(new_phdr_cache);
-}
-
-
-bool hasPHDRCache()
-{
- return phdr_cache.load() != nullptr;
-}
-
-#else
-
-void updatePHDRCache() {}
-bool hasPHDRCache() { return false; }
-
-#endif
+#endif
+}
+
+
+void updatePHDRCache()
+{
+ // Fill out ELF header cache for access without locking.
+ // This assumes no dynamic object loading/unloading after this point
+
+ PHDRCache * new_phdr_cache = new PHDRCache;
+ getOriginalDLIteratePHDR()([] (dl_phdr_info * info, size_t /*size*/, void * data)
+ {
+ // `info` is created by dl_iterate_phdr, which is a non-instrumented
+ // libc function, so we have to unpoison it manually.
+ __msan_unpoison(info, sizeof(*info));
+
+ reinterpret_cast<PHDRCache *>(data)->push_back(*info);
+ return 0;
+ }, new_phdr_cache);
+ phdr_cache.store(new_phdr_cache);
+
+ /// Memory is intentionally leaked.
+ __lsan_ignore_object(new_phdr_cache);
+}
+
+
+bool hasPHDRCache()
+{
+ return phdr_cache.load() != nullptr;
+}
+
+#else
+
+void updatePHDRCache() {}
+bool hasPHDRCache() { return false; }
+
+#endif
diff --git a/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.h b/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.h
index d2854ece0b..87d555fc69 100644
--- a/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.h
+++ b/ydb/library/yql/udfs/common/clickhouse/client/base/common/phdr_cache.h
@@ -1,19 +1,19 @@
-#pragma once
-
-/// This code was based on the code by Fedor Korotkiy (prime@yandex-team.ru) for YT product in Yandex.
-
-/** Collects all dl_phdr_info items and caches them in a static array.
- * Also rewrites dl_iterate_phdr with a lock-free version which consults the above cache,
- * thus eliminating a scalability bottleneck in C++ exception unwinding.
- * As a drawback, this only works if no dynamic object unloading happens after this point.
- * This function is thread-safe. You should call it to update the cache after loading new shared libraries.
- * Otherwise exception handling from dlopened libraries won't work (it will call std::terminate immediately).
- *
- * NOTE: It is disabled with Thread Sanitizer because TSan can only use original "dl_iterate_phdr" function.
- */
-void updatePHDRCache();
-
-/** Check if "dl_iterate_phdr" will be lock-free
- * to determine if some features like Query Profiler can be used.
- */
-bool hasPHDRCache();
+#pragma once
+
+/// This code was based on the code by Fedor Korotkiy (prime@yandex-team.ru) for YT product in Yandex.
+
+/** Collects all dl_phdr_info items and caches them in a static array.
+ * Also rewrites dl_iterate_phdr with a lock-free version which consults the above cache,
+ * thus eliminating a scalability bottleneck in C++ exception unwinding.
+ * As a drawback, this only works if no dynamic object unloading happens after this point.
+ * This function is thread-safe. You should call it to update the cache after loading new shared libraries.
+ * Otherwise exception handling from dlopened libraries won't work (it will call std::terminate immediately).
+ *
+ * NOTE: It is disabled with Thread Sanitizer because TSan can only use original "dl_iterate_phdr" function.
+ */
+void updatePHDRCache();
+
+/** Check if "dl_iterate_phdr" will be lock-free
+ * to determine if some features like Query Profiler can be used.
+ */
+bool hasPHDRCache();
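Following the contract in the header comment (refresh the cache after every load, never unload), a minimal sketch of how a plugin loader might use these hooks; LoadPlugin is a hypothetical caller, not part of the library.

    #include <common/phdr_cache.h>

    #include <dlfcn.h>
    #include <stdexcept>

    void* LoadPlugin(const char* path) {
        void* handle = dlopen(path, RTLD_NOW);
        if (!handle)
            throw std::runtime_error(dlerror());

        // Re-snapshot the dl_phdr_info list so exceptions thrown from the freshly
        // loaded library can still unwind; dlclose() must never be called afterwards.
        updatePHDRCache();
        return handle;
    }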