aboutsummaryrefslogtreecommitdiffstats
path: root/contrib
diff options
context:
space:
mode:
authorrobot-piglet <robot-piglet@yandex-team.com>2024-05-20 07:58:40 +0300
committerrobot-piglet <robot-piglet@yandex-team.com>2024-05-20 08:05:00 +0300
commitbcd5bcc390793791d293d386b2ebefbe683fb4e1 (patch)
treec93e3b8c847237e7e7626f4a07f1b657bb34f04d /contrib
parent1a9f1508fe9c8c5927ffebf33197a6108e70501d (diff)
downloadydb-bcd5bcc390793791d293d386b2ebefbe683fb4e1.tar.gz
Intermediate changes
Diffstat (limited to 'contrib')
-rw-r--r--contrib/python/Pygments/py3/.dist-info/METADATA10
-rw-r--r--contrib/python/Pygments/py3/AUTHORS6
-rw-r--r--contrib/python/Pygments/py3/pygments/__init__.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/__main__.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/cmdline.py24
-rw-r--r--contrib/python/Pygments/py3/pygments/console.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/filter.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/filters/__init__.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/formatter.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/__init__.py13
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/bbcode.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/groff.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/html.py75
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/img.py21
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/irc.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/latex.py51
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/other.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/rtf.py253
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/svg.py37
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/terminal.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/formatters/terminal256.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexer.py42
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/__init__.py23
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_ada_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_css_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_lilypond_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_luau_builtins.py62
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_mapping.py21
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_qlik_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_scheme_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/actionscript.py14
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ada.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/agile.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/algebra.py20
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ambient.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/amdgpu.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ampl.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/apdlexer.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/apl.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/archetype.py20
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/arrow.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/arturo.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/asc.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/asm.py43
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/asn1.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/automation.py14
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/bare.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/basic.py105
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/bdd.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/berry.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/bibtex.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/blueprint.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/boa.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/bqn.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/business.py43
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/c_cpp.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/c_like.py126
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/capnproto.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/carbon.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/cddl.py17
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/chapel.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/clean.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/comal.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/compiled.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/configs.py93
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/console.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/cplint.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/crystal.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/csound.py14
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/css.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/d.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/dalvik.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/data.py16
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/dax.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/devicetree.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/diff.py13
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/dns.py17
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/dotnet.py33
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/dsls.py54
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/dylan.py14
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ecl.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/eiffel.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/elm.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/elpi.py41
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/email.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/erlang.py46
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/esoteric.py23
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ezhil.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/factor.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/fantom.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/felix.py19
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/fift.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/floscript.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/forth.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/fortran.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/foxpro.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/freefem.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/func.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/functional.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/futhark.py13
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/gdscript.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/go.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py15
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/graph.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/graphics.py30
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/graphql.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/graphviz.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/gsql.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/haskell.py59
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/haxe.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/hdl.py15
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/hexdump.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/html.py31
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/idl.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/igor.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/inferno.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/installers.py16
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/int_fiction.py312
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/iolang.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/j.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/javascript.py37
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/jmespath.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/jslt.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/jsonnet.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/jsx.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/julia.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/jvm.py214
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/kuin.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/kusto.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ldap.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/lean.py145
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/lilypond.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/lisp.py428
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/macaulay2.py37
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/make.py11
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/markup.py232
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/math.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/matlab.py22
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/maxima.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/meson.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/mime.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/minecraft.py43
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/mips.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ml.py114
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/modeling.py41
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/modula2.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/mojo.py704
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/monte.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/mosel.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ncl.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/nimrod.py15
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/nit.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/nix.py13
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/oberon.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/objective.py20
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ooc.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/openscad.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/other.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/parasail.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/parsers.py85
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/pascal.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/pawn.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/perl.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/phix.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/php.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/pointless.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/pony.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/praat.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/procfile.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/prolog.py25
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/promql.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/prql.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ptx.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/python.py40
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/q.py11
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/qlik.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/qvt.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/r.py14
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/rdf.py28
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/rebol.py14
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/resource.py11
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ride.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/rita.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/rnc.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/roboconf.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/robotframework.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ruby.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/rust.py19
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/sas.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/savi.py261
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/scdoc.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/scripting.py396
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/sgf.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/shell.py236
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/sieve.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/slash.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/smalltalk.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/smithy.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/smv.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/snobol.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/solidity.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/soong.py78
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/sophia.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/special.py11
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/spice.py16
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/sql.py40
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/srcinfo.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/stata.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/supercollider.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/tact.py303
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/tal.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/tcl.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/teal.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/templates.py203
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/teraterm.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/testing.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/text.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/textedit.py15
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/textfmts.py22
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/theorem.py57
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/thingsdb.py76
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/tlb.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/tls.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/tnt.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/trafficscript.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/typoscript.py13
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/typst.py104
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/ul4.py52
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/unicon.py16
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/urbi.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/usd.py17
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/varnish.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/verification.py9
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/verifpal.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/vip.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/vyper.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/web.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/webassembly.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/webidl.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/webmisc.py26
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/wgsl.py13
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/whiley.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/wowtoc.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/wren.py7
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/x10.py13
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/xorg.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/yang.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/yara.py5
-rw-r--r--contrib/python/Pygments/py3/pygments/lexers/zig.py3
-rw-r--r--contrib/python/Pygments/py3/pygments/modeline.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/plugin.py22
-rw-r--r--contrib/python/Pygments/py3/pygments/regexopt.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/scanner.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/sphinxext.py24
-rw-r--r--contrib/python/Pygments/py3/pygments/style.py4
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/__init__.py6
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/_mapping.py1
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/abap.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/algol.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/algol_nu.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/arduino.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/autumn.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/borland.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/bw.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/coffee.py80
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/colorful.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/default.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/dracula.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/emacs.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/friendly.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/friendly_grayscale.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/fruity.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/gh_dark.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/gruvbox.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/igor.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/inkpot.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/lightbulb.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/lilypond.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/lovelace.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/manni.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/material.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/monokai.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/murphy.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/native.py8
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/nord.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/onedark.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/paraiso_light.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/pastie.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/perldoc.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py38
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/rrt.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/sas.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/solarized.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/staroffice.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/stata_dark.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/stata_light.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/tango.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/trac.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/vim.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/vs.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/xcode.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/styles/zenburn.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/token.py2
-rw-r--r--contrib/python/Pygments/py3/pygments/unistring.py10
-rw-r--r--contrib/python/Pygments/py3/pygments/util.py32
-rw-r--r--contrib/python/Pygments/py3/ya.make8
329 files changed, 4994 insertions, 2527 deletions
diff --git a/contrib/python/Pygments/py3/.dist-info/METADATA b/contrib/python/Pygments/py3/.dist-info/METADATA
index 4318a7b72f..3b102e1c8f 100644
--- a/contrib/python/Pygments/py3/.dist-info/METADATA
+++ b/contrib/python/Pygments/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
Name: Pygments
-Version: 2.17.2
+Version: 2.18.0
Summary: Pygments is a syntax highlighting package written in Python.
Project-URL: Homepage, https://pygments.org
Project-URL: Documentation, https://pygments.org/docs
@@ -22,7 +22,6 @@ Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
@@ -32,9 +31,8 @@ Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Text Processing :: Filters
Classifier: Topic :: Utilities
-Requires-Python: >=3.7
+Requires-Python: >=3.8
Provides-Extra: plugins
-Requires-Dist: importlib-metadata; python_version < '3.8' and extra == 'plugins'
Provides-Extra: windows-terminal
Requires-Dist: colorama>=0.4.6; extra == 'windows-terminal'
Description-Content-Type: text/x-rst
@@ -55,5 +53,5 @@ are:
formats that PIL supports and ANSI sequences
* it is usable as a command-line tool and as a library
-Copyright 2006-2023 by the Pygments team, see ``AUTHORS``.
+Copyright 2006-2024 by the Pygments team, see ``AUTHORS``.
Licensed under the BSD, see ``LICENSE`` for details.
diff --git a/contrib/python/Pygments/py3/AUTHORS b/contrib/python/Pygments/py3/AUTHORS
index 0e9f512718..4ec64ba1ef 100644
--- a/contrib/python/Pygments/py3/AUTHORS
+++ b/contrib/python/Pygments/py3/AUTHORS
@@ -46,6 +46,7 @@ Other contributors, listed alphabetically, are:
* chebee7i -- Python traceback lexer improvements
* Hiram Chirino -- Scaml and Jade lexers
* Mauricio Caceres -- SAS and Stata lexers.
+* Michael Camilleri, John Gabriele, sogaiu -- Janet lexer
* Ian Cooper -- VGL lexer
* David Corbett -- Inform, Jasmin, JSGF, Snowball, and TADS 3 lexers
* Leaf Corcoran -- MoonScript lexer
@@ -115,6 +116,8 @@ Other contributors, listed alphabetically, are:
MSDOS session, BC, WDiff
* Brian R. Jackson -- Tea lexer
* Christian Jann -- ShellSession lexer
+* Jonas Camillus Jeppesen -- Line numbers and line highlighting for
+ RTF-formatter
* Dennis Kaarsemaker -- sources.list lexer
* Dmitri Kabak -- Inferno Limbo lexer
* Igor Kalnitsky -- vhdl lexer
@@ -195,6 +198,7 @@ Other contributors, listed alphabetically, are:
* Kashif Rasul -- CUDA lexer
* Nathan Reed -- HLSL lexer
* Justin Reidy -- MXML lexer
+* Jonathon Reinhart, Google LLC -- Soong lexer
* Norman Richards -- JSON lexer
* Corey Richardson -- Rust lexer updates
* Fabrizio Riguzzi -- cplint leder
@@ -274,6 +278,6 @@ Other contributors, listed alphabetically, are:
* vanillajonathan -- PRQL lexer
* Nikolay Antipov -- OpenSCAD lexer
* Markus Meyer, Nextron Systems -- YARA lexer
-
+* Hannes Römer -- Mojo lexer
Many thanks for all contributions!
diff --git a/contrib/python/Pygments/py3/pygments/__init__.py b/contrib/python/Pygments/py3/pygments/__init__.py
index 6b77c465c3..e5a669ccbf 100644
--- a/contrib/python/Pygments/py3/pygments/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/__init__.py
@@ -21,12 +21,12 @@
.. _Pygments master branch:
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from io import StringIO, BytesIO
-__version__ = '2.17.2'
+__version__ = '2.18.0'
__docformat__ = 'restructuredtext'
__all__ = ['lex', 'format', 'highlight']
diff --git a/contrib/python/Pygments/py3/pygments/__main__.py b/contrib/python/Pygments/py3/pygments/__main__.py
index 5eb2c747aa..a243531748 100644
--- a/contrib/python/Pygments/py3/pygments/__main__.py
+++ b/contrib/python/Pygments/py3/pygments/__main__.py
@@ -4,7 +4,7 @@
Main entry point for ``python -m pygments``.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/cmdline.py b/contrib/python/Pygments/py3/pygments/cmdline.py
index 435231e651..df1c3b7a46 100644
--- a/contrib/python/Pygments/py3/pygments/cmdline.py
+++ b/contrib/python/Pygments/py3/pygments/cmdline.py
@@ -4,7 +4,7 @@
Command line interface.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -68,19 +68,19 @@ def _print_help(what, name):
try:
if what == 'lexer':
cls = get_lexer_by_name(name)
- print("Help on the %s lexer:" % cls.name)
+ print(f"Help on the {cls.name} lexer:")
print(dedent(cls.__doc__))
elif what == 'formatter':
cls = find_formatter_class(name)
- print("Help on the %s formatter:" % cls.name)
+ print(f"Help on the {cls.name} formatter:")
print(dedent(cls.__doc__))
elif what == 'filter':
cls = find_filter_class(name)
- print("Help on the %s filter:" % name)
+ print(f"Help on the {name} filter:")
print(dedent(cls.__doc__))
return 0
except (AttributeError, ValueError):
- print("%s not found!" % what, file=sys.stderr)
+ print(f"{what} not found!", file=sys.stderr)
return 1
@@ -97,7 +97,7 @@ def _print_list(what):
info.append(tup)
info.sort()
for i in info:
- print(('* %s\n %s %s') % i)
+ print(('* {}\n {} {}').format(*i))
elif what == 'formatter':
print()
@@ -112,7 +112,7 @@ def _print_list(what):
info.append(tup)
info.sort()
for i in info:
- print(('* %s\n %s %s') % i)
+ print(('* {}\n {} {}').format(*i))
elif what == 'filter':
print()
@@ -122,7 +122,7 @@ def _print_list(what):
for name in get_all_filters():
cls = find_filter_class(name)
print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
+ print(f" {docstring_headline(cls)}")
elif what == 'style':
print()
@@ -132,7 +132,7 @@ def _print_list(what):
for name in get_all_styles():
cls = get_style_by_name(name)
print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
+ print(f" {docstring_headline(cls)}")
def _print_list_as_json(requested_items):
@@ -185,8 +185,8 @@ def main_inner(parser, argns):
return 0
if argns.V:
- print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus '
- 'Chajdas and contributors.' % __version__)
+ print(f'Pygments version {__version__}, (c) 2006-2024 by Georg Brandl, Matthäus '
+ 'Chajdas and contributors.')
return 0
def is_only_option(opt):
@@ -659,7 +659,7 @@ def main(args=sys.argv):
msg = info[-1].strip()
if len(info) >= 3:
# extract relevant file and position info
- msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
+ msg += '\n (f{})'.format(info[-2].split('\n')[0].strip()[1:])
print(file=sys.stderr)
print('*** Error while highlighting:', file=sys.stderr)
print(msg, file=sys.stderr)
diff --git a/contrib/python/Pygments/py3/pygments/console.py b/contrib/python/Pygments/py3/pygments/console.py
index deb4937f74..4c1a06219c 100644
--- a/contrib/python/Pygments/py3/pygments/console.py
+++ b/contrib/python/Pygments/py3/pygments/console.py
@@ -4,7 +4,7 @@
Format colored console output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,12 +27,12 @@ light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brig
"brightmagenta", "brightcyan", "white"]
x = 30
-for d, l in zip(dark_colors, light_colors):
- codes[d] = esc + "%im" % x
- codes[l] = esc + "%im" % (60 + x)
+for dark, light in zip(dark_colors, light_colors):
+ codes[dark] = esc + "%im" % x
+ codes[light] = esc + "%im" % (60 + x)
x += 1
-del d, l, x
+del dark, light, x
codes["white"] = codes["bold"]
diff --git a/contrib/python/Pygments/py3/pygments/filter.py b/contrib/python/Pygments/py3/pygments/filter.py
index dafa08d156..aa6f76041b 100644
--- a/contrib/python/Pygments/py3/pygments/filter.py
+++ b/contrib/python/Pygments/py3/pygments/filter.py
@@ -4,7 +4,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -62,8 +62,7 @@ class FunctionFilter(Filter):
def __init__(self, **options):
if not hasattr(self, 'function'):
- raise TypeError('%r used without bound function' %
- self.__class__.__name__)
+ raise TypeError(f'{self.__class__.__name__!r} used without bound function')
Filter.__init__(self, **options)
def filter(self, lexer, stream):
diff --git a/contrib/python/Pygments/py3/pygments/filters/__init__.py b/contrib/python/Pygments/py3/pygments/filters/__init__.py
index 8bd53745cd..86e8dacf3d 100644
--- a/contrib/python/Pygments/py3/pygments/filters/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/filters/__init__.py
@@ -5,7 +5,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -39,7 +39,7 @@ def get_filter_by_name(filtername, **options):
if cls:
return cls(**options)
else:
- raise ClassNotFound('filter %r not found' % filtername)
+ raise ClassNotFound(f'filter {filtername!r} not found')
def get_all_filters():
@@ -79,9 +79,9 @@ class CodeTagFilter(Filter):
Filter.__init__(self, **options)
tags = get_list_opt(options, 'codetags',
['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
- self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
+ self.tag_re = re.compile(r'\b({})\b'.format('|'.join([
re.escape(tag) for tag in tags if tag
- ]))
+ ])))
def filter(self, lexer, stream):
regex = self.tag_re
diff --git a/contrib/python/Pygments/py3/pygments/formatter.py b/contrib/python/Pygments/py3/pygments/formatter.py
index 87183abbb2..cad3b388ca 100644
--- a/contrib/python/Pygments/py3/pygments/formatter.py
+++ b/contrib/python/Pygments/py3/pygments/formatter.py
@@ -4,7 +4,7 @@
Base formatter class.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -122,3 +122,8 @@ class Formatter:
# wrap the outfile in a StreamWriter
outfile = codecs.lookup(self.encoding)[3](outfile)
return self.format_unencoded(tokensource, outfile)
+
+ # Allow writing Formatter[str] or Formatter[bytes]. That's equivalent to
+ # Formatter. This helps when using third-party type stubs from typeshed.
+ def __class_getitem__(cls, name):
+ return cls
diff --git a/contrib/python/Pygments/py3/pygments/formatters/__init__.py b/contrib/python/Pygments/py3/pygments/formatters/__init__.py
index 6e482a1b73..014de975f5 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/__init__.py
@@ -4,7 +4,7 @@
Pygments formatters.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -77,7 +77,7 @@ def get_formatter_by_name(_alias, **options):
"""
cls = find_formatter_class(_alias)
if cls is None:
- raise ClassNotFound("no formatter found for name %r" % _alias)
+ raise ClassNotFound(f"no formatter found for name {_alias!r}")
return cls(**options)
@@ -103,17 +103,16 @@ def load_formatter_from_file(filename, formattername="CustomFormatter", **option
exec(f.read(), custom_namespace)
# Retrieve the class `formattername` from that namespace
if formattername not in custom_namespace:
- raise ClassNotFound('no valid %s class found in %s' %
- (formattername, filename))
+ raise ClassNotFound(f'no valid {formattername} class found in {filename}')
formatter_class = custom_namespace[formattername]
# And finally instantiate it with the options
return formatter_class(**options)
except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
+ raise ClassNotFound(f'cannot read {filename}: {err}')
except ClassNotFound:
raise
except Exception as err:
- raise ClassNotFound('error when loading custom formatter: %s' % err)
+ raise ClassNotFound(f'error when loading custom formatter: {err}')
def get_formatter_for_filename(fn, **options):
@@ -135,7 +134,7 @@ def get_formatter_for_filename(fn, **options):
for filename in cls.filenames:
if _fn_matches(fn, filename):
return cls(**options)
- raise ClassNotFound("no formatter found for file name %r" % fn)
+ raise ClassNotFound(f"no formatter found for file name {fn!r}")
class _automodule(types.ModuleType):
diff --git a/contrib/python/Pygments/py3/pygments/formatters/bbcode.py b/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
index 9ce4ebc5a4..9554081062 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/bbcode.py
@@ -4,7 +4,7 @@
BBcode formatter.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -60,7 +60,7 @@ class BBCodeFormatter(Formatter):
for ttype, ndef in self.style:
start = end = ''
if ndef['color']:
- start += '[color=#%s]' % ndef['color']
+ start += '[color=#{}]'.format(ndef['color'])
end = '[/color]' + end
if ndef['bold']:
start += '[b]'
diff --git a/contrib/python/Pygments/py3/pygments/formatters/groff.py b/contrib/python/Pygments/py3/pygments/formatters/groff.py
index 687fd54967..a9e071128b 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/groff.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/groff.py
@@ -4,7 +4,7 @@
Formatter for groff output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -63,7 +63,7 @@ class GroffFormatter(Formatter):
for ttype, ndef in self.style:
start = end = ''
if ndef['color']:
- start += '\\m[%s]' % ndef['color']
+ start += '\\m[{}]'.format(ndef['color'])
end = '\\m[]' + end
if ndef['bold']:
start += bold
@@ -72,7 +72,7 @@ class GroffFormatter(Formatter):
start += italic
end = regular + end
if ndef['bgcolor']:
- start += '\\M[%s]' % ndef['bgcolor']
+ start += '\\M[{}]'.format(ndef['bgcolor'])
end = '\\M[]' + end
self.styles[ttype] = start, end
diff --git a/contrib/python/Pygments/py3/pygments/formatters/html.py b/contrib/python/Pygments/py3/pygments/formatters/html.py
index df2469e2a5..3330c1d588 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/html.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/html.py
@@ -4,7 +4,7 @@
Formatter for HTML output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -62,7 +62,7 @@ def _get_ttype_class(ttype):
CSSFILE_TEMPLATE = '''\
/*
generated by Pygments <https://pygments.org/>
-Copyright 2006-2023 by the Pygments team.
+Copyright 2006-2024 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
*/
%(styledefs)s
@@ -73,7 +73,7 @@ DOC_HEADER = '''\
"http://www.w3.org/TR/html4/strict.dtd">
<!--
generated by Pygments <https://pygments.org/>
-Copyright 2006-2023 by the Pygments team.
+Copyright 2006-2024 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
-->
<html>
@@ -488,7 +488,7 @@ class HtmlFormatter(Formatter):
name = self._get_css_class(ttype)
style = ''
if ndef['color']:
- style += 'color: %s; ' % webify(ndef['color'])
+ style += 'color: {}; '.format(webify(ndef['color']))
if ndef['bold']:
style += 'font-weight: bold; '
if ndef['italic']:
@@ -496,9 +496,9 @@ class HtmlFormatter(Formatter):
if ndef['underline']:
style += 'text-decoration: underline; '
if ndef['bgcolor']:
- style += 'background-color: %s; ' % webify(ndef['bgcolor'])
+ style += 'background-color: {}; '.format(webify(ndef['bgcolor']))
if ndef['border']:
- style += 'border: 1px solid %s; ' % webify(ndef['border'])
+ style += 'border: 1px solid {}; '.format(webify(ndef['border']))
if style:
t2c[ttype] = name
# save len(ttype) to enable ordering the styles by
@@ -530,7 +530,7 @@ class HtmlFormatter(Formatter):
styles.sort()
lines = [
- '%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
+ f'{prefix(cls)} {{ {style} }} /* {repr(ttype)[6:]} */'
for (level, ttype, cls, style) in styles
]
@@ -548,24 +548,24 @@ class HtmlFormatter(Formatter):
if Text in self.ttype2class:
text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
lines.insert(
- 0, '%s{ background: %s;%s }' % (
+ 0, '{}{{ background: {};{} }}'.format(
prefix(''), bg_color, text_style
)
)
if hl_color is not None:
lines.insert(
- 0, '%s { background-color: %s }' % (prefix('hll'), hl_color)
+ 0, '{} {{ background-color: {} }}'.format(prefix('hll'), hl_color)
)
return lines
def get_linenos_style_defs(self):
lines = [
- 'pre { %s }' % self._pre_style,
- 'td.linenos .normal { %s }' % self._linenos_style,
- 'span.linenos { %s }' % self._linenos_style,
- 'td.linenos .special { %s }' % self._linenos_special_style,
- 'span.linenos.special { %s }' % self._linenos_special_style,
+ f'pre {{ {self._pre_style} }}',
+ f'td.linenos .normal {{ {self._linenos_style} }}',
+ f'span.linenos {{ {self._linenos_style} }}',
+ f'td.linenos .special {{ {self._linenos_special_style} }}',
+ f'span.linenos.special {{ {self._linenos_special_style} }}',
]
return lines
@@ -594,17 +594,15 @@ class HtmlFormatter(Formatter):
@property
def _linenos_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_color,
- self.style.line_number_background_color
- )
+ color = self.style.line_number_color
+ background_color = self.style.line_number_background_color
+ return f'color: {color}; background-color: {background_color}; padding-left: 5px; padding-right: 5px;'
@property
def _linenos_special_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_special_color,
- self.style.line_number_special_background_color
- )
+ color = self.style.line_number_special_color
+ background_color = self.style.line_number_special_background_color
+ return f'color: {color}; background-color: {background_color}; padding-left: 5px; padding-right: 5px;'
def _decodeifneeded(self, value):
if isinstance(value, bytes):
@@ -685,9 +683,9 @@ class HtmlFormatter(Formatter):
if nocls:
if special_line:
- style = ' style="%s"' % self._linenos_special_style
+ style = f' style="{self._linenos_special_style}"'
else:
- style = ' style="%s"' % self._linenos_style
+ style = f' style="{self._linenos_style}"'
else:
if special_line:
style = ' class="special"'
@@ -695,7 +693,7 @@ class HtmlFormatter(Formatter):
style = ' class="normal"'
if style:
- line = '<span%s>%s</span>' % (style, line)
+ line = f'<span{style}>{line}</span>'
lines.append(line)
@@ -744,9 +742,9 @@ class HtmlFormatter(Formatter):
if nocls:
if special_line:
- style = ' style="%s"' % self._linenos_special_style
+ style = f' style="{self._linenos_special_style}"'
else:
- style = ' style="%s"' % self._linenos_style
+ style = f' style="{self._linenos_style}"'
else:
if special_line:
style = ' class="linenos special"'
@@ -754,7 +752,7 @@ class HtmlFormatter(Formatter):
style = ' class="linenos"'
if style:
- linenos = '<span%s>%s</span>' % (style, line)
+ linenos = f'<span{style}>{line}</span>'
else:
linenos = line
@@ -791,13 +789,13 @@ class HtmlFormatter(Formatter):
style = []
if (self.noclasses and not self.nobackground and
self.style.background_color is not None):
- style.append('background: %s' % (self.style.background_color,))
+ style.append(f'background: {self.style.background_color}')
if self.cssstyles:
style.append(self.cssstyles)
style = '; '.join(style)
- yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
- (style and (' style="%s"' % style)) + '>')
+ yield 0, ('<div' + (self.cssclass and f' class="{self.cssclass}"') +
+ (style and (f' style="{style}"')) + '>')
yield from inner
yield 0, '</div>\n'
@@ -814,7 +812,7 @@ class HtmlFormatter(Formatter):
# the empty span here is to keep leading empty lines from being
# ignored by HTML parsers
- yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
+ yield 0, ('<pre' + (style and f' style="{style}"') + '><span></span>')
yield from inner
yield 0, '</pre>'
@@ -843,18 +841,18 @@ class HtmlFormatter(Formatter):
try:
cspan = self.span_element_openers[ttype]
except KeyError:
- title = ' title="%s"' % '.'.join(ttype) if self.debug_token_types else ''
+ title = ' title="{}"'.format('.'.join(ttype)) if self.debug_token_types else ''
if nocls:
css_style = self._get_css_inline_styles(ttype)
if css_style:
css_style = self.class2style[css_style][0]
- cspan = '<span style="%s"%s>' % (css_style, title)
+ cspan = f'<span style="{css_style}"{title}>'
else:
cspan = ''
else:
css_class = self._get_css_classes(ttype)
if css_class:
- cspan = '<span class="%s"%s>' % (css_class, title)
+ cspan = f'<span class="{css_class}"{title}>'
else:
cspan = ''
self.span_element_openers[ttype] = cspan
@@ -927,11 +925,10 @@ class HtmlFormatter(Formatter):
if self.noclasses:
style = ''
if self.style.highlight_color is not None:
- style = (' style="background-color: %s"' %
- (self.style.highlight_color,))
- yield 1, '<span%s>%s</span>' % (style, value)
+ style = (f' style="background-color: {self.style.highlight_color}"')
+ yield 1, f'<span{style}>{value}</span>'
else:
- yield 1, '<span class="hll">%s</span>' % value
+ yield 1, f'<span class="hll">{value}</span>'
else:
yield 1, value
diff --git a/contrib/python/Pygments/py3/pygments/formatters/img.py b/contrib/python/Pygments/py3/pygments/formatters/img.py
index dcf09da97f..d5c97d47b5 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/img.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/img.py
@@ -4,7 +4,7 @@
Formatter for Pixmap output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
@@ -90,7 +90,7 @@ class FontManager:
self._create_nix()
def _get_nix_font_path(self, name, style):
- proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'],
+ proc = subprocess.Popen(['fc-list', f"{name}:style={style}", 'file'],
stdout=subprocess.PIPE, stderr=None)
stdout, _ = proc.communicate()
if proc.returncode == 0:
@@ -110,8 +110,7 @@ class FontManager:
self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
break
else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
+ raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
for stylename in STYLES[style]:
path = self._get_nix_font_path(self.font_name, stylename)
@@ -142,8 +141,7 @@ class FontManager:
self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
break
else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
+ raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
for stylename in STYLES[style]:
path = self._get_mac_font_path(font_map, self.font_name, stylename)
@@ -160,15 +158,14 @@ class FontManager:
for suffix in ('', ' (TrueType)'):
for style in styles:
try:
- valname = '%s%s%s' % (basename, style and ' '+style, suffix)
+ valname = '{}{}{}'.format(basename, style and ' '+style, suffix)
val, _ = _winreg.QueryValueEx(key, valname)
return val
except OSError:
continue
else:
if fail:
- raise FontNotFound('Font %s (%s) not found in registry' %
- (basename, styles[0]))
+ raise FontNotFound(f'Font {basename} ({styles[0]}) not found in registry')
return None
def _create_win(self):
@@ -633,7 +630,11 @@ class ImageFormatter(Formatter):
fill=self.hl_color)
for pos, value, font, text_fg, text_bg in self.drawables:
if text_bg:
- text_size = draw.textsize(text=value, font=font)
+ # see deprecations https://pillow.readthedocs.io/en/stable/releasenotes/9.2.0.html#font-size-and-offset-methods
+ if hasattr(draw, 'textsize'):
+ text_size = draw.textsize(text=value, font=font)
+ else:
+ text_size = font.getbbox(value)[2:]
draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
draw.text(pos, value, font=font, fill=text_fg)
im.save(outfile, self.image_format.upper())
diff --git a/contrib/python/Pygments/py3/pygments/formatters/irc.py b/contrib/python/Pygments/py3/pygments/formatters/irc.py
index 334aeef492..a1e3979beb 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/irc.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/irc.py
@@ -4,7 +4,7 @@
Formatter for IRC output
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/formatters/latex.py b/contrib/python/Pygments/py3/pygments/formatters/latex.py
index b130bfaf69..2f7971e8d3 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/latex.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/latex.py
@@ -4,7 +4,7 @@
Formatter for LaTeX fancyvrb output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,21 +23,21 @@ def escape_tex(text, commandprefix):
return text.replace('\\', '\x00'). \
replace('{', '\x01'). \
replace('}', '\x02'). \
- replace('\x00', r'\%sZbs{}' % commandprefix). \
- replace('\x01', r'\%sZob{}' % commandprefix). \
- replace('\x02', r'\%sZcb{}' % commandprefix). \
- replace('^', r'\%sZca{}' % commandprefix). \
- replace('_', r'\%sZus{}' % commandprefix). \
- replace('&', r'\%sZam{}' % commandprefix). \
- replace('<', r'\%sZlt{}' % commandprefix). \
- replace('>', r'\%sZgt{}' % commandprefix). \
- replace('#', r'\%sZsh{}' % commandprefix). \
- replace('%', r'\%sZpc{}' % commandprefix). \
- replace('$', r'\%sZdl{}' % commandprefix). \
- replace('-', r'\%sZhy{}' % commandprefix). \
- replace("'", r'\%sZsq{}' % commandprefix). \
- replace('"', r'\%sZdq{}' % commandprefix). \
- replace('~', r'\%sZti{}' % commandprefix)
+ replace('\x00', rf'\{commandprefix}Zbs{{}}'). \
+ replace('\x01', rf'\{commandprefix}Zob{{}}'). \
+ replace('\x02', rf'\{commandprefix}Zcb{{}}'). \
+ replace('^', rf'\{commandprefix}Zca{{}}'). \
+ replace('_', rf'\{commandprefix}Zus{{}}'). \
+ replace('&', rf'\{commandprefix}Zam{{}}'). \
+ replace('<', rf'\{commandprefix}Zlt{{}}'). \
+ replace('>', rf'\{commandprefix}Zgt{{}}'). \
+ replace('#', rf'\{commandprefix}Zsh{{}}'). \
+ replace('%', rf'\{commandprefix}Zpc{{}}'). \
+ replace('$', rf'\{commandprefix}Zdl{{}}'). \
+ replace('-', rf'\{commandprefix}Zhy{{}}'). \
+ replace("'", rf'\{commandprefix}Zsq{{}}'). \
+ replace('"', rf'\{commandprefix}Zdq{{}}'). \
+ replace('~', rf'\{commandprefix}Zti{{}}')
DOC_TEMPLATE = r'''
@@ -304,17 +304,14 @@ class LatexFormatter(Formatter):
if ndef['mono']:
cmndef += r'\let\$$@ff=\textsf'
if ndef['color']:
- cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
- rgbcolor(ndef['color']))
+ cmndef += (r'\def\$$@tc##1{{\textcolor[rgb]{{{}}}{{##1}}}}'.format(rgbcolor(ndef['color'])))
if ndef['border']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{\string -\fboxrule}'
- r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}}' %
- (rgbcolor(ndef['border']),
+ cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{\string -\fboxrule}}'
+ r'\fcolorbox[rgb]{{{}}}{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['border']),
rgbcolor(ndef['bgcolor'])))
elif ndef['bgcolor']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{0pt}'
- r'\colorbox[rgb]{%s}{\strut ##1}}}' %
- rgbcolor(ndef['bgcolor']))
+ cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{0pt}}'
+ r'\colorbox[rgb]{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['bgcolor'])))
if cmndef == '':
continue
cmndef = cmndef.replace('$$', cp)
@@ -329,7 +326,7 @@ class LatexFormatter(Formatter):
cp = self.commandprefix
styles = []
for name, definition in self.cmd2def.items():
- styles.append(r'\@namedef{%s@tok@%s}{%s}' % (cp, name, definition))
+ styles.append(rf'\@namedef{{{cp}@tok@{name}}}{{{definition}}}')
return STYLE_TEMPLATE % {'cp': self.commandprefix,
'styles': '\n'.join(styles)}
@@ -410,10 +407,10 @@ class LatexFormatter(Formatter):
spl = value.split('\n')
for line in spl[:-1]:
if line:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
+ outfile.write(f"\\{cp}{{{styleval}}}{{{line}}}")
outfile.write('\n')
if spl[-1]:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
+ outfile.write(f"\\{cp}{{{styleval}}}{{{spl[-1]}}}")
else:
outfile.write(value)
diff --git a/contrib/python/Pygments/py3/pygments/formatters/other.py b/contrib/python/Pygments/py3/pygments/formatters/other.py
index 8004764371..56e8f033ce 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/other.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/other.py
@@ -4,7 +4,7 @@
Other formatters: NullFormatter, RawTokenFormatter.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -74,8 +74,7 @@ class RawTokenFormatter(Formatter):
try:
colorize(self.error_color, '')
except KeyError:
- raise ValueError("Invalid color %r specified" %
- self.error_color)
+ raise ValueError(f"Invalid color {self.error_color!r} specified")
def format(self, tokensource, outfile):
try:
@@ -147,7 +146,7 @@ class TestcaseFormatter(Formatter):
outbuf = []
for ttype, value in tokensource:
rawbuf.append(value)
- outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
+ outbuf.append(f'{indentation}({ttype}, {value!r}),\n')
before = TESTCASE_BEFORE % (''.join(rawbuf),)
during = ''.join(outbuf)
diff --git a/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py b/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py
index 50872fe247..550dabb41b 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/pangomarkup.py
@@ -4,7 +4,7 @@
Formatter for Pango markup output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -45,7 +45,7 @@ class PangoMarkupFormatter(Formatter):
start = ''
end = ''
if style['color']:
- start += '<span fgcolor="#%s">' % style['color']
+ start += '<span fgcolor="#{}">'.format(style['color'])
end = '</span>' + end
if style['bold']:
start += '<b>'
diff --git a/contrib/python/Pygments/py3/pygments/formatters/rtf.py b/contrib/python/Pygments/py3/pygments/formatters/rtf.py
index d3a83fa603..7f8b7e4cd7 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/rtf.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/rtf.py
@@ -4,12 +4,14 @@
A formatter that generates RTF files.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from collections import OrderedDict
from pygments.formatter import Formatter
-from pygments.util import get_int_opt, surrogatepair
+from pygments.style import _ansimap
+from pygments.util import get_bool_opt, get_int_opt, get_list_opt, surrogatepair
__all__ = ['RtfFormatter']
@@ -42,6 +44,59 @@ class RtfFormatter(Formatter):
default is 24 half-points, giving a size 12 font.
.. versionadded:: 2.0
+
+ `linenos`
+ Turn on line numbering (default: ``False``).
+
+ .. versionadded:: 2.18
+
+ `lineno_fontsize`
+ Font size for line numbers. Size is specified in half points
+ (default: `fontsize`).
+
+ .. versionadded:: 2.18
+
+ `lineno_padding`
+ Number of spaces between the (inline) line numbers and the
+ source code (default: ``2``).
+
+ .. versionadded:: 2.18
+
+ `linenostart`
+ The line number for the first line (default: ``1``).
+
+ .. versionadded:: 2.18
+
+ `linenostep`
+ If set to a number n > 1, only every nth line number is printed.
+
+ .. versionadded:: 2.18
+
+ `lineno_color`
+ Color for line numbers specified as a hex triplet, e.g. ``'5e5e5e'``.
+ Defaults to the style's line number color if it is a hex triplet,
+ otherwise ansi bright black.
+
+ .. versionadded:: 2.18
+
+ `hl_lines`
+ Specify a list of lines to be highlighted, as line numbers separated by
+ spaces, e.g. ``'3 7 8'``. The line numbers are relative to the input
+ (i.e. the first line is line 1) unless `hl_linenostart` is set.
+
+ .. versionadded:: 2.18
+
+ `hl_color`
+ Color for highlighting the lines specified in `hl_lines`, specified as
+ a hex triplet (default: style's `highlight_color`).
+
+ .. versionadded:: 2.18
+
+ `hl_linenostart`
+ If set to ``True`` line numbers in `hl_lines` are specified
+ relative to `linenostart` (default ``False``).
+
+ .. versionadded:: 2.18
"""
name = 'RTF'
aliases = ['rtf']
@@ -62,6 +117,40 @@ class RtfFormatter(Formatter):
Formatter.__init__(self, **options)
self.fontface = options.get('fontface') or ''
self.fontsize = get_int_opt(options, 'fontsize', 0)
+ self.linenos = get_bool_opt(options, 'linenos', False)
+ self.lineno_fontsize = get_int_opt(options, 'lineno_fontsize',
+ self.fontsize)
+ self.lineno_padding = get_int_opt(options, 'lineno_padding', 2)
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.hl_linenostart = get_bool_opt(options, 'hl_linenostart', False)
+
+ self.hl_color = options.get('hl_color', '')
+ if not self.hl_color:
+ self.hl_color = self.style.highlight_color
+
+ self.hl_lines = []
+ for lineno in get_list_opt(options, 'hl_lines', []):
+ try:
+ lineno = int(lineno)
+ if self.hl_linenostart:
+ lineno = lineno - self.linenostart + 1
+ self.hl_lines.append(lineno)
+ except ValueError:
+ pass
+
+ self.lineno_color = options.get('lineno_color', '')
+ if not self.lineno_color:
+ if self.style.line_number_color == 'inherit':
+ # style color is the css value 'inherit'
+ # default to ansi bright-black
+ self.lineno_color = _ansimap['ansibrightblack']
+ else:
+ # style color is assumed to be a hex triplet as other
+ # colors in pygments/style.py
+ self.lineno_color = self.style.line_number_color
+
+ self.color_mapping = self._create_color_mapping()
def _escape(self, text):
return text.replace('\\', '\\\\') \
@@ -90,43 +179,145 @@ class RtfFormatter(Formatter):
# Force surrogate pairs
buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn))
- return ''.join(buf).replace('\n', '\\par\n')
+ return ''.join(buf).replace('\n', '\\par')
- def format_unencoded(self, tokensource, outfile):
- # rtf 1.8 header
- outfile.write('{\\rtf1\\ansi\\uc0\\deff0'
- '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
- '{\\colortbl;' % (self.fontface and
- ' ' + self._escape(self.fontface) or
- ''))
-
- # convert colors and save them in a mapping to access them later.
- color_mapping = {}
+ @staticmethod
+ def hex_to_rtf_color(hex_color):
+ if hex_color[0] == "#":
+ hex_color = hex_color[1:]
+
+ return '\\red%d\\green%d\\blue%d;' % (
+ int(hex_color[0:2], 16),
+ int(hex_color[2:4], 16),
+ int(hex_color[4:6], 16)
+ )
+
+ def _split_tokens_on_newlines(self, tokensource):
+ """
+ Split tokens containing newline characters into multiple token
+ each representing a line of the input file. Needed for numbering
+ lines of e.g. multiline comments.
+ """
+ for ttype, value in tokensource:
+ if value == '\n':
+ yield (ttype, value)
+ elif "\n" in value:
+ lines = value.split("\n")
+ for line in lines[:-1]:
+ yield (ttype, line+"\n")
+ if lines[-1]:
+ yield (ttype, lines[-1])
+ else:
+ yield (ttype, value)
+
+ def _create_color_mapping(self):
+ """
+ Create a mapping of style hex colors to index/offset in
+ the RTF color table.
+ """
+ color_mapping = OrderedDict()
offset = 1
+
+ if self.linenos:
+ color_mapping[self.lineno_color] = offset
+ offset += 1
+
+ if self.hl_lines:
+ color_mapping[self.hl_color] = offset
+ offset += 1
+
for _, style in self.style:
for color in style['color'], style['bgcolor'], style['border']:
if color and color not in color_mapping:
color_mapping[color] = offset
- outfile.write('\\red%d\\green%d\\blue%d;' % (
- int(color[0:2], 16),
- int(color[2:4], 16),
- int(color[4:6], 16)
- ))
offset += 1
- outfile.write('}\\f0 ')
+
+ return color_mapping
+
+ @property
+ def _lineno_template(self):
+ if self.lineno_fontsize != self.fontsize:
+ return '{{\\fs{} \\cf{} %s{}}}'.format(self.lineno_fontsize,
+ self.color_mapping[self.lineno_color],
+ " " * self.lineno_padding)
+
+ return '{{\\cf{} %s{}}}'.format(self.color_mapping[self.lineno_color],
+ " " * self.lineno_padding)
+
+ @property
+ def _hl_open_str(self):
+ return rf'{{\highlight{self.color_mapping[self.hl_color]} '
+
+ @property
+ def _rtf_header(self):
+ lines = []
+ # rtf 1.8 header
+ lines.append('{\\rtf1\\ansi\\uc0\\deff0'
+ '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
+ % (self.fontface and ' '
+ + self._escape(self.fontface) or ''))
+
+ # color table
+ lines.append('{\\colortbl;')
+ for color, _ in self.color_mapping.items():
+ lines.append(self.hex_to_rtf_color(color))
+ lines.append('}')
+
+ # font and fontsize
+ lines.append('\\f0\\sa0')
if self.fontsize:
- outfile.write('\\fs%d' % self.fontsize)
+ lines.append('\\fs%d' % self.fontsize)
+
+ # ensure Libre Office Writer imports and renders consecutive
+ # space characters the same width, needed for line numbering.
+ # https://bugs.documentfoundation.org/show_bug.cgi?id=144050
+ lines.append('\\dntblnsbdb')
+
+ return lines
+
+ def format_unencoded(self, tokensource, outfile):
+ for line in self._rtf_header:
+ outfile.write(line + "\n")
+
+ tokensource = self._split_tokens_on_newlines(tokensource)
+
+ # first pass of tokens to count lines, needed for line numbering
+ if self.linenos:
+ line_count = 0
+ tokens = [] # for copying the token source generator
+ for ttype, value in tokensource:
+ tokens.append((ttype, value))
+ if value.endswith("\n"):
+ line_count += 1
+
+ # width of line number strings (for padding with spaces)
+ linenos_width = len(str(line_count+self.linenostart-1))
+
+ tokensource = tokens
# highlight stream
+ lineno = 1
+ start_new_line = True
for ttype, value in tokensource:
+ if start_new_line and lineno in self.hl_lines:
+ outfile.write(self._hl_open_str)
+
+ if start_new_line and self.linenos:
+ if (lineno-self.linenostart+1)%self.linenostep == 0:
+ current_lineno = lineno + self.linenostart - 1
+ lineno_str = str(current_lineno).rjust(linenos_width)
+ else:
+ lineno_str = "".rjust(linenos_width)
+ outfile.write(self._lineno_template % lineno_str)
+
while not self.style.styles_token(ttype) and ttype.parent:
ttype = ttype.parent
style = self.style.style_for_token(ttype)
buf = []
if style['bgcolor']:
- buf.append('\\cb%d' % color_mapping[style['bgcolor']])
+ buf.append('\\cb%d' % self.color_mapping[style['bgcolor']])
if style['color']:
- buf.append('\\cf%d' % color_mapping[style['color']])
+ buf.append('\\cf%d' % self.color_mapping[style['color']])
if style['bold']:
buf.append('\\b')
if style['italic']:
@@ -135,12 +326,24 @@ class RtfFormatter(Formatter):
buf.append('\\ul')
if style['border']:
buf.append('\\chbrdr\\chcfpat%d' %
- color_mapping[style['border']])
+ self.color_mapping[style['border']])
start = ''.join(buf)
if start:
- outfile.write('{%s ' % start)
+ outfile.write(f'{{{start} ')
outfile.write(self._escape_text(value))
if start:
outfile.write('}')
+ start_new_line = False
+
+ # complete line of input
+ if value.endswith("\n"):
+ # close line highlighting
+ if lineno in self.hl_lines:
+ outfile.write('}')
+ # newline in RTF file after closing }
+ outfile.write("\n")
+
+ start_new_line = True
+ lineno += 1
- outfile.write('}')
+ outfile.write('}\n')
diff --git a/contrib/python/Pygments/py3/pygments/formatters/svg.py b/contrib/python/Pygments/py3/pygments/formatters/svg.py
index e3cd26955a..74019b6961 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/svg.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/svg.py
@@ -4,7 +4,7 @@
Formatter for SVG output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -60,11 +60,11 @@ class SvgFormatter(Formatter):
`linenostep`
If set to a number n > 1, only every nth line number is printed.
-
+
`linenowidth`
Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
- for up to 4-digit line numbers. Increase width for longer code blocks).
-
+ for up to 4-digit line numbers. Increase width for longer code blocks).
+
`xoffset`
Starting offset in X direction, defaults to ``0``.
@@ -97,10 +97,11 @@ class SvgFormatter(Formatter):
self.fontsize = options.get('fontsize', '14px')
self.xoffset = get_int_opt(options, 'xoffset', 0)
fs = self.fontsize.strip()
- if fs.endswith('px'): fs = fs[:-2].strip()
+ if fs.endswith('px'):
+ fs = fs[:-2].strip()
try:
int_fs = int(fs)
- except:
+ except ValueError:
int_fs = 20
self.yoffset = get_int_opt(options, 'yoffset', int_fs)
self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
@@ -122,30 +123,27 @@ class SvgFormatter(Formatter):
y = self.yoffset
if not self.nowrap:
if self.encoding:
- outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
- self.encoding)
+ outfile.write(f'<?xml version="1.0" encoding="{self.encoding}"?>\n')
else:
outfile.write('<?xml version="1.0"?>\n')
outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
'"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
'svg10.dtd">\n')
outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
- outfile.write('<g font-family="%s" font-size="%s">\n' %
- (self.fontfamily, self.fontsize))
-
- counter = self.linenostart
+ outfile.write(f'<g font-family="{self.fontfamily}" font-size="{self.fontsize}">\n')
+
+ counter = self.linenostart
counter_step = self.linenostep
counter_style = self._get_style(Comment)
line_x = x
-
+
if self.linenos:
if counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" %s text-anchor="end">%s</text>' %
- (x+self.linenowidth,y,counter_style,counter))
+ outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" {counter_style} text-anchor="end">{counter}</text>')
line_x += self.linenowidth + self.ystep
counter += 1
- outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
+ outfile.write(f'<text x="{line_x}" y="{y}" xml:space="preserve">')
for ttype, value in tokensource:
style = self._get_style(ttype)
tspan = style and '<tspan' + style + '>' or ''
@@ -159,11 +157,10 @@ class SvgFormatter(Formatter):
y += self.ystep
outfile.write('</text>\n')
if self.linenos and counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" text-anchor="end" %s>%s</text>' %
- (x+self.linenowidth,y,counter_style,counter))
-
+ outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" text-anchor="end" {counter_style}>{counter}</text>')
+
counter += 1
- outfile.write('<text x="%s" y="%s" ' 'xml:space="preserve">' % (line_x,y))
+ outfile.write(f'<text x="{line_x}" y="{y}" ' 'xml:space="preserve">')
outfile.write(tspan + parts[-1] + tspanend)
outfile.write('</text>')
diff --git a/contrib/python/Pygments/py3/pygments/formatters/terminal.py b/contrib/python/Pygments/py3/pygments/formatters/terminal.py
index 636f3503df..5c38805171 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/terminal.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/terminal.py
@@ -4,7 +4,7 @@
Formatter for terminal output with ANSI sequences.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/formatters/terminal256.py b/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
index dba5b63e21..6402d8c469 100644
--- a/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
+++ b/contrib/python/Pygments/py3/pygments/formatters/terminal256.py
@@ -10,7 +10,7 @@
Formatter version 1.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexer.py b/contrib/python/Pygments/py3/pygments/lexer.py
index eb5403e798..3061964a7a 100644
--- a/contrib/python/Pygments/py3/pygments/lexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexer.py
@@ -4,7 +4,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -67,10 +67,12 @@ class Lexer(metaclass=LexerMeta):
:no-value:
.. autoattribute:: priority
- Lexers included in Pygments should have an additional attribute:
+ Lexers included in Pygments should have two additional attributes:
.. autoattribute:: url
:no-value:
+ .. autoattribute:: version_added
+ :no-value:
Lexers included in Pygments may have additional attributes:
@@ -130,9 +132,12 @@ class Lexer(metaclass=LexerMeta):
priority = 0
#: URL of the language specification/definition. Used in the Pygments
- #: documentation.
+ #: documentation. Set to an empty string to disable.
url = None
+ #: Version of Pygments in which the lexer was added.
+ version_added = None
+
#: Example file name. Relative to the ``tests/examplefiles`` directory.
#: This is used by the documentation generator to show an example.
_example = None
@@ -169,10 +174,9 @@ class Lexer(metaclass=LexerMeta):
def __repr__(self):
if self.options:
- return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
- self.options)
+ return f'<pygments.lexers.{self.__class__.__name__} with {self.options!r}>'
else:
- return '<pygments.lexers.%s>' % self.__class__.__name__
+ return f'<pygments.lexers.{self.__class__.__name__}>'
def add_filter(self, filter_, **options):
"""
@@ -506,7 +510,7 @@ class RegexLexerMeta(LexerMeta):
def _process_token(cls, token):
"""Preprocess the token component of a token definition."""
assert type(token) is _TokenType or callable(token), \
- 'token type must be simple type or callable, not %r' % (token,)
+ f'token type must be simple type or callable, not {token!r}'
return token
def _process_new_state(cls, new_state, unprocessed, processed):
@@ -522,14 +526,14 @@ class RegexLexerMeta(LexerMeta):
elif new_state[:5] == '#pop:':
return -int(new_state[5:])
else:
- assert False, 'unknown new state %r' % new_state
+ assert False, f'unknown new state {new_state!r}'
elif isinstance(new_state, combined):
# combine a new state from existing ones
tmp_state = '_tmp_%d' % cls._tmpname
cls._tmpname += 1
itokens = []
for istate in new_state:
- assert istate != new_state, 'circular state ref %r' % istate
+ assert istate != new_state, f'circular state ref {istate!r}'
itokens.extend(cls._process_state(unprocessed,
processed, istate))
processed[tmp_state] = itokens
@@ -542,12 +546,12 @@ class RegexLexerMeta(LexerMeta):
'unknown new state ' + istate
return new_state
else:
- assert False, 'unknown new state def %r' % new_state
+ assert False, f'unknown new state def {new_state!r}'
def _process_state(cls, unprocessed, processed, state):
"""Preprocess a single state definition."""
- assert type(state) is str, "wrong state name %r" % state
- assert state[0] != '#', "invalid state name %r" % state
+ assert isinstance(state, str), f"wrong state name {state!r}"
+ assert state[0] != '#', f"invalid state name {state!r}"
if state in processed:
return processed[state]
tokens = processed[state] = []
@@ -555,7 +559,7 @@ class RegexLexerMeta(LexerMeta):
for tdef in unprocessed[state]:
if isinstance(tdef, include):
# it's a state reference
- assert tdef != state, "circular state reference %r" % state
+ assert tdef != state, f"circular state reference {state!r}"
tokens.extend(cls._process_state(unprocessed, processed,
str(tdef)))
continue
@@ -569,13 +573,12 @@ class RegexLexerMeta(LexerMeta):
tokens.append((re.compile('').match, None, new_state))
continue
- assert type(tdef) is tuple, "wrong rule def %r" % tdef
+ assert type(tdef) is tuple, f"wrong rule def {tdef!r}"
try:
rex = cls._process_regex(tdef[0], rflags, state)
except Exception as err:
- raise ValueError("uncompilable regex %r in state %r of %r: %s" %
- (tdef[0], state, cls, err)) from err
+ raise ValueError(f"uncompilable regex {tdef[0]!r} in state {state!r} of {cls!r}: {err}") from err
token = cls._process_token(tdef[1])
@@ -736,7 +739,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
elif new_state == '#push':
statestack.append(statestack[-1])
else:
- assert False, "wrong state def: %r" % new_state
+ assert False, f"wrong state def: {new_state!r}"
statetokens = tokendefs[statestack[-1]]
break
else:
@@ -768,8 +771,7 @@ class LexerContext:
self.stack = stack or ['root']
def __repr__(self):
- return 'LexerContext(%r, %r, %r)' % (
- self.text, self.pos, self.stack)
+ return f'LexerContext({self.text!r}, {self.pos!r}, {self.stack!r})'
class ExtendedRegexLexer(RegexLexer):
@@ -824,7 +826,7 @@ class ExtendedRegexLexer(RegexLexer):
elif new_state == '#push':
ctx.stack.append(ctx.stack[-1])
else:
- assert False, "wrong state def: %r" % new_state
+ assert False, f"wrong state def: {new_state!r}"
statetokens = tokendefs[ctx.stack[-1]]
break
else:
diff --git a/contrib/python/Pygments/py3/pygments/lexers/__init__.py b/contrib/python/Pygments/py3/pygments/lexers/__init__.py
index 5701be7b6c..a2eaf71f22 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/__init__.py
@@ -4,7 +4,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -93,7 +93,7 @@ def find_lexer_class_by_name(_alias):
.. versionadded:: 2.2
"""
if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
# lookup builtin lexers
for module_name, name, aliases, _, _ in LEXERS.values():
if _alias.lower() in aliases:
@@ -104,7 +104,7 @@ def find_lexer_class_by_name(_alias):
for cls in find_plugin_lexers():
if _alias.lower() in cls.aliases:
return cls
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
def get_lexer_by_name(_alias, **options):
@@ -117,7 +117,7 @@ def get_lexer_by_name(_alias, **options):
found.
"""
if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
# lookup builtin lexers
for module_name, name, aliases, _, _ in LEXERS.values():
@@ -129,7 +129,7 @@ def get_lexer_by_name(_alias, **options):
for cls in find_plugin_lexers():
if _alias.lower() in cls.aliases:
return cls(**options)
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
def load_lexer_from_file(filename, lexername="CustomLexer", **options):
@@ -154,17 +154,16 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options):
exec(f.read(), custom_namespace)
# Retrieve the class `lexername` from that namespace
if lexername not in custom_namespace:
- raise ClassNotFound('no valid %s class found in %s' %
- (lexername, filename))
+            raise ClassNotFound(f'no valid {lexername} class found in {filename}')
lexer_class = custom_namespace[lexername]
# And finally instantiate it with the options
return lexer_class(**options)
except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
+        raise ClassNotFound(f'cannot read {filename}: {err}')
except ClassNotFound:
raise
except Exception as err:
- raise ClassNotFound('error when loading custom lexer: %s' % err)
+ raise ClassNotFound(f'error when loading custom lexer: {err}')
def find_lexer_class_for_filename(_fn, code=None):
@@ -225,7 +224,7 @@ def get_lexer_for_filename(_fn, code=None, **options):
"""
res = find_lexer_class_for_filename(_fn, code)
if not res:
- raise ClassNotFound('no lexer for filename %r found' % _fn)
+ raise ClassNotFound(f'no lexer for filename {_fn!r} found')
return res(**options)
@@ -245,7 +244,7 @@ def get_lexer_for_mimetype(_mime, **options):
for cls in find_plugin_lexers():
if _mime in cls.mimetypes:
return cls(**options)
- raise ClassNotFound('no lexer for mimetype %r found' % _mime)
+ raise ClassNotFound(f'no lexer for mimetype {_mime!r} found')
def _iter_lexerclasses(plugins=True):
@@ -280,7 +279,7 @@ def guess_lexer_for_filename(_fn, _text, **options):
matching_lexers.add(lexer)
primary[lexer] = False
if not matching_lexers:
- raise ClassNotFound('no lexer for filename %r found' % fn)
+ raise ClassNotFound(f'no lexer for filename {fn!r} found')
if len(matching_lexers) == 1:
return matching_lexers.pop()(**options)
result = []
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_ada_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_ada_builtins.py
index 24f9b197b0..6b6a7c556f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_ada_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_ada_builtins.py
@@ -4,7 +4,7 @@
Ada builtins.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
index c4a362ca6d..be73baf2d0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_asy_builtins.py
@@ -9,7 +9,7 @@
TODO: perl/python script in Asymptote SVN similar to asy-list.pl but only
for function and variable names.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
index beb7b4d6f1..1e02451caa 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_cl_builtins.py
@@ -4,7 +4,7 @@
ANSI Common Lisp builtins.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
index 4b7dc1ad40..f45b85a287 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_cocoa_builtins.py
@@ -7,7 +7,7 @@
File may be also used as standalone generator for above.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -41,7 +41,7 @@ if __name__ == '__main__': # pragma: no cover
with open(headerFilePath, encoding='utf-8') as f:
content = f.read()
except UnicodeDecodeError:
- print("Decoding error for file: {0}".format(headerFilePath))
+ print(f"Decoding error for file: {headerFilePath}")
continue
res = re.findall(r'(?<=@interface )\w+', content)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
index 5f37306843..e1ea1a015c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_csound_builtins.py
@@ -2,7 +2,7 @@
pygments.lexers._csound_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_css_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_css_builtins.py
index fff992425c..60fa564b4a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_css_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_css_builtins.py
@@ -4,7 +4,7 @@
This file is autogenerated by scripts/get_css_properties.py
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py
index 5d3c96fb8f..c73a2e0ec8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_julia_builtins.py
@@ -4,7 +4,7 @@
Julia builtins.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
index 1fbe681931..f4d417e67b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_lasso_builtins.py
@@ -4,7 +4,7 @@
Built-in Lasso types, traits, methods, and members.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_lilypond_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_lilypond_builtins.py
index 013c85ca32..948c7625ad 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_lilypond_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_lilypond_builtins.py
@@ -4,7 +4,7 @@
LilyPond builtins.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
index 8170d5e825..b46fe4f021 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_lua_builtins.py
@@ -10,7 +10,7 @@
Run with `python -I` to regenerate.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -230,7 +230,7 @@ if __name__ == '__main__': # pragma: no cover
return m.groups()[0]
def get_lua_functions(version):
- f = urlopen('http://www.lua.org/manual/%s/' % version)
+ f = urlopen(f'http://www.lua.org/manual/{version}/')
r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
functions = []
for line in f:
@@ -258,14 +258,14 @@ if __name__ == '__main__': # pragma: no cover
with open(filename, 'w', encoding='utf-8') as fp:
fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(f'MODULES = {pprint.pformat(modules)}\n\n')
fp.write(footer)
def run():
version = get_newest_version()
functions = set()
for v in ('5.2', version):
- print('> Downloading function index for Lua %s' % v)
+ print(f'> Downloading function index for Lua {v}')
f = get_lua_functions(v)
print('> %d functions found, %d new:' %
(len(f), len(set(f) - functions)))
@@ -275,7 +275,7 @@ if __name__ == '__main__': # pragma: no cover
modules = {}
for full_function_name in functions:
- print('>> %s' % full_function_name)
+ print(f'>> {full_function_name}')
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
modules = {k: tuple(v) for k, v in modules.items()}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_luau_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_luau_builtins.py
new file mode 100644
index 0000000000..3b7dd15201
--- /dev/null
+++ b/contrib/python/Pygments/py3/pygments/lexers/_luau_builtins.py
@@ -0,0 +1,62 @@
+"""
+ pygments.lexers._luau_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Includes the builtins for Luau and Roblox.
+
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+LUAU_BUILTINS = {
+ 'bit32',
+ 'buffer',
+ 'coroutine',
+ 'debug',
+ 'math',
+ 'os',
+ 'string',
+ 'table',
+ 'utf8',
+}
+
+ROBLOX_BUILTINS = {
+ 'task',
+
+ 'Axes',
+ 'BrickColor',
+ 'CatalogSearchParams',
+ 'CFrame',
+ 'Color3',
+ 'ColorSequence',
+ 'ColorSequenceKeypoint',
+ 'DateTime',
+ 'DockWidgetPluginGuiInfo',
+ 'Faces',
+ 'FloatCurveKey',
+ 'Font',
+ 'Instance',
+ 'NumberRange',
+ 'NumberSequence',
+ 'NumberSequenceKeypoint',
+ 'OverlapParams',
+ 'PathWaypoint',
+ 'PhysicalProperties',
+ 'Random',
+ 'Ray',
+ 'RaycastParams',
+ 'RaycastResult',
+ 'RBXScriptConnection',
+ 'RBXScriptSignal',
+ 'Rect',
+ 'Region3',
+ 'Region3int16',
+ 'SharedTable',
+ 'TweenInfo',
+ 'UDim',
+ 'UDim2',
+ 'Vector2',
+ 'Vector2int16',
+ 'Vector3',
+ 'Vector3int16',
+} \ No newline at end of file
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mapping.py b/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
index aaec80232a..0a62565d50 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mapping.py
@@ -46,7 +46,7 @@ LEXERS = {
'BSTLexer': ('pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
'BareLexer': ('pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
+ 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BddLexer': ('pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)),
@@ -128,7 +128,7 @@ LEXERS = {
'DaxLexer': ('pygments.lexers.dax', 'Dax', ('dax',), ('*.dax',), ()),
'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
- 'DesktopLexer': ('pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ()),
+ 'DesktopLexer': ('pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ('application/x-desktop',)),
'DevicetreeLexer': ('pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
@@ -216,8 +216,8 @@ LEXERS = {
'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
- 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
- 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+ 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang', 'hy'), ('*.hy',), ('text/x-hy', 'application/x-hy')),
+ 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris',), ('*.hyb',), ('text/x-hybris', 'application/x-hybris')),
'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
'IconLexer': ('pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
@@ -234,6 +234,7 @@ LEXERS = {
'JMESPathLexer': ('pygments.lexers.jmespath', 'JMESPath', ('jmespath', 'jp'), ('*.jp',), ()),
'JSLTLexer': ('pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
+ 'JanetLexer': ('pygments.lexers.lisp', 'Janet', ('janet',), ('*.janet', '*.jdn'), ('text/x-janet', 'application/x-janet')),
'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), ('*.js.j2', '*.js.jinja2'), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
@@ -271,6 +272,7 @@ LEXERS = {
'LdaprcLexer': ('pygments.lexers.ldap', 'LDAP configuration file', ('ldapconf', 'ldaprc'), ('.ldaprc', 'ldaprc', 'ldap.conf'), ('text/x-ldapconf',)),
'LdifLexer': ('pygments.lexers.ldap', 'LDIF', ('ldif',), ('*.ldif',), ('text/x-ldif',)),
'Lean3Lexer': ('pygments.lexers.lean', 'Lean', ('lean', 'lean3'), ('*.lean',), ('text/x-lean', 'text/x-lean3')),
+ 'Lean4Lexer': ('pygments.lexers.lean', 'Lean4', ('lean4',), ('*.lean',), ('text/x-lean4',)),
'LessCssLexer': ('pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
'LilyPondLexer': ('pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()),
@@ -287,6 +289,7 @@ LEXERS = {
'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
+ 'LuauLexer': ('pygments.lexers.scripting', 'Luau', ('luau',), ('*.luau',), ()),
'MCFunctionLexer': ('pygments.lexers.minecraft', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)),
'MCSchemaLexer': ('pygments.lexers.minecraft', 'MCSchema', ('mcschema',), ('*.mcschema',), ('text/mcschema',)),
'MIMELexer': ('pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
@@ -314,6 +317,7 @@ LEXERS = {
'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
'Modula2Lexer': ('pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
+ 'MojoLexer': ('pygments.lexers.mojo', 'Mojo', ('mojo', '🔥'), ('*.mojo', '*.🔥'), ('text/x-mojo', 'application/x-mojo')),
'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
'MonteLexer': ('pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
@@ -362,6 +366,7 @@ LEXERS = {
'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
'OpenScadLexer': ('pygments.lexers.openscad', 'OpenSCAD', ('openscad',), ('*.scad',), ('application/x-openscad',)),
+ 'OrgLexer': ('pygments.lexers.markup', 'Org Mode', ('org', 'orgmode', 'org-mode'), ('*.org',), ('text/org',)),
'OutputLexer': ('pygments.lexers.special', 'Text output', ('output',), (), ()),
'PacmanConfLexer': ('pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
@@ -390,6 +395,7 @@ LEXERS = {
'ProcfileLexer': ('pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
'PromQLLexer': ('pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
+ 'PromelaLexer': ('pygments.lexers.c_like', 'Promela', ('promela',), ('*.pml', '*.prom', '*.prm', '*.promela', '*.pr', '*.pm'), ('text/x-promela',)),
'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
'PrqlLexer': ('pygments.lexers.prql', 'PRQL', ('prql',), ('*.prql',), ('application/prql', 'application/x-prql')),
@@ -400,7 +406,7 @@ LEXERS = {
'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python2Lexer': ('pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
'Python2TracebackLexer': ('pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
- 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
+ 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon', 'python-console'), (), ('text/x-python-doctest',)),
'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
'PythonUL4Lexer': ('pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()),
@@ -473,6 +479,7 @@ LEXERS = {
'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
'SolidityLexer': ('pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
+ 'SoongLexer': ('pygments.lexers.soong', 'Soong', ('androidbp', 'bp', 'soong'), ('Android.bp',), ()),
'SophiaLexer': ('pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()),
'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
@@ -494,6 +501,7 @@ LEXERS = {
'TAPLexer': ('pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
'TNTLexer': ('pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
'TOMLLexer': ('pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ('application/toml',)),
+ 'TactLexer': ('pygments.lexers.tact', 'Tact', ('tact',), ('*.tact',), ()),
'Tads3Lexer': ('pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
'TalLexer': ('pygments.lexers.tal', 'Tal', ('tal', 'uxntal'), ('*.tal',), ('text/x-uxntal',)),
'TasmLexer': ('pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
@@ -523,6 +531,7 @@ LEXERS = {
'TypoScriptCssDataLexer': ('pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
'TypoScriptHtmlDataLexer': ('pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
'TypoScriptLexer': ('pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
+ 'TypstLexer': ('pygments.lexers.typst', 'Typst', ('typst',), ('*.typ',), ('text/x-typst',)),
'UL4Lexer': ('pygments.lexers.ul4', 'UL4', ('ul4',), ('*.ul4',), ()),
'UcodeLexer': ('pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
'UniconLexer': ('pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
@@ -537,7 +546,7 @@ LEXERS = {
'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
- 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
+ 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas', 'visual-basic', 'visualbasic'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
'VelocityLexer': ('pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
index 3b89f1db7d..ca5bfe45a0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mql_builtins.py
@@ -4,7 +4,7 @@
Builtins for the MqlLexer.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
types = (
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py
index d25ed9c7c0..95bbe17392 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_mysql_builtins.py
@@ -6,7 +6,7 @@
Run with `python -I` to update.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -1321,10 +1321,10 @@ if __name__ == '__main__': # pragma: no cover
data = f.read()
# Line to start/end inserting
- re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % field_name, re.M | re.S)
+ re_match = re.compile(rf'^{field_name}\s*=\s*\($.*?^\s*\)$', re.M | re.S)
m = re_match.search(data)
if not m:
- raise ValueError('Could not find an existing definition for %s' % field_name)
+ raise ValueError(f'Could not find an existing definition for {field_name}')
new_block = format_lines(field_name, content)
data = data[:m.start()] + new_block + data[m.end():]
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
index 7fdfb41049..c5e9c34741 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_openedge_builtins.py
@@ -4,7 +4,7 @@
Builtin list for the OpenEdgeLexer.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
index 5366e75116..a1f0232a1a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_php_builtins.py
@@ -7,7 +7,7 @@
Run with `python -I` to regenerate.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -3299,7 +3299,7 @@ if __name__ == '__main__': # pragma: no cover
download = urlretrieve(PHP_MANUAL_URL)
with tarfile.open(download[0]) as tar:
tar.extractall()
- yield from glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB))
+ yield from glob.glob(f"{PHP_MANUAL_DIR}{PHP_REFERENCE_GLOB}")
os.remove(download[0])
def regenerate(filename, modules):
@@ -3311,7 +3311,7 @@ if __name__ == '__main__': # pragma: no cover
with open(filename, 'w', encoding='utf-8') as fp:
fp.write(header)
- fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(f'MODULES = {pprint.pformat(modules)}\n\n')
fp.write(footer)
def run():
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
index ecc2a7eef1..f29235598a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_postgres_builtins.py
@@ -6,7 +6,7 @@
Run with `python -I` to update itself.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -683,7 +683,8 @@ if __name__ == '__main__': # pragma: no cover
for t in tmp.split(']') if "(" not in t]:
for t in tmp.split(','):
t = t.strip()
- if not t: continue
+ if not t:
+ continue
dt.add(" ".join(t.split()))
dt = list(dt)
@@ -724,11 +725,10 @@ if __name__ == '__main__': # pragma: no cover
data = f.read()
# Line to start/end inserting
- re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
+ re_match = re.compile(rf'^{constname}\s*=\s*\($.*?^\s*\)$', re.M | re.S)
m = re_match.search(data)
if not m:
- raise ValueError('Could not find existing definition for %s' %
- (constname,))
+ raise ValueError(f'Could not find existing definition for {constname}')
new_block = format_lines(constname, content)
data = data[:m.start()] + new_block + data[m.end():]
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_qlik_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_qlik_builtins.py
index 697c12462b..f3af5230f2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_qlik_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_qlik_builtins.py
@@ -4,7 +4,7 @@
Qlik builtins.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_scheme_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_scheme_builtins.py
index 8f2de9dbab..d93b9fd3bb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_scheme_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_scheme_builtins.py
@@ -4,7 +4,7 @@
Scheme builtins.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
index f2adba858f..c4992938db 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_scilab_builtins.py
@@ -4,7 +4,7 @@
Builtin list for the ScilabLexer.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -3060,10 +3060,10 @@ if __name__ == '__main__': # pragma: no cover
def extract_completion(var_type):
s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- output = s.communicate('''\
+ output = s.communicate(f'''\
fd = mopen("/dev/stderr", "wt");
-mputl(strcat(completion("", "%s"), "||"), fd);
-mclose(fd)\n''' % var_type)
+mputl(strcat(completion("", "{var_type}"), "||"), fd);
+mclose(fd)\n''')
if '||' not in output[1]:
raise Exception(output[0])
# Invalid DISPLAY causes this to be output:
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
index 02d3e779dd..613ce70844 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_sourcemod_builtins.py
@@ -8,7 +8,7 @@
Run with `python -I` to regenerate.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -1136,13 +1136,13 @@ if __name__ == '__main__': # pragma: no cover
def run():
version = get_version()
- print('> Downloading function index for SourceMod %s' % version)
+ print(f'> Downloading function index for SourceMod {version}')
functions = get_sm_functions()
print('> %d functions found:' % len(functions))
functionlist = []
for full_function_name in functions:
- print('>> %s' % full_function_name)
+ print(f'>> {full_function_name}')
functionlist.append(full_function_name)
regenerate(__file__, functionlist)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
index 23f7fa6032..afe3143706 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_stan_builtins.py
@@ -5,7 +5,7 @@
This file contains the names of functions for Stan used by
``pygments.lexers.math.StanLexer. This is for Stan language version 2.29.0.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py
index 16251cff24..c806f50be1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_stata_builtins.py
@@ -4,7 +4,7 @@
Builtins for Stata
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py
index 96b16bf4a4..1fc792bb8f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_tsql_builtins.py
@@ -4,7 +4,7 @@
These are manually translated lists from https://msdn.microsoft.com.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py
index d891ad03d8..ff83574afc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_usd_builtins.py
@@ -4,7 +4,7 @@
A collection of known USD-related keywords, attributes, and types.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py
index e7e96e6908..fba2218e28 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_vbscript_builtins.py
@@ -5,7 +5,7 @@
These are manually translated lists from
http://www.indusoft.com/pdf/VBScript%20Reference.pdf.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py b/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
index ccc8740f47..e1171ebbcf 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/_vim_builtins.py
@@ -4,7 +4,7 @@
This file is autogenerated by scripts/get_vimkw.py
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/lexers/actionscript.py b/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
index e0e94a52e4..a23a2087e1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/actionscript.py
@@ -4,7 +4,7 @@
Lexers for ActionScript and MXML.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,8 +20,6 @@ __all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
class ActionScriptLexer(RegexLexer):
"""
For ActionScript source code.
-
- .. versionadded:: 0.9
"""
name = 'ActionScript'
@@ -29,6 +27,8 @@ class ActionScriptLexer(RegexLexer):
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
+ url = 'https://en.wikipedia.org/wiki/ActionScript'
+ version_added = '0.9'
flags = re.DOTALL
tokens = {
@@ -118,8 +118,6 @@ class ActionScriptLexer(RegexLexer):
class ActionScript3Lexer(RegexLexer):
"""
For ActionScript 3 source code.
-
- .. versionadded:: 0.11
"""
name = 'ActionScript 3'
@@ -128,6 +126,7 @@ class ActionScript3Lexer(RegexLexer):
filenames = ['*.as']
mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
'text/actionscript3']
+ version_added = '0.11'
identifier = r'[$a-zA-Z_]\w*'
typeidentifier = identifier + r'(?:\.<\w+>)?'
@@ -205,14 +204,13 @@ class MxmlLexer(RegexLexer):
"""
For MXML markup.
Nested AS3 in <script> tags is highlighted by the appropriate lexer.
-
- .. versionadded:: 1.1
"""
flags = re.MULTILINE | re.DOTALL
name = 'MXML'
aliases = ['mxml']
filenames = ['*.mxml']
- mimetimes = ['text/xml', 'application/xml']
+ url = 'https://en.wikipedia.org/wiki/MXML'
+ version_added = '1.1'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ada.py b/contrib/python/Pygments/py3/pygments/lexers/ada.py
index 6a5e64406c..ec99c8f3a0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ada.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ada.py
@@ -4,7 +4,7 @@
Lexers for Ada family languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,14 +22,14 @@ __all__ = ['AdaLexer']
class AdaLexer(RegexLexer):
"""
For Ada source code.
-
- .. versionadded:: 1.3
"""
name = 'Ada'
aliases = ['ada', 'ada95', 'ada2005']
filenames = ['*.adb', '*.ads', '*.ada']
mimetypes = ['text/x-ada']
+ url = 'https://www.adaic.org'
+ version_added = '1.3'
flags = re.MULTILINE | re.IGNORECASE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/agile.py b/contrib/python/Pygments/py3/pygments/lexers/agile.py
index c0c1a457a4..097beebb6b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/agile.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/agile.py
@@ -4,10 +4,12 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+# ruff: noqa: F401
+
from pygments.lexers.lisp import SchemeLexer
from pygments.lexers.jvm import IokeLexer, ClojureLexer
from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
diff --git a/contrib/python/Pygments/py3/pygments/lexers/algebra.py b/contrib/python/Pygments/py3/pygments/lexers/algebra.py
index 95f17540ef..98e1e66d6c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/algebra.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/algebra.py
@@ -4,7 +4,7 @@
Lexers for computer algebra systems.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,13 +21,12 @@ __all__ = ['GAPLexer', 'GAPConsoleLexer', 'MathematicaLexer', 'MuPADLexer',
class GAPLexer(RegexLexer):
"""
For GAP source code.
-
- .. versionadded:: 2.0
"""
name = 'GAP'
url = 'https://www.gap-system.org'
aliases = ['gap']
filenames = ['*.g', '*.gd', '*.gi', '*.gap']
+ version_added = '2.0'
tokens = {
'root': [
@@ -92,12 +91,12 @@ class GAPLexer(RegexLexer):
class GAPConsoleLexer(Lexer):
"""
For GAP console sessions. Modeled after JuliaConsoleLexer.
-
- .. versionadded:: 2.14
"""
name = 'GAP session'
aliases = ['gap-console', 'gap-repl']
filenames = ['*.tst']
+ url = 'https://www.gap-system.org'
+ version_added = '2.14'
def get_tokens_unprocessed(self, text):
gaplexer = GAPLexer(**self.options)
@@ -149,8 +148,6 @@ class GAPConsoleLexer(Lexer):
class MathematicaLexer(RegexLexer):
"""
Lexer for Mathematica source code.
-
- .. versionadded:: 2.0
"""
name = 'Mathematica'
url = 'http://www.wolfram.com/mathematica/'
@@ -160,6 +157,7 @@ class MathematicaLexer(RegexLexer):
'application/vnd.wolfram.mathematica',
'application/vnd.wolfram.mathematica.package',
'application/vnd.wolfram.cdf']
+ version_added = '2.0'
# http://reference.wolfram.com/mathematica/guide/Syntax.html
operators = (
@@ -171,7 +169,7 @@ class MathematicaLexer(RegexLexer):
punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
+ return '({})'.format('|'.join(re.escape(entry) for entry in entries))
tokens = {
'root': [
@@ -198,13 +196,12 @@ class MuPADLexer(RegexLexer):
"""
A MuPAD lexer.
Contributed by Christopher Creutzig <christopher@creutzig.de>.
-
- .. versionadded:: 0.8
"""
name = 'MuPAD'
url = 'http://www.mupad.com'
aliases = ['mupad']
filenames = ['*.mu']
+ version_added = '0.8'
tokens = {
'root': [
@@ -270,13 +267,12 @@ class MuPADLexer(RegexLexer):
class BCLexer(RegexLexer):
"""
A BC lexer.
-
- .. versionadded:: 2.1
"""
name = 'BC'
url = 'https://www.gnu.org/software/bc/'
aliases = ['bc']
filenames = ['*.bc']
+ version_added = '2.1'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ambient.py b/contrib/python/Pygments/py3/pygments/lexers/ambient.py
index deba0f3b0d..4441fd574b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ambient.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ambient.py
@@ -4,7 +4,7 @@
Lexers for AmbientTalk language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,13 @@ __all__ = ['AmbientTalkLexer']
class AmbientTalkLexer(RegexLexer):
"""
Lexer for AmbientTalk source code.
-
- .. versionadded:: 2.0
"""
name = 'AmbientTalk'
url = 'https://code.google.com/p/ambienttalk'
filenames = ['*.at']
aliases = ['ambienttalk', 'ambienttalk/2', 'at']
mimetypes = ['text/x-ambienttalk']
+ version_added = '2.0'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py b/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py
index 860dfd4421..6e4324533f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/amdgpu.py
@@ -4,7 +4,7 @@
Lexers for the AMDGPU ISA assembly.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,12 +19,12 @@ __all__ = ['AMDGPULexer']
class AMDGPULexer(RegexLexer):
"""
For AMD GPU assembly.
-
- .. versionadded:: 2.8
"""
name = 'AMDGPU'
aliases = ['amdgpu']
filenames = ['*.isa']
+ url = 'https://gpuopen.com/amd-isa-documentation'
+ version_added = '2.8'
flags = re.IGNORECASE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ampl.py b/contrib/python/Pygments/py3/pygments/lexers/ampl.py
index b5abcacfc8..ce3a774672 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ampl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ampl.py
@@ -4,7 +4,7 @@
Lexers for the AMPL language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,13 +18,12 @@ __all__ = ['AmplLexer']
class AmplLexer(RegexLexer):
"""
For AMPL source code.
-
- .. versionadded:: 2.2
"""
name = 'Ampl'
url = 'http://ampl.com/'
aliases = ['ampl']
filenames = ['*.run']
+ version_added = '2.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py b/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py
index a50219c3b2..352288d642 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/apdlexer.py
@@ -4,7 +4,7 @@
Lexers for ANSYS Parametric Design Language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,12 +20,13 @@ __all__ = ['apdlexer']
class apdlexer(RegexLexer):
"""
For APDL source code.
-
- .. versionadded:: 2.9
"""
name = 'ANSYS parametric design language'
aliases = ['ansys', 'apdl']
filenames = ['*.ans']
+ url = 'https://www.ansys.com'
+ version_added = '2.9'
+
flags = re.IGNORECASE
# list of elements
diff --git a/contrib/python/Pygments/py3/pygments/lexers/apl.py b/contrib/python/Pygments/py3/pygments/lexers/apl.py
index 815184da12..d95aff36d2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/apl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/apl.py
@@ -4,7 +4,7 @@
Lexers for APL.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,8 +18,6 @@ __all__ = ['APLLexer']
class APLLexer(RegexLexer):
"""
A simple APL lexer.
-
- .. versionadded:: 2.0
"""
name = 'APL'
url = 'https://en.m.wikipedia.org/wiki/APL_(programming_language)'
@@ -28,6 +26,7 @@ class APLLexer(RegexLexer):
'*.apl', '*.aplf', '*.aplo', '*.apln',
'*.aplc', '*.apli', '*.dyalog',
]
+ version_added = '2.0'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/archetype.py b/contrib/python/Pygments/py3/pygments/lexers/archetype.py
index e8312d78e5..b019c4f637 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/archetype.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/archetype.py
@@ -2,18 +2,14 @@
pygments.lexers.archetype
~~~~~~~~~~~~~~~~~~~~~~~~~
- Lexer for Archetype-related syntaxes, including:
-
- - ODIN syntax <https://github.com/openEHR/odin>
- - ADL syntax <http://www.openehr.org/releases/trunk/architecture/am/adl2.pdf>
- - cADL sub-syntax of ADL
+ Lexer for Archetype-related syntaxes, including ODIN, ADL and cADL.
For uses of this syntax, see the openEHR archetypes <http://www.openEHR.org/ckm>
Contributed by Thomas Beale <https://github.com/wolandscat>,
<https://bitbucket.org/thomas_beale>.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -139,13 +135,13 @@ class AtomsLexer(RegexLexer):
class OdinLexer(AtomsLexer):
"""
Lexer for ODIN syntax.
-
- .. versionadded:: 2.1
"""
name = 'ODIN'
aliases = ['odin']
filenames = ['*.odin']
mimetypes = ['text/odin']
+ url = 'https://github.com/openEHR/odin'
+ version_added = '2.1'
tokens = {
'path': [
@@ -188,12 +184,12 @@ class OdinLexer(AtomsLexer):
class CadlLexer(AtomsLexer):
"""
Lexer for cADL syntax.
-
- .. versionadded:: 2.1
"""
name = 'cADL'
aliases = ['cadl']
filenames = ['*.cadl']
+ url = 'https://specifications.openehr.org/releases/AM/latest/ADL2.html#_cadl_constraint_adl'
+ version_added = '2.1'
tokens = {
'path': [
@@ -253,13 +249,13 @@ class CadlLexer(AtomsLexer):
class AdlLexer(AtomsLexer):
"""
Lexer for ADL syntax.
-
- .. versionadded:: 2.1
"""
name = 'ADL'
aliases = ['adl']
filenames = ['*.adl', '*.adls', '*.adlf', '*.adlx']
+ url = 'https://specifications.openehr.org/releases/AM/latest/ADL2.html'
+ version_added = '2.1'
tokens = {
'whitespace': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/arrow.py b/contrib/python/Pygments/py3/pygments/lexers/arrow.py
index 894b64d315..8532476305 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/arrow.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/arrow.py
@@ -4,7 +4,7 @@
Lexer for Arrow.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,14 +22,13 @@ DECL = TYPES + r'(\s+)' + IDENT
class ArrowLexer(RegexLexer):
"""
Lexer for Arrow
-
- .. versionadded:: 2.7
"""
name = 'Arrow'
url = 'https://pypi.org/project/py-arrow-lang/'
aliases = ['arrow']
filenames = ['*.arw']
+ version_added = '2.7'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/arturo.py b/contrib/python/Pygments/py3/pygments/lexers/arturo.py
index 72258248f8..87c97378e7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/arturo.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/arturo.py
@@ -4,7 +4,7 @@
Lexer for the Arturo language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,14 +24,13 @@ class ArturoLexer(RegexLexer):
See `Arturo's Github <https://github.com/arturo-lang/arturo>`_
and `Arturo's Website <https://arturo-lang.io/>`_.
-
- .. versionadded:: 2.14.0
"""
name = 'Arturo'
aliases = ['arturo', 'art']
filenames = ['*.art']
url = 'https://arturo-lang.io/'
+ version_added = '2.14'
def __init__(self, **options):
self.handle_annotateds = get_bool_opt(options, 'handle_annotateds',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/asc.py b/contrib/python/Pygments/py3/pygments/lexers/asc.py
index e261f41156..971f2093e2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/asc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/asc.py
@@ -4,7 +4,7 @@
Lexer for various ASCII armored files.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -19,8 +19,6 @@ class AscLexer(RegexLexer):
"""
Lexer for ASCII armored files, containing `-----BEGIN/END ...-----` wrapped
base64 data.
-
- .. versionadded:: 2.10
"""
name = 'ASCII armored'
aliases = ['asc', 'pem']
@@ -32,6 +30,8 @@ class AscLexer(RegexLexer):
]
mimetypes = ['application/pgp-keys', 'application/pgp-encrypted',
'application/pgp-signature', 'application/pem-certificate-chain']
+ url = 'https://www.openpgp.org'
+ version_added = '2.10'
flags = re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/asm.py b/contrib/python/Pygments/py3/pygments/lexers/asm.py
index 0035c723f0..a049370bd7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/asm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/asm.py
@@ -4,7 +4,7 @@
Lexers for assembly languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,6 +31,8 @@ class GasLexer(RegexLexer):
aliases = ['gas', 'asm']
filenames = ['*.s', '*.S']
mimetypes = ['text/x-gas']
+ url = 'https://www.gnu.org/software/binutils'
+ version_added = ''
#: optional Comment or Whitespace
string = r'"(\\"|[^"])*"'
@@ -167,6 +169,8 @@ class ObjdumpLexer(RegexLexer):
aliases = ['objdump']
filenames = ['*.objdump']
mimetypes = ['text/x-objdump']
+ url = 'https://www.gnu.org/software/binutils'
+ version_added = ''
tokens = _objdump_lexer_tokens(GasLexer)
@@ -179,6 +183,8 @@ class DObjdumpLexer(DelegatingLexer):
aliases = ['d-objdump']
filenames = ['*.d-objdump']
mimetypes = ['text/x-d-objdump']
+ url = 'https://www.gnu.org/software/binutils'
+ version_added = ''
def __init__(self, **options):
super().__init__(DLexer, ObjdumpLexer, **options)
@@ -192,6 +198,8 @@ class CppObjdumpLexer(DelegatingLexer):
aliases = ['cpp-objdump', 'c++-objdumb', 'cxx-objdump']
filenames = ['*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump']
mimetypes = ['text/x-cpp-objdump']
+ url = 'https://www.gnu.org/software/binutils'
+ version_added = ''
def __init__(self, **options):
super().__init__(CppLexer, ObjdumpLexer, **options)
@@ -205,6 +213,9 @@ class CObjdumpLexer(DelegatingLexer):
aliases = ['c-objdump']
filenames = ['*.c-objdump']
mimetypes = ['text/x-c-objdump']
+ url = 'https://www.gnu.org/software/binutils'
+ version_added = ''
+
def __init__(self, **options):
super().__init__(CLexer, ObjdumpLexer, **options)
@@ -213,13 +224,13 @@ class CObjdumpLexer(DelegatingLexer):
class HsailLexer(RegexLexer):
"""
For HSAIL assembly code.
-
- .. versionadded:: 2.2
"""
name = 'HSAIL'
aliases = ['hsail', 'hsa']
filenames = ['*.hsail']
mimetypes = ['text/x-hsail']
+ url = 'https://en.wikipedia.org/wiki/Heterogeneous_System_Architecture#HSA_Intermediate_Layer'
+ version_added = '2.2'
string = r'"[^"]*?"'
identifier = r'[a-zA-Z_][\w.]*'
@@ -354,6 +365,7 @@ class LlvmLexer(RegexLexer):
aliases = ['llvm']
filenames = ['*.ll']
mimetypes = ['text/x-llvm']
+ version_added = ''
#: optional Comment or Whitespace
string = r'"[^"]*?"'
@@ -490,14 +502,13 @@ class LlvmLexer(RegexLexer):
class LlvmMirBodyLexer(RegexLexer):
"""
For LLVM MIR examples without the YAML wrapper.
-
- .. versionadded:: 2.6
"""
name = 'LLVM-MIR Body'
url = 'https://llvm.org/docs/MIRLangRef.html'
aliases = ['llvm-mir-body']
filenames = []
mimetypes = []
+ version_added = '2.6'
tokens = {
'root': [
@@ -635,13 +646,12 @@ class LlvmMirLexer(RegexLexer):
machine specific intermediate representation. It allows LLVM's developers to
see the state of the compilation process at various points, as well as test
individual pieces of the compiler.
-
- .. versionadded:: 2.6
"""
name = 'LLVM-MIR'
url = 'https://llvm.org/docs/MIRLangRef.html'
aliases = ['llvm-mir']
filenames = ['*.mir']
+ version_added = '2.6'
tokens = {
'root': [
@@ -715,6 +725,8 @@ class NasmLexer(RegexLexer):
aliases = ['nasm']
filenames = ['*.asm', '*.ASM', '*.nasm']
mimetypes = ['text/x-nasm']
+ url = 'https://nasm.us'
+ version_added = ''
# Tasm uses the same file endings, but TASM is not as common as NASM, so
# we prioritize NASM higher by default
@@ -746,7 +758,7 @@ class NasmLexer(RegexLexer):
(r'^\s*%', Comment.Preproc, 'preproc'),
include('whitespace'),
(identifier + ':', Name.Label),
- (r'(%s)(\s+)(equ)' % identifier,
+ (rf'({identifier})(\s+)(equ)',
bygroups(Name.Constant, Whitespace, Keyword.Declaration),
'instruction-args'),
(directives, Keyword, 'instruction-args'),
@@ -796,13 +808,13 @@ class NasmLexer(RegexLexer):
class NasmObjdumpLexer(ObjdumpLexer):
"""
For the output of ``objdump -d -M intel``.
-
- .. versionadded:: 2.0
"""
name = 'objdump-nasm'
aliases = ['objdump-nasm']
filenames = ['*.objdump-intel']
mimetypes = ['text/x-nasm-objdump']
+ url = 'https://www.gnu.org/software/binutils'
+ version_added = '2.0'
tokens = _objdump_lexer_tokens(NasmLexer)
@@ -815,6 +827,8 @@ class TasmLexer(RegexLexer):
aliases = ['tasm']
filenames = ['*.asm', '*.ASM', '*.tasm']
mimetypes = ['text/x-tasm']
+ url = 'https://en.wikipedia.org/wiki/Turbo_Assembler'
+ version_added = ''
identifier = r'[@a-z$._?][\w$.?#@~]*'
hexn = r'(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)'
@@ -844,7 +858,7 @@ class TasmLexer(RegexLexer):
include('whitespace'),
(identifier + ':', Name.Label),
(directives, Keyword, 'instruction-args'),
- (r'(%s)(\s+)(%s)' % (identifier, datatype),
+ (rf'({identifier})(\s+)({datatype})',
bygroups(Name.Constant, Whitespace, Keyword.Declaration),
'instruction-args'),
(declkw, Keyword.Declaration, 'instruction-args'),
@@ -896,12 +910,12 @@ class TasmLexer(RegexLexer):
class Ca65Lexer(RegexLexer):
"""
For ca65 assembler sources.
-
- .. versionadded:: 1.6
"""
name = 'ca65 assembler'
aliases = ['ca65']
filenames = ['*.s']
+ url = 'https://cc65.github.io'
+ version_added = '1.6'
flags = re.IGNORECASE
@@ -935,14 +949,13 @@ class Ca65Lexer(RegexLexer):
class Dasm16Lexer(RegexLexer):
"""
For DCPU-16 Assembly.
-
- .. versionadded:: 2.4
"""
name = 'DASM16'
url = 'http://0x10c.com/doc/dcpu-16.txt'
aliases = ['dasm16']
filenames = ['*.dasm16', '*.dasm']
mimetypes = ['text/x-dasm16']
+ version_added = '2.4'
INSTRUCTIONS = [
'SET',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/asn1.py b/contrib/python/Pygments/py3/pygments/lexers/asn1.py
index 30632cb4df..8aa23646d9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/asn1.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/asn1.py
@@ -4,7 +4,7 @@
Pygments lexers for ASN.1.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -118,8 +118,6 @@ class Asn1Lexer(RegexLexer):
"""
Lexer for ASN.1 module definition
-
- .. versionadded:: 2.16
"""
flags = re.MULTILINE
@@ -128,6 +126,7 @@ class Asn1Lexer(RegexLexer):
aliases = ['asn1']
filenames = ["*.asn1"]
url = "https://www.itu.int/ITU-T/studygroups/com17/languages/X.680-0207.pdf"
+ version_added = '2.16'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/automation.py b/contrib/python/Pygments/py3/pygments/lexers/automation.py
index f0f7c5b946..e64aa9ae88 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/automation.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/automation.py
@@ -4,7 +4,7 @@
Lexers for automation scripting languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['AutohotkeyLexer', 'AutoItLexer']
class AutohotkeyLexer(RegexLexer):
"""
For autohotkey source code.
-
- .. versionadded:: 1.4
"""
name = 'autohotkey'
url = 'http://www.autohotkey.com/'
aliases = ['autohotkey', 'ahk']
filenames = ['*.ahk', '*.ahkl']
mimetypes = ['text/x-autohotkey']
+ version_added = '1.4'
tokens = {
'root': [
@@ -199,14 +198,13 @@ class AutoItLexer(RegexLexer):
AutoIt is a freeware BASIC-like scripting language
designed for automating the Windows GUI and general scripting
-
- .. versionadded:: 1.6
"""
name = 'AutoIt'
url = 'http://www.autoitscript.com/site/autoit/'
aliases = ['autoit']
filenames = ['*.au3']
mimetypes = ['text/x-autoit']
+ version_added = '1.6'
# Keywords, functions, macros from au3.keywords.properties
# which can be found in AutoIt installed directory, e.g.
@@ -337,15 +335,15 @@ class AutoItLexer(RegexLexer):
include('garbage'),
],
'commands': [
- (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
+ (r'(?i)(\s*)({})\b'.format('|'.join(keywords)),
bygroups(Text, Name.Builtin)),
],
'builtInFunctions': [
- (r'(?i)(%s)\b' % '|'.join(functions),
+ (r'(?i)({})\b'.format('|'.join(functions)),
Name.Function),
],
'builtInMarcros': [
- (r'(?i)(%s)\b' % '|'.join(macros),
+ (r'(?i)({})\b'.format('|'.join(macros)),
Name.Variable.Global),
],
'labels': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/bare.py b/contrib/python/Pygments/py3/pygments/lexers/bare.py
index fce5ae0730..9049ea8467 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/bare.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/bare.py
@@ -4,7 +4,7 @@
Lexer for the BARE schema.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,13 +17,12 @@ __all__ = ['BareLexer']
class BareLexer(RegexLexer):
"""
For BARE schema source.
-
- .. versionadded:: 2.7
"""
name = 'BARE'
url = 'https://baremessages.org'
filenames = ['*.bare']
aliases = ['bare']
+ version_added = '2.7'
keywords = [
'type',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/basic.py b/contrib/python/Pygments/py3/pygments/lexers/basic.py
index 8837dacbae..9cfdf46228 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/basic.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/basic.py
@@ -4,7 +4,7 @@
Lexers for BASIC like languages (other than VB.net).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,8 +23,6 @@ __all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
class BlitzMaxLexer(RegexLexer):
"""
For BlitzMax source code.
-
- .. versionadded:: 1.4
"""
name = 'BlitzMax'
@@ -32,14 +30,14 @@ class BlitzMaxLexer(RegexLexer):
aliases = ['blitzmax', 'bmax']
filenames = ['*.bmx']
mimetypes = ['text/x-bmx']
+ version_added = '1.4'
bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
bmax_sktypes = r'@{1,2}|[!#$%]'
bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
bmax_name = r'[a-z_]\w*'
- bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
- r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
- (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
+ bmax_var = (rf'({bmax_name})(?:(?:([ \t]*)({bmax_sktypes})|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
+ rf'|([ \t]*)(:)([ \t]*)(?:{bmax_lktypes}|({bmax_name})))(?:([ \t]*)(Ptr))?)')
bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
flags = re.MULTILINE | re.IGNORECASE
@@ -60,16 +58,14 @@ class BlitzMaxLexer(RegexLexer):
(r'\$[0-9a-f]+', Number.Hex),
(r'\%[10]+', Number.Bin),
# Other
- (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
- (bmax_vopwords), Operator),
+ (rf'(?:(?:(:)?([ \t]*)(:?{bmax_vopwords}|([+\-*/&|~]))|Or|And|Not|[=<>^]))', Operator),
(r'[(),.:\[\]]', Punctuation),
(r'(?:#[\w \t]*)', Name.Label),
(r'(?:\?[\w \t]*)', Comment.Preproc),
# Identifiers
- (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
+ (rf'\b(New)\b([ \t]?)([(]?)({bmax_name})',
bygroups(Keyword.Reserved, Whitespace, Punctuation, Name.Class)),
- (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
- (bmax_name, bmax_name),
+ (rf'\b(Import|Framework|Module)([ \t]+)({bmax_name}\.{bmax_name})',
bygroups(Keyword.Reserved, Whitespace, Keyword.Namespace)),
(bmax_func, bygroups(Name.Function, Whitespace, Keyword.Type,
Operator, Whitespace, Punctuation, Whitespace,
@@ -78,7 +74,7 @@ class BlitzMaxLexer(RegexLexer):
(bmax_var, bygroups(Name.Variable, Whitespace, Keyword.Type, Operator,
Whitespace, Punctuation, Whitespace, Keyword.Type,
Name.Class, Whitespace, Keyword.Type)),
- (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
+ (rf'\b(Type|Extends)([ \t]+)({bmax_name})',
bygroups(Keyword.Reserved, Whitespace, Name.Class)),
# Keywords
(r'\b(Ptr)\b', Keyword.Type),
@@ -102,7 +98,7 @@ class BlitzMaxLexer(RegexLexer):
'RestoreData'), prefix=r'\b', suffix=r'\b'),
Keyword.Reserved),
# Final resolve (for variable names and such)
- (r'(%s)' % (bmax_name), Name.Variable),
+ (rf'({bmax_name})', Name.Variable),
],
'string': [
(r'""', String.Double),
@@ -115,8 +111,6 @@ class BlitzMaxLexer(RegexLexer):
class BlitzBasicLexer(RegexLexer):
"""
For BlitzBasic source code.
-
- .. versionadded:: 2.0
"""
name = 'BlitzBasic'
@@ -124,11 +118,11 @@ class BlitzBasicLexer(RegexLexer):
aliases = ['blitzbasic', 'b3d', 'bplus']
filenames = ['*.bb', '*.decls']
mimetypes = ['text/x-bb']
+ version_added = '2.0'
bb_sktypes = r'@{1,2}|[#$%]'
bb_name = r'[a-z]\w*'
- bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
- (bb_name, bb_sktypes, bb_name)
+ bb_var = (rf'({bb_name})(?:([ \t]*)({bb_sktypes})|([ \t]*)([.])([ \t]*)(?:({bb_name})))?')
flags = re.MULTILINE | re.IGNORECASE
tokens = {
@@ -153,21 +147,21 @@ class BlitzBasicLexer(RegexLexer):
Operator),
(r'([+\-*/~=<>^])', Operator),
(r'[(),:\[\]\\]', Punctuation),
- (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
+ (rf'\.([ \t]*)({bb_name})', Name.Label),
# Identifiers
- (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
+ (rf'\b(New)\b([ \t]+)({bb_name})',
bygroups(Keyword.Reserved, Whitespace, Name.Class)),
- (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
+ (rf'\b(Gosub|Goto)\b([ \t]+)({bb_name})',
bygroups(Keyword.Reserved, Whitespace, Name.Label)),
- (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
+ (rf'\b(Object)\b([ \t]*)([.])([ \t]*)({bb_name})\b',
bygroups(Operator, Whitespace, Punctuation, Whitespace, Name.Class)),
- (r'\b%s\b([ \t]*)(\()' % bb_var,
+ (rf'\b{bb_var}\b([ \t]*)(\()',
bygroups(Name.Function, Whitespace, Keyword.Type, Whitespace, Punctuation,
Whitespace, Name.Class, Whitespace, Punctuation)),
- (r'\b(Function)\b([ \t]+)%s' % bb_var,
+ (rf'\b(Function)\b([ \t]+){bb_var}',
bygroups(Keyword.Reserved, Whitespace, Name.Function, Whitespace, Keyword.Type,
Whitespace, Punctuation, Whitespace, Name.Class)),
- (r'\b(Type)([ \t]+)(%s)' % (bb_name),
+ (rf'\b(Type)([ \t]+)({bb_name})',
bygroups(Keyword.Reserved, Whitespace, Name.Class)),
# Keywords
(r'\b(Pi|True|False|Null)\b', Keyword.Constant),
@@ -194,17 +188,15 @@ class BlitzBasicLexer(RegexLexer):
class MonkeyLexer(RegexLexer):
"""
- For
- `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
- source code.
-
- .. versionadded:: 1.6
+ For Monkey source code.
"""
name = 'Monkey'
aliases = ['monkey']
filenames = ['*.monkey']
mimetypes = ['text/x-monkey']
+ url = 'https://blitzresearch.itch.io/monkeyx'
+ version_added = '1.6'
name_variable = r'[a-z_]\w*'
name_function = r'[A-Z]\w*'
@@ -238,7 +230,7 @@ class MonkeyLexer(RegexLexer):
(r'\$[0-9a-fA-Z]+', Number.Hex),
(r'\%[10]+', Number.Bin),
# Native data types
- (r'\b%s\b' % keyword_type, Keyword.Type),
+ (rf'\b{keyword_type}\b', Keyword.Type),
# Exception handling
(r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
(r'Throwable', Name.Exception),
@@ -274,21 +266,21 @@ class MonkeyLexer(RegexLexer):
(r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
(r'[(){}!#,.:]', Punctuation),
# catch the rest
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_function, Name.Function),
- (r'%s\b' % name_variable, Name.Variable),
+ (rf'{name_constant}\b', Name.Constant),
+ (rf'{name_function}\b', Name.Function),
+ (rf'{name_variable}\b', Name.Variable),
],
'funcname': [
- (r'(?i)%s\b' % name_function, Name.Function),
+ (rf'(?i){name_function}\b', Name.Function),
(r':', Punctuation, 'classname'),
(r'\s+', Whitespace),
(r'\(', Punctuation, 'variables'),
(r'\)', Punctuation, '#pop')
],
'classname': [
- (r'%s\.' % name_module, Name.Namespace),
- (r'%s\b' % keyword_type, Keyword.Type),
- (r'%s\b' % name_class, Name.Class),
+ (rf'{name_module}\.', Name.Namespace),
+ (rf'{keyword_type}\b', Keyword.Type),
+ (rf'{name_class}\b', Name.Class),
# array (of given size)
(r'(\[)(\s*)(\d*)(\s*)(\])',
bygroups(Punctuation, Whitespace, Number.Integer, Whitespace, Punctuation)),
@@ -300,9 +292,9 @@ class MonkeyLexer(RegexLexer):
default('#pop')
],
'variables': [
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_variable, Name.Variable),
- (r'%s' % keyword_type_special, Keyword.Type),
+ (rf'{name_constant}\b', Name.Constant),
+ (rf'{name_variable}\b', Name.Variable),
+ (rf'{keyword_type_special}', Keyword.Type),
(r'\s+', Whitespace),
(r':', Punctuation, 'classname'),
(r',', Punctuation, '#push'),
@@ -325,12 +317,12 @@ class MonkeyLexer(RegexLexer):
class CbmBasicV2Lexer(RegexLexer):
"""
For CBM BASIC V2 sources.
-
- .. versionadded:: 1.6
"""
name = 'CBM BASIC V2'
aliases = ['cbmbas']
filenames = ['*.bas']
+ url = 'https://en.wikipedia.org/wiki/Commodore_BASIC'
+ version_added = '1.6'
flags = re.IGNORECASE
@@ -362,17 +354,15 @@ class CbmBasicV2Lexer(RegexLexer):
class QBasicLexer(RegexLexer):
"""
- For
- `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
- source code.
-
- .. versionadded:: 2.0
+ For QBasic source code.
"""
name = 'QBasic'
aliases = ['qbasic', 'basic']
filenames = ['*.BAS', '*.bas']
mimetypes = ['text/basic']
+ url = 'https://en.wikipedia.org/wiki/QBasic'
+ version_added = '2.0'
declarations = ('DATA', 'LET')
@@ -475,26 +465,26 @@ class QBasicLexer(RegexLexer):
# can't use regular \b because of X$()
# XXX: use words() here
'declarations': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
+ (r'\b({})(?=\(|\b)'.format('|'.join(map(re.escape, declarations))),
Keyword.Declaration),
],
'functions': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
+ (r'\b({})(?=\(|\b)'.format('|'.join(map(re.escape, functions))),
Keyword.Reserved),
],
'metacommands': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
+ (r'\b({})(?=\(|\b)'.format('|'.join(map(re.escape, metacommands))),
Keyword.Constant),
],
'operators': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
+ (r'\b({})(?=\(|\b)'.format('|'.join(map(re.escape, operators))), Operator.Word),
],
'statements': [
- (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
+ (r'\b({})\b'.format('|'.join(map(re.escape, statements))),
Keyword.Reserved),
],
'keywords': [
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'\b({})\b'.format('|'.join(keywords)), Keyword),
],
}
@@ -506,12 +496,13 @@ class QBasicLexer(RegexLexer):
class VBScriptLexer(RegexLexer):
"""
VBScript is scripting language that is modeled on Visual Basic.
-
- .. versionadded:: 2.4
"""
name = 'VBScript'
aliases = ['vbscript']
filenames = ['*.vbs', '*.VBS']
+ url = 'https://learn.microsoft.com/en-us/previous-versions/t0aew7h6(v=vs.85)'
+ version_added = '2.4'
+
flags = re.IGNORECASE
tokens = {
@@ -573,8 +564,6 @@ class BBCBasicLexer(RegexLexer):
"""
BBC Basic was supplied on the BBC Micro, and later Acorn RISC OS.
It is also used by BBC Basic For Windows.
-
- .. versionadded:: 2.4
"""
base_keywords = ['OTHERWISE', 'AND', 'DIV', 'EOR', 'MOD', 'OR', 'ERROR',
'LINE', 'OFF', 'STEP', 'SPC', 'TAB', 'ELSE', 'THEN',
@@ -607,6 +596,8 @@ class BBCBasicLexer(RegexLexer):
name = 'BBC Basic'
aliases = ['bbcbasic']
filenames = ['*.bbc']
+ url = 'https://www.bbcbasic.co.uk/bbcbasic.html'
+ version_added = '2.4'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/bdd.py b/contrib/python/Pygments/py3/pygments/lexers/bdd.py
index 8cf47f8de6..01688d572c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/bdd.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/bdd.py
@@ -3,9 +3,8 @@
~~~~~~~~~~~~~~~~~~~
Lexer for BDD(Behavior-driven development).
- More information: https://en.wikipedia.org/wiki/Behavior-driven_development
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +18,14 @@ class BddLexer(RegexLexer):
"""
Lexer for BDD(Behavior-driven development), which highlights not only
keywords, but also comments, punctuations, strings, numbers, and variables.
-
- .. versionadded:: 2.11
"""
name = 'Bdd'
aliases = ['bdd']
filenames = ['*.feature']
mimetypes = ['text/x-bdd']
+ url = 'https://en.wikipedia.org/wiki/Behavior-driven_development'
+ version_added = '2.11'
step_keywords = (r'Given|When|Then|Add|And|Feature|Scenario Outline|'
r'Scenario|Background|Examples|But')
diff --git a/contrib/python/Pygments/py3/pygments/lexers/berry.py b/contrib/python/Pygments/py3/pygments/lexers/berry.py
index e078fa1674..873b723478 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/berry.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/berry.py
@@ -4,7 +4,7 @@
Lexer for Berry.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,14 +17,14 @@ __all__ = ['BerryLexer']
class BerryLexer(RegexLexer):
"""
- For `berry <http://github.com/berry-lang/berry>`_ source code.
-
- .. versionadded:: 2.12.0
+ For Berry source code.
"""
name = 'Berry'
aliases = ['berry', 'be']
filenames = ['*.be']
mimetypes = ['text/x-berry', 'application/x-berry']
+ url = 'https://berry-lang.github.io'
+ version_added = '2.12'
_name = r'\b[^\W\d]\w*'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/bibtex.py b/contrib/python/Pygments/py3/pygments/lexers/bibtex.py
index 34883cd839..15e4e04fcb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/bibtex.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/bibtex.py
@@ -4,7 +4,7 @@
Lexers for BibTeX bibliography data and styles
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,15 +21,15 @@ __all__ = ['BibTeXLexer', 'BSTLexer']
class BibTeXLexer(ExtendedRegexLexer):
"""
A lexer for BibTeX bibliography data format.
-
- .. versionadded:: 2.2
"""
name = 'BibTeX'
aliases = ['bibtex', 'bib']
filenames = ['*.bib']
mimetypes = ["text/x-bibtex"]
+ version_added = '2.2'
flags = re.IGNORECASE
+ url = 'https://texfaq.org/FAQ-BibTeXing'
ALLOWED_CHARS = r'@!$&*+\-./:;<>?\[\\\]^`|~'
IDENTIFIER = '[{}][{}]*'.format('a-z_' + ALLOWED_CHARS, r'\w' + ALLOWED_CHARS)
@@ -116,14 +116,14 @@ class BibTeXLexer(ExtendedRegexLexer):
class BSTLexer(RegexLexer):
"""
A lexer for BibTeX bibliography styles.
-
- .. versionadded:: 2.2
"""
name = 'BST'
aliases = ['bst', 'bst-pybtex']
filenames = ['*.bst']
+ version_added = '2.2'
flags = re.IGNORECASE | re.MULTILINE
+ url = 'https://texfaq.org/FAQ-BibTeXing'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/blueprint.py b/contrib/python/Pygments/py3/pygments/lexers/blueprint.py
index ec5c6188e3..b199c42fd3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/blueprint.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/blueprint.py
@@ -4,7 +4,7 @@
Lexer for the Blueprint UI markup language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -28,8 +28,6 @@ __all__ = ["BlueprintLexer"]
class BlueprintLexer(RegexLexer):
"""
For Blueprint UI markup.
-
- .. versionadded:: 2.16
"""
name = "Blueprint"
@@ -37,6 +35,7 @@ class BlueprintLexer(RegexLexer):
filenames = ["*.blp"]
mimetypes = ["text/x-blueprint"]
url = "https://gitlab.gnome.org/jwestman/blueprint-compiler"
+ version_added = '2.16'
flags = re.IGNORECASE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/boa.py b/contrib/python/Pygments/py3/pygments/lexers/boa.py
index f7baf3cd2e..211f9fd0a1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/boa.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/boa.py
@@ -4,7 +4,7 @@
Lexers for the Boa language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,13 +17,13 @@ __all__ = ['BoaLexer']
class BoaLexer(RegexLexer):
"""
- Lexer for the `Boa <http://boa.cs.iastate.edu/docs/>`_ language.
-
- .. versionadded:: 2.4
+ Lexer for the Boa language.
"""
name = 'Boa'
aliases = ['boa']
filenames = ['*.boa']
+ url = 'https://boa.cs.iastate.edu/docs'
+ version_added = '2.4'
reserved = words(
('input', 'output', 'of', 'weight', 'before', 'after', 'stop',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/bqn.py b/contrib/python/Pygments/py3/pygments/lexers/bqn.py
index af84b4d043..b0794ee03d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/bqn.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/bqn.py
@@ -4,7 +4,7 @@
Lexer for BQN.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['BQNLexer']
class BQNLexer(RegexLexer):
"""
A simple BQN lexer.
-
- .. versionadded:: 2.16
"""
name = 'BQN'
url = 'https://mlochbaum.github.io/BQN/index.html'
aliases = ['bqn']
filenames = ['*.bqn']
mimetypes = []
+ version_added = '2.16'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/business.py b/contrib/python/Pygments/py3/pygments/lexers/business.py
index d2f2dd3ae6..3d81df54d1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/business.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/business.py
@@ -4,7 +4,7 @@
Lexers for "business-oriented" languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,13 +23,14 @@ __all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
class CobolLexer(RegexLexer):
"""
Lexer for OpenCOBOL code.
-
- .. versionadded:: 1.6
"""
name = 'COBOL'
aliases = ['cobol']
filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
mimetypes = ['text/x-cobol']
+ url = 'https://en.wikipedia.org/wiki/COBOL'
+ version_added = '1.6'
+
flags = re.IGNORECASE | re.MULTILINE
# Data Types: by PICTURE and USAGE
@@ -221,13 +222,14 @@ class CobolLexer(RegexLexer):
class CobolFreeformatLexer(CobolLexer):
"""
Lexer for Free format OpenCOBOL code.
-
- .. versionadded:: 1.6
"""
name = 'COBOLFree'
aliases = ['cobolfree']
filenames = ['*.cbl', '*.CBL']
mimetypes = []
+ url = 'https://opencobol.add1tocobol.com'
+ version_added = '1.6'
+
flags = re.IGNORECASE | re.MULTILINE
tokens = {
@@ -240,13 +242,13 @@ class CobolFreeformatLexer(CobolLexer):
class ABAPLexer(RegexLexer):
"""
Lexer for ABAP, SAP's integrated language.
-
- .. versionadded:: 1.1
"""
name = 'ABAP'
aliases = ['abap']
filenames = ['*.abap', '*.ABAP']
mimetypes = ['text/x-abap']
+ url = 'https://community.sap.com/topics/abap'
+ version_added = '1.1'
flags = re.IGNORECASE | re.MULTILINE
@@ -448,15 +450,14 @@ class ABAPLexer(RegexLexer):
class OpenEdgeLexer(RegexLexer):
"""
- Lexer for `OpenEdge ABL (formerly Progress)
- <http://web.progress.com/en/openedge/abl.html>`_ source code.
-
- .. versionadded:: 1.5
+ Lexer for OpenEdge ABL (formerly Progress) source code.
"""
name = 'OpenEdge ABL'
aliases = ['openedge', 'abl', 'progress']
filenames = ['*.p', '*.cls']
mimetypes = ['text/x-openedge', 'application/x-openedge']
+ url = 'https://www.progress.com/openedge/features/abl'
+ version_added = '1.5'
types = (r'(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
@@ -517,20 +518,20 @@ class OpenEdgeLexer(RegexLexer):
class GoodDataCLLexer(RegexLexer):
"""
- Lexer for `GoodData-CL
- <https://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
-com/gooddata/processor/COMMANDS.txt>`_
- script files.
-
- .. versionadded:: 1.4
+ Lexer for GoodData-CL script files.
"""
name = 'GoodData-CL'
aliases = ['gooddata-cl']
filenames = ['*.gdc']
mimetypes = ['text/x-gooddata-cl']
+ url = 'https://github.com/gooddata/GoodData-CL'
+ version_added = '1.4'
flags = re.IGNORECASE
+
+ # Syntax:
+ # https://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt
tokens = {
'root': [
# Comments
@@ -564,17 +565,15 @@ com/gooddata/processor/COMMANDS.txt>`_
class MaqlLexer(RegexLexer):
"""
- Lexer for `GoodData MAQL
- <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
- scripts.
-
- .. versionadded:: 1.4
+ Lexer for GoodData MAQL scripts.
"""
name = 'MAQL'
aliases = ['maql']
filenames = ['*.maql']
mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
+ url = 'https://help.gooddata.com/doc/enterprise/en/dashboards-and-insights/maql-analytical-query-language'
+ version_added = '1.4'
flags = re.IGNORECASE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py b/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
index ba6702f561..0f83b71935 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/c_cpp.py
@@ -4,7 +4,7 @@
Lexers for C/C++ languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -294,6 +294,8 @@ class CLexer(CFamilyLexer):
aliases = ['c']
filenames = ['*.c', '*.h', '*.idc', '*.x[bp]m']
mimetypes = ['text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap']
+ url = 'https://en.wikipedia.org/wiki/C_(programming_language)'
+ version_added = ''
priority = 0.1
tokens = {
@@ -347,6 +349,7 @@ class CppLexer(CFamilyLexer):
'*.cc', '*.hh', '*.cxx', '*.hxx',
'*.C', '*.H', '*.cp', '*.CPP', '*.tpp']
mimetypes = ['text/x-c++hdr', 'text/x-c++src']
+ version_added = ''
priority = 0.1
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/c_like.py b/contrib/python/Pygments/py3/pygments/lexers/c_like.py
index a7379c9bb2..f5073c74ae 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/c_like.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/c_like.py
@@ -4,7 +4,7 @@
Lexers for other C-like languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,19 +20,18 @@ from pygments.lexers import _mql_builtins
__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
'CudaLexer', 'SwigLexer', 'MqlLexer', 'ArduinoLexer', 'CharmciLexer',
- 'OmgIdlLexer']
+ 'OmgIdlLexer', 'PromelaLexer']
class PikeLexer(CppLexer):
"""
For `Pike <http://pike.lysator.liu.se/>`_ source code.
-
- .. versionadded:: 2.0
"""
name = 'Pike'
aliases = ['pike']
filenames = ['*.pike', '*.pmod']
mimetypes = ['text/x-pike']
+ version_added = '2.0'
tokens = {
'statements': [
@@ -68,13 +67,12 @@ class NesCLexer(CLexer):
"""
For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
directives.
-
- .. versionadded:: 2.0
"""
name = 'nesC'
aliases = ['nesc']
filenames = ['*.nc']
mimetypes = ['text/x-nescsrc']
+ version_added = '2.0'
tokens = {
'statements': [
@@ -95,14 +93,15 @@ class NesCLexer(CLexer):
class ClayLexer(RegexLexer):
"""
- For `Clay <http://claylabs.com/clay/>`_ source.
-
- .. versionadded:: 2.0
+ For Clay source.
"""
name = 'Clay'
filenames = ['*.clay']
aliases = ['clay']
mimetypes = ['text/x-clay']
+ url = 'http://claylabs.com/clay'
+ version_added = '2.0'
+
tokens = {
'root': [
(r'\s+', Whitespace),
@@ -147,13 +146,13 @@ class ClayLexer(RegexLexer):
class ECLexer(CLexer):
"""
For eC source code with preprocessor directives.
-
- .. versionadded:: 1.5
"""
name = 'eC'
aliases = ['ec']
filenames = ['*.ec', '*.eh']
mimetypes = ['text/x-echdr', 'text/x-ecsrc']
+ url = 'https://ec-lang.org'
+ version_added = '1.5'
tokens = {
'statements': [
@@ -180,13 +179,13 @@ class ECLexer(CLexer):
class ValaLexer(RegexLexer):
"""
For Vala source code with preprocessor directives.
-
- .. versionadded:: 1.1
"""
name = 'Vala'
aliases = ['vala', 'vapi']
filenames = ['*.vala', '*.vapi']
mimetypes = ['text/x-vala']
+ url = 'https://vala.dev'
+ version_added = '1.1'
tokens = {
'whitespace': [
@@ -277,15 +276,14 @@ class ValaLexer(RegexLexer):
class CudaLexer(CLexer):
"""
- For NVIDIA `CUDAâ„¢ <http://developer.nvidia.com/category/zone/cuda-zone>`_
- source.
-
- .. versionadded:: 1.6
+ For NVIDIA CUDAâ„¢ source.
"""
name = 'CUDA'
filenames = ['*.cu', '*.cuh']
aliases = ['cuda', 'cu']
mimetypes = ['text/x-cuda']
+ url = 'https://developer.nvidia.com/category/zone/cuda-zone'
+ version_added = '1.6'
function_qualifiers = {'__device__', '__global__', '__host__',
'__noinline__', '__forceinline__'}
@@ -326,13 +324,12 @@ class CudaLexer(CLexer):
class SwigLexer(CppLexer):
"""
For `SWIG <http://www.swig.org/>`_ source code.
-
- .. versionadded:: 2.0
"""
name = 'SWIG'
aliases = ['swig']
filenames = ['*.swg', '*.i']
mimetypes = ['text/swig']
+ version_added = '2.0'
priority = 0.04 # Lower than C/C++ and Objective C/C++
tokens = {
@@ -393,13 +390,12 @@ class MqlLexer(CppLexer):
"""
For `MQL4 <http://docs.mql4.com/>`_ and
`MQL5 <http://www.mql5.com/en/docs>`_ source code.
-
- .. versionadded:: 2.0
"""
name = 'MQL'
aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
filenames = ['*.mq4', '*.mq5', '*.mqh']
mimetypes = ['text/x-mql']
+ version_added = '2.0'
tokens = {
'statements': [
@@ -420,14 +416,13 @@ class ArduinoLexer(CppLexer):
This is an extension of the CppLexer, as the Arduino® Language is a superset
of C++
-
- .. versionadded:: 2.1
"""
name = 'Arduino'
aliases = ['arduino']
filenames = ['*.ino']
mimetypes = ['text/x-arduino']
+ version_added = '2.1'
# Language sketch main structure functions
structure = {'setup', 'loop'}
@@ -545,13 +540,12 @@ class ArduinoLexer(CppLexer):
class CharmciLexer(CppLexer):
"""
For `Charm++ <https://charm.cs.illinois.edu>`_ interface files (.ci).
-
- .. versionadded:: 2.4
"""
name = 'Charmci'
aliases = ['charmci']
filenames = ['*.ci']
+ version_added = '2.4'
mimetypes = []
@@ -575,8 +569,6 @@ class CharmciLexer(CppLexer):
class OmgIdlLexer(CLexer):
"""
Lexer for Object Management Group Interface Definition Language.
-
- .. versionadded:: 2.9
"""
name = 'OMG Interface Definition Language'
@@ -584,6 +576,7 @@ class OmgIdlLexer(CLexer):
aliases = ['omg-idl']
filenames = ['*.idl', '*.pidl']
mimetypes = []
+ version_added = '2.9'
scoped_name = r'((::)?\w+)+'
@@ -664,3 +657,82 @@ class OmgIdlLexer(CLexer):
include('annotation_appl'),
],
}
+
+
+class PromelaLexer(CLexer):
+ """
+ For the Promela language used with SPIN.
+ """
+
+ name = 'Promela'
+ aliases = ['promela']
+ filenames = ['*.pml', '*.prom', '*.prm', '*.promela', '*.pr', '*.pm']
+ mimetypes = ['text/x-promela']
+ url = 'https://spinroot.com/spin/whatispin.html'
+ version_added = '2.18'
+
+ # Promela's language reference:
+ # https://spinroot.com/spin/Man/promela.html
+ # Promela's grammar definition:
+ # https://spinroot.com/spin/Man/grammar.html
+
+ tokens = {
+ 'statements': [
+ (r'(\[\]|<>|/\\|\\/)|(U|W|V)\b', Operator), # LTL Operators
+ (r'@', Punctuation), #remoterefs
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
+ inherit
+ ],
+ 'types': [
+ # Predefined (data types)
+ (words((
+ 'bit', 'bool', 'byte', 'pid', 'short', 'int', 'unsigned'),
+ suffix=r'\b'),
+ Keyword.Type),
+ ],
+ 'keywords': [
+ # ControlFlow
+ (words((
+ 'atomic', 'break', 'd_step', 'do', 'od', 'for', 'in', 'goto',
+ 'if', 'fi', 'unless'), suffix=r'\b'),
+ Keyword),
+ # BasicStatements
+ (words((
+ 'assert', 'get_priority', 'printf', 'printm', 'set_priority'),
+ suffix=r'\b'),
+ Name.Function),
+ # Embedded C Code
+ (words((
+ 'c_code', 'c_decl', 'c_expr', 'c_state', 'c_track'),
+ suffix=r'\b'),
+ Keyword),
+ # Predefined (local/global variables)
+ (words((
+ '_', '_last', '_nr_pr', '_pid', '_priority', 'else', 'np_',
+ 'STDIN'), suffix=r'\b'),
+ Name.Builtin),
+ # Predefined (functions)
+ (words((
+ 'empty', 'enabled', 'eval', 'full', 'len', 'nempty', 'nfull',
+ 'pc_value'), suffix=r'\b'),
+ Name.Function),
+ # Predefined (operators)
+ (r'run\b', Operator.Word),
+ # Declarators
+ (words((
+ 'active', 'chan', 'D_proctype', 'hidden', 'init', 'local',
+ 'mtype', 'never', 'notrace', 'proctype', 'show', 'trace',
+ 'typedef', 'xr', 'xs'), suffix=r'\b'),
+ Keyword.Declaration),
+ # Declarators (suffixes)
+ (words((
+ 'priority', 'provided'), suffix=r'\b'),
+ Keyword),
+ # MetaTerms (declarators)
+ (words((
+ 'inline', 'ltl', 'select'), suffix=r'\b'),
+ Keyword.Declaration),
+ # MetaTerms (keywords)
+ (r'skip\b', Keyword),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/capnproto.py b/contrib/python/Pygments/py3/pygments/lexers/capnproto.py
index 04237f70d6..48e3c51b46 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/capnproto.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/capnproto.py
@@ -4,7 +4,7 @@
Lexers for the Cap'n Proto schema language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,13 +17,12 @@ __all__ = ['CapnProtoLexer']
class CapnProtoLexer(RegexLexer):
"""
For Cap'n Proto source.
-
- .. versionadded:: 2.2
"""
name = 'Cap\'n Proto'
url = 'https://capnproto.org'
filenames = ['*.capnp']
aliases = ['capnp']
+ version_added = '2.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/carbon.py b/contrib/python/Pygments/py3/pygments/lexers/carbon.py
index 758e8af361..c4e71d9c74 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/carbon.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/carbon.py
@@ -4,12 +4,12 @@
Lexers for the Carbon programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
-from pygments.lexer import RegexLexer, bygroups, words
+from pygments.lexer import RegexLexer, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
@@ -19,14 +19,13 @@ __all__ = ['CarbonLexer']
class CarbonLexer(RegexLexer):
"""
For Carbon source.
-
- .. versionadded:: 2.15
"""
name = 'Carbon'
url = 'https://github.com/carbon-language/carbon-lang'
filenames = ['*.carbon']
aliases = ['carbon']
mimetypes = ['text/x-carbon']
+ version_added = '2.15'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/cddl.py b/contrib/python/Pygments/py3/pygments/lexers/cddl.py
index bd7f54aefd..18e3b03e71 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/cddl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/cddl.py
@@ -8,7 +8,7 @@
More information:
https://datatracker.ietf.org/doc/rfc8610/
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,14 +22,13 @@ __all__ = ['CddlLexer']
class CddlLexer(RegexLexer):
"""
Lexer for CDDL definitions.
-
- .. versionadded:: 2.8
"""
name = "CDDL"
url = 'https://datatracker.ietf.org/doc/rfc8610/'
aliases = ["cddl"]
filenames = ["*.cddl"]
mimetypes = ["text/x-cddl"]
+ version_added = '2.8'
_prelude_types = [
"any",
@@ -108,10 +107,10 @@ class CddlLexer(RegexLexer):
"root": [
include("commentsandwhitespace"),
# tag types
- (r"#(\d\.{uint})?".format(uint=_re_uint), Keyword.Type), # type or any
+ (rf"#(\d\.{_re_uint})?", Keyword.Type), # type or any
# occurrence
(
- r"({uint})?(\*)({uint})?".format(uint=_re_uint),
+ rf"({_re_uint})?(\*)({_re_uint})?",
bygroups(Number, Operator, Number),
),
(r"\?|\+", Operator), # occurrence
@@ -119,8 +118,8 @@ class CddlLexer(RegexLexer):
(r"(\.\.\.|\.\.)", Operator), # rangeop
(words(_controls, suffix=r"\b"), Operator.Word), # ctlops
# into choice op
- (r"&(?=\s*({groupname}|\())".format(groupname=_re_id), Operator),
- (r"~(?=\s*{})".format(_re_id), Operator), # unwrap op
+ (rf"&(?=\s*({_re_id}|\())", Operator),
+ (rf"~(?=\s*{_re_id})", Operator), # unwrap op
(r"//|/(?!/)", Operator), # double und single slash
(r"=>|/==|/=|=", Operator),
(r"[\[\]{}\(\),<>:]", Punctuation),
@@ -131,7 +130,7 @@ class CddlLexer(RegexLexer):
# Barewords as member keys (must be matched before values, types, typenames,
# groupnames).
# Token type is String as barewords are always interpreted as such.
- (r"({bareword})(\s*)(:)".format(bareword=_re_id),
+ (rf"({_re_id})(\s*)(:)",
bygroups(String, Whitespace, Punctuation)),
# predefined types
(words(_prelude_types, prefix=r"(?![\-_$@])\b", suffix=r"\b(?![\-_$@])"),
@@ -144,7 +143,7 @@ class CddlLexer(RegexLexer):
(r"0x[0-9a-fA-F]+(\.[0-9a-fA-F]+)?p[+-]?\d+", Number.Hex), # hexfloat
(r"0x[0-9a-fA-F]+", Number.Hex), # hex
# Float
- (r"{int}(?=(\.\d|e[+-]?\d))(?:\.\d+)?(?:e[+-]?\d+)?".format(int=_re_int),
+ (rf"{_re_int}(?=(\.\d|e[+-]?\d))(?:\.\d+)?(?:e[+-]?\d+)?",
Number.Float),
# Int
(_re_int, Number.Integer),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/chapel.py b/contrib/python/Pygments/py3/pygments/lexers/chapel.py
index 9af5f0e385..8caea3506b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/chapel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/chapel.py
@@ -4,7 +4,7 @@
Lexer for the Chapel language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,13 +18,12 @@ __all__ = ['ChapelLexer']
class ChapelLexer(RegexLexer):
"""
For Chapel source.
-
- .. versionadded:: 2.0
"""
name = 'Chapel'
url = 'https://chapel-lang.org/'
filenames = ['*.chpl']
aliases = ['chapel', 'chpl']
+ version_added = '2.0'
# mimetypes = ['text/x-chapel']
known_types = ('bool', 'bytes', 'complex', 'imag', 'int', 'locale',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/clean.py b/contrib/python/Pygments/py3/pygments/lexers/clean.py
index 73d93e0d27..119110be98 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/clean.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/clean.py
@@ -4,7 +4,7 @@
Lexer for the Clean language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,6 +26,7 @@ class CleanLexer(ExtendedRegexLexer):
url = 'http://clean.cs.ru.nl/Clean'
aliases = ['clean']
filenames = ['*.icl', '*.dcl']
+ version_added = ''
keywords = (
'case', 'ccall', 'class', 'code', 'code inline', 'derive', 'export',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/comal.py b/contrib/python/Pygments/py3/pygments/lexers/comal.py
index 666595503f..4344ba32ec 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/comal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/comal.py
@@ -4,7 +4,7 @@
Lexer for COMAL-80.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,6 +26,7 @@ class Comal80Lexer(RegexLexer):
url = 'https://en.wikipedia.org/wiki/COMAL'
aliases = ['comal', 'comal80']
filenames = ['*.cml', '*.comal']
+ version_added = ''
flags = re.IGNORECASE
#
# COMAL allows for some strange characters in names which we list here so
diff --git a/contrib/python/Pygments/py3/pygments/lexers/compiled.py b/contrib/python/Pygments/py3/pygments/lexers/compiled.py
index fe27425b72..57e52df0ca 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/compiled.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/compiled.py
@@ -4,10 +4,11 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+# ruff: noqa: F401
from pygments.lexers.jvm import JavaLexer, ScalaLexer
from pygments.lexers.c_cpp import CLexer, CppLexer
from pygments.lexers.d import DLexer
diff --git a/contrib/python/Pygments/py3/pygments/lexers/configs.py b/contrib/python/Pygments/py3/pygments/lexers/configs.py
index 6c5e5425a5..4b52873c66 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/configs.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/configs.py
@@ -4,7 +4,7 @@
Lexers for configuration file formats.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,6 +36,8 @@ class IniLexer(RegexLexer):
'*.ini', '*.cfg', '*.inf', '.editorconfig',
]
mimetypes = ['text/x-ini', 'text/inf']
+ url = 'https://en.wikipedia.org/wiki/INI_file'
+ version_added = ''
tokens = {
'root': [
@@ -71,14 +73,14 @@ class IniLexer(RegexLexer):
class DesktopLexer(RegexLexer):
"""
Lexer for .desktop files.
-
- .. versionadded:: 2.16
"""
name = 'Desktop file'
url = "https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html"
aliases = ['desktop']
filenames = ['*.desktop']
+ mimetypes = ['application/x-desktop']
+ version_added = '2.16'
tokens = {
'root': [
@@ -101,8 +103,6 @@ class DesktopLexer(RegexLexer):
class SystemdLexer(RegexLexer):
"""
Lexer for systemd unit files.
-
- .. versionadded:: 2.16
"""
name = 'Systemd'
@@ -112,6 +112,7 @@ class SystemdLexer(RegexLexer):
'*.service', '*.socket', '*.device', '*.mount', '*.automount',
'*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope',
]
+ version_added = '2.16'
tokens = {
'root': [
@@ -146,8 +147,6 @@ class SystemdLexer(RegexLexer):
class RegeditLexer(RegexLexer):
"""
Lexer for Windows Registry files produced by regedit.
-
- .. versionadded:: 1.6
"""
name = 'reg'
@@ -155,6 +154,7 @@ class RegeditLexer(RegexLexer):
aliases = ['registry']
filenames = ['*.reg']
mimetypes = ['text/x-windows-registry']
+ version_added = '1.6'
tokens = {
'root': [
@@ -191,14 +191,14 @@ class PropertiesLexer(RegexLexer):
Lexer for configuration files in Java's properties format.
Note: trailing whitespace counts as part of the value as per spec
-
- .. versionadded:: 1.4
"""
name = 'Properties'
aliases = ['properties', 'jproperties']
filenames = ['*.properties']
mimetypes = ['text/x-java-properties']
+ url = 'https://en.wikipedia.org/wiki/.properties'
+ version_added = '1.4'
tokens = {
'root': [
@@ -258,29 +258,30 @@ def _rx_indent(level):
if level == 1:
level_repeat = ''
else:
- level_repeat = '{%s}' % level
- return r'(?:\t| %s\t| {%s})%s.*\n' % (space_repeat, tab_width, level_repeat)
+ level_repeat = f'{{{level}}}'
+ return rf'(?:\t| {space_repeat}\t| {{{tab_width}}}){level_repeat}.*\n'
class KconfigLexer(RegexLexer):
"""
For Linux-style Kconfig files.
-
- .. versionadded:: 1.6
"""
name = 'Kconfig'
aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
+ version_added = '1.6'
# Adjust this if new kconfig file names appear in your environment
filenames = ['Kconfig*', '*Config.in*', 'external.in*',
'standard-modules.in']
mimetypes = ['text/x-kconfig']
+ url = 'https://www.kernel.org/doc/html/latest/kbuild/kconfig-language.html'
+
# No re.MULTILINE, indentation-aware help text needs line-by-line handling
flags = 0
def call_indent(level):
# If indentation >= {level} is detected, enter state 'indent{level}'
- return (_rx_indent(level), String.Doc, 'indent%s' % level)
+ return (_rx_indent(level), String.Doc, f'indent{level}')
def do_indent(level):
# Print paragraphs of indentation level >= {level} as String.Doc,
@@ -342,8 +343,6 @@ class KconfigLexer(RegexLexer):
class Cfengine3Lexer(RegexLexer):
"""
Lexer for CFEngine3 policy files.
-
- .. versionadded:: 1.5
"""
name = 'CFEngine3'
@@ -351,6 +350,7 @@ class Cfengine3Lexer(RegexLexer):
aliases = ['cfengine3', 'cf3']
filenames = ['*.cf']
mimetypes = []
+ version_added = '1.5'
tokens = {
'root': [
@@ -405,14 +405,14 @@ class ApacheConfLexer(RegexLexer):
"""
Lexer for configuration files following the Apache config file
format.
-
- .. versionadded:: 0.6
"""
name = 'ApacheConf'
aliases = ['apacheconf', 'aconf', 'apache']
filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
mimetypes = ['text/x-apacheconf']
+ url = 'https://httpd.apache.org/docs/current/configuring.html'
+ version_added = '0.6'
flags = re.MULTILINE | re.IGNORECASE
tokens = {
@@ -447,8 +447,6 @@ class ApacheConfLexer(RegexLexer):
class SquidConfLexer(RegexLexer):
"""
Lexer for squid configuration files.
-
- .. versionadded:: 0.9
"""
name = 'SquidConf'
@@ -456,6 +454,7 @@ class SquidConfLexer(RegexLexer):
aliases = ['squidconf', 'squid.conf', 'squid']
filenames = ['squid.conf']
mimetypes = ['text/x-squidconf']
+ version_added = '0.9'
flags = re.IGNORECASE
keywords = (
@@ -577,14 +576,13 @@ class SquidConfLexer(RegexLexer):
class NginxConfLexer(RegexLexer):
"""
Lexer for Nginx configuration files.
-
- .. versionadded:: 0.11
"""
name = 'Nginx configuration file'
url = 'http://nginx.net/'
aliases = ['nginx']
filenames = ['nginx.conf']
mimetypes = ['text/x-nginx-conf']
+ version_added = '0.11'
tokens = {
'root': [
@@ -624,14 +622,13 @@ class NginxConfLexer(RegexLexer):
class LighttpdConfLexer(RegexLexer):
"""
Lexer for Lighttpd configuration files.
-
- .. versionadded:: 0.11
"""
name = 'Lighttpd configuration file'
url = 'http://lighttpd.net/'
aliases = ['lighttpd', 'lighty']
filenames = ['lighttpd.conf']
mimetypes = ['text/x-lighttpd-conf']
+ version_added = '0.11'
tokens = {
'root': [
@@ -653,14 +650,13 @@ class LighttpdConfLexer(RegexLexer):
class DockerLexer(RegexLexer):
"""
Lexer for Docker configuration files.
-
- .. versionadded:: 2.0
"""
name = 'Docker'
url = 'http://docker.io'
aliases = ['docker', 'dockerfile']
filenames = ['Dockerfile', '*.docker']
mimetypes = ['text/x-dockerfile-config']
+ version_added = '2.0'
_keywords = (r'(?:MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)')
_bash_keywords = (r'(?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY)')
@@ -672,15 +668,15 @@ class DockerLexer(RegexLexer):
(r'#.*', Comment),
(r'(FROM)([ \t]*)(\S*)([ \t]*)(?:(AS)([ \t]*)(\S*))?',
bygroups(Keyword, Whitespace, String, Whitespace, Keyword, Whitespace, String)),
- (r'(ONBUILD)(\s+)(%s)' % (_lb,), bygroups(Keyword, Whitespace, using(BashLexer))),
- (r'(HEALTHCHECK)(\s+)((%s--\w+=\w+%s)*)' % (_lb, _lb),
+ (rf'(ONBUILD)(\s+)({_lb})', bygroups(Keyword, Whitespace, using(BashLexer))),
+ (rf'(HEALTHCHECK)(\s+)(({_lb}--\w+=\w+{_lb})*)',
bygroups(Keyword, Whitespace, using(BashLexer))),
- (r'(VOLUME|ENTRYPOINT|CMD|SHELL)(\s+)(%s)(\[.*?\])' % (_lb,),
+ (rf'(VOLUME|ENTRYPOINT|CMD|SHELL)(\s+)({_lb})(\[.*?\])',
bygroups(Keyword, Whitespace, using(BashLexer), using(JsonLexer))),
- (r'(LABEL|ENV|ARG)(\s+)((%s\w+=\w+%s)*)' % (_lb, _lb),
+ (rf'(LABEL|ENV|ARG)(\s+)(({_lb}\w+=\w+{_lb})*)',
bygroups(Keyword, Whitespace, using(BashLexer))),
- (r'(%s|VOLUME)\b(\s+)(.*)' % (_keywords), bygroups(Keyword, Whitespace, String)),
- (r'(%s)(\s+)' % (_bash_keywords,), bygroups(Keyword, Whitespace)),
+ (rf'({_keywords}|VOLUME)\b(\s+)(.*)', bygroups(Keyword, Whitespace, String)),
+ (rf'({_bash_keywords})(\s+)', bygroups(Keyword, Whitespace)),
(r'(.*\\\n)*.+', using(BashLexer)),
]
}
@@ -689,8 +685,6 @@ class DockerLexer(RegexLexer):
class TerraformLexer(ExtendedRegexLexer):
"""
Lexer for terraformi ``.tf`` files.
-
- .. versionadded:: 2.1
"""
name = 'Terraform'
@@ -698,6 +692,7 @@ class TerraformLexer(ExtendedRegexLexer):
aliases = ['terraform', 'tf', 'hcl']
filenames = ['*.tf', '*.hcl']
mimetypes = ['application/x-tf', 'application/x-terraform']
+ version_added = '2.1'
classes = ('backend', 'data', 'module', 'output', 'provider',
'provisioner', 'resource', 'variable')
@@ -861,13 +856,13 @@ class TermcapLexer(RegexLexer):
Lexer for termcap database source.
This is very simple and minimal.
-
- .. versionadded:: 2.1
"""
name = 'Termcap'
aliases = ['termcap']
filenames = ['termcap', 'termcap.src']
mimetypes = []
+ url = 'https://en.wikipedia.org/wiki/Termcap'
+ version_added = '2.1'
# NOTE:
# * multiline with trailing backslash
@@ -908,13 +903,13 @@ class TerminfoLexer(RegexLexer):
Lexer for terminfo database source.
This is very simple and minimal.
-
- .. versionadded:: 2.1
"""
name = 'Terminfo'
aliases = ['terminfo']
filenames = ['terminfo', 'terminfo.src']
mimetypes = []
+ url = 'https://en.wikipedia.org/wiki/Terminfo'
+ version_added = '2.1'
# NOTE:
# * multiline with leading whitespace
@@ -954,8 +949,6 @@ class PkgConfigLexer(RegexLexer):
"""
Lexer for pkg-config
(see also `manual page <http://linux.die.net/man/1/pkg-config>`_).
-
- .. versionadded:: 2.1
"""
name = 'PkgConfig'
@@ -963,6 +956,7 @@ class PkgConfigLexer(RegexLexer):
aliases = ['pkgconfig']
filenames = ['*.pc']
mimetypes = []
+ version_added = '2.1'
tokens = {
'root': [
@@ -1023,8 +1017,6 @@ class PacmanConfLexer(RegexLexer):
VerbosePkgLists
These are flags to switch on.
-
- .. versionadded:: 2.1
"""
name = 'PacmanConf'
@@ -1032,6 +1024,7 @@ class PacmanConfLexer(RegexLexer):
aliases = ['pacmanconf']
filenames = ['pacman.conf']
mimetypes = []
+ version_added = '2.1'
tokens = {
'root': [
@@ -1069,13 +1062,12 @@ class PacmanConfLexer(RegexLexer):
class AugeasLexer(RegexLexer):
"""
Lexer for Augeas.
-
- .. versionadded:: 2.4
"""
name = 'Augeas'
url = 'http://augeas.net'
aliases = ['augeas']
filenames = ['*.aug']
+ version_added = '2.4'
tokens = {
'root': [
@@ -1114,8 +1106,6 @@ class AugeasLexer(RegexLexer):
class TOMLLexer(RegexLexer):
"""
Lexer for TOML, a simple language for config files.
-
- .. versionadded:: 2.4
"""
name = 'TOML'
@@ -1123,6 +1113,7 @@ class TOMLLexer(RegexLexer):
filenames = ['*.toml', 'Pipfile', 'poetry.lock']
mimetypes = ['application/toml']
url = 'https://toml.io'
+ version_added = '2.4'
# Based on the TOML spec: https://toml.io/en/v1.0.0
@@ -1284,8 +1275,6 @@ class NestedTextLexer(RegexLexer):
"""
Lexer for *NextedText*, a human-friendly data format.
- .. versionadded:: 2.9
-
.. versionchanged:: 2.16
Added support for *NextedText* v3.0.
"""
@@ -1294,6 +1283,7 @@ class NestedTextLexer(RegexLexer):
url = 'https://nestedtext.org'
aliases = ['nestedtext', 'nt']
filenames = ['*.nt']
+ version_added = '2.9'
tokens = {
'root': [
@@ -1366,14 +1356,13 @@ class NestedTextLexer(RegexLexer):
class SingularityLexer(RegexLexer):
"""
Lexer for Singularity definition files.
-
- .. versionadded:: 2.6
"""
name = 'Singularity'
url = 'https://www.sylabs.io/guides/3.0/user-guide/definition_files.html'
aliases = ['singularity']
filenames = ['*.def', 'Singularity']
+ version_added = '2.6'
flags = re.IGNORECASE | re.MULTILINE | re.DOTALL
_headers = r'^(\s*)(bootstrap|from|osversion|mirrorurl|include|registry|namespace|includecmd)(:)'
@@ -1415,13 +1404,13 @@ class UnixConfigLexer(RegexLexer):
* ``/etc/group``
* ``/etc/passwd``
* ``/etc/shadow``
-
- .. versionadded:: 2.12
"""
name = 'Unix/Linux config files'
aliases = ['unixconfig', 'linuxconfig']
filenames = []
+ url = 'https://en.wikipedia.org/wiki/Configuration_file#Unix_and_Unix-like_operating_systems'
+ version_added = '2.12'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/console.py b/contrib/python/Pygments/py3/pygments/lexers/console.py
index ac498d58b0..b9f24745ec 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/console.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/console.py
@@ -4,7 +4,7 @@
Lexers for misc console output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,13 +19,13 @@ class VCTreeStatusLexer(RegexLexer):
"""
For colorizing output of version control status commands, like "hg
status" or "svn status".
-
- .. versionadded:: 2.0
"""
name = 'VCTreeStatus'
aliases = ['vctreestatus']
filenames = []
mimetypes = []
+ url = ""
+ version_added = '2.0'
tokens = {
'root': [
@@ -45,13 +45,13 @@ class VCTreeStatusLexer(RegexLexer):
class PyPyLogLexer(RegexLexer):
"""
Lexer for PyPy log files.
-
- .. versionadded:: 1.5
"""
name = "PyPy Log"
aliases = ["pypylog", "pypy"]
filenames = ["*.pypylog"]
mimetypes = ['application/x-pypylog']
+ url = 'pypy.org'
+ version_added = '1.5'
tokens = {
"root": [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/cplint.py b/contrib/python/Pygments/py3/pygments/lexers/cplint.py
index 8a48c013f0..39b0a96e32 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/cplint.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/cplint.py
@@ -4,7 +4,7 @@
Lexer for the cplint language
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,13 @@ class CplintLexer(PrologLexer):
"""
Lexer for cplint files, including CP-logic, Logic Programs with Annotated
Disjunctions, Distributional Clauses syntax, ProbLog, DTProbLog.
-
- .. versionadded:: 2.12
"""
name = 'cplint'
url = 'https://cplint.eu'
aliases = ['cplint']
filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl', '*.P', '*.lpad', '*.cpl']
mimetypes = ['text/x-cplint']
+ version_added = '2.12'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/crystal.py b/contrib/python/Pygments/py3/pygments/lexers/crystal.py
index e4df8b1dd7..6656815ab8 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/crystal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/crystal.py
@@ -4,7 +4,7 @@
Lexer for Crystal.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,8 +27,6 @@ CRYSTAL_OPERATORS = [
class CrystalLexer(ExtendedRegexLexer):
"""
For Crystal source code.
-
- .. versionadded:: 2.2
"""
name = 'Crystal'
@@ -36,6 +34,7 @@ class CrystalLexer(ExtendedRegexLexer):
aliases = ['cr', 'crystal']
filenames = ['*.cr']
mimetypes = ['text/x-crystal']
+ version_added = '2.2'
flags = re.DOTALL | re.MULTILINE
@@ -107,7 +106,7 @@ class CrystalLexer(ExtendedRegexLexer):
('backtick', String.Backtick, '`'):
states['simple-'+name] = [
include('string-escaped' if name == 'sym' else 'string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
+ (rf'[^\\{end}#]+', ttype),
(r'[\\#]', ttype),
(end, ttype, '#pop'),
]
diff --git a/contrib/python/Pygments/py3/pygments/lexers/csound.py b/contrib/python/Pygments/py3/pygments/lexers/csound.py
index 64f03cff35..415fac3ef7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/csound.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/csound.py
@@ -4,7 +4,7 @@
Lexers for Csound languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -136,13 +136,12 @@ class CsoundLexer(RegexLexer):
class CsoundScoreLexer(CsoundLexer):
"""
For `Csound <https://csound.com>`_ scores.
-
- .. versionadded:: 2.1
"""
name = 'Csound Score'
aliases = ['csound-score', 'csound-sco']
filenames = ['*.sco']
+ version_added = '2.1'
tokens = {
'root': [
@@ -202,13 +201,12 @@ class CsoundScoreLexer(CsoundLexer):
class CsoundOrchestraLexer(CsoundLexer):
"""
For `Csound <https://csound.com>`_ orchestras.
-
- .. versionadded:: 2.1
"""
name = 'Csound Orchestra'
aliases = ['csound', 'csound-orc']
filenames = ['*.orc', '*.udo']
+ version_added = '2.1'
user_defined_opcodes = set()
@@ -411,14 +409,14 @@ class CsoundOrchestraLexer(CsoundLexer):
class CsoundDocumentLexer(RegexLexer):
"""
- For `Csound <https://csound.com>`_ documents.
-
- .. versionadded:: 2.1
+ For Csound documents.
"""
name = 'Csound Document'
aliases = ['csound-document', 'csound-csd']
filenames = ['*.csd']
+ url = 'https://csound.com'
+ version_added = '2.1'
# These tokens are based on those in XmlLexer in pygments/lexers/html.py. Making
# CsoundDocumentLexer a subclass of XmlLexer rather than RegexLexer may seem like a
diff --git a/contrib/python/Pygments/py3/pygments/lexers/css.py b/contrib/python/Pygments/py3/pygments/lexers/css.py
index d8a961f048..fb7936b82e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/css.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/css.py
@@ -4,7 +4,7 @@
Lexers for CSS and related stylesheet formats.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -178,6 +178,7 @@ class CssLexer(RegexLexer):
aliases = ['css']
filenames = ['*.css']
mimetypes = ['text/css']
+ version_added = ''
tokens = {
'root': [
@@ -445,8 +446,6 @@ def _starts_block(token, state):
class SassLexer(ExtendedRegexLexer):
"""
For Sass stylesheets.
-
- .. versionadded:: 1.3
"""
name = 'Sass'
@@ -454,6 +453,7 @@ class SassLexer(ExtendedRegexLexer):
aliases = ['sass']
filenames = ['*.sass']
mimetypes = ['text/x-sass']
+ version_added = '1.3'
flags = re.IGNORECASE | re.MULTILINE
@@ -535,6 +535,7 @@ class ScssLexer(RegexLexer):
aliases = ['scss']
filenames = ['*.scss']
mimetypes = ['text/x-scss']
+ version_added = ''
flags = re.IGNORECASE | re.DOTALL
tokens = {
@@ -579,8 +580,6 @@ class ScssLexer(RegexLexer):
class LessCssLexer(CssLexer):
"""
For LESS styleshets.
-
- .. versionadded:: 2.1
"""
name = 'LessCss'
@@ -588,6 +587,7 @@ class LessCssLexer(CssLexer):
aliases = ['less']
filenames = ['*.less']
mimetypes = ['text/x-less-css']
+ version_added = '2.1'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/d.py b/contrib/python/Pygments/py3/pygments/lexers/d.py
index db9020d489..58c53b7137 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/d.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/d.py
@@ -4,7 +4,7 @@
Lexers for D languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
class DLexer(RegexLexer):
"""
For D source.
-
- .. versionadded:: 1.2
"""
name = 'D'
url = 'https://dlang.org/'
filenames = ['*.d', '*.di']
aliases = ['d']
mimetypes = ['text/x-dsrc']
+ version_added = '1.2'
tokens = {
'root': [
@@ -194,6 +193,7 @@ class CrocLexer(RegexLexer):
filenames = ['*.croc']
aliases = ['croc']
mimetypes = ['text/x-crocsrc']
+ version_added = ''
tokens = {
'root': [
@@ -256,3 +256,4 @@ class MiniDLexer(CrocLexer):
filenames = [] # don't lex .md as MiniD, reserve for Markdown
aliases = ['minid']
mimetypes = ['text/x-minidsrc']
+ version_added = ''
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dalvik.py b/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
index eb97bd511f..df854f1faa 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dalvik.py
@@ -4,7 +4,7 @@
Pygments lexers for Dalvik VM-related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,14 +21,13 @@ class SmaliLexer(RegexLexer):
"""
For Smali (Android/Dalvik) assembly
code.
-
- .. versionadded:: 1.6
"""
name = 'Smali'
url = 'http://code.google.com/p/smali/'
aliases = ['smali']
filenames = ['*.smali']
mimetypes = ['text/smali']
+ version_added = '1.6'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/data.py b/contrib/python/Pygments/py3/pygments/lexers/data.py
index afb5f7e59c..8f384bc356 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/data.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/data.py
@@ -4,7 +4,7 @@
Lexers for data file format.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,8 +31,6 @@ class YamlLexer(ExtendedRegexLexer):
"""
Lexer for YAML, a human-friendly data serialization
language.
-
- .. versionadded:: 0.11
"""
name = 'YAML'
@@ -40,6 +38,7 @@ class YamlLexer(ExtendedRegexLexer):
aliases = ['yaml']
filenames = ['*.yaml', '*.yml']
mimetypes = ['text/x-yaml']
+ version_added = '0.11'
def something(token_class):
"""Do not produce empty tokens."""
@@ -443,8 +442,6 @@ class JsonLexer(Lexer):
This allows users to highlight JSON as it is used in the wild.
No validation is performed on the input JSON document.
-
- .. versionadded:: 1.5
"""
name = 'JSON'
@@ -452,6 +449,7 @@ class JsonLexer(Lexer):
aliases = ['json', 'json-object']
filenames = ['*.json', '*.jsonl', '*.ndjson', 'Pipfile.lock']
mimetypes = ['application/json', 'application/json-object', 'application/x-ndjson', 'application/jsonl', 'application/json-seq']
+ version_added = '1.5'
# No validation of integers, floats, or constants is done.
# As long as the characters are members of the following
@@ -704,8 +702,6 @@ class JsonBareObjectLexer(JsonLexer):
"""
For JSON data structures (with missing object curly braces).
- .. versionadded:: 2.2
-
.. deprecated:: 2.8.0
Behaves the same as `JsonLexer` now.
@@ -715,13 +711,12 @@ class JsonBareObjectLexer(JsonLexer):
aliases = []
filenames = []
mimetypes = []
+ version_added = '2.2'
class JsonLdLexer(JsonLexer):
"""
For JSON-LD linked data.
-
- .. versionadded:: 2.0
"""
name = 'JSON-LD'
@@ -729,9 +724,10 @@ class JsonLdLexer(JsonLexer):
aliases = ['jsonld', 'json-ld']
filenames = ['*.jsonld']
mimetypes = ['application/ld+json']
+ version_added = '2.0'
json_ld_keywords = {
- '"@%s"' % keyword
+ f'"@{keyword}"'
for keyword in (
'base',
'container',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dax.py b/contrib/python/Pygments/py3/pygments/lexers/dax.py
index 39618c3bb9..aec4c4f666 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dax.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dax.py
@@ -4,7 +4,7 @@
Lexer for LilyPond.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,13 @@ class DaxLexer(RegexLexer):
"""
Lexer for Power BI DAX
Referenced from: https://github.com/sql-bi/SyntaxHighlighterBrushDax
-
- .. versionadded:: 2.15
"""
name = 'Dax'
aliases = ['dax']
filenames = ['*.dax']
url = 'https://learn.microsoft.com/en-us/dax/dax-function-reference'
mimetypes = []
+ version_added = '2.15'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/devicetree.py b/contrib/python/Pygments/py3/pygments/lexers/devicetree.py
index 9221464f72..903eda93dc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/devicetree.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/devicetree.py
@@ -4,7 +4,7 @@
Lexers for Devicetree language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,8 +18,6 @@ __all__ = ['DevicetreeLexer']
class DevicetreeLexer(RegexLexer):
"""
Lexer for Devicetree files.
-
- .. versionadded:: 2.7
"""
name = 'Devicetree'
@@ -27,6 +25,7 @@ class DevicetreeLexer(RegexLexer):
aliases = ['devicetree', 'dts']
filenames = ['*.dts', '*.dtsi']
mimetypes = ['text/x-c']
+ version_added = '2.7'
#: optional Whitespace or /*...*/ style comment
_ws = r'\s*(?:/[*][^*/]*?[*]/\s*)*'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/diff.py b/contrib/python/Pygments/py3/pygments/lexers/diff.py
index 0ab85bfbf3..4b260da876 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/diff.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/diff.py
@@ -4,7 +4,7 @@
Lexers for diff/patch formats.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,6 +26,8 @@ class DiffLexer(RegexLexer):
aliases = ['diff', 'udiff']
filenames = ['*.diff', '*.patch']
mimetypes = ['text/x-diff', 'text/x-patch']
+ url = 'https://en.wikipedia.org/wiki/Diff'
+ version_added = ''
tokens = {
'root': [
@@ -57,13 +59,13 @@ class DarcsPatchLexer(RegexLexer):
DarcsPatchLexer is a lexer for the various versions of the darcs patch
format. Examples of this format are derived by commands such as
``darcs annotate --patch`` and ``darcs send``.
-
- .. versionadded:: 0.10
"""
name = 'Darcs Patch'
aliases = ['dpatch']
filenames = ['*.dpatch', '*.darcspatch']
+ url = 'https://darcs.net'
+ version_added = '0.10'
DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
'replace')
@@ -83,7 +85,7 @@ class DarcsPatchLexer(RegexLexer):
(r'New patches:', Generic.Heading),
(r'Context:', Generic.Heading),
(r'Patch bundle hash:', Generic.Heading),
- (r'(\s*)(%s)(.*)(\n)' % '|'.join(DPATCH_KEYWORDS),
+ (r'(\s*)({})(.*)(\n)'.format('|'.join(DPATCH_KEYWORDS)),
bygroups(Whitespace, Keyword, Text, Whitespace)),
(r'\+', Generic.Inserted, "insert"),
(r'-', Generic.Deleted, "delete"),
@@ -119,8 +121,6 @@ class WDiffLexer(RegexLexer):
* It only works with normal output (without options like ``-l``).
* If the target files contain "[-", "-]", "{+", or "+}",
especially they are unbalanced, the lexer will get confused.
-
- .. versionadded:: 2.2
"""
name = 'WDiff'
@@ -128,6 +128,7 @@ class WDiffLexer(RegexLexer):
aliases = ['wdiff']
filenames = ['*.wdiff']
mimetypes = []
+ version_added = '2.2'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dns.py b/contrib/python/Pygments/py3/pygments/lexers/dns.py
index 18cab3192a..d0f98d032e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dns.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dns.py
@@ -4,7 +4,7 @@
Pygments lexers for DNS
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,8 +31,6 @@ class DnsZoneLexer(RegexLexer):
"""
Lexer for DNS zone file
-
- .. versionadded:: 2.16
"""
flags = re.MULTILINE
@@ -42,6 +40,7 @@ class DnsZoneLexer(RegexLexer):
filenames = [ "*.zone" ]
url = "https://datatracker.ietf.org/doc/html/rfc1035"
mimetypes = ['text/dns']
+ version_added = '2.16'
tokens = {
'root': [
@@ -73,16 +72,16 @@ class DnsZoneLexer(RegexLexer):
'values': [
(r'\n', Whitespace, "#pop"),
(r'\(', Punctuation, 'nested'),
- include('simple-values'),
+ include('simple-value'),
],
# Parsing nested values (...):
'nested': [
(r'\)', Punctuation, "#pop"),
- include('simple-values'),
+ include('multiple-simple-values'),
],
# Parsing values:
- 'simple-values': [
- (r'(;.*)(\n)', bygroups(Comment.Single, Whitespace)),
+ 'simple-value': [
+ (r'(;.*)', bygroups(Comment.Single)),
(r'[ \t]+', Whitespace),
(r"@\b", Operator),
('"', String, 'string'),
@@ -90,6 +89,10 @@ class DnsZoneLexer(RegexLexer):
(r'([0-9]+[smhdw]?)([ \t]+)', bygroups(Number.Integer, Whitespace)),
(r'\S+', Literal),
],
+ 'multiple-simple-values': [
+ include('simple-value'),
+ (r'[\n]+', Whitespace),
+ ],
'include': [
(r'([ \t]+)([^ \t\n]+)([ \t]+)([-\._a-zA-Z]+)([ \t]+)(;.*)?$',
bygroups(Whitespace, Comment.PreprocFile, Whitespace, Name, Whitespace, Comment.Single), '#pop'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dotnet.py b/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
index 5c488dd974..17097c75da 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dotnet.py
@@ -4,7 +4,7 @@
Lexers for .net languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -50,6 +50,7 @@ class CSharpLexer(RegexLexer):
aliases = ['csharp', 'c#', 'cs']
filenames = ['*.cs']
mimetypes = ['text/x-csharp'] # inferred
+ version_added = ''
flags = re.MULTILINE | re.DOTALL
@@ -164,8 +165,6 @@ class NemerleLexer(RegexLexer):
``Lo`` category has more than 40,000 characters in it!
The default value is ``basic``.
-
- .. versionadded:: 1.5
"""
name = 'Nemerle'
@@ -173,6 +172,7 @@ class NemerleLexer(RegexLexer):
aliases = ['nemerle']
filenames = ['*.n']
mimetypes = ['text/x-nemerle'] # inferred
+ version_added = '1.5'
flags = re.MULTILINE | re.DOTALL
@@ -333,6 +333,7 @@ class BooLexer(RegexLexer):
aliases = ['boo']
filenames = ['*.boo']
mimetypes = ['text/x-boo']
+ version_added = ''
tokens = {
'root': [
@@ -399,9 +400,10 @@ class VbNetLexer(RegexLexer):
name = 'VB.net'
url = 'https://docs.microsoft.com/en-us/dotnet/visual-basic/'
- aliases = ['vb.net', 'vbnet', 'lobas', 'oobas', 'sobas']
+ aliases = ['vb.net', 'vbnet', 'lobas', 'oobas', 'sobas', 'visual-basic', 'visualbasic']
filenames = ['*.vb', '*.bas']
mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
+ version_added = ''
uni_name = '[_' + uni.combine('Ll', 'Lt', 'Lm', 'Nl') + ']' + \
'[' + uni.combine('Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc',
@@ -510,6 +512,7 @@ class GenericAspxLexer(RegexLexer):
name = 'aspx-gen'
filenames = []
mimetypes = []
+ url = 'https://dotnet.microsoft.com/en-us/apps/aspnet'
flags = re.DOTALL
@@ -535,6 +538,8 @@ class CSharpAspxLexer(DelegatingLexer):
aliases = ['aspx-cs']
filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
mimetypes = []
+ url = 'https://dotnet.microsoft.com/en-us/apps/aspnet'
+ version_added = ''
def __init__(self, **options):
super().__init__(CSharpLexer, GenericAspxLexer, **options)
@@ -555,6 +560,8 @@ class VbNetAspxLexer(DelegatingLexer):
aliases = ['aspx-vb']
filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
mimetypes = []
+ url = 'https://dotnet.microsoft.com/en-us/apps/aspnet'
+ version_added = ''
def __init__(self, **options):
super().__init__(VbNetLexer, GenericAspxLexer, **options)
@@ -570,8 +577,6 @@ class VbNetAspxLexer(DelegatingLexer):
class FSharpLexer(RegexLexer):
"""
For the F# language (version 3.0).
-
- .. versionadded:: 1.5
"""
name = 'F#'
@@ -579,6 +584,7 @@ class FSharpLexer(RegexLexer):
aliases = ['fsharp', 'f#']
filenames = ['*.fs', '*.fsi', '*.fsx']
mimetypes = ['text/x-fsharp']
+ version_added = '1.5'
keywords = [
'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
@@ -649,12 +655,12 @@ class FSharpLexer(RegexLexer):
bygroups(Keyword, Whitespace, Name.Class)),
(r'\b(member|override)(\s+)(\w+)(\.)(\w+)',
bygroups(Keyword, Whitespace, Name, Punctuation, Name.Function)),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'\b({})\b'.format('|'.join(keywords)), Keyword),
(r'``([^`\n\r\t]|`[^`\n\r\t])+``', Name),
- (r'(%s)' % '|'.join(keyopts), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+ (r'({})'.format('|'.join(keyopts)), Operator),
+ (rf'({infix_syms}|{prefix_syms})?{operators}', Operator),
+ (r'\b({})\b'.format('|'.join(word_operators)), Operator.Word),
+ (r'\b({})\b'.format('|'.join(primitives)), Keyword.Type),
(r'(#)([ \t]*)(if|endif|else|line|nowarn|light|\d+)\b(.*?)(\n)',
bygroups(Comment.Preproc, Whitespace, Comment.Preproc,
Comment.Preproc, Whitespace)),
@@ -733,21 +739,20 @@ class XppLexer(RegexLexer):
"""
For X++ source code. This is based loosely on the CSharpLexer
-
- .. versionadded:: 2.15
"""
name = 'X++'
url = 'https://learn.microsoft.com/en-us/dynamics365/fin-ops-core/dev-itpro/dev-ref/xpp-language-reference'
aliases = ['xpp', 'x++']
filenames = ['*.xpp']
+ version_added = '2.15'
flags = re.MULTILINE
XPP_CHARS = ('@?(?:_|[^' +
uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])' +
'[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*');
+ 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*')
# Temporary, see
# https://github.com/thatch/regexlint/pull/49
XPP_CHARS = XPP_CHARS.replace('\x00', '\x01')
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dsls.py b/contrib/python/Pygments/py3/pygments/lexers/dsls.py
index 37a5ff6c33..35dfda9a38 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dsls.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dsls.py
@@ -4,7 +4,7 @@
Lexers for various domain-specific languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,14 +23,13 @@ __all__ = ['ProtoBufLexer', 'ZeekLexer', 'PuppetLexer', 'RslLexer',
class ProtoBufLexer(RegexLexer):
"""
Lexer for Protocol Buffer definition files.
-
- .. versionadded:: 1.4
"""
name = 'Protocol Buffer'
url = 'https://developers.google.com/protocol-buffers/'
aliases = ['protobuf', 'proto']
filenames = ['*.proto']
+ version_added = '1.4'
tokens = {
'root': [
@@ -85,14 +84,13 @@ class ProtoBufLexer(RegexLexer):
class ThriftLexer(RegexLexer):
"""
For Thrift interface definitions.
-
- .. versionadded:: 2.1
"""
name = 'Thrift'
url = 'https://thrift.apache.org/'
aliases = ['thrift']
filenames = ['*.thrift']
mimetypes = ['application/x-thrift']
+ version_added = '2.1'
tokens = {
'root': [
@@ -191,13 +189,12 @@ class ThriftLexer(RegexLexer):
class ZeekLexer(RegexLexer):
"""
For Zeek scripts.
-
- .. versionadded:: 2.5
"""
name = 'Zeek'
url = 'https://www.zeek.org/'
aliases = ['zeek', 'bro']
filenames = ['*.zeek', '*.bro']
+ version_added = '2.5'
_hex = r'[0-9a-fA-F]'
_float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
@@ -353,13 +350,12 @@ BroLexer = ZeekLexer
class PuppetLexer(RegexLexer):
"""
For Puppet configuration DSL.
-
- .. versionadded:: 1.6
"""
name = 'Puppet'
url = 'https://puppet.com/'
aliases = ['puppet']
filenames = ['*.pp']
+ version_added = '1.6'
tokens = {
'root': [
@@ -443,14 +439,13 @@ class RslLexer(RegexLexer):
RSL is the formal specification
language used in RAISE (Rigorous Approach to Industrial Software Engineering)
method.
-
- .. versionadded:: 2.0
"""
name = 'RSL'
url = 'http://en.wikipedia.org/wiki/RAISE'
aliases = ['rsl']
filenames = ['*.rsl']
mimetypes = ['text/rsl']
+ version_added = '2.0'
flags = re.MULTILINE | re.DOTALL
@@ -505,13 +500,12 @@ class RslLexer(RegexLexer):
class MscgenLexer(RegexLexer):
"""
For Mscgen files.
-
- .. versionadded:: 1.6
"""
name = 'Mscgen'
url = 'http://www.mcternan.me.uk/mscgen/'
aliases = ['mscgen', 'msc']
filenames = ['*.msc']
+ version_added = '1.6'
_var = r'(\w+|"(?:\\"|[^"])*")'
@@ -555,13 +549,12 @@ class MscgenLexer(RegexLexer):
class VGLLexer(RegexLexer):
"""
For SampleManager VGL source code.
-
- .. versionadded:: 1.6
"""
name = 'VGL'
url = 'http://www.thermoscientific.com/samplemanager'
aliases = ['vgl']
filenames = ['*.rpf']
+ version_added = '1.6'
flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
@@ -589,8 +582,6 @@ class VGLLexer(RegexLexer):
class AlloyLexer(RegexLexer):
"""
For Alloy source code.
-
- .. versionadded:: 2.0
"""
name = 'Alloy'
@@ -598,6 +589,7 @@ class AlloyLexer(RegexLexer):
aliases = ['alloy']
filenames = ['*.als']
mimetypes = ['text/x-alloy']
+ version_added = '2.0'
flags = re.MULTILINE | re.DOTALL
@@ -660,14 +652,13 @@ class PanLexer(RegexLexer):
Lexer for pan source files.
Based on tcsh lexer.
-
- .. versionadded:: 2.0
"""
name = 'Pan'
url = 'https://github.com/quattor/pan/'
aliases = ['pan']
filenames = ['*.pan']
+ version_added = '2.0'
tokens = {
'root': [
@@ -727,14 +718,13 @@ class PanLexer(RegexLexer):
class CrmshLexer(RegexLexer):
"""
Lexer for crmsh configuration files for Pacemaker clusters.
-
- .. versionadded:: 2.1
"""
name = 'Crmsh'
url = 'http://crmsh.github.io/'
aliases = ['crmsh', 'pcmk']
filenames = ['*.crmsh', '*.pcmk']
mimetypes = []
+ version_added = '2.1'
elem = words((
'node', 'primitive', 'group', 'clone', 'ms', 'location',
@@ -773,7 +763,7 @@ class CrmshLexer(RegexLexer):
(sub, Keyword),
(acl, Keyword),
# binary operators
- (r'(?:%s:)?(%s)(?![\w#$-])' % (val_qual, bin_ops), Operator.Word),
+ (rf'(?:{val_qual}:)?({bin_ops})(?![\w#$-])', Operator.Word),
# other operators
(bin_rel, Operator.Word),
(un_ops, Operator.Word),
@@ -781,11 +771,11 @@ class CrmshLexer(RegexLexer):
# builtin attributes (e.g. #uname)
(r'#[a-z]+(?![\w#$-])', Name.Builtin),
# acl_mod:blah
- (r'(%s)(:)("(?:""|[^"])*"|\S+)' % acl_mod,
+ (rf'({acl_mod})(:)("(?:""|[^"])*"|\S+)',
bygroups(Keyword, Punctuation, Name)),
# rsc_id[:(role|action)]
# NB: this matches all other identifiers
- (r'([\w#$-]+)(?:(:)(%s))?(?![\w#$-])' % rsc_role_action,
+ (rf'([\w#$-]+)(?:(:)({rsc_role_action}))?(?![\w#$-])',
bygroups(Name, Punctuation, Operator.Word)),
# punctuation
(r'(\\(?=\n)|[\[\](){}/:@])', Punctuation),
@@ -797,14 +787,13 @@ class CrmshLexer(RegexLexer):
class FlatlineLexer(RegexLexer):
"""
Lexer for Flatline expressions.
-
- .. versionadded:: 2.2
"""
name = 'Flatline'
url = 'https://github.com/bigmlcom/flatline'
aliases = ['flatline']
filenames = []
mimetypes = ['text/x-flatline']
+ version_added = '2.2'
special_forms = ('let',)
@@ -874,14 +863,13 @@ class FlatlineLexer(RegexLexer):
class SnowballLexer(ExtendedRegexLexer):
"""
Lexer for Snowball source code.
-
- .. versionadded:: 2.2
"""
name = 'Snowball'
url = 'https://snowballstem.org/'
aliases = ['snowball']
filenames = ['*.sbl']
+ version_added = '2.2'
_ws = r'\n\r\t '
@@ -897,8 +885,8 @@ class SnowballLexer(ExtendedRegexLexer):
def callback(lexer, match, ctx):
s = match.start()
text = match.group()
- string = re.compile(r'([^%s]*)(.)' % re.escape(lexer._start)).match
- escape = re.compile(r'([^%s]*)(.)' % re.escape(lexer._end)).match
+ string = re.compile(rf'([^{re.escape(lexer._start)}]*)(.)').match
+ escape = re.compile(rf'([^{re.escape(lexer._end)}]*)(.)').match
pos = 0
do_string = do_string_first
while pos < len(text):
@@ -934,7 +922,7 @@ class SnowballLexer(ExtendedRegexLexer):
include('root1'),
],
'root1': [
- (r'[%s]+' % _ws, Whitespace),
+ (rf'[{_ws}]+', Whitespace),
(r'\d+', Number.Integer),
(r"'", String.Single, 'string'),
(r'[()]', Punctuation),
@@ -957,9 +945,9 @@ class SnowballLexer(ExtendedRegexLexer):
(words(('size', 'limit', 'cursor', 'maxint', 'minint'),
suffix=r'\b'),
Name.Builtin),
- (r'(stringdef\b)([%s]*)([^%s]+)' % (_ws, _ws),
+ (rf'(stringdef\b)([{_ws}]*)([^{_ws}]+)',
bygroups(Keyword.Reserved, Whitespace, String.Escape)),
- (r'(stringescapes\b)([%s]*)(.)([%s]*)(.)' % (_ws, _ws),
+ (rf'(stringescapes\b)([{_ws}]*)(.)([{_ws}]*)(.)',
_stringescapes),
(r'[A-Za-z]\w*', Name),
],
diff --git a/contrib/python/Pygments/py3/pygments/lexers/dylan.py b/contrib/python/Pygments/py3/pygments/lexers/dylan.py
index f5aa73ab77..8cd0d624df 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/dylan.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/dylan.py
@@ -4,7 +4,7 @@
Lexers for the Dylan language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,8 +21,6 @@ __all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
class DylanLexer(RegexLexer):
"""
For the Dylan language.
-
- .. versionadded:: 0.7
"""
name = 'Dylan'
@@ -30,6 +28,7 @@ class DylanLexer(RegexLexer):
aliases = ['dylan']
filenames = ['*.dylan', '*.dyl', '*.intr']
mimetypes = ['text/x-dylan']
+ version_added = '0.7'
flags = re.IGNORECASE
@@ -214,15 +213,14 @@ class DylanLexer(RegexLexer):
class DylanLidLexer(RegexLexer):
"""
For Dylan LID (Library Interchange Definition) files.
-
- .. versionadded:: 1.6
"""
name = 'DylanLID'
aliases = ['dylan-lid', 'lid']
filenames = ['*.lid', '*.hdp']
mimetypes = ['text/x-dylan-lid']
-
+ url = 'http://www.opendylan.org/'
+ version_added = '1.6'
flags = re.IGNORECASE
tokens = {
@@ -245,13 +243,13 @@ class DylanConsoleLexer(Lexer):
For Dylan interactive console output.
This is based on a copy of the RubyConsoleLexer.
-
- .. versionadded:: 1.6
"""
name = 'Dylan session'
aliases = ['dylan-console', 'dylan-repl']
filenames = ['*.dylan-console']
mimetypes = ['text/x-dylan-console']
+ url = 'http://www.opendylan.org/'
+ version_added = '1.6'
_example = 'dylan-console/console'
_prompt_re = re.compile(r'\?| ')
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ecl.py b/contrib/python/Pygments/py3/pygments/lexers/ecl.py
index 7e93e0cb52..e7b4aaa7d0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ecl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ecl.py
@@ -4,7 +4,7 @@
Lexers for the ECL language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,8 +20,6 @@ __all__ = ['ECLLexer']
class ECLLexer(RegexLexer):
"""
Lexer for the declarative big-data ECL language.
-
- .. versionadded:: 1.5
"""
name = 'ECL'
@@ -29,6 +27,7 @@ class ECLLexer(RegexLexer):
aliases = ['ecl']
filenames = ['*.ecl']
mimetypes = ['application/x-ecl']
+ version_added = '1.5'
flags = re.IGNORECASE | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/eiffel.py b/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
index 8a5a559501..a3adb3e62f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/eiffel.py
@@ -4,7 +4,7 @@
Lexer for the Eiffel language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['EiffelLexer']
class EiffelLexer(RegexLexer):
"""
For Eiffel source code.
-
- .. versionadded:: 2.0
"""
name = 'Eiffel'
url = 'https://www.eiffel.com'
aliases = ['eiffel']
filenames = ['*.e']
mimetypes = ['text/x-eiffel']
+ version_added = '2.0'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/elm.py b/contrib/python/Pygments/py3/pygments/lexers/elm.py
index 0e7ac3fabc..0a8939ba2e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/elm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/elm.py
@@ -4,7 +4,7 @@
Lexer for the Elm programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,8 +18,6 @@ __all__ = ['ElmLexer']
class ElmLexer(RegexLexer):
"""
For Elm source code.
-
- .. versionadded:: 2.1
"""
name = 'Elm'
@@ -27,6 +25,7 @@ class ElmLexer(RegexLexer):
aliases = ['elm']
filenames = ['*.elm']
mimetypes = ['text/x-elm']
+ version_added = '2.1'
validName = r'[a-z_][a-zA-Z0-9_\']*'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/elpi.py b/contrib/python/Pygments/py3/pygments/lexers/elpi.py
index 6fc8b502d6..5efaffa874 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/elpi.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/elpi.py
@@ -4,7 +4,7 @@
Lexer for the `Elpi <http://github.com/LPCIC/elpi>`_ programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,8 +18,6 @@ __all__ = ['ElpiLexer']
class ElpiLexer(RegexLexer):
"""
Lexer for the Elpi programming language.
-
- .. versionadded:: 2.11
"""
name = 'Elpi'
@@ -27,19 +25,20 @@ class ElpiLexer(RegexLexer):
aliases = ['elpi']
filenames = ['*.elpi']
mimetypes = ['text/x-elpi']
+ version_added = '2.11'
lcase_re = r"[a-z]"
ucase_re = r"[A-Z]"
digit_re = r"[0-9]"
schar2_re = r"([+*^?/<>`'@#~=&!])"
- schar_re = r"({}|-|\$|_)".format(schar2_re)
- idchar_re = r"({}|{}|{}|{})".format(lcase_re,ucase_re,digit_re,schar_re)
- idcharstarns_re = r"({}*(\.({}|{}){}*)*)".format(idchar_re, lcase_re, ucase_re, idchar_re)
- symbchar_re = r"({}|{}|{}|{}|:)".format(lcase_re, ucase_re, digit_re, schar_re)
- constant_re = r"({}{}*|{}{}|{}{}*|_{}+)".format(ucase_re, idchar_re, lcase_re, idcharstarns_re, schar2_re, symbchar_re, idchar_re)
+ schar_re = rf"({schar2_re}|-|\$|_)"
+ idchar_re = rf"({lcase_re}|{ucase_re}|{digit_re}|{schar_re})"
+ idcharstarns_re = rf"({idchar_re}*(\.({lcase_re}|{ucase_re}){idchar_re}*)*)"
+ symbchar_re = rf"({lcase_re}|{ucase_re}|{digit_re}|{schar_re}|:)"
+ constant_re = rf"({ucase_re}{idchar_re}*|{lcase_re}{idcharstarns_re}|{schar2_re}{symbchar_re}*|_{idchar_re}+)"
symbol_re = r"(,|<=>|->|:-|;|\?-|->|&|=>|\bas\b|\buvar\b|<|=<|=|==|>=|>|\bi<|\bi=<|\bi>=|\bi>|\bis\b|\br<|\br=<|\br>=|\br>|\bs<|\bs=<|\bs>=|\bs>|@|::|\[\]|`->|`:|`:=|\^|-|\+|\bi-|\bi\+|r-|r\+|/|\*|\bdiv\b|\bi\*|\bmod\b|\br\*|~|\bi~|\br~)"
- escape_re = r"\(({}|{})\)".format(constant_re,symbol_re)
- const_sym_re = r"({}|{}|{})".format(constant_re,symbol_re,escape_re)
+ escape_re = rf"\(({constant_re}|{symbol_re})\)"
+ const_sym_re = rf"({constant_re}|{symbol_re}|{escape_re})"
tokens = {
'root': [
@@ -54,35 +53,35 @@ class ElpiLexer(RegexLexer):
'elpi-string'),
(r"(:index)(\s*\()", bygroups(Keyword.Mode, Text.Whitespace),
'elpi-indexing-expr'),
- (r"\b(external pred|pred)(\s+)({})".format(const_sym_re),
+ (rf"\b(external pred|pred)(\s+)({const_sym_re})",
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-pred-item'),
- (r"\b(external type|type)(\s+)(({}(,\s*)?)+)".format(const_sym_re),
+ (rf"\b(external type|type)(\s+)(({const_sym_re}(,\s*)?)+)",
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-type'),
- (r"\b(kind)(\s+)(({}|,)+)".format(const_sym_re),
+ (rf"\b(kind)(\s+)(({const_sym_re}|,)+)",
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-type'),
- (r"\b(typeabbrev)(\s+)({})".format(const_sym_re),
+ (rf"\b(typeabbrev)(\s+)({const_sym_re})",
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-type'),
(r"\b(accumulate)(\s+)(\")",
bygroups(Keyword.Declaration, Text.Whitespace, String.Double),
'elpi-string'),
- (r"\b(accumulate|namespace|local)(\s+)({})".format(constant_re),
+ (rf"\b(accumulate|namespace|local)(\s+)({constant_re})",
bygroups(Keyword.Declaration, Text.Whitespace, Text)),
- (r"\b(shorten)(\s+)({}\.)".format(constant_re),
+ (rf"\b(shorten)(\s+)({constant_re}\.)",
bygroups(Keyword.Declaration, Text.Whitespace, Text)),
(r"\b(pi|sigma)(\s+)([a-zA-Z][A-Za-z0-9_ ]*)(\\)",
bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable, Text)),
- (r"\b(constraint)(\s+)(({}(\s+)?)+)".format(const_sym_re),
+ (rf"\b(constraint)(\s+)(({const_sym_re}(\s+)?)+)",
bygroups(Keyword.Declaration, Text.Whitespace, Name.Function),
'elpi-chr-rule-start'),
- (r"(?=[A-Z_]){}".format(constant_re), Name.Variable),
- (r"(?=[a-z_]){}\\".format(constant_re), Name.Variable),
+ (rf"(?=[A-Z_]){constant_re}", Name.Variable),
+ (rf"(?=[a-z_]){constant_re}\\", Name.Variable),
(r"_", Name.Variable),
- (r"({}|!|=>|;)".format(symbol_re), Keyword.Declaration),
+ (rf"({symbol_re}|!|=>|;)", Keyword.Declaration),
(constant_re, Text),
(r"\[|\]|\||=>", Keyword.Declaration),
(r'"', String.Double, 'elpi-string'),
@@ -155,7 +154,7 @@ class ElpiLexer(RegexLexer):
'elpi-quote': [
(r'\{\{', Punctuation, '#push'),
(r'\}\}', Punctuation, '#pop'),
- (r"(lp:)((?=[A-Z_]){})".format(constant_re), bygroups(Keyword, Name.Variable)),
+ (rf"(lp:)((?=[A-Z_]){constant_re})", bygroups(Keyword, Name.Variable)),
(r"[^l\}]+", Text),
(r"l|\}", Text),
],
diff --git a/contrib/python/Pygments/py3/pygments/lexers/email.py b/contrib/python/Pygments/py3/pygments/lexers/email.py
index 36a8a95d9f..c0726e8b81 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/email.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/email.py
@@ -4,7 +4,7 @@
Lexer for the raw E-mail.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -119,14 +119,14 @@ class EmailLexer(DelegatingLexer):
`highlight-X-header`
Highlight the fields of ``X-`` user-defined email header. (default:
``False``).
-
- .. versionadded:: 2.5
"""
name = "E-mail"
aliases = ["email", "eml"]
filenames = ["*.eml"]
mimetypes = ["message/rfc822"]
+ url = "https://en.wikipedia.org/wiki/Email#Message_format"
+ version_added = '2.5'
def __init__(self, **options):
super().__init__(EmailHeaderLexer, MIMELexer, Comment, **options)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/erlang.py b/contrib/python/Pygments/py3/pygments/lexers/erlang.py
index e484664a17..1190182324 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/erlang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/erlang.py
@@ -4,7 +4,7 @@
Lexers for Erlang.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,8 +22,6 @@ __all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
class ErlangLexer(RegexLexer):
"""
For the Erlang functional programming language.
-
- .. versionadded:: 0.9
"""
name = 'Erlang'
@@ -31,6 +29,7 @@ class ErlangLexer(RegexLexer):
aliases = ['erlang']
filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
mimetypes = ['text/x-erlang']
+ version_added = '0.9'
keywords = (
'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
@@ -150,13 +149,13 @@ class ErlangLexer(RegexLexer):
class ErlangShellLexer(Lexer):
"""
Shell sessions in erl (for Erlang code).
-
- .. versionadded:: 1.1
"""
name = 'Erlang erl session'
aliases = ['erl']
filenames = ['*.erl-sh']
mimetypes = ['text/x-erl-shellsession']
+ url = 'https://www.erlang.org/'
+ version_added = '1.1'
_prompt_re = re.compile(r'(?:\([\w@_.]+\))?\d+>(?=\s|\Z)')
@@ -191,10 +190,10 @@ class ErlangShellLexer(Lexer):
def gen_elixir_string_rules(name, symbol, token):
states = {}
states['string_' + name] = [
- (r'[^#%s\\]+' % (symbol,), token),
+ (rf'[^#{symbol}\\]+', token),
include('escapes'),
(r'\\.', token),
- (r'(%s)' % (symbol,), bygroups(token), "#pop"),
+ (rf'({symbol})', bygroups(token), "#pop"),
include('interpol')
]
return states
@@ -203,32 +202,31 @@ def gen_elixir_string_rules(name, symbol, token):
def gen_elixir_sigstr_rules(term, term_class, token, interpol=True):
if interpol:
return [
- (r'[^#%s\\]+' % (term_class,), token),
+ (rf'[^#{term_class}\\]+', token),
include('escapes'),
(r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ (rf'{term}[a-zA-Z]*', token, '#pop'),
include('interpol')
]
else:
return [
- (r'[^%s\\]+' % (term_class,), token),
+ (rf'[^{term_class}\\]+', token),
(r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ (rf'{term}[a-zA-Z]*', token, '#pop'),
]
class ElixirLexer(RegexLexer):
"""
For the Elixir language.
-
- .. versionadded:: 1.5
"""
name = 'Elixir'
- url = 'http://elixir-lang.org'
+ url = 'https://elixir-lang.org'
aliases = ['elixir', 'ex', 'exs']
filenames = ['*.ex', '*.eex', '*.exs', '*.leex']
mimetypes = ['text/x-elixir']
+ version_added = '1.5'
KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
@@ -303,9 +301,9 @@ class ElixirLexer(RegexLexer):
for term, name in triquotes:
states['sigils'] += [
- (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (rf'(~[a-z])({term})', bygroups(token, String.Heredoc),
(name + '-end', name + '-intp')),
- (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (rf'(~[A-Z])({term})', bygroups(token, String.Heredoc),
(name + '-end', name + '-no-intp')),
]
@@ -337,12 +335,12 @@ class ElixirLexer(RegexLexer):
op3_re = "|".join(re.escape(s) for s in OPERATORS3)
op2_re = "|".join(re.escape(s) for s in OPERATORS2)
op1_re = "|".join(re.escape(s) for s in OPERATORS1)
- ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
+ ops_re = rf'(?:{op3_re}|{op2_re}|{op1_re})'
punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
alnum = r'\w'
- name_re = r'(?:\.\.\.|[a-z_]%s*[!?]?)' % alnum
- modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
- complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
+ name_re = rf'(?:\.\.\.|[a-z_]{alnum}*[!?]?)'
+ modname_re = rf'[A-Z]{alnum}*(?:\.[A-Z]{alnum}*)*'
+ complex_name_re = rf'(?:{name_re}|{modname_re}|{ops_re})'
special_atom_re = r'(?:\.\.\.|<<>>|%\{\}|%|\{\})'
long_hex_char_re = r'(\\x\{)([\da-fA-F]+)(\})'
@@ -375,7 +373,7 @@ class ElixirLexer(RegexLexer):
(r":'", String.Symbol, 'string_single_atom'),
# [keywords: ...]
- (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
+ (rf'({special_atom_re}|{complex_name_re})(:)(?=\s|\n)',
bygroups(String.Symbol, Punctuation)),
# @attributes
@@ -383,7 +381,7 @@ class ElixirLexer(RegexLexer):
# identifiers
(name_re, Name),
- (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
+ (rf'(%?)({modname_re})', bygroups(Punctuation, Name.Class)),
# operators and punctuation
(op3_re, Operator),
@@ -484,13 +482,13 @@ class ElixirConsoleLexer(Lexer):
[1,2,3]
iex> length [head | tail]
3
-
- .. versionadded:: 1.5
"""
name = 'Elixir iex session'
aliases = ['iex']
mimetypes = ['text/x-elixir-shellsession']
+ url = 'https://elixir-lang.org'
+ version_added = '1.5'
_prompt_re = re.compile(r'(iex|\.{3})((?:\([\w@_.]+\))?\d+|\(\d+\))?> ')
diff --git a/contrib/python/Pygments/py3/pygments/lexers/esoteric.py b/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
index ccc280541f..e9fab0e46f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/esoteric.py
@@ -4,7 +4,7 @@
Lexers for esoteric languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,6 +26,7 @@ class BrainfuckLexer(RegexLexer):
aliases = ['brainfuck', 'bf']
filenames = ['*.bf', '*.b']
mimetypes = ['application/x-brainfuck']
+ version_added = ''
tokens = {
'common': [
@@ -76,14 +77,13 @@ class BrainfuckLexer(RegexLexer):
class BefungeLexer(RegexLexer):
"""
Lexer for the esoteric Befunge language.
-
- .. versionadded:: 0.7
"""
name = 'Befunge'
url = 'http://en.wikipedia.org/wiki/Befunge'
aliases = ['befunge']
filenames = ['*.befunge']
mimetypes = ['application/x-befunge']
+ version_added = '0.7'
tokens = {
'root': [
@@ -106,18 +106,17 @@ class BefungeLexer(RegexLexer):
class CAmkESLexer(RegexLexer):
"""
Basic lexer for the input language for the CAmkES component platform.
-
- .. versionadded:: 2.1
"""
name = 'CAmkES'
url = 'https://sel4.systems/CAmkES/'
aliases = ['camkes', 'idl4']
filenames = ['*.camkes', '*.idl4']
+ version_added = '2.1'
tokens = {
'root': [
# C pre-processor directive
- (r'^(\s*)(#.*)(\n)', bygroups(Whitespace, Comment.Preproc,
+ (r'^(\s*)(#.*)(\n)', bygroups(Whitespace, Comment.Preproc,
Whitespace)),
# Whitespace, comments
@@ -181,13 +180,12 @@ class CapDLLexer(RegexLexer):
shadow type names, but these instances are currently incorrectly
highlighted as types. Supporting this would need a stateful lexer that is
considered unnecessarily complex for now.
-
- .. versionadded:: 2.2
"""
name = 'CapDL'
url = 'https://ssrg.nicta.com.au/publications/nictaabstracts/Kuz_KLW_10.abstract.pml'
aliases = ['capdl']
filenames = ['*.cdl']
+ version_added = '2.2'
tokens = {
'root': [
@@ -235,12 +233,12 @@ class RedcodeLexer(RegexLexer):
"""
A simple Redcode lexer based on ICWS'94.
Contributed by Adam Blinkinsop <blinks@acm.org>.
-
- .. versionadded:: 0.8
"""
name = 'Redcode'
aliases = ['redcode']
filenames = ['*.cw']
+ url = 'https://en.wikipedia.org/wiki/Core_War'
+ version_added = '0.8'
opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
@@ -254,8 +252,8 @@ class RedcodeLexer(RegexLexer):
(r';.*$', Comment.Single),
# Lexemes:
# Identifiers
- (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
- (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
+ (r'\b({})\b'.format('|'.join(opcodes)), Name.Function),
+ (r'\b({})\b'.format('|'.join(modifiers)), Name.Decorator),
(r'[A-Za-z_]\w+', Name),
# Operators
(r'[-+*/%]', Operator),
@@ -276,6 +274,7 @@ class AheuiLexer(RegexLexer):
url = 'http://aheui.github.io/'
aliases = ['aheui']
filenames = ['*.aheui']
+ version_added = ''
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ezhil.py b/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
index 49478ea00d..697d90c697 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ezhil.py
@@ -4,7 +4,7 @@
Pygments lexers for Ezhil language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,13 @@ __all__ = ['EzhilLexer']
class EzhilLexer(RegexLexer):
"""
Lexer for Ezhil, a Tamil script-based programming language.
-
- .. versionadded:: 2.1
"""
name = 'Ezhil'
url = 'http://ezhillang.org'
aliases = ['ezhil']
filenames = ['*.n']
mimetypes = ['text/x-ezhil']
+ version_added = '2.1'
# Refer to tamil.utf8.tamil_letters from open-tamil for a stricter version of this.
# This much simpler version is close enough, and includes combining marks.
_TALETTERS = '[a-zA-Z_]|[\u0b80-\u0bff]'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/factor.py b/contrib/python/Pygments/py3/pygments/lexers/factor.py
index aa0cd2d856..297433ce46 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/factor.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/factor.py
@@ -4,7 +4,7 @@
Lexers for the Factor language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['FactorLexer']
class FactorLexer(RegexLexer):
"""
Lexer for the Factor language.
-
- .. versionadded:: 1.4
"""
name = 'Factor'
url = 'http://factorcode.org'
aliases = ['factor']
filenames = ['*.factor']
mimetypes = ['text/x-factor']
+ version_added = '1.4'
builtin_kernel = words((
'-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/fantom.py b/contrib/python/Pygments/py3/pygments/lexers/fantom.py
index 7182d8184a..a5ca59c3d4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/fantom.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/fantom.py
@@ -4,7 +4,7 @@
Lexer for the Fantom language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,13 +21,13 @@ __all__ = ['FantomLexer']
class FantomLexer(RegexLexer):
"""
For Fantom source code.
-
- .. versionadded:: 1.5
"""
name = 'Fantom'
aliases = ['fan']
filenames = ['*.fan']
mimetypes = ['application/x-fantom']
+ url = 'https://www.fantom.org'
+ version_added = '1.5'
# often used regexes
def s(str):
diff --git a/contrib/python/Pygments/py3/pygments/lexers/felix.py b/contrib/python/Pygments/py3/pygments/lexers/felix.py
index 5e34f056ef..681070e87f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/felix.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/felix.py
@@ -4,7 +4,7 @@
Lexer for the Felix language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,8 +19,6 @@ __all__ = ['FelixLexer']
class FelixLexer(RegexLexer):
"""
For Felix source code.
-
- .. versionadded:: 1.2
"""
name = 'Felix'
@@ -28,6 +26,7 @@ class FelixLexer(RegexLexer):
aliases = ['felix', 'flx']
filenames = ['*.flx', '*.flxh']
mimetypes = ['text/x-felix']
+ version_added = '1.2'
preproc = (
'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
@@ -128,13 +127,13 @@ class FelixLexer(RegexLexer):
# IntegerLiteral
# -- Binary
- (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
+ (rf'0[Bb][01_]+{decimal_suffixes}', Number.Bin),
# -- Octal
- (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
+ (rf'0[0-7_]+{decimal_suffixes}', Number.Oct),
# -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
+ (rf'0[xX][0-9a-fA-F_]+{decimal_suffixes}', Number.Hex),
# -- Decimal
- (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
+ (rf'(0|[1-9][0-9_]*){decimal_suffixes}', Number.Integer),
# Strings
('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
@@ -153,8 +152,8 @@ class FelixLexer(RegexLexer):
(r'[a-zA-Z_]\w*:>', Name.Label),
# Identifiers
- (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
- (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
+ (r'({})\b'.format('|'.join(name_builtins)), Name.Builtin),
+ (r'({})\b'.format('|'.join(name_pseudo)), Name.Builtin.Pseudo),
(r'[a-zA-Z_]\w*', Name),
],
'whitespace': [
@@ -169,7 +168,7 @@ class FelixLexer(RegexLexer):
(r'#', Comment.Preproc, 'macro'),
],
'operators': [
- (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
+ (r'({})\b'.format('|'.join(operator_words)), Operator.Word),
(r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
],
'comment': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/fift.py b/contrib/python/Pygments/py3/pygments/lexers/fift.py
index 027175b145..02fdf4812b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/fift.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/fift.py
@@ -4,7 +4,7 @@
Lexers for fift.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,6 +23,7 @@ class FiftLexer(RegexLexer):
aliases = ['fift', 'fif']
filenames = ['*.fif']
url = 'https://ton-blockchain.github.io/docs/fiftbase.pdf'
+ version_added = ''
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/floscript.py b/contrib/python/Pygments/py3/pygments/lexers/floscript.py
index 6cc2971a24..864560914c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/floscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/floscript.py
@@ -4,7 +4,7 @@
Lexer for FloScript
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['FloScriptLexer']
class FloScriptLexer(RegexLexer):
"""
For FloScript configuration language source code.
-
- .. versionadded:: 2.4
"""
name = 'FloScript'
url = 'https://github.com/ioflo/ioflo'
aliases = ['floscript', 'flo']
filenames = ['*.flo']
+ version_added = '2.4'
def innerstring_rules(ttype):
return [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/forth.py b/contrib/python/Pygments/py3/pygments/lexers/forth.py
index 7feeaef53f..1cf7072184 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/forth.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/forth.py
@@ -4,7 +4,7 @@
Lexer for the Forth language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,14 +21,13 @@ __all__ = ['ForthLexer']
class ForthLexer(RegexLexer):
"""
Lexer for Forth files.
-
- .. versionadded:: 2.2
"""
name = 'Forth'
url = 'https://www.forth.com/forth/'
aliases = ['forth']
filenames = ['*.frt', '*.fs']
mimetypes = ['application/x-forth']
+ version_added = '2.2'
flags = re.IGNORECASE | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/fortran.py b/contrib/python/Pygments/py3/pygments/lexers/fortran.py
index cee254f748..1a2b7f9506 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/fortran.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/fortran.py
@@ -4,7 +4,7 @@
Lexers for Fortran languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,13 @@ __all__ = ['FortranLexer', 'FortranFixedLexer']
class FortranLexer(RegexLexer):
"""
Lexer for FORTRAN 90 code.
-
- .. versionadded:: 0.10
"""
name = 'Fortran'
url = 'https://fortran-lang.org/'
aliases = ['fortran', 'f90']
filenames = ['*.f03', '*.f90', '*.F03', '*.F90']
mimetypes = ['text/x-fortran']
+ version_added = '0.10'
flags = re.IGNORECASE | re.MULTILINE
# Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
@@ -173,12 +172,12 @@ class FortranLexer(RegexLexer):
class FortranFixedLexer(RegexLexer):
"""
Lexer for fixed format Fortran.
-
- .. versionadded:: 2.1
"""
name = 'FortranFixed'
aliases = ['fortranfixed']
filenames = ['*.f', '*.F']
+ url = 'https://fortran-lang.org/'
+ version_added = '2.1'
flags = re.IGNORECASE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/foxpro.py b/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
index 9d8d951c58..dd6fd18218 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/foxpro.py
@@ -4,7 +4,7 @@
Simple lexer for Microsoft Visual FoxPro source code.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,14 +22,14 @@ class FoxProLexer(RegexLexer):
FoxPro syntax allows to shorten all keywords and function names
to 4 characters. Shortened forms are not recognized by this lexer.
-
- .. versionadded:: 1.6
"""
name = 'FoxPro'
aliases = ['foxpro', 'vfp', 'clipper', 'xbase']
filenames = ['*.PRG', '*.prg']
+ version_added = '1.6'
mimetype = []
+ url = 'https://learn.microsoft.com/en-us/previous-versions/visualstudio/foxpro'
flags = re.IGNORECASE | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/freefem.py b/contrib/python/Pygments/py3/pygments/lexers/freefem.py
index eab6cbfffa..ce8e254535 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/freefem.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/freefem.py
@@ -4,7 +4,7 @@
Lexer for FreeFem++ language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,8 +21,6 @@ class FreeFemLexer(CppLexer):
This is an extension of the CppLexer, as the FreeFem Language is a superset
of C++.
-
- .. versionadded:: 2.4
"""
name = 'Freefem'
@@ -30,6 +28,7 @@ class FreeFemLexer(CppLexer):
aliases = ['freefem']
filenames = ['*.edp']
mimetypes = ['text/x-freefem']
+ version_added = '2.4'
# Language operators
operators = {'+', '-', '*', '.*', '/', './', '%', '^', '^-1', ':', '\''}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/func.py b/contrib/python/Pygments/py3/pygments/lexers/func.py
index 871f040581..3730569576 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/func.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/func.py
@@ -4,7 +4,7 @@
Lexers for FunC.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,6 +23,8 @@ class FuncLexer(RegexLexer):
name = 'FunC'
aliases = ['func', 'fc']
filenames = ['*.fc', '*.func']
+ url = 'https://docs.ton.org/develop/func/overview'
+ version_added = ''
# 1. Does not start from "
# 2. Can start from ` and end with `, containing any character
diff --git a/contrib/python/Pygments/py3/pygments/lexers/functional.py b/contrib/python/Pygments/py3/pygments/lexers/functional.py
index 6189dd2cc8..1f51ac9e08 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/functional.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/functional.py
@@ -4,10 +4,11 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+# ruff: noqa: F401
from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
NewLispLexer, ShenLexer
from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
diff --git a/contrib/python/Pygments/py3/pygments/lexers/futhark.py b/contrib/python/Pygments/py3/pygments/lexers/futhark.py
index b0efa88afd..2b5bb59015 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/futhark.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/futhark.py
@@ -4,7 +4,7 @@
Lexer for the Futhark language
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,13 @@ __all__ = ['FutharkLexer']
class FutharkLexer(RegexLexer):
"""
A Futhark lexer
-
- .. versionadded:: 2.8
"""
name = 'Futhark'
url = 'https://futhark-lang.org/'
aliases = ['futhark']
filenames = ['*.fut']
mimetypes = ['text/x-futhark']
+ version_added = '2.8'
num_types = ('i8', 'i16', 'i32', 'i64', 'u8', 'u16', 'u32', 'u64', 'f32', 'f64')
@@ -42,7 +41,7 @@ class FutharkLexer(RegexLexer):
'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
- num_postfix = r'(%s)?' % '|'.join(num_types)
+ num_postfix = r'({})?'.format('|'.join(num_types))
identifier_re = '[a-zA-Z_][a-zA-Z_0-9\']*'
@@ -53,12 +52,12 @@ class FutharkLexer(RegexLexer):
(r'--(.*?)$', Comment.Single),
(r'\s+', Whitespace),
(r'\(\)', Punctuation),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'\b(%s)(?!\')\b' % '|'.join(num_types + other_types), Keyword.Type),
+ (r'\b({})(?!\')\b'.format('|'.join(reserved)), Keyword.Reserved),
+ (r'\b({})(?!\')\b'.format('|'.join(num_types + other_types)), Keyword.Type),
# Identifiers
(r'#\[([a-zA-Z_\(\) ]*)\]', Comment.Preproc),
- (r'[#!]?(%s\.)*%s' % (identifier_re, identifier_re), Name),
+ (rf'[#!]?({identifier_re}\.)*{identifier_re}', Name),
(r'\\', Operator),
(r'[-+/%=!><|&*^][-+/%=!><|&*^.]*', Operator),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py b/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py
index c3a4a58a96..1747d416d2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/gcodelexer.py
@@ -4,7 +4,7 @@
Lexers for the G Code Language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,12 +17,12 @@ __all__ = ['GcodeLexer']
class GcodeLexer(RegexLexer):
"""
For gcode source code.
-
- .. versionadded:: 2.9
"""
name = 'g-code'
aliases = ['gcode']
filenames = ['*.gcode']
+ url = 'https://en.wikipedia.org/wiki/G-code'
+ version_added = '2.9'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/gdscript.py b/contrib/python/Pygments/py3/pygments/lexers/gdscript.py
index 0f4f6d4315..a6b8ad4e1c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/gdscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/gdscript.py
@@ -7,7 +7,7 @@
Modified by Daniel J. Ramirez <djrmuv@gmail.com> based on the original
python.py.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,6 +31,7 @@ class GDScriptLexer(RegexLexer):
aliases = ["gdscript", "gd"]
filenames = ["*.gd"]
mimetypes = ["text/x-gdscript", "application/x-gdscript"]
+ version_added = ''
def innerstring_rules(ttype):
return [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/go.py b/contrib/python/Pygments/py3/pygments/lexers/go.py
index fe4a1846d6..4d8326ad15 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/go.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/go.py
@@ -4,7 +4,7 @@
Lexers for the Google Go language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['GoLexer']
class GoLexer(RegexLexer):
"""
For Go source.
-
- .. versionadded:: 1.2
"""
name = 'Go'
url = 'https://go.dev/'
filenames = ['*.go']
aliases = ['go', 'golang']
mimetypes = ['text/x-gosrc']
+ version_added = '1.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py b/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
index 7927133414..cb2814376c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/grammar_notation.py
@@ -4,7 +4,7 @@
Lexers for grammar notations like BNF.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -39,14 +39,14 @@ class BnfLexer(RegexLexer):
Though these decision making might cause too minimal highlighting
and you might be disappointed, but it is reasonable for us.
-
- .. versionadded:: 2.1
"""
name = 'BNF'
aliases = ['bnf']
filenames = ['*.bnf']
mimetypes = ['text/x-bnf']
+ url = 'https://en.wikipedia.org/wiki/Backus%E2%80%93Naur_form'
+ version_added = '2.1'
tokens = {
'root': [
@@ -68,8 +68,6 @@ class AbnfLexer(RegexLexer):
Lexer for IETF 7405 ABNF.
(Updates `5234 <http://www.ietf.org/rfc/rfc5234.txt>`_) grammars.
-
- .. versionadded:: 2.1
"""
name = 'ABNF'
@@ -77,6 +75,7 @@ class AbnfLexer(RegexLexer):
aliases = ['abnf']
filenames = ['*.abnf']
mimetypes = ['text/x-abnf']
+ version_added = '2.1'
_core_rules = (
'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT',
@@ -133,14 +132,13 @@ class AbnfLexer(RegexLexer):
class JsgfLexer(RegexLexer):
"""
For JSpeech Grammar Format grammars.
-
- .. versionadded:: 2.2
"""
name = 'JSGF'
url = 'https://www.w3.org/TR/jsgf/'
aliases = ['jsgf']
filenames = ['*.jsgf']
mimetypes = ['application/jsgf', 'application/x-jsgf', 'text/jsgf']
+ version_added = '2.2'
tokens = {
'root': [
@@ -224,8 +222,6 @@ class PegLexer(RegexLexer):
* A single `a-z` character immediately before a string, or
multiple `a-z` characters following a string, are part of the
string (e.g., `r"..."` or `"..."ilmsuxa`).
-
- .. versionadded:: 2.6
"""
name = 'PEG'
@@ -233,6 +229,7 @@ class PegLexer(RegexLexer):
aliases = ['peg']
filenames = ['*.peg']
mimetypes = ['text/x-peg']
+ version_added = '2.6'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graph.py b/contrib/python/Pygments/py3/pygments/lexers/graph.py
index 753df361c8..840d5e9e6a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graph.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graph.py
@@ -4,7 +4,7 @@
Lexers for graph query languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,13 +23,12 @@ class CypherLexer(RegexLexer):
For Cypher Query Language
For the Cypher version in Neo4j 3.3
-
- .. versionadded:: 2.0
"""
name = 'Cypher'
url = 'https://neo4j.com/docs/developer-manual/3.3/cypher/'
aliases = ['cypher']
filenames = ['*.cyp', '*.cypher']
+ version_added = '2.0'
flags = re.MULTILINE | re.IGNORECASE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graphics.py b/contrib/python/Pygments/py3/pygments/lexers/graphics.py
index 8c112f5e80..c5a144d0c2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graphics.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graphics.py
@@ -4,7 +4,7 @@
Lexers for computer graphics and plotting related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,13 +20,13 @@ __all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
class GLShaderLexer(RegexLexer):
"""
GLSL (OpenGL Shader) lexer.
-
- .. versionadded:: 1.1
"""
name = 'GLSL'
aliases = ['glsl']
filenames = ['*.vert', '*.frag', '*.geo']
mimetypes = ['text/x-glslsrc']
+ url = 'https://www.khronos.org/api/opengl'
+ version_added = '1.1'
tokens = {
'root': [
@@ -151,13 +151,13 @@ class GLShaderLexer(RegexLexer):
class HLSLShaderLexer(RegexLexer):
"""
HLSL (Microsoft Direct3D Shader) lexer.
-
- .. versionadded:: 2.3
"""
name = 'HLSL'
aliases = ['hlsl']
filenames = ['*.hlsl', '*.hlsli']
mimetypes = ['text/x-hlsl']
+ url = 'https://learn.microsoft.com/en-us/windows/win32/direct3dhlsl/dx-graphics-hlsl'
+ version_added = '2.3'
tokens = {
'root': [
@@ -305,20 +305,19 @@ class HLSLShaderLexer(RegexLexer):
class PostScriptLexer(RegexLexer):
"""
Lexer for PostScript files.
-
- .. versionadded:: 1.4
"""
name = 'PostScript'
url = 'https://en.wikipedia.org/wiki/PostScript'
aliases = ['postscript', 'postscr']
filenames = ['*.ps', '*.eps']
mimetypes = ['application/postscript']
+ version_added = '1.4'
delimiter = r'()<>\[\]{}/%\s'
- delimiter_end = r'(?=[%s])' % delimiter
+ delimiter_end = rf'(?=[{delimiter}])'
- valid_name_chars = r'[^%s]' % delimiter
- valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+ valid_name_chars = rf'[^{delimiter}]'
+ valid_name = rf"{valid_name_chars}+{delimiter_end}"
tokens = {
'root': [
@@ -343,7 +342,7 @@ class PostScriptLexer(RegexLexer):
(r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
# References
- (r'\/%s' % valid_name, Name.Variable),
+ (rf'\/{valid_name}', Name.Variable),
# Names
(valid_name, Name.Function), # Anything else is executed
@@ -400,14 +399,13 @@ class PostScriptLexer(RegexLexer):
class AsymptoteLexer(RegexLexer):
"""
For Asymptote source code.
-
- .. versionadded:: 1.2
"""
name = 'Asymptote'
url = 'http://asymptote.sf.net/'
aliases = ['asymptote', 'asy']
filenames = ['*.asy']
mimetypes = ['text/x-asymptote']
+ version_added = '1.2'
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
@@ -528,8 +526,6 @@ def _shortened_many(*words):
class GnuplotLexer(RegexLexer):
"""
For Gnuplot plotting scripts.
-
- .. versionadded:: 0.11
"""
name = 'Gnuplot'
@@ -537,6 +533,7 @@ class GnuplotLexer(RegexLexer):
aliases = ['gnuplot']
filenames = ['*.plot', '*.plt']
mimetypes = ['text/x-gnuplot']
+ version_added = '0.11'
tokens = {
'root': [
@@ -689,14 +686,13 @@ class GnuplotLexer(RegexLexer):
class PovrayLexer(RegexLexer):
"""
For Persistence of Vision Raytracer files.
-
- .. versionadded:: 0.11
"""
name = 'POVRay'
url = 'http://www.povray.org/'
aliases = ['pov']
filenames = ['*.pov', '*.inc']
mimetypes = ['text/x-povray']
+ version_added = '0.11'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graphql.py b/contrib/python/Pygments/py3/pygments/lexers/graphql.py
index b17e4a6c81..2bcb383e90 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graphql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graphql.py
@@ -8,7 +8,7 @@
More information:
https://graphql.org/
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -57,13 +57,12 @@ KEYWORDS = (
class GraphQLLexer(RegexLexer):
"""
Lexer for GraphQL syntax
-
- .. versionadded:: 2.16
"""
name = "GraphQL"
aliases = ["graphql"]
filenames = ["*.graphql"]
url = "https://graphql.org"
+ version_added = '2.16'
tokens = {
"ignored_tokens": [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/graphviz.py b/contrib/python/Pygments/py3/pygments/lexers/graphviz.py
index 1e4ba02436..f58e1ffaa9 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/graphviz.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/graphviz.py
@@ -4,7 +4,7 @@
Lexer for the DOT language (graphviz).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,13 @@ __all__ = ['GraphvizLexer']
class GraphvizLexer(RegexLexer):
"""
For graphviz DOT graph description language.
-
- .. versionadded:: 2.8
"""
name = 'Graphviz'
url = 'https://www.graphviz.org/doc/info/lang.html'
aliases = ['graphviz', 'dot']
filenames = ['*.gv', '*.dot']
mimetypes = ['text/x-graphviz', 'text/vnd.graphviz']
+ version_added = '2.8'
tokens = {
'root': [
(r'\s+', Whitespace),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/gsql.py b/contrib/python/Pygments/py3/pygments/lexers/gsql.py
index 222ee7029e..5f55af3a25 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/gsql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/gsql.py
@@ -4,7 +4,7 @@
Lexers for TigerGraph GSQL graph query language
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,14 +21,13 @@ class GSQLLexer(RegexLexer):
"""
For GSQL queries (version 3.x).
-
- .. versionadded:: 2.10
"""
name = 'GSQL'
url = 'https://docs.tigergraph.com/dev/gsql-ref'
aliases = ['gsql']
filenames = ['*.gsql']
+ version_added = '2.10'
flags = re.MULTILINE | re.IGNORECASE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/haskell.py b/contrib/python/Pygments/py3/pygments/lexers/haskell.py
index 2c5fa13445..4983abc8c7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/haskell.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/haskell.py
@@ -4,7 +4,7 @@
Lexers for Haskell and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,14 +24,13 @@ __all__ = ['HaskellLexer', 'HspecLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexe
class HaskellLexer(RegexLexer):
"""
A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 0.8
"""
name = 'Haskell'
url = 'https://www.haskell.org/'
aliases = ['haskell', 'hs']
filenames = ['*.hs']
mimetypes = ['text/x-haskell']
+ version_added = '0.8'
reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
'family', 'if', 'in', 'infix[lr]?', 'instance',
@@ -53,7 +52,7 @@ class HaskellLexer(RegexLexer):
(r'\bimport\b', Keyword.Reserved, 'import'),
(r'\bmodule\b', Keyword.Reserved, 'module'),
(r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'\b({})(?!\')\b'.format('|'.join(reserved)), Keyword.Reserved),
(r"'[^\\]'", String.Char), # this has to come before the TH quote
(r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
(r"'?[_" + uni.Ll + r"][\w']*", Name),
@@ -157,14 +156,13 @@ class HaskellLexer(RegexLexer):
class HspecLexer(HaskellLexer):
"""
A Haskell lexer with support for Hspec constructs.
-
- .. versionadded:: 2.4.0
"""
name = 'Hspec'
aliases = ['hspec']
filenames = ['*Spec.hs']
mimetypes = []
+ version_added = '2.4'
tokens = {
'root': [
@@ -181,14 +179,13 @@ class IdrisLexer(RegexLexer):
A lexer for the dependently typed programming language Idris.
Based on the Haskell and Agda Lexer.
-
- .. versionadded:: 2.0
"""
name = 'Idris'
url = 'https://www.idris-lang.org/'
aliases = ['idris', 'idr']
filenames = ['*.idr']
mimetypes = ['text/x-idris']
+ version_added = '2.0'
reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
@@ -211,7 +208,7 @@ class IdrisLexer(RegexLexer):
tokens = {
'root': [
# Comments
- (r'^(\s*)(%%(%s))' % '|'.join(directives),
+ (r'^(\s*)(%({}))'.format('|'.join(directives)),
bygroups(Whitespace, Keyword.Reserved)),
(r'(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$', bygroups(Whitespace, Comment.Single)),
(r'(\s*)(\|{3}.*?)$', bygroups(Whitespace, Comment.Single)),
@@ -220,7 +217,7 @@ class IdrisLexer(RegexLexer):
(r'^(\s*)([^\s(){}]+)(\s*)(:)(\s*)',
bygroups(Whitespace, Name.Function, Whitespace, Operator.Word, Whitespace)),
# Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'\b({})(?!\')\b'.format('|'.join(reserved)), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Whitespace), 'module'),
(r"('')?[A-Z][\w\']*", Keyword.Type),
(r'[a-z][\w\']*', Text),
@@ -292,8 +289,6 @@ class AgdaLexer(RegexLexer):
"""
For the Agda dependently typed functional programming language and
proof assistant.
-
- .. versionadded:: 2.0
"""
name = 'Agda'
@@ -301,6 +296,7 @@ class AgdaLexer(RegexLexer):
aliases = ['agda']
filenames = ['*.agda']
mimetypes = ['text/x-agda']
+ version_added = '2.0'
reserved = (
'abstract', 'codata', 'coinductive', 'constructor', 'data', 'do',
@@ -325,7 +321,7 @@ class AgdaLexer(RegexLexer):
(r'\{!', Comment.Directive, 'hole'),
# Lexemes:
# Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'\b({})(?!\')\b'.format('|'.join(reserved)), Keyword.Reserved),
(r'(import|module)(\s+)', bygroups(Keyword.Reserved, Whitespace),
'module'),
(r'\b(Set|Prop)[\u2080-\u2089]*\b', Keyword.Type),
@@ -365,13 +361,13 @@ class AgdaLexer(RegexLexer):
class CryptolLexer(RegexLexer):
"""
FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 2.0
"""
name = 'Cryptol'
aliases = ['cryptol', 'cry']
filenames = ['*.cry']
mimetypes = ['text/x-cryptol']
+ url = 'https://www.cryptol.net'
+ version_added = '2.0'
reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
@@ -394,7 +390,7 @@ class CryptolLexer(RegexLexer):
(r'\bimport\b', Keyword.Reserved, 'import'),
(r'\bmodule\b', Keyword.Reserved, 'module'),
(r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'\b({})(?!\')\b'.format('|'.join(reserved)), Keyword.Reserved),
(r'^[_a-z][\w\']*', Name.Function),
(r"'?[_a-z][\w']*", Name),
(r"('')?[A-Z][\w\']*", Keyword.Type),
@@ -573,13 +569,13 @@ class LiterateHaskellLexer(LiterateLexer):
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 0.9
"""
name = 'Literate Haskell'
aliases = ['literate-haskell', 'lhaskell', 'lhs']
filenames = ['*.lhs']
mimetypes = ['text/x-literate-haskell']
+ url = 'https://wiki.haskell.org/Literate_programming'
+ version_added = '0.9'
def __init__(self, **options):
hslexer = HaskellLexer(**options)
@@ -596,13 +592,13 @@ class LiterateIdrisLexer(LiterateLexer):
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
"""
name = 'Literate Idris'
aliases = ['literate-idris', 'lidris', 'lidr']
filenames = ['*.lidr']
mimetypes = ['text/x-literate-idris']
+ url = 'https://idris2.readthedocs.io/en/latest/reference/literate.html'
+ version_added = '2.0'
def __init__(self, **options):
hslexer = IdrisLexer(**options)
@@ -619,13 +615,13 @@ class LiterateAgdaLexer(LiterateLexer):
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
"""
name = 'Literate Agda'
aliases = ['literate-agda', 'lagda']
filenames = ['*.lagda']
mimetypes = ['text/x-literate-agda']
+ url = 'https://agda.readthedocs.io/en/latest/tools/literate-programming.html'
+ version_added = '2.0'
def __init__(self, **options):
agdalexer = AgdaLexer(**options)
@@ -642,13 +638,13 @@ class LiterateCryptolLexer(LiterateLexer):
If given, must be ``"bird"`` or ``"latex"``. If not given, the style
is autodetected: if the first non-whitespace character in the source
is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
"""
name = 'Literate Cryptol'
aliases = ['literate-cryptol', 'lcryptol', 'lcry']
filenames = ['*.lcry']
mimetypes = ['text/x-literate-cryptol']
+ url = 'https://www.cryptol.net'
+ version_added = '2.0'
def __init__(self, **options):
crylexer = CryptolLexer(**options)
@@ -658,8 +654,6 @@ class LiterateCryptolLexer(LiterateLexer):
class KokaLexer(RegexLexer):
"""
Lexer for the Koka language.
-
- .. versionadded:: 1.6
"""
name = 'Koka'
@@ -667,6 +661,7 @@ class KokaLexer(RegexLexer):
aliases = ['koka']
filenames = ['*.kk', '*.kki']
mimetypes = ['text/x-koka']
+ version_added = '1.6'
keywords = [
'infix', 'infixr', 'infixl',
@@ -726,7 +721,7 @@ class KokaLexer(RegexLexer):
'alias-type'),
(r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
'struct-type'),
- ((r'(%s)' % '|'.join(typeStartKeywords)) +
+ ((r'({})'.format('|'.join(typeStartKeywords))) +
r'(\s+)([a-z]\w*)?', bygroups(Keyword, Whitespace, tokenTypeDef),
'type'),
@@ -748,9 +743,9 @@ class KokaLexer(RegexLexer):
bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
# keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
- (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
- (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
+ (r'({})'.format('|'.join(typekeywords)) + boundary, Keyword.Type),
+ (r'({})'.format('|'.join(keywords)) + boundary, Keyword),
+ (r'({})'.format('|'.join(builtin)) + boundary, Keyword.Pseudo),
(r'::?|:=|\->|[=.]' + sboundary, Keyword),
# names
@@ -811,8 +806,8 @@ class KokaLexer(RegexLexer):
include('whitespace'),
# keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
- (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
+ (r'({})'.format('|'.join(typekeywords)) + boundary, Keyword),
+ (r'(?=(({})'.format('|'.join(keywords)) + boundary + '))',
Keyword, '#pop'), # need to match because names overlap...
# kinds
diff --git a/contrib/python/Pygments/py3/pygments/lexers/haxe.py b/contrib/python/Pygments/py3/pygments/lexers/haxe.py
index 6e99b10bc9..d9bf654ccf 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/haxe.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/haxe.py
@@ -4,7 +4,7 @@
Lexers for Haxe and related stuff.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,8 +21,6 @@ __all__ = ['HaxeLexer', 'HxmlLexer']
class HaxeLexer(ExtendedRegexLexer):
"""
For Haxe source code.
-
- .. versionadded:: 1.3
"""
name = 'Haxe'
@@ -30,6 +28,7 @@ class HaxeLexer(ExtendedRegexLexer):
aliases = ['haxe', 'hxsl', 'hx']
filenames = ['*.hx', '*.hxsl']
mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
+ version_added = '1.3'
# keywords extracted from lexer.mll in the haxe compiler source
keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
@@ -898,13 +897,12 @@ class HaxeLexer(ExtendedRegexLexer):
class HxmlLexer(RegexLexer):
"""
Lexer for haXe build files.
-
- .. versionadded:: 1.6
"""
name = 'Hxml'
url = 'https://haxe.org/manual/compiler-usage-hxml.html'
aliases = ['haxeml', 'hxml']
filenames = ['*.hxml']
+ version_added = '1.6'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/hdl.py b/contrib/python/Pygments/py3/pygments/lexers/hdl.py
index 319ec93100..57e560f9de 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/hdl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/hdl.py
@@ -4,7 +4,7 @@
Lexers for hardware descriptor languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,13 +20,13 @@ __all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer']
class VerilogLexer(RegexLexer):
"""
For verilog source code with preprocessor directives.
-
- .. versionadded:: 1.4
"""
name = 'verilog'
aliases = ['verilog', 'v']
filenames = ['*.v']
mimetypes = ['text/x-verilog']
+ url = 'https://en.wikipedia.org/wiki/Verilog'
+ version_added = '1.4'
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
@@ -147,13 +147,13 @@ class SystemVerilogLexer(RegexLexer):
"""
Extends verilog lexer to recognise all SystemVerilog keywords from IEEE
1800-2009 standard.
-
- .. versionadded:: 1.5
"""
name = 'systemverilog'
aliases = ['systemverilog', 'sv']
filenames = ['*.sv', '*.svh']
mimetypes = ['text/x-systemverilog']
+ url = 'https://en.wikipedia.org/wiki/SystemVerilog'
+ version_added = '1.5'
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
@@ -373,13 +373,13 @@ class SystemVerilogLexer(RegexLexer):
class VhdlLexer(RegexLexer):
"""
For VHDL source code.
-
- .. versionadded:: 1.5
"""
name = 'vhdl'
aliases = ['vhdl']
filenames = ['*.vhdl', '*.vhd']
mimetypes = ['text/x-vhdl']
+ url = 'https://en.wikipedia.org/wiki/VHDL'
+ version_added = '1.5'
flags = re.MULTILINE | re.IGNORECASE
tokens = {
@@ -387,6 +387,7 @@ class VhdlLexer(RegexLexer):
(r'\s+', Whitespace),
(r'(\\)(\n)', bygroups(String.Escape, Whitespace)), # line continuation
(r'--.*?$', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
(r"'(U|X|0|1|Z|W|L|H|-)'", String.Char),
(r'[~!%^&*+=|?:<>/-]', Operator),
(r"'[a-z_]\w*", Name.Attribute),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/hexdump.py b/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
index 9672fd4dcf..28d3429caf 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/hexdump.py
@@ -4,7 +4,7 @@
Lexers for hexadecimal dumps.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -33,11 +33,11 @@ class HexdumpLexer(RegexLexer):
* ``od -t x1z FILE``
* ``xxd FILE``
* ``DEBUG.EXE FILE.COM`` and entering ``d`` to the prompt.
-
- .. versionadded:: 2.1
"""
name = 'Hexdump'
aliases = ['hexdump']
+ url = 'https://en.wikipedia.org/wiki/Hex_dump'
+ version_added = '2.1'
hd = r'[0-9A-Ha-h]'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/html.py b/contrib/python/Pygments/py3/pygments/lexers/html.py
index 27fb7534cd..c4217a2195 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/html.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/html.py
@@ -4,7 +4,7 @@
Lexers for HTML, XML and related markup.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,6 +36,7 @@ class HtmlLexer(RegexLexer):
aliases = ['html']
filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
mimetypes = ['text/html', 'application/xhtml+xml']
+ version_added = ''
flags = re.IGNORECASE | re.DOTALL
tokens = {
@@ -106,8 +107,6 @@ class HtmlLexer(RegexLexer):
class DtdLexer(RegexLexer):
"""
A lexer for DTDs (Document Type Definitions).
-
- .. versionadded:: 1.5
"""
flags = re.MULTILINE | re.DOTALL
@@ -116,6 +115,8 @@ class DtdLexer(RegexLexer):
aliases = ['dtd']
filenames = ['*.dtd']
mimetypes = ['application/xml-dtd']
+ url = 'https://en.wikipedia.org/wiki/Document_type_definition'
+ version_added = '1.5'
tokens = {
'root': [
@@ -204,6 +205,8 @@ class XmlLexer(RegexLexer):
'*.wsdl', '*.wsf']
mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
'application/rss+xml', 'application/atom+xml']
+ url = 'https://www.w3.org/XML'
+ version_added = ''
tokens = {
'root': [
@@ -238,14 +241,14 @@ class XmlLexer(RegexLexer):
class XsltLexer(XmlLexer):
"""
A lexer for XSLT.
-
- .. versionadded:: 0.10
"""
name = 'XSLT'
aliases = ['xslt']
filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
mimetypes = ['application/xsl+xml', 'application/xslt+xml']
+ url = 'https://www.w3.org/TR/xslt-30'
+ version_added = '0.10'
EXTRA_KEYWORDS = {
'apply-imports', 'apply-templates', 'attribute',
@@ -275,14 +278,14 @@ class XsltLexer(XmlLexer):
class HamlLexer(ExtendedRegexLexer):
"""
For Haml markup.
-
- .. versionadded:: 1.3
"""
name = 'Haml'
aliases = ['haml']
filenames = ['*.haml']
mimetypes = ['text/x-haml']
+ url = 'https://haml.info'
+ version_added = '1.3'
flags = re.IGNORECASE
# Haml can include " |\n" anywhere,
@@ -385,15 +388,15 @@ class HamlLexer(ExtendedRegexLexer):
class ScamlLexer(ExtendedRegexLexer):
"""
- For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
-
- .. versionadded:: 1.4
+ For Scaml markup. Scaml is Haml for Scala.
"""
name = 'Scaml'
aliases = ['scaml']
filenames = ['*.scaml']
mimetypes = ['text/x-scaml']
+ url = 'https://scalate.github.io/scalate/'
+ version_added = '1.4'
flags = re.IGNORECASE
# Scaml does not yet support the " |\n" notation to
@@ -500,14 +503,14 @@ class PugLexer(ExtendedRegexLexer):
For Pug markup.
Pug is a variant of Scaml, see:
http://scalate.fusesource.org/documentation/scaml-reference.html
-
- .. versionadded:: 1.4
"""
name = 'Pug'
aliases = ['pug', 'jade']
filenames = ['*.pug', '*.jade']
mimetypes = ['text/x-pug', 'text/x-jade']
+ url = 'https://pugjs.org'
+ version_added = '1.4'
flags = re.IGNORECASE
_dot = r'.'
@@ -608,13 +611,13 @@ JadeLexer = PugLexer # compat
class UrlEncodedLexer(RegexLexer):
"""
Lexer for urlencoded data
-
- .. versionadded:: 2.16
"""
name = 'urlencoded'
aliases = ['urlencoded']
mimetypes = ['application/x-www-form-urlencoded']
+ url = 'https://en.wikipedia.org/wiki/Percent-encoding'
+ version_added = '2.16'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/idl.py b/contrib/python/Pygments/py3/pygments/lexers/idl.py
index c4d8b3180c..fab3c1721f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/idl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/idl.py
@@ -4,7 +4,7 @@
Lexers for IDL.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,13 @@ __all__ = ['IDLLexer']
class IDLLexer(RegexLexer):
"""
Pygments Lexer for IDL (Interactive Data Language).
-
- .. versionadded:: 1.6
"""
name = 'IDL'
url = 'https://www.l3harrisgeospatial.com/Software-Technology/IDL'
aliases = ['idl']
filenames = ['*.pro']
mimetypes = ['text/idl']
+ version_added = '1.6'
flags = re.IGNORECASE | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/igor.py b/contrib/python/Pygments/py3/pygments/lexers/igor.py
index b25badbb13..84ec32fd28 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/igor.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/igor.py
@@ -4,7 +4,7 @@
Lexers for Igor Pro.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,15 +19,14 @@ __all__ = ['IgorLexer']
class IgorLexer(RegexLexer):
"""
Pygments Lexer for Igor Pro procedure files (.ipf).
- See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
-
- .. versionadded:: 2.0
"""
name = 'Igor'
aliases = ['igor', 'igorpro']
filenames = ['*.ipf']
mimetypes = ['text/ipf']
+ url = 'http://www.wavemetrics.com'
+ version_added = '2.0'
flags = re.IGNORECASE | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/inferno.py b/contrib/python/Pygments/py3/pygments/lexers/inferno.py
index ce1fe036d3..3513df2afc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/inferno.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/inferno.py
@@ -4,7 +4,7 @@
Lexers for Inferno os and all the related stuff.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,14 +24,13 @@ class LimboLexer(RegexLexer):
TODO:
- maybe implement better var declaration highlighting
- some simple syntax error highlighting
-
- .. versionadded:: 2.0
"""
name = 'Limbo'
url = 'http://www.vitanuova.com/inferno/limbo.html'
aliases = ['limbo']
filenames = ['*.b']
mimetypes = ['text/limbo']
+ version_added = '2.0'
tokens = {
'whitespace': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/installers.py b/contrib/python/Pygments/py3/pygments/lexers/installers.py
index dcf8bdfb96..2a9c5daa1b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/installers.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/installers.py
@@ -4,7 +4,7 @@
Lexers for installer/packager DSLs and formats.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,14 +21,13 @@ __all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
class NSISLexer(RegexLexer):
"""
For NSIS scripts.
-
- .. versionadded:: 1.6
"""
name = 'NSIS'
url = 'http://nsis.sourceforge.net/'
aliases = ['nsis', 'nsi', 'nsh']
filenames = ['*.nsi', '*.nsh']
mimetypes = ['text/x-nsis']
+ version_added = '1.6'
flags = re.IGNORECASE
@@ -147,14 +146,14 @@ class NSISLexer(RegexLexer):
class RPMSpecLexer(RegexLexer):
"""
For RPM ``.spec`` files.
-
- .. versionadded:: 1.6
"""
name = 'RPMSpec'
aliases = ['spec']
filenames = ['*.spec']
mimetypes = ['text/x-rpm-spec']
+ url = 'https://rpm-software-management.github.io/rpm/manual/spec.html'
+ version_added = '1.6'
_directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
'post[a-z]*|trigger[a-z]*|files)')
@@ -221,14 +220,14 @@ class RPMSpecLexer(RegexLexer):
class SourcesListLexer(RegexLexer):
"""
Lexer that highlights debian sources.list files.
-
- .. versionadded:: 0.7
"""
name = 'Debian Sourcelist'
aliases = ['debsources', 'sourceslist', 'sources.list']
filenames = ['sources.list']
+ version_added = '0.7'
mimetype = ['application/x-debian-sourceslist']
+ url = 'https://wiki.debian.org/SourcesList'
tokens = {
'root': [
@@ -269,13 +268,12 @@ class SourcesListLexer(RegexLexer):
class DebianControlLexer(RegexLexer):
"""
Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
-
- .. versionadded:: 0.9
"""
name = 'Debian Control file'
url = 'https://www.debian.org/doc/debian-policy/ch-controlfields.html'
aliases = ['debcontrol', 'control']
filenames = ['control']
+ version_added = '0.9'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py b/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
index 4f4d55d6cb..a7bc667565 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/int_fiction.py
@@ -4,7 +4,7 @@
Lexers for interactive fiction languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,14 +22,13 @@ __all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer',
class Inform6Lexer(RegexLexer):
"""
For Inform 6 source code.
-
- .. versionadded:: 2.0
"""
name = 'Inform 6'
url = 'http://inform-fiction.org/'
aliases = ['inform6', 'i6']
filenames = ['*.inf']
+ version_added = '2.0'
flags = re.MULTILINE | re.DOTALL
@@ -45,13 +44,13 @@ class Inform6Lexer(RegexLexer):
tokens = {
'root': [
- (r'\A(!%%[^%s]*[%s])+' % (_newline, _newline), Comment.Preproc,
+ (rf'\A(!%[^{_newline}]*[{_newline}])+', Comment.Preproc,
'directive'),
default('directive')
],
'_whitespace': [
(r'\s+', Text),
- (r'![^%s]*' % _newline, Comment.Single)
+ (rf'![^{_newline}]*', Comment.Single)
],
'default': [
include('_whitespace'),
@@ -65,20 +64,20 @@ class Inform6Lexer(RegexLexer):
'_expression': [
include('_whitespace'),
(r'(?=sp\b)', Text, '#pop'),
- (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
+ (rf'(?=[{_dquote}{_squote}$0-9#a-zA-Z_])', Text,
('#pop', 'value')),
- (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
- (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
+ (rf'\+\+|[{_dash}]{{1,2}}(?!>)|~~?', Operator),
+ (rf'(?=[()\[{_dash},?@{{:;])', Text, '#pop')
],
'expression': [
include('_whitespace'),
(r'\(', Punctuation, ('expression', '_expression')),
(r'\)', Punctuation, '#pop'),
(r'\[', Punctuation, ('#pop', 'statements', 'locals')),
- (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
- (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
+ (rf'>(?=(\s+|(![^{_newline}]*))*[>;])', Punctuation),
+ (rf'\+\+|[{_dash}]{{2}}(?!>)', Operator),
(r',', Punctuation, '_expression'),
- (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
+ (rf'&&?|\|\|?|[=~><]?=|[{_dash}]{{1,2}}>?|\.\.?[&#]?|::|[<>+*/%]',
Operator, '_expression'),
(r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
'_expression'),
@@ -90,7 +89,7 @@ class Inform6Lexer(RegexLexer):
'_assembly-expression': [
(r'\(', Punctuation, ('#push', '_expression')),
(r'[\[\]]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, '_expression'),
+ (rf'[{_dash}]>', Punctuation, '_expression'),
(r'sp\b', Keyword.Pseudo),
(r';', Punctuation, '#pop:3'),
include('expression')
@@ -117,25 +116,25 @@ class Inform6Lexer(RegexLexer):
'value': [
include('_whitespace'),
# Strings
- (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
- (r'([%s])(@\{[0-9a-fA-F]*\})([%s])' % (_squote, _squote),
+ (rf'[{_squote}][^@][{_squote}]', String.Char, '#pop'),
+ (rf'([{_squote}])(@\{{[0-9a-fA-F]*\}})([{_squote}])',
bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'([%s])(@.{2})([%s])' % (_squote, _squote),
+ (rf'([{_squote}])(@.{{2}})([{_squote}])',
bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
+ (rf'[{_squote}]', String.Single, ('#pop', 'dictionary-word')),
+ (rf'[{_dquote}]', String.Double, ('#pop', 'string')),
# Numbers
- (r'\$[<>]?[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
+ (rf'\$[<>]?[+{_dash}][0-9]*\.?[0-9]*([eE][+{_dash}]?[0-9]+)?',
Number.Float, '#pop'),
(r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
(r'\$\$[01]+', Number.Bin, '#pop'),
(r'[0-9]+', Number.Integer, '#pop'),
# Values prefixed by hashes
- (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
- (r'(#g\$)(%s)' % _name,
+ (rf'(##|#a\$)({_name})', bygroups(Operator, Name), '#pop'),
+ (rf'(#g\$)({_name})',
bygroups(Operator, Name.Variable.Global), '#pop'),
(r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
- (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
+ (rf'(#r\$)({_name})', bygroups(Operator, Name.Function), '#pop'),
(r'#', Name.Builtin, ('#pop', 'system-constant')),
# System functions
(words((
@@ -182,32 +181,31 @@ class Inform6Lexer(RegexLexer):
],
# Strings
'dictionary-word': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _squote, String.Single),
- (r'[({]', String.Single),
+ (rf'[~^]+|//[^{_squote}]*', String.Escape),
+ (rf'[^~^/\\@({{{_squote}]+', String.Single),
+ (r'[/({]', String.Single),
(r'@\{[0-9a-fA-F]*\}', String.Escape),
(r'@.{2}', String.Escape),
- (r'[%s]' % _squote, String.Single, '#pop')
+ (rf'[{_squote}]', String.Single, '#pop')
],
'string': [
(r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _dquote, String.Double),
+ (rf'[^~^\\@({{{_dquote}]+', String.Double),
(r'[({]', String.Double),
(r'\\', String.Escape),
- (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
- (_newline, _newline), String.Escape),
- (r'@(\\\s*[%s]\s*)*[({]((\\\s*[%s]\s*)*[0-9a-zA-Z_])*'
- r'(\\\s*[%s]\s*)*[)}]' % (_newline, _newline, _newline),
+ (rf'@(\\\s*[{_newline}]\s*)*@((\\\s*[{_newline}]\s*)*[0-9])*', String.Escape),
+ (rf'@(\\\s*[{_newline}]\s*)*[({{]((\\\s*[{_newline}]\s*)*[0-9a-zA-Z_])*'
+ rf'(\\\s*[{_newline}]\s*)*[)}}]',
String.Escape),
- (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
+ (rf'@(\\\s*[{_newline}]\s*)*.(\\\s*[{_newline}]\s*)*.',
String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
+ (rf'[{_dquote}]', String.Double, '#pop')
],
'plain-string': [
- (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
+ (rf'[^~^\\({{\[\]{_dquote}]+', String.Double),
(r'[~^({\[\]]', String.Double),
(r'\\', String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
+ (rf'[{_dquote}]', String.Double, '#pop')
],
# Names
'_constant': [
@@ -326,7 +324,7 @@ class Inform6Lexer(RegexLexer):
default('#pop')
],
'_object-head': [
- (r'[%s]>' % _dash, Punctuation),
+ (rf'[{_dash}]>', Punctuation),
(r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
include('_global')
],
@@ -356,7 +354,7 @@ class Inform6Lexer(RegexLexer):
include('_whitespace'),
(r';', Punctuation, '#pop'),
(r'[/*]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, 'value'),
+ (rf'[{_dash}]>', Punctuation, 'value'),
(r'(noun|scope)\b', Keyword, '=routine'),
default('_directive-keyword')
],
@@ -376,12 +374,12 @@ class Inform6Lexer(RegexLexer):
# Include, Link, Message
'diagnostic': [
include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
+ (rf'[{_dquote}]', String.Double, ('#pop', 'message-string')),
default(('#pop', 'before-plain-string?', 'directive-keyword?'))
],
'before-plain-string?': [
include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')),
+ (rf'[{_dquote}]', String.Double, ('#pop', 'plain-string')),
default('#pop')
],
'message-string': [
@@ -400,7 +398,7 @@ class Inform6Lexer(RegexLexer):
'terminating', 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
Keyword, '#pop'),
(r'static\b', Keyword),
- (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
+ (rf'[{_dash}]{{1,2}}>|[+=]', Punctuation, '#pop')
],
'_directive-keyword': [
include('_directive-keyword!'),
@@ -413,7 +411,7 @@ class Inform6Lexer(RegexLexer):
'property-keyword*': [
include('_whitespace'),
(words(('additive', 'individual', 'long'),
- suffix=r'\b(?=(\s*|(![^%s]*[%s]))*[_a-zA-Z])' % (_newline, _newline)),
+ suffix=rf'\b(?=(\s*|(![^{_newline}]*[{_newline}]))*[_a-zA-Z])'),
Keyword),
default('#pop')
],
@@ -445,7 +443,7 @@ class Inform6Lexer(RegexLexer):
(r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
(r'objectloop\b', Keyword,
('_keyword-expression', 'variable?', '(?')),
- (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
+ (rf'print(_ret)?\b|(?=[{_dquote}])', Keyword, 'print-list'),
(r'\.', Name.Label, 'label?'),
(r'@', Keyword, 'opcode'),
(r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
@@ -459,9 +457,9 @@ class Inform6Lexer(RegexLexer):
(r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
Keyword, '#pop'),
(r'(a|A|an|address|char|name|number|object|property|string|the|'
- r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
+ rf'The)\b(?=(\s+|(![^{_newline}]*))*\))', Keyword.Pseudo,
'#pop'),
- (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
+ (rf'{_name}(?=(\s+|(![^{_newline}]*))*\))', Name.Function,
'#pop'),
default('#pop')
],
@@ -490,7 +488,8 @@ class Inform6Lexer(RegexLexer):
# Assembly
'opcode': [
include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
+ (rf'[{_dquote}]', String.Double, ('operands', 'plain-string')),
+ (rf'[{_dash}]{{1,2}}>', Punctuation, 'operands'),
(_name, Keyword, 'operands')
],
'operands': [
@@ -543,21 +542,20 @@ class Inform6Lexer(RegexLexer):
class Inform7Lexer(RegexLexer):
"""
For Inform 7 source code.
-
- .. versionadded:: 2.0
"""
name = 'Inform 7'
url = 'http://inform7.com/'
aliases = ['inform7', 'i7']
filenames = ['*.ni', '*.i7x']
+ version_added = '2.0'
flags = re.MULTILINE | re.DOTALL
_dash = Inform6Lexer._dash
_dquote = Inform6Lexer._dquote
_newline = Inform6Lexer._newline
- _start = r'\A|(?<=[%s])' % _newline
+ _start = rf'\A|(?<=[{_newline}])'
# There are three variants of Inform 7, differing in how to
# interpret at signs and braces in I6T. In top-level inclusions, at
@@ -571,47 +569,46 @@ class Inform7Lexer(RegexLexer):
tokens[level] = {
'+i6-root': list(Inform6Lexer.tokens['root']),
'+i6t-root': [ # For Inform6TemplateLexer
- (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
+ (rf'[^{Inform6Lexer._newline}]*', Comment.Preproc,
('directive', '+p'))
],
'root': [
(r'(\|?\s)+', Text),
(r'\[', Comment.Multiline, '+comment'),
- (r'[%s]' % _dquote, Generic.Heading,
+ (rf'[{_dquote}]', Generic.Heading,
('+main', '+titling', '+titling-string')),
default(('+main', '+heading?'))
],
'+titling-string': [
- (r'[^%s]+' % _dquote, Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '#pop')
+ (rf'[^{_dquote}]+', Generic.Heading),
+ (rf'[{_dquote}]', Generic.Heading, '#pop')
],
'+titling': [
(r'\[', Comment.Multiline, '+comment'),
- (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
- (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
+ (rf'[^{_dquote}.;:|{_newline}]+', Generic.Heading),
+ (rf'[{_dquote}]', Generic.Heading, '+titling-string'),
+ (rf'[{_newline}]{{2}}|(?<=[\s{_dquote}])\|[\s{_dquote}]',
Text, ('#pop', '+heading?')),
- (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
- (r'[|%s]' % _newline, Generic.Heading)
+ (rf'[.;:]|(?<=[\s{_dquote}])\|', Text, '#pop'),
+ (rf'[|{_newline}]', Generic.Heading)
],
'+main': [
- (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
- (r'[%s]' % _dquote, String.Double, '+text'),
+ (rf'(?i)[^{_dquote}:a\[(|{_newline}]+', Text),
+ (rf'[{_dquote}]', String.Double, '+text'),
(r':', Text, '+phrase-definition'),
(r'(?i)\bas\b', Text, '+use-option'),
(r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ (rf'(\([{_dash}])(.*?)([{_dash}]\))',
bygroups(Punctuation,
using(this, state=('+i6-root', 'directive'),
i6t='+i6t-not-inline'), Punctuation)),
- (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
- (_start, _dquote, _newline), Text, '+heading?'),
- (r'(?i)[a(|%s]' % _newline, Text)
+ (rf'({_start}|(?<=[\s;:.{_dquote}]))\|\s|[{_newline}]{{2,}}', Text, '+heading?'),
+ (rf'(?i)[a(|{_newline}]', Text)
],
'+phrase-definition': [
(r'\s+', Text),
(r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ (rf'(\([{_dash}])(.*?)([{_dash}]\))',
bygroups(Punctuation,
using(this, state=('+i6-root', 'directive',
'default', 'statements'),
@@ -621,7 +618,7 @@ class Inform7Lexer(RegexLexer):
'+use-option': [
(r'\s+', Text),
(r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ (rf'(\([{_dash}])(.*?)([{_dash}]\))',
bygroups(Punctuation,
using(this, state=('+i6-root', 'directive'),
i6t='+i6t-use-option'), Punctuation), '#pop'),
@@ -633,16 +630,16 @@ class Inform7Lexer(RegexLexer):
(r'\]', Comment.Multiline, '#pop')
],
'+text': [
- (r'[^\[%s]+' % _dquote, String.Double),
+ (rf'[^\[{_dquote}]+', String.Double),
(r'\[.*?\]', String.Interpol),
- (r'[%s]' % _dquote, String.Double, '#pop')
+ (rf'[{_dquote}]', String.Double, '#pop')
],
'+heading?': [
(r'(\|?\s)+', Text),
(r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
- (r'[%s]{1,3}' % _dash, Text),
- (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
+ (rf'[{_dash}]{{4}}\s+', Text, '+documentation-heading'),
+ (rf'[{_dash}]{{1,3}}', Text),
+ (rf'(?i)(volume|book|part|chapter|section)\b[^{_newline}]*',
Generic.Heading, '#pop'),
default('#pop')
],
@@ -655,25 +652,24 @@ class Inform7Lexer(RegexLexer):
'+documentation-heading2': [
(r'\s+', Text),
(r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s' % _dash, Text, '+documentation'),
+ (rf'[{_dash}]{{4}}\s', Text, '+documentation'),
default('#pop:2')
],
'+documentation': [
- (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
- (_start, _newline), Generic.Heading),
- (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
+ (rf'(?i)({_start})\s*(chapter|example)\s*:[^{_newline}]*', Generic.Heading),
+ (rf'(?i)({_start})\s*section\s*:[^{_newline}]*',
Generic.Subheading),
- (r'((%s)\t.*?[%s])+' % (_start, _newline),
+ (rf'(({_start})\t.*?[{_newline}])+',
using(this, state='+main')),
- (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
+ (rf'[^{_newline}\[]+|[{_newline}\[]', Text),
(r'\[', Comment.Multiline, '+comment'),
],
'+i6t-not-inline': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ (rf'({_start})@c( .*?)?([{_newline}]|\Z)',
Comment.Preproc),
- (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ (rf'({_start})@([{_dash}]+|Purpose:)[^{_newline}]*',
Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ (rf'({_start})@p( .*?)?([{_newline}]|\Z)',
Generic.Heading, '+p')
],
'+i6t-use-option': [
@@ -686,12 +682,12 @@ class Inform7Lexer(RegexLexer):
Punctuation))
],
'+i6t': [
- (r'(\{[%s])(![^}]*)(\}?)' % _dash,
+ (rf'(\{{[{_dash}])(![^}}]*)(\}}?)',
bygroups(Punctuation, Comment.Single, Punctuation)),
- (r'(\{[%s])(lines)(:)([^}]*)(\}?)' % _dash,
+ (rf'(\{{[{_dash}])(lines)(:)([^}}]*)(\}}?)',
bygroups(Punctuation, Keyword, Punctuation, Text,
Punctuation), '+lines'),
- (r'(\{[%s])([^:}]*)(:?)([^}]*)(\}?)' % _dash,
+ (rf'(\{{[{_dash}])([^:}}]*)(:?)([^}}]*)(\}}?)',
bygroups(Punctuation, Keyword, Punctuation, Text,
Punctuation)),
(r'(\(\+)(.*?)(\+\)|\Z)',
@@ -700,25 +696,25 @@ class Inform7Lexer(RegexLexer):
],
'+p': [
(r'[^@]+', Comment.Preproc),
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ (rf'({_start})@c( .*?)?([{_newline}]|\Z)',
Comment.Preproc, '#pop'),
- (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ (rf'({_start})@([{_dash}]|Purpose:)', Comment.Preproc),
+ (rf'({_start})@p( .*?)?([{_newline}]|\Z)',
Generic.Heading),
(r'@', Comment.Preproc)
],
'+lines': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ (rf'({_start})@c( .*?)?([{_newline}]|\Z)',
Comment.Preproc),
- (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ (rf'({_start})@([{_dash}]|Purpose:)[^{_newline}]*',
Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ (rf'({_start})@p( .*?)?([{_newline}]|\Z)',
Generic.Heading, '+p'),
- (r'(%s)@\w*[ %s]' % (_start, _newline), Keyword),
- (r'![^%s]*' % _newline, Comment.Single),
- (r'(\{)([%s]endlines)(\})' % _dash,
+ (rf'({_start})@\w*[ {_newline}]', Keyword),
+ (rf'![^{_newline}]*', Comment.Single),
+ (rf'(\{{)([{_dash}]endlines)(\}})',
bygroups(Punctuation, Keyword, Punctuation), '#pop'),
- (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
+ (rf'[^@!{{]+?([{_newline}]|\Z)|.', Text)
]
}
# Inform 7 can include snippets of Inform 6 template language,
@@ -747,13 +743,12 @@ class Inform7Lexer(RegexLexer):
class Inform6TemplateLexer(Inform7Lexer):
"""
For Inform 6 template code.
-
- .. versionadded:: 2.0
"""
name = 'Inform 6 template'
aliases = ['i6t']
filenames = ['*.i6t']
+ version_added = '2.0'
def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
@@ -767,6 +762,8 @@ class Tads3Lexer(RegexLexer):
name = 'TADS 3'
aliases = ['tads3']
filenames = ['*.t']
+ url = 'https://www.tads.org'
+ version_added = ''
flags = re.DOTALL | re.MULTILINE
@@ -778,55 +775,50 @@ class Tads3Lexer(RegexLexer):
_no_quote = r'(?=\s|\\?>)'
_operator = (r'(?:&&|\|\||\+\+|--|\?\?|::|[.,@\[\]~]|'
r'(?:[=+\-*/%!&|^]|<<?|>>?>?)=?)')
- _ws = r'(?:\\|\s|%s|%s)' % (_comment_single, _comment_multiline)
- _ws_pp = r'(?:\\\n|[^\S\n]|%s|%s)' % (_comment_single, _comment_multiline)
+ _ws = rf'(?:\\|\s|{_comment_single}|{_comment_multiline})'
+ _ws_pp = rf'(?:\\\n|[^\S\n]|{_comment_single}|{_comment_multiline})'
def _make_string_state(triple, double, verbatim=None, _escape=_escape):
if verbatim:
- verbatim = ''.join(['(?:%s|%s)' % (re.escape(c.lower()),
- re.escape(c.upper()))
+ verbatim = ''.join([f'(?:{re.escape(c.lower())}|{re.escape(c.upper())})'
for c in verbatim])
char = r'"' if double else r"'"
token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
- prefix = '%s%s' % ('t' if triple else '', 'd' if double else 's')
- tag_state_name = '%sqt' % prefix
+ escaped_quotes = rf'+|{char}(?!{char}{{2}})' if triple else r''
+ prefix = '{}{}'.format('t' if triple else '', 'd' if double else 's')
+ tag_state_name = f'{prefix}qt'
state = []
if triple:
state += [
- (r'%s{3,}' % char, token, '#pop'),
- (r'\\%s+' % char, String.Escape),
+ (rf'{char}{{3,}}', token, '#pop'),
+ (rf'\\{char}+', String.Escape),
(char, token)
]
else:
state.append((char, token, '#pop'))
state += [
include('s/verbatim'),
- (r'[^\\<&{}%s]+' % char, token)
+ (rf'[^\\<&{{}}{char}]+', token)
]
if verbatim:
# This regex can't use `(?i)` because escape sequences are
# case-sensitive. `<\XMP>` works; `<\xmp>` doesn't.
- state.append((r'\\?<(/|\\\\|(?!%s)\\)%s(?=[\s=>])' %
- (_escape, verbatim),
- Name.Tag, ('#pop', '%sqs' % prefix, tag_state_name)))
+ state.append((rf'\\?<(/|\\\\|(?!{_escape})\\){verbatim}(?=[\s=>])',
+ Name.Tag, ('#pop', f'{prefix}qs', tag_state_name)))
else:
state += [
- (r'\\?<!([^><\\%s]|<(?!<)|\\%s%s|%s|\\.)*>?' %
- (char, char, escaped_quotes, _escape), Comment.Multiline),
+ (rf'\\?<!([^><\\{char}]|<(?!<)|\\{char}{escaped_quotes}|{_escape}|\\.)*>?', Comment.Multiline),
(r'(?i)\\?<listing(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/listing' % prefix, tag_state_name)),
+ ('#pop', f'{prefix}qs/listing', tag_state_name)),
(r'(?i)\\?<xmp(?=[\s=>]|\\>)', Name.Tag,
- ('#pop', '%sqs/xmp' % prefix, tag_state_name)),
- (r'\\?<([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)*' %
- (char, char, escaped_quotes, _escape), Name.Tag,
+ ('#pop', f'{prefix}qs/xmp', tag_state_name)),
+ (rf'\\?<([^\s=><\\{char}]|<(?!<)|\\{char}{escaped_quotes}|{_escape}|\\.)*', Name.Tag,
tag_state_name),
include('s/entity')
]
state += [
include('s/escape'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
+ (rf'\{{([^}}<\\{char}]|<(?!<)|\\{char}{escaped_quotes}|{_escape}|\\.)*\}}', String.Interpol),
(r'[\\&{}<]', token)
]
return state
@@ -834,22 +826,20 @@ class Tads3Lexer(RegexLexer):
def _make_tag_state(triple, double, _escape=_escape):
char = r'"' if double else r"'"
quantifier = r'{3,}' if triple else r''
- state_name = '%s%sqt' % ('t' if triple else '', 'd' if double else 's')
+ state_name = '{}{}qt'.format('t' if triple else '', 'd' if double else 's')
token = String.Double if double else String.Single
- escaped_quotes = r'+|%s(?!%s{2})' % (char, char) if triple else r''
+ escaped_quotes = rf'+|{char}(?!{char}{{2}})' if triple else r''
return [
- (r'%s%s' % (char, quantifier), token, '#pop:2'),
+ (rf'{char}{quantifier}', token, '#pop:2'),
(r'(\s|\\\n)+', Text),
(r'(=)(\\?")', bygroups(Punctuation, String.Double),
- 'dqs/%s' % state_name),
+ f'dqs/{state_name}'),
(r"(=)(\\?')", bygroups(Punctuation, String.Single),
- 'sqs/%s' % state_name),
- (r'=', Punctuation, 'uqs/%s' % state_name),
+ f'sqs/{state_name}'),
+ (r'=', Punctuation, f'uqs/{state_name}'),
(r'\\?>', Name.Tag, '#pop'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (char, char, escaped_quotes, _escape), String.Interpol),
- (r'([^\s=><\\%s]|<(?!<)|\\%s%s|%s|\\.)+' %
- (char, char, escaped_quotes, _escape), Name.Attribute),
+ (rf'\{{([^}}<\\{char}]|<(?!<)|\\{char}{escaped_quotes}|{_escape}|\\.)*\}}', String.Interpol),
+ (rf'([^\s=><\\{char}]|<(?!<)|\\{char}{escaped_quotes}|{_escape}|\\.)+', Name.Attribute),
include('s/escape'),
include('s/verbatim'),
include('s/entity'),
@@ -863,16 +853,15 @@ class Tads3Lexer(RegexLexer):
host_char = r'"' if host_double else r"'"
host_quantifier = r'{3,}' if host_triple else r''
host_token = String.Double if host_double else String.Single
- escaped_quotes = (r'+|%s(?!%s{2})' % (host_char, host_char)
+ escaped_quotes = (rf'+|{host_char}(?!{host_char}{{2}})'
if host_triple else r'')
return [
- (r'%s%s' % (host_char, host_quantifier), host_token, '#pop:3'),
- (r'%s%s' % (r'' if token is String.Other else r'\\?', terminator),
+ (rf'{host_char}{host_quantifier}', host_token, '#pop:3'),
+ (r'{}{}'.format(r'' if token is String.Other else r'\\?', terminator),
token, '#pop'),
include('s/verbatim'),
include('s/entity'),
- (r'\{([^}<\\%s]|<(?!<)|\\%s%s|%s|\\.)*\}' %
- (host_char, host_char, escaped_quotes, _escape), String.Interpol),
+ (rf'\{{([^}}<\\{host_char}]|<(?!<)|\\{host_char}{escaped_quotes}|{_escape}|\\.)*\}}', String.Interpol),
(r'([^\s"\'<%s{}\\&])+' % (r'>' if token is String.Other else r''),
token),
include('s/escape'),
@@ -888,7 +877,7 @@ class Tads3Lexer(RegexLexer):
r'delegated|do|else|for|foreach|finally|goto|if|inherited|'
r'invokee|local|nil|new|operator|replaced|return|self|switch|'
r'targetobj|targetprop|throw|true|try|while)\b)', Text, 'block'),
- (r'(%s)(%s*)(\()' % (_name, _ws),
+ (rf'({_name})({_ws}*)(\()',
bygroups(Name.Function, using(this, state='whitespace'),
Punctuation),
('block?/root', 'more/parameters', 'main/parameters')),
@@ -911,24 +900,24 @@ class Tads3Lexer(RegexLexer):
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
(r':', Punctuation, ('classes', 'class')),
- (r'(%s?)(%s*)(\()' % (_name, _ws),
+ (rf'({_name}?)({_ws}*)(\()',
bygroups(Name.Function, using(this, state='whitespace'),
Punctuation),
('block?', 'more/parameters', 'main/parameters')),
- (r'(%s)(%s*)(\{)' % (_name, _ws),
+ (rf'({_name})({_ws}*)(\{{)',
bygroups(Name.Function, using(this, state='whitespace'),
Punctuation), 'block'),
- (r'(%s)(%s*)(:)' % (_name, _ws),
+ (rf'({_name})({_ws}*)(:)',
bygroups(Name.Variable, using(this, state='whitespace'),
Punctuation),
('object-body/no-braces', 'classes', 'class')),
include('whitespace'),
- (r'->|%s' % _operator, Punctuation, 'main'),
+ (rf'->|{_operator}', Punctuation, 'main'),
default('main/object-body')
],
'main/object-body': [
include('main/basic'),
- (r'(%s)(%s*)(=?)' % (_name, _ws),
+ (rf'({_name})({_ws}*)(=?)',
bygroups(Name.Variable, using(this, state='whitespace'),
Punctuation), ('#pop', 'more', 'main')),
default('#pop:2')
@@ -951,7 +940,7 @@ class Tads3Lexer(RegexLexer):
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
(r'default\b', Keyword.Reserved),
- (r'(%s)(%s*)(:)' % (_name, _ws),
+ (rf'({_name})({_ws}*)(:)',
bygroups(Name.Label, using(this, state='whitespace'),
Punctuation)),
include('whitespace')
@@ -986,23 +975,23 @@ class Tads3Lexer(RegexLexer):
(r'R"', String.Regex, ('#pop', 'dqr')),
(r"R'", String.Regex, ('#pop', 'sqr')),
# Two-token keywords
- (r'(extern)(%s+)(object\b)' % _ws,
+ (rf'(extern)({_ws}+)(object\b)',
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Keyword.Reserved)),
- (r'(function|method)(%s*)(\()' % _ws,
+ (rf'(function|method)({_ws}*)(\()',
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Punctuation),
('#pop', 'block?', 'more/parameters', 'main/parameters')),
- (r'(modify)(%s+)(grammar\b)' % _ws,
+ (rf'(modify)({_ws}+)(grammar\b)',
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Keyword.Reserved),
('#pop', 'object-body/no-braces', ':', 'grammar')),
- (r'(new)(%s+(?=(?:function|method)\b))' % _ws,
+ (rf'(new)({_ws}+(?=(?:function|method)\b))',
bygroups(Keyword.Reserved, using(this, state='whitespace'))),
- (r'(object)(%s+)(template\b)' % _ws,
+ (rf'(object)({_ws}+)(template\b)',
bygroups(Keyword.Reserved, using(this, state='whitespace'),
Keyword.Reserved), ('#pop', 'template')),
- (r'(string)(%s+)(template\b)' % _ws,
+ (rf'(string)({_ws}+)(template\b)',
bygroups(Keyword, using(this, state='whitespace'),
Keyword.Reserved), ('#pop', 'function-name')),
# Keywords
@@ -1039,7 +1028,7 @@ class Tads3Lexer(RegexLexer):
(r'self\b', Name.Builtin.Pseudo, '#pop'),
(r'template\b', Keyword.Reserved, ('#pop', 'template')),
# Operators
- (r'(__objref|defined)(%s*)(\()' % _ws,
+ (rf'(__objref|defined)({_ws}*)(\()',
bygroups(Operator.Word, using(this, state='whitespace'),
Operator), ('#pop', 'more/__objref', 'main')),
(r'delegated\b', Operator.Word),
@@ -1065,7 +1054,7 @@ class Tads3Lexer(RegexLexer):
include('whitespace'),
(_operator, Operator, 'main'),
(r'\?', Operator, ('main', 'more/conditional', 'main')),
- (r'(is|not)(%s+)(in\b)' % _ws,
+ (rf'(is|not)({_ws}+)(in\b)',
bygroups(Operator.Word, using(this, state='whitespace'),
Operator.Word)),
(r'[^\s!"%-_a-z{-~]+', Error) # Averts an infinite loop
@@ -1112,9 +1101,9 @@ class Tads3Lexer(RegexLexer):
],
# Parameter list
'main/parameters': [
- (r'(%s)(%s*)(?=:)' % (_name, _ws),
+ (rf'({_name})({_ws}*)(?=:)',
bygroups(Name.Variable, using(this, state='whitespace')), '#pop'),
- (r'(%s)(%s+)(%s)' % (_name, _ws, _name),
+ (rf'({_name})({_ws}+)({_name})',
bygroups(Name.Class, using(this, state='whitespace'),
Name.Variable), '#pop'),
(r'\[+', Punctuation),
@@ -1123,7 +1112,7 @@ class Tads3Lexer(RegexLexer):
default('#pop')
],
'more/parameters': [
- (r'(:)(%s*(?=[?=,:)]))' % _ws,
+ (rf'(:)({_ws}*(?=[?=,:)]))',
bygroups(Punctuation, using(this, state='whitespace'))),
(r'[?\]]+', Punctuation),
(r'[:)]', Punctuation, ('#pop', 'multimethod?')),
@@ -1181,10 +1170,10 @@ class Tads3Lexer(RegexLexer):
'grammar-rules': [
include('string'),
include('whitespace'),
- (r'(\[)(%s*)(badness)' % _ws,
+ (rf'(\[)({_ws}*)(badness)',
bygroups(Punctuation, using(this, state='whitespace'), Keyword),
'main'),
- (r'->|%s|[()]' % _operator, Punctuation),
+ (rf'->|{_operator}|[()]', Punctuation),
(_name, Name.Constant),
default('#pop:2')
],
@@ -1193,7 +1182,7 @@ class Tads3Lexer(RegexLexer):
],
'function-name': [
(r'(<<([^>]|>>>|>(?!>))*>>)+', String.Interpol),
- (r'(?=%s?%s*[({])' % (_name, _ws), Text, '#pop'),
+ (rf'(?={_name}?{_ws}*[({{])', Text, '#pop'),
(_name, Name.Function, '#pop'),
include('whitespace')
],
@@ -1219,7 +1208,7 @@ class Tads3Lexer(RegexLexer):
include('string'),
(r'inherited\b', Keyword.Reserved),
include('whitespace'),
- (r'->|\?|%s' % _operator, Punctuation),
+ (rf'->|\?|{_operator}', Punctuation),
(_name, Name.Variable)
],
@@ -1259,11 +1248,11 @@ class Tads3Lexer(RegexLexer):
# Whitespace and comments
'whitespace': [
- (r'^%s*#(%s|[^\n]|(?<=\\)\n)*\n?' % (_ws_pp, _comment_multiline),
+ (rf'^{_ws_pp}*#({_comment_multiline}|[^\n]|(?<=\\)\n)*\n?',
Comment.Preproc),
(_comment_single, Comment.Single),
(_comment_multiline, Comment.Multiline),
- (r'\\+\n+%s*#?|\n+|([^\S\n]|\\)+' % _ws_pp, Text)
+ (rf'\\+\n+{_ws_pp}*#?|\n+|([^\S\n]|\\)+', Text)
],
# Strings
@@ -1274,15 +1263,15 @@ class Tads3Lexer(RegexLexer):
(r"'", String.Single, 'sqs')
],
's/escape': [
- (r'\{\{|\}\}|%s' % _escape, String.Escape)
+ (rf'\{{\{{|\}}\}}|{_escape}', String.Escape)
],
's/verbatim': [
(r'<<\s*(as\s+decreasingly\s+likely\s+outcomes|cycling|else|end|'
r'first\s+time|one\s+of|only|or|otherwise|'
r'(sticky|(then\s+)?(purely\s+)?at)\s+random|stopping|'
r'(then\s+)?(half\s+)?shuffled|\|\|)\s*>>', String.Interpol),
- (r'<<(%%(_(%s|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*(%s|\\?.)|'
- r'\s*((else|otherwise)\s+)?(if|unless)\b)?' % (_escape, _escape),
+ (rf'<<(%(_({_escape}|\\?.)|[\-+ ,#]|\[\d*\]?)*\d*\.?\d*({_escape}|\\?.)|'
+ r'\s*((else|otherwise)\s+)?(if|unless)\b)?',
String.Interpol, ('block/embed', 'more/embed', 'main'))
],
's/entity': [
@@ -1345,23 +1334,22 @@ class Tads3Lexer(RegexLexer):
}
def get_tokens_unprocessed(self, text, **kwargs):
- pp = r'^%s*#%s*' % (self._ws_pp, self._ws_pp)
+ pp = rf'^{self._ws_pp}*#{self._ws_pp}*'
if_false_level = 0
for index, token, value in (
RegexLexer.get_tokens_unprocessed(self, text, **kwargs)):
if if_false_level == 0: # Not in a false #if
if (token is Comment.Preproc and
- re.match(r'%sif%s+(0|nil)%s*$\n?' %
- (pp, self._ws_pp, self._ws_pp), value)):
+ re.match(rf'{pp}if{self._ws_pp}+(0|nil){self._ws_pp}*$\n?', value)):
if_false_level = 1
else: # In a false #if
if token is Comment.Preproc:
if (if_false_level == 1 and
- re.match(r'%sel(if|se)\b' % pp, value)):
+ re.match(rf'{pp}el(if|se)\b', value)):
if_false_level = 0
- elif re.match(r'%sif' % pp, value):
+ elif re.match(rf'{pp}if', value):
if_false_level += 1
- elif re.match(r'%sendif\b' % pp, value):
+ elif re.match(rf'{pp}endif\b', value):
if_false_level -= 1
else:
token = Comment
diff --git a/contrib/python/Pygments/py3/pygments/lexers/iolang.py b/contrib/python/Pygments/py3/pygments/lexers/iolang.py
index 268fbde611..119ce6b211 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/iolang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/iolang.py
@@ -4,7 +4,7 @@
Lexers for the Io language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['IoLexer']
class IoLexer(RegexLexer):
"""
For Io (a small, prototype-based programming language) source.
-
- .. versionadded:: 0.10
"""
name = 'Io'
url = 'http://iolanguage.com/'
filenames = ['*.io']
aliases = ['io']
mimetypes = ['text/x-iosrc']
+ version_added = '0.10'
tokens = {
'root': [
(r'\n', Whitespace),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/j.py b/contrib/python/Pygments/py3/pygments/lexers/j.py
index e99363cb02..959e8ca509 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/j.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/j.py
@@ -4,7 +4,7 @@
Lexer for the J programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,8 +18,6 @@ __all__ = ['JLexer']
class JLexer(RegexLexer):
"""
For J source code.
-
- .. versionadded:: 2.1
"""
name = 'J'
@@ -27,6 +25,7 @@ class JLexer(RegexLexer):
aliases = ['j']
filenames = ['*.ijs']
mimetypes = ['text/x-j']
+ version_added = '2.1'
validName = r'\b[a-zA-Z]\w*'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/javascript.py b/contrib/python/Pygments/py3/pygments/lexers/javascript.py
index bc5e2e43cb..ea7bd10ca0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/javascript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/javascript.py
@@ -4,7 +4,7 @@
Lexers for JavaScript and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -41,6 +41,7 @@ class JavascriptLexer(RegexLexer):
filenames = ['*.js', '*.jsm', '*.mjs', '*.cjs']
mimetypes = ['application/javascript', 'application/x-javascript',
'text/x-javascript', 'text/javascript']
+ version_added = ''
flags = re.DOTALL | re.MULTILINE
@@ -137,8 +138,6 @@ class JavascriptLexer(RegexLexer):
class TypeScriptLexer(JavascriptLexer):
"""
For TypeScript source code.
-
- .. versionadded:: 1.6
"""
name = 'TypeScript'
@@ -146,6 +145,7 @@ class TypeScriptLexer(JavascriptLexer):
aliases = ['typescript', 'ts']
filenames = ['*.ts']
mimetypes = ['application/x-typescript', 'text/x-typescript']
+ version_added = '1.6'
# Higher priority than the TypoScriptLexer, as TypeScript is far more
# common these days
@@ -175,8 +175,6 @@ class TypeScriptLexer(JavascriptLexer):
class KalLexer(RegexLexer):
"""
For Kal source code.
-
- .. versionadded:: 2.0
"""
name = 'Kal'
@@ -184,6 +182,7 @@ class KalLexer(RegexLexer):
aliases = ['kal']
filenames = ['*.kal']
mimetypes = ['text/kal', 'application/kal']
+ version_added = '2.0'
flags = re.DOTALL
tokens = {
@@ -309,8 +308,6 @@ class KalLexer(RegexLexer):
class LiveScriptLexer(RegexLexer):
"""
For LiveScript source code.
-
- .. versionadded:: 1.6
"""
name = 'LiveScript'
@@ -318,6 +315,7 @@ class LiveScriptLexer(RegexLexer):
aliases = ['livescript', 'live-script']
filenames = ['*.ls']
mimetypes = ['text/livescript']
+ version_added = '1.6'
flags = re.DOTALL
tokens = {
@@ -422,8 +420,6 @@ class LiveScriptLexer(RegexLexer):
class DartLexer(RegexLexer):
"""
For Dart source code.
-
- .. versionadded:: 1.5
"""
name = 'Dart'
@@ -431,6 +427,7 @@ class DartLexer(RegexLexer):
aliases = ['dart']
filenames = ['*.dart']
mimetypes = ['text/x-dart']
+ version_added = '1.5'
flags = re.MULTILINE | re.DOTALL
@@ -537,15 +534,16 @@ class LassoLexer(RegexLexer):
`requiredelimiters`
If given and ``True``, only highlight code between delimiters as Lasso
(default: ``False``).
-
- .. versionadded:: 1.6
"""
name = 'Lasso'
aliases = ['lasso', 'lassoscript']
filenames = ['*.lasso', '*.lasso[89]']
+ version_added = '1.6'
alias_filenames = ['*.incl', '*.inc', '*.las']
mimetypes = ['text/x-lasso']
+ url = 'https://www.lassosoft.com'
+
flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
tokens = {
@@ -790,14 +788,14 @@ class LassoLexer(RegexLexer):
class ObjectiveJLexer(RegexLexer):
"""
For Objective-J source code with preprocessor directives.
-
- .. versionadded:: 1.3
"""
name = 'Objective-J'
aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
filenames = ['*.j']
mimetypes = ['text/x-objective-j']
+ url = 'https://www.cappuccino.dev/learn/objective-j.html'
+ version_added = '1.3'
#: optional Comment or Whitespace
_ws = r'(?:\s|//[^\n]*\n|/[*](?:[^*]|[*][^/])*[*]/)*'
@@ -1013,8 +1011,6 @@ class ObjectiveJLexer(RegexLexer):
class CoffeeScriptLexer(RegexLexer):
"""
For CoffeeScript source code.
-
- .. versionadded:: 1.3
"""
name = 'CoffeeScript'
@@ -1022,6 +1018,7 @@ class CoffeeScriptLexer(RegexLexer):
aliases = ['coffeescript', 'coffee-script', 'coffee']
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
+ version_added = '1.3'
_operator_re = (
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
@@ -1127,14 +1124,13 @@ class CoffeeScriptLexer(RegexLexer):
class MaskLexer(RegexLexer):
"""
For Mask markup.
-
- .. versionadded:: 2.0
"""
name = 'Mask'
url = 'https://github.com/atmajs/MaskJS'
aliases = ['mask']
filenames = ['*.mask']
mimetypes = ['text/x-mask']
+ version_added = '2.0'
flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
tokens = {
@@ -1255,6 +1251,8 @@ class EarlGreyLexer(RegexLexer):
aliases = ['earl-grey', 'earlgrey', 'eg']
filenames = ['*.eg']
mimetypes = ['text/x-earl-grey']
+ url = 'https://github.com/breuleux/earl-grey'
+ version_added = ''
tokens = {
'root': [
@@ -1461,8 +1459,6 @@ class EarlGreyLexer(RegexLexer):
class JuttleLexer(RegexLexer):
"""
For Juttle source code.
-
- .. versionadded:: 2.2
"""
name = 'Juttle'
@@ -1471,6 +1467,7 @@ class JuttleLexer(RegexLexer):
filenames = ['*.juttle']
mimetypes = ['application/juttle', 'application/x-juttle',
'text/x-juttle', 'text/juttle']
+ version_added = '2.2'
flags = re.DOTALL | re.MULTILINE
@@ -1548,6 +1545,8 @@ class NodeConsoleLexer(Lexer):
name = 'Node.js REPL console session'
aliases = ['nodejsrepl', ]
mimetypes = ['text/x-nodejsrepl', ]
+ url = 'https://nodejs.org'
+ version_added = ''
def get_tokens_unprocessed(self, text):
jslexer = JavascriptLexer(**self.options)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jmespath.py b/contrib/python/Pygments/py3/pygments/lexers/jmespath.py
index 74aa57274b..e7bfc3e45c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jmespath.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jmespath.py
@@ -4,7 +4,7 @@
Lexers for the JMESPath language
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,6 +23,7 @@ class JMESPathLexer(RegexLexer):
url = 'https://jmespath.org'
filenames = ['*.jp']
aliases = ['jmespath', 'jp']
+ version_added = ''
tokens = {
'string': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jslt.py b/contrib/python/Pygments/py3/pygments/lexers/jslt.py
index 0d79f8b18b..297c3c6b5f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jslt.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jslt.py
@@ -4,7 +4,7 @@
Lexers for the JSLT language
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,14 +22,13 @@ _WORD_END = r'(?=[^0-9A-Z_a-z-])'
class JSLTLexer(RegexLexer):
"""
For JSLT source.
-
- .. versionadded:: 2.10
"""
name = 'JSLT'
url = 'https://github.com/schibsted/jslt'
filenames = ['*.jslt']
aliases = ['jslt']
mimetypes = ['text/x-jslt']
+ version_added = '2.10'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jsonnet.py b/contrib/python/Pygments/py3/pygments/lexers/jsonnet.py
index 3905f88718..aeaf53dc86 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jsonnet.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jsonnet.py
@@ -4,7 +4,7 @@
Lexer for Jsonnet data templating language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ jsonnet_function_token = jsonnet_token + r'(?=\()'
def string_rules(quote_mark):
return [
- (r"[^{}\\]".format(quote_mark), String),
+ (rf"[^{quote_mark}\\]", String),
(r"\\.", String.Escape),
(quote_mark, String, '#pop'),
]
@@ -28,7 +28,7 @@ def string_rules(quote_mark):
def quoted_field_name(quote_mark):
return [
- (r'([^{quote}\\]|\\.)*{quote}'.format(quote=quote_mark),
+ (rf'([^{quote_mark}\\]|\\.)*{quote_mark}',
Name.Variable, 'field_separator')
]
@@ -40,6 +40,7 @@ class JsonnetLexer(RegexLexer):
aliases = ['jsonnet']
filenames = ['*.jsonnet', '*.libsonnet']
url = "https://jsonnet.org"
+ version_added = ''
tokens = {
# Not used by itself
'_comments': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jsx.py b/contrib/python/Pygments/py3/pygments/lexers/jsx.py
index 90cecc0277..b2eece6f60 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jsx.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jsx.py
@@ -4,7 +4,7 @@
Lexers for JSX (React).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,8 +20,6 @@ __all__ = ['JsxLexer']
class JsxLexer(JavascriptLexer):
"""For JavaScript Syntax Extension (JSX).
-
- .. versionadded:: 2.17
"""
name = "JSX"
@@ -29,6 +27,7 @@ class JsxLexer(JavascriptLexer):
filenames = ["*.jsx", "*.react"]
mimetypes = ["text/jsx", "text/typescript-jsx"]
url = "https://facebook.github.io/jsx/"
+ version_added = '2.17'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/julia.py b/contrib/python/Pygments/py3/pygments/lexers/julia.py
index 9975ca0f87..faacaa55c7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/julia.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/julia.py
@@ -4,7 +4,7 @@
Lexers for the Julia language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,8 +27,6 @@ operator_suffixes = r'[²³¹ʰʲʳʷʸˡˢˣᴬᴮᴰᴱᴳᴴᴵᴶᴷᴸᴹá´
class JuliaLexer(RegexLexer):
"""
For Julia source code.
-
- .. versionadded:: 1.6
"""
name = 'Julia'
@@ -36,6 +34,7 @@ class JuliaLexer(RegexLexer):
aliases = ['julia', 'jl']
filenames = ['*.jl']
mimetypes = ['text/x-julia', 'application/x-julia']
+ version_added = '1.6'
tokens = {
'root': [
@@ -247,11 +246,11 @@ class JuliaLexer(RegexLexer):
class JuliaConsoleLexer(Lexer):
"""
For Julia console sessions. Modeled after MatlabSessionLexer.
-
- .. versionadded:: 1.6
"""
name = 'Julia console'
aliases = ['jlcon', 'julia-repl']
+ url = 'https://julialang.org/'
+ version_added = '1.6'
def get_tokens_unprocessed(self, text):
jllexer = JuliaLexer(**self.options)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/jvm.py b/contrib/python/Pygments/py3/pygments/lexers/jvm.py
index 9b4f8d65f8..d631c5c368 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/jvm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/jvm.py
@@ -4,7 +4,7 @@
Pygments lexers for JVM languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -33,6 +33,7 @@ class JavaLexer(RegexLexer):
aliases = ['java']
filenames = ['*.java']
mimetypes = ['text/x-java']
+ version_added = ''
flags = re.MULTILINE | re.DOTALL
@@ -118,8 +119,6 @@ class JavaLexer(RegexLexer):
class AspectJLexer(JavaLexer):
"""
For AspectJ source code.
-
- .. versionadded:: 1.6
"""
name = 'AspectJ'
@@ -127,6 +126,7 @@ class AspectJLexer(JavaLexer):
aliases = ['aspectj']
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
+ version_added = '1.6'
aj_keywords = {
'aspect', 'pointcut', 'privileged', 'call', 'execution',
@@ -165,21 +165,22 @@ class ScalaLexer(RegexLexer):
aliases = ['scala']
filenames = ['*.scala']
mimetypes = ['text/x-scala']
+ version_added = ''
flags = re.MULTILINE | re.DOTALL
opchar = '[!#%&*\\-\\/:?@^' + uni.combine('Sm', 'So') + ']'
letter = '[_\\$' + uni.combine('Ll', 'Lu', 'Lo', 'Nl', 'Lt') + ']'
upperLetter = '[' + uni.combine('Lu', 'Lt') + ']'
- letterOrDigit = '(?:%s|[0-9])' % letter
- letterOrDigitNoDollarSign = '(?:%s|[0-9])' % letter.replace('\\$', '')
- alphaId = '%s+' % letter
- simpleInterpolatedVariable = '%s%s*' % (letter, letterOrDigitNoDollarSign)
- idrest = '%s%s*(?:(?<=_)%s+)?' % (letter, letterOrDigit, opchar)
- idUpper = '%s%s*(?:(?<=_)%s+)?' % (upperLetter, letterOrDigit, opchar)
- plainid = '(?:%s|%s+)' % (idrest, opchar)
+ letterOrDigit = f'(?:{letter}|[0-9])'
+ letterOrDigitNoDollarSign = '(?:{}|[0-9])'.format(letter.replace('\\$', ''))
+ alphaId = f'{letter}+'
+ simpleInterpolatedVariable = f'{letter}{letterOrDigitNoDollarSign}*'
+ idrest = f'{letter}{letterOrDigit}*(?:(?<=_){opchar}+)?'
+ idUpper = f'{upperLetter}{letterOrDigit}*(?:(?<=_){opchar}+)?'
+ plainid = f'(?:{idrest}|{opchar}+)'
backQuotedId = r'`[^`]+`'
- anyId = r'(?:%s|%s)' % (plainid, backQuotedId)
+ anyId = rf'(?:{plainid}|{backQuotedId})'
notStartOfComment = r'(?!//|/\*)'
endOfLineMaybeWithComment = r'(?=\s*(//|$))'
@@ -256,7 +257,7 @@ class ScalaLexer(RegexLexer):
r'(def|val|var|given|type|class|trait|object|enum)\b)', Keyword),
],
'annotations': [
- (r'@%s' % idrest, Name.Decorator),
+ (rf'@{idrest}', Name.Decorator),
],
'using': [
# using is a soft keyword, can only be used in the first position of
@@ -264,29 +265,26 @@ class ScalaLexer(RegexLexer):
(r'(\()(\s*)(using)(\s)', bygroups(Punctuation, Whitespace, Keyword, Whitespace)),
],
'declarations': [
- (r'\b(def)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ (rf'\b(def)\b(\s*){notStartOfComment}({anyId})?',
bygroups(Keyword, Whitespace, Name.Function)),
- (r'\b(trait)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ (rf'\b(trait)\b(\s*){notStartOfComment}({anyId})?',
bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(?:(case)(\s+))?(class|object|enum)\b(\s*)%s(%s)?' %
- (notStartOfComment, anyId),
+ (rf'\b(?:(case)(\s+))?(class|object|enum)\b(\s*){notStartOfComment}({anyId})?',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class)),
- (r'(?<!\.)\b(type)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
+ (rf'(?<!\.)\b(type)\b(\s*){notStartOfComment}({anyId})?',
bygroups(Keyword, Whitespace, Name.Class)),
(r'\b(val|var)\b', Keyword.Declaration),
- (r'\b(package)(\s+)(object)\b(\s*)%s(%s)?' %
- (notStartOfComment, anyId),
+ (rf'\b(package)(\s+)(object)\b(\s*){notStartOfComment}({anyId})?',
bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Namespace)),
(r'\b(package)(\s+)', bygroups(Keyword, Whitespace), 'package'),
- (r'\b(given)\b(\s*)(%s)' % idUpper,
+ (rf'\b(given)\b(\s*)({idUpper})',
bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(given)\b(\s*)(%s)?' % anyId,
+ (rf'\b(given)\b(\s*)({anyId})?',
bygroups(Keyword, Whitespace, Name)),
],
'inheritance': [
(r'\b(extends|with|derives)\b(\s*)'
- r'(%s|%s|(?=\([^\)]+=>)|(?=%s)|(?="))?' %
- (idUpper, backQuotedId, plainid),
+ rf'({idUpper}|{backQuotedId}|(?=\([^\)]+=>)|(?={plainid})|(?="))?',
bygroups(Keyword, Whitespace, Name.Class)),
],
'extension': [
@@ -296,10 +294,9 @@ class ScalaLexer(RegexLexer):
# end is a soft keyword, should only be highlighted in certain cases
(r'\b(end)(\s+)(if|while|for|match|new|extension|val|var)\b',
bygroups(Keyword, Whitespace, Keyword)),
- (r'\b(end)(\s+)(%s)%s' % (idUpper, endOfLineMaybeWithComment),
+ (rf'\b(end)(\s+)({idUpper}){endOfLineMaybeWithComment}',
bygroups(Keyword, Whitespace, Name.Class)),
- (r'\b(end)(\s+)(%s|%s)?%s' %
- (backQuotedId, plainid, endOfLineMaybeWithComment),
+ (rf'\b(end)(\s+)({backQuotedId}|{plainid})?{endOfLineMaybeWithComment}',
bygroups(Keyword, Whitespace, Name.Namespace)),
],
'punctuation': [
@@ -310,10 +307,10 @@ class ScalaLexer(RegexLexer):
(words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
],
'operators': [
- (r'(%s{2,})(\s+)' % opchar, bygroups(Operator, Whitespace)),
+ (rf'({opchar}{{2,}})(\s+)', bygroups(Operator, Whitespace)),
(r'/(?![/*])', Operator),
(words(operators), Operator),
- (r'(?<!%s)(!|&&|\|\|)(?!%s)' % (opchar, opchar), Operator),
+ (rf'(?<!{opchar})(!|&&|\|\|)(?!{opchar})', Operator),
],
'constants': [
(r'\b(this|super)\b', Name.Builtin.Pseudo),
@@ -336,7 +333,7 @@ class ScalaLexer(RegexLexer):
(r'raw"(\\\\|\\"|[^"])*"', String),
],
'symbols': [
- (r"('%s)(?!')" % plainid, String.Symbol),
+ (rf"('{plainid})(?!')", String.Symbol),
],
'singleton-type': [
(r'(\.)(type)\b', bygroups(Punctuation, Keyword)),
@@ -344,7 +341,7 @@ class ScalaLexer(RegexLexer):
'inline': [
# inline is a soft modifier, only highlighted if followed by if,
# match or parameters.
- (r'\b(inline)(?=\s+(%s|%s)\s*:)' % (plainid, backQuotedId),
+ (rf'\b(inline)(?=\s+({plainid}|{backQuotedId})\s*:)',
Keyword),
(r'\b(inline)\b(?=(?:.(?!\b(?:val|def|given)\b))*\b(if|match)\b)',
Keyword),
@@ -427,7 +424,7 @@ class ScalaLexer(RegexLexer):
# Helpers
'qualified-name': [
(idUpper, Name.Class),
- (r'(%s)(\.)' % anyId, bygroups(Name.Namespace, Punctuation)),
+ (rf'({anyId})(\.)', bygroups(Name.Namespace, Punctuation)),
(r'\.', Punctuation),
(anyId, Name),
(r'[^\S\n]+', Whitespace),
@@ -435,7 +432,7 @@ class ScalaLexer(RegexLexer):
'interpolated-string-common': [
(r'[^"$\\]+', String),
(r'\$\$', String.Escape),
- (r'(\$)(%s)' % simpleInterpolatedVariable,
+ (rf'(\$)({simpleInterpolatedVariable})',
bygroups(String.Interpol, Name)),
(r'\$\{', String.Interpol, 'interpolated-string-brace'),
(r'\\.', String),
@@ -446,14 +443,14 @@ class ScalaLexer(RegexLexer):
class GosuLexer(RegexLexer):
"""
For Gosu source code.
-
- .. versionadded:: 1.5
"""
name = 'Gosu'
aliases = ['gosu']
filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
mimetypes = ['text/x-gosu']
+ url = 'https://gosu-lang.github.io'
+ version_added = '1.5'
flags = re.MULTILINE | re.DOTALL
@@ -525,14 +522,14 @@ class GosuLexer(RegexLexer):
class GosuTemplateLexer(Lexer):
"""
For Gosu templates.
-
- .. versionadded:: 1.5
"""
name = 'Gosu Template'
aliases = ['gst']
filenames = ['*.gst']
mimetypes = ['text/x-gosu-template']
+ url = 'https://gosu-lang.github.io'
+ version_added = '1.5'
def get_tokens_unprocessed(self, text):
lexer = GosuLexer()
@@ -543,8 +540,6 @@ class GosuTemplateLexer(Lexer):
class GroovyLexer(RegexLexer):
"""
For Groovy source code.
-
- .. versionadded:: 1.5
"""
name = 'Groovy'
@@ -552,6 +547,7 @@ class GroovyLexer(RegexLexer):
aliases = ['groovy']
filenames = ['*.groovy','*.gradle']
mimetypes = ['text/x-groovy']
+ version_added = '1.5'
flags = re.MULTILINE | re.DOTALL
@@ -622,14 +618,13 @@ class IokeLexer(RegexLexer):
"""
For Ioke (a strongly typed, dynamic,
prototype based programming language) source.
-
- .. versionadded:: 1.4
"""
name = 'Ioke'
url = 'https://ioke.org/'
filenames = ['*.ik']
aliases = ['ioke', 'ik']
mimetypes = ['text/x-iokesrc']
+ version_added = '1.4'
tokens = {
'interpolatableText': [
(r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
@@ -813,14 +808,13 @@ class IokeLexer(RegexLexer):
class ClojureLexer(RegexLexer):
"""
Lexer for Clojure source code.
-
- .. versionadded:: 0.11
"""
name = 'Clojure'
url = 'http://clojure.org/'
aliases = ['clojure', 'clj']
filenames = ['*.clj', '*.cljc']
mimetypes = ['text/x-clojure', 'application/x-clojure']
+ version_added = '0.11'
special_forms = (
'.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
@@ -954,14 +948,13 @@ class ClojureLexer(RegexLexer):
class ClojureScriptLexer(ClojureLexer):
"""
Lexer for ClojureScript source code.
-
- .. versionadded:: 2.0
"""
name = 'ClojureScript'
url = 'http://clojure.org/clojurescript'
aliases = ['clojurescript', 'cljs']
filenames = ['*.cljs']
mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
+ version_added = '2.0'
class TeaLangLexer(RegexLexer):
@@ -1014,8 +1007,6 @@ class TeaLangLexer(RegexLexer):
class CeylonLexer(RegexLexer):
"""
For Ceylon source code.
-
- .. versionadded:: 1.6
"""
name = 'Ceylon'
@@ -1023,6 +1014,7 @@ class CeylonLexer(RegexLexer):
aliases = ['ceylon']
filenames = ['*.ceylon']
mimetypes = ['text/x-ceylon']
+ version_added = '1.6'
flags = re.MULTILINE | re.DOTALL
@@ -1094,8 +1086,6 @@ class CeylonLexer(RegexLexer):
class KotlinLexer(RegexLexer):
"""
For Kotlin source code.
-
- .. versionadded:: 1.5
"""
name = 'Kotlin'
@@ -1103,6 +1093,7 @@ class KotlinLexer(RegexLexer):
aliases = ['kotlin']
filenames = ['*.kt', '*.kts']
mimetypes = ['text/x-kotlin']
+ version_added = '1.5'
flags = re.MULTILINE | re.DOTALL
@@ -1250,8 +1241,6 @@ class KotlinLexer(RegexLexer):
class XtendLexer(RegexLexer):
"""
For Xtend source code.
-
- .. versionadded:: 1.6
"""
name = 'Xtend'
@@ -1259,6 +1248,7 @@ class XtendLexer(RegexLexer):
aliases = ['xtend']
filenames = ['*.xtend']
mimetypes = ['text/x-xtend']
+ version_added = '1.6'
flags = re.MULTILINE | re.DOTALL
@@ -1279,7 +1269,7 @@ class XtendLexer(RegexLexer):
Keyword),
(r'(def|abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Declaration),
+ r'transient|volatile|val|var)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Whitespace)),
@@ -1316,8 +1306,6 @@ class XtendLexer(RegexLexer):
class PigLexer(RegexLexer):
"""
For Pig Latin source code.
-
- .. versionadded:: 2.0
"""
name = 'Pig'
@@ -1325,6 +1313,7 @@ class PigLexer(RegexLexer):
aliases = ['pig']
filenames = ['*.pig']
mimetypes = ['text/x-pig']
+ version_added = '2.0'
flags = re.MULTILINE | re.IGNORECASE
@@ -1384,14 +1373,13 @@ class PigLexer(RegexLexer):
class GoloLexer(RegexLexer):
"""
For Golo source code.
-
- .. versionadded:: 2.0
"""
name = 'Golo'
url = 'http://golo-lang.org/'
filenames = ['*.golo']
aliases = ['golo']
+ version_added = '2.0'
tokens = {
'root': [
@@ -1498,21 +1486,20 @@ class GoloLexer(RegexLexer):
class JasminLexer(RegexLexer):
"""
For Jasmin assembly code.
-
- .. versionadded:: 2.0
"""
name = 'Jasmin'
url = 'http://jasmin.sourceforge.net/'
aliases = ['jasmin', 'jasminxt']
filenames = ['*.j']
+ version_added = '2.0'
_whitespace = r' \n\t\r'
- _ws = r'(?:[%s]+)' % _whitespace
- _separator = r'%s:=' % _whitespace
- _break = r'(?=[%s]|$)' % _separator
- _name = r'[^%s]+' % _separator
- _unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
+ _ws = rf'(?:[{_whitespace}]+)'
+ _separator = rf'{_whitespace}:='
+ _break = rf'(?=[{_separator}]|$)'
+ _name = rf'[^{_separator}]+'
+ _unqualified_name = rf'(?:[^{_separator}.;\[/]+)'
tokens = {
'default': [
@@ -1523,36 +1510,36 @@ class JasminLexer(RegexLexer):
(r':', Punctuation, 'label'),
(_ws, Whitespace),
(r';.*', Comment.Single),
- (r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
- (r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
+ (rf'(\$[-+])?0x-?[\da-fA-F]+{_break}', Number.Hex),
+ (rf'(\$[-+]|\+)?-?\d+{_break}', Number.Integer),
(r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
- r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
- (r'\$%s' % _name, Name.Variable),
+ rf'[\x00-\x08\x0b\x0c\x0e-\x1f]*{_break}', Number.Float),
+ (rf'\${_name}', Name.Variable),
# Directives
- (r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
+ (rf'\.annotation{_break}', Keyword.Reserved, 'annotation'),
(r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
r'annotation|bridge|class|default|enum|field|final|fpstrict|'
r'interface|native|private|protected|public|signature|static|'
- r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
+ rf'synchronized|synthetic|transient|varargs|volatile){_break}',
Keyword.Reserved),
- (r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
+ (rf'\.catch{_break}', Keyword.Reserved, 'caught-exception'),
(r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
- r'invisibleparam|outer|visible|visibleparam)%s' % _break,
+ rf'invisibleparam|outer|visible|visibleparam){_break}',
Keyword.Reserved, 'class/convert-dots'),
- (r'\.field%s' % _break, Keyword.Reserved,
+ (rf'\.field{_break}', Keyword.Reserved,
('descriptor/convert-dots', 'field')),
- (r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
+ (rf'(\.end|\.limit|use){_break}', Keyword.Reserved,
'no-verification'),
- (r'\.method%s' % _break, Keyword.Reserved, 'method'),
- (r'\.set%s' % _break, Keyword.Reserved, 'var'),
- (r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
- (r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
- (r'is%s' % _break, Keyword.Reserved,
+ (rf'\.method{_break}', Keyword.Reserved, 'method'),
+ (rf'\.set{_break}', Keyword.Reserved, 'var'),
+ (rf'\.throws{_break}', Keyword.Reserved, 'exception'),
+ (rf'(from|offset|to|using){_break}', Keyword.Reserved, 'label'),
+ (rf'is{_break}', Keyword.Reserved,
('descriptor/convert-dots', 'var')),
- (r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
- (r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
+ (rf'(locals|stack){_break}', Keyword.Reserved, 'verification'),
+ (rf'method{_break}', Keyword.Reserved, 'enclosing-method'),
# Instructions
(words((
@@ -1578,14 +1565,14 @@ class JasminLexer(RegexLexer):
'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
- (r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
+ (rf'(anewarray|checkcast|instanceof|ldc|ldc_w|new){_break}',
Keyword.Reserved, 'class/no-dots'),
(r'invoke(dynamic|interface|nonvirtual|special|'
- r'static|virtual)%s' % _break, Keyword.Reserved,
+ rf'static|virtual){_break}', Keyword.Reserved,
'invocation'),
- (r'(getfield|putfield)%s' % _break, Keyword.Reserved,
+ (rf'(getfield|putfield){_break}', Keyword.Reserved,
('descriptor/no-dots', 'field')),
- (r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
+ (rf'(getstatic|putstatic){_break}', Keyword.Reserved,
('descriptor/no-dots', 'static')),
(words((
'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
@@ -1593,9 +1580,9 @@ class JasminLexer(RegexLexer):
'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
'ifnull', 'jsr', 'jsr_w'), suffix=_break),
Keyword.Reserved, 'label'),
- (r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
+ (rf'(multianewarray|newarray){_break}', Keyword.Reserved,
'descriptor/convert-dots'),
- (r'tableswitch%s' % _break, Keyword.Reserved, 'table')
+ (rf'tableswitch{_break}', Keyword.Reserved, 'table')
],
'quote': [
(r"'", String.Single, '#pop'),
@@ -1612,25 +1599,25 @@ class JasminLexer(RegexLexer):
(r'\n+', Whitespace),
(r"'", String.Single, 'quote'),
include('default'),
- (r'(%s)([ \t\r]*)(:)' % _name,
+ (rf'({_name})([ \t\r]*)(:)',
bygroups(Name.Label, Whitespace, Punctuation)),
(_name, String.Other)
],
'annotation': [
(r'\n', Whitespace, ('#pop', 'annotation-body')),
- (r'default%s' % _break, Keyword.Reserved,
+ (rf'default{_break}', Keyword.Reserved,
('#pop', 'annotation-default')),
include('default')
],
'annotation-body': [
(r'\n+', Whitespace),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ (rf'\.end{_break}', Keyword.Reserved, '#pop'),
include('default'),
(_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
],
'annotation-default': [
(r'\n+', Whitespace),
- (r'\.end%s' % _break, Keyword.Reserved, '#pop'),
+ (rf'\.end{_break}', Keyword.Reserved, '#pop'),
include('default'),
default(('annotation-items', 'descriptor/no-dots'))
],
@@ -1640,42 +1627,42 @@ class JasminLexer(RegexLexer):
(_name, String.Other)
],
'caught-exception': [
- (r'all%s' % _break, Keyword, '#pop'),
+ (rf'all{_break}', Keyword, '#pop'),
include('exception')
],
'class/convert-dots': [
include('default'),
- (r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
+ (rf'(L)((?:{_unqualified_name}[/.])*)({_name})(;)',
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ (rf'((?:{_unqualified_name}[/.])*)({_name})',
bygroups(Name.Namespace, Name.Class), '#pop')
],
'class/no-dots': [
include('default'),
(r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ (rf'(L)((?:{_unqualified_name}/)*)({_name})(;)',
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
- (r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
+ (rf'((?:{_unqualified_name}/)*)({_name})',
bygroups(Name.Namespace, Name.Class), '#pop')
],
'descriptor/convert-dots': [
include('default'),
(r'\[+', Punctuation),
- (r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
+ (rf'(L)((?:{_unqualified_name}[/.])*)({_name}?)(;)',
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ (rf'[^{_separator}\[)L]+', Keyword.Type, '#pop'),
default('#pop')
],
'descriptor/no-dots': [
include('default'),
(r'\[+', Punctuation),
- (r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
+ (rf'(L)((?:{_unqualified_name}/)*)({_name})(;)',
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
- (r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
+ (rf'[^{_separator}\[)L]+', Keyword.Type, '#pop'),
default('#pop')
],
'descriptors/convert-dots': [
@@ -1684,26 +1671,24 @@ class JasminLexer(RegexLexer):
],
'enclosing-method': [
(_ws, Whitespace),
- (r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
+ (rf'(?=[^{_separator}]*\()', Text, ('#pop', 'invocation')),
default(('#pop', 'class/convert-dots'))
],
'exception': [
include('default'),
- (r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
+ (rf'((?:{_unqualified_name}[/.])*)({_name})',
bygroups(Name.Namespace, Name.Exception), '#pop')
],
'field': [
- (r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
+ (rf'static{_break}', Keyword.Reserved, ('#pop', 'static')),
include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
+ (rf'((?:{_unqualified_name}[/.](?=[^{_separator}]*[/.]))*)({_unqualified_name}[/.])?({_name})',
bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
'#pop')
],
'invocation': [
include('default'),
- (r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
- (_unqualified_name, _separator, _unqualified_name, _name),
+ (rf'((?:{_unqualified_name}[/.](?=[^{_separator}(]*[/.]))*)({_unqualified_name}[/.])?({_name})(\()',
bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
@@ -1714,23 +1699,22 @@ class JasminLexer(RegexLexer):
],
'method': [
include('default'),
- (r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
+ (rf'({_name})(\()', bygroups(Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
],
'no-verification': [
- (r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
+ (rf'(locals|method|stack){_break}', Keyword.Reserved, '#pop'),
include('default')
],
'static': [
include('default'),
- (r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
- (_unqualified_name, _separator, _unqualified_name, _name),
+ (rf'((?:{_unqualified_name}[/.](?=[^{_separator}]*[/.]))*)({_unqualified_name}[/.])?({_name})',
bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
],
'table': [
(r'\n+', Whitespace),
- (r'default%s' % _break, Keyword.Reserved, '#pop'),
+ (rf'default{_break}', Keyword.Reserved, '#pop'),
include('default'),
(_name, Name.Label)
],
@@ -1740,10 +1724,9 @@ class JasminLexer(RegexLexer):
],
'verification': [
include('default'),
- (r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
- _break, Keyword, '#pop'),
- (r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
- (r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
+ (rf'(Double|Float|Integer|Long|Null|Top|UninitializedThis){_break}', Keyword, '#pop'),
+ (rf'Object{_break}', Keyword, ('#pop', 'class/no-dots')),
+ (rf'Uninitialized{_break}', Keyword, ('#pop', 'label'))
]
}
@@ -1763,8 +1746,6 @@ class JasminLexer(RegexLexer):
class SarlLexer(RegexLexer):
"""
For SARL source code.
-
- .. versionadded:: 2.4
"""
name = 'SARL'
@@ -1772,6 +1753,7 @@ class SarlLexer(RegexLexer):
aliases = ['sarl']
filenames = ['*.sarl']
mimetypes = ['text/x-sarl']
+ version_added = '2.4'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/kuin.py b/contrib/python/Pygments/py3/pygments/lexers/kuin.py
index aeb9cad2d4..228a4b3c96 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/kuin.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/kuin.py
@@ -4,7 +4,7 @@
Lexers for the Kuin language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,13 +18,12 @@ __all__ = ['KuinLexer']
class KuinLexer(RegexLexer):
"""
For Kuin source code.
-
- .. versionadded:: 2.9
"""
name = 'Kuin'
url = 'https://github.com/kuina/Kuin'
aliases = ['kuin']
filenames = ['*.kn']
+ version_added = '2.9'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/kusto.py b/contrib/python/Pygments/py3/pygments/lexers/kusto.py
index 9f30fd6751..7b146861cf 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/kusto.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/kusto.py
@@ -4,7 +4,7 @@
Lexers for Kusto Query Language (KQL).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -43,14 +43,13 @@ KUSTO_PUNCTUATION = [
class KustoLexer(RegexLexer):
"""For Kusto Query Language source code.
-
- .. versionadded:: 2.17
"""
name = "Kusto"
aliases = ["kql", "kusto"]
filenames = ["*.kql", "*.kusto", ".csl"]
url = "https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query"
+ version_added = '2.17'
tokens = {
"root": [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ldap.py b/contrib/python/Pygments/py3/pygments/lexers/ldap.py
index a669f79004..17d14b017a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ldap.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ldap.py
@@ -4,7 +4,7 @@
Pygments lexers for LDAP.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,8 +22,6 @@ class LdifLexer(RegexLexer):
"""
Lexer for LDIF
-
- .. versionadded:: 2.17
"""
name = 'LDIF'
@@ -31,6 +29,7 @@ class LdifLexer(RegexLexer):
filenames = ['*.ldif']
mimetypes = ["text/x-ldif"]
url = "https://datatracker.ietf.org/doc/html/rfc2849"
+ version_added = '2.17'
tokens = {
'root': [
@@ -106,8 +105,6 @@ class LdifLexer(RegexLexer):
class LdaprcLexer(RegexLexer):
"""
Lexer for OpenLDAP configuration files.
-
- .. versionadded:: 2.17
"""
name = 'LDAP configuration file'
@@ -115,6 +112,7 @@ class LdaprcLexer(RegexLexer):
filenames = ['.ldaprc', 'ldaprc', 'ldap.conf']
mimetypes = ["text/x-ldapconf"]
url = 'https://www.openldap.org/software//man.cgi?query=ldap.conf&sektion=5&apropos=0&manpath=OpenLDAP+2.4-Release'
+ version_added = '2.17'
_sasl_keywords = r'SASL_(?:MECH|REALM|AUTHCID|AUTHZID|CBINDING)'
_tls_keywords = r'TLS_(?:CACERT|CACERTDIR|CERT|ECNAME|KEY|CIPHER_SUITE|PROTOCOL_MIN|RANDFILE|CRLFILE)'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/lean.py b/contrib/python/Pygments/py3/pygments/lexers/lean.py
index d16cd73c57..b44d2a0423 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/lean.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/lean.py
@@ -4,32 +4,38 @@
Lexers for the Lean theorem prover.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+
import re
-from pygments.lexer import RegexLexer, default, words, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Whitespace
+from pygments.lexer import RegexLexer, words, include
+from pygments.token import Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Whitespace
-__all__ = ['Lean3Lexer']
+__all__ = ['Lean3Lexer', 'Lean4Lexer']
class Lean3Lexer(RegexLexer):
"""
For the Lean 3 theorem prover.
-
- .. versionadded:: 2.0
"""
name = 'Lean'
url = 'https://leanprover-community.github.io/lean3'
aliases = ['lean', 'lean3']
filenames = ['*.lean']
mimetypes = ['text/x-lean', 'text/x-lean3']
+ version_added = '2.0'
+
+ # from https://github.com/leanprover/vscode-lean/blob/1589ca3a65e394b3789409707febbd2d166c9344/syntaxes/lean.json#L186C20-L186C217
+ _name_segment = (
+ "(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-â…ð’œ-ð–Ÿ]"
+ "(?:(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-â…ð’œ-ð–Ÿ0-9'â¿-₉â‚-ₜᵢ-ᵪ])*")
+ _name = _name_segment + r"(\." + _name_segment + r")*"
tokens = {
'expression': [
- (r'\s+', Text),
+ (r'\s+', Whitespace),
(r'/--', String.Doc, 'docstring'),
(r'/-', Comment, 'comment'),
(r'--.*?$', Comment.Single),
@@ -43,9 +49,8 @@ class Lean3Lexer(RegexLexer):
(words((
'(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',',
)), Operator),
- (r'[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]'
- r'[.A-Za-z_\'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079'
- r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name),
+ (_name, Name),
+ (r'``?' + _name, String.Symbol),
(r'0x[A-Za-z0-9]+', Number.Integer),
(r'0b[01]+', Number.Integer),
(r'\d+', Number.Integer),
@@ -102,13 +107,13 @@ class Lean3Lexer(RegexLexer):
include('expression'),
],
'comment': [
- (r'[^/-]', Comment.Multiline),
+ (r'[^/-]+', Comment.Multiline),
(r'/-', Comment.Multiline, '#push'),
(r'-/', Comment.Multiline, '#pop'),
(r'[/-]', Comment.Multiline)
],
'docstring': [
- (r'[^/-]', String.Doc),
+ (r'[^/-]+', String.Doc),
(r'-/', String.Doc, '#pop'),
(r'[/-]', String.Doc)
],
@@ -119,4 +124,118 @@ class Lean3Lexer(RegexLexer):
],
}
+ def analyse_text(text):
+ if re.search(r'^import [a-z]', text, re.MULTILINE):
+ return 0.1
+
+
LeanLexer = Lean3Lexer
+
+
+class Lean4Lexer(RegexLexer):
+ """
+ For the Lean 4 theorem prover.
+ """
+
+ name = 'Lean4'
+ url = 'https://github.com/leanprover/lean4'
+ aliases = ['lean4']
+ filenames = ['*.lean']
+ mimetypes = ['text/x-lean4']
+ version_added = '2.18'
+
+ # same as Lean3Lexer, with `!` and `?` allowed
+ _name_segment = (
+ "(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-â…ð’œ-ð–Ÿ]"
+ "(?:(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-â…ð’œ-ð–Ÿ0-9'â¿-₉â‚-ₜᵢ-ᵪ!?])*")
+ _name = _name_segment + r"(\." + _name_segment + r")*"
+
+ keywords1 = (
+ 'import', 'unif_hint',
+ 'renaming', 'inline', 'hiding', 'lemma', 'variable',
+ 'theorem', 'axiom', 'inductive', 'structure', 'universe', 'alias',
+ '#help', 'precedence', 'postfix', 'prefix',
+ 'infix', 'infixl', 'infixr', 'notation', '#eval',
+ '#check', '#reduce', '#exit', 'end', 'private', 'using', 'namespace',
+ 'instance', 'section', 'protected',
+ 'export', 'set_option', 'extends', 'open', 'example',
+ '#print', 'opaque',
+ 'def', 'macro', 'elab', 'syntax', 'macro_rules', '#reduce', 'where',
+ 'abbrev', 'noncomputable', 'class', 'attribute', '#synth', 'mutual',
+ 'scoped', 'local',
+ )
+
+ keywords2 = (
+ 'forall', 'fun', 'obtain', 'from', 'have', 'show', 'assume',
+ 'let', 'if', 'else', 'then', 'by', 'in', 'with',
+ 'calc', 'match', 'nomatch', 'do', 'at',
+ )
+
+ keywords3 = (
+ # Sorts
+ 'Type', 'Prop', 'Sort',
+ )
+
+ operators = (
+ '!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!',
+ '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
+ '<-', '=', '==', '>', '_', '|', '||', '~', '=>', '<=', '>=',
+ '/\\', '\\/', '∀', 'Π', 'λ', '↔', '∧', '∨', '≠', '≤', '≥',
+ '¬', 'â»Â¹', 'â¬', 'â–¸', '→', '∃', '≈', '×', '⌞',
+ '⌟', '≡', '⟨', '⟩', "↦",
+ )
+
+ punctuation = ('(', ')', ':', '{', '}', '[', ']', '⦃', '⦄',
+ ':=', ',')
+
+ tokens = {
+ 'expression': [
+ (r'\s+', Whitespace),
+ (r'/--', String.Doc, 'docstring'),
+ (r'/-', Comment, 'comment'),
+ (r'--.*$', Comment.Single),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error),
+ (words(operators), Name.Builtin.Pseudo),
+ (words(punctuation), Operator),
+ (_name_segment, Name),
+ (r'``?' + _name, String.Symbol),
+ (r'(?<=\.)\d+', Number),
+ (r'(\d+\.\d*)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'"', String.Double, 'string'),
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ (r'\S', Name.Builtin.Pseudo),
+ ],
+ 'root': [
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (r'@\[', Keyword.Declaration, 'attribute'),
+ include('expression')
+ ],
+ 'attribute': [
+ (r'\]', Keyword.Declaration, '#pop'),
+ include('expression'),
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/-]+', Comment.Multiline),
+ (r'/-', Comment.Multiline, '#push'),
+ (r'-/', Comment.Multiline, '#pop'),
+ (r'[/-]', Comment.Multiline)
+ ],
+ 'docstring': [
+ (r'[^/-]+', String.Doc),
+ (r'-/', String.Doc, '#pop'),
+ (r'[/-]', String.Doc)
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ (r'\\[n"\\\n]', String.Escape),
+ ('"', String.Double, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search(r'^import [A-Z]', text, re.MULTILINE):
+ return 0.1
diff --git a/contrib/python/Pygments/py3/pygments/lexers/lilypond.py b/contrib/python/Pygments/py3/pygments/lexers/lilypond.py
index 6b4ed20d9e..d42906ccc5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/lilypond.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/lilypond.py
@@ -4,7 +4,7 @@
Lexer for LilyPond.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -51,14 +51,13 @@ class LilyPondLexer(SchemeLexer):
.. important::
This lexer is meant to be used in conjunction with the ``lilypond`` style.
-
- .. versionadded:: 2.11
"""
name = 'LilyPond'
url = 'https://lilypond.org'
aliases = ['lilypond']
filenames = ['*.ly']
mimetypes = []
+ version_added = '2.11'
flags = re.DOTALL | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/lisp.py b/contrib/python/Pygments/py3/pygments/lexers/lisp.py
index 966b6063ab..e6cc5875fa 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/lisp.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/lisp.py
@@ -4,7 +4,7 @@
Lexers for Lispy languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,7 +20,7 @@ from pygments.lexers._scheme_builtins import scheme_keywords, scheme_builtins
__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer',
- 'XtlangLexer', 'FennelLexer']
+ 'XtlangLexer', 'FennelLexer', 'JanetLexer']
class SchemeLexer(RegexLexer):
@@ -31,14 +31,13 @@ class SchemeLexer(RegexLexer):
at http://paste.lisp.org/ to cover as much syntax as possible.
It supports the full Scheme syntax as defined in R5RS.
-
- .. versionadded:: 0.6
"""
name = 'Scheme'
url = 'http://www.scheme-reports.org/'
aliases = ['scheme', 'scm']
filenames = ['*.scm', '*.ss']
mimetypes = ['text/x-scheme', 'application/x-scheme']
+ version_added = '0.6'
flags = re.DOTALL | re.MULTILINE
@@ -296,14 +295,13 @@ class SchemeLexer(RegexLexer):
class CommonLispLexer(RegexLexer):
"""
A Common Lisp lexer.
-
- .. versionadded:: 0.9
"""
name = 'Common Lisp'
url = 'https://lisp-lang.org/'
aliases = ['common-lisp', 'cl', 'lisp']
filenames = ['*.cl', '*.lisp']
mimetypes = ['text/x-common-lisp']
+ version_added = '0.9'
flags = re.IGNORECASE | re.MULTILINE
@@ -316,7 +314,7 @@ class CommonLispLexer(RegexLexer):
# symbol token, reverse-engineered from hyperspec
# Take a deep breath...
- symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
+ symbol = rf'(\|[^|]+\||(?:{nonmacro})(?:{constituent})*)'
def __init__(self, **options):
from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
@@ -485,14 +483,13 @@ class CommonLispLexer(RegexLexer):
class HyLexer(RegexLexer):
"""
Lexer for Hy source code.
-
- .. versionadded:: 2.0
"""
name = 'Hy'
url = 'http://hylang.org/'
- aliases = ['hylang']
+ aliases = ['hylang', 'hy']
filenames = ['*.hy']
mimetypes = ['text/x-hy', 'application/x-hy']
+ version_added = '2.0'
special_forms = (
'cond', 'for', '->', '->>', 'car',
@@ -522,7 +519,7 @@ class HyLexer(RegexLexer):
# valid names for identifiers
# well, names can only not consist fully of numbers
# but this should be good enough for now
- valid_name = r'(?!#)[\w!$%*+<=>?/.#:-]+'
+ valid_name = r"[^ \t\n\r\f\v()[\]{};\"'`~]+"
def _multi_escape(entries):
return words(entries, suffix=' ')
@@ -534,8 +531,7 @@ class HyLexer(RegexLexer):
(r';.*$', Comment.Single),
# whitespaces - usually not relevant
- (r',+', Text),
- (r'\s+', Whitespace),
+ (r'[ \t\n\r\f\v]+', Whitespace),
# numbers
(r'-?\d+\.\d+', Number.Float),
@@ -601,8 +597,6 @@ class RacketLexer(RegexLexer):
"""
Lexer for Racket source code (formerly
known as PLT Scheme).
-
- .. versionadded:: 1.6
"""
name = 'Racket'
@@ -610,6 +604,7 @@ class RacketLexer(RegexLexer):
aliases = ['racket', 'rkt']
filenames = ['*.rkt', '*.rktd', '*.rktl']
mimetypes = ['text/x-racket', 'application/x-racket']
+ version_added = '1.6'
# Generated by example.rkt
_keywords = (
@@ -1391,19 +1386,17 @@ class RacketLexer(RegexLexer):
_opening_parenthesis = r'[([{]'
_closing_parenthesis = r'[)\]}]'
_delimiters = r'()[\]{}",\'`;\s'
- _symbol = r'(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
+ _symbol = rf'(?:\|[^|]*\||\\[\w\W]|[^|\\{_delimiters}]+)+'
_exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
_exponent = r'(?:[defls][-+]?\d+)'
_inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
- _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
- r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
- _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
- _exponent)
- _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
+ _inexact_simple = (rf'(?:{_inexact_simple_no_hashes}|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
+ r'\d+(?:\.\d*#+|/\d+#+)))')
+ _inexact_normal_no_hashes = rf'(?:{_inexact_simple_no_hashes}{_exponent}?)'
+ _inexact_normal = rf'(?:{_inexact_simple}{_exponent}?)'
_inexact_special = r'(?:(?:inf|nan)\.[0f])'
- _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
- _inexact_special)
- _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
+ _inexact_real = rf'(?:[-+]?{_inexact_normal}|[-+]{_inexact_special})'
+ _inexact_unsigned = rf'(?:{_inexact_normal}|{_inexact_special})'
tokens = {
'root': [
@@ -1423,36 +1416,29 @@ class RacketLexer(RegexLexer):
# onto Pygments token types; some judgment calls here.
# #d or no prefix
- (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
+ (rf'(?i){_exact_decimal_prefix}[-+]?\d+(?=[{_delimiters}])',
Number.Integer, '#pop'),
- (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
- (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
- (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
- (_exact_decimal_prefix, _inexact_normal_no_hashes,
- _inexact_normal_no_hashes, _inexact_normal_no_hashes,
- _delimiters), Number, '#pop'),
+ (rf'(?i){_exact_decimal_prefix}[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[{_delimiters}])', Number.Float, '#pop'),
+ (rf'(?i){_exact_decimal_prefix}[-+]?({_inexact_normal_no_hashes}([-+]{_inexact_normal_no_hashes}?i)?|[-+]{_inexact_normal_no_hashes}?i)(?=[{_delimiters}])', Number, '#pop'),
# Inexact without explicit #i
- (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
- (_inexact_real, _inexact_unsigned, _inexact_unsigned,
- _inexact_real, _inexact_real, _delimiters), Number.Float,
+ (rf'(?i)(#d)?({_inexact_real}([-+]{_inexact_unsigned}?i)?|[-+]{_inexact_unsigned}?i|{_inexact_real}@{_inexact_real})(?=[{_delimiters}])', Number.Float,
'#pop'),
# The remaining extflonums
- (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
- (_inexact_simple, _delimiters), Number.Float, '#pop'),
+ (rf'(?i)(([-+]?{_inexact_simple}t[-+]?\d+)|[-+](inf|nan)\.t)(?=[{_delimiters}])', Number.Float, '#pop'),
# #b
- (r'(?iu)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
+ (rf'(?iu)(#[ei])?#b{_symbol}', Number.Bin, '#pop'),
# #o
- (r'(?iu)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
+ (rf'(?iu)(#[ei])?#o{_symbol}', Number.Oct, '#pop'),
# #x
- (r'(?iu)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
+ (rf'(?iu)(#[ei])?#x{_symbol}', Number.Hex, '#pop'),
# #i is always inexact, i.e. float
- (r'(?iu)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
+ (rf'(?iu)(#d)?#i{_symbol}', Number.Float, '#pop'),
# Strings and characters
(r'#?"', String.Double, ('#pop', 'string')),
@@ -1465,7 +1451,7 @@ class RacketLexer(RegexLexer):
(r'#(true|false|[tTfF])', Name.Constant, '#pop'),
# Keyword argument names (e.g. #:keyword)
- (r'#:%s' % _symbol, Keyword.Declaration, '#pop'),
+ (rf'#:{_symbol}', Keyword.Declaration, '#pop'),
# Reader extensions
(r'(#lang |#!)(\S+)',
@@ -1473,8 +1459,8 @@ class RacketLexer(RegexLexer):
(r'#reader', Keyword.Namespace, 'quoted-datum'),
# Other syntax
- (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
- (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
+ (rf"(?i)\.(?=[{_delimiters}])|#c[is]|#['`]|#,@?", Operator),
+ (rf"'|#[s&]|#hash(eqv?)?|#\d*(?={_opening_parenthesis})",
Operator, ('#pop', 'quoted-datum'))
],
'datum*': [
@@ -1488,15 +1474,15 @@ class RacketLexer(RegexLexer):
],
'unquoted-datum': [
include('datum'),
- (r'quote(?=[%s])' % _delimiters, Keyword,
+ (rf'quote(?=[{_delimiters}])', Keyword,
('#pop', 'quoted-datum')),
(r'`', Operator, ('#pop', 'quasiquoted-datum')),
- (r'quasiquote(?=[%s])' % _delimiters, Keyword,
+ (rf'quasiquote(?=[{_delimiters}])', Keyword,
('#pop', 'quasiquoted-datum')),
(_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
- (words(_keywords, suffix='(?=[%s])' % _delimiters),
+ (words(_keywords, suffix=f'(?=[{_delimiters}])'),
Keyword, '#pop'),
- (words(_builtins, suffix='(?=[%s])' % _delimiters),
+ (words(_builtins, suffix=f'(?=[{_delimiters}])'),
Name.Builtin, '#pop'),
(_symbol, Name, '#pop'),
include('datum*')
@@ -1508,7 +1494,7 @@ class RacketLexer(RegexLexer):
'quasiquoted-datum': [
include('datum'),
(r',@?', Operator, ('#pop', 'unquoted-datum')),
- (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
+ (rf'unquote(-splicing)?(?=[{_delimiters}])', Keyword,
('#pop', 'unquoted-datum')),
(_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
include('datum*')
@@ -1543,8 +1529,6 @@ class RacketLexer(RegexLexer):
class NewLispLexer(RegexLexer):
"""
For newLISP source code (version 10.3.0).
-
- .. versionadded:: 1.5
"""
name = 'NewLisp'
@@ -1552,6 +1536,7 @@ class NewLispLexer(RegexLexer):
aliases = ['newlisp']
filenames = ['*.lsp', '*.nl', '*.kif']
mimetypes = ['text/x-newlisp', 'application/x-newlisp']
+ version_added = '1.5'
flags = re.IGNORECASE | re.MULTILINE
@@ -1676,13 +1661,13 @@ class EmacsLispLexer(RegexLexer):
"""
An ELisp lexer, parsing a stream and outputting the tokens
needed to highlight elisp code.
-
- .. versionadded:: 2.1
"""
name = 'EmacsLisp'
aliases = ['emacs-lisp', 'elisp', 'emacs']
filenames = ['*.el']
mimetypes = ['text/x-elisp', 'application/x-elisp']
+ url = 'https://www.gnu.org/software/emacs'
+ version_added = '2.1'
flags = re.MULTILINE
@@ -1695,7 +1680,7 @@ class EmacsLispLexer(RegexLexer):
# symbol token, reverse-engineered from hyperspec
# Take a deep breath...
- symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
+ symbol = rf'((?:{nonmacro})(?:{constituent})*)'
macros = {
'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
@@ -2308,7 +2293,7 @@ class EmacsLispLexer(RegexLexer):
],
'string': [
(r'[^"\\`]+', String),
- (r'`%s\'' % symbol, String.Symbol),
+ (rf'`{symbol}\'', String.Symbol),
(r'`', String),
(r'\\.', String),
(r'\\\n', String),
@@ -2320,14 +2305,13 @@ class EmacsLispLexer(RegexLexer):
class ShenLexer(RegexLexer):
"""
Lexer for Shen source code.
-
- .. versionadded:: 2.1
"""
name = 'Shen'
url = 'http://shenlanguage.org/'
aliases = ['shen']
filenames = ['*.shen']
mimetypes = ['text/x-shen', 'application/x-shen']
+ version_added = '2.1'
DECLARATIONS = (
'datatype', 'define', 'defmacro', 'defprolog', 'defcc',
@@ -2375,9 +2359,9 @@ class ShenLexer(RegexLexer):
MAPPINGS.update((s, Keyword) for s in SPECIAL_FORMS)
valid_symbol_chars = r'[\w!$%*+,<=>?/.\'@&#:-]'
- valid_name = '%s+' % valid_symbol_chars
- symbol_name = r'[a-z!$%%*+,<=>?/.\'@&#_-]%s*' % valid_symbol_chars
- variable = r'[A-Z]%s*' % valid_symbol_chars
+ valid_name = f'{valid_symbol_chars}+'
+ symbol_name = rf'[a-z!$%*+,<=>?/.\'@&#_-]{valid_symbol_chars}*'
+ variable = rf'[A-Z]{valid_symbol_chars}*'
tokens = {
'string': [
@@ -2485,13 +2469,13 @@ class ShenLexer(RegexLexer):
class CPSALexer(RegexLexer):
"""
A CPSA lexer based on the CPSA language as of version 2.2.12
-
- .. versionadded:: 2.1
"""
name = 'CPSA'
aliases = ['cpsa']
filenames = ['*.cpsa']
mimetypes = []
+ url = 'https://web.cs.wpi.edu/~guttman/cs564/cpsauser.html'
+ version_added = '2.1'
# list of known keywords and builtins taken form vim 6.4 scheme.vim
# syntax file.
@@ -2566,14 +2550,13 @@ class XtlangLexer(RegexLexer):
This is a mixture of Scheme and xtlang, really. Keyword lists are
taken from the Extempore Emacs mode
(https://github.com/extemporelang/extempore-emacs-mode)
-
- .. versionadded:: 2.2
"""
name = 'xtlang'
url = 'http://extempore.moso.com.au'
aliases = ['extempore']
filenames = ['*.xtm']
mimetypes = []
+ version_added = '2.2'
common_keywords = (
'lambda', 'define', 'if', 'else', 'cond', 'and',
@@ -2769,13 +2752,12 @@ class FennelLexer(RegexLexer):
Fennel compiles to Lua, so all the Lua builtins are recognized as well
as the special forms that are particular to the Fennel compiler.
-
- .. versionadded:: 2.3
"""
name = 'Fennel'
url = 'https://fennel-lang.org'
aliases = ['fennel', 'fnl']
filenames = ['*.fnl']
+ version_added = '2.3'
# this list is current as of Fennel version 0.10.0.
special_forms = (
@@ -2846,3 +2828,319 @@ class FennelLexer(RegexLexer):
(r'#', Punctuation),
]
}
+
+
+class JanetLexer(RegexLexer):
+ """A lexer for the Janet programming language.
+ """
+ name = 'Janet'
+ url = 'https://janet-lang.org/'
+ aliases = ['janet']
+ filenames = ['*.janet', '*.jdn']
+ mimetypes = ['text/x-janet', 'application/x-janet']
+ version_added = '2.18'
+
+ # XXX: gets too slow
+ #flags = re.MULTILINE | re.VERBOSE
+
+ special_forms = (
+ 'break', 'def', 'do', 'fn', 'if', 'quote', 'quasiquote', 'splice',
+ 'set', 'unquote', 'upscope', 'var', 'while'
+ )
+
+ builtin_macros = (
+ '%=', '*=', '++', '+=', '--', '-=', '->', '->>', '-?>',
+ '-?>>', '/=', 'and', 'as->', 'as-macro', 'as?->',
+ 'assert', 'case', 'catseq', 'chr', 'comment', 'compif',
+ 'comptime', 'compwhen', 'cond', 'coro', 'def-',
+ 'default', 'defdyn', 'defer', 'defmacro', 'defmacro-',
+ 'defn', 'defn-', 'delay', 'doc', 'each', 'eachk',
+ 'eachp', 'edefer', 'ev/do-thread', 'ev/gather',
+ 'ev/spawn', 'ev/spawn-thread', 'ev/with-deadline',
+ 'ffi/defbind', 'fiber-fn', 'for', 'forever', 'forv',
+ 'generate', 'if-let', 'if-not', 'if-with', 'import',
+ 'juxt', 'label', 'let', 'loop', 'match', 'or', 'prompt',
+ 'protect', 'repeat', 'seq', 'short-fn', 'tabseq',
+ 'toggle', 'tracev', 'try', 'unless', 'use', 'var-',
+ 'varfn', 'when', 'when-let', 'when-with', 'with',
+ 'with-dyns', 'with-syms', 'with-vars',
+ # obsolete builtin macros
+ 'eachy'
+ )
+
+ builtin_functions = (
+ '%', '*', '+', '-', '/', '<', '<=', '=', '>', '>=',
+ 'abstract?', 'accumulate', 'accumulate2', 'all',
+ 'all-bindings', 'all-dynamics', 'any?', 'apply',
+ 'array', 'array/clear', 'array/concat', 'array/ensure',
+ 'array/fill', 'array/insert', 'array/new',
+ 'array/new-filled', 'array/peek', 'array/pop',
+ 'array/push', 'array/remove', 'array/slice',
+ 'array/trim', 'array/weak', 'array?', 'asm',
+ 'bad-compile', 'bad-parse', 'band', 'blshift', 'bnot',
+ 'boolean?', 'bor', 'brshift', 'brushift', 'buffer',
+ 'buffer/bit', 'buffer/bit-clear', 'buffer/bit-set',
+ 'buffer/bit-toggle', 'buffer/blit', 'buffer/clear',
+ 'buffer/fill', 'buffer/format', 'buffer/from-bytes',
+ 'buffer/new', 'buffer/new-filled', 'buffer/popn',
+ 'buffer/push', 'buffer/push-at', 'buffer/push-byte',
+ 'buffer/push-string', 'buffer/push-word',
+ 'buffer/slice', 'buffer/trim', 'buffer?', 'bxor',
+ 'bytes?', 'cancel', 'cfunction?', 'cli-main', 'cmp',
+ 'comp', 'compare', 'compare<', 'compare<=', 'compare=',
+ 'compare>', 'compare>=', 'compile', 'complement',
+ 'count', 'curenv', 'debug', 'debug/arg-stack',
+ 'debug/break', 'debug/fbreak', 'debug/lineage',
+ 'debug/stack', 'debug/stacktrace', 'debug/step',
+ 'debug/unbreak', 'debug/unfbreak', 'debugger',
+ 'debugger-on-status', 'dec', 'deep-not=', 'deep=',
+ 'defglobal', 'describe', 'dictionary?', 'disasm',
+ 'distinct', 'div', 'doc*', 'doc-format', 'doc-of',
+ 'dofile', 'drop', 'drop-until', 'drop-while', 'dyn',
+ 'eflush', 'empty?', 'env-lookup', 'eprin', 'eprinf',
+ 'eprint', 'eprintf', 'error', 'errorf',
+ 'ev/acquire-lock', 'ev/acquire-rlock',
+ 'ev/acquire-wlock', 'ev/all-tasks', 'ev/call',
+ 'ev/cancel', 'ev/capacity', 'ev/chan', 'ev/chan-close',
+ 'ev/chunk', 'ev/close', 'ev/count', 'ev/deadline',
+ 'ev/full', 'ev/give', 'ev/give-supervisor', 'ev/go',
+ 'ev/lock', 'ev/read', 'ev/release-lock',
+ 'ev/release-rlock', 'ev/release-wlock', 'ev/rselect',
+ 'ev/rwlock', 'ev/select', 'ev/sleep', 'ev/take',
+ 'ev/thread', 'ev/thread-chan', 'ev/write', 'eval',
+ 'eval-string', 'even?', 'every?', 'extreme', 'false?',
+ 'ffi/align', 'ffi/call', 'ffi/calling-conventions',
+ 'ffi/close', 'ffi/context', 'ffi/free', 'ffi/jitfn',
+ 'ffi/lookup', 'ffi/malloc', 'ffi/native',
+ 'ffi/pointer-buffer', 'ffi/pointer-cfunction',
+ 'ffi/read', 'ffi/signature', 'ffi/size', 'ffi/struct',
+ 'ffi/trampoline', 'ffi/write', 'fiber/can-resume?',
+ 'fiber/current', 'fiber/getenv', 'fiber/last-value',
+ 'fiber/maxstack', 'fiber/new', 'fiber/root',
+ 'fiber/setenv', 'fiber/setmaxstack', 'fiber/status',
+ 'fiber?', 'file/close', 'file/flush', 'file/lines',
+ 'file/open', 'file/read', 'file/seek', 'file/tell',
+ 'file/temp', 'file/write', 'filter', 'find',
+ 'find-index', 'first', 'flatten', 'flatten-into',
+ 'flush', 'flycheck', 'freeze', 'frequencies',
+ 'from-pairs', 'function?', 'gccollect', 'gcinterval',
+ 'gcsetinterval', 'gensym', 'get', 'get-in', 'getline',
+ 'getproto', 'group-by', 'has-key?', 'has-value?',
+ 'hash', 'idempotent?', 'identity', 'import*', 'in',
+ 'inc', 'index-of', 'indexed?', 'int/s64',
+ 'int/to-bytes', 'int/to-number', 'int/u64', 'int?',
+ 'interleave', 'interpose', 'invert', 'juxt*', 'keep',
+ 'keep-syntax', 'keep-syntax!', 'keys', 'keyword',
+ 'keyword/slice', 'keyword?', 'kvs', 'last', 'length',
+ 'lengthable?', 'load-image', 'macex', 'macex1',
+ 'maclintf', 'make-env', 'make-image', 'map', 'mapcat',
+ 'marshal', 'math/abs', 'math/acos', 'math/acosh',
+ 'math/asin', 'math/asinh', 'math/atan', 'math/atan2',
+ 'math/atanh', 'math/cbrt', 'math/ceil', 'math/cos',
+ 'math/cosh', 'math/erf', 'math/erfc', 'math/exp',
+ 'math/exp2', 'math/expm1', 'math/floor', 'math/gamma',
+ 'math/gcd', 'math/hypot', 'math/lcm', 'math/log',
+ 'math/log-gamma', 'math/log10', 'math/log1p',
+ 'math/log2', 'math/next', 'math/pow', 'math/random',
+ 'math/rng', 'math/rng-buffer', 'math/rng-int',
+ 'math/rng-uniform', 'math/round', 'math/seedrandom',
+ 'math/sin', 'math/sinh', 'math/sqrt', 'math/tan',
+ 'math/tanh', 'math/trunc', 'max', 'max-of', 'mean',
+ 'memcmp', 'merge', 'merge-into', 'merge-module', 'min',
+ 'min-of', 'mod', 'module/add-paths',
+ 'module/expand-path', 'module/find', 'module/value',
+ 'nan?', 'nat?', 'native', 'neg?', 'net/accept',
+ 'net/accept-loop', 'net/address', 'net/address-unpack',
+ 'net/chunk', 'net/close', 'net/connect', 'net/flush',
+ 'net/listen', 'net/localname', 'net/peername',
+ 'net/read', 'net/recv-from', 'net/send-to',
+ 'net/server', 'net/setsockopt', 'net/shutdown',
+ 'net/write', 'next', 'nil?', 'not', 'not=', 'number?',
+ 'odd?', 'one?', 'os/arch', 'os/cd', 'os/chmod',
+ 'os/clock', 'os/compiler', 'os/cpu-count',
+ 'os/cryptorand', 'os/cwd', 'os/date', 'os/dir',
+ 'os/environ', 'os/execute', 'os/exit', 'os/getenv',
+ 'os/isatty', 'os/link', 'os/lstat', 'os/mkdir',
+ 'os/mktime', 'os/open', 'os/perm-int', 'os/perm-string',
+ 'os/pipe', 'os/posix-exec', 'os/posix-fork',
+ 'os/proc-close', 'os/proc-kill', 'os/proc-wait',
+ 'os/readlink', 'os/realpath', 'os/rename', 'os/rm',
+ 'os/rmdir', 'os/setenv', 'os/shell', 'os/sigaction',
+ 'os/sleep', 'os/spawn', 'os/stat', 'os/strftime',
+ 'os/symlink', 'os/time', 'os/touch', 'os/umask',
+ 'os/which', 'pairs', 'parse', 'parse-all',
+ 'parser/byte', 'parser/clone', 'parser/consume',
+ 'parser/eof', 'parser/error', 'parser/flush',
+ 'parser/has-more', 'parser/insert', 'parser/new',
+ 'parser/produce', 'parser/state', 'parser/status',
+ 'parser/where', 'partial', 'partition', 'partition-by',
+ 'peg/compile', 'peg/find', 'peg/find-all', 'peg/match',
+ 'peg/replace', 'peg/replace-all', 'pos?', 'postwalk',
+ 'pp', 'prewalk', 'prin', 'prinf', 'print', 'printf',
+ 'product', 'propagate', 'put', 'put-in', 'quit',
+ 'range', 'reduce', 'reduce2', 'repl', 'require',
+ 'resume', 'return', 'reverse', 'reverse!',
+ 'run-context', 'sandbox', 'scan-number', 'setdyn',
+ 'signal', 'slice', 'slurp', 'some', 'sort', 'sort-by',
+ 'sorted', 'sorted-by', 'spit', 'string',
+ 'string/ascii-lower', 'string/ascii-upper',
+ 'string/bytes', 'string/check-set', 'string/find',
+ 'string/find-all', 'string/format', 'string/from-bytes',
+ 'string/has-prefix?', 'string/has-suffix?',
+ 'string/join', 'string/repeat', 'string/replace',
+ 'string/replace-all', 'string/reverse', 'string/slice',
+ 'string/split', 'string/trim', 'string/triml',
+ 'string/trimr', 'string?', 'struct', 'struct/getproto',
+ 'struct/proto-flatten', 'struct/to-table',
+ 'struct/with-proto', 'struct?', 'sum', 'symbol',
+ 'symbol/slice', 'symbol?', 'table', 'table/clear',
+ 'table/clone', 'table/getproto', 'table/new',
+ 'table/proto-flatten', 'table/rawget', 'table/setproto',
+ 'table/to-struct', 'table/weak', 'table/weak-keys',
+ 'table/weak-values', 'table?', 'take', 'take-until',
+ 'take-while', 'thaw', 'trace', 'true?', 'truthy?',
+ 'tuple', 'tuple/brackets', 'tuple/setmap',
+ 'tuple/slice', 'tuple/sourcemap', 'tuple/type',
+ 'tuple?', 'type', 'unmarshal', 'untrace', 'update',
+ 'update-in', 'values', 'varglobal', 'walk',
+ 'warn-compile', 'xprin', 'xprinf', 'xprint', 'xprintf',
+ 'yield', 'zero?', 'zipcoll',
+ # obsolete builtin functions
+ 'tarray/buffer', 'tarray/copy-bytes', 'tarray/length',
+ 'tarray/new', 'tarray/properties', 'tarray/slice',
+ 'tarray/swap-bytes', 'thread/close', 'thread/current',
+ 'thread/exit', 'thread/new', 'thread/receive',
+ 'thread/send'
+ )
+
+ builtin_variables = (
+ 'debugger-env', 'default-peg-grammar', 'janet/build',
+ 'janet/config-bits', 'janet/version', 'load-image-dict',
+ 'make-image-dict', 'math/-inf', 'math/e', 'math/inf',
+ 'math/int-max', 'math/int-min', 'math/int32-max',
+ 'math/int32-min', 'math/nan', 'math/pi', 'module/cache',
+ 'module/loaders', 'module/loading', 'module/paths',
+ 'root-env', 'stderr', 'stdin', 'stdout'
+ )
+
+ constants = (
+ 'false', 'nil', 'true'
+ )
+
+ # XXX: this form not usable to pass to `suffix=`
+ #_token_end = r'''
+ # (?= # followed by one of:
+ # \s # whitespace
+ # | \# # comment
+ # | [)\]] # end delimiters
+ # | $ # end of file
+ # )
+ #'''
+
+ # ...so, express it like this
+ _token_end = r'(?=\s|#|[)\]]|$)'
+
+ _first_char = r'[a-zA-Z!$%&*+\-./<=>?@^_]'
+ _rest_char = rf'([0-9:]|{_first_char})'
+
+ valid_name = rf'{_first_char}({_rest_char})*'
+
+ _radix_unit = r'[0-9a-zA-Z][0-9a-zA-Z_]*'
+
+ # exponent marker, optional sign, one or more alphanumeric
+ _radix_exp = r'&[+-]?[0-9a-zA-Z]+'
+
+ # 2af3__bee_
+ _hex_unit = r'[0-9a-fA-F][0-9a-fA-F_]*'
+
+ # 12_000__
+ _dec_unit = r'[0-9][0-9_]*'
+
+ # E-23
+ # lower or uppercase e, optional sign, one or more digits
+ _dec_exp = r'[eE][+-]?[0-9]+'
+
+ tokens = {
+ 'root': [
+ (r'#.*$', Comment.Single),
+
+ (r'\s+', Whitespace),
+
+ # radix number
+ (rf'''(?x)
+ [+-]? [0-9]{{1,2}} r {_radix_unit} \. ({_radix_unit})?
+ ({_radix_exp})?
+ ''',
+ Number),
+
+ (rf'''(?x)
+ [+-]? [0-9]{{1,2}} r (\.)? {_radix_unit}
+ ({_radix_exp})?
+ ''',
+ Number),
+
+ # hex number
+ (rf'(?x) [+-]? 0x {_hex_unit} \. ({_hex_unit})?',
+ Number.Hex),
+
+ (rf'(?x) [+-]? 0x (\.)? {_hex_unit}',
+ Number.Hex),
+
+ # decimal number
+ (rf'(?x) [+-]? {_dec_unit} \. ({_dec_unit})? ({_dec_exp})?',
+ Number.Float),
+
+ (rf'(?x) [+-]? (\.)? {_dec_unit} ({_dec_exp})?',
+ Number.Float),
+
+ # strings and buffers
+ (r'@?"', String, 'string'),
+
+ # long-strings and long-buffers
+ #
+ # non-empty content enclosed by a pair of n-backticks
+ # with optional leading @
+ (r'@?(`+)(.|\n)+?\1', String),
+
+ # things that hang out on front
+ #
+ # ' ~ , ; |
+ (r"['~,;|]", Operator),
+
+ # collection delimiters
+ #
+ # @( ( )
+ # @[ [ ]
+ # @{ { }
+ (r'@?[(\[{]|[)\]}]', Punctuation),
+
+ # constants
+ (words(constants, suffix=_token_end), Keyword.Constants),
+
+ # keywords
+ (rf'(:({_rest_char})+|:)', Name.Constant),
+
+ # symbols
+ (words(builtin_variables, suffix=_token_end),
+ Name.Variable.Global),
+
+ (words(special_forms, prefix=r'(?<=\()', suffix=_token_end),
+ Keyword.Reserved),
+
+ (words(builtin_macros, prefix=r'(?<=\()', suffix=_token_end),
+ Name.Builtin),
+
+ (words(builtin_functions, prefix=r'(?<=\()', suffix=_token_end),
+ Name.Function),
+
+ # other symbols
+ (valid_name, Name.Variable),
+ ],
+ 'string': [
+ (r'\\(u[0-9a-fA-F]{4}|U[0-9a-fA-F]{6})', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ (r'\\.', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String),
+ ]
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/macaulay2.py b/contrib/python/Pygments/py3/pygments/lexers/macaulay2.py
index a624890d85..6ea03ae23d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/macaulay2.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/macaulay2.py
@@ -4,7 +4,7 @@
Lexer for Macaulay2.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -13,7 +13,7 @@ from pygments.token import Comment, Keyword, Name, String, Text
__all__ = ['Macaulay2Lexer']
-# Auto-generated for Macaulay2-1.22. Do not modify this file manually.
+# Auto-generated for Macaulay2-1.23. Do not modify this file manually.
M2KEYWORDS = (
"and",
@@ -60,6 +60,7 @@ M2DATATYPES = (
"AngleBarList",
"Array",
"AssociativeExpression",
+ "AtomicInt",
"Bag",
"BasicList",
"BettiTally",
@@ -222,6 +223,7 @@ M2DATATYPES = (
"Sequence",
"Set",
"SheafExpression",
+ "SheafMap",
"SheafOfRings",
"SMALL",
"SPAN",
@@ -334,6 +336,7 @@ M2FUNCTIONS = (
"borel",
"cacheValue",
"cancelTask",
+ "canonicalBundle",
"capture",
"ceiling",
"centerString",
@@ -369,6 +372,7 @@ M2FUNCTIONS = (
"commonest",
"commonRing",
"comodule",
+ "compareExchange",
"complement",
"complete",
"components",
@@ -459,6 +463,7 @@ M2FUNCTIONS = (
"even",
"EXAMPLE",
"examples",
+ "exchange",
"exec",
"exp",
"expectedReesIdeal",
@@ -624,6 +629,7 @@ M2FUNCTIONS = (
"isInputFile",
"isIsomorphic",
"isIsomorphism",
+ "isLiftable",
"isLinearType",
"isListener",
"isLLL",
@@ -638,6 +644,7 @@ M2FUNCTIONS = (
"isPrimary",
"isPrime",
"isPrimitive",
+ "isProjective",
"isPseudoprime",
"isQuotientModule",
"isQuotientOf",
@@ -846,7 +853,9 @@ M2FUNCTIONS = (
"prune",
"pseudocode",
"pseudoRemainder",
+ "pullback",
"pushForward",
+ "pushout",
"QQParser",
"QRDecomposition",
"quotient",
@@ -857,6 +866,7 @@ M2FUNCTIONS = (
"randomKRationalPoint",
"randomMutableMatrix",
"rank",
+ "rays",
"read",
"readDirectory",
"readlink",
@@ -939,6 +949,7 @@ M2FUNCTIONS = (
"setupEmacs",
"sheaf",
"sheafHom",
+ "sheafMap",
"show",
"showHtml",
"showTex",
@@ -968,6 +979,7 @@ M2FUNCTIONS = (
"standardPairs",
"stashValue",
"status",
+ "store",
"style",
"sub",
"sublists",
@@ -1075,10 +1087,12 @@ M2FUNCTIONS = (
)
M2CONSTANTS = (
+ "A1BrouwerDegrees",
"AbstractToricVarieties",
"Acknowledgement",
"AdditionalPaths",
"AdjointIdeal",
+ "AdjunctionForSurfaces",
"AfterEval",
"AfterNoPrint",
"AfterPrint",
@@ -1097,6 +1111,7 @@ M2CONSTANTS = (
"AuxiliaryFiles",
"backtrace",
"Bareiss",
+ "Base",
"BaseFunction",
"baseRings",
"BaseRow",
@@ -1279,6 +1294,7 @@ M2CONSTANTS = (
"globalAssignmentHooks",
"GlobalHookStore",
"GlobalReleaseHook",
+ "GlobalSectionLimit",
"Gorenstein",
"GradedLieAlgebras",
"GraphicalModels",
@@ -1389,6 +1405,7 @@ M2CONSTANTS = (
"MapleInterface",
"Markov",
"MatchingFields",
+ "MatrixSchubert",
"Matroids",
"maxAllowableThreads",
"maxExponent",
@@ -1449,6 +1466,8 @@ M2CONSTANTS = (
"NumericalLinearAlgebra",
"NumericalSchubertCalculus",
"NumericSolutions",
+ "numTBBThreads",
+ "OIGroebnerBases",
"OldPolyhedra",
"OldToricVectorBundles",
"OnlineLookup",
@@ -1471,6 +1490,8 @@ M2CONSTANTS = (
"PackageTemplate",
"PairLimit",
"PairsRemaining",
+ "ParallelF4",
+ "ParallelizeByDegree",
"Parametrization",
"Parsing",
"path",
@@ -1480,6 +1501,7 @@ M2CONSTANTS = (
"PhylogeneticTrees",
"pi",
"PieriMaps",
+ "PlaneCurveLinearSeries",
"PlaneCurveSingularities",
"Points",
"Polyhedra",
@@ -1515,9 +1537,12 @@ M2CONSTANTS = (
"pruningMap",
"PseudomonomialPrimaryDecomposition",
"Pullback",
+ "pullbackMaps",
"PushForward",
+ "pushoutMaps",
"Python",
"QthPower",
+ "QuadraticIdealExamplesByRoos",
"Quasidegrees",
"QuaternaryQuartics",
"QuillenSuslin",
@@ -1568,6 +1593,7 @@ M2CONSTANTS = (
"Reverse",
"RevLex",
"Right",
+ "RInterface",
"rootPath",
"rootURI",
"RunDirectory",
@@ -1575,6 +1601,7 @@ M2CONSTANTS = (
"RunExternalM2",
"SagbiGbDetection",
"Saturation",
+ "SaturationMap",
"Schubert2",
"SchurComplexes",
"SchurFunctors",
@@ -1647,6 +1674,7 @@ M2CONSTANTS = (
"TangentCone",
"TateOnProducts",
"TensorComplexes",
+ "TerraciniLoci",
"Test",
"testExample",
"TestIdeals",
@@ -1664,6 +1692,7 @@ M2CONSTANTS = (
"ToricTopology",
"ToricVectorBundles",
"Torsion",
+ "TorsionFree",
"TotalPairs",
"Tree",
"TriangularSets",
@@ -1690,9 +1719,11 @@ M2CONSTANTS = (
"UseHilbertFunction",
"UserMode",
"UseSyzygies",
+ "Valuations",
"Variable",
"VariableBaseName",
"Variables",
+ "Varieties",
"Vasconcelos",
"VectorFields",
"VectorGraphics",
@@ -1709,6 +1740,7 @@ M2CONSTANTS = (
"WebApp",
"Weights",
"WeylAlgebra",
+ "WeylAlgebras",
"WeylGroups",
"WhitneyStratifications",
"Wrap",
@@ -1722,6 +1754,7 @@ class Macaulay2Lexer(RegexLexer):
url = 'https://macaulay2.com/'
aliases = ['macaulay2']
filenames = ['*.m2']
+ version_added = '2.12'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/make.py b/contrib/python/Pygments/py3/pygments/lexers/make.py
index 0f54ab6937..463e46e75b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/make.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/make.py
@@ -4,7 +4,7 @@
Lexers for Makefiles and similar.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -31,6 +31,8 @@ class MakefileLexer(Lexer):
aliases = ['make', 'makefile', 'mf', 'bsdmake']
filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
mimetypes = ['text/x-makefile']
+ url = 'https://en.wikipedia.org/wiki/Make_(software)'
+ version_added = ''
r_special = re.compile(
r'^(?:'
@@ -67,14 +69,14 @@ class MakefileLexer(Lexer):
class BaseMakefileLexer(RegexLexer):
"""
Lexer for simple Makefiles (no preprocessing).
-
- .. versionadded:: 0.10
"""
name = 'Base Makefile'
aliases = ['basemake']
filenames = []
mimetypes = []
+ url = 'https://en.wikipedia.org/wiki/Make_(software)'
+ version_added = '0.10'
tokens = {
'root': [
@@ -129,14 +131,13 @@ class BaseMakefileLexer(RegexLexer):
class CMakeLexer(RegexLexer):
"""
Lexer for CMake files.
-
- .. versionadded:: 1.2
"""
name = 'CMake'
url = 'https://cmake.org/documentation/'
aliases = ['cmake']
filenames = ['*.cmake', 'CMakeLists.txt']
mimetypes = ['text/x-cmake']
+ version_added = '1.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/markup.py b/contrib/python/Pygments/py3/pygments/lexers/markup.py
index bb4c7cecfd..5d5ff2c9fd 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/markup.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/markup.py
@@ -4,7 +4,7 @@
Lexers for non-HTML markup languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,25 +19,26 @@ from pygments.lexers.data import JsonLexer
from pygments.lexer import RegexLexer, DelegatingLexer, include, bygroups, \
using, this, do_insertions, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Generic, Other, Whitespace
+ Number, Punctuation, Generic, Other, Whitespace, Literal
from pygments.util import get_bool_opt, ClassNotFound
__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer',
'MozPreprocHashLexer', 'MozPreprocPercentLexer',
'MozPreprocXulLexer', 'MozPreprocJavascriptLexer',
- 'MozPreprocCssLexer', 'MarkdownLexer', 'TiddlyWiki5Lexer', 'WikitextLexer']
+ 'MozPreprocCssLexer', 'MarkdownLexer', 'OrgLexer', 'TiddlyWiki5Lexer',
+ 'WikitextLexer']
class BBCodeLexer(RegexLexer):
"""
A lexer that highlights BBCode(-like) syntax.
-
- .. versionadded:: 0.6
"""
name = 'BBCode'
aliases = ['bbcode']
mimetypes = ['text/x-bbcode']
+ url = 'https://www.bbcode.org/'
+ version_added = '0.6'
tokens = {
'root': [
@@ -64,14 +65,15 @@ class BBCodeLexer(RegexLexer):
class MoinWikiLexer(RegexLexer):
"""
For MoinMoin (and Trac) Wiki markup.
-
- .. versionadded:: 0.7
"""
name = 'MoinMoin/Trac Wiki markup'
aliases = ['trac-wiki', 'moin']
filenames = []
mimetypes = ['text/x-trac-wiki']
+ url = 'https://moinmo.in'
+ version_added = '0.7'
+
flags = re.MULTILINE | re.IGNORECASE
tokens = {
@@ -110,8 +112,6 @@ class RstLexer(RegexLexer):
"""
For reStructuredText markup.
- .. versionadded:: 0.7
-
Additional options accepted:
`handlecodeblocks`
@@ -127,6 +127,7 @@ class RstLexer(RegexLexer):
aliases = ['restructuredtext', 'rst', 'rest']
filenames = ['*.rst', '*.rest']
mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
+ version_added = '0.7'
flags = re.MULTILINE
def _handle_sourcecode(self, match):
@@ -172,9 +173,7 @@ class RstLexer(RegexLexer):
# from docutils.parsers.rst.states
closers = '\'")]}>\u2019\u201d\xbb!?'
unicode_delimiters = '\u2010\u2011\u2012\u2013\u2014\u00a0'
- end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
- % (re.escape(unicode_delimiters),
- re.escape(closers)))
+ end_string_suffix = (rf'((?=$)|(?=[-/:.,; \n\x00{re.escape(unicode_delimiters)}{re.escape(closers)}]))')
tokens = {
'root': [
@@ -222,7 +221,7 @@ class RstLexer(RegexLexer):
bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
Punctuation, Text, using(this, state='inline'))),
# Comments
- (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
+ (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment),
# Field list marker
(r'^( *)(:(?:\\\\|\\:|[^:\n])+:(?=\s))([ \t]*)',
bygroups(Text, Name.Class, Text)),
@@ -283,6 +282,8 @@ class TexLexer(RegexLexer):
aliases = ['tex', 'latex']
filenames = ['*.tex', '*.aux', '*.toc']
mimetypes = ['text/x-tex', 'text/x-latex']
+ url = 'https://tug.org'
+ version_added = ''
tokens = {
'general': [
@@ -295,13 +296,13 @@ class TexLexer(RegexLexer):
(r'\\\(', String, 'inlinemath'),
(r'\$\$', String.Backtick, 'displaymath'),
(r'\$', String, 'inlinemath'),
- (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
+ (r'\\([a-zA-Z@_:]+|\S?)', Keyword, 'command'),
(r'\\$', Keyword),
include('general'),
(r'[^\\$%&_^{}]+', Text),
],
'math': [
- (r'\\([a-zA-Z]+|.)', Name.Variable),
+ (r'\\([a-zA-Z]+|\S?)', Name.Variable),
include('general'),
(r'[0-9]+', Number),
(r'[-=!+*/()\[\]]', Operator),
@@ -336,14 +337,14 @@ class GroffLexer(RegexLexer):
"""
Lexer for the (g)roff typesetting language, supporting groff
extensions. Mainly useful for highlighting manpage sources.
-
- .. versionadded:: 0.6
"""
name = 'Groff'
aliases = ['groff', 'nroff', 'man']
filenames = ['*.[1-9]', '*.man', '*.1p', '*.3pm']
mimetypes = ['application/x-troff', 'text/troff']
+ url = 'https://www.gnu.org/software/groff'
+ version_added = '0.6'
tokens = {
'root': [
@@ -393,13 +394,13 @@ class MozPreprocHashLexer(RegexLexer):
Lexer for Mozilla Preprocessor files (with '#' as the marker).
Other data is left untouched.
-
- .. versionadded:: 2.0
"""
name = 'mozhashpreproc'
aliases = [name]
filenames = []
mimetypes = []
+ url = 'https://firefox-source-docs.mozilla.org/build/buildsystem/preprocessor.html'
+ version_added = '2.0'
tokens = {
'root': [
@@ -434,13 +435,13 @@ class MozPreprocPercentLexer(MozPreprocHashLexer):
Lexer for Mozilla Preprocessor files (with '%' as the marker).
Other data is left untouched.
-
- .. versionadded:: 2.0
"""
name = 'mozpercentpreproc'
aliases = [name]
filenames = []
mimetypes = []
+ url = 'https://firefox-source-docs.mozilla.org/build/buildsystem/preprocessor.html'
+ version_added = '2.0'
tokens = {
'root': [
@@ -454,13 +455,13 @@ class MozPreprocXulLexer(DelegatingLexer):
"""
Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
`XmlLexer`.
-
- .. versionadded:: 2.0
"""
name = "XUL+mozpreproc"
aliases = ['xul+mozpreproc']
filenames = ['*.xul.in']
mimetypes = []
+ url = 'https://firefox-source-docs.mozilla.org/build/buildsystem/preprocessor.html'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(XmlLexer, MozPreprocHashLexer, **options)
@@ -470,13 +471,13 @@ class MozPreprocJavascriptLexer(DelegatingLexer):
"""
Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
`JavascriptLexer`.
-
- .. versionadded:: 2.0
"""
name = "Javascript+mozpreproc"
aliases = ['javascript+mozpreproc']
filenames = ['*.js.in']
mimetypes = []
+ url = 'https://firefox-source-docs.mozilla.org/build/buildsystem/preprocessor.html'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(JavascriptLexer, MozPreprocHashLexer, **options)
@@ -486,13 +487,13 @@ class MozPreprocCssLexer(DelegatingLexer):
"""
Subclass of the `MozPreprocHashLexer` that highlights unlexed data with the
`CssLexer`.
-
- .. versionadded:: 2.0
"""
name = "CSS+mozpreproc"
aliases = ['css+mozpreproc']
filenames = ['*.css.in']
mimetypes = []
+ url = 'https://firefox-source-docs.mozilla.org/build/buildsystem/preprocessor.html'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(CssLexer, MozPreprocPercentLexer, **options)
@@ -501,14 +502,13 @@ class MozPreprocCssLexer(DelegatingLexer):
class MarkdownLexer(RegexLexer):
"""
For Markdown markup.
-
- .. versionadded:: 2.2
"""
name = 'Markdown'
url = 'https://daringfireball.net/projects/markdown/'
aliases = ['markdown', 'md']
filenames = ['*.md', '*.markdown']
mimetypes = ["text/x-markdown"]
+ version_added = '2.2'
flags = re.MULTILINE
def _handle_codeblock(self, match):
@@ -618,18 +618,125 @@ class MarkdownLexer(RegexLexer):
self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
RegexLexer.__init__(self, **options)
+class OrgLexer(RegexLexer):
+ """
+ For Org Mode markup.
+ """
+ name = 'Org Mode'
+ url = 'https://orgmode.org'
+ aliases = ['org', 'orgmode', 'org-mode']
+ filenames = ['*.org']
+ mimetypes = ["text/org"]
+ version_added = '2.18'
+
+ def _inline(start, end):
+ return rf'(?<!\w){start}(.|\n(?!\n))+?{end}(?!\w)'
+
+ tokens = {
+ 'root': [
+ (r'^# .*', Comment.Single),
+
+ # Headings
+ (r'^(\* )(COMMENT)( .*)',
+ bygroups(Generic.Heading, Comment.Preproc, Generic.Heading)),
+ (r'^(\*\*+ )(COMMENT)( .*)',
+ bygroups(Generic.Subheading, Comment.Preproc, Generic.Subheading)),
+ (r'^(\* )(DONE)( .*)',
+ bygroups(Generic.Heading, Generic.Deleted, Generic.Heading)),
+ (r'^(\*\*+ )(DONE)( .*)',
+ bygroups(Generic.Subheading, Generic.Deleted, Generic.Subheading)),
+ (r'^(\* )(TODO)( .*)',
+ bygroups(Generic.Heading, Generic.Error, Generic.Heading)),
+ (r'^(\*\*+ )(TODO)( .*)',
+ bygroups(Generic.Subheading, Generic.Error, Generic.Subheading)),
+
+ (r'^(\* .+?)( :[a-zA-Z0-9_@:]+:)?$', bygroups(Generic.Heading, Generic.Emph)),
+ (r'^(\*\*+ .+?)( :[a-zA-Z0-9_@:]+:)?$', bygroups(Generic.Subheading, Generic.Emph)),
+
+ # Unordered lists items, including TODO items and description items
+ (r'^(?:( *)([+-] )|( +)(\* ))(\[[ X-]\])?(.+ ::)?',
+ bygroups(Whitespace, Keyword, Whitespace, Keyword, Generic.Prompt, Name.Label)),
+
+ # Ordered list items
+ (r'^( *)([0-9]+[.)])( \[@[0-9]+\])?', bygroups(Whitespace, Keyword, Generic.Emph)),
+
+ # Dynamic blocks
+ (r'(?i)^( *#\+begin: *)((?:.|\n)*?)(^ *#\+end: *$)',
+ bygroups(Operator.Word, using(this), Operator.Word)),
+
+ # Comment blocks
+ (r'(?i)^( *#\+begin_comment *\n)((?:.|\n)*?)(^ *#\+end_comment *$)',
+ bygroups(Operator.Word, Comment.Multiline, Operator.Word)),
+
+ # Source code blocks
+ # TODO: language-dependent syntax highlighting (see Markdown lexer)
+ (r'(?i)^( *#\+begin_src .*)((?:.|\n)*?)(^ *#\+end_src *$)',
+ bygroups(Operator.Word, Text, Operator.Word)),
+
+ # Other blocks
+ (r'(?i)^( *#\+begin_\w+)( *\n)((?:.|\n)*?)(^ *#\+end_\w+)( *$)',
+ bygroups(Operator.Word, Whitespace, Text, Operator.Word, Whitespace)),
+
+ # Keywords
+ (r'^(#\+\w+:)(.*)$', bygroups(Name.Namespace, Text)),
+
+ # Properties and drawers
+ (r'(?i)^( *:\w+: *\n)((?:.|\n)*?)(^ *:end: *$)',
+ bygroups(Name.Decorator, Comment.Special, Name.Decorator)),
+
+ # Line break operator
+ (r'\\\\$', Operator),
+
+ # Deadline, Scheduled, CLOSED
+ (r'(?i)^( *(?:DEADLINE|SCHEDULED): )(<.+?> *)$',
+ bygroups(Generic.Error, Literal.Date)),
+ (r'(?i)^( *CLOSED: )(\[.+?\] *)$',
+ bygroups(Generic.Deleted, Literal.Date)),
+
+ # Bold
+ (_inline(r'\*', r'\*+'), Generic.Strong),
+ # Italic
+ (_inline(r'/', r'/'), Generic.Emph),
+ # Verbatim
+ (_inline(r'=', r'='), String), # TODO token
+ # Code
+ (_inline(r'~', r'~'), String),
+ # Strikethrough
+ (_inline(r'\+', r'\+'), Generic.Deleted),
+ # Underline
+ (_inline(r'_', r'_+'), Generic.EmphStrong),
+
+ # Dates
+ (r'<.+?>', Literal.Date),
+ # Macros
+ (r'\{\{\{.+?\}\}\}', Comment.Preproc),
+ # Footnotes
+ (r'(?<!\[)\[fn:.+?\]', Name.Tag),
+ # Links
+ (r'(?s)(\[\[)(.*?)(\]\[)(.*?)(\]\])',
+ bygroups(Punctuation, Name.Attribute, Punctuation, Name.Tag, Punctuation)),
+ (r'(?s)(\[\[)(.+?)(\]\])', bygroups(Punctuation, Name.Attribute, Punctuation)),
+ (r'(<<)(.+?)(>>)', bygroups(Punctuation, Name.Attribute, Punctuation)),
+
+ # Tables
+ (r'^( *)(\|[ -].*?[ -]\|)$', bygroups(Whitespace, String)),
+
+ # Any other text
+ (r'[^#*+\-0-9:\\/=~_<{\[|\n]+', Text),
+ (r'[#*+\-0-9:\\/=~_<{\[|\n]', Text),
+ ],
+ }
class TiddlyWiki5Lexer(RegexLexer):
"""
For TiddlyWiki5 markup.
-
- .. versionadded:: 2.7
"""
name = 'tiddler'
url = 'https://tiddlywiki.com/#TiddlerFiles'
aliases = ['tid']
filenames = ['*.tid']
mimetypes = ["text/vnd.tiddlywiki"]
+ version_added = '2.7'
flags = re.MULTILINE
def _handle_codeblock(self, match):
@@ -786,19 +893,18 @@ class WikitextLexer(RegexLexer):
installations, so we only highlight common syntaxes (built-in or from
popular extensions), and also assume templates produce no unbalanced
syntaxes.
-
- .. versionadded:: 2.15
"""
name = 'Wikitext'
url = 'https://www.mediawiki.org/wiki/Wikitext'
aliases = ['wikitext', 'mediawiki']
filenames = []
mimetypes = ['text/x-wiki']
+ version_added = '2.15'
flags = re.MULTILINE
def nowiki_tag_rules(tag_name):
return [
- (r'(?i)(</)({})(\s*)(>)'.format(tag_name), bygroups(Punctuation,
+ (rf'(?i)(</)({tag_name})(\s*)(>)', bygroups(Punctuation,
Name.Tag, Whitespace, Punctuation), '#pop'),
include('entity'),
include('text'),
@@ -806,15 +912,15 @@ class WikitextLexer(RegexLexer):
def plaintext_tag_rules(tag_name):
return [
- (r'(?si)(.*?)(</)({})(\s*)(>)'.format(tag_name), bygroups(Text,
+ (rf'(?si)(.*?)(</)({tag_name})(\s*)(>)', bygroups(Text,
Punctuation, Name.Tag, Whitespace, Punctuation), '#pop'),
]
- def delegate_tag_rules(tag_name, lexer):
+ def delegate_tag_rules(tag_name, lexer, **lexer_kwargs):
return [
- (r'(?i)(</)({})(\s*)(>)'.format(tag_name), bygroups(Punctuation,
+ (rf'(?i)(</)({tag_name})(\s*)(>)', bygroups(Punctuation,
Name.Tag, Whitespace, Punctuation), '#pop'),
- (r'(?si).+?(?=</{}\s*>)'.format(tag_name), using(lexer)),
+ (rf'(?si).+?(?=</{tag_name}\s*>)', using(lexer, **lexer_kwargs)),
]
def text_rules(token):
@@ -946,8 +1052,6 @@ class WikitextLexer(RegexLexer):
'sh-latn', 'sh-cyrl',
# KuConverter.php
'ku', 'ku-arab', 'ku-latn',
- # KkConverter.php
- 'kk', 'kk-cyrl', 'kk-latn', 'kk-arab', 'kk-kz', 'kk-tr', 'kk-cn',
# IuConverter.php
'iu', 'ike-cans', 'ike-latn',
# GanConverter.php
@@ -1020,7 +1124,7 @@ class WikitextLexer(RegexLexer):
(r'(?i)\b(?:{}){}{}*'.format('|'.join(protocols),
link_address, link_char_class), Name.Label),
# Magic links
- (r'\b(?:RFC|PMID){}+[0-9]+\b'.format(nbsp_char),
+ (rf'\b(?:RFC|PMID){nbsp_char}+[0-9]+\b',
Name.Function.Magic),
(r"""(?x)
\bISBN {nbsp_char}
@@ -1035,7 +1139,7 @@ class WikitextLexer(RegexLexer):
'redirect-inner': [
(r'(\]\])(\s*?\n)', bygroups(Punctuation, Whitespace), '#pop'),
(r'(\#)([^#]*?)', bygroups(Punctuation, Name.Label)),
- (r'(?i)[{}]+'.format(title_char), Name.Tag),
+ (rf'(?i)[{title_char}]+', Name.Tag),
],
'list': [
# Description lists
@@ -1062,9 +1166,9 @@ class WikitextLexer(RegexLexer):
r"""(?xi)
(\[\[)
(File|Image) (:)
- ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | <!--[\s\S]*?--> )*)
- (?: (\#) ([%s]*?) )?
- """ % (title_char, f'{title_char}#'),
+ ((?: [{}] | \{{{{2,3}}[^{{}}]*?\}}{{2,3}} | <!--[\s\S]*?--> )*)
+ (?: (\#) ([{}]*?) )?
+ """.format(title_char, f'{title_char}#'),
bygroups(Punctuation, Name.Namespace, Punctuation,
using(this, state=['wikilink-name']), Punctuation, Name.Label),
'medialink-inner'
@@ -1072,24 +1176,24 @@ class WikitextLexer(RegexLexer):
# Wikilinks
(
r"""(?xi)
- (\[\[)(?!%s) # Should not contain URLs
- (?: ([%s]*) (:))?
- ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | <!--[\s\S]*?--> )*?)
- (?: (\#) ([%s]*?) )?
+ (\[\[)(?!{}) # Should not contain URLs
+ (?: ([{}]*) (:))?
+ ((?: [{}] | \{{{{2,3}}[^{{}}]*?\}}{{2,3}} | <!--[\s\S]*?--> )*?)
+ (?: (\#) ([{}]*?) )?
(\]\])
- """ % ('|'.join(protocols), title_char.replace('/', ''),
+ """.format('|'.join(protocols), title_char.replace('/', ''),
title_char, f'{title_char}#'),
bygroups(Punctuation, Name.Namespace, Punctuation,
using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation)
),
(
r"""(?xi)
- (\[\[)(?!%s)
- (?: ([%s]*) (:))?
- ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | <!--[\s\S]*?--> )*?)
- (?: (\#) ([%s]*?) )?
+ (\[\[)(?!{})
+ (?: ([{}]*) (:))?
+ ((?: [{}] | \{{{{2,3}}[^{{}}]*?\}}{{2,3}} | <!--[\s\S]*?--> )*?)
+ (?: (\#) ([{}]*?) )?
(\|)
- """ % ('|'.join(protocols), title_char.replace('/', ''),
+ """.format('|'.join(protocols), title_char.replace('/', ''),
title_char, f'{title_char}#'),
bygroups(Punctuation, Name.Namespace, Punctuation,
using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation),
@@ -1192,7 +1296,7 @@ class WikitextLexer(RegexLexer):
r"""(?xi)
(-\{{) # Use {{ to escape format()
([^|]) (\|)
- (?:
+ (?:
(?: ([^;]*?) (=>))?
(\s* (?:{variants}) \s*) (:)
)?
@@ -1322,9 +1426,9 @@ class WikitextLexer(RegexLexer):
'parameter-inner',
),
# Magic variables
- (r'(?i)(\{\{)(\s*)(%s)(\s*)(\}\})' % '|'.join(magic_vars_i),
+ (r'(?i)(\{{\{{)(\s*)({})(\s*)(\}}\}})'.format('|'.join(magic_vars_i)),
bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
- (r'(\{\{)(\s*)(%s)(\s*)(\}\})' % '|'.join(magic_vars),
+ (r'(\{{\{{)(\s*)({})(\s*)(\}}\}})'.format('|'.join(magic_vars)),
bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
# Parser functions & templates
(r'\{\{', Punctuation, 'template-begin-space'),
@@ -1350,17 +1454,17 @@ class WikitextLexer(RegexLexer):
(r'\s+', Whitespace),
# Parser functions
(
- r'(?i)(\#[%s]*?|%s)(:)' % (title_char,
+ r'(?i)(\#[{}]*?|{})(:)'.format(title_char,
'|'.join(parser_functions_i)),
bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
),
(
- r'(%s)(:)' % ('|'.join(parser_functions)),
+ r'({})(:)'.format('|'.join(parser_functions)),
bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
),
# Templates
(
- r'(?i)([%s]*?)(:)' % title_char,
+ rf'(?i)([{title_char}]*?)(:)',
bygroups(Name.Namespace, Punctuation), ('#pop', 'template-name')
),
default(('#pop', 'template-name'),),
@@ -1539,9 +1643,9 @@ class WikitextLexer(RegexLexer):
'tag-gallery': plaintext_tag_rules('gallery'),
'tag-graph': plaintext_tag_rules('graph'),
'tag-rss': plaintext_tag_rules('rss'),
- 'tag-math': delegate_tag_rules('math', TexLexer),
- 'tag-chem': delegate_tag_rules('chem', TexLexer),
- 'tag-ce': delegate_tag_rules('ce', TexLexer),
+ 'tag-math': delegate_tag_rules('math', TexLexer, state='math'),
+ 'tag-chem': delegate_tag_rules('chem', TexLexer, state='math'),
+ 'tag-ce': delegate_tag_rules('ce', TexLexer, state='math'),
'tag-templatedata': delegate_tag_rules('templatedata', JsonLexer),
'text-italic': text_rules(Generic.Emph),
'text-bold': text_rules(Generic.Strong),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/math.py b/contrib/python/Pygments/py3/pygments/lexers/math.py
index 530f8539c9..66fd898ba1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/math.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/math.py
@@ -4,10 +4,11 @@
Just export lexers that were contained in this module.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+# ruff: noqa: F401
from pygments.lexers.python import NumPyLexer
from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
OctaveLexer, ScilabLexer
diff --git a/contrib/python/Pygments/py3/pygments/lexers/matlab.py b/contrib/python/Pygments/py3/pygments/lexers/matlab.py
index 753a6efcf0..7005a3f28c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/matlab.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/matlab.py
@@ -4,7 +4,7 @@
Lexers for Matlab and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,13 +23,13 @@ __all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
class MatlabLexer(RegexLexer):
"""
For Matlab source code.
-
- .. versionadded:: 0.10
"""
name = 'Matlab'
aliases = ['matlab']
filenames = ['*.m']
mimetypes = ['text/matlab']
+ url = 'https://www.mathworks.com/products/matlab.html'
+ version_added = '0.10'
_operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\'
@@ -41,8 +41,8 @@ class MatlabLexer(RegexLexer):
# numbers (must come before punctuation to handle `.5`; cannot use
# `\b` due to e.g. `5. + .5`). The negative lookahead on operators
# avoids including the dot in `1./x` (the dot is part of `./`).
- (r'(?<!\w)((\d+\.\d+)|(\d*\.\d+)|(\d+\.(?!%s)))'
- r'([eEf][+-]?\d+)?(?!\w)' % _operators, Number.Float),
+ (rf'(?<!\w)((\d+\.\d+)|(\d*\.\d+)|(\d+\.(?!{_operators})))'
+ r'([eEf][+-]?\d+)?(?!\w)', Number.Float),
(r'\b\d+[eEf][+-]?[0-9]+\b', Number.Float),
(r'\b\d+\b', Number.Integer),
@@ -2665,7 +2665,7 @@ class MatlabLexer(RegexLexer):
# `cd ./ foo`.). Here, the regex checks that the first word in the
# line is not followed by <spaces> and then
# (equal | open-parenthesis | <operator><space> | <space>).
- (r'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|%s\s|\s)' % _operators,
+ (rf'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|{_operators}\s|\s)',
bygroups(Whitespace, Name, Whitespace), 'commandargs'),
include('expressions')
@@ -2742,11 +2742,11 @@ class MatlabSessionLexer(Lexer):
"""
For Matlab sessions. Modeled after PythonConsoleLexer.
Contributed by Ken Schutte <kschutte@csail.mit.edu>.
-
- .. versionadded:: 0.10
"""
name = 'Matlab session'
aliases = ['matlabsession']
+ url = 'https://www.mathworks.com/products/matlab.html'
+ version_added = '0.10'
def get_tokens_unprocessed(self, text):
mlexer = MatlabLexer(**self.options)
@@ -2811,14 +2811,13 @@ class MatlabSessionLexer(Lexer):
class OctaveLexer(RegexLexer):
"""
For GNU Octave source code.
-
- .. versionadded:: 1.5
"""
name = 'Octave'
url = 'https://www.gnu.org/software/octave/index'
aliases = ['octave']
filenames = ['*.m']
mimetypes = ['text/octave']
+ version_added = '1.5'
# These lists are generated automatically.
# Run the following in bash shell:
@@ -3229,14 +3228,13 @@ class OctaveLexer(RegexLexer):
class ScilabLexer(RegexLexer):
"""
For Scilab source code.
-
- .. versionadded:: 1.5
"""
name = 'Scilab'
url = 'https://www.scilab.org/'
aliases = ['scilab']
filenames = ['*.sci', '*.sce', '*.tst']
mimetypes = ['text/scilab']
+ version_added = '1.5'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/maxima.py b/contrib/python/Pygments/py3/pygments/lexers/maxima.py
index 4c6dc7962c..305b7552f4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/maxima.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/maxima.py
@@ -6,7 +6,7 @@
Derived from pygments/lexers/algebra.py.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,13 +22,12 @@ class MaximaLexer(RegexLexer):
"""
A Maxima lexer.
Derived from pygments.lexers.MuPADLexer.
-
- .. versionadded:: 2.11
"""
name = 'Maxima'
url = 'http://maxima.sourceforge.net'
aliases = ['maxima', 'macsyma']
filenames = ['*.mac', '*.max']
+ version_added = '2.11'
keywords = ('if', 'then', 'else', 'elseif',
'do', 'while', 'repeat', 'until',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/meson.py b/contrib/python/Pygments/py3/pygments/lexers/meson.py
index f74f7191a3..8127b1691e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/meson.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/meson.py
@@ -4,7 +4,7 @@
Pygments lexer for the Meson build system
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,8 +22,6 @@ class MesonLexer(RegexLexer):
https://mesonbuild.com/Syntax.html#grammar for version 0.58.
Some of those definitions are improperly transcribed, so the Meson++
implementation was also checked: https://github.com/dcbaker/meson-plus-plus.
-
- .. versionadded:: 2.10
"""
# TODO String interpolation @VARNAME@ inner matches
@@ -34,6 +32,7 @@ class MesonLexer(RegexLexer):
aliases = ['meson', 'meson.build']
filenames = ['meson.build', 'meson_options.txt']
mimetypes = ['text/x-meson']
+ version_added = '2.10'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/mime.py b/contrib/python/Pygments/py3/pygments/lexers/mime.py
index 8bf16f74fd..4b309dd004 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/mime.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/mime.py
@@ -4,7 +4,7 @@
Lexer for Multipurpose Internet Mail Extensions (MIME) data.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,8 +46,6 @@ class MIMELexer(RegexLexer):
`Content-Transfer-Encoding`
Treat the data as a specific encoding. Or this lexer would try to parse
from header by default. (default: None)
-
- .. versionadded:: 2.5
"""
name = "MIME"
@@ -55,6 +53,8 @@ class MIMELexer(RegexLexer):
mimetypes = ["multipart/mixed",
"multipart/related",
"multipart/alternative"]
+ url = 'https://en.wikipedia.org/wiki/MIME'
+ version_added = '2.5'
def __init__(self, **options):
super().__init__(**options)
@@ -95,7 +95,7 @@ class MIMELexer(RegexLexer):
return
# find boundary
- bdry_pattern = r"^--%s(--)?\n" % re.escape(self.boundary)
+ bdry_pattern = rf"^--{re.escape(self.boundary)}(--)?\n"
bdry_matcher = re.compile(bdry_pattern, re.MULTILINE)
# some data has prefix text before first boundary
diff --git a/contrib/python/Pygments/py3/pygments/lexers/minecraft.py b/contrib/python/Pygments/py3/pygments/lexers/minecraft.py
index 11faa00096..99de0d5be0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/minecraft.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/minecraft.py
@@ -3,19 +3,19 @@
~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for Minecraft related languages.
-
+
SNBT. A data communication format used in Minecraft.
wiki: https://minecraft.wiki/w/NBT_format
-
+
MCFunction. The Function file for Minecraft Data packs and Add-ons.
official: https://learn.microsoft.com/en-us/minecraft/creator/documents/functionsintroduction
wiki: https://minecraft.wiki/w/Function
-
+
MCSchema. A kind of data Schema for Minecraft Add-on Development.
official: https://learn.microsoft.com/en-us/minecraft/creator/reference/content/schemasreference/
community example: https://www.mcbe-dev.net/addons/data-driven/manifest.html
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -28,8 +28,6 @@ __all__ = ['SNBTLexer', 'MCFunctionLexer', 'MCSchemaLexer']
class SNBTLexer(RegexLexer):
"""Lexer for stringified NBT, a data format used in Minecraft
-
- .. versionadded:: 2.12.0
"""
name = "SNBT"
@@ -37,6 +35,7 @@ class SNBTLexer(RegexLexer):
aliases = ["snbt"]
filenames = ["*.snbt"]
mimetypes = ["text/snbt"]
+ version_added = '2.12'
tokens = {
"root": [
@@ -102,8 +101,6 @@ class SNBTLexer(RegexLexer):
class MCFunctionLexer(RegexLexer):
"""Lexer for the mcfunction scripting language used in Minecraft
Modelled somewhat after the `GitHub mcfunction grammar <https://github.com/Arcensoth/language-mcfunction>`_.
-
- .. versionadded:: 2.12.0
"""
name = "MCFunction"
@@ -111,6 +108,7 @@ class MCFunctionLexer(RegexLexer):
aliases = ["mcfunction", "mcf"]
filenames = ["*.mcfunction"]
mimetypes = ["text/mcfunction"]
+ version_added = '2.12'
# Used to denotate the start of a block comment, borrowed from Github's mcfunction
_block_comment_prefix = "[>!]"
@@ -218,26 +216,26 @@ class MCFunctionLexer(RegexLexer):
"selectors": [
(r"@[a-z]", Name.Variable),
],
-
+
## Generic Property Container
# There are several, differing instances where the language accepts
# specific contained keys or contained key, value pairings.
- #
+ #
# Property Maps:
# - Starts with either `[` or `{`
# - Key separated by `:` or `=`
# - Deliminated by `,`
- #
+ #
# Property Lists:
# - Starts with `[`
# - Deliminated by `,`
- #
+ #
# For simplicity, these patterns match a generic, nestable structure
# which follow a key, value pattern. For normal lists, there's only keys.
# This allow some "illegal" structures, but we'll accept those for
# sake of simplicity
- #
+ #
# Examples:
# - `[facing=up, powered=true]` (blockstate)
# - `[name="hello world", nbt={key: 1b}]` (selector + nbt)
@@ -298,7 +296,7 @@ class MCFunctionLexer(RegexLexer):
],
"property.delimiter": [
include("whitespace"),
-
+
(r"[:=]!?", Punctuation, "property.value"),
(r",", Punctuation),
@@ -321,15 +319,14 @@ class MCFunctionLexer(RegexLexer):
class MCSchemaLexer(RegexLexer):
"""Lexer for Minecraft Add-ons data Schemas, an interface structure standard used in Minecraft
-
- .. versionadded:: 2.14.0
"""
-
+
name = 'MCSchema'
url = 'https://learn.microsoft.com/en-us/minecraft/creator/reference/content/schemasreference/'
aliases = ['mcschema']
filenames = ['*.mcschema']
mimetypes = ['text/mcschema']
+ version_added = '2.14'
tokens = {
'commentsandwhitespace': [
@@ -360,35 +357,35 @@ class MCSchemaLexer(RegexLexer):
'root': [
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
-
+
# keywords for optional word and field types
(r'(?<=: )opt', Operator.Word),
(r'(?<=\s)[\w-]*(?=(\s+"|\n))', Keyword.Declaration),
-
+
# numeric literals
(r'0[bB][01]+', Number.Bin),
(r'0[oO]?[0-7]+', Number.Oct),
(r'0[xX][0-9a-fA-F]+', Number.Hex),
(r'\d+', Number.Integer),
(r'(\.\d+|\d+\.\d*|\d+)([eE][-+]?\d+)?', Number.Float),
-
+
# possible punctuations
(r'\.\.\.|=>', Punctuation),
(r'\+\+|--|~|\?\?=?|\?|:|\\(?=\n)|'
r'(<<|>>>?|==?|!=?|(?:\*\*|\|\||&&|[-<>+*%&|^/]))=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
-
+
# strings
(r"'", String.Single, 'singlestring'),
(r'"', String.Double, 'doublestring'),
-
+
# title line
(r'[\w-]*?(?=:\{?\n)', String.Symbol),
# title line with a version code, formatted
# `major.minor.patch-prerelease+buildmeta`
(r'([\w-]*?)(:)(\d+)(?:(\.)(\d+)(?:(\.)(\d+)(?:(\-)((?:[^\W_]|-)*(?:\.(?:[^\W_]|-)*)*))?(?:(\+)((?:[^\W_]|-)+(?:\.(?:[^\W_]|-)+)*))?)?)?(?=:\{?\n)', bygroups(String.Symbol, Operator, Number.Integer, Operator, Number.Integer, Operator, Number.Integer, Operator, String, Operator, String)),
-
+
(r'.*\n', Text),
]
}
diff --git a/contrib/python/Pygments/py3/pygments/lexers/mips.py b/contrib/python/Pygments/py3/pygments/lexers/mips.py
index 257605d7e5..8f249e18a5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/mips.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/mips.py
@@ -4,7 +4,7 @@
Lexers for MIPS assembly.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -24,10 +24,12 @@ class MIPSLexer(RegexLexer):
name = 'MIPS'
aliases = ['mips']
+ version_added = ''
# TODO: add '*.s' and '*.asm', which will require designing an analyse_text
# method for this lexer and refactoring those from Gas and Nasm in order to
# have relatively reliable detection
filenames = ['*.mips', '*.MIPS']
+ url = 'https://mips.com'
keywords = [
# Arithmetic insturctions
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ml.py b/contrib/python/Pygments/py3/pygments/lexers/ml.py
index 3dfa6d9345..ff50c6f0cb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ml.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ml.py
@@ -4,7 +4,7 @@
Lexers for ML family languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,14 @@ __all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer', 'ReasonLexer', 'FStarLexer']
class SMLLexer(RegexLexer):
"""
For the Standard ML language.
-
- .. versionadded:: 1.5
"""
name = 'Standard ML'
aliases = ['sml']
filenames = ['*.sml', '*.sig', '*.fun']
mimetypes = ['text/x-standardml', 'application/x-standardml']
+ url = 'https://en.wikipedia.org/wiki/Standard_ML'
+ version_added = '1.5'
alphanumid_reserved = {
# Core
@@ -121,7 +121,7 @@ class SMLLexer(RegexLexer):
'core': [
# Punctuation that doesn't overlap symbolic identifiers
- (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
+ (r'({})'.format('|'.join(re.escape(z) for z in nonid_reserved)),
Punctuation),
# Special constants: strings, floats, numbers in decimal and hex
@@ -137,8 +137,8 @@ class SMLLexer(RegexLexer):
# Labels
(r'#\s*[1-9][0-9]*', Name.Label),
- (r'#\s*(%s)' % alphanumid_re, Name.Label),
- (r'#\s+(%s)' % symbolicid_re, Name.Label),
+ (rf'#\s*({alphanumid_re})', Name.Label),
+ (rf'#\s+({symbolicid_re})', Name.Label),
# Some reserved words trigger a special, local lexer state change
(r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
(r'\b(exception)\b(?!\')', Keyword.Reserved, 'ename'),
@@ -148,14 +148,14 @@ class SMLLexer(RegexLexer):
# Regular identifiers, long and otherwise
(r'\'[\w\']*', Name.Decorator),
- (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
- (r'(%s)' % alphanumid_re, id_callback),
- (r'(%s)' % symbolicid_re, id_callback),
+ (rf'({alphanumid_re})(\.)', long_id_callback, "dotted"),
+ (rf'({alphanumid_re})', id_callback),
+ (rf'({symbolicid_re})', id_callback),
],
'dotted': [
- (r'(%s)(\.)' % alphanumid_re, long_id_callback),
- (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
- (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
+ (rf'({alphanumid_re})(\.)', long_id_callback),
+ (rf'({alphanumid_re})', end_id_callback, "#pop"),
+ (rf'({symbolicid_re})', end_id_callback, "#pop"),
(r'\s+', Error),
(r'\S+', Error),
],
@@ -208,7 +208,7 @@ class SMLLexer(RegexLexer):
'string': stringy(String.Double),
'breakout': [
- (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
+ (r'(?=\b({})\b(?!\'))'.format('|'.join(alphanumid_reserved)), Text, '#pop'),
],
# Dealing with what comes after module system keywords
@@ -216,7 +216,7 @@ class SMLLexer(RegexLexer):
include('whitespace'),
include('breakout'),
- (r'(%s)' % alphanumid_re, Name.Namespace),
+ (rf'({alphanumid_re})', Name.Namespace),
default('#pop'),
],
@@ -226,8 +226,8 @@ class SMLLexer(RegexLexer):
(r'\'[\w\']*', Name.Decorator),
(r'\(', Punctuation, 'tyvarseq'),
- (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
+ (rf'({alphanumid_re})', Name.Function, '#pop'),
+ (rf'({symbolicid_re})', Name.Function, '#pop'),
# Ignore interesting function declarations like "fun (x + y) = ..."
default('#pop'),
@@ -239,12 +239,12 @@ class SMLLexer(RegexLexer):
(r'\'[\w\']*', Name.Decorator),
(r'\(', Punctuation, 'tyvarseq'),
- (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
+ (rf'({alphanumid_re})(\s*)(=(?!{symbolicid_re}))',
bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
+ (rf'({symbolicid_re})(\s*)(=(?!{symbolicid_re}))',
bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
+ (rf'({alphanumid_re})', Name.Variable, '#pop'),
+ (rf'({symbolicid_re})', Name.Variable, '#pop'),
# Ignore interesting patterns like 'val (x, y)'
default('#pop'),
@@ -257,10 +257,10 @@ class SMLLexer(RegexLexer):
(r'\'[\w\']*', Name.Decorator),
(r'\(', Punctuation, 'tyvarseq'),
- (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
+ (rf'=(?!{symbolicid_re})', Punctuation, ('#pop', 'typbind')),
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
+ (rf'({alphanumid_re})', Keyword.Type),
+ (rf'({symbolicid_re})', Keyword.Type),
(r'\S+', Error, '#pop'),
],
@@ -284,11 +284,11 @@ class SMLLexer(RegexLexer):
(r'\(', Punctuation, 'tyvarseq'),
(r'(=)(\s*)(datatype)',
bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
- (r'=(?!%s)' % symbolicid_re, Punctuation,
+ (rf'=(?!{symbolicid_re})', Punctuation,
('#pop', 'datbind', 'datcon')),
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
+ (rf'({alphanumid_re})', Keyword.Type),
+ (rf'({symbolicid_re})', Keyword.Type),
(r'\S+', Error, '#pop'),
],
@@ -300,9 +300,9 @@ class SMLLexer(RegexLexer):
(r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
(r'\b(of)\b(?!\')', Keyword.Reserved),
- (r'(\|)(\s*)(%s)' % alphanumid_re,
+ (rf'(\|)(\s*)({alphanumid_re})',
bygroups(Punctuation, Text, Name.Class)),
- (r'(\|)(\s+)(%s)' % symbolicid_re,
+ (rf'(\|)(\s+)({symbolicid_re})',
bygroups(Punctuation, Text, Name.Class)),
include('breakout'),
@@ -314,20 +314,20 @@ class SMLLexer(RegexLexer):
'ename': [
include('whitespace'),
- (r'(and\b)(\s+)(%s)' % alphanumid_re,
+ (rf'(and\b)(\s+)({alphanumid_re})',
bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'(and\b)(\s*)(%s)' % symbolicid_re,
+ (rf'(and\b)(\s*)({symbolicid_re})',
bygroups(Keyword.Reserved, Text, Name.Class)),
(r'\b(of)\b(?!\')', Keyword.Reserved),
- (r'(%s)|(%s)' % (alphanumid_re, symbolicid_re), Name.Class),
+ (rf'({alphanumid_re})|({symbolicid_re})', Name.Class),
default('#pop'),
],
'datcon': [
include('whitespace'),
- (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
+ (rf'({alphanumid_re})', Name.Class, '#pop'),
+ (rf'({symbolicid_re})', Name.Class, '#pop'),
(r'\S+', Error, '#pop'),
],
@@ -355,8 +355,6 @@ class SMLLexer(RegexLexer):
class OcamlLexer(RegexLexer):
"""
For the OCaml language.
-
- .. versionadded:: 0.7
"""
name = 'OCaml'
@@ -364,6 +362,7 @@ class OcamlLexer(RegexLexer):
aliases = ['ocaml']
filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
mimetypes = ['text/x-ocaml']
+ version_added = '0.7'
keywords = (
'and', 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
@@ -399,11 +398,11 @@ class OcamlLexer(RegexLexer):
(r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name.Class),
(r'\(\*(?![)])', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+ (r'\b({})\b'.format('|'.join(keywords)), Keyword),
+ (r'({})'.format('|'.join(keyopts[::-1])), Operator),
+ (rf'({infix_syms}|{prefix_syms})?{operators}', Operator),
+ (r'\b({})\b'.format('|'.join(word_operators)), Operator.Word),
+ (r'\b({})\b'.format('|'.join(primitives)), Keyword.Type),
(r"[^\W\d][\w']*", Name),
@@ -448,14 +447,14 @@ class OcamlLexer(RegexLexer):
class OpaLexer(RegexLexer):
"""
Lexer for the Opa language.
-
- .. versionadded:: 1.5
"""
name = 'Opa'
aliases = ['opa']
filenames = ['*.opa']
mimetypes = ['text/x-opa']
+ url = 'http://opalang.org'
+ version_added = '1.5'
# most of these aren't strictly keywords
# but if you color only real keywords, you might just
@@ -557,8 +556,8 @@ class OpaLexer(RegexLexer):
# way to syntactic distinguish binding constructions
# unfortunately, this colors the equal in {x=2} too
(r'=(?!'+op_re+r')', Keyword),
- (r'(%s)+' % op_re, Operator),
- (r'(%s)+' % punc_re, Operator),
+ (rf'({op_re})+', Operator),
+ (rf'({punc_re})+', Operator),
# coercions
(r':', Operator, 'type'),
@@ -771,8 +770,6 @@ class OpaLexer(RegexLexer):
class ReasonLexer(RegexLexer):
"""
For the ReasonML language.
-
- .. versionadded:: 2.6
"""
name = 'ReasonML'
@@ -780,6 +777,7 @@ class ReasonLexer(RegexLexer):
aliases = ['reasonml', 'reason']
filenames = ['*.re', '*.rei']
mimetypes = ['text/x-reasonml']
+ version_added = '2.6'
keywords = (
'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
@@ -815,11 +813,11 @@ class ReasonLexer(RegexLexer):
(r'\b([A-Z][\w\']*)', Name.Class),
(r'//.*?\n', Comment.Single),
(r'\/\*(?!/)', Comment.Multiline, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+ (r'\b({})\b'.format('|'.join(keywords)), Keyword),
+ (r'({})'.format('|'.join(keyopts[::-1])), Operator.Word),
+ (rf'({infix_syms}|{prefix_syms})?{operators}', Operator),
+ (r'\b({})\b'.format('|'.join(word_operators)), Operator.Word),
+ (r'\b({})\b'.format('|'.join(primitives)), Keyword.Type),
(r"[^\W\d][\w']*", Name),
@@ -864,7 +862,6 @@ class ReasonLexer(RegexLexer):
class FStarLexer(RegexLexer):
"""
For the F* language.
- .. versionadded:: 2.7
"""
name = 'FStar'
@@ -872,6 +869,7 @@ class FStarLexer(RegexLexer):
aliases = ['fstar']
filenames = ['*.fst', '*.fsti']
mimetypes = ['text/x-fstar']
+ version_added = '2.7'
keywords = (
'abstract', 'attributes', 'noeq', 'unopteq', 'and'
@@ -912,12 +910,12 @@ class FStarLexer(RegexLexer):
(r'\b([A-Z][\w\']*)', Name.Class),
(r'\(\*(?![)])', Comment, 'comment'),
(r'\/\/.+$', Comment),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception),
- (r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+ (r'\b({})\b'.format('|'.join(keywords)), Keyword),
+ (r'\b({})\b'.format('|'.join(assume_keywords)), Name.Exception),
+ (r'\b({})\b'.format('|'.join(decl_keywords)), Keyword.Declaration),
+ (r'({})'.format('|'.join(keyopts[::-1])), Operator),
+ (rf'({infix_syms}|{prefix_syms})?{operators}', Operator),
+ (r'\b({})\b'.format('|'.join(primitives)), Keyword.Type),
(r"[^\W\d][\w']*", Name),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/modeling.py b/contrib/python/Pygments/py3/pygments/lexers/modeling.py
index e24768072d..b5393e7483 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/modeling.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/modeling.py
@@ -4,7 +4,7 @@
Lexers for modeling languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,14 +23,13 @@ __all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
class ModelicaLexer(RegexLexer):
"""
For Modelica source code.
-
- .. versionadded:: 1.1
"""
name = 'Modelica'
url = 'http://www.modelica.org/'
aliases = ['modelica']
filenames = ['*.mo']
mimetypes = ['text/x-modelica']
+ version_added = '1.1'
flags = re.DOTALL | re.MULTILINE
@@ -100,13 +99,13 @@ class BugsLexer(RegexLexer):
"""
Pygments Lexer for OpenBugs and WinBugs
models.
-
- .. versionadded:: 1.6
"""
name = 'BUGS'
aliases = ['bugs', 'winbugs', 'openbugs']
filenames = ['*.bug']
+ url = 'https://www.mrc-bsu.cam.ac.uk/software/bugs/openbugs'
+ version_added = '1.6'
_FUNCTIONS = (
# Scalar functions
@@ -166,8 +165,7 @@ class BugsLexer(RegexLexer):
# Reserved Words
(r'(for|in)(?![\w.])', Keyword.Reserved),
# Built-in Functions
- (r'(%s)(?=\s*\()'
- % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
+ (r'({})(?=\s*\()'.format(r'|'.join(_FUNCTIONS + _DISTRIBUTIONS)),
Name.Builtin),
# Regular variable names
(r'[A-Za-z][\w.]*', Name),
@@ -195,13 +193,13 @@ class BugsLexer(RegexLexer):
class JagsLexer(RegexLexer):
"""
Pygments Lexer for JAGS.
-
- .. versionadded:: 1.6
"""
name = 'JAGS'
aliases = ['jags']
filenames = ['*.jag', '*.bug']
+ url = 'https://mcmc-jags.sourceforge.io'
+ version_added = '1.6'
# JAGS
_FUNCTIONS = (
@@ -215,7 +213,7 @@ class JagsLexer(RegexLexer):
# Truncation/Censoring (should I include)
'T', 'I')
# Distributions with density, probability and quartile functions
- _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
+ _DISTRIBUTIONS = tuple(f'[dpq]{x}' for x in
('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
@@ -251,9 +249,9 @@ class JagsLexer(RegexLexer):
(r'(for|in)(?![\w.])', Keyword.Reserved),
# Builtins
# Need to use lookahead because . is a valid char
- (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
+ (r'({})(?=\s*\()'.format(r'|'.join(_FUNCTIONS
+ _DISTRIBUTIONS
- + _OTHER_DISTRIBUTIONS),
+ + _OTHER_DISTRIBUTIONS)),
Name.Builtin),
# Names
include('names'),
@@ -286,13 +284,13 @@ class StanLexer(RegexLexer):
The Stan modeling language is specified in the *Stan Modeling Language
User's Guide and Reference Manual, v2.17.0*,
`pdf <https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf>`__.
-
- .. versionadded:: 1.6
"""
name = 'Stan'
aliases = ['stan']
filenames = ['*.stan']
+ url = 'https://mc-stan.org'
+ version_added = '1.6'
tokens = {
'whitespace': [
@@ -310,19 +308,18 @@ class StanLexer(RegexLexer):
# block start
include('whitespace'),
# Block start
- (r'(%s)(\s*)(\{)' %
- r'|'.join(('functions', 'data', r'transformed\s+?data',
+ (r'({})(\s*)(\{{)'.format(r'|'.join(('functions', 'data', r'transformed\s+?data',
'parameters', r'transformed\s+parameters',
- 'model', r'generated\s+quantities')),
+ 'model', r'generated\s+quantities'))),
bygroups(Keyword.Namespace, Text, Punctuation)),
# target keyword
(r'target\s*\+=', Keyword),
# Reserved Words
- (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
+ (r'({})\b'.format(r'|'.join(_stan_builtins.KEYWORDS)), Keyword),
# Truncation
(r'T(?=\s*\[)', Keyword),
# Data types
- (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
+ (r'({})\b'.format(r'|'.join(_stan_builtins.TYPES)), Keyword.Type),
# < should be punctuation, but elsewhere I can't tell if it is in
# a range constraint
(r'(<)(\s*)(upper|lower|offset|multiplier)(\s*)(=)',
@@ -332,12 +329,12 @@ class StanLexer(RegexLexer):
# Punctuation
(r"[;,\[\]()]", Punctuation),
# Builtin
- (r'(%s)(?=\s*\()' % '|'.join(_stan_builtins.FUNCTIONS), Name.Builtin),
- (r'(~)(\s*)(%s)(?=\s*\()' % '|'.join(_stan_builtins.DISTRIBUTIONS),
+ (r'({})(?=\s*\()'.format('|'.join(_stan_builtins.FUNCTIONS)), Name.Builtin),
+ (r'(~)(\s*)({})(?=\s*\()'.format('|'.join(_stan_builtins.DISTRIBUTIONS)),
bygroups(Operator, Whitespace, Name.Builtin)),
# Special names ending in __, like lp__
(r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
- (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
+ (r'({})\b'.format(r'|'.join(_stan_builtins.RESERVED)), Keyword.Reserved),
# user-defined functions
(r'[A-Za-z]\w*(?=\s*\()]', Name.Function),
# Imaginary Literals
diff --git a/contrib/python/Pygments/py3/pygments/lexers/modula2.py b/contrib/python/Pygments/py3/pygments/lexers/modula2.py
index 8bd4765aa6..8f5a72a354 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/modula2.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/modula2.py
@@ -4,7 +4,7 @@
Multi-Dialect Lexer for Modula-2.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -153,8 +153,6 @@ class Modula2Lexer(RegexLexer):
``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
Render standard library ADTs as ordinary library types.
- .. versionadded:: 1.3
-
.. versionchanged:: 2.1
Added multi-dialect support.
"""
@@ -163,6 +161,7 @@ class Modula2Lexer(RegexLexer):
aliases = ['modula2', 'm2']
filenames = ['*.def', '*.mod']
mimetypes = ['text/x-modula2']
+ version_added = '1.3'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/mojo.py b/contrib/python/Pygments/py3/pygments/lexers/mojo.py
new file mode 100644
index 0000000000..0191dce0aa
--- /dev/null
+++ b/contrib/python/Pygments/py3/pygments/lexers/mojo.py
@@ -0,0 +1,704 @@
+"""
+ pygments.lexers.mojo
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Mojo and related languages.
+
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import keyword
+
+from pygments import unistring as uni
+from pygments.lexer import (
+ RegexLexer,
+ bygroups,
+ combined,
+ default,
+ include,
+ this,
+ using,
+ words,
+)
+from pygments.token import (
+ Comment,
+ # Error,
+ Keyword,
+ Name,
+ Number,
+ Operator,
+ Punctuation,
+ String,
+ Text,
+ Whitespace,
+)
+from pygments.util import shebang_matches
+
+__all__ = ["MojoLexer"]
+
+
+class MojoLexer(RegexLexer):
+ """
+ For Mojo source code (version 24.2.1).
+ """
+
+ name = "Mojo"
+ url = "https://docs.modular.com/mojo/"
+ aliases = ["mojo", "🔥"]
+ filenames = [
+ "*.mojo",
+ "*.🔥",
+ ]
+ mimetypes = [
+ "text/x-mojo",
+ "application/x-mojo",
+ ]
+ version_added = "2.18"
+
+ uni_name = f"[{uni.xid_start}][{uni.xid_continue}]*"
+
+ def innerstring_rules(ttype):
+ return [
+ # the old style '%s' % (...) string formatting (still valid in Py3)
+ (
+ r"%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?"
+ "[hlL]?[E-GXc-giorsaux%]",
+ String.Interpol,
+ ),
+ # the new style '{}'.format(...) string formatting
+ (
+ r"\{"
+ r"((\w+)((\.\w+)|(\[[^\]]+\]))*)?" # field name
+ r"(\![sra])?" # conversion
+ r"(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?"
+ r"\}",
+ String.Interpol,
+ ),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"%{\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # unhandled string formatting sign
+ (r"%|(\{{1,2})", ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+ def fstring_rules(ttype):
+ return [
+ # Assuming that a '}' is the closing brace after format specifier.
+ # Sadly, this means that we won't detect syntax error. But it's
+ # more important to parse correct syntax correctly, than to
+ # highlight invalid syntax.
+ (r"\}", String.Interpol),
+ (r"\{", String.Interpol, "expr-inside-fstring"),
+ # backslashes, quotes and formatting signs must be parsed one at a time
+ (r'[^\\\'"{}\n]+', ttype),
+ (r'[\'"\\]', ttype),
+ # newlines are an error (use "nl" state)
+ ]
+
+ tokens = {
+ "root": [
+ (r"\s+", Whitespace),
+ (
+ r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
+ bygroups(Whitespace, String.Affix, String.Doc),
+ ),
+ (
+ r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
+ bygroups(Whitespace, String.Affix, String.Doc),
+ ),
+ (r"\A#!.+$", Comment.Hashbang),
+ (r"#.*$", Comment.Single),
+ (r"\\\n", Whitespace),
+ (r"\\", Whitespace),
+ include("keywords"),
+ include("soft-keywords"),
+ # In the original PR, all the below here used ((?:\s|\\\s)+) to
+ # designate whitespace, but I can't find any example of this being
+ # needed in the example file, so we're replacing it with `\s+`.
+ (
+ r"(alias)(\s+)",
+ bygroups(Keyword, Whitespace),
+ "varname", # TODO varname the right fit?
+ ),
+ (r"(var)(\s+)", bygroups(Keyword, Whitespace), "varname"),
+ (r"(def)(\s+)", bygroups(Keyword, Whitespace), "funcname"),
+ (r"(fn)(\s+)", bygroups(Keyword, Whitespace), "funcname"),
+ (
+ r"(class)(\s+)",
+ bygroups(Keyword, Whitespace),
+ "classname",
+ ), # not implemented yet
+ (r"(struct)(\s+)", bygroups(Keyword, Whitespace), "structname"),
+ (r"(trait)(\s+)", bygroups(Keyword, Whitespace), "structname"),
+ (r"(from)(\s+)", bygroups(Keyword.Namespace, Whitespace), "fromimport"),
+ (r"(import)(\s+)", bygroups(Keyword.Namespace, Whitespace), "import"),
+ include("expr"),
+ ],
+ "expr": [
+ # raw f-strings
+ (
+ '(?i)(rf|fr)(""")',
+ bygroups(String.Affix, String.Double),
+ combined("rfstringescape", "tdqf"),
+ ),
+ (
+ "(?i)(rf|fr)(''')",
+ bygroups(String.Affix, String.Single),
+ combined("rfstringescape", "tsqf"),
+ ),
+ (
+ '(?i)(rf|fr)(")',
+ bygroups(String.Affix, String.Double),
+ combined("rfstringescape", "dqf"),
+ ),
+ (
+ "(?i)(rf|fr)(')",
+ bygroups(String.Affix, String.Single),
+ combined("rfstringescape", "sqf"),
+ ),
+ # non-raw f-strings
+ (
+ '([fF])(""")',
+ bygroups(String.Affix, String.Double),
+ combined("fstringescape", "tdqf"),
+ ),
+ (
+ "([fF])(''')",
+ bygroups(String.Affix, String.Single),
+ combined("fstringescape", "tsqf"),
+ ),
+ (
+ '([fF])(")',
+ bygroups(String.Affix, String.Double),
+ combined("fstringescape", "dqf"),
+ ),
+ (
+ "([fF])(')",
+ bygroups(String.Affix, String.Single),
+ combined("fstringescape", "sqf"),
+ ),
+ # raw bytes and strings
+ ('(?i)(rb|br|r)(""")', bygroups(String.Affix, String.Double), "tdqs"),
+ ("(?i)(rb|br|r)(''')", bygroups(String.Affix, String.Single), "tsqs"),
+ ('(?i)(rb|br|r)(")', bygroups(String.Affix, String.Double), "dqs"),
+ ("(?i)(rb|br|r)(')", bygroups(String.Affix, String.Single), "sqs"),
+ # non-raw strings
+ (
+ '([uU]?)(""")',
+ bygroups(String.Affix, String.Double),
+ combined("stringescape", "tdqs"),
+ ),
+ (
+ "([uU]?)(''')",
+ bygroups(String.Affix, String.Single),
+ combined("stringescape", "tsqs"),
+ ),
+ (
+ '([uU]?)(")',
+ bygroups(String.Affix, String.Double),
+ combined("stringescape", "dqs"),
+ ),
+ (
+ "([uU]?)(')",
+ bygroups(String.Affix, String.Single),
+ combined("stringescape", "sqs"),
+ ),
+ # non-raw bytes
+ (
+ '([bB])(""")',
+ bygroups(String.Affix, String.Double),
+ combined("bytesescape", "tdqs"),
+ ),
+ (
+ "([bB])(''')",
+ bygroups(String.Affix, String.Single),
+ combined("bytesescape", "tsqs"),
+ ),
+ (
+ '([bB])(")',
+ bygroups(String.Affix, String.Double),
+ combined("bytesescape", "dqs"),
+ ),
+ (
+ "([bB])(')",
+ bygroups(String.Affix, String.Single),
+ combined("bytesescape", "sqs"),
+ ),
+ (r"[^\S\n]+", Text),
+ include("numbers"),
+ (r"!=|==|<<|>>|:=|[-~+/*%=<>&^|.]", Operator),
+ (r"([]{}:\(\),;[])+", Punctuation),
+ (r"(in|is|and|or|not)\b", Operator.Word),
+ include("expr-keywords"),
+ include("builtins"),
+ include("magicfuncs"),
+ include("magicvars"),
+ include("name"),
+ ],
+ "expr-inside-fstring": [
+ (r"[{([]", Punctuation, "expr-inside-fstring-inner"),
+ # without format specifier
+ (
+ r"(=\s*)?" # debug (https://bugs.python.org/issue36817)
+ r"(\![sraf])?" # conversion
+ r"\}",
+ String.Interpol,
+ "#pop",
+ ),
+ # with format specifier
+ # we'll catch the remaining '}' in the outer scope
+ (
+ r"(=\s*)?" # debug (https://bugs.python.org/issue36817)
+ r"(\![sraf])?" # conversion
+ r":",
+ String.Interpol,
+ "#pop",
+ ),
+ (r"\s+", Whitespace), # allow new lines
+ include("expr"),
+ ],
+ "expr-inside-fstring-inner": [
+ (r"[{([]", Punctuation, "expr-inside-fstring-inner"),
+ (r"[])}]", Punctuation, "#pop"),
+ (r"\s+", Whitespace), # allow new lines
+ include("expr"),
+ ],
+ "expr-keywords": [
+ # Based on https://docs.python.org/3/reference/expressions.html
+ (
+ words(
+ (
+ "async for", # TODO https://docs.modular.com/mojo/roadmap#no-async-for-or-async-with
+ "async with", # TODO https://docs.modular.com/mojo/roadmap#no-async-for-or-async-with
+ "await",
+ "else",
+ "for",
+ "if",
+ "lambda",
+ "yield",
+ "yield from",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ ),
+ (words(("True", "False", "None"), suffix=r"\b"), Keyword.Constant),
+ ],
+ "keywords": [
+ (
+ words(
+ (
+ "assert",
+ "async",
+ "await",
+ "borrowed",
+ "break",
+ "continue",
+ "del",
+ "elif",
+ "else",
+ "except",
+ "finally",
+ "for",
+ "global",
+ "if",
+ "lambda",
+ "pass",
+ "raise",
+ "nonlocal",
+ "return",
+ "try",
+ "while",
+ "yield",
+ "yield from",
+ "as",
+ "with",
+ ),
+ suffix=r"\b",
+ ),
+ Keyword,
+ ),
+ (words(("True", "False", "None"), suffix=r"\b"), Keyword.Constant),
+ ],
+ "soft-keywords": [
+ # `match`, `case` and `_` soft keywords
+ (
+ r"(^[ \t]*)" # at beginning of line + possible indentation
+ r"(match|case)\b" # a possible keyword
+ r"(?![ \t]*(?:" # not followed by...
+ r"[:,;=^&|@~)\]}]|(?:" + # characters and keywords that mean this isn't
+ # pattern matching (but None/True/False is ok)
+ r"|".join(k for k in keyword.kwlist if k[0].islower())
+ + r")\b))",
+ bygroups(Whitespace, Keyword),
+ "soft-keywords-inner",
+ ),
+ ],
+ "soft-keywords-inner": [
+ # optional `_` keyword
+ (r"(\s+)([^\n_]*)(_\b)", bygroups(Whitespace, using(this), Keyword)),
+ default("#pop"),
+ ],
+ "builtins": [
+ (
+ words(
+ (
+ "__import__",
+ "abs",
+ "aiter",
+ "all",
+ "any",
+ "bin",
+ "bool",
+ "bytearray",
+ "breakpoint",
+ "bytes",
+ "callable",
+ "chr",
+ "classmethod",
+ "compile",
+ "complex",
+ "delattr",
+ "dict",
+ "dir",
+ "divmod",
+ "enumerate",
+ "eval",
+ "filter",
+ "float",
+ "format",
+ "frozenset",
+ "getattr",
+ "globals",
+ "hasattr",
+ "hash",
+ "hex",
+ "id",
+ "input",
+ "int",
+ "isinstance",
+ "issubclass",
+ "iter",
+ "len",
+ "list",
+ "locals",
+ "map",
+ "max",
+ "memoryview",
+ "min",
+ "next",
+ "object",
+ "oct",
+ "open",
+ "ord",
+ "pow",
+ "print",
+ "property",
+ "range",
+ "repr",
+ "reversed",
+ "round",
+ "set",
+ "setattr",
+ "slice",
+ "sorted",
+ "staticmethod",
+ "str",
+ "sum",
+ "super",
+ "tuple",
+ "type",
+ "vars",
+ "zip",
+ # Mojo builtin types: https://docs.modular.com/mojo/stdlib/builtin/
+ "AnyType",
+ "Coroutine",
+ "DType",
+ "Error",
+ "Int",
+ "List",
+ "ListLiteral",
+ "Scalar",
+ "Int8",
+ "UInt8",
+ "Int16",
+ "UInt16",
+ "Int32",
+ "UInt32",
+ "Int64",
+ "UInt64",
+ "BFloat16",
+ "Float16",
+ "Float32",
+ "Float64",
+ "SIMD",
+ "String",
+ "Tensor",
+ "Tuple",
+ "Movable",
+ "Copyable",
+ "CollectionElement",
+ ),
+ prefix=r"(?<!\.)",
+ suffix=r"\b",
+ ),
+ Name.Builtin,
+ ),
+ (r"(?<!\.)(self|Ellipsis|NotImplemented|cls)\b", Name.Builtin.Pseudo),
+ (
+ words(
+ ("Error",),
+ prefix=r"(?<!\.)",
+ suffix=r"\b",
+ ),
+ Name.Exception,
+ ),
+ ],
+ "magicfuncs": [
+ (
+ words(
+ (
+ "__abs__",
+ "__add__",
+ "__aenter__",
+ "__aexit__",
+ "__aiter__",
+ "__and__",
+ "__anext__",
+ "__await__",
+ "__bool__",
+ "__bytes__",
+ "__call__",
+ "__complex__",
+ "__contains__",
+ "__del__",
+ "__delattr__",
+ "__delete__",
+ "__delitem__",
+ "__dir__",
+ "__divmod__",
+ "__enter__",
+ "__eq__",
+ "__exit__",
+ "__float__",
+ "__floordiv__",
+ "__format__",
+ "__ge__",
+ "__get__",
+ "__getattr__",
+ "__getattribute__",
+ "__getitem__",
+ "__gt__",
+ "__hash__",
+ "__iadd__",
+ "__iand__",
+ "__ifloordiv__",
+ "__ilshift__",
+ "__imatmul__",
+ "__imod__",
+ "__imul__",
+ "__index__",
+ "__init__",
+ "__instancecheck__",
+ "__int__",
+ "__invert__",
+ "__ior__",
+ "__ipow__",
+ "__irshift__",
+ "__isub__",
+ "__iter__",
+ "__itruediv__",
+ "__ixor__",
+ "__le__",
+ "__len__",
+ "__length_hint__",
+ "__lshift__",
+ "__lt__",
+ "__matmul__",
+ "__missing__",
+ "__mod__",
+ "__mul__",
+ "__ne__",
+ "__neg__",
+ "__new__",
+ "__next__",
+ "__or__",
+ "__pos__",
+ "__pow__",
+ "__prepare__",
+ "__radd__",
+ "__rand__",
+ "__rdivmod__",
+ "__repr__",
+ "__reversed__",
+ "__rfloordiv__",
+ "__rlshift__",
+ "__rmatmul__",
+ "__rmod__",
+ "__rmul__",
+ "__ror__",
+ "__round__",
+ "__rpow__",
+ "__rrshift__",
+ "__rshift__",
+ "__rsub__",
+ "__rtruediv__",
+ "__rxor__",
+ "__set__",
+ "__setattr__",
+ "__setitem__",
+ "__str__",
+ "__sub__",
+ "__subclasscheck__",
+ "__truediv__",
+ "__xor__",
+ ),
+ suffix=r"\b",
+ ),
+ Name.Function.Magic,
+ ),
+ ],
+ "magicvars": [
+ (
+ words(
+ (
+ "__annotations__",
+ "__bases__",
+ "__class__",
+ "__closure__",
+ "__code__",
+ "__defaults__",
+ "__dict__",
+ "__doc__",
+ "__file__",
+ "__func__",
+ "__globals__",
+ "__kwdefaults__",
+ "__module__",
+ "__mro__",
+ "__name__",
+ "__objclass__",
+ "__qualname__",
+ "__self__",
+ "__slots__",
+ "__weakref__",
+ ),
+ suffix=r"\b",
+ ),
+ Name.Variable.Magic,
+ ),
+ ],
+ "numbers": [
+ (
+ r"(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)"
+ r"([eE][+-]?\d(?:_?\d)*)?",
+ Number.Float,
+ ),
+ (r"\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?", Number.Float),
+ (r"0[oO](?:_?[0-7])+", Number.Oct),
+ (r"0[bB](?:_?[01])+", Number.Bin),
+ (r"0[xX](?:_?[a-fA-F0-9])+", Number.Hex),
+ (r"\d(?:_?\d)*", Number.Integer),
+ ],
+ "name": [
+ (r"@" + uni_name, Name.Decorator),
+ (r"@", Operator), # new matrix multiplication operator
+ (uni_name, Name),
+ ],
+ "varname": [
+ (uni_name, Name.Variable, "#pop"),
+ ],
+ "funcname": [
+ include("magicfuncs"),
+ (uni_name, Name.Function, "#pop"),
+ default("#pop"),
+ ],
+ "classname": [
+ (uni_name, Name.Class, "#pop"),
+ ],
+ "structname": [
+ (uni_name, Name.Struct, "#pop"),
+ ],
+ "import": [
+ (r"(\s+)(as)(\s+)", bygroups(Whitespace, Keyword, Whitespace)),
+ (r"\.", Name.Namespace),
+ (uni_name, Name.Namespace),
+ (r"(\s*)(,)(\s*)", bygroups(Whitespace, Operator, Whitespace)),
+ default("#pop"), # all else: go back
+ ],
+ "fromimport": [
+ (r"(\s+)(import)\b", bygroups(Whitespace, Keyword.Namespace), "#pop"),
+ (r"\.", Name.Namespace),
+ # if None occurs here, it's "raise x from None", since None can
+ # never be a module name
+ (r"None\b", Keyword.Constant, "#pop"),
+ (uni_name, Name.Namespace),
+ default("#pop"),
+ ],
+ "rfstringescape": [
+ (r"\{\{", String.Escape),
+ (r"\}\}", String.Escape),
+ ],
+ "fstringescape": [
+ include("rfstringescape"),
+ include("stringescape"),
+ ],
+ "bytesescape": [
+ (r'\\([\\abfnrtv"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ "stringescape": [
+ (r"\\(N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8})", String.Escape),
+ include("bytesescape"),
+ ],
+ "fstrings-single": fstring_rules(String.Single),
+ "fstrings-double": fstring_rules(String.Double),
+ "strings-single": innerstring_rules(String.Single),
+ "strings-double": innerstring_rules(String.Double),
+ "dqf": [
+ (r'"', String.Double, "#pop"),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include("fstrings-double"),
+ ],
+ "sqf": [
+ (r"'", String.Single, "#pop"),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include("fstrings-single"),
+ ],
+ "dqs": [
+ (r'"', String.Double, "#pop"),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include("strings-double"),
+ ],
+ "sqs": [
+ (r"'", String.Single, "#pop"),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include("strings-single"),
+ ],
+ "tdqf": [
+ (r'"""', String.Double, "#pop"),
+ include("fstrings-double"),
+ (r"\n", String.Double),
+ ],
+ "tsqf": [
+ (r"'''", String.Single, "#pop"),
+ include("fstrings-single"),
+ (r"\n", String.Single),
+ ],
+ "tdqs": [
+ (r'"""', String.Double, "#pop"),
+ include("strings-double"),
+ (r"\n", String.Double),
+ ],
+ "tsqs": [
+ (r"'''", String.Single, "#pop"),
+ include("strings-single"),
+ (r"\n", String.Single),
+ ],
+ }
+
+ def analyse_text(text):
+ return (
+ shebang_matches(text, r"mojo?") or "import " in text[:1000]
+ ) # TODO supported?
diff --git a/contrib/python/Pygments/py3/pygments/lexers/monte.py b/contrib/python/Pygments/py3/pygments/lexers/monte.py
index 18f5a036c2..74a2d036cc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/monte.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/monte.py
@@ -4,7 +4,7 @@
Lexer for the Monte programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -78,13 +78,12 @@ _safeScope = [
class MonteLexer(RegexLexer):
"""
Lexer for the Monte programming language.
-
- .. versionadded:: 2.2
"""
name = 'Monte'
url = 'https://monte.readthedocs.io/'
aliases = ['monte']
filenames = ['*.mt']
+ version_added = '2.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/mosel.py b/contrib/python/Pygments/py3/pygments/lexers/mosel.py
index f3c86cc573..0032c8f591 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/mosel.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/mosel.py
@@ -5,7 +5,7 @@
Lexers for the mosel language.
http://www.fico.com/en/products/fico-xpress-optimization
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -391,12 +391,12 @@ FUNCTIONS = (
class MoselLexer(RegexLexer):
"""
For the Mosel optimization language.
-
- .. versionadded:: 2.6
"""
name = 'Mosel'
aliases = ['mosel']
filenames = ['*.mos']
+ url = 'https://www.fico.com/fico-xpress-optimization/docs/latest/mosel/mosel_lang/dhtml/moselreflang.html'
+ version_added = '2.6'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ncl.py b/contrib/python/Pygments/py3/pygments/lexers/ncl.py
index b1ec1454b4..499ef9411b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ncl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ncl.py
@@ -4,7 +4,7 @@
Lexers for NCAR Command Language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,13 +20,14 @@ __all__ = ['NCLLexer']
class NCLLexer(RegexLexer):
"""
Lexer for NCL code.
-
- .. versionadded:: 2.2
"""
name = 'NCL'
aliases = ['ncl']
filenames = ['*.ncl']
mimetypes = ['text/ncl']
+ url = 'https://www.ncl.ucar.edu'
+ version_added = '2.2'
+
flags = re.MULTILINE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nimrod.py b/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
index 11f5b936db..b8be9797b1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nimrod.py
@@ -4,7 +4,7 @@
Lexer for the Nim language (formerly known as Nimrod).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,8 +20,6 @@ __all__ = ['NimrodLexer']
class NimrodLexer(RegexLexer):
"""
For Nim source code.
-
- .. versionadded:: 1.5
"""
name = 'Nimrod'
@@ -29,6 +27,7 @@ class NimrodLexer(RegexLexer):
aliases = ['nimrod', 'nim']
filenames = ['*.nim', '*.nimrod']
mimetypes = ['text/x-nim']
+ version_added = '1.5'
flags = re.MULTILINE | re.IGNORECASE
@@ -97,15 +96,15 @@ class NimrodLexer(RegexLexer):
("'", String.Char, 'chars'),
# Keywords
- (r'(%s)\b' % underscorize(opWords), Operator.Word),
+ (rf'({underscorize(opWords)})\b', Operator.Word),
(r'(proc|func|method|macro|template)(\s)(?![(\[\]])',
bygroups(Keyword, Text.Whitespace), 'funcname'),
- (r'(%s)\b' % underscorize(keywords), Keyword),
- (r'(%s)\b' % underscorize(['from', 'import', 'include', 'export']),
+ (rf'({underscorize(keywords)})\b', Keyword),
+ (r'({})\b'.format(underscorize(['from', 'import', 'include', 'export'])),
Keyword.Namespace),
(r'(v_?a_?r)\b', Keyword.Declaration),
- (r'(%s)\b' % underscorize(types), Name.Builtin),
- (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
+ (rf'({underscorize(types)})\b', Name.Builtin),
+ (rf'({underscorize(keywordsPseudo)})\b', Keyword.Pseudo),
# Identifiers
(r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nit.py b/contrib/python/Pygments/py3/pygments/lexers/nit.py
index b4e85f304d..51b0a21472 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nit.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nit.py
@@ -4,7 +4,7 @@
Lexer for the Nit language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['NitLexer']
class NitLexer(RegexLexer):
"""
For nit source.
-
- .. versionadded:: 2.0
"""
name = 'Nit'
url = 'http://nitlanguage.org'
aliases = ['nit']
filenames = ['*.nit']
+ version_added = '2.0'
tokens = {
'root': [
(r'#.*?$', Comment.Single),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/nix.py b/contrib/python/Pygments/py3/pygments/lexers/nix.py
index 7ab59bb8c9..f763835b53 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/nix.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/nix.py
@@ -4,7 +4,7 @@
Lexers for the NixOS Nix language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,8 +20,6 @@ __all__ = ['NixLexer']
class NixLexer(RegexLexer):
"""
For the Nix language.
-
- .. versionadded:: 2.0
"""
name = 'Nix'
@@ -29,6 +27,7 @@ class NixLexer(RegexLexer):
aliases = ['nixos', 'nix']
filenames = ['*.nix']
mimetypes = ['text/x-nix']
+ version_added = '2.0'
keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
'else', 'then', '...']
@@ -51,10 +50,10 @@ class NixLexer(RegexLexer):
(r'\s+', Text),
# keywords
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
+ ('({})'.format('|'.join(re.escape(entry) + '\\b' for entry in keywords)), Keyword),
# highlight the builtins
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
+ ('({})'.format('|'.join(re.escape(entry) + '\\b' for entry in builtins)),
Name.Builtin),
(r'\b(true|false|null)\b', Name.Constant),
@@ -71,7 +70,7 @@ class NixLexer(RegexLexer):
(r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
# operators
- ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
+ ('({})'.format('|'.join(re.escape(entry) for entry in operators)),
Operator),
# word operators
@@ -80,7 +79,7 @@ class NixLexer(RegexLexer):
(r'\{', Punctuation, 'block'),
# punctuations
- ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
+ ('({})'.format('|'.join(re.escape(entry) for entry in punctuations)), Punctuation),
# strings
(r'"', String.Double, 'doublequote'),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/oberon.py b/contrib/python/Pygments/py3/pygments/lexers/oberon.py
index 3aaa763302..8edf34b8e3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/oberon.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/oberon.py
@@ -4,7 +4,7 @@
Lexers for Oberon family languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,13 +20,13 @@ __all__ = ['ComponentPascalLexer']
class ComponentPascalLexer(RegexLexer):
"""
For Component Pascal source code.
-
- .. versionadded:: 2.1
"""
name = 'Component Pascal'
aliases = ['componentpascal', 'cp']
filenames = ['*.cp', '*.cps']
mimetypes = ['text/x-component-pascal']
+ url = 'https://blackboxframework.org'
+ version_added = '2.1'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/objective.py b/contrib/python/Pygments/py3/pygments/lexers/objective.py
index 2e4332adfa..a5ee98d2c0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/objective.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/objective.py
@@ -4,7 +4,7 @@
Lexers for Objective-C family languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -201,6 +201,7 @@ class ObjectiveCLexer(objective(CLexer)):
aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
filenames = ['*.m', '*.h']
mimetypes = ['text/x-objective-c']
+ version_added = ''
priority = 0.05 # Lower than C
@@ -213,20 +214,20 @@ class ObjectiveCppLexer(objective(CppLexer)):
aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
filenames = ['*.mm', '*.hh']
mimetypes = ['text/x-objective-c++']
+ version_added = ''
priority = 0.05 # Lower than C++
class LogosLexer(ObjectiveCppLexer):
"""
For Logos + Objective-C source code with preprocessor directives.
-
- .. versionadded:: 1.6
"""
name = 'Logos'
aliases = ['logos']
filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
mimetypes = ['text/x-logos']
+ version_added = '1.6'
priority = 0.25
tokens = {
@@ -283,14 +284,13 @@ class LogosLexer(ObjectiveCppLexer):
class SwiftLexer(RegexLexer):
"""
For Swift source.
-
- .. versionadded:: 2.0
"""
name = 'Swift'
url = 'https://www.swift.org/'
filenames = ['*.swift']
aliases = ['swift']
mimetypes = ['text/x-swift']
+ version_added = '2.0'
tokens = {
'root': [
@@ -403,6 +403,7 @@ class SwiftLexer(RegexLexer):
r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
(r'[0-9][0-9_]*', Number.Integer),
# String Literal
+ (r'"""', String, 'string-multi'),
(r'"', String, 'string'),
# Operators and Punctuation
@@ -477,8 +478,15 @@ class SwiftLexer(RegexLexer):
include('root')
],
'string': [
- (r'\\\(', String.Interpol, 'string-intp'),
(r'"', String, '#pop'),
+ include("string-common"),
+ ],
+ 'string-multi': [
+ (r'"""', String, '#pop'),
+ include("string-common"),
+ ],
+ 'string-common': [
+ (r'\\\(', String.Interpol, 'string-intp'),
(r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
(r'[^\\"]+', String),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ooc.py b/contrib/python/Pygments/py3/pygments/lexers/ooc.py
index c4600eaeed..881a57aff6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ooc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ooc.py
@@ -4,7 +4,7 @@
Lexers for the Ooc language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['OocLexer']
class OocLexer(RegexLexer):
"""
For Ooc source code
-
- .. versionadded:: 1.2
"""
name = 'Ooc'
url = 'http://ooc-lang.org/'
aliases = ['ooc']
filenames = ['*.ooc']
mimetypes = ['text/x-ooc']
+ version_added = '1.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/openscad.py b/contrib/python/Pygments/py3/pygments/lexers/openscad.py
index de8fdaf61b..97fb92a032 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/openscad.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/openscad.py
@@ -4,7 +4,7 @@
Lexers for the OpenSCAD languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,14 +16,13 @@ __all__ = ['OpenScadLexer']
class OpenScadLexer(RegexLexer):
"""For openSCAD code.
-
- .. versionadded:: 2.16
"""
name = "OpenSCAD"
url = "https://openscad.org/"
aliases = ["openscad"]
filenames = ["*.scad"]
mimetypes = ["application/x-openscad"]
+ version_added = '2.16'
tokens = {
"root": [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/other.py b/contrib/python/Pygments/py3/pygments/lexers/other.py
index f2c07d7edc..838104c507 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/other.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/other.py
@@ -4,10 +4,11 @@
Just export lexer classes previously contained in this module.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+# ruff: noqa: F401
from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
TcshLexer
diff --git a/contrib/python/Pygments/py3/pygments/lexers/parasail.py b/contrib/python/Pygments/py3/pygments/lexers/parasail.py
index 5a7238ec7c..c525ff0b7a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/parasail.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/parasail.py
@@ -4,7 +4,7 @@
Lexer for ParaSail.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,8 +20,6 @@ __all__ = ['ParaSailLexer']
class ParaSailLexer(RegexLexer):
"""
For ParaSail source code.
-
- .. versionadded:: 2.1
"""
name = 'ParaSail'
@@ -29,6 +27,7 @@ class ParaSailLexer(RegexLexer):
aliases = ['parasail']
filenames = ['*.psi', '*.psl']
mimetypes = ['text/x-parasail']
+ version_added = '2.1'
flags = re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/parsers.py b/contrib/python/Pygments/py3/pygments/lexers/parsers.py
index 0415ac6a7f..2e0ae0ccad 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/parsers.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/parsers.py
@@ -4,7 +4,7 @@
Lexers for parser generators.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -38,14 +38,13 @@ class RagelLexer(RegexLexer):
:class:`RagelEmbeddedLexer` instead (or one of the
language-specific subclasses).
- .. versionadded:: 1.1
-
"""
name = 'Ragel'
url = 'http://www.colm.net/open-source/ragel/'
aliases = ['ragel']
filenames = []
+ version_added = '1.1'
tokens = {
'whitespace': [
@@ -133,13 +132,13 @@ class RagelEmbeddedLexer(RegexLexer):
This will only highlight Ragel statements. If you want host language
highlighting then call the language-specific Ragel lexer.
-
- .. versionadded:: 1.1
"""
name = 'Embedded Ragel'
aliases = ['ragel-em']
filenames = ['*.rl']
+ url = 'http://www.colm.net/open-source/ragel/'
+ version_added = '1.1'
tokens = {
'root': [
@@ -213,13 +212,13 @@ class RagelEmbeddedLexer(RegexLexer):
class RagelRubyLexer(DelegatingLexer):
"""
A lexer for Ragel in a Ruby host file.
-
- .. versionadded:: 1.1
"""
name = 'Ragel in Ruby Host'
aliases = ['ragel-ruby', 'ragel-rb']
filenames = ['*.rl']
+ url = 'http://www.colm.net/open-source/ragel/'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
@@ -231,13 +230,13 @@ class RagelRubyLexer(DelegatingLexer):
class RagelCLexer(DelegatingLexer):
"""
A lexer for Ragel in a C host file.
-
- .. versionadded:: 1.1
"""
name = 'Ragel in C Host'
aliases = ['ragel-c']
filenames = ['*.rl']
+ url = 'http://www.colm.net/open-source/ragel/'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(CLexer, RagelEmbeddedLexer, **options)
@@ -249,13 +248,13 @@ class RagelCLexer(DelegatingLexer):
class RagelDLexer(DelegatingLexer):
"""
A lexer for Ragel in a D host file.
-
- .. versionadded:: 1.1
"""
name = 'Ragel in D Host'
aliases = ['ragel-d']
filenames = ['*.rl']
+ url = 'http://www.colm.net/open-source/ragel/'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(DLexer, RagelEmbeddedLexer, **options)
@@ -267,13 +266,13 @@ class RagelDLexer(DelegatingLexer):
class RagelCppLexer(DelegatingLexer):
"""
A lexer for Ragel in a C++ host file.
-
- .. versionadded:: 1.1
"""
name = 'Ragel in CPP Host'
aliases = ['ragel-cpp']
filenames = ['*.rl']
+ url = 'http://www.colm.net/open-source/ragel/'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(CppLexer, RagelEmbeddedLexer, **options)
@@ -285,13 +284,13 @@ class RagelCppLexer(DelegatingLexer):
class RagelObjectiveCLexer(DelegatingLexer):
"""
A lexer for Ragel in an Objective C host file.
-
- .. versionadded:: 1.1
"""
name = 'Ragel in Objective C Host'
aliases = ['ragel-objc']
filenames = ['*.rl']
+ url = 'http://www.colm.net/open-source/ragel/'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
@@ -303,13 +302,13 @@ class RagelObjectiveCLexer(DelegatingLexer):
class RagelJavaLexer(DelegatingLexer):
"""
A lexer for Ragel in a Java host file.
-
- .. versionadded:: 1.1
"""
name = 'Ragel in Java Host'
aliases = ['ragel-java']
filenames = ['*.rl']
+ url = 'http://www.colm.net/open-source/ragel/'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
@@ -320,18 +319,16 @@ class RagelJavaLexer(DelegatingLexer):
class AntlrLexer(RegexLexer):
"""
- Generic `ANTLR`_ Lexer.
+ Generic ANTLR Lexer.
Should not be called directly, instead
use DelegatingLexer for your target language.
-
- .. versionadded:: 1.1
-
- .. _ANTLR: http://www.antlr.org/
"""
name = 'ANTLR'
aliases = ['antlr']
filenames = []
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
_id = r'[A-Za-z]\w*'
_TOKEN_REF = r'[A-Z]\w*'
@@ -516,13 +513,13 @@ class AntlrLexer(RegexLexer):
class AntlrCppLexer(DelegatingLexer):
"""
ANTLR with C++ Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With CPP Target'
aliases = ['antlr-cpp']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(CppLexer, AntlrLexer, **options)
@@ -535,13 +532,13 @@ class AntlrCppLexer(DelegatingLexer):
class AntlrObjectiveCLexer(DelegatingLexer):
"""
ANTLR with Objective-C Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With ObjectiveC Target'
aliases = ['antlr-objc']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(ObjectiveCLexer, AntlrLexer, **options)
@@ -554,13 +551,13 @@ class AntlrObjectiveCLexer(DelegatingLexer):
class AntlrCSharpLexer(DelegatingLexer):
"""
ANTLR with C# Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With C# Target'
aliases = ['antlr-csharp', 'antlr-c#']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(CSharpLexer, AntlrLexer, **options)
@@ -573,13 +570,13 @@ class AntlrCSharpLexer(DelegatingLexer):
class AntlrPythonLexer(DelegatingLexer):
"""
ANTLR with Python Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With Python Target'
aliases = ['antlr-python']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(PythonLexer, AntlrLexer, **options)
@@ -592,13 +589,13 @@ class AntlrPythonLexer(DelegatingLexer):
class AntlrJavaLexer(DelegatingLexer):
"""
ANTLR with Java Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With Java Target'
aliases = ['antlr-java']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(JavaLexer, AntlrLexer, **options)
@@ -611,13 +608,13 @@ class AntlrJavaLexer(DelegatingLexer):
class AntlrRubyLexer(DelegatingLexer):
"""
ANTLR with Ruby Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With Ruby Target'
aliases = ['antlr-ruby', 'antlr-rb']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(RubyLexer, AntlrLexer, **options)
@@ -630,13 +627,13 @@ class AntlrRubyLexer(DelegatingLexer):
class AntlrPerlLexer(DelegatingLexer):
"""
ANTLR with Perl Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With Perl Target'
aliases = ['antlr-perl']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(PerlLexer, AntlrLexer, **options)
@@ -649,13 +646,13 @@ class AntlrPerlLexer(DelegatingLexer):
class AntlrActionScriptLexer(DelegatingLexer):
"""
ANTLR with ActionScript Target
-
- .. versionadded:: 1.1
"""
name = 'ANTLR With ActionScript Target'
aliases = ['antlr-actionscript', 'antlr-as']
filenames = ['*.G', '*.g']
+ url = 'https://www.antlr.org'
+ version_added = '1.1'
def __init__(self, **options):
from pygments.lexers.actionscript import ActionScriptLexer
@@ -737,14 +734,14 @@ class TreetopBaseLexer(RegexLexer):
class TreetopLexer(DelegatingLexer):
"""
- A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
-
- .. versionadded:: 1.6
+ A lexer for Treetop grammars.
"""
name = 'Treetop'
aliases = ['treetop']
filenames = ['*.treetop', '*.tt']
+ url = 'https://cjheath.github.io/treetop'
+ version_added = '1.6'
def __init__(self, **options):
super().__init__(RubyLexer, TreetopBaseLexer, **options)
@@ -753,16 +750,16 @@ class TreetopLexer(DelegatingLexer):
class EbnfLexer(RegexLexer):
"""
Lexer for `ISO/IEC 14977 EBNF
- <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
+ <https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
grammars.
-
- .. versionadded:: 2.0
"""
name = 'EBNF'
aliases = ['ebnf']
filenames = ['*.ebnf']
mimetypes = ['text/x-ebnf']
+ url = 'https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form'
+ version_added = '2.0'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pascal.py b/contrib/python/Pygments/py3/pygments/lexers/pascal.py
index 34df19215f..bb5e1e75ed 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pascal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pascal.py
@@ -4,7 +4,7 @@
Lexers for Pascal family languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -12,12 +12,12 @@ import re
from pygments.lexer import Lexer
from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+from pygments.token import Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Whitespace
from pygments.scanner import Scanner
# compatibility import
-from pygments.lexers.modula2 import Modula2Lexer
+from pygments.lexers.modula2 import Modula2Lexer # noqa: F401
__all__ = ['DelphiLexer', 'PortugolLexer']
@@ -29,6 +29,7 @@ class PortugolLexer(Lexer):
filenames = ['*.alg', '*.portugol']
mimetypes = []
url = "https://www.apoioinformatica.inf.br/produtos/visualg/linguagem"
+ version_added = ''
def __init__(self, **options):
Lexer.__init__(self, **options)
@@ -60,6 +61,8 @@ class DelphiLexer(Lexer):
aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
filenames = ['*.pas', '*.dpr']
mimetypes = ['text/x-pascal']
+ url = 'https://www.embarcadero.com/products/delphi'
+ version_added = ''
TURBO_PASCAL_KEYWORDS = (
'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pawn.py b/contrib/python/Pygments/py3/pygments/lexers/pawn.py
index 36b48fcbf2..484e8dcb9b 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pawn.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pawn.py
@@ -4,7 +4,7 @@
Lexers for the Pawn languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,13 +19,13 @@ __all__ = ['SourcePawnLexer', 'PawnLexer']
class SourcePawnLexer(RegexLexer):
"""
For SourcePawn source code with preprocessor directives.
-
- .. versionadded:: 1.6
"""
name = 'SourcePawn'
aliases = ['sp']
filenames = ['*.sp']
mimetypes = ['text/x-sourcepawn']
+ url = 'https://github.com/alliedmodders/sourcepawn'
+ version_added = '1.6'
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
@@ -129,14 +129,14 @@ class SourcePawnLexer(RegexLexer):
class PawnLexer(RegexLexer):
"""
For Pawn source code.
-
- .. versionadded:: 2.0
"""
name = 'Pawn'
aliases = ['pawn']
filenames = ['*.p', '*.pwn', '*.inc']
mimetypes = ['text/x-pawn']
+ url = 'https://www.compuphase.com/pawn/pawn.htm'
+ version_added = '2.0'
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/perl.py b/contrib/python/Pygments/py3/pygments/lexers/perl.py
index 88c6486a6a..3ec19445c4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/perl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/perl.py
@@ -4,7 +4,7 @@
Lexers for Perl, Raku and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,6 +29,7 @@ class PerlLexer(RegexLexer):
aliases = ['perl', 'pl']
filenames = ['*.pl', '*.pm', '*.t', '*.perl']
mimetypes = ['text/x-perl', 'application/x-perl']
+ version_added = ''
flags = re.DOTALL | re.MULTILINE
# TODO: give this to a perl guy who knows how to parse perl...
@@ -225,8 +226,6 @@ class PerlLexer(RegexLexer):
class Perl6Lexer(ExtendedRegexLexer):
"""
For Raku (a.k.a. Perl 6) source code.
-
- .. versionadded:: 2.0
"""
name = 'Perl6'
@@ -236,6 +235,7 @@ class Perl6Lexer(ExtendedRegexLexer):
'*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod',
'*.rakutest', '*.rakudoc']
mimetypes = ['text/x-perl6', 'application/x-perl6']
+ version_added = '2.0'
flags = re.MULTILINE | re.DOTALL
PERL6_IDENTIFIER_RANGE = r"['\w:-]"
diff --git a/contrib/python/Pygments/py3/pygments/lexers/phix.py b/contrib/python/Pygments/py3/pygments/lexers/phix.py
index fb08b1dc77..29082e05cd 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/phix.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/phix.py
@@ -4,7 +4,7 @@
Lexers for Phix.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,8 +21,6 @@ class PhixLexer(RegexLexer):
"""
Pygments Lexer for Phix files (.exw).
See http://phix.x10.mx
-
- .. versionadded:: 2.14.0
"""
name = 'Phix'
@@ -30,6 +28,7 @@ class PhixLexer(RegexLexer):
aliases = ['phix']
filenames = ['*.exw']
mimetypes = ['text/x-phix']
+ version_added = '2.14'
flags = re.MULTILINE # nb: **NOT** re.DOTALL! (totally spanners comment handling)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/php.py b/contrib/python/Pygments/py3/pygments/lexers/php.py
index a0a0021aad..4f00c6f48a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/php.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/php.py
@@ -4,7 +4,7 @@
Lexers for PHP and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,14 +25,13 @@ class ZephirLexer(RegexLexer):
Zephir is a compiled high level language aimed
to the creation of C-extensions for PHP.
-
- .. versionadded:: 2.0
"""
name = 'Zephir'
url = 'http://zephir-lang.com/'
aliases = ['zephir']
filenames = ['*.zep']
+ version_added = '2.0'
zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
zephir_type = ['bit', 'bits', 'string']
@@ -97,12 +96,11 @@ class PsyshConsoleLexer(Lexer):
=> Closure($name): string {#2371 …3}
>>> $greeting('World')
=> "Hello, World"
-
- .. versionadded:: 2.7
"""
name = 'PsySH console session for PHP'
url = 'https://psysh.org/'
aliases = ['psysh']
+ version_added = '2.7'
def __init__(self, **options):
options['startinline'] = True
@@ -172,6 +170,7 @@ class PhpLexer(RegexLexer):
aliases = ['php', 'php3', 'php4', 'php5']
filenames = ['*.php', '*.php[345]', '*.inc']
mimetypes = ['text/x-php']
+ version_added = ''
# Note that a backslash is included, PHP uses a backslash as a namespace
# separator.
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pointless.py b/contrib/python/Pygments/py3/pygments/lexers/pointless.py
index eb73b2a795..4e330e3da6 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pointless.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pointless.py
@@ -4,7 +4,7 @@
Lexers for Pointless.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['PointlessLexer']
class PointlessLexer(RegexLexer):
"""
For Pointless source code.
-
- .. versionadded:: 2.7
"""
name = 'Pointless'
url = 'https://ptls.dev'
aliases = ['pointless']
filenames = ['*.ptls']
+ version_added = '2.7'
ops = words([
"+", "-", "*", "/", "**", "%", "+=", "-=", "*=",
diff --git a/contrib/python/Pygments/py3/pygments/lexers/pony.py b/contrib/python/Pygments/py3/pygments/lexers/pony.py
index 03adc5fd0f..f7fc769db5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/pony.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/pony.py
@@ -4,7 +4,7 @@
Lexers for Pony and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,13 +18,13 @@ __all__ = ['PonyLexer']
class PonyLexer(RegexLexer):
"""
For Pony source code.
-
- .. versionadded:: 2.4
"""
name = 'Pony'
aliases = ['pony']
filenames = ['*.pony']
+ url = 'https://www.ponylang.io'
+ version_added = '2.4'
_caps = r'(iso|trn|ref|val|box|tag)'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/praat.py b/contrib/python/Pygments/py3/pygments/lexers/praat.py
index c8533a65c3..e10cac489e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/praat.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/praat.py
@@ -4,7 +4,7 @@
Lexer for Praat
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['PraatLexer']
class PraatLexer(RegexLexer):
"""
For Praat scripts.
-
- .. versionadded:: 2.1
"""
name = 'Praat'
url = 'http://www.praat.org'
aliases = ['praat']
filenames = ['*.praat', '*.proc', '*.psc']
+ version_added = '2.1'
keywords = (
'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/procfile.py b/contrib/python/Pygments/py3/pygments/lexers/procfile.py
index 4f9b59c350..3b42b3164d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/procfile.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/procfile.py
@@ -4,7 +4,7 @@
Lexer for Procfile file format.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,13 +20,12 @@ class ProcfileLexer(RegexLexer):
The format is used to run processes on Heroku or is used by Foreman or
Honcho tools.
-
- .. versionadded:: 2.10
"""
name = 'Procfile'
url = 'https://devcenter.heroku.com/articles/procfile#procfile-format'
aliases = ['procfile']
filenames = ['Procfile']
+ version_added = '2.10'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/prolog.py b/contrib/python/Pygments/py3/pygments/lexers/prolog.py
index 37c1e9c7ed..7578889d11 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/prolog.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/prolog.py
@@ -4,7 +4,7 @@
Lexers for Prolog and Prolog-like languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,6 +25,8 @@ class PrologLexer(RegexLexer):
aliases = ['prolog']
filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
mimetypes = ['text/x-prolog']
+ url = 'https://en.wikipedia.org/wiki/Prolog'
+ version_added = ''
tokens = {
'root': [
@@ -89,8 +91,6 @@ class PrologLexer(RegexLexer):
class LogtalkLexer(RegexLexer):
"""
For Logtalk source code.
-
- .. versionadded:: 0.10
"""
name = 'Logtalk'
@@ -98,6 +98,7 @@ class LogtalkLexer(RegexLexer):
aliases = ['logtalk']
filenames = ['*.lgt', '*.logtalk']
mimetypes = ['text/x-logtalk']
+ version_added = '0.10'
tokens = {
'root': [
@@ -148,7 +149,7 @@ class LogtalkLexer(RegexLexer):
# Control constructs
(r'(ca(ll|tch)|throw)(?=[(])', Keyword),
(r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
- (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
+ (r'(uninstantiation|type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
# All solutions
(r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
# Multi-threading predicates
@@ -229,13 +230,13 @@ class LogtalkLexer(RegexLexer):
(r'[?@]', Operator),
# Existential quantifier
(r'\^', Operator),
- # Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
# Punctuation
(r'[()\[\],.|]', Text),
# Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
(r"'", String, 'quoted_atom'),
+ # Double-quoted terms
+ (r'"', String, 'double_quoted_term'),
],
'quoted_atom': [
@@ -246,6 +247,14 @@ class LogtalkLexer(RegexLexer):
(r'\\', String),
],
+ 'double_quoted_term': [
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r'[^\\"\n]+', String),
+ (r'\\', String),
+ ],
+
'directive': [
# Conditional compilation directives
(r'(el)?if(?=[(])', Keyword, 'root'),
@@ -279,8 +288,8 @@ class LogtalkLexer(RegexLexer):
# Atoms
(r"[a-z][a-zA-Z0-9_]*", Text),
(r"'", String, 'quoted_atom'),
- # Strings
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ # Double-quoted terms
+ (r'"', String, 'double_quoted_term'),
# End of entity-opening directive
(r'([)]\.)', Text, 'root'),
# Scope operator
diff --git a/contrib/python/Pygments/py3/pygments/lexers/promql.py b/contrib/python/Pygments/py3/pygments/lexers/promql.py
index b6d2d666a5..7996c3d074 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/promql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/promql.py
@@ -4,7 +4,7 @@
Lexer for Prometheus Query Language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,6 +29,7 @@ class PromQLLexer(RegexLexer):
url = 'https://prometheus.io/docs/prometheus/latest/querying/basics/'
aliases = ["promql"]
filenames = ["*.promql"]
+ version_added = ''
base_keywords = (
words(
diff --git a/contrib/python/Pygments/py3/pygments/lexers/prql.py b/contrib/python/Pygments/py3/pygments/lexers/prql.py
index 4c2f12ef3c..1f22eb29d0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/prql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/prql.py
@@ -4,7 +4,7 @@
Lexer for the PRQL query language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,8 +19,6 @@ class PrqlLexer(RegexLexer):
"""
For PRQL source code.
- .. versionadded:: 2.17
-
grammar: https://github.com/PRQL/prql/tree/main/grammars
"""
@@ -29,6 +27,7 @@ class PrqlLexer(RegexLexer):
aliases = ['prql']
filenames = ['*.prql']
mimetypes = ['application/prql', 'application/x-prql']
+ version_added = '2.17'
builtinTypes = words((
"bool",
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ptx.py b/contrib/python/Pygments/py3/pygments/lexers/ptx.py
index 218d69465a..405e7f7e2d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ptx.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ptx.py
@@ -4,7 +4,7 @@
Lexer for other PTX language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,13 @@ class PtxLexer(RegexLexer):
"""
For NVIDIA `PTX <https://docs.nvidia.com/cuda/parallel-thread-execution/>`_
source.
-
- .. versionadded:: 2.16
"""
name = 'PTX'
url = "https://docs.nvidia.com/cuda/parallel-thread-execution/"
filenames = ['*.ptx']
aliases = ['ptx']
mimetypes = ['text/x-ptx']
+ version_added = '2.16'
#: optional Comment or Whitespace
string = r'"[^"]*?"'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/python.py b/contrib/python/Pygments/py3/pygments/lexers/python.py
index cdb88ab43a..287305c75c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/python.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/python.py
@@ -4,15 +4,14 @@
Lexers for Python and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
import keyword
-from pygments.lexer import DelegatingLexer, Lexer, RegexLexer, include, \
- bygroups, using, default, words, combined, do_insertions, this, line_re
+from pygments.lexer import DelegatingLexer, RegexLexer, include, \
+ bygroups, using, default, words, combined, this
from pygments.util import get_bool_opt, shebang_matches
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Other, Error, Whitespace
@@ -27,8 +26,6 @@ class PythonLexer(RegexLexer):
"""
For Python source code (version 3.x).
- .. versionadded:: 0.10
-
.. versionchanged:: 2.5
This is now the default ``PythonLexer``. It is still available as the
alias ``Python3Lexer``.
@@ -61,8 +58,9 @@ class PythonLexer(RegexLexer):
]
mimetypes = ['text/x-python', 'application/x-python',
'text/x-python3', 'application/x-python3']
+ version_added = '0.10'
- uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
+ uni_name = f"[{uni.xid_start}][{uni.xid_continue}]*"
def innerstring_rules(ttype):
return [
@@ -224,7 +222,8 @@ class PythonLexer(RegexLexer):
r'(match|case)\b' # a possible keyword
r'(?![ \t]*(?:' # not followed by...
r'[:,;=^&|@~)\]}]|(?:' + # characters and keywords that mean this isn't
- r'|'.join(keyword.kwlist) + r')\b))', # pattern matching
+ # pattern matching (but None/True/False is ok)
+ r'|'.join(k for k in keyword.kwlist if k[0].islower()) + r')\b))',
bygroups(Text, Keyword), 'soft-keywords-inner'),
],
'soft-keywords-inner': [
@@ -429,6 +428,7 @@ class Python2Lexer(RegexLexer):
aliases = ['python2', 'py2']
filenames = [] # now taken over by PythonLexer (3.x)
mimetypes = ['text/x-python2', 'application/x-python2']
+ version_added = ''
def innerstring_rules(ttype):
return [
@@ -637,7 +637,7 @@ class Python2Lexer(RegexLexer):
class _PythonConsoleLexerBase(RegexLexer):
name = 'Python console session'
- aliases = ['pycon']
+ aliases = ['pycon', 'python-console']
mimetypes = ['text/x-python-doctest']
"""Auxiliary lexer for `PythonConsoleLexer`.
@@ -696,8 +696,10 @@ class PythonConsoleLexer(DelegatingLexer):
"""
name = 'Python console session'
- aliases = ['pycon']
+ aliases = ['pycon', 'python-console']
mimetypes = ['text/x-python-doctest']
+ url = 'https://python.org'
+ version_added = ''
def __init__(self, **options):
python3 = get_bool_opt(options, 'python3', True)
@@ -721,8 +723,6 @@ class PythonTracebackLexer(RegexLexer):
"""
For Python 3.x tracebacks, with support for chained exceptions.
- .. versionadded:: 1.0
-
.. versionchanged:: 2.5
This is now the default ``PythonTracebackLexer``. It is still available
as the alias ``Python3TracebackLexer``.
@@ -732,6 +732,8 @@ class PythonTracebackLexer(RegexLexer):
aliases = ['pytb', 'py3tb']
filenames = ['*.pytb', '*.py3tb']
mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
+ url = 'https://python.org'
+ version_added = '1.0'
tokens = {
'root': [
@@ -778,8 +780,6 @@ class Python2TracebackLexer(RegexLexer):
"""
For Python tracebacks.
- .. versionadded:: 0.7
-
.. versionchanged:: 2.5
This class has been renamed from ``PythonTracebackLexer``.
``PythonTracebackLexer`` now refers to the Python 3 variant.
@@ -789,6 +789,8 @@ class Python2TracebackLexer(RegexLexer):
aliases = ['py2tb']
filenames = ['*.py2tb']
mimetypes = ['text/x-python2-traceback']
+ url = 'https://python.org'
+ version_added = '0.7'
tokens = {
'root': [
@@ -825,8 +827,6 @@ class Python2TracebackLexer(RegexLexer):
class CythonLexer(RegexLexer):
"""
For Pyrex and Cython source code.
-
- .. versionadded:: 1.1
"""
name = 'Cython'
@@ -834,6 +834,7 @@ class CythonLexer(RegexLexer):
aliases = ['cython', 'pyx', 'pyrex']
filenames = ['*.pyx', '*.pxd', '*.pxi']
mimetypes = ['text/x-cython', 'application/x-cython']
+ version_added = '1.1'
tokens = {
'root': [
@@ -1007,13 +1008,13 @@ class DgLexer(RegexLexer):
Lexer for dg,
a functional and object-oriented programming language
running on the CPython 3 VM.
-
- .. versionadded:: 1.6
"""
name = 'dg'
aliases = ['dg']
filenames = ['*.dg']
mimetypes = ['text/x-dg']
+ url = 'http://pyos.github.io/dg'
+ version_added = '1.6'
tokens = {
'root': [
@@ -1104,13 +1105,12 @@ class DgLexer(RegexLexer):
class NumPyLexer(PythonLexer):
"""
A Python lexer recognizing Numerical Python builtins.
-
- .. versionadded:: 0.10
"""
name = 'NumPy'
url = 'https://numpy.org/'
aliases = ['numpy']
+ version_added = '0.10'
# override the mimetypes to not inherit them from python
mimetypes = []
diff --git a/contrib/python/Pygments/py3/pygments/lexers/q.py b/contrib/python/Pygments/py3/pygments/lexers/q.py
index e5c159137a..6ab121c887 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/q.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/q.py
@@ -4,7 +4,7 @@
Lexer for the Q programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,14 +17,14 @@ __all__ = ["KLexer", "QLexer"]
class KLexer(RegexLexer):
"""
- For `K <https://code.kx.com/>`_ source code.
-
- .. versionadded:: 2.12
+ For K source code.
"""
name = "K"
aliases = ["k"]
filenames = ["*.k"]
+ url = "https://code.kx.com"
+ version_added = '2.12'
tokens = {
"whitespace": [
@@ -152,13 +152,12 @@ class KLexer(RegexLexer):
class QLexer(KLexer):
"""
For `Q <https://code.kx.com/>`_ source code.
-
- .. versionadded:: 2.12
"""
name = "Q"
aliases = ["q"]
filenames = ["*.q"]
+ version_added = '2.12'
tokens = {
"root": [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/qlik.py b/contrib/python/Pygments/py3/pygments/lexers/qlik.py
index b265b60164..b9b8721a2e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/qlik.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/qlik.py
@@ -4,7 +4,7 @@
Lexer for the qlik scripting language
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,13 +22,13 @@ __all__ = ["QlikLexer"]
class QlikLexer(RegexLexer):
"""
Lexer for qlik code, including .qvs files
-
- .. versionadded:: 2.12
"""
name = "Qlik"
aliases = ["qlik", "qlikview", "qliksense", "qlikscript"]
filenames = ["*.qvs", "*.qvw"]
+ url = "https://qlik.com"
+ version_added = '2.12'
flags = re.IGNORECASE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/qvt.py b/contrib/python/Pygments/py3/pygments/lexers/qvt.py
index dc329f7ca3..c3012100ce 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/qvt.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/qvt.py
@@ -4,7 +4,7 @@
Lexer for QVT Operational language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,11 +18,11 @@ __all__ = ['QVToLexer']
class QVToLexer(RegexLexer):
"""
- For the `QVT Operational Mapping language <http://www.omg.org/spec/QVT/1.1/>`_.
+ For the QVT Operational Mapping language.
Reference for implementing this: «Meta Object Facility (MOF) 2.0
Query/View/Transformation Specification», Version 1.1 - January 2011
- (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
+ (https://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
particular.
Notable tokens assignments:
@@ -41,6 +41,8 @@ class QVToLexer(RegexLexer):
name = 'QVTO'
aliases = ['qvto', 'qvt']
filenames = ['*.qvto']
+ url = 'https://www.omg.org/spec/QVT/1.1'
+ version_added = ''
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/r.py b/contrib/python/Pygments/py3/pygments/lexers/r.py
index ed62fa2e3f..8d7e4a8dfa 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/r.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/r.py
@@ -4,7 +4,7 @@
Lexers for the R/S languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -28,6 +28,8 @@ class RConsoleLexer(Lexer):
name = 'RConsole'
aliases = ['rconsole', 'rout']
filenames = ['*.Rout']
+ url = 'https://www.r-project.org'
+ version_added = ''
def get_tokens_unprocessed(self, text):
slexer = SLexer(**self.options)
@@ -67,8 +69,6 @@ class RConsoleLexer(Lexer):
class SLexer(RegexLexer):
"""
For S, S-plus, and R source code.
-
- .. versionadded:: 0.10
"""
name = 'S'
@@ -76,6 +76,8 @@ class SLexer(RegexLexer):
filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
'text/x-R', 'text/x-r-history', 'text/x-r-profile']
+ url = 'https://www.r-project.org'
+ version_added = '0.10'
valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
tokens = {
@@ -126,7 +128,7 @@ class SLexer(RegexLexer):
],
'root': [
# calls:
- (r'(%s)\s*(?=\()' % valid_name, Name.Function),
+ (rf'({valid_name})\s*(?=\()', Name.Function),
include('statements'),
# blocks:
(r'\{|\}', Punctuation),
@@ -159,13 +161,13 @@ class RdLexer(RegexLexer):
than the macros. A description of Rd syntax is found in `Writing R
Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
-
- .. versionadded:: 1.6
"""
name = 'Rd'
aliases = ['rd']
filenames = ['*.Rd']
mimetypes = ['text/x-r-doc']
+ url = 'http://cran.r-project.org/doc/manuals/R-exts.html'
+ version_added = '1.6'
# To account for verbatim / LaTeX-like / and R-like areas
# would require parsing.
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rdf.py b/contrib/python/Pygments/py3/pygments/lexers/rdf.py
index c4fb998c4d..1a0571be7a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rdf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rdf.py
@@ -4,7 +4,7 @@
Lexers for semantic web and RDF query languages and markup.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,14 @@ __all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
class SparqlLexer(RegexLexer):
"""
- Lexer for `SPARQL <https://www.w3.org/TR/sparql11-query/>`_ query language.
-
- .. versionadded:: 2.0
+ Lexer for SPARQL query language.
"""
name = 'SPARQL'
aliases = ['sparql']
filenames = ['*.rq', '*.sparql']
mimetypes = ['application/sparql-query']
+ url = 'https://www.w3.org/TR/sparql11-query'
+ version_added = '2.0'
# character group definitions ::
@@ -177,14 +177,14 @@ class SparqlLexer(RegexLexer):
class TurtleLexer(RegexLexer):
"""
- Lexer for `Turtle <http://www.w3.org/TR/turtle/>`_ data language.
-
- .. versionadded:: 2.1
+ Lexer for Turtle data language.
"""
name = 'Turtle'
aliases = ['turtle']
filenames = ['*.ttl']
mimetypes = ['text/turtle', 'application/x-turtle']
+ url = 'https://www.w3.org/TR/turtle'
+ version_added = '2.1'
# character group definitions ::
PN_CHARS_BASE_GRP = ('a-zA-Z'
@@ -243,10 +243,10 @@ class TurtleLexer(RegexLexer):
(r'\s+', Text),
# Base / prefix
- (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
+ (r'(@base|BASE)(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
Punctuation)),
- (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
+ (r'(@prefix|PREFIX)(\s+){PNAME_NS}(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
Name.Variable, Whitespace, Punctuation)),
@@ -254,7 +254,7 @@ class TurtleLexer(RegexLexer):
(r'(?<=\s)a(?=\s)', Keyword.Type),
# IRIREF
- (r'%(IRIREF)s' % patterns, Name.Variable),
+ (r'{IRIREF}'.format(**patterns), Name.Variable),
# PrefixedName
(r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
@@ -305,7 +305,7 @@ class TurtleLexer(RegexLexer):
(r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
bygroups(Operator, Generic.Emph), '#pop:2'),
- (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
+ (r'(\^\^){IRIREF}'.format(**patterns), bygroups(Operator, Generic.Emph), '#pop:2'),
default('#pop:2'),
@@ -316,18 +316,20 @@ class TurtleLexer(RegexLexer):
# but each has a recognizable and distinct syntax.
def analyse_text(text):
for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
- if re.search(r'^\s*%s' % t, text):
+ if re.search(rf'^\s*{t}', text):
return 0.80
class ShExCLexer(RegexLexer):
"""
- Lexer for `ShExC <https://shex.io/shex-semantics/#shexc>`_ shape expressions language syntax.
+ Lexer for ShExC shape expressions language syntax.
"""
name = 'ShExC'
aliases = ['shexc', 'shex']
filenames = ['*.shex']
mimetypes = ['text/shex']
+ url = 'https://shex.io/shex-semantics/#shexc'
+ version_added = ''
# character group definitions ::
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rebol.py b/contrib/python/Pygments/py3/pygments/lexers/rebol.py
index 6170f0c367..94656f4f26 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rebol.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rebol.py
@@ -4,7 +4,7 @@
Lexers for the REBOL and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,14 @@ __all__ = ['RebolLexer', 'RedLexer']
class RebolLexer(RegexLexer):
"""
- A `REBOL <http://www.rebol.com/>`_ lexer.
-
- .. versionadded:: 1.1
+ A REBOL lexer.
"""
name = 'REBOL'
aliases = ['rebol']
filenames = ['*.r', '*.r3', '*.reb']
mimetypes = ['text/x-rebol']
+ url = 'http://www.rebol.com'
+ version_added = '1.1'
flags = re.IGNORECASE | re.MULTILINE
@@ -240,14 +240,14 @@ class RebolLexer(RegexLexer):
class RedLexer(RegexLexer):
"""
- A `Red-language <http://www.red-lang.org/>`_ lexer.
-
- .. versionadded:: 2.0
+ A Red-language lexer.
"""
name = 'Red'
aliases = ['red', 'red/system']
filenames = ['*.red', '*.reds']
mimetypes = ['text/x-red', 'text/x-red-system']
+ url = 'https://www.red-lang.org'
+ version_added = '2.0'
flags = re.IGNORECASE | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/resource.py b/contrib/python/Pygments/py3/pygments/lexers/resource.py
index 2583ba874e..f2e965cd1f 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/resource.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/resource.py
@@ -4,7 +4,7 @@
Lexer for resource definition files.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['ResourceLexer']
class ResourceLexer(RegexLexer):
- """Lexer for `ICU Resource bundles
- <http://userguide.icu-project.org/locale/resources>`_.
-
- .. versionadded:: 2.0
+ """Lexer for ICU Resource bundles.
"""
name = 'ResourceBundle'
aliases = ['resourcebundle', 'resource']
filenames = []
+ url = 'https://unicode-org.github.io/icu/userguide/locale/resources.html'
+ version_added = '2.0'
_types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
':int', ':alias')
@@ -37,7 +36,7 @@ class ResourceLexer(RegexLexer):
(r'"', String, 'string'),
(r'-?\d+', Number.Integer),
(r'[,{}]', Operator),
- (r'([^\s{:]+)(\s*)(%s?)' % '|'.join(_types),
+ (r'([^\s{{:]+)(\s*)({}?)'.format('|'.join(_types)),
bygroups(Name, Text, Keyword)),
(r'\s+', Text),
(words(_types), Keyword),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ride.py b/contrib/python/Pygments/py3/pygments/lexers/ride.py
index 077fcc786e..1c3ba928b1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ride.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ride.py
@@ -4,7 +4,7 @@
Lexer for the Ride programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,16 +17,15 @@ __all__ = ['RideLexer']
class RideLexer(RegexLexer):
"""
- For `Ride <https://docs.wavesplatform.com/en/ride/about-ride.html>`_
- source code.
-
- .. versionadded:: 2.6
+ For Ride source code.
"""
name = 'Ride'
aliases = ['ride']
filenames = ['*.ride']
mimetypes = ['text/x-ride']
+ url = 'https://docs.waves.tech/en/ride'
+ version_added = '2.6'
validName = r'[a-zA-Z_][a-zA-Z0-9_\']*'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rita.py b/contrib/python/Pygments/py3/pygments/lexers/rita.py
index 9aa856977e..53da9be262 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rita.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rita.py
@@ -4,7 +4,7 @@
Lexers for RITA language
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['RitaLexer']
class RitaLexer(RegexLexer):
"""
Lexer for RITA.
-
- .. versionadded:: 2.11
"""
name = 'Rita'
url = 'https://github.com/zaibacu/rita-dsl'
filenames = ['*.rita']
aliases = ['rita']
mimetypes = ['text/rita']
+ version_added = '2.11'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rnc.py b/contrib/python/Pygments/py3/pygments/lexers/rnc.py
index d71717538c..af157d63bd 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rnc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rnc.py
@@ -4,7 +4,7 @@
Lexer for Relax-NG Compact syntax
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['RNCCompactLexer']
class RNCCompactLexer(RegexLexer):
"""
For RelaxNG-compact syntax.
-
- .. versionadded:: 2.2
"""
name = 'Relax-NG Compact'
url = 'http://relaxng.org'
aliases = ['rng-compact', 'rnc']
filenames = ['*.rnc']
+ version_added = '2.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/roboconf.py b/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
index 5d7d76e0bb..7e0b105449 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/roboconf.py
@@ -4,7 +4,7 @@
Lexers for Roboconf DSL.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,12 +17,12 @@ __all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
class RoboconfGraphLexer(RegexLexer):
"""
Lexer for Roboconf graph files.
-
- .. versionadded:: 2.1
"""
name = 'Roboconf Graph'
aliases = ['roboconf-graph']
filenames = ['*.graph']
+ url = 'https://roboconf.github.io/en/user-guide/graph-definition.html'
+ version_added = '2.1'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
@@ -52,12 +52,12 @@ class RoboconfGraphLexer(RegexLexer):
class RoboconfInstancesLexer(RegexLexer):
"""
Lexer for Roboconf instances files.
-
- .. versionadded:: 2.1
"""
name = 'Roboconf Instances'
aliases = ['roboconf-instances']
filenames = ['*.instances']
+ url = 'https://roboconf.github.io'
+ version_added = '2.1'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/robotframework.py b/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
index 3b676cce2a..5449441940 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/robotframework.py
@@ -4,7 +4,7 @@
Lexer for Robot Framework.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -57,14 +57,13 @@ class RobotFrameworkLexer(Lexer):
For Robot Framework test data.
Supports both space and pipe separated plain text formats.
-
- .. versionadded:: 1.6
"""
name = 'RobotFramework'
url = 'http://robotframework.org'
aliases = ['robotframework']
filenames = ['*.robot', '*.resource']
mimetypes = ['text/x-robotframework']
+ version_added = '1.6'
def __init__(self, **options):
options['tabsize'] = 2
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ruby.py b/contrib/python/Pygments/py3/pygments/lexers/ruby.py
index 466d6e7521..134ff779f2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ruby.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ruby.py
@@ -4,7 +4,7 @@
Lexers for Ruby and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -36,6 +36,7 @@ class RubyLexer(ExtendedRegexLexer):
filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
'*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
mimetypes = ['text/x-ruby', 'application/x-ruby']
+ version_added = ''
flags = re.DOTALL | re.MULTILINE
@@ -124,7 +125,7 @@ class RubyLexer(ExtendedRegexLexer):
('backtick', String.Backtick, '`'):
states['simple-'+name] = [
include('string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
+ (rf'[^\\{end}#]+', ttype),
(r'[\\#]', ttype),
(end, ttype, '#pop'),
]
@@ -399,6 +400,8 @@ class RubyConsoleLexer(Lexer):
name = 'Ruby irb session'
aliases = ['rbcon', 'irb']
mimetypes = ['text/x-ruby-shellsession']
+ url = 'https://www.ruby-lang.org'
+ version_added = ''
_example = 'rbcon/console'
_prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
@@ -436,14 +439,13 @@ class FancyLexer(RegexLexer):
Fancy is a self-hosted, pure object-oriented, dynamic,
class-based, concurrent general-purpose programming language
running on Rubinius, the Ruby VM.
-
- .. versionadded:: 1.5
"""
name = 'Fancy'
url = 'https://github.com/bakkdoor/fancy'
filenames = ['*.fy', '*.fancypack']
aliases = ['fancy', 'fy']
mimetypes = ['text/x-fancysrc']
+ version_added = '1.5'
tokens = {
# copied from PerlLexer:
diff --git a/contrib/python/Pygments/py3/pygments/lexers/rust.py b/contrib/python/Pygments/py3/pygments/lexers/rust.py
index db68bb3461..04e15c8c77 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/rust.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/rust.py
@@ -4,7 +4,7 @@
Lexers for the Rust language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['RustLexer']
class RustLexer(RegexLexer):
"""
Lexer for the Rust programming language (version 1.47).
-
- .. versionadded:: 1.6
"""
name = 'Rust'
url = 'https://www.rust-lang.org/'
filenames = ['*.rs', '*.rs.in']
aliases = ['rust', 'rs']
mimetypes = ['text/rust', 'text/x-rust']
+ version_added = '1.6'
keyword_types = (words((
'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
@@ -99,16 +98,16 @@ class RustLexer(RegexLexer):
(r'let\b', Keyword.Declaration),
(r'fn\b', Keyword, 'funcname'),
(r'(struct|enum|type|union)\b', Keyword, 'typename'),
- (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
+ (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Whitespace, Keyword)),
keyword_types,
(r'[sS]elf\b', Name.Builtin.Pseudo),
# Prelude (taken from Rust's src/libstd/prelude.rs)
builtin_funcs_types,
builtin_macros,
# Path separators, so types don't catch them.
- (r'::\b', Text),
+ (r'::\b', Punctuation),
# Types in positions.
- (r'(?::|->)', Text, 'typename'),
+ (r'(?::|->)', Punctuation, 'typename'),
# Labels
(r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
bygroups(Keyword, Text.Whitespace, Name.Label)),
@@ -157,7 +156,7 @@ class RustLexer(RegexLexer):
# Misc
# Lone hashes: not used in Rust syntax, but allowed in macro
# arguments, most famously for quote::quote!()
- (r'#', Text),
+ (r'#', Punctuation),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
@@ -172,17 +171,17 @@ class RustLexer(RegexLexer):
(r'[*/]', String.Doc),
],
'modname': [
- (r'\s+', Text),
+ (r'\s+', Whitespace),
(r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
default('#pop'),
],
'funcname': [
- (r'\s+', Text),
+ (r'\s+', Whitespace),
(r'[a-zA-Z_]\w*', Name.Function, '#pop'),
default('#pop'),
],
'typename': [
- (r'\s+', Text),
+ (r'\s+', Whitespace),
(r'&', Keyword.Pseudo),
(r"'", Operator, 'lifetime'),
builtin_funcs_types,
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sas.py b/contrib/python/Pygments/py3/pygments/lexers/sas.py
index c34066b02e..c3039090eb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sas.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sas.py
@@ -4,7 +4,7 @@
Lexer for SAS.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,8 +19,6 @@ __all__ = ['SASLexer']
class SASLexer(RegexLexer):
"""
For SAS files.
-
- .. versionadded:: 2.2
"""
# Syntax from syntax/sas.vim by James Kidd <james.kidd@covance.com>
@@ -28,6 +26,8 @@ class SASLexer(RegexLexer):
aliases = ['sas']
filenames = ['*.SAS', '*.sas']
mimetypes = ['text/x-sas', 'text/sas', 'application/x-sas']
+ url = 'https://en.wikipedia.org/wiki/SAS_(software)'
+ version_added = '2.2'
flags = re.IGNORECASE | re.MULTILINE
builtins_macros = (
diff --git a/contrib/python/Pygments/py3/pygments/lexers/savi.py b/contrib/python/Pygments/py3/pygments/lexers/savi.py
index 48927f3abf..f65818f37c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/savi.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/savi.py
@@ -4,7 +4,7 @@
Lexer for Savi.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -30,141 +30,142 @@ __all__ = ['SaviLexer']
# (rouge, vscode, etc) so that all of the lexers can be kept cleanly in sync.
class SaviLexer(RegexLexer):
- """
- For Savi source code.
+ """
+ For Savi source code.
- .. versionadded: 2.10
- """
+ .. versionadded: 2.10
+ """
- name = 'Savi'
- url = 'https://github.com/savi-lang/savi'
- aliases = ['savi']
- filenames = ['*.savi']
+ name = 'Savi'
+ url = 'https://github.com/savi-lang/savi'
+ aliases = ['savi']
+ filenames = ['*.savi']
+ version_added = ''
- tokens = {
- "root": [
- # Line Comment
- (r'//.*?$', Comment.Single),
+ tokens = {
+ "root": [
+ # Line Comment
+ (r'//.*?$', Comment.Single),
- # Doc Comment
- (r'::.*?$', Comment.Single),
+ # Doc Comment
+ (r'::.*?$', Comment.Single),
- # Capability Operator
- (r'(\')(\w+)(?=[^\'])', bygroups(Operator, Name)),
+ # Capability Operator
+ (r'(\')(\w+)(?=[^\'])', bygroups(Operator, Name)),
- # Double-Quote String
- (r'\w?"', String.Double, "string.double"),
+ # Double-Quote String
+ (r'\w?"', String.Double, "string.double"),
- # Single-Char String
- (r"'", String.Char, "string.char"),
+ # Single-Char String
+ (r"'", String.Char, "string.char"),
- # Type Name
- (r'(_?[A-Z]\w*)', Name.Class),
+ # Type Name
+ (r'(_?[A-Z]\w*)', Name.Class),
+
+ # Nested Type Name
+ (r'(\.)(\s*)(_?[A-Z]\w*)', bygroups(Punctuation, Whitespace, Name.Class)),
- # Nested Type Name
- (r'(\.)(\s*)(_?[A-Z]\w*)', bygroups(Punctuation, Whitespace, Name.Class)),
-
- # Declare
- (r'^([ \t]*)(:\w+)',
- bygroups(Whitespace, Name.Tag),
- "decl"),
-
- # Error-Raising Calls/Names
- (r'((\w+|\+|\-|\*)\!)', Generic.Deleted),
-
- # Numeric Values
- (r'\b\d([\d_]*(\.[\d_]+)?)\b', Number),
-
- # Hex Numeric Values
- (r'\b0x([0-9a-fA-F_]+)\b', Number.Hex),
-
- # Binary Numeric Values
- (r'\b0b([01_]+)\b', Number.Bin),
-
- # Function Call (with braces)
- (r'\w+(?=\()', Name.Function),
-
- # Function Call (with receiver)
- (r'(\.)(\s*)(\w+)', bygroups(Punctuation, Whitespace, Name.Function)),
-
- # Function Call (with self receiver)
- (r'(@)(\w+)', bygroups(Punctuation, Name.Function)),
-
- # Parenthesis
- (r'\(', Punctuation, "root"),
- (r'\)', Punctuation, "#pop"),
-
- # Brace
- (r'\{', Punctuation, "root"),
- (r'\}', Punctuation, "#pop"),
-
- # Bracket
- (r'\[', Punctuation, "root"),
- (r'(\])(\!)', bygroups(Punctuation, Generic.Deleted), "#pop"),
- (r'\]', Punctuation, "#pop"),
-
- # Punctuation
- (r'[,;:\.@]', Punctuation),
-
- # Piping Operators
- (r'(\|\>)', Operator),
-
- # Branching Operators
- (r'(\&\&|\|\||\?\?|\&\?|\|\?|\.\?)', Operator),
-
- # Comparison Operators
- (r'(\<\=\>|\=\~|\=\=|\<\=|\>\=|\<|\>)', Operator),
-
- # Arithmetic Operators
- (r'(\+|\-|\/|\*|\%)', Operator),
-
- # Assignment Operators
- (r'(\=)', Operator),
-
- # Other Operators
- (r'(\!|\<\<|\<|\&|\|)', Operator),
-
- # Identifiers
- (r'\b\w+\b', Name),
-
- # Whitespace
- (r'[ \t\r]+\n*|\n+', Whitespace),
- ],
-
- # Declare (nested rules)
- "decl": [
- (r'\b[a-z_]\w*\b(?!\!)', Keyword.Declaration),
- (r':', Punctuation, "#pop"),
- (r'\n', Whitespace, "#pop"),
- include("root"),
- ],
-
- # Double-Quote String (nested rules)
- "string.double": [
- (r'\\\(', String.Interpol, "string.interpolation"),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\[bfnrt\\\']', String.Escape),
- (r'\\"', String.Escape),
- (r'"', String.Double, "#pop"),
- (r'[^\\"]+', String.Double),
- (r'.', Error),
- ],
-
- # Single-Char String (nested rules)
- "string.char": [
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\[bfnrt\\\']', String.Escape),
- (r"\\'", String.Escape),
- (r"'", String.Char, "#pop"),
- (r"[^\\']+", String.Char),
- (r'.', Error),
- ],
-
- # Interpolation inside String (nested rules)
- "string.interpolation": [
- (r"\)", String.Interpol, "#pop"),
- include("root"),
- ]
- }
+ # Declare
+ (r'^([ \t]*)(:\w+)',
+ bygroups(Whitespace, Name.Tag),
+ "decl"),
+
+ # Error-Raising Calls/Names
+ (r'((\w+|\+|\-|\*)\!)', Generic.Deleted),
+
+ # Numeric Values
+ (r'\b\d([\d_]*(\.[\d_]+)?)\b', Number),
+
+ # Hex Numeric Values
+ (r'\b0x([0-9a-fA-F_]+)\b', Number.Hex),
+
+ # Binary Numeric Values
+ (r'\b0b([01_]+)\b', Number.Bin),
+
+ # Function Call (with braces)
+ (r'\w+(?=\()', Name.Function),
+
+ # Function Call (with receiver)
+ (r'(\.)(\s*)(\w+)', bygroups(Punctuation, Whitespace, Name.Function)),
+
+ # Function Call (with self receiver)
+ (r'(@)(\w+)', bygroups(Punctuation, Name.Function)),
+
+ # Parenthesis
+ (r'\(', Punctuation, "root"),
+ (r'\)', Punctuation, "#pop"),
+
+ # Brace
+ (r'\{', Punctuation, "root"),
+ (r'\}', Punctuation, "#pop"),
+
+ # Bracket
+ (r'\[', Punctuation, "root"),
+ (r'(\])(\!)', bygroups(Punctuation, Generic.Deleted), "#pop"),
+ (r'\]', Punctuation, "#pop"),
+
+ # Punctuation
+ (r'[,;:\.@]', Punctuation),
+
+ # Piping Operators
+ (r'(\|\>)', Operator),
+
+ # Branching Operators
+ (r'(\&\&|\|\||\?\?|\&\?|\|\?|\.\?)', Operator),
+
+ # Comparison Operators
+ (r'(\<\=\>|\=\~|\=\=|\<\=|\>\=|\<|\>)', Operator),
+
+ # Arithmetic Operators
+ (r'(\+|\-|\/|\*|\%)', Operator),
+
+ # Assignment Operators
+ (r'(\=)', Operator),
+
+ # Other Operators
+ (r'(\!|\<\<|\<|\&|\|)', Operator),
+
+ # Identifiers
+ (r'\b\w+\b', Name),
+
+ # Whitespace
+ (r'[ \t\r]+\n*|\n+', Whitespace),
+ ],
+
+ # Declare (nested rules)
+ "decl": [
+ (r'\b[a-z_]\w*\b(?!\!)', Keyword.Declaration),
+ (r':', Punctuation, "#pop"),
+ (r'\n', Whitespace, "#pop"),
+ include("root"),
+ ],
+
+ # Double-Quote String (nested rules)
+ "string.double": [
+ (r'\\\(', String.Interpol, "string.interpolation"),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ (r'\\[bfnrt\\\']', String.Escape),
+ (r'\\"', String.Escape),
+ (r'"', String.Double, "#pop"),
+ (r'[^\\"]+', String.Double),
+ (r'.', Error),
+ ],
+
+ # Single-Char String (nested rules)
+ "string.char": [
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ (r'\\[bfnrt\\\']', String.Escape),
+ (r"\\'", String.Escape),
+ (r"'", String.Char, "#pop"),
+ (r"[^\\']+", String.Char),
+ (r'.', Error),
+ ],
+
+ # Interpolation inside String (nested rules)
+ "string.interpolation": [
+ (r"\)", String.Interpol, "#pop"),
+ include("root"),
+ ]
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/scdoc.py b/contrib/python/Pygments/py3/pygments/lexers/scdoc.py
index 90478acf6b..fecdd9c921 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/scdoc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/scdoc.py
@@ -4,7 +4,7 @@
Lexer for scdoc, a simple man page generator.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,13 +19,12 @@ __all__ = ['ScdocLexer']
class ScdocLexer(RegexLexer):
"""
`scdoc` is a simple man page generator for POSIX systems written in C99.
-
- .. versionadded:: 2.5
"""
name = 'scdoc'
url = 'https://git.sr.ht/~sircmpwn/scdoc'
aliases = ['scdoc', 'scd']
filenames = ['*.scd', '*.scdoc']
+ version_added = '2.5'
flags = re.MULTILINE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/scripting.py b/contrib/python/Pygments/py3/pygments/lexers/scripting.py
index eab7ec95cd..d38387e073 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/scripting.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/scripting.py
@@ -4,7 +4,7 @@
Lexer for scripting and embedded languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -16,7 +16,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error, Whitespace, Other
from pygments.util import get_bool_opt, get_list_opt
-__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
+__all__ = ['LuaLexer', 'LuauLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
@@ -49,11 +49,12 @@ class LuaLexer(RegexLexer):
aliases = ['lua']
filenames = ['*.lua', '*.wlua']
mimetypes = ['text/x-lua', 'application/x-lua']
+ version_added = ''
_comment_multiline = r'(?:--\[(?P<level>=*)\[[\w\W]*?\](?P=level)\])'
_comment_single = r'(?:--.*$)'
_space = r'(?:\s+)'
- _s = r'(?:%s|%s|%s)' % (_comment_multiline, _comment_single, _space)
+ _s = rf'(?:{_comment_multiline}|{_comment_single}|{_space})'
_name = r'(?:[^\W\d]\w*)'
tokens = {
@@ -101,7 +102,7 @@ class LuaLexer(RegexLexer):
'funcname': [
include('ws'),
(r'[.:]', Punctuation),
- (r'%s(?=%s*[.:])' % (_name, _s), Name.Class),
+ (rf'{_name}(?={_s}*[.:])', Name.Class),
(_name, Name.Function, '#pop'),
# inline function
(r'\(', Punctuation, '#pop'),
@@ -162,11 +163,324 @@ class LuaLexer(RegexLexer):
continue
yield index, token, value
+def _luau_make_expression(should_pop, _s):
+ temp_list = [
+ (r'0[xX][\da-fA-F_]*', Number.Hex, '#pop'),
+ (r'0[bB][\d_]*', Number.Bin, '#pop'),
+ (r'\.?\d[\d_]*(?:\.[\d_]*)?(?:[eE][+-]?[\d_]+)?', Number.Float, '#pop'),
+
+ (words((
+ 'true', 'false', 'nil'
+ ), suffix=r'\b'), Keyword.Constant, '#pop'),
+
+ (r'\[(=*)\[[.\n]*?\]\1\]', String, '#pop'),
+
+ (r'(\.)([a-zA-Z_]\w*)(?=%s*[({"\'])', bygroups(Punctuation, Name.Function), '#pop'),
+ (r'(\.)([a-zA-Z_]\w*)', bygroups(Punctuation, Name.Variable), '#pop'),
+
+ (rf'[a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*(?={_s}*[({{"\'])', Name.Other, '#pop'),
+ (r'[a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*', Name, '#pop'),
+ ]
+ if should_pop:
+ return temp_list
+ return [entry[:2] for entry in temp_list]
+
+def _luau_make_expression_special(should_pop):
+ temp_list = [
+ (r'\{', Punctuation, ('#pop', 'closing_brace_base', 'expression')),
+ (r'\(', Punctuation, ('#pop', 'closing_parenthesis_base', 'expression')),
+
+ (r'::?', Punctuation, ('#pop', 'type_end', 'type_start')),
+
+ (r"'", String.Single, ('#pop', 'string_single')),
+ (r'"', String.Double, ('#pop', 'string_double')),
+ (r'`', String.Backtick, ('#pop', 'string_interpolated')),
+ ]
+ if should_pop:
+ return temp_list
+ return [(entry[0], entry[1], entry[2][1:]) for entry in temp_list]
+
+class LuauLexer(RegexLexer):
+ """
+ For Luau source code.
+
+ Additional options accepted:
+
+ `include_luau_builtins`
+ If given and ``True``, automatically highlight Luau builtins
+ (default: ``True``).
+ `include_roblox_builtins`
+ If given and ``True``, automatically highlight Roblox-specific builtins
+ (default: ``False``).
+ `additional_builtins`
+ If given, must be a list of additional builtins to highlight.
+ `disabled_builtins`
+ If given, must be a list of builtins that will not be highlighted.
+ """
+
+ name = 'Luau'
+ url = 'https://luau-lang.org/'
+ aliases = ['luau']
+ filenames = ['*.luau']
+ version_added = '2.18'
+
+ _comment_multiline = r'(?:--\[(?P<level>=*)\[[\w\W]*?\](?P=level)\])'
+ _comment_single = r'(?:--.*$)'
+ _s = r'(?:{}|{}|{})'.format(_comment_multiline, _comment_single, r'\s+')
+
+ tokens = {
+ 'root': [
+ (r'#!.*', Comment.Hashbang, 'base'),
+ default('base'),
+ ],
+
+ 'ws': [
+ (_comment_multiline, Comment.Multiline),
+ (_comment_single, Comment.Single),
+ (r'\s+', Whitespace),
+ ],
+
+ 'base': [
+ include('ws'),
+
+ *_luau_make_expression_special(False),
+ (r'\.\.\.', Punctuation),
+
+ (rf'type\b(?={_s}+[a-zA-Z_])', Keyword.Reserved, 'type_declaration'),
+ (rf'export\b(?={_s}+[a-zA-Z_])', Keyword.Reserved),
+
+ (r'(?:\.\.|//|[+\-*\/%^<>=])=?', Operator, 'expression'),
+ (r'~=', Operator, 'expression'),
+
+ (words((
+ 'and', 'or', 'not'
+ ), suffix=r'\b'), Operator.Word, 'expression'),
+
+ (words((
+ 'elseif', 'for', 'if', 'in', 'repeat', 'return', 'until',
+ 'while'), suffix=r'\b'), Keyword.Reserved, 'expression'),
+ (r'local\b', Keyword.Declaration, 'expression'),
+
+ (r'function\b', Keyword.Reserved, ('expression', 'func_name')),
+
+ (r'[\])};]+', Punctuation),
+
+ include('expression_static'),
+ *_luau_make_expression(False, _s),
+
+ (r'[\[.,]', Punctuation, 'expression'),
+ ],
+ 'expression_static': [
+ (words((
+ 'break', 'continue', 'do', 'else', 'elseif', 'end', 'for',
+ 'if', 'in', 'repeat', 'return', 'then', 'until', 'while'),
+ suffix=r'\b'), Keyword.Reserved),
+ ],
+ 'expression': [
+ include('ws'),
+
+ (r'if\b', Keyword.Reserved, ('ternary', 'expression')),
+
+ (r'local\b', Keyword.Declaration),
+ *_luau_make_expression_special(True),
+ (r'\.\.\.', Punctuation, '#pop'),
+
+ (r'function\b', Keyword.Reserved, 'func_name'),
+
+ include('expression_static'),
+ *_luau_make_expression(True, _s),
+
+ default('#pop'),
+ ],
+ 'ternary': [
+ include('ws'),
+
+ (r'else\b', Keyword.Reserved, '#pop'),
+ (words((
+ 'then', 'elseif',
+ ), suffix=r'\b'), Operator.Reserved, 'expression'),
+
+ default('#pop'),
+ ],
+
+ 'closing_brace_pop': [
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'closing_parenthesis_pop': [
+ (r'\)', Punctuation, '#pop'),
+ ],
+ 'closing_gt_pop': [
+ (r'>', Punctuation, '#pop'),
+ ],
+
+ 'closing_parenthesis_base': [
+ include('closing_parenthesis_pop'),
+ include('base'),
+ ],
+ 'closing_parenthesis_type': [
+ include('closing_parenthesis_pop'),
+ include('type'),
+ ],
+ 'closing_brace_base': [
+ include('closing_brace_pop'),
+ include('base'),
+ ],
+ 'closing_brace_type': [
+ include('closing_brace_pop'),
+ include('type'),
+ ],
+ 'closing_gt_type': [
+ include('closing_gt_pop'),
+ include('type'),
+ ],
+
+ 'string_escape': [
+ (r'\\z\s*', String.Escape),
+ (r'\\(?:[abfnrtvz\\"\'`\{\n])|[\r\n]{1,2}|x[\da-fA-F]{2}|\d{1,3}|'
+ r'u\{\}[\da-fA-F]*\}', String.Escape),
+ ],
+ 'string_single': [
+ include('string_escape'),
+
+ (r"'", String.Single, "#pop"),
+ (r"[^\\']+", String.Single),
+ ],
+ 'string_double': [
+ include('string_escape'),
+
+ (r'"', String.Double, "#pop"),
+ (r'[^\\"]+', String.Double),
+ ],
+ 'string_interpolated': [
+ include('string_escape'),
+
+ (r'\{', Punctuation, ('closing_brace_base', 'expression')),
+
+ (r'`', String.Backtick, "#pop"),
+ (r'[^\\`\{]+', String.Backtick),
+ ],
+
+ 'func_name': [
+ include('ws'),
+
+ (r'[.:]', Punctuation),
+ (rf'[a-zA-Z_]\w*(?={_s}*[.:])', Name.Class),
+ (r'[a-zA-Z_]\w*', Name.Function),
+
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ (r'\(', Punctuation, '#pop'),
+ ],
+
+ 'type': [
+ include('ws'),
+
+ (r'\(', Punctuation, 'closing_parenthesis_type'),
+ (r'\{', Punctuation, 'closing_brace_type'),
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ (r"'", String.Single, 'string_single'),
+ (r'"', String.Double, 'string_double'),
+
+ (r'[|&\.,\[\]:=]+', Punctuation),
+ (r'->', Punctuation),
+
+ (r'typeof\(', Name.Builtin, ('closing_parenthesis_base',
+ 'expression')),
+ (r'[a-zA-Z_]\w*', Name.Class),
+ ],
+ 'type_start': [
+ include('ws'),
+
+ (r'\(', Punctuation, ('#pop', 'closing_parenthesis_type')),
+ (r'\{', Punctuation, ('#pop', 'closing_brace_type')),
+ (r'<', Punctuation, ('#pop', 'closing_gt_type')),
+
+ (r"'", String.Single, ('#pop', 'string_single')),
+ (r'"', String.Double, ('#pop', 'string_double')),
+
+ (r'typeof\(', Name.Builtin, ('#pop', 'closing_parenthesis_base',
+ 'expression')),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ ],
+ 'type_end': [
+ include('ws'),
+
+ (r'[|&\.]', Punctuation, 'type_start'),
+ (r'->', Punctuation, 'type_start'),
+
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ default('#pop'),
+ ],
+ 'type_declaration': [
+ include('ws'),
+
+ (r'[a-zA-Z_]\w*', Name.Class),
+ (r'<', Punctuation, 'closing_gt_type'),
+
+ (r'=', Punctuation, ('#pop', 'type_end', 'type_start')),
+ ],
+ }
+
+ def __init__(self, **options):
+ self.include_luau_builtins = get_bool_opt(
+ options, 'include_luau_builtins', True)
+ self.include_roblox_builtins = get_bool_opt(
+ options, 'include_roblox_builtins', False)
+ self.additional_builtins = get_list_opt(options, 'additional_builtins', [])
+ self.disabled_builtins = get_list_opt(options, 'disabled_builtins', [])
+
+ self._builtins = set(self.additional_builtins)
+ if self.include_luau_builtins:
+ from pygments.lexers._luau_builtins import LUAU_BUILTINS
+ self._builtins.update(LUAU_BUILTINS)
+ if self.include_roblox_builtins:
+ from pygments.lexers._luau_builtins import ROBLOX_BUILTINS
+ self._builtins.update(ROBLOX_BUILTINS)
+ if self.additional_builtins:
+ self._builtins.update(self.additional_builtins)
+ self._builtins.difference_update(self.disabled_builtins)
+
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Other:
+ split_value = value.split('.')
+ complete_value = []
+ new_index = index
+ for position in range(len(split_value), 0, -1):
+ potential_string = '.'.join(split_value[:position])
+ if potential_string in self._builtins:
+ yield index, Name.Builtin, potential_string
+ new_index += len(potential_string)
+
+ if complete_value:
+ yield new_index, Punctuation, '.'
+ new_index += 1
+ break
+ complete_value.insert(0, split_value[position - 1])
+
+ for position, substring in enumerate(complete_value):
+ if position + 1 == len(complete_value):
+ if token is Name:
+ yield new_index, Name.Variable, substring
+ continue
+ yield new_index, Name.Function, substring
+ continue
+ yield new_index, Name.Variable, substring
+ new_index += len(substring)
+ yield new_index, Punctuation, '.'
+ new_index += 1
+
+ continue
+ yield index, token, value
+
class MoonScriptLexer(LuaLexer):
"""
For MoonScript source code.
-
- .. versionadded:: 1.5
"""
name = 'MoonScript'
@@ -174,6 +488,7 @@ class MoonScriptLexer(LuaLexer):
aliases = ['moonscript', 'moon']
filenames = ['*.moon']
mimetypes = ['text/x-moonscript', 'application/x-moonscript']
+ version_added = '1.5'
tokens = {
'root': [
@@ -234,8 +549,6 @@ class MoonScriptLexer(LuaLexer):
class ChaiscriptLexer(RegexLexer):
"""
For ChaiScript source code.
-
- .. versionadded:: 2.0
"""
name = 'ChaiScript'
@@ -243,6 +556,7 @@ class ChaiscriptLexer(RegexLexer):
aliases = ['chaiscript', 'chai']
filenames = ['*.chai']
mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+ version_added = '2.0'
flags = re.DOTALL | re.MULTILINE
@@ -301,14 +615,14 @@ class ChaiscriptLexer(RegexLexer):
class LSLLexer(RegexLexer):
"""
For Second Life's Linden Scripting Language source code.
-
- .. versionadded:: 2.0
"""
name = 'LSL'
aliases = ['lsl']
filenames = ['*.lsl']
mimetypes = ['text/x-lsl']
+ url = 'https://wiki.secondlife.com/wiki/Linden_Scripting_Language'
+ version_added = '2.0'
flags = re.MULTILINE
@@ -389,14 +703,13 @@ class AppleScriptLexer(RegexLexer):
<http://developer.apple.com/documentation/AppleScript/
Reference/StudioReference>`_.
Contributed by Andreas Amann <aamann@mac.com>.
-
- .. versionadded:: 1.0
"""
name = 'AppleScript'
url = 'https://developer.apple.com/library/archive/documentation/AppleScript/Conceptual/AppleScriptLangGuide/introduction/ASLR_intro.html'
aliases = ['applescript']
filenames = ['*.applescript']
+ version_added = '1.0'
flags = re.MULTILINE | re.DOTALL
@@ -672,26 +985,26 @@ class AppleScriptLexer(RegexLexer):
r'numeric strings|punctuation|white space)',
bygroups(Keyword, Name.Builtin)),
(r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
- (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
+ (r"\b({})\b".format('|'.join(Operators)), Operator.Word),
(r'^(\s*(?:on|end)\s+)'
- r'(%s)' % '|'.join(StudioEvents[::-1]),
+ r'({})'.format('|'.join(StudioEvents[::-1])),
bygroups(Keyword, Name.Function)),
(r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\b(as )(%s)\b' % '|'.join(Classes),
+ (r'\b(as )({})\b'.format('|'.join(Classes)),
bygroups(Keyword, Name.Class)),
- (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
- (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(Control), Keyword),
- (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
- (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
- (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
- (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(Literals)), Name.Constant),
+ (r'\b({})\b'.format('|'.join(Commands)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(Control)), Keyword),
+ (r'\b({})\b'.format('|'.join(Declarations)), Keyword),
+ (r'\b({})\b'.format('|'.join(Reserved)), Name.Builtin),
+ (r'\b({})s?\b'.format('|'.join(BuiltIn)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(HandlerParams)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(StudioProperties)), Name.Attribute),
+ (r'\b({})s?\b'.format('|'.join(StudioClasses)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(StudioCommands)), Name.Builtin),
+ (r'\b({})\b'.format('|'.join(References)), Name.Builtin),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
- (r'\b(%s)\b' % Identifiers, Name.Variable),
+ (rf'\b({Identifiers})\b', Name.Variable),
(r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
(r'[-+]?\d+', Number.Integer),
],
@@ -710,14 +1023,13 @@ class RexxLexer(RegexLexer):
a wide range of different platforms with its roots found on mainframe
systems. It is popular for I/O- and data based tasks and can act as glue
language to bind different applications together.
-
- .. versionadded:: 2.0
"""
name = 'Rexx'
url = 'http://www.rexxinfo.org/'
aliases = ['rexx', 'arexx']
filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
mimetypes = ['text/x-rexx']
+ version_added = '2.0'
flags = re.IGNORECASE
tokens = {
@@ -781,7 +1093,8 @@ class RexxLexer(RegexLexer):
]
}
- _c = lambda s: re.compile(s, re.MULTILINE)
+ def _c(s):
+ return re.compile(s, re.MULTILINE)
_ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
_ADDRESS_PATTERN = _c(r'^\s*address\s+')
_DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
@@ -821,14 +1134,13 @@ class RexxLexer(RegexLexer):
class MOOCodeLexer(RegexLexer):
"""
For MOOCode (the MOO scripting language).
-
- .. versionadded:: 0.9
"""
name = 'MOOCode'
url = 'http://www.moo.mud.org/'
filenames = ['*.moo']
aliases = ['moocode', 'moo']
mimetypes = ['text/x-moocode']
+ version_added = '0.9'
tokens = {
'root': [
@@ -864,14 +1176,14 @@ class MOOCodeLexer(RegexLexer):
class HybrisLexer(RegexLexer):
"""
For Hybris source code.
-
- .. versionadded:: 1.4
"""
name = 'Hybris'
- aliases = ['hybris', 'hy']
- filenames = ['*.hy', '*.hyb']
+ aliases = ['hybris']
+ filenames = ['*.hyb']
mimetypes = ['text/x-hybris', 'application/x-hybris']
+ url = 'https://github.com/evilsocket/hybris'
+ version_added = '1.4'
flags = re.MULTILINE | re.DOTALL
@@ -962,13 +1274,13 @@ class EasytrieveLexer(RegexLexer):
converting sequential data. Furthermore it can layout data for reports.
It is mainly used on mainframe platforms and can access several of the
mainframe's native file formats. It is somewhat comparable to awk.
-
- .. versionadded:: 2.1
"""
name = 'Easytrieve'
aliases = ['easytrieve']
filenames = ['*.ezt', '*.mac']
mimetypes = ['text/x-easytrieve']
+ url = 'https://www.broadcom.com/products/mainframe/application-development/easytrieve-report-generator'
+ version_added = '2.1'
flags = 0
# Note: We cannot use r'\b' at the start and end of keywords because
@@ -1154,13 +1466,14 @@ class JclLexer(RegexLexer):
is a scripting language used on mainframe platforms to instruct the system
on how to run a batch job or start a subsystem. It is somewhat
comparable to MS DOS batch and Unix shell scripts.
-
- .. versionadded:: 2.1
"""
name = 'JCL'
aliases = ['jcl']
filenames = ['*.jcl']
mimetypes = ['text/x-jcl']
+ url = 'https://en.wikipedia.org/wiki/Job_Control_Language'
+ version_added = '2.1'
+
flags = re.IGNORECASE
tokens = {
@@ -1234,8 +1547,6 @@ class JclLexer(RegexLexer):
class MiniScriptLexer(RegexLexer):
"""
For MiniScript source code.
-
- .. versionadded:: 2.6
"""
name = 'MiniScript'
@@ -1243,6 +1554,7 @@ class MiniScriptLexer(RegexLexer):
aliases = ['miniscript', 'ms']
filenames = ['*.ms']
mimetypes = ['text/x-minicript', 'application/x-miniscript']
+ version_added = '2.6'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sgf.py b/contrib/python/Pygments/py3/pygments/lexers/sgf.py
index 0fad263e0e..21861e4a20 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sgf.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sgf.py
@@ -4,7 +4,7 @@
Lexer for Smart Game Format (sgf) file format.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,13 +20,12 @@ class SmartGameFormatLexer(RegexLexer):
The format is used to store game records of board games for two players
(mainly Go game).
-
- .. versionadded:: 2.4
"""
name = 'SmartGameFormat'
url = 'https://www.red-bean.com/sgf/'
aliases = ['sgf']
filenames = ['*.sgf']
+ version_added = '2.4'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/shell.py b/contrib/python/Pygments/py3/pygments/lexers/shell.py
index eabf4ec942..cfab8591cc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/shell.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/shell.py
@@ -4,7 +4,7 @@
Lexers for various shells.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,18 +25,18 @@ __all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
class BashLexer(RegexLexer):
"""
Lexer for (ba|k|z|)sh shell scripts.
-
- .. versionadded:: 0.6
"""
name = 'Bash'
- aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
+ aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'*.exheres-0', '*.exlib', '*.zsh',
'.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
'.kshrc', 'kshrc',
'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
+ url = 'https://en.wikipedia.org/wiki/Unix_shell'
+ version_added = '0.6'
tokens = {
'root': [
@@ -129,14 +129,13 @@ class BashLexer(RegexLexer):
class SlurmBashLexer(BashLexer):
"""
Lexer for (ba|k|z|)sh Slurm scripts.
-
- .. versionadded:: 2.4
"""
name = 'Slurm'
aliases = ['slurm', 'sbatch']
filenames = ['*.sl']
mimetypes = []
+ version_added = '2.4'
EXTRA_KEYWORDS = {'srun'}
def get_tokens_unprocessed(self, text):
@@ -225,14 +224,14 @@ class BashSessionLexer(ShellSessionBaseLexer):
"""
Lexer for Bash shell sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 1.1
"""
name = 'Bash Session'
aliases = ['console', 'shell-session']
filenames = ['*.sh-session', '*.shell-session']
mimetypes = ['application/x-shell-session', 'application/x-sh-session']
+ url = 'https://en.wikipedia.org/wiki/Unix_shell'
+ version_added = '1.1'
_innerLexerCls = BashLexer
_ps1rgx = re.compile(
@@ -244,13 +243,13 @@ class BashSessionLexer(ShellSessionBaseLexer):
class BatchLexer(RegexLexer):
"""
Lexer for the DOS/Windows Batch file format.
-
- .. versionadded:: 0.7
"""
name = 'Batchfile'
aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
filenames = ['*.bat', '*.cmd']
mimetypes = ['application/x-dos-batch']
+ url = 'https://en.wikipedia.org/wiki/Batch_file'
+ version_added = '0.7'
flags = re.MULTILINE | re.IGNORECASE
@@ -258,28 +257,25 @@ class BatchLexer(RegexLexer):
_punct = r'&<>|'
_ws = r'\t\v\f\r ,;=\xa0'
_nlws = r'\s\x1a\xa0,;='
- _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
- _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
- (_nl, _ws, _nl, _punct))
- _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
- _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
- _label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl)
- _label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl)
- _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
+ _space = rf'(?:(?:(?:\^[{_nl}])?[{_ws}])+)'
+ _keyword_terminator = (rf'(?=(?:\^[{_nl}]?)?[{_ws}+./:[\\\]]|[{_nl}{_punct}(])')
+ _token_terminator = rf'(?=\^?[{_ws}]|[{_punct}{_nl}])'
+ _start_label = rf'((?:(?<=^[^:])|^[^:]?)[{_ws}]*)(:)'
+ _label = rf'(?:(?:[^{_nlws}{_punct}+:^]|\^[{_nl}]?[\w\W])*)'
+ _label_compound = rf'(?:(?:[^{_nlws}{_punct}+:^)]|\^[{_nl}]?[^)])*)'
+ _number = rf'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+){_token_terminator})'
_opword = r'(?:equ|geq|gtr|leq|lss|neq)'
- _string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
- _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
- r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
- r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
- r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
- r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
- (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
- _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct)
- _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct)
- _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
- _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
- _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token))
+ _string = rf'(?:"[^{_nl}"]*(?:"|(?=[{_nl}])))'
+ _variable = (r'(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
+ rf'[^%:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%{_nl}^]|'
+ rf'\^[^%{_nl}])[^={_nl}]*=(?:[^%{_nl}^]|\^[^%{_nl}])*)?)?%))|'
+ rf'(?:\^?![^!:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
+ rf'[^!{_nl}^]|\^[^!{_nl}])[^={_nl}]*=(?:[^!{_nl}^]|\^[^!{_nl}])*)?)?\^?!))')
+ _core_token = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct}])+)'
+ _core_token_compound = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct})])+)'
+ _token = rf'(?:[{_punct}]+|{_core_token})'
+ _token_compound = rf'(?:[{_punct}]+|{_core_token_compound})'
+ _stoken = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token})+)')
def _make_begin_state(compound, _core_token=_core_token,
_core_token_compound=_core_token_compound,
@@ -288,81 +284,71 @@ class BatchLexer(RegexLexer):
_space=_space, _start_label=_start_label,
_stoken=_stoken, _token_terminator=_token_terminator,
_variable=_variable, _ws=_ws):
- rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
+ rest = '(?:{}|{}|[^"%{}{}{}])*'.format(_string, _variable, _nl, _punct,
')' if compound else '')
- rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
- rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
- set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
+ rest_of_line = rf'(?:(?:[^{_nl}^]|\^[{_nl}]?[\w\W])*)'
+ rest_of_line_compound = rf'(?:(?:[^{_nl}^)]|\^[{_nl}]?[^)])*)'
+ set_space = rf'((?:(?:\^[{_nl}]?)?[^\S\n])*)'
suffix = ''
if compound:
- _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
- _token_terminator = r'(?:(?=\))|%s)' % _token_terminator
+ _keyword_terminator = rf'(?:(?=\))|{_keyword_terminator})'
+ _token_terminator = rf'(?:(?=\))|{_token_terminator})'
suffix = '/compound'
return [
((r'\)', Punctuation, '#pop') if compound else
- (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
+ (rf'\)((?=\()|{_token_terminator}){rest_of_line}',
Comment.Single)),
- (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
+ (rf'(?={_start_label})', Text, f'follow{suffix}'),
(_space, using(this, state='text')),
- include('redirect%s' % suffix),
- (r'[%s]+' % _nl, Text),
+ include(f'redirect{suffix}'),
+ (rf'[{_nl}]+', Text),
(r'\(', Punctuation, 'root/compound'),
(r'@+', Punctuation),
- (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
- r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
- (_nl, _token_terminator, _space,
- _core_token_compound if compound else _core_token, _nl, _nl),
+ (rf'((?:for|if|rem)(?:(?=(?:\^[{_nl}]?)?/)|(?:(?!\^)|'
+ rf'(?<=m))(?:(?=\()|{_token_terminator})))({_space}?{_core_token_compound if compound else _core_token}?(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?)',
bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
- (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
- (_keyword_terminator, rest, _nl, _nl, rest),
+ f'follow{suffix}'),
+ (rf'(goto{_keyword_terminator})({rest}(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?{rest})',
bygroups(Keyword, using(this, state='text')),
- 'follow%s' % suffix),
+ f'follow{suffix}'),
(words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
'title', 'type', 'ver', 'verify', 'vol'),
- suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
- (r'(call)(%s?)(:)' % _space,
+ suffix=_keyword_terminator), Keyword, f'follow{suffix}'),
+ (rf'(call)({_space}?)(:)',
bygroups(Keyword, using(this, state='text'), Punctuation),
- 'call%s' % suffix),
- (r'call%s' % _keyword_terminator, Keyword),
- (r'(for%s(?!\^))(%s)(/f%s)' %
- (_token_terminator, _space, _token_terminator),
+ f'call{suffix}'),
+ (rf'call{_keyword_terminator}', Keyword),
+ (rf'(for{_token_terminator}(?!\^))({_space})(/f{_token_terminator})',
bygroups(Keyword, using(this, state='text'), Keyword),
('for/f', 'for')),
- (r'(for%s(?!\^))(%s)(/l%s)' %
- (_token_terminator, _space, _token_terminator),
+ (rf'(for{_token_terminator}(?!\^))({_space})(/l{_token_terminator})',
bygroups(Keyword, using(this, state='text'), Keyword),
('for/l', 'for')),
- (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
- (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
+ (rf'for{_token_terminator}(?!\^)', Keyword, ('for2', 'for')),
+ (rf'(goto{_keyword_terminator})({_space}?)(:?)',
bygroups(Keyword, using(this, state='text'), Punctuation),
- 'label%s' % suffix),
- (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
- (_token_terminator, _space, _token_terminator, _space,
- _token_terminator, _space),
+ f'label{suffix}'),
+ (rf'(if(?:(?=\()|{_token_terminator})(?!\^))({_space}?)((?:/i{_token_terminator})?)({_space}?)((?:not{_token_terminator})?)({_space}?)',
bygroups(Keyword, using(this, state='text'), Keyword,
using(this, state='text'), Keyword,
using(this, state='text')), ('(?', 'if')),
- (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
- (_token_terminator, _space, _stoken, _keyword_terminator,
- rest_of_line_compound if compound else rest_of_line),
- Comment.Single, 'follow%s' % suffix),
- (r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
+ (rf'rem(((?=\()|{_token_terminator}){_space}?{_stoken}?.*|{_keyword_terminator}{rest_of_line_compound if compound else rest_of_line})',
+ Comment.Single, f'follow{suffix}'),
+ (rf'(set{_keyword_terminator}){set_space}(/a)',
bygroups(Keyword, using(this, state='text'), Keyword),
- 'arithmetic%s' % suffix),
- (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
- r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
- (_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
+ f'arithmetic{suffix}'),
+ (r'(set{}){}((?:/p)?){}((?:(?:(?:\^[{}]?)?[^"{}{}^={}]|'
+ r'\^[{}]?[^"=])+)?)((?:(?:\^[{}]?)?=)?)'.format(_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
')' if compound else '', _nl, _nl),
bygroups(Keyword, using(this, state='text'), Keyword,
using(this, state='text'), using(this, state='variable'),
Punctuation),
- 'follow%s' % suffix),
- default('follow%s' % suffix)
+ f'follow{suffix}'),
+ default(f'follow{suffix}')
]
def _make_follow_state(compound, _label=_label,
@@ -375,11 +361,10 @@ class BatchLexer(RegexLexer):
if compound:
state.append((r'(?=\))', Text, '#pop'))
state += [
- (r'%s([%s]*)(%s)(.*)' %
- (_start_label, _ws, _label_compound if compound else _label),
+ (rf'{_start_label}([{_ws}]*)({_label_compound if compound else _label})(.*)',
bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
- include('redirect%s' % suffix),
- (r'(?=[%s])' % _nl, Text, '#pop'),
+ include(f'redirect{suffix}'),
+ (rf'(?=[{_nl}])', Text, '#pop'),
(r'\|\|?|&&?', Punctuation, '#pop'),
include('text')
]
@@ -397,9 +382,8 @@ class BatchLexer(RegexLexer):
(r'0x[\da-f]+', Number.Hex),
(r'\d+', Number.Integer),
(r'[(),]+', Punctuation),
- (r'([%s]|%%|\^\^)+' % op, Operator),
- (r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' %
- (_string, _variable, _nl, op, _nlws, _punct, _nlws,
+ (rf'([{op}]|%|\^\^)+', Operator),
+ (r'({}|{}|(\^[{}]?)?[^(){}%\^"{}{}]|\^[{}]?{})+'.format(_string, _variable, _nl, op, _nlws, _punct, _nlws,
r'[^)]' if compound else r'[\w\W]'),
using(this, state='variable')),
(r'(?=[\x00|&])', Text, '#pop'),
@@ -422,8 +406,7 @@ class BatchLexer(RegexLexer):
state = []
if compound:
state.append((r'(?=\))', Text, '#pop'))
- state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
- (_label_compound if compound else _label, _string,
+ state.append((r'({}?)((?:{}|{}|\^[{}]?{}|[^"%^{}{}{}])*)'.format(_label_compound if compound else _label, _string,
_variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
_punct, r')' if compound else ''),
bygroups(Name.Label, Comment.Single), '#pop'))
@@ -434,14 +417,11 @@ class BatchLexer(RegexLexer):
_nl=_nl, _punct=_punct, _stoken=_stoken,
_string=_string, _space=_space,
_variable=_variable, _nlws=_nlws):
- stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
- (_punct, _string, _variable, _core_token_compound))
+ stoken_compound = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token_compound})+)')
return [
- (r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' %
- (_nlws, _nlws),
+ (rf'((?:(?<=[{_nlws}])\d)?)(>>?&|<&)([{_nlws}]*)(\d)',
bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
- (r'((?:(?<=[%s])(?<!\^[%s])\d)?)(>>?|<)(%s?%s)' %
- (_nlws, _nl, _space, stoken_compound if compound else _stoken),
+ (rf'((?:(?<=[{_nlws}])(?<!\^[{_nl}])\d)?)(>>?|<)({_space}?{stoken_compound if compound else _stoken})',
bygroups(Number.Integer, Punctuation, using(this, state='text')))
]
@@ -460,13 +440,13 @@ class BatchLexer(RegexLexer):
'redirect/compound': _make_redirect_state(True),
'variable-or-escape': [
(_variable, Name.Variable),
- (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
+ (rf'%%|\^[{_nl}]?(\^!|[\w\W])', String.Escape)
],
'string': [
(r'"', String.Double, '#pop'),
(_variable, Name.Variable),
(r'\^!|%%', String.Escape),
- (r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
+ (rf'[^"%^{_nl}]+|[%^]', String.Double),
default('#pop')
],
'sqstring': [
@@ -480,34 +460,34 @@ class BatchLexer(RegexLexer):
'text': [
(r'"', String.Double, 'string'),
include('variable-or-escape'),
- (r'[^"%%^%s%s\d)]+|.' % (_nlws, _punct), Text)
+ (rf'[^"%^{_nlws}{_punct}\d)]+|.', Text)
],
'variable': [
(r'"', String.Double, 'string'),
include('variable-or-escape'),
- (r'[^"%%^%s]+|.' % _nl, Name.Variable)
+ (rf'[^"%^{_nl}]+|.', Name.Variable)
],
'for': [
- (r'(%s)(in)(%s)(\()' % (_space, _space),
+ (rf'({_space})(in)({_space})(\()',
bygroups(using(this, state='text'), Keyword,
using(this, state='text'), Punctuation), '#pop'),
include('follow')
],
'for2': [
(r'\)', Punctuation),
- (r'(%s)(do%s)' % (_space, _token_terminator),
+ (rf'({_space})(do{_token_terminator})',
bygroups(using(this, state='text'), Keyword), '#pop'),
- (r'[%s]+' % _nl, Text),
+ (rf'[{_nl}]+', Text),
include('follow')
],
'for/f': [
- (r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws),
+ (rf'(")((?:{_variable}|[^"])*?")([{_nlws}]*)(\))',
bygroups(String.Double, using(this, state='string'), Text,
Punctuation)),
(r'"', String.Double, ('#pop', 'for2', 'string')),
- (r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws),
+ (rf"('(?:%%|{_variable}|[\w\W])*?')([{_nlws}]*)(\))",
bygroups(using(this, state='sqstring'), Text, Punctuation)),
- (r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws),
+ (rf'(`(?:%%|{_variable}|[\w\W])*?`)([{_nlws}]*)(\))',
bygroups(using(this, state='bqstring'), Text, Punctuation)),
include('for2')
],
@@ -516,25 +496,24 @@ class BatchLexer(RegexLexer):
include('for2')
],
'if': [
- (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
- (_token_terminator, _space),
+ (rf'((?:cmdextversion|errorlevel){_token_terminator})({_space})(\d+)',
bygroups(Keyword, using(this, state='text'),
Number.Integer), '#pop'),
- (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
+ (rf'(defined{_token_terminator})({_space})({_stoken})',
bygroups(Keyword, using(this, state='text'),
using(this, state='variable')), '#pop'),
- (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
+ (rf'(exist{_token_terminator})({_space}{_stoken})',
bygroups(Keyword, using(this, state='text')), '#pop'),
- (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
+ (rf'({_number}{_space})({_opword})({_space}{_number})',
bygroups(using(this, state='arithmetic'), Operator.Word,
using(this, state='arithmetic')), '#pop'),
(_stoken, using(this, state='text'), ('#pop', 'if2')),
],
'if2': [
- (r'(%s?)(==)(%s?%s)' % (_space, _space, _stoken),
+ (rf'({_space}?)(==)({_space}?{_stoken})',
bygroups(using(this, state='text'), Operator,
using(this, state='text')), '#pop'),
- (r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
+ (rf'({_space})({_opword})({_space}{_stoken})',
bygroups(using(this, state='text'), Operator.Word,
using(this, state='text')), '#pop')
],
@@ -545,7 +524,7 @@ class BatchLexer(RegexLexer):
],
'else?': [
(_space, using(this, state='text')),
- (r'else%s' % _token_terminator, Keyword, '#pop'),
+ (rf'else{_token_terminator}', Keyword, '#pop'),
default('#pop')
]
}
@@ -555,14 +534,14 @@ class MSDOSSessionLexer(ShellSessionBaseLexer):
"""
Lexer for MS DOS shell sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 2.1
"""
name = 'MSDOS Session'
aliases = ['doscon']
filenames = []
mimetypes = []
+ url = 'https://en.wikipedia.org/wiki/MS-DOS'
+ version_added = '2.1'
_innerLexerCls = BatchLexer
_ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
@@ -572,14 +551,14 @@ class MSDOSSessionLexer(ShellSessionBaseLexer):
class TcshLexer(RegexLexer):
"""
Lexer for tcsh scripts.
-
- .. versionadded:: 0.10
"""
name = 'Tcsh'
aliases = ['tcsh', 'csh']
filenames = ['*.tcsh', '*.csh']
mimetypes = ['application/x-csh']
+ url = 'https://www.tcsh.org'
+ version_added = '0.10'
tokens = {
'root': [
@@ -641,14 +620,14 @@ class TcshSessionLexer(ShellSessionBaseLexer):
"""
Lexer for Tcsh sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 2.1
"""
name = 'Tcsh Session'
aliases = ['tcshcon']
filenames = []
mimetypes = []
+ url = 'https://www.tcsh.org'
+ version_added = '2.1'
_innerLexerCls = TcshLexer
_ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
@@ -658,13 +637,13 @@ class TcshSessionLexer(ShellSessionBaseLexer):
class PowerShellLexer(RegexLexer):
"""
For Windows PowerShell code.
-
- .. versionadded:: 1.5
"""
name = 'PowerShell'
aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
filenames = ['*.ps1', '*.psm1']
mimetypes = ['text/x-powershell']
+ url = 'https://learn.microsoft.com/en-us/powershell'
+ version_added = '1.5'
flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
@@ -721,7 +700,7 @@ class PowerShellLexer(RegexLexer):
# of '$(...)' blocks in strings
(r'\(', Punctuation, 'child'),
(r'\s+', Text),
- (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
+ (r'^(\s*#[#\s]*)(\.(?:{}))([^\n]*$)'.format('|'.join(commenthelp)),
bygroups(Comment, String.Doc, Comment)),
(r'#[^\n]*?$', Comment),
(r'(&lt;|<)#', Comment.Multiline, 'multline'),
@@ -733,10 +712,10 @@ class PowerShellLexer(RegexLexer):
(r"'([^']|'')*'", String.Single),
(r'(\$|@@|@)((global|script|private|env):)?\w+',
Name.Variable),
- (r'(%s)\b' % '|'.join(keywords), Keyword),
- (r'-(%s)\b' % '|'.join(operators), Operator),
- (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
- (r'(%s)\s' % '|'.join(aliases_), Name.Builtin),
+ (r'({})\b'.format('|'.join(keywords)), Keyword),
+ (r'-({})\b'.format('|'.join(operators)), Operator),
+ (r'({})-[a-z_]\w*\b'.format('|'.join(verbs)), Name.Builtin),
+ (r'({})\s'.format('|'.join(aliases_)), Name.Builtin),
(r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant), # .net [type]s
(r'-[a-z_]\w*', Name),
(r'\w+', Name),
@@ -749,7 +728,7 @@ class PowerShellLexer(RegexLexer):
'multline': [
(r'[^#&.]+', Comment.Multiline),
(r'#(>|&gt;)', Comment.Multiline, '#pop'),
- (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
+ (r'\.({})'.format('|'.join(commenthelp)), String.Doc),
(r'[#&.]', Comment.Multiline),
],
'string': [
@@ -773,14 +752,14 @@ class PowerShellSessionLexer(ShellSessionBaseLexer):
"""
Lexer for PowerShell sessions, i.e. command lines, including a
prompt, interspersed with output.
-
- .. versionadded:: 2.1
"""
name = 'PowerShell Session'
aliases = ['pwsh-session', 'ps1con']
filenames = []
mimetypes = []
+ url = 'https://learn.microsoft.com/en-us/powershell'
+ version_added = '2.1'
_innerLexerCls = PowerShellLexer
_bare_continuation = True
@@ -791,14 +770,14 @@ class PowerShellSessionLexer(ShellSessionBaseLexer):
class FishShellLexer(RegexLexer):
"""
Lexer for Fish shell scripts.
-
- .. versionadded:: 2.1
"""
name = 'Fish'
aliases = ['fish', 'fishshell']
filenames = ['*.fish', '*.load']
mimetypes = ['application/x-fish']
+ url = 'https://fishshell.com'
+ version_added = '2.1'
tokens = {
'root': [
@@ -862,15 +841,14 @@ class FishShellLexer(RegexLexer):
class ExeclineLexer(RegexLexer):
"""
- Lexer for Laurent Bercot's execline language
- (https://skarnet.org/software/execline).
-
- .. versionadded:: 2.7
+ Lexer for Laurent Bercot's execline language.
"""
name = 'execline'
aliases = ['execline']
filenames = ['*.exec']
+ url = 'https://skarnet.org/software/execline'
+ version_added = '2.7'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sieve.py b/contrib/python/Pygments/py3/pygments/lexers/sieve.py
index 8287b07e53..19d9d549dc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sieve.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sieve.py
@@ -13,7 +13,7 @@
https://tools.ietf.org/html/rfc5429
https://tools.ietf.org/html/rfc8580
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,12 +27,12 @@ __all__ = ["SieveLexer"]
class SieveLexer(RegexLexer):
"""
Lexer for sieve format.
-
- .. versionadded:: 2.6
"""
name = 'Sieve'
filenames = ['*.siv', '*.sieve']
aliases = ['sieve']
+ url = 'https://en.wikipedia.org/wiki/Sieve_(mail_filtering_language)'
+ version_added = '2.6'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/slash.py b/contrib/python/Pygments/py3/pygments/lexers/slash.py
index cce47ce87f..5ff20592e3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/slash.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/slash.py
@@ -2,10 +2,9 @@
pygments.lexers.slash
~~~~~~~~~~~~~~~~~~~~~
- Lexer for the `Slash <https://github.com/arturadib/Slash-A>`_ programming
- language.
+ Lexer for the Slash programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -171,13 +170,13 @@ class SlashLanguageLexer(ExtendedRegexLexer):
class SlashLexer(DelegatingLexer):
"""
Lexer for the Slash programming language.
-
- .. versionadded:: 2.4
"""
name = 'Slash'
aliases = ['slash']
filenames = ['*.sla']
+ url = 'https://github.com/arturadib/Slash-A'
+ version_added = '2.4'
def __init__(self, **options):
from pygments.lexers.web import HtmlLexer
diff --git a/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py b/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
index 58d870e543..df46aa2b9c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/smalltalk.py
@@ -4,7 +4,7 @@
Lexers for Smalltalk and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,13 @@ class SmalltalkLexer(RegexLexer):
For Smalltalk syntax.
Contributed by Stefan Matthias Aust.
Rewritten by Nils Winter.
-
- .. versionadded:: 0.10
"""
name = 'Smalltalk'
url = 'http://www.smalltalk.org/'
filenames = ['*.st']
aliases = ['smalltalk', 'squeak', 'st']
mimetypes = ['text/x-smalltalk']
+ version_added = '0.10'
tokens = {
'root': [
@@ -140,14 +139,13 @@ class SmalltalkLexer(RegexLexer):
class NewspeakLexer(RegexLexer):
"""
For Newspeak syntax.
-
- .. versionadded:: 1.1
"""
name = 'Newspeak'
url = 'http://newspeaklanguage.org/'
filenames = ['*.ns2']
aliases = ['newspeak', ]
mimetypes = ['text/x-newspeak']
+ version_added = '1.1'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/smithy.py b/contrib/python/Pygments/py3/pygments/lexers/smithy.py
index 3f48bfa455..3835258abc 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/smithy.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/smithy.py
@@ -4,7 +4,7 @@
Lexers for the Smithy IDL.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,13 +18,12 @@ __all__ = ['SmithyLexer']
class SmithyLexer(RegexLexer):
"""
For Smithy IDL
-
- .. versionadded:: 2.10
"""
name = 'Smithy'
url = 'https://awslabs.github.io/smithy/'
filenames = ['*.smithy']
aliases = ['smithy']
+ version_added = '2.10'
unquoted = r'[A-Za-z0-9_\.#$-]+'
identifier = r"[A-Za-z0-9_\.#$-]+"
diff --git a/contrib/python/Pygments/py3/pygments/lexers/smv.py b/contrib/python/Pygments/py3/pygments/lexers/smv.py
index 2584086e24..5bccf22d3a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/smv.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/smv.py
@@ -4,7 +4,7 @@
Lexers for the SMV languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,14 @@ __all__ = ['NuSMVLexer']
class NuSMVLexer(RegexLexer):
"""
Lexer for the NuSMV language.
-
- .. versionadded:: 2.2
"""
name = 'NuSMV'
aliases = ['nusmv']
filenames = ['*.smv']
mimetypes = []
+ url = 'https://nusmv.fbk.eu'
+ version_added = '2.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/snobol.py b/contrib/python/Pygments/py3/pygments/lexers/snobol.py
index 28087de244..c8c1d03d27 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/snobol.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/snobol.py
@@ -4,7 +4,7 @@
Lexers for the SNOBOL language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,14 +21,14 @@ class SnobolLexer(RegexLexer):
Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
Does not require spaces around binary operators.
-
- .. versionadded:: 1.5
"""
name = "Snobol"
aliases = ["snobol"]
filenames = ['*.snobol']
mimetypes = ['text/x-snobol']
+ url = 'https://www.regressive.org/snobol4'
+ version_added = '1.5'
tokens = {
# root state, start of line
diff --git a/contrib/python/Pygments/py3/pygments/lexers/solidity.py b/contrib/python/Pygments/py3/pygments/lexers/solidity.py
index f1654e46fa..b3ad9241d5 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/solidity.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/solidity.py
@@ -4,7 +4,7 @@
Lexers for Solidity.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,14 @@ __all__ = ['SolidityLexer']
class SolidityLexer(RegexLexer):
"""
For Solidity source code.
-
- .. versionadded:: 2.5
"""
name = 'Solidity'
aliases = ['solidity']
filenames = ['*.sol']
mimetypes = []
+ url = 'https://soliditylang.org'
+ version_added = '2.5'
datatype = (
r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/soong.py b/contrib/python/Pygments/py3/pygments/lexers/soong.py
new file mode 100644
index 0000000000..b47d87fb39
--- /dev/null
+++ b/contrib/python/Pygments/py3/pygments/lexers/soong.py
@@ -0,0 +1,78 @@
+"""
+ pygments.lexers.soong
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Soong (Android.bp Blueprint) files.
+
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, include
+from pygments.token import Comment, Name, Number, Operator, Punctuation, \
+ String, Whitespace
+
+__all__ = ['SoongLexer']
+
+class SoongLexer(RegexLexer):
+ name = 'Soong'
+ version_added = '2.18'
+ url = 'https://source.android.com/docs/setup/reference/androidbp'
+ aliases = ['androidbp', 'bp', 'soong']
+ filenames = ['Android.bp']
+
+ tokens = {
+ 'root': [
+ # A variable assignment
+ (r'(\w*)(\s*)(\+?=)(\s*)',
+ bygroups(Name.Variable, Whitespace, Operator, Whitespace),
+ 'assign-rhs'),
+
+ # A top-level module
+ (r'(\w*)(\s*)(\{)',
+ bygroups(Name.Function, Whitespace, Punctuation),
+ 'in-rule'),
+
+ # Everything else
+ include('comments'),
+ (r'\s+', Whitespace), # newlines okay
+ ],
+ 'assign-rhs': [
+ include('expr'),
+ (r'\n', Whitespace, '#pop'),
+ ],
+ 'in-list': [
+ include('expr'),
+ include('comments'),
+ (r'\s+', Whitespace), # newlines okay in a list
+ (r',', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ ],
+ 'in-map': [
+ # A map key
+ (r'(\w+)(:)(\s*)', bygroups(Name, Punctuation, Whitespace)),
+
+ include('expr'),
+ include('comments'),
+ (r'\s+', Whitespace), # newlines okay in a map
+ (r',', Punctuation),
+ (r'\}', Punctuation, '#pop'),
+ ],
+ 'in-rule': [
+ # Just re-use map syntax
+ include('in-map'),
+ ],
+ 'comments': [
+ (r'//.*', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'expr': [
+ (r'(true|false)\b', Name.Builtin),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'".*?"', String),
+ (r'\{', Punctuation, 'in-map'),
+ (r'\[', Punctuation, 'in-list'),
+ (r'\w+', Name),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sophia.py b/contrib/python/Pygments/py3/pygments/lexers/sophia.py
index fc4928c31e..89dbee79eb 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sophia.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sophia.py
@@ -6,7 +6,7 @@
Derived from pygments/lexers/reason.py.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,14 +19,14 @@ __all__ = ['SophiaLexer']
class SophiaLexer(RegexLexer):
"""
A Sophia lexer.
-
- .. versionadded:: 2.11
"""
name = 'Sophia'
aliases = ['sophia']
filenames = ['*.aes']
mimetypes = []
+ url = 'https://docs.aeternity.com/aesophia'
+ version_added = '2.11'
keywords = (
'contract', 'include', 'let', 'switch', 'type', 'record', 'datatype',
@@ -100,4 +100,3 @@ class SophiaLexer(RegexLexer):
default('#pop'),
],
}
-
diff --git a/contrib/python/Pygments/py3/pygments/lexers/special.py b/contrib/python/Pygments/py3/pygments/lexers/special.py
index 45565ac0fd..22ad1e6cec 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/special.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/special.py
@@ -4,7 +4,7 @@
Special lexers.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,6 +26,9 @@ class TextLexer(Lexer):
aliases = ['text']
filenames = ['*.txt']
mimetypes = ['text/plain']
+ url = ""
+ version_added = ''
+
priority = 0.01
def get_tokens_unprocessed(self, text):
@@ -38,11 +41,11 @@ class TextLexer(Lexer):
class OutputLexer(Lexer):
"""
Simple lexer that highlights everything as ``Token.Generic.Output``.
-
- .. versionadded:: 2.10
"""
name = 'Text output'
aliases = ['output']
+ url = ""
+ version_added = '2.10'
def get_tokens_unprocessed(self, text):
yield 0, Generic.Output, text
@@ -65,6 +68,8 @@ class RawTokenLexer(Lexer):
aliases = []
filenames = []
mimetypes = ['application/x-pygments-tokens']
+ url = 'https://pygments.org/docs/formatters/#RawTokenFormatter'
+ version_added = ''
def __init__(self, **options):
self.compress = get_choice_opt(options, 'compress',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/spice.py b/contrib/python/Pygments/py3/pygments/lexers/spice.py
index 5c2d8f2961..ec7cd97857 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/spice.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/spice.py
@@ -4,7 +4,7 @@
Lexers for the Spice programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['SpiceLexer']
class SpiceLexer(RegexLexer):
"""
For Spice source.
-
- .. versionadded:: 2.11
"""
name = 'Spice'
url = 'https://www.spicelang.com'
filenames = ['*.spice']
aliases = ['spice', 'spicelang']
mimetypes = ['text/x-spice']
+ version_added = '2.11'
tokens = {
'root': [
@@ -39,12 +38,13 @@ class SpiceLexer(RegexLexer):
# keywords
(r'(import|as)\b', Keyword.Namespace),
(r'(f|p|type|struct|interface|enum|alias|operator)\b', Keyword.Declaration),
- (words(('if', 'else', 'for', 'foreach', 'do', 'while', 'break',
- 'continue', 'return', 'assert', 'unsafe', 'ext'), suffix=r'\b'), Keyword),
- (words(('const', 'signed', 'unsigned', 'inline', 'public', 'heap'),
+ (words(('if', 'else', 'switch', 'case', 'default', 'for', 'foreach', 'do',
+ 'while', 'break', 'continue', 'fallthrough', 'return', 'assert',
+ 'unsafe', 'ext'), suffix=r'\b'), Keyword),
+ (words(('const', 'signed', 'unsigned', 'inline', 'public', 'heap', 'compose'),
suffix=r'\b'), Keyword.Pseudo),
- (words(('new', 'switch', 'case', 'yield', 'stash', 'pick', 'sync',
- 'class'), suffix=r'\b'), Keyword.Reserved),
+ (words(('new', 'yield', 'stash', 'pick', 'sync', 'class'), suffix=r'\b'),
+ Keyword.Reserved),
(r'(true|false|nil)\b', Keyword.Constant),
(words(('double', 'int', 'short', 'long', 'byte', 'char', 'string',
'bool', 'dyn'), suffix=r'\b'), Keyword.Type),
diff --git a/contrib/python/Pygments/py3/pygments/lexers/sql.py b/contrib/python/Pygments/py3/pygments/lexers/sql.py
index 2880841c37..31b45fbb7d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/sql.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/sql.py
@@ -36,7 +36,7 @@
The ``tests/examplefiles`` contains a few test files with data to be
parsed by these lexers.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -154,13 +154,13 @@ class PostgresBase:
class PostgresLexer(PostgresBase, RegexLexer):
"""
Lexer for the PostgreSQL dialect of SQL.
-
- .. versionadded:: 1.5
"""
name = 'PostgreSQL SQL dialect'
aliases = ['postgresql', 'postgres']
mimetypes = ['text/x-postgresql']
+ url = 'https://www.postgresql.org'
+ version_added = '1.5'
flags = re.IGNORECASE
tokens = {
@@ -210,15 +210,16 @@ class PostgresLexer(PostgresBase, RegexLexer):
class PlPgsqlLexer(PostgresBase, RegexLexer):
"""
Handle the extra syntax in Pl/pgSQL language.
-
- .. versionadded:: 1.5
"""
name = 'PL/pgSQL'
aliases = ['plpgsql']
mimetypes = ['text/x-plpgsql']
+ url = 'https://www.postgresql.org/docs/current/plpgsql.html'
+ version_added = '1.5'
flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
+ # FIXME: use inheritance
+ tokens = {name: state[:] for (name, state) in PostgresLexer.tokens.items()}
# extend the keywords list
for i, pattern in enumerate(tokens['root']):
@@ -252,7 +253,7 @@ class PsqlRegexLexer(PostgresBase, RegexLexer):
aliases = [] # not public
flags = re.IGNORECASE
- tokens = {k: l[:] for (k, l) in PostgresLexer.tokens.items()}
+ tokens = {name: state[:] for (name, state) in PostgresLexer.tokens.items()}
tokens['root'].append(
(r'\\[^\s]+', Keyword.Pseudo, 'psql-command'))
@@ -302,13 +303,13 @@ class lookahead:
class PostgresConsoleLexer(Lexer):
"""
Lexer for psql sessions.
-
- .. versionadded:: 1.5
"""
name = 'PostgreSQL console (psql)'
aliases = ['psql', 'postgresql-console', 'postgres-console']
mimetypes = ['text/x-postgresql-psql']
+ url = 'https://www.postgresql.org'
+ version_added = '1.5'
def get_tokens_unprocessed(self, data):
sql = PsqlRegexLexer(**self.options)
@@ -374,14 +375,14 @@ class PostgresConsoleLexer(Lexer):
class PostgresExplainLexer(RegexLexer):
"""
Handle PostgreSQL EXPLAIN output
-
- .. versionadded:: 2.15
"""
name = 'PostgreSQL EXPLAIN dialect'
aliases = ['postgres-explain']
filenames = ['*.explain']
mimetypes = ['text/x-postgresql-explain']
+ url = 'https://www.postgresql.org/docs/current/using-explain.html'
+ version_added = '2.15'
tokens = {
'root': [
@@ -459,8 +460,8 @@ class PostgresExplainLexer(RegexLexer):
# strings
(r"'(''|[^'])*'", String.Single),
# numbers
- (r'\d+\.\d+', Number.Float),
- (r'(\d+)', Number.Integer),
+ (r'-?\d+\.\d+', Number.Float),
+ (r'(-?\d+)', Number.Integer),
# boolean
(r'(true|false)', Name.Constant),
@@ -567,6 +568,8 @@ class SqlLexer(RegexLexer):
aliases = ['sql']
filenames = ['*.sql']
mimetypes = ['text/x-sql']
+ url = 'https://en.wikipedia.org/wiki/SQL'
+ version_added = ''
flags = re.IGNORECASE
tokens = {
@@ -701,6 +704,8 @@ class TransactSqlLexer(RegexLexer):
aliases = ['tsql', 't-sql']
filenames = ['*.sql']
mimetypes = ['text/x-tsql']
+ url = 'https://www.tsql.info'
+ version_added = ''
flags = re.IGNORECASE
@@ -785,6 +790,8 @@ class MySqlLexer(RegexLexer):
name = 'MySQL'
aliases = ['mysql']
mimetypes = ['text/x-mysql']
+ url = 'https://www.mysql.com'
+ version_added = ''
flags = re.IGNORECASE
tokens = {
@@ -959,14 +966,14 @@ class MySqlLexer(RegexLexer):
class SqliteConsoleLexer(Lexer):
"""
Lexer for example sessions using sqlite3.
-
- .. versionadded:: 0.11
"""
name = 'sqlite3con'
aliases = ['sqlite3']
filenames = ['*.sqlite3-console']
mimetypes = ['text/x-sqlite3-console']
+ url = 'https://www.sqlite.org'
+ version_added = '0.11'
def get_tokens_unprocessed(self, data):
sql = SqlLexer(**self.options)
@@ -1000,14 +1007,13 @@ class SqliteConsoleLexer(Lexer):
class RqlLexer(RegexLexer):
"""
Lexer for Relation Query Language.
-
- .. versionadded:: 2.0
"""
name = 'RQL'
url = 'http://www.logilab.org/project/rql'
aliases = ['rql']
filenames = ['*.rql']
mimetypes = ['text/x-rql']
+ version_added = '2.0'
flags = re.IGNORECASE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/srcinfo.py b/contrib/python/Pygments/py3/pygments/lexers/srcinfo.py
index c4da88b105..abba0c3dc0 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/srcinfo.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/srcinfo.py
@@ -7,7 +7,7 @@
The description of the format can be found in the wiki:
https://wiki.archlinux.org/title/.SRCINFO
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -34,13 +34,13 @@ architecture_dependent_keywords = (
class SrcinfoLexer(RegexLexer):
"""Lexer for .SRCINFO files used by Arch Linux Packages.
-
- .. versionadded:: 2.11
"""
name = 'Srcinfo'
aliases = ['srcinfo']
filenames = ['.SRCINFO']
+ url = 'https://wiki.archlinux.org/title/.SRCINFO'
+ version_added = '2.11'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/stata.py b/contrib/python/Pygments/py3/pygments/lexers/stata.py
index 917c999853..5fca5b743e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/stata.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/stata.py
@@ -4,7 +4,7 @@
Lexer for Stata
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,8 +21,6 @@ __all__ = ['StataLexer']
class StataLexer(RegexLexer):
"""
For Stata do files.
-
- .. versionadded:: 2.2
"""
# Syntax based on
# - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
@@ -31,6 +29,7 @@ class StataLexer(RegexLexer):
name = 'Stata'
url = 'http://www.stata.com/'
+ version_added = '2.2'
aliases = ['stata', 'do']
filenames = ['*.do', '*.ado']
mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
diff --git a/contrib/python/Pygments/py3/pygments/lexers/supercollider.py b/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
index ea7a176764..b4a719cfde 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/supercollider.py
@@ -4,7 +4,7 @@
Lexer for SuperCollider
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,8 +20,6 @@ __all__ = ['SuperColliderLexer']
class SuperColliderLexer(RegexLexer):
"""
For SuperCollider source code.
-
- .. versionadded:: 2.1
"""
name = 'SuperCollider'
@@ -29,6 +27,7 @@ class SuperColliderLexer(RegexLexer):
aliases = ['supercollider', 'sc']
filenames = ['*.sc', '*.scd']
mimetypes = ['application/supercollider', 'text/supercollider']
+ version_added = '2.1'
flags = re.DOTALL | re.MULTILINE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tact.py b/contrib/python/Pygments/py3/pygments/lexers/tact.py
new file mode 100644
index 0000000000..8ede906f21
--- /dev/null
+++ b/contrib/python/Pygments/py3/pygments/lexers/tact.py
@@ -0,0 +1,303 @@
+"""
+ pygments.lexers.tact
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Tact.
+
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Comment, Operator, Keyword, Name, String, \
+ Number, Whitespace, Punctuation
+
+__all__ = ['TactLexer']
+
+
+class TactLexer(RegexLexer):
+ """For Tact source code."""
+
+ name = 'Tact'
+ aliases = ['tact']
+ filenames = ['*.tact']
+ url = "https://tact-lang.org"
+ version_added = '2.18'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'[.;(),\[\]{}]', Punctuation),
+ (r'\?|!!', Operator),
+ include('comments'),
+ include('import-in'),
+ include('struct-in'),
+ include('contract-or-trait-in'),
+ include('annotation-in'),
+ include('fun-declaration-in'),
+ include('const-declaration-in'),
+ include('statements'),
+ ],
+ 'import-in': [
+ (r'((?<=\.\.\.)|(?<![.$]))\b(import)\b(\s*)', bygroups(Punctuation, Keyword, Whitespace), 'import'),
+ ],
+ 'import': [
+ (r';', Punctuation, '#pop'),
+ include('comments'),
+ include('string-in'),
+ (r'\s+', Whitespace),
+ ],
+ 'struct-in': [
+ (r'((?<=\.\.\.)|(?<![.$]))\b(struct|message)\b', bygroups(Punctuation, Keyword), 'struct'),
+ ],
+ 'struct': [
+ include('comments'),
+ include('struct-header'),
+ include('struct-body-in'),
+ (r'\s+', Whitespace),
+ ],
+ 'struct-header': [
+ include('comments'),
+ (r'\b\w+', Name.Class),
+ (r'(\()((?:\b0[xX])[0-9a-fA-F][0-9a-fA-F_]*\b)(\))', bygroups(Punctuation, Number.Hex, Punctuation)),
+ (r'(\()((?:\b[0-9]+\b))(\))', bygroups(Punctuation, Number.Integer, Punctuation)),
+ ],
+ 'struct-body-in': [
+ (r'\{', Punctuation, 'struct-body'),
+ ],
+ 'struct-body': [
+ (r'\}', Punctuation, '#pop:2'),
+ include('comments'),
+ include('field-declaration-in'),
+ ],
+ 'contract-or-trait-in': [
+ (r'((?<=\.\.\.)|(?<![.$]))\b(contract|trait)\b', Keyword, 'contract-or-trait'),
+ ],
+ 'contract-or-trait': [
+ include('comments'),
+ (r'with', Keyword),
+ (r'\b\w+', Name.Class),
+ include('contract-or-trait-body-in'),
+ (r'\s+', Whitespace),
+ (r',', Punctuation),
+ ],
+ 'contract-or-trait-body-in': [
+ (r'\{', Punctuation, 'contract-or-trait-body'),
+ ],
+ 'contract-or-trait-body': [
+ (r'\}', Punctuation, '#pop:2'),
+ include('comments'),
+ include('init-declaration-in'),
+ include('receive-declaration-in'),
+ include('bounce-declaration-in'),
+ include('fun-declaration-in'),
+ include('const-declaration-in'),
+ include('field-declaration-in'),
+ (r'\s+', Whitespace),
+ ],
+ 'field-declaration-in': [
+ (r'\b\w+', Name.Property, 'field-declaration'),
+ ],
+ 'field-declaration': [
+ (r';', Punctuation, '#pop'),
+ include('comments'),
+ include('type-annotation-in'),
+ include('variable-init-in'),
+ ],
+ 'const-declaration-in': [
+ (r'(?=\b(?:(?:get|native|extends|mutates|virtual|override|inline|abstract)\s*)*const\b)', Keyword, 'const-declaration'),
+ ],
+ 'const-declaration': [
+ (r'(;)', Punctuation, '#pop'),
+ (r'const', Keyword),
+ (words(('get', 'native', 'extends', 'mutates', 'virtual', 'override', 'inline', 'abstract'), suffix=r'\b'), Keyword),
+ (r'\b\w+\b', Name.Constant),
+ include('comments'),
+ include('type-annotation-in'),
+ include('variable-init-in'),
+ (r'\s+', Whitespace),
+ ],
+ 'init-declaration-in': [
+ (r'(init)', Keyword, 'init-declaration')
+ ],
+ 'init-declaration': [
+ (r'(?<=\})', Punctuation, '#pop'),
+ include('comments'),
+ include('fun-arguments-in'),
+ include('block-declaration-in'),
+ (r'\s+', Whitespace),
+ ],
+ 'receive-declaration-in': [
+        (r'(receive|external)', Keyword, 'receive-declaration')
+ ],
+ 'receive-declaration': [
+ (r'(?<=\})', Punctuation, '#pop'),
+ include('comments'),
+ include('fun-arguments-in'),
+ include('block-declaration-in'),
+ ],
+ 'bounce-declaration-in': [
+ (r'(bounced)', Keyword, 'bounce-declaration')
+ ],
+ 'bounce-declaration': [
+ (r'(?<=\})', Punctuation, '#pop'),
+ include('comments'),
+ include('fun-arguments-in'),
+ include('block-declaration-in'),
+ ],
+ 'fun-declaration-in': [
+ (r'(?=\b(?:(?:get|native|extends|mutates|virtual|override|inline|abstract)\s*)*fun\b)', Keyword, 'fun-declaration')
+ ],
+ 'fun-declaration': [
+ (r'(?<=\}|\;)', Punctuation, '#pop'),
+ (r'fun', Keyword),
+ (r'\b(get|native|extends|mutates|virtual|override|inline|abstract)\b', Keyword),
+ (r'\b[\w]+', Name.Function),
+ include('fun-declaration-body'),
+ (r'[,;]', Punctuation),
+ ],
+ 'fun-declaration-body': [
+ include('comments'),
+ include('fun-arguments-in'),
+ include('type-annotation-in'),
+ include('block-declaration-in'),
+ (r'\s+', Whitespace),
+ ],
+ 'fun-arguments-in': [
+ (r'\(', Punctuation, 'fun-arguments'),
+ ],
+ 'fun-arguments': [
+ (r'\)', Punctuation, '#pop'),
+ include('comments'),
+ include('string-in'),
+ include('type-annotation-in'),
+ (r'(self)|(\b[\w]+\b)', bygroups(Name.Variable.Instance, Name.Variable)),
+ (r',', Punctuation),
+ (r'\s+', Whitespace),
+ ],
+ 'block-declaration-in': [
+ (r'\{', Punctuation, 'block-declaration')
+ ],
+ 'block-declaration': [
+ (r'\}', Punctuation, '#pop'),
+ include('statements'),
+ ],
+ 'statements': [
+ include('comments'),
+ include('block-declaration-in'),
+ include('expressions'),
+ ],
+ 'annotation-in': [
+ (r'(@)(\w+)(\()', bygroups(Keyword.Pseudo, Keyword, Punctuation), 'annotation')
+ ],
+ 'annotation': [
+ (r'\)', Punctuation, '#pop'),
+ include('annotation-argument'),
+ (r'\s+', Whitespace),
+ ],
+ 'annotation-argument': [
+ (r'\w+', Name.Function.Magic),
+ ],
+ 'expressions': [
+ include('comments'),
+ include('type-annotation-in'),
+ include('keywords'),
+ include('numeric'),
+ include('string-in'),
+ include('variable'),
+ include('function-call'),
+ include('struct-init-in'),
+ ],
+ 'struct-init-in': [
+ (r'(\b\w+)(\s*)(\{)', bygroups(Name.Class, Whitespace, Punctuation), 'struct-init')
+ ],
+ 'struct-init': [
+ (r'(\})', Punctuation, '#pop'),
+ include('comments'),
+ include('struct-property-in'),
+ (r'\s+', Whitespace),
+ (r',', Punctuation),
+ ],
+ 'struct-property-in': [
+ (r'(\b[\w]+)(\s*)(:)', bygroups(Name.Property, Whitespace, Punctuation), 'struct-property')
+ ],
+ 'struct-property': [
+ (r'(?=\}|\,)', Punctuation, '#pop'),
+ include('comments'),
+ include('expressions'),
+ (r'\s+', Whitespace),
+ ],
+ 'variable-init-in': [
+ (r'(=)', Operator, 'variable-init')
+ ],
+ 'variable-init': [
+ (r'(?=\}|\{|\,|\;)',Punctuation, '#pop'),
+ include('comments'),
+ include('expressions'),
+ (r'\s+', Whitespace),
+ ],
+ 'type-annotation-in': [
+ (r'(:)(\s+)', bygroups(Punctuation, Whitespace), 'type-annotation')
+ ],
+ 'type-annotation': [
+ (r'(?=\{|\;|\=|\,|\))', Punctuation, '#pop'),
+ include('comments'),
+ include('type-as-in'),
+ include('type-generic-in'),
+ (r'\?', Operator),
+ (r'\b\w+', Keyword.Type),
+ (r'\s+', Whitespace),
+ ],
+ 'type-generic-in': [
+ (r'<', Punctuation, 'type-generic'),
+ ],
+ 'type-generic': [
+ (r'>', Punctuation, '#pop'),
+ include('comments'),
+ include('type-as-in'),
+ (r'\b\w+', Keyword.Type),
+ (r'\s+', Whitespace),
+ (r',', Punctuation),
+ ],
+ 'type-as-in': [
+ (r'\b(as)(\s+)', bygroups(Keyword, Whitespace), 'type-as'),
+ ],
+ 'type-as': [
+ (r'(?=\{|\;|\=|\,|\)|\>)', Punctuation, '#pop'),
+ include('comments'),
+ (r'\b\w+', Keyword.Type),
+ (r'\s+', Whitespace),
+ ],
+ 'keywords': [
+ (words(('if', 'else', 'while', 'do', 'until', 'repeat', 'return', 'extends', 'mutates', 'virtual', 'override', 'inline', 'native', 'let', 'const', 'fun', 'self', 'is', 'initOf', 'map', 'bounced', 'get', 'as'), prefix=r'\b', suffix=r'\b'), Keyword),
+ (r'(<=>|>=|<=|!=|==|\^>>|~>>|>>|<<|\/%|\^%|~%|\^\/|~\/|\+=|-=|\*=|\/=|~\/=|\^\/=|%=|\^%=|<<=|>>=|~>>=|\^>>=|&=|\|=|\^=|\^|=|~|\/|%|-|\*|\+|>|<|&|\||:|\?)', Operator),
+ (words(('true', 'false'), prefix=r'\b', suffix=r'\b'), Keyword.Constant),
+ ],
+ 'string-in': [
+ (r'"', String, 'string'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\.', String.Escape),
+ (r'[^\\"]+', String.Double),
+ ],
+ 'numeric': [
+ (r'(?:\b0[xX])[0-9a-fA-F][0-9a-fA-F_]*\b', Number.Hex),
+ (r'(?:\b[0-9]+\b)', Number.Integer),
+ ],
+ 'comments': [
+ (r'//.*', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comments-multiline'),
+ ],
+ 'comments-multiline': [
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[^*]+', Comment.Multiline),
+ (r'[*]', Comment.Multiline),
+ ],
+ 'variable': [
+ (r'\b\w+\b(?!\s*\()(?!\s*\{)', Name.Variable)
+ ],
+ 'function-call': [
+ (r'\b\w+\b(?=\s*\()(?!\s*\{)', Name.Function)
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tal.py b/contrib/python/Pygments/py3/pygments/lexers/tal.py
index 170b781a93..86249b85d2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tal.py
@@ -6,7 +6,7 @@
.. versionadded:: 2.12
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,15 +19,15 @@ __all__ = ['TalLexer']
class TalLexer(RegexLexer):
"""
- For `Uxntal <https://wiki.xxiivv.com/site/uxntal.html>`_ source code.
-
- .. versionadded:: 2.12
+ For Uxntal source code.
"""
name = 'Tal'
aliases = ['tal', 'uxntal']
filenames = ['*.tal']
mimetypes = ['text/x-uxntal']
+ url = 'https://wiki.xxiivv.com/site/uxntal.html'
+ version_added = '2.12'
instructions = [
'BRK', 'LIT', 'INC', 'POP', 'DUP', 'NIP', 'SWP', 'OVR', 'ROT',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tcl.py b/contrib/python/Pygments/py3/pygments/lexers/tcl.py
index f444223aa1..436438c177 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tcl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tcl.py
@@ -4,7 +4,7 @@
Lexers for Tcl and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,8 +19,6 @@ __all__ = ['TclLexer']
class TclLexer(RegexLexer):
"""
For Tcl source code.
-
- .. versionadded:: 0.10
"""
keyword_cmds_re = words((
@@ -48,6 +46,7 @@ class TclLexer(RegexLexer):
aliases = ['tcl']
filenames = ['*.tcl', '*.rvt']
mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
+ version_added = '0.10'
def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
return [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/teal.py b/contrib/python/Pygments/py3/pygments/lexers/teal.py
index e488e0931a..1862fba245 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/teal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/teal.py
@@ -4,7 +4,7 @@
Lexer for TEAL.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,13 +21,12 @@ class TealLexer(RegexLexer):
For more information about the grammar, see:
https://github.com/algorand/go-algorand/blob/master/data/transactions/logic/assembler.go
-
- .. versionadded:: 2.9
"""
name = 'teal'
url = 'https://developer.algorand.org/docs/reference/teal/specification/'
aliases = ['teal']
filenames = ['*.teal']
+ version_added = '2.9'
keywords = words({
'Sender', 'Fee', 'FirstValid', 'FirstValidTime', 'LastValid', 'Note',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/templates.py b/contrib/python/Pygments/py3/pygments/lexers/templates.py
index 5f46a47a81..4084fbd503 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/templates.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/templates.py
@@ -4,7 +4,7 @@
Lexers for various template engines' markup.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -62,6 +62,7 @@ class ErbLexer(Lexer):
url = 'https://github.com/ruby/erb'
aliases = ['erb']
mimetypes = ['application/x-ruby-templating']
+ version_added = ''
_block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
@@ -156,6 +157,7 @@ class SmartyLexer(RegexLexer):
aliases = ['smarty']
filenames = ['*.tpl']
mimetypes = ['application/x-smarty']
+ version_added = ''
flags = re.MULTILINE | re.DOTALL
@@ -212,6 +214,7 @@ class VelocityLexer(RegexLexer):
url = 'https://velocity.apache.org/'
aliases = ['velocity']
filenames = ['*.vm', '*.fhtml']
+ version_added = ''
flags = re.MULTILINE | re.DOTALL
@@ -292,8 +295,10 @@ class VelocityHtmlLexer(DelegatingLexer):
name = 'HTML+Velocity'
aliases = ['html+velocity']
+ version_added = ''
alias_filenames = ['*.html', '*.fhtml']
mimetypes = ['text/html+velocity']
+ url = 'https://velocity.apache.org/'
def __init__(self, **options):
super().__init__(HtmlLexer, VelocityLexer, **options)
@@ -308,8 +313,10 @@ class VelocityXmlLexer(DelegatingLexer):
name = 'XML+Velocity'
aliases = ['xml+velocity']
+ version_added = ''
alias_filenames = ['*.xml', '*.vm']
mimetypes = ['application/xml+velocity']
+ url = 'https://velocity.apache.org/'
def __init__(self, **options):
super().__init__(XmlLexer, VelocityLexer, **options)
@@ -323,8 +330,8 @@ class VelocityXmlLexer(DelegatingLexer):
class DjangoLexer(RegexLexer):
"""
- Generic `django <http://www.djangoproject.com/documentation/templates/>`_
- and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.
+ Generic `Django <https://www.djangoproject.com/documentation/templates/>`_
+ and `Jinja <https://jinja.palletsprojects.com>`_ template lexer.
It just highlights django/jinja code between the preprocessor directives,
other data is left untouched by the lexer.
@@ -333,6 +340,8 @@ class DjangoLexer(RegexLexer):
name = 'Django/Jinja'
aliases = ['django', 'jinja']
mimetypes = ['application/x-django-templating', 'application/x-jinja']
+ url = 'https://www.djangoproject.com/documentation/templates'
+ version_added = ''
flags = re.M | re.S
@@ -408,8 +417,6 @@ class MyghtyLexer(RegexLexer):
"""
Generic myghty templates lexer. Code that isn't Myghty
markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.6
"""
name = 'Myghty'
@@ -417,6 +424,7 @@ class MyghtyLexer(RegexLexer):
aliases = ['myghty']
filenames = ['*.myt', 'autodelegate']
mimetypes = ['application/x-myghty']
+ version_added = '0.6'
tokens = {
'root': [
@@ -455,13 +463,13 @@ class MyghtyHtmlLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `HtmlLexer`.
-
- .. versionadded:: 0.6
"""
name = 'HTML+Myghty'
aliases = ['html+myghty']
mimetypes = ['text/html+myghty']
+ url = 'http://www.myghty.org/'
+ version_added = '0.6'
def __init__(self, **options):
super().__init__(HtmlLexer, MyghtyLexer, **options)
@@ -471,13 +479,13 @@ class MyghtyXmlLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `XmlLexer`.
-
- .. versionadded:: 0.6
"""
name = 'XML+Myghty'
aliases = ['xml+myghty']
mimetypes = ['application/xml+myghty']
+ url = 'http://www.myghty.org/'
+ version_added = '0.6'
def __init__(self, **options):
super().__init__(XmlLexer, MyghtyLexer, **options)
@@ -487,8 +495,6 @@ class MyghtyJavascriptLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `JavascriptLexer`.
-
- .. versionadded:: 0.6
"""
name = 'JavaScript+Myghty'
@@ -496,6 +502,8 @@ class MyghtyJavascriptLexer(DelegatingLexer):
mimetypes = ['application/x-javascript+myghty',
'text/x-javascript+myghty',
'text/javascript+mygthy']
+ url = 'http://www.myghty.org/'
+ version_added = '0.6'
def __init__(self, **options):
super().__init__(JavascriptLexer, MyghtyLexer, **options)
@@ -505,13 +513,13 @@ class MyghtyCssLexer(DelegatingLexer):
"""
Subclass of the `MyghtyLexer` that highlights unlexed data
with the `CssLexer`.
-
- .. versionadded:: 0.6
"""
name = 'CSS+Myghty'
aliases = ['css+myghty']
mimetypes = ['text/css+myghty']
+ url = 'http://www.myghty.org/'
+ version_added = '0.6'
def __init__(self, **options):
super().__init__(CssLexer, MyghtyLexer, **options)
@@ -521,14 +529,13 @@ class MasonLexer(RegexLexer):
"""
Generic mason templates lexer. Stolen from Myghty lexer. Code that isn't
Mason markup is HTML.
-
- .. versionadded:: 1.4
"""
name = 'Mason'
url = 'http://www.masonhq.com/'
aliases = ['mason']
filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
mimetypes = ['application/x-mason']
+ version_added = '1.4'
tokens = {
'root': [
@@ -576,8 +583,6 @@ class MakoLexer(RegexLexer):
"""
Generic mako templates lexer. Code that isn't Mako
markup is yielded as `Token.Other`.
-
- .. versionadded:: 0.7
"""
name = 'Mako'
@@ -585,6 +590,7 @@ class MakoLexer(RegexLexer):
aliases = ['mako']
filenames = ['*.mao']
mimetypes = ['application/x-mako']
+ version_added = '0.7'
tokens = {
'root': [
@@ -643,13 +649,13 @@ class MakoHtmlLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `HtmlLexer`.
-
- .. versionadded:: 0.7
"""
name = 'HTML+Mako'
aliases = ['html+mako']
mimetypes = ['text/html+mako']
+ url = 'http://www.makotemplates.org/'
+ version_added = '0.7'
def __init__(self, **options):
super().__init__(HtmlLexer, MakoLexer, **options)
@@ -659,13 +665,13 @@ class MakoXmlLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `XmlLexer`.
-
- .. versionadded:: 0.7
"""
name = 'XML+Mako'
aliases = ['xml+mako']
mimetypes = ['application/xml+mako']
+ url = 'http://www.makotemplates.org/'
+ version_added = '0.7'
def __init__(self, **options):
super().__init__(XmlLexer, MakoLexer, **options)
@@ -675,8 +681,6 @@ class MakoJavascriptLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `JavascriptLexer`.
-
- .. versionadded:: 0.7
"""
name = 'JavaScript+Mako'
@@ -684,6 +688,8 @@ class MakoJavascriptLexer(DelegatingLexer):
mimetypes = ['application/x-javascript+mako',
'text/x-javascript+mako',
'text/javascript+mako']
+ url = 'http://www.makotemplates.org/'
+ version_added = '0.7'
def __init__(self, **options):
super().__init__(JavascriptLexer, MakoLexer, **options)
@@ -693,13 +699,13 @@ class MakoCssLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `CssLexer`.
-
- .. versionadded:: 0.7
"""
name = 'CSS+Mako'
aliases = ['css+mako']
mimetypes = ['text/css+mako']
+ url = 'http://www.makotemplates.org/'
+ version_added = '0.7'
def __init__(self, **options):
super().__init__(CssLexer, MakoLexer, **options)
@@ -734,6 +740,7 @@ class CheetahLexer(RegexLexer):
aliases = ['cheetah', 'spitfire']
filenames = ['*.tmpl', '*.spt']
mimetypes = ['application/x-cheetah', 'application/x-spitfire']
+ version_added = ''
tokens = {
'root': [
@@ -773,6 +780,8 @@ class CheetahHtmlLexer(DelegatingLexer):
name = 'HTML+Cheetah'
aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
+ url = 'http://www.cheetahtemplate.org/'
+ version_added = ''
def __init__(self, **options):
super().__init__(HtmlLexer, CheetahLexer, **options)
@@ -787,6 +796,8 @@ class CheetahXmlLexer(DelegatingLexer):
name = 'XML+Cheetah'
aliases = ['xml+cheetah', 'xml+spitfire']
mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
+ url = 'http://www.cheetahtemplate.org/'
+ version_added = ''
def __init__(self, **options):
super().__init__(XmlLexer, CheetahLexer, **options)
@@ -807,6 +818,8 @@ class CheetahJavascriptLexer(DelegatingLexer):
'application/x-javascript+spitfire',
'text/x-javascript+spitfire',
'text/javascript+spitfire']
+ url = 'http://www.cheetahtemplate.org/'
+ version_added = ''
def __init__(self, **options):
super().__init__(JavascriptLexer, CheetahLexer, **options)
@@ -818,9 +831,10 @@ class GenshiTextLexer(RegexLexer):
"""
name = 'Genshi Text'
- url = 'http://genshi.edgewall.org/'
+ url = 'https://genshi.edgewall.org/'
aliases = ['genshitext']
mimetypes = ['application/x-genshi-text', 'text/x-genshi']
+ version_added = ''
tokens = {
'root': [
@@ -913,14 +927,16 @@ class GenshiMarkupLexer(RegexLexer):
class HtmlGenshiLexer(DelegatingLexer):
"""
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
+ A lexer that highlights `genshi <https://genshi.edgewall.org/>`_ and
`kid <http://kid-templating.org/>`_ kid HTML templates.
"""
name = 'HTML+Genshi'
aliases = ['html+genshi', 'html+kid']
+ version_added = ''
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+genshi']
+ url = 'https://genshi.edgewall.org/'
def __init__(self, **options):
super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
@@ -936,15 +952,17 @@ class HtmlGenshiLexer(DelegatingLexer):
class GenshiLexer(DelegatingLexer):
"""
- A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
+ A lexer that highlights `genshi <https://genshi.edgewall.org/>`_ and
`kid <http://kid-templating.org/>`_ kid XML templates.
"""
name = 'Genshi'
aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
filenames = ['*.kid']
+ version_added = ''
alias_filenames = ['*.xml']
mimetypes = ['application/x-genshi', 'application/x-kid']
+ url = 'https://genshi.edgewall.org/'
def __init__(self, **options):
super().__init__(XmlLexer, GenshiMarkupLexer, **options)
@@ -966,10 +984,12 @@ class JavascriptGenshiLexer(DelegatingLexer):
name = 'JavaScript+Genshi Text'
aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
'javascript+genshi']
+ version_added = ''
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+genshi',
'text/x-javascript+genshi',
'text/javascript+genshi']
+ url = 'https://genshi.edgewall.org'
def __init__(self, **options):
super().__init__(JavascriptLexer, GenshiTextLexer, **options)
@@ -985,8 +1005,10 @@ class CssGenshiLexer(DelegatingLexer):
name = 'CSS+Genshi Text'
aliases = ['css+genshitext', 'css+genshi']
+ version_added = ''
alias_filenames = ['*.css']
mimetypes = ['text/css+genshi']
+ url = 'https://genshi.edgewall.org'
def __init__(self, **options):
super().__init__(CssLexer, GenshiTextLexer, **options)
@@ -1006,8 +1028,11 @@ class RhtmlLexer(DelegatingLexer):
name = 'RHTML'
aliases = ['rhtml', 'html+erb', 'html+ruby']
filenames = ['*.rhtml']
+ version_added = ''
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+ruby']
+ url = 'https://github.com/ruby/erb'
+
def __init__(self, **options):
super().__init__(HtmlLexer, ErbLexer, **options)
@@ -1028,8 +1053,10 @@ class XmlErbLexer(DelegatingLexer):
name = 'XML+Ruby'
aliases = ['xml+ruby', 'xml+erb']
+ version_added = ''
alias_filenames = ['*.xml']
mimetypes = ['application/xml+ruby']
+ url = 'https://github.com/ruby/erb'
def __init__(self, **options):
super().__init__(XmlLexer, ErbLexer, **options)
@@ -1048,8 +1075,10 @@ class CssErbLexer(DelegatingLexer):
name = 'CSS+Ruby'
aliases = ['css+ruby', 'css+erb']
+ version_added = ''
alias_filenames = ['*.css']
mimetypes = ['text/css+ruby']
+ url = 'https://github.com/ruby/erb'
def __init__(self, **options):
super().__init__(CssLexer, ErbLexer, **options)
@@ -1066,10 +1095,12 @@ class JavascriptErbLexer(DelegatingLexer):
name = 'JavaScript+Ruby'
aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
+ version_added = ''
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+ruby',
'text/x-javascript+ruby',
'text/javascript+ruby']
+ url = 'https://github.com/ruby/erb'
def __init__(self, **options):
super().__init__(JavascriptLexer, ErbLexer, **options)
@@ -1088,11 +1119,14 @@ class HtmlPhpLexer(DelegatingLexer):
name = 'HTML+PHP'
aliases = ['html+php']
filenames = ['*.phtml']
+ version_added = ''
alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
'*.php[345]']
mimetypes = ['application/x-php',
'application/x-httpd-php', 'application/x-httpd-php3',
'application/x-httpd-php4', 'application/x-httpd-php5']
+ url = 'https://www.php.net'
+
def __init__(self, **options):
super().__init__(HtmlLexer, PhpLexer, **options)
@@ -1111,8 +1145,10 @@ class XmlPhpLexer(DelegatingLexer):
name = 'XML+PHP'
aliases = ['xml+php']
+ version_added = ''
alias_filenames = ['*.xml', '*.php', '*.php[345]']
mimetypes = ['application/xml+php']
+ url = 'https://www.php.net'
def __init__(self, **options):
super().__init__(XmlLexer, PhpLexer, **options)
@@ -1131,8 +1167,10 @@ class CssPhpLexer(DelegatingLexer):
name = 'CSS+PHP'
aliases = ['css+php']
+ version_added = ''
alias_filenames = ['*.css']
mimetypes = ['text/css+php']
+ url = 'https://www.php.net'
def __init__(self, **options):
super().__init__(CssLexer, PhpLexer, **options)
@@ -1149,10 +1187,12 @@ class JavascriptPhpLexer(DelegatingLexer):
name = 'JavaScript+PHP'
aliases = ['javascript+php', 'js+php']
+ version_added = ''
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+php',
'text/x-javascript+php',
'text/javascript+php']
+ url = 'https://www.php.net'
def __init__(self, **options):
super().__init__(JavascriptLexer, PhpLexer, **options)
@@ -1171,8 +1211,10 @@ class HtmlSmartyLexer(DelegatingLexer):
name = 'HTML+Smarty'
aliases = ['html+smarty']
+ version_added = ''
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
mimetypes = ['text/html+smarty']
+ url = 'https://www.smarty.net/'
def __init__(self, **options):
super().__init__(HtmlLexer, SmartyLexer, **options)
@@ -1192,8 +1234,10 @@ class XmlSmartyLexer(DelegatingLexer):
name = 'XML+Smarty'
aliases = ['xml+smarty']
+ version_added = ''
alias_filenames = ['*.xml', '*.tpl']
mimetypes = ['application/xml+smarty']
+ url = 'https://www.smarty.net/'
def __init__(self, **options):
super().__init__(XmlLexer, SmartyLexer, **options)
@@ -1213,8 +1257,10 @@ class CssSmartyLexer(DelegatingLexer):
name = 'CSS+Smarty'
aliases = ['css+smarty']
+ version_added = ''
alias_filenames = ['*.css', '*.tpl']
mimetypes = ['text/css+smarty']
+ url = 'https://www.smarty.net/'
def __init__(self, **options):
super().__init__(CssLexer, SmartyLexer, **options)
@@ -1231,10 +1277,12 @@ class JavascriptSmartyLexer(DelegatingLexer):
name = 'JavaScript+Smarty'
aliases = ['javascript+smarty', 'js+smarty']
+ version_added = ''
alias_filenames = ['*.js', '*.tpl']
mimetypes = ['application/x-javascript+smarty',
'text/x-javascript+smarty',
'text/javascript+smarty']
+ url = 'https://www.smarty.net/'
def __init__(self, **options):
super().__init__(JavascriptLexer, SmartyLexer, **options)
@@ -1254,8 +1302,10 @@ class HtmlDjangoLexer(DelegatingLexer):
name = 'HTML+Django/Jinja'
aliases = ['html+django', 'html+jinja', 'htmldjango']
filenames = ['*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2']
+ version_added = ''
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+django', 'text/html+jinja']
+ url = 'https://www.djangoproject.com/documentation/templates'
def __init__(self, **options):
super().__init__(HtmlLexer, DjangoLexer, **options)
@@ -1276,8 +1326,10 @@ class XmlDjangoLexer(DelegatingLexer):
name = 'XML+Django/Jinja'
aliases = ['xml+django', 'xml+jinja']
filenames = ['*.xml.j2', '*.xml.jinja2']
+ version_added = ''
alias_filenames = ['*.xml']
mimetypes = ['application/xml+django', 'application/xml+jinja']
+ url = 'https://www.djangoproject.com/documentation/templates'
def __init__(self, **options):
super().__init__(XmlLexer, DjangoLexer, **options)
@@ -1298,8 +1350,10 @@ class CssDjangoLexer(DelegatingLexer):
name = 'CSS+Django/Jinja'
aliases = ['css+django', 'css+jinja']
filenames = ['*.css.j2', '*.css.jinja2']
+ version_added = ''
alias_filenames = ['*.css']
mimetypes = ['text/css+django', 'text/css+jinja']
+ url = 'https://www.djangoproject.com/documentation/templates'
def __init__(self, **options):
super().__init__(CssLexer, DjangoLexer, **options)
@@ -1318,6 +1372,7 @@ class JavascriptDjangoLexer(DelegatingLexer):
aliases = ['javascript+django', 'js+django',
'javascript+jinja', 'js+jinja']
filenames = ['*.js.j2', '*.js.jinja2']
+ version_added = ''
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+django',
'application/x-javascript+jinja',
@@ -1325,6 +1380,7 @@ class JavascriptDjangoLexer(DelegatingLexer):
'text/x-javascript+jinja',
'text/javascript+django',
'text/javascript+jinja']
+ url = 'https://www.djangoproject.com/documentation/templates'
def __init__(self, **options):
super().__init__(JavascriptLexer, DjangoLexer, **options)
@@ -1361,13 +1417,13 @@ class JspRootLexer(RegexLexer):
class JspLexer(DelegatingLexer):
"""
Lexer for Java Server Pages.
-
- .. versionadded:: 0.7
"""
name = 'Java Server Page'
aliases = ['jsp']
filenames = ['*.jsp']
mimetypes = ['application/x-jsp']
+ url = 'https://projects.eclipse.org/projects/ee4j.jsp'
+ version_added = '0.7'
def __init__(self, **options):
super().__init__(XmlLexer, JspRootLexer, **options)
@@ -1384,13 +1440,13 @@ class JspLexer(DelegatingLexer):
class EvoqueLexer(RegexLexer):
"""
For files using the Evoque templating system.
-
- .. versionadded:: 1.1
"""
name = 'Evoque'
aliases = ['evoque']
filenames = ['*.evoque']
mimetypes = ['application/x-evoque']
+ url = 'https://gizmojo.org/templating'
+ version_added = '1.1'
flags = re.DOTALL
@@ -1442,13 +1498,13 @@ class EvoqueHtmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`HtmlLexer`.
-
- .. versionadded:: 1.1
"""
name = 'HTML+Evoque'
aliases = ['html+evoque']
filenames = ['*.html']
mimetypes = ['text/html+evoque']
+ url = 'https://gizmojo.org/templating'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(HtmlLexer, EvoqueLexer, **options)
@@ -1461,13 +1517,13 @@ class EvoqueXmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`XmlLexer`.
-
- .. versionadded:: 1.1
"""
name = 'XML+Evoque'
aliases = ['xml+evoque']
filenames = ['*.xml']
mimetypes = ['application/xml+evoque']
+ url = 'https://gizmojo.org/templating'
+ version_added = '1.1'
def __init__(self, **options):
super().__init__(XmlLexer, EvoqueLexer, **options)
@@ -1484,6 +1540,9 @@ class ColdfusionLexer(RegexLexer):
aliases = ['cfs']
filenames = []
mimetypes = []
+ url = 'https://www.adobe.com/products/coldfusion-family.html'
+ version_added = ''
+
flags = re.IGNORECASE
tokens = {
@@ -1532,6 +1591,7 @@ class ColdfusionMarkupLexer(RegexLexer):
aliases = ['cf']
filenames = []
mimetypes = []
+ url = 'https://www.adobe.com/products/coldfusion-family.html'
tokens = {
'root': [
@@ -1578,6 +1638,8 @@ class ColdfusionHtmlLexer(DelegatingLexer):
aliases = ['cfm']
filenames = ['*.cfm', '*.cfml']
mimetypes = ['application/x-coldfusion']
+ url = 'https://www.adobe.com/products/coldfusion-family.html'
+ version_added = ''
def __init__(self, **options):
super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
@@ -1586,13 +1648,13 @@ class ColdfusionHtmlLexer(DelegatingLexer):
class ColdfusionCFCLexer(DelegatingLexer):
"""
Coldfusion markup/script components
-
- .. versionadded:: 2.0
"""
name = 'Coldfusion CFC'
aliases = ['cfc']
filenames = ['*.cfc']
mimetypes = []
+ url = 'https://www.adobe.com/products/coldfusion-family.html'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
@@ -1601,13 +1663,13 @@ class ColdfusionCFCLexer(DelegatingLexer):
class SspLexer(DelegatingLexer):
"""
Lexer for Scalate Server Pages.
-
- .. versionadded:: 1.4
"""
name = 'Scalate Server Page'
aliases = ['ssp']
filenames = ['*.ssp']
mimetypes = ['application/x-ssp']
+ url = 'https://scalate.github.io/scalate/'
+ version_added = '1.4'
def __init__(self, **options):
super().__init__(XmlLexer, JspRootLexer, **options)
@@ -1647,14 +1709,14 @@ class TeaTemplateRootLexer(RegexLexer):
class TeaTemplateLexer(DelegatingLexer):
"""
- Lexer for `Tea Templates <http://teatrove.org/>`_.
-
- .. versionadded:: 1.5
+ Lexer for Tea Templates.
"""
name = 'Tea'
aliases = ['tea']
filenames = ['*.tea']
mimetypes = ['text/x-tea']
+ url = 'https://github.com/teatrove/teatrove'
+ version_added = '1.5'
def __init__(self, **options):
super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
@@ -1674,17 +1736,17 @@ class LassoHtmlLexer(DelegatingLexer):
`HtmlLexer`.
Nested JavaScript and CSS is also highlighted.
-
- .. versionadded:: 1.6
"""
name = 'HTML+Lasso'
aliases = ['html+lasso']
+ version_added = '1.6'
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['text/html+lasso',
'application/x-httpd-lasso',
'application/x-httpd-lasso[89]']
+ url = 'https://www.lassosoft.com'
def __init__(self, **options):
super().__init__(HtmlLexer, LassoLexer, **options)
@@ -1700,15 +1762,15 @@ class LassoXmlLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`XmlLexer`.
-
- .. versionadded:: 1.6
"""
name = 'XML+Lasso'
aliases = ['xml+lasso']
+ version_added = '1.6'
alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['application/xml+lasso']
+ url = 'https://www.lassosoft.com'
def __init__(self, **options):
super().__init__(XmlLexer, LassoLexer, **options)
@@ -1724,14 +1786,14 @@ class LassoCssLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`CssLexer`.
-
- .. versionadded:: 1.6
"""
name = 'CSS+Lasso'
aliases = ['css+lasso']
+ version_added = '1.6'
alias_filenames = ['*.css']
mimetypes = ['text/css+lasso']
+ url = 'https://www.lassosoft.com'
def __init__(self, **options):
options['requiredelimiters'] = True
@@ -1750,16 +1812,16 @@ class LassoJavascriptLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`JavascriptLexer`.
-
- .. versionadded:: 1.6
"""
name = 'JavaScript+Lasso'
aliases = ['javascript+lasso', 'js+lasso']
+ version_added = '1.6'
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+lasso',
'text/x-javascript+lasso',
'text/javascript+lasso']
+ url = 'https://www.lassosoft.com'
def __init__(self, **options):
options['requiredelimiters'] = True
@@ -1776,13 +1838,12 @@ class HandlebarsLexer(RegexLexer):
Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
Everything else is left for a delegating lexer.
-
- .. versionadded:: 2.0
"""
name = "Handlebars"
url = 'https://handlebarsjs.com/'
aliases = ['handlebars']
+ version_added = '2.0'
tokens = {
'root': [
@@ -1851,14 +1912,14 @@ class HandlebarsHtmlLexer(DelegatingLexer):
"""
Subclass of the `HandlebarsLexer` that highlights unlexed data with the
`HtmlLexer`.
-
- .. versionadded:: 2.0
"""
name = "HTML+Handlebars"
aliases = ["html+handlebars"]
filenames = ['*.handlebars', '*.hbs']
mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
+ url = 'https://handlebarsjs.com/'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(HtmlLexer, HandlebarsLexer, **options)
@@ -1870,14 +1931,14 @@ class YamlJinjaLexer(DelegatingLexer):
`YamlLexer`.
Commonly used in Saltstack salt states.
-
- .. versionadded:: 2.0
"""
name = 'YAML+Jinja'
aliases = ['yaml+jinja', 'salt', 'sls']
filenames = ['*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2']
mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
+ url = 'https://jinja.palletsprojects.com'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(YamlLexer, DjangoLexer, **options)
@@ -1886,13 +1947,12 @@ class YamlJinjaLexer(DelegatingLexer):
class LiquidLexer(RegexLexer):
"""
Lexer for Liquid templates.
-
- .. versionadded:: 2.0
"""
name = 'liquid'
url = 'https://www.rubydoc.info/github/Shopify/liquid'
aliases = ['liquid']
filenames = ['*.liquid']
+ version_added = '2.0'
tokens = {
'root': [
@@ -2095,13 +2155,13 @@ class TwigLexer(RegexLexer):
It just highlights Twig code between the preprocessor directives,
other data is left untouched by the lexer.
-
- .. versionadded:: 2.0
"""
name = 'Twig'
aliases = ['twig']
mimetypes = ['application/x-twig']
+ url = 'https://twig.symfony.com'
+ version_added = '2.0'
flags = re.M | re.S
@@ -2130,7 +2190,7 @@ class TwigLexer(RegexLexer):
Other, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# filter blocks
- (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
+ (rf'(\{{%)(-?\s*)(filter)(\s+)({_ident_inner})',
bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
'tag'),
(r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
@@ -2138,9 +2198,9 @@ class TwigLexer(RegexLexer):
(r'\{', Other),
],
'varnames': [
- (r'(\|)(\s*)(%s)' % _ident_inner,
+ (rf'(\|)(\s*)({_ident_inner})',
bygroups(Operator, Text, Name.Function)),
- (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
+ (rf'(is)(\s+)(not)?(\s*)({_ident_inner})',
bygroups(Keyword, Text, Keyword, Text, Name.Function)),
(r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
(r'(in|not|and|b-and|or|b-or|b-xor|is'
@@ -2176,14 +2236,14 @@ class TwigHtmlLexer(DelegatingLexer):
"""
Subclass of the `TwigLexer` that highlights unlexed data with the
`HtmlLexer`.
-
- .. versionadded:: 2.0
"""
name = "HTML+Twig"
aliases = ["html+twig"]
filenames = ['*.twig']
mimetypes = ['text/html+twig']
+ url = 'https://twig.symfony.com'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(HtmlLexer, TwigLexer, **options)
@@ -2196,13 +2256,12 @@ class Angular2Lexer(RegexLexer):
Highlights only the Angular template tags (stuff between `{{` and `}}` and
special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
Everything else is left for a delegating lexer.
-
- .. versionadded:: 2.1
"""
name = "Angular2"
url = 'https://angular.io/guide/template-syntax'
aliases = ['ng2']
+ version_added = '2.1'
tokens = {
'root': [
@@ -2256,13 +2315,13 @@ class Angular2HtmlLexer(DelegatingLexer):
"""
Subclass of the `Angular2Lexer` that highlights unlexed data with the
`HtmlLexer`.
-
- .. versionadded:: 2.0
"""
name = "HTML + Angular2"
aliases = ["html+ng2"]
filenames = ['*.ng2']
+ url = 'https://angular.io/guide/template-syntax'
+ version_added = '2.0'
def __init__(self, **options):
super().__init__(HtmlLexer, Angular2Lexer, **options)
@@ -2271,13 +2330,13 @@ class Angular2HtmlLexer(DelegatingLexer):
class SqlJinjaLexer(DelegatingLexer):
"""
Templated SQL lexer.
-
- .. versionadded:: 2.13
"""
name = 'SQL+Jinja'
aliases = ['sql+jinja']
filenames = ['*.sql', '*.sql.j2', '*.sql.jinja2']
+ url = 'https://jinja.palletsprojects.com'
+ version_added = '2.13'
def __init__(self, **options):
super().__init__(SqlLexer, DjangoLexer, **options)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/teraterm.py b/contrib/python/Pygments/py3/pygments/lexers/teraterm.py
index 2c0d3b27ab..9a88337bf1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/teraterm.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/teraterm.py
@@ -4,7 +4,7 @@
Lexer for Tera Term macro files.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,13 @@ __all__ = ['TeraTermLexer']
class TeraTermLexer(RegexLexer):
"""
For Tera Term macro source code.
-
- .. versionadded:: 2.4
"""
name = 'Tera Term macro'
url = 'https://ttssh2.osdn.jp/'
aliases = ['teratermmacro', 'teraterm', 'ttl']
filenames = ['*.ttl']
mimetypes = ['text/x-teratermmacro']
+ version_added = '2.4'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/testing.py b/contrib/python/Pygments/py3/pygments/lexers/testing.py
index dec3a15d03..7df7fa7806 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/testing.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/testing.py
@@ -4,7 +4,7 @@
Lexers for testing languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,13 +17,13 @@ __all__ = ['GherkinLexer', 'TAPLexer']
class GherkinLexer(RegexLexer):
"""
For Gherkin syntax.
-
- .. versionadded:: 1.2
"""
name = 'Gherkin'
aliases = ['gherkin', 'cucumber']
filenames = ['*.feature']
mimetypes = ['text/x-gherkin']
+ url = 'https://cucumber.io/docs/gherkin'
+ version_added = '1.2'
feature_keywords = '^(기능|機能|功能|フィーãƒãƒ£|خاصية|תכונה|Функціонал|ФункционалноÑÑ‚|Функционал|Фича|ОÑобина|МогућноÑÑ‚|Özellik|WÅ‚aÅ›ciwość|Tính năng|Trajto|SavybÄ—|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|JellemzÅ‘|FÄ«Äa|Funzionalità|Funktionalität|Funkcionalnost|FunkcionalitÄte|FuncÈ›ionalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
feature_element_keywords = '^(\\s*)(시나리오 개요|시나리오|ë°°ê²½|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|剧本大纲|剧本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلÙية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий ÑтруктураÑи|Сценарий|Структура Ñценарію|Структура Ñценарија|Структура ÑценариÑ|Скица|Рамка на Ñценарий|Пример|ПредыÑториÑ|ПредиÑториÑ|Позадина|Передумова|ОÑнова|Концепт|КонтекÑÑ‚|ZaÅ‚ożenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|SituÄcija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|ScenÄrijs pÄ“c parauga|ScenÄrijs|Scenár|Scenaro|Scenariusz|Scenariul de ÅŸablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus Å¡ablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|NáÄrt Scénáře|NáÄrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|GeçmiÅŸ|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|CondiÅ£ii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
@@ -136,13 +136,12 @@ class GherkinLexer(RegexLexer):
class TAPLexer(RegexLexer):
"""
For Test Anything Protocol (TAP) output.
-
- .. versionadded:: 2.1
"""
name = 'TAP'
url = 'https://testanything.org/'
aliases = ['tap']
filenames = ['*.tap']
+ version_added = '2.1'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/text.py b/contrib/python/Pygments/py3/pygments/lexers/text.py
index e35b0b5f0a..17e2056b71 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/text.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/text.py
@@ -4,10 +4,11 @@
Lexers for non-source code file types.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+# ruff: noqa: F401
from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer, \
UnixConfigLexer
diff --git a/contrib/python/Pygments/py3/pygments/lexers/textedit.py b/contrib/python/Pygments/py3/pygments/lexers/textedit.py
index 09defdbe2a..0e1f2b2bed 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/textedit.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/textedit.py
@@ -4,7 +4,7 @@
Lexers for languages related to text processing.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -22,14 +22,14 @@ __all__ = ['AwkLexer', 'SedLexer', 'VimLexer']
class AwkLexer(RegexLexer):
"""
For Awk scripts.
-
- .. versionadded:: 1.5
"""
name = 'Awk'
aliases = ['awk', 'gawk', 'mawk', 'nawk']
filenames = ['*.awk']
mimetypes = ['application/x-awk']
+ url = 'https://en.wikipedia.org/wiki/AWK'
+ version_added = '1.5'
tokens = {
'commentsandwhitespace': [
@@ -49,7 +49,7 @@ class AwkLexer(RegexLexer):
'root': [
(r'^(?=\s|/)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
- (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
+ (r'\+\+|--|\|\||&&|in\b|\$|!?~|\?|:|'
r'(\*\*|[-<>+*%\^/!=|])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
@@ -81,6 +81,8 @@ class SedLexer(RegexLexer):
aliases = ['sed', 'gsed', 'ssed']
filenames = ['*.sed', '*.[gs]sed']
mimetypes = ['text/x-sed']
+ url = 'https://en.wikipedia.org/wiki/Sed'
+ version_added = ''
flags = re.MULTILINE
# Match the contents within delimiters such as /<contents>/
@@ -111,14 +113,15 @@ class SedLexer(RegexLexer):
class VimLexer(RegexLexer):
"""
Lexer for VimL script files.
-
- .. versionadded:: 0.8
"""
name = 'VimL'
aliases = ['vim']
filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
'_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
mimetypes = ['text/x-vim']
+ url = 'https://www.vim.org'
+ version_added = '0.8'
+
flags = re.MULTILINE
_python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/textfmts.py b/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
index c7cfb6d041..f2a9253f9a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/textfmts.py
@@ -4,7 +4,7 @@
Lexers for various text formats.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,6 +29,8 @@ class IrcLogsLexer(RegexLexer):
aliases = ['irc']
filenames = ['*.weechatlog']
mimetypes = ['text/x-irclog']
+ url = 'https://en.wikipedia.org/wiki/Internet_Relay_Chat'
+ version_added = ''
flags = re.VERBOSE | re.MULTILINE
timestamp = r"""
@@ -86,13 +88,13 @@ class IrcLogsLexer(RegexLexer):
class GettextLexer(RegexLexer):
"""
Lexer for Gettext catalog files.
-
- .. versionadded:: 0.9
"""
name = 'Gettext Catalog'
aliases = ['pot', 'po']
filenames = ['*.pot', '*.po']
mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
+ url = 'https://www.gnu.org/software/gettext'
+ version_added = '0.9'
tokens = {
'root': [
@@ -114,12 +116,12 @@ class GettextLexer(RegexLexer):
class HttpLexer(RegexLexer):
"""
Lexer for HTTP sessions.
-
- .. versionadded:: 1.5
"""
name = 'HTTP'
aliases = ['http']
+ url = 'https://httpwg.org/specs'
+ version_added = '1.5'
flags = re.DOTALL
@@ -206,13 +208,12 @@ class HttpLexer(RegexLexer):
class TodotxtLexer(RegexLexer):
"""
Lexer for Todo.txt todo list format.
-
- .. versionadded:: 2.0
"""
name = 'Todotxt'
url = 'http://todotxt.com/'
aliases = ['todotxt']
+ version_added = '2.0'
# *.todotxt is not a standard extension for Todo.txt files; including it
# makes testing easier, and also makes autodetecting file type easier.
filenames = ['todo.txt', '*.todotxt']
@@ -308,8 +309,6 @@ class NotmuchLexer(RegexLexer):
"""
For Notmuch email text format.
- .. versionadded:: 2.5
-
Additional options accepted:
`body_lexer`
@@ -320,6 +319,7 @@ class NotmuchLexer(RegexLexer):
name = 'Notmuch'
url = 'https://notmuchmail.org/'
aliases = ['notmuch']
+ version_added = '2.5'
def _highlight_code(self, match):
code = match.group(1)
@@ -390,12 +390,12 @@ class NotmuchLexer(RegexLexer):
class KernelLogLexer(RegexLexer):
"""
For Linux Kernel log ("dmesg") output.
-
- .. versionadded:: 2.6
"""
name = 'Kernel log'
aliases = ['kmsg', 'dmesg']
filenames = ['*.kmsg', '*.dmesg']
+ url = 'https://fr.wikipedia.org/wiki/Dmesg'
+ version_added = '2.6'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/theorem.py b/contrib/python/Pygments/py3/pygments/lexers/theorem.py
index abf09ae171..79f0606c02 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/theorem.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/theorem.py
@@ -6,16 +6,15 @@
See also :mod:`pygments.lexers.lean`
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
-from pygments.lexer import RegexLexer, default, words, include
+from pygments.lexer import RegexLexer, bygroups, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Whitespace
-from pygments.lexers.lean import LeanLexer
+# compatibility import
+from pygments.lexers.lean import LeanLexer # noqa: F401
__all__ = ['CoqLexer', 'IsabelleLexer']
@@ -23,8 +22,6 @@ __all__ = ['CoqLexer', 'IsabelleLexer']
class CoqLexer(RegexLexer):
"""
For the Coq theorem prover.
-
- .. versionadded:: 1.5
"""
name = 'Coq'
@@ -32,17 +29,18 @@ class CoqLexer(RegexLexer):
aliases = ['coq']
filenames = ['*.v']
mimetypes = ['text/x-coq']
+ version_added = '1.5'
flags = 0 # no re.MULTILINE
keywords1 = (
# Vernacular commands
- 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
+ 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Include', 'Variable',
'Variables', 'Parameter', 'Parameters', 'Axiom', 'Axioms', 'Hypothesis',
'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
- 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Example', 'Let',
- 'Ltac', 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
- 'Arguments', 'Types', 'Unset', 'Contextual', 'Strict', 'Prenex',
+ 'Open', 'Close', 'Bind', 'Declare', 'Delimit', 'Definition', 'Example', 'Let',
+ 'Ltac', 'Ltac2', 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
+ 'Arguments', 'Types', 'Contextual', 'Strict', 'Prenex',
'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
'Variant', 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Fact',
'Remark', 'Corollary', 'Proposition', 'Property', 'Goal',
@@ -51,7 +49,8 @@ class CoqLexer(RegexLexer):
'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
'Universe', 'Polymorphic', 'Monomorphic', 'Context', 'Scheme', 'From',
- 'Undo', 'Fail', 'Function',
+ 'Undo', 'Fail', 'Function', 'Program', 'Elpi', 'Extract', 'Opaque',
+ 'Transparent', 'Unshelve', 'Next Obligation',
)
keywords2 = (
# Gallina
@@ -99,7 +98,7 @@ class CoqLexer(RegexLexer):
'!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
'->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
'<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
+ r'\[\|', ']', '_', '`', r'\{', r'\{<', r'lp:\{\{', r'\|', r'\|]', r'\}', '~', '=>',
r'/\\', r'\\/', r'\{\|', r'\|\}',
# 'Π', 'Σ', # Not defined in the standard library
'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥',
@@ -115,8 +114,10 @@ class CoqLexer(RegexLexer):
(r'\(\*', Comment, 'comment'),
(r'\b(?:[^\W\d][\w\']*\.)+[^\W\d][\w\']*\b', Name),
(r'\bEquations\b\??', Keyword.Namespace),
+ (r'\b(Elpi)(\s+)(Program|Query|Accumulate|Command|Typecheck|Db|Export|Tactic)?\b', bygroups(Keyword.Namespace,Text,Keyword.Namespace)),
# Very weak heuristic to distinguish the Set vernacular from the Set sort
- (r'\bSet(?=[ \t]+[A-Z][a-z][^\n]*?\.)', Keyword.Namespace),
+ (r'\bUnset\b|\bSet(?=[ \t]+[A-Z][a-z][^\n]*?\.)', Keyword.Namespace, 'set-options'),
+ (r'\b(?:String|Number)\s+Notation', Keyword.Namespace, 'sn-notation'),
(words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
(words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
(words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
@@ -125,8 +126,8 @@ class CoqLexer(RegexLexer):
(words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
# (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
(r'\b([A-Z][\w\']*)', Name),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'({})'.format('|'.join(keyopts[::-1])), Operator),
+ (rf'({infix_syms}|{prefix_syms})?{operators}', Operator),
(r"[^\W\d][\w']*", Name),
@@ -146,8 +147,27 @@ class CoqLexer(RegexLexer):
(r'[~?][a-z][\w\']*:', Name),
(r'\S', Name.Builtin.Pseudo),
],
+ 'set-options': [
+ (r'\s+', Text),
+ (r'[A-Z]\w*', Keyword.Namespace),
+ (r'"', String.Double, 'string'),
+ (r'\d+', Number.Integer),
+ (r'\.', Punctuation, '#pop'),
+ ],
+ 'sn-notation': [
+ (r'\s+', Text),
+ # Extra keywords to highlight only in this scope
+ (r'\b(?:via|mapping|abstract|warning|after)\b', Keyword),
+ (r'=>|[()\[\]:,]', Operator),
+ (r'\b[^\W\d][\w\']*(?:\.[^\W\d][\w\']*)*\b', Name),
+ (r'\d[\d_]*', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'\(\*', Comment, 'comment'),
+ (r'\.', Punctuation, '#pop'),
+ ],
'comment': [
- (r'[^(*)]+', Comment),
+ # Consume comments like ***** as one token
+ (r'([^(*)]+|\*+(?!\)))+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
(r'[(*)]', Comment),
@@ -175,8 +195,6 @@ class CoqLexer(RegexLexer):
class IsabelleLexer(RegexLexer):
"""
For the Isabelle proof assistant.
-
- .. versionadded:: 2.0
"""
name = 'Isabelle'
@@ -184,6 +202,7 @@ class IsabelleLexer(RegexLexer):
aliases = ['isabelle']
filenames = ['*.thy']
mimetypes = ['text/x-isabelle']
+ version_added = '2.0'
keyword_minor = (
'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
diff --git a/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py b/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py
index 13230f8b3d..7336653088 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/thingsdb.py
@@ -4,7 +4,7 @@
Lexers for the ThingsDB language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,12 +18,12 @@ __all__ = ['ThingsDBLexer']
class ThingsDBLexer(RegexLexer):
"""
Lexer for the ThingsDB programming language.
-
- .. versionadded:: 2.9
"""
name = 'ThingsDB'
aliases = ['ti', 'thingsdb']
filenames = ['*.ti']
+ url = 'https://www.thingsdb.net'
+ version_added = '2.9'
tokens = {
'root': [
@@ -39,21 +39,24 @@ class ThingsDBLexer(RegexLexer):
(r'([-+]?0x[0-9a-fA-F]+)', Number.Hex),
(r'[-+]?[0-9]+', Number.Integer),
(r'[-+]?((inf|nan)([^0-9A-Za-z_]|$)|[0-9]*\.[0-9]+(e[+-][0-9]+)?)',
- Number.Float),
+ Number.Float),
# strings
(r'(?:"(?:[^"]*)")+', String.Double),
(r"(?:'(?:[^']*)')+", String.Single),
+ (r"(?:`(?:[^`]*)`)+", String.Backtick),
# literals
(r'(true|false|nil)\b', Keyword.Constant),
+ # name constants
+ (r'(FULL|USER|GRANT|CHANGE|JOIN|RUN|QUERY|'
+ r'DEBUG|INFO|WARNING|ERROR|CRITICAL|'
+ r'NO_IDS|INT_MIN|INT_MAX)\b', Name.Constant),
+
# regular expressions
(r'(/[^/\\]*(?:\\.[^/\\]*)*/i?)', String.Regex),
- # thing id's
- (r'#[0-9]+', Comment.Preproc),
-
# name, assignments and functions
include('names'),
@@ -62,26 +65,47 @@ class ThingsDBLexer(RegexLexer):
],
'names': [
(r'(\.)'
- r'(add|call|contains|del|endswith|extend|filter|find|findindex|'
- r'get|has|id|indexof|keys|len|lower|map|pop|push|remove|set|sort|'
- r'splice|startswith|test|unwrap|upper|values|wrap)'
+ r'(first|last|then|else|load|at|again_in|again_at|err|cancel|'
+ r'closure|set_closure|args|set_args|owner|set_owner|equals|copy|'
+ r'dup|assign|week|weekday|yday|zone|len|call|doc|emit|extract|'
+ r'choice|code|format|msg|each|every|extend|extend_unique|filter|'
+ r'find|flat|find_index|has|index_of|count|sum|is_unique|unique|'
+ r'join|map|map_id|map_wrap|map_type|vmap|move|pop|push|fill|'
+ r'remove|replace|restrict|restriction|shift|sort|splice|to|add|'
+ r'one|clear|contains|ends_with|name|lower|replace|reverse|'
+ r'starts_with|split|test|trim|trim_left|trim_right|upper|del|ren|'
+ r'to_type|to_thing|get|id|keys|reduce|set|some|value|values|wrap|'
+ r'unshift|unwrap|search)'
r'(\()',
bygroups(Name.Function, Name.Function, Punctuation), 'arguments'),
- (r'(array|assert|assert_err|auth_err|backup_info|backups_info|'
- r'bad_data_err|bool|closure|collection_info|collections_info|'
- r'counters|deep|del_backup|del_collection|del_expired|del_node|'
- r'del_procedure|del_token|del_type|del_user|err|float|'
- r'forbidden_err|grant|int|isarray|isascii|isbool|isbytes|iserr|'
- r'isfloat|isinf|isint|islist|isnan|isnil|israw|isset|isstr|'
- r'isthing|istuple|isutf8|lookup_err|max_quota_err|mod_type|new|'
- r'new_backup|new_collection|new_node|new_procedure|new_token|'
- r'new_type|new_user|node_err|node_info|nodes_info|now|'
- r'num_arguments_err|operation_err|overflow_err|procedure_doc|'
- r'procedure_info|procedures_info|raise|refs|rename_collection|'
- r'rename_user|reset_counters|return|revoke|run|set_log_level|set|'
- r'set_quota|set_type|shutdown|str|syntax_err|thing|try|type|'
- r'type_err|type_count|type_info|types_info|user_info|users_info|'
- r'value_err|wse|zero_div_err)'
+ (r'(alt_raise|assert|base64_encode|base64_decode|bool|bytes|'
+ r'closure|datetime|deep|future|is_future|del_enum|del_type|room|'
+ r'is_room|task|tasks|is_task|is_email|is_url|is_tel|is_time_zone|'
+ r'timeit|enum|enum_info|enum_map|enums_info|err|regex|is_regex|'
+ r'change_id|float|has_enum|has_type|int|is_array|is_ascii|'
+ r'is_float|is_bool|is_bytes|is_closure|is_datetime|is_enum|'
+ r'is_err|is_mpdata|is_inf|is_int|is_list|is_nan|is_nil|is_raw|'
+ r'is_set|is_str|is_thing|is_timeval|is_tuple|is_utf8|json_dump|'
+ r'json_load|list|log|import|export|root|mod_enum|mod_type|new|'
+ r'new_type|now|raise|rand|range|randint|randstr|refs|rename_enum|'
+ r'set|set_enum|set_type|str|thing|timeval|try|type|type_assert|'
+ r'type_count|type_info|types_info|nse|wse|backup_info|'
+ r'backups_info|backups_ok|counters|del_backup|has_backup|'
+ r'new_backup|node_info|nodes_info|reset_counters|restart_module|'
+ r'set_log_level|shutdown|has_module|del_module|module_info|'
+ r'modules_info|new_module|deploy_module|rename_module|'
+ r'refresh_module|set_module_conf|set_module_scope|'
+ r'collections_info|del_collection|del_expired|del_node|del_token|'
+ r'del_user|grant|has_collection|has_node|has_token|has_user|'
+ r'new_collection|new_node|new_token|new_user|rename_collection|'
+ r'rename_user|restore|revoke|set_password|set_time_zone|'
+ r'set_default_deep|time_zones_info|user_info|users_info|'
+ r'del_procedure|has_procedure|new_procedure|mod_procedure|'
+ r'procedure_doc|procedure_info|procedures_info|rename_procedure|'
+ r'run|assert_err|auth_err|bad_data_err|cancelled_err|'
+ r'rename_type|forbidden_err|lookup_err|max_quota_err|node_err|'
+ r'num_arguments_err|operation_err|overflow_err|syntax_err|'
+ r'collection_info|type_err|value_err|zero_div_err)'
r'(\()',
bygroups(Name.Function, Punctuation),
'arguments'),
@@ -90,7 +114,7 @@ class ThingsDBLexer(RegexLexer):
bygroups(Name.Attribute, Text, Operator)),
(r'\.[A-Za-z_][0-9A-Za-z_]*', Name.Attribute),
(r'([A-Za-z_][0-9A-Za-z_]*)(\s*)(=)',
- bygroups(Name.Variable, Text, Operator)),
+ bygroups(Name.Variable, Text, Operator)),
(r'[A-Za-z_][0-9A-Za-z_]*', Name.Variable),
],
'whitespace': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tlb.py b/contrib/python/Pygments/py3/pygments/lexers/tlb.py
index ac629dc848..45d0743f11 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tlb.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tlb.py
@@ -4,7 +4,7 @@
Lexers for TL-b.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,6 +23,8 @@ class TlbLexer(RegexLexer):
name = 'Tl-b'
aliases = ['tlb']
filenames = ['*.tlb']
+ url = 'https://docs.ton.org/#/overviews/TL-B'
+ version_added = ''
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tls.py b/contrib/python/Pygments/py3/pygments/lexers/tls.py
index 34f8d4f26c..f7027bb02d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tls.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tls.py
@@ -4,7 +4,7 @@
Lexers for the TLS presentation language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
@@ -19,14 +19,13 @@ __all__ = ['TlsLexer']
class TlsLexer(RegexLexer):
"""
The TLS presentation language, described in RFC 8446.
-
- .. versionadded:: 2.16
"""
name = 'TLS Presentation Language'
url = 'https://www.rfc-editor.org/rfc/rfc8446#section-3'
filenames = []
aliases = ['tls']
mimetypes = []
+ version_added = '2.16'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/tnt.py b/contrib/python/Pygments/py3/pygments/lexers/tnt.py
index 2251373c5a..bb444fdf86 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/tnt.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/tnt.py
@@ -4,7 +4,7 @@
Lexer for Typographic Number Theory.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,14 +21,13 @@ class TNTLexer(Lexer):
"""
Lexer for Typographic Number Theory, as described in the book
Gödel, Escher, Bach, by Douglas R. Hofstadter
-
- .. versionadded:: 2.7
"""
name = 'Typographic Number Theory'
url = 'https://github.com/Kenny2github/language-tnt'
aliases = ['tnt']
filenames = ['*.tnt']
+ version_added = '2.7'
cur = []
diff --git a/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py b/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
index b48124be68..9480ddc02c 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/trafficscript.py
@@ -4,7 +4,7 @@
Lexer for RiverBed's TrafficScript (RTS) language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,12 +17,12 @@ __all__ = ['RtsLexer']
class RtsLexer(RegexLexer):
"""
For Riverbed Stingray Traffic Manager
-
- .. versionadded:: 2.1
"""
name = 'TrafficScript'
aliases = ['trafficscript', 'rts']
filenames = ['*.rts']
+ url = 'https://riverbed.com'
+ version_added = '2.1'
tokens = {
'root' : [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/typoscript.py b/contrib/python/Pygments/py3/pygments/lexers/typoscript.py
index dff6e5ed81..0f39772529 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/typoscript.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/typoscript.py
@@ -13,7 +13,7 @@
`TypoScriptHtmlDataLexer`
Lexer that highlights markers, constants and registers within html tags.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -29,12 +29,12 @@ __all__ = ['TypoScriptLexer', 'TypoScriptCssDataLexer', 'TypoScriptHtmlDataLexer
class TypoScriptCssDataLexer(RegexLexer):
"""
Lexer that highlights markers, constants and registers within css blocks.
-
- .. versionadded:: 2.2
"""
name = 'TypoScriptCssData'
aliases = ['typoscriptcssdata']
+ url = 'http://docs.typo3.org/typo3cms/TyposcriptReference/'
+ version_added = '2.2'
tokens = {
'root': [
@@ -64,12 +64,12 @@ class TypoScriptCssDataLexer(RegexLexer):
class TypoScriptHtmlDataLexer(RegexLexer):
"""
Lexer that highlights markers, constants and registers within html tags.
-
- .. versionadded:: 2.2
"""
name = 'TypoScriptHtmlData'
aliases = ['typoscripthtmldata']
+ url = 'http://docs.typo3.org/typo3cms/TyposcriptReference/'
+ version_added = '2.2'
tokens = {
'root': [
@@ -99,8 +99,6 @@ class TypoScriptHtmlDataLexer(RegexLexer):
class TypoScriptLexer(RegexLexer):
"""
Lexer for TypoScript code.
-
- .. versionadded:: 2.2
"""
name = 'TypoScript'
@@ -108,6 +106,7 @@ class TypoScriptLexer(RegexLexer):
aliases = ['typoscript']
filenames = ['*.typoscript']
mimetypes = ['text/x-typoscript']
+ version_added = '2.2'
flags = re.DOTALL | re.MULTILINE
diff --git a/contrib/python/Pygments/py3/pygments/lexers/typst.py b/contrib/python/Pygments/py3/pygments/lexers/typst.py
new file mode 100644
index 0000000000..5daa6766fb
--- /dev/null
+++ b/contrib/python/Pygments/py3/pygments/lexers/typst.py
@@ -0,0 +1,104 @@
+"""
+ pygments.lexers.typst
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Typst language.
+
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, bygroups, include
+from pygments.token import Comment, Keyword, Name, String, Punctuation, \
+ Whitespace, Generic, Operator, Number, Text
+
+__all__ = ['TypstLexer']
+
+
+class TypstLexer(RegexLexer):
+ """
+ For Typst code.
+ """
+
+ name = 'Typst'
+ aliases = ['typst']
+ filenames = ['*.typ']
+ mimetypes = ['text/x-typst']
+ url = 'https://typst.app'
+ version_added = '2.18'
+
+ tokens = {
+ 'root': [
+ include('markup'),
+ ],
+ 'common': [
+ (r'[ \t]+', Whitespace),
+ (r'((?!=[*_$`\-+0-9/<@\\#\[]|https?://).)+', Text),
+ ],
+ 'markup': [
+ include('comment'),
+ (r'^\s*=+.*$', Generic.Heading),
+ (r'[*][^*]*[*]', Generic.Strong),
+ (r'_[^_]*_', Generic.Emph),
+ (r'\$', Punctuation, 'maths'),
+ (r'`[^`]*`', String.Backtick), # inline code
+ (r'^\s*-', Punctuation), # unnumbered list
+ (r'^\s*\+', Punctuation), # numbered list
+ (r'^\s*[0-9.]+', Punctuation), # numbered list variant
+ (r'^(\s*/\s+)([^:]+)(:)', bygroups(Punctuation, Name.Variable, Punctuation)), # definitions
+ (r'<[a-zA-Z_][a-zA-Z0-9_-]*>', Name.Label), # label
+ (r'@[a-zA-Z_][a-zA-Z0-9_-]*', Name.Label), # reference
+ (r'\\#', Text), # escaped
+ (words(('#let', '#set', '#show'), suffix=r'\b'), Keyword.Declaration, 'inline_code'),
+ (r'(#[a-zA-Z_][a-zA-Z0-9_]*)(\[)', bygroups(Name.Function, Punctuation), 'markup'),
+ (r'(#[a-zA-Z_][a-zA-Z0-9_]*)(\()', bygroups(Name.Function, Punctuation), 'inline_code'),
+ (r'#[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
+ (r'```(?:.|\n)*?```', String.Backtick), # code block
+ (r'https?://[0-9a-zA-Z~/%#&=\',;.+?]*', Generic.Emph), # links
+ (words((r'---', r'\\', r'~', r'--', r'...'), suffix=r'\b'), Punctuation), # special chars shorthand
+ (r'\\\[', Punctuation), # escaped
+ (r'\\\]', Punctuation), # escaped
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ include('common'),
+ ],
+ 'maths': [
+ include('comment'),
+ (words(('_', '^', '+', '-', '/', '*', '->', '<-', '!=', '=='),
+ suffix=r'\b'), Operator),
+ (words((r'\\', '$='), suffix=r'\b'), Operator), # maths markup operators
+ (r'\\\$', Punctuation), # escaped
+ (r'\$', Punctuation, '#pop'), # end of math mode
+ include('code'),
+ ],
+ 'comment': [
+ (r'//.*$', Comment.Single),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ ],
+ 'code': [
+ include('comment'),
+ (r'\[', Punctuation, 'markup'),
+ (r'\(|\{', Punctuation, 'code'),
+ (r'\)|\}', Punctuation, '#pop'),
+ (r'"[^"]*"', String.Double),
+ (r'[=,]', Operator),
+ (words(('and', 'or', 'not'), suffix=r'\b'), Operator.Word),
+ (r'=>|<=|==|!=|>|<|-=|\+=|\*=|/=|\+|-|\\|\*', Operator), # comparisons
+ (r'([a-zA-Z_][a-zA-Z0-9_]*)(:)', bygroups(Name.Variable, Punctuation), '#push'),
+ (r'([a-zA-Z_][a-zA-Z0-9_]*)(\()', bygroups(Name.Function, Punctuation), '#push'),
+ (words(('as', 'break', 'export', 'continue', 'else', 'for', 'if',
+ 'import', 'in', 'include', 'return', 'while'), suffix=r'\b'),
+ Keyword.Reserved),
+ (words(('auto', 'none', 'true', 'false'), suffix=r'\b'), Keyword.Constant),
+ (r'([0-9.]+)(mm|pt|cm|in|em|fr|%)', bygroups(Number, Keyword.Reserved)),
+ (words(('let', 'set', 'show'), suffix=r'\b'), Keyword.Declaration),
+ # FIXME: make this work
+ ## (r'(import|include)( *)(")([^"])(")',
+ ## bygroups(Keyword.Reserved, Text, Punctuation, String.Double, Punctuation)),
+ include('common'),
+ ],
+ 'inline_code': [
+ (r';$', Punctuation, '#pop'),
+ include('code'),
+ ],
+ }
diff --git a/contrib/python/Pygments/py3/pygments/lexers/ul4.py b/contrib/python/Pygments/py3/pygments/lexers/ul4.py
index a40c20f679..467d3381c4 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/ul4.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/ul4.py
@@ -4,9 +4,7 @@
Lexer for the UL4 templating language.
- More information: https://python.livinglogic.de/UL4.html
-
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -25,8 +23,6 @@ __all__ = ['UL4Lexer', 'HTMLUL4Lexer', 'XMLUL4Lexer', 'CSSUL4Lexer',
class UL4Lexer(RegexLexer):
"""
Generic lexer for UL4.
-
- .. versionadded:: 2.12
"""
flags = re.MULTILINE | re.DOTALL
@@ -34,6 +30,8 @@ class UL4Lexer(RegexLexer):
name = 'UL4'
aliases = ['ul4']
filenames = ['*.ul4']
+ url = 'https://python.livinglogic.de/UL4.html'
+ version_added = '2.12'
tokens = {
"root": [
@@ -54,12 +52,26 @@ class UL4Lexer(RegexLexer):
),
(
# Comment:
+ # ``<?note?>...<?end note?>``
+ r"<\?\s*note\s*\?>",
+ Comment,
+ "note", # Switch to "note" mode
+ ),
+ (
+ # Comment:
# ``<?note foobar?>``
r"<\?\s*note\s.*?\?>",
Comment,
),
(
# Template documentation:
+ # ``<?doc?>...<?end doc?>``
+ r"<\?\s*doc\s*\?>",
+ String.Doc,
+ "doc",
+ ),
+ (
+ # Template documentation:
# ``<?doc foobar?>``
r"<\?\s*doc\s.*?\?>",
String.Doc,
@@ -113,6 +125,26 @@ class UL4Lexer(RegexLexer):
(r"[^<]+", Comment),
(r".", Comment),
],
+ # Note mode ignores everything upto the matching ``<?end note?>`` tag
+ "note": [
+ # Nested ``<?note?>`` tag
+ (r"<\?\s*note\s*\?>", Comment, "#push"),
+ # ``<?end note?>`` tag
+ (r"<\?\s*end\s+note\s*\?>", Comment, "#pop"),
+ # Everything else
+ (r"[^<]+", Comment),
+ (r".", Comment),
+ ],
+ # Doc mode ignores everything upto the matching ``<?end doc?>`` tag
+ "doc": [
+ # Nested ``<?doc?>`` tag
+ (r"<\?\s*doc\s*\?>", String.Doc, "#push"),
+ # ``<?end doc?>`` tag
+ (r"<\?\s*end\s+doc\s*\?>", String.Doc, "#pop"),
+ # Everything else
+ (r"[^<]+", String.Doc),
+ (r".", String.Doc),
+ ],
# UL4 expressions
"ul4": [
# End the tag
@@ -210,6 +242,8 @@ class HTMLUL4Lexer(DelegatingLexer):
name = 'HTML+UL4'
aliases = ['html+ul4']
filenames = ['*.htmlul4']
+ url = 'https://python.livinglogic.de/UL4.html'
+ version_added = ''
def __init__(self, **options):
super().__init__(HtmlLexer, UL4Lexer, **options)
@@ -223,6 +257,8 @@ class XMLUL4Lexer(DelegatingLexer):
name = 'XML+UL4'
aliases = ['xml+ul4']
filenames = ['*.xmlul4']
+ url = 'https://python.livinglogic.de/UL4.html'
+ version_added = ''
def __init__(self, **options):
super().__init__(XmlLexer, UL4Lexer, **options)
@@ -236,6 +272,8 @@ class CSSUL4Lexer(DelegatingLexer):
name = 'CSS+UL4'
aliases = ['css+ul4']
filenames = ['*.cssul4']
+ url = 'https://python.livinglogic.de/UL4.html'
+ version_added = ''
def __init__(self, **options):
super().__init__(CssLexer, UL4Lexer, **options)
@@ -249,6 +287,8 @@ class JavascriptUL4Lexer(DelegatingLexer):
name = 'Javascript+UL4'
aliases = ['js+ul4']
filenames = ['*.jsul4']
+ url = 'https://python.livinglogic.de/UL4.html'
+ version_added = ''
def __init__(self, **options):
super().__init__(JavascriptLexer, UL4Lexer, **options)
@@ -262,6 +302,8 @@ class PythonUL4Lexer(DelegatingLexer):
name = 'Python+UL4'
aliases = ['py+ul4']
filenames = ['*.pyul4']
+ url = 'https://python.livinglogic.de/UL4.html'
+ version_added = ''
def __init__(self, **options):
super().__init__(PythonLexer, UL4Lexer, **options)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/unicon.py b/contrib/python/Pygments/py3/pygments/lexers/unicon.py
index 9cd6790a47..dcf129d49a 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/unicon.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/unicon.py
@@ -4,7 +4,7 @@
Lexers for the Icon and Unicon languages, including ucode VM.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,14 @@ __all__ = ['IconLexer', 'UcodeLexer', 'UniconLexer']
class UniconLexer(RegexLexer):
"""
For Unicon source code.
-
- .. versionadded:: 2.4
"""
name = 'Unicon'
aliases = ['unicon']
filenames = ['*.icn']
mimetypes = ['text/unicon']
+ url = 'https://www.unicon.org'
+ version_added = '2.4'
flags = re.MULTILINE
@@ -166,13 +166,14 @@ class UniconLexer(RegexLexer):
class IconLexer(RegexLexer):
"""
Lexer for Icon.
-
- .. versionadded:: 1.6
"""
name = 'Icon'
aliases = ['icon']
filenames = ['*.icon', '*.ICON']
mimetypes = []
+ url = 'https://www2.cs.arizona.edu/icon'
+ version_added = '1.6'
+
flags = re.MULTILINE
tokens = {
@@ -306,13 +307,14 @@ class IconLexer(RegexLexer):
class UcodeLexer(RegexLexer):
"""
Lexer for Icon ucode files.
-
- .. versionadded:: 2.4
"""
name = 'ucode'
aliases = ['ucode']
filenames = ['*.u', '*.u1', '*.u2']
mimetypes = []
+ url = 'http://www.unicon.org'
+ version_added = '2.4'
+
flags = re.MULTILINE
tokens = {
diff --git a/contrib/python/Pygments/py3/pygments/lexers/urbi.py b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
index 3857ff20d4..3a81385c5d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/urbi.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/urbi.py
@@ -4,7 +4,7 @@
Lexers for UrbiScript language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -20,14 +20,14 @@ __all__ = ['UrbiscriptLexer']
class UrbiscriptLexer(ExtendedRegexLexer):
"""
For UrbiScript source code.
-
- .. versionadded:: 1.5
"""
name = 'UrbiScript'
aliases = ['urbiscript']
filenames = ['*.u']
mimetypes = ['application/x-urbiscript']
+ url = 'https://github.com/urbiforge/urbi'
+ version_added = '1.5'
flags = re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/usd.py b/contrib/python/Pygments/py3/pygments/lexers/usd.py
index 79a2ad9434..083125961d 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/usd.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/usd.py
@@ -4,7 +4,7 @@
The module that parses Pixar's Universal Scene Description file format.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -30,34 +30,29 @@ _WHITESPACE = r"([ \t]+)"
class UsdLexer(RegexLexer):
"""
A lexer that parses Pixar's Universal Scene Description file format.
-
- .. versionadded:: 2.6
"""
name = "USD"
url = 'https://graphics.pixar.com/usd/release/index.html'
aliases = ["usd", "usda"]
filenames = ["*.usd", "*.usda"]
+ version_added = '2.6'
tokens = {
"root": [
- (r"(custom){_WHITESPACE}(uniform)(\s+){}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ (rf"(custom){_WHITESPACE}(uniform)(\s+){_TYPE}(\s+){_BASE_ATTRIBUTE}(\s*)(=)",
bygroups(Keyword.Token, Whitespace, Keyword.Token, Whitespace,
Keyword.Type, Whitespace, Name.Attribute, Text,
Name.Keyword.Tokens, Whitespace, Operator)),
- (r"(custom){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ (rf"(custom){_WHITESPACE}{_TYPE}(\s+){_BASE_ATTRIBUTE}(\s*)(=)",
bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
Operator)),
- (r"(uniform){_WHITESPACE}{}(\s+){}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ (rf"(uniform){_WHITESPACE}{_TYPE}(\s+){_BASE_ATTRIBUTE}(\s*)(=)",
bygroups(Keyword.Token, Whitespace, Keyword.Type, Whitespace,
Name.Attribute, Text, Name.Keyword.Tokens, Whitespace,
Operator)),
- (r"{}{_WHITESPACE}{}(\s*)(=)".format(
- _TYPE, _BASE_ATTRIBUTE, _WHITESPACE=_WHITESPACE),
+ (rf"{_TYPE}{_WHITESPACE}{_BASE_ATTRIBUTE}(\s*)(=)",
bygroups(Keyword.Type, Whitespace, Name.Attribute, Text,
Name.Keyword.Tokens, Whitespace, Operator)),
] +
diff --git a/contrib/python/Pygments/py3/pygments/lexers/varnish.py b/contrib/python/Pygments/py3/pygments/lexers/varnish.py
index 3c22792a8b..db17d80060 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/varnish.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/varnish.py
@@ -4,7 +4,7 @@
Lexers for Varnish configuration
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,13 +19,13 @@ __all__ = ['VCLLexer', 'VCLSnippetLexer']
class VCLLexer(RegexLexer):
"""
For Varnish Configuration Language (VCL).
-
- .. versionadded:: 2.2
"""
name = 'VCL'
aliases = ['vcl']
filenames = ['*.vcl']
mimetypes = ['text/x-vclsrc']
+ url = 'https://www.varnish-software.com/developers/tutorials/varnish-configuration-language-vcl'
+ version_added = '2.2'
def analyse_text(text):
# If the very first line is 'vcl 4.0;' it's pretty much guaranteed
@@ -160,13 +160,13 @@ class VCLLexer(RegexLexer):
class VCLSnippetLexer(VCLLexer):
"""
For Varnish Configuration Language snippets.
-
- .. versionadded:: 2.2
"""
name = 'VCLSnippets'
aliases = ['vclsnippets', 'vclsnippet']
mimetypes = ['text/x-vclsnippet']
filenames = []
+ url = 'https://www.varnish-software.com/developers/tutorials/varnish-configuration-language-vcl'
+ version_added = '2.2'
def analyse_text(text):
# override method inherited from VCLLexer
diff --git a/contrib/python/Pygments/py3/pygments/lexers/verification.py b/contrib/python/Pygments/py3/pygments/lexers/verification.py
index 41d45d4067..2d396b0e65 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/verification.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/verification.py
@@ -4,7 +4,7 @@
Lexer for Intermediate Verification Languages (IVLs).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,13 +18,12 @@ __all__ = ['BoogieLexer', 'SilverLexer']
class BoogieLexer(RegexLexer):
"""
For Boogie source code.
-
- .. versionadded:: 2.1
"""
name = 'Boogie'
url = 'https://boogie-docs.readthedocs.io/en/latest/'
aliases = ['boogie']
filenames = ['*.bpl']
+ version_added = '2.1'
tokens = {
'root': [
@@ -66,12 +65,12 @@ class BoogieLexer(RegexLexer):
class SilverLexer(RegexLexer):
"""
For Silver source code.
-
- .. versionadded:: 2.2
"""
name = 'Silver'
aliases = ['silver']
filenames = ['*.sil', '*.vpr']
+ url = 'https://github.com/viperproject/silver'
+ version_added = '2.2'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/verifpal.py b/contrib/python/Pygments/py3/pygments/lexers/verifpal.py
index 6953dd7b70..dad5cb5452 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/verifpal.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/verifpal.py
@@ -4,7 +4,7 @@
Lexers for Verifpal languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,8 +18,6 @@ __all__ = ['VerifpalLexer']
class VerifpalLexer(RegexLexer):
"""
For Verifpal code.
-
- .. versionadded:: 2.16
"""
name = 'Verifpal'
@@ -27,6 +25,7 @@ class VerifpalLexer(RegexLexer):
filenames = ['*.vp']
mimetypes = ['text/x-verifpal']
url = 'https://verifpal.com'
+ version_added = '2.16'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/vip.py b/contrib/python/Pygments/py3/pygments/lexers/vip.py
index 1b25d5cab6..cabb678b31 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/vip.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/vip.py
@@ -4,7 +4,7 @@
Lexers for Visual Prolog & Grammar files.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -85,13 +85,12 @@ class VisualPrologBaseLexer(RegexLexer):
class VisualPrologLexer(VisualPrologBaseLexer):
"""Lexer for VisualProlog
-
- .. versionadded:: 2.17
"""
name = 'Visual Prolog'
url = 'https://www.visual-prolog.com/'
aliases = ['visualprolog']
filenames = ['*.pro', '*.cl', '*.i', '*.pack', '*.ph']
+ version_added = '2.17'
majorkw = ('goal', 'namespace', 'interface', 'class', 'implement', 'where', 'open', 'inherits', 'supports', 'resolve',
'delegate', 'monitor', 'constants', 'domains', 'predicates', 'constructors', 'properties', 'clauses', 'facts')
@@ -121,14 +120,13 @@ class VisualPrologLexer(VisualPrologBaseLexer):
class VisualPrologGrammarLexer(VisualPrologBaseLexer):
"""Lexer for VisualProlog grammar
-
- .. versionadded:: 2.17
"""
name = 'Visual Prolog Grammar'
url = 'https://www.visual-prolog.com/'
aliases = ['visualprologgrammar']
filenames = ['*.vipgrm']
+ version_added = '2.17'
majorkw = ('open', 'namespace', 'grammar', 'nonterminals',
'startsymbols', 'terminals', 'rules', 'precedence')
diff --git a/contrib/python/Pygments/py3/pygments/lexers/vyper.py b/contrib/python/Pygments/py3/pygments/lexers/vyper.py
index ff9d0b0440..4155028901 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/vyper.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/vyper.py
@@ -4,7 +4,7 @@
Lexer for the Vyper Smart Contract language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,13 +17,12 @@ __all__ = ['VyperLexer']
class VyperLexer(RegexLexer):
"""For the Vyper smart contract language.
-
- .. versionadded:: 2.17
"""
name = 'Vyper'
aliases = ['vyper']
filenames = ['*.vy']
url = "https://vyper.readthedocs.io"
+ version_added = '2.17'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/web.py b/contrib/python/Pygments/py3/pygments/lexers/web.py
index 9e52653160..1c052abeff 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/web.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/web.py
@@ -4,10 +4,11 @@
Just export previously exported lexers.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+# ruff: noqa: F401
from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
HamlLexer, ScamlLexer, JadeLexer
from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
diff --git a/contrib/python/Pygments/py3/pygments/lexers/webassembly.py b/contrib/python/Pygments/py3/pygments/lexers/webassembly.py
index f674eadc51..db3ce5e3f2 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/webassembly.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/webassembly.py
@@ -8,7 +8,7 @@
and https://webassembly.github.io/spec/core/text/.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -63,14 +63,13 @@ builtins = (
class WatLexer(RegexLexer):
"""Lexer for the WebAssembly text format.
-
- .. versionadded:: 2.9
"""
name = 'WebAssembly'
url = 'https://webassembly.org/'
aliases = ['wast', 'wat']
filenames = ['*.wat', '*.wast']
+ version_added = '2.9'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/webidl.py b/contrib/python/Pygments/py3/pygments/lexers/webidl.py
index 5fcbe69c00..22a4d61dc3 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/webidl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/webidl.py
@@ -4,7 +4,7 @@
Lexers for Web IDL, including some extensions.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -39,14 +39,13 @@ _string = r'"[^"]*"'
class WebIDLLexer(RegexLexer):
"""
For Web IDL.
-
- .. versionadded:: 2.6
"""
name = 'Web IDL'
url = 'https://www.w3.org/wiki/Web_IDL'
aliases = ['webidl']
filenames = ['*.webidl']
+ version_added = '2.6'
tokens = {
'common': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/webmisc.py b/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
index 787a8a6ece..815c7a3135 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/webmisc.py
@@ -4,7 +4,7 @@
Lexers for misc. web stuff.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,8 +26,6 @@ __all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
class DuelLexer(RegexLexer):
"""
Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
-
- .. versionadded:: 1.4
"""
name = 'Duel'
@@ -35,6 +33,7 @@ class DuelLexer(RegexLexer):
aliases = ['duel', 'jbst', 'jsonml+bst']
filenames = ['*.duel', '*.jbst']
mimetypes = ['text/x-duel', 'text/x-jbst']
+ version_added = '1.4'
flags = re.DOTALL
@@ -59,14 +58,13 @@ class XQueryLexer(ExtendedRegexLexer):
"""
An XQuery lexer, parsing a stream and outputting the tokens needed to
highlight xquery code.
-
- .. versionadded:: 1.4
"""
name = 'XQuery'
url = 'https://www.w3.org/XML/Query/'
aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
mimetypes = ['text/xquery', 'application/xquery']
+ version_added = '1.4'
xquery_parse_state = []
@@ -82,13 +80,13 @@ class XQueryLexer(ExtendedRegexLexer):
# ncnamechar = ncnamestartchar + (r"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
# r"[\u203F-\u2040]")
ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
- ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
+ ncname = f"(?:{ncnamestartchar}+{ncnamechar}*)"
pitarget_namestartchar = r"(?:[A-KN-WYZ]|_|:|[a-kn-wyz])"
pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
- pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
- prefixedname = "%s:%s" % (ncname, ncname)
+ pitarget = f"{pitarget_namestartchar}+{pitarget_namechar}*"
+ prefixedname = f"{ncname}:{ncname}"
unprefixedname = ncname
- qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
+ qname = f"(?:{prefixedname}|{unprefixedname})"
entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
@@ -813,8 +811,6 @@ class XQueryLexer(ExtendedRegexLexer):
class QmlLexer(RegexLexer):
"""
For QML files.
-
- .. versionadded:: 1.6
"""
# QML is based on javascript, so much of this is taken from the
@@ -825,6 +821,7 @@ class QmlLexer(RegexLexer):
aliases = ['qml', 'qbs']
filenames = ['*.qml', '*.qbs']
mimetypes = ['application/x-qml', 'application/x-qt.qbs+qml']
+ version_added = '1.6'
# pasted from JavascriptLexer, with some additions
flags = re.DOTALL | re.MULTILINE
@@ -891,8 +888,6 @@ class CirruLexer(RegexLexer):
* using ``$`` as folding operator
* using ``,`` as unfolding operator
* using indentations for nested blocks
-
- .. versionadded:: 2.0
"""
name = 'Cirru'
@@ -900,6 +895,7 @@ class CirruLexer(RegexLexer):
aliases = ['cirru']
filenames = ['*.cirru']
mimetypes = ['text/x-cirru']
+ version_added = '2.0'
flags = re.MULTILINE
tokens = {
@@ -940,14 +936,14 @@ class CirruLexer(RegexLexer):
class SlimLexer(ExtendedRegexLexer):
"""
For Slim markup.
-
- .. versionadded:: 2.0
"""
name = 'Slim'
aliases = ['slim']
filenames = ['*.slim']
mimetypes = ['text/x-slim']
+ url = 'https://slim-template.github.io'
+ version_added = '2.0'
flags = re.IGNORECASE
_dot = r'(?: \|\n(?=.* \|)|.)'
diff --git a/contrib/python/Pygments/py3/pygments/lexers/wgsl.py b/contrib/python/Pygments/py3/pygments/lexers/wgsl.py
index f233421552..145e0c0b34 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/wgsl.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/wgsl.py
@@ -4,11 +4,11 @@
Lexer for the WebGPU Shading Language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.lexer import RegexLexer, include, words, default
from pygments.token import Comment, Operator, Keyword, Name, \
Number, Punctuation, Whitespace
from pygments import unistring as uni
@@ -27,20 +27,19 @@ NotLineEndRE = '[^' + "".join(LineEndCodePoints) + ']'
LineEndRE = '[' + "".join(LineEndCodePoints) + ']'
# https://www.w3.org/TR/WGSL/#syntax-ident_pattern_token
-ident_pattern_token = '([{}][{}]+)|[{}]'.format(uni.xid_start,uni.xid_continue,uni.xid_start)
+ident_pattern_token = f'([{uni.xid_start}][{uni.xid_continue}]+)|[{uni.xid_start}]'
class WgslLexer(RegexLexer):
"""
Lexer for the WebGPU Shading Language.
-
- .. versionadded:: 2.15
"""
name = 'WebGPU Shading Language'
url = 'https://www.w3.org/TR/WGSL/'
aliases = ['wgsl']
filenames = ['*.wgsl']
mimetypes = ['text/wgsl']
+ version_added = '2.15'
# https://www.w3.org/TR/WGSL/#var-and-value
keyword_decl = (words('var let const override'.split(),suffix=r'\b'), Keyword.Declaration)
@@ -323,8 +322,8 @@ class WgslLexer(RegexLexer):
'comments': [
# Line ending comments
# Match up CR/LF pair first.
- (r'//{}*{}{}'.format(NotLineEndRE,CR,LF), Comment.Single),
- (r'//{}*{}'.format(NotLineEndRE,LineEndRE), Comment.Single),
+ (rf'//{NotLineEndRE}*{CR}{LF}', Comment.Single),
+ (rf'//{NotLineEndRE}*{LineEndRE}', Comment.Single),
(r'/\*', Comment.Multiline, 'block_comment'),
],
'attribute': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/whiley.py b/contrib/python/Pygments/py3/pygments/lexers/whiley.py
index bf707d25cc..d6e2ea27ca 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/whiley.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/whiley.py
@@ -4,7 +4,7 @@
Lexers for the Whiley language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,14 +18,13 @@ __all__ = ['WhileyLexer']
class WhileyLexer(RegexLexer):
"""
Lexer for the Whiley programming language.
-
- .. versionadded:: 2.2
"""
name = 'Whiley'
url = 'http://whiley.org/'
filenames = ['*.whiley']
aliases = ['whiley']
mimetypes = ['text/x-whiley']
+ version_added = '2.2'
# See the language specification:
# http://whiley.org/download/WhileyLanguageSpec.pdf
diff --git a/contrib/python/Pygments/py3/pygments/lexers/wowtoc.py b/contrib/python/Pygments/py3/pygments/lexers/wowtoc.py
index 6b96e65bdb..60abb83ef7 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/wowtoc.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/wowtoc.py
@@ -6,7 +6,7 @@
TOC files describe game addons.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,13 +46,13 @@ def _create_tag_line_token(inner_pattern, inner_token, ignore_case=False):
class WoWTocLexer(RegexLexer):
"""
Lexer for World of Warcraft TOC files.
-
- .. versionadded:: 2.14
"""
name = "World of Warcraft TOC"
aliases = ["wowtoc"]
filenames = ["*.toc"]
+ url = 'https://wowpedia.fandom.com/wiki/TOC_format'
+ version_added = '2.14'
tokens = {
"root": [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/wren.py b/contrib/python/Pygments/py3/pygments/lexers/wren.py
index ed4ddc7add..4dc521874e 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/wren.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/wren.py
@@ -4,7 +4,7 @@
Lexer for Wren.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -12,20 +12,19 @@ import re
from pygments.lexer import include, RegexLexer, words
from pygments.token import Whitespace, Punctuation, Keyword, Name, Comment, \
- Operator, Number, String, Error
+ Operator, Number, String
__all__ = ['WrenLexer']
class WrenLexer(RegexLexer):
"""
For Wren source code, version 0.4.0.
-
- .. versionadded:: 2.14.0
"""
name = 'Wren'
url = 'https://wren.io'
aliases = ['wren']
filenames = ['*.wren']
+ version_added = '2.14'
flags = re.MULTILINE | re.DOTALL
diff --git a/contrib/python/Pygments/py3/pygments/lexers/x10.py b/contrib/python/Pygments/py3/pygments/lexers/x10.py
index c125b530d1..638808aa59 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/x10.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/x10.py
@@ -4,7 +4,7 @@
Lexers for the X10 programming language.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,8 +17,6 @@ __all__ = ['X10Lexer']
class X10Lexer(RegexLexer):
"""
For the X10 language.
-
- .. versionadded:: 2.2
"""
name = 'X10'
@@ -26,6 +24,7 @@ class X10Lexer(RegexLexer):
aliases = ['x10', 'xten']
filenames = ['*.x10']
mimetypes = ['text/x-x10']
+ version_added = '2.2'
keywords = (
'as', 'assert', 'async', 'at', 'athome', 'ateach', 'atomic',
@@ -56,10 +55,10 @@ class X10Lexer(RegexLexer):
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
- (r'\b(%s)\b' % '|'.join(values), Keyword.Constant),
- (r'\b(%s)\b' % '|'.join(modifiers), Keyword.Declaration),
+ (r'\b({})\b'.format('|'.join(keywords)), Keyword),
+ (r'\b({})\b'.format('|'.join(types)), Keyword.Type),
+ (r'\b({})\b'.format('|'.join(values)), Keyword.Constant),
+ (r'\b({})\b'.format('|'.join(modifiers)), Keyword.Declaration),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'.', Text)
diff --git a/contrib/python/Pygments/py3/pygments/lexers/xorg.py b/contrib/python/Pygments/py3/pygments/lexers/xorg.py
index 67878c3f6c..a57bd130fd 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/xorg.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/xorg.py
@@ -4,7 +4,7 @@
Lexers for Xorg configs.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -21,6 +21,7 @@ class XorgLexer(RegexLexer):
aliases = ['xorg.conf']
filenames = ['xorg.conf']
mimetypes = []
+ version_added = ''
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/yang.py b/contrib/python/Pygments/py3/pygments/lexers/yang.py
index 866c01d27e..624ebf5ad1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/yang.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/yang.py
@@ -4,7 +4,7 @@
Lexer for the YANG 1.1 modeling language. See :rfc:`7950`.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -17,14 +17,13 @@ __all__ = ['YangLexer']
class YangLexer(RegexLexer):
"""
Lexer for YANG, based on RFC7950.
-
- .. versionadded:: 2.7
"""
name = 'YANG'
url = 'https://tools.ietf.org/html/rfc7950/'
aliases = ['yang']
filenames = ['*.yang']
mimetypes = ['application/yang']
+ version_added = '2.7'
#Keywords from RFC7950 ; oriented at BNF style
TOP_STMTS_KEYWORDS = ("module", "submodule")
diff --git a/contrib/python/Pygments/py3/pygments/lexers/yara.py b/contrib/python/Pygments/py3/pygments/lexers/yara.py
index 1a84e4a785..f0445d88f1 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/yara.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/yara.py
@@ -4,7 +4,7 @@
Lexers for YARA.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -18,8 +18,6 @@ __all__ = ['YaraLexer']
class YaraLexer(RegexLexer):
"""
For YARA rules
-
- .. versionadded:: 2.16
"""
name = 'YARA'
@@ -27,6 +25,7 @@ class YaraLexer(RegexLexer):
aliases = ['yara', 'yar']
filenames = ['*.yar']
mimetypes = ['text/x-yara']
+ version_added = '2.16'
tokens = {
'root': [
diff --git a/contrib/python/Pygments/py3/pygments/lexers/zig.py b/contrib/python/Pygments/py3/pygments/lexers/zig.py
index fad3b79d9e..86b4adc9ca 100644
--- a/contrib/python/Pygments/py3/pygments/lexers/zig.py
+++ b/contrib/python/Pygments/py3/pygments/lexers/zig.py
@@ -4,7 +4,7 @@
Lexers for Zig.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -26,6 +26,7 @@ class ZigLexer(RegexLexer):
aliases = ['zig']
filenames = ['*.zig']
mimetypes = ['text/zig']
+ version_added = ''
type_keywords = (
words(('bool', 'f16', 'f32', 'f64', 'f128', 'void', 'noreturn', 'type',
diff --git a/contrib/python/Pygments/py3/pygments/modeline.py b/contrib/python/Pygments/py3/pygments/modeline.py
index 7b6f6a324b..e4d9fe167b 100644
--- a/contrib/python/Pygments/py3/pygments/modeline.py
+++ b/contrib/python/Pygments/py3/pygments/modeline.py
@@ -4,7 +4,7 @@
A simple modeline parser (based on pymodeline).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,7 +19,7 @@ modeline_re = re.compile(r'''
''', re.VERBOSE)
-def get_filetype_from_line(l):
+def get_filetype_from_line(l): # noqa: E741
m = modeline_re.search(l)
if m:
return m.group(1)
@@ -30,8 +30,8 @@ def get_filetype_from_buffer(buf, max_lines=5):
Scan the buffer for modelines and return filetype if one is found.
"""
lines = buf.splitlines()
- for l in lines[-1:-max_lines-1:-1]:
- ret = get_filetype_from_line(l)
+ for line in lines[-1:-max_lines-1:-1]:
+ ret = get_filetype_from_line(line)
if ret:
return ret
for i in range(max_lines, -1, -1):
diff --git a/contrib/python/Pygments/py3/pygments/plugin.py b/contrib/python/Pygments/py3/pygments/plugin.py
index 0de47bace8..2e462f2c2f 100644
--- a/contrib/python/Pygments/py3/pygments/plugin.py
+++ b/contrib/python/Pygments/py3/pygments/plugin.py
@@ -2,12 +2,7 @@
pygments.plugin
~~~~~~~~~~~~~~~
- Pygments plugin interface. By default, this tries to use
- ``importlib.metadata``, which is in the Python standard
- library since Python 3.8, or its ``importlib_metadata``
- backport for earlier versions of Python. It falls back on
- ``pkg_resources`` if not found. Finally, if ``pkg_resources``
- is not found either, no plugins are loaded at all.
+ Pygments plugin interface.
lexer plugins::
@@ -34,9 +29,10 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from importlib.metadata import entry_points
LEXER_ENTRY_POINT = 'pygments.lexers'
FORMATTER_ENTRY_POINT = 'pygments.formatters'
@@ -45,18 +41,6 @@ FILTER_ENTRY_POINT = 'pygments.filters'
def iter_entry_points(group_name):
- try:
- from importlib.metadata import entry_points
- except ImportError:
- try:
- from importlib_metadata import entry_points
- except ImportError:
- try:
- from pkg_resources import iter_entry_points
- except (ImportError, OSError):
- return []
- else:
- return iter_entry_points(group_name)
groups = entry_points()
if hasattr(groups, 'select'):
# New interface in Python 3.10 and newer versions of the
diff --git a/contrib/python/Pygments/py3/pygments/regexopt.py b/contrib/python/Pygments/py3/pygments/regexopt.py
index 45223eccc1..c44eedbf2a 100644
--- a/contrib/python/Pygments/py3/pygments/regexopt.py
+++ b/contrib/python/Pygments/py3/pygments/regexopt.py
@@ -5,7 +5,7 @@
An algorithm that generates optimized regexes for matching long lists of
literal strings.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/scanner.py b/contrib/python/Pygments/py3/pygments/scanner.py
index 32a2f30329..112da34917 100644
--- a/contrib/python/Pygments/py3/pygments/scanner.py
+++ b/contrib/python/Pygments/py3/pygments/scanner.py
@@ -11,7 +11,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/contrib/python/Pygments/py3/pygments/sphinxext.py b/contrib/python/Pygments/py3/pygments/sphinxext.py
index f935688f1c..a742897c0b 100644
--- a/contrib/python/Pygments/py3/pygments/sphinxext.py
+++ b/contrib/python/Pygments/py3/pygments/sphinxext.py
@@ -5,7 +5,7 @@
Sphinx extension to generate automatic documentation of lexers,
formatters and filters.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -33,6 +33,8 @@ LEXERDOC = '''
%s
+ %s
+
'''
FMTERDOC = '''
@@ -119,11 +121,11 @@ class PygmentsDoc(Directive):
def write_row(*columns):
"""Format a table row"""
out = []
- for l, c in zip(column_lengths, columns):
- if c:
- out.append(c.ljust(l))
+ for length, col in zip(column_lengths, columns):
+ if col:
+ out.append(col.ljust(length))
else:
- out.append(' '*l)
+ out.append(' '*length)
return ' '.join(out)
@@ -160,7 +162,7 @@ class PygmentsDoc(Directive):
self.filenames.add(mod.__file__)
cls = getattr(mod, classname)
if not cls.__doc__:
- print("Warning: %s does not have a docstring." % classname)
+ print(f"Warning: {classname} does not have a docstring.")
docstring = cls.__doc__
if isinstance(docstring, bytes):
docstring = docstring.decode('utf8')
@@ -182,12 +184,18 @@ class PygmentsDoc(Directive):
for line in content.splitlines():
docstring += f' {line}\n'
+ if cls.version_added:
+ version_line = f'.. versionadded:: {cls.version_added}'
+ else:
+ version_line = ''
+
modules.setdefault(module, []).append((
classname,
', '.join(data[2]) or 'None',
', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None',
', '.join(data[4]) or 'None',
- docstring))
+ docstring,
+ version_line))
if module not in moduledocstrings:
moddoc = mod.__doc__
if isinstance(moddoc, bytes):
@@ -196,7 +204,7 @@ class PygmentsDoc(Directive):
for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
if moduledocstrings[module] is None:
- raise Exception("Missing docstring for %s" % (module,))
+ raise Exception(f"Missing docstring for {module}")
heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
out.append(MODULEDOC % (module, heading, '-'*len(heading)))
for data in lexers:
diff --git a/contrib/python/Pygments/py3/pygments/style.py b/contrib/python/Pygments/py3/pygments/style.py
index 96eb92c2bf..449522c5a4 100644
--- a/contrib/python/Pygments/py3/pygments/style.py
+++ b/contrib/python/Pygments/py3/pygments/style.py
@@ -4,7 +4,7 @@
Basic style object.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -76,7 +76,7 @@ class StyleMeta(type):
return ''
elif text.startswith('var') or text.startswith('calc'):
return text
- assert False, "wrong color format %r" % text
+ assert False, f"wrong color format {text!r}"
_styles = obj._styles = {}
diff --git a/contrib/python/Pygments/py3/pygments/styles/__init__.py b/contrib/python/Pygments/py3/pygments/styles/__init__.py
index 75ac30bb98..d80f2a3995 100644
--- a/contrib/python/Pygments/py3/pygments/styles/__init__.py
+++ b/contrib/python/Pygments/py3/pygments/styles/__init__.py
@@ -4,7 +4,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -44,13 +44,13 @@ def get_style_by_name(name):
try:
mod = __import__(mod, None, None, [cls])
except ImportError:
- raise ClassNotFound("Could not find style module %r" % mod +
+ raise ClassNotFound(f"Could not find style module {mod!r}" +
(builtin and ", though it should be builtin")
+ ".")
try:
return getattr(mod, cls)
except AttributeError:
- raise ClassNotFound("Could not find style class %r in style module." % cls)
+ raise ClassNotFound(f"Could not find style class {cls!r} in style module.")
def get_all_styles():
diff --git a/contrib/python/Pygments/py3/pygments/styles/_mapping.py b/contrib/python/Pygments/py3/pygments/styles/_mapping.py
index 04c7ddfbb0..49a7fae92d 100644
--- a/contrib/python/Pygments/py3/pygments/styles/_mapping.py
+++ b/contrib/python/Pygments/py3/pygments/styles/_mapping.py
@@ -9,6 +9,7 @@ STYLES = {
'AutumnStyle': ('pygments.styles.autumn', 'autumn', ()),
'BlackWhiteStyle': ('pygments.styles.bw', 'bw', ()),
'BorlandStyle': ('pygments.styles.borland', 'borland', ()),
+ 'CoffeeStyle': ('pygments.styles.coffee', 'coffee', ()),
'ColorfulStyle': ('pygments.styles.colorful', 'colorful', ()),
'DefaultStyle': ('pygments.styles.default', 'default', ()),
'DraculaStyle': ('pygments.styles.dracula', 'dracula', ()),
diff --git a/contrib/python/Pygments/py3/pygments/styles/abap.py b/contrib/python/Pygments/py3/pygments/styles/abap.py
index ab322df9cf..9438cf6e30 100644
--- a/contrib/python/Pygments/py3/pygments/styles/abap.py
+++ b/contrib/python/Pygments/py3/pygments/styles/abap.py
@@ -4,7 +4,7 @@
ABAP workbench like style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/algol.py b/contrib/python/Pygments/py3/pygments/styles/algol.py
index 83319e0ab7..e54e4b1258 100644
--- a/contrib/python/Pygments/py3/pygments/styles/algol.py
+++ b/contrib/python/Pygments/py3/pygments/styles/algol.py
@@ -25,7 +25,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/algol_nu.py b/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
index de1434dc84..d786a842f3 100644
--- a/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
+++ b/contrib/python/Pygments/py3/pygments/styles/algol_nu.py
@@ -25,7 +25,7 @@
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/arduino.py b/contrib/python/Pygments/py3/pygments/styles/arduino.py
index 8655b03712..3aa6d12415 100644
--- a/contrib/python/Pygments/py3/pygments/styles/arduino.py
+++ b/contrib/python/Pygments/py3/pygments/styles/arduino.py
@@ -4,7 +4,7 @@
Arduino® Syntax highlighting style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/autumn.py b/contrib/python/Pygments/py3/pygments/styles/autumn.py
index ccbb5fe774..5ba15a78b0 100644
--- a/contrib/python/Pygments/py3/pygments/styles/autumn.py
+++ b/contrib/python/Pygments/py3/pygments/styles/autumn.py
@@ -4,7 +4,7 @@
A colorful style, inspired by the terminal highlighting style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/borland.py b/contrib/python/Pygments/py3/pygments/styles/borland.py
index 82c00ae138..6bcc6fb37c 100644
--- a/contrib/python/Pygments/py3/pygments/styles/borland.py
+++ b/contrib/python/Pygments/py3/pygments/styles/borland.py
@@ -4,7 +4,7 @@
Style similar to the style used in the Borland IDEs.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/bw.py b/contrib/python/Pygments/py3/pygments/styles/bw.py
index 3ba00925fb..aadcf5dfd1 100644
--- a/contrib/python/Pygments/py3/pygments/styles/bw.py
+++ b/contrib/python/Pygments/py3/pygments/styles/bw.py
@@ -4,7 +4,7 @@
Simple black/white only style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/coffee.py b/contrib/python/Pygments/py3/pygments/styles/coffee.py
new file mode 100644
index 0000000000..bb4055c537
--- /dev/null
+++ b/contrib/python/Pygments/py3/pygments/styles/coffee.py
@@ -0,0 +1,80 @@
+"""
+ pygments.styles.coffee
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ A warm and cozy theme based off gruvbox
+
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.style import Style
+from pygments.token import (Comment, Error, Generic, Keyword, Literal, Name,
+ Number, Operator, Punctuation, String, Token)
+
+__all__ = ["CoffeeStyle"]
+
+
+class CoffeeStyle(Style):
+ """
+ A warm and cozy theme based off gruvbox
+ """
+
+ name = "coffee"
+
+ background_color = "#262220"
+ highlight_color = "#ddd0c0"
+
+ line_number_color = "#4e4e4e"
+ line_number_special_color = "#8f9494"
+
+ styles = {
+ Comment: "#70757A",
+ Comment.Hashbang: "#8f9f9f",
+ Comment.Preproc: "#fdd0c0",
+ Comment.PreprocFile: "#c9b98f",
+ Comment.Special: "#af5f5f",
+ Error: "#af5f5f",
+ Generic.Deleted: "#bb6868",
+ Generic.Emph: "italic",
+ Generic.Error: "#af5f5f",
+ Generic.Inserted: "#849155",
+ Generic.Output: "#ddd0c0",
+ Generic.Strong: "bold",
+ Generic.Traceback: "#af5f5f",
+ Keyword: "#919191",
+ Keyword.Constant: "#875f5f",
+ Keyword.Declaration: "#875f5f",
+ Keyword.Namespace: "#875f5f",
+ Keyword.Reserved: "#b46276",
+ Keyword.Type: "#af875f",
+ Literal: "#af875f",
+ Name: "#ddd0c0",
+ Name.Attribute: "#ddd0c0",
+ Name.Builtin: "#ddd0c0",
+ Name.Builtin.Pseudo: "#87afaf",
+ Name.Class: "#875f5f",
+ Name.Constant: "#af8787",
+ Name.Decorator: "#fdd0c0",
+ Name.Entity: "#ddd0c0",
+ Name.Exception: "#877575",
+ Name.Function: "#fdd0c0",
+ Name.Function.Magic: "#fdd0c0",
+ Name.Other: "#ddd0c0",
+ Name.Property: "#dfaf87",
+ Name.Tag: "#87afaf",
+ Name.Variable: "#ddd0c0",
+ Number: "#87afaf",
+ Operator: "#878787",
+ Operator.Word: "#878787",
+ Punctuation: "#ddd0c0",
+ String: "#c9b98f",
+ String.Affix: "#dfaf87",
+ String.Doc: "#878787",
+ String.Escape: "#af5f5f",
+ String.Interpol: "#af5f5f",
+ String.Other: "#fdd0c0",
+ String.Regex: "#af5f5f",
+ String.Symbol: "#af5f5f",
+ Token: "#ddd0c0",
+ }
diff --git a/contrib/python/Pygments/py3/pygments/styles/colorful.py b/contrib/python/Pygments/py3/pygments/styles/colorful.py
index 661a9e446d..a9656bdf0f 100644
--- a/contrib/python/Pygments/py3/pygments/styles/colorful.py
+++ b/contrib/python/Pygments/py3/pygments/styles/colorful.py
@@ -4,7 +4,7 @@
A colorful style, inspired by CodeRay.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/default.py b/contrib/python/Pygments/py3/pygments/styles/default.py
index f4e5b7b26d..a46d8e1e34 100644
--- a/contrib/python/Pygments/py3/pygments/styles/default.py
+++ b/contrib/python/Pygments/py3/pygments/styles/default.py
@@ -4,7 +4,7 @@
The default highlighting style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/dracula.py b/contrib/python/Pygments/py3/pygments/styles/dracula.py
index d7043c0076..71ecff2672 100644
--- a/contrib/python/Pygments/py3/pygments/styles/dracula.py
+++ b/contrib/python/Pygments/py3/pygments/styles/dracula.py
@@ -7,7 +7,7 @@
Based on the Dracula Theme for pygments by Chris Bracco.
See https://github.com/dracula/pygments/tree/fee9ed5613d1086bc01b9d0a5a0e9867a009f571
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/emacs.py b/contrib/python/Pygments/py3/pygments/styles/emacs.py
index fad91a1348..6d67492a8e 100644
--- a/contrib/python/Pygments/py3/pygments/styles/emacs.py
+++ b/contrib/python/Pygments/py3/pygments/styles/emacs.py
@@ -4,7 +4,7 @@
A highlighting style for Pygments, inspired by Emacs.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/friendly.py b/contrib/python/Pygments/py3/pygments/styles/friendly.py
index 8de4fcc1e4..ba5759b614 100644
--- a/contrib/python/Pygments/py3/pygments/styles/friendly.py
+++ b/contrib/python/Pygments/py3/pygments/styles/friendly.py
@@ -4,7 +4,7 @@
A modern style based on the VIM pyte theme.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/friendly_grayscale.py b/contrib/python/Pygments/py3/pygments/styles/friendly_grayscale.py
index e7d3ed47ec..c2acdeb2c2 100644
--- a/contrib/python/Pygments/py3/pygments/styles/friendly_grayscale.py
+++ b/contrib/python/Pygments/py3/pygments/styles/friendly_grayscale.py
@@ -7,7 +7,7 @@
using the luminosity value calculated by
http://www.workwithcolor.com/color-converter-01.htm
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/fruity.py b/contrib/python/Pygments/py3/pygments/styles/fruity.py
index b23257d19a..5d9b67b655 100644
--- a/contrib/python/Pygments/py3/pygments/styles/fruity.py
+++ b/contrib/python/Pygments/py3/pygments/styles/fruity.py
@@ -4,7 +4,7 @@
pygments version of my "fruity" vim theme.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/gh_dark.py b/contrib/python/Pygments/py3/pygments/styles/gh_dark.py
index 95f8e803d1..4bed6d2c7d 100644
--- a/contrib/python/Pygments/py3/pygments/styles/gh_dark.py
+++ b/contrib/python/Pygments/py3/pygments/styles/gh_dark.py
@@ -5,7 +5,7 @@
Github's Dark-Colorscheme based theme for Pygments
Colors extracted from https://github.com/primer/primitives
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/gruvbox.py b/contrib/python/Pygments/py3/pygments/styles/gruvbox.py
index c05f3140b3..97bd511e35 100644
--- a/contrib/python/Pygments/py3/pygments/styles/gruvbox.py
+++ b/contrib/python/Pygments/py3/pygments/styles/gruvbox.py
@@ -5,7 +5,7 @@
pygments version of the "gruvbox" vim theme.
https://github.com/morhetz/gruvbox
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/igor.py b/contrib/python/Pygments/py3/pygments/styles/igor.py
index 797e3675c4..48f76e04e5 100644
--- a/contrib/python/Pygments/py3/pygments/styles/igor.py
+++ b/contrib/python/Pygments/py3/pygments/styles/igor.py
@@ -4,7 +4,7 @@
Igor Pro default style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/inkpot.py b/contrib/python/Pygments/py3/pygments/styles/inkpot.py
index 817d97f09f..b6f93014fa 100644
--- a/contrib/python/Pygments/py3/pygments/styles/inkpot.py
+++ b/contrib/python/Pygments/py3/pygments/styles/inkpot.py
@@ -4,7 +4,7 @@
A highlighting style for Pygments, inspired by the Inkpot theme for VIM.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/lightbulb.py b/contrib/python/Pygments/py3/pygments/styles/lightbulb.py
index 25c4b15598..4e5658a9f6 100644
--- a/contrib/python/Pygments/py3/pygments/styles/lightbulb.py
+++ b/contrib/python/Pygments/py3/pygments/styles/lightbulb.py
@@ -4,7 +4,7 @@
A minimal dark theme based on the Lightbulb theme for VSCode.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/lilypond.py b/contrib/python/Pygments/py3/pygments/styles/lilypond.py
index 1218ec9d32..5e46f3dc60 100644
--- a/contrib/python/Pygments/py3/pygments/styles/lilypond.py
+++ b/contrib/python/Pygments/py3/pygments/styles/lilypond.py
@@ -4,7 +4,7 @@
LilyPond-specific style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/lovelace.py b/contrib/python/Pygments/py3/pygments/styles/lovelace.py
index 279ff0793e..1fdb149057 100644
--- a/contrib/python/Pygments/py3/pygments/styles/lovelace.py
+++ b/contrib/python/Pygments/py3/pygments/styles/lovelace.py
@@ -8,7 +8,7 @@
A desaturated, somewhat subdued style created for the Lovelace interactive
learning environment.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/manni.py b/contrib/python/Pygments/py3/pygments/styles/manni.py
index 1eb0e69860..f39810b683 100644
--- a/contrib/python/Pygments/py3/pygments/styles/manni.py
+++ b/contrib/python/Pygments/py3/pygments/styles/manni.py
@@ -7,7 +7,7 @@
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/material.py b/contrib/python/Pygments/py3/pygments/styles/material.py
index db0952d20a..720b403b0a 100644
--- a/contrib/python/Pygments/py3/pygments/styles/material.py
+++ b/contrib/python/Pygments/py3/pygments/styles/material.py
@@ -6,7 +6,7 @@
https://github.com/material-theme/vsc-material-theme
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/monokai.py b/contrib/python/Pygments/py3/pygments/styles/monokai.py
index 2ae51bcdfc..fb6b1ebf8d 100644
--- a/contrib/python/Pygments/py3/pygments/styles/monokai.py
+++ b/contrib/python/Pygments/py3/pygments/styles/monokai.py
@@ -6,7 +6,7 @@
http://www.monokai.nl/blog/2006/07/15/textmate-color-theme/
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/murphy.py b/contrib/python/Pygments/py3/pygments/styles/murphy.py
index 0c5cc6df6a..0d9128ce96 100644
--- a/contrib/python/Pygments/py3/pygments/styles/murphy.py
+++ b/contrib/python/Pygments/py3/pygments/styles/murphy.py
@@ -4,7 +4,7 @@
Murphy's style from CodeRay.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/native.py b/contrib/python/Pygments/py3/pygments/styles/native.py
index 11f83db4b9..d76360c5de 100644
--- a/contrib/python/Pygments/py3/pygments/styles/native.py
+++ b/contrib/python/Pygments/py3/pygments/styles/native.py
@@ -4,7 +4,7 @@
pygments version of my "native" vim theme.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -56,15 +56,15 @@ class NativeStyle(Style):
Generic.Heading: 'bold #ffffff',
Generic.Subheading: 'underline #ffffff',
- Generic.Deleted: '#d22323',
+ Generic.Deleted: '#ff3a3a',
Generic.Inserted: '#589819',
- Generic.Error: '#d22323',
+ Generic.Error: '#ff3a3a',
Generic.Emph: 'italic',
Generic.Strong: 'bold',
Generic.EmphStrong: 'bold italic',
Generic.Prompt: '#aaaaaa',
Generic.Output: '#cccccc',
- Generic.Traceback: '#d22323',
+ Generic.Traceback: '#ff3a3a',
Error: 'bg:#e3d2d2 #a61717'
}
diff --git a/contrib/python/Pygments/py3/pygments/styles/nord.py b/contrib/python/Pygments/py3/pygments/styles/nord.py
index e5cff24520..c208335d3d 100644
--- a/contrib/python/Pygments/py3/pygments/styles/nord.py
+++ b/contrib/python/Pygments/py3/pygments/styles/nord.py
@@ -5,7 +5,7 @@
pygments version of the "nord" theme by Arctic Ice Studio
https://www.nordtheme.com/
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/onedark.py b/contrib/python/Pygments/py3/pygments/styles/onedark.py
index b145ce91e2..335135485f 100644
--- a/contrib/python/Pygments/py3/pygments/styles/onedark.py
+++ b/contrib/python/Pygments/py3/pygments/styles/onedark.py
@@ -7,7 +7,7 @@
Inspired by one-dark-ui for the code editor Atom
(https://atom.io/themes/one-dark-ui).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py b/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
index 8cc231f31c..88968bff8f 100644
--- a/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
+++ b/contrib/python/Pygments/py3/pygments/styles/paraiso_dark.py
@@ -8,7 +8,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py b/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
index ac76badbfe..35e2c2d13d 100644
--- a/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
+++ b/contrib/python/Pygments/py3/pygments/styles/paraiso_light.py
@@ -8,7 +8,7 @@
Created with Base16 Builder by Chris Kempson
(https://github.com/chriskempson/base16-builder).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/pastie.py b/contrib/python/Pygments/py3/pygments/styles/pastie.py
index 2892660bff..844f539ef7 100644
--- a/contrib/python/Pygments/py3/pygments/styles/pastie.py
+++ b/contrib/python/Pygments/py3/pygments/styles/pastie.py
@@ -6,7 +6,7 @@
.. _pastie: http://pastie.caboo.se/
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/perldoc.py b/contrib/python/Pygments/py3/pygments/styles/perldoc.py
index 071821bb1e..3fe361713b 100644
--- a/contrib/python/Pygments/py3/pygments/styles/perldoc.py
+++ b/contrib/python/Pygments/py3/pygments/styles/perldoc.py
@@ -6,7 +6,7 @@
.. _perldoc: http://perldoc.perl.org/
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py b/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py
index 82bfed5482..aa9e445bc6 100644
--- a/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py
+++ b/contrib/python/Pygments/py3/pygments/styles/rainbow_dash.py
@@ -6,7 +6,7 @@
.. _theme: http://sanssecours.github.io/Rainbow-Dash.tmbundle
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,48 +46,48 @@ class RainbowDashStyle(Style):
background_color = WHITE
styles = {
- Comment: 'italic {}'.format(BLUE_LIGHT),
+ Comment: f'italic {BLUE_LIGHT}',
Comment.Preproc: 'noitalic',
Comment.Special: 'bold',
- Error: 'bg:{} {}'.format(RED, WHITE),
+ Error: f'bg:{RED} {WHITE}',
- Generic.Deleted: 'border:{} bg:{}'.format(RED_DARK, RED_LIGHT),
+ Generic.Deleted: f'border:{RED_DARK} bg:{RED_LIGHT}',
Generic.Emph: 'italic',
Generic.Error: RED_BRIGHT,
- Generic.Heading: 'bold {}'.format(BLUE),
- Generic.Inserted: 'border:{} bg:{}'.format(GREEN_NEON, GREEN_LIGHT),
+ Generic.Heading: f'bold {BLUE}',
+ Generic.Inserted: f'border:{GREEN_NEON} bg:{GREEN_LIGHT}',
Generic.Output: GREY,
- Generic.Prompt: 'bold {}'.format(BLUE),
+ Generic.Prompt: f'bold {BLUE}',
Generic.Strong: 'bold',
Generic.EmphStrong: 'bold italic',
- Generic.Subheading: 'bold {}'.format(BLUE),
+ Generic.Subheading: f'bold {BLUE}',
Generic.Traceback: RED_DARK,
- Keyword: 'bold {}'.format(BLUE),
+ Keyword: f'bold {BLUE}',
Keyword.Pseudo: 'nobold',
Keyword.Type: PURPLE,
- Name.Attribute: 'italic {}'.format(BLUE),
- Name.Builtin: 'bold {}'.format(PURPLE),
+ Name.Attribute: f'italic {BLUE}',
+ Name.Builtin: f'bold {PURPLE}',
Name.Class: 'underline',
Name.Constant: TURQUOISE,
- Name.Decorator: 'bold {}'.format(ORANGE),
- Name.Entity: 'bold {}'.format(PURPLE),
- Name.Exception: 'bold {}'.format(PURPLE),
- Name.Function: 'bold {}'.format(ORANGE),
- Name.Tag: 'bold {}'.format(BLUE),
+ Name.Decorator: f'bold {ORANGE}',
+ Name.Entity: f'bold {PURPLE}',
+ Name.Exception: f'bold {PURPLE}',
+ Name.Function: f'bold {ORANGE}',
+ Name.Tag: f'bold {BLUE}',
- Number: 'bold {}'.format(PURPLE),
+ Number: f'bold {PURPLE}',
Operator: BLUE,
Operator.Word: 'bold',
String: GREEN,
String.Doc: 'italic',
- String.Escape: 'bold {}'.format(RED_DARK),
+ String.Escape: f'bold {RED_DARK}',
String.Other: TURQUOISE,
- String.Symbol: 'bold {}'.format(RED_DARK),
+ String.Symbol: f'bold {RED_DARK}',
Text: GREY_DARK,
diff --git a/contrib/python/Pygments/py3/pygments/styles/rrt.py b/contrib/python/Pygments/py3/pygments/styles/rrt.py
index 3376d581db..4aa23304f0 100644
--- a/contrib/python/Pygments/py3/pygments/styles/rrt.py
+++ b/contrib/python/Pygments/py3/pygments/styles/rrt.py
@@ -4,7 +4,7 @@
pygments "rrt" theme, based on Zap and Emacs defaults.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/sas.py b/contrib/python/Pygments/py3/pygments/styles/sas.py
index 4d19b224cd..faee0ec103 100644
--- a/contrib/python/Pygments/py3/pygments/styles/sas.py
+++ b/contrib/python/Pygments/py3/pygments/styles/sas.py
@@ -6,7 +6,7 @@
meant to be a complete style. It's merely meant to mimic SAS'
program editor syntax highlighting.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/solarized.py b/contrib/python/Pygments/py3/pygments/styles/solarized.py
index 6a1f81240b..214a491c76 100644
--- a/contrib/python/Pygments/py3/pygments/styles/solarized.py
+++ b/contrib/python/Pygments/py3/pygments/styles/solarized.py
@@ -7,7 +7,7 @@
A Pygments style for the Solarized themes (licensed under MIT).
See: https://github.com/altercation/solarized
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/staroffice.py b/contrib/python/Pygments/py3/pygments/styles/staroffice.py
index b2cfba9fb3..dfe2dc0f44 100644
--- a/contrib/python/Pygments/py3/pygments/styles/staroffice.py
+++ b/contrib/python/Pygments/py3/pygments/styles/staroffice.py
@@ -4,7 +4,7 @@
Style similar to StarOffice style, also in OpenOffice and LibreOffice.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/stata_dark.py b/contrib/python/Pygments/py3/pygments/styles/stata_dark.py
index c2d0f19246..f6b9decbf0 100644
--- a/contrib/python/Pygments/py3/pygments/styles/stata_dark.py
+++ b/contrib/python/Pygments/py3/pygments/styles/stata_dark.py
@@ -6,7 +6,7 @@
meant to be a complete style, just for Stata's file formats.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/stata_light.py b/contrib/python/Pygments/py3/pygments/styles/stata_light.py
index 5e034568ca..a5bd1fe310 100644
--- a/contrib/python/Pygments/py3/pygments/styles/stata_light.py
+++ b/contrib/python/Pygments/py3/pygments/styles/stata_light.py
@@ -5,7 +5,7 @@
Light Style inspired by Stata's do-file editor. Note this is not
meant to be a complete style, just for Stata's file formats.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/tango.py b/contrib/python/Pygments/py3/pygments/styles/tango.py
index 787a697856..fcbab7b375 100644
--- a/contrib/python/Pygments/py3/pygments/styles/tango.py
+++ b/contrib/python/Pygments/py3/pygments/styles/tango.py
@@ -32,7 +32,7 @@
have been chosen to have the same style. Similarly, keywords (Keyword.*),
and Operator.Word (and, or, in) have been assigned the same style.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/trac.py b/contrib/python/Pygments/py3/pygments/styles/trac.py
index 5f5c319ab8..44dc8cc2df 100644
--- a/contrib/python/Pygments/py3/pygments/styles/trac.py
+++ b/contrib/python/Pygments/py3/pygments/styles/trac.py
@@ -4,7 +4,7 @@
Port of the default trac highlighter design.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/vim.py b/contrib/python/Pygments/py3/pygments/styles/vim.py
index 1a0828fb0d..98eda4f209 100644
--- a/contrib/python/Pygments/py3/pygments/styles/vim.py
+++ b/contrib/python/Pygments/py3/pygments/styles/vim.py
@@ -4,7 +4,7 @@
A highlighting style for Pygments, inspired by vim.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/vs.py b/contrib/python/Pygments/py3/pygments/styles/vs.py
index b3b98c05dd..acfd77fcca 100644
--- a/contrib/python/Pygments/py3/pygments/styles/vs.py
+++ b/contrib/python/Pygments/py3/pygments/styles/vs.py
@@ -4,7 +4,7 @@
Simple style with MS Visual Studio colors.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/xcode.py b/contrib/python/Pygments/py3/pygments/styles/xcode.py
index 87b1323ab8..acf2293f96 100644
--- a/contrib/python/Pygments/py3/pygments/styles/xcode.py
+++ b/contrib/python/Pygments/py3/pygments/styles/xcode.py
@@ -4,7 +4,7 @@
Style similar to the `Xcode` default theme.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/styles/zenburn.py b/contrib/python/Pygments/py3/pygments/styles/zenburn.py
index 6751c08d7c..e10ccd9239 100644
--- a/contrib/python/Pygments/py3/pygments/styles/zenburn.py
+++ b/contrib/python/Pygments/py3/pygments/styles/zenburn.py
@@ -7,7 +7,7 @@
See: https://kippura.org/zenburnpage/
https://github.com/jnurmine/Zenburn
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/token.py b/contrib/python/Pygments/py3/pygments/token.py
index bdf2e8e2e1..f78018a7aa 100644
--- a/contrib/python/Pygments/py3/pygments/token.py
+++ b/contrib/python/Pygments/py3/pygments/token.py
@@ -4,7 +4,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/Pygments/py3/pygments/unistring.py b/contrib/python/Pygments/py3/pygments/unistring.py
index 39f6baeedf..e2c3523e4b 100644
--- a/contrib/python/Pygments/py3/pygments/unistring.py
+++ b/contrib/python/Pygments/py3/pygments/unistring.py
@@ -7,7 +7,7 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -104,7 +104,7 @@ def _handle_runs(char_list): # pragma: no cover
if a == b:
yield a
else:
- yield '%s-%s' % (a, b)
+ yield f'{a}-{b}'
if __name__ == '__main__': # pragma: no cover
@@ -141,13 +141,13 @@ if __name__ == '__main__': # pragma: no cover
for cat in sorted(categories):
val = ''.join(_handle_runs(categories[cat]))
- fp.write('%s = %a\n\n' % (cat, val))
+ fp.write(f'{cat} = {val!a}\n\n')
cats = sorted(categories)
cats.remove('xid_start')
cats.remove('xid_continue')
- fp.write('cats = %r\n\n' % cats)
+ fp.write(f'cats = {cats!r}\n\n')
- fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
+ fp.write(f'# Generated from unidata {unicodedata.unidata_version}\n\n')
fp.write(footer)
diff --git a/contrib/python/Pygments/py3/pygments/util.py b/contrib/python/Pygments/py3/pygments/util.py
index 941fdb9ec7..83cf104925 100644
--- a/contrib/python/Pygments/py3/pygments/util.py
+++ b/contrib/python/Pygments/py3/pygments/util.py
@@ -4,7 +4,7 @@
Utility functions.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,8 +46,7 @@ def get_choice_opt(options, optname, allowed, default=None, normcase=False):
if normcase:
string = string.lower()
if string not in allowed:
- raise OptionError('Value for option %s must be one of %s' %
- (optname, ', '.join(map(str, allowed))))
+ raise OptionError('Value for option {} must be one of {}'.format(optname, ', '.join(map(str, allowed))))
return string
@@ -69,17 +68,15 @@ def get_bool_opt(options, optname, default=None):
elif isinstance(string, int):
return bool(string)
elif not isinstance(string, str):
- raise OptionError('Invalid type %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
+ raise OptionError(f'Invalid type {string!r} for option {optname}; use '
+ '1/0, yes/no, true/false, on/off')
elif string.lower() in ('1', 'yes', 'true', 'on'):
return True
elif string.lower() in ('0', 'no', 'false', 'off'):
return False
else:
- raise OptionError('Invalid value %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
+ raise OptionError(f'Invalid value {string!r} for option {optname}; use '
+ '1/0, yes/no, true/false, on/off')
def get_int_opt(options, optname, default=None):
@@ -88,13 +85,11 @@ def get_int_opt(options, optname, default=None):
try:
return int(string)
except TypeError:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
+ raise OptionError(f'Invalid type {string!r} for option {optname}; you '
+ 'must give an integer value')
except ValueError:
- raise OptionError('Invalid value %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
+ raise OptionError(f'Invalid value {string!r} for option {optname}; you '
+ 'must give an integer value')
def get_list_opt(options, optname, default=None):
"""
@@ -108,9 +103,8 @@ def get_list_opt(options, optname, default=None):
elif isinstance(val, (list, tuple)):
return list(val)
else:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give a list value' % (
- val, optname))
+ raise OptionError(f'Invalid type {val!r} for option {optname}; you '
+ 'must give a list value')
def docstring_headline(obj):
@@ -181,7 +175,7 @@ def shebang_matches(text, regex):
if x and not x.startswith('-')][-1]
except IndexError:
return False
- regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
+ regex = re.compile(rf'^{regex}(\.(exe|cmd|bat|bin))?$', re.IGNORECASE)
if regex.search(found) is not None:
return True
return False
diff --git a/contrib/python/Pygments/py3/ya.make b/contrib/python/Pygments/py3/ya.make
index 0a55d0425d..68cec5c11e 100644
--- a/contrib/python/Pygments/py3/ya.make
+++ b/contrib/python/Pygments/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(2.17.2)
+VERSION(2.18.0)
LICENSE(BSD-3-Clause)
@@ -47,6 +47,7 @@ PY_SRCS(
pygments/lexers/_lasso_builtins.py
pygments/lexers/_lilypond_builtins.py
pygments/lexers/_lua_builtins.py
+ pygments/lexers/_luau_builtins.py
pygments/lexers/_mapping.py
pygments/lexers/_mql_builtins.py
pygments/lexers/_mysql_builtins.py
@@ -180,6 +181,7 @@ PY_SRCS(
pygments/lexers/ml.py
pygments/lexers/modeling.py
pygments/lexers/modula2.py
+ pygments/lexers/mojo.py
pygments/lexers/monte.py
pygments/lexers/mosel.py
pygments/lexers/ncl.py
@@ -234,6 +236,7 @@ PY_SRCS(
pygments/lexers/smv.py
pygments/lexers/snobol.py
pygments/lexers/solidity.py
+ pygments/lexers/soong.py
pygments/lexers/sophia.py
pygments/lexers/special.py
pygments/lexers/spice.py
@@ -241,6 +244,7 @@ PY_SRCS(
pygments/lexers/srcinfo.py
pygments/lexers/stata.py
pygments/lexers/supercollider.py
+ pygments/lexers/tact.py
pygments/lexers/tal.py
pygments/lexers/tcl.py
pygments/lexers/teal.py
@@ -257,6 +261,7 @@ PY_SRCS(
pygments/lexers/tnt.py
pygments/lexers/trafficscript.py
pygments/lexers/typoscript.py
+ pygments/lexers/typst.py
pygments/lexers/ul4.py
pygments/lexers/unicon.py
pygments/lexers/urbi.py
@@ -294,6 +299,7 @@ PY_SRCS(
pygments/styles/autumn.py
pygments/styles/borland.py
pygments/styles/bw.py
+ pygments/styles/coffee.py
pygments/styles/colorful.py
pygments/styles/default.py
pygments/styles/dracula.py