path: root/contrib/python/Pygments/py3/pygments/lexers/theorem.py
"""
    pygments.lexers.theorem
    ~~~~~~~~~~~~~~~~~~~~~~~

    Lexers for theorem-proving languages.

    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. 
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import RegexLexer, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation, Generic

__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
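
# Illustrative usage sketch (comment only, not executed): the lexers listed in
# __all__ above are reachable through the standard Pygments lookup machinery
# by alias or filename.  The Coq line is a made-up example.
#
#     from pygments import highlight
#     from pygments.lexers import get_lexer_by_name
#     from pygments.formatters import TerminalFormatter
#
#     lexer = get_lexer_by_name('coq')   # resolves to CoqLexer via its alias
#     print(highlight('Lemma foo : True. Proof. auto. Qed.',
#                     lexer, TerminalFormatter()))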


class CoqLexer(RegexLexer):
    """
    For the `Coq <http://coq.inria.fr/>`_ theorem prover.

    .. versionadded:: 1.5
    """

    name = 'Coq'
    aliases = ['coq']
    filenames = ['*.v']
    mimetypes = ['text/x-coq']

    flags = re.UNICODE 
 
    keywords1 = (
        # Vernacular commands
        'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
        'Variables', 'Parameter', 'Parameters', 'Axiom', 'Axioms', 'Hypothesis',
        'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
        'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Example', 'Let',
        'Ltac', 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
        'Arguments', 'Types', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
        'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
        'Variant', 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Fact',
        'Remark', 'Corollary', 'Proposition', 'Property', 'Goal',
        'Proof', 'Restart', 'Save', 'Qed', 'Defined', 'Abort', 'Admitted',
        'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
        'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
        'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
        'Universe', 'Polymorphic', 'Monomorphic', 'Context'
    )
    keywords2 = (
        # Gallina
        'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
        'match', 'end',  'in', 'return', 'let', 'if', 'is', 'then', 'else',
        'for', 'of', 'nosimpl', 'with', 'as',
    )
    keywords3 = (
        # Sorts
        'Type', 'Prop', 'SProp', 
    )
    keywords4 = (
        # Tactics
        'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
        'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
        'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
        'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
        'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
        'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
        'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
        'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
        'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
        'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
        'native_compute', 'subst',
    )
    keywords5 = (
        # Terminators
        'by', 'now', 'done', 'exact', 'reflexivity',
        'tauto', 'romega', 'omega', 'lia', 'nia', 'lra', 'nra', 'psatz',
        'assumption', 'solve', 'contradiction', 'discriminate',
        'congruence',
    )
    keywords6 = (
        # Control
        'do', 'last', 'first', 'try', 'idtac', 'repeat',
    )
    # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
    # 'downto', 'else', 'end', 'exception', 'external', 'false',
    # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
    # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
    # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
    # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
    # 'type', 'val', 'virtual', 'when', 'while', 'with'
    keyopts = (
        '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
        '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
        '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
        r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
        r'/\\', r'\\/', r'\{\|', r'\|\}',
        # 'Π', 'Σ', # Not defined in the standard library 
        'λ', '¬', '∧', '∨', '∀', '∃', '→', '↔', '≠', '≤', '≥', 
    )
    operators = r'[!$%&*+\./:<=>?@^|~-]'
    prefix_syms = r'[!?~]'
    infix_syms = r'[=<>@^|&+\*/$%-]'

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
            (r'\(\*', Comment, 'comment'),
            (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
            # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
            (r'\b([A-Z][\w\']*)', Name),
            (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),

            (r"[^\W\d][\w']*", Name),

            # the more specific hex/oct/bin/float literals must precede the
            # plain decimal rule, which would otherwise consume their leading
            # digit
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
            (r'\d[\d_]*', Number.Integer),

            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", String.Char), 
 
            (r"'.'", String.Char),
            (r"'", Keyword),  # a stray quote is another syntax element

            (r'"', String.Double, 'string'),

            (r'[~?][a-z][\w\']*:', Name),
            (r'\S', Name.Builtin.Pseudo), 
        ],
        'comment': [
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'string': [
            (r'[^"]+', String.Double),
            (r'""', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'dotted': [
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][\w\']*', Name.Class, '#pop'),
            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
            default('#pop')
        ],
    }

    def analyse_text(text):
        if 'Qed' in text and 'Proof' in text: 
            return 1 
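
# Illustrative sketch (comment only): CoqLexer.get_tokens() walks the 'root'
# state above and yields (token type, text) pairs.  The Coq line is a made-up
# example, not taken from any test suite.
#
#     coq = CoqLexer()
#     for tok, value in coq.get_tokens("Theorem t : 1 + 1 = 2."):
#         print(tok, repr(value))
#     # 'Theorem' is matched by keywords1 -> Keyword.Namespace, '1' by the
#     # Number.Integer rule, and '+' and '=' by the keyopts alternation
#     # -> Operator.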


class IsabelleLexer(RegexLexer):
    """
    For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.

    .. versionadded:: 2.0
    """

    name = 'Isabelle'
    aliases = ['isabelle']
    filenames = ['*.thy']
    mimetypes = ['text/x-isabelle']

    keyword_minor = (
        'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
        'class_instance', 'class_relation', 'code_module', 'congs',
        'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
        'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
        'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
        'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
        'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
        'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
        'type_constructor', 'unchecked', 'unsafe', 'where',
    )

    keyword_diag = (
        'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
        'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
        'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
        'print_abbrevs', 'print_antiquotations', 'print_attributes',
        'print_binds', 'print_bnfs', 'print_bundles',
        'print_case_translations', 'print_cases', 'print_claset',
        'print_classes', 'print_codeproc', 'print_codesetup',
        'print_coercions', 'print_commands', 'print_context',
        'print_defn_rules', 'print_dependencies', 'print_facts',
        'print_induct_rules', 'print_inductives', 'print_interps',
        'print_locale', 'print_locales', 'print_methods', 'print_options',
        'print_orders', 'print_quot_maps', 'print_quotconsts',
        'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
        'print_rules', 'print_simpset', 'print_state', 'print_statement',
        'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
        'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
        'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
        'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
        'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
    )

    keyword_thy = ('theory', 'begin', 'end')

    keyword_section = ('header', 'chapter')

    keyword_subsection = (
        'section', 'subsection', 'subsubsection', 'sect', 'subsect',
        'subsubsect',
    )

    keyword_theory_decl = (
        'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
        'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
        'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
        'code_abort', 'code_class', 'code_const', 'code_datatype',
        'code_identifier', 'code_include', 'code_instance', 'code_modulename',
        'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
        'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
        'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
        'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
        'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
        'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
        'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
        'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
        'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
        'lifting_forget', 'lifting_update', 'local_setup', 'locale',
        'method_setup', 'nitpick_params', 'no_adhoc_overloading',
        'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
        'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
        'overloading', 'parse_ast_translation', 'parse_translation',
        'partial_function', 'primcorec', 'primrec', 'primrec_new',
        'print_ast_translation', 'print_translation', 'quickcheck_generator',
        'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
        'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
        'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
        'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
        'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
        'text_raw', 'theorems', 'translations', 'type_notation',
        'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
        'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
        'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
        'bnf_axiomatization', 'cartouche', 'datatype_compat',
        'free_constructors', 'functor', 'nominal_function',
        'nominal_termination', 'permanent_interpretation',
        'binds', 'defining', 'smt2_status', 'term_cartouche',
        'boogie_file', 'text_cartouche',
    )

    keyword_theory_script = ('inductive_cases', 'inductive_simps')

    keyword_theory_goal = (
        'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
        'crunch', 'crunch_ignore',
        'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
        'lift_definition', 'nominal_inductive', 'nominal_inductive2',
        'nominal_primrec', 'pcpodef', 'primcorecursive',
        'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
        'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
        'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
        'theorem', 'typedef', 'wrap_free_constructors',
    )

    keyword_qed = ('by', 'done', 'qed')
    keyword_abandon_proof = ('sorry', 'oops')

    keyword_proof_goal = ('have', 'hence', 'interpret')

    keyword_proof_block = ('next', 'proof')

    keyword_proof_chain = (
        'finally', 'from', 'then', 'ultimately', 'with',
    )

    keyword_proof_decl = (
        'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
        'txt', 'txt_raw', 'unfolding', 'using', 'write',
    )

    keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')

    keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')

    keyword_proof_script = (
        'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
    )

    operators = (
        '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
        '+', '-', '!', '?',
    )

    proof_operators = ('{', '}', '.', '..')

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'\(\*', Comment, 'comment'),
            (r'\{\*', Comment, 'text'),

            (words(operators), Operator),
            (words(proof_operators), Operator.Word),

            (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),

            (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),

            (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),

            (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
            (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),

            (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),

            (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),

            (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),

            (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),

            (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),

            (r'\\<\w*>', Text.Symbol),

            (r"[^\W\d][.\w']*", Name),
            (r"\?[^\W\d][.\w']*", Name),
            (r"'[^\W\d][.\w']*", Name.Type),

            # hex/oct/bin literals must precede the decimal rule, which would
            # otherwise consume their leading digit
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            (r'\d[\d_]*', Name),  # display numbers as name

            (r'"', String, 'string'),
            (r'`', String.Other, 'fact'),
        ],
        'comment': [
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'text': [
            (r'[^*}]+', Comment),
            (r'\*\}', Comment, '#pop'),
            (r'\*', Comment),
            (r'\}', Comment),
        ],
        'string': [
            (r'[^"\\]+', String),
            (r'\\<\w*>', String.Symbol),
            (r'\\"', String),
            (r'\\', String),
            (r'"', String, '#pop'),
        ],
        'fact': [
            (r'[^`\\]+', String.Other),
            (r'\\<\w*>', String.Symbol),
            (r'\\`', String.Other),
            (r'\\', String.Other),
            (r'`', String.Other, '#pop'),
        ],
    }
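
# Illustrative sketch (comment only): IsabelleLexer tokenizes theory text with
# the keyword tables above; the one-line theory header is a made-up example.
#
#     isa = IsabelleLexer()
#     for tok, value in isa.get_tokens('theory Scratch imports Main begin'):
#         print(tok, repr(value))
#     # 'theory' and 'begin' come from keyword_thy -> Keyword, 'imports' from
#     # keyword_minor -> Keyword.Pseudo, and 'Scratch'/'Main' fall through
#     # to Name.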


class LeanLexer(RegexLexer):
    """
    For the `Lean <https://github.com/leanprover/lean>`_
    theorem prover.

    .. versionadded:: 2.0
    """
    name = 'Lean'
    aliases = ['lean']
    filenames = ['*.lean']
    mimetypes = ['text/x-lean']

    flags = re.MULTILINE | re.UNICODE

    tokens = { 
        'root': [ 
            (r'\s+', Text), 
            (r'/--', String.Doc, 'docstring'), 
            (r'/-', Comment, 'comment'), 
            (r'--.*?$', Comment.Single), 
            (words(( 
                'import', 'renaming', 'hiding', 
                'namespace', 
                'local', 
                'private', 'protected', 'section', 
                'include', 'omit', 'section', 
                'protected', 'export', 
                'open', 
                'attribute', 
            ), prefix=r'\b', suffix=r'\b'), Keyword.Namespace), 
            (words(( 
                'lemma', 'theorem', 'def', 'definition', 'example', 
                'axiom', 'axioms', 'constant', 'constants', 
                'universe', 'universes', 
                'inductive', 'coinductive', 'structure', 'extends', 
                'class', 'instance', 
                'abbreviation', 

                'noncomputable theory', 

                'noncomputable', 'mutual', 'meta', 

                'attribute', 

                'parameter', 'parameters', 
                'variable', 'variables', 

                'reserve', 'precedence', 
                'postfix', 'prefix', 'notation', 'infix', 'infixl', 'infixr', 
 
                'begin', 'by', 'end', 
 
                'set_option', 
                'run_cmd', 
            ), prefix=r'\b', suffix=r'\b'), Keyword.Declaration), 
            (r'@\[[^\]]*\]', Keyword.Declaration), 
            (words(( 
                'forall', 'fun', 'Pi', 'from', 'have', 'show', 'assume', 'suffices', 
                'let', 'if', 'else', 'then', 'in', 'with', 'calc', 'match', 
                'do' 
            ), prefix=r'\b', suffix=r'\b'), Keyword), 
            (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error), 
            (words(('Sort', 'Prop', 'Type'), prefix=r'\b', suffix=r'\b'), Keyword.Type), 
            (words(( 
                '#eval', '#check', '#reduce', '#exit', 
                '#print', '#help', 
            ), suffix=r'\b'), Keyword), 
            (words(( 
                '(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',', 
            )), Operator), 
            (r'[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]' 
             r'[.A-Za-z_\'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079' 
             r'\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*', Name), 
            (r'0x[A-Za-z0-9]+', Number.Integer), 
            (r'0b[01]+', Number.Integer), 
            (r'\d+', Number.Integer),
            (r'"', String.Double, 'string'),
            (r"'(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4})|.)'", String.Char), 
            (r'[~?][a-z][\w\']*:', Name.Variable), 
            (r'\S', Name.Builtin.Pseudo), 
        ],
        'comment': [
            (r'[^/-]', Comment.Multiline),
            (r'/-', Comment.Multiline, '#push'),
            (r'-/', Comment.Multiline, '#pop'),
            (r'[/-]', Comment.Multiline)
        ],
        'docstring': [ 
            (r'[^/-]', String.Doc), 
            (r'-/', String.Doc, '#pop'), 
            (r'[/-]', String.Doc) 
        ], 
        'string': [
            (r'[^\\"]+', String.Double),
            (r"(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4}))", String.Escape), 
            ('"', String.Double, '#pop'),
        ],
    }
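
# Illustrative sketch (comment only): guess_lexer() relies on analyse_text(),
# which only CoqLexer defines in this module, so Coq scripts containing both
# 'Proof' and 'Qed' score 1.0; Lean and Isabelle sources are normally selected
# by filename instead.  The file names below are hypothetical.
#
#     from pygments.lexers import guess_lexer, get_lexer_for_filename
#
#     guess_lexer('Lemma l : True. Proof. exact I. Qed.')  # typically CoqLexer
#     get_lexer_for_filename('foo.lean')                   # LeanLexer (*.lean)
#     get_lexer_for_filename('Scratch.thy')                # IsabelleLexer (*.thy)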