"""
pygments.lexers.fift
~~~~~~~~~~~~~~~~~~~~
Lexers for fift.
:copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include
from pygments.token import Literal, Comment, Name, String, Number, Whitespace
__all__ = ['FiftLexer']
class FiftLexer(RegexLexer):
    """
    For Fift source code.
    """

    name = 'Fift'
    aliases = ['fift', 'fif']
    filenames = ['*.fif']
    url = 'https://ton-blockchain.github.io/docs/fiftbase.pdf'

    tokens = {
        'root': [
            (r'\s+', Whitespace),

            include('comments'),

            (r'[\.+]?\"', String, 'string'),

            # numbers
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'0b[01]+', Number.Bin),
            (r'-?[0-9]+("/"-?[0-9]+)?', Number.Decimal),

            # slices
            (r'b\{[01]+\}', Literal),
            (r'x\{[0-9a-fA-F_]+\}', Literal),

            # byte literal
            (r'B\{[0-9a-fA-F_]+\}', Literal),

            # treat anything else as a word
            (r'\S+', Name)
        ],

        'string': [
            # escape sequences; the closing quote pops back to 'root'
            (r'\\.', String.Escape),
            (r'\"', String, '#pop'),
            (r'[^\"\r\n\\]+', String)
        ],

        'comments': [
            (r'//.*', Comment.Singleline),
            (r'/\*', Comment.Multiline, 'comment'),
        ],
        # nested /* ... */ comments are handled via #push/#pop
        'comment': [
            (r'[^/*]+', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ],
    }
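

# Usage sketch: not part of the upstream Pygments module, just a minimal,
# guarded example assuming only that Pygments is installed. The Fift source
# string below is illustrative.
if __name__ == '__main__':
    sample = (
        '// square a number\n'
        '{ dup * } : square\n'
        '4 square .\n'
        'x{ABCD_} B{12ef} b{0101} 0x1F 0b101 ."done" cr\n'
    )
    # get_tokens() yields (token_type, text) pairs for the whole input
    for token_type, text in FiftLexer().get_tokens(sample):
        if text.strip():
            print(token_type, repr(text))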