<?xml version="1.0" encoding="UTF-8"?>
<!--
  Chroma lexer definition for the Twig template language.
  Structure: "root" scans plain template text and dispatches into
  "var" ({{ ... }} output expressions) and "tag" ({% ... %} tags),
  both of which share the "varnames" expression rules.
-->
<lexer>
  <config>
    <name>Twig</name>
    <alias>twig</alias>
    <mime_type>application/x-twig</mime_type>
    <!-- dot_all lets ".*?" span newlines in the raw/verbatim and comment rules. -->
    <dot_all>true</dot_all>
  </config>
  <rules>
    <!-- Inside a {{ ... }} output expression; pops on (optionally trimmed) "}}". -->
    <state name="var">
      <rule pattern="\s+">
        <token type="Text"/>
      </rule>
      <rule pattern="(-?)(\}\})">
        <bygroups>
          <token type="Text"/>
          <token type="CommentPreproc"/>
        </bygroups>
        <pop depth="1"/>
      </rule>
      <rule>
        <include state="varnames"/>
      </rule>
    </state>
    <!-- Inside a {% ... %} tag; pops on (optionally trimmed) "%}". -->
    <state name="tag">
      <rule pattern="\s+">
        <token type="Text"/>
      </rule>
      <rule pattern="(-?)(%\})">
        <bygroups>
          <token type="Text"/>
          <token type="CommentPreproc"/>
        </bygroups>
        <pop depth="1"/>
      </rule>
      <rule>
        <include state="varnames"/>
      </rule>
      <rule pattern=".">
        <token type="Punctuation"/>
      </rule>
    </state>
    <state name="root">
      <!-- Plain template text up to the next "{". -->
      <rule pattern="[^{]+">
        <token type="Other"/>
      </rule>
      <rule pattern="\{\{">
        <token type="CommentPreproc"/>
        <push state="var"/>
      </rule>
      <!-- Twig comment: {# ... #} (dot_all allows multi-line). -->
      <rule pattern="\{\#.*?\#\}">
        <token type="Comment"/>
      </rule>
      <!-- {% raw %} ... {% endraw %}: inner text is emitted verbatim as Other. -->
      <rule pattern="(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})">
        <bygroups>
          <token type="CommentPreproc"/>
          <token type="Text"/>
          <token type="Keyword"/>
          <token type="Text"/>
          <token type="CommentPreproc"/>
          <token type="Other"/>
          <token type="CommentPreproc"/>
          <token type="Text"/>
          <token type="Keyword"/>
          <token type="Text"/>
          <token type="CommentPreproc"/>
        </bygroups>
      </rule>
      <!-- {% verbatim %} ... {% endverbatim %}: same treatment as raw. -->
      <rule pattern="(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})">
        <bygroups>
          <token type="CommentPreproc"/>
          <token type="Text"/>
          <token type="Keyword"/>
          <token type="Text"/>
          <token type="CommentPreproc"/>
          <token type="Other"/>
          <token type="CommentPreproc"/>
          <token type="Text"/>
          <token type="Keyword"/>
          <token type="Text"/>
          <token type="CommentPreproc"/>
        </bygroups>
      </rule>
      <!-- {% filter name ... %}: the filter name is a function. -->
      <rule pattern="(\{%)(-?\s*)(filter)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)">
        <bygroups>
          <token type="CommentPreproc"/>
          <token type="Text"/>
          <token type="Keyword"/>
          <token type="Text"/>
          <token type="NameFunction"/>
        </bygroups>
        <push state="tag"/>
      </rule>
      <!-- Any other {% tagname -->
      <rule pattern="(\{%)(-?\s*)([a-zA-Z_]\w*)">
        <bygroups>
          <token type="CommentPreproc"/>
          <token type="Text"/>
          <token type="Keyword"/>
        </bygroups>
        <push state="tag"/>
      </rule>
      <!-- A lone "{" that starts no construct. -->
      <rule pattern="\{">
        <token type="Other"/>
      </rule>
    </state>
    <!-- Shared expression rules used by both "var" and "tag". -->
    <state name="varnames">
      <!-- Pipe into a filter: |name -->
      <rule pattern="(\|)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)">
        <bygroups>
          <token type="Operator"/>
          <token type="Text"/>
          <token type="NameFunction"/>
        </bygroups>
      </rule>
      <!-- "is [not] test" — the test name is a function. -->
      <rule pattern="(is)(\s+)(not)?(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)">
        <bygroups>
          <token type="Keyword"/>
          <token type="Text"/>
          <token type="Keyword"/>
          <token type="Text"/>
          <token type="NameFunction"/>
        </bygroups>
      </rule>
      <rule pattern="(?i)(true|false|none|null)\b">
        <token type="KeywordPseudo"/>
      </rule>
      <!-- FIX: the alternation was garbled ("isif", "importconstant",
           "sameasmatches") by lost "|" separators; restored the individual
           keywords is/if, import/constant, sameas/matches. -->
      <rule pattern="(in|not|and|b-and|or|b-or|b-xor|is|if|elseif|else|import|constant|defined|divisibleby|empty|even|iterable|odd|sameas|matches|starts\s+with|ends\s+with)\b">
        <token type="Keyword"/>
      </rule>
      <rule pattern="(loop|block|parent)\b">
        <token type="NameBuiltin"/>
      </rule>
      <rule pattern="(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*">
        <token type="NameVariable"/>
      </rule>
      <!-- Attribute access: .name -->
      <rule pattern="\.(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*">
        <token type="NameVariable"/>
      </rule>
      <rule pattern="\.[0-9]+">
        <token type="LiteralNumber"/>
      </rule>
      <!-- FIX: the double-quote characters of this pattern were raw inside a
           double-quoted attribute (not well-formed XML); escaped as &quot;. -->
      <rule pattern=":?&quot;(\\\\|\\&quot;|[^&quot;])*&quot;">
        <token type="LiteralStringDouble"/>
      </rule>
      <rule pattern=":?'(\\\\|\\'|[^'])*'">
        <token type="LiteralStringSingle"/>
      </rule>
      <rule pattern="([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)">
        <token type="Operator"/>
      </rule>
      <!-- FIX: exponent part was "(eE[+-][0-9])?", which matched the literal
           text "eE"; corrected to an optional [eE][+-]?digits exponent. -->
      <rule pattern="[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
        <token type="LiteralNumber"/>
      </rule>
    </state>
  </rules>
</lexer>