...
1 package lexer
2
3 import "fmt"
4
// TokenType identifies the lexical category of a Token produced by this
// lexer package.
type TokenType int

// The set of token types the lexer can emit. Values are assigned via iota,
// so EOF is the zero value.
//
// NOTE(review): the names (Any, Super, Single, Not, Range*, Terms*) suggest
// a glob-style pattern syntax (e.g. Any='*', Single='?', TermsOpen='{') —
// confirm against the lexer implementation before relying on that mapping.
const (
	EOF TokenType = iota // end of input
	Error                // lexing failure
	Text
	Char
	Any
	Super
	Single
	Not
	Separator
	RangeOpen
	RangeClose
	RangeLo
	RangeHi
	RangeBetween
	TermsOpen
	TermsClose
)
25
26 func (tt TokenType) String() string {
27 switch tt {
28 case EOF:
29 return "eof"
30
31 case Error:
32 return "error"
33
34 case Text:
35 return "text"
36
37 case Char:
38 return "char"
39
40 case Any:
41 return "any"
42
43 case Super:
44 return "super"
45
46 case Single:
47 return "single"
48
49 case Not:
50 return "not"
51
52 case Separator:
53 return "separator"
54
55 case RangeOpen:
56 return "range_open"
57
58 case RangeClose:
59 return "range_close"
60
61 case RangeLo:
62 return "range_lo"
63
64 case RangeHi:
65 return "range_hi"
66
67 case RangeBetween:
68 return "range_between"
69
70 case TermsOpen:
71 return "terms_open"
72
73 case TermsClose:
74 return "terms_close"
75
76 default:
77 return "undef"
78 }
79 }
80
// Token is a single lexeme produced by the lexer: its lexical category
// together with the raw text it carries.
type Token struct {
	Type TokenType // lexical category of this token
	Raw  string    // the token's raw textual content
}
85
86 func (t Token) String() string {
87 return fmt.Sprintf("%v<%q>", t.Type, t.Raw)
88 }
89
View as plain text