package chroma

type remappingLexer struct {
	lexer  Lexer
	mapper func(Token) []Token
}

// RemappingLexer remaps each token produced by the wrapped Lexer to zero or
// more replacement tokens via the supplied mapper function.
func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer {
	return &remappingLexer{lexer, mapper}
}

func (r *remappingLexer) Config() *Config {
	return r.lexer.Config()
}

func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
	it, err := r.lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}
	var buffer []Token
	return func() Token {
		for {
			// Emit any remapped tokens buffered from the previous source token.
			if len(buffer) > 0 {
				t := buffer[0]
				buffer = buffer[1:]
				return t
			}
			// Pull the next token from the wrapped lexer and remap it.
			t := it()
			if t == EOF {
				return t
			}
			buffer = r.mapper(t)
		}
	}, nil
}
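
// exampleStripErrors is an illustrative sketch, not part of the original file:
// it shows how RemappingLexer can drop tokens entirely by returning an empty
// slice from the mapper. It assumes the Error token type defined elsewhere in
// this package and works with any Lexer the caller supplies.
func exampleStripErrors(lexer Lexer) Lexer {
	return RemappingLexer(lexer, func(t Token) []Token {
		if t.Type == Error {
			// Returning no tokens removes the Error token from the stream.
			return nil
		}
		// Pass every other token through unchanged.
		return []Token{t}
	})
}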

// TypeMapping defines rules for remapping token types: tokens of type From
// whose value is one of Words (or any value, if Words is empty) are remapped
// to type To.
type TypeMapping []struct {
	From, To TokenType
	Words    []string
}

// TypeRemappingLexer remaps the types of tokens produced by the underlying
// lexer according to the given TypeMapping.
func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
	// Build a lookup table: From type -> token value -> To type. The empty
	// string key acts as a wildcard for rules with no Words.
	lut := map[TokenType]map[string]TokenType{}
	for _, rt := range mapping {
		km, ok := lut[rt.From]
		if !ok {
			km = map[string]TokenType{}
			lut[rt.From] = km
		}
		if len(rt.Words) == 0 {
			km[""] = rt.To
		} else {
			for _, k := range rt.Words {
				km[k] = rt.To
			}
		}
	}
	return RemappingLexer(lexer, func(t Token) []Token {
		if k, ok := lut[t.Type]; ok {
			// Prefer an exact value match, falling back to the wildcard rule.
			if tt, ok := k[t.Value]; ok {
				t.Type = tt
			} else if tt, ok := k[""]; ok {
				t.Type = tt
			}
		}
		return []Token{t}
	})
}
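
// exampleRemapAliases is an illustrative sketch, not part of the original
// file: it remaps NameVariable tokens whose text is "defvaralias" to
// NameFunction, and remaps every Punctuation token to Text. The token type
// constants are the standard ones defined elsewhere in this package; the
// wrapped lexer is whatever Lexer the caller supplies.
func exampleRemapAliases(lexer Lexer) Lexer {
	mapping := TypeMapping{
		// With Words set, only tokens whose value matches are remapped.
		{From: NameVariable, To: NameFunction, Words: []string{"defvaralias"}},
		// With no Words, every token of type Punctuation is remapped.
		{From: Punctuation, To: Text, Words: nil},
	}
	return TypeRemappingLexer(lexer, mapping)
}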