...
package chroma

import (
	"fmt"
	"strings"
)

// defaultOptions for tokenisation: start in the "root" state and
// normalise all EOLs to LF.
var (
	defaultOptions = &TokeniseOptions{
		State:    "root",
		EnsureLF: true,
	}
)

// Config for a lexer.
type Config struct {
	// Name of the lexer.
	Name string

	// Shortcuts for the lexer (e.g. "py" for "python").
	Aliases []string

	// File name globs matched by the lexer.
	Filenames []string

	// Secondary file name globs.
	AliasFilenames []string

	// MIME types matched by the lexer.
	MimeTypes []string

	// Regex matching is case-insensitive.
	CaseInsensitive bool

	// Regexes match all characters, including newlines.
	DotAll bool

	// Regexes do not match across lines ($ matches at end of line).
	// Defaults to multiline matching.
	NotMultiline bool

	// Make sure that the input ends with a newline. This is required
	// for some lexers that consume input linewise.
	EnsureNL bool

	// Priority of the lexer when multiple lexers match an input.
	// A zero value is treated as a priority of 1 (see PrioritisedLexers).
	Priority float32
}
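
// Illustrative example (not from the original source): a hypothetical
// lexer definition might fill in a Config like this. All values below
// are invented for the sketch:
//
//	cfg := &Config{
//		Name:      "Example",
//		Aliases:   []string{"example", "ex"},
//		Filenames: []string{"*.ex"},
//		MimeTypes: []string{"text/x-example"},
//		Priority:  1,
//	}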

// Token output to formatters.
type Token struct {
	Type  TokenType `json:"type"`
	Value string    `json:"value"`
}

func (t *Token) String() string   { return t.Value }
func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) }

// Clone returns a copy of the Token.
func (t *Token) Clone() Token {
	return *t
}

// EOF is returned by lexers at the end of input.
var EOF Token

// TokeniseOptions contains options for tokenisers.
type TokeniseOptions struct {
	// State to start tokenisation in. Defaults to "root".
	State string

	// Nested tokenisation.
	Nested bool

	// If true, all EOLs are converted to LF by replacing CRLF and CR
	// sequences.
	EnsureLF bool
}
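
// Illustrative example (not from the original source): callers can pass
// nil options to Tokenise, in which case lexer implementations are
// expected to fall back to defaultOptions above, or construct options
// explicitly:
//
//	opts := &TokeniseOptions{State: "root", EnsureLF: true}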

// A Lexer for tokenising source code.
type Lexer interface {
	// Config describing the features of the Lexer.
	Config() *Config

	// Tokenise returns an Iterator over tokens in text.
	Tokenise(options *TokeniseOptions, text string) (Iterator, error)
}
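
// Illustrative example (not from the original source), assuming Iterator
// (defined elsewhere in this package) is a func() Token that returns EOF
// once the input is exhausted:
//
//	it, err := lexer.Tokenise(nil, "package main")
//	if err != nil {
//		// handle the error
//	}
//	for tok := it(); tok != EOF; tok = it() {
//		fmt.Printf("%s: %q\n", tok.Type, tok.Value)
//	}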

// Lexers is a slice of lexers sortable by name.
type Lexers []Lexer

func (l Lexers) Len() int      { return len(l) }
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l Lexers) Less(i, j int) bool {
	return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name)
}
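
// Illustrative example (not from the original source): Lexers implements
// sort.Interface, so a collection can be ordered case-insensitively by
// name with the standard library:
//
//	var all Lexers // populated elsewhere
//	sort.Sort(all) // e.g. "C", "go", "Python" sort alphabetically,
//	               // regardless of case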

// PrioritisedLexers is a slice of lexers sortable by priority, highest
// first. A zero Priority sorts as 1.
type PrioritisedLexers []Lexer

func (l PrioritisedLexers) Len() int      { return len(l) }
func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l PrioritisedLexers) Less(i, j int) bool {
	ip := l[i].Config().Priority
	if ip == 0 {
		ip = 1
	}
	jp := l[j].Config().Priority
	if jp == 0 {
		jp = 1
	}
	return ip > jp
}
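
// Illustrative example (not from the original source): sorting
// PrioritisedLexers puts the highest-priority lexer first, with unset
// (zero) priorities treated as 1:
//
//	candidates := PrioritisedLexers{a, b, c} // hypothetical matches
//	sort.Sort(candidates)
//	best := candidates[0] // the lexer with the highest Priority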

// Analyser determines how appropriate a lexer is for the given text.
type Analyser interface {
	AnalyseText(text string) float32
}
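
// Illustrative example (not from the original source): a lexer can also
// implement Analyser to support content-based detection. The score is
// assumed here to be a confidence in [0.0, 1.0]; this file does not
// define the range:
//
//	func (l exampleLexer) AnalyseText(text string) float32 {
//		if strings.HasPrefix(text, "#!/bin/bash") {
//			return 1.0
//		}
//		return 0.0
//	}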