package lexers_test

import (
	"encoding/json"
	"io/ioutil"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/formatters"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/lexers/a"
	"github.com/alecthomas/chroma/lexers/x"
	"github.com/alecthomas/chroma/styles"
)

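// TestCompileAllRegexes checks that every registered lexer can tokenise empty
// input and that the resulting iterator can be formatted without error.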
func TestCompileAllRegexes(t *testing.T) {
	for _, lexer := range lexers.Registry.Lexers {
		it, err := lexer.Tokenise(nil, "")
		assert.NoError(t, err, "%s failed", lexer.Config().Name)
		err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
		assert.NoError(t, err, "%s failed", lexer.Config().Name)
	}
}

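// TestGet verifies lexer lookup by name, by alias, and via filename matching.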
func TestGet(t *testing.T) {
	t.Run("ByName", func(t *testing.T) {
		assert.Equal(t, lexers.Get("xml"), x.XML)
	})
	t.Run("ByAlias", func(t *testing.T) {
		assert.Equal(t, lexers.Get("as"), a.Actionscript)
	})
	t.Run("ViaFilename", func(t *testing.T) {
		assert.Equal(t, lexers.Get("svg"), x.XML)
	})
}

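// TestGlobs ensures that every filename glob registered by a lexer is a
// well-formed pattern accepted by filepath.Match.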
func TestGlobs(t *testing.T) {
	filename := "main.go"
	for _, lexer := range lexers.Registry.Lexers {
		config := lexer.Config()
		for _, glob := range config.Filenames {
			_, err := filepath.Match(glob, filename)
			require.NoError(t, err)
		}
		for _, glob := range config.AliasFilenames {
			_, err := filepath.Match(glob, filename)
			require.NoError(t, err)
		}
	}
}

func BenchmarkGet(b *testing.B) {
	for i := 0; i < b.N; i++ {
		lexers.Get("go")
	}
}

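// FileTest tokenises the source in actualFilename with lexer and compares the
// tokens against the JSON stored in expectedFilename. Run the tests with
// RECORD=true to regenerate the expected output instead.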
func FileTest(t *testing.T, lexer chroma.Lexer, actualFilename, expectedFilename string) {
	t.Helper()
	t.Run(lexer.Config().Name+"/"+actualFilename, func(t *testing.T) {
		// Read the source file and tokenise it.
		actualText, err := ioutil.ReadFile(actualFilename)
		assert.NoError(t, err)
		actual, err := chroma.Tokenise(lexer, nil, string(actualText))
		assert.NoError(t, err)

		if os.Getenv("RECORD") == "true" {
			// Update the expected file with the output of this lexer.
			f, err := os.Create(expectedFilename)
			assert.NoError(t, err)
			defer f.Close()
			assert.NoError(t, formatters.JSON.Format(f, nil, chroma.Literator(actual...)))
		} else {
			// Read the expected JSON into a token slice and compare.
			var expected []chroma.Token
			r, err := os.Open(expectedFilename)
			assert.NoError(t, err)
			defer r.Close()
			err = json.NewDecoder(r).Decode(&expected)
			assert.NoError(t, err)

			assert.Equal(t, expected, actual)
		}
	})
}

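// TestLexers runs each lexer against the *.actual fixtures under testdata and
// compares the generated tokens with the corresponding *.expected files.
// A fixture is either a single file named after its lexer or a directory of
// files for lexers with multiple test cases.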
func TestLexers(t *testing.T) {
	files, err := ioutil.ReadDir("testdata")
	assert.NoError(t, err)

	for _, file := range files {
		// Text analysis fixtures are exercised by TestLexersTextAnalyser.
		if file.Name() == "analysis" {
			continue
		}

		if file.IsDir() {
			// A directory holds multiple fixtures for the lexer named after it.
			dirname := filepath.Join("testdata", file.Name())
			lexer := lexers.Get(file.Name())
			assert.NotNil(t, lexer)

			subFiles, err := ioutil.ReadDir(dirname)
			assert.NoError(t, err)

			for _, subFile := range subFiles {
				ext := filepath.Ext(subFile.Name())[1:]
				if ext != "actual" {
					continue
				}

				filename := filepath.Join(dirname, subFile.Name())
				expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"

				lexer = chroma.Coalesce(lexer)
				FileTest(t, lexer, filename, expectedFilename)
			}
		} else {
			// A single file is named after the lexer it exercises.
			ext := filepath.Ext(file.Name())[1:]
			if ext != "actual" {
				continue
			}

			base := strings.Split(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())), ".")[0]
			lexer := lexers.Get(base)
			assert.NotNil(t, lexer)

			filename := filepath.Join("testdata", file.Name())
			expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"

			lexer = chroma.Coalesce(lexer)
			FileTest(t, lexer, filename, expectedFilename)
		}
	}
}

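// FileTestAnalysis feeds the contents of actualFilepath to the lexer's text
// analyser and compares the returned score with the value stored in
// expectedFilepath. Run with RECORD=true to regenerate the expected score.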
func FileTestAnalysis(t *testing.T, lexer chroma.Lexer, actualFilepath, expectedFilepath string) {
	t.Helper()
	t.Run(lexer.Config().Name+"/"+actualFilepath, func(t *testing.T) {
		expectedData, err := ioutil.ReadFile(expectedFilepath)
		assert.NoError(t, err)

		analyser, ok := lexer.(chroma.Analyser)
		assert.True(t, ok, "lexer %q does not set analyser", lexer.Config().Name)

		data, err := ioutil.ReadFile(actualFilepath)
		assert.NoError(t, err)

		actual := analyser.AnalyseText(string(data))

		if os.Getenv("RECORD") == "true" {
			// Update the expected file with the score produced by this analyser.
			f, err := os.Create(expectedFilepath)
			assert.NoError(t, err)
			defer f.Close()

			_, err = f.WriteString(strconv.FormatFloat(float64(actual), 'f', -1, 32))
			assert.NoError(t, err)
		} else {
			expected, err := strconv.ParseFloat(strings.TrimSpace(string(expectedData)), 32)
			assert.NoError(t, err)

			assert.Equal(t, float32(expected), actual)
		}
	})
}

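// TestLexersTextAnalyser matches each fixture in testdata/analysis to its lexer
// by filename and checks the analyser's score against the *.expected value.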
func TestLexersTextAnalyser(t *testing.T) {
	files, err := filepath.Glob("testdata/analysis/*.actual")
	assert.NoError(t, err)

	for _, actualFilepath := range files {
		filename := filepath.Base(actualFilepath)
		baseFilename := strings.TrimSuffix(filename, filepath.Ext(filename))
		lexerName := strings.Split(baseFilename, ".")[0]

		lexer := lexers.Get(lexerName)
		assert.NotNil(t, lexer, "no lexer found for name %q", lexerName)

		expectedFilepath := "testdata/analysis/" + baseFilename + ".expected"

		FileTestAnalysis(t, lexer, actualFilepath, expectedFilepath)
	}
}