...

Source file src/github.com/alecthomas/chroma/v2/lexers/lexers_test.go

Documentation: github.com/alecthomas/chroma/v2/lexers
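
The tests below exercise lexer lookup and tokenising from the lexers package together with the formatters and styles packages. As a rough sketch of how that API is typically used outside the tests (not part of this file; the "terminal" formatter, "monokai" style, and the tiny source string are illustrative choices, while lexers.Get, lexers.Fallback, Tokenise, formatters.Get, styles.Get, and Format come from the package APIs):

	package main

	import (
		"os"

		"github.com/alecthomas/chroma/v2/formatters"
		"github.com/alecthomas/chroma/v2/lexers"
		"github.com/alecthomas/chroma/v2/styles"
	)

	func main() {
		source := "package main\n\nfunc main() {}\n"

		// Lookup works by name, alias, or filename glob; a nil result means no match.
		lexer := lexers.Get("go")
		if lexer == nil {
			lexer = lexers.Fallback
		}

		// Tokenise the source, then render the token stream with a formatter and style.
		it, err := lexer.Tokenise(nil, source)
		if err != nil {
			panic(err)
		}
		if err := formatters.Get("terminal").Format(os.Stdout, styles.Get("monokai"), it); err != nil {
			panic(err)
		}
	}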

package lexers_test

import (
	"encoding/json"
	"io/ioutil"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"testing"

	assert "github.com/alecthomas/assert/v2"
	"github.com/alecthomas/repr"

	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/formatters"
	"github.com/alecthomas/chroma/v2/lexers"
	"github.com/alecthomas/chroma/v2/styles"
)

// TestCompileAllRegexes tokenises an empty string with every registered lexer,
// surfacing any regular expressions that fail to compile.
func TestCompileAllRegexes(t *testing.T) {
	for _, lexer := range lexers.GlobalLexerRegistry.Lexers {
		it, err := lexer.Tokenise(nil, "")
		assert.NoError(t, err, "%s failed", lexer.Config().Name)
		err = formatters.NoOp.Format(ioutil.Discard, styles.SwapOff, it)
		assert.NoError(t, err, "%s failed", lexer.Config().Name)
	}
}

// TestGet exercises lexer lookup by name, by alias, and by filename.
func TestGet(t *testing.T) {
	t.Run("ByName", func(t *testing.T) {
		assert.Equal(t, lexers.Get("xml"), lexers.GlobalLexerRegistry.Get("XML"))
	})
	t.Run("ByAlias", func(t *testing.T) {
		assert.Equal(t, lexers.Get("as"), lexers.GlobalLexerRegistry.Get("Actionscript"))
	})
	t.Run("ViaFilename", func(t *testing.T) {
		expected := lexers.Get("XML")
		actual := lexers.GlobalLexerRegistry.Get("test.svg")
		assert.Equal(t,
			repr.String(expected.Config(), repr.Indent("  ")),
			repr.String(actual.Config(), repr.Indent("  ")))
	})
}

// TestGlobs checks that every filename glob registered by a lexer is a valid
// pattern accepted by filepath.Match.
func TestGlobs(t *testing.T) {
	filename := "main.go"
	for _, lexer := range lexers.GlobalLexerRegistry.Lexers {
		config := lexer.Config()
		for _, glob := range config.Filenames {
			_, err := filepath.Match(glob, filename)
			assert.NoError(t, err)
		}
		for _, glob := range config.AliasFilenames {
			_, err := filepath.Match(glob, filename)
			assert.NoError(t, err)
		}
	}
}

// BenchmarkGet measures the cost of looking up a lexer by name.
func BenchmarkGet(b *testing.B) {
	for i := 0; i < b.N; i++ {
		lexers.Get("go")
	}
}

// FileTest tokenises the source in actualFilename with lexer and compares the
// resulting token stream against the JSON golden file expectedFilename. When
// the RECORD environment variable is "true", the golden file is regenerated
// from the current output instead.
func FileTest(t *testing.T, lexer chroma.Lexer, actualFilename, expectedFilename string) {
	t.Helper()
	t.Run(lexer.Config().Name+"/"+actualFilename, func(t *testing.T) {
		// Read and tokenise source text.
		actualText, err := ioutil.ReadFile(actualFilename)
		assert.NoError(t, err)
		actual, err := chroma.Tokenise(lexer, nil, string(actualText))
		assert.NoError(t, err)

		if os.Getenv("RECORD") == "true" {
			// Update the expected file with the generated output of this lexer.
			f, err := os.Create(expectedFilename)
			defer f.Close() // nolint: gosec
			assert.NoError(t, err)
			assert.NoError(t, formatters.JSON.Format(f, nil, chroma.Literator(actual...)))
		} else {
			// Read expected JSON into token slice.
			var expected []chroma.Token
			r, err := os.Open(expectedFilename)
			assert.NoError(t, err)
			err = json.NewDecoder(r).Decode(&expected)
			assert.NoError(t, err)

			// Equal?
			assert.Equal(t,
				repr.String(expected, repr.Indent(" ")),
				repr.String(actual, repr.Indent(" ")))
		}
	})
}

// TestLexers runs FileTest for every golden file pair under testdata.
// Test source files are in the form <key>.actual and validation data is in
// the form <key>.expected, either directly under testdata or in a
// subdirectory named after the lexer.
func TestLexers(t *testing.T) {
	files, err := ioutil.ReadDir("testdata")
	assert.NoError(t, err)

	for _, file := range files {
		// Skip text analysis test files.
		if file.Name() == "analysis" {
			continue
		}

		if file.IsDir() {
			dirname := filepath.Join("testdata", file.Name())
			lexer := lexers.Get(file.Name())
			assert.NotZero(t, lexer)

			subFiles, err := ioutil.ReadDir(dirname)
			assert.NoError(t, err)

			for _, subFile := range subFiles {
				ext := filepath.Ext(subFile.Name())[1:]
				if ext != "actual" {
					continue
				}

				filename := filepath.Join(dirname, subFile.Name())
				expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"

				lexer = chroma.Coalesce(lexer)
				FileTest(t, lexer, filename, expectedFilename)
			}
		} else {
			ext := filepath.Ext(file.Name())[1:]
			if ext != "actual" {
				continue
			}

			base := strings.Split(strings.TrimSuffix(file.Name(), filepath.Ext(file.Name())), ".")[0]
			lexer := lexers.Get(base)
			assert.NotZero(t, lexer, base)

			filename := filepath.Join("testdata", file.Name())
			expectedFilename := strings.TrimSuffix(filename, filepath.Ext(filename)) + ".expected"

			lexer = chroma.Coalesce(lexer)
			FileTest(t, lexer, filename, expectedFilename)
		}
	}
}

// FileTestAnalysis runs the lexer's AnalyseText over the contents of
// actualFilepath and compares the returned score with the value recorded in
// expectedFilepath. When RECORD is "true", the expected file is rewritten
// with the current score instead.
func FileTestAnalysis(t *testing.T, lexer chroma.Lexer, actualFilepath, expectedFilepath string) {
	t.Helper()
	t.Run(lexer.Config().Name+"/"+actualFilepath, func(t *testing.T) {
		expectedData, err := ioutil.ReadFile(expectedFilepath)
		assert.NoError(t, err)

		analyser, ok := lexer.(chroma.Analyser)
		assert.True(t, ok, "lexer %q does not set analyser", lexer.Config().Name)

		data, err := ioutil.ReadFile(actualFilepath)
		assert.NoError(t, err)

		actual := analyser.AnalyseText(string(data))

		if os.Getenv("RECORD") == "true" {
			// Update the expected file with the generated output of this lexer.
			f, err := os.Create(expectedFilepath)
			defer f.Close() // nolint: gosec
			assert.NoError(t, err)

			_, err = f.WriteString(strconv.FormatFloat(float64(actual), 'f', -1, 32))
			assert.NoError(t, err)
		} else {
			expected, err := strconv.ParseFloat(strings.TrimSpace(string(expectedData)), 32)
			assert.NoError(t, err)

			assert.Equal(t, float32(expected), actual)
		}
	})
}

// TestLexersTextAnalyser runs FileTestAnalysis for every *.actual file under
// testdata/analysis.
func TestLexersTextAnalyser(t *testing.T) {
	files, err := filepath.Glob("testdata/analysis/*.actual")
	assert.NoError(t, err)

	for _, actualFilepath := range files {
		filename := filepath.Base(actualFilepath)
		baseFilename := strings.TrimSuffix(filename, filepath.Ext(filename))
		lexerName := strings.Split(baseFilename, ".")[0]

		lexer := lexers.Get(lexerName)
		assert.NotZero(t, lexer, "no lexer found for name %q", lexerName)

		expectedFilepath := "testdata/analysis/" + baseFilename + ".expected"

		FileTestAnalysis(t, lexer, actualFilepath, expectedFilepath)
	}
}
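
Both FileTest and FileTestAnalysis rewrite their .expected golden files when the RECORD environment variable is set to "true"; running the tests with that variable set (for example, something like RECORD=true go test ./lexers from the repository root, depending on how the module is checked out) regenerates the recorded outputs for any new or changed .actual inputs.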
