Source file src/github.com/alecthomas/chroma/regexp_test.go

Documentation: github.com/alecthomas/chroma

package chroma

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

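// TestNewlineAtEndOfFile verifies Config.EnsureNL: with it set, a trailing
// newline is appended before tokenising, so the `(\w+)(\n)` rule matches
// input that lacks one; without it, the same input cannot match.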
func TestNewlineAtEndOfFile(t *testing.T) {
	l := Coalesce(MustNewLexer(&Config{EnsureNL: true}, Rules{ // nolint: forbidigo
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err := l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{Keyword, "hello"}, {Whitespace, "\n"}}, it.Tokens())

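	// Without EnsureNL the rule requires a newline the input does not have,
	// so the unmatched text surfaces as an Error token.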
	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root": {
			{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err = l.Tokenise(nil, `hello`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{Error, "hello"}}, it.Tokens())
}

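// TestMatchingAtStart verifies that a `^`-anchored rule matches only at the
// start: the leading `-` matches `^-` and pushes the directive state, while
// the later `->` falls through to the Operator rule instead.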
func TestMatchingAtStart(t *testing.T) {
	l := Coalesce(MustNewLexer(&Config{}, Rules{ // nolint: forbidigo
		"root": {
			{`\s+`, Whitespace, nil},
			{`^-`, Punctuation, Push("directive")},
			{`->`, Operator, nil},
		},
		"directive": {
			{"module", NameEntity, Pop(1)},
		},
	}))
	it, err := l.Tokenise(nil, `-module ->`)
	assert.NoError(t, err)
	assert.Equal(t,
		[]Token{{Punctuation, "-"}, {NameEntity, "module"}, {Whitespace, " "}, {Operator, "->"}},
		it.Tokens())
}

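// TestEnsureLFOption verifies TokeniseOptions.EnsureLF: when set, CRLF and
// lone CR line endings are normalised to LF before tokenising.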
func TestEnsureLFOption(t *testing.T) {
	l := Coalesce(MustNewLexer(&Config{}, Rules{ // nolint: forbidigo
		"root": {
			{`(\w+)(\r?\n|\r)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err := l.Tokenise(&TokeniseOptions{
		State:    "root",
		EnsureLF: true,
	}, "hello\r\nworld\r")
	assert.NoError(t, err)
	assert.Equal(t, []Token{
		{Keyword, "hello"},
		{Whitespace, "\n"},
		{Keyword, "world"},
		{Whitespace, "\n"},
	}, it.Tokens())

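	// With EnsureLF unset, the original CRLF and CR endings pass through
	// unchanged.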
	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root": {
			{`(\w+)(\r?\n|\r)`, ByGroups(Keyword, Whitespace), nil},
		},
	}))
	it, err = l.Tokenise(&TokeniseOptions{
		State:    "root",
		EnsureLF: false,
	}, "hello\r\nworld\r")
	assert.NoError(t, err)
	assert.Equal(t, []Token{
		{Keyword, "hello"},
		{Whitespace, "\r\n"},
		{Keyword, "world"},
		{Whitespace, "\r"},
	}, it.Tokens())
}

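// TestEnsureLFFunc exercises the ensureLF helper directly against a table
// of CR, LF, and CRLF permutations.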
func TestEnsureLFFunc(t *testing.T) {
	tests := []struct{ in, out string }{
		{in: "", out: ""},
		{in: "abc", out: "abc"},
		{in: "\r", out: "\n"},
		{in: "a\r", out: "a\n"},
		{in: "\rb", out: "\nb"},
		{in: "a\rb", out: "a\nb"},
		{in: "\r\n", out: "\n"},
		{in: "a\r\n", out: "a\n"},
		{in: "\r\nb", out: "\nb"},
		{in: "a\r\nb", out: "a\nb"},
		{in: "\r\r\r\n\r", out: "\n\n\n\n"},
	}
	for _, test := range tests {
		out := ensureLF(test.in)
		assert.Equal(t, test.out, out)
	}
}

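// TestByGroupNames verifies ByGroupNames, which maps named capture groups
// to token types by group name.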
func TestByGroupNames(t *testing.T) {
	l := Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root": {
			{
				`(?<key>\w+)(?<operator>=)(?<value>\w+)`,
				ByGroupNames(map[string]Emitter{
					`key`:      String,
					`operator`: Operator,
					`value`:    String,
				}),
				nil,
			},
		},
	}))
	it, err := l.Tokenise(nil, `abc=123`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{String, `abc`}, {Operator, `=`}, {String, `123`}}, it.Tokens())

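	// A captured group absent from the map is emitted as an Error token.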
	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root": {
			{
				`(?<key>\w+)(?<operator>=)(?<value>\w+)`,
				ByGroupNames(map[string]Emitter{
					`key`:   String,
					`value`: String,
				}),
				nil,
			},
		},
	}))
	it, err = l.Tokenise(nil, `abc=123`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{String, `abc`}, {Error, `=`}, {String, `123`}}, it.Tokens())

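	// Text outside any named group is dropped; Coalesce then merges the
	// adjacent String tokens into one.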
	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root": {
			{
				`(?<key>\w+)=(?<value>\w+)`,
				ByGroupNames(map[string]Emitter{
					`key`:   String,
					`value`: String,
				}),
				nil,
			},
		},
	}))
	it, err = l.Tokenise(nil, `abc=123`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{String, `abc123`}}, it.Tokens())

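	// A group named `op` has no entry in the map (which maps `operator`),
	// so its text is emitted as an Error token.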
	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root": {
			{
				`(?<key>\w+)(?<op>=)(?<value>\w+)`,
				ByGroupNames(map[string]Emitter{
					`key`:      String,
					`operator`: Operator,
					`value`:    String,
				}),
				nil,
			},
		},
	}))
	it, err = l.Tokenise(nil, `abc=123`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{String, `abc`}, {Error, `=`}, {String, `123`}}, it.Tokens())

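	// With no named groups at all, the whole match is an Error token.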
	l = Coalesce(MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root": {
			{
				`\w+=\w+`,
				ByGroupNames(map[string]Emitter{
					`key`:      String,
					`operator`: Operator,
					`value`:    String,
				}),
				nil,
			},
		},
	}))
	it, err = l.Tokenise(nil, `abc=123`)
	assert.NoError(t, err)
	assert.Equal(t, []Token{{Error, `abc=123`}}, it.Tokens())
}
