package h

import (
	"strings"

	. "github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers/internal"
)

// HTTP lexer.
var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLazyLexer(
	&Config{
		Name:         "HTTP",
		Aliases:      []string{"http"},
		Filenames:    []string{},
		MimeTypes:    []string{},
		NotMultiline: true,
		DotAll:       true,
	},
	httpRules,
)))
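
// For reference, the shape of input this lexer targets: a request or status
// line, header lines, a blank line, then the message body, e.g.
//
//	GET /api/users HTTP/1.1
//	Host: example.com
//	Content-Type: application/json
//
//	{"active": true}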

// httpRules defines the grammar. The root state matches either a request line
// or a status line and pushes "headers"; a blank line in "headers" pushes
// "content", which consumes the rest of the message as the body.
func httpRules() Rules {
	return Rules{
		"root": {
			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
			{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
		},
		"headers": {
			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
			{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
			{`\r?\n`, Text, Push("content")},
		},
		"content": {
			{`.+`, EmitterFunc(httpContentBlock), nil},
		},
	}
}
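
// As an example of the first root rule: "GET /index.html HTTP/1.1" splits into
// NameFunction ("GET"), Text (" "), NameNamespace ("/index.html"), Text (" "),
// KeywordReserved ("HTTP"), Operator ("/") and LiteralNumber ("1.1"), with the
// trailing newline emitted as Text.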

// httpContentBlock emits the entire message body as a single Generic token;
// groups[0] is the full match. The httpBodyContentTyper wrapper below may
// re-tokenise that body with a lexer chosen from the Content-Type header.
func httpContentBlock(groups []string, state *LexerState) Iterator {
	tokens := []Token{
		{Generic, groups[0]},
	}
	return Literator(tokens...)
}
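
// Literator wraps a fixed slice of tokens in an Iterator that yields them in
// order and then returns EOF; all of the emitter functions here rely on it.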

// httpHeaderBlock emits the tokens for a single "Name: value" header line.
// groups[0] is the full match, so emission starts at groups[1].
func httpHeaderBlock(groups []string, state *LexerState) Iterator {
	tokens := []Token{
		{Name, groups[1]},
		{Text, groups[2]},
		{Operator, groups[3]},
		{Text, groups[4]},
		{Literal, groups[5]},
		{Text, groups[6]},
	}
	return Literator(tokens...)
}
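
// For example, "Content-Type: text/html\n" yields Name ("Content-Type"),
// Text (""), Operator (":"), Text (" "), Literal ("text/html") and Text ("\n").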

// httpContinuousHeaderBlock handles obsolete header line folding: a line that
// begins with whitespace continues the value of the preceding header.
func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator {
	tokens := []Token{
		{Text, groups[1]},
		{Literal, groups[2]},
		{Text, groups[3]},
	}
	return Literator(tokens...)
}
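
// For example, in
//
//	X-Long-Header: first part
//	    second part
//
// the indented second line is matched here, emitting the indent as Text and
// "second part" as Literal.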

// httpBodyContentTypeLexer wraps the plain HTTP lexer so that the message body
// is highlighted with a lexer matching the declared Content-Type, when one is
// registered.
func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} }

type httpBodyContentTyper struct{ Lexer }
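
// Because httpBodyContentTyper embeds Lexer, every Lexer method except the
// Tokenise override below is forwarded to the wrapped lexer unchanged.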

// Tokenise runs the wrapped lexer and watches the header tokens for a
// Content-Type. When the body arrives as a single Generic token, it is
// re-tokenised with the lexer registered for that MIME type, if any.
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
	var contentType string
	var isContentType bool
	var subIterator Iterator

	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}

	return func() Token {
		token := it()

		if token == EOF {
			// The wrapped lexer is exhausted; keep draining any delegated
			// body lexer before signalling the real end of the stream.
			if subIterator != nil {
				return subIterator()
			}
			return EOF
		}

		switch {
		case token.Type == Name && strings.ToLower(token.Value) == "content-type":
			isContentType = true
		case token.Type == Literal && isContentType:
			// Capture the media type, dropping parameters such as
			// "; charset=utf-8".
			isContentType = false
			contentType = strings.TrimSpace(token.Value)
			pos := strings.Index(contentType, ";")
			if pos > 0 {
				contentType = strings.TrimSpace(contentType[:pos])
			}
		case token.Type == Generic && contentType != "":
			lexer := internal.MatchMimeType(contentType)

			// A structured suffix such as application/calendar+xml can be
			// treated as application/xml if there is no more specific match.
			if lexer == nil && strings.Contains(contentType, "+") {
				slashPos := strings.Index(contentType, "/")
				plusPos := strings.LastIndex(contentType, "+")
				contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
				lexer = internal.MatchMimeType(contentType)
			}

			if lexer == nil {
				token.Type = Text
			} else {
				subIterator, err = lexer.Tokenise(nil, token.Value)
				if err != nil {
					panic(err)
				}
				// Start emitting the delegated body tokens immediately;
				// returning EOF here would end the stream for the caller and
				// drop the body. Subsequent calls drain subIterator through
				// the EOF branch above.
				return subIterator()
			}
		}
		return token
	}, nil
}
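
// A minimal usage sketch (not part of this file): tokenise a raw response and
// print its tokens; with the JSON lexer registered, the body below would be
// delegated to it. Assumes fmt is imported by the caller.
//
//	it, err := HTTP.Tokenise(nil, "HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n{\"ok\": true}\r\n")
//	if err != nil {
//		panic(err)
//	}
//	for t := it(); t != EOF; t = it() {
//		fmt.Printf("%s %q\n", t.Type, t.Value)
//	}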