Package chroma

import "github.com/alecthomas/chroma"

Overview

Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-coloured text, etc.

Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles.

For more information, go here: https://github.com/alecthomas/chroma

Index

Constants
Variables
func SplitTokensIntoLines(tokens []Token) (out [][]Token)
func Stringify(tokens ...Token) string
func Words(prefix, suffix string, words ...string) string
type Analyser
type Colour
    func MustParseColour(colour string) Colour
    func NewColour(r, g, b uint8) Colour
    func ParseColour(colour string) Colour
    func (c Colour) Blue() uint8
    func (c Colour) Brighten(factor float64) Colour
    func (c Colour) BrightenOrDarken(factor float64) Colour
    func (c Colour) Brightness() float64
    func (c Colour) Distance(e2 Colour) float64
    func (c Colour) GoString() string
    func (c Colour) Green() uint8
    func (c Colour) IsSet() bool
    func (c Colour) Red() uint8
    func (c Colour) String() string
type Colours
    func (c Colours) Len() int
    func (c Colours) Less(i, j int) bool
    func (c Colours) Swap(i, j int)
type CompiledRule
type CompiledRules
type Config
type Emitter
    func ByGroupNames(emitters map[string]Emitter) Emitter
    func ByGroups(emitters ...Emitter) Emitter
    func Using(lexer Lexer) Emitter
    func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter
    func UsingSelf(stateName string) Emitter
type EmitterFunc
    func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator
type Formatter
    func RecoveringFormatter(formatter Formatter) Formatter
type FormatterFunc
    func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error)
type Iterator
    func Concaterator(iterators ...Iterator) Iterator
    func Literator(tokens ...Token) Iterator
    func (i Iterator) Tokens() []Token
type Lexer
    func Coalesce(lexer Lexer) Lexer
    func DelegatingLexer(root Lexer, language Lexer) Lexer
    func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer
    func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer
type LexerMutator
type LexerState
    func (l *LexerState) Get(key interface{}) interface{}
    func (l *LexerState) Iterator() Token
    func (l *LexerState) Set(key interface{}, value interface{})
type Lexers
    func (l Lexers) Len() int
    func (l Lexers) Less(i, j int) bool
    func (l Lexers) Swap(i, j int)
type Mutator
    func Combined(states ...string) Mutator
type MutatorFunc
    func Mutators(modifiers ...Mutator) MutatorFunc
    func Pop(n int) MutatorFunc
    func Push(states ...string) MutatorFunc
    func (m MutatorFunc) Mutate(state *LexerState) error
type PrioritisedLexers
    func (l PrioritisedLexers) Len() int
    func (l PrioritisedLexers) Less(i, j int) bool
    func (l PrioritisedLexers) Swap(i, j int)
type RegexLexer
    func MustNewLazyLexer(config *Config, rulesFunc func() Rules) *RegexLexer
    func MustNewLexer(config *Config, rules Rules) *RegexLexer
    func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error)
    func NewLexer(config *Config, rules Rules) (*RegexLexer, error)
    func (r *RegexLexer) AnalyseText(text string) float32
    func (r *RegexLexer) Config() *Config
    func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer
    func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error)
    func (r *RegexLexer) Trace(trace bool) *RegexLexer
type Rule
    func Default(mutators ...Mutator) Rule
    func Include(state string) Rule
type Rules
    func (r Rules) Clone() Rules
    func (r Rules) Merge(rules Rules) Rules
    func (r Rules) Rename(oldRule, newRule string) Rules
type Style
    func MustNewStyle(name string, entries StyleEntries) *Style
    func NewStyle(name string, entries StyleEntries) (*Style, error)
    func (s *Style) Builder() *StyleBuilder
    func (s *Style) Get(ttype TokenType) StyleEntry
    func (s *Style) Has(ttype TokenType) bool
    func (s *Style) Types() []TokenType
type StyleBuilder
    func NewStyleBuilder(name string) *StyleBuilder
    func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder
    func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder
    func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder
    func (s *StyleBuilder) Build() (*Style, error)
    func (s *StyleBuilder) Get(ttype TokenType) StyleEntry
type StyleEntries
type StyleEntry
    func ParseStyleEntry(entry string) (StyleEntry, error)
    func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry
    func (s StyleEntry) IsZero() bool
    func (s StyleEntry) String() string
    func (s StyleEntry) Sub(e StyleEntry) StyleEntry
type Token
    func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, error)
    func (t *Token) Clone() Token
    func (t *Token) GoString() string
    func (t *Token) String() string
type TokenType
    func (t TokenType) Category() TokenType
    func (t TokenType) Emit(groups []string, _ *LexerState) Iterator
    func (t TokenType) InCategory(other TokenType) bool
    func (t TokenType) InSubCategory(other TokenType) bool
    func (t TokenType) MarshalJSON() ([]byte, error)
    func (t TokenType) Parent() TokenType
    func (i TokenType) String() string
    func (t TokenType) SubCategory() TokenType
    func (t *TokenType) UnmarshalJSON(data []byte) error
type TokeniseOptions
type Trilean
    func (t Trilean) Prefix(s string) string
    func (t Trilean) String() string
type TypeMapping

Package files

coalesce.go colour.go delegate.go doc.go formatter.go iterator.go lexer.go mutators.go regexp.go remap.go style.go tokentype_string.go types.go

Constants

Aliases.

const (
    Whitespace = TextWhitespace

    Date = LiteralDate

    String          = LiteralString
    StringAffix     = LiteralStringAffix
    StringBacktick  = LiteralStringBacktick
    StringChar      = LiteralStringChar
    StringDelimiter = LiteralStringDelimiter
    StringDoc       = LiteralStringDoc
    StringDouble    = LiteralStringDouble
    StringEscape    = LiteralStringEscape
    StringHeredoc   = LiteralStringHeredoc
    StringInterpol  = LiteralStringInterpol
    StringOther     = LiteralStringOther
    StringRegex     = LiteralStringRegex
    StringSingle    = LiteralStringSingle
    StringSymbol    = LiteralStringSymbol

    Number            = LiteralNumber
    NumberBin         = LiteralNumberBin
    NumberFloat       = LiteralNumberFloat
    NumberHex         = LiteralNumberHex
    NumberInteger     = LiteralNumberInteger
    NumberIntegerLong = LiteralNumberIntegerLong
    NumberOct         = LiteralNumberOct
)

Variables

ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values.

var ANSI2RGB = map[string]string{
    "#ansiblack":     "000000",
    "#ansidarkred":   "7f0000",
    "#ansidarkgreen": "007f00",
    "#ansibrown":     "7f7fe0",
    "#ansidarkblue":  "00007f",
    "#ansipurple":    "7f007f",
    "#ansiteal":      "007f7f",
    "#ansilightgray": "e5e5e5",

    "#ansidarkgray":  "555555",
    "#ansired":       "ff0000",
    "#ansigreen":     "00ff00",
    "#ansiyellow":    "ffff00",
    "#ansiblue":      "0000ff",
    "#ansifuchsia":   "ff00ff",
    "#ansiturquoise": "00ffff",
    "#ansiwhite":     "ffffff",

    "#black":     "000000",
    "#darkred":   "7f0000",
    "#darkgreen": "007f00",
    "#brown":     "7f7fe0",
    "#darkblue":  "00007f",
    "#purple":    "7f007f",
    "#teal":      "007f7f",
    "#lightgray": "e5e5e5",

    "#darkgray":  "555555",
    "#red":       "ff0000",
    "#green":     "00ff00",
    "#yellow":    "ffff00",
    "#blue":      "0000ff",
    "#fuchsia":   "ff00ff",
    "#turquoise": "00ffff",
    "#white":     "ffffff",
}
var (
    StandardTypes = map[TokenType]string{
        Background:       "bg",
        PreWrapper:       "chroma",
        Line:             "line",
        LineNumbers:      "ln",
        LineNumbersTable: "lnt",
        LineHighlight:    "hl",
        LineTable:        "lntable",
        LineTableTD:      "lntd",
        CodeLine:         "cl",
        Text:             "",
        Whitespace:       "w",
        Error:            "err",
        Other:            "x",

        Keyword:            "k",
        KeywordConstant:    "kc",
        KeywordDeclaration: "kd",
        KeywordNamespace:   "kn",
        KeywordPseudo:      "kp",
        KeywordReserved:    "kr",
        KeywordType:        "kt",

        Name:                 "n",
        NameAttribute:        "na",
        NameBuiltin:          "nb",
        NameBuiltinPseudo:    "bp",
        NameClass:            "nc",
        NameConstant:         "no",
        NameDecorator:        "nd",
        NameEntity:           "ni",
        NameException:        "ne",
        NameFunction:         "nf",
        NameFunctionMagic:    "fm",
        NameProperty:         "py",
        NameLabel:            "nl",
        NameNamespace:        "nn",
        NameOther:            "nx",
        NameTag:              "nt",
        NameVariable:         "nv",
        NameVariableClass:    "vc",
        NameVariableGlobal:   "vg",
        NameVariableInstance: "vi",
        NameVariableMagic:    "vm",

        Literal:     "l",
        LiteralDate: "ld",

        String:          "s",
        StringAffix:     "sa",
        StringBacktick:  "sb",
        StringChar:      "sc",
        StringDelimiter: "dl",
        StringDoc:       "sd",
        StringDouble:    "s2",
        StringEscape:    "se",
        StringHeredoc:   "sh",
        StringInterpol:  "si",
        StringOther:     "sx",
        StringRegex:     "sr",
        StringSingle:    "s1",
        StringSymbol:    "ss",

        Number:            "m",
        NumberBin:         "mb",
        NumberFloat:       "mf",
        NumberHex:         "mh",
        NumberInteger:     "mi",
        NumberIntegerLong: "il",
        NumberOct:         "mo",

        Operator:     "o",
        OperatorWord: "ow",

        Punctuation: "p",

        Comment:            "c",
        CommentHashbang:    "ch",
        CommentMultiline:   "cm",
        CommentPreproc:     "cp",
        CommentPreprocFile: "cpf",
        CommentSingle:      "c1",
        CommentSpecial:     "cs",

        Generic:           "g",
        GenericDeleted:    "gd",
        GenericEmph:       "ge",
        GenericError:      "gr",
        GenericHeading:    "gh",
        GenericInserted:   "gi",
        GenericOutput:     "go",
        GenericPrompt:     "gp",
        GenericStrong:     "gs",
        GenericSubheading: "gu",
        GenericTraceback:  "gt",
        GenericUnderline:  "gl",
    }
)

func SplitTokensIntoLines

func SplitTokensIntoLines(tokens []Token) (out [][]Token)

SplitTokensIntoLines splits tokens at newlines, returning the tokens grouped into one slice per line.

func Stringify

func Stringify(tokens ...Token) string

Stringify returns the raw string for a set of tokens.

func Words

func Words(prefix, suffix string, words ...string) string

Words creates a regex that matches any of the given literal words.
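
For example, a rule pattern matching any of several literal keywords as whole words might be built like this (a sketch; the keyword list is illustrative):

pattern := Words(`\b`, `\b`, "func", "return", "go")
// pattern can then be used as the Pattern of a Rule.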

type Analyser

Analyser determines how appropriate this lexer is for the given text.

type Analyser interface {
    AnalyseText(text string) float32
}

type Colour

Colour represents an RGB colour.

type Colour int32

func MustParseColour

func MustParseColour(colour string) Colour

MustParseColour is like ParseColour except it panics if the colour is in an invalid format.

func NewColour

func NewColour(r, g, b uint8) Colour

NewColour creates a Colour directly from RGB values.

func ParseColour

func ParseColour(colour string) Colour

ParseColour parses a colour in the forms #rgb, #rrggbb, #ansi<colour>, or #<colour>. It returns an "unset" colour if the input is invalid.
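
For example (a sketch; the colour value is arbitrary):

c := ParseColour("#ff8000")
if c.IsSet() {
	fmt.Println(c.Red(), c.Green(), c.Blue()) // 255 128 0
}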

func (Colour) Blue

func (c Colour) Blue() uint8

Blue component of colour.

func (Colour) Brighten

func (c Colour) Brighten(factor float64) Colour

Brighten returns a copy of this colour with its brightness adjusted.

If factor is negative, the colour is darkened.

Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html).

func (Colour) BrightenOrDarken

func (c Colour) BrightenOrDarken(factor float64) Colour

BrightenOrDarken brightens a colour if its brightness is < 0.5, or darkens it if its brightness is > 0.5.

func (Colour) Brightness

func (c Colour) Brightness() float64

Brightness of the colour (roughly) in the range 0.0 to 1.0.

func (Colour) Distance

func (c Colour) Distance(e2 Colour) float64

Distance between this colour and another.

This uses the approach described here (https://www.compuphase.com/cmetric.htm). This is not as accurate as LAB, et al., but is *vastly* simpler and sufficient for our needs.

func (Colour) GoString

func (c Colour) GoString() string

func (Colour) Green

func (c Colour) Green() uint8

Green component of colour.

func (Colour) IsSet

func (c Colour) IsSet() bool

IsSet returns true if the colour is set.

func (Colour) Red

func (c Colour) Red() uint8

Red component of colour.

func (Colour) String

func (c Colour) String() string

type Colours

Colours is an orderable set of colours.

type Colours []Colour

func (Colours) Len

func (c Colours) Len() int

func (Colours) Less

func (c Colours) Less(i, j int) bool

func (Colours) Swap

func (c Colours) Swap(i, j int)

type CompiledRule

A CompiledRule is a Rule with a pre-compiled regex.

Note that regular expressions are lazily compiled on first use of the lexer.

type CompiledRule struct {
    Rule
    Regexp *regexp2.Regexp
    // contains filtered or unexported fields
}

type CompiledRules

CompiledRules maps each state name to the sequence of compiled rules in that state.

type CompiledRules map[string][]*CompiledRule

type Config

Config for a lexer.

type Config struct {
    // Name of the lexer.
    Name string

    // Shortcuts for the lexer
    Aliases []string

    // File name globs
    Filenames []string

    // Secondary file name globs
    AliasFilenames []string

    // MIME types
    MimeTypes []string

    // Regex matching is case-insensitive.
    CaseInsensitive bool

    // Regex matches all characters.
    DotAll bool

    // Regex does not match across lines ($ matches EOL).
    //
    // Defaults to multiline.
    NotMultiline bool

    // Make sure that the input ends with a newline. This
    // is required for some lexers that consume input linewise.
    EnsureNL bool

    // Priority of lexer.
    //
    // If this is 0 it will be treated as a default of 1.
    Priority float32
}

type Emitter

An Emitter takes group matches and returns tokens.

type Emitter interface {
    // Emit tokens for the given regex groups.
    Emit(groups []string, state *LexerState) Iterator
}

func ByGroupNames

func ByGroupNames(emitters map[string]Emitter) Emitter

ByGroupNames emits a token for each named matching group in the rule's regex.

func ByGroups

func ByGroups(emitters ...Emitter) Emitter

ByGroups emits a token for each matching group in the rule's regex.

func Using

func Using(lexer Lexer) Emitter

Using returns an Emitter that uses a given Lexer for parsing and emitting.

func UsingByGroup

func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter

UsingByGroup emits tokens for the matched groups in the regex using a "sublexer". Used when lexing code blocks where the name of a sublexer is contained within the block, for example on a Markdown text block or SQL language block.

The sublexer will be retrieved using sublexerGetFunc (typically internal.Get), using the captured value from the matched sublexerNameGroup.

If sublexerGetFunc returns a non-nil lexer for the captured sublexerNameGroup, then tokens for the matched codeGroup will be emitted using the retrieved lexer. Otherwise, if the sublexer is nil, then tokens will be emitted from the passed emitter.

Example:

var Markdown = internal.Register(MustNewLexer(
	&Config{
		Name:      "markdown",
		Aliases:   []string{"md", "mkd"},
		Filenames: []string{"*.md", "*.mkd", "*.markdown"},
		MimeTypes: []string{"text/x-markdown"},
	},
	Rules{
		"root": {
			{"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
				UsingByGroup(
					internal.Get,
					2, 4,
					String, String, String, Text, String,
				),
				nil,
			},
		},
	},
))

See lexers/m/markdown.go for the complete example.

Note: this panics if the number of emitters does not equal the number of matched groups in the regex.

func UsingSelf

func UsingSelf(stateName string) Emitter

UsingSelf is like Using, but uses the current Lexer.

type EmitterFunc

EmitterFunc is a function that is an Emitter.

type EmitterFunc func(groups []string, state *LexerState) Iterator

func (EmitterFunc) Emit

func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator

Emit tokens for groups.

type Formatter

A Formatter for Chroma lexers.

type Formatter interface {
    // Format returns a formatting function for tokens.
    //
    // If the iterator panics, the Formatter should recover.
    Format(w io.Writer, style *Style, iterator Iterator) error
}

func RecoveringFormatter

func RecoveringFormatter(formatter Formatter) Formatter

RecoveringFormatter wraps a formatter with panic recovery.

type FormatterFunc

A FormatterFunc is a Formatter implemented as a function.

Guards against iterator panics.

type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error

func (FormatterFunc) Format

func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error)

type Iterator

An Iterator across tokens.

EOF will be returned at the end of the Token stream.

If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.

type Iterator func() Token
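
A typical consumption loop drains the Iterator until EOF (a sketch; "it" is any Iterator):

for token := it(); token != EOF; token = it() {
	fmt.Print(token.Value)
}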

func Concaterator

func Concaterator(iterators ...Iterator) Iterator

Concaterator concatenates tokens from a series of iterators.

func Literator

func Literator(tokens ...Token) Iterator

Literator converts a sequence of literal Tokens into an Iterator.

func (Iterator) Tokens

func (i Iterator) Tokens() []Token

Tokens consumes all tokens from the iterator and returns them as a slice.

type Lexer

A Lexer for tokenising source code.

type Lexer interface {
    // Config describing the features of the Lexer.
    Config() *Config
    // Tokenise returns an Iterator over tokens in text.
    Tokenise(options *TokeniseOptions, text string) (Iterator, error)
}

func Coalesce

func Coalesce(lexer Lexer) Lexer

Coalesce is a Lexer interceptor that collapses runs of common types into a single token.

func DelegatingLexer

func DelegatingLexer(root Lexer, language Lexer) Lexer

DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP inside HTML or PHP inside plain text.

It takes two lexers as arguments: a root lexer and a language lexer. First everything is scanned using the language lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer. Finally, these two sets of tokens are merged.

The lexers from the template lexer package use this base lexer.

func RemappingLexer

func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer

RemappingLexer remaps each token to a (potentially empty) set of tokens.
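
For example, a sketch that promotes the identifier "self" to a builtin pseudo-name (the mapping is illustrative):

lexer = RemappingLexer(lexer, func(token Token) []Token {
	if token.Type == Name && token.Value == "self" {
		token.Type = NameBuiltinPseudo
	}
	return []Token{token}
})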

func TypeRemappingLexer

func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer

TypeRemappingLexer remaps types of tokens coming from a parent Lexer.

e.g. Map "defvaralias" tokens of type NameVariable to NameFunction:

mapping := TypeMapping{
	{NameVariable, NameFunction, []string{"defvaralias"}},
}
lexer = TypeRemappingLexer(lexer, mapping)

type LexerMutator

A LexerMutator is an additional interface that a Mutator can implement to modify the lexer when it is compiled.

type LexerMutator interface {
    // Rules are the lexer rules, state is the state key for the rule the mutator is associated with.
    MutateLexer(rules CompiledRules, state string, rule int) error
}

type LexerState

LexerState contains the state for a single lex.

type LexerState struct {
    Lexer *RegexLexer
    Text  []rune
    Pos   int
    Rules CompiledRules
    Stack []string
    State string
    Rule  int
    // Group matches.
    Groups []string
    // Named Group matches.
    NamedGroups map[string]string
    // Custom context for mutators.
    MutatorContext map[interface{}]interface{}
    // contains filtered or unexported fields
}

func (*LexerState) Get

func (l *LexerState) Get(key interface{}) interface{}

Get mutator context.

func (*LexerState) Iterator

func (l *LexerState) Iterator() Token

Iterator returns the next Token from the lexer.

func (*LexerState) Set

func (l *LexerState) Set(key interface{}, value interface{})

Set mutator context.
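
Get and Set are intended for use from within Mutators. A sketch of a Mutator that counts its own invocations under an illustrative "count" key:

counter := MutatorFunc(func(state *LexerState) error {
	n, _ := state.Get("count").(int)
	state.Set("count", n+1)
	return nil
})
// counter can then be used as the Mutator of a Rule.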

type Lexers

Lexers is a slice of lexers sortable by name.

type Lexers []Lexer

func (Lexers) Len

func (l Lexers) Len() int

func (Lexers) Less

func (l Lexers) Less(i, j int) bool

func (Lexers) Swap

func (l Lexers) Swap(i, j int)

type Mutator

A Mutator modifies the behaviour of the lexer.

type Mutator interface {
    // Mutate the lexer state machine as it is processing.
    Mutate(state *LexerState) error
}

func Combined

func Combined(states ...string) Mutator

Combined creates a new anonymous state from the given states, and pushes that state.

type MutatorFunc

A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing.

type MutatorFunc func(state *LexerState) error

func Mutators

func Mutators(modifiers ...Mutator) MutatorFunc

Mutators applies a set of Mutators in order.

func Pop

func Pop(n int) MutatorFunc

Pop pops n states from the stack when the rule matches.

func Push

func Push(states ...string) MutatorFunc

Push states onto the stack.
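
Push and Pop are typically paired to lex nested constructs. A sketch of a quoted-string state (the rules are illustrative):

Rules{
	"root": {
		{`"`, LiteralString, Push("string")},
		{`[^"]+`, Text, nil},
	},
	"string": {
		{`"`, LiteralString, Pop(1)},
		{`[^"]+`, LiteralString, nil},
	},
}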

func (MutatorFunc) Mutate

func (m MutatorFunc) Mutate(state *LexerState) error

type PrioritisedLexers

PrioritisedLexers is a slice of lexers sortable by priority.

type PrioritisedLexers []Lexer

func (PrioritisedLexers) Len

func (l PrioritisedLexers) Len() int

func (PrioritisedLexers) Less

func (l PrioritisedLexers) Less(i, j int) bool

func (PrioritisedLexers) Swap

func (l PrioritisedLexers) Swap(i, j int)

type RegexLexer

RegexLexer is the default lexer implementation used in Chroma.

type RegexLexer struct {
    // contains filtered or unexported fields
}

func MustNewLazyLexer

func MustNewLazyLexer(config *Config, rulesFunc func() Rules) *RegexLexer

MustNewLazyLexer creates a new Lexer with deferred rules generation or panics.
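
A condensed sketch of defining a lexer with deferred rules generation, loosely modelled on an INI-style lexer (all names and patterns are illustrative):

var INI = MustNewLazyLexer(
	&Config{
		Name:      "INI",
		Aliases:   []string{"ini", "cfg"},
		Filenames: []string{"*.ini", "*.cfg"},
	},
	func() Rules {
		return Rules{
			"root": {
				{`\s+`, Whitespace, nil},
				{`[;#].*`, CommentSingle, nil},
				{`\[.*?\]$`, Keyword, nil},
				{`(.*?)(\s*)(=)(\s*)(.*)`, ByGroups(
					Name, Whitespace, Operator, Whitespace, LiteralString), nil},
			},
		}
	},
)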

func MustNewLexer

func MustNewLexer(config *Config, rules Rules) *RegexLexer

MustNewLexer creates a new Lexer or panics.

Deprecated: Use MustNewLazyLexer instead.

func NewLazyLexer

func NewLazyLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error)

NewLazyLexer creates a new regex-based Lexer with deferred rules generation.

func NewLexer

func NewLexer(config *Config, rules Rules) (*RegexLexer, error)

NewLexer creates a new regex-based Lexer.

"rules" is a state machine transitition map. Each key is a state. Values are sets of rules that match input, optionally modify lexer state, and output tokens.

Deprecated: Use NewLazyLexer instead.

func (*RegexLexer) AnalyseText

func (r *RegexLexer) AnalyseText(text string) float32

func (*RegexLexer) Config

func (r *RegexLexer) Config() *Config

func (*RegexLexer) SetAnalyser

func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer

SetAnalyser sets the analyser function used to perform content inspection.

func (*RegexLexer) Tokenise

func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error)

func (*RegexLexer) Trace

func (r *RegexLexer) Trace(trace bool) *RegexLexer

Trace enables debug tracing.

type Rule

A Rule is the fundamental matching unit of the Regex lexer state machine.

type Rule struct {
    Pattern string
    Type    Emitter
    Mutator Mutator
}

func Default

func Default(mutators ...Mutator) Rule

Default returns a Rule that applies a set of Mutators.

func Include

func Include(state string) Rule

Include the given state.

type Rules

Rules maps from state to a sequence of Rules.

type Rules map[string][]Rule

func (Rules) Clone

func (r Rules) Clone() Rules

Clone returns a clone of the Rules.

func (Rules) Merge

func (r Rules) Merge(rules Rules) Rules

Merge creates a clone of "r" then merges "rules" into the clone.

func (Rules) Rename

func (r Rules) Rename(oldRule, newRule string) Rules

Rename clones the rules, then renames the state oldRule to newRule.

type Style

A Style definition.

See http://pygments.org/docs/styles/ for details. Semantics are intended to be identical.

type Style struct {
    Name string
    // contains filtered or unexported fields
}

func MustNewStyle

func MustNewStyle(name string, entries StyleEntries) *Style

MustNewStyle creates a new style or panics.

func NewStyle

func NewStyle(name string, entries StyleEntries) (*Style, error)

NewStyle creates a new style definition.
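
A minimal sketch of a custom style (the colour values are arbitrary):

style, err := NewStyle("example", StyleEntries{
	Background: "bg:#282a36",
	Keyword:    "bold #ff79c6",
	Comment:    "italic #6272a4",
})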

func (*Style) Builder

func (s *Style) Builder() *StyleBuilder

Builder creates a mutable builder from this Style.

The builder can then be safely modified. This is a cheap operation.

func (*Style) Get

func (s *Style) Get(ttype TokenType) StyleEntry

Get a style entry. Will try sub-category or category if an exact match is not found, and finally return the Background.

func (*Style) Has

func (s *Style) Has(ttype TokenType) bool

Has checks if an exact style entry match exists for a token type.

This is distinct from Get() which will merge parent tokens.

func (*Style) Types

func (s *Style) Types() []TokenType

Types that are styled.

type StyleBuilder

A StyleBuilder is a mutable structure for building styles.

Once built, a Style is immutable.

type StyleBuilder struct {
    // contains filtered or unexported fields
}

func NewStyleBuilder

func NewStyleBuilder(name string) *StyleBuilder

func (*StyleBuilder) Add

func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder

Add an entry to the Style map.

See http://pygments.org/docs/styles/#style-rules for details.
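
Calls chain, so a style can be assembled fluently (a sketch; the entries are illustrative):

style, err := NewStyleBuilder("mystyle").
	Add(Keyword, "bold #0000ff").
	Add(Comment, "italic #888888").
	Build()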

func (*StyleBuilder) AddAll

func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder

func (*StyleBuilder) AddEntry

func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder

func (*StyleBuilder) Build

func (s *StyleBuilder) Build() (*Style, error)

func (*StyleBuilder) Get

func (s *StyleBuilder) Get(ttype TokenType) StyleEntry

type StyleEntries

StyleEntries maps a TokenType to a colour definition.

type StyleEntries map[TokenType]string

type StyleEntry

A StyleEntry in the Style map.

type StyleEntry struct {
    // Hex colours.
    Colour     Colour
    Background Colour
    Border     Colour

    Bold      Trilean
    Italic    Trilean
    Underline Trilean
    NoInherit bool
}

func ParseStyleEntry

func ParseStyleEntry(entry string) (StyleEntry, error)

ParseStyleEntry parses a Pygments style entry.

func (StyleEntry) Inherit

func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry

Inherit styles from ancestors.

Ancestors should be provided from oldest to newest.

func (StyleEntry) IsZero

func (s StyleEntry) IsZero() bool

func (StyleEntry) String

func (s StyleEntry) String() string

func (StyleEntry) Sub

func (s StyleEntry) Sub(e StyleEntry) StyleEntry

Sub subtracts e from s where elements match.

type Token

Token output to formatter.

type Token struct {
    Type  TokenType `json:"type"`
    Value string    `json:"value"`
}

EOF is returned by lexers at the end of input.

var EOF Token

func Tokenise

func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, error)

Tokenise text using lexer, returning tokens as a slice.
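
A sketch, assuming "lexer" is any Lexer (for instance one obtained from the lexers subpackage):

tokens, err := Tokenise(lexer, nil, "x = 1\n")
if err == nil {
	for _, token := range tokens {
		fmt.Printf("%s: %q\n", token.Type, token.Value)
	}
}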

func (*Token) Clone

func (t *Token) Clone() Token

Clone returns a clone of the Token.

func (*Token) GoString

func (t *Token) GoString() string

func (*Token) String

func (t *Token) String() string

type TokenType

TokenType is the type of token to highlight.

It is also an Emitter, emitting a single token of itself.

type TokenType int

Meta token types.

const (
    // Default background style.
    Background TokenType = -1 - iota
    // PreWrapper style.
    PreWrapper
    // Line style.
    Line
    // Line numbers in output.
    LineNumbers
    // Line numbers in output when in table.
    LineNumbersTable
    // Line highlight style.
    LineHighlight
    // Line numbers table wrapper style.
    LineTable
    // Line numbers table TD wrapper style.
    LineTableTD
    // Code line wrapper style.
    CodeLine
    // Input that could not be tokenised.
    Error
    // Other is used by the Delegate lexer to indicate which tokens should be handled by the delegate.
    Other
    // No highlighting.
    None
    // Used as an EOF marker / nil token.
    EOFType TokenType = 0
)

Keywords.

const (
    Keyword TokenType = 1000 + iota
    KeywordConstant
    KeywordDeclaration
    KeywordNamespace
    KeywordPseudo
    KeywordReserved
    KeywordType
)

Names.

const (
    Name TokenType = 2000 + iota
    NameAttribute
    NameBuiltin
    NameBuiltinPseudo
    NameClass
    NameConstant
    NameDecorator
    NameEntity
    NameException
    NameFunction
    NameFunctionMagic
    NameKeyword
    NameLabel
    NameNamespace
    NameOperator
    NameOther
    NamePseudo
    NameProperty
    NameTag
    NameVariable
    NameVariableAnonymous
    NameVariableClass
    NameVariableGlobal
    NameVariableInstance
    NameVariableMagic
)

Literals.

const (
    Literal TokenType = 3000 + iota
    LiteralDate
    LiteralOther
)

Strings.

const (
    LiteralString TokenType = 3100 + iota
    LiteralStringAffix
    LiteralStringAtom
    LiteralStringBacktick
    LiteralStringBoolean
    LiteralStringChar
    LiteralStringDelimiter
    LiteralStringDoc
    LiteralStringDouble
    LiteralStringEscape
    LiteralStringHeredoc
    LiteralStringInterpol
    LiteralStringName
    LiteralStringOther
    LiteralStringRegex
    LiteralStringSingle
    LiteralStringSymbol
)

Numbers.

const (
    LiteralNumber TokenType = 3200 + iota
    LiteralNumberBin
    LiteralNumberFloat
    LiteralNumberHex
    LiteralNumberInteger
    LiteralNumberIntegerLong
    LiteralNumberOct
)

Operators.

const (
    Operator TokenType = 4000 + iota
    OperatorWord
)

Comments.

const (
    Comment TokenType = 6000 + iota
    CommentHashbang
    CommentMultiline
    CommentSingle
    CommentSpecial
)

Preprocessor "comments".

const (
    CommentPreproc TokenType = 6100 + iota
    CommentPreprocFile
)

Generic tokens.

const (
    Generic TokenType = 7000 + iota
    GenericDeleted
    GenericEmph
    GenericError
    GenericHeading
    GenericInserted
    GenericOutput
    GenericPrompt
    GenericStrong
    GenericSubheading
    GenericTraceback
    GenericUnderline
)

Text.

const (
    Text TokenType = 8000 + iota
    TextWhitespace
    TextSymbol
    TextPunctuation
)

Punctuation.

const (
    Punctuation TokenType = 5000 + iota
)

func (TokenType) Category

func (t TokenType) Category() TokenType

func (TokenType) Emit

func (t TokenType) Emit(groups []string, _ *LexerState) Iterator

func (TokenType) InCategory

func (t TokenType) InCategory(other TokenType) bool

func (TokenType) InSubCategory

func (t TokenType) InSubCategory(other TokenType) bool

func (TokenType) MarshalJSON

func (t TokenType) MarshalJSON() ([]byte, error)

func (TokenType) Parent

func (t TokenType) Parent() TokenType

func (TokenType) String

func (i TokenType) String() string

func (TokenType) SubCategory

func (t TokenType) SubCategory() TokenType

func (*TokenType) UnmarshalJSON

func (t *TokenType) UnmarshalJSON(data []byte) error

type TokeniseOptions

TokeniseOptions contains options for tokenisers.

type TokeniseOptions struct {
    // State to start tokenisation in. Defaults to "root".
    State string
    // Nested tokenisation.
    Nested bool

    // If true, all EOLs are converted into LF
    // by replacing CRLF and CR.
    EnsureLF bool
}
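
A sketch of passing options to a lexer:

it, err := lexer.Tokenise(&TokeniseOptions{
	State:    "root",
	EnsureLF: true,
}, source)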

type Trilean

Trilean value for StyleEntry value inheritance.

type Trilean uint8

Trilean states.

const (
    Pass Trilean = iota
    Yes
    No
)

func (Trilean) Prefix

func (t Trilean) Prefix(s string) string

Prefix returns s with "no" as a prefix if Trilean is no.

func (Trilean) String

func (t Trilean) String() string

type TypeMapping

TypeMapping defines type maps for the TypeRemappingLexer.

type TypeMapping []struct {
    From, To TokenType
    Words    []string
}

Subdirectories

Name Synopsis
formatters
html
svg Package svg contains an SVG formatter.
lexers Package lexers contains the registry of all lexers.
a
b
c
circular Package circular exists to break circular dependencies between lexers.
d
e
f
g
h
i
j
k
l
m
n
o
p
q
r
s
t
v
w
x
y
z
quick Package quick provides simple, no-configuration source code highlighting.
styles
v2 Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-coloured text, etc.
formatters
html
svg Package svg contains an SVG formatter.
lexers
quick Package quick provides simple, no-configuration source code highlighting.
styles