package chroma
import "github.com/alecthomas/chroma"
Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-coloured text, etc.
Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles.
For more information, go here: https://github.com/alecthomas/chroma
Index ¶
- Constants
- Variables
- func Words(prefix, suffix string, words ...string) string
- type Analyser
- type Colour
- func MustParseColour(colour string) Colour
- func NewColour(r, g, b uint8) Colour
- func ParseColour(colour string) Colour
- func (c Colour) Blue() uint8
- func (c Colour) Brighten(factor float64) Colour
- func (c Colour) Brightness() float64
- func (c Colour) Distance(e2 Colour) float64
- func (c Colour) GoString() string
- func (c Colour) Green() uint8
- func (c Colour) IsSet() bool
- func (c Colour) Red() uint8
- func (c Colour) String() string
- type Colours
- type CompiledRule
- type CompiledRules
- type Config
- type Emitter
- func ByGroups(emitters ...Emitter) Emitter
- func Using(lexer Lexer, options *TokeniseOptions) Emitter
- func UsingSelf(state string) Emitter
- type EmitterFunc
- type Formatter
- type FormatterFunc
- type Iterator
- func Concaterator(iterators ...Iterator) Iterator
- func Literator(tokens ...*Token) Iterator
- func (i Iterator) Tokens() []*Token
- type Lexer
- type LexerMutator
- type LexerState
- func (l *LexerState) Get(key interface{}) interface{}
- func (l *LexerState) Iterator() *Token
- func (l *LexerState) Set(key interface{}, value interface{})
- type Lexers
- func (l Lexers) Len() int
- func (l Lexers) Less(i, j int) bool
- func (l Lexers) Pick(text string) Lexer
- func (l Lexers) Swap(i, j int)
- type Mutator
- type MutatorFunc
- func Mutators(modifiers ...Mutator) MutatorFunc
- func Pop(n int) MutatorFunc
- func Push(states ...string) MutatorFunc
- func (m MutatorFunc) Mutate(state *LexerState) error
- type RegexLexer
- func MustNewLexer(config *Config, rules Rules) *RegexLexer
- func NewLexer(config *Config, rules Rules) (*RegexLexer, error)
- func (r *RegexLexer) AnalyseText(text string) float32
- func (r *RegexLexer) Config() *Config
- func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer
- func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error)
- func (r *RegexLexer) Trace(trace bool) *RegexLexer
- type Rule
- type Rules
- type Style
- func MustNewStyle(name string, entries StyleEntries) *Style
- func NewStyle(name string, entries StyleEntries) (*Style, error)
- func (s *Style) Builder() *StyleBuilder
- func (s *Style) Get(ttype TokenType) StyleEntry
- func (s *Style) Has(ttype TokenType) bool
- func (s *Style) Types() []TokenType
- type StyleBuilder
- func NewStyleBuilder(name string) *StyleBuilder
- func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder
- func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder
- func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder
- func (s *StyleBuilder) Build() (*Style, error)
- func (s *StyleBuilder) Get(ttype TokenType) StyleEntry
- type StyleEntries
- type StyleEntry
- func ParseStyleEntry(entry string) (StyleEntry, error)
- func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry
- func (s StyleEntry) IsZero() bool
- func (s StyleEntry) String() string
- func (s StyleEntry) Sub(e StyleEntry) StyleEntry
- type Token
- func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]*Token, error)
- func (t *Token) Clone() *Token
- func (t *Token) GoString() string
- func (t *Token) String() string
- type TokenType
- func (t TokenType) Category() TokenType
- func (t TokenType) Emit(groups []string, lexer Lexer) Iterator
- func (t TokenType) InCategory(other TokenType) bool
- func (t TokenType) InSubCategory(other TokenType) bool
- func (t TokenType) Parent() TokenType
- func (i TokenType) String() string
- func (t TokenType) SubCategory() TokenType
- type TokeniseOptions
- type Trilean
Constants ¶
const ( Whitespace = TextWhitespace Date = LiteralDate String = LiteralString StringAffix = LiteralStringAffix StringBacktick = LiteralStringBacktick StringChar = LiteralStringChar StringDelimiter = LiteralStringDelimiter StringDoc = LiteralStringDoc StringDouble = LiteralStringDouble StringEscape = LiteralStringEscape StringHeredoc = LiteralStringHeredoc StringInterpol = LiteralStringInterpol StringOther = LiteralStringOther StringRegex = LiteralStringRegex StringSingle = LiteralStringSingle StringSymbol = LiteralStringSymbol Number = LiteralNumber NumberBin = LiteralNumberBin NumberFloat = LiteralNumberFloat NumberHex = LiteralNumberHex NumberInteger = LiteralNumberInteger NumberIntegerLong = LiteralNumberIntegerLong NumberOct = LiteralNumberOct )
Aliases.
Variables ¶
var ANSI2RGB = map[string]string{ "#ansiblack": "000000", "#ansidarkred": "7f0000", "#ansidarkgreen": "007f00", "#ansibrown": "7f7fe0", "#ansidarkblue": "00007f", "#ansipurple": "7f007f", "#ansiteal": "007f7f", "#ansilightgray": "e5e5e5", "#ansidarkgray": "555555", "#ansired": "ff0000", "#ansigreen": "00ff00", "#ansiyellow": "ffff00", "#ansiblue": "0000ff", "#ansifuchsia": "ff00ff", "#ansiturquoise": "00ffff", "#ansiwhite": "ffffff", "#black": "000000", "#darkred": "7f0000", "#darkgreen": "007f00", "#brown": "7f7fe0", "#darkblue": "00007f", "#purple": "7f007f", "#teal": "007f7f", "#lightgray": "e5e5e5", "#darkgray": "555555", "#red": "ff0000", "#green": "00ff00", "#yellow": "ffff00", "#blue": "0000ff", "#fuchsia": "ff00ff", "#turquoise": "00ffff", "#white": "ffffff", }
ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values.
var ( StandardTypes = map[TokenType]string{ Background: "chroma", LineNumbers: "ln", LineHighlight: "hl", Text: "", Whitespace: "w", Error: "err", Other: "x", Keyword: "k", KeywordConstant: "kc", KeywordDeclaration: "kd", KeywordNamespace: "kn", KeywordPseudo: "kp", KeywordReserved: "kr", KeywordType: "kt", Name: "n", NameAttribute: "na", NameBuiltin: "nb", NameBuiltinPseudo: "bp", NameClass: "nc", NameConstant: "no", NameDecorator: "nd", NameEntity: "ni", NameException: "ne", NameFunction: "nf", NameFunctionMagic: "fm", NameProperty: "py", NameLabel: "nl", NameNamespace: "nn", NameOther: "nx", NameTag: "nt", NameVariable: "nv", NameVariableClass: "vc", NameVariableGlobal: "vg", NameVariableInstance: "vi", NameVariableMagic: "vm", Literal: "l", LiteralDate: "ld", String: "s", StringAffix: "sa", StringBacktick: "sb", StringChar: "sc", StringDelimiter: "dl", StringDoc: "sd", StringDouble: "s2", StringEscape: "se", StringHeredoc: "sh", StringInterpol: "si", StringOther: "sx", StringRegex: "sr", StringSingle: "s1", StringSymbol: "ss", Number: "m", NumberBin: "mb", NumberFloat: "mf", NumberHex: "mh", NumberInteger: "mi", NumberIntegerLong: "il", NumberOct: "mo", Operator: "o", OperatorWord: "ow", Punctuation: "p", Comment: "c", CommentHashbang: "ch", CommentMultiline: "cm", CommentPreproc: "cp", CommentPreprocFile: "cpf", CommentSingle: "c1", CommentSpecial: "cs", Generic: "g", GenericDeleted: "gd", GenericEmph: "ge", GenericError: "gr", GenericHeading: "gh", GenericInserted: "gi", GenericOutput: "go", GenericPrompt: "gp", GenericStrong: "gs", GenericSubheading: "gu", GenericTraceback: "gt", } )
Functions ¶
func Words ¶
Words creates a regex that matches any of the given literal words.
Types ¶
type Analyser ¶
Analyser determines how appropriate this lexer is for the given text.
type Colour ¶
type Colour int32
Colour represents an RGB colour.
func MustParseColour ¶
MustParseColour is like ParseColour except it panics if the colour is invalid.
Will panic if colour is in an invalid format.
func NewColour ¶
NewColour creates a Colour directly from RGB values.
func ParseColour ¶
ParseColour in the forms #rgb, #rrggbb, #ansi<colour>, or #<colour>. Will return an "unset" colour if invalid.
func (Colour) Blue ¶
Blue component of colour.
func (Colour) Brighten ¶
Brighten returns a copy of this colour with its brightness adjusted.
If factor is negative, the colour is darkened.
Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html).
func (Colour) Brightness ¶
Brightness of the colour (roughly) in the range 0.0 to 1.0
func (Colour) Distance ¶
Distance between this colour and another.
This uses the approach described here (https://www.compuphase.com/cmetric.htm). This is not as accurate as LAB, et al. but is *vastly* simpler and sufficient for our needs.
func (Colour) GoString ¶
func (Colour) Green ¶
Green component of colour.
func (Colour) IsSet ¶
func (Colour) Red ¶
Red component of colour.
func (Colour) String ¶
type Colours ¶
type Colours []Colour
Colours is an orderable set of colours.
func (Colours) Len ¶
func (Colours) Less ¶
func (Colours) Swap ¶
type CompiledRule ¶
A CompiledRule is a Rule with a pre-compiled regex.
Note that regular expressions are lazily compiled on first use of the lexer.
type CompiledRules ¶
type CompiledRules map[string][]*CompiledRule
type Config ¶
type Config struct { // Name of the lexer. Name string // Shortcuts for the lexer Aliases []string // File name globs Filenames []string // Secondary file name globs AliasFilenames []string // MIME types MimeTypes []string // Regex matching is case-insensitive. CaseInsensitive bool // Regex matches all characters. DotAll bool // Regex does not match across lines ($ matches EOL). // // Defaults to multiline. NotMultiline bool }
Config for a lexer.
type Emitter ¶
type Emitter interface { // Emit tokens for the given regex groups. Emit(groups []string, lexer Lexer) Iterator }
An Emitter takes group matches and returns tokens.
func ByGroups ¶
ByGroups emits a token for each matching group in the rule's regex.
func Using ¶
func Using(lexer Lexer, options *TokeniseOptions) Emitter
Using returns an Emitter that uses a given Lexer for parsing and emitting.
func UsingSelf ¶
UsingSelf is like Using, but uses the current Lexer.
type EmitterFunc ¶
EmitterFunc is a function that is an Emitter.
func (EmitterFunc) Emit ¶
func (e EmitterFunc) Emit(groups []string, lexer Lexer) Iterator
Emit tokens for groups.
type Formatter ¶
type Formatter interface { // Format returns a formatting function for tokens. // // If the iterator panics, the Formatter should recover. Format(w io.Writer, style *Style, iterator Iterator) error }
A Formatter for Chroma lexers.
func RecoveringFormatter ¶
RecoveringFormatter wraps a formatter with panic recovery.
type FormatterFunc ¶
A FormatterFunc is a Formatter implemented as a function.
Guards against iterator panics.
func (FormatterFunc) Format ¶
type Iterator ¶
type Iterator func() *Token
An Iterator across tokens.
nil will be returned at the end of the Token stream.
If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
func Concaterator ¶
Concaterator concatenates tokens from a series of iterators.
func Literator ¶
Literator converts a sequence of literal Tokens into an Iterator.
func (Iterator) Tokens ¶
Tokens consumes all tokens from the iterator and returns them as a slice.
type Lexer ¶
type Lexer interface { // Config describing the features of the Lexer. Config() *Config // Tokenise returns an Iterator over tokens in text. Tokenise(options *TokeniseOptions, text string) (Iterator, error) }
A Lexer for tokenising source code.
func Coalesce ¶
Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
type LexerMutator ¶
type LexerMutator interface { // Rules are the lexer rules, state is the state key for the rule the mutator is associated with. MutateLexer(rules CompiledRules, state string, rule int) error }
A LexerMutator is an additional interface that a Mutator can implement to modify the lexer when it is compiled.
type LexerState ¶
type LexerState struct { Lexer *RegexLexer Text []rune Pos int Rules CompiledRules Stack []string State string Rule int // Group matches. Groups []string // Custom context for mutators. MutatorContext map[interface{}]interface{} // contains filtered or unexported fields }
func (*LexerState) Get ¶
func (l *LexerState) Get(key interface{}) interface{}
func (*LexerState) Iterator ¶
func (l *LexerState) Iterator() *Token
func (*LexerState) Set ¶
func (l *LexerState) Set(key interface{}, value interface{})
type Lexers ¶
type Lexers []Lexer
func (Lexers) Len ¶
func (Lexers) Less ¶
func (Lexers) Pick ¶
Pick attempts to pick the best Lexer for a piece of source code. May return nil.
func (Lexers) Swap ¶
type Mutator ¶
type Mutator interface { // Mutate the lexer state machine as it is processing. Mutate(state *LexerState) error }
A Mutator modifies the behaviour of the lexer.
func Combined ¶
Combined creates a new anonymous state from the given states, and pushes that state.
type MutatorFunc ¶
type MutatorFunc func(state *LexerState) error
A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing.
func Mutators ¶
func Mutators(modifiers ...Mutator) MutatorFunc
Mutators applies a set of Mutators in order.
func Pop ¶
func Pop(n int) MutatorFunc
Pop state from the stack when rule matches.
func Push ¶
func Push(states ...string) MutatorFunc
Push states onto the stack.
func (MutatorFunc) Mutate ¶
func (m MutatorFunc) Mutate(state *LexerState) error
type RegexLexer ¶
type RegexLexer struct {
// contains filtered or unexported fields
}
func MustNewLexer ¶
func MustNewLexer(config *Config, rules Rules) *RegexLexer
MustNewLexer creates a new Lexer or panics.
func NewLexer ¶
func NewLexer(config *Config, rules Rules) (*RegexLexer, error)
NewLexer creates a new regex-based Lexer.
"rules" is a state machine transition map. Each key is a state. Values are sets of rules that match input, optionally modify lexer state, and output tokens.
func (*RegexLexer) AnalyseText ¶
func (r *RegexLexer) AnalyseText(text string) float32
func (*RegexLexer) Config ¶
func (r *RegexLexer) Config() *Config
func (*RegexLexer) SetAnalyser ¶
func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer
SetAnalyser sets the analyser function used to perform content inspection.
func (*RegexLexer) Tokenise ¶
func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error)
func (*RegexLexer) Trace ¶
func (r *RegexLexer) Trace(trace bool) *RegexLexer
type Rule ¶
func Default ¶
Default returns a Rule that applies a set of Mutators.
func Include ¶
Include the given state.
type Rules ¶
Rules maps from state to a sequence of Rules.
type Style ¶
type Style struct { Name string // contains filtered or unexported fields }
A Style definition.
See http://pygments.org/docs/styles/ for details. Semantics are intended to be identical.
func MustNewStyle ¶
func MustNewStyle(name string, entries StyleEntries) *Style
MustNewStyle creates a new style or panics.
func NewStyle ¶
func NewStyle(name string, entries StyleEntries) (*Style, error)
NewStyle creates a new style definition.
func (*Style) Builder ¶
func (s *Style) Builder() *StyleBuilder
Builder creates a mutable builder from this Style.
The builder can then be safely modified. This is a cheap operation.
func (*Style) Get ¶
func (s *Style) Get(ttype TokenType) StyleEntry
Get a style entry. Will try sub-category or category if an exact match is not found, and finally return the Background.
func (*Style) Has ¶
Has checks if an exact style entry match exists for a token type.
This is distinct from Get() which will merge parent tokens.
func (*Style) Types ¶
Types that are styled.
type StyleBuilder ¶
type StyleBuilder struct {
// contains filtered or unexported fields
}
A StyleBuilder is a mutable structure for building styles.
Once built, a Style is immutable.
func NewStyleBuilder ¶
func NewStyleBuilder(name string) *StyleBuilder
func (*StyleBuilder) Add ¶
func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder
Add an entry to the Style map.
See http://pygments.org/docs/styles/#style-rules for details.
func (*StyleBuilder) AddAll ¶
func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder
func (*StyleBuilder) AddEntry ¶
func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder
func (*StyleBuilder) Build ¶
func (s *StyleBuilder) Build() (*Style, error)
func (*StyleBuilder) Get ¶
func (s *StyleBuilder) Get(ttype TokenType) StyleEntry
type StyleEntries ¶
StyleEntries mapping TokenType to colour definition.
type StyleEntry ¶
type StyleEntry struct { // Hex colours. Colour Colour Background Colour Border Colour Bold Trilean Italic Trilean Underline Trilean NoInherit bool }
A StyleEntry in the Style map.
func ParseStyleEntry ¶
func ParseStyleEntry(entry string) (StyleEntry, error)
ParseStyleEntry parses a Pygments style entry.
func (StyleEntry) Inherit ¶
func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry
Inherit styles from ancestors.
Ancestors should be provided from oldest to newest.
func (StyleEntry) IsZero ¶
func (s StyleEntry) IsZero() bool
func (StyleEntry) String ¶
func (s StyleEntry) String() string
func (StyleEntry) Sub ¶
func (s StyleEntry) Sub(e StyleEntry) StyleEntry
type Token ¶
Token output to formatter.
func Tokenise ¶
func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]*Token, error)
Tokenise text using lexer, returning tokens as a slice.
func (*Token) Clone ¶
func (*Token) GoString ¶
func (*Token) String ¶
type TokenType ¶
type TokenType int
TokenType is the type of token to highlight.
It is also an Emitter, emitting a single token of itself.
const ( // Default background style. Background TokenType = -1 - iota // Line numbers in output. LineNumbers // Line highlight style. LineHighlight // Input that could not be tokenised. Error // Other is used by the Delegate lexer to indicate which tokens should be handled by the delegate. Other // No highlighting. None )
Meta token types.
const ( Keyword TokenType = 1000 + iota KeywordConstant KeywordDeclaration KeywordNamespace KeywordPseudo KeywordReserved KeywordType )
Keywords.
const ( Name TokenType = 2000 + iota NameAttribute NameBuiltin NameBuiltinPseudo NameClass NameConstant NameDecorator NameEntity NameException NameFunction NameFunctionMagic NameKeyword NameLabel NameNamespace NameOperator NameOther NamePseudo NameProperty NameTag NameVariable NameVariableAnonymous NameVariableClass NameVariableGlobal NameVariableInstance NameVariableMagic )
Names.
Literals.
const ( LiteralString TokenType = 3100 + iota LiteralStringAffix LiteralStringAtom LiteralStringBacktick LiteralStringBoolean LiteralStringChar LiteralStringDelimiter LiteralStringDoc LiteralStringDouble LiteralStringEscape LiteralStringHeredoc LiteralStringInterpol LiteralStringName LiteralStringOther LiteralStringRegex LiteralStringSingle LiteralStringSymbol )
Strings.
const ( LiteralNumber TokenType = 3200 + iota LiteralNumberBin LiteralNumberFloat LiteralNumberHex LiteralNumberInteger LiteralNumberIntegerLong LiteralNumberOct )
Literals.
Operators.
const ( Comment TokenType = 6000 + iota CommentHashbang CommentMultiline CommentSingle CommentSpecial )
Comments.
Preprocessor "comments".
const ( Generic TokenType = 7000 + iota GenericDeleted GenericEmph GenericError GenericHeading GenericInserted GenericOutput GenericPrompt GenericStrong GenericSubheading GenericTraceback GenericUnderline )
Generic tokens.
Text.
Punctuation.
func (TokenType) Category ¶
func (TokenType) Emit ¶
func (TokenType) InCategory ¶
func (TokenType) InSubCategory ¶
func (TokenType) Parent ¶
func (TokenType) String ¶
func (TokenType) SubCategory ¶
type TokeniseOptions ¶
type TokeniseOptions struct { // State to start tokenisation in. Defaults to "root". State string }
type Trilean ¶
type Trilean uint8
Trilean value for StyleEntry value inheritance.
func (Trilean) Prefix ¶
func (Trilean) String ¶
Source Files ¶
coalesce.go colour.go doc.go formatter.go iterator.go lexer.go mutators.go regexp.go style.go tokentype_string.go types.go
Directories ¶
Path | Synopsis |
---|---|
cmd | |
cmd/chroma | |
formatters | |
formatters/html | |
lexers | |
quick | |
styles | |
_tools | |
_tools/css2style | |
_tools/exercise |
- Version
- v0.1.1
- Published
- Sep 26, 2017
- Platform
- js/wasm
- Imports
- 10 packages
- Last checked
- now –
Tools for package owners.