package lexers

import "github.com/alecthomas/chroma/v2/lexers"

Index

Variables

// Caddyfile is the Caddyfile lexer, registered with the global registry.
// Matches files named "Caddyfile*" (e.g. Caddyfile, Caddyfile.dev).
var Caddyfile = Register(MustNewLexer(
	&Config{
		Name:      "Caddyfile",
		Aliases:   []string{"caddyfile", "caddy"},
		Filenames: []string{"Caddyfile*"},
		MimeTypes: []string{},
	},
	caddyfileRules,
))

Caddyfile lexer.

// CaddyfileDirectives is a Caddyfile directive-only lexer. It has no
// filename patterns, so it is only selected explicitly by name/alias.
var CaddyfileDirectives = Register(MustNewLexer(
	&Config{
		Name:      "Caddyfile Directives",
		Aliases:   []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
		Filenames: []string{},
		MimeTypes: []string{},
	},
	caddyfileDirectivesRules,
))

Caddyfile directive-only lexer.

// CommonLisp is the Common Lisp lexer, built from the embedded XML
// definition and wrapped in a TypeRemappingLexer that reassigns
// NameVariable tokens to more specific token types based on the
// word lists (builtin functions, special forms, macros, etc.).
// NOTE(review): mapping entries are assumed to be checked in order —
// confirm against TypeRemappingLexer before reordering.
var CommonLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
	embedded,
	"embedded/common_lisp.xml",
), TypeMapping{
	{NameVariable, NameFunction, clBuiltinFunctions},
	{NameVariable, Keyword, clSpecialForms},
	{NameVariable, NameBuiltin, clMacros},
	{NameVariable, Keyword, clLambdaListKeywords},
	{NameVariable, Keyword, clDeclarations},
	{NameVariable, KeywordType, clBuiltinTypes},
	{NameVariable, NameClass, clBuiltinClasses},
}))

Common Lisp lexer.

// EmacsLisp is the Emacs Lisp lexer, built from the embedded XML
// definition with NameVariable tokens remapped to more specific types
// based on the Emacs word lists. Highlighted builtins and macros share
// one NameBuiltin mapping via append.
var EmacsLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
	embedded,
	"embedded/emacslisp.xml",
), TypeMapping{
	{NameVariable, NameFunction, emacsBuiltinFunction},
	{NameVariable, NameBuiltin, emacsSpecialForms},
	{NameVariable, NameException, emacsErrorKeywords},
	{NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)},
	{NameVariable, KeywordPseudo, emacsLambdaListKeywords},
}))

EmacsLisp lexer.

// Fallback lexer if no other is found. It matches every filename ("*")
// but carries a negative Priority so any real match wins over it.
var Fallback chroma.Lexer = chroma.MustNewLexer(&chroma.Config{
	Name:      "fallback",
	Filenames: []string{"*"},
	Priority:  -1,
}, PlaintextRules)

Fallback lexer if no other is found.

// Genshi is the Genshi/Kid XML template lexer. NotMultiline and DotAll
// tune the regex engine for the markup rules it shares with the
// Genshi HTML variant.
var Genshi = Register(MustNewLexer(
	&Config{
		Name:         "Genshi",
		Aliases:      []string{"genshi", "kid", "xml+genshi", "xml+kid"},
		Filenames:    []string{"*.kid"},
		MimeTypes:    []string{"application/x-genshi", "application/x-kid"},
		NotMultiline: true,
		DotAll:       true,
	},
	genshiMarkupRules,
))

Genshi lexer.

// GenshiHTMLTemplate is the HTML-flavoured Genshi template lexer.
// It reuses the same markup rules as Genshi but is only selected by
// alias or MIME type (no filename patterns).
var GenshiHTMLTemplate = Register(MustNewLexer(
	&Config{
		Name:         "Genshi HTML",
		Aliases:      []string{"html+genshi", "html+kid"},
		Filenames:    []string{},
		MimeTypes:    []string{"text/html+genshi"},
		NotMultiline: true,
		DotAll:       true,
	},
	genshiMarkupRules,
))

HTML+Genshi lexer.

// GenshiText is the Genshi text-template lexer, selected by alias or
// MIME type only.
var GenshiText = Register(MustNewLexer(
	&Config{
		Name:      "Genshi Text",
		Aliases:   []string{"genshitext"},
		Filenames: []string{},
		MimeTypes: []string{"application/x-genshi-text", "text/x-genshi"},
	},
	genshiTextRules,
))

Genshi Text lexer.

// GlobalLexerRegistry is the global LexerRegistry of Lexers. It is built
// once at package initialization by loading every embedded XML lexer
// definition under embedded/*.xml.
var GlobalLexerRegistry = func() *chroma.LexerRegistry {
	registry := chroma.NewLexerRegistry()

	// A glob failure over the embedded FS is a packaging bug, not a
	// runtime condition, so panicking at init is acceptable here.
	xmlFiles, globErr := fs.Glob(embedded, "embedded/*.xml")
	if globErr != nil {
		panic(globErr)
	}
	for _, xmlFile := range xmlFiles {
		registry.Register(chroma.MustNewXMLLexer(embedded, xmlFile))
	}
	return registry
}()

GlobalLexerRegistry is the global LexerRegistry of Lexers.

// Go is the Go source lexer. Its analyser scores text by cheap
// heuristics: a "package " clause plus use of the fmt package scores
// 0.5, a bare "package " clause scores 0.1, anything else 0.
var Go = Register(MustNewLexer(
	&Config{
		Name:      "Go",
		Aliases:   []string{"go", "golang"},
		Filenames: []string{"*.go"},
		MimeTypes: []string{"text/x-gosrc"},
		EnsureNL:  true,
	},
	goRules,
).SetAnalyser(func(text string) float32 {
	hasPackageClause := strings.Contains(text, "package ")
	switch {
	case hasPackageClause && strings.Contains(text, "fmt."):
		return 0.5
	case hasPackageClause:
		return 0.1
	default:
		return 0.0
	}
}))

Go lexer.

// GoHTMLTemplate is the Go html/template lexer: the embedded
// go_template.xml rules combined with the HTML lexer via
// DelegatingLexer, reconfigured with its own name and alias.
var GoHTMLTemplate = Register(DelegatingLexer(HTML, MustNewXMLLexer(
	embedded,
	"embedded/go_template.xml",
).SetConfig(
	&Config{
		Name:    "Go HTML Template",
		Aliases: []string{"go-html-template"},
	},
)))
// GoTextTemplate is the Go text/template lexer. It shares the same
// embedded go_template.xml rules as GoHTMLTemplate but without the
// HTML delegation, and carries its own name and alias.
var GoTextTemplate = Register(MustNewXMLLexer(
	embedded,
	"embedded/go_template.xml",
).SetConfig(
	&Config{
		Name:    "Go Text Template",
		Aliases: []string{"go-text-template"},
	},
))
// HTML lexer, loaded from the embedded XML definition. Also used as the
// delegation target for template lexers (Go HTML Template, Markdown, Svelte).
var HTML = chroma.MustNewXMLLexer(embedded, "embedded/html.xml")

HTML lexer.

// HTTP is the HTTP request/response lexer. The base lexer is wrapped by
// httpBodyContentTypeLexer — presumably so the message body can be
// re-lexed per its Content-Type header; verify against that helper.
var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer(
	&Config{
		Name:         "HTTP",
		Aliases:      []string{"http"},
		Filenames:    []string{},
		MimeTypes:    []string{},
		NotMultiline: true,
		DotAll:       true,
	},
	httpRules,
)))

HTTP lexer.

// Haxe is the Haxe lexer, covering both Haxe sources (*.hx) and Haxe
// shader language files (*.hxsl).
var Haxe = Register(MustNewLexer(
	&Config{
		Name:      "Haxe",
		Aliases:   []string{"hx", "haxe", "hxsl"},
		Filenames: []string{"*.hx", "*.hxsl"},
		MimeTypes: []string{"text/haxe", "text/x-haxe", "text/x-hx"},
		DotAll:    true,
	},
	haxeRules,
))

Haxe lexer.

// Markdown is the Markdown lexer, combined with the HTML lexer via
// DelegatingLexer since Markdown documents may embed raw HTML.
var Markdown = Register(DelegatingLexer(HTML, MustNewLexer(
	&Config{
		Name:      "markdown",
		Aliases:   []string{"md", "mkd"},
		Filenames: []string{"*.md", "*.mkd", "*.markdown"},
		MimeTypes: []string{"text/x-markdown"},
	},
	markdownRules,
)))

Markdown lexer.

// Raku is the Raku (formerly Perl 6) lexer.
// NOTE(review): *.pl, *.pm and *.t also match Perl 5 files — the
// registry presumably arbitrates that overlap; confirm before relying
// on filename-only matching.
var Raku Lexer = Register(MustNewLexer(
	&Config{
		Name:    "Raku",
		Aliases: []string{"perl6", "pl6", "raku"},
		Filenames: []string{
			"*.pl", "*.pm", "*.nqp", "*.p6", "*.6pl", "*.p6l", "*.pl6", "*.6pm",
			"*.p6m", "*.pm6", "*.t", "*.raku", "*.rakumod", "*.rakutest", "*.rakudoc",
		},
		MimeTypes: []string{
			"text/x-perl6", "application/x-perl6",
			"text/x-raku", "application/x-raku",
		},
		DotAll: true,
	},
	rakuRules,
))

Raku lexer.

// Restructuredtext is the reStructuredText lexer.
var Restructuredtext = Register(MustNewLexer(
	&Config{
		Name:      "reStructuredText",
		Aliases:   []string{"rst", "rest", "restructuredtext"},
		Filenames: []string{"*.rst", "*.rest"},
		MimeTypes: []string{"text/x-rst", "text/prs.fallenstein.rst"},
	},
	restructuredtextRules,
))

Restructuredtext lexer.

// Svelte is the Svelte component lexer, combined with the HTML lexer
// via DelegatingLexer since Svelte components are HTML-based.
var Svelte = Register(DelegatingLexer(HTML, MustNewLexer(
	&Config{
		Name:      "Svelte",
		Aliases:   []string{"svelte"},
		Filenames: []string{"*.svelte"},
		MimeTypes: []string{"application/x-svelte"},
		DotAll:    true,
	},
	svelteRules,
)))

Svelte lexer.

// Typoscript is the TYPO3 TypoScript lexer.
// NOTE(review): "*.ts" also matches TypeScript files; the low Priority
// of 0.1 appears intended to arbitrate that collision — confirm against
// the registry's priority semantics.
var Typoscript = Register(MustNewLexer(
	&Config{
		Name:      "TypoScript",
		Aliases:   []string{"typoscript"},
		Filenames: []string{"*.ts"},
		MimeTypes: []string{"text/x-typoscript"},
		DotAll:    true,
		Priority:  0.1,
	},
	typoscriptRules,
))

Typoscript lexer.

Functions

func Analyse

func Analyse(text string) chroma.Lexer

Analyse text content and return the "best" matching lexer.

func Get

func Get(name string) chroma.Lexer

Get a Lexer by name, alias or file extension.

Note that if there isn't an exact match on name or alias, this will call Match(), so it is not efficient.

func Match

func Match(filename string) chroma.Lexer

Match returns the first lexer matching filename.

Note that this iterates over all file patterns in all lexers, so it's not particularly efficient.

func MatchMimeType

func MatchMimeType(mimeType string) chroma.Lexer

MatchMimeType attempts to find a lexer for the given MIME type.

func Names

func Names(withAliases bool) []string

Names of all lexers, optionally including aliases.

func PlaintextRules

func PlaintextRules() chroma.Rules

PlaintextRules is used for the fallback lexer as well as the explicit plaintext lexer.

func Register

func Register(lexer chroma.Lexer) chroma.Lexer

Register a Lexer with the global registry.

Source Files

caddyfile.go cl.go dns.go emacs.go genshi.go go.go haxe.go html.go http.go lexers.go markdown.go mysql.go php.go raku.go rst.go svelte.go typoscript.go zed.go

Version
v2.11.1
Published
Nov 12, 2023
Platform
darwin/amd64
Imports
7 packages
Last checked
5 minutes ago

Tools for package owners.