package css_lexer

import "github.com/evanw/esbuild/internal/css_lexer"

Index

Functions

func IsNameContinue

func IsNameContinue(c rune) bool

func IsNameStart

func IsNameStart(c rune) bool

func RangeOfIdentifier

func RangeOfIdentifier(source logger.Source, loc logger.Loc) logger.Range

func WouldStartIdentifierWithoutEscapes

func WouldStartIdentifierWithoutEscapes(text string) bool

Types

type Comment

type Comment struct {
	Text            string
	Loc             logger.Loc
	TokenIndexAfter uint32
}

type Options

type Options struct {
	RecordAllComments bool
}

type T

type T uint8
const (
	TEndOfFile T = iota

	TAtKeyword
	TUnterminatedString
	TBadURL
	TCDC // "-->"
	TCDO // "<!--"
	TCloseBrace
	TCloseBracket
	TCloseParen
	TColon
	TComma
	TDelim
	TDelimAmpersand
	TDelimAsterisk
	TDelimBar
	TDelimCaret
	TDelimDollar
	TDelimDot
	TDelimEquals
	TDelimExclamation
	TDelimGreaterThan
	TDelimMinus
	TDelimPlus
	TDelimSlash
	TDelimTilde
	TDimension
	TFunction
	THash
	TIdent
	TNumber
	TOpenBrace
	TOpenBracket
	TOpenParen
	TPercentage
	TSemicolon
	TString
	TURL
	TWhitespace

	// This is never something that the lexer generates directly. Instead this is
	// an esbuild-specific token for global/local names that "TIdent" tokens may
	// be changed into.
	TSymbol
)

func (T) IsNumeric

func (t T) IsNumeric() bool

func (T) String

func (t T) String() string

type Token

type Token struct {
	Range      logger.Range // 8 bytes
	UnitOffset uint16       // 2 bytes
	Kind       T            // 1 byte
	Flags      TokenFlags   // 1 byte
}

This token struct is designed to be memory-efficient: it references a range in the input file instead of directly containing the substring of text, since a range takes up less memory than a string.

func (Token) DecodedText

func (token Token) DecodedText(contents string) string

type TokenFlags

type TokenFlags uint8
const (
	IsID TokenFlags = 1 << iota
	DidWarnAboutSingleLineComment
)

type TokenizeResult

type TokenizeResult struct {
	Tokens               []Token
	AllComments          []logger.Range
	LegalComments        []Comment
	SourceMapComment     logger.Span
	ApproximateLineCount int32
}

func Tokenize

func Tokenize(log logger.Log, source logger.Source, options Options) TokenizeResult

Source Files

css_lexer.go

Version
v0.25.0 (latest)
Published
Feb 8, 2025
Platform
linux/amd64
Imports
3 packages
Last checked
4 days ago

Tools for package owners.