package css
import "github.com/tdewolff/parse/v2/css"
Package css is a CSS3 lexer and parser following the specifications at http://www.w3.org/TR/css-syntax-3/.
Index ¶
- func HSL2RGB(h, s, l float64) (float64, float64, float64)
- func IsIdent(b []byte) bool
- func IsURLUnquoted(b []byte) bool
- type GrammarType
- type Hash
- type Lexer
- func NewLexer(r *parse.Input) *Lexer
- func (l *Lexer) Err() error
- func (l *Lexer) Next() (TokenType, []byte)
- type Parser
- func NewParser(r *parse.Input, isInline bool) *Parser
- func (p *Parser) Err() error
- func (p *Parser) HasParseError() bool
- func (p *Parser) Next() (GrammarType, TokenType, []byte)
- func (p *Parser) Offset() int
- func (p *Parser) Values() []Token
- type State
- type Token
- type TokenType
Examples ¶
Functions ¶
func HSL2RGB ¶
HSL2RGB converts HSL to RGB with all of range [0,1] from http://www.w3.org/TR/css3-color/#hsl-color
func IsIdent ¶
IsIdent returns true if the bytes are a valid identifier.
func IsURLUnquoted ¶
IsURLUnquoted returns true if the bytes are a valid unquoted URL.
Types ¶
type GrammarType ¶
type GrammarType uint32
GrammarType determines the type of grammar.
const ( ErrorGrammar GrammarType = iota // extra token when errors occur CommentGrammar AtRuleGrammar BeginAtRuleGrammar EndAtRuleGrammar QualifiedRuleGrammar BeginRulesetGrammar EndRulesetGrammar DeclarationGrammar TokenGrammar CustomPropertyGrammar )
GrammarType values.
func (GrammarType) String ¶
func (tt GrammarType) String() string
String returns the string representation of a GrammarType.
type Hash ¶
type Hash uint32
Hash defines perfect hashes for a predefined list of strings
const ( Document Hash = 0x8 // document Font_Face Hash = 0x809 // font-face Keyframes Hash = 0x1109 // keyframes Media Hash = 0x2105 // media Page Hash = 0x2604 // page Supports Hash = 0x1908 // supports )
Unique hash definitions to be used instead of strings
func ToHash ¶
ToHash returns the hash whose name is s. It returns zero if there is no such hash. It is case sensitive.
func (Hash) String ¶
String returns the hash's name.
type Lexer ¶
type Lexer struct {
// contains filtered or unexported fields
}
Lexer is the state for the lexer.
func NewLexer ¶
func NewLexer(r *parse.Input) *Lexer
NewLexer returns a new Lexer for a given parse.Input (which wraps an io.Reader).
Example ¶
Code:
{
l := NewLexer(parse.NewInputString("color: red;"))
out := ""
for {
tt, data := l.Next()
if tt == ErrorToken {
break
} else if tt == WhitespaceToken || tt == CommentToken {
continue
}
out += string(data)
}
fmt.Println(out)
// Output: color:red;
}
color:red;
func (*Lexer) Err ¶
Err returns the error encountered during lexing, this is often io.EOF but also other errors can be returned.
func (*Lexer) Next ¶
Next returns the next Token. It returns ErrorToken when an error was encountered. Using Err() one can retrieve the error message.
type Parser ¶
type Parser struct {
// contains filtered or unexported fields
}
Parser is the state for the parser.
func NewParser ¶
NewParser returns a new CSS parser from a parse.Input (which wraps an io.Reader). isInline specifies whether this is an inline style attribute.
Example ¶
Code:
{
p := NewParser(parse.NewInputString("color: red;"), true) // true because this is the content of an inline style attribute
out := ""
for {
gt, _, data := p.Next()
if gt == ErrorGrammar {
break
} else if gt == AtRuleGrammar || gt == BeginAtRuleGrammar || gt == BeginRulesetGrammar || gt == DeclarationGrammar {
out += string(data)
if gt == DeclarationGrammar {
out += ":"
}
for _, val := range p.Values() {
out += string(val.Data)
}
if gt == BeginAtRuleGrammar || gt == BeginRulesetGrammar {
out += "{"
} else if gt == AtRuleGrammar || gt == DeclarationGrammar {
out += ";"
}
} else {
out += string(data)
}
}
fmt.Println(out)
// Output: color:red;
}
color:red;
func (*Parser) Err ¶
Err returns the error encountered during parsing, this is often io.EOF but also other errors can be returned.
func (*Parser) HasParseError ¶
HasParseError returns true if there is a parse error (and not a read error).
func (*Parser) Next ¶
func (p *Parser) Next() (GrammarType, TokenType, []byte)
Next returns the next Grammar. It returns ErrorGrammar when an error was encountered. Using Err() one can retrieve the error message.
func (*Parser) Offset ¶
Offset returns the offset for the current Grammar.
func (*Parser) Values ¶
Values returns a slice of Tokens for the last Grammar. Only AtRuleGrammar, BeginAtRuleGrammar, BeginRulesetGrammar and DeclarationGrammar will return the at-rule components, ruleset selector and declaration values respectively.
type State ¶
type State func(*Parser) GrammarType
State is the state function the parser currently is in.
type Token ¶
Token is a single TokenType and its associated data.
func (Token) String ¶
type TokenType ¶
type TokenType uint32
TokenType determines the type of token, eg. a number or a semicolon.
const ( ErrorToken TokenType = iota // extra token when errors occur IdentToken FunctionToken // rgb( rgba( ... AtKeywordToken // @abc HashToken // #abc StringToken BadStringToken URLToken BadURLToken DelimToken // any unmatched character NumberToken // 5 PercentageToken // 5% DimensionToken // 5em UnicodeRangeToken // U+554A IncludeMatchToken // ~= DashMatchToken // |= PrefixMatchToken // ^= SuffixMatchToken // $= SubstringMatchToken // *= ColumnToken // || WhitespaceToken // space \t \r \n \f CDOToken // <!-- CDCToken // --> ColonToken // : SemicolonToken // ; CommaToken // , LeftBracketToken // [ RightBracketToken // ] LeftParenthesisToken // ( RightParenthesisToken // ) LeftBraceToken // { RightBraceToken // } CommentToken // extra token for comments EmptyToken CustomPropertyNameToken CustomPropertyValueToken )
TokenType values.
func (TokenType) String ¶
String returns the string representation of a TokenType.
Source Files ¶
hash.go lex.go parse.go util.go
- Version
- v2.7.20 (latest)
- Published
- Jan 28, 2025
- Platform
- linux/amd64
- Imports
- 6 packages
- Last checked
- 20 hours ago
Tools for package owners.