package jlexer
import "github.com/mailru/easyjson/jlexer"
Package jlexer contains a JSON lexer implementation.
It is expected to be used mostly with generated parser code, so the interface is tuned for a parser that knows what kind of data to expect.
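For illustration, a minimal hand-written decoder in the style the generated code follows might look like the sketch below; the User type and the "id"/"name" field names are hypothetical, but every lexer call is part of the API listed in the index.

	import "github.com/mailru/easyjson/jlexer"

	// User is a hypothetical target type.
	type User struct {
		ID   int
		Name string
	}

	// decodeUser sketches the field-by-field pattern used by generated decoders.
	func decodeUser(data []byte) (User, error) {
		var u User
		l := jlexer.Lexer{Data: data}
		l.Delim('{')
		for !l.IsDelim('}') {
			key := l.UnsafeFieldName(false)
			l.WantColon()
			switch key {
			case "id":
				u.ID = l.Int()
			case "name":
				u.Name = l.String()
			default:
				l.SkipRecursive() // ignore unknown fields
			}
			l.WantComma()
		}
		l.Delim('}')
		l.Consumed() // report an error if anything but whitespace remains
		return u, l.Error()
	}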
Index ¶
- type Lexer
- func (r *Lexer) AddError(e error)
- func (r *Lexer) AddNonFatalError(e error)
- func (r *Lexer) Bool() bool
- func (r *Lexer) Bytes() []byte
- func (r *Lexer) Consumed()
- func (r *Lexer) CurrentToken() TokenKind
- func (r *Lexer) Delim(c byte)
- func (r *Lexer) Error() error
- func (r *Lexer) FetchToken()
- func (r *Lexer) Float32() float32
- func (r *Lexer) Float32Str() float32
- func (r *Lexer) Float64() float64
- func (r *Lexer) Float64Str() float64
- func (r *Lexer) GetNonFatalErrors() []*LexerError
- func (r *Lexer) GetPos() int
- func (r *Lexer) Int() int
- func (r *Lexer) Int16() int16
- func (r *Lexer) Int16Str() int16
- func (r *Lexer) Int32() int32
- func (r *Lexer) Int32Str() int32
- func (r *Lexer) Int64() int64
- func (r *Lexer) Int64Str() int64
- func (r *Lexer) Int8() int8
- func (r *Lexer) Int8Str() int8
- func (r *Lexer) IntStr() int
- func (r *Lexer) Interface() interface{}
- func (r *Lexer) IsDelim(c byte) bool
- func (r *Lexer) IsNull() bool
- func (r *Lexer) IsStart() bool
- func (r *Lexer) JsonNumber() json.Number
- func (r *Lexer) Null()
- func (r *Lexer) Ok() bool
- func (r *Lexer) Raw() []byte
- func (r *Lexer) Skip()
- func (r *Lexer) SkipRecursive()
- func (r *Lexer) String() string
- func (r *Lexer) StringIntern() string
- func (r *Lexer) Uint() uint
- func (r *Lexer) Uint16() uint16
- func (r *Lexer) Uint16Str() uint16
- func (r *Lexer) Uint32() uint32
- func (r *Lexer) Uint32Str() uint32
- func (r *Lexer) Uint64() uint64
- func (r *Lexer) Uint64Str() uint64
- func (r *Lexer) Uint8() uint8
- func (r *Lexer) Uint8Str() uint8
- func (r *Lexer) UintStr() uint
- func (r *Lexer) UintptrStr() uintptr
- func (r *Lexer) UnsafeBytes() []byte
- func (r *Lexer) UnsafeFieldName(skipUnescape bool) string
- func (r *Lexer) UnsafeString() string
- func (r *Lexer) WantColon()
- func (r *Lexer) WantComma()
- type LexerError
- type TokenKind
Types ¶
type Lexer ¶
type Lexer struct {
	Data []byte // Input data given to the lexer.

	UseMultipleErrors bool // If we want to use multiple errors.
	// contains filtered or unexported fields
}
Lexer is a JSON lexer: it iterates over JSON tokens in a byte slice.
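A short sketch of the error-collection mode, assuming that setting UseMultipleErrors makes the lexer record certain mismatches as non-fatal errors (retrievable via GetNonFatalErrors) instead of aborting; the decode loop itself is elided:

	// Sketch: lenient decoding that gathers non-fatal errors.
	func decodeLeniently(data []byte) []*jlexer.LexerError {
		l := jlexer.Lexer{Data: data, UseMultipleErrors: true}
		// ... drive the lexer field by field as usual ...
		l.Consumed()
		return l.GetNonFatalErrors()
	}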
func (*Lexer) AddError ¶
func (*Lexer) AddNonFatalError ¶
func (*Lexer) Bool ¶
Bool reads a true or false boolean keyword.
func (*Lexer) Bytes ¶
Bytes reads a string literal and base64 decodes it into a byte slice.
func (*Lexer) Consumed ¶
func (r *Lexer) Consumed()
Consumed reads all remaining bytes from the input, publishing an error if there is anything but whitespace remaining.
func (*Lexer) CurrentToken ¶
CurrentToken returns the current token kind if there were no errors, and TokenUndef otherwise.
func (*Lexer) Delim ¶
Delim consumes a token and verifies that it is the given delimiter.
func (*Lexer) Error ¶
func (*Lexer) FetchToken ¶
func (r *Lexer) FetchToken()
FetchToken scans the input for the next token.
func (*Lexer) Float32 ¶
func (*Lexer) Float32Str ¶
func (*Lexer) Float64 ¶
func (*Lexer) Float64Str ¶
func (*Lexer) GetNonFatalErrors ¶
func (r *Lexer) GetNonFatalErrors() []*LexerError
func (*Lexer) GetPos ¶
func (*Lexer) Int ¶
func (*Lexer) Int16 ¶
func (*Lexer) Int16Str ¶
func (*Lexer) Int32 ¶
func (*Lexer) Int32Str ¶
func (*Lexer) Int64 ¶
func (*Lexer) Int64Str ¶
func (*Lexer) Int8 ¶
func (*Lexer) Int8Str ¶
func (*Lexer) IntStr ¶
func (*Lexer) Interface ¶
func (r *Lexer) Interface() interface{}
Interface fetches an interface{} analogous to the 'encoding/json' package.
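A small sketch of using Interface to decode JSON of unknown shape; by analogy with encoding/json, objects should come back as map[string]interface{} and numbers as float64:

	// Sketch: decode arbitrary JSON, analogous to encoding/json into interface{}.
	func decodeAny(data []byte) (interface{}, error) {
		l := jlexer.Lexer{Data: data}
		v := l.Interface()
		l.Consumed()
		return v, l.Error()
	}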
func (*Lexer) IsDelim ¶
IsDelim returns true if there was no scanning error and the next token is the given delimiter.
func (*Lexer) IsNull ¶
IsNull returns true if the next token is a null keyword.
func (*Lexer) IsStart ¶
IsStart returns whether the lexer is positioned at the start of an input string.
func (*Lexer) JsonNumber ¶
JsonNumber fetches a json.Number from the 'encoding/json' package. Integer, float, and string values are all valid.
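For example, a hypothetical helper that accepts an id encoded as either a number or a quoted number could read it via JsonNumber:

	// Sketch: readID accepts both 123 and "123" and defers parsing to json.Number.
	func readID(l *jlexer.Lexer) (int64, error) {
		n := l.JsonNumber()
		if err := l.Error(); err != nil {
			return 0, err
		}
		return n.Int64()
	}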
func (*Lexer) Null ¶
func (r *Lexer) Null()
Null verifies that the next token is null and consumes it.
func (*Lexer) Ok ¶
Ok returns true if no error (including io.EOF) was encountered during scanning.
func (*Lexer) Raw ¶
Raw fetches the next item recursively as a data slice.
func (*Lexer) Skip ¶
func (r *Lexer) Skip()
Skip skips a single token.
func (*Lexer) SkipRecursive ¶
func (r *Lexer) SkipRecursive()
SkipRecursive skips next array or object completely, or just skips a single token if not an array/object.
Note: no syntax validation is performed on the skipped data.
func (*Lexer) String ¶
String reads a string literal.
func (*Lexer) StringIntern ¶
StringIntern reads a string literal, and performs string interning on it.
func (*Lexer) Uint ¶
func (*Lexer) Uint16 ¶
func (*Lexer) Uint16Str ¶
func (*Lexer) Uint32 ¶
func (*Lexer) Uint32Str ¶
func (*Lexer) Uint64 ¶
func (*Lexer) Uint64Str ¶
func (*Lexer) Uint8 ¶
func (*Lexer) Uint8Str ¶
func (*Lexer) UintStr ¶
func (*Lexer) UintptrStr ¶
func (*Lexer) UnsafeBytes ¶
UnsafeBytes returns the byte slice if the token is a string literal.
func (*Lexer) UnsafeFieldName ¶
UnsafeFieldName returns the current member name string token.
func (*Lexer) UnsafeString ¶
UnsafeString returns the string value if the token is a string literal.
Warning: the returned string may point to the input buffer, so it should not outlive the input buffer. The intended usage pattern is as an argument to a switch statement.
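As a sketch of that switch pattern (the status values are hypothetical; the result of UnsafeString is only compared, never stored):

	// Sketch: map a small set of known string values without copying the token.
	func readStatus(l *jlexer.Lexer) int {
		switch l.UnsafeString() {
		case "active":
			return 1
		case "archived":
			return 2
		default:
			return 0
		}
	}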
func (*Lexer) WantColon ¶
func (r *Lexer) WantColon()
WantColon requires a colon to be present before fetching next token.
func (*Lexer) WantComma ¶
func (r *Lexer) WantComma()
WantComma requires a comma to be present before fetching next token.
type LexerError ¶
LexerError implements the error interface and represents all possible errors that can be generated while parsing JSON data.
func (*LexerError) Error ¶
func (l *LexerError) Error() string
type TokenKind ¶
type TokenKind byte
TokenKind determines the type of a token.
const (
	TokenUndef  TokenKind = iota // No token.
	TokenDelim                   // Delimiter: one of '{', '}', '[' or ']'.
	TokenString                  // A string literal, e.g. "abc\u1234"
	TokenNumber                  // Number literal, e.g. 1.5e5
	TokenBool                    // Boolean literal: true or false.
	TokenNull                    // null keyword.
)
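CurrentToken makes it possible to branch on how a value is encoded. A sketch that accepts an int64 written either as a number or as a string:

	// Sketch: read an int64 that may appear as 123 or "123".
	func readFlexibleInt64(l *jlexer.Lexer) int64 {
		if l.CurrentToken() == jlexer.TokenString {
			return l.Int64Str() // quoted form
		}
		return l.Int64() // plain number
	}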
Source Files ¶
bytestostr.go error.go lexer.go
- Version
- v0.9.0 (latest)
- Published
- Dec 14, 2024