package shlex
import "github.com/flynn-archive/go-shlex"
Index ¶
- Constants
- func Split(s string) ([]string, error)
- type Lexer
- type RuneTokenType
- type Token
- type TokenClassifier
- func NewDefaultClassifier() *TokenClassifier
- func (classifier *TokenClassifier) ClassifyRune(rune int32) RuneTokenType
- type TokenType
- type Tokenizer
Constants ¶
const ( RUNE_CHAR string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-,/@$*()+=><:;&^%~|!?[]{}" RUNE_SPACE string = " \t\r\n" RUNE_ESCAPING_QUOTE string = "\"" RUNE_NONESCAPING_QUOTE string = "'" RUNE_ESCAPE = "\\" RUNE_COMMENT = "#" RUNETOKEN_UNKNOWN RuneTokenType = 0 RUNETOKEN_CHAR RuneTokenType = 1 RUNETOKEN_SPACE RuneTokenType = 2 RUNETOKEN_ESCAPING_QUOTE RuneTokenType = 3 RUNETOKEN_NONESCAPING_QUOTE RuneTokenType = 4 RUNETOKEN_ESCAPE RuneTokenType = 5 RUNETOKEN_COMMENT RuneTokenType = 6 RUNETOKEN_EOF RuneTokenType = 7 TOKEN_UNKNOWN TokenType = 0 TOKEN_WORD TokenType = 1 TOKEN_SPACE TokenType = 2 TOKEN_COMMENT TokenType = 3 STATE_START lexerState = 0 STATE_INWORD lexerState = 1 STATE_ESCAPING lexerState = 2 STATE_ESCAPING_QUOTED lexerState = 3 STATE_QUOTED_ESCAPING lexerState = 4 STATE_QUOTED lexerState = 5 STATE_COMMENT lexerState = 6 INITIAL_TOKEN_CAPACITY int = 100 )
Functions ¶
func Split ¶
Split a string into a slice of strings, based upon shell-style rules for quoting, escaping, and spaces.
Types ¶
type Lexer ¶
type Lexer struct {
// contains filtered or unexported fields
}
A type for turning an input stream into a sequence of strings. Whitespace and comments are skipped.
func NewLexer ¶
Create a new lexer.
func (*Lexer) NextWord ¶
Return the next word, and an error value. If there are no more words, the error will be io.EOF.
type RuneTokenType ¶
type RuneTokenType int
A RuneTokenType is the type of a UTF-8 character; a character, quote, space, escape.
type Token ¶
type Token struct {
// contains filtered or unexported fields
}
func (*Token) Equal ¶
Two tokens are equal if both their types and values are equal. A nil token can never equal another token.
type TokenClassifier ¶
type TokenClassifier struct {
// contains filtered or unexported fields
}
A type for classifying characters. This allows for different sorts of classifiers - those accepting extended non-ASCII chars, or strict POSIX compatibility, for example.
func NewDefaultClassifier ¶
func NewDefaultClassifier() *TokenClassifier
Create a new classifier for basic ASCII characters.
func (*TokenClassifier) ClassifyRune ¶
func (classifier *TokenClassifier) ClassifyRune(rune int32) RuneTokenType
type TokenType ¶
type TokenType int
A TokenType is a top-level token; a word, space, comment, unknown.
type Tokenizer ¶
type Tokenizer struct {
// contains filtered or unexported fields
}
A type for turning an input stream into a sequence of typed tokens.
func NewTokenizer ¶
Create a new tokenizer.
func (*Tokenizer) NextToken ¶
Return the next token in the stream, and an error value. If there are no more tokens available, the error value will be io.EOF.
Source Files ¶
- Version
- v0.0.0-20150515145356-3f9db97f8568 (latest)
- Published
- May 15, 2015
- Platform
- js/wasm
- Imports
- 5 packages
- Last checked
- 2 months ago
Tools for package owners.