package sqltoken
import "zgo.at/zdb/internal/sqltoken"
Index ¶
- type Config
- func MySQLConfig() Config
- func OracleConfig() Config
- func PostgreSQLConfig() Config
- func SQLServerConfig() Config
- type Token
- type TokenType
- func TokenTypeString(s string) (TokenType, error)
- func TokenTypeValues() []TokenType
- func (i TokenType) IsATokenType() bool
- func (i TokenType) MarshalJSON() ([]byte, error)
- func (i TokenType) String() string
- func (i *TokenType) UnmarshalJSON(data []byte) error
- type Tokens
- func Tokenize(s string, config Config) Tokens
- func TokenizeMySQL(s string) Tokens
- func TokenizePostgreSQL(s string) Tokens
- func (ts Tokens) CmdSplit() TokensList
- func (ts Tokens) String() string
- func (ts Tokens) Strip() Tokens
- type TokensList
Types ¶
type Config ¶
type Config struct {
    // Tokenize ? as type Question (used by MySQL)
    NoticeQuestionMark bool
    // Tokenize $7 as type DollarNumber (PostgreSQL)
    NoticeDollarNumber bool
    // Tokenize :word as type ColonWord (sqlx, Oracle)
    NoticeColonWord bool
    // Tokenize :word with unicode as ColonWord (sqlx)
    ColonWordIncludesUnicode bool
    // Tokenize # as type comment (MySQL)
    NoticeHashComment bool
    // $q$ stuff $q$ and $$stuff$$ quoting (PostgreSQL)
    NoticeDollarQuotes bool
    // NoticeHexValues 0xa0 x'af' X'AF' (MySQL)
    NoticeHexNumbers bool
    // NoticeBinaryValues 0x01 b'01' B'01' (MySQL)
    NoticeBinaryNumbers bool
    // NoticeUAmpPrefix U& utf prefix U&"\0441\043B\043E\043D" (PostgreSQL)
    NoticeUAmpPrefix bool
    // NoticeCharsetLiteral _latin1'string' n'string' (MySQL)
    NoticeCharsetLiteral bool
    // NoticeNotionalStrings [nN]'...''...' (Oracle, SQL Server)
    NoticeNotionalStrings bool
    // NoticeDelimitedStrings [nN]?[qQ]'DELIM .... DELIM' (Oracle)
    NoticeDeliminatedStrings bool
    // NoticeTypedNumbers nn.nnEnn[fFdD] (Oracle)
    NoticeTypedNumbers bool
    // NoticeMoneyConstants $10 $10.32 (SQL Server)
    NoticeMoneyConstants bool
    // NoticeAtWord @foo (SQL Server)
    NoticeAtWord bool
    // NoticeAtIdentifiers _baz @fo$o @@b#ar #foo ##b@ar (SQL Server)
    NoticeIdentifiers bool
}
Config specifies the behavior of Tokenize with respect to syntax that differs between SQL implementations.
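As a minimal sketch (not part of the package's own documentation), a caller could build a Config by hand for an assumed sqlx-style dialect that recognizes both ? and :word placeholders; the field names used are those listed in the struct above:

package main

import (
    "fmt"

    "zgo.at/zdb/internal/sqltoken" // internal package: importable only from within zgo.at/zdb
)

func main() {
    // Hand-built Config for an assumed sqlx-style dialect: recognize both
    // ? and :word placeholders. All other fields keep their zero value
    // (false), so those features are not tokenized specially.
    cfg := sqltoken.Config{
        NoticeQuestionMark: true,
        NoticeColonWord:    true,
    }
    ts := sqltoken.Tokenize(`select * from t where a = ? and b = :b`, cfg)
    fmt.Println(len(ts)) // number of tokens produced
}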
func MySQLConfig ¶
func MySQLConfig() Config
MySQLConfig returns a parsing configuration that is appropriate for parsing MySQL, MariaDB, and SingleStore SQL.
func OracleConfig ¶
func OracleConfig() Config
OracleConfig returns a parsing configuration that is appropriate for parsing Oracle's SQL.
func PostgreSQLConfig ¶
func PostgreSQLConfig() Config
PostgreSQLConfig returns a parsing configuration that is appropriate for parsing PostgreSQL and CockroachDB SQL.
func SQLServerConfig ¶
func SQLServerConfig() Config
SQLServerConfig returns a parsing configuration that is appropriate for parsing SQL Server's SQL.
type Token ¶
type TokenType ¶
type TokenType int
const (
    Comment TokenType = iota
    Whitespace
    QuestionMark // used in MySQL substitution
    AtSign       // used in sqlserver substitution
    DollarNumber // used in PostgreSQL substitution
    ColonWord    // used in sqlx substitution
    Literal      // strings
    Identifier   // used in SQL Server for many things
    AtWord       // used in SQL Server, subset of Identifier
    Number
    Semicolon
    Punctuation
    Word
    Other // control characters and other non-printables
)
func TokenTypeString ¶
func TokenTypeString(s string) (TokenType, error)
TokenTypeString retrieves an enum value from the enum constant's string name. It returns an error if the given string is not part of the enum.
func TokenTypeValues ¶
func TokenTypeValues() []TokenType
TokenTypeValues returns all values of the enum.
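A hedged sketch of how these generated helpers fit together; the exact names printed are whatever String reports for each value:

package main

import (
    "fmt"

    "zgo.at/zdb/internal/sqltoken"
)

func main() {
    // List every TokenType alongside its string name.
    for _, tt := range sqltoken.TokenTypeValues() {
        fmt.Println(int(tt), tt.String())
    }

    // Round-trip a name back to its value; TokenTypeString returns an
    // error for names that are not part of the enum.
    tt, err := sqltoken.TokenTypeString(sqltoken.Semicolon.String())
    fmt.Println(tt == sqltoken.Semicolon, err, tt.IsATokenType())
}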
func (TokenType) IsATokenType ¶
func (i TokenType) IsATokenType() bool
IsATokenType returns true if the value is listed in the enum definition, and false otherwise.
func (TokenType) MarshalJSON ¶
func (i TokenType) MarshalJSON() ([]byte, error)
MarshalJSON implements the json.Marshaler interface for TokenType.
func (TokenType) String ¶
func (i TokenType) String() string
func (*TokenType) UnmarshalJSON ¶
func (i *TokenType) UnmarshalJSON(data []byte) error
UnmarshalJSON implements the json.Unmarshaler interface for TokenType.
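A short sketch of the JSON round trip these two methods enable; the exact encoded form is whatever MarshalJSON emits:

package main

import (
    "encoding/json"
    "fmt"

    "zgo.at/zdb/internal/sqltoken"
)

func main() {
    // Encode a TokenType to JSON and decode it back.
    b, err := json.Marshal(sqltoken.Word)
    if err != nil {
        panic(err)
    }

    var tt sqltoken.TokenType
    if err := json.Unmarshal(b, &tt); err != nil {
        panic(err)
    }
    fmt.Println(string(b), tt == sqltoken.Word) // the round trip preserves the value
}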
type Tokens ¶
type Tokens []Token
func Tokenize ¶
func Tokenize(s string, config Config) Tokens
Tokenize breaks up SQL strings into Token objects. No attempt is made to split runs of successive punctuation into separate tokens.
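A minimal sketch of calling Tokenize with one of the predefined configurations:

package main

import (
    "fmt"

    "zgo.at/zdb/internal/sqltoken"
)

func main() {
    sql := `select name from users where id = $1; -- look up one user`
    ts := sqltoken.Tokenize(sql, sqltoken.PostgreSQLConfig())

    fmt.Println(len(ts)) // number of tokens, including whitespace and the comment
    // ts.String() renders the token list as a string (assumed here to reproduce the input).
    fmt.Println(ts.String())
}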
func TokenizeMySQL ¶
func TokenizeMySQL(s string) Tokens
TokenizeMySQL breaks up MySQL / MariaDB / SingleStore SQL strings into Token objects.
func TokenizePostgreSQL ¶
func TokenizePostgreSQL(s string) Tokens
TokenizePostgreSQL breaks up PostgreSQL / CockroachDB SQL strings into Token objects.
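These wrappers are presumably shorthand for calling Tokenize with the matching predefined Config; a small sketch under that assumption:

package main

import (
    "fmt"

    "zgo.at/zdb/internal/sqltoken"
)

func main() {
    sql := `select * from t where a = $1`

    // Assumed equivalent: the dedicated helper and Tokenize with PostgreSQLConfig.
    a := sqltoken.TokenizePostgreSQL(sql)
    b := sqltoken.Tokenize(sql, sqltoken.PostgreSQLConfig())
    fmt.Println(len(a) == len(b))
}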
func (Tokens) CmdSplit ¶
func (ts Tokens) CmdSplit() TokensList
CmdSplit breaks up the token array into multiple token arrays, one per command (splitting on ";").
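A sketch of splitting a multi-statement script into per-command strings, combining CmdSplit with TokensList.Strings (documented below):

package main

import (
    "fmt"

    "zgo.at/zdb/internal/sqltoken"
)

func main() {
    script := `create table t (a int); insert into t values (1); select * from t;`
    ts := sqltoken.TokenizePostgreSQL(script)

    // One Tokens value per ";"-terminated command, rendered back to strings.
    for _, cmd := range ts.CmdSplit().Strings() {
        fmt.Printf("%q\n", cmd)
    }
}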
func (Tokens) String ¶
func (ts Tokens) String() string
func (Tokens) Strip ¶
func (ts Tokens) Strip() Tokens
Strip removes leading/trailing whitespace and semicolons, and strips all internal comments. Internal whitespace is collapsed to a single space.
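A sketch of using Strip to normalize a query before, for example, logging or comparing it:

package main

import (
    "fmt"

    "zgo.at/zdb/internal/sqltoken"
)

func main() {
    sql := `
        select x   -- the answer
        from   answers;  `
    ts := sqltoken.TokenizePostgreSQL(sql)

    // Strip drops leading/trailing whitespace and semicolons, removes
    // comments, and collapses internal whitespace to single spaces.
    fmt.Println(ts.Strip().String())
}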
type TokensList ¶
type TokensList []Tokens
func (TokensList) Strings ¶
func (tl TokensList) Strings() []string
Source Files ¶
- tokenize.go
- tokentype_enumer.go
- Version: v0.0.0-20250411114835-98f201430043 (latest)
- Published: Apr 11, 2025