lexer

package
v0.0.0-...-131db16 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Sep 2, 2022 License: MIT Imports: 4 Imported by: 0

Documentation

Index

Constants

View Source
// Literal string values for the single-character punctuation and
// whitespace tokens the lexer recognizes while scanning SQL source.
const (
	Comma            = ","
	StartParenthesis = "("
	EndParenthesis   = ")"
	StartBracket     = "["
	EndBracket       = "]"
	StartBrace       = "{"
	EndBrace         = "}"
	SingleQuote      = "'" // string literals are delimited by single quotes
	NewLine          = "\n"
)

Literal values of the punctuation and whitespace tokens.

Variables

View Source
// End-of-clause token sets. Each EndOfX slice lists the token types
// that, when encountered, terminate clause X during parsing/grouping.
// EOF and ENDPARENTHESIS appear in most sets because a clause also
// ends at the end of input or of an enclosing parenthesized group.
var (
	EndOfSelect      = []TokenType{FROM, UNION, EOF}
	EndOfCase        = []TokenType{END}
	EndOfFrom        = []TokenType{WHERE, INNER, OUTER, LEFT, RIGHT, JOIN, NATURAL, CROSS, ORDER, GROUP, UNION, OFFSET, LIMIT, FETCH, EXCEPT, INTERSECT, EOF, ENDPARENTHESIS}
	EndOfJoin        = []TokenType{WHERE, ORDER, GROUP, LIMIT, OFFSET, FETCH, ANDGROUP, ORGROUP, LEFT, RIGHT, INNER, OUTER, NATURAL, CROSS, UNION, EXCEPT, INTERSECT, EOF, ENDPARENTHESIS}
	EndOfWhere       = []TokenType{GROUP, ORDER, LIMIT, OFFSET, FETCH, ANDGROUP, OR, UNION, EXCEPT, INTERSECT, RETURNING, EOF, ENDPARENTHESIS}
	EndOfAndGroup    = []TokenType{GROUP, ORDER, LIMIT, OFFSET, FETCH, UNION, EXCEPT, INTERSECT, ANDGROUP, ORGROUP, EOF, ENDPARENTHESIS}
	EndOfOrGroup     = []TokenType{GROUP, ORDER, LIMIT, OFFSET, FETCH, UNION, EXCEPT, INTERSECT, ANDGROUP, ORGROUP, EOF, ENDPARENTHESIS}
	EndOfGroupBy     = []TokenType{ORDER, LIMIT, FETCH, OFFSET, UNION, EXCEPT, INTERSECT, HAVING, EOF, ENDPARENTHESIS}
	EndOfHaving      = []TokenType{LIMIT, OFFSET, FETCH, ORDER, UNION, EXCEPT, INTERSECT, EOF, ENDPARENTHESIS}
	EndOfOrderBy     = []TokenType{LIMIT, FETCH, OFFSET, UNION, EXCEPT, INTERSECT, EOF, ENDPARENTHESIS}
	EndOfLimitClause = []TokenType{UNION, EXCEPT, INTERSECT, EOF, ENDPARENTHESIS}
	// Parenthesized groups, function calls, and type casts all end at
	// the matching closing parenthesis.
	EndOfParenthesis = []TokenType{ENDPARENTHESIS}
	EndOfTieClause   = []TokenType{SELECT}
	EndOfUpdate      = []TokenType{WHERE, SET, RETURNING, EOF}
	EndOfSet         = []TokenType{WHERE, RETURNING, EOF}
	EndOfReturning   = []TokenType{EOF}
	EndOfDelete      = []TokenType{WHERE, FROM, EOF}
	EndOfInsert      = []TokenType{VALUES, EOF}
	EndOfValues      = []TokenType{UPDATE, RETURNING, EOF}
	EndOfFunction    = []TokenType{ENDPARENTHESIS}
	EndOfTypeCast    = []TokenType{ENDPARENTHESIS}
	EndOfLock        = []TokenType{EOF}
	EndOfWith        = []TokenType{EOF}
)

End keywords of each clause.

View Source
// Token-type sets used to decide when a new group (sub-clause) starts.
// A token whose type appears in one of these slices triggers creation
// of the corresponding group kind during parsing.
var (
	TokenTypesOfGroupMaker = []TokenType{SELECT, CASE, FROM, WHERE, ORDER, GROUP, LIMIT, ANDGROUP, ORGROUP, HAVING, UNION, EXCEPT, INTERSECT, FUNCTION, STARTPARENTHESIS, TYPE}
	TokenTypesOfJoinMaker  = []TokenType{JOIN, INNER, OUTER, LEFT, RIGHT, NATURAL, CROSS}
	TokenTypeOfTieClause   = []TokenType{UNION, INTERSECT, EXCEPT}
	TokenTypeOfLimitClause = []TokenType{LIMIT, FETCH, OFFSET}
)

Token types whose keywords trigger the creation of a subGroup.

Functions

This section is empty.

Types

type Token

// Token is a single lexical token produced by the Tokenizer.
type Token struct {
	Type  TokenType // kind of token (keyword, identifier, punctuation, ...)
	Value string    // raw text of the token as scanned from the input
}

Token is a token struct

func (Token) IncrementIndentLevel

func (t Token) IncrementIndentLevel(lev int)

IncrementIndentLevel is a placeholder for implementing the Reindenter interface

func (Token) IsJoinStart

func (t Token) IsJoinStart() bool

IsJoinStart determines if ttype is included in TokenTypesOfJoinMaker

func (Token) IsKeyWordInSelect

func (t Token) IsKeyWordInSelect() bool

IsKeyWordInSelect returns true if token is a keyword in select group

func (Token) IsLimitClauseStart

func (t Token) IsLimitClauseStart() bool

IsLimitClauseStart determines if ttype is included in TokenTypesOfLimitClause

func (Token) IsNeedNewLineBefore

func (t Token) IsNeedNewLineBefore() bool

IsNeedNewLineBefore returns true if the token needs a new line before being written to the buffer

func (Token) IsTieClauseStart

func (t Token) IsTieClauseStart() bool

IsTieClauseStart determines if ttype is included in TokenTypesOfTieClause

func (Token) Reindent

func (t Token) Reindent(buf *bytes.Buffer) error

Reindent is a placeholder for implementing Reindenter interface

type TokenType

type TokenType int

TokenType is an alias type that represents a kind of token

// Token types. The enumeration starts at 1 (1 + iota) so the zero
// value of TokenType never matches a valid token type.
const (
	EOF TokenType = 1 + iota // eof
	WS                       // white space
	NEWLINE
	FUNCTION
	// Punctuation tokens.
	COMMA
	STARTPARENTHESIS
	ENDPARENTHESIS
	STARTBRACKET
	ENDBRACKET
	STARTBRACE
	ENDBRACE
	TYPE
	// Literal-like tokens.
	IDENT  // field or table name
	STRING // values surrounded with single quotes
	// SQL keyword tokens.
	SELECT
	FROM
	WHERE
	CASE
	ORDER
	BY
	AS
	JOIN
	LEFT
	RIGHT
	INNER
	OUTER
	ON
	WHEN
	END
	GROUP
	DESC
	ASC
	LIMIT
	AND
	ANDGROUP
	OR
	ORGROUP
	IN
	IS
	NOT
	NULL
	DISTINCT
	LIKE
	BETWEEN
	UNION
	ALL
	HAVING
	OVER
	EXISTS
	UPDATE
	SET
	RETURNING
	DELETE
	INSERT
	INTO
	DO
	VALUES
	FOR
	THEN
	ELSE
	DISTINCTROW
	FILTER
	WITHIN
	COLLATE
	INTERVAL
	INTERSECT
	EXCEPT
	OFFSET
	FETCH
	FIRST
	ROWS
	USING
	OVERLAPS
	NATURAL
	CROSS
	TIME
	ZONE
	NULLS
	LAST
	AT
	LOCK
	WITH

	// Internal lexer states, not SQL keywords.
	QUOTEAREA
	SURROUNDING
)

Token types

type Tokenizer

// Tokenizer tokenizes SQL statements. Create one with NewTokenizer
// and obtain the token stream via GetTokens or Tokenize.
type Tokenizer struct {
	// contains filtered or unexported fields
}

Tokenizer tokenizes SQL statements

func NewTokenizer

func NewTokenizer(src string) *Tokenizer

NewTokenizer creates Tokenizer

func (*Tokenizer) GetTokens

func (t *Tokenizer) GetTokens() ([]Token, error)

GetTokens returns tokens for parsing

func (*Tokenizer) Tokenize

func (t *Tokenizer) Tokenize() ([]Token, error)

Tokenize analyses every rune in the SQL statement; each token is identified when whitespace appears

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL