sql

package
v0.10.5 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Oct 12, 2021 License: MIT Imports: 15 Imported by: 0

Documentation

Index

Constants

View Source
const (
	UNUSED = 0

	SCRIPT       = 5000
	NAME         = 5001
	DESCRIPTION  = 5002
	LAST_UPDATED = 5003
	UPDATED_BY   = 5004
	TEST         = 5005
	DEV          = 5006
	PD_BEGIN     = 5007
	PD_END       = 5008
	PD_REF       = 5009
	PD_MIG       = 5015
	COMMENT      = 5010
	PD_FILE      = 5011
	PROD         = 5012
	LOCAL        = 5013
	INT          = 5014

	AND             = 6001
	OR              = 6002
	NE              = 6003
	SHIFT_LEFT      = 6004
	NULL_SAFE_EQUAL = 6005
	LE              = 6006
	GE              = 6007
	SHIFT_RIGHT     = 6008

	VALUE_ARG   = 7001
	STRING      = 7002
	HEX         = 7003
	FLOAT       = 7004
	BIT_LITERAL = 7005
	LIST_ARG    = 7006
	INTEGRAL    = 7007
	HEXNUM      = 7008

	LEX_ERROR = 8000

	ID = 9000
)

Variables

This section is empty.

Functions

func KeywordString

func KeywordString(id int) string

KeywordString returns the string corresponding to the given keyword

func RecompileSQL

func RecompileSQL(filePathIn string, filePathOut string, env string, keywords map[string]string)

RecompileSQL compiles the SQL script when the file changes

Types

type Lexer

type Lexer struct {
	// contains filtered or unexported fields
}

Lexer represents the domain for our lexer

func NewLexer

func NewLexer(tkn *Tokenizer) *Lexer

NewLexer returns a new lexer

func (*Lexer) Next

func (lex *Lexer) Next() (int, []byte)

Next returns the next ID and byte in the stream

func (*Lexer) Peek

func (lex *Lexer) Peek() (int, []byte)

Peek allows you to see the next ID and byte without moving forward in the stream

type NestedSQLQuery

type NestedSQLQuery struct {
	Key  string
	File string
	Name string
}

NestedSQLQuery represents the domain for a SQL query when referenced by another query

type SQLDirectives

type SQLDirectives struct {
	Name        string
	Description string
	Prod        struct {
		Keywords  map[string]string
		NestedSQL []*NestedSQLQuery
	}
	Dev struct {
		Keywords  map[string]string
		NestedSQL []*NestedSQLQuery
	}
	Local struct {
		Keywords  map[string]string
		NestedSQL []*NestedSQLQuery
	}
	Int struct {
		Keywords  map[string]string
		NestedSQL []*NestedSQLQuery
	}
}

SQLDirectives represents the domain for a fully parsed SQL script

func Parse

func Parse(sql string) (*SQLDirectives, error)

Parse returns SQLDirectives from a SQL script

type SQLMngr

type SQLMngr struct {
	Raw                   string
	Parsed                string
	Directives            *SQLDirectives
	DirectiveKeyOverrides map[string]string
	Err                   error
	Env                   string
	Migrations            map[string][]*sqlMig.SQLMigrationStrategy
	Stack                 []*SQLStack
	StackDepth            int
	Root                  map[string]string
	Getter                getter
}

SQLMngr represents the domain of a SQL script combined with any migrations for that table

func New

func New(script string, env string, migrations map[string][]*sqlMig.SQLMigrationStrategy, get getter) SQLMngr

New returns a new SQLMngr

func (*SQLMngr) Compile

func (sql *SQLMngr) Compile() string

Compile returns the parsed SQL

type SQLStack

type SQLStack struct {
	SQL        string
	Directives *SQLDirectives
	Nested     bool
	Level      int
	SQuery     *NestedSQLQuery
}

SQLStack represents the domain of a parsed SQL script, including any nested SQL referenced

type Tokenizer

type Tokenizer struct {
	InStream      io.Reader
	AllowComments bool
	ForceEOF      bool

	Position int

	LastError error
	// contains filtered or unexported fields
}

Tokenizer is the struct used to generate SQL tokens for the parser.

func NewStringTokenizer

func NewStringTokenizer(sql string) *Tokenizer

NewStringTokenizer creates a new Tokenizer for the sql string.

func NewTokenizer

func NewTokenizer(r io.Reader) *Tokenizer

NewTokenizer creates a new Tokenizer reading a sql string from the io.Reader.

func (*Tokenizer) Error

func (tkn *Tokenizer) Error(err string)

Error is called by go yacc if there's a parsing error.

func (*Tokenizer) Scan

func (tkn *Tokenizer) Scan() (int, []byte)

Scan scans the tokenizer for the next token and returns the token type and an optional value.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL