package analyzer
v0.18.1
Published: Apr 9, 2024 License: Apache-2.0 Imports: 37 Imported by: 4

Documentation

Index

Constants

const MaxBytePrefix = 3072

Variables

var (
	// ParallelQueryCounter describes a metric that monotonically
	// accumulates the number of parallel queries.
	ParallelQueryCounter = discard.NewCounter()

	SingleThreadFeatureFlag = false
)

var DefaultRules = []Rule{
	{validateStarExpressionsId, validateStarExpressions},
	{pushdownSubqueryAliasFiltersId, pushdownSubqueryAliasFilters},
	{pruneTablesId, pruneTables},
	{validateCheckConstraintId, validateCheckConstraints},
	{unnestInSubqueriesId, unnestInSubqueries},
	{resolveSubqueriesId, resolveSubqueries},
	{replaceCrossJoinsId, replaceCrossJoins},
}

DefaultRules are the rules to apply when analyzing nodes.

var DefaultValidationRules = []Rule{
	{validateResolvedId, validateIsResolved},
	{validateOrderById, validateOrderBy},
	{validateGroupById, validateGroupBy},
	{validateSchemaSourceId, validateSchemaSource},
	{validateIndexCreationId, validateIndexCreation},
	{validateOperandsId, validateOperands},
	{validateIntervalUsageId, validateIntervalUsage},
	{validateSubqueryColumnsId, validateSubqueryColumns},
	{validateUnionSchemasMatchId, validateUnionSchemasMatch},
	{validateAggregationsId, validateAggregations},
}

DefaultValidationRules are the validation rules to apply while analyzing nodes.

var ErrInAnalysis = errors.NewKind("error in analysis: %s")

ErrInAnalysis is returned for generic analyzer errors.

var ErrInvalidNodeType = errors.NewKind("%s: invalid node of type: %T")

ErrInvalidNodeType is returned when the analyzer can't handle a particular kind of node type.

var ErrMaxAnalysisIters = errors.NewKind("exceeded max analysis iterations (%d)")

ErrMaxAnalysisIters is returned when the maximum number of analysis iterations is exceeded.

var (
	// ErrOrderByColumnIndex is returned when an ORDER BY clause
	// references an unknown column index.
	ErrOrderByColumnIndex = errors.NewKind("unknown column %d in order by clause")
)

var OnceAfterAll = []Rule{
	{assignExecIndexesId, assignExecIndexes},

	{resolveInsertRowsId, resolveInsertRows},
	{applyTriggersId, applyTriggers},
	{applyProceduresId, applyProcedures},
	{applyRowUpdateAccumulatorsId, applyUpdateAccumulators},
	{wrapWithRollbackId, wrapWritesWithRollback},
	{inlineSubqueryAliasRefsId, inlineSubqueryAliasRefs},
	{cacheSubqueryAliasesInJoinsId, cacheSubqueryAliasesInJoins},

	{backtickDefaulColumnValueNamesId, backtickDefaultColumnValueNames},

	{AutocommitId, addAutocommitNode},
	{TrackProcessId, trackProcess},
	{parallelizeId, parallelize},
	{clearWarningsId, clearWarnings},
}

OnceAfterAll contains the rules to be applied just once after all other rules.

var OnceAfterDefault = []Rule{
	{unnestExistsSubqueriesId, unnestExistsSubqueries},
	{moveJoinCondsToFilterId, moveJoinConditionsToFilter},
	{finalizeUnionsId, finalizeUnions},
	{loadTriggersId, loadTriggers},
	{processTruncateId, processTruncate},
	{stripTableNameInDefaultsId, stripTableNamesFromColumnDefaults},
	{pushFiltersId, pushFilters},
	{optimizeJoinsId, optimizeJoins},
	{finalizeSubqueriesId, finalizeSubqueries},
	{applyIndexesFromOuterScopeId, applyIndexesFromOuterScope},
	{replaceAggId, replaceAgg},
	{replaceIdxSortId, replaceIdxSort},
	{eraseProjectionId, eraseProjection},
	{flattenDistinctId, flattenDistinct},
	{insertTopNId, insertTopNNodes},
	{applyHashInId, applyHashIn},
	{assignRoutinesId, assignRoutines},
	{modifyUpdateExprsForJoinId, modifyUpdateExpressionsForJoin},
	{applyFKsId, applyForeignKeys},
}

OnceAfterDefault contains the rules to be applied just once after the DefaultRules.

var OnceBeforeDefault = []Rule{
	{applyDefaultSelectLimitId, applyDefaultSelectLimit},
	{replaceCountStarId, replaceCountStar},
	{applyEventSchedulerId, applyEventScheduler},
	{validateOffsetAndLimitId, validateLimitAndOffset},
	{validateCreateTableId, validateCreateTable},
	{validateAlterTableId, validateAlterTable},
	{validateExprSemId, validateExprSem},
	{validateCreateProcedureId, validateCreateProcedure},
	{resolveDropConstraintId, resolveDropConstraint},
	{resolveAlterColumnId, resolveAlterColumn},
	{validateDropTablesId, validateDropTables},
	{resolveCreateSelectId, resolveCreateSelect},
	{validateDropConstraintId, validateDropConstraint},
	{resolveUnionsId, resolveUnions},
	{resolveDescribeQueryId, resolveDescribeQuery},
	{validateCreateTriggerId, validateCreateTrigger},
	{validateColumnDefaultsId, validateColumnDefaults},
	{validateReadOnlyDatabaseId, validateReadOnlyDatabase},
	{validateReadOnlyTransactionId, validateReadOnlyTransaction},
	{validateDatabaseSetId, validateDatabaseSet},
	{validateDeleteFromId, validateDeleteFrom},
	{validatePrivilegesId, validatePrivileges},
	{simplifyFiltersId, simplifyFilters},
	{pushNotFiltersId, pushNotFilters},
	{hoistOutOfScopeFiltersId, hoistOutOfScopeFilters},
}

OnceBeforeDefault contains the rules to be applied just once before the DefaultRules.

var PreparedStmtDisabled bool

Functions

func DeepCopyNode added in v0.12.0

func DeepCopyNode(node sql.Node) (sql.Node, error)

func DefaultRuleSelector added in v0.12.0

func DefaultRuleSelector(id RuleId) bool

func NewDatabaseProvider added in v0.11.0

func NewDatabaseProvider(dbs ...sql.Database) sql.DatabaseProvider

TODO: kill this

func Schemas added in v0.18.0

func Schemas(nodes []sql.Node) sql.Schema

Schemas returns the schemas of the given nodes appended into a single schema.

func SelectAllBatches added in v0.12.0

func SelectAllBatches(string) bool

func SetPreparedStmts added in v0.12.0

func SetPreparedStmts(v bool)

func StripPassthroughNodes added in v0.12.0

func StripPassthroughNodes(n sql.Node) sql.Node

StripPassthroughNodes strips all top-level passthrough nodes meant to apply only to top-level queries (query tracking, transaction logic, etc.) from the given node tree and returns the first non-passthrough child element. This is useful when the analyzer is invoked recursively, e.g. when analyzing subqueries or triggers. TODO: instead of stripping this node off after analysis, it would be better to just not add it in the first place.

Types

type Analyzer

type Analyzer struct {
	// Whether to log various debugging messages
	Debug bool
	// Whether to output the query plan at each step of the analyzer
	Verbose bool

	Parallelism int
	// Batches of Rules to apply.
	Batches []*Batch
	// Catalog of databases and registered functions.
	Catalog *Catalog
	// Coster estimates the incremental CPU+memory cost for execution operators.
	Coster memo.Coster
	// ExecBuilder converts a sql.Node tree into an executable iterator.
	ExecBuilder sql.NodeExecBuilder
	// EventScheduler is used to communicate with the event scheduler
	// for any EVENT related statements. It can be nil if EventScheduler is not defined.
	EventScheduler sql.EventScheduler
	// contains filtered or unexported fields
}

Analyzer analyzes nodes of the execution plan and applies rules and validations to them.

func NewDefault

func NewDefault(provider sql.DatabaseProvider) *Analyzer

NewDefault creates a default Analyzer instance with all default Rules and configuration. To add custom rules, the easiest way is to use the Builder.
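
For illustration, a minimal sketch of building a default analyzer. It assumes the package lives at github.com/dolthub/go-mysql-server/sql/analyzer and uses the companion memory package only as a stand-in; any sql.DatabaseProvider works.

import (
	"github.com/dolthub/go-mysql-server/memory"
	"github.com/dolthub/go-mysql-server/sql/analyzer"
)

// newDefaultAnalyzer builds an Analyzer with the default rule batches.
func newDefaultAnalyzer() *analyzer.Analyzer {
	// Wrap an in-memory database in a provider using the helper from this package.
	pro := analyzer.NewDatabaseProvider(memory.NewDatabase("mydb"))
	return analyzer.NewDefault(pro)
}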

func NewDefaultWithVersion added in v0.16.0

func NewDefaultWithVersion(provider sql.DatabaseProvider) *Analyzer

NewDefaultWithVersion creates a default Analyzer instance either experimental or

func (*Analyzer) Analyze

func (a *Analyzer) Analyze(ctx *sql.Context, n sql.Node, scope *plan.Scope) (sql.Node, error)

Analyze applies the transformation rules to the node given. In the case of an error, the last successfully transformed node is returned along with the error.
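
A hedged sketch of a helper built on Analyze; ctx and parsed are assumed to come from the caller (for example the engine's parser and plan builder), and a nil scope denotes a top-level query.

import (
	"github.com/dolthub/go-mysql-server/sql"
	"github.com/dolthub/go-mysql-server/sql/analyzer"
)

// analyzeQuery is a hypothetical wrapper around Analyzer.Analyze.
func analyzeQuery(ctx *sql.Context, a *analyzer.Analyzer, parsed sql.Node) (sql.Node, error) {
	// nil scope: the node is a top-level query with no outer-scope columns.
	analyzed, err := a.Analyze(ctx, parsed, nil)
	if err != nil {
		return nil, err
	}
	// Strip top-level passthrough wrappers (see StripPassthroughNodes above)
	// before handing the plan to other tooling.
	return analyzer.StripPassthroughNodes(analyzed), nil
}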

func (*Analyzer) Log

func (a *Analyzer) Log(msg string, args ...interface{})

Log prints an INFO message to stdout with the given message and args if the analyzer is in debug mode.

func (*Analyzer) LogDiff

func (a *Analyzer) LogDiff(prev, next sql.Node)

LogDiff logs the diff between the query plans after a transformation rule has been applied. A diff can only be printed when the string representations of the nodes differ, which isn't always the case.

func (*Analyzer) LogFn added in v0.16.0

func (a *Analyzer) LogFn() func(string, ...any)

func (*Analyzer) LogNode

func (a *Analyzer) LogNode(n sql.Node)

LogNode prints the node given if Verbose logging is enabled.

func (*Analyzer) PopDebugContext

func (a *Analyzer) PopDebugContext()

PopDebugContext pops a context message off the context stack.

func (*Analyzer) PushDebugContext

func (a *Analyzer) PushDebugContext(msg string)

PushDebugContext pushes the given context string onto the context stack, to use when logging debug messages.

type Batch

type Batch struct {
	Desc       string
	Iterations int
	Rules      []Rule
}

Batch executes a set of rules a specific number of times. When that limit is reached without the node stabilizing, the node as transformed so far is returned along with ErrMaxAnalysisIters.

func (*Batch) Eval

func (b *Batch) Eval(ctx *sql.Context, a *Analyzer, n sql.Node, scope *plan.Scope, sel RuleSelector) (sql.Node, transform.TreeIdentity, error)

Eval executes the rules of the batch. On any error, the partially transformed node is returned along with the error. If the batch's max number of iterations is reached without achieving stabilization (batch evaluation no longer changes the node), then this method returns ErrMaxAnalysisIters.
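
As an illustrative sketch (imports as in the earlier sketches), a one-iteration batch could run just the default validation rules over an already-analyzed node; ctx, a, and analyzed are assumed to exist.

// validate is a hypothetical helper that evaluates a single validation batch.
func validate(ctx *sql.Context, a *analyzer.Analyzer, analyzed sql.Node) error {
	validation := &analyzer.Batch{
		Desc:       "validation",
		Iterations: 1,
		Rules:      analyzer.DefaultValidationRules,
	}
	// DefaultRuleSelector enables every rule in the batch; nil scope for a
	// top-level node.
	_, _, err := validation.Eval(ctx, a, analyzed, nil, analyzer.DefaultRuleSelector)
	return err
}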

func (*Batch) EvalWithSelector added in v0.12.0

func (b *Batch) EvalWithSelector(ctx *sql.Context, a *Analyzer, n sql.Node, scope *plan.Scope, sel RuleSelector) (sql.Node, transform.TreeIdentity, error)

type BatchSelector added in v0.12.0

type BatchSelector func(string) bool

BatchSelector filters analysis batches by name

type Builder

type Builder struct {
	// contains filtered or unexported fields
}

Builder provides an easy way to generate an Analyzer with custom rules and options.

func NewBuilder

func NewBuilder(pro sql.DatabaseProvider) *Builder

NewBuilder creates a new Builder from a specific database provider. The builder allows us to add custom Rules and modify some internal properties.
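
A brief sketch of the builder chain, assuming pro is any sql.DatabaseProvider:

// newCustomAnalyzer tweaks the default configuration before building.
func newCustomAnalyzer(pro sql.DatabaseProvider) *analyzer.Analyzer {
	return analyzer.NewBuilder(pro).
		WithDebug().
		WithParallelism(2).
		Build()
}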

func (*Builder) AddPostAnalyzeRule

func (ab *Builder) AddPostAnalyzeRule(id RuleId, fn RuleFunc) *Builder

AddPostAnalyzeRule adds a new rule to the analyzer after standard analyzer rules.

func (*Builder) AddPostValidationRule

func (ab *Builder) AddPostValidationRule(id RuleId, fn RuleFunc) *Builder

AddPostValidationRule adds a new rule to the analyzer after standard validation rules.

func (*Builder) AddPreAnalyzeRule

func (ab *Builder) AddPreAnalyzeRule(id RuleId, fn RuleFunc) *Builder

AddPreAnalyzeRule adds a new rule to the analyzer before the standard analyzer rules.

func (*Builder) AddPreValidationRule

func (ab *Builder) AddPreValidationRule(id RuleId, fn RuleFunc) *Builder

AddPreValidationRule adds a new rule to the analyzer before standard validation rules.

func (*Builder) Build

func (ab *Builder) Build() *Analyzer

Build creates a new Analyzer from the builder parameters

func (*Builder) RemoveAfterAllRule

func (ab *Builder) RemoveAfterAllRule(id RuleId) *Builder

RemoveAfterAllRule removes a default rule from the analyzer which would occur after all other rules

func (*Builder) RemoveDefaultRule

func (ab *Builder) RemoveDefaultRule(id RuleId) *Builder

RemoveDefaultRule removes a default rule from the analyzer that is executed as part of the analysis

func (*Builder) RemoveOnceAfterRule

func (ab *Builder) RemoveOnceAfterRule(id RuleId) *Builder

RemoveOnceAfterRule removes a default rule from the analyzer which would occur just once after the default analysis

func (*Builder) RemoveOnceBeforeRule

func (ab *Builder) RemoveOnceBeforeRule(id RuleId) *Builder

RemoveOnceBeforeRule removes a default rule from the analyzer which would occur before other rules

func (*Builder) RemoveValidationRule

func (ab *Builder) RemoveValidationRule(id RuleId) *Builder

RemoveValidationRule removes a default rule from the analyzer which would occur as part of the validation rules

func (*Builder) WithDebug

func (ab *Builder) WithDebug() *Builder

WithDebug activates debug on the Analyzer.

func (*Builder) WithParallelism

func (ab *Builder) WithParallelism(parallelism int) *Builder

WithParallelism sets the parallelism level on the analyzer.

type Catalog added in v0.11.0

type Catalog struct {
	MySQLDb       *mysql_db.MySQLDb
	InfoSchema    sql.Database
	StatsProvider sql.StatsProvider

	DbProvider sql.DatabaseProvider

	// BinlogReplicaController holds an optional controller that receives forwarded binlog
	// replication messages (e.g. "start replica").
	BinlogReplicaController binlogreplication.BinlogReplicaController

	// BinlogPrimaryController holds an optional controller that receives forwarded binlog
	// replication messages (e.g. "show replicas") and commands (e.g. COM_REGISTER_REPLICA).
	BinlogPrimaryController binlogreplication.BinlogPrimaryController
	// contains filtered or unexported fields
}

func NewCatalog added in v0.11.0

func NewCatalog(provider sql.DatabaseProvider) *Catalog

NewCatalog returns a new empty Catalog with the given provider
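
A small sketch of resolving a table through a catalog. The database and table names are placeholders, and sql.NewEmptyContext is used only for brevity; real callers pass a session-backed *sql.Context.

// lookupTable is a hypothetical helper that resolves a table by name.
func lookupTable(pro sql.DatabaseProvider) (sql.Table, error) {
	cat := analyzer.NewCatalog(pro)
	ctx := sql.NewEmptyContext()

	// Check that the database exists, then resolve the table within it.
	if _, err := cat.Database(ctx, "mydb"); err != nil {
		return nil, err
	}
	tbl, _, err := cat.Table(ctx, "mydb", "mytable")
	return tbl, err
}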

func (*Catalog) AllDatabases added in v0.11.0

func (c *Catalog) AllDatabases(ctx *sql.Context) []sql.Database

func (*Catalog) CreateDatabase added in v0.11.0

func (c *Catalog) CreateDatabase(ctx *sql.Context, dbName string, collation sql.CollationID) error

CreateDatabase creates a new Database and adds it to the catalog.

func (*Catalog) DataLength added in v0.18.0

func (c *Catalog) DataLength(ctx *sql.Context, db, table string) (uint64, error)

func (*Catalog) Database added in v0.11.0

func (c *Catalog) Database(ctx *sql.Context, db string) (sql.Database, error)

Database returns the database with the given name.

func (*Catalog) DatabaseTable added in v0.16.0

func (c *Catalog) DatabaseTable(ctx *sql.Context, db sql.Database, tableName string) (sql.Table, sql.Database, error)

func (*Catalog) DatabaseTableAsOf added in v0.16.0

func (c *Catalog) DatabaseTableAsOf(ctx *sql.Context, db sql.Database, tableName string, asOf interface{}) (sql.Table, sql.Database, error)

func (*Catalog) DropDbStats added in v0.18.0

func (c *Catalog) DropDbStats(ctx *sql.Context, db string, flush bool) error

func (*Catalog) DropStats added in v0.18.0

func (c *Catalog) DropStats(ctx *sql.Context, qual sql.StatQualifier, cols []string) error

func (*Catalog) ExternalStoredProcedure added in v0.14.0

func (c *Catalog) ExternalStoredProcedure(ctx *sql.Context, name string, numOfParams int) (*sql.ExternalStoredProcedureDetails, error)

ExternalStoredProcedure implements sql.ExternalStoredProcedureProvider

func (*Catalog) ExternalStoredProcedures added in v0.14.0

func (c *Catalog) ExternalStoredProcedures(ctx *sql.Context, name string) ([]sql.ExternalStoredProcedureDetails, error)

ExternalStoredProcedures implements sql.ExternalStoredProcedureProvider

func (*Catalog) Function added in v0.11.0

func (c *Catalog) Function(ctx *sql.Context, name string) (sql.Function, error)

Function returns the function with the name given, or sql.ErrFunctionNotFound if it doesn't exist

func (*Catalog) GetBinlogPrimaryController added in v0.18.1

func (c *Catalog) GetBinlogPrimaryController() binlogreplication.BinlogPrimaryController

func (*Catalog) GetBinlogReplicaController added in v0.18.0

func (c *Catalog) GetBinlogReplicaController() binlogreplication.BinlogReplicaController

func (*Catalog) GetStats added in v0.18.0

func (c *Catalog) GetStats(ctx *sql.Context, qual sql.StatQualifier, cols []string) (sql.Statistic, bool)

func (*Catalog) GetTableStats added in v0.18.0

func (c *Catalog) GetTableStats(ctx *sql.Context, db, table string) ([]sql.Statistic, error)

func (*Catalog) HasBinlogPrimaryController added in v0.18.1

func (c *Catalog) HasBinlogPrimaryController() bool

func (*Catalog) HasBinlogReplicaController added in v0.18.1

func (c *Catalog) HasBinlogReplicaController() bool

func (*Catalog) HasDatabase added in v0.16.0

func (c *Catalog) HasDatabase(ctx *sql.Context, db string) bool

func (*Catalog) LockTable added in v0.11.0

func (c *Catalog) LockTable(ctx *sql.Context, table string)

LockTable adds a lock for the given table and session client. It is assumed the database is the current database in use.

func (*Catalog) RefreshTableStats added in v0.18.0

func (c *Catalog) RefreshTableStats(ctx *sql.Context, table sql.Table, db string) error

func (*Catalog) RegisterFunction added in v0.11.0

func (c *Catalog) RegisterFunction(ctx *sql.Context, fns ...sql.Function)

RegisterFunction registers the functions given, adding them to the built-in functions. Integrators with custom functions should typically use the FunctionProvider interface instead.

func (*Catalog) RemoveDatabase added in v0.11.0

func (c *Catalog) RemoveDatabase(ctx *sql.Context, dbName string) error

RemoveDatabase removes a database from the catalog.

func (*Catalog) RowCount added in v0.18.0

func (c *Catalog) RowCount(ctx *sql.Context, db, table string) (uint64, error)

func (*Catalog) SetStats added in v0.18.0

func (c *Catalog) SetStats(ctx *sql.Context, stats sql.Statistic) error

func (*Catalog) Table added in v0.11.0

func (c *Catalog) Table(ctx *sql.Context, dbName, tableName string) (sql.Table, sql.Database, error)

Table returns the table in the given database with the given name.

func (*Catalog) TableAsOf added in v0.11.0

func (c *Catalog) TableAsOf(ctx *sql.Context, dbName, tableName string, asOf interface{}) (sql.Table, sql.Database, error)

TableAsOf returns the table in the given database with the given name, as it existed at the time given. The database named must support timed queries.

func (*Catalog) TableFunction added in v0.12.0

func (c *Catalog) TableFunction(ctx *sql.Context, name string) (sql.TableFunction, error)

TableFunction implements the TableFunctionProvider interface

func (*Catalog) UnlockTables added in v0.11.0

func (c *Catalog) UnlockTables(ctx *sql.Context, id uint32) error

UnlockTables unlocks all tables for which the given session client has a lock.

func (*Catalog) WithTableFunctions added in v0.18.0

func (c *Catalog) WithTableFunctions(fns ...sql.TableFunction) (sql.TableFunctionProvider, error)

type RoutineTable added in v0.12.0

type RoutineTable interface {
	sql.Table

	// AssignProcedures assigns a map of db-procedures to the routines table.
	AssignProcedures(p map[string][]*plan.Procedure) sql.Table
}

RoutineTable is a Table that depends on procedures and functions.

type Rule

type Rule struct {
	// Id of the rule.
	Id RuleId
	// Apply transforms a node.
	Apply RuleFunc
}

Rule to transform nodes.

type RuleFunc

RuleFunc is the function to be applied in a rule.

type RuleId added in v0.12.0

type RuleId int
const (
	AutocommitId   RuleId // addAutocommitNode
	TrackProcessId        // trackProcess

)

func (RuleId) String added in v0.12.0

func (i RuleId) String() string

type RuleSelector added in v0.12.0

type RuleSelector func(RuleId) bool

RuleSelector filters analysis rules by id
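
A sketch of a custom selector that disables a single rule and defers to DefaultRuleSelector for everything else; it can be passed wherever a RuleSelector is expected (for example Batch.EvalWithSelector).

// skipAutocommit is a hypothetical selector that turns off the autocommit
// wrapper rule while leaving all other rules enabled.
func skipAutocommit(id analyzer.RuleId) bool {
	if id == analyzer.AutocommitId {
		return false
	}
	return analyzer.DefaultRuleSelector(id)
}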

func NewFinalizeSubquerySel added in v0.15.0

func NewFinalizeSubquerySel(sel RuleSelector) RuleSelector

func NewFinalizeUnionSel added in v0.15.0

func NewFinalizeUnionSel(sel RuleSelector) RuleSelector

func NewProcRuleSelector added in v0.15.0

func NewProcRuleSelector(sel RuleSelector) RuleSelector

func NewResolveSubqueryExprSelector added in v0.14.0

func NewResolveSubqueryExprSelector(sel RuleSelector) RuleSelector

type TableAliases

type TableAliases struct {
	// contains filtered or unexported fields
}
