logql

package
v2.0.0
Published: Jun 14, 2023 License: AGPL-3.0 Imports: 45 Imported by: 0

Documentation

Index

Constants

const (
	DefaultEngineTimeout       = 5 * time.Minute
	DefaultBlockedQueryMessage = "blocked by policy"
)
const (
	StreamsKey = "streams"
	MetricsKey = "metrics"
)

Expression types used in metrics.

const (
	SuccessKey = "success"
	FailureKey = "failure"
	NoopKey    = "noop"
)

Parsing evaluation results used in metrics.

const (
	QueryTypeMetric  = "metric"
	QueryTypeFilter  = "filter"
	QueryTypeLimited = "limited"
	QueryTypeLabels  = "labels"
	QueryTypeSeries  = "series"
)

Variables

var (
	QueryTime = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Namespace: "logql",
		Name:      "query_duration_seconds",
		Help:      "LogQL query timings",
		Buckets:   prometheus.DefBuckets,
	}, []string{"query_type"})

	QueriesBlocked = promauto.NewCounterVec(prometheus.CounterOpts{
		Namespace: "loki",
		Name:      "blocked_queries",
		Help:      "Count of queries blocked by per-tenant policy",
	}, []string{"user"})
)
var (
	NoLimits = &fakeLimits{maxSeries: math.MaxInt32}
)

Functions

func EvaluatorUnsupportedType

func EvaluatorUnsupportedType(expr syntax.Expr, ev Evaluator) error

EvaluatorUnsupportedType is a helper for signaling that an evaluator does not support an Expr type

func HashedQuery

func HashedQuery(query string) uint32

func Match

func Match(xs []string) ([][]*labels.Matcher, error)

Match extracts and parses multiple matcher groups from a slice of strings
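A minimal, hedged sketch of calling Match; the import path (assumed to be the usual Loki one) and the matcher-group strings are illustrative, not taken from this page.

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	// Each input string is one matcher group written in LogQL selector syntax.
	groups, err := logql.Match([]string{`{app="foo"}`, `{job=~"bar.*", env!="dev"}`})
	if err != nil {
		panic(err)
	}
	for i, group := range groups {
		for _, m := range group {
			fmt.Printf("group %d: %s\n", i, m.String()) // e.g. group 0: app="foo"
		}
	}
}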

func PopulateMatrixFromScalar

func PopulateMatrixFromScalar(data promql.Scalar, params Params) promql.Matrix

func PrintMatches

func PrintMatches(matches []string) string

func QueryType

func QueryType(query string) (string, error)
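QueryType reports which kind of query a LogQL string is; comparing its result against the QueryType* constants above is a natural use. A small hedged sketch (the import path and query string are assumptions):

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	qt, err := logql.QueryType(`sum(rate({app="foo"} |= "error" [5m]))`)
	if err != nil {
		panic(err)
	}
	if qt == logql.QueryTypeMetric {
		fmt.Println("this is a metric query")
	}
}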

func RecordLabelQueryMetrics

func RecordLabelQueryMetrics(
	ctx context.Context,
	log log.Logger,
	start, end time.Time,
	label, query, status string,
	stats logql_stats.Result,
)

func RecordRangeAndInstantQueryMetrics

func RecordRangeAndInstantQueryMetrics(
	ctx context.Context,
	log log.Logger,
	p Params,
	status string,
	stats logql_stats.Result,
	result promql_parser.Value,
)

func RecordSeriesQueryMetrics

func RecordSeriesQueryMetrics(
	ctx context.Context,
	log log.Logger,
	start, end time.Time,
	match []string,
	status string,
	stats logql_stats.Result,
)

func ResultIterator

func ResultIterator(res logqlmodel.Result, params Params) (iter.EntryIterator, error)

ResultIterator coerces a downstream streams result into an iter.EntryIterator

func Sortable

func Sortable(q Params) (bool, error)

Sortable reports whether a LogQL query contains sort or sort_desc.

Types

type AvgOverTime

type AvgOverTime struct {
	// contains filtered or unexported fields
}

type BatchRangeVectorAggregator

type BatchRangeVectorAggregator func([]promql.FPoint) float64

BatchRangeVectorAggregator aggregates the samples within a given range. It receives the list of points that fall inside that range.
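For illustration, a custom aggregator is just a function with this signature. The hedged sketch below (import paths assumed) returns the maximum value of the points in the range:

package main

import (
	"fmt"
	"math"

	"github.com/grafana/loki/pkg/logql"
	"github.com/prometheus/prometheus/promql"
)

func main() {
	// maxOverRange picks the largest F (value) among the points in the range.
	var maxOverRange logql.BatchRangeVectorAggregator = func(points []promql.FPoint) float64 {
		maxVal := math.Inf(-1)
		for _, p := range points {
			if p.F > maxVal {
				maxVal = p.F
			}
		}
		return maxVal
	}

	fmt.Println(maxOverRange([]promql.FPoint{{T: 0, F: 1}, {T: 1000, F: 3}})) // 3
}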

type ConcatLogSelectorExpr

type ConcatLogSelectorExpr struct {
	DownstreamLogSelectorExpr
	// contains filtered or unexported fields
}

ConcatLogSelectorExpr is an expr for concatenating multiple LogSelectorExpr

func (ConcatLogSelectorExpr) String

func (c ConcatLogSelectorExpr) String() string

type ConcatSampleExpr

type ConcatSampleExpr struct {
	DownstreamSampleExpr
	// contains filtered or unexported fields
}

ConcatSampleExpr is an expr for concatenating multiple SampleExprs. Contract: the embedded SampleExprs within a linked list of ConcatSampleExprs must be of the same structure. This makes special implementations of SampleExpr.Associative() unnecessary.

func (ConcatSampleExpr) String

func (c ConcatSampleExpr) String() string

func (ConcatSampleExpr) Walk

func (c ConcatSampleExpr) Walk(f syntax.WalkFn)

type ConstantShards

type ConstantShards int

func (ConstantShards) GetStats

func (s ConstantShards) GetStats(_ syntax.Expr) (stats.Stats, error)

func (ConstantShards) Shards

func (s ConstantShards) Shards(_ syntax.Expr) (int, uint64, error)

type CountOverTime

type CountOverTime struct {
	// contains filtered or unexported fields
}

type DefaultEvaluator

type DefaultEvaluator struct {
	// contains filtered or unexported fields
}

func NewDefaultEvaluator

func NewDefaultEvaluator(querier Querier, maxLookBackPeriod time.Duration) *DefaultEvaluator

NewDefaultEvaluator constructs a DefaultEvaluator

func (*DefaultEvaluator) Iterator

func (*DefaultEvaluator) StepEvaluator

func (ev *DefaultEvaluator) StepEvaluator(
	ctx context.Context,
	nextEv SampleEvaluator,
	expr syntax.SampleExpr,
	q Params,
) (StepEvaluator, error)

type DownstreamEngine

type DownstreamEngine struct {
	// contains filtered or unexported fields
}

DownstreamEngine is an Engine implementation that can split queries into more parallelizable forms by querying the underlying backend shards individually and re-aggregating the results.

func NewDownstreamEngine

func NewDownstreamEngine(opts EngineOpts, downstreamable Downstreamable, limits Limits, logger log.Logger) *DownstreamEngine

NewDownstreamEngine constructs a *DownstreamEngine

func (*DownstreamEngine) Opts

func (ng *DownstreamEngine) Opts() EngineOpts

func (*DownstreamEngine) Query

func (ng *DownstreamEngine) Query(ctx context.Context, p Params, mapped syntax.Expr) Query

Query constructs a Query

type DownstreamEvaluator

type DownstreamEvaluator struct {
	Downstreamer
	// contains filtered or unexported fields
}

DownstreamEvaluator is an evaluator which handles shard-aware AST nodes

func NewDownstreamEvaluator

func NewDownstreamEvaluator(downstreamer Downstreamer) *DownstreamEvaluator

func (DownstreamEvaluator) Downstream

func (ev DownstreamEvaluator) Downstream(ctx context.Context, queries []DownstreamQuery) ([]logqlmodel.Result, error)

Downstream runs queries and collects stats from the embedded Downstreamer

func (*DownstreamEvaluator) Iterator

func (ev *DownstreamEvaluator) Iterator(
	ctx context.Context,
	expr syntax.LogSelectorExpr,
	params Params,
) (iter.EntryIterator, error)

Iterator returns the iter.EntryIterator for a given LogSelectorExpr

func (*DownstreamEvaluator) StepEvaluator

func (ev *DownstreamEvaluator) StepEvaluator(
	ctx context.Context,
	nextEv SampleEvaluator,
	expr syntax.SampleExpr,
	params Params,
) (StepEvaluator, error)

StepEvaluator returns a StepEvaluator for a given SampleExpr

type DownstreamLogSelectorExpr

type DownstreamLogSelectorExpr struct {
	syntax.LogSelectorExpr
	// contains filtered or unexported fields
}

DownstreamLogSelectorExpr is a LogSelectorExpr which signals downstream computation

func (DownstreamLogSelectorExpr) String

func (d DownstreamLogSelectorExpr) String() string

type DownstreamQuery

type DownstreamQuery struct {
	Expr   syntax.Expr
	Params Params
	Shards Shards
}

type DownstreamSampleExpr

type DownstreamSampleExpr struct {
	syntax.SampleExpr
	// contains filtered or unexported fields
}

DownstreamSampleExpr is a SampleExpr which signals downstream computation

func (DownstreamSampleExpr) String

func (d DownstreamSampleExpr) String() string

func (DownstreamSampleExpr) Walk

type Downstreamable

type Downstreamable interface {
	Downstreamer(context.Context) Downstreamer
}

type Downstreamer

type Downstreamer interface {
	Downstream(context.Context, []DownstreamQuery) ([]logqlmodel.Result, error)
}

Downstreamer is an interface for deferring responsibility for query execution. It is decoupled from, but consumed by, a DownstreamEvaluator to dispatch ASTs.

type Engine

type Engine struct {
	Timeout time.Duration
	// contains filtered or unexported fields
}

Engine is the LogQL engine.

func NewEngine

func NewEngine(opts EngineOpts, q Querier, l Limits, logger log.Logger) *Engine

NewEngine creates a new LogQL Engine.

func (*Engine) Query

func (ng *Engine) Query(params Params) Query

Query creates a new LogQL query. Instant/Range type is derived from the parameters.
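A hedged end-to-end sketch: wire an Engine to the package's exported MockQuerier and NoLimits and run an instant metric query. The import paths, the tenant-injection helper, and the query string are assumptions, not taken from this page.

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/go-kit/log"
	"github.com/weaveworks/common/user"

	"github.com/grafana/loki/pkg/logproto"
	"github.com/grafana/loki/pkg/logql"
)

func main() {
	querier := logql.NewMockQuerier(1, nil) // shard-aware mock with no streams
	eng := logql.NewEngine(logql.EngineOpts{}, querier, logql.NoLimits, log.NewNopLogger())

	now := time.Now()
	params := logql.NewLiteralParams(
		`sum(rate({app="foo"}[1m]))`,
		now, now, // start == end
		0, 0,     // step, interval
		logproto.BACKWARD,
		100,
		nil,
	)

	// Loki is multi-tenant; injecting an org ID is assumed to be required here.
	ctx := user.InjectOrgID(context.Background(), "fake")

	res, err := eng.Query(params).Exec(ctx)
	if err != nil {
		fmt.Println("query failed:", err)
		return
	}
	fmt.Printf("%+v\n", res)
}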

type EngineOpts

type EngineOpts struct {
	// TODO: remove this after next release.
	// Timeout for queries execution
	Timeout time.Duration `yaml:"timeout" doc:"deprecated"`

	// MaxLookBackPeriod is the maximum amount of time to look back for log lines.
	// only used for instant log queries.
	MaxLookBackPeriod time.Duration `yaml:"max_look_back_period"`

	// LogExecutingQuery will control if we log the query when Exec is called.
	LogExecutingQuery bool `yaml:"-"`
}

EngineOpts is the list of options to use with the LogQL query engine.

func (*EngineOpts) RegisterFlagsWithPrefix

func (opts *EngineOpts) RegisterFlagsWithPrefix(prefix string, f *flag.FlagSet)
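A short hedged sketch of binding EngineOpts to a FlagSet; the prefix string is illustrative, not necessarily the one Loki itself uses.

package main

import (
	"flag"
	"fmt"
	"os"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	var opts logql.EngineOpts
	fs := flag.NewFlagSet("example", flag.ExitOnError)
	opts.RegisterFlagsWithPrefix("querier", fs) // hypothetical prefix
	_ = fs.Parse(os.Args[1:])
	fmt.Printf("%+v\n", opts)
}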

type EntryEvaluator

type EntryEvaluator interface {
	// Iterator returns the iter.EntryIterator for a given LogSelectorExpr
	Iterator(context.Context, syntax.LogSelectorExpr, Params) (iter.EntryIterator, error)
}

type Evaluator

type Evaluator interface {
	SampleEvaluator
	EntryEvaluator
}

Evaluator is an interface for iterating over data at different nodes in the AST

type FirstOverTime

type FirstOverTime struct {
	// contains filtered or unexported fields
}

type LastOverTime

type LastOverTime struct {
	// contains filtered or unexported fields
}

type Limits

type Limits interface {
	MaxQuerySeries(context.Context, string) int
	MaxQueryRange(ctx context.Context, userID string) time.Duration
	QueryTimeout(context.Context, string) time.Duration
	BlockedQueries(context.Context, string) []*validation.BlockedQuery
}

Limits allows the engine to fetch limits for a given user.

type LiteralParams

type LiteralParams struct {
	// contains filtered or unexported fields
}

LiteralParams impls Params

func NewLiteralParams

func NewLiteralParams(
	qs string,
	start, end time.Time,
	step, interval time.Duration,
	direction logproto.Direction,
	limit uint32,
	shards []string,
) LiteralParams
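A hedged sketch contrasting range and instant parameters. The inference that a zero step is treated as an instant query is an assumption, and the import paths and queries are illustrative.

package main

import (
	"fmt"
	"time"

	"github.com/grafana/loki/pkg/logproto"
	"github.com/grafana/loki/pkg/logql"
)

func main() {
	end := time.Now()
	start := end.Add(-time.Hour)

	rangeParams := logql.NewLiteralParams(
		`{app="foo"} |= "error"`,
		start, end,
		time.Minute, 0, // step, interval
		logproto.BACKWARD,
		1000,
		nil,
	)

	instantParams := logql.NewLiteralParams(
		`count_over_time({app="foo"}[5m])`,
		end, end,
		0, 0, // zero step -> assumed to be treated as an instant query
		logproto.BACKWARD,
		1000,
		nil,
	)

	fmt.Println(logql.GetRangeType(rangeParams), logql.GetRangeType(instantParams))
}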

func (LiteralParams) Copy

func (p LiteralParams) Copy() LiteralParams

func (LiteralParams) Direction

func (p LiteralParams) Direction() logproto.Direction

Direction impls Params

func (LiteralParams) End

func (p LiteralParams) End() time.Time

End impls Params

func (LiteralParams) Interval

func (p LiteralParams) Interval() time.Duration

Interval impls Params

func (LiteralParams) Limit

func (p LiteralParams) Limit() uint32

Limit impls Params

func (LiteralParams) Query

func (p LiteralParams) Query() string

Query impls Params

func (LiteralParams) Shards

func (p LiteralParams) Shards() []string

Shards impls Params

func (LiteralParams) Start

func (p LiteralParams) Start() time.Time

Start impls Params

func (LiteralParams) Step

func (p LiteralParams) Step() time.Duration

Step impls Params

type MapperMetrics

type MapperMetrics struct {
	DownstreamQueries *prometheus.CounterVec // downstream queries total, partitioned by streams/metrics
	ParsedQueries     *prometheus.CounterVec // parsed ASTs total, partitioned by success/failure/noop
	DownstreamFactor  prometheus.Histogram   // per request downstream factor
}

MapperMetrics is the metrics wrapper used in logql mapping (shard and range)

func NewRangeMapperMetrics

func NewRangeMapperMetrics(registerer prometheus.Registerer) *MapperMetrics

func NewShardMapperMetrics

func NewShardMapperMetrics(registerer prometheus.Registerer) *MapperMetrics

type MatrixStepper

type MatrixStepper struct {
	// contains filtered or unexported fields
}

MatrixStepper exposes a promql.Matrix as a StepEvaluator. Ensure that the resulting StepEvaluator maintains the same shape that the parameters expect. For example, it's possible that a downstream query matches no log streams and thus returns an empty matrix. However, we still need to ensure that it can be merged effectively with another leg that may match series. Therefore, we determine our steps from the parameters and not the underlying Matrix.

func NewMatrixStepper

func NewMatrixStepper(start, end time.Time, step time.Duration, m promql.Matrix) *MatrixStepper
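A minimal hedged sketch of stepping over a promql.Matrix; note that the steps come from start/end/step, not from the matrix itself (see above). Import paths and sample data are illustrative.

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	start := time.Unix(0, 0)
	end := start.Add(2 * time.Minute)
	step := time.Minute

	m := promql.Matrix{
		{
			Metric: labels.FromStrings("app", "foo"),
			Floats: []promql.FPoint{{T: 0, F: 1}, {T: 60000, F: 2}},
		},
	}

	s := logql.NewMatrixStepper(start, end, step, m)
	for {
		ok, ts, vec := s.Next()
		if !ok {
			break
		}
		fmt.Println(ts, vec)
	}
	_ = s.Close()
}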

func (*MatrixStepper) Close

func (m *MatrixStepper) Close() error

func (*MatrixStepper) Error

func (m *MatrixStepper) Error() error

func (*MatrixStepper) Next

func (m *MatrixStepper) Next() (bool, int64, promql.Vector)

type MaxOverTime

type MaxOverTime struct {
	// contains filtered or unexported fields
}

type MinOverTime

type MinOverTime struct {
	// contains filtered or unexported fields
}

type MockDownstreamer

type MockDownstreamer struct {
	*Engine
}

func (MockDownstreamer) Downstream

func (m MockDownstreamer) Downstream(ctx context.Context, queries []DownstreamQuery) ([]logqlmodel.Result, error)

func (MockDownstreamer) Downstreamer

func (m MockDownstreamer) Downstreamer(_ context.Context) Downstreamer

type MockQuerier

type MockQuerier struct {
	// contains filtered or unexported fields
}

MockQuerier is a shard-aware mock Querier.

func NewMockQuerier

func NewMockQuerier(shards int, streams []logproto.Stream) MockQuerier

func (MockQuerier) SelectLogs

func (MockQuerier) SelectSamples

type OneOverTime

type OneOverTime struct {
}

type Params

type Params interface {
	Query() string
	Start() time.Time
	End() time.Time
	Step() time.Duration
	Interval() time.Duration
	Limit() uint32
	Direction() logproto.Direction
	Shards() []string
}

Params details the parameters associated with a Loki request

type QuantileOverTime

type QuantileOverTime struct {
	// contains filtered or unexported fields
}

type Querier

type Querier interface {
	SelectLogs(context.Context, SelectLogParams) (iter.EntryIterator, error)
	SelectSamples(context.Context, SelectSampleParams) (iter.SampleIterator, error)
}

Querier allows a LogQL expression to fetch an EntryIterator for a set of matchers and filters

type Query

type Query interface {
	// Exec processes the query.
	Exec(ctx context.Context) (logqlmodel.Result, error)
}

Query is a LogQL query to be executed.

type QueryParams

type QueryParams interface {
	LogSelector() (syntax.LogSelectorExpr, error)
	GetStart() time.Time
	GetEnd() time.Time
	GetShards() []string
}

type QueryRangeType

type QueryRangeType string
var (
	InstantType QueryRangeType = "instant"
	RangeType   QueryRangeType = "range"
)

func GetRangeType

func GetRangeType(q Params) QueryRangeType

GetRangeType returns whether a query is an instant query or range query

type RangeMapper

type RangeMapper struct {
	// contains filtered or unexported fields
}

RangeMapper is used to rewrite LogQL sample expressions into multiple downstream sample expressions with a smaller time range that can be executed using the downstream engine.

A rewrite is performed using the following rules:

  1. Check whether the query is splittable based on its range.
  2. Check whether the query is splittable based on its AST.
  3. Range aggregations are split into multiple downstream range aggregation expressions that are concatenated with an appropriate vector aggregator with a grouping operator. If the range aggregation has a grouping, the grouping is also applied to the resultant vector aggregator expression. If the range aggregation has no grouping, a grouping operator using "without" is applied to the resultant vector aggregator expression to preserve the stream labels.
  4. Vector aggregations are split into multiple downstream vector aggregations that are merged with vector aggregation using "without" and then aggregated using the vector aggregation with the same operator, either with or without grouping.
  5. Left and right-hand side of binary operations are split individually using the same rules as above.

func NewRangeMapper

func NewRangeMapper(interval time.Duration, metrics *MapperMetrics) (RangeMapper, error)

NewRangeMapper creates a new RangeMapper instance with the given duration as split interval. The interval must be greater than 0.

func (RangeMapper) Map

func (m RangeMapper) Map(expr syntax.SampleExpr, vectorAggrPushdown *syntax.VectorAggregationExpr, recorder *downstreamRecorder) (syntax.SampleExpr, error)

Map rewrites the sample expression expr and returns the resultant sample expression to be executed by the downstream engine. It is called recursively on the expression tree. The function takes an optional vector aggregation as its second argument, which is pushed down to the downstream expression.

func (RangeMapper) Parse

func (m RangeMapper) Parse(query string) (bool, syntax.Expr, error)

Parse parses the given LogQL query string into a sample expression and applies the rewrite rules for splitting it into a sample expression that can be executed by the downstream engine. It returns a boolean indicating whether a rewrite was possible, the rewritten sample expression, and an error in case the rewrite failed.
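A hedged sketch of splitting a query with a RangeMapper; the 1m split interval, the query string, and the import paths are illustrative.

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	metrics := logql.NewRangeMapperMetrics(prometheus.NewRegistry())
	mapper, err := logql.NewRangeMapper(time.Minute, metrics)
	if err != nil {
		panic(err)
	}

	ok, expr, err := mapper.Parse(`sum(count_over_time({app="foo"}[3m]))`)
	if err != nil {
		panic(err)
	}
	fmt.Println(ok)            // boolean reported by Parse (see its documentation above)
	fmt.Println(expr.String()) // the (possibly rewritten) sample expression
}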

type RangeStreamingAgg

type RangeStreamingAgg interface {
	// contains filtered or unexported methods
}

RangeStreamingAgg aggregates samples in a streaming fashion, one sample at a time.

type RangeVectorIterator

type RangeVectorIterator interface {
	Next() bool
	At() (int64, promql.Vector)
	Close() error
	Error() error
}

RangeVectorIterator iterates through a range of samples. To fetch the current vector use `At` with a `BatchRangeVectorAggregator` or `RangeStreamingAgg`.

type RateCounterOverTime

type RateCounterOverTime struct {
	// contains filtered or unexported fields
}

rateCounter calculates the per-second rate of values extracted from log lines and treats them like a "counter" metric.

type RateLogBytesOverTime

type RateLogBytesOverTime struct {
	// contains filtered or unexported fields
}

rateLogBytes calculates the per-second rate of log bytes.

type RateLogsOverTime

type RateLogsOverTime struct {
	// contains filtered or unexported fields
}

rateLogs calculates the per-second rate of log lines or values extracted from log lines

type SampleEvaluator

type SampleEvaluator interface {
	// StepEvaluator returns a StepEvaluator for a given SampleExpr. It's explicitly passed another SampleEvaluator
	// in order to enable arbitrary computation of embedded expressions. This allows more modular & extensible
	// StepEvaluator implementations which can be composed.
	StepEvaluator(ctx context.Context, nextEvaluator SampleEvaluator, expr syntax.SampleExpr, p Params) (StepEvaluator, error)
}

type SampleEvaluatorFunc

type SampleEvaluatorFunc func(ctx context.Context, nextEvaluator SampleEvaluator, expr syntax.SampleExpr, p Params) (StepEvaluator, error)

func (SampleEvaluatorFunc) StepEvaluator

func (s SampleEvaluatorFunc) StepEvaluator(ctx context.Context, nextEvaluator SampleEvaluator, expr syntax.SampleExpr, p Params) (StepEvaluator, error)
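SampleEvaluatorFunc lets a plain function satisfy SampleEvaluator, which makes it easy to wrap an existing evaluator. The hedged sketch below adds logging around step-evaluator construction; the import paths and the wrapping pattern are assumptions.

package main

import (
	"context"
	"log"

	"github.com/grafana/loki/pkg/logql"
	"github.com/grafana/loki/pkg/logql/syntax"
)

// withLogging wraps a SampleEvaluator so every StepEvaluator request is logged
// before being delegated to the wrapped evaluator.
func withLogging(inner logql.SampleEvaluator) logql.SampleEvaluator {
	return logql.SampleEvaluatorFunc(func(
		ctx context.Context,
		next logql.SampleEvaluator,
		expr syntax.SampleExpr,
		p logql.Params,
	) (logql.StepEvaluator, error) {
		log.Printf("building step evaluator for %s", expr.String())
		return inner.StepEvaluator(ctx, next, expr, p)
	})
}

func main() {
	querier := logql.NewMockQuerier(1, nil)
	ev := withLogging(logql.NewDefaultEvaluator(querier, 0))
	_ = ev // pass ev wherever a SampleEvaluator is expected
}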

type SelectLogParams

type SelectLogParams struct {
	*logproto.QueryRequest
}

SelectLogParams specifies parameters passed to data selections.

func (SelectLogParams) LogSelector

func (s SelectLogParams) LogSelector() (syntax.LogSelectorExpr, error)

LogSelector returns the LogSelectorExpr from the SelectParams. The `LogSelectorExpr` can then return all matchers and filters to use for that request.

func (SelectLogParams) String

func (s SelectLogParams) String() string

type SelectSampleParams

type SelectSampleParams struct {
	*logproto.SampleQueryRequest
}

func (SelectSampleParams) Expr

Expr returns the SampleExpr from the SelectSampleParams. The `SampleExpr` can then return all matchers and filters to use for that request.

func (SelectSampleParams) LogSelector

func (s SelectSampleParams) LogSelector() (syntax.LogSelectorExpr, error)

LogSelector returns the LogSelectorExpr from the SelectParams. The `LogSelectorExpr` can then return all matchers and filters to use for that request.

type ShardMapper

type ShardMapper struct {
	// contains filtered or unexported fields
}

func NewShardMapper

func NewShardMapper(resolver ShardResolver, metrics *MapperMetrics) ShardMapper

func (ShardMapper) Map

func (m ShardMapper) Map(expr syntax.Expr, r *downstreamRecorder) (syntax.Expr, uint64, error)

func (ShardMapper) Parse

func (m ShardMapper) Parse(query string) (noop bool, bytesPerShard uint64, expr syntax.Expr, err error)
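A hedged sketch of shard-mapping a query with a fixed shard count via ConstantShards; the shard count, query string, and import paths are illustrative.

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	metrics := logql.NewShardMapperMetrics(prometheus.NewRegistry())
	mapper := logql.NewShardMapper(logql.ConstantShards(4), metrics)

	noop, bytesPerShard, expr, err := mapper.Parse(`sum(rate({app="foo"}[5m]))`)
	if err != nil {
		panic(err)
	}
	fmt.Println(noop, bytesPerShard)
	fmt.Println(expr.String()) // the shard-mapped expression
}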

type ShardResolver

type ShardResolver interface {
	Shards(expr syntax.Expr) (int, uint64, error)
	GetStats(e syntax.Expr) (stats.Stats, error)
}

type Shards

type Shards []astmapper.ShardAnnotation

func ParseShards

func ParseShards(strs []string) (Shards, error)

ParseShards parses a list of string encoded shards

func (Shards) Encode

func (xs Shards) Encode() (encoded []string)
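A minimal hedged sketch of decoding and re-encoding shards; the "<shard>_of_<total>" string format is an assumption about how shard annotations are commonly written, not something stated on this page.

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	shards, err := logql.ParseShards([]string{"0_of_4", "1_of_4"}) // hypothetical encoding
	if err != nil {
		panic(err)
	}
	fmt.Println(len(shards))     // 2
	fmt.Println(shards.Encode()) // back to the string form
}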

type StddevOverTime

type StddevOverTime struct {
	// contains filtered or unexported fields
}

type StdvarOverTime

type StdvarOverTime struct {
	// contains filtered or unexported fields
}

type StepEvaluator

type StepEvaluator interface {
	// while Next returns a promql.Value, the only acceptable types are Scalar and Vector.
	Next() (ok bool, ts int64, vec promql.Vector)
	// Close all resources used.
	Close() error
	// Reports any error
	Error() error
}

StepEvaluator evaluates a single step of a query.

func ConcatEvaluator

func ConcatEvaluator(evaluators []StepEvaluator) (StepEvaluator, error)

ConcatEvaluator joins multiple StepEvaluators. Contract: they must have identical start, end, and step values.

func ResultStepEvaluator

func ResultStepEvaluator(res logqlmodel.Result, params Params) (StepEvaluator, error)

ResultStepEvaluator coerces a downstream vector or matrix into a StepEvaluator

type SumOverTime

type SumOverTime struct {
	// contains filtered or unexported fields
}

Directories

Path Synopsis
log
logfmt
Adapted from https://github.com/go-logfmt/logfmt/ but with []byte as a parameter instead. Original license is MIT.
This file is taken from the golang text/scanner package so `bufLen` can be set to `maxInputSize`.
