package engine
v0.8.5
Published: Oct 27, 2014 License: MIT Imports: 13 Imported by: 0

Documentation

Index

Constants

const MaxInt = int(^uint(0) >> 1)
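
MaxInt works because ^uint(0) is a uint with every bit set; shifting it right by one clears what becomes the sign bit, and the conversion to int yields the largest representable int on the current platform. A standalone snippet (not part of the package) demonstrating this:

	package main

	import "fmt"

	const MaxInt = int(^uint(0) >> 1)

	func main() {
		// Prints 9223372036854775807 on a 64-bit platform, 2147483647 on 32-bit.
		fmt.Println(MaxInt)
	}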

Variables

This section is empty.

Functions

func DivideOperator

func DivideOperator(elems []*parser.Value, fields []string, point *protocol.Point) (*protocol.FieldValue, error)

func Filter

func Filter(query *parser.SelectQuery, series *protocol.Series) (*protocol.Series, error)

func GetRegisteredAggregators

func GetRegisteredAggregators() (names []string)

Used in testing to get a list of all registered aggregators.

func GetValue

func GetValue(value *parser.Value, fields []string, point *protocol.Point) (*protocol.FieldValue, error)

func MinusOperator

func MinusOperator(elems []*parser.Value, fields []string, point *protocol.Point) (*protocol.FieldValue, error)

func MultiplyOperator

func MultiplyOperator(elems []*parser.Value, fields []string, point *protocol.Point) (*protocol.FieldValue, error)

func PlusOperator

func PlusOperator(elems []*parser.Value, fields []string, point *protocol.Point) (*protocol.FieldValue, error)

Types

type AbstractAggregator

type AbstractAggregator struct {
	Aggregator
	// contains filtered or unexported fields
}

func (*AbstractAggregator) CalculateSummaries

func (self *AbstractAggregator) CalculateSummaries(state interface{})

func (*AbstractAggregator) InitializeFieldsMetadata

func (self *AbstractAggregator) InitializeFieldsMetadata(series *protocol.Series) error

type Aggregator

type Aggregator interface {
	AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)
	InitializeFieldsMetadata(series *protocol.Series) error
	GetValues(state interface{}) [][]*protocol.FieldValue
	CalculateSummaries(state interface{})
	ColumnNames() []string
}
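
As a sketch of what an implementation of this interface looks like, here is a hypothetical no-op aggregator (not part of the package; the import paths are assumed from the repository layout):

	package example

	import (
		"github.com/influxdb/influxdb/engine"
		"github.com/influxdb/influxdb/protocol"
	)

	// noopAggregator is a hypothetical Aggregator that keeps no state and
	// reports a single column; it only illustrates the method set.
	type noopAggregator struct{}

	// Compile-time check that noopAggregator satisfies the interface.
	var _ engine.Aggregator = (*noopAggregator)(nil)

	func (a *noopAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error) {
		// A real aggregator would fold p into state and return the new state.
		return state, nil
	}

	func (a *noopAggregator) InitializeFieldsMetadata(series *protocol.Series) error { return nil }

	func (a *noopAggregator) GetValues(state interface{}) [][]*protocol.FieldValue {
		// One row with no values.
		return [][]*protocol.FieldValue{{}}
	}

	func (a *noopAggregator) CalculateSummaries(state interface{}) {}

	func (a *noopAggregator) ColumnNames() []string { return []string{"noop"} }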

func NewBottomAggregator

func NewBottomAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewCompositeAggregator

func NewCompositeAggregator(left, right Aggregator) (Aggregator, error)

func NewCountAggregator

func NewCountAggregator(q *parser.SelectQuery, v *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewCumulativeArithmeticAggregator

func NewCumulativeArithmeticAggregator(name string, value *parser.Value, initialValue float64, defaultValue *parser.Value, operation Operation) (Aggregator, error)

func NewDerivativeAggregator

func NewDerivativeAggregator(q *parser.SelectQuery, v *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewDifferenceAggregator

func NewDifferenceAggregator(q *parser.SelectQuery, v *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewDistinctAggregator

func NewDistinctAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewFirstAggregator

func NewFirstAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewFirstOrLastAggregator

func NewFirstOrLastAggregator(name string, v *parser.Value, isFirst bool, defaultValue *parser.Value) (Aggregator, error)

func NewHistogramAggregator

func NewHistogramAggregator(q *parser.SelectQuery, v *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewLastAggregator

func NewLastAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewMaxAggregator

func NewMaxAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewMeanAggregator

func NewMeanAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewMedianAggregator

func NewMedianAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewMinAggregator

func NewMinAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewModeAggregator

func NewModeAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewPercentileAggregator

func NewPercentileAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewStandardDeviationAggregator

func NewStandardDeviationAggregator(q *parser.SelectQuery, v *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewSumAggregator

func NewSumAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewTopAggregator

func NewTopAggregator(_ *parser.SelectQuery, value *parser.Value, defaultValue *parser.Value) (Aggregator, error)

func NewTopOrBottomAggregator

func NewTopOrBottomAggregator(name string, v *parser.Value, isTop bool, defaultValue *parser.Value) (Aggregator, error)

type AggregatorEngine added in v0.8.4

type AggregatorEngine struct {
	// contains filtered or unexported fields
}

func NewAggregatorEngine added in v0.8.4

func NewAggregatorEngine(query *parser.SelectQuery, next Processor) (*AggregatorEngine, error)

func (*AggregatorEngine) Close added in v0.8.4

func (self *AggregatorEngine) Close() error

func (*AggregatorEngine) Name added in v0.8.4

func (self *AggregatorEngine) Name() string

func (*AggregatorEngine) Yield added in v0.8.4

func (self *AggregatorEngine) Yield(s *protocol.Series) (bool, error)

type AggregatorInitializer

type AggregatorInitializer func(*parser.SelectQuery, *parser.Value, *parser.Value) (Aggregator, error)

AggregatorInitializer initializes a new aggregator given the query, the aggregator's function call, and the default value that should be returned if the bucket doesn't have any points.
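
Each New*Aggregator constructor above has exactly this signature, so they can be collected in a lookup table keyed by the query function's name. A hypothetical sketch (not the package's actual registration mechanism; see GetRegisteredAggregators):

	// registry is a hypothetical table mapping function names to constructors;
	// every entry satisfies AggregatorInitializer.
	var registry = map[string]AggregatorInitializer{
		"count": NewCountAggregator,
		"min":   NewMinAggregator,
		"max":   NewMaxAggregator,
		"sum":   NewSumAggregator,
		"mean":  NewMeanAggregator,
	}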

type ArithmeticEngine added in v0.8.4

type ArithmeticEngine struct {
	// contains filtered or unexported fields
}

func NewArithmeticEngine added in v0.8.4

func NewArithmeticEngine(query *parser.SelectQuery, next Processor) (*ArithmeticEngine, error)

func (*ArithmeticEngine) Close added in v0.8.4

func (self *ArithmeticEngine) Close() error

func (*ArithmeticEngine) Name added in v0.8.4

func (self *ArithmeticEngine) Name() string

func (*ArithmeticEngine) Yield added in v0.8.4

func (ae *ArithmeticEngine) Yield(s *protocol.Series) (bool, error)

type ArithmeticOperator

type ArithmeticOperator func(elems []*parser.Value, fields []string, point *protocol.Point) (*protocol.FieldValue, error)
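
PlusOperator, MinusOperator, MultiplyOperator and DivideOperator all have this signature, so any of them can be stored in an ArithmeticOperator value. A hypothetical helper, written as if inside the package:

	// chooseOperator maps an operator token from the query to the
	// corresponding arithmetic function; it is illustrative only.
	func chooseOperator(symbol string) ArithmeticOperator {
		switch symbol {
		case "+":
			return PlusOperator
		case "-":
			return MinusOperator
		case "*":
			return MultiplyOperator
		case "/":
			return DivideOperator
		}
		return nil
	}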

type BooleanOperation

type BooleanOperation func(leftValue *protocol.FieldValue, rightValues []*protocol.FieldValue) (OperatorResult, error)

type ByPointColumnAsc

type ByPointColumnAsc struct {
	protocol.PointsCollection
}

func (ByPointColumnAsc) Less

func (s ByPointColumnAsc) Less(i, j int) bool

type ByPointColumnDesc

type ByPointColumnDesc struct {
	protocol.PointsCollection
}

Used by the Top and Bottom aggregators.

func (ByPointColumnDesc) Less

func (s ByPointColumnDesc) Less(i, j int) bool

type CommonMergeEngine added in v0.8.4

type CommonMergeEngine struct {
	// contains filtered or unexported fields
}

func NewCommonMergeEngine added in v0.8.4

func NewCommonMergeEngine(shards []uint32, mergeColumns bool, ascending bool, next Processor) *CommonMergeEngine

Returns a yield function that will sort points from table1 and table2 regardless of the order in which they are received.

func (*CommonMergeEngine) Close added in v0.8.4

func (cme *CommonMergeEngine) Close() error

func (*CommonMergeEngine) Name added in v0.8.4

func (cme *CommonMergeEngine) Name() string

func (*CommonMergeEngine) Yield added in v0.8.4

func (cme *CommonMergeEngine) Yield(s *protocol.Series) (bool, error)

type CompositeAggregator

type CompositeAggregator struct {
	// contains filtered or unexported fields
}

func (*CompositeAggregator) AggregatePoint

func (self *CompositeAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*CompositeAggregator) CalculateSummaries

func (self *CompositeAggregator) CalculateSummaries(state interface{})

func (*CompositeAggregator) ColumnNames

func (self *CompositeAggregator) ColumnNames() []string

func (*CompositeAggregator) GetValues

func (self *CompositeAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

func (*CompositeAggregator) InitializeFieldsMetadata

func (self *CompositeAggregator) InitializeFieldsMetadata(series *protocol.Series) error

type CompositeAggregatorState

type CompositeAggregatorState struct {
	// contains filtered or unexported fields
}

type CountAggregator

type CountAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*CountAggregator) AggregatePoint

func (self *CountAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*CountAggregator) ColumnNames

func (self *CountAggregator) ColumnNames() []string

func (*CountAggregator) GetValues

func (self *CountAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

func (*CountAggregator) InitializeFieldsMetadata

func (self *CountAggregator) InitializeFieldsMetadata(series *protocol.Series) error

type CountAggregatorState

type CountAggregatorState int64

type CumulativeArithmeticAggregator

type CumulativeArithmeticAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*CumulativeArithmeticAggregator) AggregatePoint

func (self *CumulativeArithmeticAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*CumulativeArithmeticAggregator) ColumnNames

func (self *CumulativeArithmeticAggregator) ColumnNames() []string

func (*CumulativeArithmeticAggregator) GetValues

func (self *CumulativeArithmeticAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type CumulativeArithmeticAggregatorState

type CumulativeArithmeticAggregatorState float64

type DerivativeAggregator

type DerivativeAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*DerivativeAggregator) AggregatePoint

func (self *DerivativeAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*DerivativeAggregator) ColumnNames

func (self *DerivativeAggregator) ColumnNames() []string

func (*DerivativeAggregator) GetValues

func (self *DerivativeAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type DerivativeAggregatorState

type DerivativeAggregatorState struct {
	// contains filtered or unexported fields
}

type DifferenceAggregator

type DifferenceAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*DifferenceAggregator) AggregatePoint

func (self *DifferenceAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*DifferenceAggregator) ColumnNames

func (self *DifferenceAggregator) ColumnNames() []string

func (*DifferenceAggregator) GetValues

func (self *DifferenceAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type DifferenceAggregatorState

type DifferenceAggregatorState struct {
	// contains filtered or unexported fields
}

type DistinctAggregator

type DistinctAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*DistinctAggregator) AggregatePoint

func (self *DistinctAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*DistinctAggregator) ColumnNames

func (self *DistinctAggregator) ColumnNames() []string

func (*DistinctAggregator) GetValues

func (self *DistinctAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type DistinctAggregatorState

type DistinctAggregatorState struct {
	// contains filtered or unexported fields
}

type FilteringEngine

type FilteringEngine struct {
	// contains filtered or unexported fields
}

func NewFilteringEngine

func NewFilteringEngine(query *parser.SelectQuery, processor Processor) *FilteringEngine

func (*FilteringEngine) Close

func (self *FilteringEngine) Close() error

func (*FilteringEngine) Name added in v0.8.4

func (self *FilteringEngine) Name() string

func (*FilteringEngine) Yield added in v0.8.4

func (self *FilteringEngine) Yield(seriesIncoming *p.Series) (bool, error)

type FirstOrLastAggregator

type FirstOrLastAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*FirstOrLastAggregator) AggregatePoint

func (self *FirstOrLastAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*FirstOrLastAggregator) ColumnNames

func (self *FirstOrLastAggregator) ColumnNames() []string

func (*FirstOrLastAggregator) GetValues

func (self *FirstOrLastAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type FirstOrLastAggregatorState

type FirstOrLastAggregatorState *protocol.FieldValue

type HistogramAggregator

type HistogramAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*HistogramAggregator) AggregatePoint

func (self *HistogramAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*HistogramAggregator) ColumnNames

func (self *HistogramAggregator) ColumnNames() []string

func (*HistogramAggregator) GetValues

func (self *HistogramAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type HistogramAggregatorState

type HistogramAggregatorState map[int]int

type JoinEngine added in v0.8.4

type JoinEngine struct {
	// contains filtered or unexported fields
}

func (*JoinEngine) Close added in v0.8.4

func (je *JoinEngine) Close() error

func (*JoinEngine) Name added in v0.8.4

func (je *JoinEngine) Name() string

func (*JoinEngine) Yield added in v0.8.4

func (je *JoinEngine) Yield(s *protocol.Series) (bool, error)

type Limiter

type Limiter struct {
	// contains filtered or unexported fields
}

func NewLimiter

func NewLimiter(limit int) *Limiter

type MaxValueSlice added in v0.8.4

type MaxValueSlice struct {
	*MinValueSlice
}

func (MaxValueSlice) Less added in v0.8.4

func (mvs MaxValueSlice) Less(i, j int) bool

type MeanAggregator

type MeanAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*MeanAggregator) AggregatePoint

func (self *MeanAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*MeanAggregator) ColumnNames

func (self *MeanAggregator) ColumnNames() []string

func (*MeanAggregator) GetValues

func (self *MeanAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type MeanAggregatorState

type MeanAggregatorState struct {
	// contains filtered or unexported fields
}

type MergeEngine added in v0.8.4

type MergeEngine struct {
	// contains filtered or unexported fields
}

func (*MergeEngine) Close added in v0.8.4

func (me *MergeEngine) Close() error

func (*MergeEngine) Name added in v0.8.4

func (me *MergeEngine) Name() string

func (*MergeEngine) Yield added in v0.8.4

func (me *MergeEngine) Yield(s *protocol.Series) (bool, error)

type Merger added in v0.8.4

type Merger struct {
	// contains filtered or unexported fields
}

Merger merges a number of StreamQuery into one stream of points where the output stream of points has a monotonic timestamp order (increasing or decreasing depending on the SeriesHeap that is passed to NewCME).

func NewCME added in v0.8.4

func NewCME(name string, s []StreamQuery, h SeriesHeap, n Processor, mergeColumns bool) *Merger

Creates a new merger that will merge the given slice of StreamQuery and yield the result to the processor `n`. `name` is used to identify the merger in the logs, since it is used in multiple places. The SeriesHeap `h` is used to keep track of the next point (whether it's the smallest or largest timestamp depends on the SeriesHeap). If `mergeColumns` is true, the resulting time series will have the fields from all StreamQueries; i.e., if the first stream yields `column0` and `column1` while the second stream yields `column2` and `column3`, then the resulting time series will have all four columns, with two of them set to `nil` depending on which side each point came from.
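
To make the mergeColumns behaviour concrete, the column sets from the example above combine as follows:

	// first stream:  column0, column1
	// second stream: column2, column3
	// merged series: column0, column1, column2, column3
	//
	// A point that came from the first stream carries nil in column2 and
	// column3; a point from the second stream carries nil in column0 and
	// column1.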

func (*Merger) Update added in v0.8.4

func (cme *Merger) Update() (bool, error)

Consume and yield as many points as we can until one of the streams runs out of points (i.e. its HasPoint() call returns false).

type MinValueSlice added in v0.8.4

type MinValueSlice struct {
	// contains filtered or unexported fields
}

func (*MinValueSlice) Len added in v0.8.4

func (mvs *MinValueSlice) Len() int

func (*MinValueSlice) Less added in v0.8.4

func (mvs *MinValueSlice) Less(i, j int) bool

func (*MinValueSlice) Pop added in v0.8.4

func (mvs *MinValueSlice) Pop() interface{}

func (*MinValueSlice) Push added in v0.8.4

func (mvs *MinValueSlice) Push(x interface{})

func (*MinValueSlice) Swap added in v0.8.4

func (mvs *MinValueSlice) Swap(i, j int)

type ModeAggregator

type ModeAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*ModeAggregator) AggregatePoint

func (self *ModeAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*ModeAggregator) ColumnNames

func (self *ModeAggregator) ColumnNames() []string

func (*ModeAggregator) GetValues

func (self *ModeAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type ModeAggregatorState

type ModeAggregatorState struct {
	// contains filtered or unexported fields
}

type Node

type Node struct {
	// contains filtered or unexported fields
}

func (*Node) CountLeafNodes

func (self *Node) CountLeafNodes() int

func (*Node) GetChildNode

func (self *Node) GetChildNode(value *protocol.FieldValue) *Node

type Nodes

type Nodes []*Node

type Operation

type Operation func(currentValue float64, newValue *protocol.FieldValue) float64

type OperatorResult

type OperatorResult int
const (
	MATCH OperatorResult = iota
	NO_MATCH
	INVALID
)

func EqualityOperator

func EqualityOperator(leftValue, rightValue *protocol.FieldValue) (OperatorResult, error)

func GreaterThanOperator

func GreaterThanOperator(leftValue, rightValue *protocol.FieldValue) (OperatorResult, error)

func GreaterThanOrEqualOperator

func GreaterThanOrEqualOperator(leftValue, rightValue *protocol.FieldValue) (OperatorResult, error)

func InOperator

func InOperator(leftValue *protocol.FieldValue, rightValue []*protocol.FieldValue) (OperatorResult, error)

func RegexMatcherOperator

func RegexMatcherOperator(leftValue, rightValue *protocol.FieldValue) (OperatorResult, error)
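
As a sketch of how these comparison operators might be applied when evaluating a condition (a hypothetical helper, written as if inside the package; it treats INVALID comparisons as non-matching):

	// passesGreaterThan reports whether the left value is greater than the
	// right value, folding INVALID comparisons into false.
	func passesGreaterThan(left, right *protocol.FieldValue) (bool, error) {
		result, err := GreaterThanOperator(left, right)
		if err != nil {
			return false, err
		}
		return result == MATCH, nil
	}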

type Passthrough added in v0.8.4

type Passthrough struct {
	// contains filtered or unexported fields
}

func NewPassthroughEngine

func NewPassthroughEngine(next Processor, maxPointsInResponse int) *Passthrough

func NewPassthroughEngineWithLimit

func NewPassthroughEngineWithLimit(next Processor, maxPointsInResponse, limit int) *Passthrough

func (*Passthrough) Close added in v0.8.4

func (self *Passthrough) Close() error

func (*Passthrough) Name added in v0.8.4

func (self *Passthrough) Name() string

func (*Passthrough) Yield added in v0.8.4

func (self *Passthrough) Yield(seriesIncoming *protocol.Series) (bool, error)

type PercentileAggregator

type PercentileAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*PercentileAggregator) AggregatePoint

func (self *PercentileAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*PercentileAggregator) CalculateSummaries

func (self *PercentileAggregator) CalculateSummaries(state interface{})

func (*PercentileAggregator) ColumnNames

func (self *PercentileAggregator) ColumnNames() []string

func (*PercentileAggregator) GetValues

func (self *PercentileAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type PercentileAggregatorState

type PercentileAggregatorState struct {
	// contains filtered or unexported fields
}

type PointRange

type PointRange struct {
	// contains filtered or unexported fields
}

func (*PointRange) UpdateRange

func (self *PointRange) UpdateRange(point *protocol.Point)

type PointSlice

type PointSlice []protocol.Point

type Processor added in v0.8.4

type Processor interface {
	// Returns (true, nil) if the query should continue. Returns false if
	// processing should stop, either because of an error (in which case the
	// error isn't nil) or because the desired data was read successfully and
	// no more data is needed.
	Yield(s *protocol.Series) (bool, error)
	Name() string

	// Flush any data that could be in the queue
	Close() error
}

A Processor is passed to a shard (e.g. the local datastore) and gets yielded points from series.
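
A minimal sketch of a Processor, written as if inside the package: a hypothetical pass-through stage that counts the series it sees and forwards everything to the next Processor in the chain.

	// countingProcessor is a hypothetical Processor used only for illustration.
	type countingProcessor struct {
		next   Processor
		series int
	}

	func (c *countingProcessor) Yield(s *protocol.Series) (bool, error) {
		c.series++
		// Keep going unless the downstream processor asks us to stop.
		return c.next.Yield(s)
	}

	func (c *countingProcessor) Name() string { return "countingProcessor" }

	func (c *countingProcessor) Close() error {
		// Nothing buffered here; just flush the rest of the chain.
		return c.next.Close()
	}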

func NewJoinEngine added in v0.8.4

func NewJoinEngine(shards []uint32, query *parser.SelectQuery, next Processor) Processor

func NewMergeEngine added in v0.8.4

func NewMergeEngine(shards []uint32, ascending bool, next Processor) Processor

func NewQueryEngine

func NewQueryEngine(next Processor, query *parser.SelectQuery, shards []uint32) (Processor, error)

type SeriesHeap added in v0.8.4

type SeriesHeap struct {
	Ascending bool
	// contains filtered or unexported fields
}

A heap that holds one-point series (series that have one point only) and their stream ids. See http://en.wikipedia.org/wiki/Heap_(data_structure) for more info on heaps. The heap is used by the Merger to emit points from multiple streams in monotonic order.

func NewSeriesHeap added in v0.8.4

func NewSeriesHeap(asc bool) SeriesHeap

func (SeriesHeap) Add added in v0.8.4

func (sh SeriesHeap) Add(streamId int, s *protocol.Series)

Add another one-point series with the given stream id. TODO: This is a slightly inefficient way to construct the initial value slice; if we had a value slice up front we could construct the heap in O(n) instead of the O(n log n) required when the heap is built with multiple calls to Add().

func (SeriesHeap) Next added in v0.8.4

func (sh SeriesHeap) Next() (int, *protocol.Series)

Get and remove the next one-point series with the smallest (or largest) timestamp, according to the Ascending field. TODO: This is slightly inefficient, since we remove a value from the values slice and do a BubbleDown even though the next value from the stream will be added immediately afterwards and cause a BubbleUp. In big-O notation this step doesn't change much; it only adds a constant to the upper bound.

func (SeriesHeap) Size added in v0.8.4

func (sh SeriesHeap) Size() int

Returns the number of values in the heap so far.
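
Putting the heap methods together, a hypothetical helper that drains a SeriesHeap in timestamp order (each input series is assumed to hold a single point, as the heap requires):

	// drainHeap empties the heap and returns the stream ids in the order their
	// points were emitted (ascending timestamps here).
	func drainHeap(series map[int]*protocol.Series) []int {
		h := NewSeriesHeap(true)
		for streamId, s := range series {
			h.Add(streamId, s)
		}
		var order []int
		for h.Size() > 0 {
			streamId, _ := h.Next()
			order = append(order, streamId)
		}
		return order
	}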

type SeriesState

type SeriesState struct {
	// contains filtered or unexported fields
}

type StandardDeviationAggregator

type StandardDeviationAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*StandardDeviationAggregator) AggregatePoint

func (self *StandardDeviationAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*StandardDeviationAggregator) ColumnNames

func (self *StandardDeviationAggregator) ColumnNames() []string

func (*StandardDeviationAggregator) GetValues

func (self *StandardDeviationAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

type StandardDeviationRunning

type StandardDeviationRunning struct {
	// contains filtered or unexported fields
}

type Stream added in v0.8.4

type Stream struct {
	// contains filtered or unexported fields
}

A Stream keeps track of the state of a stream of points. A stream is defined as an ordered stream of points that have the same set of fields. For simplicity this is the stream of points for one time series. This will be extended to include streams of points from multiple series that are merged at the shard level, with multiple streams merged together again at the coordinator level.

func NewStream added in v0.8.4

func NewStream() *Stream

func (*Stream) Close added in v0.8.4

func (stream *Stream) Close()

func (*Stream) Closed added in v0.8.4

func (stream *Stream) Closed() bool

func (*Stream) HasPoint added in v0.8.4

func (stream *Stream) HasPoint() bool

func (*Stream) Next added in v0.8.4

func (stream *Stream) Next() *protocol.Series

func (*Stream) Yield added in v0.8.4

func (stream *Stream) Yield(s *protocol.Series)
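
A sketch of both halves of a Stream, written as if inside the package: the producer pushes series in with Yield and signals the end with Close, while the consumer polls HasPoint and Next.

	// copyPoints is a hypothetical helper that feeds the given series into a
	// Stream, closes it, and then drains it one point at a time.
	func copyPoints(in []*protocol.Series) []*protocol.Series {
		stream := NewStream()
		for _, s := range in {
			stream.Yield(s)
		}
		stream.Close()

		var out []*protocol.Series
		for stream.HasPoint() {
			out = append(out, stream.Next()) // each result holds one point only
		}
		return out
	}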

type StreamQuery added in v0.8.4

type StreamQuery interface {
	// interface that queries the state
	HasPoint() bool
	// Returns a series with one point only
	Next() *protocol.Series
	Closed() bool
}

See documentation of Stream

type StreamUpdate added in v0.8.4

type StreamUpdate interface {
	// interface that updates the state
	Close()
	Yield(*protocol.Series)
}

See documentation of Stream

type TopOrBottomAggregator

type TopOrBottomAggregator struct {
	AbstractAggregator
	// contains filtered or unexported fields
}

func (*TopOrBottomAggregator) AggregatePoint

func (self *TopOrBottomAggregator) AggregatePoint(state interface{}, p *protocol.Point) (interface{}, error)

func (*TopOrBottomAggregator) ColumnNames

func (self *TopOrBottomAggregator) ColumnNames() []string

func (*TopOrBottomAggregator) GetValues

func (self *TopOrBottomAggregator) GetValues(state interface{}) [][]*protocol.FieldValue

func (*TopOrBottomAggregator) InitializeFieldsMetadata

func (self *TopOrBottomAggregator) InitializeFieldsMetadata(series *protocol.Series) error

type TopOrBottomAggregatorState

type TopOrBottomAggregatorState struct {
	// contains filtered or unexported fields
}

type Trie

type Trie struct {
	// contains filtered or unexported fields
}

func NewTrie

func NewTrie(numLevels, numStates int) *Trie

func (*Trie) Clear

func (self *Trie) Clear()

func (*Trie) CountLeafNodes

func (self *Trie) CountLeafNodes() int

func (*Trie) GetNode

func (self *Trie) GetNode(values []*protocol.FieldValue) *Node

func (*Trie) Traverse

func (self *Trie) Traverse(f func([]*protocol.FieldValue, *Node) error) error

func (*Trie) TraverseLevel

func (self *Trie) TraverseLevel(level int, f func([]*protocol.FieldValue, *Node) error) error

Traverses all nodes at the given level; pass -1 to get the nodes at the bottom-most level.
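
A small sketch of walking a Trie, written as if inside the package (how many levels and states the trie has depends on the query being executed):

	// countNodes tallies how many times Traverse invokes its callback; a real
	// caller would read the group-by values and aggregator states instead.
	func countNodes(t *Trie) (int, error) {
		seen := 0
		err := t.Traverse(func(values []*protocol.FieldValue, node *Node) error {
			seen++
			return nil
		})
		return seen, err
	}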

type Value added in v0.8.4

type Value struct {
	// contains filtered or unexported fields
}
