parcacol

package
v0.21.0
Published: Feb 27, 2024 License: Apache-2.0 Imports: 35 Imported by: 2

Documentation

Index

Constants

const (
	ValuePerSecond  = "value_per_second"
	TimestampBucket = "timestamp_bucket"
)

Variables

This section is empty.

Functions

func BinaryFieldFromRecord added in v0.13.0

func BinaryFieldFromRecord(ar arrow.Record, name string) (*array.Binary, error)

func BooleanFieldFromRecord added in v0.13.0

func BooleanFieldFromRecord(ar arrow.Record, name string) (*array.Boolean, error)

func BuildArrowLocations added in v0.19.0

func BuildArrowLocations(allocator memory.Allocator, stacktraces []*pb.Stacktrace, resolvedLocations []*profile.Location, locationIndex map[string]int) (arrow.Record, error)

func CreateDiffColumn added in v0.19.0

func CreateDiffColumn(pool memory.Allocator, rows int) arrow.Array
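A minimal sketch of sizing a diff column to an existing record; the Arrow module version in the import paths is an assumption.

package example

import (
	"github.com/apache/arrow/go/v14/arrow"
	"github.com/apache/arrow/go/v14/arrow/memory"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// diffColumnFor creates a diff column with one entry per row of rec.
// The caller is responsible for releasing the returned array.
func diffColumnFor(rec arrow.Record) arrow.Array {
	return parcacol.CreateDiffColumn(memory.DefaultAllocator, int(rec.NumRows()))
}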

func DictionaryFromRecord added in v0.16.0

func DictionaryFromRecord(ar arrow.Record, name string) (*array.Dictionary, error)

func MatcherToBooleanExpression added in v0.13.0

func MatcherToBooleanExpression(matcher *labels.Matcher) (logicalplan.Expr, error)

func MatchersToBooleanExpressions added in v0.13.0

func MatchersToBooleanExpressions(matchers []*labels.Matcher) ([]logicalplan.Expr, error)
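A minimal sketch of turning Prometheus-style label matchers into filter expressions; the import paths are assumptions based on the package's dependencies.

package example

import (
	"github.com/polarsignals/frostdb/query/logicalplan"
	"github.com/prometheus/prometheus/model/labels"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// selectorToExprs converts a set of label matchers into boolean expressions
// that can be combined into a query filter.
func selectorToExprs() ([]logicalplan.Expr, error) {
	matchers := []*labels.Matcher{
		labels.MustNewMatcher(labels.MatchEqual, "job", "api-server"),
		labels.MustNewMatcher(labels.MatchRegexp, "instance", "node-.+"),
	}
	return parcacol.MatchersToBooleanExpressions(matchers)
}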

func ParquetBufToArrowRecord added in v0.17.0

func ParquetBufToArrowRecord(ctx context.Context, mem memory.Allocator, buf *dynparquet.Buffer, s *dynparquet.Schema, rowsPerRecord uint) ([]arrow.Record, error)

ParquetBufToArrowRecord converts a Parquet buffer to Arrow records. If rowsPerRecord is 0, the entire buffer is converted to a single record.
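A minimal sketch of converting an already-populated buffer into exactly one record by passing rowsPerRecord == 0; how buf and s were produced is out of scope here, and the import paths are assumptions.

package example

import (
	"context"

	"github.com/apache/arrow/go/v14/arrow"
	"github.com/apache/arrow/go/v14/arrow/memory"
	"github.com/polarsignals/frostdb/dynparquet"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// bufferToRecord converts buf into a single Arrow record.
// The caller is responsible for releasing the returned record.
func bufferToRecord(ctx context.Context, buf *dynparquet.Buffer, s *dynparquet.Schema) (arrow.Record, error) {
	records, err := parcacol.ParquetBufToArrowRecord(ctx, memory.DefaultAllocator, buf, s, 0)
	if err != nil {
		return nil, err
	}
	// With rowsPerRecord == 0 the whole buffer lands in one record.
	return records[0], nil
}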

func SampleToParquetRow added in v0.12.0

func SampleToParquetRow(
	schema *dynparquet.Schema,
	row parquet.Row,
	labelNames, profileLabelNames, profileNumLabelNames []string,
	lset map[string]string,
	meta profile.Meta,
	s *profile.NormalizedSample,
) parquet.Row

SampleToParquetRow converts a sample to a Parquet row. The passed labels must be sorted.

func SeriesToArrowRecord added in v0.16.0

func SeriesToArrowRecord(
	mem memory.Allocator,
	schema *dynparquet.Schema,
	series []normalizer.Series,
	labelNames, profileLabelNames, profileNumLabelNames []string,
) (arrow.Record, error)

func StringValueFromDictionary added in v0.16.0

func StringValueFromDictionary(arr *array.Dictionary, i int) string
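A minimal sketch combining DictionaryFromRecord and StringValueFromDictionary to read every value of a dictionary-encoded string column; the column name parameter is hypothetical and the Arrow import path is an assumption.

package example

import (
	"github.com/apache/arrow/go/v14/arrow"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// columnStrings reads all values of a dictionary-encoded string column.
func columnStrings(rec arrow.Record, column string) ([]string, error) {
	dict, err := parcacol.DictionaryFromRecord(rec, column)
	if err != nil {
		return nil, err
	}
	out := make([]string, dict.Len())
	for i := 0; i < dict.Len(); i++ {
		out[i] = parcacol.StringValueFromDictionary(dict, i)
	}
	return out, nil
}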

Types

type ArrowToProfileConverter added in v0.13.0

type ArrowToProfileConverter struct {
	// contains filtered or unexported fields
}

func NewArrowToProfileConverter added in v0.13.0

func NewArrowToProfileConverter(
	tracer trace.Tracer,
	keyMaker *metastore.KeyMaker,
) *ArrowToProfileConverter

func (*ArrowToProfileConverter) Convert added in v0.13.0

type Engine added in v0.13.0

type Engine interface {
	ScanTable(name string) query.Builder
	ScanSchema(name string) query.Builder
}

type ErrMissingColumn added in v0.12.0

type ErrMissingColumn struct {
	Column  string
	Columns int
}

func (ErrMissingColumn) Error added in v0.12.0

func (e ErrMissingColumn) Error() string
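A minimal sketch of surfacing this error's fields with errors.As; how the Columns count is assigned is left to the package.

package example

import (
	"errors"
	"fmt"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// describeError adds column details when err is (or wraps) an ErrMissingColumn.
func describeError(err error) string {
	var missing parcacol.ErrMissingColumn
	if errors.As(err, &missing) {
		return fmt.Sprintf("missing column %q (Columns: %d)", missing.Column, missing.Columns)
	}
	return err.Error()
}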

type Ingester added in v0.12.0

type Ingester struct {
	// contains filtered or unexported fields
}

func NewIngester added in v0.12.0

func NewIngester(
	logger log.Logger,
	mem memory.Allocator,
	table Table,
	schema *dynparquet.Schema,
) Ingester

func (Ingester) Close added in v0.21.0

func (ing Ingester) Close() error

func (Ingester) Ingest added in v0.12.0

type ProfileSymbolizer added in v0.19.0

type ProfileSymbolizer struct {
	// contains filtered or unexported fields
}

func NewProfileSymbolizer added in v0.19.0

func NewProfileSymbolizer(
	tracer trace.Tracer,
	m pb.MetastoreServiceClient,
) *ProfileSymbolizer

func (*ProfileSymbolizer) SymbolizeNormalizedProfile added in v0.19.0

func (s *ProfileSymbolizer) SymbolizeNormalizedProfile(ctx context.Context, p *profile.NormalizedProfile) (profile.OldProfile, error)

type Querier added in v0.13.0

type Querier struct {
	// contains filtered or unexported fields
}

func NewQuerier added in v0.13.0

func NewQuerier(
	logger log.Logger,
	tracer trace.Tracer,
	engine Engine,
	tableName string,
	symbolizer *ProfileSymbolizer,
	pool memory.Allocator,
) *Querier

func (*Querier) Labels added in v0.13.0

func (q *Querier) Labels(
	ctx context.Context,
	match []string,
	start, end time.Time,
) ([]string, error)

func (*Querier) ProfileTypes added in v0.13.0

func (q *Querier) ProfileTypes(
	ctx context.Context,
) ([]*pb.ProfileType, error)

func (*Querier) QueryMerge added in v0.13.0

func (q *Querier) QueryMerge(ctx context.Context, query string, start, end time.Time, aggregateByLabels bool) (profile.Profile, error)

func (*Querier) QueryRange added in v0.13.0

func (q *Querier) QueryRange(
	ctx context.Context,
	query string,
	startTime, endTime time.Time,
	step time.Duration,
	limit uint32,
) ([]*pb.MetricsSeries, error)
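A minimal sketch of a range query over the last fifteen minutes; the profile selector string, step, and limit values are illustrative rather than prescribed by the package.

package example

import (
	"context"
	"fmt"
	"time"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// recentCPUSeries queries per-step metric series for an illustrative CPU profile selector.
func recentCPUSeries(ctx context.Context, q *parcacol.Querier) error {
	end := time.Now()
	start := end.Add(-15 * time.Minute)
	series, err := q.QueryRange(ctx,
		`process_cpu:samples:count:cpu:nanoseconds:delta{job="api"}`,
		start, end,
		15*time.Second, // step
		0,              // limit; 0 is used here to mean "no explicit limit", which is an assumption
	)
	if err != nil {
		return err
	}
	fmt.Printf("got %d series\n", len(series))
	return nil
}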

func (*Querier) QuerySingle added in v0.13.0

func (q *Querier) QuerySingle(
	ctx context.Context,
	query string,
	time time.Time,
) (profile.Profile, error)

func (*Querier) SymbolizeArrowRecord added in v0.19.0

func (q *Querier) SymbolizeArrowRecord(
	ctx context.Context,
	records []arrow.Record,
	valueColumnName string,
) ([]arrow.Record, error)

func (*Querier) Values added in v0.13.0

func (q *Querier) Values(
	ctx context.Context,
	labelName string,
	match []string,
	start, end time.Time,
) ([]string, error)

type QueryParts added in v0.16.0

type QueryParts struct {
	Meta     profile.Meta
	Delta    bool
	Matchers []*labels.Matcher
}

func ParseQuery added in v0.17.0

func ParseQuery(query string) (QueryParts, error)

ParseQuery parses a query string into a QueryParts struct.
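A minimal sketch of parsing a profile selector; the selector string is illustrative.

package example

import (
	"fmt"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// inspectQuery parses an illustrative profile selector and prints its parts.
func inspectQuery() error {
	parts, err := parcacol.ParseQuery(`process_cpu:samples:count:cpu:nanoseconds:delta{job="api"}`)
	if err != nil {
		return err
	}
	// Meta identifies the profile type, Delta marks delta profiles, and
	// Matchers carries the label matchers from the curly-brace selector.
	fmt.Println(parts.Delta, len(parts.Matchers))
	return nil
}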

func QueryToFilterExprs added in v0.13.0

func QueryToFilterExprs(query string) (QueryParts, []logicalplan.Expr, error)

type Table

type Table interface {
	Schema() *dynparquet.Schema
	InsertRecord(context.Context, arrow.Record) (tx uint64, err error)
}
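A minimal sketch of a stub Table, for example to wire up an Ingester in tests; it accepts records without persisting them, and the import paths are assumptions.

package example

import (
	"context"

	"github.com/apache/arrow/go/v14/arrow"
	"github.com/apache/arrow/go/v14/arrow/memory"
	"github.com/go-kit/log"
	"github.com/polarsignals/frostdb/dynparquet"

	"github.com/parca-dev/parca/pkg/parcacol"
)

// discardTable satisfies Table but drops every record it receives.
type discardTable struct {
	schema *dynparquet.Schema
}

func (t *discardTable) Schema() *dynparquet.Schema { return t.schema }

func (t *discardTable) InsertRecord(_ context.Context, _ arrow.Record) (uint64, error) {
	// A real table would persist the record and return its transaction id.
	return 0, nil
}

// newTestIngester wires the stub table into an Ingester.
func newTestIngester(schema *dynparquet.Schema) parcacol.Ingester {
	return parcacol.NewIngester(
		log.NewNopLogger(),
		memory.DefaultAllocator,
		&discardTable{schema: schema},
		schema,
	)
}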
