timeseries

package v0.5.1

Published: Mar 28, 2022 License: Apache-2.0 Imports: 35 Imported by: 0

Documentation

Index

Constants

View Source
const PluginName = "timeseries"

Variables

View Source
var ErrNotActor = errors.New("not_actor: The given timeseries does not accept actions")
View Source
var Handler = func() *chi.Mux {
	m := chi.NewMux()

	m.Get("/object/timeseries", func(w http.ResponseWriter, r *http.Request) {
		ReadData(w, r, false)
	})
	m.Delete("/object/timeseries", func(w http.ResponseWriter, r *http.Request) {
		DeleteData(w, r, false)
	})
	m.Post("/object/timeseries", func(w http.ResponseWriter, r *http.Request) {
		WriteData(w, r, false)
	})
	m.Get("/object/timeseries/length", func(w http.ResponseWriter, r *http.Request) {
		DataLength(w, r, false)
	})

	m.Post("/api/timeseries/dataset", GenerateDataset)

	m.NotFound(rest.NotFoundHandler)
	m.MethodNotAllowed(rest.NotFoundHandler)

	return m
}()

Handler is the global router for the timeseries API
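
Since Handler is a *chi.Mux, it satisfies http.Handler and can be served directly. A minimal sketch (assuming the snippet lives in this package; in Heedy itself the mux is mounted by the plugin runtime, not a standalone server):

package timeseries

import "net/http"

// serveStandalone serves the timeseries routes on a hypothetical port.
func serveStandalone() error {
	return http.ListenAndServe(":8080", Handler)
}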

View Source
var SQLVersion = 1

Functions

func Act

func Act(w http.ResponseWriter, r *http.Request)

Act is given just the data portion of a datapoint, which is inserted at the current timestamp

func DataLength

func DataLength(w http.ResponseWriter, r *http.Request, action bool)

func DeleteData

func DeleteData(w http.ResponseWriter, r *http.Request, action bool)

func GenerateDataset

func GenerateDataset(rw http.ResponseWriter, r *http.Request)

func ParseTimestamp

func ParseTimestamp(ts interface{}) (float64, error)

func ReadData

func ReadData(w http.ResponseWriter, r *http.Request, action bool)

func SQLUpdater

func SQLUpdater(db *database.AdminDB, i *run.Info, h run.BuiltinHelper, curversion int) error

SQLUpdater is in the format expected by Heedy to update the database

func StartTimeseries

func StartTimeseries(db *database.AdminDB, i *run.Info, h run.BuiltinHelper) error

StartTimeseries prepares the plugin by initializing the database

func Unix

func Unix(t time.Time) float64

func UnmarshalEasyRequestNoLimit

func UnmarshalEasyRequestNoLimit(request *http.Request, unmarshalTo easyjson.Unmarshaler) error

UnmarshalEasyRequestNoLimit unmarshals the request body into the given interface without limiting request size. This should probably be replaced at some point.

func WriteData

func WriteData(w http.ResponseWriter, r *http.Request, action bool)

Types

type BatchDatapointIterator

type BatchDatapointIterator struct {
	BatchIterator
	// contains filtered or unexported fields
}

func NewBatchDatapointIterator

func NewBatchDatapointIterator(bi BatchIterator, da DatapointArray) *BatchDatapointIterator

func (*BatchDatapointIterator) Next

func (bdi *BatchDatapointIterator) Next() (dp *Datapoint, err error)

type BatchEndOffset

type BatchEndOffset struct {
	BatchIterator
	EndBatch float64
	Offset   int
}

func (BatchEndOffset) NextBatch

func (beo BatchEndOffset) NextBatch() (DatapointArray, error)

type BatchEndTime

type BatchEndTime struct {
	BatchIterator
	EndTime float64
}

BatchEndTime returns only batches with timestamps < EndTime

func (BatchEndTime) NextBatch

func (bet BatchEndTime) NextBatch() (DatapointArray, error)

type BatchIterator

type BatchIterator interface {
	NextBatch() (DatapointArray, error)
	Close() error
}

BatchIterator iterates through successive batches of datapoints coming from the database
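
As a sketch, a consumer might drain a BatchIterator as follows. It assumes exhaustion is signaled by a nil batch with a nil error, a convention this page does not state explicitly:

package timeseries

// drainBatches counts datapoints across all batches (illustrative only).
func drainBatches(bi BatchIterator) (int, error) {
	defer bi.Close()
	total := 0
	for {
		batch, err := bi.NextBatch()
		if err != nil {
			return total, err
		}
		if batch == nil {
			return total, nil // assumed end-of-data convention
		}
		total += len(batch)
	}
}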

type BatchPointLimit

type BatchPointLimit struct {
	BatchIterator
	Limit int64
}

func (*BatchPointLimit) NextBatch

func (bpl *BatchPointLimit) NextBatch() (DatapointArray, error)

type ChanBatchIterator

type ChanBatchIterator struct {
	// contains filtered or unexported fields
}

ChanBatchIterator runs NextBatch() in a goroutine, so that pre-processing and post-processing can happen in parallel

func NewChanBatchIterator

func NewChanBatchIterator(di BatchIterator) *ChanBatchIterator

func (*ChanBatchIterator) Close

func (c *ChanBatchIterator) Close() error

func (*ChanBatchIterator) NextBatch

func (c *ChanBatchIterator) NextBatch() (DatapointArray, error)

type ChanIterator

type ChanIterator struct {
	// contains filtered or unexported fields
}

ChanIterator runs the iteration in a goroutine, so that pre-processing and post-processing of the data can happen in parallel

func NewChanIterator

func NewChanIterator(di DatapointIterator) *ChanIterator

func (*ChanIterator) Close

func (c *ChanIterator) Close() error

func (*ChanIterator) Next

func (c *ChanIterator) Next() (*Datapoint, error)

type Closer

type Closer interface {
	Close() error
}

type DataValidator

type DataValidator struct {
	// contains filtered or unexported fields
}

DataValidator performs all of the validation necessary on a timeseries for it to conform to a permissions-based system. This includes validating the schema and ensuring that the actor is set correctly.

func NewDataValidator

func NewDataValidator(data DatapointIterator, schema interface{}, actor string) (*DataValidator, error)

NewDataValidator ensures that the timeseries data fits the given schema and has the actor set properly
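
A sketch of wrapping an in-memory array in a validator. The schema is assumed here to be a JSON-Schema-style map, and "myuser" is a hypothetical actor; the exact schema format is not shown on this page:

package timeseries

func validateExample(dpa DatapointArray) (DatapointArray, error) {
	schema := map[string]interface{}{"type": "number"} // assumed JSON-Schema-style map
	v, err := NewDataValidator(NewDatapointArrayIterator(dpa), schema, "myuser")
	if err != nil {
		return nil, err
	}
	defer v.Close()
	// Each Next() call validates the datapoint and sets its actor.
	return NewArrayFromIterator(v)
}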

func (*DataValidator) Close

func (s *DataValidator) Close() error

Close closes the underlying timeseries

func (*DataValidator) Next

func (s *DataValidator) Next() (*Datapoint, error)

Next sets the actor, and performs schema validation

type Datapoint

type Datapoint struct {
	Timestamp float64     `json:"t" db:"timestamp" msg:"t"`
	Duration  float64     `json:"dt,omitempty" db:"duration" msg:"dt,omitempty"`
	Data      interface{} `json:"d" db:"data" msg:"d"`

	Actor string `json:"a,omitempty" db:"actor" msg:"a,omitempty"`
}

func NewDatapoint

func NewDatapoint(data interface{}) *Datapoint

NewDatapoint returns a datapoint with the current timestamp
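
For example (the payload is hypothetical):

package timeseries

import "fmt"

func newDatapointExample() {
	dp := NewDatapoint(map[string]interface{}{"steps": 250})
	fmt.Println(dp.String()) // prints the datapoint's JSON, stamped with the current time
}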

func (*Datapoint) DecodeMsg

func (z *Datapoint) DecodeMsg(dc *msgp.Reader) (err error)

DecodeMsg implements msgp.Decodable

func (*Datapoint) EncodeMsg

func (z *Datapoint) EncodeMsg(en *msgp.Writer) (err error)

EncodeMsg implements msgp.Encodable

func (*Datapoint) EndTime

func (d *Datapoint) EndTime() float64

func (*Datapoint) IsEqual

func (d *Datapoint) IsEqual(dp *Datapoint) bool

IsEqual checks if the datapoint is equal to another datapoint

func (Datapoint) MarshalEasyJSON

func (v Datapoint) MarshalEasyJSON(w *jwriter.Writer)

MarshalEasyJSON supports easyjson.Marshaler interface

func (Datapoint) MarshalJSON

func (v Datapoint) MarshalJSON() ([]byte, error)

MarshalJSON supports json.Marshaler interface

func (*Datapoint) MarshalMsg

func (z *Datapoint) MarshalMsg(b []byte) (o []byte, err error)

MarshalMsg implements msgp.Marshaler

func (*Datapoint) Msgsize

func (z *Datapoint) Msgsize() (s int)

Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message

func (*Datapoint) Overlaps added in v0.5.0

func (d *Datapoint) Overlaps(d2 *Datapoint) bool

Overlaps returns whether there is an overlap in the datapoints, meaning that either they share a common timestamp, or there is an overlap in duration.
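
A sketch with illustrative values: the second point's timestamp falls inside the first point's duration, so Overlaps should report true:

package timeseries

func overlapsExample() bool {
	a := &Datapoint{Timestamp: 100, Duration: 10, Data: 1}
	b := &Datapoint{Timestamp: 105, Data: 2} // 105 lies within [100, 110)
	return a.Overlaps(b) // expected true
}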

func (*Datapoint) String

func (d *Datapoint) String() string

String returns a JSON representation of the datapoint

func (*Datapoint) UnmarshalEasyJSON

func (v *Datapoint) UnmarshalEasyJSON(l *jlexer.Lexer)

UnmarshalEasyJSON supports easyjson.Unmarshaler interface

func (*Datapoint) UnmarshalJSON

func (v *Datapoint) UnmarshalJSON(data []byte) error

UnmarshalJSON supports json.Unmarshaler interface

func (*Datapoint) UnmarshalMsg

func (z *Datapoint) UnmarshalMsg(bts []byte) (o []byte, err error)

UnmarshalMsg implements msgp.Unmarshaler

type DatapointArray

type DatapointArray []*Datapoint

A DatapointArray is a slice of datapoints with a number of useful helper methods defined on it
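
As a sketch, the compressed round trip pairs ToBytes with DatapointArrayFromBytes (both below), and IsEqual can verify the result:

package timeseries

func roundTripExample(dpa DatapointArray) (bool, error) {
	b, err := dpa.ToBytes() // gzip-compressed representation
	if err != nil {
		return false, err
	}
	dpa2, err := DatapointArrayFromBytes(b)
	if err != nil {
		return false, err
	}
	return dpa.IsEqual(dpa2), nil // expected true for a lossless round trip
}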

func BatchTOffset

func BatchTOffset(bi BatchIterator, da DatapointArray, t float64) (DatapointArray, error)

BatchTOffset reads the BatchIterator until the given start time, and returns the remainder of the batch where the time started.

func DatapointArrayFromBytes

func DatapointArrayFromBytes(b []byte) (dpa DatapointArray, err error)

DatapointArrayFromBytes decompresses a gzipped byte array for the compressed representation of a DatapointArray

func NewArrayFromIterator

func NewArrayFromIterator(di DatapointIterator) (DatapointArray, error)

NewArrayFromIterator creates a datapoint array from the given iterator

func Toffset

func Toffset(dpa DatapointArray, t float64) DatapointArray

func (*DatapointArray) DecodeMsg

func (z *DatapointArray) DecodeMsg(dc *msgp.Reader) (err error)

DecodeMsg implements msgp.Decodable

func (DatapointArray) EncodeMsg

func (z DatapointArray) EncodeMsg(en *msgp.Writer) (err error)

EncodeMsg implements msgp.Encodable

func (DatapointArray) IsEqual

func (dpa DatapointArray) IsEqual(d DatapointArray) bool

IsEqual checks if two DatapointArrays contain the same data

func (DatapointArray) MarshalEasyJSON

func (v DatapointArray) MarshalEasyJSON(w *jwriter.Writer)

MarshalEasyJSON supports easyjson.Marshaler interface

func (DatapointArray) MarshalJSON

func (v DatapointArray) MarshalJSON() ([]byte, error)

MarshalJSON supports json.Marshaler interface

func (DatapointArray) MarshalMsg

func (z DatapointArray) MarshalMsg(b []byte) (o []byte, err error)

MarshalMsg implements msgp.Marshaler

func (DatapointArray) Msgsize

func (z DatapointArray) Msgsize() (s int)

Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message

func (DatapointArray) String

func (dpa DatapointArray) String() string

String returns a JSON representation of the datapoint array

func (DatapointArray) ToBytes

func (dpa DatapointArray) ToBytes() ([]byte, error)

func (*DatapointArray) UnmarshalEasyJSON

func (v *DatapointArray) UnmarshalEasyJSON(l *jlexer.Lexer)

UnmarshalEasyJSON supports easyjson.Unmarshaler interface

func (*DatapointArray) UnmarshalJSON

func (v *DatapointArray) UnmarshalJSON(data []byte) error

UnmarshalJSON supports json.Unmarshaler interface

func (*DatapointArray) UnmarshalMsg

func (z *DatapointArray) UnmarshalMsg(bts []byte) (o []byte, err error)

UnmarshalMsg implements msgp.Unmarshaler

type DatapointArrayIterator

type DatapointArrayIterator struct {
	// contains filtered or unexported fields
}

DatapointArrayIterator allows DatapointArray to conform to the DatapointIterator interface

func NewDatapointArrayIterator

func NewDatapointArrayIterator(da DatapointArray) *DatapointArrayIterator

NewDatapointArrayIterator wraps the given DatapointArray in a DatapointIterator

func (*DatapointArrayIterator) Close

func (d *DatapointArrayIterator) Close() error

Close resets the range

func (*DatapointArrayIterator) Index

func (d *DatapointArrayIterator) Index() int64

Index returns the index of the DatapointArray

func (*DatapointArrayIterator) Next

func (d *DatapointArrayIterator) Next() (*Datapoint, error)

Next returns the next datapoint

func (*DatapointArrayIterator) NextArray

func (d *DatapointArrayIterator) NextArray() (DatapointArray, error)

NextArray returns what is left of the array

type DatapointIterator

type DatapointIterator interface {
	Next() (*Datapoint, error)
	Close() error
}
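
A sketch of draining an iterator into memory; like the batch version above, it assumes a nil datapoint with a nil error marks the end of data (this is essentially what NewArrayFromIterator above does):

package timeseries

func drainIterator(di DatapointIterator) (DatapointArray, error) {
	defer di.Close()
	var dpa DatapointArray
	for {
		dp, err := di.Next()
		if err != nil {
			return nil, err
		}
		if dp == nil {
			return dpa, nil // assumed end-of-data convention
		}
		dpa = append(dpa, dp)
	}
}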

func NewTransformIterator

func NewTransformIterator(transform string, it DatapointIterator) (DatapointIterator, error)

type Dataset

type Dataset struct {
	Query
	Merge []*Query    `json:"merge,omitempty"`
	Dt    interface{} `json:"dt,omitempty"`
	Key   string      `json:"key,omitempty"`
	// A dataset is a map of subelements, which can themselves be merge queries
	Dataset map[string]*DatasetElement `json:"dataset,omitempty"`

	PostTransform string `json:"post_transform,omitempty"`
}
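
A sketch of building a dataset query in code, correlating two series. The timeseries IDs and interpolator name are hypothetical, and the relative time strings assume a "now-1d"-style format (not confirmed on this page); Validate and Get below would then check and execute the query:

package timeseries

func datasetExample() *Dataset {
	return &Dataset{
		Query: Query{Timeseries: "steps-id", T1: "now-1d"}, // hypothetical ID; time format assumed
		Dataset: map[string]*DatasetElement{
			"heartrate": {
				Query:        Query{Timeseries: "hr-id", T1: "now-1d"},
				Interpolator: "closest", // hypothetical interpolator name
			},
		},
	}
}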

func (*Dataset) Get

func (d *Dataset) Get(db database.DB) (*DatasetIterator, error)

func (*Dataset) GetTimeseries

func (d *Dataset) GetTimeseries() (m map[string]int)

GetTimeseries returns a map of all the timeseries IDs included in the query

func (*Dataset) Validate

func (d *Dataset) Validate() error

type DatasetElement

type DatasetElement struct {
	Query
	Merge        []*Query `json:"merge"`
	Interpolator string   `json:"interpolator"`
	AllowNull    bool     `json:"allow_null"`
}

func (*DatasetElement) Get

func (d *DatasetElement) Get(db database.DB, tstart float64) (*DatasetIterator, error)

func (*DatasetElement) GetTimeseries

func (d *DatasetElement) GetTimeseries() map[string]int

func (*DatasetElement) Validate

func (d *DatasetElement) Validate() error

type DatasetIterator

type DatasetIterator struct {
	// contains filtered or unexported fields
}

func GetMerge

func GetMerge(db database.DB, q []*Query, tstart float64) (*DatasetIterator, error)

func (*DatasetIterator) Close

func (di *DatasetIterator) Close() (err error)

func (*DatasetIterator) Next

func (di *DatasetIterator) Next() (*Datapoint, error)

type EmptyIterator

type EmptyIterator struct{}

func (EmptyIterator) Close

func (e EmptyIterator) Close() error

func (EmptyIterator) Next

func (e EmptyIterator) Next() (*Datapoint, error)

type InfoIterator

type InfoIterator struct {
	DatapointIterator
	Tstart    float64
	Tend      float64
	Count     int64
	LastPoint *Datapoint
}

func NewInfoIterator

func NewInfoIterator(di DatapointIterator) *InfoIterator

func (*InfoIterator) Next

func (i *InfoIterator) Next() (*Datapoint, error)

type InsertQuery

type InsertQuery struct {
	Actions  *bool `json:"actions,omitempty"`
	Validate *bool `json:"validate,omitempty"` // Whether or not to validate the insert against the schema

	// insert, append, update - default is update
	Method *string `json:"method,omitempty"`
}
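
A sketch of an insert that appends (per the method comment above) without schema validation:

package timeseries

func appendInsertQuery() *InsertQuery {
	method := "append"
	validate := false
	return &InsertQuery{Method: &method, Validate: &validate}
}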

func (*InsertQuery) DecodeMsg

func (z *InsertQuery) DecodeMsg(dc *msgp.Reader) (err error)

DecodeMsg implements msgp.Decodable

func (*InsertQuery) EncodeMsg

func (z *InsertQuery) EncodeMsg(en *msgp.Writer) (err error)

EncodeMsg implements msgp.Encodable

func (*InsertQuery) MarshalMsg

func (z *InsertQuery) MarshalMsg(b []byte) (o []byte, err error)

MarshalMsg implements msgp.Marshaler

func (*InsertQuery) Msgsize

func (z *InsertQuery) Msgsize() (s int)

Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message

func (*InsertQuery) UnmarshalMsg

func (z *InsertQuery) UnmarshalMsg(bts []byte) (o []byte, err error)

UnmarshalMsg implements msgp.Unmarshaler

type IteratedBatcher

type IteratedBatcher struct {
	// contains filtered or unexported fields
}

IteratedBatcher behaves like an SQLBatchIterator, but it closes the SQL connection between calls to NextBatch, so that updates/edits to the database can happen in the meantime.

func NewIteratedBatcher

func NewIteratedBatcher(tx *database.TxWrapper, table string, tsid string, tstart float64, queueSize int) *IteratedBatcher

func (*IteratedBatcher) Close

func (ib *IteratedBatcher) Close() error

func (*IteratedBatcher) DecodeMsg

func (z *IteratedBatcher) DecodeMsg(dc *msgp.Reader) (err error)

DecodeMsg implements msgp.Decodable

func (IteratedBatcher) EncodeMsg

func (z IteratedBatcher) EncodeMsg(en *msgp.Writer) (err error)

EncodeMsg implements msgp.Encodable

func (IteratedBatcher) MarshalMsg

func (z IteratedBatcher) MarshalMsg(b []byte) (o []byte, err error)

MarshalMsg implements msgp.Marshaler

func (IteratedBatcher) Msgsize

func (z IteratedBatcher) Msgsize() (s int)

Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message

func (*IteratedBatcher) NextBatch

func (ib *IteratedBatcher) NextBatch() (DatapointArray, error)

func (*IteratedBatcher) UnmarshalMsg

func (z *IteratedBatcher) UnmarshalMsg(bts []byte) (o []byte, err error)

UnmarshalMsg implements msgp.Unmarshaler

type JsonArrayReader

type JsonArrayReader struct {
	DatapointIterator
	// contains filtered or unexported fields
}

func NewJsonArrayReader

func NewJsonArrayReader(data DatapointIterator, batchsize int) (*JsonArrayReader, error)

NewJsonArrayReader converts a DatapointIterator into an io.Reader, allowing an arbitrarily large response to be written

func (*JsonArrayReader) Read

func (r *JsonArrayReader) Read(p []byte) (n int, err error)

Read reads up to len(p) bytes from the DataRange into the buffer p

type JsonReader

type JsonReader struct {
	Separator []byte // The separator to use between datapoints
	Ender     []byte
	// contains filtered or unexported fields
}

JsonReader implements the io.Reader interface

func NewJsonReader

func NewJsonReader(data DatapointIterator, starter string, separator string, footer string) (*JsonReader, error)

NewJsonReader creates a JsonReader with the given starter, separator, and footer strings
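
A sketch of streaming a response with it, choosing "[", ",", "]" to emit one JSON array without buffering the full result (NewJsonArrayReader above may be the more direct tool for this case):

package timeseries

import (
	"io"
	"net/http"
)

func streamJSON(w http.ResponseWriter, di DatapointIterator) error {
	r, err := NewJsonReader(di, "[", ",", "]")
	if err != nil {
		return err
	}
	defer r.Close()
	w.Header().Set("Content-Type", "application/json")
	_, err = io.Copy(w, r)
	return err
}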

func (*JsonReader) Close

func (r *JsonReader) Close()

Close shuts down the internal DataRange

func (*JsonReader) Read

func (r *JsonReader) Read(p []byte) (n int, err error)

Read reads up to len(p) bytes from the DataRange into the buffer p

type NumIterator

type NumIterator struct {
	// contains filtered or unexported fields
}

NumIterator returns at most a given number of datapoints (with an optional skip) from a DatapointIterator

func NewNumIterator

func NewNumIterator(dr DatapointIterator, datapoints int64) *NumIterator

NewNumIterator initializes a new NumIterator which will return up to the given number of datapoints.
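
A sketch of index-based paging with Skip (below) and the limit; the numbers are illustrative:

package timeseries

// pageExample skips the first 20 datapoints, then reads at most 10.
func pageExample(di DatapointIterator) (DatapointArray, error) {
	it := NewNumIterator(di, 10)
	defer it.Close()
	if err := it.Skip(20); err != nil {
		return nil, err
	}
	return NewArrayFromIterator(it)
}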

func (*NumIterator) Close

func (r *NumIterator) Close() error

Close closes the internal DatapointIterator

func (*NumIterator) Next

func (r *NumIterator) Next() (*Datapoint, error)

Next returns the next datapoint from the underlying DatapointIterator, so long as it is within the number of datapoints left to return.

func (*NumIterator) Skip

func (r *NumIterator) Skip(num int) error

Skip the given number of datapoints without changing the number of datapoints left to return

type PipeIterator

type PipeIterator struct {
	// contains filtered or unexported fields
}

PipeIterator allows using PipeScript transforms over a DatapointIterator

func (PipeIterator) Next

func (pi PipeIterator) Next() (*Datapoint, error)

type Query

type Query struct {
	Timeseries string      `json:"timeseries,omitempty"`
	T1         interface{} `json:"t1,omitempty"`
	T2         interface{} `json:"t2,omitempty"`
	I1         *int64      `json:"i1,omitempty" schema:"i1"`
	I2         *int64      `json:"i2,omitempty" schema:"i2"`
	Limit      *int64      `json:"limit,omitempty" schema:"limit"`
	T          interface{} `json:"t,omitempty"`
	I          *int64      `json:"i,omitempty" schema:"i"`
	Transform  *string     `json:"transform,omitempty" schema:"transform"`
	Actions    *bool       `json:"actions,omitempty" schema:"actions"`
}
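
A sketch of a bounded read: the last day of data, capped at 1000 datapoints and run through a transform. The ID, time strings, and transform expression are illustrative assumptions:

package timeseries

func queryExample() *Query {
	limit := int64(1000)
	transform := "mean" // assumed PipeScript expression
	return &Query{
		Timeseries: "my-timeseries-id", // hypothetical ID
		T1:         "now-1d",           // time format assumed
		T2:         "now",
		Limit:      &limit,
		Transform:  &transform,
	}
}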

func (*Query) Get

func (q *Query) Get(db database.DB, tstart float64) (*DatasetIterator, error)

func (Query) String

func (q Query) String() string

String returns a JSON representation of the query

type SQLBatchIterator

type SQLBatchIterator struct {
	Rows   *sqlx.Rows
	Closer func()
}

SQLBatchIterator takes a query that returns the raw batch bytes, and outputs the resulting DatapointArray

func (SQLBatchIterator) Close

func (b SQLBatchIterator) Close() error

func (SQLBatchIterator) NextBatch

func (b SQLBatchIterator) NextBatch() (DatapointArray, error)

type SortChecker

type SortChecker struct {
	DatapointIterator
	// contains filtered or unexported fields
}

func NewSortChecker

func NewSortChecker(di DatapointIterator) *SortChecker

func (*SortChecker) Next

func (o *SortChecker) Next() (dp *Datapoint, err error)

type TimeseriesDB

type TimeseriesDB struct {
	DB                    *database.AdminDB `mapstructure:"-"`
	BatchSize             int               `mapstructure:"batch_size"`
	MaxBatchSize          int               `mapstructure:"max_batch_size"`
	BatchCompressionLevel int               `mapstructure:"batch_compression_level"`
	CompressQueryResponse bool              `mapstructure:"compress_query_response"`
}

var TSDB TimeseriesDB

TSDB is the global timeseries DB object, initialized on database start

func (*TimeseriesDB) Delete

func (ts *TimeseriesDB) Delete(q *Query) error

func (*TimeseriesDB) Insert

func (ts *TimeseriesDB) Insert(tsid string, data DatapointIterator, q *InsertQuery) (err error)

func (*TimeseriesDB) Length

func (ts *TimeseriesDB) Length(tsid string, actions bool) (l int64, err error)

func (*TimeseriesDB) Query

func (ts *TimeseriesDB) Query(q *Query) (DatapointIterator, error)

Query runs the given query, while adding on the transform and limit reading
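
A sketch of reading a full timeseries through the global TSDB object, which is assumed to have been initialized by StartTimeseries during database startup:

package timeseries

func readAll(tsid string) (DatapointArray, error) {
	it, err := TSDB.Query(&Query{Timeseries: tsid})
	if err != nil {
		return nil, err
	}
	defer it.Close()
	return NewArrayFromIterator(it)
}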

type TimeseriesInfo

type TimeseriesInfo struct {
	plugin.ObjectInfo
	Schema map[string]interface{}
	Actor  bool
}

func GetTimeseriesInfo

func GetTimeseriesInfo(r *http.Request) (*TimeseriesInfo, error)

type TimeseriesWriteEvent

type TimeseriesWriteEvent struct {
	T1    float64    `json:"t1"`
	T2    float64    `json:"t2"`
	Count int64      `json:"count"`
	DP    *Datapoint `json:"dp,omitempty"`
}
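
A sketch of the wire shape implied by the struct tags; the values are illustrative, and DP is omitted when nil:

package timeseries

import (
	"encoding/json"
	"fmt"
)

func writeEventExample() {
	ev := TimeseriesWriteEvent{T1: 100, T2: 110, Count: 3}
	b, _ := json.Marshal(ev)
	fmt.Println(string(b)) // {"t1":100,"t2":110,"count":3}
}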

type TransformIterator

type TransformIterator struct {
	// contains filtered or unexported fields
}

func (*TransformIterator) Close

func (pi *TransformIterator) Close() error

func (*TransformIterator) Next

func (pi *TransformIterator) Next() (*Datapoint, error)
