timeseriesinsights

package
v0.0.0-...-b8a5c65 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 6, 2024 License: Apache-2.0 Imports: 13 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

View Source
var (
	DataSet_State_name = map[int32]string{
		0: "STATE_UNSPECIFIED",
		1: "UNKNOWN",
		2: "PENDING",
		3: "LOADING",
		4: "LOADED",
		5: "UNLOADING",
		6: "UNLOADED",
		7: "FAILED",
	}
	DataSet_State_value = map[string]int32{
		"STATE_UNSPECIFIED": 0,
		"UNKNOWN":           1,
		"PENDING":           2,
		"LOADING":           3,
		"LOADED":            4,
		"UNLOADING":         5,
		"UNLOADED":          6,
		"FAILED":            7,
	}
)

Enum value maps for DataSet_State.

View Source
var (
	ForecastParams_Period_name = map[int32]string{
		0: "PERIOD_UNSPECIFIED",
		5: "HOURLY",
		1: "DAILY",
		2: "WEEKLY",
		3: "MONTHLY",
		4: "YEARLY",
	}
	ForecastParams_Period_value = map[string]int32{
		"PERIOD_UNSPECIFIED": 0,
		"HOURLY":             5,
		"DAILY":              1,
		"WEEKLY":             2,
		"MONTHLY":            3,
		"YEARLY":             4,
	}
)

Enum value maps for ForecastParams_Period.

View Source
var (
	TimeseriesParams_AggregationMethod_name = map[int32]string{
		0: "AGGREGATION_METHOD_UNSPECIFIED",
		1: "SUM",
		2: "AVERAGE",
	}
	TimeseriesParams_AggregationMethod_value = map[string]int32{
		"AGGREGATION_METHOD_UNSPECIFIED": 0,
		"SUM":                            1,
		"AVERAGE":                        2,
	}
)

Enum value maps for TimeseriesParams_AggregationMethod.

View Source
var File_google_cloud_timeseriesinsights_v1_timeseries_insights_proto protoreflect.FileDescriptor

Functions

func RegisterTimeseriesInsightsControllerServer

func RegisterTimeseriesInsightsControllerServer(s *grpc.Server, srv TimeseriesInsightsControllerServer)

Types

type AppendEventsRequest

type AppendEventsRequest struct {

	// Events to be appended.
	//
	// Note:
	//
	//  0. The [DataSet][google.cloud.timeseriesinsights.v1.DataSet] must be shown in a `LOADED` state
	//     in the results of `list` method; otherwise, all events from
	//     the append request will be dropped, and a `NOT_FOUND` status will be
	//     returned.
	//  0. All events in a single request must have the same
	//     [groupId][google.cloud.timeseriesinsights.v1.Event.group_id] if set; otherwise, an
	//     `INVALID_ARGUMENT` status will be returned.
	//  0. If [groupId][google.cloud.timeseriesinsights.v1.Event.group_id] is not set (or 0), there
	//     should be only 1 event; otherwise, an `INVALID_ARGUMENT` status will be
	//     returned.
	//  0. The events must be newer than the current time minus
	//     [DataSet TTL][google.cloud.timeseriesinsights.v1.DataSet.ttl] or they will be dropped.
	Events []*Event `protobuf:"bytes,1,rep,name=events,proto3" json:"events,omitempty"`
	// Required. The DataSet to which we want to append in the format of
	// "projects/{project}/datasets/{dataset}"
	Dataset string `protobuf:"bytes,2,opt,name=dataset,proto3" json:"dataset,omitempty"`
	// contains filtered or unexported fields
}

Appends events to an existing DataSet.

func (*AppendEventsRequest) Descriptor deprecated

func (*AppendEventsRequest) Descriptor() ([]byte, []int)

Deprecated: Use AppendEventsRequest.ProtoReflect.Descriptor instead.

func (*AppendEventsRequest) GetDataset

func (x *AppendEventsRequest) GetDataset() string

func (*AppendEventsRequest) GetEvents

func (x *AppendEventsRequest) GetEvents() []*Event

func (*AppendEventsRequest) ProtoMessage

func (*AppendEventsRequest) ProtoMessage()

func (*AppendEventsRequest) ProtoReflect

func (x *AppendEventsRequest) ProtoReflect() protoreflect.Message

func (*AppendEventsRequest) Reset

func (x *AppendEventsRequest) Reset()

func (*AppendEventsRequest) String

func (x *AppendEventsRequest) String() string

type AppendEventsResponse

type AppendEventsResponse struct {

	// Dropped events; empty if all events are successfully added.
	DroppedEvents []*Event `protobuf:"bytes,1,rep,name=dropped_events,json=droppedEvents,proto3" json:"dropped_events,omitempty"`
	// contains filtered or unexported fields
}

Response for an AppendEvents RPC.

func (*AppendEventsResponse) Descriptor deprecated

func (*AppendEventsResponse) Descriptor() ([]byte, []int)

Deprecated: Use AppendEventsResponse.ProtoReflect.Descriptor instead.

func (*AppendEventsResponse) GetDroppedEvents

func (x *AppendEventsResponse) GetDroppedEvents() []*Event

func (*AppendEventsResponse) ProtoMessage

func (*AppendEventsResponse) ProtoMessage()

func (*AppendEventsResponse) ProtoReflect

func (x *AppendEventsResponse) ProtoReflect() protoreflect.Message

func (*AppendEventsResponse) Reset

func (x *AppendEventsResponse) Reset()

func (*AppendEventsResponse) String

func (x *AppendEventsResponse) String() string

type BigqueryMapping

type BigqueryMapping struct {

	// The column which should be used as the event timestamps. If not specified
	// 'Timestamp' is used by default. The column may have TIMESTAMP or INT64
	// type (the latter is interpreted as microseconds since the Unix epoch).
	TimestampColumn string `protobuf:"bytes,1,opt,name=timestamp_column,json=timestampColumn,proto3" json:"timestamp_column,omitempty"`
	// The column which should be used as the group ID (grouping events into
	// sessions). If not specified, 'GroupId' is used by default; if the input
	// table does not have such a column, random unique group IDs are
	// generated automatically (different group ID per input row).
	GroupIdColumn string `protobuf:"bytes,2,opt,name=group_id_column,json=groupIdColumn,proto3" json:"group_id_column,omitempty"`
	// The list of columns that should be translated to dimensions. If empty,
	// all columns are translated to dimensions. The timestamp and group_id
	// columns should not be listed here again. Columns are expected to have
	// primitive types (STRING, INT64, FLOAT64 or NUMERIC).
	DimensionColumn []string `protobuf:"bytes,3,rep,name=dimension_column,json=dimensionColumn,proto3" json:"dimension_column,omitempty"`
	// contains filtered or unexported fields
}

Mapping of BigQuery columns to timestamp, group_id and dimensions.

func (*BigqueryMapping) Descriptor deprecated

func (*BigqueryMapping) Descriptor() ([]byte, []int)

Deprecated: Use BigqueryMapping.ProtoReflect.Descriptor instead.

func (*BigqueryMapping) GetDimensionColumn

func (x *BigqueryMapping) GetDimensionColumn() []string

func (*BigqueryMapping) GetGroupIdColumn

func (x *BigqueryMapping) GetGroupIdColumn() string

func (*BigqueryMapping) GetTimestampColumn

func (x *BigqueryMapping) GetTimestampColumn() string

func (*BigqueryMapping) ProtoMessage

func (*BigqueryMapping) ProtoMessage()

func (*BigqueryMapping) ProtoReflect

func (x *BigqueryMapping) ProtoReflect() protoreflect.Message

func (*BigqueryMapping) Reset

func (x *BigqueryMapping) Reset()

func (*BigqueryMapping) String

func (x *BigqueryMapping) String() string

type CreateDataSetRequest

type CreateDataSetRequest struct {

	// Required. Client project name which will own this DataSet in the format of
	// 'projects/{project}'.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Required. Dataset to be loaded.
	Dataset *DataSet `protobuf:"bytes,2,opt,name=dataset,proto3" json:"dataset,omitempty"`
	// contains filtered or unexported fields
}

Create a DataSet request.

func (*CreateDataSetRequest) Descriptor deprecated

func (*CreateDataSetRequest) Descriptor() ([]byte, []int)

Deprecated: Use CreateDataSetRequest.ProtoReflect.Descriptor instead.

func (*CreateDataSetRequest) GetDataset

func (x *CreateDataSetRequest) GetDataset() *DataSet

func (*CreateDataSetRequest) GetParent

func (x *CreateDataSetRequest) GetParent() string

func (*CreateDataSetRequest) ProtoMessage

func (*CreateDataSetRequest) ProtoMessage()

func (*CreateDataSetRequest) ProtoReflect

func (x *CreateDataSetRequest) ProtoReflect() protoreflect.Message

func (*CreateDataSetRequest) Reset

func (x *CreateDataSetRequest) Reset()

func (*CreateDataSetRequest) String

func (x *CreateDataSetRequest) String() string

type DataSet

type DataSet struct {

	// The dataset name, which will be used for querying, status and unload
	// requests. This must be unique within a project.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// [Data dimension names][google.cloud.timeseriesinsights.v1.EventDimension.name] allowed for this `DataSet`.
	//
	// If left empty, all dimension names are included. This field works as a
	// filter to avoid regenerating the data.
	DataNames []string `protobuf:"bytes,2,rep,name=data_names,json=dataNames,proto3" json:"data_names,omitempty"`
	// Input data.
	DataSources []*DataSource `protobuf:"bytes,3,rep,name=data_sources,json=dataSources,proto3" json:"data_sources,omitempty"`
	// Dataset state in the system.
	State DataSet_State `protobuf:"varint,4,opt,name=state,proto3,enum=google.cloud.timeseriesinsights.v1.DataSet_State" json:"state,omitempty"`
	// Dataset processing status.
	Status *status.Status `protobuf:"bytes,5,opt,name=status,proto3" json:"status,omitempty"`
	// Periodically we discard dataset [Event][google.cloud.timeseriesinsights.v1.Event] objects that have
	// timestamps older than 'ttl'.  Omitting this field or a zero value means no
	// events are discarded.
	Ttl *durationpb.Duration `protobuf:"bytes,6,opt,name=ttl,proto3" json:"ttl,omitempty"`
	// contains filtered or unexported fields
}

A collection of data sources sent for processing.

func (*DataSet) Descriptor deprecated

func (*DataSet) Descriptor() ([]byte, []int)

Deprecated: Use DataSet.ProtoReflect.Descriptor instead.

func (*DataSet) GetDataNames

func (x *DataSet) GetDataNames() []string

func (*DataSet) GetDataSources

func (x *DataSet) GetDataSources() []*DataSource

func (*DataSet) GetName

func (x *DataSet) GetName() string

func (*DataSet) GetState

func (x *DataSet) GetState() DataSet_State

func (*DataSet) GetStatus

func (x *DataSet) GetStatus() *status.Status

func (*DataSet) GetTtl

func (x *DataSet) GetTtl() *durationpb.Duration

func (*DataSet) ProtoMessage

func (*DataSet) ProtoMessage()

func (*DataSet) ProtoReflect

func (x *DataSet) ProtoReflect() protoreflect.Message

func (*DataSet) Reset

func (x *DataSet) Reset()

func (*DataSet) String

func (x *DataSet) String() string

type DataSet_State

type DataSet_State int32

DataSet state.

const (
	// Unspecified / undefined state.
	DataSet_STATE_UNSPECIFIED DataSet_State = 0
	// Dataset is unknown to the system; we have never seen this dataset before
	// or we have seen this dataset but have fully GC-ed it.
	DataSet_UNKNOWN DataSet_State = 1
	// Dataset processing is pending.
	DataSet_PENDING DataSet_State = 2
	// Dataset is loading.
	DataSet_LOADING DataSet_State = 3
	// Dataset is loaded and can be queried.
	DataSet_LOADED DataSet_State = 4
	// Dataset is unloading.
	DataSet_UNLOADING DataSet_State = 5
	// Dataset is unloaded and is removed from the system.
	DataSet_UNLOADED DataSet_State = 6
	// Dataset processing failed.
	DataSet_FAILED DataSet_State = 7
)

func (DataSet_State) Descriptor

func (DataSet_State) Enum

func (x DataSet_State) Enum() *DataSet_State

func (DataSet_State) EnumDescriptor deprecated

func (DataSet_State) EnumDescriptor() ([]byte, []int)

Deprecated: Use DataSet_State.Descriptor instead.

func (DataSet_State) Number

func (DataSet_State) String

func (x DataSet_State) String() string

func (DataSet_State) Type

type DataSource

type DataSource struct {

	// Data source URI.
	//
	// 1) Google Cloud Storage files (JSON) are defined in the following form.
	// `gs://bucket_name/object_name`. For more information on Cloud Storage URIs,
	// please see https://cloud.google.com/storage/docs/reference-uris.
	Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"`
	// For BigQuery inputs defines the columns that should be used for dimensions
	// (including time and group ID).
	BqMapping *BigqueryMapping `protobuf:"bytes,2,opt,name=bq_mapping,json=bqMapping,proto3" json:"bq_mapping,omitempty"`
	// contains filtered or unexported fields
}

A data source consists of multiple [Event][google.cloud.timeseriesinsights.v1.Event] objects stored on Cloud Storage. Each Event should be in JSON format, with one Event per line, also known as JSON Lines format.

func (*DataSource) Descriptor deprecated

func (*DataSource) Descriptor() ([]byte, []int)

Deprecated: Use DataSource.ProtoReflect.Descriptor instead.

func (*DataSource) GetBqMapping

func (x *DataSource) GetBqMapping() *BigqueryMapping

func (*DataSource) GetUri

func (x *DataSource) GetUri() string

func (*DataSource) ProtoMessage

func (*DataSource) ProtoMessage()

func (*DataSource) ProtoReflect

func (x *DataSource) ProtoReflect() protoreflect.Message

func (*DataSource) Reset

func (x *DataSource) Reset()

func (*DataSource) String

func (x *DataSource) String() string

type DeleteDataSetRequest

type DeleteDataSetRequest struct {

	// Required. Dataset name in the format of "projects/{project}/datasets/{dataset}"
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// contains filtered or unexported fields
}

Unload DataSet request from the serving system.

func (*DeleteDataSetRequest) Descriptor deprecated

func (*DeleteDataSetRequest) Descriptor() ([]byte, []int)

Deprecated: Use DeleteDataSetRequest.ProtoReflect.Descriptor instead.

func (*DeleteDataSetRequest) GetName

func (x *DeleteDataSetRequest) GetName() string

func (*DeleteDataSetRequest) ProtoMessage

func (*DeleteDataSetRequest) ProtoMessage()

func (*DeleteDataSetRequest) ProtoReflect

func (x *DeleteDataSetRequest) ProtoReflect() protoreflect.Message

func (*DeleteDataSetRequest) Reset

func (x *DeleteDataSetRequest) Reset()

func (*DeleteDataSetRequest) String

func (x *DeleteDataSetRequest) String() string

type EvaluateSliceRequest

type EvaluateSliceRequest struct {

	// Required. Loaded DataSet to be queried in the format of
	// "projects/{project}/datasets/{dataset}"
	Dataset string `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"`
	// Required. Dimensions with pinned values that specify the slice for which we will
	// fetch the time series.
	PinnedDimensions []*PinnedDimension `protobuf:"bytes,2,rep,name=pinned_dimensions,json=pinnedDimensions,proto3" json:"pinned_dimensions,omitempty"`
	// Required. This is the point in time that we want to probe for anomalies.
	//
	// See documentation for
	// [QueryDataSetRequest.detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time].
	DetectionTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=detection_time,json=detectionTime,proto3" json:"detection_time,omitempty"`
	// Parameters controlling how we will build the time series used to predict
	// the [detectionTime][google.cloud.timeseriesinsights.v1.EvaluateSliceRequest.detection_time] value for this slice.
	TimeseriesParams *TimeseriesParams `protobuf:"bytes,4,opt,name=timeseries_params,json=timeseriesParams,proto3" json:"timeseries_params,omitempty"`
	// Parameters that control the time series forecasting models, such as the
	// sensitivity of the anomaly detection.
	ForecastParams *ForecastParams `protobuf:"bytes,5,opt,name=forecast_params,json=forecastParams,proto3" json:"forecast_params,omitempty"`
	// contains filtered or unexported fields
}

Request for evaluateSlice.

func (*EvaluateSliceRequest) Descriptor deprecated

func (*EvaluateSliceRequest) Descriptor() ([]byte, []int)

Deprecated: Use EvaluateSliceRequest.ProtoReflect.Descriptor instead.

func (*EvaluateSliceRequest) GetDataset

func (x *EvaluateSliceRequest) GetDataset() string

func (*EvaluateSliceRequest) GetDetectionTime

func (x *EvaluateSliceRequest) GetDetectionTime() *timestamppb.Timestamp

func (*EvaluateSliceRequest) GetForecastParams

func (x *EvaluateSliceRequest) GetForecastParams() *ForecastParams

func (*EvaluateSliceRequest) GetPinnedDimensions

func (x *EvaluateSliceRequest) GetPinnedDimensions() []*PinnedDimension

func (*EvaluateSliceRequest) GetTimeseriesParams

func (x *EvaluateSliceRequest) GetTimeseriesParams() *TimeseriesParams

func (*EvaluateSliceRequest) ProtoMessage

func (*EvaluateSliceRequest) ProtoMessage()

func (*EvaluateSliceRequest) ProtoReflect

func (x *EvaluateSliceRequest) ProtoReflect() protoreflect.Message

func (*EvaluateSliceRequest) Reset

func (x *EvaluateSliceRequest) Reset()

func (*EvaluateSliceRequest) String

func (x *EvaluateSliceRequest) String() string

type EvaluateTimeseriesRequest

type EvaluateTimeseriesRequest struct {

	// Required. Client project name in the format of 'projects/{project}'.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Evaluate this time series without requiring it was previously loaded in
	// a data set.
	//
	// The evaluated time series point is the last one, analogous to calling
	// evaluateSlice or query with
	// [detectionTime][google.cloud.timeseriesinsights.v1.EvaluateSliceRequest.detection_time] set to
	// `timeseries.point(timeseries.point_size() - 1).time`.
	//
	// The length of the time series must be at least 10.
	//
	// All points must have the same time offset relative to the granularity. For
	// example, if the [granularity][google.cloud.timeseriesinsights.v1.EvaluateTimeseriesRequest.granularity] is "5s", then the following
	// point.time sequences are valid:
	// - "100s", "105s", "120s", "125s" (no offset)
	// - "102s", "107s", "122s", "127s" (offset is "2s")
	// However, the following sequence is invalid as it has inconsistent offsets:
	// - "100s", "105s", "122s", "127s" (offsets are either "0s" or "2s")
	Timeseries *Timeseries `protobuf:"bytes,2,opt,name=timeseries,proto3" json:"timeseries,omitempty"`
	// The granularity of the time series (time distance between two consecutive
	// points).
	Granularity *durationpb.Duration `protobuf:"bytes,3,opt,name=granularity,proto3" json:"granularity,omitempty"`
	// The forecast parameters.
	ForecastParams *ForecastParams `protobuf:"bytes,4,opt,name=forecast_params,json=forecastParams,proto3" json:"forecast_params,omitempty"`
	// contains filtered or unexported fields
}

Request for evaluateTimeseries.

func (*EvaluateTimeseriesRequest) Descriptor deprecated

func (*EvaluateTimeseriesRequest) Descriptor() ([]byte, []int)

Deprecated: Use EvaluateTimeseriesRequest.ProtoReflect.Descriptor instead.

func (*EvaluateTimeseriesRequest) GetForecastParams

func (x *EvaluateTimeseriesRequest) GetForecastParams() *ForecastParams

func (*EvaluateTimeseriesRequest) GetGranularity

func (x *EvaluateTimeseriesRequest) GetGranularity() *durationpb.Duration

func (*EvaluateTimeseriesRequest) GetParent

func (x *EvaluateTimeseriesRequest) GetParent() string

func (*EvaluateTimeseriesRequest) GetTimeseries

func (x *EvaluateTimeseriesRequest) GetTimeseries() *Timeseries

func (*EvaluateTimeseriesRequest) ProtoMessage

func (*EvaluateTimeseriesRequest) ProtoMessage()

func (*EvaluateTimeseriesRequest) ProtoReflect

func (*EvaluateTimeseriesRequest) Reset

func (x *EvaluateTimeseriesRequest) Reset()

func (*EvaluateTimeseriesRequest) String

func (x *EvaluateTimeseriesRequest) String() string

type EvaluatedSlice

type EvaluatedSlice struct {

	// Values for all categorical dimensions that uniquely identify this slice.
	Dimensions []*PinnedDimension `protobuf:"bytes,1,rep,name=dimensions,proto3" json:"dimensions,omitempty"`
	// The actual value at the detection time (see
	// [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time]).
	//
	// **NOTE**: This value can be an estimate, so it should not be used as a
	// source of truth.
	DetectionPointActual *float64 `` /* 132-byte string literal not displayed */
	// The expected value at the detection time, which is obtained by forecasting
	// on the historical time series.
	DetectionPointForecast *float64 `` /* 138-byte string literal not displayed */
	// How much our forecast model expects the detection point actual will
	// deviate from its forecasted value based on how well it fit the input time
	// series.
	//
	// In general, we expect the `detectionPointActual` to
	// be in the `[detectionPointForecast - expectedDeviation,
	// detectionPointForecast + expectedDeviation]` range. The more the actual
	// value is outside this range, the more statistically significant the
	// anomaly is.
	//
	// The expected deviation is always positive.
	ExpectedDeviation *float64 `protobuf:"fixed64,16,opt,name=expected_deviation,json=expectedDeviation,proto3,oneof" json:"expected_deviation,omitempty"`
	// Summarizes how significant the change between the actual and forecasted
	// detection points are compared with the historical patterns observed on the
	// [history][google.cloud.timeseriesinsights.v1.EvaluatedSlice.history] time series.
	//
	// Defined as *|a - f| / (e + nt)*, where:
	//
	// - *a* is the [detectionPointActual][google.cloud.timeseriesinsights.v1.EvaluatedSlice.detection_point_actual].
	// - *f* is the [detectionPointForecast][google.cloud.timeseriesinsights.v1.EvaluatedSlice.detection_point_forecast].
	// - *e* is the [expectedDeviation][google.cloud.timeseriesinsights.v1.EvaluatedSlice.expected_deviation].
	// - *nt* is the [noiseThreshold][google.cloud.timeseriesinsights.v1.ForecastParams.noise_threshold].
	//
	// Anomaly scores between different requests and datasets are comparable. As
	// a guideline, the risk of a slice being an anomaly based on the anomaly
	// score is:
	//
	// - **Very High** if `anomalyScore` > 5.
	// - **High** if the `anomalyScore` is in the [2, 5] range.
	// - **Medium** if the `anomalyScore` is in the [1, 2) range.
	// - **Low** if the `anomalyScore` is < 1.
	//
	// If there were issues evaluating this slice, then the anomaly score will be
	// set to -1.0 and the [status][google.cloud.timeseriesinsights.v1.EvaluatedSlice.status] field will contain details on what
	// went wrong.
	AnomalyScore *float64 `protobuf:"fixed64,17,opt,name=anomaly_score,json=anomalyScore,proto3,oneof" json:"anomaly_score,omitempty"`
	// The actual values in the `[`
	// [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] `-`
	// [forecastHistory][google.cloud.timeseriesinsights.v1.TimeseriesParams.forecast_history]`,`
	// [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] `]` time
	// range.
	//
	// **NOTE**: This field is only populated if
	// [returnTimeseries][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.return_timeseries] is true.
	History *Timeseries `protobuf:"bytes,5,opt,name=history,proto3" json:"history,omitempty"`
	// The forecasted values in the `[`
	// [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] `+`
	// [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity]`,`
	// [forecastParams.horizonTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.forecast_params] `]` time
	// range.
	//
	// **NOTE**: This field is only populated if
	// [returnTimeseries][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.return_timeseries] is true.
	Forecast *Timeseries `protobuf:"bytes,10,opt,name=forecast,proto3" json:"forecast,omitempty"`
	// Evaluation status. Contains an error message if the `anomalyScore` is < 0.
	//
	// Possible error messages:
	//
	// - **"Time series too sparse"**: The returned time series for this slice did
	// not contain enough data points (we require a minimum of 10).
	// - **"Not enough recent time series points"**: The time series contains the
	// minimum of 10 points, but there are not enough close in time to the
	// detection point.
	// - **"Missing detection point data"**: There were no events to be
	// aggregated within the `[detectionTime, detectionTime + granularity]` time
	// interval, so we don't have an actual value with which we can compare our
	// prediction.
	// - **"Data retrieval error"**: We failed to retrieve the time series data
	// for this slice and could not evaluate it successfully. Should be a
	// transient error.
	// - **"Internal server error"**: Internal unexpected error.
	Status *status.Status `protobuf:"bytes,18,opt,name=status,proto3" json:"status,omitempty"`
	// contains filtered or unexported fields
}

Forecast result for a given slice.

func (*EvaluatedSlice) Descriptor deprecated

func (*EvaluatedSlice) Descriptor() ([]byte, []int)

Deprecated: Use EvaluatedSlice.ProtoReflect.Descriptor instead.

func (*EvaluatedSlice) GetAnomalyScore

func (x *EvaluatedSlice) GetAnomalyScore() float64

func (*EvaluatedSlice) GetDetectionPointActual

func (x *EvaluatedSlice) GetDetectionPointActual() float64

func (*EvaluatedSlice) GetDetectionPointForecast

func (x *EvaluatedSlice) GetDetectionPointForecast() float64

func (*EvaluatedSlice) GetDimensions

func (x *EvaluatedSlice) GetDimensions() []*PinnedDimension

func (*EvaluatedSlice) GetExpectedDeviation

func (x *EvaluatedSlice) GetExpectedDeviation() float64

func (*EvaluatedSlice) GetForecast

func (x *EvaluatedSlice) GetForecast() *Timeseries

func (*EvaluatedSlice) GetHistory

func (x *EvaluatedSlice) GetHistory() *Timeseries

func (*EvaluatedSlice) GetStatus

func (x *EvaluatedSlice) GetStatus() *status.Status

func (*EvaluatedSlice) ProtoMessage

func (*EvaluatedSlice) ProtoMessage()

func (*EvaluatedSlice) ProtoReflect

func (x *EvaluatedSlice) ProtoReflect() protoreflect.Message

func (*EvaluatedSlice) Reset

func (x *EvaluatedSlice) Reset()

func (*EvaluatedSlice) String

func (x *EvaluatedSlice) String() string

type Event

type Event struct {

	// Event dimensions.
	Dimensions []*EventDimension `protobuf:"bytes,1,rep,name=dimensions,proto3" json:"dimensions,omitempty"`
	// Event group ID.
	//
	// **NOTE**: JSON encoding should use a string to hold a 64-bit integer value,
	// because a native JSON number holds only 53 binary bits for an integer.
	GroupId int64 `protobuf:"varint,2,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"`
	// Event timestamp.
	EventTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=event_time,json=eventTime,proto3" json:"event_time,omitempty"`
	// contains filtered or unexported fields
}

Represents an entry in a data source.

Each Event has:

  • A timestamp at which the event occurs.
  • One or multiple dimensions.
  • Optionally, a group ID that allows clients to group logically related events (for example, all events representing payments transactions done by a user in a day have the same group ID). If a group ID is not provided, an internal one will be generated based on the content and `eventTime`.

**NOTE**:

  • Internally, we discretize time in equal-sized chunks and we assume an event has a 0 [TimeseriesPoint.value][google.cloud.timeseriesinsights.v1.TimeseriesPoint.value] in a chunk that does not contain any occurrences of an event in the input.
  • The number of Events with the same group ID should be limited.
  • Group ID *cannot* be queried.
  • Group ID does *not* correspond to a user ID or the like. If a user ID is of interest to be queried, use a user ID `dimension` instead.

func (*Event) Descriptor deprecated

func (*Event) Descriptor() ([]byte, []int)

Deprecated: Use Event.ProtoReflect.Descriptor instead.

func (*Event) GetDimensions

func (x *Event) GetDimensions() []*EventDimension

func (*Event) GetEventTime

func (x *Event) GetEventTime() *timestamppb.Timestamp

func (*Event) GetGroupId

func (x *Event) GetGroupId() int64

func (*Event) ProtoMessage

func (*Event) ProtoMessage()

func (*Event) ProtoReflect

func (x *Event) ProtoReflect() protoreflect.Message

func (*Event) Reset

func (x *Event) Reset()

func (*Event) String

func (x *Event) String() string

type EventDimension

type EventDimension struct {

	// Dimension name.
	//
	// **NOTE**: `EventDimension` names must be composed of alphanumeric
	// characters only, and are case insensitive. Unicode characters are *not*
	// supported. The underscore '_' is also allowed.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Dimension value.
	//
	// **NOTE**: All entries of the dimension `name` must have the same `value`
	// type.
	//
	// Types that are assignable to Value:
	//
	//	*EventDimension_StringVal
	//	*EventDimension_LongVal
	//	*EventDimension_BoolVal
	//	*EventDimension_DoubleVal
	Value isEventDimension_Value `protobuf_oneof:"value"`
	// contains filtered or unexported fields
}

Represents an event dimension.

func (*EventDimension) Descriptor deprecated

func (*EventDimension) Descriptor() ([]byte, []int)

Deprecated: Use EventDimension.ProtoReflect.Descriptor instead.

func (*EventDimension) GetBoolVal

func (x *EventDimension) GetBoolVal() bool

func (*EventDimension) GetDoubleVal

func (x *EventDimension) GetDoubleVal() float64

func (*EventDimension) GetLongVal

func (x *EventDimension) GetLongVal() int64

func (*EventDimension) GetName

func (x *EventDimension) GetName() string

func (*EventDimension) GetStringVal

func (x *EventDimension) GetStringVal() string

func (*EventDimension) GetValue

func (m *EventDimension) GetValue() isEventDimension_Value

func (*EventDimension) ProtoMessage

func (*EventDimension) ProtoMessage()

func (*EventDimension) ProtoReflect

func (x *EventDimension) ProtoReflect() protoreflect.Message

func (*EventDimension) Reset

func (x *EventDimension) Reset()

func (*EventDimension) String

func (x *EventDimension) String() string

type EventDimension_BoolVal

type EventDimension_BoolVal struct {
	// Bool representation.
	BoolVal bool `protobuf:"varint,4,opt,name=bool_val,json=boolVal,proto3,oneof"`
}

type EventDimension_DoubleVal

type EventDimension_DoubleVal struct {
	// Double representation.
	DoubleVal float64 `protobuf:"fixed64,5,opt,name=double_val,json=doubleVal,proto3,oneof"`
}

type EventDimension_LongVal

type EventDimension_LongVal struct {
	// Long representation.
	LongVal int64 `protobuf:"varint,3,opt,name=long_val,json=longVal,proto3,oneof"`
}

type EventDimension_StringVal

type EventDimension_StringVal struct {
	// String representation.
	//
	// **NOTE**: String values are case insensitive. Unicode characters are
	// supported.
	StringVal string `protobuf:"bytes,2,opt,name=string_val,json=stringVal,proto3,oneof"`
}

type ForecastParams

type ForecastParams struct {

	// Optional. Penalize variations between the actual and forecasted values smaller than
	// this. For more information about how this parameter affects the score, see
	// the [anomalyScore](EvaluatedSlice.anomaly_score) formula.
	//
	// Intuitively, anomaly scores summarize how statistically significant the
	// change between the actual and forecasted value is compared with what we
	// expect the change to be (see
	// [expectedDeviation](EvaluatedSlice.expected_deviation)). However, in
	// practice, depending on the application, changes smaller than certain
	// absolute values, while statistically significant, may not be important.
	//
	// This parameter allows us to penalize such low absolute value changes.
	//
	// Must be in the (0.0, inf) range.
	//
	// If unspecified, it defaults to 0.000001.
	NoiseThreshold *float64 `protobuf:"fixed64,12,opt,name=noise_threshold,json=noiseThreshold,proto3,oneof" json:"noise_threshold,omitempty"`
	// Optional. Specifying any known seasonality/periodicity in the time series
	// for the slices we will analyze can improve the quality of the results.
	//
	// If unsure, simply leave it unspecified by not setting a value for this
	// field.
	//
	// If your time series has multiple seasonal patterns, then set it to the most
	// granular one (e.g. if it has daily and weekly patterns, set this to DAILY).
	SeasonalityHint ForecastParams_Period `` /* 170-byte string literal not displayed */
	// Optional. The length of the returned [forecasted
	// timeseries][EvaluatedSlice.forecast].
	//
	// This duration is currently capped at 100 x
	// [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity].
	//
	// Example: If the detection point is set to "2020-12-27T00:00:00Z", the
	// [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity] to "3600s" and the
	// horizon_duration to "10800s", then we will generate 3 time
	// series points (from "2020-12-27T01:00:00Z" to "2020-12-27T04:00:00Z"), for
	// which we will return their forecasted values.
	//
	// Note: The horizon time is only used for forecasting, not for anomaly
	// detection. To detect anomalies for multiple points of time,
	// simply send multiple queries with those as
	// [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time].
	HorizonDuration *durationpb.Duration `protobuf:"bytes,13,opt,name=horizon_duration,json=horizonDuration,proto3" json:"horizon_duration,omitempty"`
	// contains filtered or unexported fields
}

Parameters that control the sensitivity and other options for the time series forecast.

func (*ForecastParams) Descriptor deprecated

func (*ForecastParams) Descriptor() ([]byte, []int)

Deprecated: Use ForecastParams.ProtoReflect.Descriptor instead.

func (*ForecastParams) GetHorizonDuration

func (x *ForecastParams) GetHorizonDuration() *durationpb.Duration

func (*ForecastParams) GetNoiseThreshold

func (x *ForecastParams) GetNoiseThreshold() float64

func (*ForecastParams) GetSeasonalityHint

func (x *ForecastParams) GetSeasonalityHint() ForecastParams_Period

func (*ForecastParams) ProtoMessage

func (*ForecastParams) ProtoMessage()

func (*ForecastParams) ProtoReflect

func (x *ForecastParams) ProtoReflect() protoreflect.Message

func (*ForecastParams) Reset

func (x *ForecastParams) Reset()

func (*ForecastParams) String

func (x *ForecastParams) String() string

type ForecastParams_Period

type ForecastParams_Period int32

A time period of a fixed interval.

const (
	// Unknown or simply not given.
	ForecastParams_PERIOD_UNSPECIFIED ForecastParams_Period = 0
	// 1 hour
	ForecastParams_HOURLY ForecastParams_Period = 5
	// 24 hours
	ForecastParams_DAILY ForecastParams_Period = 1
	// 7 days
	ForecastParams_WEEKLY ForecastParams_Period = 2
	// 30 days
	ForecastParams_MONTHLY ForecastParams_Period = 3
	// 365 days
	ForecastParams_YEARLY ForecastParams_Period = 4
)

func (ForecastParams_Period) Descriptor

func (ForecastParams_Period) Enum

func (ForecastParams_Period) EnumDescriptor deprecated

func (ForecastParams_Period) EnumDescriptor() ([]byte, []int)

Deprecated: Use ForecastParams_Period.Descriptor instead.

func (ForecastParams_Period) Number

func (ForecastParams_Period) String

func (x ForecastParams_Period) String() string

func (ForecastParams_Period) Type

type ListDataSetsRequest

type ListDataSetsRequest struct {

	// Required. Project owning the DataSet in the format of "projects/{project}".
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// Number of results to return in the list.
	PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// Token to provide to skip to a particular spot in the list.
	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// contains filtered or unexported fields
}

List the DataSets created by the current project.

func (*ListDataSetsRequest) Descriptor deprecated

func (*ListDataSetsRequest) Descriptor() ([]byte, []int)

Deprecated: Use ListDataSetsRequest.ProtoReflect.Descriptor instead.

func (*ListDataSetsRequest) GetPageSize

func (x *ListDataSetsRequest) GetPageSize() int32

func (*ListDataSetsRequest) GetPageToken

func (x *ListDataSetsRequest) GetPageToken() string

func (*ListDataSetsRequest) GetParent

func (x *ListDataSetsRequest) GetParent() string

func (*ListDataSetsRequest) ProtoMessage

func (*ListDataSetsRequest) ProtoMessage()

func (*ListDataSetsRequest) ProtoReflect

func (x *ListDataSetsRequest) ProtoReflect() protoreflect.Message

func (*ListDataSetsRequest) Reset

func (x *ListDataSetsRequest) Reset()

func (*ListDataSetsRequest) String

func (x *ListDataSetsRequest) String() string

type ListDataSetsResponse

type ListDataSetsResponse struct {

	// The list of created DataSets.
	Datasets []*DataSet `protobuf:"bytes,1,rep,name=datasets,proto3" json:"datasets,omitempty"`
	// Token to receive the next page of results.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	// contains filtered or unexported fields
}

Created DataSets list response.

func (*ListDataSetsResponse) Descriptor deprecated

func (*ListDataSetsResponse) Descriptor() ([]byte, []int)

Deprecated: Use ListDataSetsResponse.ProtoReflect.Descriptor instead.

func (*ListDataSetsResponse) GetDatasets

func (x *ListDataSetsResponse) GetDatasets() []*DataSet

func (*ListDataSetsResponse) GetNextPageToken

func (x *ListDataSetsResponse) GetNextPageToken() string

func (*ListDataSetsResponse) ProtoMessage

func (*ListDataSetsResponse) ProtoMessage()

func (*ListDataSetsResponse) ProtoReflect

func (x *ListDataSetsResponse) ProtoReflect() protoreflect.Message

func (*ListDataSetsResponse) Reset

func (x *ListDataSetsResponse) Reset()

func (*ListDataSetsResponse) String

func (x *ListDataSetsResponse) String() string

type PinnedDimension

type PinnedDimension struct {

	// The name of the dimension for which we are fixing its value.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Dimension value.
	//
	// **NOTE**: The `value` type must match that in the data with the same
	// `dimension` as name.
	//
	// Types that are assignable to Value:
	//
	//	*PinnedDimension_StringVal
	//	*PinnedDimension_BoolVal
	Value isPinnedDimension_Value `protobuf_oneof:"value"`
	// contains filtered or unexported fields
}

A categorical dimension fixed to a certain value.

func (*PinnedDimension) Descriptor deprecated

func (*PinnedDimension) Descriptor() ([]byte, []int)

Deprecated: Use PinnedDimension.ProtoReflect.Descriptor instead.

func (*PinnedDimension) GetBoolVal

func (x *PinnedDimension) GetBoolVal() bool

func (*PinnedDimension) GetName

func (x *PinnedDimension) GetName() string

func (*PinnedDimension) GetStringVal

func (x *PinnedDimension) GetStringVal() string

func (*PinnedDimension) GetValue

func (m *PinnedDimension) GetValue() isPinnedDimension_Value

func (*PinnedDimension) ProtoMessage

func (*PinnedDimension) ProtoMessage()

func (*PinnedDimension) ProtoReflect

func (x *PinnedDimension) ProtoReflect() protoreflect.Message

func (*PinnedDimension) Reset

func (x *PinnedDimension) Reset()

func (*PinnedDimension) String

func (x *PinnedDimension) String() string

type PinnedDimension_BoolVal

type PinnedDimension_BoolVal struct {
	// A bool value. This can be used for [dimensions][google.cloud.timeseriesinsights.v1.EventDimension], which
	// have their value field set to [bool_val][google.cloud.timeseriesinsights.v1.EventDimension.bool_val].
	BoolVal bool `protobuf:"varint,3,opt,name=bool_val,json=boolVal,proto3,oneof"`
}

type PinnedDimension_StringVal

type PinnedDimension_StringVal struct {
	// A string value. This can be used for [dimensions][google.cloud.timeseriesinsights.v1.EventDimension], which
	// have their value field set to [string_val][google.cloud.timeseriesinsights.v1.EventDimension.string_val].
	StringVal string `protobuf:"bytes,2,opt,name=string_val,json=stringVal,proto3,oneof"`
}

type QueryDataSetRequest

type QueryDataSetRequest struct {

	// Required. Loaded DataSet to be queried in the format of
	// "projects/{project}/datasets/{dataset}"
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Required. This is the point in time that we want to probe for anomalies.
	//
	// The corresponding [TimeseriesPoint][google.cloud.timeseriesinsights.v1.TimeseriesPoint] is referred to as the
	// detection point.
	//
	// **NOTE**: As with any other time series point, the value is given by
	// aggregating all events in the slice that are in the
	// [detectionTime, detectionTime + granularity) time interval, where
	// the granularity is specified in the
	// [timeseriesParams.granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity] field.
	DetectionTime *timestamppb.Timestamp `protobuf:"bytes,11,opt,name=detection_time,json=detectionTime,proto3" json:"detection_time,omitempty"`
	// How many slices are returned in
	// [QueryDataSetResponse.slices][google.cloud.timeseriesinsights.v1.QueryDataSetResponse.slices].
	//
	// The returned slices are tentatively the ones with the highest
	// [anomaly scores][google.cloud.timeseriesinsights.v1.EvaluatedSlice.anomaly_score] in the dataset that match
	// the query, but it is not guaranteed.
	//
	// Reducing this number will improve query performance, both in terms of
	// latency and resource usage.
	//
	// Defaults to 50.
	NumReturnedSlices *int32 `protobuf:"varint,13,opt,name=num_returned_slices,json=numReturnedSlices,proto3,oneof" json:"num_returned_slices,omitempty"`
	// Parameters controlling how we will split the dataset into the slices that
	// we will analyze.
	SlicingParams *SlicingParams `protobuf:"bytes,9,opt,name=slicing_params,json=slicingParams,proto3" json:"slicing_params,omitempty"`
	// Parameters controlling how we will build the time series used to predict
	// the [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] value for each slice.
	TimeseriesParams *TimeseriesParams `protobuf:"bytes,10,opt,name=timeseries_params,json=timeseriesParams,proto3" json:"timeseries_params,omitempty"`
	// Parameters that control the time series forecasting models, such as the
	// sensitivity of the anomaly detection.
	ForecastParams *ForecastParams `protobuf:"bytes,5,opt,name=forecast_params,json=forecastParams,proto3" json:"forecast_params,omitempty"`
	// If specified, we will return the actual and forecasted time series for all
	// returned slices.
	//
	// The time series are returned in the
	// [EvaluatedSlice.history][google.cloud.timeseriesinsights.v1.EvaluatedSlice.history] and
	// [EvaluatedSlice.forecast][google.cloud.timeseriesinsights.v1.EvaluatedSlice.forecast] fields.
	ReturnTimeseries bool `protobuf:"varint,8,opt,name=return_timeseries,json=returnTimeseries,proto3" json:"return_timeseries,omitempty"`
	// contains filtered or unexported fields
}

Request for performing a query against a loaded DataSet.

func (*QueryDataSetRequest) Descriptor deprecated

func (*QueryDataSetRequest) Descriptor() ([]byte, []int)

Deprecated: Use QueryDataSetRequest.ProtoReflect.Descriptor instead.

func (*QueryDataSetRequest) GetDetectionTime

func (x *QueryDataSetRequest) GetDetectionTime() *timestamppb.Timestamp

func (*QueryDataSetRequest) GetForecastParams

func (x *QueryDataSetRequest) GetForecastParams() *ForecastParams

func (*QueryDataSetRequest) GetName

func (x *QueryDataSetRequest) GetName() string

func (*QueryDataSetRequest) GetNumReturnedSlices

func (x *QueryDataSetRequest) GetNumReturnedSlices() int32

func (*QueryDataSetRequest) GetReturnTimeseries

func (x *QueryDataSetRequest) GetReturnTimeseries() bool

func (*QueryDataSetRequest) GetSlicingParams

func (x *QueryDataSetRequest) GetSlicingParams() *SlicingParams

func (*QueryDataSetRequest) GetTimeseriesParams

func (x *QueryDataSetRequest) GetTimeseriesParams() *TimeseriesParams

func (*QueryDataSetRequest) ProtoMessage

func (*QueryDataSetRequest) ProtoMessage()

func (*QueryDataSetRequest) ProtoReflect

func (x *QueryDataSetRequest) ProtoReflect() protoreflect.Message

func (*QueryDataSetRequest) Reset

func (x *QueryDataSetRequest) Reset()

func (*QueryDataSetRequest) String

func (x *QueryDataSetRequest) String() string

type QueryDataSetResponse

type QueryDataSetResponse struct {

	// Loaded DataSet that was queried.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Slices sorted in descending order by their
	// [anomalyScore][google.cloud.timeseriesinsights.v1.EvaluatedSlice.anomaly_score].
	//
	// At most [numReturnedSlices][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.num_returned_slices]
	// slices are present in this field.
	Slices []*EvaluatedSlice `protobuf:"bytes,3,rep,name=slices,proto3" json:"slices,omitempty"`
	// contains filtered or unexported fields
}

Response for a query executed by the system.

func (*QueryDataSetResponse) Descriptor deprecated

func (*QueryDataSetResponse) Descriptor() ([]byte, []int)

Deprecated: Use QueryDataSetResponse.ProtoReflect.Descriptor instead.

func (*QueryDataSetResponse) GetName

func (x *QueryDataSetResponse) GetName() string

func (*QueryDataSetResponse) GetSlices

func (x *QueryDataSetResponse) GetSlices() []*EvaluatedSlice

func (*QueryDataSetResponse) ProtoMessage

func (*QueryDataSetResponse) ProtoMessage()

func (*QueryDataSetResponse) ProtoReflect

func (x *QueryDataSetResponse) ProtoReflect() protoreflect.Message

func (*QueryDataSetResponse) Reset

func (x *QueryDataSetResponse) Reset()

func (*QueryDataSetResponse) String

func (x *QueryDataSetResponse) String() string

type SlicingParams

type SlicingParams struct {

	// Required. Dimensions over which we will group the events in slices. The names
	// specified here come from the
	// [EventDimension.name][google.cloud.timeseriesinsights.v1.EventDimension.name] field. At least
	// one dimension name must be specified. All dimension names that do not exist
	// in the queried `DataSet` will be ignored.
	//
	// Currently only dimensions that hold string values can be specified here.
	DimensionNames []string `protobuf:"bytes,1,rep,name=dimension_names,json=dimensionNames,proto3" json:"dimension_names,omitempty"`
	// Optional. We will only analyze slices for which
	// [EvaluatedSlice.dimensions][google.cloud.timeseriesinsights.v1.EvaluatedSlice.dimensions] contain all of the
	// following pinned dimensions. A query with a pinned dimension `{ name: "d3"
	// stringVal: "v3" }` will only analyze events which contain the dimension `{
	// name: "d3" stringVal: "v3" }`.
	// The [pinnedDimensions][google.cloud.timeseriesinsights.v1.SlicingParams.pinned_dimensions] and
	// [dimensionNames][google.cloud.timeseriesinsights.v1.SlicingParams.dimension_names] fields can **not**
	// share the same dimension names.
	//
	// Example a valid specification:
	//
	// ```json
	//
	//	{
	//	  dimensionNames: ["d1", "d2"],
	//	  pinnedDimensions: [
	//	    { name: "d3" stringVal: "v3" },
	//	    { name: "d4" stringVal: "v4" }
	//	  ]
	//	}
	//
	// ```
	//
	// In the previous example we will slice the dataset by dimensions "d1",
	// "d2", "d3" and "d4", but we will only analyze slices for which "d3=v3" and
	// "d4=v4".
	//
	// The following example is **invalid** as "d2" is present in both
	// dimensionNames and pinnedDimensions:
	//
	// ```json
	//
	//	{
	//	  dimensionNames: ["d1", "d2"],
	//	  pinnedDimensions: [
	//	    { name: "d2" stringVal: "v2" },
	//	    { name: "d4" stringVal: "v4" }
	//	  ]
	//	}
	//
	// ```
	PinnedDimensions []*PinnedDimension `protobuf:"bytes,2,rep,name=pinned_dimensions,json=pinnedDimensions,proto3" json:"pinned_dimensions,omitempty"`
	// contains filtered or unexported fields
}

Parameters that control how we slice the dataset and, optionally, filter slices that have some specific values on some dimensions (pinned dimensions).

func (*SlicingParams) Descriptor deprecated

func (*SlicingParams) Descriptor() ([]byte, []int)

Deprecated: Use SlicingParams.ProtoReflect.Descriptor instead.

func (*SlicingParams) GetDimensionNames

func (x *SlicingParams) GetDimensionNames() []string

func (*SlicingParams) GetPinnedDimensions

func (x *SlicingParams) GetPinnedDimensions() []*PinnedDimension

func (*SlicingParams) ProtoMessage

func (*SlicingParams) ProtoMessage()

func (*SlicingParams) ProtoReflect

func (x *SlicingParams) ProtoReflect() protoreflect.Message

func (*SlicingParams) Reset

func (x *SlicingParams) Reset()

func (*SlicingParams) String

func (x *SlicingParams) String() string

type Timeseries

type Timeseries struct {

	// The points in this time series, ordered by their timestamp.
	Point []*TimeseriesPoint `protobuf:"bytes,1,rep,name=point,proto3" json:"point,omitempty"`
	// contains filtered or unexported fields
}

A time series.

func (*Timeseries) Descriptor deprecated

func (*Timeseries) Descriptor() ([]byte, []int)

Deprecated: Use Timeseries.ProtoReflect.Descriptor instead.

func (*Timeseries) GetPoint

func (x *Timeseries) GetPoint() []*TimeseriesPoint

func (*Timeseries) ProtoMessage

func (*Timeseries) ProtoMessage()

func (*Timeseries) ProtoReflect

func (x *Timeseries) ProtoReflect() protoreflect.Message

func (*Timeseries) Reset

func (x *Timeseries) Reset()

func (*Timeseries) String

func (x *Timeseries) String() string

type TimeseriesInsightsControllerClient

type TimeseriesInsightsControllerClient interface {
	// Lists [DataSets][google.cloud.timeseriesinsights.v1.DataSet] under the project.
	//
	// The order of the results is unspecified but deterministic. Newly created
	// [DataSets][google.cloud.timeseriesinsights.v1.DataSet] will not necessarily be added to the end
	// of this list.
	ListDataSets(ctx context.Context, in *ListDataSetsRequest, opts ...grpc.CallOption) (*ListDataSetsResponse, error)
	// Create a [DataSet][google.cloud.timeseriesinsights.v1.DataSet] from data stored on Cloud
	// Storage.
	//
	// The data must stay immutable while we process the
	// [DataSet][google.cloud.timeseriesinsights.v1.DataSet] creation; otherwise, undefined outcomes
	// might result.  For more information, see [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	CreateDataSet(ctx context.Context, in *CreateDataSetRequest, opts ...grpc.CallOption) (*DataSet, error)
	// Delete a [DataSet][google.cloud.timeseriesinsights.v1.DataSet] from the system.
	//
	// **NOTE**: If the [DataSet][google.cloud.timeseriesinsights.v1.DataSet] is still being
	// processed, it will be aborted and deleted.
	DeleteDataSet(ctx context.Context, in *DeleteDataSetRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// Append events to a `LOADED` [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	AppendEvents(ctx context.Context, in *AppendEventsRequest, opts ...grpc.CallOption) (*AppendEventsResponse, error)
	// Execute a Timeseries Insights query over a loaded
	// [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	QueryDataSet(ctx context.Context, in *QueryDataSetRequest, opts ...grpc.CallOption) (*QueryDataSetResponse, error)
	// Evaluate an explicit slice from a loaded [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	EvaluateSlice(ctx context.Context, in *EvaluateSliceRequest, opts ...grpc.CallOption) (*EvaluatedSlice, error)
	// Evaluate an explicit timeseries.
	EvaluateTimeseries(ctx context.Context, in *EvaluateTimeseriesRequest, opts ...grpc.CallOption) (*EvaluatedSlice, error)
}

TimeseriesInsightsControllerClient is the client API for TimeseriesInsightsController service.

For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.

type TimeseriesInsightsControllerServer

type TimeseriesInsightsControllerServer interface {
	// Lists [DataSets][google.cloud.timeseriesinsights.v1.DataSet] under the project.
	//
	// The order of the results is unspecified but deterministic. Newly created
	// [DataSets][google.cloud.timeseriesinsights.v1.DataSet] will not necessarily be added to the end
	// of this list.
	ListDataSets(context.Context, *ListDataSetsRequest) (*ListDataSetsResponse, error)
	// Create a [DataSet][google.cloud.timeseriesinsights.v1.DataSet] from data stored on Cloud
	// Storage.
	//
	// The data must stay immutable while we process the
	// [DataSet][google.cloud.timeseriesinsights.v1.DataSet] creation; otherwise, undefined outcomes
	// might result.  For more information, see [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	CreateDataSet(context.Context, *CreateDataSetRequest) (*DataSet, error)
	// Delete a [DataSet][google.cloud.timeseriesinsights.v1.DataSet] from the system.
	//
	// **NOTE**: If the [DataSet][google.cloud.timeseriesinsights.v1.DataSet] is still being
	// processed, it will be aborted and deleted.
	DeleteDataSet(context.Context, *DeleteDataSetRequest) (*emptypb.Empty, error)
	// Append events to a `LOADED` [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	AppendEvents(context.Context, *AppendEventsRequest) (*AppendEventsResponse, error)
	// Execute a Timeseries Insights query over a loaded
	// [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	QueryDataSet(context.Context, *QueryDataSetRequest) (*QueryDataSetResponse, error)
	// Evaluate an explicit slice from a loaded [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
	EvaluateSlice(context.Context, *EvaluateSliceRequest) (*EvaluatedSlice, error)
	// Evaluate an explicit timeseries.
	EvaluateTimeseries(context.Context, *EvaluateTimeseriesRequest) (*EvaluatedSlice, error)
}

TimeseriesInsightsControllerServer is the server API for TimeseriesInsightsController service.

type TimeseriesParams

type TimeseriesParams struct {

	// Required. How long should we go in the past when fetching the timeline used for
	// forecasting each slice.
	//
	// This is used in combination with the
	// [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] parameter.
	// The time series we construct will have the following time range:
	// `[detectionTime - forecastHistory, detectionTime + granularity]`.
	//
	// The forecast history might be rounded up, so that a multiple of
	// `granularity` is used to process the query.
	//
	// Note: If there are not enough events in the
	// `[detectionTime - forecastHistory, detectionTime + granularity]` time
	// interval, the slice evaluation can fail. For more information, see
	// [EvaluatedSlice.status][google.cloud.timeseriesinsights.v1.EvaluatedSlice.status].
	ForecastHistory *durationpb.Duration `protobuf:"bytes,1,opt,name=forecast_history,json=forecastHistory,proto3" json:"forecast_history,omitempty"`
	// Required. The time granularity of the time series (on the x-axis). Each time series
	// point starting at time T will aggregate all events for a particular slice
	// in *[T, T + granularity)* time windows.
	//
	// Note: The aggregation is decided based on the
	// [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] parameter.
	//
	// This granularity defines the query-time aggregation windows and is not
	// necessarily related to any event time granularity in the raw data (though
	// we do recommend that the query-time granularity is not finer than the
	// ingestion-time one).
	//
	// Currently, the minimal supported granularity is 10 seconds.
	Granularity *durationpb.Duration `protobuf:"bytes,2,opt,name=granularity,proto3" json:"granularity,omitempty"`
	// Optional. Denotes the [name][google.cloud.timeseriesinsights.v1.EventDimension.name] of a numerical
	// dimension that will have its values aggregated to compute the y-axis of the
	// time series.
	//
	// The aggregation method must also be specified by setting the
	// [metricAggregationMethod][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric_aggregation_method]
	// field.
	//
	// Note: Currently, if the aggregation method is unspecified, we will
	// default to SUM for backward compatibility reasons, but new implementations
	// should set the
	// [metricAggregationMethod][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric_aggregation_method]
	// explicitly.
	//
	// If the metric is unspecified, we will use the number of events that each
	// time series point contains as the point value.
	//
	// Example: Let's assume we have the following three events in our dataset:
	// “`json
	//
	//	{
	//	  eventTime: "2020-12-27T00:00:00Z",
	//	  dimensions: [
	//	    { name: "d1" stringVal: "v1" },
	//	    { name: "d2" stringVal: "v2" }
	//	    { name: "m1" longVal: 100 }
	//	    { name: "m2" longVal: 11 }
	//	  ]
	//	},
	//
	//	{
	//	  eventTime: "2020-12-27T00:10:00Z",
	//	  dimensions: [
	//	    { name: "d1" stringVal: "v1" },
	//	    { name: "d2" stringVal: "v2" }
	//	    { name: "m1" longVal: 200 }
	//	    { name: "m2" longVal: 22 }
	//	  ]
	//	},
	//
	//	{
	//	  eventTime: "2020-12-27T00:20:00Z",
	//	  dimensions: [
	//	    { name: "d1" stringVal: "v1" },
	//	    { name: "d2" stringVal: "v2" }
	//	    { name: "m1" longVal: 300 }
	//	    { name: "m2" longVal: 33 }
	//	  ]
	//	}
	//
	// ```
	//
	// These events are all within the same hour, spaced 10 minutes between each
	// of them. Assuming our [QueryDataSetRequest][google.cloud.timeseriesinsights.v1.QueryDataSetRequest] had set
	// [slicingParams.dimensionNames][google.cloud.timeseriesinsights.v1.SlicingParams.dimension_names] to ["d1",
	// "d2"] and [timeseries_params.granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity] to
	// "3600s", then all the previous events will be aggregated into the same
	// [timeseries point][google.cloud.timeseriesinsights.v1.TimeseriesPoint].
	//
	// The time series point that they're all part of will have the
	// [time][google.cloud.timeseriesinsights.v1.TimeseriesPoint.time] set to "2020-12-27T00:00:00Z" and the
	// [value][google.cloud.timeseriesinsights.v1.TimeseriesPoint.value] populated based on this metric field:
	//
	// - If the metric is set to "m1" and metric_aggregation_method to SUM, then
	// the value of the point will be 600.
	// - If the metric is set to "m2" and metric_aggregation_method to SUM, then
	// the value of the point will be 66.
	// - If the metric is set to "m1" and metric_aggregation_method to AVERAGE,
	// then the value of the point will be 200.
	// - If the metric is set to "m2" and metric_aggregation_method to AVERAGE,
	// then the value of the point will be 22.
	// - If the metric field is "" or unspecified, then the value of the point
	// will be 3, as we will simply count the events.
	Metric *string `protobuf:"bytes,4,opt,name=metric,proto3,oneof" json:"metric,omitempty"`
	// Optional. Together with the [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] field, specifies how
	// we will aggregate multiple events to obtain the value of a time series
	// point. See the [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] documentation for more
	// details.
	//
	// If the metric is not specified or "", then this field will be ignored.
	MetricAggregationMethod TimeseriesParams_AggregationMethod `` /* 208-byte string literal not displayed */
	// contains filtered or unexported fields
}

Parameters that control how we construct the time series for each slice.

func (*TimeseriesParams) Descriptor deprecated

func (*TimeseriesParams) Descriptor() ([]byte, []int)

Deprecated: Use TimeseriesParams.ProtoReflect.Descriptor instead.

func (*TimeseriesParams) GetForecastHistory

func (x *TimeseriesParams) GetForecastHistory() *durationpb.Duration

func (*TimeseriesParams) GetGranularity

func (x *TimeseriesParams) GetGranularity() *durationpb.Duration

func (*TimeseriesParams) GetMetric

func (x *TimeseriesParams) GetMetric() string

func (*TimeseriesParams) GetMetricAggregationMethod

func (x *TimeseriesParams) GetMetricAggregationMethod() TimeseriesParams_AggregationMethod

func (*TimeseriesParams) ProtoMessage

func (*TimeseriesParams) ProtoMessage()

func (*TimeseriesParams) ProtoReflect

func (x *TimeseriesParams) ProtoReflect() protoreflect.Message

func (*TimeseriesParams) Reset

func (x *TimeseriesParams) Reset()

func (*TimeseriesParams) String

func (x *TimeseriesParams) String() string

type TimeseriesParams_AggregationMethod

type TimeseriesParams_AggregationMethod int32

Methods by which we can aggregate multiple events by a given [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric].

const (
	// Unspecified.
	TimeseriesParams_AGGREGATION_METHOD_UNSPECIFIED TimeseriesParams_AggregationMethod = 0
	// Aggregate multiple events by summing up the values found in the
	// [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] dimension.
	TimeseriesParams_SUM TimeseriesParams_AggregationMethod = 1
	// Aggregate multiple events by averaging out the values found in the
	// [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] dimension.
	TimeseriesParams_AVERAGE TimeseriesParams_AggregationMethod = 2
)

func (TimeseriesParams_AggregationMethod) Descriptor

func (TimeseriesParams_AggregationMethod) Enum

func (TimeseriesParams_AggregationMethod) EnumDescriptor deprecated

func (TimeseriesParams_AggregationMethod) EnumDescriptor() ([]byte, []int)

Deprecated: Use TimeseriesParams_AggregationMethod.Descriptor instead.

func (TimeseriesParams_AggregationMethod) Number

func (TimeseriesParams_AggregationMethod) String

func (TimeseriesParams_AggregationMethod) Type

type TimeseriesPoint

type TimeseriesPoint struct {

	// The timestamp of this point.
	Time *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=time,proto3" json:"time,omitempty"`
	// The value for this point.
	//
	// It is computed by aggregating all events in the associated slice that are
	// in the `[time, time + granularity)` range (see
	// [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity]) using the specified
	// [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric].
	Value *float64 `protobuf:"fixed64,2,opt,name=value,proto3,oneof" json:"value,omitempty"`
	// contains filtered or unexported fields
}

A point in a time series.

func (*TimeseriesPoint) Descriptor deprecated

func (*TimeseriesPoint) Descriptor() ([]byte, []int)

Deprecated: Use TimeseriesPoint.ProtoReflect.Descriptor instead.

func (*TimeseriesPoint) GetTime

func (x *TimeseriesPoint) GetTime() *timestamppb.Timestamp

func (*TimeseriesPoint) GetValue

func (x *TimeseriesPoint) GetValue() float64

func (*TimeseriesPoint) ProtoMessage

func (*TimeseriesPoint) ProtoMessage()

func (*TimeseriesPoint) ProtoReflect

func (x *TimeseriesPoint) ProtoReflect() protoreflect.Message

func (*TimeseriesPoint) Reset

func (x *TimeseriesPoint) Reset()

func (*TimeseriesPoint) String

func (x *TimeseriesPoint) String() string

type UnimplementedTimeseriesInsightsControllerServer

type UnimplementedTimeseriesInsightsControllerServer struct {
}

UnimplementedTimeseriesInsightsControllerServer can be embedded to have forward compatible implementations.

func (*UnimplementedTimeseriesInsightsControllerServer) AppendEvents

func (*UnimplementedTimeseriesInsightsControllerServer) CreateDataSet

func (*UnimplementedTimeseriesInsightsControllerServer) DeleteDataSet

func (*UnimplementedTimeseriesInsightsControllerServer) EvaluateSlice

func (*UnimplementedTimeseriesInsightsControllerServer) EvaluateTimeseries

func (*UnimplementedTimeseriesInsightsControllerServer) ListDataSets

func (*UnimplementedTimeseriesInsightsControllerServer) QueryDataSet

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL